@xata.io/client 0.0.0-next.v34f1d64a4f4c1ffd896bbb285ab38efd8315b259 → 0.0.0-next.v403cdd55cb26b69c074dbc07b44daa0c2a0a77b6

This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
package/dist/index.cjs CHANGED
@@ -24,6 +24,1789 @@ const TraceAttributes = {
24
24
  CLOUDFLARE_RAY_ID: "cf.ray"
25
25
  };
26
26
 
27
+ const lookup = [];
28
+ const revLookup = [];
29
+ const code = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
30
+ for (let i = 0, len = code.length; i < len; ++i) {
31
+ lookup[i] = code[i];
32
+ revLookup[code.charCodeAt(i)] = i;
33
+ }
34
+ revLookup["-".charCodeAt(0)] = 62;
35
+ revLookup["_".charCodeAt(0)] = 63;
36
+ function getLens(b64) {
37
+ const len = b64.length;
38
+ if (len % 4 > 0) {
39
+ throw new Error("Invalid string. Length must be a multiple of 4");
40
+ }
41
+ let validLen = b64.indexOf("=");
42
+ if (validLen === -1) validLen = len;
43
+ const placeHoldersLen = validLen === len ? 0 : 4 - validLen % 4;
44
+ return [validLen, placeHoldersLen];
45
+ }
46
+ function _byteLength(_b64, validLen, placeHoldersLen) {
47
+ return (validLen + placeHoldersLen) * 3 / 4 - placeHoldersLen;
48
+ }
49
+ function toByteArray(b64) {
50
+ let tmp;
51
+ const lens = getLens(b64);
52
+ const validLen = lens[0];
53
+ const placeHoldersLen = lens[1];
54
+ const arr = new Uint8Array(_byteLength(b64, validLen, placeHoldersLen));
55
+ let curByte = 0;
56
+ const len = placeHoldersLen > 0 ? validLen - 4 : validLen;
57
+ let i;
58
+ for (i = 0; i < len; i += 4) {
59
+ tmp = revLookup[b64.charCodeAt(i)] << 18 | revLookup[b64.charCodeAt(i + 1)] << 12 | revLookup[b64.charCodeAt(i + 2)] << 6 | revLookup[b64.charCodeAt(i + 3)];
60
+ arr[curByte++] = tmp >> 16 & 255;
61
+ arr[curByte++] = tmp >> 8 & 255;
62
+ arr[curByte++] = tmp & 255;
63
+ }
64
+ if (placeHoldersLen === 2) {
65
+ tmp = revLookup[b64.charCodeAt(i)] << 2 | revLookup[b64.charCodeAt(i + 1)] >> 4;
66
+ arr[curByte++] = tmp & 255;
67
+ }
68
+ if (placeHoldersLen === 1) {
69
+ tmp = revLookup[b64.charCodeAt(i)] << 10 | revLookup[b64.charCodeAt(i + 1)] << 4 | revLookup[b64.charCodeAt(i + 2)] >> 2;
70
+ arr[curByte++] = tmp >> 8 & 255;
71
+ arr[curByte++] = tmp & 255;
72
+ }
73
+ return arr;
74
+ }
75
+ function tripletToBase64(num) {
76
+ return lookup[num >> 18 & 63] + lookup[num >> 12 & 63] + lookup[num >> 6 & 63] + lookup[num & 63];
77
+ }
78
+ function encodeChunk(uint8, start, end) {
79
+ let tmp;
80
+ const output = [];
81
+ for (let i = start; i < end; i += 3) {
82
+ tmp = (uint8[i] << 16 & 16711680) + (uint8[i + 1] << 8 & 65280) + (uint8[i + 2] & 255);
83
+ output.push(tripletToBase64(tmp));
84
+ }
85
+ return output.join("");
86
+ }
87
+ function fromByteArray(uint8) {
88
+ let tmp;
89
+ const len = uint8.length;
90
+ const extraBytes = len % 3;
91
+ const parts = [];
92
+ const maxChunkLength = 16383;
93
+ for (let i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) {
94
+ parts.push(encodeChunk(uint8, i, i + maxChunkLength > len2 ? len2 : i + maxChunkLength));
95
+ }
96
+ if (extraBytes === 1) {
97
+ tmp = uint8[len - 1];
98
+ parts.push(lookup[tmp >> 2] + lookup[tmp << 4 & 63] + "==");
99
+ } else if (extraBytes === 2) {
100
+ tmp = (uint8[len - 2] << 8) + uint8[len - 1];
101
+ parts.push(lookup[tmp >> 10] + lookup[tmp >> 4 & 63] + lookup[tmp << 2 & 63] + "=");
102
+ }
103
+ return parts.join("");
104
+ }
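A minimal round-trip sketch for the base64 helpers above (illustrative only; `toByteArray` and `fromByteArray` stay internal to this bundle):

  const bytes = toByteArray("aGVsbG8=");   // Uint8Array [104, 101, 108, 108, 111], i.e. "hello"
  fromByteArray(bytes);                    // "aGVsbG8="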
105
+
106
+ const K_MAX_LENGTH = 2147483647;
107
+ const MAX_ARGUMENTS_LENGTH = 4096;
108
+ class Buffer extends Uint8Array {
109
+ /**
110
+ * Constructs a new `Buffer` instance.
111
+ *
112
+ * @param value
113
+ * @param encodingOrOffset
114
+ * @param length
115
+ */
116
+ constructor(value, encodingOrOffset, length) {
117
+ if (typeof value === "number") {
118
+ if (typeof encodingOrOffset === "string") {
119
+ throw new TypeError("The first argument must be of type string, received type number");
120
+ }
121
+ if (value < 0) {
122
+ throw new RangeError("The buffer size cannot be negative");
123
+ }
124
+ super(value < 0 ? 0 : Buffer._checked(value) | 0);
125
+ } else if (typeof value === "string") {
126
+ if (typeof encodingOrOffset !== "string") {
127
+ encodingOrOffset = "utf8";
128
+ }
129
+ if (!Buffer.isEncoding(encodingOrOffset)) {
130
+ throw new TypeError("Unknown encoding: " + encodingOrOffset);
131
+ }
132
+ const length2 = Buffer.byteLength(value, encodingOrOffset) | 0;
133
+ super(length2);
134
+ const written = this.write(value, 0, this.length, encodingOrOffset);
135
+ if (written !== length2) {
136
+ throw new TypeError(
137
+ "Number of bytes written did not match expected length (wrote " + written + ", expected " + length2 + ")"
138
+ );
139
+ }
140
+ } else if (ArrayBuffer.isView(value)) {
141
+ if (Buffer._isInstance(value, Uint8Array)) {
142
+ const copy = new Uint8Array(value);
143
+ const array = copy.buffer;
144
+ const byteOffset = copy.byteOffset;
145
+ const length2 = copy.byteLength;
146
+ if (byteOffset < 0 || array.byteLength < byteOffset) {
147
+ throw new RangeError("offset is outside of buffer bounds");
148
+ }
149
+ if (array.byteLength < byteOffset + (length2 || 0)) {
150
+ throw new RangeError("length is outside of buffer bounds");
151
+ }
152
+ super(new Uint8Array(array, byteOffset, length2));
153
+ } else {
154
+ const array = value;
155
+ const length2 = array.length < 0 ? 0 : Buffer._checked(array.length) | 0;
156
+ super(new Uint8Array(length2));
157
+ for (let i = 0; i < length2; i++) {
158
+ this[i] = array[i] & 255;
159
+ }
160
+ }
161
+ } else if (value == null) {
162
+ throw new TypeError(
163
+ "The first argument must be one of type string, Buffer, ArrayBuffer, Array, or Array-like Object. Received type " + typeof value
164
+ );
165
+ } else if (Buffer._isInstance(value, ArrayBuffer) || value && Buffer._isInstance(value.buffer, ArrayBuffer)) {
166
+ const array = value;
167
+ const byteOffset = encodingOrOffset;
168
+ if (byteOffset < 0 || array.byteLength < byteOffset) {
169
+ throw new RangeError("offset is outside of buffer bounds");
170
+ }
171
+ if (array.byteLength < byteOffset + (length || 0)) {
172
+ throw new RangeError("length is outside of buffer bounds");
173
+ }
174
+ super(new Uint8Array(array, byteOffset, length));
175
+ } else if (Array.isArray(value)) {
176
+ const array = value;
177
+ const length2 = array.length < 0 ? 0 : Buffer._checked(array.length) | 0;
178
+ super(new Uint8Array(length2));
179
+ for (let i = 0; i < length2; i++) {
180
+ this[i] = array[i] & 255;
181
+ }
182
+ } else {
183
+ throw new TypeError("Unable to determine the correct way to allocate buffer for type " + typeof value);
184
+ }
185
+ }
186
+ /**
187
+ * Return JSON representation of the buffer.
188
+ */
189
+ toJSON() {
190
+ return {
191
+ type: "Buffer",
192
+ data: Array.prototype.slice.call(this)
193
+ };
194
+ }
195
+ /**
196
+ * Writes `string` to the buffer at `offset` according to the character encoding in `encoding`. The `length`
197
+ * parameter is the number of bytes to write. If the buffer does not contain enough space to fit the entire string,
198
+ * only part of `string` will be written. However, partially encoded characters will not be written.
199
+ *
200
+ * @param string String to write to `buf`.
201
+ * @param offset Number of bytes to skip before starting to write `string`. Default: `0`.
202
+ * @param length Maximum number of bytes to write. Default: `buf.length - offset`.
203
+ * @param encoding The character encoding of `string`. Default: `utf8`.
204
+ */
205
+ write(string, offset, length, encoding) {
206
+ if (typeof offset === "undefined") {
207
+ encoding = "utf8";
208
+ length = this.length;
209
+ offset = 0;
210
+ } else if (typeof length === "undefined" && typeof offset === "string") {
211
+ encoding = offset;
212
+ length = this.length;
213
+ offset = 0;
214
+ } else if (typeof offset === "number" && isFinite(offset)) {
215
+ offset = offset >>> 0;
216
+ if (typeof length === "number" && isFinite(length)) {
217
+ length = length >>> 0;
218
+ encoding ?? (encoding = "utf8");
219
+ } else if (typeof length === "string") {
220
+ encoding = length;
221
+ length = void 0;
222
+ }
223
+ } else {
224
+ throw new Error("Buffer.write(string, encoding, offset[, length]) is no longer supported");
225
+ }
226
+ const remaining = this.length - offset;
227
+ if (typeof length === "undefined" || length > remaining) {
228
+ length = remaining;
229
+ }
230
+ if (string.length > 0 && (length < 0 || offset < 0) || offset > this.length) {
231
+ throw new RangeError("Attempt to write outside buffer bounds");
232
+ }
233
+ encoding || (encoding = "utf8");
234
+ switch (Buffer._getEncoding(encoding)) {
235
+ case "hex":
236
+ return Buffer._hexWrite(this, string, offset, length);
237
+ case "utf8":
238
+ return Buffer._utf8Write(this, string, offset, length);
239
+ case "ascii":
240
+ case "latin1":
241
+ case "binary":
242
+ return Buffer._asciiWrite(this, string, offset, length);
243
+ case "ucs2":
244
+ case "utf16le":
245
+ return Buffer._ucs2Write(this, string, offset, length);
246
+ case "base64":
247
+ return Buffer._base64Write(this, string, offset, length);
248
+ }
249
+ }
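A minimal usage sketch for `write` (assuming the vendored `Buffer` class defined in this diff; the API mirrors Node's `buf.write()`):

  const buf = Buffer.alloc(8);
  buf.write("hi");           // returns 2, the number of bytes written
  buf.write(" there", 2);    // returns 6, written starting at offset 2
  buf.toString();            // "hi there"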
250
+ /**
251
+ * Decodes the buffer to a string according to the specified character encoding.
252
+ * Passing `start` and `end` will decode only a subset of the buffer.
253
+ *
254
+ * Note that if the encoding is `utf8` and a byte sequence in the input is not valid UTF-8, then each invalid byte
255
+ * will be replaced with `U+FFFD`.
256
+ *
257
+ * @param encoding
258
+ * @param start
259
+ * @param end
260
+ */
261
+ toString(encoding, start, end) {
262
+ const length = this.length;
263
+ if (length === 0) {
264
+ return "";
265
+ }
266
+ if (arguments.length === 0) {
267
+ return Buffer._utf8Slice(this, 0, length);
268
+ }
269
+ if (typeof start === "undefined" || start < 0) {
270
+ start = 0;
271
+ }
272
+ if (start > this.length) {
273
+ return "";
274
+ }
275
+ if (typeof end === "undefined" || end > this.length) {
276
+ end = this.length;
277
+ }
278
+ if (end <= 0) {
279
+ return "";
280
+ }
281
+ end >>>= 0;
282
+ start >>>= 0;
283
+ if (end <= start) {
284
+ return "";
285
+ }
286
+ if (!encoding) {
287
+ encoding = "utf8";
288
+ }
289
+ switch (Buffer._getEncoding(encoding)) {
290
+ case "hex":
291
+ return Buffer._hexSlice(this, start, end);
292
+ case "utf8":
293
+ return Buffer._utf8Slice(this, start, end);
294
+ case "ascii":
295
+ return Buffer._asciiSlice(this, start, end);
296
+ case "latin1":
297
+ case "binary":
298
+ return Buffer._latin1Slice(this, start, end);
299
+ case "ucs2":
300
+ case "utf16le":
301
+ return Buffer._utf16leSlice(this, start, end);
302
+ case "base64":
303
+ return Buffer._base64Slice(this, start, end);
304
+ }
305
+ }
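A minimal usage sketch for `toString` with different encodings (assuming the vendored `Buffer` class):

  const buf = Buffer.from("hello", "utf8");
  buf.toString("hex");         // "68656c6c6f"
  buf.toString("base64");      // "aGVsbG8="
  buf.toString("utf8", 1, 3);  // "el"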
306
+ /**
307
+ * Returns true if this buffer is equal to the provided buffer, meaning they contain exactly the same data.
308
+ *
309
+ * @param otherBuffer
310
+ */
311
+ equals(otherBuffer) {
312
+ if (!Buffer.isBuffer(otherBuffer)) {
313
+ throw new TypeError("Argument must be a Buffer");
314
+ }
315
+ if (this === otherBuffer) {
316
+ return true;
317
+ }
318
+ return Buffer.compare(this, otherBuffer) === 0;
319
+ }
320
+ /**
321
+ * Compares the buffer with `otherBuffer` and returns a number indicating whether the buffer comes before, after,
322
+ * or is the same as `otherBuffer` in sort order. Comparison is based on the actual sequence of bytes in each
323
+ * buffer.
324
+ *
325
+ * - `0` is returned if `otherBuffer` is the same as this buffer.
326
+ * - `1` is returned if `otherBuffer` should come before this buffer when sorted.
327
+ * - `-1` is returned if `otherBuffer` should come after this buffer when sorted.
328
+ *
329
+ * @param otherBuffer The buffer to compare to.
330
+ * @param targetStart The offset within `otherBuffer` at which to begin comparison.
331
+ * @param targetEnd The offset within `otherBuffer` at which to end comparison (exclusive).
332
+ * @param sourceStart The offset within this buffer at which to begin comparison.
333
+ * @param sourceEnd The offset within this buffer at which to end the comparison (exclusive).
334
+ */
335
+ compare(otherBuffer, targetStart, targetEnd, sourceStart, sourceEnd) {
336
+ if (Buffer._isInstance(otherBuffer, Uint8Array)) {
337
+ otherBuffer = Buffer.from(otherBuffer, otherBuffer.byteOffset, otherBuffer.byteLength);
338
+ }
339
+ if (!Buffer.isBuffer(otherBuffer)) {
340
+ throw new TypeError("Argument must be a Buffer or Uint8Array");
341
+ }
342
+ targetStart ?? (targetStart = 0);
343
+ targetEnd ?? (targetEnd = otherBuffer ? otherBuffer.length : 0);
344
+ sourceStart ?? (sourceStart = 0);
345
+ sourceEnd ?? (sourceEnd = this.length);
346
+ if (targetStart < 0 || targetEnd > otherBuffer.length || sourceStart < 0 || sourceEnd > this.length) {
347
+ throw new RangeError("Out of range index");
348
+ }
349
+ if (sourceStart >= sourceEnd && targetStart >= targetEnd) {
350
+ return 0;
351
+ }
352
+ if (sourceStart >= sourceEnd) {
353
+ return -1;
354
+ }
355
+ if (targetStart >= targetEnd) {
356
+ return 1;
357
+ }
358
+ targetStart >>>= 0;
359
+ targetEnd >>>= 0;
360
+ sourceStart >>>= 0;
361
+ sourceEnd >>>= 0;
362
+ if (this === otherBuffer) {
363
+ return 0;
364
+ }
365
+ let x = sourceEnd - sourceStart;
366
+ let y = targetEnd - targetStart;
367
+ const len = Math.min(x, y);
368
+ const thisCopy = this.slice(sourceStart, sourceEnd);
369
+ const targetCopy = otherBuffer.slice(targetStart, targetEnd);
370
+ for (let i = 0; i < len; ++i) {
371
+ if (thisCopy[i] !== targetCopy[i]) {
372
+ x = thisCopy[i];
373
+ y = targetCopy[i];
374
+ break;
375
+ }
376
+ }
377
+ if (x < y) return -1;
378
+ if (y < x) return 1;
379
+ return 0;
380
+ }
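A minimal usage sketch for `equals` and `compare` (assuming the vendored `Buffer` class):

  const a = Buffer.from("abc");
  const b = Buffer.from("abd");
  a.equals(b);    // false
  a.compare(b);   // -1, "abc" sorts before "abd"
  b.compare(a);   // 1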
381
+ /**
382
+ * Copies data from a region of this buffer to a region in `targetBuffer`, even if the `targetBuffer` memory
383
+ * region overlaps with this buffer.
384
+ *
385
+ * @param targetBuffer The target buffer to copy into.
386
+ * @param targetStart The offset within `targetBuffer` at which to begin writing.
387
+ * @param sourceStart The offset within this buffer at which to begin copying.
388
+ * @param sourceEnd The offset within this buffer at which to end copying (exclusive).
389
+ */
390
+ copy(targetBuffer, targetStart, sourceStart, sourceEnd) {
391
+ if (!Buffer.isBuffer(targetBuffer)) throw new TypeError("argument should be a Buffer");
392
+ if (!sourceStart) sourceStart = 0;
393
+ if (!targetStart) targetStart = 0;
394
+ if (!sourceEnd && sourceEnd !== 0) sourceEnd = this.length;
395
+ if (targetStart >= targetBuffer.length) targetStart = targetBuffer.length;
396
+ if (!targetStart) targetStart = 0;
397
+ if (sourceEnd > 0 && sourceEnd < sourceStart) sourceEnd = sourceStart;
398
+ if (sourceEnd === sourceStart) return 0;
399
+ if (targetBuffer.length === 0 || this.length === 0) return 0;
400
+ if (targetStart < 0) {
401
+ throw new RangeError("targetStart out of bounds");
402
+ }
403
+ if (sourceStart < 0 || sourceStart >= this.length) throw new RangeError("Index out of range");
404
+ if (sourceEnd < 0) throw new RangeError("sourceEnd out of bounds");
405
+ if (sourceEnd > this.length) sourceEnd = this.length;
406
+ if (targetBuffer.length - targetStart < sourceEnd - sourceStart) {
407
+ sourceEnd = targetBuffer.length - targetStart + sourceStart;
408
+ }
409
+ const len = sourceEnd - sourceStart;
410
+ if (this === targetBuffer && typeof Uint8Array.prototype.copyWithin === "function") {
411
+ this.copyWithin(targetStart, sourceStart, sourceEnd);
412
+ } else {
413
+ Uint8Array.prototype.set.call(targetBuffer, this.subarray(sourceStart, sourceEnd), targetStart);
414
+ }
415
+ return len;
416
+ }
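A minimal usage sketch for `copy` (assuming the vendored `Buffer` class):

  const src = Buffer.from("hello");
  const dst = Buffer.alloc(5, "-");
  src.copy(dst, 0, 1, 4);   // copies bytes 1..3 of src ("ell") into dst at offset 0
  dst.toString();           // "ell--"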
417
+ /**
418
+ * Returns a new `Buffer` that references the same memory as the original, but offset and cropped by the `start`
419
+ * and `end` indices. This is the same behavior as `buf.subarray()`.
420
+ *
421
+ * This method is not compatible with `Uint8Array.prototype.slice()`, the method on Buffer's superclass, which copies the data. To copy
422
+ * the slice, use `Uint8Array.prototype.slice()`.
423
+ *
424
+ * @param start
425
+ * @param end
426
+ */
427
+ slice(start, end) {
428
+ if (!start) {
429
+ start = 0;
430
+ }
431
+ const len = this.length;
432
+ start = ~~start;
433
+ end = end === void 0 ? len : ~~end;
434
+ if (start < 0) {
435
+ start += len;
436
+ if (start < 0) {
437
+ start = 0;
438
+ }
439
+ } else if (start > len) {
440
+ start = len;
441
+ }
442
+ if (end < 0) {
443
+ end += len;
444
+ if (end < 0) {
445
+ end = 0;
446
+ }
447
+ } else if (end > len) {
448
+ end = len;
449
+ }
450
+ if (end < start) {
451
+ end = start;
452
+ }
453
+ const newBuf = this.subarray(start, end);
454
+ Object.setPrototypeOf(newBuf, Buffer.prototype);
455
+ return newBuf;
456
+ }
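A minimal usage sketch for `slice`, which shares memory with the original buffer (assuming the vendored `Buffer` class):

  const buf = Buffer.from("buffer");
  const view = buf.slice(1, 4);
  view.toString();   // "uff"
  view[0] = 0x61;    // writes through to the original
  buf.toString();    // "baffer"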
457
+ /**
458
+ * Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits
459
+ * of accuracy. Behavior is undefined when value is anything other than an unsigned integer.
460
+ *
461
+ * @param value Number to write.
462
+ * @param offset Number of bytes to skip before starting to write.
463
+ * @param byteLength Number of bytes to write, between 0 and 6.
464
+ * @param noAssert
465
+ * @returns `offset` plus the number of bytes written.
466
+ */
467
+ writeUIntLE(value, offset, byteLength, noAssert) {
468
+ value = +value;
469
+ offset = offset >>> 0;
470
+ byteLength = byteLength >>> 0;
471
+ if (!noAssert) {
472
+ const maxBytes = Math.pow(2, 8 * byteLength) - 1;
473
+ Buffer._checkInt(this, value, offset, byteLength, maxBytes, 0);
474
+ }
475
+ let mul = 1;
476
+ let i = 0;
477
+ this[offset] = value & 255;
478
+ while (++i < byteLength && (mul *= 256)) {
479
+ this[offset + i] = value / mul & 255;
480
+ }
481
+ return offset + byteLength;
482
+ }
483
+ /**
484
+ * Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits of
485
+ * accuracy. Behavior is undefined when `value` is anything other than an unsigned integer.
486
+ *
487
+ * @param value Number to write.
488
+ * @param offset Number of bytes to skip before starting to write.
489
+ * @param byteLength Number of bytes to write, between 0 and 6.
490
+ * @param noAssert
491
+ * @returns `offset` plus the number of bytes written.
492
+ */
493
+ writeUIntBE(value, offset, byteLength, noAssert) {
494
+ value = +value;
495
+ offset = offset >>> 0;
496
+ byteLength = byteLength >>> 0;
497
+ if (!noAssert) {
498
+ const maxBytes = Math.pow(2, 8 * byteLength) - 1;
499
+ Buffer._checkInt(this, value, offset, byteLength, maxBytes, 0);
500
+ }
501
+ let i = byteLength - 1;
502
+ let mul = 1;
503
+ this[offset + i] = value & 255;
504
+ while (--i >= 0 && (mul *= 256)) {
505
+ this[offset + i] = value / mul & 255;
506
+ }
507
+ return offset + byteLength;
508
+ }
509
+ /**
510
+ * Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits
511
+ * of accuracy. Behavior is undefined when `value` is anything other than a signed integer.
512
+ *
513
+ * @param value Number to write.
514
+ * @param offset Number of bytes to skip before starting to write.
515
+ * @param byteLength Number of bytes to write, between 0 and 6.
516
+ * @param noAssert
517
+ * @returns `offset` plus the number of bytes written.
518
+ */
519
+ writeIntLE(value, offset, byteLength, noAssert) {
520
+ value = +value;
521
+ offset = offset >>> 0;
522
+ if (!noAssert) {
523
+ const limit = Math.pow(2, 8 * byteLength - 1);
524
+ Buffer._checkInt(this, value, offset, byteLength, limit - 1, -limit);
525
+ }
526
+ let i = 0;
527
+ let mul = 1;
528
+ let sub = 0;
529
+ this[offset] = value & 255;
530
+ while (++i < byteLength && (mul *= 256)) {
531
+ if (value < 0 && sub === 0 && this[offset + i - 1] !== 0) {
532
+ sub = 1;
533
+ }
534
+ this[offset + i] = (value / mul >> 0) - sub & 255;
535
+ }
536
+ return offset + byteLength;
537
+ }
538
+ /**
539
+ * Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits
540
+ * of accuracy. Behavior is undefined when `value` is anything other than a signed integer.
541
+ *
542
+ * @param value Number to write.
543
+ * @param offset Number of bytes to skip before starting to write.
544
+ * @param byteLength Number of bytes to write, between 0 and 6.
545
+ * @param noAssert
546
+ * @returns `offset` plus the number of bytes written.
547
+ */
548
+ writeIntBE(value, offset, byteLength, noAssert) {
549
+ value = +value;
550
+ offset = offset >>> 0;
551
+ if (!noAssert) {
552
+ const limit = Math.pow(2, 8 * byteLength - 1);
553
+ Buffer._checkInt(this, value, offset, byteLength, limit - 1, -limit);
554
+ }
555
+ let i = byteLength - 1;
556
+ let mul = 1;
557
+ let sub = 0;
558
+ this[offset + i] = value & 255;
559
+ while (--i >= 0 && (mul *= 256)) {
560
+ if (value < 0 && sub === 0 && this[offset + i + 1] !== 0) {
561
+ sub = 1;
562
+ }
563
+ this[offset + i] = (value / mul >> 0) - sub & 255;
564
+ }
565
+ return offset + byteLength;
566
+ }
567
+ /**
568
+ * Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an
569
+ * unsigned, little-endian integer supporting up to 48 bits of accuracy.
570
+ *
571
+ * @param offset Number of bytes to skip before starting to read.
572
+ * @param byteLength Number of bytes to read, between 0 and 6.
573
+ * @param noAssert
574
+ */
575
+ readUIntLE(offset, byteLength, noAssert) {
576
+ offset = offset >>> 0;
577
+ byteLength = byteLength >>> 0;
578
+ if (!noAssert) {
579
+ Buffer._checkOffset(offset, byteLength, this.length);
580
+ }
581
+ let val = this[offset];
582
+ let mul = 1;
583
+ let i = 0;
584
+ while (++i < byteLength && (mul *= 256)) {
585
+ val += this[offset + i] * mul;
586
+ }
587
+ return val;
588
+ }
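A minimal round-trip sketch for the variable-width integer accessors (assuming the vendored `Buffer` class):

  const buf = Buffer.alloc(6);
  buf.writeUIntLE(0x123456, 0, 3);   // stores bytes 56 34 12
  buf.readUIntLE(0, 3);              // 1193046 (0x123456)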
589
+ /**
590
+ * Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an
591
+ * unsigned, big-endian integer supporting up to 48 bits of accuracy.
592
+ *
593
+ * @param offset Number of bytes to skip before starting to read.
594
+ * @param byteLength Number of bytes to read, between 0 and 6.
595
+ * @param noAssert
596
+ */
597
+ readUIntBE(offset, byteLength, noAssert) {
598
+ offset = offset >>> 0;
599
+ byteLength = byteLength >>> 0;
600
+ if (!noAssert) {
601
+ Buffer._checkOffset(offset, byteLength, this.length);
602
+ }
603
+ let val = this[offset + --byteLength];
604
+ let mul = 1;
605
+ while (byteLength > 0 && (mul *= 256)) {
606
+ val += this[offset + --byteLength] * mul;
607
+ }
608
+ return val;
609
+ }
610
+ /**
611
+ * Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a
612
+ * little-endian, two's complement signed value supporting up to 48 bits of accuracy.
613
+ *
614
+ * @param offset Number of bytes to skip before starting to read.
615
+ * @param byteLength Number of bytes to read, between 0 and 6.
616
+ * @param noAssert
617
+ */
618
+ readIntLE(offset, byteLength, noAssert) {
619
+ offset = offset >>> 0;
620
+ byteLength = byteLength >>> 0;
621
+ if (!noAssert) {
622
+ Buffer._checkOffset(offset, byteLength, this.length);
623
+ }
624
+ let val = this[offset];
625
+ let mul = 1;
626
+ let i = 0;
627
+ while (++i < byteLength && (mul *= 256)) {
628
+ val += this[offset + i] * mul;
629
+ }
630
+ mul *= 128;
631
+ if (val >= mul) {
632
+ val -= Math.pow(2, 8 * byteLength);
633
+ }
634
+ return val;
635
+ }
636
+ /**
637
+ * Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a
638
+ * big-endian, two's complement signed value supporting up to 48 bits of accuracy.
639
+ *
640
+ * @param offset Number of bytes to skip before starting to read.
641
+ * @param byteLength Number of bytes to read, between 0 and 6.
642
+ * @param noAssert
643
+ */
644
+ readIntBE(offset, byteLength, noAssert) {
645
+ offset = offset >>> 0;
646
+ byteLength = byteLength >>> 0;
647
+ if (!noAssert) {
648
+ Buffer._checkOffset(offset, byteLength, this.length);
649
+ }
650
+ let i = byteLength;
651
+ let mul = 1;
652
+ let val = this[offset + --i];
653
+ while (i > 0 && (mul *= 256)) {
654
+ val += this[offset + --i] * mul;
655
+ }
656
+ mul *= 128;
657
+ if (val >= mul) {
658
+ val -= Math.pow(2, 8 * byteLength);
659
+ }
660
+ return val;
661
+ }
662
+ /**
663
+ * Reads an unsigned 8-bit integer from `buf` at the specified `offset`.
664
+ *
665
+ * @param offset Number of bytes to skip before starting to read.
666
+ * @param noAssert
667
+ */
668
+ readUInt8(offset, noAssert) {
669
+ offset = offset >>> 0;
670
+ if (!noAssert) {
671
+ Buffer._checkOffset(offset, 1, this.length);
672
+ }
673
+ return this[offset];
674
+ }
675
+ /**
676
+ * Reads an unsigned, little-endian 16-bit integer from `buf` at the specified `offset`.
677
+ *
678
+ * @param offset Number of bytes to skip before starting to read.
679
+ * @param noAssert
680
+ */
681
+ readUInt16LE(offset, noAssert) {
682
+ offset = offset >>> 0;
683
+ if (!noAssert) {
684
+ Buffer._checkOffset(offset, 2, this.length);
685
+ }
686
+ return this[offset] | this[offset + 1] << 8;
687
+ }
688
+ /**
689
+ * Reads an unsigned, big-endian 16-bit integer from `buf` at the specified `offset`.
690
+ *
691
+ * @param offset Number of bytes to skip before starting to read.
692
+ * @param noAssert
693
+ */
694
+ readUInt16BE(offset, noAssert) {
695
+ offset = offset >>> 0;
696
+ if (!noAssert) {
697
+ Buffer._checkOffset(offset, 2, this.length);
698
+ }
699
+ return this[offset] << 8 | this[offset + 1];
700
+ }
701
+ /**
702
+ * Reads an unsigned, little-endian 32-bit integer from `buf` at the specified `offset`.
703
+ *
704
+ * @param offset Number of bytes to skip before starting to read.
705
+ * @param noAssert
706
+ */
707
+ readUInt32LE(offset, noAssert) {
708
+ offset = offset >>> 0;
709
+ if (!noAssert) {
710
+ Buffer._checkOffset(offset, 4, this.length);
711
+ }
712
+ return (this[offset] | this[offset + 1] << 8 | this[offset + 2] << 16) + this[offset + 3] * 16777216;
713
+ }
714
+ /**
715
+ * Reads an unsigned, big-endian 32-bit integer from `buf` at the specified `offset`.
716
+ *
717
+ * @param offset Number of bytes to skip before starting to read.
718
+ * @param noAssert
719
+ */
720
+ readUInt32BE(offset, noAssert) {
721
+ offset = offset >>> 0;
722
+ if (!noAssert) {
723
+ Buffer._checkOffset(offset, 4, this.length);
724
+ }
725
+ return this[offset] * 16777216 + (this[offset + 1] << 16 | this[offset + 2] << 8 | this[offset + 3]);
726
+ }
727
+ /**
728
+ * Reads a signed 8-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer` are interpreted
729
+ * as two's complement signed values.
730
+ *
731
+ * @param offset Number of bytes to skip before starting to read.
732
+ * @param noAssert
733
+ */
734
+ readInt8(offset, noAssert) {
735
+ offset = offset >>> 0;
736
+ if (!noAssert) {
737
+ Buffer._checkOffset(offset, 1, this.length);
738
+ }
739
+ if (!(this[offset] & 128)) {
740
+ return this[offset];
741
+ }
742
+ return (255 - this[offset] + 1) * -1;
743
+ }
744
+ /**
745
+ * Reads a signed, little-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
746
+ * are interpreted as two's complement signed values.
747
+ *
748
+ * @param offset Number of bytes to skip before starting to read.
749
+ * @param noAssert
750
+ */
751
+ readInt16LE(offset, noAssert) {
752
+ offset = offset >>> 0;
753
+ if (!noAssert) {
754
+ Buffer._checkOffset(offset, 2, this.length);
755
+ }
756
+ const val = this[offset] | this[offset + 1] << 8;
757
+ return val & 32768 ? val | 4294901760 : val;
758
+ }
759
+ /**
760
+ * Reads a signed, big-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
761
+ * are interpreted as two's complement signed values.
762
+ *
763
+ * @param offset Number of bytes to skip before starting to read.
764
+ * @param noAssert
765
+ */
766
+ readInt16BE(offset, noAssert) {
767
+ offset = offset >>> 0;
768
+ if (!noAssert) {
769
+ Buffer._checkOffset(offset, 2, this.length);
770
+ }
771
+ const val = this[offset + 1] | this[offset] << 8;
772
+ return val & 32768 ? val | 4294901760 : val;
773
+ }
774
+ /**
775
+ * Reads a signed, little-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
776
+ * are interpreted as two's complement signed values.
777
+ *
778
+ * @param offset Number of bytes to skip before starting to read.
779
+ * @param noAssert
780
+ */
781
+ readInt32LE(offset, noAssert) {
782
+ offset = offset >>> 0;
783
+ if (!noAssert) {
784
+ Buffer._checkOffset(offset, 4, this.length);
785
+ }
786
+ return this[offset] | this[offset + 1] << 8 | this[offset + 2] << 16 | this[offset + 3] << 24;
787
+ }
788
+ /**
789
+ * Reads a signed, big-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
790
+ * are interpreted as two's complement signed values.
791
+ *
792
+ * @param offset Number of bytes to skip before starting to read.
793
+ * @param noAssert
794
+ */
795
+ readInt32BE(offset, noAssert) {
796
+ offset = offset >>> 0;
797
+ if (!noAssert) {
798
+ Buffer._checkOffset(offset, 4, this.length);
799
+ }
800
+ return this[offset] << 24 | this[offset + 1] << 16 | this[offset + 2] << 8 | this[offset + 3];
801
+ }
802
+ /**
803
+ * Interprets `buf` as an array of unsigned 16-bit integers and swaps the byte order in-place.
804
+ * Throws a `RangeError` if `buf.length` is not a multiple of 2.
805
+ */
806
+ swap16() {
807
+ const len = this.length;
808
+ if (len % 2 !== 0) {
809
+ throw new RangeError("Buffer size must be a multiple of 16-bits");
810
+ }
811
+ for (let i = 0; i < len; i += 2) {
812
+ this._swap(this, i, i + 1);
813
+ }
814
+ return this;
815
+ }
816
+ /**
817
+ * Interprets `buf` as an array of unsigned 32-bit integers and swaps the byte order in-place.
818
+ * Throws a `RangeError` if `buf.length` is not a multiple of 4.
819
+ */
820
+ swap32() {
821
+ const len = this.length;
822
+ if (len % 4 !== 0) {
823
+ throw new RangeError("Buffer size must be a multiple of 32-bits");
824
+ }
825
+ for (let i = 0; i < len; i += 4) {
826
+ this._swap(this, i, i + 3);
827
+ this._swap(this, i + 1, i + 2);
828
+ }
829
+ return this;
830
+ }
831
+ /**
832
+ * Interprets `buf` as an array of unsigned 64-bit integers and swaps the byte order in-place.
833
+ * Throws a `RangeError` if `buf.length` is not a multiple of 8.
834
+ */
835
+ swap64() {
836
+ const len = this.length;
837
+ if (len % 8 !== 0) {
838
+ throw new RangeError("Buffer size must be a multiple of 64-bits");
839
+ }
840
+ for (let i = 0; i < len; i += 8) {
841
+ this._swap(this, i, i + 7);
842
+ this._swap(this, i + 1, i + 6);
843
+ this._swap(this, i + 2, i + 5);
844
+ this._swap(this, i + 3, i + 4);
845
+ }
846
+ return this;
847
+ }
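A minimal usage sketch for the byte-swapping helpers (assuming the vendored `Buffer` class):

  const buf = Buffer.from([0x01, 0x02, 0x03, 0x04]);
  buf.swap16();   // bytes are now 02 01 04 03
  buf.swap32();   // bytes are now 03 04 01 02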
848
+ /**
849
+ * Swaps two octets.
850
+ *
851
+ * @param b
852
+ * @param n
853
+ * @param m
854
+ */
855
+ _swap(b, n, m) {
856
+ const i = b[n];
857
+ b[n] = b[m];
858
+ b[m] = i;
859
+ }
860
+ /**
861
+ * Writes `value` to `buf` at the specified `offset`. The `value` must be a valid unsigned 8-bit integer.
862
+ * Behavior is undefined when `value` is anything other than an unsigned 8-bit integer.
863
+ *
864
+ * @param value Number to write.
865
+ * @param offset Number of bytes to skip before starting to write.
866
+ * @param noAssert
867
+ * @returns `offset` plus the number of bytes written.
868
+ */
869
+ writeUInt8(value, offset, noAssert) {
870
+ value = +value;
871
+ offset = offset >>> 0;
872
+ if (!noAssert) {
873
+ Buffer._checkInt(this, value, offset, 1, 255, 0);
874
+ }
875
+ this[offset] = value & 255;
876
+ return offset + 1;
877
+ }
878
+ /**
879
+ * Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 16-bit
880
+ * integer. Behavior is undefined when `value` is anything other than an unsigned 16-bit integer.
881
+ *
882
+ * @param value Number to write.
883
+ * @param offset Number of bytes to skip before starting to write.
884
+ * @param noAssert
885
+ * @returns `offset` plus the number of bytes written.
886
+ */
887
+ writeUInt16LE(value, offset, noAssert) {
888
+ value = +value;
889
+ offset = offset >>> 0;
890
+ if (!noAssert) {
891
+ Buffer._checkInt(this, value, offset, 2, 65535, 0);
892
+ }
893
+ this[offset] = value & 255;
894
+ this[offset + 1] = value >>> 8;
895
+ return offset + 2;
896
+ }
897
+ /**
898
+ * Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 16-bit
899
+ * integer. Behavior is undefined when `value` is anything other than an unsigned 16-bit integer.
900
+ *
901
+ * @param value Number to write.
902
+ * @param offset Number of bytes to skip before starting to write.
903
+ * @param noAssert
904
+ * @returns `offset` plus the number of bytes written.
905
+ */
906
+ writeUInt16BE(value, offset, noAssert) {
907
+ value = +value;
908
+ offset = offset >>> 0;
909
+ if (!noAssert) {
910
+ Buffer._checkInt(this, value, offset, 2, 65535, 0);
911
+ }
912
+ this[offset] = value >>> 8;
913
+ this[offset + 1] = value & 255;
914
+ return offset + 2;
915
+ }
916
+ /**
917
+ * Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 32-bit
918
+ * integer. Behavior is undefined when `value` is anything other than an unsigned 32-bit integer.
919
+ *
920
+ * @param value Number to write.
921
+ * @param offset Number of bytes to skip before starting to write.
922
+ * @param noAssert
923
+ * @returns `offset` plus the number of bytes written.
924
+ */
925
+ writeUInt32LE(value, offset, noAssert) {
926
+ value = +value;
927
+ offset = offset >>> 0;
928
+ if (!noAssert) {
929
+ Buffer._checkInt(this, value, offset, 4, 4294967295, 0);
930
+ }
931
+ this[offset + 3] = value >>> 24;
932
+ this[offset + 2] = value >>> 16;
933
+ this[offset + 1] = value >>> 8;
934
+ this[offset] = value & 255;
935
+ return offset + 4;
936
+ }
937
+ /**
938
+ * Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 32-bit
939
+ * integer. Behavior is undefined when `value` is anything other than an unsigned 32-bit integer.
940
+ *
941
+ * @param value Number to write.
942
+ * @param offset Number of bytes to skip before starting to write.
943
+ * @param noAssert
944
+ * @returns `offset` plus the number of bytes written.
945
+ */
946
+ writeUInt32BE(value, offset, noAssert) {
947
+ value = +value;
948
+ offset = offset >>> 0;
949
+ if (!noAssert) {
950
+ Buffer._checkInt(this, value, offset, 4, 4294967295, 0);
951
+ }
952
+ this[offset] = value >>> 24;
953
+ this[offset + 1] = value >>> 16;
954
+ this[offset + 2] = value >>> 8;
955
+ this[offset + 3] = value & 255;
956
+ return offset + 4;
957
+ }
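A minimal round-trip sketch for the fixed-width readers and writers (assuming the vendored `Buffer` class):

  const buf = Buffer.alloc(4);
  buf.writeUInt32BE(0xdeadbeef, 0);
  buf.readUInt32BE(0);    // 3735928559 (0xdeadbeef)
  buf.readUInt16LE(2);    // 61374 (0xefbe)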
958
+ /**
959
+ * Writes `value` to `buf` at the specified `offset`. The `value` must be a valid signed 8-bit integer.
960
+ * Behavior is undefined when `value` is anything other than a signed 8-bit integer.
961
+ *
962
+ * @param value Number to write.
963
+ * @param offset Number of bytes to skip before starting to write.
964
+ * @param noAssert
965
+ * @returns `offset` plus the number of bytes written.
966
+ */
967
+ writeInt8(value, offset, noAssert) {
968
+ value = +value;
969
+ offset = offset >>> 0;
970
+ if (!noAssert) {
971
+ Buffer._checkInt(this, value, offset, 1, 127, -128);
972
+ }
973
+ if (value < 0) {
974
+ value = 255 + value + 1;
975
+ }
976
+ this[offset] = value & 255;
977
+ return offset + 1;
978
+ }
979
+ /**
980
+ * Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 16-bit
981
+ * integer. Behavior is undefined when `value` is anything other than a signed 16-bit integer.
982
+ *
983
+ * @param value Number to write.
984
+ * @param offset Number of bytes to skip before starting to write.
985
+ * @param noAssert
986
+ * @returns `offset` plus the number of bytes written.
987
+ */
988
+ writeInt16LE(value, offset, noAssert) {
989
+ value = +value;
990
+ offset = offset >>> 0;
991
+ if (!noAssert) {
992
+ Buffer._checkInt(this, value, offset, 2, 32767, -32768);
993
+ }
994
+ this[offset] = value & 255;
995
+ this[offset + 1] = value >>> 8;
996
+ return offset + 2;
997
+ }
998
+ /**
999
+ * Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 16-bit
1000
+ * integer. Behavior is undefined when `value` is anything other than a signed 16-bit integer.
1001
+ *
1002
+ * @param value Number to write.
1003
+ * @param offset Number of bytes to skip before starting to write.
1004
+ * @param noAssert
1005
+ * @returns `offset` plus the number of bytes written.
1006
+ */
1007
+ writeInt16BE(value, offset, noAssert) {
1008
+ value = +value;
1009
+ offset = offset >>> 0;
1010
+ if (!noAssert) {
1011
+ Buffer._checkInt(this, value, offset, 2, 32767, -32768);
1012
+ }
1013
+ this[offset] = value >>> 8;
1014
+ this[offset + 1] = value & 255;
1015
+ return offset + 2;
1016
+ }
1017
+ /**
1018
+ * Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 32-bit
1019
+ * integer. Behavior is undefined when `value` is anything other than a signed 32-bit integer.
1020
+ *
1021
+ * @param value Number to write.
1022
+ * @param offset Number of bytes to skip before starting to write.
1023
+ * @param noAssert
1024
+ * @returns `offset` plus the number of bytes written.
1025
+ */
1026
+ writeInt32LE(value, offset, noAssert) {
1027
+ value = +value;
1028
+ offset = offset >>> 0;
1029
+ if (!noAssert) {
1030
+ Buffer._checkInt(this, value, offset, 4, 2147483647, -2147483648);
1031
+ }
1032
+ this[offset] = value & 255;
1033
+ this[offset + 1] = value >>> 8;
1034
+ this[offset + 2] = value >>> 16;
1035
+ this[offset + 3] = value >>> 24;
1036
+ return offset + 4;
1037
+ }
1038
+ /**
1039
+ * Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 32-bit
1040
+ * integer. Behavior is undefined when `value` is anything other than a signed 32-bit integer.
1041
+ *
1042
+ * @param value Number to write.
1043
+ * @param offset Number of bytes to skip before starting to write.
1044
+ * @param noAssert
1045
+ * @returns `offset` plus the number of bytes written.
1046
+ */
1047
+ writeInt32BE(value, offset, noAssert) {
1048
+ value = +value;
1049
+ offset = offset >>> 0;
1050
+ if (!noAssert) {
1051
+ Buffer._checkInt(this, value, offset, 4, 2147483647, -2147483648);
1052
+ }
1053
+ if (value < 0) {
1054
+ value = 4294967295 + value + 1;
1055
+ }
1056
+ this[offset] = value >>> 24;
1057
+ this[offset + 1] = value >>> 16;
1058
+ this[offset + 2] = value >>> 8;
1059
+ this[offset + 3] = value & 255;
1060
+ return offset + 4;
1061
+ }
1062
+ /**
1063
+ * Fills `buf` with the specified `value`. If the `offset` and `end` are not given, the entire `buf` will be
1064
+ * filled. The `value` is coerced to a `uint32` value if it is not a string, `Buffer`, or integer. If the resulting
1065
+ * integer is greater than `255` (decimal), then `buf` will be filled with `value & 255`.
1066
+ *
1067
+ * If the final write of a `fill()` operation falls on a multi-byte character, then only the bytes of that
1068
+ * character that fit into `buf` are written.
1069
+ *
1070
+ * If `value` contains invalid characters, it is truncated; if no valid fill data remains, an exception is thrown.
1071
+ *
1072
+ * @param value
1073
+ * @param encoding
1074
+ */
1075
+ fill(value, offset, end, encoding) {
1076
+ if (typeof value === "string") {
1077
+ if (typeof offset === "string") {
1078
+ encoding = offset;
1079
+ offset = 0;
1080
+ end = this.length;
1081
+ } else if (typeof end === "string") {
1082
+ encoding = end;
1083
+ end = this.length;
1084
+ }
1085
+ if (encoding !== void 0 && typeof encoding !== "string") {
1086
+ throw new TypeError("encoding must be a string");
1087
+ }
1088
+ if (typeof encoding === "string" && !Buffer.isEncoding(encoding)) {
1089
+ throw new TypeError("Unknown encoding: " + encoding);
1090
+ }
1091
+ if (value.length === 1) {
1092
+ const code = value.charCodeAt(0);
1093
+ if (encoding === "utf8" && code < 128) {
1094
+ value = code;
1095
+ }
1096
+ }
1097
+ } else if (typeof value === "number") {
1098
+ value = value & 255;
1099
+ } else if (typeof value === "boolean") {
1100
+ value = Number(value);
1101
+ }
1102
+ offset ?? (offset = 0);
1103
+ end ?? (end = this.length);
1104
+ if (offset < 0 || this.length < offset || this.length < end) {
1105
+ throw new RangeError("Out of range index");
1106
+ }
1107
+ if (end <= offset) {
1108
+ return this;
1109
+ }
1110
+ offset = offset >>> 0;
1111
+ end = end === void 0 ? this.length : end >>> 0;
1112
+ value || (value = 0);
1113
+ let i;
1114
+ if (typeof value === "number") {
1115
+ for (i = offset; i < end; ++i) {
1116
+ this[i] = value;
1117
+ }
1118
+ } else {
1119
+ const bytes = Buffer.isBuffer(value) ? value : Buffer.from(value, encoding);
1120
+ const len = bytes.length;
1121
+ if (len === 0) {
1122
+ throw new TypeError('The value "' + value + '" is invalid for argument "value"');
1123
+ }
1124
+ for (i = 0; i < end - offset; ++i) {
1125
+ this[i + offset] = bytes[i % len];
1126
+ }
1127
+ }
1128
+ return this;
1129
+ }
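A minimal usage sketch for `fill` with a byte, a single character, and a repeating pattern (assuming the vendored `Buffer` class):

  Buffer.alloc(4).fill(0x2e).toString();    // "...."
  Buffer.alloc(5).fill("a").toString();     // "aaaaa"
  Buffer.alloc(6).fill("abc").toString();   // "abcabc"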
1130
+ /**
1131
+ * Returns the index of the specified value.
1132
+ *
1133
+ * If `value` is:
1134
+ * - a string, `value` is interpreted according to the character encoding in `encoding`.
1135
+ * - a `Buffer` or `Uint8Array`, `value` will be used in its entirety. To compare a partial Buffer, use `slice()`.
1136
+ * - a number, `value` will be interpreted as an unsigned 8-bit integer value between `0` and `255`.
1137
+ *
1138
+ * Any other types will throw a `TypeError`.
1139
+ *
1140
+ * @param value What to search for.
1141
+ * @param byteOffset Where to begin searching in `buf`. If negative, then calculated from the end.
1142
+ * @param encoding If `value` is a string, this is the encoding used to search.
1143
+ * @returns The index of the first occurrence of `value` in `buf`, or `-1` if not found.
1144
+ */
1145
+ indexOf(value, byteOffset, encoding) {
1146
+ return this._bidirectionalIndexOf(this, value, byteOffset, encoding, true);
1147
+ }
1148
+ /**
1149
+ * Gets the last index of the specified value.
1150
+ *
1151
+ * @see indexOf()
1152
+ * @param value
1153
+ * @param byteOffset
1154
+ * @param encoding
1155
+ */
1156
+ lastIndexOf(value, byteOffset, encoding) {
1157
+ return this._bidirectionalIndexOf(this, value, byteOffset, encoding, false);
1158
+ }
1159
+ _bidirectionalIndexOf(buffer, val, byteOffset, encoding, dir) {
1160
+ if (buffer.length === 0) {
1161
+ return -1;
1162
+ }
1163
+ if (typeof byteOffset === "string") {
1164
+ encoding = byteOffset;
1165
+ byteOffset = 0;
1166
+ } else if (typeof byteOffset === "undefined") {
1167
+ byteOffset = 0;
1168
+ } else if (byteOffset > 2147483647) {
1169
+ byteOffset = 2147483647;
1170
+ } else if (byteOffset < -2147483648) {
1171
+ byteOffset = -2147483648;
1172
+ }
1173
+ byteOffset = +byteOffset;
1174
+ if (byteOffset !== byteOffset) {
1175
+ byteOffset = dir ? 0 : buffer.length - 1;
1176
+ }
1177
+ if (byteOffset < 0) {
1178
+ byteOffset = buffer.length + byteOffset;
1179
+ }
1180
+ if (byteOffset >= buffer.length) {
1181
+ if (dir) {
1182
+ return -1;
1183
+ } else {
1184
+ byteOffset = buffer.length - 1;
1185
+ }
1186
+ } else if (byteOffset < 0) {
1187
+ if (dir) {
1188
+ byteOffset = 0;
1189
+ } else {
1190
+ return -1;
1191
+ }
1192
+ }
1193
+ if (typeof val === "string") {
1194
+ val = Buffer.from(val, encoding);
1195
+ }
1196
+ if (Buffer.isBuffer(val)) {
1197
+ if (val.length === 0) {
1198
+ return -1;
1199
+ }
1200
+ return Buffer._arrayIndexOf(buffer, val, byteOffset, encoding, dir);
1201
+ } else if (typeof val === "number") {
1202
+ val = val & 255;
1203
+ if (typeof Uint8Array.prototype.indexOf === "function") {
1204
+ if (dir) {
1205
+ return Uint8Array.prototype.indexOf.call(buffer, val, byteOffset);
1206
+ } else {
1207
+ return Uint8Array.prototype.lastIndexOf.call(buffer, val, byteOffset);
1208
+ }
1209
+ }
1210
+ return Buffer._arrayIndexOf(buffer, Buffer.from([val]), byteOffset, encoding, dir);
1211
+ }
1212
+ throw new TypeError("val must be string, number or Buffer");
1213
+ }
1214
+ /**
1215
+ * Equivalent to `buf.indexOf() !== -1`.
1216
+ *
1217
+ * @param value
1218
+ * @param byteOffset
1219
+ * @param encoding
1220
+ */
1221
+ includes(value, byteOffset, encoding) {
1222
+ return this.indexOf(value, byteOffset, encoding) !== -1;
1223
+ }
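A minimal usage sketch for `indexOf` and `includes` (assuming the vendored `Buffer` class):

  const buf = Buffer.from("this is a buffer");
  buf.indexOf("is");        // 2
  buf.indexOf(0x61);        // 8, the first byte equal to "a"
  buf.includes("buffer");   // true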
1224
+ /**
1225
+ * Creates a new buffer from the given parameters.
1226
+ *
1227
+ * @param data
1228
+ * @param encoding
1229
+ */
1230
+ static from(a, b, c) {
1231
+ return new Buffer(a, b, c);
1232
+ }
1233
+ /**
1234
+ * Returns true if `obj` is a Buffer.
1235
+ *
1236
+ * @param obj
1237
+ */
1238
+ static isBuffer(obj) {
1239
+ return obj != null && obj !== Buffer.prototype && Buffer._isInstance(obj, Buffer);
1240
+ }
1241
+ /**
1242
+ * Returns true if `encoding` is a supported encoding.
1243
+ *
1244
+ * @param encoding
1245
+ */
1246
+ static isEncoding(encoding) {
1247
+ switch (encoding.toLowerCase()) {
1248
+ case "hex":
1249
+ case "utf8":
1250
+ case "ascii":
1251
+ case "binary":
1252
+ case "latin1":
1253
+ case "ucs2":
1254
+ case "utf16le":
1255
+ case "base64":
1256
+ return true;
1257
+ default:
1258
+ return false;
1259
+ }
1260
+ }
1261
+ /**
1262
+ * Gives the actual byte length of a string for an encoding. This is not the same as `string.length` since that
1263
+ * returns the number of characters in the string.
1264
+ *
1265
+ * @param string The string to test.
1266
+ * @param encoding The encoding to use for calculation. Default is `utf8`.
1267
+ */
1268
+ static byteLength(string, encoding) {
1269
+ if (Buffer.isBuffer(string)) {
1270
+ return string.length;
1271
+ }
1272
+ if (typeof string !== "string" && (ArrayBuffer.isView(string) || Buffer._isInstance(string, ArrayBuffer))) {
1273
+ return string.byteLength;
1274
+ }
1275
+ if (typeof string !== "string") {
1276
+ throw new TypeError(
1277
+ 'The "string" argument must be one of type string, Buffer, or ArrayBuffer. Received type ' + typeof string
1278
+ );
1279
+ }
1280
+ const len = string.length;
1281
+ const mustMatch = arguments.length > 2 && arguments[2] === true;
1282
+ if (!mustMatch && len === 0) {
1283
+ return 0;
1284
+ }
1285
+ switch (encoding?.toLowerCase()) {
1286
+ case "ascii":
1287
+ case "latin1":
1288
+ case "binary":
1289
+ return len;
1290
+ case "utf8":
1291
+ return Buffer._utf8ToBytes(string).length;
1292
+ case "hex":
1293
+ return len >>> 1;
1294
+ case "ucs2":
1295
+ case "utf16le":
1296
+ return len * 2;
1297
+ case "base64":
1298
+ return Buffer._base64ToBytes(string).length;
1299
+ default:
1300
+ return mustMatch ? -1 : Buffer._utf8ToBytes(string).length;
1301
+ }
1302
+ }
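A minimal usage sketch for `Buffer.byteLength` (assuming the vendored `Buffer` class):

  Buffer.byteLength("hello", "utf8");      // 5
  Buffer.byteLength("café", "utf8");       // 5, "é" encodes to two bytes
  Buffer.byteLength("aGVsbG8=", "base64"); // 5, the decoded length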
1303
+ /**
1304
+ * Returns a Buffer which is the result of concatenating all the buffers in the list together.
1305
+ *
1306
+ * - If the list has no items, or if the `totalLength` is 0, then it returns a zero-length buffer.
1307
+ * - If the list has exactly one item, then the first item is returned.
1308
+ * - If the list has more than one item, then a new buffer is created.
1309
+ *
1310
+ * It is faster to provide the `totalLength` if it is known. However, if not provided, it will be calculated at
1311
+ * a small computational expense.
1312
+ *
1313
+ * @param list An array of Buffer objects to concatenate.
1314
+ * @param totalLength Total length of the buffers when concatenated.
1315
+ */
1316
+ static concat(list, totalLength) {
1317
+ if (!Array.isArray(list)) {
1318
+ throw new TypeError('"list" argument must be an Array of Buffers');
1319
+ }
1320
+ if (list.length === 0) {
1321
+ return Buffer.alloc(0);
1322
+ }
1323
+ let i;
1324
+ if (totalLength === void 0) {
1325
+ totalLength = 0;
1326
+ for (i = 0; i < list.length; ++i) {
1327
+ totalLength += list[i].length;
1328
+ }
1329
+ }
1330
+ const buffer = Buffer.allocUnsafe(totalLength);
1331
+ let pos = 0;
1332
+ for (i = 0; i < list.length; ++i) {
1333
+ let buf = list[i];
1334
+ if (Buffer._isInstance(buf, Uint8Array)) {
1335
+ if (pos + buf.length > buffer.length) {
1336
+ if (!Buffer.isBuffer(buf)) {
1337
+ buf = Buffer.from(buf);
1338
+ }
1339
+ buf.copy(buffer, pos);
1340
+ } else {
1341
+ Uint8Array.prototype.set.call(buffer, buf, pos);
1342
+ }
1343
+ } else if (!Buffer.isBuffer(buf)) {
1344
+ throw new TypeError('"list" argument must be an Array of Buffers');
1345
+ } else {
1346
+ buf.copy(buffer, pos);
1347
+ }
1348
+ pos += buf.length;
1349
+ }
1350
+ return buffer;
1351
+ }
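A minimal usage sketch for `Buffer.concat` (assuming the vendored `Buffer` class):

  const joined = Buffer.concat([Buffer.from("foo"), Buffer.from("bar")]);
  joined.length;       // 6
  joined.toString();   // "foobar"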
1352
+ /**
1353
+ * The same as `buf1.compare(buf2)`.
1354
+ */
1355
+ static compare(buf1, buf2) {
1356
+ if (Buffer._isInstance(buf1, Uint8Array)) {
1357
+ buf1 = Buffer.from(buf1, buf1.byteOffset, buf1.byteLength);
1358
+ }
1359
+ if (Buffer._isInstance(buf2, Uint8Array)) {
1360
+ buf2 = Buffer.from(buf2, buf2.byteOffset, buf2.byteLength);
1361
+ }
1362
+ if (!Buffer.isBuffer(buf1) || !Buffer.isBuffer(buf2)) {
1363
+ throw new TypeError('The "buf1", "buf2" arguments must be one of type Buffer or Uint8Array');
1364
+ }
1365
+ if (buf1 === buf2) {
1366
+ return 0;
1367
+ }
1368
+ let x = buf1.length;
1369
+ let y = buf2.length;
1370
+ for (let i = 0, len = Math.min(x, y); i < len; ++i) {
1371
+ if (buf1[i] !== buf2[i]) {
1372
+ x = buf1[i];
1373
+ y = buf2[i];
1374
+ break;
1375
+ }
1376
+ }
1377
+ if (x < y) {
1378
+ return -1;
1379
+ }
1380
+ if (y < x) {
1381
+ return 1;
1382
+ }
1383
+ return 0;
1384
+ }
1385
+ /**
1386
+ * Allocates a new buffer of `size` octets.
1387
+ *
1388
+ * @param size The number of octets to allocate.
1389
+ * @param fill If specified, the buffer will be initialized by calling `buf.fill(fill)`, or with zeroes otherwise.
1390
+ * @param encoding The encoding used for the call to `buf.fill()` while initializing.
1391
+ */
1392
+ static alloc(size, fill, encoding) {
1393
+ if (typeof size !== "number") {
1394
+ throw new TypeError('"size" argument must be of type number');
1395
+ } else if (size < 0) {
1396
+ throw new RangeError('The value "' + size + '" is invalid for option "size"');
1397
+ }
1398
+ if (size <= 0) {
1399
+ return new Buffer(size);
1400
+ }
1401
+ if (fill !== void 0) {
1402
+ return typeof encoding === "string" ? new Buffer(size).fill(fill, 0, size, encoding) : new Buffer(size).fill(fill);
1403
+ }
1404
+ return new Buffer(size);
1405
+ }
1406
+ /**
1407
+ * Allocates a new buffer of `size` octets without initializing memory. The contents of the buffer are unknown.
1408
+ *
1409
+ * @param size
1410
+ */
1411
+ static allocUnsafe(size) {
1412
+ if (typeof size !== "number") {
1413
+ throw new TypeError('"size" argument must be of type number');
1414
+ } else if (size < 0) {
1415
+ throw new RangeError('The value "' + size + '" is invalid for option "size"');
1416
+ }
1417
+ return new Buffer(size < 0 ? 0 : Buffer._checked(size) | 0);
1418
+ }
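A minimal usage sketch for the allocation helpers (assuming the vendored `Buffer` class; `allocUnsafe` makes no guarantee about initial contents):

  Buffer.alloc(4);          // length 4, zero-filled
  Buffer.alloc(4, 0x61);    // length 4, filled with 0x61 ("aaaa")
  Buffer.allocUnsafe(4);    // length 4, contents not guaranteed to be initialized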
1419
+ /**
1420
+ * Returns true if the given `obj` is an instance of `type`.
1421
+ *
1422
+ * @param obj
1423
+ * @param type
1424
+ */
1425
+ static _isInstance(obj, type) {
1426
+ return obj instanceof type || obj != null && obj.constructor != null && obj.constructor.name != null && obj.constructor.name === type.name;
1427
+ }
1428
+ static _checked(length) {
1429
+ if (length >= K_MAX_LENGTH) {
1430
+ throw new RangeError(
1431
+ "Attempt to allocate Buffer larger than maximum size: 0x" + K_MAX_LENGTH.toString(16) + " bytes"
1432
+ );
1433
+ }
1434
+ return length | 0;
1435
+ }
1436
+ static _blitBuffer(src, dst, offset, length) {
1437
+ let i;
1438
+ for (i = 0; i < length; ++i) {
1439
+ if (i + offset >= dst.length || i >= src.length) {
1440
+ break;
1441
+ }
1442
+ dst[i + offset] = src[i];
1443
+ }
1444
+ return i;
1445
+ }
1446
+ static _utf8Write(buf, string, offset, length) {
1447
+ return Buffer._blitBuffer(Buffer._utf8ToBytes(string, buf.length - offset), buf, offset, length);
1448
+ }
1449
+ static _asciiWrite(buf, string, offset, length) {
1450
+ return Buffer._blitBuffer(Buffer._asciiToBytes(string), buf, offset, length);
1451
+ }
1452
+ static _base64Write(buf, string, offset, length) {
1453
+ return Buffer._blitBuffer(Buffer._base64ToBytes(string), buf, offset, length);
1454
+ }
1455
+ static _ucs2Write(buf, string, offset, length) {
1456
+ return Buffer._blitBuffer(Buffer._utf16leToBytes(string, buf.length - offset), buf, offset, length);
1457
+ }
1458
+ static _hexWrite(buf, string, offset, length) {
1459
+ offset = Number(offset) || 0;
1460
+ const remaining = buf.length - offset;
1461
+ if (!length) {
1462
+ length = remaining;
1463
+ } else {
1464
+ length = Number(length);
1465
+ if (length > remaining) {
1466
+ length = remaining;
1467
+ }
1468
+ }
1469
+ const strLen = string.length;
1470
+ if (length > strLen / 2) {
1471
+ length = strLen / 2;
1472
+ }
1473
+ let i;
1474
+ for (i = 0; i < length; ++i) {
1475
+ const parsed = parseInt(string.substr(i * 2, 2), 16);
1476
+ if (parsed !== parsed) {
1477
+ return i;
1478
+ }
1479
+ buf[offset + i] = parsed;
1480
+ }
1481
+ return i;
1482
+ }
1483
+ static _utf8ToBytes(string, units) {
1484
+ units = units || Infinity;
1485
+ const length = string.length;
1486
+ const bytes = [];
1487
+ let codePoint;
1488
+ let leadSurrogate = null;
1489
+ for (let i = 0; i < length; ++i) {
1490
+ codePoint = string.charCodeAt(i);
1491
+ if (codePoint > 55295 && codePoint < 57344) {
1492
+ if (!leadSurrogate) {
1493
+ if (codePoint > 56319) {
1494
+ if ((units -= 3) > -1) {
1495
+ bytes.push(239, 191, 189);
1496
+ }
1497
+ continue;
1498
+ } else if (i + 1 === length) {
1499
+ if ((units -= 3) > -1) {
1500
+ bytes.push(239, 191, 189);
1501
+ }
1502
+ continue;
1503
+ }
1504
+ leadSurrogate = codePoint;
1505
+ continue;
1506
+ }
1507
+ if (codePoint < 56320) {
1508
+ if ((units -= 3) > -1) {
1509
+ bytes.push(239, 191, 189);
1510
+ }
1511
+ leadSurrogate = codePoint;
1512
+ continue;
1513
+ }
1514
+ codePoint = (leadSurrogate - 55296 << 10 | codePoint - 56320) + 65536;
1515
+ } else if (leadSurrogate) {
1516
+ if ((units -= 3) > -1) {
1517
+ bytes.push(239, 191, 189);
1518
+ }
1519
+ }
1520
+ leadSurrogate = null;
1521
+ if (codePoint < 128) {
1522
+ if ((units -= 1) < 0) {
1523
+ break;
1524
+ }
1525
+ bytes.push(codePoint);
1526
+ } else if (codePoint < 2048) {
1527
+ if ((units -= 2) < 0) {
1528
+ break;
1529
+ }
1530
+ bytes.push(codePoint >> 6 | 192, codePoint & 63 | 128);
1531
+ } else if (codePoint < 65536) {
1532
+ if ((units -= 3) < 0) {
1533
+ break;
1534
+ }
1535
+ bytes.push(codePoint >> 12 | 224, codePoint >> 6 & 63 | 128, codePoint & 63 | 128);
1536
+ } else if (codePoint < 1114112) {
1537
+ if ((units -= 4) < 0) {
1538
+ break;
1539
+ }
1540
+ bytes.push(
1541
+ codePoint >> 18 | 240,
1542
+ codePoint >> 12 & 63 | 128,
1543
+ codePoint >> 6 & 63 | 128,
1544
+ codePoint & 63 | 128
1545
+ );
1546
+ } else {
1547
+ throw new Error("Invalid code point");
1548
+ }
1549
+ }
1550
+ return bytes;
1551
+ }
1552
+ static _base64ToBytes(str) {
1553
+ return toByteArray(base64clean(str));
1554
+ }
1555
+ static _asciiToBytes(str) {
1556
+ const byteArray = [];
1557
+ for (let i = 0; i < str.length; ++i) {
1558
+ byteArray.push(str.charCodeAt(i) & 255);
1559
+ }
1560
+ return byteArray;
1561
+ }
1562
+ static _utf16leToBytes(str, units) {
1563
+ let c, hi, lo;
1564
+ const byteArray = [];
1565
+ for (let i = 0; i < str.length; ++i) {
1566
+ if ((units -= 2) < 0) break;
1567
+ c = str.charCodeAt(i);
1568
+ hi = c >> 8;
1569
+ lo = c % 256;
1570
+ byteArray.push(lo);
1571
+ byteArray.push(hi);
1572
+ }
1573
+ return byteArray;
1574
+ }
1575
+ static _hexSlice(buf, start, end) {
1576
+ const len = buf.length;
1577
+ if (!start || start < 0) {
1578
+ start = 0;
1579
+ }
1580
+ if (!end || end < 0 || end > len) {
1581
+ end = len;
1582
+ }
1583
+ let out = "";
1584
+ for (let i = start; i < end; ++i) {
1585
+ out += hexSliceLookupTable[buf[i]];
1586
+ }
1587
+ return out;
1588
+ }
1589
+ static _base64Slice(buf, start, end) {
1590
+ if (start === 0 && end === buf.length) {
1591
+ return fromByteArray(buf);
1592
+ } else {
1593
+ return fromByteArray(buf.slice(start, end));
1594
+ }
1595
+ }
1596
+ static _utf8Slice(buf, start, end) {
1597
+ end = Math.min(buf.length, end);
1598
+ const res = [];
1599
+ let i = start;
1600
+ while (i < end) {
1601
+ const firstByte = buf[i];
1602
+ let codePoint = null;
1603
+ let bytesPerSequence = firstByte > 239 ? 4 : firstByte > 223 ? 3 : firstByte > 191 ? 2 : 1;
1604
+ if (i + bytesPerSequence <= end) {
1605
+ let secondByte, thirdByte, fourthByte, tempCodePoint;
1606
+ switch (bytesPerSequence) {
1607
+ case 1:
1608
+ if (firstByte < 128) {
1609
+ codePoint = firstByte;
1610
+ }
1611
+ break;
1612
+ case 2:
1613
+ secondByte = buf[i + 1];
1614
+ if ((secondByte & 192) === 128) {
1615
+ tempCodePoint = (firstByte & 31) << 6 | secondByte & 63;
1616
+ if (tempCodePoint > 127) {
1617
+ codePoint = tempCodePoint;
1618
+ }
1619
+ }
1620
+ break;
1621
+ case 3:
1622
+ secondByte = buf[i + 1];
1623
+ thirdByte = buf[i + 2];
1624
+ if ((secondByte & 192) === 128 && (thirdByte & 192) === 128) {
1625
+ tempCodePoint = (firstByte & 15) << 12 | (secondByte & 63) << 6 | thirdByte & 63;
1626
+ if (tempCodePoint > 2047 && (tempCodePoint < 55296 || tempCodePoint > 57343)) {
1627
+ codePoint = tempCodePoint;
1628
+ }
1629
+ }
1630
+ break;
1631
+ case 4:
1632
+ secondByte = buf[i + 1];
1633
+ thirdByte = buf[i + 2];
1634
+ fourthByte = buf[i + 3];
1635
+ if ((secondByte & 192) === 128 && (thirdByte & 192) === 128 && (fourthByte & 192) === 128) {
1636
+ tempCodePoint = (firstByte & 15) << 18 | (secondByte & 63) << 12 | (thirdByte & 63) << 6 | fourthByte & 63;
1637
+ if (tempCodePoint > 65535 && tempCodePoint < 1114112) {
1638
+ codePoint = tempCodePoint;
1639
+ }
1640
+ }
1641
+ }
1642
+ }
1643
+ if (codePoint === null) {
1644
+ codePoint = 65533;
1645
+ bytesPerSequence = 1;
1646
+ } else if (codePoint > 65535) {
1647
+ codePoint -= 65536;
1648
+ res.push(codePoint >>> 10 & 1023 | 55296);
1649
+ codePoint = 56320 | codePoint & 1023;
1650
+ }
1651
+ res.push(codePoint);
1652
+ i += bytesPerSequence;
1653
+ }
1654
+ return Buffer._decodeCodePointsArray(res);
1655
+ }
1656
+ static _decodeCodePointsArray(codePoints) {
1657
+ const len = codePoints.length;
1658
+ if (len <= MAX_ARGUMENTS_LENGTH) {
1659
+ return String.fromCharCode.apply(String, codePoints);
1660
+ }
1661
+ let res = "";
1662
+ let i = 0;
1663
+ while (i < len) {
1664
+ res += String.fromCharCode.apply(String, codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH));
1665
+ }
1666
+ return res;
1667
+ }
1668
+ static _asciiSlice(buf, start, end) {
1669
+ let ret = "";
1670
+ end = Math.min(buf.length, end);
1671
+ for (let i = start; i < end; ++i) {
1672
+ ret += String.fromCharCode(buf[i] & 127);
1673
+ }
1674
+ return ret;
1675
+ }
1676
+ static _latin1Slice(buf, start, end) {
1677
+ let ret = "";
1678
+ end = Math.min(buf.length, end);
1679
+ for (let i = start; i < end; ++i) {
1680
+ ret += String.fromCharCode(buf[i]);
1681
+ }
1682
+ return ret;
1683
+ }
1684
+ static _utf16leSlice(buf, start, end) {
1685
+ const bytes = buf.slice(start, end);
1686
+ let res = "";
1687
+ for (let i = 0; i < bytes.length - 1; i += 2) {
1688
+ res += String.fromCharCode(bytes[i] + bytes[i + 1] * 256);
1689
+ }
1690
+ return res;
1691
+ }
1692
+ static _arrayIndexOf(arr, val, byteOffset, encoding, dir) {
1693
+ let indexSize = 1;
1694
+ let arrLength = arr.length;
1695
+ let valLength = val.length;
1696
+ if (encoding !== void 0) {
1697
+ encoding = Buffer._getEncoding(encoding);
1698
+ if (encoding === "ucs2" || encoding === "utf16le") {
1699
+ if (arr.length < 2 || val.length < 2) {
1700
+ return -1;
1701
+ }
1702
+ indexSize = 2;
1703
+ arrLength /= 2;
1704
+ valLength /= 2;
1705
+ byteOffset /= 2;
1706
+ }
1707
+ }
1708
+ function read(buf, i2) {
1709
+ if (indexSize === 1) {
1710
+ return buf[i2];
1711
+ } else {
1712
+ return buf.readUInt16BE(i2 * indexSize);
1713
+ }
1714
+ }
1715
+ let i;
1716
+ if (dir) {
1717
+ let foundIndex = -1;
1718
+ for (i = byteOffset; i < arrLength; i++) {
1719
+ if (read(arr, i) === read(val, foundIndex === -1 ? 0 : i - foundIndex)) {
1720
+ if (foundIndex === -1) foundIndex = i;
1721
+ if (i - foundIndex + 1 === valLength) return foundIndex * indexSize;
1722
+ } else {
1723
+ if (foundIndex !== -1) i -= i - foundIndex;
1724
+ foundIndex = -1;
1725
+ }
1726
+ }
1727
+ } else {
1728
+ if (byteOffset + valLength > arrLength) {
1729
+ byteOffset = arrLength - valLength;
1730
+ }
1731
+ for (i = byteOffset; i >= 0; i--) {
1732
+ let found = true;
1733
+ for (let j = 0; j < valLength; j++) {
1734
+ if (read(arr, i + j) !== read(val, j)) {
1735
+ found = false;
1736
+ break;
1737
+ }
1738
+ }
1739
+ if (found) {
1740
+ return i;
1741
+ }
1742
+ }
1743
+ }
1744
+ return -1;
1745
+ }
1746
+ static _checkOffset(offset, ext, length) {
1747
+ if (offset % 1 !== 0 || offset < 0) throw new RangeError("offset is not uint");
1748
+ if (offset + ext > length) throw new RangeError("Trying to access beyond buffer length");
1749
+ }
1750
+ static _checkInt(buf, value, offset, ext, max, min) {
1751
+ if (!Buffer.isBuffer(buf)) throw new TypeError('"buffer" argument must be a Buffer instance');
1752
+ if (value > max || value < min) throw new RangeError('"value" argument is out of bounds');
1753
+ if (offset + ext > buf.length) throw new RangeError("Index out of range");
1754
+ }
1755
+ static _getEncoding(encoding) {
1756
+ let toLowerCase = false;
1757
+ let originalEncoding = "";
1758
+ for (; ; ) {
1759
+ switch (encoding) {
1760
+ case "hex":
1761
+ return "hex";
1762
+ case "utf8":
1763
+ return "utf8";
1764
+ case "ascii":
1765
+ return "ascii";
1766
+ case "binary":
1767
+ return "binary";
1768
+ case "latin1":
1769
+ return "latin1";
1770
+ case "ucs2":
1771
+ return "ucs2";
1772
+ case "utf16le":
1773
+ return "utf16le";
1774
+ case "base64":
1775
+ return "base64";
1776
+ default: {
1777
+ if (toLowerCase) {
1778
+ throw new TypeError("Unknown or unsupported encoding: " + originalEncoding);
1779
+ }
1780
+ toLowerCase = true;
1781
+ originalEncoding = encoding;
1782
+ encoding = encoding.toLowerCase();
1783
+ }
1784
+ }
1785
+ }
1786
+ }
1787
+ }
1788
+ const hexSliceLookupTable = function() {
1789
+ const alphabet = "0123456789abcdef";
1790
+ const table = new Array(256);
1791
+ for (let i = 0; i < 16; ++i) {
1792
+ const i16 = i * 16;
1793
+ for (let j = 0; j < 16; ++j) {
1794
+ table[i16 + j] = alphabet[i] + alphabet[j];
1795
+ }
1796
+ }
1797
+ return table;
1798
+ }();
1799
+ const INVALID_BASE64_RE = /[^+/0-9A-Za-z-_]/g;
1800
+ function base64clean(str) {
1801
+ str = str.split("=")[0];
1802
+ str = str.trim().replace(INVALID_BASE64_RE, "");
1803
+ if (str.length < 2) return "";
1804
+ while (str.length % 4 !== 0) {
1805
+ str = str + "=";
1806
+ }
1807
+ return str;
1808
+ }
1809
+
27
1810
  function notEmpty(value) {
28
1811
  return value !== null && value !== void 0;
29
1812
  }
@@ -118,155 +1901,15 @@ function promiseMap(inputValues, mapper) {
118
1901
  return inputValues.reduce(reducer, Promise.resolve([]));
119
1902
  }
120
1903
 
121
- function getEnvironment() {
122
- try {
123
- if (isDefined(process) && isDefined(process.env)) {
124
- return {
125
- apiKey: process.env.XATA_API_KEY ?? getGlobalApiKey(),
126
- databaseURL: process.env.XATA_DATABASE_URL ?? getGlobalDatabaseURL(),
127
- branch: process.env.XATA_BRANCH ?? getGlobalBranch(),
128
- deployPreview: process.env.XATA_PREVIEW,
129
- deployPreviewBranch: process.env.XATA_PREVIEW_BRANCH,
130
- vercelGitCommitRef: process.env.VERCEL_GIT_COMMIT_REF,
131
- vercelGitRepoOwner: process.env.VERCEL_GIT_REPO_OWNER
132
- };
133
- }
134
- } catch (err) {
135
- }
136
- try {
137
- if (isObject(Deno) && isObject(Deno.env)) {
138
- return {
139
- apiKey: Deno.env.get("XATA_API_KEY") ?? getGlobalApiKey(),
140
- databaseURL: Deno.env.get("XATA_DATABASE_URL") ?? getGlobalDatabaseURL(),
141
- branch: Deno.env.get("XATA_BRANCH") ?? getGlobalBranch(),
142
- deployPreview: Deno.env.get("XATA_PREVIEW"),
143
- deployPreviewBranch: Deno.env.get("XATA_PREVIEW_BRANCH"),
144
- vercelGitCommitRef: Deno.env.get("VERCEL_GIT_COMMIT_REF"),
145
- vercelGitRepoOwner: Deno.env.get("VERCEL_GIT_REPO_OWNER")
146
- };
147
- }
148
- } catch (err) {
149
- }
150
- return {
151
- apiKey: getGlobalApiKey(),
152
- databaseURL: getGlobalDatabaseURL(),
153
- branch: getGlobalBranch(),
154
- deployPreview: void 0,
155
- deployPreviewBranch: void 0,
156
- vercelGitCommitRef: void 0,
157
- vercelGitRepoOwner: void 0
158
- };
159
- }
160
- function getEnableBrowserVariable() {
161
- try {
162
- if (isObject(process) && isObject(process.env) && process.env.XATA_ENABLE_BROWSER !== void 0) {
163
- return process.env.XATA_ENABLE_BROWSER === "true";
164
- }
165
- } catch (err) {
166
- }
167
- try {
168
- if (isObject(Deno) && isObject(Deno.env) && Deno.env.get("XATA_ENABLE_BROWSER") !== void 0) {
169
- return Deno.env.get("XATA_ENABLE_BROWSER") === "true";
170
- }
171
- } catch (err) {
172
- }
173
- try {
174
- return XATA_ENABLE_BROWSER === true || XATA_ENABLE_BROWSER === "true";
175
- } catch (err) {
176
- return void 0;
177
- }
178
- }
179
- function getGlobalApiKey() {
180
- try {
181
- return XATA_API_KEY;
182
- } catch (err) {
183
- return void 0;
184
- }
185
- }
186
- function getGlobalDatabaseURL() {
187
- try {
188
- return XATA_DATABASE_URL;
189
- } catch (err) {
190
- return void 0;
191
- }
192
- }
193
- function getGlobalBranch() {
194
- try {
195
- return XATA_BRANCH;
196
- } catch (err) {
197
- return void 0;
198
- }
199
- }
200
- function getDatabaseURL() {
201
- try {
202
- const { databaseURL } = getEnvironment();
203
- return databaseURL;
204
- } catch (err) {
205
- return void 0;
206
- }
207
- }
208
- function getAPIKey() {
209
- try {
210
- const { apiKey } = getEnvironment();
211
- return apiKey;
212
- } catch (err) {
213
- return void 0;
214
- }
215
- }
216
- function getBranch() {
217
- try {
218
- const { branch } = getEnvironment();
219
- return branch;
220
- } catch (err) {
221
- return void 0;
222
- }
223
- }
224
- function buildPreviewBranchName({ org, branch }) {
225
- return `preview-${org}-${branch}`;
226
- }
227
- function getPreviewBranch() {
228
- try {
229
- const { deployPreview, deployPreviewBranch, vercelGitCommitRef, vercelGitRepoOwner } = getEnvironment();
230
- if (deployPreviewBranch)
231
- return deployPreviewBranch;
232
- switch (deployPreview) {
233
- case "vercel": {
234
- if (!vercelGitCommitRef || !vercelGitRepoOwner) {
235
- console.warn("XATA_PREVIEW=vercel but VERCEL_GIT_COMMIT_REF or VERCEL_GIT_REPO_OWNER is not valid");
236
- return void 0;
237
- }
238
- return buildPreviewBranchName({ org: vercelGitRepoOwner, branch: vercelGitCommitRef });
239
- }
240
- }
241
- return void 0;
242
- } catch (err) {
243
- return void 0;
244
- }
245
- }
246
-
247
- var __accessCheck$6 = (obj, member, msg) => {
248
- if (!member.has(obj))
249
- throw TypeError("Cannot " + msg);
250
- };
251
- var __privateGet$5 = (obj, member, getter) => {
252
- __accessCheck$6(obj, member, "read from private field");
253
- return getter ? getter.call(obj) : member.get(obj);
254
- };
255
- var __privateAdd$6 = (obj, member, value) => {
256
- if (member.has(obj))
257
- throw TypeError("Cannot add the same private member more than once");
258
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
259
- };
260
- var __privateSet$4 = (obj, member, value, setter) => {
261
- __accessCheck$6(obj, member, "write to private field");
262
- setter ? setter.call(obj, value) : member.set(obj, value);
263
- return value;
264
- };
265
- var __privateMethod$4 = (obj, member, method) => {
266
- __accessCheck$6(obj, member, "access private method");
267
- return method;
1904
+ var __typeError$6 = (msg) => {
1905
+ throw TypeError(msg);
268
1906
  };
269
- var _fetch, _queue, _concurrency, _enqueue, enqueue_fn;
1907
+ var __accessCheck$6 = (obj, member, msg) => member.has(obj) || __typeError$6("Cannot " + msg);
1908
+ var __privateGet$5 = (obj, member, getter) => (__accessCheck$6(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
1909
+ var __privateAdd$6 = (obj, member, value) => member.has(obj) ? __typeError$6("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
1910
+ var __privateSet$4 = (obj, member, value, setter) => (__accessCheck$6(obj, member, "write to private field"), member.set(obj, value), value);
1911
+ var __privateMethod$4 = (obj, member, method) => (__accessCheck$6(obj, member, "access private method"), method);
1912
+ var _fetch, _queue, _concurrency, _ApiRequestPool_instances, enqueue_fn;
270
1913
  const REQUEST_TIMEOUT = 5 * 60 * 1e3;
271
1914
  function getFetchImplementation(userFetch) {
272
1915
  const globalFetch = typeof fetch !== "undefined" ? fetch : void 0;
@@ -279,10 +1922,10 @@ function getFetchImplementation(userFetch) {
279
1922
  }
280
1923
  class ApiRequestPool {
281
1924
  constructor(concurrency = 10) {
282
- __privateAdd$6(this, _enqueue);
283
- __privateAdd$6(this, _fetch, void 0);
284
- __privateAdd$6(this, _queue, void 0);
285
- __privateAdd$6(this, _concurrency, void 0);
1925
+ __privateAdd$6(this, _ApiRequestPool_instances);
1926
+ __privateAdd$6(this, _fetch);
1927
+ __privateAdd$6(this, _queue);
1928
+ __privateAdd$6(this, _concurrency);
286
1929
  __privateSet$4(this, _queue, []);
287
1930
  __privateSet$4(this, _concurrency, concurrency);
288
1931
  this.running = 0;
@@ -317,7 +1960,7 @@ class ApiRequestPool {
317
1960
  }
318
1961
  return response;
319
1962
  };
320
- return __privateMethod$4(this, _enqueue, enqueue_fn).call(this, async () => {
1963
+ return __privateMethod$4(this, _ApiRequestPool_instances, enqueue_fn).call(this, async () => {
321
1964
  return await runRequest();
322
1965
  });
323
1966
  }
@@ -325,7 +1968,7 @@ class ApiRequestPool {
325
1968
  _fetch = new WeakMap();
326
1969
  _queue = new WeakMap();
327
1970
  _concurrency = new WeakMap();
328
- _enqueue = new WeakSet();
1971
+ _ApiRequestPool_instances = new WeakSet();
329
1972
  enqueue_fn = function(task) {
330
1973
  const promise = new Promise((resolve) => __privateGet$5(this, _queue).push(resolve)).finally(() => {
331
1974
  this.started--;
@@ -528,7 +2171,7 @@ function defaultOnOpen(response) {
528
2171
  }
529
2172
  }
530
2173
 
531
- const VERSION = "0.29.3";
2174
+ const VERSION = "0.30.0";
532
2175
 
533
2176
  class ErrorWithCause extends Error {
534
2177
  constructor(message, options) {
@@ -608,35 +2251,30 @@ function parseProviderString(provider = "production") {
608
2251
  return provider;
609
2252
  }
610
2253
  const [main, workspaces] = provider.split(",");
611
- if (!main || !workspaces)
612
- return null;
2254
+ if (!main || !workspaces) return null;
613
2255
  return { main, workspaces };
614
2256
  }
615
2257
  function buildProviderString(provider) {
616
- if (isHostProviderAlias(provider))
617
- return provider;
2258
+ if (isHostProviderAlias(provider)) return provider;
618
2259
  return `${provider.main},${provider.workspaces}`;
619
2260
  }
620
2261
  function parseWorkspacesUrlParts(url) {
621
- if (!isString(url))
622
- return null;
2262
+ if (!isString(url)) return null;
623
2263
  const matches = {
624
2264
  production: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.xata\.sh\/db\/([^:]+):?(.*)?/),
625
2265
  staging: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.staging-xata\.dev\/db\/([^:]+):?(.*)?/),
626
2266
  dev: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.dev-xata\.dev\/db\/([^:]+):?(.*)?/),
627
- local: url.match(/(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:([^:]+):?(.*)?/)
2267
+ local: url.match(/(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:(?:\d+)\/db\/([^:]+):?(.*)?/)
628
2268
  };
629
2269
  const [host, match] = Object.entries(matches).find(([, match2]) => match2 !== null) ?? [];
630
- if (!isHostProviderAlias(host) || !match)
631
- return null;
2270
+ if (!isHostProviderAlias(host) || !match) return null;
632
2271
  return { workspace: match[1], region: match[2], database: match[3], branch: match[4], host };
633
2272
  }
634
2273
 
635
2274
  const pool = new ApiRequestPool();
636
2275
  const resolveUrl = (url, queryParams = {}, pathParams = {}) => {
637
2276
  const cleanQueryParams = Object.entries(queryParams).reduce((acc, [key, value]) => {
638
- if (value === void 0 || value === null)
639
- return acc;
2277
+ if (value === void 0 || value === null) return acc;
640
2278
  return { ...acc, [key]: value };
641
2279
  }, {});
642
2280
  const query = new URLSearchParams(cleanQueryParams).toString();
@@ -684,8 +2322,7 @@ function hostHeader(url) {
684
2322
  return groups?.host ? { Host: groups.host } : {};
685
2323
  }
686
2324
  async function parseBody(body, headers) {
687
- if (!isDefined(body))
688
- return void 0;
2325
+ if (!isDefined(body)) return void 0;
689
2326
  if (isBlob(body) || typeof body.text === "function") {
690
2327
  return body;
691
2328
  }
@@ -764,8 +2401,7 @@ async function fetch$1({
764
2401
  [TraceAttributes.CLOUDFLARE_RAY_ID]: response.headers?.get("cf-ray") ?? void 0
765
2402
  });
766
2403
  const message = response.headers?.get("x-xata-message");
767
- if (message)
768
- console.warn(message);
2404
+ if (message) console.warn(message);
769
2405
  if (response.status === 204) {
770
2406
  return {};
771
2407
  }
@@ -849,7 +2485,72 @@ function parseUrl(url) {
849
2485
 
850
2486
  const dataPlaneFetch = async (options) => fetch$1({ ...options, endpoint: "dataPlane" });
851
2487
 
852
- const applyMigration = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/apply", method: "post", ...variables, signal });
2488
+ const getTasks = (variables, signal) => dataPlaneFetch({
2489
+ url: "/tasks",
2490
+ method: "get",
2491
+ ...variables,
2492
+ signal
2493
+ });
2494
+ const getTaskStatus = (variables, signal) => dataPlaneFetch({
2495
+ url: "/tasks/{taskId}",
2496
+ method: "get",
2497
+ ...variables,
2498
+ signal
2499
+ });
2500
+ const listClusterBranches = (variables, signal) => dataPlaneFetch({
2501
+ url: "/cluster/{clusterId}/branches",
2502
+ method: "get",
2503
+ ...variables,
2504
+ signal
2505
+ });
2506
+ const listClusterExtensions = (variables, signal) => dataPlaneFetch({
2507
+ url: "/cluster/{clusterId}/extensions",
2508
+ method: "get",
2509
+ ...variables,
2510
+ signal
2511
+ });
2512
+ const installClusterExtension = (variables, signal) => dataPlaneFetch({
2513
+ url: "/cluster/{clusterId}/extensions",
2514
+ method: "post",
2515
+ ...variables,
2516
+ signal
2517
+ });
2518
+ const dropClusterExtension = (variables, signal) => dataPlaneFetch({
2519
+ url: "/cluster/{clusterId}/extensions",
2520
+ method: "delete",
2521
+ ...variables,
2522
+ signal
2523
+ });
2524
+ const getClusterMetrics = (variables, signal) => dataPlaneFetch({
2525
+ url: "/cluster/{clusterId}/metrics",
2526
+ method: "get",
2527
+ ...variables,
2528
+ signal
2529
+ });
2530
+ const applyMigration = (variables, signal) => dataPlaneFetch({
2531
+ url: "/db/{dbBranchName}/migrations/apply",
2532
+ method: "post",
2533
+ ...variables,
2534
+ signal
2535
+ });
2536
+ const startMigration = (variables, signal) => dataPlaneFetch({
2537
+ url: "/db/{dbBranchName}/migrations/start",
2538
+ method: "post",
2539
+ ...variables,
2540
+ signal
2541
+ });
2542
+ const completeMigration = (variables, signal) => dataPlaneFetch({
2543
+ url: "/db/{dbBranchName}/migrations/complete",
2544
+ method: "post",
2545
+ ...variables,
2546
+ signal
2547
+ });
2548
+ const rollbackMigration = (variables, signal) => dataPlaneFetch({
2549
+ url: "/db/{dbBranchName}/migrations/rollback",
2550
+ method: "post",
2551
+ ...variables,
2552
+ signal
2553
+ });
853
2554
  const adaptTable = (variables, signal) => dataPlaneFetch({
854
2555
  url: "/db/{dbBranchName}/migrations/adapt/{tableName}",
855
2556
  method: "post",
@@ -862,9 +2563,30 @@ const adaptAllTables = (variables, signal) => dataPlaneFetch({
862
2563
  ...variables,
863
2564
  signal
864
2565
  });
865
- const getBranchMigrationJobStatus = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/status", method: "get", ...variables, signal });
866
- const getMigrationJobStatus = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/jobs/{jobId}", method: "get", ...variables, signal });
867
- const getMigrationHistory = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/history", method: "get", ...variables, signal });
2566
+ const getBranchMigrationJobStatus = (variables, signal) => dataPlaneFetch({
2567
+ url: "/db/{dbBranchName}/migrations/status",
2568
+ method: "get",
2569
+ ...variables,
2570
+ signal
2571
+ });
2572
+ const getMigrationJobs = (variables, signal) => dataPlaneFetch({
2573
+ url: "/db/{dbBranchName}/migrations/jobs",
2574
+ method: "get",
2575
+ ...variables,
2576
+ signal
2577
+ });
2578
+ const getMigrationJobStatus = (variables, signal) => dataPlaneFetch({
2579
+ url: "/db/{dbBranchName}/migrations/jobs/{jobId}",
2580
+ method: "get",
2581
+ ...variables,
2582
+ signal
2583
+ });
2584
+ const getMigrationHistory = (variables, signal) => dataPlaneFetch({
2585
+ url: "/db/{dbBranchName}/migrations/history",
2586
+ method: "get",
2587
+ ...variables,
2588
+ signal
2589
+ });
868
2590
  const getBranchList = (variables, signal) => dataPlaneFetch({
869
2591
  url: "/dbs/{dbName}",
870
2592
  method: "get",
@@ -878,6 +2600,7 @@ const getDatabaseSettings = (variables, signal) => dataPlaneFetch({
878
2600
  signal
879
2601
  });
880
2602
  const updateDatabaseSettings = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/settings", method: "patch", ...variables, signal });
2603
+ const createBranchAsync = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/async", method: "put", ...variables, signal });
881
2604
  const getBranchDetails = (variables, signal) => dataPlaneFetch({
882
2605
  url: "/db/{dbBranchName}",
883
2606
  method: "get",
@@ -891,68 +2614,166 @@ const deleteBranch = (variables, signal) => dataPlaneFetch({
891
2614
  ...variables,
892
2615
  signal
893
2616
  });
894
- const getSchema = (variables, signal) => dataPlaneFetch({
895
- url: "/db/{dbBranchName}/schema",
896
- method: "get",
2617
+ const getSchema = (variables, signal) => dataPlaneFetch({
2618
+ url: "/db/{dbBranchName}/schema",
2619
+ method: "get",
2620
+ ...variables,
2621
+ signal
2622
+ });
2623
+ const getSchemas = (variables, signal) => dataPlaneFetch({
2624
+ url: "/db/{dbBranchName}/schemas",
2625
+ method: "get",
2626
+ ...variables,
2627
+ signal
2628
+ });
2629
+ const copyBranch = (variables, signal) => dataPlaneFetch({
2630
+ url: "/db/{dbBranchName}/copy",
2631
+ method: "post",
2632
+ ...variables,
2633
+ signal
2634
+ });
2635
+ const getBranchMoveStatus = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/move", method: "get", ...variables, signal });
2636
+ const moveBranch = (variables, signal) => dataPlaneFetch({
2637
+ url: "/db/{dbBranchName}/move",
2638
+ method: "put",
2639
+ ...variables,
2640
+ signal
2641
+ });
2642
+ const updateBranchMetadata = (variables, signal) => dataPlaneFetch({
2643
+ url: "/db/{dbBranchName}/metadata",
2644
+ method: "put",
2645
+ ...variables,
2646
+ signal
2647
+ });
2648
+ const getBranchMetadata = (variables, signal) => dataPlaneFetch({
2649
+ url: "/db/{dbBranchName}/metadata",
2650
+ method: "get",
2651
+ ...variables,
2652
+ signal
2653
+ });
2654
+ const getBranchStats = (variables, signal) => dataPlaneFetch({
2655
+ url: "/db/{dbBranchName}/stats",
2656
+ method: "get",
2657
+ ...variables,
2658
+ signal
2659
+ });
2660
+ const getGitBranchesMapping = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "get", ...variables, signal });
2661
+ const addGitBranchesEntry = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "post", ...variables, signal });
2662
+ const removeGitBranchesEntry = (variables, signal) => dataPlaneFetch({
2663
+ url: "/dbs/{dbName}/gitBranches",
2664
+ method: "delete",
2665
+ ...variables,
2666
+ signal
2667
+ });
2668
+ const resolveBranch = (variables, signal) => dataPlaneFetch({
2669
+ url: "/dbs/{dbName}/resolveBranch",
2670
+ method: "get",
2671
+ ...variables,
2672
+ signal
2673
+ });
2674
+ const getBranchMigrationHistory = (variables, signal) => dataPlaneFetch({
2675
+ url: "/db/{dbBranchName}/migrations",
2676
+ method: "get",
2677
+ ...variables,
2678
+ signal
2679
+ });
2680
+ const getBranchMigrationPlan = (variables, signal) => dataPlaneFetch({
2681
+ url: "/db/{dbBranchName}/migrations/plan",
2682
+ method: "post",
2683
+ ...variables,
2684
+ signal
2685
+ });
2686
+ const executeBranchMigrationPlan = (variables, signal) => dataPlaneFetch({
2687
+ url: "/db/{dbBranchName}/migrations/execute",
2688
+ method: "post",
2689
+ ...variables,
2690
+ signal
2691
+ });
2692
+ const queryMigrationRequests = (variables, signal) => dataPlaneFetch({
2693
+ url: "/dbs/{dbName}/migrations/query",
2694
+ method: "post",
2695
+ ...variables,
2696
+ signal
2697
+ });
2698
+ const createMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations", method: "post", ...variables, signal });
2699
+ const getMigrationRequest = (variables, signal) => dataPlaneFetch({
2700
+ url: "/dbs/{dbName}/migrations/{mrNumber}",
2701
+ method: "get",
2702
+ ...variables,
2703
+ signal
2704
+ });
2705
+ const updateMigrationRequest = (variables, signal) => dataPlaneFetch({
2706
+ url: "/dbs/{dbName}/migrations/{mrNumber}",
2707
+ method: "patch",
2708
+ ...variables,
2709
+ signal
2710
+ });
2711
+ const listMigrationRequestsCommits = (variables, signal) => dataPlaneFetch({
2712
+ url: "/dbs/{dbName}/migrations/{mrNumber}/commits",
2713
+ method: "post",
2714
+ ...variables,
2715
+ signal
2716
+ });
2717
+ const compareMigrationRequest = (variables, signal) => dataPlaneFetch({
2718
+ url: "/dbs/{dbName}/migrations/{mrNumber}/compare",
2719
+ method: "post",
2720
+ ...variables,
2721
+ signal
2722
+ });
2723
+ const getMigrationRequestIsMerged = (variables, signal) => dataPlaneFetch({
2724
+ url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
2725
+ method: "get",
2726
+ ...variables,
2727
+ signal
2728
+ });
2729
+ const mergeMigrationRequest = (variables, signal) => dataPlaneFetch({
2730
+ url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
2731
+ method: "post",
2732
+ ...variables,
2733
+ signal
2734
+ });
2735
+ const getBranchSchemaHistory = (variables, signal) => dataPlaneFetch({
2736
+ url: "/db/{dbBranchName}/schema/history",
2737
+ method: "post",
897
2738
  ...variables,
898
2739
  signal
899
2740
  });
900
- const copyBranch = (variables, signal) => dataPlaneFetch({
901
- url: "/db/{dbBranchName}/copy",
2741
+ const compareBranchWithUserSchema = (variables, signal) => dataPlaneFetch({
2742
+ url: "/db/{dbBranchName}/schema/compare",
902
2743
  method: "post",
903
2744
  ...variables,
904
2745
  signal
905
2746
  });
906
- const updateBranchMetadata = (variables, signal) => dataPlaneFetch({
907
- url: "/db/{dbBranchName}/metadata",
908
- method: "put",
2747
+ const compareBranchSchemas = (variables, signal) => dataPlaneFetch({
2748
+ url: "/db/{dbBranchName}/schema/compare/{branchName}",
2749
+ method: "post",
909
2750
  ...variables,
910
2751
  signal
911
2752
  });
912
- const getBranchMetadata = (variables, signal) => dataPlaneFetch({
913
- url: "/db/{dbBranchName}/metadata",
914
- method: "get",
2753
+ const updateBranchSchema = (variables, signal) => dataPlaneFetch({
2754
+ url: "/db/{dbBranchName}/schema/update",
2755
+ method: "post",
915
2756
  ...variables,
916
2757
  signal
917
2758
  });
918
- const getBranchStats = (variables, signal) => dataPlaneFetch({
919
- url: "/db/{dbBranchName}/stats",
920
- method: "get",
2759
+ const previewBranchSchemaEdit = (variables, signal) => dataPlaneFetch({
2760
+ url: "/db/{dbBranchName}/schema/preview",
2761
+ method: "post",
921
2762
  ...variables,
922
2763
  signal
923
2764
  });
924
- const getGitBranchesMapping = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "get", ...variables, signal });
925
- const addGitBranchesEntry = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "post", ...variables, signal });
926
- const removeGitBranchesEntry = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "delete", ...variables, signal });
927
- const resolveBranch = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/resolveBranch", method: "get", ...variables, signal });
928
- const getBranchMigrationHistory = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations", method: "get", ...variables, signal });
929
- const getBranchMigrationPlan = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/plan", method: "post", ...variables, signal });
930
- const executeBranchMigrationPlan = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/execute", method: "post", ...variables, signal });
931
- const queryMigrationRequests = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/query", method: "post", ...variables, signal });
932
- const createMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations", method: "post", ...variables, signal });
933
- const getMigrationRequest = (variables, signal) => dataPlaneFetch({
934
- url: "/dbs/{dbName}/migrations/{mrNumber}",
935
- method: "get",
2765
+ const applyBranchSchemaEdit = (variables, signal) => dataPlaneFetch({
2766
+ url: "/db/{dbBranchName}/schema/apply",
2767
+ method: "post",
936
2768
  ...variables,
937
2769
  signal
938
2770
  });
939
- const updateMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/{mrNumber}", method: "patch", ...variables, signal });
940
- const listMigrationRequestsCommits = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/{mrNumber}/commits", method: "post", ...variables, signal });
941
- const compareMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/{mrNumber}/compare", method: "post", ...variables, signal });
942
- const getMigrationRequestIsMerged = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/{mrNumber}/merge", method: "get", ...variables, signal });
943
- const mergeMigrationRequest = (variables, signal) => dataPlaneFetch({
944
- url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
2771
+ const pushBranchMigrations = (variables, signal) => dataPlaneFetch({
2772
+ url: "/db/{dbBranchName}/schema/push",
945
2773
  method: "post",
946
2774
  ...variables,
947
2775
  signal
948
2776
  });
949
- const getBranchSchemaHistory = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/history", method: "post", ...variables, signal });
950
- const compareBranchWithUserSchema = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/compare", method: "post", ...variables, signal });
951
- const compareBranchSchemas = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/compare/{branchName}", method: "post", ...variables, signal });
952
- const updateBranchSchema = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/update", method: "post", ...variables, signal });
953
- const previewBranchSchemaEdit = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/preview", method: "post", ...variables, signal });
954
- const applyBranchSchemaEdit = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/apply", method: "post", ...variables, signal });
955
- const pushBranchMigrations = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/push", method: "post", ...variables, signal });
956
2777
  const createTable = (variables, signal) => dataPlaneFetch({
957
2778
  url: "/db/{dbBranchName}/tables/{tableName}",
958
2779
  method: "put",
@@ -965,14 +2786,24 @@ const deleteTable = (variables, signal) => dataPlaneFetch({
965
2786
  ...variables,
966
2787
  signal
967
2788
  });
968
- const updateTable = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}", method: "patch", ...variables, signal });
2789
+ const updateTable = (variables, signal) => dataPlaneFetch({
2790
+ url: "/db/{dbBranchName}/tables/{tableName}",
2791
+ method: "patch",
2792
+ ...variables,
2793
+ signal
2794
+ });
969
2795
  const getTableSchema = (variables, signal) => dataPlaneFetch({
970
2796
  url: "/db/{dbBranchName}/tables/{tableName}/schema",
971
2797
  method: "get",
972
2798
  ...variables,
973
2799
  signal
974
2800
  });
975
- const setTableSchema = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/schema", method: "put", ...variables, signal });
2801
+ const setTableSchema = (variables, signal) => dataPlaneFetch({
2802
+ url: "/db/{dbBranchName}/tables/{tableName}/schema",
2803
+ method: "put",
2804
+ ...variables,
2805
+ signal
2806
+ });
976
2807
  const getTableColumns = (variables, signal) => dataPlaneFetch({
977
2808
  url: "/db/{dbBranchName}/tables/{tableName}/columns",
978
2809
  method: "get",
@@ -980,7 +2811,12 @@ const getTableColumns = (variables, signal) => dataPlaneFetch({
980
2811
  signal
981
2812
  });
982
2813
  const addTableColumn = (variables, signal) => dataPlaneFetch(
983
- { url: "/db/{dbBranchName}/tables/{tableName}/columns", method: "post", ...variables, signal }
2814
+ {
2815
+ url: "/db/{dbBranchName}/tables/{tableName}/columns",
2816
+ method: "post",
2817
+ ...variables,
2818
+ signal
2819
+ }
984
2820
  );
985
2821
  const getColumn = (variables, signal) => dataPlaneFetch({
986
2822
  url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
@@ -988,15 +2824,30 @@ const getColumn = (variables, signal) => dataPlaneFetch({
988
2824
  ...variables,
989
2825
  signal
990
2826
  });
991
- const updateColumn = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}", method: "patch", ...variables, signal });
2827
+ const updateColumn = (variables, signal) => dataPlaneFetch({
2828
+ url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
2829
+ method: "patch",
2830
+ ...variables,
2831
+ signal
2832
+ });
992
2833
  const deleteColumn = (variables, signal) => dataPlaneFetch({
993
2834
  url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
994
2835
  method: "delete",
995
2836
  ...variables,
996
2837
  signal
997
2838
  });
998
- const branchTransaction = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/transaction", method: "post", ...variables, signal });
999
- const insertRecord = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/data", method: "post", ...variables, signal });
2839
+ const branchTransaction = (variables, signal) => dataPlaneFetch({
2840
+ url: "/db/{dbBranchName}/transaction",
2841
+ method: "post",
2842
+ ...variables,
2843
+ signal
2844
+ });
2845
+ const insertRecord = (variables, signal) => dataPlaneFetch({
2846
+ url: "/db/{dbBranchName}/tables/{tableName}/data",
2847
+ method: "post",
2848
+ ...variables,
2849
+ signal
2850
+ });
1000
2851
  const getFileItem = (variables, signal) => dataPlaneFetch({
1001
2852
  url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}/column/{columnName}/file/{fileId}",
1002
2853
  method: "get",
@@ -1039,11 +2890,36 @@ const getRecord = (variables, signal) => dataPlaneFetch({
1039
2890
  ...variables,
1040
2891
  signal
1041
2892
  });
1042
- const insertRecordWithID = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}", method: "put", ...variables, signal });
1043
- const updateRecordWithID = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}", method: "patch", ...variables, signal });
1044
- const upsertRecordWithID = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}", method: "post", ...variables, signal });
1045
- const deleteRecord = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}", method: "delete", ...variables, signal });
1046
- const bulkInsertTableRecords = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/bulk", method: "post", ...variables, signal });
2893
+ const insertRecordWithID = (variables, signal) => dataPlaneFetch({
2894
+ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
2895
+ method: "put",
2896
+ ...variables,
2897
+ signal
2898
+ });
2899
+ const updateRecordWithID = (variables, signal) => dataPlaneFetch({
2900
+ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
2901
+ method: "patch",
2902
+ ...variables,
2903
+ signal
2904
+ });
2905
+ const upsertRecordWithID = (variables, signal) => dataPlaneFetch({
2906
+ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
2907
+ method: "post",
2908
+ ...variables,
2909
+ signal
2910
+ });
2911
+ const deleteRecord = (variables, signal) => dataPlaneFetch({
2912
+ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
2913
+ method: "delete",
2914
+ ...variables,
2915
+ signal
2916
+ });
2917
+ const bulkInsertTableRecords = (variables, signal) => dataPlaneFetch({
2918
+ url: "/db/{dbBranchName}/tables/{tableName}/bulk",
2919
+ method: "post",
2920
+ ...variables,
2921
+ signal
2922
+ });
1047
2923
  const queryTable = (variables, signal) => dataPlaneFetch({
1048
2924
  url: "/db/{dbBranchName}/tables/{tableName}/query",
1049
2925
  method: "post",
@@ -1062,16 +2938,36 @@ const searchTable = (variables, signal) => dataPlaneFetch({
1062
2938
  ...variables,
1063
2939
  signal
1064
2940
  });
1065
- const vectorSearchTable = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/vectorSearch", method: "post", ...variables, signal });
2941
+ const vectorSearchTable = (variables, signal) => dataPlaneFetch({
2942
+ url: "/db/{dbBranchName}/tables/{tableName}/vectorSearch",
2943
+ method: "post",
2944
+ ...variables,
2945
+ signal
2946
+ });
1066
2947
  const askTable = (variables, signal) => dataPlaneFetch({
1067
2948
  url: "/db/{dbBranchName}/tables/{tableName}/ask",
1068
2949
  method: "post",
1069
2950
  ...variables,
1070
2951
  signal
1071
2952
  });
1072
- const askTableSession = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/ask/{sessionId}", method: "post", ...variables, signal });
1073
- const summarizeTable = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/summarize", method: "post", ...variables, signal });
1074
- const aggregateTable = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/aggregate", method: "post", ...variables, signal });
2953
+ const askTableSession = (variables, signal) => dataPlaneFetch({
2954
+ url: "/db/{dbBranchName}/tables/{tableName}/ask/{sessionId}",
2955
+ method: "post",
2956
+ ...variables,
2957
+ signal
2958
+ });
2959
+ const summarizeTable = (variables, signal) => dataPlaneFetch({
2960
+ url: "/db/{dbBranchName}/tables/{tableName}/summarize",
2961
+ method: "post",
2962
+ ...variables,
2963
+ signal
2964
+ });
2965
+ const aggregateTable = (variables, signal) => dataPlaneFetch({
2966
+ url: "/db/{dbBranchName}/tables/{tableName}/aggregate",
2967
+ method: "post",
2968
+ ...variables,
2969
+ signal
2970
+ });
1075
2971
  const fileAccess = (variables, signal) => dataPlaneFetch({
1076
2972
  url: "/file/{fileId}",
1077
2973
  method: "get",
@@ -1090,15 +2986,34 @@ const sqlQuery = (variables, signal) => dataPlaneFetch({
1090
2986
  ...variables,
1091
2987
  signal
1092
2988
  });
2989
+ const sqlBatchQuery = (variables, signal) => dataPlaneFetch({
2990
+ url: "/db/{dbBranchName}/sql/batch",
2991
+ method: "post",
2992
+ ...variables,
2993
+ signal
2994
+ });
1093
2995
  const operationsByTag$2 = {
2996
+ tasks: { getTasks, getTaskStatus },
2997
+ cluster: {
2998
+ listClusterBranches,
2999
+ listClusterExtensions,
3000
+ installClusterExtension,
3001
+ dropClusterExtension,
3002
+ getClusterMetrics
3003
+ },
1094
3004
  migrations: {
1095
3005
  applyMigration,
3006
+ startMigration,
3007
+ completeMigration,
3008
+ rollbackMigration,
1096
3009
  adaptTable,
1097
3010
  adaptAllTables,
1098
3011
  getBranchMigrationJobStatus,
3012
+ getMigrationJobs,
1099
3013
  getMigrationJobStatus,
1100
3014
  getMigrationHistory,
1101
3015
  getSchema,
3016
+ getSchemas,
1102
3017
  getBranchMigrationHistory,
1103
3018
  getBranchMigrationPlan,
1104
3019
  executeBranchMigrationPlan,
@@ -1112,10 +3027,13 @@ const operationsByTag$2 = {
1112
3027
  },
1113
3028
  branch: {
1114
3029
  getBranchList,
3030
+ createBranchAsync,
1115
3031
  getBranchDetails,
1116
3032
  createBranch,
1117
3033
  deleteBranch,
1118
3034
  copyBranch,
3035
+ getBranchMoveStatus,
3036
+ moveBranch,
1119
3037
  updateBranchMetadata,
1120
3038
  getBranchMetadata,
1121
3039
  getBranchStats,
@@ -1157,7 +3075,16 @@ const operationsByTag$2 = {
1157
3075
  deleteRecord,
1158
3076
  bulkInsertTableRecords
1159
3077
  },
1160
- files: { getFileItem, putFileItem, deleteFileItem, getFile, putFile, deleteFile, fileAccess, fileUpload },
3078
+ files: {
3079
+ getFileItem,
3080
+ putFileItem,
3081
+ deleteFileItem,
3082
+ getFile,
3083
+ putFile,
3084
+ deleteFile,
3085
+ fileAccess,
3086
+ fileUpload
3087
+ },
1161
3088
  searchAndFilter: {
1162
3089
  queryTable,
1163
3090
  searchBranch,
@@ -1168,7 +3095,7 @@ const operationsByTag$2 = {
1168
3095
  summarizeTable,
1169
3096
  aggregateTable
1170
3097
  },
1171
- sql: { sqlQuery }
3098
+ sql: { sqlQuery, sqlBatchQuery }
1172
3099
  };
1173
3100
 
1174
3101
  const controlPlaneFetch = async (options) => fetch$1({ ...options, endpoint: "controlPlane" });
@@ -1235,7 +3162,12 @@ const deleteOAuthAccessToken = (variables, signal) => controlPlaneFetch({
1235
3162
  ...variables,
1236
3163
  signal
1237
3164
  });
1238
- const updateOAuthAccessToken = (variables, signal) => controlPlaneFetch({ url: "/user/oauth/tokens/{token}", method: "patch", ...variables, signal });
3165
+ const updateOAuthAccessToken = (variables, signal) => controlPlaneFetch({
3166
+ url: "/user/oauth/tokens/{token}",
3167
+ method: "patch",
3168
+ ...variables,
3169
+ signal
3170
+ });
1239
3171
  const getWorkspacesList = (variables, signal) => controlPlaneFetch({
1240
3172
  url: "/workspaces",
1241
3173
  method: "get",
@@ -1266,49 +3198,150 @@ const deleteWorkspace = (variables, signal) => controlPlaneFetch({
1266
3198
  ...variables,
1267
3199
  signal
1268
3200
  });
1269
- const getWorkspaceSettings = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/settings", method: "get", ...variables, signal });
1270
- const updateWorkspaceSettings = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/settings", method: "patch", ...variables, signal });
1271
- const getWorkspaceMembersList = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/members", method: "get", ...variables, signal });
1272
- const updateWorkspaceMemberRole = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/members/{userId}", method: "put", ...variables, signal });
3201
+ const getWorkspaceSettings = (variables, signal) => controlPlaneFetch({
3202
+ url: "/workspaces/{workspaceId}/settings",
3203
+ method: "get",
3204
+ ...variables,
3205
+ signal
3206
+ });
3207
+ const updateWorkspaceSettings = (variables, signal) => controlPlaneFetch({
3208
+ url: "/workspaces/{workspaceId}/settings",
3209
+ method: "patch",
3210
+ ...variables,
3211
+ signal
3212
+ });
3213
+ const getWorkspaceMembersList = (variables, signal) => controlPlaneFetch({
3214
+ url: "/workspaces/{workspaceId}/members",
3215
+ method: "get",
3216
+ ...variables,
3217
+ signal
3218
+ });
3219
+ const updateWorkspaceMemberRole = (variables, signal) => controlPlaneFetch({
3220
+ url: "/workspaces/{workspaceId}/members/{userId}",
3221
+ method: "put",
3222
+ ...variables,
3223
+ signal
3224
+ });
1273
3225
  const removeWorkspaceMember = (variables, signal) => controlPlaneFetch({
1274
3226
  url: "/workspaces/{workspaceId}/members/{userId}",
1275
3227
  method: "delete",
1276
3228
  ...variables,
1277
3229
  signal
1278
3230
  });
1279
- const inviteWorkspaceMember = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/invites", method: "post", ...variables, signal });
1280
- const updateWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/invites/{inviteId}", method: "patch", ...variables, signal });
1281
- const cancelWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/invites/{inviteId}", method: "delete", ...variables, signal });
1282
- const acceptWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/invites/{inviteKey}/accept", method: "post", ...variables, signal });
1283
- const resendWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/invites/{inviteId}/resend", method: "post", ...variables, signal });
1284
- const listClusters = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/clusters", method: "get", ...variables, signal });
1285
- const createCluster = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/clusters", method: "post", ...variables, signal });
3231
+ const inviteWorkspaceMember = (variables, signal) => controlPlaneFetch({
3232
+ url: "/workspaces/{workspaceId}/invites",
3233
+ method: "post",
3234
+ ...variables,
3235
+ signal
3236
+ });
3237
+ const updateWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
3238
+ url: "/workspaces/{workspaceId}/invites/{inviteId}",
3239
+ method: "patch",
3240
+ ...variables,
3241
+ signal
3242
+ });
3243
+ const cancelWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
3244
+ url: "/workspaces/{workspaceId}/invites/{inviteId}",
3245
+ method: "delete",
3246
+ ...variables,
3247
+ signal
3248
+ });
3249
+ const acceptWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
3250
+ url: "/workspaces/{workspaceId}/invites/{inviteKey}/accept",
3251
+ method: "post",
3252
+ ...variables,
3253
+ signal
3254
+ });
3255
+ const resendWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
3256
+ url: "/workspaces/{workspaceId}/invites/{inviteId}/resend",
3257
+ method: "post",
3258
+ ...variables,
3259
+ signal
3260
+ });
3261
+ const listClusters = (variables, signal) => controlPlaneFetch({
3262
+ url: "/workspaces/{workspaceId}/clusters",
3263
+ method: "get",
3264
+ ...variables,
3265
+ signal
3266
+ });
3267
+ const createCluster = (variables, signal) => controlPlaneFetch({
3268
+ url: "/workspaces/{workspaceId}/clusters",
3269
+ method: "post",
3270
+ ...variables,
3271
+ signal
3272
+ });
1286
3273
  const getCluster = (variables, signal) => controlPlaneFetch({
1287
3274
  url: "/workspaces/{workspaceId}/clusters/{clusterId}",
1288
3275
  method: "get",
1289
3276
  ...variables,
1290
3277
  signal
1291
3278
  });
1292
- const updateCluster = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/clusters/{clusterId}", method: "patch", ...variables, signal });
3279
+ const updateCluster = (variables, signal) => controlPlaneFetch({
3280
+ url: "/workspaces/{workspaceId}/clusters/{clusterId}",
3281
+ method: "patch",
3282
+ ...variables,
3283
+ signal
3284
+ });
3285
+ const deleteCluster = (variables, signal) => controlPlaneFetch({
3286
+ url: "/workspaces/{workspaceId}/clusters/{clusterId}",
3287
+ method: "delete",
3288
+ ...variables,
3289
+ signal
3290
+ });
1293
3291
  const getDatabaseList = (variables, signal) => controlPlaneFetch({
1294
3292
  url: "/workspaces/{workspaceId}/dbs",
1295
3293
  method: "get",
1296
3294
  ...variables,
1297
3295
  signal
1298
3296
  });
1299
- const createDatabase = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}", method: "put", ...variables, signal });
3297
+ const createDatabase = (variables, signal) => controlPlaneFetch({
3298
+ url: "/workspaces/{workspaceId}/dbs/{dbName}",
3299
+ method: "put",
3300
+ ...variables,
3301
+ signal
3302
+ });
1300
3303
  const deleteDatabase = (variables, signal) => controlPlaneFetch({
1301
3304
  url: "/workspaces/{workspaceId}/dbs/{dbName}",
1302
3305
  method: "delete",
1303
3306
  ...variables,
1304
3307
  signal
1305
3308
  });
1306
- const getDatabaseMetadata = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}", method: "get", ...variables, signal });
1307
- const updateDatabaseMetadata = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}", method: "patch", ...variables, signal });
1308
- const renameDatabase = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}/rename", method: "post", ...variables, signal });
1309
- const getDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}/github", method: "get", ...variables, signal });
1310
- const updateDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}/github", method: "put", ...variables, signal });
1311
- const deleteDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}/github", method: "delete", ...variables, signal });
3309
+ const getDatabaseMetadata = (variables, signal) => controlPlaneFetch({
3310
+ url: "/workspaces/{workspaceId}/dbs/{dbName}",
3311
+ method: "get",
3312
+ ...variables,
3313
+ signal
3314
+ });
3315
+ const updateDatabaseMetadata = (variables, signal) => controlPlaneFetch({
3316
+ url: "/workspaces/{workspaceId}/dbs/{dbName}",
3317
+ method: "patch",
3318
+ ...variables,
3319
+ signal
3320
+ });
3321
+ const renameDatabase = (variables, signal) => controlPlaneFetch({
3322
+ url: "/workspaces/{workspaceId}/dbs/{dbName}/rename",
3323
+ method: "post",
3324
+ ...variables,
3325
+ signal
3326
+ });
3327
+ const getDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
3328
+ url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
3329
+ method: "get",
3330
+ ...variables,
3331
+ signal
3332
+ });
3333
+ const updateDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
3334
+ url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
3335
+ method: "put",
3336
+ ...variables,
3337
+ signal
3338
+ });
3339
+ const deleteDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
3340
+ url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
3341
+ method: "delete",
3342
+ ...variables,
3343
+ signal
3344
+ });
1312
3345
  const listRegions = (variables, signal) => controlPlaneFetch({
1313
3346
  url: "/workspaces/{workspaceId}/regions",
1314
3347
  method: "get",
@@ -1346,7 +3379,13 @@ const operationsByTag$1 = {
1346
3379
  acceptWorkspaceMemberInvite,
1347
3380
  resendWorkspaceMemberInvite
1348
3381
  },
1349
- xbcontrolOther: { listClusters, createCluster, getCluster, updateCluster },
3382
+ xbcontrolOther: {
3383
+ listClusters,
3384
+ createCluster,
3385
+ getCluster,
3386
+ updateCluster,
3387
+ deleteCluster
3388
+ },
1350
3389
  databases: {
1351
3390
  getDatabaseList,
1352
3391
  createDatabase,
@@ -1366,7 +3405,7 @@ const operationsByTag = deepMerge(operationsByTag$2, operationsByTag$1);
1366
3405
  const buildApiClient = () => class {
1367
3406
  constructor(options = {}) {
1368
3407
  const provider = options.host ?? "production";
1369
- const apiKey = options.apiKey ?? getAPIKey();
3408
+ const apiKey = options.apiKey;
1370
3409
  const trace = options.trace ?? defaultTrace;
1371
3410
  const clientID = generateUUID();
1372
3411
  if (!apiKey) {
@@ -1433,8 +3472,7 @@ function buildTransformString(transformations) {
1433
3472
  ).join(",");
1434
3473
  }
1435
3474
  function transformImage(url, ...transformations) {
1436
- if (!isDefined(url))
1437
- return void 0;
3475
+ if (!isDefined(url)) return void 0;
1438
3476
  const newTransformations = buildTransformString(transformations);
1439
3477
  const { hostname, pathname, search } = new URL(url);
1440
3478
  const pathParts = pathname.split("/");
@@ -1547,8 +3585,7 @@ class XataFile {
1547
3585
  }
1548
3586
  }
1549
3587
  const parseInputFileEntry = async (entry) => {
1550
- if (!isDefined(entry))
1551
- return null;
3588
+ if (!isDefined(entry)) return null;
1552
3589
  const { id, name, mediaType, base64Content, enablePublicUrl, signedUrlTimeout, uploadUrlTimeout } = await entry;
1553
3590
  return compactObject({
1554
3591
  id,
@@ -1563,24 +3600,19 @@ const parseInputFileEntry = async (entry) => {
1563
3600
  };
1564
3601
 
1565
3602
  function cleanFilter(filter) {
1566
- if (!isDefined(filter))
1567
- return void 0;
1568
- if (!isObject(filter))
1569
- return filter;
3603
+ if (!isDefined(filter)) return void 0;
3604
+ if (!isObject(filter)) return filter;
1570
3605
  const values = Object.fromEntries(
1571
3606
  Object.entries(filter).reduce((acc, [key, value]) => {
1572
- if (!isDefined(value))
1573
- return acc;
3607
+ if (!isDefined(value)) return acc;
1574
3608
  if (Array.isArray(value)) {
1575
3609
  const clean = value.map((item) => cleanFilter(item)).filter((item) => isDefined(item));
1576
- if (clean.length === 0)
1577
- return acc;
3610
+ if (clean.length === 0) return acc;
1578
3611
  return [...acc, [key, clean]];
1579
3612
  }
1580
3613
  if (isObject(value)) {
1581
3614
  const clean = cleanFilter(value);
1582
- if (!isDefined(clean))
1583
- return acc;
3615
+ if (!isDefined(clean)) return acc;
1584
3616
  return [...acc, [key, clean]];
1585
3617
  }
1586
3618
  return [...acc, [key, value]];
@@ -1590,10 +3622,8 @@ function cleanFilter(filter) {
1590
3622
  }
1591
3623
 
1592
3624
  function stringifyJson(value) {
1593
- if (!isDefined(value))
1594
- return value;
1595
- if (isString(value))
1596
- return value;
3625
+ if (!isDefined(value)) return value;
3626
+ if (isString(value)) return value;
1597
3627
  try {
1598
3628
  return JSON.stringify(value);
1599
3629
  } catch (e) {
@@ -1608,28 +3638,17 @@ function parseJson(value) {
1608
3638
  }
1609
3639
  }
1610
3640
 
1611
- var __accessCheck$5 = (obj, member, msg) => {
1612
- if (!member.has(obj))
1613
- throw TypeError("Cannot " + msg);
1614
- };
1615
- var __privateGet$4 = (obj, member, getter) => {
1616
- __accessCheck$5(obj, member, "read from private field");
1617
- return getter ? getter.call(obj) : member.get(obj);
1618
- };
1619
- var __privateAdd$5 = (obj, member, value) => {
1620
- if (member.has(obj))
1621
- throw TypeError("Cannot add the same private member more than once");
1622
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
1623
- };
1624
- var __privateSet$3 = (obj, member, value, setter) => {
1625
- __accessCheck$5(obj, member, "write to private field");
1626
- setter ? setter.call(obj, value) : member.set(obj, value);
1627
- return value;
3641
+ var __typeError$5 = (msg) => {
3642
+ throw TypeError(msg);
1628
3643
  };
3644
+ var __accessCheck$5 = (obj, member, msg) => member.has(obj) || __typeError$5("Cannot " + msg);
3645
+ var __privateGet$4 = (obj, member, getter) => (__accessCheck$5(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
3646
+ var __privateAdd$5 = (obj, member, value) => member.has(obj) ? __typeError$5("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
3647
+ var __privateSet$3 = (obj, member, value, setter) => (__accessCheck$5(obj, member, "write to private field"), member.set(obj, value), value);
1629
3648
  var _query, _page;
1630
3649
  class Page {
1631
3650
  constructor(query, meta, records = []) {
1632
- __privateAdd$5(this, _query, void 0);
3651
+ __privateAdd$5(this, _query);
1633
3652
  __privateSet$3(this, _query, query);
1634
3653
  this.meta = meta;
1635
3654
  this.records = new PageRecordArray(this, records);
@@ -1716,7 +3735,7 @@ class RecordArray extends Array {
1716
3735
  const _PageRecordArray = class _PageRecordArray extends Array {
1717
3736
  constructor(...args) {
1718
3737
  super(..._PageRecordArray.parseConstructorParams(...args));
1719
- __privateAdd$5(this, _page, void 0);
3738
+ __privateAdd$5(this, _page);
1720
3739
  __privateSet$3(this, _page, isObject(args[0]?.meta) ? args[0] : { meta: { page: { cursor: "", more: false } }, records: [] });
1721
3740
  }
1722
3741
  static parseConstructorParams(...args) {
@@ -1787,34 +3806,20 @@ const _PageRecordArray = class _PageRecordArray extends Array {
1787
3806
  _page = new WeakMap();
1788
3807
  let PageRecordArray = _PageRecordArray;
1789
3808
 
1790
- var __accessCheck$4 = (obj, member, msg) => {
1791
- if (!member.has(obj))
1792
- throw TypeError("Cannot " + msg);
1793
- };
1794
- var __privateGet$3 = (obj, member, getter) => {
1795
- __accessCheck$4(obj, member, "read from private field");
1796
- return getter ? getter.call(obj) : member.get(obj);
1797
- };
1798
- var __privateAdd$4 = (obj, member, value) => {
1799
- if (member.has(obj))
1800
- throw TypeError("Cannot add the same private member more than once");
1801
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
1802
- };
1803
- var __privateSet$2 = (obj, member, value, setter) => {
1804
- __accessCheck$4(obj, member, "write to private field");
1805
- setter ? setter.call(obj, value) : member.set(obj, value);
1806
- return value;
1807
- };
1808
- var __privateMethod$3 = (obj, member, method) => {
1809
- __accessCheck$4(obj, member, "access private method");
1810
- return method;
3809
+ var __typeError$4 = (msg) => {
3810
+ throw TypeError(msg);
1811
3811
  };
1812
- var _table$1, _repository, _data, _cleanFilterConstraint, cleanFilterConstraint_fn;
3812
+ var __accessCheck$4 = (obj, member, msg) => member.has(obj) || __typeError$4("Cannot " + msg);
3813
+ var __privateGet$3 = (obj, member, getter) => (__accessCheck$4(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
3814
+ var __privateAdd$4 = (obj, member, value) => member.has(obj) ? __typeError$4("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
3815
+ var __privateSet$2 = (obj, member, value, setter) => (__accessCheck$4(obj, member, "write to private field"), member.set(obj, value), value);
3816
+ var __privateMethod$3 = (obj, member, method) => (__accessCheck$4(obj, member, "access private method"), method);
3817
+ var _table$1, _repository, _data, _Query_instances, cleanFilterConstraint_fn;
1813
3818
  const _Query = class _Query {
1814
3819
  constructor(repository, table, data, rawParent) {
1815
- __privateAdd$4(this, _cleanFilterConstraint);
1816
- __privateAdd$4(this, _table$1, void 0);
1817
- __privateAdd$4(this, _repository, void 0);
3820
+ __privateAdd$4(this, _Query_instances);
3821
+ __privateAdd$4(this, _table$1);
3822
+ __privateAdd$4(this, _repository);
1818
3823
  __privateAdd$4(this, _data, { filter: {} });
1819
3824
  // Implements pagination
1820
3825
  this.meta = { page: { cursor: "start", more: true, size: PAGINATION_DEFAULT_SIZE } };
@@ -1892,12 +3897,12 @@ const _Query = class _Query {
1892
3897
  filter(a, b) {
1893
3898
  if (arguments.length === 1) {
1894
3899
  const constraints = Object.entries(a ?? {}).map(([column, constraint]) => ({
1895
- [column]: __privateMethod$3(this, _cleanFilterConstraint, cleanFilterConstraint_fn).call(this, column, constraint)
3900
+ [column]: __privateMethod$3(this, _Query_instances, cleanFilterConstraint_fn).call(this, column, constraint)
1896
3901
  }));
1897
3902
  const $all = compact([__privateGet$3(this, _data).filter?.$all].flat().concat(constraints));
1898
3903
  return new _Query(__privateGet$3(this, _repository), __privateGet$3(this, _table$1), { filter: { $all } }, __privateGet$3(this, _data));
1899
3904
  } else {
1900
- const constraints = isDefined(a) && isDefined(b) ? [{ [a]: __privateMethod$3(this, _cleanFilterConstraint, cleanFilterConstraint_fn).call(this, a, b) }] : void 0;
3905
+ const constraints = isDefined(a) && isDefined(b) ? [{ [a]: __privateMethod$3(this, _Query_instances, cleanFilterConstraint_fn).call(this, a, b) }] : void 0;
1901
3906
  const $all = compact([__privateGet$3(this, _data).filter?.$all].flat().concat(constraints));
1902
3907
  return new _Query(__privateGet$3(this, _repository), __privateGet$3(this, _table$1), { filter: { $all } }, __privateGet$3(this, _data));
1903
3908
  }
@@ -1976,8 +3981,7 @@ const _Query = class _Query {
1976
3981
  }
1977
3982
  async getFirstOrThrow(options = {}) {
1978
3983
  const records = await this.getMany({ ...options, pagination: { size: 1 } });
1979
- if (records[0] === void 0)
1980
- throw new Error("No results found.");
3984
+ if (records[0] === void 0) throw new Error("No results found.");
1981
3985
  return records[0];
1982
3986
  }
1983
3987
  async summarize(params = {}) {
@@ -2032,7 +4036,7 @@ const _Query = class _Query {
2032
4036
  _table$1 = new WeakMap();
2033
4037
  _repository = new WeakMap();
2034
4038
  _data = new WeakMap();
2035
- _cleanFilterConstraint = new WeakSet();
4039
+ _Query_instances = new WeakSet();
2036
4040
  cleanFilterConstraint_fn = function(column, value) {
2037
4041
  const columnType = __privateGet$3(this, _table$1).schema?.columns.find(({ name }) => name === column)?.type;
2038
4042
  if (columnType === "multiple" && (isString(value) || isStringArray(value))) {
@@ -2093,8 +4097,7 @@ function isSortFilterString(value) {
2093
4097
  }
2094
4098
  function isSortFilterBase(filter) {
2095
4099
  return isObject(filter) && Object.entries(filter).every(([key, value]) => {
2096
- if (key === "*")
2097
- return value === "random";
4100
+ if (key === "*") return value === "random";
2098
4101
  return value === "asc" || value === "desc";
2099
4102
  });
2100
4103
  }
@@ -2115,29 +4118,15 @@ function buildSortFilter(filter) {
2115
4118
  }
2116
4119
  }
2117
4120
 
2118
- var __accessCheck$3 = (obj, member, msg) => {
2119
- if (!member.has(obj))
2120
- throw TypeError("Cannot " + msg);
4121
+ var __typeError$3 = (msg) => {
4122
+ throw TypeError(msg);
2121
4123
  };
2122
- var __privateGet$2 = (obj, member, getter) => {
2123
- __accessCheck$3(obj, member, "read from private field");
2124
- return getter ? getter.call(obj) : member.get(obj);
2125
- };
2126
- var __privateAdd$3 = (obj, member, value) => {
2127
- if (member.has(obj))
2128
- throw TypeError("Cannot add the same private member more than once");
2129
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
2130
- };
2131
- var __privateSet$1 = (obj, member, value, setter) => {
2132
- __accessCheck$3(obj, member, "write to private field");
2133
- setter ? setter.call(obj, value) : member.set(obj, value);
2134
- return value;
2135
- };
2136
- var __privateMethod$2 = (obj, member, method) => {
2137
- __accessCheck$3(obj, member, "access private method");
2138
- return method;
2139
- };
2140
- var _table, _getFetchProps, _db, _schemaTables, _trace, _insertRecordWithoutId, insertRecordWithoutId_fn, _insertRecordWithId, insertRecordWithId_fn, _insertRecords, insertRecords_fn, _updateRecordWithID, updateRecordWithID_fn, _updateRecords, updateRecords_fn, _upsertRecordWithID, upsertRecordWithID_fn, _deleteRecord, deleteRecord_fn, _deleteRecords, deleteRecords_fn, _getSchemaTables, getSchemaTables_fn, _transformObjectToApi, transformObjectToApi_fn;
4124
+ var __accessCheck$3 = (obj, member, msg) => member.has(obj) || __typeError$3("Cannot " + msg);
4125
+ var __privateGet$2 = (obj, member, getter) => (__accessCheck$3(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
4126
+ var __privateAdd$3 = (obj, member, value) => member.has(obj) ? __typeError$3("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
4127
+ var __privateSet$1 = (obj, member, value, setter) => (__accessCheck$3(obj, member, "write to private field"), member.set(obj, value), value);
4128
+ var __privateMethod$2 = (obj, member, method) => (__accessCheck$3(obj, member, "access private method"), method);
4129
+ var _table, _getFetchProps, _db, _schemaTables, _trace, _RestRepository_instances, insertRecordWithoutId_fn, insertRecordWithId_fn, insertRecords_fn, updateRecordWithID_fn, updateRecords_fn, upsertRecordWithID_fn, deleteRecord_fn, deleteRecords_fn, getSchemaTables_fn, transformObjectToApi_fn;
2141
4130
  const BULK_OPERATION_MAX_SIZE = 1e3;
2142
4131
  class Repository extends Query {
2143
4132
  }
@@ -2148,21 +4137,12 @@ class RestRepository extends Query {
2148
4137
  { name: options.table, schema: options.schemaTables?.find((table) => table.name === options.table) },
2149
4138
  {}
2150
4139
  );
2151
- __privateAdd$3(this, _insertRecordWithoutId);
2152
- __privateAdd$3(this, _insertRecordWithId);
2153
- __privateAdd$3(this, _insertRecords);
2154
- __privateAdd$3(this, _updateRecordWithID);
2155
- __privateAdd$3(this, _updateRecords);
2156
- __privateAdd$3(this, _upsertRecordWithID);
2157
- __privateAdd$3(this, _deleteRecord);
2158
- __privateAdd$3(this, _deleteRecords);
2159
- __privateAdd$3(this, _getSchemaTables);
2160
- __privateAdd$3(this, _transformObjectToApi);
2161
- __privateAdd$3(this, _table, void 0);
2162
- __privateAdd$3(this, _getFetchProps, void 0);
2163
- __privateAdd$3(this, _db, void 0);
2164
- __privateAdd$3(this, _schemaTables, void 0);
2165
- __privateAdd$3(this, _trace, void 0);
4140
+ __privateAdd$3(this, _RestRepository_instances);
4141
+ __privateAdd$3(this, _table);
4142
+ __privateAdd$3(this, _getFetchProps);
4143
+ __privateAdd$3(this, _db);
4144
+ __privateAdd$3(this, _schemaTables);
4145
+ __privateAdd$3(this, _trace);
2166
4146
  __privateSet$1(this, _table, options.table);
2167
4147
  __privateSet$1(this, _db, options.db);
2168
4148
  __privateSet$1(this, _schemaTables, options.schemaTables);
@@ -2181,31 +4161,28 @@ class RestRepository extends Query {
2181
4161
  return __privateGet$2(this, _trace).call(this, "create", async () => {
2182
4162
  const ifVersion = parseIfVersion(b, c, d);
2183
4163
  if (Array.isArray(a)) {
2184
- if (a.length === 0)
2185
- return [];
2186
- const ids = await __privateMethod$2(this, _insertRecords, insertRecords_fn).call(this, a, { ifVersion, createOnly: true });
4164
+ if (a.length === 0) return [];
4165
+ const ids = await __privateMethod$2(this, _RestRepository_instances, insertRecords_fn).call(this, a, { ifVersion, createOnly: true });
2187
4166
  const columns = isValidSelectableColumns(b) ? b : ["*"];
2188
4167
  const result = await this.read(ids, columns);
2189
4168
  return result;
2190
4169
  }
2191
4170
  if (isString(a) && isObject(b)) {
2192
- if (a === "")
2193
- throw new Error("The id can't be empty");
4171
+ if (a === "") throw new Error("The id can't be empty");
2194
4172
  const columns = isValidSelectableColumns(c) ? c : void 0;
2195
- return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: true, ifVersion });
4173
+ return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: true, ifVersion });
2196
4174
  }
2197
4175
  if (isObject(a) && isString(a.xata_id)) {
2198
- if (a.xata_id === "")
2199
- throw new Error("The id can't be empty");
4176
+ if (a.xata_id === "") throw new Error("The id can't be empty");
2200
4177
  const columns = isValidSelectableColumns(b) ? b : void 0;
2201
- return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, {
4178
+ return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, {
2202
4179
  createOnly: true,
2203
4180
  ifVersion
2204
4181
  });
2205
4182
  }
2206
4183
  if (isObject(a)) {
2207
4184
  const columns = isValidSelectableColumns(b) ? b : void 0;
2208
- return __privateMethod$2(this, _insertRecordWithoutId, insertRecordWithoutId_fn).call(this, a, columns);
4185
+ return __privateMethod$2(this, _RestRepository_instances, insertRecordWithoutId_fn).call(this, a, columns);
2209
4186
  }
2210
4187
  throw new Error("Invalid arguments for create method");
2211
4188
  });
@@ -2214,8 +4191,7 @@ class RestRepository extends Query {
2214
4191
  return __privateGet$2(this, _trace).call(this, "read", async () => {
2215
4192
  const columns = isValidSelectableColumns(b) ? b : ["*"];
2216
4193
  if (Array.isArray(a)) {
2217
- if (a.length === 0)
2218
- return [];
4194
+ if (a.length === 0) return [];
2219
4195
  const ids = a.map((item) => extractId(item));
2220
4196
  const finalObjects = await this.getAll({ filter: { xata_id: { $any: compact(ids) } }, columns });
2221
4197
  const dictionary = finalObjects.reduce((acc, object) => {
@@ -2238,7 +4214,7 @@ class RestRepository extends Query {
2238
4214
  queryParams: { columns },
2239
4215
  ...__privateGet$2(this, _getFetchProps).call(this)
2240
4216
  });
2241
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
4217
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
2242
4218
  return initObject(
2243
4219
  __privateGet$2(this, _db),
2244
4220
  schemaTables,
@@ -2279,11 +4255,10 @@ class RestRepository extends Query {
2279
4255
  return __privateGet$2(this, _trace).call(this, "update", async () => {
2280
4256
  const ifVersion = parseIfVersion(b, c, d);
2281
4257
  if (Array.isArray(a)) {
2282
- if (a.length === 0)
2283
- return [];
4258
+ if (a.length === 0) return [];
2284
4259
  const existing = await this.read(a, ["xata_id"]);
2285
4260
  const updates = a.filter((_item, index) => existing[index] !== null);
2286
- await __privateMethod$2(this, _updateRecords, updateRecords_fn).call(this, updates, {
4261
+ await __privateMethod$2(this, _RestRepository_instances, updateRecords_fn).call(this, updates, {
2287
4262
  ifVersion,
2288
4263
  upsert: false
2289
4264
  });
@@ -2294,15 +4269,14 @@ class RestRepository extends Query {
2294
4269
  try {
2295
4270
  if (isString(a) && isObject(b)) {
2296
4271
  const columns = isValidSelectableColumns(c) ? c : void 0;
2297
- return await __privateMethod$2(this, _updateRecordWithID, updateRecordWithID_fn).call(this, a, b, columns, { ifVersion });
4272
+ return await __privateMethod$2(this, _RestRepository_instances, updateRecordWithID_fn).call(this, a, b, columns, { ifVersion });
2298
4273
  }
2299
4274
  if (isObject(a) && isString(a.xata_id)) {
2300
4275
  const columns = isValidSelectableColumns(b) ? b : void 0;
2301
- return await __privateMethod$2(this, _updateRecordWithID, updateRecordWithID_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, { ifVersion });
4276
+ return await __privateMethod$2(this, _RestRepository_instances, updateRecordWithID_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, { ifVersion });
2302
4277
  }
2303
4278
  } catch (error) {
2304
- if (error.status === 422)
2305
- return null;
4279
+ if (error.status === 422) return null;
2306
4280
  throw error;
2307
4281
  }
2308
4282
  throw new Error("Invalid arguments for update method");
@@ -2331,9 +4305,8 @@ class RestRepository extends Query {
2331
4305
  return __privateGet$2(this, _trace).call(this, "createOrUpdate", async () => {
2332
4306
  const ifVersion = parseIfVersion(b, c, d);
2333
4307
  if (Array.isArray(a)) {
2334
- if (a.length === 0)
2335
- return [];
2336
- await __privateMethod$2(this, _updateRecords, updateRecords_fn).call(this, a, {
4308
+ if (a.length === 0) return [];
4309
+ await __privateMethod$2(this, _RestRepository_instances, updateRecords_fn).call(this, a, {
2337
4310
  ifVersion,
2338
4311
  upsert: true
2339
4312
  });
@@ -2342,16 +4315,14 @@ class RestRepository extends Query {
2342
4315
  return result;
2343
4316
  }
2344
4317
  if (isString(a) && isObject(b)) {
2345
- if (a === "")
2346
- throw new Error("The id can't be empty");
4318
+ if (a === "") throw new Error("The id can't be empty");
2347
4319
  const columns = isValidSelectableColumns(c) ? c : void 0;
2348
- return await __privateMethod$2(this, _upsertRecordWithID, upsertRecordWithID_fn).call(this, a, b, columns, { ifVersion });
4320
+ return await __privateMethod$2(this, _RestRepository_instances, upsertRecordWithID_fn).call(this, a, b, columns, { ifVersion });
2349
4321
  }
2350
4322
  if (isObject(a) && isString(a.xata_id)) {
2351
- if (a.xata_id === "")
2352
- throw new Error("The id can't be empty");
4323
+ if (a.xata_id === "") throw new Error("The id can't be empty");
2353
4324
  const columns = isValidSelectableColumns(c) ? c : void 0;
2354
- return await __privateMethod$2(this, _upsertRecordWithID, upsertRecordWithID_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, { ifVersion });
4325
+ return await __privateMethod$2(this, _RestRepository_instances, upsertRecordWithID_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, { ifVersion });
2355
4326
  }
2356
4327
  if (!isDefined(a) && isObject(b)) {
2357
4328
  return await this.create(b, c);
@@ -2366,24 +4337,21 @@ class RestRepository extends Query {
2366
4337
  return __privateGet$2(this, _trace).call(this, "createOrReplace", async () => {
2367
4338
  const ifVersion = parseIfVersion(b, c, d);
2368
4339
  if (Array.isArray(a)) {
2369
- if (a.length === 0)
2370
- return [];
2371
- const ids = await __privateMethod$2(this, _insertRecords, insertRecords_fn).call(this, a, { ifVersion, createOnly: false });
4340
+ if (a.length === 0) return [];
4341
+ const ids = await __privateMethod$2(this, _RestRepository_instances, insertRecords_fn).call(this, a, { ifVersion, createOnly: false });
2372
4342
  const columns = isValidSelectableColumns(b) ? b : ["*"];
2373
4343
  const result = await this.read(ids, columns);
2374
4344
  return result;
2375
4345
  }
2376
4346
  if (isString(a) && isObject(b)) {
2377
- if (a === "")
2378
- throw new Error("The id can't be empty");
4347
+ if (a === "") throw new Error("The id can't be empty");
2379
4348
  const columns = isValidSelectableColumns(c) ? c : void 0;
2380
- return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: false, ifVersion });
4349
+ return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: false, ifVersion });
2381
4350
  }
2382
4351
  if (isObject(a) && isString(a.xata_id)) {
2383
- if (a.xata_id === "")
2384
- throw new Error("The id can't be empty");
4352
+ if (a.xata_id === "") throw new Error("The id can't be empty");
2385
4353
  const columns = isValidSelectableColumns(c) ? c : void 0;
2386
- return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, {
4354
+ return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, {
2387
4355
  createOnly: false,
2388
4356
  ifVersion
2389
4357
  });
@@ -2400,25 +4368,22 @@ class RestRepository extends Query {
2400
4368
  async delete(a, b) {
2401
4369
  return __privateGet$2(this, _trace).call(this, "delete", async () => {
2402
4370
  if (Array.isArray(a)) {
2403
- if (a.length === 0)
2404
- return [];
4371
+ if (a.length === 0) return [];
2405
4372
  const ids = a.map((o) => {
2406
- if (isString(o))
2407
- return o;
2408
- if (isString(o.xata_id))
2409
- return o.xata_id;
4373
+ if (isString(o)) return o;
4374
+ if (isString(o.xata_id)) return o.xata_id;
2410
4375
  throw new Error("Invalid arguments for delete method");
2411
4376
  });
2412
4377
  const columns = isValidSelectableColumns(b) ? b : ["*"];
2413
4378
  const result = await this.read(a, columns);
2414
- await __privateMethod$2(this, _deleteRecords, deleteRecords_fn).call(this, ids);
4379
+ await __privateMethod$2(this, _RestRepository_instances, deleteRecords_fn).call(this, ids);
2415
4380
  return result;
2416
4381
  }
2417
4382
  if (isString(a)) {
2418
- return __privateMethod$2(this, _deleteRecord, deleteRecord_fn).call(this, a, b);
4383
+ return __privateMethod$2(this, _RestRepository_instances, deleteRecord_fn).call(this, a, b);
2419
4384
  }
2420
4385
  if (isObject(a) && isString(a.xata_id)) {
2421
- return __privateMethod$2(this, _deleteRecord, deleteRecord_fn).call(this, a.xata_id, b);
4386
+ return __privateMethod$2(this, _RestRepository_instances, deleteRecord_fn).call(this, a.xata_id, b);
2422
4387
  }
2423
4388
  throw new Error("Invalid arguments for delete method");
2424
4389
  });
@@ -2462,7 +4427,7 @@ class RestRepository extends Query {
2462
4427
  },
2463
4428
  ...__privateGet$2(this, _getFetchProps).call(this)
2464
4429
  });
2465
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
4430
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
2466
4431
  return {
2467
4432
  records: records.map((item) => initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), item, ["*"])),
2468
4433
  totalCount
@@ -2487,7 +4452,7 @@ class RestRepository extends Query {
2487
4452
  },
2488
4453
  ...__privateGet$2(this, _getFetchProps).call(this)
2489
4454
  });
2490
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
4455
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
2491
4456
  return {
2492
4457
  records: records.map((item) => initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), item, ["*"])),
2493
4458
  totalCount
@@ -2529,7 +4494,7 @@ class RestRepository extends Query {
2529
4494
  fetchOptions: data.fetchOptions,
2530
4495
  ...__privateGet$2(this, _getFetchProps).call(this)
2531
4496
  });
2532
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
4497
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
2533
4498
  const records = objects.map(
2534
4499
  (record) => initObject(
2535
4500
  __privateGet$2(this, _db),
@@ -2563,7 +4528,7 @@ class RestRepository extends Query {
2563
4528
  },
2564
4529
  ...__privateGet$2(this, _getFetchProps).call(this)
2565
4530
  });
2566
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
4531
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
2567
4532
  return {
2568
4533
  ...result,
2569
4534
  summaries: result.summaries.map(
@@ -2611,9 +4576,9 @@ _getFetchProps = new WeakMap();
2611
4576
  _db = new WeakMap();
2612
4577
  _schemaTables = new WeakMap();
2613
4578
  _trace = new WeakMap();
2614
- _insertRecordWithoutId = new WeakSet();
4579
+ _RestRepository_instances = new WeakSet();
2615
4580
  insertRecordWithoutId_fn = async function(object, columns = ["*"]) {
2616
- const record = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
4581
+ const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
2617
4582
  const response = await insertRecord({
2618
4583
  pathParams: {
2619
4584
  workspace: "{workspaceId}",
@@ -2625,14 +4590,12 @@ insertRecordWithoutId_fn = async function(object, columns = ["*"]) {
2625
4590
  body: record,
2626
4591
  ...__privateGet$2(this, _getFetchProps).call(this)
2627
4592
  });
2628
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
4593
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
2629
4594
  return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
2630
4595
  };
2631
- _insertRecordWithId = new WeakSet();
2632
4596
  insertRecordWithId_fn = async function(recordId, object, columns = ["*"], { createOnly, ifVersion }) {
2633
- if (!recordId)
2634
- return null;
2635
- const record = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
4597
+ if (!recordId) return null;
4598
+ const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
2636
4599
  const response = await insertRecordWithID({
2637
4600
  pathParams: {
2638
4601
  workspace: "{workspaceId}",
@@ -2645,13 +4608,12 @@ insertRecordWithId_fn = async function(recordId, object, columns = ["*"], { crea
2645
4608
  queryParams: { createOnly, columns, ifVersion },
2646
4609
  ...__privateGet$2(this, _getFetchProps).call(this)
2647
4610
  });
2648
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
4611
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
2649
4612
  return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
2650
4613
  };
2651
- _insertRecords = new WeakSet();
2652
4614
  insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
2653
4615
  const operations = await promiseMap(objects, async (object) => {
2654
- const record = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
4616
+ const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
2655
4617
  return { insert: { table: __privateGet$2(this, _table), record, createOnly, ifVersion } };
2656
4618
  });
2657
4619
  const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
@@ -2676,11 +4638,9 @@ insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
2676
4638
  }
2677
4639
  return ids;
2678
4640
  };
2679
- _updateRecordWithID = new WeakSet();
2680
4641
  updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVersion }) {
2681
- if (!recordId)
2682
- return null;
2683
- const { xata_id: _id, ...record } = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
4642
+ if (!recordId) return null;
4643
+ const { xata_id: _id, ...record } = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
2684
4644
  try {
2685
4645
  const response = await updateRecordWithID({
2686
4646
  pathParams: {
@@ -2694,7 +4654,7 @@ updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
2694
4654
  body: record,
2695
4655
  ...__privateGet$2(this, _getFetchProps).call(this)
2696
4656
  });
2697
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
4657
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
2698
4658
  return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
2699
4659
  } catch (e) {
2700
4660
  if (isObject(e) && e.status === 404) {
@@ -2703,10 +4663,9 @@ updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
2703
4663
  throw e;
2704
4664
  }
2705
4665
  };
2706
- _updateRecords = new WeakSet();
2707
4666
  updateRecords_fn = async function(objects, { ifVersion, upsert }) {
2708
4667
  const operations = await promiseMap(objects, async ({ xata_id, ...object }) => {
2709
- const fields = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
4668
+ const fields = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
2710
4669
  return { update: { table: __privateGet$2(this, _table), id: xata_id, ifVersion, upsert, fields } };
2711
4670
  });
2712
4671
  const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
@@ -2731,10 +4690,8 @@ updateRecords_fn = async function(objects, { ifVersion, upsert }) {
2731
4690
  }
2732
4691
  return ids;
2733
4692
  };
2734
- _upsertRecordWithID = new WeakSet();
2735
4693
  upsertRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVersion }) {
2736
- if (!recordId)
2737
- return null;
4694
+ if (!recordId) return null;
2738
4695
  const response = await upsertRecordWithID({
2739
4696
  pathParams: {
2740
4697
  workspace: "{workspaceId}",
@@ -2747,13 +4704,11 @@ upsertRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
2747
4704
  body: object,
2748
4705
  ...__privateGet$2(this, _getFetchProps).call(this)
2749
4706
  });
2750
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
4707
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
2751
4708
  return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
2752
4709
  };
2753
- _deleteRecord = new WeakSet();
2754
4710
  deleteRecord_fn = async function(recordId, columns = ["*"]) {
2755
- if (!recordId)
2756
- return null;
4711
+ if (!recordId) return null;
2757
4712
  try {
2758
4713
  const response = await deleteRecord({
2759
4714
  pathParams: {
@@ -2766,7 +4721,7 @@ deleteRecord_fn = async function(recordId, columns = ["*"]) {
2766
4721
  queryParams: { columns },
2767
4722
  ...__privateGet$2(this, _getFetchProps).call(this)
2768
4723
  });
2769
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
4724
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
2770
4725
  return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
2771
4726
  } catch (e) {
2772
4727
  if (isObject(e) && e.status === 404) {
@@ -2775,7 +4730,6 @@ deleteRecord_fn = async function(recordId, columns = ["*"]) {
2775
4730
  throw e;
2776
4731
  }
2777
4732
  };
2778
- _deleteRecords = new WeakSet();
2779
4733
  deleteRecords_fn = async function(recordIds) {
2780
4734
  const chunkedOperations = chunk(
2781
4735
  compact(recordIds).map((id) => ({ delete: { table: __privateGet$2(this, _table), id } })),
@@ -2793,10 +4747,8 @@ deleteRecords_fn = async function(recordIds) {
2793
4747
  });
2794
4748
  }
2795
4749
  };
2796
- _getSchemaTables = new WeakSet();
2797
4750
  getSchemaTables_fn = async function() {
2798
- if (__privateGet$2(this, _schemaTables))
2799
- return __privateGet$2(this, _schemaTables);
4751
+ if (__privateGet$2(this, _schemaTables)) return __privateGet$2(this, _schemaTables);
2800
4752
  const { schema } = await getBranchDetails({
2801
4753
  pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
2802
4754
  ...__privateGet$2(this, _getFetchProps).call(this)
@@ -2804,16 +4756,13 @@ getSchemaTables_fn = async function() {
2804
4756
  __privateSet$1(this, _schemaTables, schema.tables);
2805
4757
  return schema.tables;
2806
4758
  };
2807
- _transformObjectToApi = new WeakSet();
2808
4759
  transformObjectToApi_fn = async function(object) {
2809
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
4760
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
2810
4761
  const schema = schemaTables.find((table) => table.name === __privateGet$2(this, _table));
2811
- if (!schema)
2812
- throw new Error(`Table ${__privateGet$2(this, _table)} not found in schema`);
4762
+ if (!schema) throw new Error(`Table ${__privateGet$2(this, _table)} not found in schema`);
2813
4763
  const result = {};
2814
4764
  for (const [key, value] of Object.entries(object)) {
2815
- if (["xata_version", "xata_createdat", "xata_updatedat"].includes(key))
2816
- continue;
4765
+ if (["xata_version", "xata_createdat", "xata_updatedat"].includes(key)) continue;
2817
4766
  const type = schema.columns.find((column) => column.name === key)?.type;
2818
4767
  switch (type) {
2819
4768
  case "link": {
@@ -2843,11 +4792,9 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
2843
4792
  const data = {};
2844
4793
  Object.assign(data, { ...object });
2845
4794
  const { columns } = schemaTables.find(({ name }) => name === table) ?? {};
2846
- if (!columns)
2847
- console.error(`Table ${table} not found in schema`);
4795
+ if (!columns) console.error(`Table ${table} not found in schema`);
2848
4796
  for (const column of columns ?? []) {
2849
- if (!isValidColumn(selectedColumns, column))
2850
- continue;
4797
+ if (!isValidColumn(selectedColumns, column)) continue;
2851
4798
  const value = data[column.name];
2852
4799
  switch (column.type) {
2853
4800
  case "datetime": {
@@ -2933,15 +4880,12 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
2933
4880
  return record;
2934
4881
  };
2935
4882
  function extractId(value) {
2936
- if (isString(value))
2937
- return value;
2938
- if (isObject(value) && isString(value.xata_id))
2939
- return value.xata_id;
4883
+ if (isString(value)) return value;
4884
+ if (isObject(value) && isString(value.xata_id)) return value.xata_id;
2940
4885
  return void 0;
2941
4886
  }
2942
4887
  function isValidColumn(columns, column) {
2943
- if (columns.includes("*"))
2944
- return true;
4888
+ if (columns.includes("*")) return true;
2945
4889
  return columns.filter((item) => isString(item) && item.startsWith(column.name)).length > 0;
2946
4890
  }
2947
4891
  function parseIfVersion(...args) {
@@ -2981,19 +4925,12 @@ const includesAll = (value) => ({ $includesAll: value });
2981
4925
  const includesNone = (value) => ({ $includesNone: value });
2982
4926
  const includesAny = (value) => ({ $includesAny: value });
2983
4927
 
2984
- var __accessCheck$2 = (obj, member, msg) => {
2985
- if (!member.has(obj))
2986
- throw TypeError("Cannot " + msg);
2987
- };
2988
- var __privateGet$1 = (obj, member, getter) => {
2989
- __accessCheck$2(obj, member, "read from private field");
2990
- return getter ? getter.call(obj) : member.get(obj);
2991
- };
2992
- var __privateAdd$2 = (obj, member, value) => {
2993
- if (member.has(obj))
2994
- throw TypeError("Cannot add the same private member more than once");
2995
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
4928
+ var __typeError$2 = (msg) => {
4929
+ throw TypeError(msg);
2996
4930
  };
4931
+ var __accessCheck$2 = (obj, member, msg) => member.has(obj) || __typeError$2("Cannot " + msg);
4932
+ var __privateGet$1 = (obj, member, getter) => (__accessCheck$2(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
4933
+ var __privateAdd$2 = (obj, member, value) => member.has(obj) ? __typeError$2("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
2997
4934
  var _tables;
2998
4935
  class SchemaPlugin extends XataPlugin {
2999
4936
  constructor() {
@@ -3005,8 +4942,7 @@ class SchemaPlugin extends XataPlugin {
3005
4942
  {},
3006
4943
  {
3007
4944
  get: (_target, table) => {
3008
- if (!isString(table))
3009
- throw new Error("Invalid table name");
4945
+ if (!isString(table)) throw new Error("Invalid table name");
3010
4946
  if (__privateGet$1(this, _tables)[table] === void 0) {
3011
4947
  __privateGet$1(this, _tables)[table] = new RestRepository({ db, pluginOptions, table, schemaTables: pluginOptions.tables });
3012
4948
  }
@@ -3097,30 +5033,23 @@ function getContentType(file) {
3097
5033
  return "application/octet-stream";
3098
5034
  }
3099
5035
 
3100
- var __accessCheck$1 = (obj, member, msg) => {
3101
- if (!member.has(obj))
3102
- throw TypeError("Cannot " + msg);
3103
- };
3104
- var __privateAdd$1 = (obj, member, value) => {
3105
- if (member.has(obj))
3106
- throw TypeError("Cannot add the same private member more than once");
3107
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
5036
+ var __typeError$1 = (msg) => {
5037
+ throw TypeError(msg);
3108
5038
  };
3109
- var __privateMethod$1 = (obj, member, method) => {
3110
- __accessCheck$1(obj, member, "access private method");
3111
- return method;
3112
- };
3113
- var _search, search_fn;
5039
+ var __accessCheck$1 = (obj, member, msg) => member.has(obj) || __typeError$1("Cannot " + msg);
5040
+ var __privateAdd$1 = (obj, member, value) => member.has(obj) ? __typeError$1("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
5041
+ var __privateMethod$1 = (obj, member, method) => (__accessCheck$1(obj, member, "access private method"), method);
5042
+ var _SearchPlugin_instances, search_fn;
3114
5043
  class SearchPlugin extends XataPlugin {
3115
5044
  constructor(db) {
3116
5045
  super();
3117
5046
  this.db = db;
3118
- __privateAdd$1(this, _search);
5047
+ __privateAdd$1(this, _SearchPlugin_instances);
3119
5048
  }
3120
5049
  build(pluginOptions) {
3121
5050
  return {
3122
5051
  all: async (query, options = {}) => {
3123
- const { records, totalCount } = await __privateMethod$1(this, _search, search_fn).call(this, query, options, pluginOptions);
5052
+ const { records, totalCount } = await __privateMethod$1(this, _SearchPlugin_instances, search_fn).call(this, query, options, pluginOptions);
3124
5053
  return {
3125
5054
  totalCount,
3126
5055
  records: records.map((record) => {
@@ -3130,7 +5059,7 @@ class SearchPlugin extends XataPlugin {
3130
5059
  };
3131
5060
  },
3132
5061
  byTable: async (query, options = {}) => {
3133
- const { records: rawRecords, totalCount } = await __privateMethod$1(this, _search, search_fn).call(this, query, options, pluginOptions);
5062
+ const { records: rawRecords, totalCount } = await __privateMethod$1(this, _SearchPlugin_instances, search_fn).call(this, query, options, pluginOptions);
3134
5063
  const records = rawRecords.reduce((acc, record) => {
3135
5064
  const table = record.xata_table;
3136
5065
  const items = acc[table] ?? [];
@@ -3142,7 +5071,7 @@ class SearchPlugin extends XataPlugin {
3142
5071
  };
3143
5072
  }
3144
5073
  }
3145
- _search = new WeakSet();
5074
+ _SearchPlugin_instances = new WeakSet();
3146
5075
  search_fn = async function(query, options, pluginOptions) {
3147
5076
  const { tables, fuzziness, highlight, prefix, page } = options ?? {};
3148
5077
  const { records, totalCount } = await searchBranch({
@@ -3178,8 +5107,7 @@ function arrayString(val) {
3178
5107
  return result;
3179
5108
  }
3180
5109
  function prepareValue(value) {
3181
- if (!isDefined(value))
3182
- return null;
5110
+ if (!isDefined(value)) return null;
3183
5111
  if (value instanceof Date) {
3184
5112
  return value.toISOString();
3185
5113
  }
@@ -3219,19 +5147,28 @@ class SQLPlugin extends XataPlugin {
3219
5147
  throw new Error("Invalid usage of `xata.sql`. Please use it as a tagged template or with an object.");
3220
5148
  }
3221
5149
  const { statement, params, consistency, responseType } = prepareParams(query, parameters);
3222
- const {
3223
- records,
3224
- rows,
3225
- warning,
3226
- columns = []
3227
- } = await sqlQuery({
5150
+ const { warning, columns, ...response } = await sqlQuery({
3228
5151
  pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
3229
5152
  body: { statement, params, consistency, responseType },
3230
5153
  ...pluginOptions
3231
5154
  });
5155
+ const records = "records" in response ? response.records : void 0;
5156
+ const rows = "rows" in response ? response.rows : void 0;
3232
5157
  return { records, rows, warning, columns };
3233
5158
  };
3234
5159
  sqlFunction.connectionString = buildConnectionString(pluginOptions);
5160
+ sqlFunction.batch = async (query) => {
5161
+ const { results } = await sqlBatchQuery({
5162
+ pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
5163
+ body: {
5164
+ statements: query.statements.map(({ statement, params }) => ({ statement, params })),
5165
+ consistency: query.consistency,
5166
+ responseType: query.responseType
5167
+ },
5168
+ ...pluginOptions
5169
+ });
5170
+ return { results };
5171
+ };
3235
5172
  return sqlFunction;
3236
5173
  }
3237
5174
  }
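
The hunk above adds a `batch` entry point to the SQL plugin's `sqlFunction`. A minimal usage sketch, assuming a configured client instance named `xata`; the table, statements and consistency value are illustrative assumptions, while the `statements`/`consistency` request shape follows the body built above:

    // Single statements keep the existing tagged-template form:
    //   const { records } = await xata.sql`SELECT * FROM "users"`;
    // The new batch form sends several statements in one request and
    // returns the per-statement results array shown above.
    const { results } = await xata.sql.batch({
      statements: [
        { statement: 'SELECT * FROM "users" WHERE id = $1', params: ['rec_123'] }, // illustrative table/id
        { statement: 'SELECT count(*) AS total FROM "users"', params: [] }
      ],
      consistency: 'strong' // assumed consistency value; passed through to sqlBatchQuery
    });
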
@@ -3258,8 +5195,7 @@ function buildDomain(host, region) {
3258
5195
  function buildConnectionString({ apiKey, workspacesApiUrl, branch }) {
3259
5196
  const url = isString(workspacesApiUrl) ? workspacesApiUrl : workspacesApiUrl("", {});
3260
5197
  const parts = parseWorkspacesUrlParts(url);
3261
- if (!parts)
3262
- throw new Error("Invalid workspaces URL");
5198
+ if (!parts) throw new Error("Invalid workspaces URL");
3263
5199
  const { workspace: workspaceSlug, region, database, host } = parts;
3264
5200
  const domain = buildDomain(host, region);
3265
5201
  const workspace = workspaceSlug.split("-").pop();
@@ -3284,39 +5220,24 @@ class TransactionPlugin extends XataPlugin {
3284
5220
  }
3285
5221
  }
3286
5222
 
3287
- var __accessCheck = (obj, member, msg) => {
3288
- if (!member.has(obj))
3289
- throw TypeError("Cannot " + msg);
3290
- };
3291
- var __privateGet = (obj, member, getter) => {
3292
- __accessCheck(obj, member, "read from private field");
3293
- return getter ? getter.call(obj) : member.get(obj);
3294
- };
3295
- var __privateAdd = (obj, member, value) => {
3296
- if (member.has(obj))
3297
- throw TypeError("Cannot add the same private member more than once");
3298
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
3299
- };
3300
- var __privateSet = (obj, member, value, setter) => {
3301
- __accessCheck(obj, member, "write to private field");
3302
- setter ? setter.call(obj, value) : member.set(obj, value);
3303
- return value;
3304
- };
3305
- var __privateMethod = (obj, member, method) => {
3306
- __accessCheck(obj, member, "access private method");
3307
- return method;
5223
+ var __typeError = (msg) => {
5224
+ throw TypeError(msg);
3308
5225
  };
5226
+ var __accessCheck = (obj, member, msg) => member.has(obj) || __typeError("Cannot " + msg);
5227
+ var __privateGet = (obj, member, getter) => (__accessCheck(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
5228
+ var __privateAdd = (obj, member, value) => member.has(obj) ? __typeError("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
5229
+ var __privateSet = (obj, member, value, setter) => (__accessCheck(obj, member, "write to private field"), member.set(obj, value), value);
5230
+ var __privateMethod = (obj, member, method) => (__accessCheck(obj, member, "access private method"), method);
3309
5231
  const buildClient = (plugins) => {
3310
- var _options, _parseOptions, parseOptions_fn, _getFetchProps, getFetchProps_fn, _a;
5232
+ var _options, _instances, parseOptions_fn, getFetchProps_fn, _a;
3311
5233
  return _a = class {
3312
5234
  constructor(options = {}, tables) {
3313
- __privateAdd(this, _parseOptions);
3314
- __privateAdd(this, _getFetchProps);
3315
- __privateAdd(this, _options, void 0);
3316
- const safeOptions = __privateMethod(this, _parseOptions, parseOptions_fn).call(this, options);
5235
+ __privateAdd(this, _instances);
5236
+ __privateAdd(this, _options);
5237
+ const safeOptions = __privateMethod(this, _instances, parseOptions_fn).call(this, options);
3317
5238
  __privateSet(this, _options, safeOptions);
3318
5239
  const pluginOptions = {
3319
- ...__privateMethod(this, _getFetchProps, getFetchProps_fn).call(this, safeOptions),
5240
+ ...__privateMethod(this, _instances, getFetchProps_fn).call(this, safeOptions),
3320
5241
  host: safeOptions.host,
3321
5242
  tables,
3322
5243
  branch: safeOptions.branch
@@ -3333,8 +5254,7 @@ const buildClient = (plugins) => {
3333
5254
  this.sql = sql;
3334
5255
  this.files = files;
3335
5256
  for (const [key, namespace] of Object.entries(plugins ?? {})) {
3336
- if (namespace === void 0)
3337
- continue;
5257
+ if (namespace === void 0) continue;
3338
5258
  this[key] = namespace.build(pluginOptions);
3339
5259
  }
3340
5260
  }
@@ -3343,8 +5263,8 @@ const buildClient = (plugins) => {
3343
5263
  const branch = __privateGet(this, _options).branch;
3344
5264
  return { databaseURL, branch };
3345
5265
  }
3346
- }, _options = new WeakMap(), _parseOptions = new WeakSet(), parseOptions_fn = function(options) {
3347
- const enableBrowser = options?.enableBrowser ?? getEnableBrowserVariable() ?? false;
5266
+ }, _options = new WeakMap(), _instances = new WeakSet(), parseOptions_fn = function(options) {
5267
+ const enableBrowser = options?.enableBrowser ?? false;
3348
5268
  const isBrowser = typeof window !== "undefined" && typeof Deno === "undefined";
3349
5269
  if (isBrowser && !enableBrowser) {
3350
5270
  throw new Error(
@@ -3352,8 +5272,9 @@ const buildClient = (plugins) => {
3352
5272
  );
3353
5273
  }
3354
5274
  const fetch = getFetchImplementation(options?.fetch);
3355
- const databaseURL = options?.databaseURL || getDatabaseURL();
3356
- const apiKey = options?.apiKey || getAPIKey();
5275
+ const databaseURL = options?.databaseURL;
5276
+ const apiKey = options?.apiKey;
5277
+ const branch = options?.branch;
3357
5278
  const trace = options?.trace ?? defaultTrace;
3358
5279
  const clientName = options?.clientName;
3359
5280
  const host = options?.host ?? "production";
@@ -3364,25 +5285,8 @@ const buildClient = (plugins) => {
3364
5285
  if (!databaseURL) {
3365
5286
  throw new Error("Option databaseURL is required");
3366
5287
  }
3367
- const envBranch = getBranch();
3368
- const previewBranch = getPreviewBranch();
3369
- const branch = options?.branch || previewBranch || envBranch || "main";
3370
- if (!!previewBranch && branch !== previewBranch) {
3371
- console.warn(
3372
- `Ignoring preview branch ${previewBranch} because branch option was passed to the client constructor with value ${branch}`
3373
- );
3374
- } else if (!!envBranch && branch !== envBranch) {
3375
- console.warn(
3376
- `Ignoring branch ${envBranch} because branch option was passed to the client constructor with value ${branch}`
3377
- );
3378
- } else if (!!previewBranch && !!envBranch && previewBranch !== envBranch) {
3379
- console.warn(
3380
- `Ignoring preview branch ${previewBranch} and branch ${envBranch} because branch option was passed to the client constructor with value ${branch}`
3381
- );
3382
- } else if (!previewBranch && !envBranch && options?.branch === void 0) {
3383
- console.warn(
3384
- `No branch was passed to the client constructor. Using default branch ${branch}. You can set the branch with the environment variable XATA_BRANCH or by passing the branch option to the client constructor.`
3385
- );
5288
+ if (!branch) {
5289
+ throw new Error("Option branch is required");
3386
5290
  }
3387
5291
  return {
3388
5292
  fetch,
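
The hunks above drop the environment fallbacks (`getDatabaseURL`, `getAPIKey`, `getBranch`, `getPreviewBranch`) from option parsing, so `databaseURL` and `branch` must now be supplied explicitly. A minimal construction sketch, assuming the exported `BaseClient` takes the same options object as the generated class; the URL is a placeholder:

    const { BaseClient } = require('@xata.io/client');

    const xata = new BaseClient({
      databaseURL: 'https://example-workspace.us-east-1.xata.sh/db/mydb', // placeholder
      apiKey: process.env.XATA_API_KEY, // no getAPIKey() fallback anymore
      branch: 'main' // now required: "Option branch is required"
    });
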
@@ -3396,7 +5300,7 @@ const buildClient = (plugins) => {
3396
5300
  clientName,
3397
5301
  xataAgentExtra
3398
5302
  };
3399
- }, _getFetchProps = new WeakSet(), getFetchProps_fn = function({
5303
+ }, getFetchProps_fn = function({
3400
5304
  fetch,
3401
5305
  apiKey,
3402
5306
  databaseURL,
@@ -3437,26 +5341,19 @@ class Serializer {
3437
5341
  }
3438
5342
  toJSON(data) {
3439
5343
  function visit(obj) {
3440
- if (Array.isArray(obj))
3441
- return obj.map(visit);
5344
+ if (Array.isArray(obj)) return obj.map(visit);
3442
5345
  const type = typeof obj;
3443
- if (type === "undefined")
3444
- return { [META]: "undefined" };
3445
- if (type === "bigint")
3446
- return { [META]: "bigint", [VALUE]: obj.toString() };
3447
- if (obj === null || type !== "object")
3448
- return obj;
5346
+ if (type === "undefined") return { [META]: "undefined" };
5347
+ if (type === "bigint") return { [META]: "bigint", [VALUE]: obj.toString() };
5348
+ if (obj === null || type !== "object") return obj;
3449
5349
  const constructor = obj.constructor;
3450
5350
  const o = { [META]: constructor.name };
3451
5351
  for (const [key, value] of Object.entries(obj)) {
3452
5352
  o[key] = visit(value);
3453
5353
  }
3454
- if (constructor === Date)
3455
- o[VALUE] = obj.toISOString();
3456
- if (constructor === Map)
3457
- o[VALUE] = Object.fromEntries(obj);
3458
- if (constructor === Set)
3459
- o[VALUE] = [...obj];
5354
+ if (constructor === Date) o[VALUE] = obj.toISOString();
5355
+ if (constructor === Map) o[VALUE] = Object.fromEntries(obj);
5356
+ if (constructor === Set) o[VALUE] = [...obj];
3460
5357
  return o;
3461
5358
  }
3462
5359
  return JSON.stringify(visit(data));
@@ -3469,16 +5366,11 @@ class Serializer {
3469
5366
  if (constructor) {
3470
5367
  return Object.assign(Object.create(constructor.prototype), rest);
3471
5368
  }
3472
- if (clazz === "Date")
3473
- return new Date(val);
3474
- if (clazz === "Set")
3475
- return new Set(val);
3476
- if (clazz === "Map")
3477
- return new Map(Object.entries(val));
3478
- if (clazz === "bigint")
3479
- return BigInt(val);
3480
- if (clazz === "undefined")
3481
- return void 0;
5369
+ if (clazz === "Date") return new Date(val);
5370
+ if (clazz === "Set") return new Set(val);
5371
+ if (clazz === "Map") return new Map(Object.entries(val));
5372
+ if (clazz === "bigint") return BigInt(val);
5373
+ if (clazz === "undefined") return void 0;
3482
5374
  return rest;
3483
5375
  }
3484
5376
  return value;
@@ -3493,6 +5385,47 @@ const deserialize = (json) => {
3493
5385
  return defaultSerializer.fromJSON(json);
3494
5386
  };
3495
5387
 
5388
+ function parseEnvironment(environment) {
5389
+ try {
5390
+ if (typeof environment === "function") {
5391
+ return new Proxy(
5392
+ {},
5393
+ {
5394
+ get(target) {
5395
+ return environment(target);
5396
+ }
5397
+ }
5398
+ );
5399
+ }
5400
+ if (isObject(environment)) {
5401
+ return environment;
5402
+ }
5403
+ } catch (error) {
5404
+ }
5405
+ return {};
5406
+ }
5407
+ function buildPreviewBranchName({ org, branch }) {
5408
+ return `preview-${org}-${branch}`;
5409
+ }
5410
+ function getDeployPreviewBranch(environment) {
5411
+ try {
5412
+ const { deployPreview, deployPreviewBranch, vercelGitCommitRef, vercelGitRepoOwner } = parseEnvironment(environment);
5413
+ if (deployPreviewBranch) return deployPreviewBranch;
5414
+ switch (deployPreview) {
5415
+ case "vercel": {
5416
+ if (!vercelGitCommitRef || !vercelGitRepoOwner) {
5417
+ console.warn("XATA_PREVIEW=vercel but VERCEL_GIT_COMMIT_REF or VERCEL_GIT_REPO_OWNER is not valid");
5418
+ return void 0;
5419
+ }
5420
+ return buildPreviewBranchName({ org: vercelGitRepoOwner, branch: vercelGitCommitRef });
5421
+ }
5422
+ }
5423
+ return void 0;
5424
+ } catch (err) {
5425
+ return void 0;
5426
+ }
5427
+ }
5428
+
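+ 
A small sketch of calling the newly exported `getDeployPreviewBranch` helper added above with a plain object; the keys mirror the destructuring in the function body and the values are invented for illustration:

    const { getDeployPreviewBranch } = require('@xata.io/client');

    // An explicit deployPreviewBranch wins outright.
    getDeployPreviewBranch({ deployPreviewBranch: 'my-feature' }); // 'my-feature'

    // Vercel detection builds `preview-${org}-${branch}`.
    getDeployPreviewBranch({
      deployPreview: 'vercel',
      vercelGitRepoOwner: 'acme',
      vercelGitCommitRef: 'feat/login'
    }); // 'preview-acme-feat/login'
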
3496
5429
  class XataError extends Error {
3497
5430
  constructor(message, status) {
3498
5431
  super(message);
@@ -3501,6 +5434,7 @@ class XataError extends Error {
3501
5434
  }
3502
5435
 
3503
5436
  exports.BaseClient = BaseClient;
5437
+ exports.Buffer = Buffer;
3504
5438
  exports.FetcherError = FetcherError;
3505
5439
  exports.FilesPlugin = FilesPlugin;
3506
5440
  exports.Operations = operationsByTag;
@@ -3544,9 +5478,11 @@ exports.cancelWorkspaceMemberInvite = cancelWorkspaceMemberInvite;
3544
5478
  exports.compareBranchSchemas = compareBranchSchemas;
3545
5479
  exports.compareBranchWithUserSchema = compareBranchWithUserSchema;
3546
5480
  exports.compareMigrationRequest = compareMigrationRequest;
5481
+ exports.completeMigration = completeMigration;
3547
5482
  exports.contains = contains;
3548
5483
  exports.copyBranch = copyBranch;
3549
5484
  exports.createBranch = createBranch;
5485
+ exports.createBranchAsync = createBranchAsync;
3550
5486
  exports.createCluster = createCluster;
3551
5487
  exports.createDatabase = createDatabase;
3552
5488
  exports.createMigrationRequest = createMigrationRequest;
@@ -3554,6 +5490,7 @@ exports.createTable = createTable;
3554
5490
  exports.createUserAPIKey = createUserAPIKey;
3555
5491
  exports.createWorkspace = createWorkspace;
3556
5492
  exports.deleteBranch = deleteBranch;
5493
+ exports.deleteCluster = deleteCluster;
3557
5494
  exports.deleteColumn = deleteColumn;
3558
5495
  exports.deleteDatabase = deleteDatabase;
3559
5496
  exports.deleteDatabaseGithubSettings = deleteDatabaseGithubSettings;
@@ -3567,6 +5504,7 @@ exports.deleteUserAPIKey = deleteUserAPIKey;
3567
5504
  exports.deleteUserOAuthClient = deleteUserOAuthClient;
3568
5505
  exports.deleteWorkspace = deleteWorkspace;
3569
5506
  exports.deserialize = deserialize;
5507
+ exports.dropClusterExtension = dropClusterExtension;
3570
5508
  exports.endsWith = endsWith;
3571
5509
  exports.equals = equals;
3572
5510
  exports.executeBranchMigrationPlan = executeBranchMigrationPlan;
@@ -3574,37 +5512,40 @@ exports.exists = exists;
3574
5512
  exports.fileAccess = fileAccess;
3575
5513
  exports.fileUpload = fileUpload;
3576
5514
  exports.ge = ge;
3577
- exports.getAPIKey = getAPIKey;
3578
5515
  exports.getAuthorizationCode = getAuthorizationCode;
3579
- exports.getBranch = getBranch;
3580
5516
  exports.getBranchDetails = getBranchDetails;
3581
5517
  exports.getBranchList = getBranchList;
3582
5518
  exports.getBranchMetadata = getBranchMetadata;
3583
5519
  exports.getBranchMigrationHistory = getBranchMigrationHistory;
3584
5520
  exports.getBranchMigrationJobStatus = getBranchMigrationJobStatus;
3585
5521
  exports.getBranchMigrationPlan = getBranchMigrationPlan;
5522
+ exports.getBranchMoveStatus = getBranchMoveStatus;
3586
5523
  exports.getBranchSchemaHistory = getBranchSchemaHistory;
3587
5524
  exports.getBranchStats = getBranchStats;
3588
5525
  exports.getCluster = getCluster;
5526
+ exports.getClusterMetrics = getClusterMetrics;
3589
5527
  exports.getColumn = getColumn;
3590
5528
  exports.getDatabaseGithubSettings = getDatabaseGithubSettings;
3591
5529
  exports.getDatabaseList = getDatabaseList;
3592
5530
  exports.getDatabaseMetadata = getDatabaseMetadata;
3593
5531
  exports.getDatabaseSettings = getDatabaseSettings;
3594
- exports.getDatabaseURL = getDatabaseURL;
5532
+ exports.getDeployPreviewBranch = getDeployPreviewBranch;
3595
5533
  exports.getFile = getFile;
3596
5534
  exports.getFileItem = getFileItem;
3597
5535
  exports.getGitBranchesMapping = getGitBranchesMapping;
3598
5536
  exports.getHostUrl = getHostUrl;
3599
5537
  exports.getMigrationHistory = getMigrationHistory;
3600
5538
  exports.getMigrationJobStatus = getMigrationJobStatus;
5539
+ exports.getMigrationJobs = getMigrationJobs;
3601
5540
  exports.getMigrationRequest = getMigrationRequest;
3602
5541
  exports.getMigrationRequestIsMerged = getMigrationRequestIsMerged;
3603
- exports.getPreviewBranch = getPreviewBranch;
3604
5542
  exports.getRecord = getRecord;
3605
5543
  exports.getSchema = getSchema;
5544
+ exports.getSchemas = getSchemas;
3606
5545
  exports.getTableColumns = getTableColumns;
3607
5546
  exports.getTableSchema = getTableSchema;
5547
+ exports.getTaskStatus = getTaskStatus;
5548
+ exports.getTasks = getTasks;
3608
5549
  exports.getUser = getUser;
3609
5550
  exports.getUserAPIKeys = getUserAPIKeys;
3610
5551
  exports.getUserOAuthAccessTokens = getUserOAuthAccessTokens;
@@ -3627,6 +5568,7 @@ exports.includesAny = includesAny;
3627
5568
  exports.includesNone = includesNone;
3628
5569
  exports.insertRecord = insertRecord;
3629
5570
  exports.insertRecordWithID = insertRecordWithID;
5571
+ exports.installClusterExtension = installClusterExtension;
3630
5572
  exports.inviteWorkspaceMember = inviteWorkspaceMember;
3631
5573
  exports.is = is;
3632
5574
  exports.isCursorPaginationOptions = isCursorPaginationOptions;
@@ -3640,12 +5582,15 @@ exports.le = le;
3640
5582
  exports.lessEquals = lessEquals;
3641
5583
  exports.lessThan = lessThan;
3642
5584
  exports.lessThanEquals = lessThanEquals;
5585
+ exports.listClusterBranches = listClusterBranches;
5586
+ exports.listClusterExtensions = listClusterExtensions;
3643
5587
  exports.listClusters = listClusters;
3644
5588
  exports.listMigrationRequestsCommits = listMigrationRequestsCommits;
3645
5589
  exports.listRegions = listRegions;
3646
5590
  exports.lt = lt;
3647
5591
  exports.lte = lte;
3648
5592
  exports.mergeMigrationRequest = mergeMigrationRequest;
5593
+ exports.moveBranch = moveBranch;
3649
5594
  exports.notExists = notExists;
3650
5595
  exports.operationsByTag = operationsByTag;
3651
5596
  exports.parseProviderString = parseProviderString;
@@ -3662,11 +5607,14 @@ exports.removeWorkspaceMember = removeWorkspaceMember;
3662
5607
  exports.renameDatabase = renameDatabase;
3663
5608
  exports.resendWorkspaceMemberInvite = resendWorkspaceMemberInvite;
3664
5609
  exports.resolveBranch = resolveBranch;
5610
+ exports.rollbackMigration = rollbackMigration;
3665
5611
  exports.searchBranch = searchBranch;
3666
5612
  exports.searchTable = searchTable;
3667
5613
  exports.serialize = serialize;
3668
5614
  exports.setTableSchema = setTableSchema;
5615
+ exports.sqlBatchQuery = sqlBatchQuery;
3669
5616
  exports.sqlQuery = sqlQuery;
5617
+ exports.startMigration = startMigration;
3670
5618
  exports.startsWith = startsWith;
3671
5619
  exports.summarizeTable = summarizeTable;
3672
5620
  exports.transformImage = transformImage;