@xata.io/client 0.0.0-alpha.vfc037e5fcc7638c56843d5834ef8a7d04c8d451b → 0.0.0-alpha.vfc2160d20dff569d0f4b3272a1273ca130158619

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
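The bulk of the hunk below vendors a table-driven base64 codec (lookup, revLookup, toByteArray, fromByteArray) and a Uint8Array-backed Buffer polyfill into the client bundle. As a rough, standalone sketch of the encoding technique those tables implement (illustrative only, not code from the package): the input is walked three bytes at a time, each 24-bit group is split into four 6-bit indices into the base64 alphabet, and a 1- or 2-byte tail is padded with "=".

    // Standalone sketch of table-driven base64 encoding (illustrative only, not from the package).
    const ALPHABET = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
    function encodeBase64(bytes) {
      let out = "";
      let i = 0;
      // Full 3-byte groups: 24 bits become four 6-bit alphabet indices.
      for (; i + 3 <= bytes.length; i += 3) {
        const n = (bytes[i] << 16) | (bytes[i + 1] << 8) | bytes[i + 2];
        out += ALPHABET[(n >> 18) & 63] + ALPHABET[(n >> 12) & 63] + ALPHABET[(n >> 6) & 63] + ALPHABET[n & 63];
      }
      // Remaining 1- or 2-byte tail, padded with "=".
      const rest = bytes.length - i;
      if (rest === 1) {
        const n = bytes[i];
        out += ALPHABET[n >> 2] + ALPHABET[(n << 4) & 63] + "==";
      } else if (rest === 2) {
        const n = (bytes[i] << 8) | bytes[i + 1];
        out += ALPHABET[n >> 10] + ALPHABET[(n >> 4) & 63] + ALPHABET[(n << 2) & 63] + "=";
      }
      return out;
    }
    console.log(encodeBase64(new Uint8Array([72, 105, 33]))); // "SGkh"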
package/dist/index.cjs CHANGED
@@ -24,6 +24,1789 @@ const TraceAttributes = {
24
24
  CLOUDFLARE_RAY_ID: "cf.ray"
25
25
  };
26
26
 
27
+ const lookup = [];
28
+ const revLookup = [];
29
+ const code = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
30
+ for (let i = 0, len = code.length; i < len; ++i) {
31
+ lookup[i] = code[i];
32
+ revLookup[code.charCodeAt(i)] = i;
33
+ }
34
+ revLookup["-".charCodeAt(0)] = 62;
35
+ revLookup["_".charCodeAt(0)] = 63;
36
+ function getLens(b64) {
37
+ const len = b64.length;
38
+ if (len % 4 > 0) {
39
+ throw new Error("Invalid string. Length must be a multiple of 4");
40
+ }
41
+ let validLen = b64.indexOf("=");
42
+ if (validLen === -1) validLen = len;
43
+ const placeHoldersLen = validLen === len ? 0 : 4 - validLen % 4;
44
+ return [validLen, placeHoldersLen];
45
+ }
46
+ function _byteLength(_b64, validLen, placeHoldersLen) {
47
+ return (validLen + placeHoldersLen) * 3 / 4 - placeHoldersLen;
48
+ }
49
+ function toByteArray(b64) {
50
+ let tmp;
51
+ const lens = getLens(b64);
52
+ const validLen = lens[0];
53
+ const placeHoldersLen = lens[1];
54
+ const arr = new Uint8Array(_byteLength(b64, validLen, placeHoldersLen));
55
+ let curByte = 0;
56
+ const len = placeHoldersLen > 0 ? validLen - 4 : validLen;
57
+ let i;
58
+ for (i = 0; i < len; i += 4) {
59
+ tmp = revLookup[b64.charCodeAt(i)] << 18 | revLookup[b64.charCodeAt(i + 1)] << 12 | revLookup[b64.charCodeAt(i + 2)] << 6 | revLookup[b64.charCodeAt(i + 3)];
60
+ arr[curByte++] = tmp >> 16 & 255;
61
+ arr[curByte++] = tmp >> 8 & 255;
62
+ arr[curByte++] = tmp & 255;
63
+ }
64
+ if (placeHoldersLen === 2) {
65
+ tmp = revLookup[b64.charCodeAt(i)] << 2 | revLookup[b64.charCodeAt(i + 1)] >> 4;
66
+ arr[curByte++] = tmp & 255;
67
+ }
68
+ if (placeHoldersLen === 1) {
69
+ tmp = revLookup[b64.charCodeAt(i)] << 10 | revLookup[b64.charCodeAt(i + 1)] << 4 | revLookup[b64.charCodeAt(i + 2)] >> 2;
70
+ arr[curByte++] = tmp >> 8 & 255;
71
+ arr[curByte++] = tmp & 255;
72
+ }
73
+ return arr;
74
+ }
75
+ function tripletToBase64(num) {
76
+ return lookup[num >> 18 & 63] + lookup[num >> 12 & 63] + lookup[num >> 6 & 63] + lookup[num & 63];
77
+ }
78
+ function encodeChunk(uint8, start, end) {
79
+ let tmp;
80
+ const output = [];
81
+ for (let i = start; i < end; i += 3) {
82
+ tmp = (uint8[i] << 16 & 16711680) + (uint8[i + 1] << 8 & 65280) + (uint8[i + 2] & 255);
83
+ output.push(tripletToBase64(tmp));
84
+ }
85
+ return output.join("");
86
+ }
87
+ function fromByteArray(uint8) {
88
+ let tmp;
89
+ const len = uint8.length;
90
+ const extraBytes = len % 3;
91
+ const parts = [];
92
+ const maxChunkLength = 16383;
93
+ for (let i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) {
94
+ parts.push(encodeChunk(uint8, i, i + maxChunkLength > len2 ? len2 : i + maxChunkLength));
95
+ }
96
+ if (extraBytes === 1) {
97
+ tmp = uint8[len - 1];
98
+ parts.push(lookup[tmp >> 2] + lookup[tmp << 4 & 63] + "==");
99
+ } else if (extraBytes === 2) {
100
+ tmp = (uint8[len - 2] << 8) + uint8[len - 1];
101
+ parts.push(lookup[tmp >> 10] + lookup[tmp >> 4 & 63] + lookup[tmp << 2 & 63] + "=");
102
+ }
103
+ return parts.join("");
104
+ }
105
+
106
+ const K_MAX_LENGTH = 2147483647;
107
+ const MAX_ARGUMENTS_LENGTH = 4096;
108
+ class Buffer extends Uint8Array {
109
+ /**
110
+ * Constructs a new `Buffer` instance.
111
+ *
112
+ * @param value
113
+ * @param encodingOrOffset
114
+ * @param length
115
+ */
116
+ constructor(value, encodingOrOffset, length) {
117
+ if (typeof value === "number") {
118
+ if (typeof encodingOrOffset === "string") {
119
+ throw new TypeError("The first argument must be of type string, received type number");
120
+ }
121
+ if (value < 0) {
122
+ throw new RangeError("The buffer size cannot be negative");
123
+ }
124
+ super(value < 0 ? 0 : Buffer._checked(value) | 0);
125
+ } else if (typeof value === "string") {
126
+ if (typeof encodingOrOffset !== "string") {
127
+ encodingOrOffset = "utf8";
128
+ }
129
+ if (!Buffer.isEncoding(encodingOrOffset)) {
130
+ throw new TypeError("Unknown encoding: " + encodingOrOffset);
131
+ }
132
+ const length2 = Buffer.byteLength(value, encodingOrOffset) | 0;
133
+ super(length2);
134
+ const written = this.write(value, 0, this.length, encodingOrOffset);
135
+ if (written !== length2) {
136
+ throw new TypeError(
137
+ "Number of bytes written did not match expected length (wrote " + written + ", expected " + length2 + ")"
138
+ );
139
+ }
140
+ } else if (ArrayBuffer.isView(value)) {
141
+ if (Buffer._isInstance(value, Uint8Array)) {
142
+ const copy = new Uint8Array(value);
143
+ const array = copy.buffer;
144
+ const byteOffset = copy.byteOffset;
145
+ const length2 = copy.byteLength;
146
+ if (byteOffset < 0 || array.byteLength < byteOffset) {
147
+ throw new RangeError("offset is outside of buffer bounds");
148
+ }
149
+ if (array.byteLength < byteOffset + (length2 || 0)) {
150
+ throw new RangeError("length is outside of buffer bounds");
151
+ }
152
+ super(new Uint8Array(array, byteOffset, length2));
153
+ } else {
154
+ const array = value;
155
+ const length2 = array.length < 0 ? 0 : Buffer._checked(array.length) | 0;
156
+ super(new Uint8Array(length2));
157
+ for (let i = 0; i < length2; i++) {
158
+ this[i] = array[i] & 255;
159
+ }
160
+ }
161
+ } else if (value == null) {
162
+ throw new TypeError(
163
+ "The first argument must be one of type string, Buffer, ArrayBuffer, Array, or Array-like Object. Received type " + typeof value
164
+ );
165
+ } else if (Buffer._isInstance(value, ArrayBuffer) || value && Buffer._isInstance(value.buffer, ArrayBuffer)) {
166
+ const array = value;
167
+ const byteOffset = encodingOrOffset;
168
+ if (byteOffset < 0 || array.byteLength < byteOffset) {
169
+ throw new RangeError("offset is outside of buffer bounds");
170
+ }
171
+ if (array.byteLength < byteOffset + (length || 0)) {
172
+ throw new RangeError("length is outside of buffer bounds");
173
+ }
174
+ super(new Uint8Array(array, byteOffset, length));
175
+ } else if (Array.isArray(value)) {
176
+ const array = value;
177
+ const length2 = array.length < 0 ? 0 : Buffer._checked(array.length) | 0;
178
+ super(new Uint8Array(length2));
179
+ for (let i = 0; i < length2; i++) {
180
+ this[i] = array[i] & 255;
181
+ }
182
+ } else {
183
+ throw new TypeError("Unable to determine the correct way to allocate buffer for type " + typeof value);
184
+ }
185
+ }
186
+ /**
187
+ * Return JSON representation of the buffer.
188
+ */
189
+ toJSON() {
190
+ return {
191
+ type: "Buffer",
192
+ data: Array.prototype.slice.call(this)
193
+ };
194
+ }
195
+ /**
196
+ * Writes `string` to the buffer at `offset` according to the character encoding in `encoding`. The `length`
197
+ * parameter is the number of bytes to write. If the buffer does not contain enough space to fit the entire string,
198
+ * only part of `string` will be written. However, partially encoded characters will not be written.
199
+ *
200
+ * @param string String to write to `buf`.
201
+ * @param offset Number of bytes to skip before starting to write `string`. Default: `0`.
202
+ * @param length Maximum number of bytes to write. Default: `buf.length - offset`.
203
+ * @param encoding The character encoding of `string`. Default: `utf8`.
204
+ */
205
+ write(string, offset, length, encoding) {
206
+ if (typeof offset === "undefined") {
207
+ encoding = "utf8";
208
+ length = this.length;
209
+ offset = 0;
210
+ } else if (typeof length === "undefined" && typeof offset === "string") {
211
+ encoding = offset;
212
+ length = this.length;
213
+ offset = 0;
214
+ } else if (typeof offset === "number" && isFinite(offset)) {
215
+ offset = offset >>> 0;
216
+ if (typeof length === "number" && isFinite(length)) {
217
+ length = length >>> 0;
218
+ encoding ?? (encoding = "utf8");
219
+ } else if (typeof length === "string") {
220
+ encoding = length;
221
+ length = void 0;
222
+ }
223
+ } else {
224
+ throw new Error("Buffer.write(string, encoding, offset[, length]) is no longer supported");
225
+ }
226
+ const remaining = this.length - offset;
227
+ if (typeof length === "undefined" || length > remaining) {
228
+ length = remaining;
229
+ }
230
+ if (string.length > 0 && (length < 0 || offset < 0) || offset > this.length) {
231
+ throw new RangeError("Attempt to write outside buffer bounds");
232
+ }
233
+ encoding || (encoding = "utf8");
234
+ switch (Buffer._getEncoding(encoding)) {
235
+ case "hex":
236
+ return Buffer._hexWrite(this, string, offset, length);
237
+ case "utf8":
238
+ return Buffer._utf8Write(this, string, offset, length);
239
+ case "ascii":
240
+ case "latin1":
241
+ case "binary":
242
+ return Buffer._asciiWrite(this, string, offset, length);
243
+ case "ucs2":
244
+ case "utf16le":
245
+ return Buffer._ucs2Write(this, string, offset, length);
246
+ case "base64":
247
+ return Buffer._base64Write(this, string, offset, length);
248
+ }
249
+ }
250
+ /**
251
+ * Decodes the buffer to a string according to the specified character encoding.
252
+ * Passing `start` and `end` will decode only a subset of the buffer.
253
+ *
254
+ * Note that if the encoding is `utf8` and a byte sequence in the input is not valid UTF-8, then each invalid byte
255
+ * will be replaced with `U+FFFD`.
256
+ *
257
+ * @param encoding
258
+ * @param start
259
+ * @param end
260
+ */
261
+ toString(encoding, start, end) {
262
+ const length = this.length;
263
+ if (length === 0) {
264
+ return "";
265
+ }
266
+ if (arguments.length === 0) {
267
+ return Buffer._utf8Slice(this, 0, length);
268
+ }
269
+ if (typeof start === "undefined" || start < 0) {
270
+ start = 0;
271
+ }
272
+ if (start > this.length) {
273
+ return "";
274
+ }
275
+ if (typeof end === "undefined" || end > this.length) {
276
+ end = this.length;
277
+ }
278
+ if (end <= 0) {
279
+ return "";
280
+ }
281
+ end >>>= 0;
282
+ start >>>= 0;
283
+ if (end <= start) {
284
+ return "";
285
+ }
286
+ if (!encoding) {
287
+ encoding = "utf8";
288
+ }
289
+ switch (Buffer._getEncoding(encoding)) {
290
+ case "hex":
291
+ return Buffer._hexSlice(this, start, end);
292
+ case "utf8":
293
+ return Buffer._utf8Slice(this, start, end);
294
+ case "ascii":
295
+ return Buffer._asciiSlice(this, start, end);
296
+ case "latin1":
297
+ case "binary":
298
+ return Buffer._latin1Slice(this, start, end);
299
+ case "ucs2":
300
+ case "utf16le":
301
+ return Buffer._utf16leSlice(this, start, end);
302
+ case "base64":
303
+ return Buffer._base64Slice(this, start, end);
304
+ }
305
+ }
306
+ /**
307
+ * Returns true if this buffer is equal to the provided buffer, meaning they contain exactly the same data.
308
+ *
309
+ * @param otherBuffer
310
+ */
311
+ equals(otherBuffer) {
312
+ if (!Buffer.isBuffer(otherBuffer)) {
313
+ throw new TypeError("Argument must be a Buffer");
314
+ }
315
+ if (this === otherBuffer) {
316
+ return true;
317
+ }
318
+ return Buffer.compare(this, otherBuffer) === 0;
319
+ }
320
+ /**
321
+ * Compares the buffer with `otherBuffer` and returns a number indicating whether the buffer comes before, after,
322
+ * or is the same as `otherBuffer` in sort order. Comparison is based on the actual sequence of bytes in each
323
+ * buffer.
324
+ *
325
+ * - `0` is returned if `otherBuffer` is the same as this buffer.
326
+ * - `1` is returned if `otherBuffer` should come before this buffer when sorted.
327
+ * - `-1` is returned if `otherBuffer` should come after this buffer when sorted.
328
+ *
329
+ * @param otherBuffer The buffer to compare to.
330
+ * @param targetStart The offset within `otherBuffer` at which to begin comparison.
331
+ * @param targetEnd The offset within `otherBuffer` at which to end comparison (exclusive).
332
+ * @param sourceStart The offset within this buffer at which to begin comparison.
333
+ * @param sourceEnd The offset within this buffer at which to end the comparison (exclusive).
334
+ */
335
+ compare(otherBuffer, targetStart, targetEnd, sourceStart, sourceEnd) {
336
+ if (Buffer._isInstance(otherBuffer, Uint8Array)) {
337
+ otherBuffer = Buffer.from(otherBuffer, otherBuffer.byteOffset, otherBuffer.byteLength);
338
+ }
339
+ if (!Buffer.isBuffer(otherBuffer)) {
340
+ throw new TypeError("Argument must be a Buffer or Uint8Array");
341
+ }
342
+ targetStart ?? (targetStart = 0);
343
+ targetEnd ?? (targetEnd = otherBuffer ? otherBuffer.length : 0);
344
+ sourceStart ?? (sourceStart = 0);
345
+ sourceEnd ?? (sourceEnd = this.length);
346
+ if (targetStart < 0 || targetEnd > otherBuffer.length || sourceStart < 0 || sourceEnd > this.length) {
347
+ throw new RangeError("Out of range index");
348
+ }
349
+ if (sourceStart >= sourceEnd && targetStart >= targetEnd) {
350
+ return 0;
351
+ }
352
+ if (sourceStart >= sourceEnd) {
353
+ return -1;
354
+ }
355
+ if (targetStart >= targetEnd) {
356
+ return 1;
357
+ }
358
+ targetStart >>>= 0;
359
+ targetEnd >>>= 0;
360
+ sourceStart >>>= 0;
361
+ sourceEnd >>>= 0;
362
+ if (this === otherBuffer) {
363
+ return 0;
364
+ }
365
+ let x = sourceEnd - sourceStart;
366
+ let y = targetEnd - targetStart;
367
+ const len = Math.min(x, y);
368
+ const thisCopy = this.slice(sourceStart, sourceEnd);
369
+ const targetCopy = otherBuffer.slice(targetStart, targetEnd);
370
+ for (let i = 0; i < len; ++i) {
371
+ if (thisCopy[i] !== targetCopy[i]) {
372
+ x = thisCopy[i];
373
+ y = targetCopy[i];
374
+ break;
375
+ }
376
+ }
377
+ if (x < y) return -1;
378
+ if (y < x) return 1;
379
+ return 0;
380
+ }
381
+ /**
382
+ * Copies data from a region of this buffer to a region in `targetBuffer`, even if the `targetBuffer` memory
383
+ * region overlaps with this buffer.
384
+ *
385
+ * @param targetBuffer The target buffer to copy into.
386
+ * @param targetStart The offset within `targetBuffer` at which to begin writing.
387
+ * @param sourceStart The offset within this buffer at which to begin copying.
388
+ * @param sourceEnd The offset within this buffer at which to end copying (exclusive).
389
+ */
390
+ copy(targetBuffer, targetStart, sourceStart, sourceEnd) {
391
+ if (!Buffer.isBuffer(targetBuffer)) throw new TypeError("argument should be a Buffer");
392
+ if (!sourceStart) sourceStart = 0;
393
+ if (!targetStart) targetStart = 0;
394
+ if (!sourceEnd && sourceEnd !== 0) sourceEnd = this.length;
395
+ if (targetStart >= targetBuffer.length) targetStart = targetBuffer.length;
396
+ if (!targetStart) targetStart = 0;
397
+ if (sourceEnd > 0 && sourceEnd < sourceStart) sourceEnd = sourceStart;
398
+ if (sourceEnd === sourceStart) return 0;
399
+ if (targetBuffer.length === 0 || this.length === 0) return 0;
400
+ if (targetStart < 0) {
401
+ throw new RangeError("targetStart out of bounds");
402
+ }
403
+ if (sourceStart < 0 || sourceStart >= this.length) throw new RangeError("Index out of range");
404
+ if (sourceEnd < 0) throw new RangeError("sourceEnd out of bounds");
405
+ if (sourceEnd > this.length) sourceEnd = this.length;
406
+ if (targetBuffer.length - targetStart < sourceEnd - sourceStart) {
407
+ sourceEnd = targetBuffer.length - targetStart + sourceStart;
408
+ }
409
+ const len = sourceEnd - sourceStart;
410
+ if (this === targetBuffer && typeof Uint8Array.prototype.copyWithin === "function") {
411
+ this.copyWithin(targetStart, sourceStart, sourceEnd);
412
+ } else {
413
+ Uint8Array.prototype.set.call(targetBuffer, this.subarray(sourceStart, sourceEnd), targetStart);
414
+ }
415
+ return len;
416
+ }
417
+ /**
418
+ * Returns a new `Buffer` that references the same memory as the original, but offset and cropped by the `start`
419
+ * and `end` indices. This is the same behavior as `buf.subarray()`.
420
+ *
421
+ * This method is not compatible with `Uint8Array.prototype.slice()`, which Buffer inherits from its superclass `Uint8Array`. To copy
422
+ * the slice, use `Uint8Array.prototype.slice()`.
423
+ *
424
+ * @param start
425
+ * @param end
426
+ */
427
+ slice(start, end) {
428
+ if (!start) {
429
+ start = 0;
430
+ }
431
+ const len = this.length;
432
+ start = ~~start;
433
+ end = end === void 0 ? len : ~~end;
434
+ if (start < 0) {
435
+ start += len;
436
+ if (start < 0) {
437
+ start = 0;
438
+ }
439
+ } else if (start > len) {
440
+ start = len;
441
+ }
442
+ if (end < 0) {
443
+ end += len;
444
+ if (end < 0) {
445
+ end = 0;
446
+ }
447
+ } else if (end > len) {
448
+ end = len;
449
+ }
450
+ if (end < start) {
451
+ end = start;
452
+ }
453
+ const newBuf = this.subarray(start, end);
454
+ Object.setPrototypeOf(newBuf, Buffer.prototype);
455
+ return newBuf;
456
+ }
457
+ /**
458
+ * Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits
459
+ * of accuracy. Behavior is undefined when value is anything other than an unsigned integer.
460
+ *
461
+ * @param value Number to write.
462
+ * @param offset Number of bytes to skip before starting to write.
463
+ * @param byteLength Number of bytes to write, between 0 and 6.
464
+ * @param noAssert
465
+ * @returns `offset` plus the number of bytes written.
466
+ */
467
+ writeUIntLE(value, offset, byteLength, noAssert) {
468
+ value = +value;
469
+ offset = offset >>> 0;
470
+ byteLength = byteLength >>> 0;
471
+ if (!noAssert) {
472
+ const maxBytes = Math.pow(2, 8 * byteLength) - 1;
473
+ Buffer._checkInt(this, value, offset, byteLength, maxBytes, 0);
474
+ }
475
+ let mul = 1;
476
+ let i = 0;
477
+ this[offset] = value & 255;
478
+ while (++i < byteLength && (mul *= 256)) {
479
+ this[offset + i] = value / mul & 255;
480
+ }
481
+ return offset + byteLength;
482
+ }
483
+ /**
484
+ * Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits of
485
+ * accuracy. Behavior is undefined when `value` is anything other than an unsigned integer.
486
+ *
487
+ * @param value Number to write.
488
+ * @param offset Number of bytes to skip before starting to write.
489
+ * @param byteLength Number of bytes to write, between 0 and 6.
490
+ * @param noAssert
491
+ * @returns `offset` plus the number of bytes written.
492
+ */
493
+ writeUIntBE(value, offset, byteLength, noAssert) {
494
+ value = +value;
495
+ offset = offset >>> 0;
496
+ byteLength = byteLength >>> 0;
497
+ if (!noAssert) {
498
+ const maxBytes = Math.pow(2, 8 * byteLength) - 1;
499
+ Buffer._checkInt(this, value, offset, byteLength, maxBytes, 0);
500
+ }
501
+ let i = byteLength - 1;
502
+ let mul = 1;
503
+ this[offset + i] = value & 255;
504
+ while (--i >= 0 && (mul *= 256)) {
505
+ this[offset + i] = value / mul & 255;
506
+ }
507
+ return offset + byteLength;
508
+ }
509
+ /**
510
+ * Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits
511
+ * of accuracy. Behavior is undefined when `value` is anything other than a signed integer.
512
+ *
513
+ * @param value Number to write.
514
+ * @param offset Number of bytes to skip before starting to write.
515
+ * @param byteLength Number of bytes to write, between 0 and 6.
516
+ * @param noAssert
517
+ * @returns `offset` plus the number of bytes written.
518
+ */
519
+ writeIntLE(value, offset, byteLength, noAssert) {
520
+ value = +value;
521
+ offset = offset >>> 0;
522
+ if (!noAssert) {
523
+ const limit = Math.pow(2, 8 * byteLength - 1);
524
+ Buffer._checkInt(this, value, offset, byteLength, limit - 1, -limit);
525
+ }
526
+ let i = 0;
527
+ let mul = 1;
528
+ let sub = 0;
529
+ this[offset] = value & 255;
530
+ while (++i < byteLength && (mul *= 256)) {
531
+ if (value < 0 && sub === 0 && this[offset + i - 1] !== 0) {
532
+ sub = 1;
533
+ }
534
+ this[offset + i] = (value / mul >> 0) - sub & 255;
535
+ }
536
+ return offset + byteLength;
537
+ }
538
+ /**
539
+ * Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits
540
+ * of accuracy. Behavior is undefined when `value` is anything other than a signed integer.
541
+ *
542
+ * @param value Number to write.
543
+ * @param offset Number of bytes to skip before starting to write.
544
+ * @param byteLength Number of bytes to write, between 0 and 6.
545
+ * @param noAssert
546
+ * @returns `offset` plus the number of bytes written.
547
+ */
548
+ writeIntBE(value, offset, byteLength, noAssert) {
549
+ value = +value;
550
+ offset = offset >>> 0;
551
+ if (!noAssert) {
552
+ const limit = Math.pow(2, 8 * byteLength - 1);
553
+ Buffer._checkInt(this, value, offset, byteLength, limit - 1, -limit);
554
+ }
555
+ let i = byteLength - 1;
556
+ let mul = 1;
557
+ let sub = 0;
558
+ this[offset + i] = value & 255;
559
+ while (--i >= 0 && (mul *= 256)) {
560
+ if (value < 0 && sub === 0 && this[offset + i + 1] !== 0) {
561
+ sub = 1;
562
+ }
563
+ this[offset + i] = (value / mul >> 0) - sub & 255;
564
+ }
565
+ return offset + byteLength;
566
+ }
567
+ /**
568
+ * Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an
569
+ * unsigned, little-endian integer supporting up to 48 bits of accuracy.
570
+ *
571
+ * @param offset Number of bytes to skip before starting to read.
572
+ * @param byteLength Number of bytes to read, between 0 and 6.
573
+ * @param noAssert
574
+ */
575
+ readUIntLE(offset, byteLength, noAssert) {
576
+ offset = offset >>> 0;
577
+ byteLength = byteLength >>> 0;
578
+ if (!noAssert) {
579
+ Buffer._checkOffset(offset, byteLength, this.length);
580
+ }
581
+ let val = this[offset];
582
+ let mul = 1;
583
+ let i = 0;
584
+ while (++i < byteLength && (mul *= 256)) {
585
+ val += this[offset + i] * mul;
586
+ }
587
+ return val;
588
+ }
589
+ /**
590
+ * Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an
591
+ * unsigned, big-endian integer supporting up to 48 bits of accuracy.
592
+ *
593
+ * @param offset Number of bytes to skip before starting to read.
594
+ * @param byteLength Number of bytes to read, between 0 and 6.
595
+ * @param noAssert
596
+ */
597
+ readUIntBE(offset, byteLength, noAssert) {
598
+ offset = offset >>> 0;
599
+ byteLength = byteLength >>> 0;
600
+ if (!noAssert) {
601
+ Buffer._checkOffset(offset, byteLength, this.length);
602
+ }
603
+ let val = this[offset + --byteLength];
604
+ let mul = 1;
605
+ while (byteLength > 0 && (mul *= 256)) {
606
+ val += this[offset + --byteLength] * mul;
607
+ }
608
+ return val;
609
+ }
610
+ /**
611
+ * Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a
612
+ * little-endian, two's complement signed value supporting up to 48 bits of accuracy.
613
+ *
614
+ * @param offset Number of bytes to skip before starting to read.
615
+ * @param byteLength Number of bytes to read, between 0 and 6.
616
+ * @param noAssert
617
+ */
618
+ readIntLE(offset, byteLength, noAssert) {
619
+ offset = offset >>> 0;
620
+ byteLength = byteLength >>> 0;
621
+ if (!noAssert) {
622
+ Buffer._checkOffset(offset, byteLength, this.length);
623
+ }
624
+ let val = this[offset];
625
+ let mul = 1;
626
+ let i = 0;
627
+ while (++i < byteLength && (mul *= 256)) {
628
+ val += this[offset + i] * mul;
629
+ }
630
+ mul *= 128;
631
+ if (val >= mul) {
632
+ val -= Math.pow(2, 8 * byteLength);
633
+ }
634
+ return val;
635
+ }
636
+ /**
637
+ * Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a
638
+ * big-endian, two's complement signed value supporting up to 48 bits of accuracy.
639
+ *
640
+ * @param offset Number of bytes to skip before starting to read.
641
+ * @param byteLength Number of bytes to read, between 0 and 6.
642
+ * @param noAssert
643
+ */
644
+ readIntBE(offset, byteLength, noAssert) {
645
+ offset = offset >>> 0;
646
+ byteLength = byteLength >>> 0;
647
+ if (!noAssert) {
648
+ Buffer._checkOffset(offset, byteLength, this.length);
649
+ }
650
+ let i = byteLength;
651
+ let mul = 1;
652
+ let val = this[offset + --i];
653
+ while (i > 0 && (mul *= 256)) {
654
+ val += this[offset + --i] * mul;
655
+ }
656
+ mul *= 128;
657
+ if (val >= mul) {
658
+ val -= Math.pow(2, 8 * byteLength);
659
+ }
660
+ return val;
661
+ }
662
+ /**
663
+ * Reads an unsigned 8-bit integer from `buf` at the specified `offset`.
664
+ *
665
+ * @param offset Number of bytes to skip before starting to read.
666
+ * @param noAssert
667
+ */
668
+ readUInt8(offset, noAssert) {
669
+ offset = offset >>> 0;
670
+ if (!noAssert) {
671
+ Buffer._checkOffset(offset, 1, this.length);
672
+ }
673
+ return this[offset];
674
+ }
675
+ /**
676
+ * Reads an unsigned, little-endian 16-bit integer from `buf` at the specified `offset`.
677
+ *
678
+ * @param offset Number of bytes to skip before starting to read.
679
+ * @param noAssert
680
+ */
681
+ readUInt16LE(offset, noAssert) {
682
+ offset = offset >>> 0;
683
+ if (!noAssert) {
684
+ Buffer._checkOffset(offset, 2, this.length);
685
+ }
686
+ return this[offset] | this[offset + 1] << 8;
687
+ }
688
+ /**
689
+ * Reads an unsigned, big-endian 16-bit integer from `buf` at the specified `offset`.
690
+ *
691
+ * @param offset Number of bytes to skip before starting to read.
692
+ * @param noAssert
693
+ */
694
+ readUInt16BE(offset, noAssert) {
695
+ offset = offset >>> 0;
696
+ if (!noAssert) {
697
+ Buffer._checkOffset(offset, 2, this.length);
698
+ }
699
+ return this[offset] << 8 | this[offset + 1];
700
+ }
701
+ /**
702
+ * Reads an unsigned, little-endian 32-bit integer from `buf` at the specified `offset`.
703
+ *
704
+ * @param offset Number of bytes to skip before starting to read.
705
+ * @param noAssert
706
+ */
707
+ readUInt32LE(offset, noAssert) {
708
+ offset = offset >>> 0;
709
+ if (!noAssert) {
710
+ Buffer._checkOffset(offset, 4, this.length);
711
+ }
712
+ return (this[offset] | this[offset + 1] << 8 | this[offset + 2] << 16) + this[offset + 3] * 16777216;
713
+ }
714
+ /**
715
+ * Reads an unsigned, big-endian 32-bit integer from `buf` at the specified `offset`.
716
+ *
717
+ * @param offset Number of bytes to skip before starting to read.
718
+ * @param noAssert
719
+ */
720
+ readUInt32BE(offset, noAssert) {
721
+ offset = offset >>> 0;
722
+ if (!noAssert) {
723
+ Buffer._checkOffset(offset, 4, this.length);
724
+ }
725
+ return this[offset] * 16777216 + (this[offset + 1] << 16 | this[offset + 2] << 8 | this[offset + 3]);
726
+ }
727
+ /**
728
+ * Reads a signed 8-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer` are interpreted
729
+ * as two's complement signed values.
730
+ *
731
+ * @param offset Number of bytes to skip before starting to read.
732
+ * @param noAssert
733
+ */
734
+ readInt8(offset, noAssert) {
735
+ offset = offset >>> 0;
736
+ if (!noAssert) {
737
+ Buffer._checkOffset(offset, 1, this.length);
738
+ }
739
+ if (!(this[offset] & 128)) {
740
+ return this[offset];
741
+ }
742
+ return (255 - this[offset] + 1) * -1;
743
+ }
744
+ /**
745
+ * Reads a signed, little-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
746
+ * are interpreted as two's complement signed values.
747
+ *
748
+ * @param offset Number of bytes to skip before starting to read.
749
+ * @param noAssert
750
+ */
751
+ readInt16LE(offset, noAssert) {
752
+ offset = offset >>> 0;
753
+ if (!noAssert) {
754
+ Buffer._checkOffset(offset, 2, this.length);
755
+ }
756
+ const val = this[offset] | this[offset + 1] << 8;
757
+ return val & 32768 ? val | 4294901760 : val;
758
+ }
759
+ /**
760
+ * Reads a signed, big-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
761
+ * are interpreted as two's complement signed values.
762
+ *
763
+ * @param offset Number of bytes to skip before starting to read.
764
+ * @param noAssert
765
+ */
766
+ readInt16BE(offset, noAssert) {
767
+ offset = offset >>> 0;
768
+ if (!noAssert) {
769
+ Buffer._checkOffset(offset, 2, this.length);
770
+ }
771
+ const val = this[offset + 1] | this[offset] << 8;
772
+ return val & 32768 ? val | 4294901760 : val;
773
+ }
774
+ /**
775
+ * Reads a signed, little-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
776
+ * are interpreted as two's complement signed values.
777
+ *
778
+ * @param offset Number of bytes to skip before starting to read.
779
+ * @param noAssert
780
+ */
781
+ readInt32LE(offset, noAssert) {
782
+ offset = offset >>> 0;
783
+ if (!noAssert) {
784
+ Buffer._checkOffset(offset, 4, this.length);
785
+ }
786
+ return this[offset] | this[offset + 1] << 8 | this[offset + 2] << 16 | this[offset + 3] << 24;
787
+ }
788
+ /**
789
+ * Reads a signed, big-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
790
+ * are interpreted as two's complement signed values.
791
+ *
792
+ * @param offset Number of bytes to skip before starting to read.
793
+ * @param noAssert
794
+ */
795
+ readInt32BE(offset, noAssert) {
796
+ offset = offset >>> 0;
797
+ if (!noAssert) {
798
+ Buffer._checkOffset(offset, 4, this.length);
799
+ }
800
+ return this[offset] << 24 | this[offset + 1] << 16 | this[offset + 2] << 8 | this[offset + 3];
801
+ }
802
+ /**
803
+ * Interprets `buf` as an array of unsigned 16-bit integers and swaps the byte order in-place.
804
+ * Throws a `RangeError` if `buf.length` is not a multiple of 2.
805
+ */
806
+ swap16() {
807
+ const len = this.length;
808
+ if (len % 2 !== 0) {
809
+ throw new RangeError("Buffer size must be a multiple of 16-bits");
810
+ }
811
+ for (let i = 0; i < len; i += 2) {
812
+ this._swap(this, i, i + 1);
813
+ }
814
+ return this;
815
+ }
816
+ /**
817
+ * Interprets `buf` as an array of unsigned 32-bit integers and swaps the byte order in-place.
818
+ * Throws a `RangeError` if `buf.length` is not a multiple of 4.
819
+ */
820
+ swap32() {
821
+ const len = this.length;
822
+ if (len % 4 !== 0) {
823
+ throw new RangeError("Buffer size must be a multiple of 32-bits");
824
+ }
825
+ for (let i = 0; i < len; i += 4) {
826
+ this._swap(this, i, i + 3);
827
+ this._swap(this, i + 1, i + 2);
828
+ }
829
+ return this;
830
+ }
831
+ /**
832
+ * Interprets `buf` as an array of unsigned 64-bit integers and swaps the byte order in-place.
833
+ * Throws a `RangeError` if `buf.length` is not a multiple of 8.
834
+ */
835
+ swap64() {
836
+ const len = this.length;
837
+ if (len % 8 !== 0) {
838
+ throw new RangeError("Buffer size must be a multiple of 64-bits");
839
+ }
840
+ for (let i = 0; i < len; i += 8) {
841
+ this._swap(this, i, i + 7);
842
+ this._swap(this, i + 1, i + 6);
843
+ this._swap(this, i + 2, i + 5);
844
+ this._swap(this, i + 3, i + 4);
845
+ }
846
+ return this;
847
+ }
848
+ /**
849
+ * Swaps two octets.
850
+ *
851
+ * @param b
852
+ * @param n
853
+ * @param m
854
+ */
855
+ _swap(b, n, m) {
856
+ const i = b[n];
857
+ b[n] = b[m];
858
+ b[m] = i;
859
+ }
860
+ /**
861
+ * Writes `value` to `buf` at the specified `offset`. The `value` must be a valid unsigned 8-bit integer.
862
+ * Behavior is undefined when `value` is anything other than an unsigned 8-bit integer.
863
+ *
864
+ * @param value Number to write.
865
+ * @param offset Number of bytes to skip before starting to write.
866
+ * @param noAssert
867
+ * @returns `offset` plus the number of bytes written.
868
+ */
869
+ writeUInt8(value, offset, noAssert) {
870
+ value = +value;
871
+ offset = offset >>> 0;
872
+ if (!noAssert) {
873
+ Buffer._checkInt(this, value, offset, 1, 255, 0);
874
+ }
875
+ this[offset] = value & 255;
876
+ return offset + 1;
877
+ }
878
+ /**
879
+ * Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 16-bit
880
+ * integer. Behavior is undefined when `value` is anything other than an unsigned 16-bit integer.
881
+ *
882
+ * @param value Number to write.
883
+ * @param offset Number of bytes to skip before starting to write.
884
+ * @param noAssert
885
+ * @returns `offset` plus the number of bytes written.
886
+ */
887
+ writeUInt16LE(value, offset, noAssert) {
888
+ value = +value;
889
+ offset = offset >>> 0;
890
+ if (!noAssert) {
891
+ Buffer._checkInt(this, value, offset, 2, 65535, 0);
892
+ }
893
+ this[offset] = value & 255;
894
+ this[offset + 1] = value >>> 8;
895
+ return offset + 2;
896
+ }
897
+ /**
898
+ * Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 16-bit
899
+ * integer. Behavior is undefined when `value` is anything other than an unsigned 16-bit integer.
900
+ *
901
+ * @param value Number to write.
902
+ * @param offset Number of bytes to skip before starting to write.
903
+ * @param noAssert
904
+ * @returns `offset` plus the number of bytes written.
905
+ */
906
+ writeUInt16BE(value, offset, noAssert) {
907
+ value = +value;
908
+ offset = offset >>> 0;
909
+ if (!noAssert) {
910
+ Buffer._checkInt(this, value, offset, 2, 65535, 0);
911
+ }
912
+ this[offset] = value >>> 8;
913
+ this[offset + 1] = value & 255;
914
+ return offset + 2;
915
+ }
916
+ /**
917
+ * Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 32-bit
918
+ * integer. Behavior is undefined when `value` is anything other than an unsigned 32-bit integer.
919
+ *
920
+ * @param value Number to write.
921
+ * @param offset Number of bytes to skip before starting to write.
922
+ * @param noAssert
923
+ * @returns `offset` plus the number of bytes written.
924
+ */
925
+ writeUInt32LE(value, offset, noAssert) {
926
+ value = +value;
927
+ offset = offset >>> 0;
928
+ if (!noAssert) {
929
+ Buffer._checkInt(this, value, offset, 4, 4294967295, 0);
930
+ }
931
+ this[offset + 3] = value >>> 24;
932
+ this[offset + 2] = value >>> 16;
933
+ this[offset + 1] = value >>> 8;
934
+ this[offset] = value & 255;
935
+ return offset + 4;
936
+ }
937
+ /**
938
+ * Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 32-bit
939
+ * integer. Behavior is undefined when `value` is anything other than an unsigned 32-bit integer.
940
+ *
941
+ * @param value Number to write.
942
+ * @param offset Number of bytes to skip before starting to write.
943
+ * @param noAssert
944
+ * @returns `offset` plus the number of bytes written.
945
+ */
946
+ writeUInt32BE(value, offset, noAssert) {
947
+ value = +value;
948
+ offset = offset >>> 0;
949
+ if (!noAssert) {
950
+ Buffer._checkInt(this, value, offset, 4, 4294967295, 0);
951
+ }
952
+ this[offset] = value >>> 24;
953
+ this[offset + 1] = value >>> 16;
954
+ this[offset + 2] = value >>> 8;
955
+ this[offset + 3] = value & 255;
956
+ return offset + 4;
957
+ }
958
+ /**
959
+ * Writes `value` to `buf` at the specified `offset`. The `value` must be a valid signed 8-bit integer.
960
+ * Behavior is undefined when `value` is anything other than a signed 8-bit integer.
961
+ *
962
+ * @param value Number to write.
963
+ * @param offset Number of bytes to skip before starting to write.
964
+ * @param noAssert
965
+ * @returns `offset` plus the number of bytes written.
966
+ */
967
+ writeInt8(value, offset, noAssert) {
968
+ value = +value;
969
+ offset = offset >>> 0;
970
+ if (!noAssert) {
971
+ Buffer._checkInt(this, value, offset, 1, 127, -128);
972
+ }
973
+ if (value < 0) {
974
+ value = 255 + value + 1;
975
+ }
976
+ this[offset] = value & 255;
977
+ return offset + 1;
978
+ }
979
+ /**
980
+ * Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 16-bit
981
+ * integer. Behavior is undefined when `value` is anything other than a signed 16-bit integer.
982
+ *
983
+ * @param value Number to write.
984
+ * @param offset Number of bytes to skip before starting to write.
985
+ * @param noAssert
986
+ * @returns `offset` plus the number of bytes written.
987
+ */
988
+ writeInt16LE(value, offset, noAssert) {
989
+ value = +value;
990
+ offset = offset >>> 0;
991
+ if (!noAssert) {
992
+ Buffer._checkInt(this, value, offset, 2, 32767, -32768);
993
+ }
994
+ this[offset] = value & 255;
995
+ this[offset + 1] = value >>> 8;
996
+ return offset + 2;
997
+ }
998
+ /**
999
+ * Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 16-bit
1000
+ * integer. Behavior is undefined when `value` is anything other than a signed 16-bit integer.
1001
+ *
1002
+ * @param value Number to write.
1003
+ * @param offset Number of bytes to skip before starting to write.
1004
+ * @param noAssert
1005
+ * @returns `offset` plus the number of bytes written.
1006
+ */
1007
+ writeInt16BE(value, offset, noAssert) {
1008
+ value = +value;
1009
+ offset = offset >>> 0;
1010
+ if (!noAssert) {
1011
+ Buffer._checkInt(this, value, offset, 2, 32767, -32768);
1012
+ }
1013
+ this[offset] = value >>> 8;
1014
+ this[offset + 1] = value & 255;
1015
+ return offset + 2;
1016
+ }
1017
+ /**
1018
+ * Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 32-bit
1019
+ * integer. Behavior is undefined when `value` is anything other than a signed 32-bit integer.
1020
+ *
1021
+ * @param value Number to write.
1022
+ * @param offset Number of bytes to skip before starting to write.
1023
+ * @param noAssert
1024
+ * @returns `offset` plus the number of bytes written.
1025
+ */
1026
+ writeInt32LE(value, offset, noAssert) {
1027
+ value = +value;
1028
+ offset = offset >>> 0;
1029
+ if (!noAssert) {
1030
+ Buffer._checkInt(this, value, offset, 4, 2147483647, -2147483648);
1031
+ }
1032
+ this[offset] = value & 255;
1033
+ this[offset + 1] = value >>> 8;
1034
+ this[offset + 2] = value >>> 16;
1035
+ this[offset + 3] = value >>> 24;
1036
+ return offset + 4;
1037
+ }
1038
+ /**
1039
+ * Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 32-bit
1040
+ * integer. Behavior is undefined when `value` is anything other than a signed 32-bit integer.
1041
+ *
1042
+ * @param value Number to write.
1043
+ * @param offset Number of bytes to skip before starting to write.
1044
+ * @param noAssert
1045
+ * @returns `offset` plus the number of bytes written.
1046
+ */
1047
+ writeInt32BE(value, offset, noAssert) {
1048
+ value = +value;
1049
+ offset = offset >>> 0;
1050
+ if (!noAssert) {
1051
+ Buffer._checkInt(this, value, offset, 4, 2147483647, -2147483648);
1052
+ }
1053
+ if (value < 0) {
1054
+ value = 4294967295 + value + 1;
1055
+ }
1056
+ this[offset] = value >>> 24;
1057
+ this[offset + 1] = value >>> 16;
1058
+ this[offset + 2] = value >>> 8;
1059
+ this[offset + 3] = value & 255;
1060
+ return offset + 4;
1061
+ }
1062
+ /**
1063
+ * Fills `buf` with the specified `value`. If the `offset` and `end` are not given, the entire `buf` will be
1064
+ * filled. The `value` is coerced to a `uint32` value if it is not a string, `Buffer`, or integer. If the resulting
1065
+ * integer is greater than `255` (decimal), then `buf` will be filled with `value & 255`.
1066
+ *
1067
+ * If the final write of a `fill()` operation falls on a multi-byte character, then only the bytes of that
1068
+ * character that fit into `buf` are written.
1069
+ *
1070
+ * If `value` contains invalid characters, it is truncated; if no valid fill data remains, an exception is thrown.
1071
+ *
1072
+ * @param value
1073
+ * @param encoding
1074
+ */
1075
+ fill(value, offset, end, encoding) {
1076
+ if (typeof value === "string") {
1077
+ if (typeof offset === "string") {
1078
+ encoding = offset;
1079
+ offset = 0;
1080
+ end = this.length;
1081
+ } else if (typeof end === "string") {
1082
+ encoding = end;
1083
+ end = this.length;
1084
+ }
1085
+ if (encoding !== void 0 && typeof encoding !== "string") {
1086
+ throw new TypeError("encoding must be a string");
1087
+ }
1088
+ if (typeof encoding === "string" && !Buffer.isEncoding(encoding)) {
1089
+ throw new TypeError("Unknown encoding: " + encoding);
1090
+ }
1091
+ if (value.length === 1) {
1092
+ const code = value.charCodeAt(0);
1093
+ if (encoding === "utf8" && code < 128) {
1094
+ value = code;
1095
+ }
1096
+ }
1097
+ } else if (typeof value === "number") {
1098
+ value = value & 255;
1099
+ } else if (typeof value === "boolean") {
1100
+ value = Number(value);
1101
+ }
1102
+ offset ?? (offset = 0);
1103
+ end ?? (end = this.length);
1104
+ if (offset < 0 || this.length < offset || this.length < end) {
1105
+ throw new RangeError("Out of range index");
1106
+ }
1107
+ if (end <= offset) {
1108
+ return this;
1109
+ }
1110
+ offset = offset >>> 0;
1111
+ end = end === void 0 ? this.length : end >>> 0;
1112
+ value || (value = 0);
1113
+ let i;
1114
+ if (typeof value === "number") {
1115
+ for (i = offset; i < end; ++i) {
1116
+ this[i] = value;
1117
+ }
1118
+ } else {
1119
+ const bytes = Buffer.isBuffer(value) ? value : Buffer.from(value, encoding);
1120
+ const len = bytes.length;
1121
+ if (len === 0) {
1122
+ throw new TypeError('The value "' + value + '" is invalid for argument "value"');
1123
+ }
1124
+ for (i = 0; i < end - offset; ++i) {
1125
+ this[i + offset] = bytes[i % len];
1126
+ }
1127
+ }
1128
+ return this;
1129
+ }
1130
+ /**
1131
+ * Returns the index of the specified value.
1132
+ *
1133
+ * If `value` is:
1134
+ * - a string, `value` is interpreted according to the character encoding in `encoding`.
1135
+ * - a `Buffer` or `Uint8Array`, `value` will be used in its entirety. To compare a partial Buffer, use `slice()`.
1136
+ * - a number, `value` will be interpreted as an unsigned 8-bit integer value between `0` and `255`.
1137
+ *
1138
+ * Any other types will throw a `TypeError`.
1139
+ *
1140
+ * @param value What to search for.
1141
+ * @param byteOffset Where to begin searching in `buf`. If negative, then calculated from the end.
1142
+ * @param encoding If `value` is a string, this is the encoding used to search.
1143
+ * @returns The index of the first occurrence of `value` in `buf`, or `-1` if not found.
1144
+ */
1145
+ indexOf(value, byteOffset, encoding) {
1146
+ return this._bidirectionalIndexOf(this, value, byteOffset, encoding, true);
1147
+ }
1148
+ /**
1149
+ * Gets the last index of the specified value.
1150
+ *
1151
+ * @see indexOf()
1152
+ * @param value
1153
+ * @param byteOffset
1154
+ * @param encoding
1155
+ */
1156
+ lastIndexOf(value, byteOffset, encoding) {
1157
+ return this._bidirectionalIndexOf(this, value, byteOffset, encoding, false);
1158
+ }
1159
+ _bidirectionalIndexOf(buffer, val, byteOffset, encoding, dir) {
1160
+ if (buffer.length === 0) {
1161
+ return -1;
1162
+ }
1163
+ if (typeof byteOffset === "string") {
1164
+ encoding = byteOffset;
1165
+ byteOffset = 0;
1166
+ } else if (typeof byteOffset === "undefined") {
1167
+ byteOffset = 0;
1168
+ } else if (byteOffset > 2147483647) {
1169
+ byteOffset = 2147483647;
1170
+ } else if (byteOffset < -2147483648) {
1171
+ byteOffset = -2147483648;
1172
+ }
1173
+ byteOffset = +byteOffset;
1174
+ if (byteOffset !== byteOffset) {
1175
+ byteOffset = dir ? 0 : buffer.length - 1;
1176
+ }
1177
+ if (byteOffset < 0) {
1178
+ byteOffset = buffer.length + byteOffset;
1179
+ }
1180
+ if (byteOffset >= buffer.length) {
1181
+ if (dir) {
1182
+ return -1;
1183
+ } else {
1184
+ byteOffset = buffer.length - 1;
1185
+ }
1186
+ } else if (byteOffset < 0) {
1187
+ if (dir) {
1188
+ byteOffset = 0;
1189
+ } else {
1190
+ return -1;
1191
+ }
1192
+ }
1193
+ if (typeof val === "string") {
1194
+ val = Buffer.from(val, encoding);
1195
+ }
1196
+ if (Buffer.isBuffer(val)) {
1197
+ if (val.length === 0) {
1198
+ return -1;
1199
+ }
1200
+ return Buffer._arrayIndexOf(buffer, val, byteOffset, encoding, dir);
1201
+ } else if (typeof val === "number") {
1202
+ val = val & 255;
1203
+ if (typeof Uint8Array.prototype.indexOf === "function") {
1204
+ if (dir) {
1205
+ return Uint8Array.prototype.indexOf.call(buffer, val, byteOffset);
1206
+ } else {
1207
+ return Uint8Array.prototype.lastIndexOf.call(buffer, val, byteOffset);
1208
+ }
1209
+ }
1210
+ return Buffer._arrayIndexOf(buffer, Buffer.from([val]), byteOffset, encoding, dir);
1211
+ }
1212
+ throw new TypeError("val must be string, number or Buffer");
1213
+ }
1214
+ /**
1215
+ * Equivalent to `buf.indexOf() !== -1`.
1216
+ *
1217
+ * @param value
1218
+ * @param byteOffset
1219
+ * @param encoding
1220
+ */
1221
+ includes(value, byteOffset, encoding) {
1222
+ return this.indexOf(value, byteOffset, encoding) !== -1;
1223
+ }
1224
+ /**
1225
+ * Creates a new buffer from the given parameters.
1226
+ *
1227
+ * @param data
1228
+ * @param encoding
1229
+ */
1230
+ static from(a, b, c) {
1231
+ return new Buffer(a, b, c);
1232
+ }
1233
+ /**
1234
+ * Returns true if `obj` is a Buffer.
1235
+ *
1236
+ * @param obj
1237
+ */
1238
+ static isBuffer(obj) {
1239
+ return obj != null && obj !== Buffer.prototype && Buffer._isInstance(obj, Buffer);
1240
+ }
1241
+ /**
1242
+ * Returns true if `encoding` is a supported encoding.
1243
+ *
1244
+ * @param encoding
1245
+ */
1246
+ static isEncoding(encoding) {
1247
+ switch (encoding.toLowerCase()) {
1248
+ case "hex":
1249
+ case "utf8":
1250
+ case "ascii":
1251
+ case "binary":
1252
+ case "latin1":
1253
+ case "ucs2":
1254
+ case "utf16le":
1255
+ case "base64":
1256
+ return true;
1257
+ default:
1258
+ return false;
1259
+ }
1260
+ }
1261
+ /**
1262
+ * Gives the actual byte length of a string for an encoding. This is not the same as `string.length` since that
1263
+ * returns the number of characters in the string.
1264
+ *
1265
+ * @param string The string to test.
1266
+ * @param encoding The encoding to use for calculation. Default is `utf8`.
1267
+ */
1268
+ static byteLength(string, encoding) {
1269
+ if (Buffer.isBuffer(string)) {
1270
+ return string.length;
1271
+ }
1272
+ if (typeof string !== "string" && (ArrayBuffer.isView(string) || Buffer._isInstance(string, ArrayBuffer))) {
1273
+ return string.byteLength;
1274
+ }
1275
+ if (typeof string !== "string") {
1276
+ throw new TypeError(
1277
+ 'The "string" argument must be one of type string, Buffer, or ArrayBuffer. Received type ' + typeof string
1278
+ );
1279
+ }
1280
+ const len = string.length;
1281
+ const mustMatch = arguments.length > 2 && arguments[2] === true;
1282
+ if (!mustMatch && len === 0) {
1283
+ return 0;
1284
+ }
1285
+ switch (encoding?.toLowerCase()) {
1286
+ case "ascii":
1287
+ case "latin1":
1288
+ case "binary":
1289
+ return len;
1290
+ case "utf8":
1291
+ return Buffer._utf8ToBytes(string).length;
1292
+ case "hex":
1293
+ return len >>> 1;
1294
+ case "ucs2":
1295
+ case "utf16le":
1296
+ return len * 2;
1297
+ case "base64":
1298
+ return Buffer._base64ToBytes(string).length;
1299
+ default:
1300
+ return mustMatch ? -1 : Buffer._utf8ToBytes(string).length;
1301
+ }
1302
+ }
1303
+ /**
1304
+ * Returns a Buffer which is the result of concatenating all the buffers in the list together.
1305
+ *
1306
+ * - If the list has no items, or if the `totalLength` is 0, then it returns a zero-length buffer.
1307
+ * - If the list has exactly one item, then the first item is returned.
1308
+ * - If the list has more than one item, then a new buffer is created.
1309
+ *
1310
+ * It is faster to provide the `totalLength` if it is known. However, if it is not provided, it will be calculated at
1311
+ * a small computational expense.
1312
+ *
1313
+ * @param list An array of Buffer objects to concatenate.
1314
+ * @param totalLength Total length of the buffers when concatenated.
1315
+ */
1316
+ static concat(list, totalLength) {
1317
+ if (!Array.isArray(list)) {
1318
+ throw new TypeError('"list" argument must be an Array of Buffers');
1319
+ }
1320
+ if (list.length === 0) {
1321
+ return Buffer.alloc(0);
1322
+ }
1323
+ let i;
1324
+ if (totalLength === void 0) {
1325
+ totalLength = 0;
1326
+ for (i = 0; i < list.length; ++i) {
1327
+ totalLength += list[i].length;
1328
+ }
1329
+ }
1330
+ const buffer = Buffer.allocUnsafe(totalLength);
1331
+ let pos = 0;
1332
+ for (i = 0; i < list.length; ++i) {
1333
+ let buf = list[i];
1334
+ if (Buffer._isInstance(buf, Uint8Array)) {
1335
+ if (pos + buf.length > buffer.length) {
1336
+ if (!Buffer.isBuffer(buf)) {
1337
+ buf = Buffer.from(buf);
1338
+ }
1339
+ buf.copy(buffer, pos);
1340
+ } else {
1341
+ Uint8Array.prototype.set.call(buffer, buf, pos);
1342
+ }
1343
+ } else if (!Buffer.isBuffer(buf)) {
1344
+ throw new TypeError('"list" argument must be an Array of Buffers');
1345
+ } else {
1346
+ buf.copy(buffer, pos);
1347
+ }
1348
+ pos += buf.length;
1349
+ }
1350
+ return buffer;
1351
+ }
1352
+ /**
1353
+ * The same as `buf1.compare(buf2)`.
1354
+ */
1355
+ static compare(buf1, buf2) {
1356
+ if (Buffer._isInstance(buf1, Uint8Array)) {
1357
+ buf1 = Buffer.from(buf1, buf1.byteOffset, buf1.byteLength);
1358
+ }
1359
+ if (Buffer._isInstance(buf2, Uint8Array)) {
1360
+ buf2 = Buffer.from(buf2, buf2.byteOffset, buf2.byteLength);
1361
+ }
1362
+ if (!Buffer.isBuffer(buf1) || !Buffer.isBuffer(buf2)) {
1363
+ throw new TypeError('The "buf1", "buf2" arguments must be one of type Buffer or Uint8Array');
1364
+ }
1365
+ if (buf1 === buf2) {
1366
+ return 0;
1367
+ }
1368
+ let x = buf1.length;
1369
+ let y = buf2.length;
1370
+ for (let i = 0, len = Math.min(x, y); i < len; ++i) {
1371
+ if (buf1[i] !== buf2[i]) {
1372
+ x = buf1[i];
1373
+ y = buf2[i];
1374
+ break;
1375
+ }
1376
+ }
1377
+ if (x < y) {
1378
+ return -1;
1379
+ }
1380
+ if (y < x) {
1381
+ return 1;
1382
+ }
1383
+ return 0;
1384
+ }
1385
+ /**
1386
+ * Allocates a new buffer of `size` octets.
1387
+ *
1388
+ * @param size The number of octets to allocate.
1389
+ * @param fill If specified, the buffer will be initialized by calling `buf.fill(fill)`, or with zeroes otherwise.
1390
+ * @param encoding The encoding used for the call to `buf.fill()` while initializing.
1391
+ */
1392
+ static alloc(size, fill, encoding) {
1393
+ if (typeof size !== "number") {
1394
+ throw new TypeError('"size" argument must be of type number');
1395
+ } else if (size < 0) {
1396
+ throw new RangeError('The value "' + size + '" is invalid for option "size"');
1397
+ }
1398
+ if (size <= 0) {
1399
+ return new Buffer(size);
1400
+ }
1401
+ if (fill !== void 0) {
1402
+ return typeof encoding === "string" ? new Buffer(size).fill(fill, 0, size, encoding) : new Buffer(size).fill(fill);
1403
+ }
1404
+ return new Buffer(size);
1405
+ }
1406
+ /**
1407
+ * Allocates a new buffer of `size` octets without initializing memory. The contents of the buffer are unknown.
1408
+ *
1409
+ * @param size
1410
+ */
1411
+ static allocUnsafe(size) {
1412
+ if (typeof size !== "number") {
1413
+ throw new TypeError('"size" argument must be of type number');
1414
+ } else if (size < 0) {
1415
+ throw new RangeError('The value "' + size + '" is invalid for option "size"');
1416
+ }
1417
+ return new Buffer(size < 0 ? 0 : Buffer._checked(size) | 0);
1418
+ }
1419
+ /**
1420
+ * Returns true if the given `obj` is an instance of `type`.
1421
+ *
1422
+ * @param obj
1423
+ * @param type
1424
+ */
1425
+ static _isInstance(obj, type) {
1426
+ return obj instanceof type || obj != null && obj.constructor != null && obj.constructor.name != null && obj.constructor.name === type.name;
1427
+ }
1428
+ static _checked(length) {
1429
+ if (length >= K_MAX_LENGTH) {
1430
+ throw new RangeError(
1431
+ "Attempt to allocate Buffer larger than maximum size: 0x" + K_MAX_LENGTH.toString(16) + " bytes"
1432
+ );
1433
+ }
1434
+ return length | 0;
1435
+ }
1436
+ static _blitBuffer(src, dst, offset, length) {
1437
+ let i;
1438
+ for (i = 0; i < length; ++i) {
1439
+ if (i + offset >= dst.length || i >= src.length) {
1440
+ break;
1441
+ }
1442
+ dst[i + offset] = src[i];
1443
+ }
1444
+ return i;
1445
+ }
1446
+ static _utf8Write(buf, string, offset, length) {
1447
+ return Buffer._blitBuffer(Buffer._utf8ToBytes(string, buf.length - offset), buf, offset, length);
1448
+ }
1449
+ static _asciiWrite(buf, string, offset, length) {
1450
+ return Buffer._blitBuffer(Buffer._asciiToBytes(string), buf, offset, length);
1451
+ }
1452
+ static _base64Write(buf, string, offset, length) {
1453
+ return Buffer._blitBuffer(Buffer._base64ToBytes(string), buf, offset, length);
1454
+ }
1455
+ static _ucs2Write(buf, string, offset, length) {
1456
+ return Buffer._blitBuffer(Buffer._utf16leToBytes(string, buf.length - offset), buf, offset, length);
1457
+ }
1458
+ static _hexWrite(buf, string, offset, length) {
1459
+ offset = Number(offset) || 0;
1460
+ const remaining = buf.length - offset;
1461
+ if (!length) {
1462
+ length = remaining;
1463
+ } else {
1464
+ length = Number(length);
1465
+ if (length > remaining) {
1466
+ length = remaining;
1467
+ }
1468
+ }
1469
+ const strLen = string.length;
1470
+ if (length > strLen / 2) {
1471
+ length = strLen / 2;
1472
+ }
1473
+ let i;
1474
+ for (i = 0; i < length; ++i) {
1475
+ const parsed = parseInt(string.substr(i * 2, 2), 16);
1476
+ if (parsed !== parsed) {
1477
+ return i;
1478
+ }
1479
+ buf[offset + i] = parsed;
1480
+ }
1481
+ return i;
1482
+ }
1483
+ static _utf8ToBytes(string, units) {
1484
+ units = units || Infinity;
1485
+ const length = string.length;
1486
+ const bytes = [];
1487
+ let codePoint;
1488
+ let leadSurrogate = null;
1489
+ for (let i = 0; i < length; ++i) {
1490
+ codePoint = string.charCodeAt(i);
1491
+ if (codePoint > 55295 && codePoint < 57344) {
1492
+ if (!leadSurrogate) {
1493
+ if (codePoint > 56319) {
1494
+ if ((units -= 3) > -1) {
1495
+ bytes.push(239, 191, 189);
1496
+ }
1497
+ continue;
1498
+ } else if (i + 1 === length) {
1499
+ if ((units -= 3) > -1) {
1500
+ bytes.push(239, 191, 189);
1501
+ }
1502
+ continue;
1503
+ }
1504
+ leadSurrogate = codePoint;
1505
+ continue;
1506
+ }
1507
+ if (codePoint < 56320) {
1508
+ if ((units -= 3) > -1) {
1509
+ bytes.push(239, 191, 189);
1510
+ }
1511
+ leadSurrogate = codePoint;
1512
+ continue;
1513
+ }
1514
+ codePoint = (leadSurrogate - 55296 << 10 | codePoint - 56320) + 65536;
1515
+ } else if (leadSurrogate) {
1516
+ if ((units -= 3) > -1) {
1517
+ bytes.push(239, 191, 189);
1518
+ }
1519
+ }
1520
+ leadSurrogate = null;
1521
+ if (codePoint < 128) {
1522
+ if ((units -= 1) < 0) {
1523
+ break;
1524
+ }
1525
+ bytes.push(codePoint);
1526
+ } else if (codePoint < 2048) {
1527
+ if ((units -= 2) < 0) {
1528
+ break;
1529
+ }
1530
+ bytes.push(codePoint >> 6 | 192, codePoint & 63 | 128);
1531
+ } else if (codePoint < 65536) {
1532
+ if ((units -= 3) < 0) {
1533
+ break;
1534
+ }
1535
+ bytes.push(codePoint >> 12 | 224, codePoint >> 6 & 63 | 128, codePoint & 63 | 128);
1536
+ } else if (codePoint < 1114112) {
1537
+ if ((units -= 4) < 0) {
1538
+ break;
1539
+ }
1540
+ bytes.push(
1541
+ codePoint >> 18 | 240,
1542
+ codePoint >> 12 & 63 | 128,
1543
+ codePoint >> 6 & 63 | 128,
1544
+ codePoint & 63 | 128
1545
+ );
1546
+ } else {
1547
+ throw new Error("Invalid code point");
1548
+ }
1549
+ }
1550
+ return bytes;
1551
+ }
1552
+ static _base64ToBytes(str) {
1553
+ return toByteArray(base64clean(str));
1554
+ }
1555
+ static _asciiToBytes(str) {
1556
+ const byteArray = [];
1557
+ for (let i = 0; i < str.length; ++i) {
1558
+ byteArray.push(str.charCodeAt(i) & 255);
1559
+ }
1560
+ return byteArray;
1561
+ }
1562
+ static _utf16leToBytes(str, units) {
1563
+ let c, hi, lo;
1564
+ const byteArray = [];
1565
+ for (let i = 0; i < str.length; ++i) {
1566
+ if ((units -= 2) < 0) break;
1567
+ c = str.charCodeAt(i);
1568
+ hi = c >> 8;
1569
+ lo = c % 256;
1570
+ byteArray.push(lo);
1571
+ byteArray.push(hi);
1572
+ }
1573
+ return byteArray;
1574
+ }
1575
+ static _hexSlice(buf, start, end) {
1576
+ const len = buf.length;
1577
+ if (!start || start < 0) {
1578
+ start = 0;
1579
+ }
1580
+ if (!end || end < 0 || end > len) {
1581
+ end = len;
1582
+ }
1583
+ let out = "";
1584
+ for (let i = start; i < end; ++i) {
1585
+ out += hexSliceLookupTable[buf[i]];
1586
+ }
1587
+ return out;
1588
+ }
1589
+ static _base64Slice(buf, start, end) {
1590
+ if (start === 0 && end === buf.length) {
1591
+ return fromByteArray(buf);
1592
+ } else {
1593
+ return fromByteArray(buf.slice(start, end));
1594
+ }
1595
+ }
1596
+ static _utf8Slice(buf, start, end) {
1597
+ end = Math.min(buf.length, end);
1598
+ const res = [];
1599
+ let i = start;
1600
+ while (i < end) {
1601
+ const firstByte = buf[i];
1602
+ let codePoint = null;
1603
+ let bytesPerSequence = firstByte > 239 ? 4 : firstByte > 223 ? 3 : firstByte > 191 ? 2 : 1;
1604
+ if (i + bytesPerSequence <= end) {
1605
+ let secondByte, thirdByte, fourthByte, tempCodePoint;
1606
+ switch (bytesPerSequence) {
1607
+ case 1:
1608
+ if (firstByte < 128) {
1609
+ codePoint = firstByte;
1610
+ }
1611
+ break;
1612
+ case 2:
1613
+ secondByte = buf[i + 1];
1614
+ if ((secondByte & 192) === 128) {
1615
+ tempCodePoint = (firstByte & 31) << 6 | secondByte & 63;
1616
+ if (tempCodePoint > 127) {
1617
+ codePoint = tempCodePoint;
1618
+ }
1619
+ }
1620
+ break;
1621
+ case 3:
1622
+ secondByte = buf[i + 1];
1623
+ thirdByte = buf[i + 2];
1624
+ if ((secondByte & 192) === 128 && (thirdByte & 192) === 128) {
1625
+ tempCodePoint = (firstByte & 15) << 12 | (secondByte & 63) << 6 | thirdByte & 63;
1626
+ if (tempCodePoint > 2047 && (tempCodePoint < 55296 || tempCodePoint > 57343)) {
1627
+ codePoint = tempCodePoint;
1628
+ }
1629
+ }
1630
+ break;
1631
+ case 4:
1632
+ secondByte = buf[i + 1];
1633
+ thirdByte = buf[i + 2];
1634
+ fourthByte = buf[i + 3];
1635
+ if ((secondByte & 192) === 128 && (thirdByte & 192) === 128 && (fourthByte & 192) === 128) {
1636
+ tempCodePoint = (firstByte & 15) << 18 | (secondByte & 63) << 12 | (thirdByte & 63) << 6 | fourthByte & 63;
1637
+ if (tempCodePoint > 65535 && tempCodePoint < 1114112) {
1638
+ codePoint = tempCodePoint;
1639
+ }
1640
+ }
1641
+ }
1642
+ }
1643
+ if (codePoint === null) {
1644
+ codePoint = 65533;
1645
+ bytesPerSequence = 1;
1646
+ } else if (codePoint > 65535) {
1647
+ codePoint -= 65536;
1648
+ res.push(codePoint >>> 10 & 1023 | 55296);
1649
+ codePoint = 56320 | codePoint & 1023;
1650
+ }
1651
+ res.push(codePoint);
1652
+ i += bytesPerSequence;
1653
+ }
1654
+ return Buffer._decodeCodePointsArray(res);
1655
+ }
1656
+ static _decodeCodePointsArray(codePoints) {
1657
+ const len = codePoints.length;
1658
+ if (len <= MAX_ARGUMENTS_LENGTH) {
1659
+ return String.fromCharCode.apply(String, codePoints);
1660
+ }
1661
+ let res = "";
1662
+ let i = 0;
1663
+ while (i < len) {
1664
+ res += String.fromCharCode.apply(String, codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH));
1665
+ }
1666
+ return res;
1667
+ }
1668
+ static _asciiSlice(buf, start, end) {
1669
+ let ret = "";
1670
+ end = Math.min(buf.length, end);
1671
+ for (let i = start; i < end; ++i) {
1672
+ ret += String.fromCharCode(buf[i] & 127);
1673
+ }
1674
+ return ret;
1675
+ }
1676
+ static _latin1Slice(buf, start, end) {
1677
+ let ret = "";
1678
+ end = Math.min(buf.length, end);
1679
+ for (let i = start; i < end; ++i) {
1680
+ ret += String.fromCharCode(buf[i]);
1681
+ }
1682
+ return ret;
1683
+ }
1684
+ static _utf16leSlice(buf, start, end) {
1685
+ const bytes = buf.slice(start, end);
1686
+ let res = "";
1687
+ for (let i = 0; i < bytes.length - 1; i += 2) {
1688
+ res += String.fromCharCode(bytes[i] + bytes[i + 1] * 256);
1689
+ }
1690
+ return res;
1691
+ }
1692
+ static _arrayIndexOf(arr, val, byteOffset, encoding, dir) {
1693
+ let indexSize = 1;
1694
+ let arrLength = arr.length;
1695
+ let valLength = val.length;
1696
+ if (encoding !== void 0) {
1697
+ encoding = Buffer._getEncoding(encoding);
1698
+ if (encoding === "ucs2" || encoding === "utf16le") {
1699
+ if (arr.length < 2 || val.length < 2) {
1700
+ return -1;
1701
+ }
1702
+ indexSize = 2;
1703
+ arrLength /= 2;
1704
+ valLength /= 2;
1705
+ byteOffset /= 2;
1706
+ }
1707
+ }
1708
+ function read(buf, i2) {
1709
+ if (indexSize === 1) {
1710
+ return buf[i2];
1711
+ } else {
1712
+ return buf.readUInt16BE(i2 * indexSize);
1713
+ }
1714
+ }
1715
+ let i;
1716
+ if (dir) {
1717
+ let foundIndex = -1;
1718
+ for (i = byteOffset; i < arrLength; i++) {
1719
+ if (read(arr, i) === read(val, foundIndex === -1 ? 0 : i - foundIndex)) {
1720
+ if (foundIndex === -1) foundIndex = i;
1721
+ if (i - foundIndex + 1 === valLength) return foundIndex * indexSize;
1722
+ } else {
1723
+ if (foundIndex !== -1) i -= i - foundIndex;
1724
+ foundIndex = -1;
1725
+ }
1726
+ }
1727
+ } else {
1728
+ if (byteOffset + valLength > arrLength) {
1729
+ byteOffset = arrLength - valLength;
1730
+ }
1731
+ for (i = byteOffset; i >= 0; i--) {
1732
+ let found = true;
1733
+ for (let j = 0; j < valLength; j++) {
1734
+ if (read(arr, i + j) !== read(val, j)) {
1735
+ found = false;
1736
+ break;
1737
+ }
1738
+ }
1739
+ if (found) {
1740
+ return i;
1741
+ }
1742
+ }
1743
+ }
1744
+ return -1;
1745
+ }
1746
+ static _checkOffset(offset, ext, length) {
1747
+ if (offset % 1 !== 0 || offset < 0) throw new RangeError("offset is not uint");
1748
+ if (offset + ext > length) throw new RangeError("Trying to access beyond buffer length");
1749
+ }
1750
+ static _checkInt(buf, value, offset, ext, max, min) {
1751
+ if (!Buffer.isBuffer(buf)) throw new TypeError('"buffer" argument must be a Buffer instance');
1752
+ if (value > max || value < min) throw new RangeError('"value" argument is out of bounds');
1753
+ if (offset + ext > buf.length) throw new RangeError("Index out of range");
1754
+ }
1755
+ static _getEncoding(encoding) {
1756
+ let toLowerCase = false;
1757
+ let originalEncoding = "";
1758
+ for (; ; ) {
1759
+ switch (encoding) {
1760
+ case "hex":
1761
+ return "hex";
1762
+ case "utf8":
1763
+ return "utf8";
1764
+ case "ascii":
1765
+ return "ascii";
1766
+ case "binary":
1767
+ return "binary";
1768
+ case "latin1":
1769
+ return "latin1";
1770
+ case "ucs2":
1771
+ return "ucs2";
1772
+ case "utf16le":
1773
+ return "utf16le";
1774
+ case "base64":
1775
+ return "base64";
1776
+ default: {
1777
+ if (toLowerCase) {
1778
+ throw new TypeError("Unknown or unsupported encoding: " + originalEncoding);
1779
+ }
1780
+ toLowerCase = true;
1781
+ originalEncoding = encoding;
1782
+ encoding = encoding.toLowerCase();
1783
+ }
1784
+ }
1785
+ }
1786
+ }
1787
+ }
1788
+ const hexSliceLookupTable = function() {
1789
+ const alphabet = "0123456789abcdef";
1790
+ const table = new Array(256);
1791
+ for (let i = 0; i < 16; ++i) {
1792
+ const i16 = i * 16;
1793
+ for (let j = 0; j < 16; ++j) {
1794
+ table[i16 + j] = alphabet[i] + alphabet[j];
1795
+ }
1796
+ }
1797
+ return table;
1798
+ }();
1799
+ const INVALID_BASE64_RE = /[^+/0-9A-Za-z-_]/g;
1800
+ function base64clean(str) {
1801
+ str = str.split("=")[0];
1802
+ str = str.trim().replace(INVALID_BASE64_RE, "");
1803
+ if (str.length < 2) return "";
1804
+ while (str.length % 4 !== 0) {
1805
+ str = str + "=";
1806
+ }
1807
+ return str;
1808
+ }
1809
+
27
1810
  function notEmpty(value) {
28
1811
  return value !== null && value !== void 0;
29
1812
  }
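Note on the vendored encoder added above: _utf8ToBytes maps each code point to its UTF-8 byte sequence and substitutes the replacement character (bytes 239, 191, 189) for unpaired surrogates, while base64clean strips everything after the first "=", removes characters outside the base64/base64url alphabet, and re-pads the string to a multiple of four. A minimal standalone sketch of that padding rule (illustration only, not code from the package):

  // Same normalization rule as base64clean, restated for illustration.
  const INVALID_B64 = /[^+/0-9A-Za-z-_]/g;
  function cleanForDecode(str) {
    str = str.split("=")[0].trim().replace(INVALID_B64, "");
    if (str.length < 2) return "";
    while (str.length % 4 !== 0) str += "=";
    return str;
  }
  console.log(cleanForDecode("aGVsbG8")); // "aGVsbG8=" -> decodes to "hello"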
@@ -227,8 +2010,7 @@ function buildPreviewBranchName({ org, branch }) {
227
2010
  function getPreviewBranch() {
228
2011
  try {
229
2012
  const { deployPreview, deployPreviewBranch, vercelGitCommitRef, vercelGitRepoOwner } = getEnvironment();
230
- if (deployPreviewBranch)
231
- return deployPreviewBranch;
2013
+ if (deployPreviewBranch) return deployPreviewBranch;
232
2014
  switch (deployPreview) {
233
2015
  case "vercel": {
234
2016
  if (!vercelGitCommitRef || !vercelGitRepoOwner) {
@@ -244,29 +2026,15 @@ function getPreviewBranch() {
244
2026
  }
245
2027
  }
246
2028
 
247
- var __accessCheck$6 = (obj, member, msg) => {
248
- if (!member.has(obj))
249
- throw TypeError("Cannot " + msg);
250
- };
251
- var __privateGet$5 = (obj, member, getter) => {
252
- __accessCheck$6(obj, member, "read from private field");
253
- return getter ? getter.call(obj) : member.get(obj);
254
- };
255
- var __privateAdd$6 = (obj, member, value) => {
256
- if (member.has(obj))
257
- throw TypeError("Cannot add the same private member more than once");
258
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
2029
+ var __typeError$7 = (msg) => {
2030
+ throw TypeError(msg);
259
2031
  };
260
- var __privateSet$4 = (obj, member, value, setter) => {
261
- __accessCheck$6(obj, member, "write to private field");
262
- setter ? setter.call(obj, value) : member.set(obj, value);
263
- return value;
264
- };
265
- var __privateMethod$4 = (obj, member, method) => {
266
- __accessCheck$6(obj, member, "access private method");
267
- return method;
268
- };
269
- var _fetch, _queue, _concurrency, _enqueue, enqueue_fn;
2032
+ var __accessCheck$7 = (obj, member, msg) => member.has(obj) || __typeError$7("Cannot " + msg);
2033
+ var __privateGet$6 = (obj, member, getter) => (__accessCheck$7(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
2034
+ var __privateAdd$7 = (obj, member, value) => member.has(obj) ? __typeError$7("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
2035
+ var __privateSet$5 = (obj, member, value, setter) => (__accessCheck$7(obj, member, "write to private field"), member.set(obj, value), value);
2036
+ var __privateMethod$4 = (obj, member, method) => (__accessCheck$7(obj, member, "access private method"), method);
2037
+ var _fetch, _queue, _concurrency, _ApiRequestPool_instances, enqueue_fn;
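The __typeError$7/__accessCheck$7 helpers above are the bundler's newer (esbuild-style) lowering of native #private members: each private field becomes a WeakMap keyed by the instance, and every read or write goes through the access-checked helpers. A rough sketch of the pattern, reusing the helper names defined just above (illustrative only; the Counter class and _count field are made up):

  var _count = new WeakMap();
  class Counter {
    constructor() {
      __privateAdd$7(this, _count, 0); // roughly what `#count = 0` compiles to
    }
    increment() {
      return __privateSet$5(this, _count, __privateGet$6(this, _count) + 1);
    }
  }
  // new Counter().increment() === 1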
270
2038
  const REQUEST_TIMEOUT = 5 * 60 * 1e3;
271
2039
  function getFetchImplementation(userFetch) {
272
2040
  const globalFetch = typeof fetch !== "undefined" ? fetch : void 0;
@@ -279,23 +2047,23 @@ function getFetchImplementation(userFetch) {
279
2047
  }
280
2048
  class ApiRequestPool {
281
2049
  constructor(concurrency = 10) {
282
- __privateAdd$6(this, _enqueue);
283
- __privateAdd$6(this, _fetch, void 0);
284
- __privateAdd$6(this, _queue, void 0);
285
- __privateAdd$6(this, _concurrency, void 0);
286
- __privateSet$4(this, _queue, []);
287
- __privateSet$4(this, _concurrency, concurrency);
2050
+ __privateAdd$7(this, _ApiRequestPool_instances);
2051
+ __privateAdd$7(this, _fetch);
2052
+ __privateAdd$7(this, _queue);
2053
+ __privateAdd$7(this, _concurrency);
2054
+ __privateSet$5(this, _queue, []);
2055
+ __privateSet$5(this, _concurrency, concurrency);
288
2056
  this.running = 0;
289
2057
  this.started = 0;
290
2058
  }
291
2059
  setFetch(fetch2) {
292
- __privateSet$4(this, _fetch, fetch2);
2060
+ __privateSet$5(this, _fetch, fetch2);
293
2061
  }
294
2062
  getFetch() {
295
- if (!__privateGet$5(this, _fetch)) {
2063
+ if (!__privateGet$6(this, _fetch)) {
296
2064
  throw new Error("Fetch not set");
297
2065
  }
298
- return __privateGet$5(this, _fetch);
2066
+ return __privateGet$6(this, _fetch);
299
2067
  }
300
2068
  request(url, options) {
301
2069
  const start = /* @__PURE__ */ new Date();
@@ -317,7 +2085,7 @@ class ApiRequestPool {
317
2085
  }
318
2086
  return response;
319
2087
  };
320
- return __privateMethod$4(this, _enqueue, enqueue_fn).call(this, async () => {
2088
+ return __privateMethod$4(this, _ApiRequestPool_instances, enqueue_fn).call(this, async () => {
321
2089
  return await runRequest();
322
2090
  });
323
2091
  }
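ApiRequestPool is an internal throttle: the queue holds pending resolvers, and enqueue_fn (next hunk) only starts a task while running + started stays below the configured concurrency. A rough usage sketch, assuming a global fetch is available and run inside an async context; the URLs are placeholders and this class is not part of the public API:

  const pool = new ApiRequestPool(2); // at most 2 requests in flight
  pool.setFetch(fetch);
  const responses = await Promise.all([
    pool.request("https://example.com/a", { method: "GET" }),
    pool.request("https://example.com/b", { method: "GET" }),
    pool.request("https://example.com/c", { method: "GET" }) // waits for a free slot
  ]);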
@@ -325,21 +2093,21 @@ class ApiRequestPool {
325
2093
  _fetch = new WeakMap();
326
2094
  _queue = new WeakMap();
327
2095
  _concurrency = new WeakMap();
328
- _enqueue = new WeakSet();
2096
+ _ApiRequestPool_instances = new WeakSet();
329
2097
  enqueue_fn = function(task) {
330
- const promise = new Promise((resolve) => __privateGet$5(this, _queue).push(resolve)).finally(() => {
2098
+ const promise = new Promise((resolve) => __privateGet$6(this, _queue).push(resolve)).finally(() => {
331
2099
  this.started--;
332
2100
  this.running++;
333
2101
  }).then(() => task()).finally(() => {
334
2102
  this.running--;
335
- const next = __privateGet$5(this, _queue).shift();
2103
+ const next = __privateGet$6(this, _queue).shift();
336
2104
  if (next !== void 0) {
337
2105
  this.started++;
338
2106
  next();
339
2107
  }
340
2108
  });
341
- if (this.running + this.started < __privateGet$5(this, _concurrency)) {
342
- const next = __privateGet$5(this, _queue).shift();
2109
+ if (this.running + this.started < __privateGet$6(this, _concurrency)) {
2110
+ const next = __privateGet$6(this, _queue).shift();
343
2111
  if (next !== void 0) {
344
2112
  this.started++;
345
2113
  next();
@@ -528,7 +2296,7 @@ function defaultOnOpen(response) {
528
2296
  }
529
2297
  }
530
2298
 
531
- const VERSION = "0.29.2";
2299
+ const VERSION = "0.30.0";
532
2300
 
533
2301
  class ErrorWithCause extends Error {
534
2302
  constructor(message, options) {
@@ -608,35 +2376,30 @@ function parseProviderString(provider = "production") {
608
2376
  return provider;
609
2377
  }
610
2378
  const [main, workspaces] = provider.split(",");
611
- if (!main || !workspaces)
612
- return null;
2379
+ if (!main || !workspaces) return null;
613
2380
  return { main, workspaces };
614
2381
  }
615
2382
  function buildProviderString(provider) {
616
- if (isHostProviderAlias(provider))
617
- return provider;
2383
+ if (isHostProviderAlias(provider)) return provider;
618
2384
  return `${provider.main},${provider.workspaces}`;
619
2385
  }
620
2386
  function parseWorkspacesUrlParts(url) {
621
- if (!isString(url))
622
- return null;
2387
+ if (!isString(url)) return null;
623
2388
  const matches = {
624
- production: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.xata\.sh.*/),
625
- staging: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.staging-xata\.dev.*/),
626
- dev: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.dev-xata\.dev.*/),
627
- local: url.match(/(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:(\d+)/)
2389
+ production: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.xata\.sh\/db\/([^:]+):?(.*)?/),
2390
+ staging: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.staging-xata\.dev\/db\/([^:]+):?(.*)?/),
2391
+ dev: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.dev-xata\.dev\/db\/([^:]+):?(.*)?/),
2392
+ local: url.match(/(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:(?:\d+)\/db\/([^:]+):?(.*)?/)
628
2393
  };
629
2394
  const [host, match] = Object.entries(matches).find(([, match2]) => match2 !== null) ?? [];
630
- if (!isHostProviderAlias(host) || !match)
631
- return null;
632
- return { workspace: match[1], region: match[2], host };
2395
+ if (!isHostProviderAlias(host) || !match) return null;
2396
+ return { workspace: match[1], region: match[2], database: match[3], branch: match[4], host };
633
2397
  }
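The workspace URL matchers above now also capture the database name and optional branch from the /db/ path segment, so the parsed result carries database and branch alongside workspace, region and host. A standalone sketch of the production pattern against a made-up URL (the workspace, region and database names are hypothetical):

  const productionRe = /(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.xata\.sh\/db\/([^:]+):?(.*)?/;
  const m = "https://my-workspace-abc123.us-east-1.xata.sh/db/app:main".match(productionRe);
  console.log(m[1], m[2], m[3], m[4]); // my-workspace-abc123 us-east-1 app main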
634
2398
 
635
2399
  const pool = new ApiRequestPool();
636
2400
  const resolveUrl = (url, queryParams = {}, pathParams = {}) => {
637
2401
  const cleanQueryParams = Object.entries(queryParams).reduce((acc, [key, value]) => {
638
- if (value === void 0 || value === null)
639
- return acc;
2402
+ if (value === void 0 || value === null) return acc;
640
2403
  return { ...acc, [key]: value };
641
2404
  }, {});
642
2405
  const query = new URLSearchParams(cleanQueryParams).toString();
@@ -684,8 +2447,7 @@ function hostHeader(url) {
684
2447
  return groups?.host ? { Host: groups.host } : {};
685
2448
  }
686
2449
  async function parseBody(body, headers) {
687
- if (!isDefined(body))
688
- return void 0;
2450
+ if (!isDefined(body)) return void 0;
689
2451
  if (isBlob(body) || typeof body.text === "function") {
690
2452
  return body;
691
2453
  }
@@ -740,8 +2502,6 @@ async function fetch$1({
740
2502
  "X-Xata-Client-ID": clientID ?? defaultClientID,
741
2503
  "X-Xata-Session-ID": sessionID ?? generateUUID(),
742
2504
  "X-Xata-Agent": xataAgent,
743
- // Force field rename to xata_ internal properties
744
- "X-Features": compact(["feat-internal-field-rename-api=1", customHeaders?.["X-Features"]]).join(" "),
745
2505
  ...customHeaders,
746
2506
  ...hostHeader(fullUrl),
747
2507
  Authorization: `Bearer ${apiKey}`
@@ -764,8 +2524,7 @@ async function fetch$1({
764
2524
  [TraceAttributes.CLOUDFLARE_RAY_ID]: response.headers?.get("cf-ray") ?? void 0
765
2525
  });
766
2526
  const message = response.headers?.get("x-xata-message");
767
- if (message)
768
- console.warn(message);
2527
+ if (message) console.warn(message);
769
2528
  if (response.status === 204) {
770
2529
  return {};
771
2530
  }
@@ -849,16 +2608,108 @@ function parseUrl(url) {
849
2608
 
850
2609
  const dataPlaneFetch = async (options) => fetch$1({ ...options, endpoint: "dataPlane" });
851
2610
 
852
- const applyMigration = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/apply", method: "post", ...variables, signal });
2611
+ const getTasks = (variables, signal) => dataPlaneFetch({
2612
+ url: "/tasks",
2613
+ method: "get",
2614
+ ...variables,
2615
+ signal
2616
+ });
2617
+ const getTaskStatus = (variables, signal) => dataPlaneFetch({
2618
+ url: "/tasks/{taskId}",
2619
+ method: "get",
2620
+ ...variables,
2621
+ signal
2622
+ });
2623
+ const listClusterBranches = (variables, signal) => dataPlaneFetch({
2624
+ url: "/cluster/{clusterId}/branches",
2625
+ method: "get",
2626
+ ...variables,
2627
+ signal
2628
+ });
2629
+ const listClusterExtensions = (variables, signal) => dataPlaneFetch({
2630
+ url: "/cluster/{clusterId}/extensions",
2631
+ method: "get",
2632
+ ...variables,
2633
+ signal
2634
+ });
2635
+ const installClusterExtension = (variables, signal) => dataPlaneFetch({
2636
+ url: "/cluster/{clusterId}/extensions",
2637
+ method: "post",
2638
+ ...variables,
2639
+ signal
2640
+ });
2641
+ const dropClusterExtension = (variables, signal) => dataPlaneFetch({
2642
+ url: "/cluster/{clusterId}/extensions",
2643
+ method: "delete",
2644
+ ...variables,
2645
+ signal
2646
+ });
2647
+ const getClusterMetrics = (variables, signal) => dataPlaneFetch({
2648
+ url: "/cluster/{clusterId}/metrics",
2649
+ method: "get",
2650
+ ...variables,
2651
+ signal
2652
+ });
2653
+ const applyMigration = (variables, signal) => dataPlaneFetch({
2654
+ url: "/db/{dbBranchName}/migrations/apply",
2655
+ method: "post",
2656
+ ...variables,
2657
+ signal
2658
+ });
2659
+ const startMigration = (variables, signal) => dataPlaneFetch({
2660
+ url: "/db/{dbBranchName}/migrations/start",
2661
+ method: "post",
2662
+ ...variables,
2663
+ signal
2664
+ });
2665
+ const completeMigration = (variables, signal) => dataPlaneFetch({
2666
+ url: "/db/{dbBranchName}/migrations/complete",
2667
+ method: "post",
2668
+ ...variables,
2669
+ signal
2670
+ });
2671
+ const rollbackMigration = (variables, signal) => dataPlaneFetch({
2672
+ url: "/db/{dbBranchName}/migrations/rollback",
2673
+ method: "post",
2674
+ ...variables,
2675
+ signal
2676
+ });
853
2677
  const adaptTable = (variables, signal) => dataPlaneFetch({
854
2678
  url: "/db/{dbBranchName}/migrations/adapt/{tableName}",
855
2679
  method: "post",
856
2680
  ...variables,
857
2681
  signal
858
2682
  });
859
- const getBranchMigrationJobStatus = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/status", method: "get", ...variables, signal });
860
- const getMigrationJobStatus = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/jobs/{jobId}", method: "get", ...variables, signal });
861
- const getMigrationHistory = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/history", method: "get", ...variables, signal });
2683
+ const adaptAllTables = (variables, signal) => dataPlaneFetch({
2684
+ url: "/db/{dbBranchName}/migrations/adapt",
2685
+ method: "post",
2686
+ ...variables,
2687
+ signal
2688
+ });
2689
+ const getBranchMigrationJobStatus = (variables, signal) => dataPlaneFetch({
2690
+ url: "/db/{dbBranchName}/migrations/status",
2691
+ method: "get",
2692
+ ...variables,
2693
+ signal
2694
+ });
2695
+ const getMigrationJobs = (variables, signal) => dataPlaneFetch({
2696
+ url: "/db/{dbBranchName}/migrations/jobs",
2697
+ method: "get",
2698
+ ...variables,
2699
+ signal
2700
+ });
2701
+ const getMigrationJobStatus = (variables, signal) => dataPlaneFetch({
2702
+ url: "/db/{dbBranchName}/migrations/jobs/{jobId}",
2703
+ method: "get",
2704
+ ...variables,
2705
+ signal
2706
+ });
2707
+ const getMigrationHistory = (variables, signal) => dataPlaneFetch({
2708
+ url: "/db/{dbBranchName}/migrations/history",
2709
+ method: "get",
2710
+ ...variables,
2711
+ signal
2712
+ });
862
2713
  const getBranchList = (variables, signal) => dataPlaneFetch({
863
2714
  url: "/dbs/{dbName}",
864
2715
  method: "get",
@@ -871,82 +2722,181 @@ const getDatabaseSettings = (variables, signal) => dataPlaneFetch({
871
2722
  ...variables,
872
2723
  signal
873
2724
  });
874
- const updateDatabaseSettings = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/settings", method: "patch", ...variables, signal });
875
- const getBranchDetails = (variables, signal) => dataPlaneFetch({
876
- url: "/db/{dbBranchName}",
2725
+ const updateDatabaseSettings = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/settings", method: "patch", ...variables, signal });
2726
+ const createBranchAsync = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/async", method: "put", ...variables, signal });
2727
+ const getBranchDetails = (variables, signal) => dataPlaneFetch({
2728
+ url: "/db/{dbBranchName}",
2729
+ method: "get",
2730
+ ...variables,
2731
+ signal
2732
+ });
2733
+ const createBranch = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}", method: "put", ...variables, signal });
2734
+ const deleteBranch = (variables, signal) => dataPlaneFetch({
2735
+ url: "/db/{dbBranchName}",
2736
+ method: "delete",
2737
+ ...variables,
2738
+ signal
2739
+ });
2740
+ const getSchema = (variables, signal) => dataPlaneFetch({
2741
+ url: "/db/{dbBranchName}/schema",
2742
+ method: "get",
2743
+ ...variables,
2744
+ signal
2745
+ });
2746
+ const getSchemas = (variables, signal) => dataPlaneFetch({
2747
+ url: "/db/{dbBranchName}/schemas",
2748
+ method: "get",
2749
+ ...variables,
2750
+ signal
2751
+ });
2752
+ const copyBranch = (variables, signal) => dataPlaneFetch({
2753
+ url: "/db/{dbBranchName}/copy",
2754
+ method: "post",
2755
+ ...variables,
2756
+ signal
2757
+ });
2758
+ const getBranchMoveStatus = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/move", method: "get", ...variables, signal });
2759
+ const moveBranch = (variables, signal) => dataPlaneFetch({
2760
+ url: "/db/{dbBranchName}/move",
2761
+ method: "put",
2762
+ ...variables,
2763
+ signal
2764
+ });
2765
+ const updateBranchMetadata = (variables, signal) => dataPlaneFetch({
2766
+ url: "/db/{dbBranchName}/metadata",
2767
+ method: "put",
2768
+ ...variables,
2769
+ signal
2770
+ });
2771
+ const getBranchMetadata = (variables, signal) => dataPlaneFetch({
2772
+ url: "/db/{dbBranchName}/metadata",
2773
+ method: "get",
2774
+ ...variables,
2775
+ signal
2776
+ });
2777
+ const getBranchStats = (variables, signal) => dataPlaneFetch({
2778
+ url: "/db/{dbBranchName}/stats",
2779
+ method: "get",
2780
+ ...variables,
2781
+ signal
2782
+ });
2783
+ const getGitBranchesMapping = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "get", ...variables, signal });
2784
+ const addGitBranchesEntry = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "post", ...variables, signal });
2785
+ const removeGitBranchesEntry = (variables, signal) => dataPlaneFetch({
2786
+ url: "/dbs/{dbName}/gitBranches",
2787
+ method: "delete",
2788
+ ...variables,
2789
+ signal
2790
+ });
2791
+ const resolveBranch = (variables, signal) => dataPlaneFetch({
2792
+ url: "/dbs/{dbName}/resolveBranch",
2793
+ method: "get",
2794
+ ...variables,
2795
+ signal
2796
+ });
2797
+ const getBranchMigrationHistory = (variables, signal) => dataPlaneFetch({
2798
+ url: "/db/{dbBranchName}/migrations",
2799
+ method: "get",
2800
+ ...variables,
2801
+ signal
2802
+ });
2803
+ const getBranchMigrationPlan = (variables, signal) => dataPlaneFetch({
2804
+ url: "/db/{dbBranchName}/migrations/plan",
2805
+ method: "post",
2806
+ ...variables,
2807
+ signal
2808
+ });
2809
+ const executeBranchMigrationPlan = (variables, signal) => dataPlaneFetch({
2810
+ url: "/db/{dbBranchName}/migrations/execute",
2811
+ method: "post",
2812
+ ...variables,
2813
+ signal
2814
+ });
2815
+ const queryMigrationRequests = (variables, signal) => dataPlaneFetch({
2816
+ url: "/dbs/{dbName}/migrations/query",
2817
+ method: "post",
2818
+ ...variables,
2819
+ signal
2820
+ });
2821
+ const createMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations", method: "post", ...variables, signal });
2822
+ const getMigrationRequest = (variables, signal) => dataPlaneFetch({
2823
+ url: "/dbs/{dbName}/migrations/{mrNumber}",
2824
+ method: "get",
2825
+ ...variables,
2826
+ signal
2827
+ });
2828
+ const updateMigrationRequest = (variables, signal) => dataPlaneFetch({
2829
+ url: "/dbs/{dbName}/migrations/{mrNumber}",
2830
+ method: "patch",
2831
+ ...variables,
2832
+ signal
2833
+ });
2834
+ const listMigrationRequestsCommits = (variables, signal) => dataPlaneFetch({
2835
+ url: "/dbs/{dbName}/migrations/{mrNumber}/commits",
2836
+ method: "post",
2837
+ ...variables,
2838
+ signal
2839
+ });
2840
+ const compareMigrationRequest = (variables, signal) => dataPlaneFetch({
2841
+ url: "/dbs/{dbName}/migrations/{mrNumber}/compare",
2842
+ method: "post",
2843
+ ...variables,
2844
+ signal
2845
+ });
2846
+ const getMigrationRequestIsMerged = (variables, signal) => dataPlaneFetch({
2847
+ url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
877
2848
  method: "get",
878
2849
  ...variables,
879
2850
  signal
880
2851
  });
881
- const createBranch = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}", method: "put", ...variables, signal });
882
- const deleteBranch = (variables, signal) => dataPlaneFetch({
883
- url: "/db/{dbBranchName}",
884
- method: "delete",
2852
+ const mergeMigrationRequest = (variables, signal) => dataPlaneFetch({
2853
+ url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
2854
+ method: "post",
885
2855
  ...variables,
886
2856
  signal
887
2857
  });
888
- const getSchema = (variables, signal) => dataPlaneFetch({
889
- url: "/db/{dbBranchName}/schema",
890
- method: "get",
2858
+ const getBranchSchemaHistory = (variables, signal) => dataPlaneFetch({
2859
+ url: "/db/{dbBranchName}/schema/history",
2860
+ method: "post",
891
2861
  ...variables,
892
2862
  signal
893
2863
  });
894
- const copyBranch = (variables, signal) => dataPlaneFetch({
895
- url: "/db/{dbBranchName}/copy",
2864
+ const compareBranchWithUserSchema = (variables, signal) => dataPlaneFetch({
2865
+ url: "/db/{dbBranchName}/schema/compare",
896
2866
  method: "post",
897
2867
  ...variables,
898
2868
  signal
899
2869
  });
900
- const updateBranchMetadata = (variables, signal) => dataPlaneFetch({
901
- url: "/db/{dbBranchName}/metadata",
902
- method: "put",
2870
+ const compareBranchSchemas = (variables, signal) => dataPlaneFetch({
2871
+ url: "/db/{dbBranchName}/schema/compare/{branchName}",
2872
+ method: "post",
903
2873
  ...variables,
904
2874
  signal
905
2875
  });
906
- const getBranchMetadata = (variables, signal) => dataPlaneFetch({
907
- url: "/db/{dbBranchName}/metadata",
908
- method: "get",
2876
+ const updateBranchSchema = (variables, signal) => dataPlaneFetch({
2877
+ url: "/db/{dbBranchName}/schema/update",
2878
+ method: "post",
909
2879
  ...variables,
910
2880
  signal
911
2881
  });
912
- const getBranchStats = (variables, signal) => dataPlaneFetch({
913
- url: "/db/{dbBranchName}/stats",
914
- method: "get",
2882
+ const previewBranchSchemaEdit = (variables, signal) => dataPlaneFetch({
2883
+ url: "/db/{dbBranchName}/schema/preview",
2884
+ method: "post",
915
2885
  ...variables,
916
2886
  signal
917
2887
  });
918
- const getGitBranchesMapping = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "get", ...variables, signal });
919
- const addGitBranchesEntry = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "post", ...variables, signal });
920
- const removeGitBranchesEntry = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "delete", ...variables, signal });
921
- const resolveBranch = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/resolveBranch", method: "get", ...variables, signal });
922
- const getBranchMigrationHistory = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations", method: "get", ...variables, signal });
923
- const getBranchMigrationPlan = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/plan", method: "post", ...variables, signal });
924
- const executeBranchMigrationPlan = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/execute", method: "post", ...variables, signal });
925
- const queryMigrationRequests = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/query", method: "post", ...variables, signal });
926
- const createMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations", method: "post", ...variables, signal });
927
- const getMigrationRequest = (variables, signal) => dataPlaneFetch({
928
- url: "/dbs/{dbName}/migrations/{mrNumber}",
929
- method: "get",
2888
+ const applyBranchSchemaEdit = (variables, signal) => dataPlaneFetch({
2889
+ url: "/db/{dbBranchName}/schema/apply",
2890
+ method: "post",
930
2891
  ...variables,
931
2892
  signal
932
2893
  });
933
- const updateMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/{mrNumber}", method: "patch", ...variables, signal });
934
- const listMigrationRequestsCommits = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/{mrNumber}/commits", method: "post", ...variables, signal });
935
- const compareMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/{mrNumber}/compare", method: "post", ...variables, signal });
936
- const getMigrationRequestIsMerged = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/{mrNumber}/merge", method: "get", ...variables, signal });
937
- const mergeMigrationRequest = (variables, signal) => dataPlaneFetch({
938
- url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
2894
+ const pushBranchMigrations = (variables, signal) => dataPlaneFetch({
2895
+ url: "/db/{dbBranchName}/schema/push",
939
2896
  method: "post",
940
2897
  ...variables,
941
2898
  signal
942
2899
  });
943
- const getBranchSchemaHistory = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/history", method: "post", ...variables, signal });
944
- const compareBranchWithUserSchema = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/compare", method: "post", ...variables, signal });
945
- const compareBranchSchemas = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/compare/{branchName}", method: "post", ...variables, signal });
946
- const updateBranchSchema = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/update", method: "post", ...variables, signal });
947
- const previewBranchSchemaEdit = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/preview", method: "post", ...variables, signal });
948
- const applyBranchSchemaEdit = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/apply", method: "post", ...variables, signal });
949
- const pushBranchMigrations = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/push", method: "post", ...variables, signal });
950
2900
  const createTable = (variables, signal) => dataPlaneFetch({
951
2901
  url: "/db/{dbBranchName}/tables/{tableName}",
952
2902
  method: "put",
@@ -959,14 +2909,24 @@ const deleteTable = (variables, signal) => dataPlaneFetch({
959
2909
  ...variables,
960
2910
  signal
961
2911
  });
962
- const updateTable = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}", method: "patch", ...variables, signal });
2912
+ const updateTable = (variables, signal) => dataPlaneFetch({
2913
+ url: "/db/{dbBranchName}/tables/{tableName}",
2914
+ method: "patch",
2915
+ ...variables,
2916
+ signal
2917
+ });
963
2918
  const getTableSchema = (variables, signal) => dataPlaneFetch({
964
2919
  url: "/db/{dbBranchName}/tables/{tableName}/schema",
965
2920
  method: "get",
966
2921
  ...variables,
967
2922
  signal
968
2923
  });
969
- const setTableSchema = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/schema", method: "put", ...variables, signal });
2924
+ const setTableSchema = (variables, signal) => dataPlaneFetch({
2925
+ url: "/db/{dbBranchName}/tables/{tableName}/schema",
2926
+ method: "put",
2927
+ ...variables,
2928
+ signal
2929
+ });
970
2930
  const getTableColumns = (variables, signal) => dataPlaneFetch({
971
2931
  url: "/db/{dbBranchName}/tables/{tableName}/columns",
972
2932
  method: "get",
@@ -974,7 +2934,12 @@ const getTableColumns = (variables, signal) => dataPlaneFetch({
974
2934
  signal
975
2935
  });
976
2936
  const addTableColumn = (variables, signal) => dataPlaneFetch(
977
- { url: "/db/{dbBranchName}/tables/{tableName}/columns", method: "post", ...variables, signal }
2937
+ {
2938
+ url: "/db/{dbBranchName}/tables/{tableName}/columns",
2939
+ method: "post",
2940
+ ...variables,
2941
+ signal
2942
+ }
978
2943
  );
979
2944
  const getColumn = (variables, signal) => dataPlaneFetch({
980
2945
  url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
@@ -982,15 +2947,30 @@ const getColumn = (variables, signal) => dataPlaneFetch({
982
2947
  ...variables,
983
2948
  signal
984
2949
  });
985
- const updateColumn = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}", method: "patch", ...variables, signal });
2950
+ const updateColumn = (variables, signal) => dataPlaneFetch({
2951
+ url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
2952
+ method: "patch",
2953
+ ...variables,
2954
+ signal
2955
+ });
986
2956
  const deleteColumn = (variables, signal) => dataPlaneFetch({
987
2957
  url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
988
2958
  method: "delete",
989
2959
  ...variables,
990
2960
  signal
991
2961
  });
992
- const branchTransaction = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/transaction", method: "post", ...variables, signal });
993
- const insertRecord = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/data", method: "post", ...variables, signal });
2962
+ const branchTransaction = (variables, signal) => dataPlaneFetch({
2963
+ url: "/db/{dbBranchName}/transaction",
2964
+ method: "post",
2965
+ ...variables,
2966
+ signal
2967
+ });
2968
+ const insertRecord = (variables, signal) => dataPlaneFetch({
2969
+ url: "/db/{dbBranchName}/tables/{tableName}/data",
2970
+ method: "post",
2971
+ ...variables,
2972
+ signal
2973
+ });
994
2974
  const getFileItem = (variables, signal) => dataPlaneFetch({
995
2975
  url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}/column/{columnName}/file/{fileId}",
996
2976
  method: "get",
@@ -1033,11 +3013,36 @@ const getRecord = (variables, signal) => dataPlaneFetch({
1033
3013
  ...variables,
1034
3014
  signal
1035
3015
  });
1036
- const insertRecordWithID = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}", method: "put", ...variables, signal });
1037
- const updateRecordWithID = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}", method: "patch", ...variables, signal });
1038
- const upsertRecordWithID = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}", method: "post", ...variables, signal });
1039
- const deleteRecord = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}", method: "delete", ...variables, signal });
1040
- const bulkInsertTableRecords = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/bulk", method: "post", ...variables, signal });
3016
+ const insertRecordWithID = (variables, signal) => dataPlaneFetch({
3017
+ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
3018
+ method: "put",
3019
+ ...variables,
3020
+ signal
3021
+ });
3022
+ const updateRecordWithID = (variables, signal) => dataPlaneFetch({
3023
+ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
3024
+ method: "patch",
3025
+ ...variables,
3026
+ signal
3027
+ });
3028
+ const upsertRecordWithID = (variables, signal) => dataPlaneFetch({
3029
+ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
3030
+ method: "post",
3031
+ ...variables,
3032
+ signal
3033
+ });
3034
+ const deleteRecord = (variables, signal) => dataPlaneFetch({
3035
+ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
3036
+ method: "delete",
3037
+ ...variables,
3038
+ signal
3039
+ });
3040
+ const bulkInsertTableRecords = (variables, signal) => dataPlaneFetch({
3041
+ url: "/db/{dbBranchName}/tables/{tableName}/bulk",
3042
+ method: "post",
3043
+ ...variables,
3044
+ signal
3045
+ });
1041
3046
  const queryTable = (variables, signal) => dataPlaneFetch({
1042
3047
  url: "/db/{dbBranchName}/tables/{tableName}/query",
1043
3048
  method: "post",
@@ -1056,16 +3061,36 @@ const searchTable = (variables, signal) => dataPlaneFetch({
1056
3061
  ...variables,
1057
3062
  signal
1058
3063
  });
1059
- const vectorSearchTable = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/vectorSearch", method: "post", ...variables, signal });
3064
+ const vectorSearchTable = (variables, signal) => dataPlaneFetch({
3065
+ url: "/db/{dbBranchName}/tables/{tableName}/vectorSearch",
3066
+ method: "post",
3067
+ ...variables,
3068
+ signal
3069
+ });
1060
3070
  const askTable = (variables, signal) => dataPlaneFetch({
1061
3071
  url: "/db/{dbBranchName}/tables/{tableName}/ask",
1062
3072
  method: "post",
1063
3073
  ...variables,
1064
3074
  signal
1065
3075
  });
1066
- const askTableSession = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/ask/{sessionId}", method: "post", ...variables, signal });
1067
- const summarizeTable = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/summarize", method: "post", ...variables, signal });
1068
- const aggregateTable = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/aggregate", method: "post", ...variables, signal });
3076
+ const askTableSession = (variables, signal) => dataPlaneFetch({
3077
+ url: "/db/{dbBranchName}/tables/{tableName}/ask/{sessionId}",
3078
+ method: "post",
3079
+ ...variables,
3080
+ signal
3081
+ });
3082
+ const summarizeTable = (variables, signal) => dataPlaneFetch({
3083
+ url: "/db/{dbBranchName}/tables/{tableName}/summarize",
3084
+ method: "post",
3085
+ ...variables,
3086
+ signal
3087
+ });
3088
+ const aggregateTable = (variables, signal) => dataPlaneFetch({
3089
+ url: "/db/{dbBranchName}/tables/{tableName}/aggregate",
3090
+ method: "post",
3091
+ ...variables,
3092
+ signal
3093
+ });
1069
3094
  const fileAccess = (variables, signal) => dataPlaneFetch({
1070
3095
  url: "/file/{fileId}",
1071
3096
  method: "get",
@@ -1084,14 +3109,34 @@ const sqlQuery = (variables, signal) => dataPlaneFetch({
1084
3109
  ...variables,
1085
3110
  signal
1086
3111
  });
3112
+ const sqlBatchQuery = (variables, signal) => dataPlaneFetch({
3113
+ url: "/db/{dbBranchName}/sql/batch",
3114
+ method: "post",
3115
+ ...variables,
3116
+ signal
3117
+ });
1087
3118
  const operationsByTag$2 = {
3119
+ tasks: { getTasks, getTaskStatus },
3120
+ cluster: {
3121
+ listClusterBranches,
3122
+ listClusterExtensions,
3123
+ installClusterExtension,
3124
+ dropClusterExtension,
3125
+ getClusterMetrics
3126
+ },
1088
3127
  migrations: {
1089
3128
  applyMigration,
3129
+ startMigration,
3130
+ completeMigration,
3131
+ rollbackMigration,
1090
3132
  adaptTable,
3133
+ adaptAllTables,
1091
3134
  getBranchMigrationJobStatus,
3135
+ getMigrationJobs,
1092
3136
  getMigrationJobStatus,
1093
3137
  getMigrationHistory,
1094
3138
  getSchema,
3139
+ getSchemas,
1095
3140
  getBranchMigrationHistory,
1096
3141
  getBranchMigrationPlan,
1097
3142
  executeBranchMigrationPlan,
@@ -1105,10 +3150,13 @@ const operationsByTag$2 = {
1105
3150
  },
1106
3151
  branch: {
1107
3152
  getBranchList,
3153
+ createBranchAsync,
1108
3154
  getBranchDetails,
1109
3155
  createBranch,
1110
3156
  deleteBranch,
1111
3157
  copyBranch,
3158
+ getBranchMoveStatus,
3159
+ moveBranch,
1112
3160
  updateBranchMetadata,
1113
3161
  getBranchMetadata,
1114
3162
  getBranchStats,
@@ -1150,7 +3198,16 @@ const operationsByTag$2 = {
1150
3198
  deleteRecord,
1151
3199
  bulkInsertTableRecords
1152
3200
  },
1153
- files: { getFileItem, putFileItem, deleteFileItem, getFile, putFile, deleteFile, fileAccess, fileUpload },
3201
+ files: {
3202
+ getFileItem,
3203
+ putFileItem,
3204
+ deleteFileItem,
3205
+ getFile,
3206
+ putFile,
3207
+ deleteFile,
3208
+ fileAccess,
3209
+ fileUpload
3210
+ },
1154
3211
  searchAndFilter: {
1155
3212
  queryTable,
1156
3213
  searchBranch,
@@ -1161,7 +3218,7 @@ const operationsByTag$2 = {
1161
3218
  summarizeTable,
1162
3219
  aggregateTable
1163
3220
  },
1164
- sql: { sqlQuery }
3221
+ sql: { sqlQuery, sqlBatchQuery }
1165
3222
  };
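Every entry in the tag map above is a thin wrapper that fixes a url and method and forwards the caller's variables into dataPlaneFetch; the new sqlBatchQuery, task, cluster and migration lifecycle wrappers all follow the same shape. A self-contained sketch of that pattern (demoFetch stands in for dataPlaneFetch, and the body fields are illustrative, not taken from this diff):

  const demoFetch = async (options) => options; // stand-in that just echoes the request
  const demoSqlBatchQuery = (variables, signal) =>
    demoFetch({ url: "/db/{dbBranchName}/sql/batch", method: "post", ...variables, signal });
  demoSqlBatchQuery({ body: { statements: [] } }).then(console.log);
  // -> { url: "/db/{dbBranchName}/sql/batch", method: "post", body: { statements: [] }, signal: undefined }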
1166
3223
 
1167
3224
  const controlPlaneFetch = async (options) => fetch$1({ ...options, endpoint: "controlPlane" });
@@ -1228,7 +3285,12 @@ const deleteOAuthAccessToken = (variables, signal) => controlPlaneFetch({
1228
3285
  ...variables,
1229
3286
  signal
1230
3287
  });
1231
- const updateOAuthAccessToken = (variables, signal) => controlPlaneFetch({ url: "/user/oauth/tokens/{token}", method: "patch", ...variables, signal });
3288
+ const updateOAuthAccessToken = (variables, signal) => controlPlaneFetch({
3289
+ url: "/user/oauth/tokens/{token}",
3290
+ method: "patch",
3291
+ ...variables,
3292
+ signal
3293
+ });
1232
3294
  const getWorkspacesList = (variables, signal) => controlPlaneFetch({
1233
3295
  url: "/workspaces",
1234
3296
  method: "get",
@@ -1259,47 +3321,150 @@ const deleteWorkspace = (variables, signal) => controlPlaneFetch({
1259
3321
  ...variables,
1260
3322
  signal
1261
3323
  });
1262
- const getWorkspaceMembersList = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/members", method: "get", ...variables, signal });
1263
- const updateWorkspaceMemberRole = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/members/{userId}", method: "put", ...variables, signal });
3324
+ const getWorkspaceSettings = (variables, signal) => controlPlaneFetch({
3325
+ url: "/workspaces/{workspaceId}/settings",
3326
+ method: "get",
3327
+ ...variables,
3328
+ signal
3329
+ });
3330
+ const updateWorkspaceSettings = (variables, signal) => controlPlaneFetch({
3331
+ url: "/workspaces/{workspaceId}/settings",
3332
+ method: "patch",
3333
+ ...variables,
3334
+ signal
3335
+ });
3336
+ const getWorkspaceMembersList = (variables, signal) => controlPlaneFetch({
3337
+ url: "/workspaces/{workspaceId}/members",
3338
+ method: "get",
3339
+ ...variables,
3340
+ signal
3341
+ });
3342
+ const updateWorkspaceMemberRole = (variables, signal) => controlPlaneFetch({
3343
+ url: "/workspaces/{workspaceId}/members/{userId}",
3344
+ method: "put",
3345
+ ...variables,
3346
+ signal
3347
+ });
1264
3348
  const removeWorkspaceMember = (variables, signal) => controlPlaneFetch({
1265
3349
  url: "/workspaces/{workspaceId}/members/{userId}",
1266
3350
  method: "delete",
1267
3351
  ...variables,
1268
3352
  signal
1269
3353
  });
1270
- const inviteWorkspaceMember = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/invites", method: "post", ...variables, signal });
1271
- const updateWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/invites/{inviteId}", method: "patch", ...variables, signal });
1272
- const cancelWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/invites/{inviteId}", method: "delete", ...variables, signal });
1273
- const acceptWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/invites/{inviteKey}/accept", method: "post", ...variables, signal });
1274
- const resendWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/invites/{inviteId}/resend", method: "post", ...variables, signal });
1275
- const listClusters = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/clusters", method: "get", ...variables, signal });
1276
- const createCluster = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/clusters", method: "post", ...variables, signal });
3354
+ const inviteWorkspaceMember = (variables, signal) => controlPlaneFetch({
3355
+ url: "/workspaces/{workspaceId}/invites",
3356
+ method: "post",
3357
+ ...variables,
3358
+ signal
3359
+ });
3360
+ const updateWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
3361
+ url: "/workspaces/{workspaceId}/invites/{inviteId}",
3362
+ method: "patch",
3363
+ ...variables,
3364
+ signal
3365
+ });
3366
+ const cancelWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
3367
+ url: "/workspaces/{workspaceId}/invites/{inviteId}",
3368
+ method: "delete",
3369
+ ...variables,
3370
+ signal
3371
+ });
3372
+ const acceptWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
3373
+ url: "/workspaces/{workspaceId}/invites/{inviteKey}/accept",
3374
+ method: "post",
3375
+ ...variables,
3376
+ signal
3377
+ });
3378
+ const resendWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
3379
+ url: "/workspaces/{workspaceId}/invites/{inviteId}/resend",
3380
+ method: "post",
3381
+ ...variables,
3382
+ signal
3383
+ });
3384
+ const listClusters = (variables, signal) => controlPlaneFetch({
3385
+ url: "/workspaces/{workspaceId}/clusters",
3386
+ method: "get",
3387
+ ...variables,
3388
+ signal
3389
+ });
3390
+ const createCluster = (variables, signal) => controlPlaneFetch({
3391
+ url: "/workspaces/{workspaceId}/clusters",
3392
+ method: "post",
3393
+ ...variables,
3394
+ signal
3395
+ });
1277
3396
  const getCluster = (variables, signal) => controlPlaneFetch({
1278
3397
  url: "/workspaces/{workspaceId}/clusters/{clusterId}",
1279
3398
  method: "get",
1280
3399
  ...variables,
1281
3400
  signal
1282
3401
  });
1283
- const updateCluster = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/clusters/{clusterId}", method: "patch", ...variables, signal });
3402
+ const updateCluster = (variables, signal) => controlPlaneFetch({
3403
+ url: "/workspaces/{workspaceId}/clusters/{clusterId}",
3404
+ method: "patch",
3405
+ ...variables,
3406
+ signal
3407
+ });
3408
+ const deleteCluster = (variables, signal) => controlPlaneFetch({
3409
+ url: "/workspaces/{workspaceId}/clusters/{clusterId}",
3410
+ method: "delete",
3411
+ ...variables,
3412
+ signal
3413
+ });
1284
3414
  const getDatabaseList = (variables, signal) => controlPlaneFetch({
1285
3415
  url: "/workspaces/{workspaceId}/dbs",
1286
3416
  method: "get",
1287
3417
  ...variables,
1288
3418
  signal
1289
3419
  });
1290
- const createDatabase = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}", method: "put", ...variables, signal });
3420
+ const createDatabase = (variables, signal) => controlPlaneFetch({
3421
+ url: "/workspaces/{workspaceId}/dbs/{dbName}",
3422
+ method: "put",
3423
+ ...variables,
3424
+ signal
3425
+ });
1291
3426
  const deleteDatabase = (variables, signal) => controlPlaneFetch({
1292
3427
  url: "/workspaces/{workspaceId}/dbs/{dbName}",
1293
3428
  method: "delete",
1294
3429
  ...variables,
1295
3430
  signal
1296
3431
  });
1297
- const getDatabaseMetadata = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}", method: "get", ...variables, signal });
1298
- const updateDatabaseMetadata = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}", method: "patch", ...variables, signal });
1299
- const renameDatabase = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}/rename", method: "post", ...variables, signal });
1300
- const getDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}/github", method: "get", ...variables, signal });
1301
- const updateDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}/github", method: "put", ...variables, signal });
1302
- const deleteDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}/github", method: "delete", ...variables, signal });
3432
+ const getDatabaseMetadata = (variables, signal) => controlPlaneFetch({
3433
+ url: "/workspaces/{workspaceId}/dbs/{dbName}",
3434
+ method: "get",
3435
+ ...variables,
3436
+ signal
3437
+ });
3438
+ const updateDatabaseMetadata = (variables, signal) => controlPlaneFetch({
3439
+ url: "/workspaces/{workspaceId}/dbs/{dbName}",
3440
+ method: "patch",
3441
+ ...variables,
3442
+ signal
3443
+ });
3444
+ const renameDatabase = (variables, signal) => controlPlaneFetch({
3445
+ url: "/workspaces/{workspaceId}/dbs/{dbName}/rename",
3446
+ method: "post",
3447
+ ...variables,
3448
+ signal
3449
+ });
3450
+ const getDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
3451
+ url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
3452
+ method: "get",
3453
+ ...variables,
3454
+ signal
3455
+ });
3456
+ const updateDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
3457
+ url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
3458
+ method: "put",
3459
+ ...variables,
3460
+ signal
3461
+ });
3462
+ const deleteDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
3463
+ url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
3464
+ method: "delete",
3465
+ ...variables,
3466
+ signal
3467
+ });
1303
3468
  const listRegions = (variables, signal) => controlPlaneFetch({
1304
3469
  url: "/workspaces/{workspaceId}/regions",
1305
3470
  method: "get",
@@ -1324,6 +3489,8 @@ const operationsByTag$1 = {
1324
3489
  getWorkspace,
1325
3490
  updateWorkspace,
1326
3491
  deleteWorkspace,
3492
+ getWorkspaceSettings,
3493
+ updateWorkspaceSettings,
1327
3494
  getWorkspaceMembersList,
1328
3495
  updateWorkspaceMemberRole,
1329
3496
  removeWorkspaceMember
@@ -1335,7 +3502,13 @@ const operationsByTag$1 = {
1335
3502
  acceptWorkspaceMemberInvite,
1336
3503
  resendWorkspaceMemberInvite
1337
3504
  },
1338
- xbcontrolOther: { listClusters, createCluster, getCluster, updateCluster },
3505
+ xbcontrolOther: {
3506
+ listClusters,
3507
+ createCluster,
3508
+ getCluster,
3509
+ updateCluster,
3510
+ deleteCluster
3511
+ },
1339
3512
  databases: {
1340
3513
  getDatabaseList,
1341
3514
  createDatabase,
@@ -1422,8 +3595,7 @@ function buildTransformString(transformations) {
1422
3595
  ).join(",");
1423
3596
  }
1424
3597
  function transformImage(url, ...transformations) {
1425
- if (!isDefined(url))
1426
- return void 0;
3598
+ if (!isDefined(url)) return void 0;
1427
3599
  const newTransformations = buildTransformString(transformations);
1428
3600
  const { hostname, pathname, search } = new URL(url);
1429
3601
  const pathParts = pathname.split("/");
@@ -1536,8 +3708,7 @@ class XataFile {
1536
3708
  }
1537
3709
  }
1538
3710
  const parseInputFileEntry = async (entry) => {
1539
- if (!isDefined(entry))
1540
- return null;
3711
+ if (!isDefined(entry)) return null;
1541
3712
  const { id, name, mediaType, base64Content, enablePublicUrl, signedUrlTimeout, uploadUrlTimeout } = await entry;
1542
3713
  return compactObject({
1543
3714
  id,
@@ -1552,24 +3723,19 @@ const parseInputFileEntry = async (entry) => {
1552
3723
  };
1553
3724
 
1554
3725
  function cleanFilter(filter) {
1555
- if (!isDefined(filter))
1556
- return void 0;
1557
- if (!isObject(filter))
1558
- return filter;
3726
+ if (!isDefined(filter)) return void 0;
3727
+ if (!isObject(filter)) return filter;
1559
3728
  const values = Object.fromEntries(
1560
3729
  Object.entries(filter).reduce((acc, [key, value]) => {
1561
- if (!isDefined(value))
1562
- return acc;
3730
+ if (!isDefined(value)) return acc;
1563
3731
  if (Array.isArray(value)) {
1564
3732
  const clean = value.map((item) => cleanFilter(item)).filter((item) => isDefined(item));
1565
- if (clean.length === 0)
1566
- return acc;
3733
+ if (clean.length === 0) return acc;
1567
3734
  return [...acc, [key, clean]];
1568
3735
  }
1569
3736
  if (isObject(value)) {
1570
3737
  const clean = cleanFilter(value);
1571
- if (!isDefined(clean))
1572
- return acc;
3738
+ if (!isDefined(clean)) return acc;
1573
3739
  return [...acc, [key, clean]];
1574
3740
  }
1575
3741
  return [...acc, [key, value]];
@@ -1579,10 +3745,8 @@ function cleanFilter(filter) {
1579
3745
  }
1580
3746
 
1581
3747
  function stringifyJson(value) {
1582
- if (!isDefined(value))
1583
- return value;
1584
- if (isString(value))
1585
- return value;
3748
+ if (!isDefined(value)) return value;
3749
+ if (isString(value)) return value;
1586
3750
  try {
1587
3751
  return JSON.stringify(value);
1588
3752
  } catch (e) {
@@ -1597,29 +3761,18 @@ function parseJson(value) {
1597
3761
  }
1598
3762
  }
1599
3763
 
1600
- var __accessCheck$5 = (obj, member, msg) => {
1601
- if (!member.has(obj))
1602
- throw TypeError("Cannot " + msg);
1603
- };
1604
- var __privateGet$4 = (obj, member, getter) => {
1605
- __accessCheck$5(obj, member, "read from private field");
1606
- return getter ? getter.call(obj) : member.get(obj);
1607
- };
1608
- var __privateAdd$5 = (obj, member, value) => {
1609
- if (member.has(obj))
1610
- throw TypeError("Cannot add the same private member more than once");
1611
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
1612
- };
1613
- var __privateSet$3 = (obj, member, value, setter) => {
1614
- __accessCheck$5(obj, member, "write to private field");
1615
- setter ? setter.call(obj, value) : member.set(obj, value);
1616
- return value;
3764
+ var __typeError$6 = (msg) => {
3765
+ throw TypeError(msg);
1617
3766
  };
3767
+ var __accessCheck$6 = (obj, member, msg) => member.has(obj) || __typeError$6("Cannot " + msg);
3768
+ var __privateGet$5 = (obj, member, getter) => (__accessCheck$6(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
3769
+ var __privateAdd$6 = (obj, member, value) => member.has(obj) ? __typeError$6("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
3770
+ var __privateSet$4 = (obj, member, value, setter) => (__accessCheck$6(obj, member, "write to private field"), member.set(obj, value), value);
1618
3771
  var _query, _page;
1619
3772
  class Page {
1620
3773
  constructor(query, meta, records = []) {
1621
- __privateAdd$5(this, _query, void 0);
1622
- __privateSet$3(this, _query, query);
3774
+ __privateAdd$6(this, _query);
3775
+ __privateSet$4(this, _query, query);
1623
3776
  this.meta = meta;
1624
3777
  this.records = new PageRecordArray(this, records);
1625
3778
  }
@@ -1630,7 +3783,7 @@ class Page {
1630
3783
  * @returns The next page or results.
1631
3784
  */
1632
3785
  async nextPage(size, offset) {
1633
- return __privateGet$4(this, _query).getPaginated({ pagination: { size, offset, after: this.meta.page.cursor } });
3786
+ return __privateGet$5(this, _query).getPaginated({ pagination: { size, offset, after: this.meta.page.cursor } });
1634
3787
  }
1635
3788
  /**
1636
3789
  * Retrieves the previous page of results.
@@ -1639,7 +3792,7 @@ class Page {
1639
3792
  * @returns The previous page or results.
1640
3793
  */
1641
3794
  async previousPage(size, offset) {
1642
- return __privateGet$4(this, _query).getPaginated({ pagination: { size, offset, before: this.meta.page.cursor } });
3795
+ return __privateGet$5(this, _query).getPaginated({ pagination: { size, offset, before: this.meta.page.cursor } });
1643
3796
  }
1644
3797
  /**
1645
3798
  * Retrieves the start page of results.
@@ -1648,7 +3801,7 @@ class Page {
1648
3801
  * @returns The start page or results.
1649
3802
  */
1650
3803
  async startPage(size, offset) {
1651
- return __privateGet$4(this, _query).getPaginated({ pagination: { size, offset, start: this.meta.page.cursor } });
3804
+ return __privateGet$5(this, _query).getPaginated({ pagination: { size, offset, start: this.meta.page.cursor } });
1652
3805
  }
1653
3806
  /**
1654
3807
  * Retrieves the end page of results.
@@ -1657,7 +3810,7 @@ class Page {
1657
3810
  * @returns The end page or results.
1658
3811
  */
1659
3812
  async endPage(size, offset) {
1660
- return __privateGet$4(this, _query).getPaginated({ pagination: { size, offset, end: this.meta.page.cursor } });
3813
+ return __privateGet$5(this, _query).getPaginated({ pagination: { size, offset, end: this.meta.page.cursor } });
1661
3814
  }
1662
3815
  /**
1663
3816
  * Shortcut method to check if there will be additional results if the next page of results is retrieved.
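The `nextPage`, `previousPage`, `startPage` and `endPage` methods above all delegate to `getPaginated` with the current cursor, and `hasNextPage()` reads `meta.page.more`. A self-contained sketch of draining results with that surface; the `PageLike` interface is a local stand-in mirroring the compiled class, and the query you would start from (for example a generated table repository) is assumed:

```ts
// Cursor pagination against the Page surface shown in the diff above.
interface PageLike<T> {
  records: T[];
  hasNextPage(): boolean; // reads meta.page.more
  nextPage(size?: number, offset?: number): Promise<PageLike<T>>; // passes `after: meta.page.cursor`
}

async function collectAll<T>(firstPage: PageLike<T>): Promise<T[]> {
  const all: T[] = [...firstPage.records];
  let page = firstPage;
  while (page.hasNextPage()) {
    page = await page.nextPage(); // server continues after the last returned cursor
    all.push(...page.records);
  }
  return all;
}
```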
@@ -1705,8 +3858,8 @@ class RecordArray extends Array {
1705
3858
  const _PageRecordArray = class _PageRecordArray extends Array {
1706
3859
  constructor(...args) {
1707
3860
  super(..._PageRecordArray.parseConstructorParams(...args));
1708
- __privateAdd$5(this, _page, void 0);
1709
- __privateSet$3(this, _page, isObject(args[0]?.meta) ? args[0] : { meta: { page: { cursor: "", more: false } }, records: [] });
3861
+ __privateAdd$6(this, _page);
3862
+ __privateSet$4(this, _page, isObject(args[0]?.meta) ? args[0] : { meta: { page: { cursor: "", more: false } }, records: [] });
1710
3863
  }
1711
3864
  static parseConstructorParams(...args) {
1712
3865
  if (args.length === 1 && typeof args[0] === "number") {
@@ -1736,7 +3889,7 @@ const _PageRecordArray = class _PageRecordArray extends Array {
1736
3889
  * @returns A new array of objects
1737
3890
  */
1738
3891
  async nextPage(size, offset) {
1739
- const newPage = await __privateGet$4(this, _page).nextPage(size, offset);
3892
+ const newPage = await __privateGet$5(this, _page).nextPage(size, offset);
1740
3893
  return new _PageRecordArray(newPage);
1741
3894
  }
1742
3895
  /**
@@ -1745,7 +3898,7 @@ const _PageRecordArray = class _PageRecordArray extends Array {
1745
3898
  * @returns A new array of objects
1746
3899
  */
1747
3900
  async previousPage(size, offset) {
1748
- const newPage = await __privateGet$4(this, _page).previousPage(size, offset);
3901
+ const newPage = await __privateGet$5(this, _page).previousPage(size, offset);
1749
3902
  return new _PageRecordArray(newPage);
1750
3903
  }
1751
3904
  /**
@@ -1754,7 +3907,7 @@ const _PageRecordArray = class _PageRecordArray extends Array {
1754
3907
  * @returns A new array of objects
1755
3908
  */
1756
3909
  async startPage(size, offset) {
1757
- const newPage = await __privateGet$4(this, _page).startPage(size, offset);
3910
+ const newPage = await __privateGet$5(this, _page).startPage(size, offset);
1758
3911
  return new _PageRecordArray(newPage);
1759
3912
  }
1760
3913
  /**
@@ -1763,68 +3916,55 @@ const _PageRecordArray = class _PageRecordArray extends Array {
1763
3916
  * @returns A new array of objects
1764
3917
  */
1765
3918
  async endPage(size, offset) {
1766
- const newPage = await __privateGet$4(this, _page).endPage(size, offset);
3919
+ const newPage = await __privateGet$5(this, _page).endPage(size, offset);
1767
3920
  return new _PageRecordArray(newPage);
1768
3921
  }
1769
3922
  /**
1770
3923
  * @returns Boolean indicating if there is a next page
1771
3924
  */
1772
3925
  hasNextPage() {
1773
- return __privateGet$4(this, _page).meta.page.more;
3926
+ return __privateGet$5(this, _page).meta.page.more;
1774
3927
  }
1775
3928
  };
1776
3929
  _page = new WeakMap();
1777
3930
  let PageRecordArray = _PageRecordArray;
1778
3931
 
1779
- var __accessCheck$4 = (obj, member, msg) => {
1780
- if (!member.has(obj))
1781
- throw TypeError("Cannot " + msg);
1782
- };
1783
- var __privateGet$3 = (obj, member, getter) => {
1784
- __accessCheck$4(obj, member, "read from private field");
1785
- return getter ? getter.call(obj) : member.get(obj);
1786
- };
1787
- var __privateAdd$4 = (obj, member, value) => {
1788
- if (member.has(obj))
1789
- throw TypeError("Cannot add the same private member more than once");
1790
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
3932
+ var __typeError$5 = (msg) => {
3933
+ throw TypeError(msg);
1791
3934
  };
1792
- var __privateSet$2 = (obj, member, value, setter) => {
1793
- __accessCheck$4(obj, member, "write to private field");
1794
- setter ? setter.call(obj, value) : member.set(obj, value);
1795
- return value;
1796
- };
1797
- var __privateMethod$3 = (obj, member, method) => {
1798
- __accessCheck$4(obj, member, "access private method");
1799
- return method;
1800
- };
1801
- var _table$1, _repository, _data, _cleanFilterConstraint, cleanFilterConstraint_fn;
3935
+ var __accessCheck$5 = (obj, member, msg) => member.has(obj) || __typeError$5("Cannot " + msg);
3936
+ var __privateGet$4 = (obj, member, getter) => (__accessCheck$5(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
3937
+ var __privateAdd$5 = (obj, member, value) => member.has(obj) ? __typeError$5("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
3938
+ var __privateSet$3 = (obj, member, value, setter) => (__accessCheck$5(obj, member, "write to private field"), member.set(obj, value), value);
3939
+ var __privateMethod$3 = (obj, member, method) => (__accessCheck$5(obj, member, "access private method"), method);
3940
+ var _table$1, _repository, _data, _Query_instances, cleanFilterConstraint_fn;
1802
3941
  const _Query = class _Query {
1803
3942
  constructor(repository, table, data, rawParent) {
1804
- __privateAdd$4(this, _cleanFilterConstraint);
1805
- __privateAdd$4(this, _table$1, void 0);
1806
- __privateAdd$4(this, _repository, void 0);
1807
- __privateAdd$4(this, _data, { filter: {} });
3943
+ __privateAdd$5(this, _Query_instances);
3944
+ __privateAdd$5(this, _table$1);
3945
+ __privateAdd$5(this, _repository);
3946
+ __privateAdd$5(this, _data, { filter: {} });
1808
3947
  // Implements pagination
1809
3948
  this.meta = { page: { cursor: "start", more: true, size: PAGINATION_DEFAULT_SIZE } };
1810
3949
  this.records = new PageRecordArray(this, []);
1811
- __privateSet$2(this, _table$1, table);
3950
+ __privateSet$3(this, _table$1, table);
1812
3951
  if (repository) {
1813
- __privateSet$2(this, _repository, repository);
3952
+ __privateSet$3(this, _repository, repository);
1814
3953
  } else {
1815
- __privateSet$2(this, _repository, this);
3954
+ __privateSet$3(this, _repository, this);
1816
3955
  }
1817
3956
  const parent = cleanParent(data, rawParent);
1818
- __privateGet$3(this, _data).filter = data.filter ?? parent?.filter ?? {};
1819
- __privateGet$3(this, _data).filter.$any = data.filter?.$any ?? parent?.filter?.$any;
1820
- __privateGet$3(this, _data).filter.$all = data.filter?.$all ?? parent?.filter?.$all;
1821
- __privateGet$3(this, _data).filter.$not = data.filter?.$not ?? parent?.filter?.$not;
1822
- __privateGet$3(this, _data).filter.$none = data.filter?.$none ?? parent?.filter?.$none;
1823
- __privateGet$3(this, _data).sort = data.sort ?? parent?.sort;
1824
- __privateGet$3(this, _data).columns = data.columns ?? parent?.columns;
1825
- __privateGet$3(this, _data).consistency = data.consistency ?? parent?.consistency;
1826
- __privateGet$3(this, _data).pagination = data.pagination ?? parent?.pagination;
1827
- __privateGet$3(this, _data).fetchOptions = data.fetchOptions ?? parent?.fetchOptions;
3957
+ __privateGet$4(this, _data).filter = data.filter ?? parent?.filter ?? {};
3958
+ __privateGet$4(this, _data).filter.$any = data.filter?.$any ?? parent?.filter?.$any;
3959
+ __privateGet$4(this, _data).filter.$all = data.filter?.$all ?? parent?.filter?.$all;
3960
+ __privateGet$4(this, _data).filter.$not = data.filter?.$not ?? parent?.filter?.$not;
3961
+ __privateGet$4(this, _data).filter.$none = data.filter?.$none ?? parent?.filter?.$none;
3962
+ __privateGet$4(this, _data).sort = data.sort ?? parent?.sort;
3963
+ __privateGet$4(this, _data).columns = data.columns ?? parent?.columns;
3964
+ __privateGet$4(this, _data).consistency = data.consistency ?? parent?.consistency;
3965
+ __privateGet$4(this, _data).pagination = data.pagination ?? parent?.pagination;
3966
+ __privateGet$4(this, _data).cache = data.cache ?? parent?.cache;
3967
+ __privateGet$4(this, _data).fetchOptions = data.fetchOptions ?? parent?.fetchOptions;
1828
3968
  this.any = this.any.bind(this);
1829
3969
  this.all = this.all.bind(this);
1830
3970
  this.not = this.not.bind(this);
@@ -1835,10 +3975,10 @@ const _Query = class _Query {
1835
3975
  Object.defineProperty(this, "repository", { enumerable: false });
1836
3976
  }
1837
3977
  getQueryOptions() {
1838
- return __privateGet$3(this, _data);
3978
+ return __privateGet$4(this, _data);
1839
3979
  }
1840
3980
  key() {
1841
- const { columns = [], filter = {}, sort = [], pagination = {} } = __privateGet$3(this, _data);
3981
+ const { columns = [], filter = {}, sort = [], pagination = {} } = __privateGet$4(this, _data);
1842
3982
  const key = JSON.stringify({ columns, filter, sort, pagination });
1843
3983
  return toBase64(key);
1844
3984
  }
@@ -1849,7 +3989,7 @@ const _Query = class _Query {
1849
3989
  */
1850
3990
  any(...queries) {
1851
3991
  const $any = queries.map((query) => query.getQueryOptions().filter ?? {});
1852
- return new _Query(__privateGet$3(this, _repository), __privateGet$3(this, _table$1), { filter: { $any } }, __privateGet$3(this, _data));
3992
+ return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { filter: { $any } }, __privateGet$4(this, _data));
1853
3993
  }
1854
3994
  /**
1855
3995
  * Builds a new query object representing a logical AND between the given subqueries.
@@ -1858,7 +3998,7 @@ const _Query = class _Query {
1858
3998
  */
1859
3999
  all(...queries) {
1860
4000
  const $all = queries.map((query) => query.getQueryOptions().filter ?? {});
1861
- return new _Query(__privateGet$3(this, _repository), __privateGet$3(this, _table$1), { filter: { $all } }, __privateGet$3(this, _data));
4001
+ return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { filter: { $all } }, __privateGet$4(this, _data));
1862
4002
  }
1863
4003
  /**
1864
4004
  * Builds a new query object representing a logical OR negating each subquery. In pseudo-code: !q1 OR !q2
@@ -1867,7 +4007,7 @@ const _Query = class _Query {
1867
4007
  */
1868
4008
  not(...queries) {
1869
4009
  const $not = queries.map((query) => query.getQueryOptions().filter ?? {});
1870
- return new _Query(__privateGet$3(this, _repository), __privateGet$3(this, _table$1), { filter: { $not } }, __privateGet$3(this, _data));
4010
+ return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { filter: { $not } }, __privateGet$4(this, _data));
1871
4011
  }
1872
4012
  /**
1873
4013
  * Builds a new query object representing a logical AND negating each subquery. In pseudo-code: !q1 AND !q2
@@ -1876,25 +4016,25 @@ const _Query = class _Query {
1876
4016
  */
1877
4017
  none(...queries) {
1878
4018
  const $none = queries.map((query) => query.getQueryOptions().filter ?? {});
1879
- return new _Query(__privateGet$3(this, _repository), __privateGet$3(this, _table$1), { filter: { $none } }, __privateGet$3(this, _data));
4019
+ return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { filter: { $none } }, __privateGet$4(this, _data));
1880
4020
  }
1881
4021
  filter(a, b) {
1882
4022
  if (arguments.length === 1) {
1883
4023
  const constraints = Object.entries(a ?? {}).map(([column, constraint]) => ({
1884
- [column]: __privateMethod$3(this, _cleanFilterConstraint, cleanFilterConstraint_fn).call(this, column, constraint)
4024
+ [column]: __privateMethod$3(this, _Query_instances, cleanFilterConstraint_fn).call(this, column, constraint)
1885
4025
  }));
1886
- const $all = compact([__privateGet$3(this, _data).filter?.$all].flat().concat(constraints));
1887
- return new _Query(__privateGet$3(this, _repository), __privateGet$3(this, _table$1), { filter: { $all } }, __privateGet$3(this, _data));
4026
+ const $all = compact([__privateGet$4(this, _data).filter?.$all].flat().concat(constraints));
4027
+ return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { filter: { $all } }, __privateGet$4(this, _data));
1888
4028
  } else {
1889
- const constraints = isDefined(a) && isDefined(b) ? [{ [a]: __privateMethod$3(this, _cleanFilterConstraint, cleanFilterConstraint_fn).call(this, a, b) }] : void 0;
1890
- const $all = compact([__privateGet$3(this, _data).filter?.$all].flat().concat(constraints));
1891
- return new _Query(__privateGet$3(this, _repository), __privateGet$3(this, _table$1), { filter: { $all } }, __privateGet$3(this, _data));
4029
+ const constraints = isDefined(a) && isDefined(b) ? [{ [a]: __privateMethod$3(this, _Query_instances, cleanFilterConstraint_fn).call(this, a, b) }] : void 0;
4030
+ const $all = compact([__privateGet$4(this, _data).filter?.$all].flat().concat(constraints));
4031
+ return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { filter: { $all } }, __privateGet$4(this, _data));
1892
4032
  }
1893
4033
  }
1894
4034
  sort(column, direction = "asc") {
1895
- const originalSort = [__privateGet$3(this, _data).sort ?? []].flat();
4035
+ const originalSort = [__privateGet$4(this, _data).sort ?? []].flat();
1896
4036
  const sort = [...originalSort, { column, direction }];
1897
- return new _Query(__privateGet$3(this, _repository), __privateGet$3(this, _table$1), { sort }, __privateGet$3(this, _data));
4037
+ return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { sort }, __privateGet$4(this, _data));
1898
4038
  }
1899
4039
  /**
1900
4040
  * Builds a new query specifying the set of columns to be returned in the query response.
@@ -1903,15 +4043,15 @@ const _Query = class _Query {
1903
4043
  */
1904
4044
  select(columns) {
1905
4045
  return new _Query(
1906
- __privateGet$3(this, _repository),
1907
- __privateGet$3(this, _table$1),
4046
+ __privateGet$4(this, _repository),
4047
+ __privateGet$4(this, _table$1),
1908
4048
  { columns },
1909
- __privateGet$3(this, _data)
4049
+ __privateGet$4(this, _data)
1910
4050
  );
1911
4051
  }
1912
4052
  getPaginated(options = {}) {
1913
- const query = new _Query(__privateGet$3(this, _repository), __privateGet$3(this, _table$1), options, __privateGet$3(this, _data));
1914
- return __privateGet$3(this, _repository).query(query);
4053
+ const query = new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), options, __privateGet$4(this, _data));
4054
+ return __privateGet$4(this, _repository).query(query);
1915
4055
  }
1916
4056
  /**
1917
4057
  * Get results in an iterator
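Each builder shown above (`any`, `all`, `not`, `none`, `filter`, `sort`, `select`, `getPaginated`) returns a fresh `_Query` seeded with the parent's `_data`, so chained calls never mutate the query they were called on, and `key()` base64-encodes the serialized `{ columns, filter, sort, pagination }`, which the cache layer later uses to identify a query. A small sketch of the same copy-on-write pattern, with illustrative field names only (Node's `Buffer` stands in for the SDK's `toBase64` helper):

```ts
// Copy-on-write builder pattern mirroring _Query above: every call derives a new
// object from the parent state, so q.filter(...).sort(...) leaves `q` untouched.
type QueryState = {
  filter: Record<string, unknown>;
  sort: { column: string; direction: "asc" | "desc" }[];
  columns?: string[];
};

class ImmutableQuery {
  constructor(private readonly state: QueryState = { filter: {}, sort: [] }) {}

  filter(column: string, value: unknown): ImmutableQuery {
    return new ImmutableQuery({ ...this.state, filter: { ...this.state.filter, [column]: value } });
  }

  sort(column: string, direction: "asc" | "desc" = "asc"): ImmutableQuery {
    return new ImmutableQuery({ ...this.state, sort: [...this.state.sort, { column, direction }] });
  }

  // Mirrors Query.key(): a stable identifier for the serialized options,
  // later combined with the table name to form a cache key.
  key(): string {
    return Buffer.from(JSON.stringify(this.state)).toString("base64");
  }
}
```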
@@ -1965,19 +4105,26 @@ const _Query = class _Query {
1965
4105
  }
1966
4106
  async getFirstOrThrow(options = {}) {
1967
4107
  const records = await this.getMany({ ...options, pagination: { size: 1 } });
1968
- if (records[0] === void 0)
1969
- throw new Error("No results found.");
4108
+ if (records[0] === void 0) throw new Error("No results found.");
1970
4109
  return records[0];
1971
4110
  }
1972
4111
  async summarize(params = {}) {
1973
4112
  const { summaries, summariesFilter, ...options } = params;
1974
4113
  const query = new _Query(
1975
- __privateGet$3(this, _repository),
1976
- __privateGet$3(this, _table$1),
4114
+ __privateGet$4(this, _repository),
4115
+ __privateGet$4(this, _table$1),
1977
4116
  options,
1978
- __privateGet$3(this, _data)
4117
+ __privateGet$4(this, _data)
1979
4118
  );
1980
- return __privateGet$3(this, _repository).summarizeTable(query, summaries, summariesFilter);
4119
+ return __privateGet$4(this, _repository).summarizeTable(query, summaries, summariesFilter);
4120
+ }
4121
+ /**
4122
+ * Builds a new query object adding a cache TTL in milliseconds.
4123
+ * @param ttl The cache TTL in milliseconds.
4124
+ * @returns A new Query object.
4125
+ */
4126
+ cache(ttl) {
4127
+ return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { cache: ttl }, __privateGet$4(this, _data));
1981
4128
  }
1982
4129
  /**
1983
4130
  * Retrieve next page of records
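The hunk above reintroduces the `cache(ttl)` builder (and the `cache` entry carried over in the query data earlier in this diff); per its JSDoc the TTL is in milliseconds and the call returns a new Query. A hedged usage sketch, where the generated client and the "users" table are placeholders and `filter`, `cache` and `getMany` are the query methods visible in this compiled output:

```ts
// Assumed usage of the reintroduced cache(ttl) builder; client and table names
// are illustrative only.
declare const xata: { db: Record<string, any> };

async function listCachedUsers() {
  const oneMinute = 60_000; // TTL in milliseconds, per the JSDoc above
  return xata.db.users
    .filter("name", "Alice")
    .cache(oneMinute) // returns a new Query carrying { cache: 60000 } in its options
    .getMany();
}
```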
@@ -2021,9 +4168,9 @@ const _Query = class _Query {
2021
4168
  _table$1 = new WeakMap();
2022
4169
  _repository = new WeakMap();
2023
4170
  _data = new WeakMap();
2024
- _cleanFilterConstraint = new WeakSet();
4171
+ _Query_instances = new WeakSet();
2025
4172
  cleanFilterConstraint_fn = function(column, value) {
2026
- const columnType = __privateGet$3(this, _table$1).schema?.columns.find(({ name }) => name === column)?.type;
4173
+ const columnType = __privateGet$4(this, _table$1).schema?.columns.find(({ name }) => name === column)?.type;
2027
4174
  if (columnType === "multiple" && (isString(value) || isStringArray(value))) {
2028
4175
  return { $includes: value };
2029
4176
  }
@@ -2056,7 +4203,12 @@ const RecordColumnTypes = [
2056
4203
  "json"
2057
4204
  ];
2058
4205
  function isIdentifiable(x) {
2059
- return isObject(x) && isString(x?.xata_id);
4206
+ return isObject(x) && isString(x?.id);
4207
+ }
4208
+ function isXataRecord(x) {
4209
+ const record = x;
4210
+ const metadata = record?.getMetadata();
4211
+ return isIdentifiable(x) && isObject(metadata) && typeof metadata.version === "number";
2060
4212
  }
2061
4213
 
2062
4214
  function isValidExpandedColumn(column) {
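The hunk above moves the identity checks back from the flattened `xata_id` field to `id` plus a `getMetadata()` result with a numeric `version`, and reintroduces `isXataRecord`. Standalone type guards with the same behavior are sketched below; the shapes are inferred from the checks in the diff rather than copied from the SDK's published types:

```ts
// Stand-in type guards mirroring the compiled isIdentifiable/isXataRecord above.
interface Identifiable { id: string }
interface XataRecordLike extends Identifiable {
  getMetadata(): { version: number; createdAt?: Date; updatedAt?: Date };
}

function isIdentifiableLike(x: unknown): x is Identifiable {
  return typeof x === "object" && x !== null && typeof (x as Identifiable).id === "string";
}

function isXataRecordLike(x: unknown): x is XataRecordLike {
  if (!isIdentifiableLike(x)) return false;
  const metadata = (x as Partial<XataRecordLike>).getMetadata?.();
  return typeof metadata === "object" && metadata !== null && typeof metadata.version === "number";
}
```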
@@ -2082,8 +4234,7 @@ function isSortFilterString(value) {
2082
4234
  }
2083
4235
  function isSortFilterBase(filter) {
2084
4236
  return isObject(filter) && Object.entries(filter).every(([key, value]) => {
2085
- if (key === "*")
2086
- return value === "random";
4237
+ if (key === "*") return value === "random";
2087
4238
  return value === "asc" || value === "desc";
2088
4239
  });
2089
4240
  }
@@ -2104,29 +4255,15 @@ function buildSortFilter(filter) {
2104
4255
  }
2105
4256
  }
2106
4257
 
2107
- var __accessCheck$3 = (obj, member, msg) => {
2108
- if (!member.has(obj))
2109
- throw TypeError("Cannot " + msg);
4258
+ var __typeError$4 = (msg) => {
4259
+ throw TypeError(msg);
2110
4260
  };
2111
- var __privateGet$2 = (obj, member, getter) => {
2112
- __accessCheck$3(obj, member, "read from private field");
2113
- return getter ? getter.call(obj) : member.get(obj);
2114
- };
2115
- var __privateAdd$3 = (obj, member, value) => {
2116
- if (member.has(obj))
2117
- throw TypeError("Cannot add the same private member more than once");
2118
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
2119
- };
2120
- var __privateSet$1 = (obj, member, value, setter) => {
2121
- __accessCheck$3(obj, member, "write to private field");
2122
- setter ? setter.call(obj, value) : member.set(obj, value);
2123
- return value;
2124
- };
2125
- var __privateMethod$2 = (obj, member, method) => {
2126
- __accessCheck$3(obj, member, "access private method");
2127
- return method;
2128
- };
2129
- var _table, _getFetchProps, _db, _schemaTables, _trace, _insertRecordWithoutId, insertRecordWithoutId_fn, _insertRecordWithId, insertRecordWithId_fn, _insertRecords, insertRecords_fn, _updateRecordWithID, updateRecordWithID_fn, _updateRecords, updateRecords_fn, _upsertRecordWithID, upsertRecordWithID_fn, _deleteRecord, deleteRecord_fn, _deleteRecords, deleteRecords_fn, _getSchemaTables, getSchemaTables_fn, _transformObjectToApi, transformObjectToApi_fn;
4261
+ var __accessCheck$4 = (obj, member, msg) => member.has(obj) || __typeError$4("Cannot " + msg);
4262
+ var __privateGet$3 = (obj, member, getter) => (__accessCheck$4(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
4263
+ var __privateAdd$4 = (obj, member, value) => member.has(obj) ? __typeError$4("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
4264
+ var __privateSet$2 = (obj, member, value, setter) => (__accessCheck$4(obj, member, "write to private field"), member.set(obj, value), value);
4265
+ var __privateMethod$2 = (obj, member, method) => (__accessCheck$4(obj, member, "access private method"), method);
4266
+ var _table, _getFetchProps, _db, _cache, _schemaTables, _trace, _RestRepository_instances, insertRecordWithoutId_fn, insertRecordWithId_fn, insertRecords_fn, updateRecordWithID_fn, updateRecords_fn, upsertRecordWithID_fn, deleteRecord_fn, deleteRecords_fn, setCacheQuery_fn, getCacheQuery_fn, getSchemaTables_fn, transformObjectToApi_fn;
2130
4267
  const BULK_OPERATION_MAX_SIZE = 1e3;
2131
4268
  class Repository extends Query {
2132
4269
  }
@@ -2137,78 +4274,64 @@ class RestRepository extends Query {
2137
4274
  { name: options.table, schema: options.schemaTables?.find((table) => table.name === options.table) },
2138
4275
  {}
2139
4276
  );
2140
- __privateAdd$3(this, _insertRecordWithoutId);
2141
- __privateAdd$3(this, _insertRecordWithId);
2142
- __privateAdd$3(this, _insertRecords);
2143
- __privateAdd$3(this, _updateRecordWithID);
2144
- __privateAdd$3(this, _updateRecords);
2145
- __privateAdd$3(this, _upsertRecordWithID);
2146
- __privateAdd$3(this, _deleteRecord);
2147
- __privateAdd$3(this, _deleteRecords);
2148
- __privateAdd$3(this, _getSchemaTables);
2149
- __privateAdd$3(this, _transformObjectToApi);
2150
- __privateAdd$3(this, _table, void 0);
2151
- __privateAdd$3(this, _getFetchProps, void 0);
2152
- __privateAdd$3(this, _db, void 0);
2153
- __privateAdd$3(this, _schemaTables, void 0);
2154
- __privateAdd$3(this, _trace, void 0);
2155
- __privateSet$1(this, _table, options.table);
2156
- __privateSet$1(this, _db, options.db);
2157
- __privateSet$1(this, _schemaTables, options.schemaTables);
2158
- __privateSet$1(this, _getFetchProps, () => ({ ...options.pluginOptions, sessionID: generateUUID() }));
4277
+ __privateAdd$4(this, _RestRepository_instances);
4278
+ __privateAdd$4(this, _table);
4279
+ __privateAdd$4(this, _getFetchProps);
4280
+ __privateAdd$4(this, _db);
4281
+ __privateAdd$4(this, _cache);
4282
+ __privateAdd$4(this, _schemaTables);
4283
+ __privateAdd$4(this, _trace);
4284
+ __privateSet$2(this, _table, options.table);
4285
+ __privateSet$2(this, _db, options.db);
4286
+ __privateSet$2(this, _cache, options.pluginOptions.cache);
4287
+ __privateSet$2(this, _schemaTables, options.schemaTables);
4288
+ __privateSet$2(this, _getFetchProps, () => ({ ...options.pluginOptions, sessionID: generateUUID() }));
2159
4289
  const trace = options.pluginOptions.trace ?? defaultTrace;
2160
- __privateSet$1(this, _trace, async (name, fn, options2 = {}) => {
4290
+ __privateSet$2(this, _trace, async (name, fn, options2 = {}) => {
2161
4291
  return trace(name, fn, {
2162
4292
  ...options2,
2163
- [TraceAttributes.TABLE]: __privateGet$2(this, _table),
4293
+ [TraceAttributes.TABLE]: __privateGet$3(this, _table),
2164
4294
  [TraceAttributes.KIND]: "sdk-operation",
2165
4295
  [TraceAttributes.VERSION]: VERSION
2166
4296
  });
2167
4297
  });
2168
4298
  }
2169
4299
  async create(a, b, c, d) {
2170
- return __privateGet$2(this, _trace).call(this, "create", async () => {
4300
+ return __privateGet$3(this, _trace).call(this, "create", async () => {
2171
4301
  const ifVersion = parseIfVersion(b, c, d);
2172
4302
  if (Array.isArray(a)) {
2173
- if (a.length === 0)
2174
- return [];
2175
- const ids = await __privateMethod$2(this, _insertRecords, insertRecords_fn).call(this, a, { ifVersion, createOnly: true });
4303
+ if (a.length === 0) return [];
4304
+ const ids = await __privateMethod$2(this, _RestRepository_instances, insertRecords_fn).call(this, a, { ifVersion, createOnly: true });
2176
4305
  const columns = isValidSelectableColumns(b) ? b : ["*"];
2177
4306
  const result = await this.read(ids, columns);
2178
4307
  return result;
2179
4308
  }
2180
4309
  if (isString(a) && isObject(b)) {
2181
- if (a === "")
2182
- throw new Error("The id can't be empty");
4310
+ if (a === "") throw new Error("The id can't be empty");
2183
4311
  const columns = isValidSelectableColumns(c) ? c : void 0;
2184
- return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: true, ifVersion });
4312
+ return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: true, ifVersion });
2185
4313
  }
2186
- if (isObject(a) && isString(a.xata_id)) {
2187
- if (a.xata_id === "")
2188
- throw new Error("The id can't be empty");
4314
+ if (isObject(a) && isString(a.id)) {
4315
+ if (a.id === "") throw new Error("The id can't be empty");
2189
4316
  const columns = isValidSelectableColumns(b) ? b : void 0;
2190
- return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, {
2191
- createOnly: true,
2192
- ifVersion
2193
- });
4317
+ return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a.id, { ...a, id: void 0 }, columns, { createOnly: true, ifVersion });
2194
4318
  }
2195
4319
  if (isObject(a)) {
2196
4320
  const columns = isValidSelectableColumns(b) ? b : void 0;
2197
- return __privateMethod$2(this, _insertRecordWithoutId, insertRecordWithoutId_fn).call(this, a, columns);
4321
+ return __privateMethod$2(this, _RestRepository_instances, insertRecordWithoutId_fn).call(this, a, columns);
2198
4322
  }
2199
4323
  throw new Error("Invalid arguments for create method");
2200
4324
  });
2201
4325
  }
2202
4326
  async read(a, b) {
2203
- return __privateGet$2(this, _trace).call(this, "read", async () => {
4327
+ return __privateGet$3(this, _trace).call(this, "read", async () => {
2204
4328
  const columns = isValidSelectableColumns(b) ? b : ["*"];
2205
4329
  if (Array.isArray(a)) {
2206
- if (a.length === 0)
2207
- return [];
4330
+ if (a.length === 0) return [];
2208
4331
  const ids = a.map((item) => extractId(item));
2209
- const finalObjects = await this.getAll({ filter: { xata_id: { $any: compact(ids) } }, columns });
4332
+ const finalObjects = await this.getAll({ filter: { id: { $any: compact(ids) } }, columns });
2210
4333
  const dictionary = finalObjects.reduce((acc, object) => {
2211
- acc[object.xata_id] = object;
4334
+ acc[object.id] = object;
2212
4335
  return acc;
2213
4336
  }, {});
2214
4337
  return ids.map((id2) => dictionary[id2 ?? ""] ?? null);
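Throughout `create` (and, further down, `update`, `createOrUpdate`, `createOrReplace` and `delete`), this version accepts the record id either as the first string argument or as an `id` property on the object (previously `xata_id`), and bulk `read` builds its lookup dictionary by `object.id`. A sketch of the two equivalent call shapes implied by the branches above; the client, the "posts" table and the column are placeholders:

```ts
// Two ways to create a record with an explicit id in this version, per the
// isString(a) && isObject(b) and isObject(a) && isString(a.id) branches above.
declare const xata: { db: Record<string, any> };

async function createPosts() {
  // id passed separately from the body...
  const a = await xata.db.posts.create("post_0001", { title: "Hello" });
  // ...or carried on the object; the SDK strips `id` from the body before inserting.
  const b = await xata.db.posts.create({ id: "post_0002", title: "World" });
  return [a, b];
}
```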
@@ -2221,17 +4344,17 @@ class RestRepository extends Query {
2221
4344
  workspace: "{workspaceId}",
2222
4345
  dbBranchName: "{dbBranch}",
2223
4346
  region: "{region}",
2224
- tableName: __privateGet$2(this, _table),
4347
+ tableName: __privateGet$3(this, _table),
2225
4348
  recordId: id
2226
4349
  },
2227
4350
  queryParams: { columns },
2228
- ...__privateGet$2(this, _getFetchProps).call(this)
4351
+ ...__privateGet$3(this, _getFetchProps).call(this)
2229
4352
  });
2230
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
4353
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
2231
4354
  return initObject(
2232
- __privateGet$2(this, _db),
4355
+ __privateGet$3(this, _db),
2233
4356
  schemaTables,
2234
- __privateGet$2(this, _table),
4357
+ __privateGet$3(this, _table),
2235
4358
  response,
2236
4359
  columns
2237
4360
  );
@@ -2246,7 +4369,7 @@ class RestRepository extends Query {
2246
4369
  });
2247
4370
  }
2248
4371
  async readOrThrow(a, b) {
2249
- return __privateGet$2(this, _trace).call(this, "readOrThrow", async () => {
4372
+ return __privateGet$3(this, _trace).call(this, "readOrThrow", async () => {
2250
4373
  const result = await this.read(a, b);
2251
4374
  if (Array.isArray(result)) {
2252
4375
  const missingIds = compact(
@@ -2265,14 +4388,13 @@ class RestRepository extends Query {
2265
4388
  });
2266
4389
  }
2267
4390
  async update(a, b, c, d) {
2268
- return __privateGet$2(this, _trace).call(this, "update", async () => {
4391
+ return __privateGet$3(this, _trace).call(this, "update", async () => {
2269
4392
  const ifVersion = parseIfVersion(b, c, d);
2270
4393
  if (Array.isArray(a)) {
2271
- if (a.length === 0)
2272
- return [];
2273
- const existing = await this.read(a, ["xata_id"]);
4394
+ if (a.length === 0) return [];
4395
+ const existing = await this.read(a, ["id"]);
2274
4396
  const updates = a.filter((_item, index) => existing[index] !== null);
2275
- await __privateMethod$2(this, _updateRecords, updateRecords_fn).call(this, updates, {
4397
+ await __privateMethod$2(this, _RestRepository_instances, updateRecords_fn).call(this, updates, {
2276
4398
  ifVersion,
2277
4399
  upsert: false
2278
4400
  });
@@ -2283,22 +4405,21 @@ class RestRepository extends Query {
2283
4405
  try {
2284
4406
  if (isString(a) && isObject(b)) {
2285
4407
  const columns = isValidSelectableColumns(c) ? c : void 0;
2286
- return await __privateMethod$2(this, _updateRecordWithID, updateRecordWithID_fn).call(this, a, b, columns, { ifVersion });
4408
+ return await __privateMethod$2(this, _RestRepository_instances, updateRecordWithID_fn).call(this, a, b, columns, { ifVersion });
2287
4409
  }
2288
- if (isObject(a) && isString(a.xata_id)) {
4410
+ if (isObject(a) && isString(a.id)) {
2289
4411
  const columns = isValidSelectableColumns(b) ? b : void 0;
2290
- return await __privateMethod$2(this, _updateRecordWithID, updateRecordWithID_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, { ifVersion });
4412
+ return await __privateMethod$2(this, _RestRepository_instances, updateRecordWithID_fn).call(this, a.id, { ...a, id: void 0 }, columns, { ifVersion });
2291
4413
  }
2292
4414
  } catch (error) {
2293
- if (error.status === 422)
2294
- return null;
4415
+ if (error.status === 422) return null;
2295
4416
  throw error;
2296
4417
  }
2297
4418
  throw new Error("Invalid arguments for update method");
2298
4419
  });
2299
4420
  }
2300
4421
  async updateOrThrow(a, b, c, d) {
2301
- return __privateGet$2(this, _trace).call(this, "updateOrThrow", async () => {
4422
+ return __privateGet$3(this, _trace).call(this, "updateOrThrow", async () => {
2302
4423
  const result = await this.update(a, b, c, d);
2303
4424
  if (Array.isArray(result)) {
2304
4425
  const missingIds = compact(
@@ -2317,12 +4438,11 @@ class RestRepository extends Query {
2317
4438
  });
2318
4439
  }
2319
4440
  async createOrUpdate(a, b, c, d) {
2320
- return __privateGet$2(this, _trace).call(this, "createOrUpdate", async () => {
4441
+ return __privateGet$3(this, _trace).call(this, "createOrUpdate", async () => {
2321
4442
  const ifVersion = parseIfVersion(b, c, d);
2322
4443
  if (Array.isArray(a)) {
2323
- if (a.length === 0)
2324
- return [];
2325
- await __privateMethod$2(this, _updateRecords, updateRecords_fn).call(this, a, {
4444
+ if (a.length === 0) return [];
4445
+ await __privateMethod$2(this, _RestRepository_instances, updateRecords_fn).call(this, a, {
2326
4446
  ifVersion,
2327
4447
  upsert: true
2328
4448
  });
@@ -2331,89 +4451,78 @@ class RestRepository extends Query {
2331
4451
  return result;
2332
4452
  }
2333
4453
  if (isString(a) && isObject(b)) {
2334
- if (a === "")
2335
- throw new Error("The id can't be empty");
4454
+ if (a === "") throw new Error("The id can't be empty");
2336
4455
  const columns = isValidSelectableColumns(c) ? c : void 0;
2337
- return await __privateMethod$2(this, _upsertRecordWithID, upsertRecordWithID_fn).call(this, a, b, columns, { ifVersion });
4456
+ return await __privateMethod$2(this, _RestRepository_instances, upsertRecordWithID_fn).call(this, a, b, columns, { ifVersion });
2338
4457
  }
2339
- if (isObject(a) && isString(a.xata_id)) {
2340
- if (a.xata_id === "")
2341
- throw new Error("The id can't be empty");
4458
+ if (isObject(a) && isString(a.id)) {
4459
+ if (a.id === "") throw new Error("The id can't be empty");
2342
4460
  const columns = isValidSelectableColumns(c) ? c : void 0;
2343
- return await __privateMethod$2(this, _upsertRecordWithID, upsertRecordWithID_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, { ifVersion });
4461
+ return await __privateMethod$2(this, _RestRepository_instances, upsertRecordWithID_fn).call(this, a.id, { ...a, id: void 0 }, columns, { ifVersion });
2344
4462
  }
2345
4463
  if (!isDefined(a) && isObject(b)) {
2346
4464
  return await this.create(b, c);
2347
4465
  }
2348
- if (isObject(a) && !isDefined(a.xata_id)) {
4466
+ if (isObject(a) && !isDefined(a.id)) {
2349
4467
  return await this.create(a, b);
2350
4468
  }
2351
4469
  throw new Error("Invalid arguments for createOrUpdate method");
2352
4470
  });
2353
4471
  }
2354
4472
  async createOrReplace(a, b, c, d) {
2355
- return __privateGet$2(this, _trace).call(this, "createOrReplace", async () => {
4473
+ return __privateGet$3(this, _trace).call(this, "createOrReplace", async () => {
2356
4474
  const ifVersion = parseIfVersion(b, c, d);
2357
4475
  if (Array.isArray(a)) {
2358
- if (a.length === 0)
2359
- return [];
2360
- const ids = await __privateMethod$2(this, _insertRecords, insertRecords_fn).call(this, a, { ifVersion, createOnly: false });
4476
+ if (a.length === 0) return [];
4477
+ const ids = await __privateMethod$2(this, _RestRepository_instances, insertRecords_fn).call(this, a, { ifVersion, createOnly: false });
2361
4478
  const columns = isValidSelectableColumns(b) ? b : ["*"];
2362
4479
  const result = await this.read(ids, columns);
2363
4480
  return result;
2364
4481
  }
2365
4482
  if (isString(a) && isObject(b)) {
2366
- if (a === "")
2367
- throw new Error("The id can't be empty");
4483
+ if (a === "") throw new Error("The id can't be empty");
2368
4484
  const columns = isValidSelectableColumns(c) ? c : void 0;
2369
- return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: false, ifVersion });
4485
+ return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: false, ifVersion });
2370
4486
  }
2371
- if (isObject(a) && isString(a.xata_id)) {
2372
- if (a.xata_id === "")
2373
- throw new Error("The id can't be empty");
4487
+ if (isObject(a) && isString(a.id)) {
4488
+ if (a.id === "") throw new Error("The id can't be empty");
2374
4489
  const columns = isValidSelectableColumns(c) ? c : void 0;
2375
- return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, {
2376
- createOnly: false,
2377
- ifVersion
2378
- });
4490
+ return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a.id, { ...a, id: void 0 }, columns, { createOnly: false, ifVersion });
2379
4491
  }
2380
4492
  if (!isDefined(a) && isObject(b)) {
2381
4493
  return await this.create(b, c);
2382
4494
  }
2383
- if (isObject(a) && !isDefined(a.xata_id)) {
4495
+ if (isObject(a) && !isDefined(a.id)) {
2384
4496
  return await this.create(a, b);
2385
4497
  }
2386
4498
  throw new Error("Invalid arguments for createOrReplace method");
2387
4499
  });
2388
4500
  }
2389
4501
  async delete(a, b) {
2390
- return __privateGet$2(this, _trace).call(this, "delete", async () => {
4502
+ return __privateGet$3(this, _trace).call(this, "delete", async () => {
2391
4503
  if (Array.isArray(a)) {
2392
- if (a.length === 0)
2393
- return [];
4504
+ if (a.length === 0) return [];
2394
4505
  const ids = a.map((o) => {
2395
- if (isString(o))
2396
- return o;
2397
- if (isString(o.xata_id))
2398
- return o.xata_id;
4506
+ if (isString(o)) return o;
4507
+ if (isString(o.id)) return o.id;
2399
4508
  throw new Error("Invalid arguments for delete method");
2400
4509
  });
2401
4510
  const columns = isValidSelectableColumns(b) ? b : ["*"];
2402
4511
  const result = await this.read(a, columns);
2403
- await __privateMethod$2(this, _deleteRecords, deleteRecords_fn).call(this, ids);
4512
+ await __privateMethod$2(this, _RestRepository_instances, deleteRecords_fn).call(this, ids);
2404
4513
  return result;
2405
4514
  }
2406
4515
  if (isString(a)) {
2407
- return __privateMethod$2(this, _deleteRecord, deleteRecord_fn).call(this, a, b);
4516
+ return __privateMethod$2(this, _RestRepository_instances, deleteRecord_fn).call(this, a, b);
2408
4517
  }
2409
- if (isObject(a) && isString(a.xata_id)) {
2410
- return __privateMethod$2(this, _deleteRecord, deleteRecord_fn).call(this, a.xata_id, b);
4518
+ if (isObject(a) && isString(a.id)) {
4519
+ return __privateMethod$2(this, _RestRepository_instances, deleteRecord_fn).call(this, a.id, b);
2411
4520
  }
2412
4521
  throw new Error("Invalid arguments for delete method");
2413
4522
  });
2414
4523
  }
2415
4524
  async deleteOrThrow(a, b) {
2416
- return __privateGet$2(this, _trace).call(this, "deleteOrThrow", async () => {
4525
+ return __privateGet$3(this, _trace).call(this, "deleteOrThrow", async () => {
2417
4526
  const result = await this.delete(a, b);
2418
4527
  if (Array.isArray(result)) {
2419
4528
  const missingIds = compact(
@@ -2431,13 +4540,13 @@ class RestRepository extends Query {
2431
4540
  });
2432
4541
  }
2433
4542
  async search(query, options = {}) {
2434
- return __privateGet$2(this, _trace).call(this, "search", async () => {
4543
+ return __privateGet$3(this, _trace).call(this, "search", async () => {
2435
4544
  const { records, totalCount } = await searchTable({
2436
4545
  pathParams: {
2437
4546
  workspace: "{workspaceId}",
2438
4547
  dbBranchName: "{dbBranch}",
2439
4548
  region: "{region}",
2440
- tableName: __privateGet$2(this, _table)
4549
+ tableName: __privateGet$3(this, _table)
2441
4550
  },
2442
4551
  body: {
2443
4552
  query,
@@ -2449,23 +4558,23 @@ class RestRepository extends Query {
2449
4558
  page: options.page,
2450
4559
  target: options.target
2451
4560
  },
2452
- ...__privateGet$2(this, _getFetchProps).call(this)
4561
+ ...__privateGet$3(this, _getFetchProps).call(this)
2453
4562
  });
2454
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
4563
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
2455
4564
  return {
2456
- records: records.map((item) => initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), item, ["*"])),
4565
+ records: records.map((item) => initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), item, ["*"])),
2457
4566
  totalCount
2458
4567
  };
2459
4568
  });
2460
4569
  }
2461
4570
  async vectorSearch(column, query, options) {
2462
- return __privateGet$2(this, _trace).call(this, "vectorSearch", async () => {
4571
+ return __privateGet$3(this, _trace).call(this, "vectorSearch", async () => {
2463
4572
  const { records, totalCount } = await vectorSearchTable({
2464
4573
  pathParams: {
2465
4574
  workspace: "{workspaceId}",
2466
4575
  dbBranchName: "{dbBranch}",
2467
4576
  region: "{region}",
2468
- tableName: __privateGet$2(this, _table)
4577
+ tableName: __privateGet$3(this, _table)
2469
4578
  },
2470
4579
  body: {
2471
4580
  column,
@@ -2474,39 +4583,41 @@ class RestRepository extends Query {
2474
4583
  size: options?.size,
2475
4584
  filter: options?.filter
2476
4585
  },
2477
- ...__privateGet$2(this, _getFetchProps).call(this)
4586
+ ...__privateGet$3(this, _getFetchProps).call(this)
2478
4587
  });
2479
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
4588
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
2480
4589
  return {
2481
- records: records.map((item) => initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), item, ["*"])),
4590
+ records: records.map((item) => initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), item, ["*"])),
2482
4591
  totalCount
2483
4592
  };
2484
4593
  });
2485
4594
  }
2486
4595
  async aggregate(aggs, filter) {
2487
- return __privateGet$2(this, _trace).call(this, "aggregate", async () => {
4596
+ return __privateGet$3(this, _trace).call(this, "aggregate", async () => {
2488
4597
  const result = await aggregateTable({
2489
4598
  pathParams: {
2490
4599
  workspace: "{workspaceId}",
2491
4600
  dbBranchName: "{dbBranch}",
2492
4601
  region: "{region}",
2493
- tableName: __privateGet$2(this, _table)
4602
+ tableName: __privateGet$3(this, _table)
2494
4603
  },
2495
4604
  body: { aggs, filter },
2496
- ...__privateGet$2(this, _getFetchProps).call(this)
4605
+ ...__privateGet$3(this, _getFetchProps).call(this)
2497
4606
  });
2498
4607
  return result;
2499
4608
  });
2500
4609
  }
2501
4610
  async query(query) {
2502
- return __privateGet$2(this, _trace).call(this, "query", async () => {
4611
+ return __privateGet$3(this, _trace).call(this, "query", async () => {
4612
+ const cacheQuery = await __privateMethod$2(this, _RestRepository_instances, getCacheQuery_fn).call(this, query);
4613
+ if (cacheQuery) return new Page(query, cacheQuery.meta, cacheQuery.records);
2503
4614
  const data = query.getQueryOptions();
2504
4615
  const { meta, records: objects } = await queryTable({
2505
4616
  pathParams: {
2506
4617
  workspace: "{workspaceId}",
2507
4618
  dbBranchName: "{dbBranch}",
2508
4619
  region: "{region}",
2509
- tableName: __privateGet$2(this, _table)
4620
+ tableName: __privateGet$3(this, _table)
2510
4621
  },
2511
4622
  body: {
2512
4623
  filter: cleanFilter(data.filter),
@@ -2516,30 +4627,31 @@ class RestRepository extends Query {
2516
4627
  consistency: data.consistency
2517
4628
  },
2518
4629
  fetchOptions: data.fetchOptions,
2519
- ...__privateGet$2(this, _getFetchProps).call(this)
4630
+ ...__privateGet$3(this, _getFetchProps).call(this)
2520
4631
  });
2521
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
4632
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
2522
4633
  const records = objects.map(
2523
4634
  (record) => initObject(
2524
- __privateGet$2(this, _db),
4635
+ __privateGet$3(this, _db),
2525
4636
  schemaTables,
2526
- __privateGet$2(this, _table),
4637
+ __privateGet$3(this, _table),
2527
4638
  record,
2528
4639
  data.columns ?? ["*"]
2529
4640
  )
2530
4641
  );
4642
+ await __privateMethod$2(this, _RestRepository_instances, setCacheQuery_fn).call(this, query, meta, records);
2531
4643
  return new Page(query, meta, records);
2532
4644
  });
2533
4645
  }
2534
4646
  async summarizeTable(query, summaries, summariesFilter) {
2535
- return __privateGet$2(this, _trace).call(this, "summarize", async () => {
4647
+ return __privateGet$3(this, _trace).call(this, "summarize", async () => {
2536
4648
  const data = query.getQueryOptions();
2537
4649
  const result = await summarizeTable({
2538
4650
  pathParams: {
2539
4651
  workspace: "{workspaceId}",
2540
4652
  dbBranchName: "{dbBranch}",
2541
4653
  region: "{region}",
2542
- tableName: __privateGet$2(this, _table)
4654
+ tableName: __privateGet$3(this, _table)
2543
4655
  },
2544
4656
  body: {
2545
4657
  filter: cleanFilter(data.filter),
@@ -2550,13 +4662,13 @@ class RestRepository extends Query {
2550
4662
  summaries,
2551
4663
  summariesFilter
2552
4664
  },
2553
- ...__privateGet$2(this, _getFetchProps).call(this)
4665
+ ...__privateGet$3(this, _getFetchProps).call(this)
2554
4666
  });
2555
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
4667
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
2556
4668
  return {
2557
4669
  ...result,
2558
4670
  summaries: result.summaries.map(
2559
- (summary) => initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), summary, data.columns ?? [])
4671
+ (summary) => initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), summary, data.columns ?? [])
2560
4672
  )
2561
4673
  };
2562
4674
  });
@@ -2568,7 +4680,7 @@ class RestRepository extends Query {
2568
4680
  workspace: "{workspaceId}",
2569
4681
  dbBranchName: "{dbBranch}",
2570
4682
  region: "{region}",
2571
- tableName: __privateGet$2(this, _table),
4683
+ tableName: __privateGet$3(this, _table),
2572
4684
  sessionId: options?.sessionId
2573
4685
  },
2574
4686
  body: {
@@ -2578,7 +4690,7 @@ class RestRepository extends Query {
2578
4690
  search: options?.searchType === "keyword" ? options?.search : void 0,
2579
4691
  vectorSearch: options?.searchType === "vector" ? options?.vectorSearch : void 0
2580
4692
  },
2581
- ...__privateGet$2(this, _getFetchProps).call(this)
4693
+ ...__privateGet$3(this, _getFetchProps).call(this)
2582
4694
  };
2583
4695
  if (options?.onMessage) {
2584
4696
  fetchSSERequest({
@@ -2598,51 +4710,48 @@ class RestRepository extends Query {
2598
4710
  _table = new WeakMap();
2599
4711
  _getFetchProps = new WeakMap();
2600
4712
  _db = new WeakMap();
4713
+ _cache = new WeakMap();
2601
4714
  _schemaTables = new WeakMap();
2602
4715
  _trace = new WeakMap();
2603
- _insertRecordWithoutId = new WeakSet();
4716
+ _RestRepository_instances = new WeakSet();
2604
4717
  insertRecordWithoutId_fn = async function(object, columns = ["*"]) {
2605
- const record = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
2606
- console.log("record", record);
4718
+ const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
2607
4719
  const response = await insertRecord({
2608
4720
  pathParams: {
2609
4721
  workspace: "{workspaceId}",
2610
4722
  dbBranchName: "{dbBranch}",
2611
4723
  region: "{region}",
2612
- tableName: __privateGet$2(this, _table)
4724
+ tableName: __privateGet$3(this, _table)
2613
4725
  },
2614
4726
  queryParams: { columns },
2615
4727
  body: record,
2616
- ...__privateGet$2(this, _getFetchProps).call(this)
4728
+ ...__privateGet$3(this, _getFetchProps).call(this)
2617
4729
  });
2618
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
2619
- return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
4730
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
4731
+ return initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), response, columns);
2620
4732
  };
2621
- _insertRecordWithId = new WeakSet();
2622
4733
  insertRecordWithId_fn = async function(recordId, object, columns = ["*"], { createOnly, ifVersion }) {
2623
- if (!recordId)
2624
- return null;
2625
- const record = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
4734
+ if (!recordId) return null;
4735
+ const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
2626
4736
  const response = await insertRecordWithID({
2627
4737
  pathParams: {
2628
4738
  workspace: "{workspaceId}",
2629
4739
  dbBranchName: "{dbBranch}",
2630
4740
  region: "{region}",
2631
- tableName: __privateGet$2(this, _table),
4741
+ tableName: __privateGet$3(this, _table),
2632
4742
  recordId
2633
4743
  },
2634
4744
  body: record,
2635
4745
  queryParams: { createOnly, columns, ifVersion },
2636
- ...__privateGet$2(this, _getFetchProps).call(this)
4746
+ ...__privateGet$3(this, _getFetchProps).call(this)
2637
4747
  });
2638
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
2639
- return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
4748
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
4749
+ return initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), response, columns);
2640
4750
  };
2641
- _insertRecords = new WeakSet();
2642
4751
  insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
2643
4752
  const operations = await promiseMap(objects, async (object) => {
2644
- const record = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
2645
- return { insert: { table: __privateGet$2(this, _table), record, createOnly, ifVersion } };
4753
+ const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
4754
+ return { insert: { table: __privateGet$3(this, _table), record, createOnly, ifVersion } };
2646
4755
  });
2647
4756
  const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
2648
4757
  const ids = [];
@@ -2654,7 +4763,7 @@ insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
2654
4763
  region: "{region}"
2655
4764
  },
2656
4765
  body: { operations: operations2 },
2657
- ...__privateGet$2(this, _getFetchProps).call(this)
4766
+ ...__privateGet$3(this, _getFetchProps).call(this)
2658
4767
  });
2659
4768
  for (const result of results) {
2660
4769
  if (result.operation === "insert") {
@@ -2666,26 +4775,24 @@ insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
2666
4775
  }
2667
4776
  return ids;
2668
4777
  };
2669
- _updateRecordWithID = new WeakSet();
2670
4778
  updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVersion }) {
2671
- if (!recordId)
2672
- return null;
2673
- const { xata_id: _id, ...record } = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
4779
+ if (!recordId) return null;
4780
+ const { id: _id, ...record } = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
2674
4781
  try {
2675
4782
  const response = await updateRecordWithID({
2676
4783
  pathParams: {
2677
4784
  workspace: "{workspaceId}",
2678
4785
  dbBranchName: "{dbBranch}",
2679
4786
  region: "{region}",
2680
- tableName: __privateGet$2(this, _table),
4787
+ tableName: __privateGet$3(this, _table),
2681
4788
  recordId
2682
4789
  },
2683
4790
  queryParams: { columns, ifVersion },
2684
4791
  body: record,
2685
- ...__privateGet$2(this, _getFetchProps).call(this)
4792
+ ...__privateGet$3(this, _getFetchProps).call(this)
2686
4793
  });
2687
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
2688
- return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
4794
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
4795
+ return initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), response, columns);
2689
4796
  } catch (e) {
2690
4797
  if (isObject(e) && e.status === 404) {
2691
4798
  return null;
@@ -2693,11 +4800,10 @@ updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
2693
4800
  throw e;
2694
4801
  }
2695
4802
  };
2696
- _updateRecords = new WeakSet();
2697
4803
  updateRecords_fn = async function(objects, { ifVersion, upsert }) {
2698
- const operations = await promiseMap(objects, async ({ xata_id, ...object }) => {
2699
- const fields = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
2700
- return { update: { table: __privateGet$2(this, _table), id: xata_id, ifVersion, upsert, fields } };
4804
+ const operations = await promiseMap(objects, async ({ id, ...object }) => {
4805
+ const fields = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
4806
+ return { update: { table: __privateGet$3(this, _table), id, ifVersion, upsert, fields } };
2701
4807
  });
2702
4808
  const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
2703
4809
  const ids = [];
@@ -2709,7 +4815,7 @@ updateRecords_fn = async function(objects, { ifVersion, upsert }) {
2709
4815
  region: "{region}"
2710
4816
  },
2711
4817
  body: { operations: operations2 },
2712
- ...__privateGet$2(this, _getFetchProps).call(this)
4818
+ ...__privateGet$3(this, _getFetchProps).call(this)
2713
4819
  });
2714
4820
  for (const result of results) {
2715
4821
  if (result.operation === "update") {
@@ -2721,43 +4827,39 @@ updateRecords_fn = async function(objects, { ifVersion, upsert }) {
2721
4827
  }
2722
4828
  return ids;
2723
4829
  };
2724
- _upsertRecordWithID = new WeakSet();
2725
4830
  upsertRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVersion }) {
2726
- if (!recordId)
2727
- return null;
4831
+ if (!recordId) return null;
2728
4832
  const response = await upsertRecordWithID({
2729
4833
  pathParams: {
2730
4834
  workspace: "{workspaceId}",
2731
4835
  dbBranchName: "{dbBranch}",
2732
4836
  region: "{region}",
2733
- tableName: __privateGet$2(this, _table),
4837
+ tableName: __privateGet$3(this, _table),
2734
4838
  recordId
2735
4839
  },
2736
4840
  queryParams: { columns, ifVersion },
2737
4841
  body: object,
2738
- ...__privateGet$2(this, _getFetchProps).call(this)
4842
+ ...__privateGet$3(this, _getFetchProps).call(this)
2739
4843
  });
2740
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
2741
- return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
4844
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
4845
+ return initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), response, columns);
2742
4846
  };
2743
- _deleteRecord = new WeakSet();
2744
4847
  deleteRecord_fn = async function(recordId, columns = ["*"]) {
2745
- if (!recordId)
2746
- return null;
4848
+ if (!recordId) return null;
2747
4849
  try {
2748
4850
  const response = await deleteRecord({
2749
4851
  pathParams: {
2750
4852
  workspace: "{workspaceId}",
2751
4853
  dbBranchName: "{dbBranch}",
2752
4854
  region: "{region}",
2753
- tableName: __privateGet$2(this, _table),
4855
+ tableName: __privateGet$3(this, _table),
2754
4856
  recordId
2755
4857
  },
2756
4858
  queryParams: { columns },
2757
- ...__privateGet$2(this, _getFetchProps).call(this)
4859
+ ...__privateGet$3(this, _getFetchProps).call(this)
2758
4860
  });
2759
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
2760
- return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
4861
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
4862
+ return initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), response, columns);
2761
4863
  } catch (e) {
2762
4864
  if (isObject(e) && e.status === 404) {
2763
4865
  return null;
@@ -2765,10 +4867,9 @@ deleteRecord_fn = async function(recordId, columns = ["*"]) {
2765
4867
  throw e;
2766
4868
  }
2767
4869
  };
2768
- _deleteRecords = new WeakSet();
2769
4870
  deleteRecords_fn = async function(recordIds) {
2770
4871
  const chunkedOperations = chunk(
2771
- compact(recordIds).map((id) => ({ delete: { table: __privateGet$2(this, _table), id } })),
4872
+ compact(recordIds).map((id) => ({ delete: { table: __privateGet$3(this, _table), id } })),
2772
4873
  BULK_OPERATION_MAX_SIZE
2773
4874
  );
2774
4875
  for (const operations of chunkedOperations) {
@@ -2779,35 +4880,43 @@ deleteRecords_fn = async function(recordIds) {
2779
4880
  region: "{region}"
2780
4881
  },
2781
4882
  body: { operations },
2782
- ...__privateGet$2(this, _getFetchProps).call(this)
4883
+ ...__privateGet$3(this, _getFetchProps).call(this)
2783
4884
  });
2784
4885
  }
2785
4886
  };
2786
- _getSchemaTables = new WeakSet();
4887
+ setCacheQuery_fn = async function(query, meta, records) {
4888
+ await __privateGet$3(this, _cache)?.set(`query_${__privateGet$3(this, _table)}:${query.key()}`, { date: /* @__PURE__ */ new Date(), meta, records });
4889
+ };
4890
+ getCacheQuery_fn = async function(query) {
4891
+ const key = `query_${__privateGet$3(this, _table)}:${query.key()}`;
4892
+ const result = await __privateGet$3(this, _cache)?.get(key);
4893
+ if (!result) return null;
4894
+ const defaultTTL = __privateGet$3(this, _cache)?.defaultQueryTTL ?? -1;
4895
+ const { cache: ttl = defaultTTL } = query.getQueryOptions();
4896
+ if (ttl < 0) return null;
4897
+ const hasExpired = result.date.getTime() + ttl < Date.now();
4898
+ return hasExpired ? null : result;
4899
+ };
2787
4900
  getSchemaTables_fn = async function() {
2788
- if (__privateGet$2(this, _schemaTables))
2789
- return __privateGet$2(this, _schemaTables);
4901
+ if (__privateGet$3(this, _schemaTables)) return __privateGet$3(this, _schemaTables);
2790
4902
  const { schema } = await getBranchDetails({
2791
4903
  pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
2792
- ...__privateGet$2(this, _getFetchProps).call(this)
4904
+ ...__privateGet$3(this, _getFetchProps).call(this)
2793
4905
  });
2794
- __privateSet$1(this, _schemaTables, schema.tables);
4906
+ __privateSet$2(this, _schemaTables, schema.tables);
2795
4907
  return schema.tables;
2796
4908
  };
2797
- _transformObjectToApi = new WeakSet();
2798
4909
  transformObjectToApi_fn = async function(object) {
2799
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
2800
- const schema = schemaTables.find((table) => table.name === __privateGet$2(this, _table));
2801
- if (!schema)
2802
- throw new Error(`Table ${__privateGet$2(this, _table)} not found in schema`);
4910
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
4911
+ const schema = schemaTables.find((table) => table.name === __privateGet$3(this, _table));
4912
+ if (!schema) throw new Error(`Table ${__privateGet$3(this, _table)} not found in schema`);
2803
4913
  const result = {};
2804
4914
  for (const [key, value] of Object.entries(object)) {
2805
- if (["xata_version", "xata_createdat", "xata_updatedat"].includes(key))
2806
- continue;
4915
+ if (key === "xata") continue;
2807
4916
  const type = schema.columns.find((column) => column.name === key)?.type;
2808
4917
  switch (type) {
2809
4918
  case "link": {
2810
- result[key] = isIdentifiable(value) ? value.xata_id : value;
4919
+ result[key] = isIdentifiable(value) ? value.id : value;
2811
4920
  break;
2812
4921
  }
2813
4922
  case "datetime": {
@@ -2831,13 +4940,12 @@ transformObjectToApi_fn = async function(object) {
2831
4940
  };
2832
4941
  const initObject = (db, schemaTables, table, object, selectedColumns) => {
2833
4942
  const data = {};
2834
- Object.assign(data, { ...object });
4943
+ const { xata, ...rest } = object ?? {};
4944
+ Object.assign(data, rest);
2835
4945
  const { columns } = schemaTables.find(({ name }) => name === table) ?? {};
2836
- if (!columns)
2837
- console.error(`Table ${table} not found in schema`);
4946
+ if (!columns) console.error(`Table ${table} not found in schema`);
2838
4947
  for (const column of columns ?? []) {
2839
- if (!isValidColumn(selectedColumns, column))
2840
- continue;
4948
+ if (!isValidColumn(selectedColumns, column)) continue;
2841
4949
  const value = data[column.name];
2842
4950
  switch (column.type) {
2843
4951
  case "datetime": {
@@ -2894,21 +5002,28 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
2894
5002
  }
2895
5003
  }
2896
5004
  const record = { ...data };
5005
+ const metadata = xata !== void 0 ? { ...xata, createdAt: new Date(xata.createdAt), updatedAt: new Date(xata.updatedAt) } : void 0;
2897
5006
  record.read = function(columns2) {
2898
- return db[table].read(record["xata_id"], columns2);
5007
+ return db[table].read(record["id"], columns2);
2899
5008
  };
2900
5009
  record.update = function(data2, b, c) {
2901
5010
  const columns2 = isValidSelectableColumns(b) ? b : ["*"];
2902
5011
  const ifVersion = parseIfVersion(b, c);
2903
- return db[table].update(record["xata_id"], data2, columns2, { ifVersion });
5012
+ return db[table].update(record["id"], data2, columns2, { ifVersion });
2904
5013
  };
2905
5014
  record.replace = function(data2, b, c) {
2906
5015
  const columns2 = isValidSelectableColumns(b) ? b : ["*"];
2907
5016
  const ifVersion = parseIfVersion(b, c);
2908
- return db[table].createOrReplace(record["xata_id"], data2, columns2, { ifVersion });
5017
+ return db[table].createOrReplace(record["id"], data2, columns2, { ifVersion });
2909
5018
  };
2910
5019
  record.delete = function() {
2911
- return db[table].delete(record["xata_id"]);
5020
+ return db[table].delete(record["id"]);
5021
+ };
5022
+ if (metadata !== void 0) {
5023
+ record.xata = Object.freeze(metadata);
5024
+ }
5025
+ record.getMetadata = function() {
5026
+ return record.xata;
2912
5027
  };
2913
5028
  record.toSerializable = function() {
2914
5029
  return JSON.parse(JSON.stringify(record));
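The hunk above switches the record helpers back to the plain `id` column and attaches a frozen `xata` metadata object (with createdAt/updatedAt revived as Date instances) plus a `getMetadata()` alias. A small sketch of reading that metadata; the client, table name, and record id are placeholders, not part of the diff:

// `xata` is assumed to be an already-configured client instance (see the client
// construction sketch further below); `users` is a placeholder table name.
async function showMetadata(xata) {
  const user = await xata.db.users.read('rec_1234567890');
  if (!user) return;

  // initObject above freezes `record.xata`, so the metadata is read-only,
  // and createdAt/updatedAt have already been revived into Date instances.
  const { createdAt, updatedAt } = user.xata;
  console.log(createdAt instanceof Date, updatedAt instanceof Date); // true true

  // getMetadata() is kept as a thin alias over record.xata.
  console.log(user.getMetadata() === user.xata); // true
}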
@@ -2916,22 +5031,19 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
2916
5031
  record.toString = function() {
2917
5032
  return JSON.stringify(record);
2918
5033
  };
2919
- for (const prop of ["read", "update", "replace", "delete", "toSerializable", "toString"]) {
5034
+ for (const prop of ["read", "update", "replace", "delete", "getMetadata", "toSerializable", "toString"]) {
2920
5035
  Object.defineProperty(record, prop, { enumerable: false });
2921
5036
  }
2922
5037
  Object.freeze(record);
2923
5038
  return record;
2924
5039
  };
2925
5040
  function extractId(value) {
2926
- if (isString(value))
2927
- return value;
2928
- if (isObject(value) && isString(value.xata_id))
2929
- return value.xata_id;
5041
+ if (isString(value)) return value;
5042
+ if (isObject(value) && isString(value.id)) return value.id;
2930
5043
  return void 0;
2931
5044
  }
2932
5045
  function isValidColumn(columns, column) {
2933
- if (columns.includes("*"))
2934
- return true;
5046
+ if (columns.includes("*")) return true;
2935
5047
  return columns.filter((item) => isString(item) && item.startsWith(column.name)).length > 0;
2936
5048
  }
2937
5049
  function parseIfVersion(...args) {
@@ -2943,6 +5055,44 @@ function parseIfVersion(...args) {
2943
5055
  return void 0;
2944
5056
  }
2945
5057
 
5058
+ var __typeError$3 = (msg) => {
5059
+ throw TypeError(msg);
5060
+ };
5061
+ var __accessCheck$3 = (obj, member, msg) => member.has(obj) || __typeError$3("Cannot " + msg);
5062
+ var __privateGet$2 = (obj, member, getter) => (__accessCheck$3(obj, member, "read from private field"), member.get(obj));
5063
+ var __privateAdd$3 = (obj, member, value) => member.has(obj) ? __typeError$3("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
5064
+ var __privateSet$1 = (obj, member, value, setter) => (__accessCheck$3(obj, member, "write to private field"), member.set(obj, value), value);
5065
+ var _map;
5066
+ class SimpleCache {
5067
+ constructor(options = {}) {
5068
+ __privateAdd$3(this, _map);
5069
+ __privateSet$1(this, _map, /* @__PURE__ */ new Map());
5070
+ this.capacity = options.max ?? 500;
5071
+ this.defaultQueryTTL = options.defaultQueryTTL ?? 60 * 1e3;
5072
+ }
5073
+ async getAll() {
5074
+ return Object.fromEntries(__privateGet$2(this, _map));
5075
+ }
5076
+ async get(key) {
5077
+ return __privateGet$2(this, _map).get(key) ?? null;
5078
+ }
5079
+ async set(key, value) {
5080
+ await this.delete(key);
5081
+ __privateGet$2(this, _map).set(key, value);
5082
+ if (__privateGet$2(this, _map).size > this.capacity) {
5083
+ const leastRecentlyUsed = __privateGet$2(this, _map).keys().next().value;
5084
+ if (leastRecentlyUsed) await this.delete(leastRecentlyUsed);
5085
+ }
5086
+ }
5087
+ async delete(key) {
5088
+ __privateGet$2(this, _map).delete(key);
5089
+ }
5090
+ async clear() {
5091
+ return __privateGet$2(this, _map).clear();
5092
+ }
5093
+ }
5094
+ _map = new WeakMap();
5095
+
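The block above reintroduces SimpleCache: an in-memory Map with a capacity cap and a default query TTL, evicting the oldest entry once `capacity` is exceeded, and it is wired into the client options later in this diff (parseOptions_fn falls back to a zero-TTL instance). A usage sketch, assuming the exported BaseClient accepts the same options object that buildClient parses; the environment variable names are placeholders:

const { BaseClient, SimpleCache } = require('@xata.io/client');

// Keep at most 100 cached query results; cache each one for 30 seconds unless a
// query overrides the TTL.
const cache = new SimpleCache({ max: 100, defaultQueryTTL: 30_000 });

// buildClient (further down in this diff) defaults to
// `new SimpleCache({ defaultQueryTTL: 0 })` when no cache is passed,
// so result caching is effectively opt-in.
const xata = new BaseClient({
  databaseURL: process.env.XATA_DATABASE_URL, // placeholder configuration
  apiKey: process.env.XATA_API_KEY,
  branch: process.env.XATA_BRANCH ?? 'main',
  cache
});

// The cache can also be used on its own; every method is async.
cache.set('greeting', { hello: 'world' })
  .then(() => cache.get('greeting'))
  .then((value) => console.log(value)); // { hello: 'world' }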
2946
5096
  const greaterThan = (value) => ({ $gt: value });
2947
5097
  const gt = greaterThan;
2948
5098
  const greaterThanEquals = (value) => ({ $ge: value });
@@ -2971,19 +5121,12 @@ const includesAll = (value) => ({ $includesAll: value });
2971
5121
  const includesNone = (value) => ({ $includesNone: value });
2972
5122
  const includesAny = (value) => ({ $includesAny: value });
2973
5123
 
2974
- var __accessCheck$2 = (obj, member, msg) => {
2975
- if (!member.has(obj))
2976
- throw TypeError("Cannot " + msg);
2977
- };
2978
- var __privateGet$1 = (obj, member, getter) => {
2979
- __accessCheck$2(obj, member, "read from private field");
2980
- return getter ? getter.call(obj) : member.get(obj);
2981
- };
2982
- var __privateAdd$2 = (obj, member, value) => {
2983
- if (member.has(obj))
2984
- throw TypeError("Cannot add the same private member more than once");
2985
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
5124
+ var __typeError$2 = (msg) => {
5125
+ throw TypeError(msg);
2986
5126
  };
5127
+ var __accessCheck$2 = (obj, member, msg) => member.has(obj) || __typeError$2("Cannot " + msg);
5128
+ var __privateGet$1 = (obj, member, getter) => (__accessCheck$2(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
5129
+ var __privateAdd$2 = (obj, member, value) => member.has(obj) ? __typeError$2("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
2987
5130
  var _tables;
2988
5131
  class SchemaPlugin extends XataPlugin {
2989
5132
  constructor() {
@@ -2995,8 +5138,7 @@ class SchemaPlugin extends XataPlugin {
2995
5138
  {},
2996
5139
  {
2997
5140
  get: (_target, table) => {
2998
- if (!isString(table))
2999
- throw new Error("Invalid table name");
5141
+ if (!isString(table)) throw new Error("Invalid table name");
3000
5142
  if (__privateGet$1(this, _tables)[table] === void 0) {
3001
5143
  __privateGet$1(this, _tables)[table] = new RestRepository({ db, pluginOptions, table, schemaTables: pluginOptions.tables });
3002
5144
  }
@@ -3087,30 +5229,23 @@ function getContentType(file) {
3087
5229
  return "application/octet-stream";
3088
5230
  }
3089
5231
 
3090
- var __accessCheck$1 = (obj, member, msg) => {
3091
- if (!member.has(obj))
3092
- throw TypeError("Cannot " + msg);
3093
- };
3094
- var __privateAdd$1 = (obj, member, value) => {
3095
- if (member.has(obj))
3096
- throw TypeError("Cannot add the same private member more than once");
3097
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
3098
- };
3099
- var __privateMethod$1 = (obj, member, method) => {
3100
- __accessCheck$1(obj, member, "access private method");
3101
- return method;
5232
+ var __typeError$1 = (msg) => {
5233
+ throw TypeError(msg);
3102
5234
  };
3103
- var _search, search_fn;
5235
+ var __accessCheck$1 = (obj, member, msg) => member.has(obj) || __typeError$1("Cannot " + msg);
5236
+ var __privateAdd$1 = (obj, member, value) => member.has(obj) ? __typeError$1("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
5237
+ var __privateMethod$1 = (obj, member, method) => (__accessCheck$1(obj, member, "access private method"), method);
5238
+ var _SearchPlugin_instances, search_fn;
3104
5239
  class SearchPlugin extends XataPlugin {
3105
5240
  constructor(db) {
3106
5241
  super();
3107
5242
  this.db = db;
3108
- __privateAdd$1(this, _search);
5243
+ __privateAdd$1(this, _SearchPlugin_instances);
3109
5244
  }
3110
5245
  build(pluginOptions) {
3111
5246
  return {
3112
5247
  all: async (query, options = {}) => {
3113
- const { records, totalCount } = await __privateMethod$1(this, _search, search_fn).call(this, query, options, pluginOptions);
5248
+ const { records, totalCount } = await __privateMethod$1(this, _SearchPlugin_instances, search_fn).call(this, query, options, pluginOptions);
3114
5249
  return {
3115
5250
  totalCount,
3116
5251
  records: records.map((record) => {
@@ -3120,7 +5255,7 @@ class SearchPlugin extends XataPlugin {
3120
5255
  };
3121
5256
  },
3122
5257
  byTable: async (query, options = {}) => {
3123
- const { records: rawRecords, totalCount } = await __privateMethod$1(this, _search, search_fn).call(this, query, options, pluginOptions);
5258
+ const { records: rawRecords, totalCount } = await __privateMethod$1(this, _SearchPlugin_instances, search_fn).call(this, query, options, pluginOptions);
3124
5259
  const records = rawRecords.reduce((acc, record) => {
3125
5260
  const { table = "orphan" } = record.xata;
3126
5261
  const items = acc[table] ?? [];
@@ -3132,7 +5267,7 @@ class SearchPlugin extends XataPlugin {
3132
5267
  };
3133
5268
  }
3134
5269
  }
3135
- _search = new WeakSet();
5270
+ _SearchPlugin_instances = new WeakSet();
3136
5271
  search_fn = async function(query, options, pluginOptions) {
3137
5272
  const { tables, fuzziness, highlight, prefix, page } = options ?? {};
3138
5273
  const { records, totalCount } = await searchBranch({
@@ -3168,8 +5303,7 @@ function arrayString(val) {
3168
5303
  return result;
3169
5304
  }
3170
5305
  function prepareValue(value) {
3171
- if (!isDefined(value))
3172
- return null;
5306
+ if (!isDefined(value)) return null;
3173
5307
  if (value instanceof Date) {
3174
5308
  return value.toISOString();
3175
5309
  }
@@ -3196,31 +5330,42 @@ function prepareParams(param1, param2) {
3196
5330
  return { statement, params: param2?.map((value) => prepareValue(value)) };
3197
5331
  }
3198
5332
  if (isObject(param1)) {
3199
- const { statement, params, consistency } = param1;
3200
- return { statement, params: params?.map((value) => prepareValue(value)), consistency };
5333
+ const { statement, params, consistency, responseType } = param1;
5334
+ return { statement, params: params?.map((value) => prepareValue(value)), consistency, responseType };
3201
5335
  }
3202
5336
  throw new Error("Invalid query");
3203
5337
  }
3204
5338
 
3205
5339
  class SQLPlugin extends XataPlugin {
3206
5340
  build(pluginOptions) {
3207
- return async (query, ...parameters) => {
5341
+ const sqlFunction = async (query, ...parameters) => {
3208
5342
  if (!isParamsObject(query) && (!isTemplateStringsArray(query) || !Array.isArray(parameters))) {
3209
5343
  throw new Error("Invalid usage of `xata.sql`. Please use it as a tagged template or with an object.");
3210
5344
  }
3211
- const { statement, params, consistency } = prepareParams(query, parameters);
3212
- const {
3213
- records,
3214
- rows,
3215
- warning,
3216
- columns = []
3217
- } = await sqlQuery({
5345
+ const { statement, params, consistency, responseType } = prepareParams(query, parameters);
5346
+ const { warning, columns, ...response } = await sqlQuery({
3218
5347
  pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
3219
- body: { statement, params, consistency },
5348
+ body: { statement, params, consistency, responseType },
3220
5349
  ...pluginOptions
3221
5350
  });
5351
+ const records = "records" in response ? response.records : void 0;
5352
+ const rows = "rows" in response ? response.rows : void 0;
3222
5353
  return { records, rows, warning, columns };
3223
5354
  };
5355
+ sqlFunction.connectionString = buildConnectionString(pluginOptions);
5356
+ sqlFunction.batch = async (query) => {
5357
+ const { results } = await sqlBatchQuery({
5358
+ pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
5359
+ body: {
5360
+ statements: query.statements.map(({ statement, params }) => ({ statement, params })),
5361
+ consistency: query.consistency,
5362
+ responseType: query.responseType
5363
+ },
5364
+ ...pluginOptions
5365
+ });
5366
+ return { results };
5367
+ };
5368
+ return sqlFunction;
3224
5369
  }
3225
5370
  }
3226
5371
  function isTemplateStringsArray(strings) {
@@ -3229,6 +5374,32 @@ function isTemplateStringsArray(strings) {
3229
5374
  function isParamsObject(params) {
3230
5375
  return isObject(params) && "statement" in params;
3231
5376
  }
5377
+ function buildDomain(host, region) {
5378
+ switch (host) {
5379
+ case "production":
5380
+ return `${region}.sql.xata.sh`;
5381
+ case "staging":
5382
+ return `${region}.sql.staging-xata.dev`;
5383
+ case "dev":
5384
+ return `${region}.sql.dev-xata.dev`;
5385
+ case "local":
5386
+ return "localhost:7654";
5387
+ default:
5388
+ throw new Error("Invalid host provider");
5389
+ }
5390
+ }
5391
+ function buildConnectionString({ apiKey, workspacesApiUrl, branch }) {
5392
+ const url = isString(workspacesApiUrl) ? workspacesApiUrl : workspacesApiUrl("", {});
5393
+ const parts = parseWorkspacesUrlParts(url);
5394
+ if (!parts) throw new Error("Invalid workspaces URL");
5395
+ const { workspace: workspaceSlug, region, database, host } = parts;
5396
+ const domain = buildDomain(host, region);
5397
+ const workspace = workspaceSlug.split("-").pop();
5398
+ if (!workspace || !region || !database || !apiKey || !branch) {
5399
+ throw new Error("Unable to build xata connection string");
5400
+ }
5401
+ return `postgresql://${workspace}:${apiKey}@${domain}/${database}:${branch}?sslmode=require`;
5402
+ }
3232
5403
 
3233
5404
  class TransactionPlugin extends XataPlugin {
3234
5405
  build(pluginOptions) {
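The SQLPlugin changes above turn the plain tagged-template function into `sqlFunction` with two additions: a `batch` method that sends several statements in one request via sqlBatchQuery, and a `connectionString` built from the workspace URL. A usage sketch; the client, table, column names, `$1` parameter style, and the `responseType: 'array'` literal are assumptions for illustration rather than values shown in this diff:

// `xata` is assumed to be a configured client instance.
async function sqlExamples(xata, email) {
  // Tagged-template form: interpolations are sent as bound parameters.
  const { records, warning, columns } = await xata.sql`
    SELECT * FROM "users" WHERE email = ${email}
  `;

  // Params-object form. The diff above forwards `responseType`; with an
  // array-style response the data comes back under `rows` instead of `records`.
  const { rows } = await xata.sql({
    statement: 'SELECT id, email FROM "users" LIMIT 10',
    responseType: 'array'
  });

  // New in this version: batch several statements in one round trip.
  const { results } = await xata.sql.batch({
    statements: [
      { statement: 'SELECT count(*) FROM "users"', params: [] },
      { statement: 'SELECT * FROM "users" WHERE id = $1', params: ['rec_123'] }
    ]
  });

  // Also new: a ready-made Postgres connection string for external drivers, in the
  // shape built by buildConnectionString above:
  // postgresql://<workspace>:<apiKey>@<region>.sql.xata.sh/<database>:<branch>?sslmode=require
  console.log(xata.sql.connectionString);

  return { records, warning, columns, rows, results };
}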
@@ -3245,41 +5416,28 @@ class TransactionPlugin extends XataPlugin {
3245
5416
  }
3246
5417
  }
3247
5418
 
3248
- var __accessCheck = (obj, member, msg) => {
3249
- if (!member.has(obj))
3250
- throw TypeError("Cannot " + msg);
3251
- };
3252
- var __privateGet = (obj, member, getter) => {
3253
- __accessCheck(obj, member, "read from private field");
3254
- return getter ? getter.call(obj) : member.get(obj);
3255
- };
3256
- var __privateAdd = (obj, member, value) => {
3257
- if (member.has(obj))
3258
- throw TypeError("Cannot add the same private member more than once");
3259
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
3260
- };
3261
- var __privateSet = (obj, member, value, setter) => {
3262
- __accessCheck(obj, member, "write to private field");
3263
- setter ? setter.call(obj, value) : member.set(obj, value);
3264
- return value;
3265
- };
3266
- var __privateMethod = (obj, member, method) => {
3267
- __accessCheck(obj, member, "access private method");
3268
- return method;
5419
+ var __typeError = (msg) => {
5420
+ throw TypeError(msg);
3269
5421
  };
5422
+ var __accessCheck = (obj, member, msg) => member.has(obj) || __typeError("Cannot " + msg);
5423
+ var __privateGet = (obj, member, getter) => (__accessCheck(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
5424
+ var __privateAdd = (obj, member, value) => member.has(obj) ? __typeError("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
5425
+ var __privateSet = (obj, member, value, setter) => (__accessCheck(obj, member, "write to private field"), member.set(obj, value), value);
5426
+ var __privateMethod = (obj, member, method) => (__accessCheck(obj, member, "access private method"), method);
3270
5427
  const buildClient = (plugins) => {
3271
- var _options, _parseOptions, parseOptions_fn, _getFetchProps, getFetchProps_fn, _a;
5428
+ var _options, _instances, parseOptions_fn, getFetchProps_fn, _a;
3272
5429
  return _a = class {
3273
5430
  constructor(options = {}, tables) {
3274
- __privateAdd(this, _parseOptions);
3275
- __privateAdd(this, _getFetchProps);
3276
- __privateAdd(this, _options, void 0);
3277
- const safeOptions = __privateMethod(this, _parseOptions, parseOptions_fn).call(this, options);
5431
+ __privateAdd(this, _instances);
5432
+ __privateAdd(this, _options);
5433
+ const safeOptions = __privateMethod(this, _instances, parseOptions_fn).call(this, options);
3278
5434
  __privateSet(this, _options, safeOptions);
3279
5435
  const pluginOptions = {
3280
- ...__privateMethod(this, _getFetchProps, getFetchProps_fn).call(this, safeOptions),
5436
+ ...__privateMethod(this, _instances, getFetchProps_fn).call(this, safeOptions),
5437
+ cache: safeOptions.cache,
3281
5438
  host: safeOptions.host,
3282
- tables
5439
+ tables,
5440
+ branch: safeOptions.branch
3283
5441
  };
3284
5442
  const db = new SchemaPlugin().build(pluginOptions);
3285
5443
  const search = new SearchPlugin(db).build(pluginOptions);
@@ -3293,8 +5451,7 @@ const buildClient = (plugins) => {
3293
5451
  this.sql = sql;
3294
5452
  this.files = files;
3295
5453
  for (const [key, namespace] of Object.entries(plugins ?? {})) {
3296
- if (namespace === void 0)
3297
- continue;
5454
+ if (namespace === void 0) continue;
3298
5455
  this[key] = namespace.build(pluginOptions);
3299
5456
  }
3300
5457
  }
@@ -3303,7 +5460,7 @@ const buildClient = (plugins) => {
3303
5460
  const branch = __privateGet(this, _options).branch;
3304
5461
  return { databaseURL, branch };
3305
5462
  }
3306
- }, _options = new WeakMap(), _parseOptions = new WeakSet(), parseOptions_fn = function(options) {
5463
+ }, _options = new WeakMap(), _instances = new WeakSet(), parseOptions_fn = function(options) {
3307
5464
  const enableBrowser = options?.enableBrowser ?? getEnableBrowserVariable() ?? false;
3308
5465
  const isBrowser = typeof window !== "undefined" && typeof Deno === "undefined";
3309
5466
  if (isBrowser && !enableBrowser) {
@@ -3314,6 +5471,7 @@ const buildClient = (plugins) => {
3314
5471
  const fetch = getFetchImplementation(options?.fetch);
3315
5472
  const databaseURL = options?.databaseURL || getDatabaseURL();
3316
5473
  const apiKey = options?.apiKey || getAPIKey();
5474
+ const cache = options?.cache ?? new SimpleCache({ defaultQueryTTL: 0 });
3317
5475
  const trace = options?.trace ?? defaultTrace;
3318
5476
  const clientName = options?.clientName;
3319
5477
  const host = options?.host ?? "production";
@@ -3349,6 +5507,7 @@ const buildClient = (plugins) => {
3349
5507
  databaseURL,
3350
5508
  apiKey,
3351
5509
  branch,
5510
+ cache,
3352
5511
  trace,
3353
5512
  host,
3354
5513
  clientID: generateUUID(),
@@ -3356,7 +5515,7 @@ const buildClient = (plugins) => {
3356
5515
  clientName,
3357
5516
  xataAgentExtra
3358
5517
  };
3359
- }, _getFetchProps = new WeakSet(), getFetchProps_fn = function({
5518
+ }, getFetchProps_fn = function({
3360
5519
  fetch,
3361
5520
  apiKey,
3362
5521
  databaseURL,
@@ -3397,26 +5556,19 @@ class Serializer {
3397
5556
  }
3398
5557
  toJSON(data) {
3399
5558
  function visit(obj) {
3400
- if (Array.isArray(obj))
3401
- return obj.map(visit);
5559
+ if (Array.isArray(obj)) return obj.map(visit);
3402
5560
  const type = typeof obj;
3403
- if (type === "undefined")
3404
- return { [META]: "undefined" };
3405
- if (type === "bigint")
3406
- return { [META]: "bigint", [VALUE]: obj.toString() };
3407
- if (obj === null || type !== "object")
3408
- return obj;
5561
+ if (type === "undefined") return { [META]: "undefined" };
5562
+ if (type === "bigint") return { [META]: "bigint", [VALUE]: obj.toString() };
5563
+ if (obj === null || type !== "object") return obj;
3409
5564
  const constructor = obj.constructor;
3410
5565
  const o = { [META]: constructor.name };
3411
5566
  for (const [key, value] of Object.entries(obj)) {
3412
5567
  o[key] = visit(value);
3413
5568
  }
3414
- if (constructor === Date)
3415
- o[VALUE] = obj.toISOString();
3416
- if (constructor === Map)
3417
- o[VALUE] = Object.fromEntries(obj);
3418
- if (constructor === Set)
3419
- o[VALUE] = [...obj];
5569
+ if (constructor === Date) o[VALUE] = obj.toISOString();
5570
+ if (constructor === Map) o[VALUE] = Object.fromEntries(obj);
5571
+ if (constructor === Set) o[VALUE] = [...obj];
3420
5572
  return o;
3421
5573
  }
3422
5574
  return JSON.stringify(visit(data));
@@ -3429,16 +5581,11 @@ class Serializer {
3429
5581
  if (constructor) {
3430
5582
  return Object.assign(Object.create(constructor.prototype), rest);
3431
5583
  }
3432
- if (clazz === "Date")
3433
- return new Date(val);
3434
- if (clazz === "Set")
3435
- return new Set(val);
3436
- if (clazz === "Map")
3437
- return new Map(Object.entries(val));
3438
- if (clazz === "bigint")
3439
- return BigInt(val);
3440
- if (clazz === "undefined")
3441
- return void 0;
5584
+ if (clazz === "Date") return new Date(val);
5585
+ if (clazz === "Set") return new Set(val);
5586
+ if (clazz === "Map") return new Map(Object.entries(val));
5587
+ if (clazz === "bigint") return BigInt(val);
5588
+ if (clazz === "undefined") return void 0;
3442
5589
  return rest;
3443
5590
  }
3444
5591
  return value;
@@ -3461,6 +5608,7 @@ class XataError extends Error {
3461
5608
  }
3462
5609
 
3463
5610
  exports.BaseClient = BaseClient;
5611
+ exports.Buffer = Buffer;
3464
5612
  exports.FetcherError = FetcherError;
3465
5613
  exports.FilesPlugin = FilesPlugin;
3466
5614
  exports.Operations = operationsByTag;
@@ -3479,6 +5627,7 @@ exports.SQLPlugin = SQLPlugin;
3479
5627
  exports.SchemaPlugin = SchemaPlugin;
3480
5628
  exports.SearchPlugin = SearchPlugin;
3481
5629
  exports.Serializer = Serializer;
5630
+ exports.SimpleCache = SimpleCache;
3482
5631
  exports.TransactionPlugin = TransactionPlugin;
3483
5632
  exports.XataApiClient = XataApiClient;
3484
5633
  exports.XataApiPlugin = XataApiPlugin;
@@ -3486,6 +5635,7 @@ exports.XataError = XataError;
3486
5635
  exports.XataFile = XataFile;
3487
5636
  exports.XataPlugin = XataPlugin;
3488
5637
  exports.acceptWorkspaceMemberInvite = acceptWorkspaceMemberInvite;
5638
+ exports.adaptAllTables = adaptAllTables;
3489
5639
  exports.adaptTable = adaptTable;
3490
5640
  exports.addGitBranchesEntry = addGitBranchesEntry;
3491
5641
  exports.addTableColumn = addTableColumn;
@@ -3503,9 +5653,11 @@ exports.cancelWorkspaceMemberInvite = cancelWorkspaceMemberInvite;
3503
5653
  exports.compareBranchSchemas = compareBranchSchemas;
3504
5654
  exports.compareBranchWithUserSchema = compareBranchWithUserSchema;
3505
5655
  exports.compareMigrationRequest = compareMigrationRequest;
5656
+ exports.completeMigration = completeMigration;
3506
5657
  exports.contains = contains;
3507
5658
  exports.copyBranch = copyBranch;
3508
5659
  exports.createBranch = createBranch;
5660
+ exports.createBranchAsync = createBranchAsync;
3509
5661
  exports.createCluster = createCluster;
3510
5662
  exports.createDatabase = createDatabase;
3511
5663
  exports.createMigrationRequest = createMigrationRequest;
@@ -3513,6 +5665,7 @@ exports.createTable = createTable;
3513
5665
  exports.createUserAPIKey = createUserAPIKey;
3514
5666
  exports.createWorkspace = createWorkspace;
3515
5667
  exports.deleteBranch = deleteBranch;
5668
+ exports.deleteCluster = deleteCluster;
3516
5669
  exports.deleteColumn = deleteColumn;
3517
5670
  exports.deleteDatabase = deleteDatabase;
3518
5671
  exports.deleteDatabaseGithubSettings = deleteDatabaseGithubSettings;
@@ -3526,6 +5679,7 @@ exports.deleteUserAPIKey = deleteUserAPIKey;
3526
5679
  exports.deleteUserOAuthClient = deleteUserOAuthClient;
3527
5680
  exports.deleteWorkspace = deleteWorkspace;
3528
5681
  exports.deserialize = deserialize;
5682
+ exports.dropClusterExtension = dropClusterExtension;
3529
5683
  exports.endsWith = endsWith;
3530
5684
  exports.equals = equals;
3531
5685
  exports.executeBranchMigrationPlan = executeBranchMigrationPlan;
@@ -3542,9 +5696,11 @@ exports.getBranchMetadata = getBranchMetadata;
3542
5696
  exports.getBranchMigrationHistory = getBranchMigrationHistory;
3543
5697
  exports.getBranchMigrationJobStatus = getBranchMigrationJobStatus;
3544
5698
  exports.getBranchMigrationPlan = getBranchMigrationPlan;
5699
+ exports.getBranchMoveStatus = getBranchMoveStatus;
3545
5700
  exports.getBranchSchemaHistory = getBranchSchemaHistory;
3546
5701
  exports.getBranchStats = getBranchStats;
3547
5702
  exports.getCluster = getCluster;
5703
+ exports.getClusterMetrics = getClusterMetrics;
3548
5704
  exports.getColumn = getColumn;
3549
5705
  exports.getDatabaseGithubSettings = getDatabaseGithubSettings;
3550
5706
  exports.getDatabaseList = getDatabaseList;
@@ -3557,19 +5713,24 @@ exports.getGitBranchesMapping = getGitBranchesMapping;
3557
5713
  exports.getHostUrl = getHostUrl;
3558
5714
  exports.getMigrationHistory = getMigrationHistory;
3559
5715
  exports.getMigrationJobStatus = getMigrationJobStatus;
5716
+ exports.getMigrationJobs = getMigrationJobs;
3560
5717
  exports.getMigrationRequest = getMigrationRequest;
3561
5718
  exports.getMigrationRequestIsMerged = getMigrationRequestIsMerged;
3562
5719
  exports.getPreviewBranch = getPreviewBranch;
3563
5720
  exports.getRecord = getRecord;
3564
5721
  exports.getSchema = getSchema;
5722
+ exports.getSchemas = getSchemas;
3565
5723
  exports.getTableColumns = getTableColumns;
3566
5724
  exports.getTableSchema = getTableSchema;
5725
+ exports.getTaskStatus = getTaskStatus;
5726
+ exports.getTasks = getTasks;
3567
5727
  exports.getUser = getUser;
3568
5728
  exports.getUserAPIKeys = getUserAPIKeys;
3569
5729
  exports.getUserOAuthAccessTokens = getUserOAuthAccessTokens;
3570
5730
  exports.getUserOAuthClients = getUserOAuthClients;
3571
5731
  exports.getWorkspace = getWorkspace;
3572
5732
  exports.getWorkspaceMembersList = getWorkspaceMembersList;
5733
+ exports.getWorkspaceSettings = getWorkspaceSettings;
3573
5734
  exports.getWorkspacesList = getWorkspacesList;
3574
5735
  exports.grantAuthorizationCode = grantAuthorizationCode;
3575
5736
  exports.greaterEquals = greaterEquals;
@@ -3585,6 +5746,7 @@ exports.includesAny = includesAny;
3585
5746
  exports.includesNone = includesNone;
3586
5747
  exports.insertRecord = insertRecord;
3587
5748
  exports.insertRecordWithID = insertRecordWithID;
5749
+ exports.installClusterExtension = installClusterExtension;
3588
5750
  exports.inviteWorkspaceMember = inviteWorkspaceMember;
3589
5751
  exports.is = is;
3590
5752
  exports.isCursorPaginationOptions = isCursorPaginationOptions;
@@ -3594,16 +5756,20 @@ exports.isIdentifiable = isIdentifiable;
3594
5756
  exports.isNot = isNot;
3595
5757
  exports.isValidExpandedColumn = isValidExpandedColumn;
3596
5758
  exports.isValidSelectableColumns = isValidSelectableColumns;
5759
+ exports.isXataRecord = isXataRecord;
3597
5760
  exports.le = le;
3598
5761
  exports.lessEquals = lessEquals;
3599
5762
  exports.lessThan = lessThan;
3600
5763
  exports.lessThanEquals = lessThanEquals;
5764
+ exports.listClusterBranches = listClusterBranches;
5765
+ exports.listClusterExtensions = listClusterExtensions;
3601
5766
  exports.listClusters = listClusters;
3602
5767
  exports.listMigrationRequestsCommits = listMigrationRequestsCommits;
3603
5768
  exports.listRegions = listRegions;
3604
5769
  exports.lt = lt;
3605
5770
  exports.lte = lte;
3606
5771
  exports.mergeMigrationRequest = mergeMigrationRequest;
5772
+ exports.moveBranch = moveBranch;
3607
5773
  exports.notExists = notExists;
3608
5774
  exports.operationsByTag = operationsByTag;
3609
5775
  exports.parseProviderString = parseProviderString;
@@ -3620,11 +5786,14 @@ exports.removeWorkspaceMember = removeWorkspaceMember;
3620
5786
  exports.renameDatabase = renameDatabase;
3621
5787
  exports.resendWorkspaceMemberInvite = resendWorkspaceMemberInvite;
3622
5788
  exports.resolveBranch = resolveBranch;
5789
+ exports.rollbackMigration = rollbackMigration;
3623
5790
  exports.searchBranch = searchBranch;
3624
5791
  exports.searchTable = searchTable;
3625
5792
  exports.serialize = serialize;
3626
5793
  exports.setTableSchema = setTableSchema;
5794
+ exports.sqlBatchQuery = sqlBatchQuery;
3627
5795
  exports.sqlQuery = sqlQuery;
5796
+ exports.startMigration = startMigration;
3628
5797
  exports.startsWith = startsWith;
3629
5798
  exports.summarizeTable = summarizeTable;
3630
5799
  exports.transformImage = transformImage;
@@ -3643,6 +5812,7 @@ exports.updateUser = updateUser;
3643
5812
  exports.updateWorkspace = updateWorkspace;
3644
5813
  exports.updateWorkspaceMemberInvite = updateWorkspaceMemberInvite;
3645
5814
  exports.updateWorkspaceMemberRole = updateWorkspaceMemberRole;
5815
+ exports.updateWorkspaceSettings = updateWorkspaceSettings;
3646
5816
  exports.upsertRecordWithID = upsertRecordWithID;
3647
5817
  exports.vectorSearchTable = vectorSearchTable;
3648
5818
  //# sourceMappingURL=index.cjs.map