@xata.io/client 0.0.0-alpha.vfc037e5fcc7638c56843d5834ef8a7d04c8d451b → 0.0.0-alpha.vfc2160d20dff569d0f4b3272a1273ca130158619

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -22,6 +22,1789 @@ const TraceAttributes = {
22
22
  CLOUDFLARE_RAY_ID: "cf.ray"
23
23
  };
24
24
 
25
+ const lookup = [];
26
+ const revLookup = [];
27
+ const code = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
28
+ for (let i = 0, len = code.length; i < len; ++i) {
29
+ lookup[i] = code[i];
30
+ revLookup[code.charCodeAt(i)] = i;
31
+ }
32
+ revLookup["-".charCodeAt(0)] = 62;
33
+ revLookup["_".charCodeAt(0)] = 63;
34
+ function getLens(b64) {
35
+ const len = b64.length;
36
+ if (len % 4 > 0) {
37
+ throw new Error("Invalid string. Length must be a multiple of 4");
38
+ }
39
+ let validLen = b64.indexOf("=");
40
+ if (validLen === -1) validLen = len;
41
+ const placeHoldersLen = validLen === len ? 0 : 4 - validLen % 4;
42
+ return [validLen, placeHoldersLen];
43
+ }
44
+ function _byteLength(_b64, validLen, placeHoldersLen) {
45
+ return (validLen + placeHoldersLen) * 3 / 4 - placeHoldersLen;
46
+ }
47
+ function toByteArray(b64) {
48
+ let tmp;
49
+ const lens = getLens(b64);
50
+ const validLen = lens[0];
51
+ const placeHoldersLen = lens[1];
52
+ const arr = new Uint8Array(_byteLength(b64, validLen, placeHoldersLen));
53
+ let curByte = 0;
54
+ const len = placeHoldersLen > 0 ? validLen - 4 : validLen;
55
+ let i;
56
+ for (i = 0; i < len; i += 4) {
57
+ tmp = revLookup[b64.charCodeAt(i)] << 18 | revLookup[b64.charCodeAt(i + 1)] << 12 | revLookup[b64.charCodeAt(i + 2)] << 6 | revLookup[b64.charCodeAt(i + 3)];
58
+ arr[curByte++] = tmp >> 16 & 255;
59
+ arr[curByte++] = tmp >> 8 & 255;
60
+ arr[curByte++] = tmp & 255;
61
+ }
62
+ if (placeHoldersLen === 2) {
63
+ tmp = revLookup[b64.charCodeAt(i)] << 2 | revLookup[b64.charCodeAt(i + 1)] >> 4;
64
+ arr[curByte++] = tmp & 255;
65
+ }
66
+ if (placeHoldersLen === 1) {
67
+ tmp = revLookup[b64.charCodeAt(i)] << 10 | revLookup[b64.charCodeAt(i + 1)] << 4 | revLookup[b64.charCodeAt(i + 2)] >> 2;
68
+ arr[curByte++] = tmp >> 8 & 255;
69
+ arr[curByte++] = tmp & 255;
70
+ }
71
+ return arr;
72
+ }
73
+ function tripletToBase64(num) {
74
+ return lookup[num >> 18 & 63] + lookup[num >> 12 & 63] + lookup[num >> 6 & 63] + lookup[num & 63];
75
+ }
76
+ function encodeChunk(uint8, start, end) {
77
+ let tmp;
78
+ const output = [];
79
+ for (let i = start; i < end; i += 3) {
80
+ tmp = (uint8[i] << 16 & 16711680) + (uint8[i + 1] << 8 & 65280) + (uint8[i + 2] & 255);
81
+ output.push(tripletToBase64(tmp));
82
+ }
83
+ return output.join("");
84
+ }
85
+ function fromByteArray(uint8) {
86
+ let tmp;
87
+ const len = uint8.length;
88
+ const extraBytes = len % 3;
89
+ const parts = [];
90
+ const maxChunkLength = 16383;
91
+ for (let i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) {
92
+ parts.push(encodeChunk(uint8, i, i + maxChunkLength > len2 ? len2 : i + maxChunkLength));
93
+ }
94
+ if (extraBytes === 1) {
95
+ tmp = uint8[len - 1];
96
+ parts.push(lookup[tmp >> 2] + lookup[tmp << 4 & 63] + "==");
97
+ } else if (extraBytes === 2) {
98
+ tmp = (uint8[len - 2] << 8) + uint8[len - 1];
99
+ parts.push(lookup[tmp >> 10] + lookup[tmp >> 4 & 63] + lookup[tmp << 2 & 63] + "=");
100
+ }
101
+ return parts.join("");
102
+ }
103
+
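// Editor's note: a minimal usage sketch of the base64 helpers above. They are
// module-internal in the published bundle, so these direct calls are illustrative only.
const decodedBytes = toByteArray("aGVsbG8=");    // Uint8Array [104, 101, 108, 108, 111] ("hello")
const reencoded = fromByteArray(decodedBytes);   // "aGVsbG8="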
104
+ const K_MAX_LENGTH = 2147483647;
105
+ const MAX_ARGUMENTS_LENGTH = 4096;
106
+ class Buffer extends Uint8Array {
107
+ /**
108
+ * Constructs a new `Buffer` instance.
109
+ *
110
+ * @param value
111
+ * @param encodingOrOffset
112
+ * @param length
113
+ */
114
+ constructor(value, encodingOrOffset, length) {
115
+ if (typeof value === "number") {
116
+ if (typeof encodingOrOffset === "string") {
117
+ throw new TypeError("The first argument must be of type string, received type number");
118
+ }
119
+ if (value < 0) {
120
+ throw new RangeError("The buffer size cannot be negative");
121
+ }
122
+ super(value < 0 ? 0 : Buffer._checked(value) | 0);
123
+ } else if (typeof value === "string") {
124
+ if (typeof encodingOrOffset !== "string") {
125
+ encodingOrOffset = "utf8";
126
+ }
127
+ if (!Buffer.isEncoding(encodingOrOffset)) {
128
+ throw new TypeError("Unknown encoding: " + encodingOrOffset);
129
+ }
130
+ const length2 = Buffer.byteLength(value, encodingOrOffset) | 0;
131
+ super(length2);
132
+ const written = this.write(value, 0, this.length, encodingOrOffset);
133
+ if (written !== length2) {
134
+ throw new TypeError(
135
+ "Number of bytes written did not match expected length (wrote " + written + ", expected " + length2 + ")"
136
+ );
137
+ }
138
+ } else if (ArrayBuffer.isView(value)) {
139
+ if (Buffer._isInstance(value, Uint8Array)) {
140
+ const copy = new Uint8Array(value);
141
+ const array = copy.buffer;
142
+ const byteOffset = copy.byteOffset;
143
+ const length2 = copy.byteLength;
144
+ if (byteOffset < 0 || array.byteLength < byteOffset) {
145
+ throw new RangeError("offset is outside of buffer bounds");
146
+ }
147
+ if (array.byteLength < byteOffset + (length2 || 0)) {
148
+ throw new RangeError("length is outside of buffer bounds");
149
+ }
150
+ super(new Uint8Array(array, byteOffset, length2));
151
+ } else {
152
+ const array = value;
153
+ const length2 = array.length < 0 ? 0 : Buffer._checked(array.length) | 0;
154
+ super(new Uint8Array(length2));
155
+ for (let i = 0; i < length2; i++) {
156
+ this[i] = array[i] & 255;
157
+ }
158
+ }
159
+ } else if (value == null) {
160
+ throw new TypeError(
161
+ "The first argument must be one of type string, Buffer, ArrayBuffer, Array, or Array-like Object. Received type " + typeof value
162
+ );
163
+ } else if (Buffer._isInstance(value, ArrayBuffer) || value && Buffer._isInstance(value.buffer, ArrayBuffer)) {
164
+ const array = value;
165
+ const byteOffset = encodingOrOffset;
166
+ if (byteOffset < 0 || array.byteLength < byteOffset) {
167
+ throw new RangeError("offset is outside of buffer bounds");
168
+ }
169
+ if (array.byteLength < byteOffset + (length || 0)) {
170
+ throw new RangeError("length is outside of buffer bounds");
171
+ }
172
+ super(new Uint8Array(array, byteOffset, length));
173
+ } else if (Array.isArray(value)) {
174
+ const array = value;
175
+ const length2 = array.length < 0 ? 0 : Buffer._checked(array.length) | 0;
176
+ super(new Uint8Array(length2));
177
+ for (let i = 0; i < length2; i++) {
178
+ this[i] = array[i] & 255;
179
+ }
180
+ } else {
181
+ throw new TypeError("Unable to determine the correct way to allocate buffer for type " + typeof value);
182
+ }
183
+ }
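// Editor's note: an illustrative sketch (not part of the published diff) of how the
// constructor branches above are reached through the static factory, assuming the
// vendored class is in scope.
Buffer.from("68656c6c6f", "hex");       // string + encoding branch
Buffer.from([0x68, 0x69]);              // plain Array branch, each element masked with & 255
Buffer.from(new Uint8Array(4));         // Uint8Array branch, bytes are copied
Buffer.from(new ArrayBuffer(8), 2, 4);  // ArrayBuffer branch with byteOffset and length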
184
+ /**
185
+ * Return JSON representation of the buffer.
186
+ */
187
+ toJSON() {
188
+ return {
189
+ type: "Buffer",
190
+ data: Array.prototype.slice.call(this)
191
+ };
192
+ }
193
+ /**
194
+ * Writes `string` to the buffer at `offset` according to the character encoding in `encoding`. The `length`
195
+ * parameter is the number of bytes to write. If the buffer does not contain enough space to fit the entire string,
196
+ * only part of `string` will be written. However, partially encoded characters will not be written.
197
+ *
198
+ * @param string String to write to `buf`.
199
+ * @param offset Number of bytes to skip before starting to write `string`. Default: `0`.
200
+ * @param length Maximum number of bytes to write. Default: `buf.length - offset`.
201
+ * @param encoding The character encoding of `string`. Default: `utf8`.
202
+ */
203
+ write(string, offset, length, encoding) {
204
+ if (typeof offset === "undefined") {
205
+ encoding = "utf8";
206
+ length = this.length;
207
+ offset = 0;
208
+ } else if (typeof length === "undefined" && typeof offset === "string") {
209
+ encoding = offset;
210
+ length = this.length;
211
+ offset = 0;
212
+ } else if (typeof offset === "number" && isFinite(offset)) {
213
+ offset = offset >>> 0;
214
+ if (typeof length === "number" && isFinite(length)) {
215
+ length = length >>> 0;
216
+ encoding ?? (encoding = "utf8");
217
+ } else if (typeof length === "string") {
218
+ encoding = length;
219
+ length = void 0;
220
+ }
221
+ } else {
222
+ throw new Error("Buffer.write(string, encoding, offset[, length]) is no longer supported");
223
+ }
224
+ const remaining = this.length - offset;
225
+ if (typeof length === "undefined" || length > remaining) {
226
+ length = remaining;
227
+ }
228
+ if (string.length > 0 && (length < 0 || offset < 0) || offset > this.length) {
229
+ throw new RangeError("Attempt to write outside buffer bounds");
230
+ }
231
+ encoding || (encoding = "utf8");
232
+ switch (Buffer._getEncoding(encoding)) {
233
+ case "hex":
234
+ return Buffer._hexWrite(this, string, offset, length);
235
+ case "utf8":
236
+ return Buffer._utf8Write(this, string, offset, length);
237
+ case "ascii":
238
+ case "latin1":
239
+ case "binary":
240
+ return Buffer._asciiWrite(this, string, offset, length);
241
+ case "ucs2":
242
+ case "utf16le":
243
+ return Buffer._ucs2Write(this, string, offset, length);
244
+ case "base64":
245
+ return Buffer._base64Write(this, string, offset, length);
246
+ }
247
+ }
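// Editor's note: illustrative only (not part of the diff) — the argument normalization
// above accepts offset, length and encoding in several shapes.
const scratch = Buffer.alloc(8);
scratch.write("hello");        // utf8 at offset 0, returns 5
scratch.write("!!", 5);        // utf8 at offset 5, returns 2
scratch.write("6869", "hex");  // (string, encoding) form, writes 2 bytes at offset 0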
248
+ /**
249
+ * Decodes the buffer to a string according to the specified character encoding.
250
+ * Passing `start` and `end` will decode only a subset of the buffer.
251
+ *
252
+ * Note that if the encoding is `utf8` and a byte sequence in the input is not valid UTF-8, then each invalid byte
253
+ * will be replaced with `U+FFFD`.
254
+ *
255
+ * @param encoding
256
+ * @param start
257
+ * @param end
258
+ */
259
+ toString(encoding, start, end) {
260
+ const length = this.length;
261
+ if (length === 0) {
262
+ return "";
263
+ }
264
+ if (arguments.length === 0) {
265
+ return Buffer._utf8Slice(this, 0, length);
266
+ }
267
+ if (typeof start === "undefined" || start < 0) {
268
+ start = 0;
269
+ }
270
+ if (start > this.length) {
271
+ return "";
272
+ }
273
+ if (typeof end === "undefined" || end > this.length) {
274
+ end = this.length;
275
+ }
276
+ if (end <= 0) {
277
+ return "";
278
+ }
279
+ end >>>= 0;
280
+ start >>>= 0;
281
+ if (end <= start) {
282
+ return "";
283
+ }
284
+ if (!encoding) {
285
+ encoding = "utf8";
286
+ }
287
+ switch (Buffer._getEncoding(encoding)) {
288
+ case "hex":
289
+ return Buffer._hexSlice(this, start, end);
290
+ case "utf8":
291
+ return Buffer._utf8Slice(this, start, end);
292
+ case "ascii":
293
+ return Buffer._asciiSlice(this, start, end);
294
+ case "latin1":
295
+ case "binary":
296
+ return Buffer._latin1Slice(this, start, end);
297
+ case "ucs2":
298
+ case "utf16le":
299
+ return Buffer._utf16leSlice(this, start, end);
300
+ case "base64":
301
+ return Buffer._base64Slice(this, start, end);
302
+ }
303
+ }
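// Editor's note: illustrative only (not part of the diff).
Buffer.from("hello").toString("hex");         // "68656c6c6f"
Buffer.from("hello").toString("base64");      // "aGVsbG8="
Buffer.from("hello").toString("utf8", 1, 3);  // "el" (decodes only the requested subset)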
304
+ /**
305
+ * Returns true if this buffer is equal to the provided buffer, meaning they contain exactly the same bytes.
306
+ *
307
+ * @param otherBuffer
308
+ */
309
+ equals(otherBuffer) {
310
+ if (!Buffer.isBuffer(otherBuffer)) {
311
+ throw new TypeError("Argument must be a Buffer");
312
+ }
313
+ if (this === otherBuffer) {
314
+ return true;
315
+ }
316
+ return Buffer.compare(this, otherBuffer) === 0;
317
+ }
318
+ /**
319
+ * Compares the buffer with `otherBuffer` and returns a number indicating whether the buffer comes before, after,
320
+ * or is the same as `otherBuffer` in sort order. Comparison is based on the actual sequence of bytes in each
321
+ * buffer.
322
+ *
323
+ * - `0` is returned if `otherBuffer` is the same as this buffer.
324
+ * - `1` is returned if `otherBuffer` should come before this buffer when sorted.
325
+ * - `-1` is returned if `otherBuffer` should come after this buffer when sorted.
326
+ *
327
+ * @param otherBuffer The buffer to compare to.
328
+ * @param targetStart The offset within `otherBuffer` at which to begin comparison.
329
+ * @param targetEnd The offset within `otherBuffer` at which to end comparison (exclusive).
330
+ * @param sourceStart The offset within this buffer at which to begin comparison.
331
+ * @param sourceEnd The offset within this buffer at which to end the comparison (exclusive).
332
+ */
333
+ compare(otherBuffer, targetStart, targetEnd, sourceStart, sourceEnd) {
334
+ if (Buffer._isInstance(otherBuffer, Uint8Array)) {
335
+ otherBuffer = Buffer.from(otherBuffer, otherBuffer.byteOffset, otherBuffer.byteLength);
336
+ }
337
+ if (!Buffer.isBuffer(otherBuffer)) {
338
+ throw new TypeError("Argument must be a Buffer or Uint8Array");
339
+ }
340
+ targetStart ?? (targetStart = 0);
341
+ targetEnd ?? (targetEnd = otherBuffer ? otherBuffer.length : 0);
342
+ sourceStart ?? (sourceStart = 0);
343
+ sourceEnd ?? (sourceEnd = this.length);
344
+ if (targetStart < 0 || targetEnd > otherBuffer.length || sourceStart < 0 || sourceEnd > this.length) {
345
+ throw new RangeError("Out of range index");
346
+ }
347
+ if (sourceStart >= sourceEnd && targetStart >= targetEnd) {
348
+ return 0;
349
+ }
350
+ if (sourceStart >= sourceEnd) {
351
+ return -1;
352
+ }
353
+ if (targetStart >= targetEnd) {
354
+ return 1;
355
+ }
356
+ targetStart >>>= 0;
357
+ targetEnd >>>= 0;
358
+ sourceStart >>>= 0;
359
+ sourceEnd >>>= 0;
360
+ if (this === otherBuffer) {
361
+ return 0;
362
+ }
363
+ let x = sourceEnd - sourceStart;
364
+ let y = targetEnd - targetStart;
365
+ const len = Math.min(x, y);
366
+ const thisCopy = this.slice(sourceStart, sourceEnd);
367
+ const targetCopy = otherBuffer.slice(targetStart, targetEnd);
368
+ for (let i = 0; i < len; ++i) {
369
+ if (thisCopy[i] !== targetCopy[i]) {
370
+ x = thisCopy[i];
371
+ y = targetCopy[i];
372
+ break;
373
+ }
374
+ }
375
+ if (x < y) return -1;
376
+ if (y < x) return 1;
377
+ return 0;
378
+ }
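// Editor's note: illustrative only (not part of the diff) — byte-wise ordering semantics.
Buffer.from("abc").compare(Buffer.from("abd"));  // -1, because 0x63 < 0x64 at index 2
Buffer.from("abc").compare(Buffer.from("abc"));  // 0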
379
+ /**
380
+ * Copies data from a region of this buffer to a region in `targetBuffer`, even if the `targetBuffer` memory
381
+ * region overlaps with this buffer.
382
+ *
383
+ * @param targetBuffer The target buffer to copy into.
384
+ * @param targetStart The offset within `targetBuffer` at which to begin writing.
385
+ * @param sourceStart The offset within this buffer at which to begin copying.
386
+ * @param sourceEnd The offset within this buffer at which to end copying (exclusive).
387
+ */
388
+ copy(targetBuffer, targetStart, sourceStart, sourceEnd) {
389
+ if (!Buffer.isBuffer(targetBuffer)) throw new TypeError("argument should be a Buffer");
390
+ if (!sourceStart) sourceStart = 0;
391
+ if (!targetStart) targetStart = 0;
392
+ if (!sourceEnd && sourceEnd !== 0) sourceEnd = this.length;
393
+ if (targetStart >= targetBuffer.length) targetStart = targetBuffer.length;
394
+ if (!targetStart) targetStart = 0;
395
+ if (sourceEnd > 0 && sourceEnd < sourceStart) sourceEnd = sourceStart;
396
+ if (sourceEnd === sourceStart) return 0;
397
+ if (targetBuffer.length === 0 || this.length === 0) return 0;
398
+ if (targetStart < 0) {
399
+ throw new RangeError("targetStart out of bounds");
400
+ }
401
+ if (sourceStart < 0 || sourceStart >= this.length) throw new RangeError("Index out of range");
402
+ if (sourceEnd < 0) throw new RangeError("sourceEnd out of bounds");
403
+ if (sourceEnd > this.length) sourceEnd = this.length;
404
+ if (targetBuffer.length - targetStart < sourceEnd - sourceStart) {
405
+ sourceEnd = targetBuffer.length - targetStart + sourceStart;
406
+ }
407
+ const len = sourceEnd - sourceStart;
408
+ if (this === targetBuffer && typeof Uint8Array.prototype.copyWithin === "function") {
409
+ this.copyWithin(targetStart, sourceStart, sourceEnd);
410
+ } else {
411
+ Uint8Array.prototype.set.call(targetBuffer, this.subarray(sourceStart, sourceEnd), targetStart);
412
+ }
413
+ return len;
414
+ }
415
+ /**
416
+ * Returns a new `Buffer` that references the same memory as the original, but offset and cropped by the `start`
417
+ * and `end` indices. This is the same behavior as `buf.subarray()`.
418
+ *
419
+ * This method is not compatible with `Uint8Array.prototype.slice()`, which is inherited from Buffer's superclass. To copy
420
+ * the slice, use `Uint8Array.prototype.slice()` instead.
421
+ *
422
+ * @param start
423
+ * @param end
424
+ */
425
+ slice(start, end) {
426
+ if (!start) {
427
+ start = 0;
428
+ }
429
+ const len = this.length;
430
+ start = ~~start;
431
+ end = end === void 0 ? len : ~~end;
432
+ if (start < 0) {
433
+ start += len;
434
+ if (start < 0) {
435
+ start = 0;
436
+ }
437
+ } else if (start > len) {
438
+ start = len;
439
+ }
440
+ if (end < 0) {
441
+ end += len;
442
+ if (end < 0) {
443
+ end = 0;
444
+ }
445
+ } else if (end > len) {
446
+ end = len;
447
+ }
448
+ if (end < start) {
449
+ end = start;
450
+ }
451
+ const newBuf = this.subarray(start, end);
452
+ Object.setPrototypeOf(newBuf, Buffer.prototype);
453
+ return newBuf;
454
+ }
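// Editor's note: illustrative only (not part of the diff) — slice() returns a view, not a copy.
const whole = Buffer.from("buffer");
const head = whole.slice(0, 3);  // "buf"
head[0] = 0x42;                  // mutating the view...
whole.toString();                // "Buffer" — ...is visible through the original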
455
+ /**
456
+ * Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits
457
+ * of accuracy. Behavior is undefined when value is anything other than an unsigned integer.
458
+ *
459
+ * @param value Number to write.
460
+ * @param offset Number of bytes to skip before starting to write.
461
+ * @param byteLength Number of bytes to write, between 0 and 6.
462
+ * @param noAssert
463
+ * @returns `offset` plus the number of bytes written.
464
+ */
465
+ writeUIntLE(value, offset, byteLength, noAssert) {
466
+ value = +value;
467
+ offset = offset >>> 0;
468
+ byteLength = byteLength >>> 0;
469
+ if (!noAssert) {
470
+ const maxBytes = Math.pow(2, 8 * byteLength) - 1;
471
+ Buffer._checkInt(this, value, offset, byteLength, maxBytes, 0);
472
+ }
473
+ let mul = 1;
474
+ let i = 0;
475
+ this[offset] = value & 255;
476
+ while (++i < byteLength && (mul *= 256)) {
477
+ this[offset + i] = value / mul & 255;
478
+ }
479
+ return offset + byteLength;
480
+ }
481
+ /**
482
+ * Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits of
483
+ * accuracy. Behavior is undefined when `value` is anything other than an unsigned integer.
484
+ *
485
+ * @param value Number to write.
486
+ * @param offset Number of bytes to skip before starting to write.
487
+ * @param byteLength Number of bytes to write, between 0 and 6.
488
+ * @param noAssert
489
+ * @returns `offset` plus the number of bytes written.
490
+ */
491
+ writeUIntBE(value, offset, byteLength, noAssert) {
492
+ value = +value;
493
+ offset = offset >>> 0;
494
+ byteLength = byteLength >>> 0;
495
+ if (!noAssert) {
496
+ const maxBytes = Math.pow(2, 8 * byteLength) - 1;
497
+ Buffer._checkInt(this, value, offset, byteLength, maxBytes, 0);
498
+ }
499
+ let i = byteLength - 1;
500
+ let mul = 1;
501
+ this[offset + i] = value & 255;
502
+ while (--i >= 0 && (mul *= 256)) {
503
+ this[offset + i] = value / mul & 255;
504
+ }
505
+ return offset + byteLength;
506
+ }
507
+ /**
508
+ * Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits
509
+ * of accuracy. Behavior is undefined when `value` is anything other than a signed integer.
510
+ *
511
+ * @param value Number to write.
512
+ * @param offset Number of bytes to skip before starting to write.
513
+ * @param byteLength Number of bytes to write, between 0 and 6.
514
+ * @param noAssert
515
+ * @returns `offset` plus the number of bytes written.
516
+ */
517
+ writeIntLE(value, offset, byteLength, noAssert) {
518
+ value = +value;
519
+ offset = offset >>> 0;
520
+ if (!noAssert) {
521
+ const limit = Math.pow(2, 8 * byteLength - 1);
522
+ Buffer._checkInt(this, value, offset, byteLength, limit - 1, -limit);
523
+ }
524
+ let i = 0;
525
+ let mul = 1;
526
+ let sub = 0;
527
+ this[offset] = value & 255;
528
+ while (++i < byteLength && (mul *= 256)) {
529
+ if (value < 0 && sub === 0 && this[offset + i - 1] !== 0) {
530
+ sub = 1;
531
+ }
532
+ this[offset + i] = (value / mul >> 0) - sub & 255;
533
+ }
534
+ return offset + byteLength;
535
+ }
536
+ /**
537
+ * Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits
538
+ * of accuracy. Behavior is undefined when `value` is anything other than a signed integer.
539
+ *
540
+ * @param value Number to write.
541
+ * @param offset Number of bytes to skip before starting to write.
542
+ * @param byteLength Number of bytes to write, between 0 and 6.
543
+ * @param noAssert
544
+ * @returns `offset` plus the number of bytes written.
545
+ */
546
+ writeIntBE(value, offset, byteLength, noAssert) {
547
+ value = +value;
548
+ offset = offset >>> 0;
549
+ if (!noAssert) {
550
+ const limit = Math.pow(2, 8 * byteLength - 1);
551
+ Buffer._checkInt(this, value, offset, byteLength, limit - 1, -limit);
552
+ }
553
+ let i = byteLength - 1;
554
+ let mul = 1;
555
+ let sub = 0;
556
+ this[offset + i] = value & 255;
557
+ while (--i >= 0 && (mul *= 256)) {
558
+ if (value < 0 && sub === 0 && this[offset + i + 1] !== 0) {
559
+ sub = 1;
560
+ }
561
+ this[offset + i] = (value / mul >> 0) - sub & 255;
562
+ }
563
+ return offset + byteLength;
564
+ }
565
+ /**
566
+ * Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an
567
+ * unsigned, little-endian integer supporting up to 48 bits of accuracy.
568
+ *
569
+ * @param offset Number of bytes to skip before starting to read.
570
+ * @param byteLength Number of bytes to read, between 0 and 6.
571
+ * @param noAssert
572
+ */
573
+ readUIntLE(offset, byteLength, noAssert) {
574
+ offset = offset >>> 0;
575
+ byteLength = byteLength >>> 0;
576
+ if (!noAssert) {
577
+ Buffer._checkOffset(offset, byteLength, this.length);
578
+ }
579
+ let val = this[offset];
580
+ let mul = 1;
581
+ let i = 0;
582
+ while (++i < byteLength && (mul *= 256)) {
583
+ val += this[offset + i] * mul;
584
+ }
585
+ return val;
586
+ }
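// Editor's note: illustrative only (not part of the diff) — round-tripping a 48-bit value.
const u48 = Buffer.alloc(6);
u48.writeUIntLE(0x123456789abc, 0, 6);
u48.readUIntLE(0, 6) === 0x123456789abc;  // true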
587
+ /**
588
+ * Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an
589
+ * unsigned, big-endian integer supporting up to 48 bits of accuracy.
590
+ *
591
+ * @param offset Number of bytes to skip before starting to read.
592
+ * @param byteLength Number of bytes to read, between 0 and 6.
593
+ * @param noAssert
594
+ */
595
+ readUIntBE(offset, byteLength, noAssert) {
596
+ offset = offset >>> 0;
597
+ byteLength = byteLength >>> 0;
598
+ if (!noAssert) {
599
+ Buffer._checkOffset(offset, byteLength, this.length);
600
+ }
601
+ let val = this[offset + --byteLength];
602
+ let mul = 1;
603
+ while (byteLength > 0 && (mul *= 256)) {
604
+ val += this[offset + --byteLength] * mul;
605
+ }
606
+ return val;
607
+ }
608
+ /**
609
+ * Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a
610
+ * little-endian, two's complement signed value supporting up to 48 bits of accuracy.
611
+ *
612
+ * @param offset Number of bytes to skip before starting to read.
613
+ * @param byteLength Number of bytes to read, between 0 and 6.
614
+ * @param noAssert
615
+ */
616
+ readIntLE(offset, byteLength, noAssert) {
617
+ offset = offset >>> 0;
618
+ byteLength = byteLength >>> 0;
619
+ if (!noAssert) {
620
+ Buffer._checkOffset(offset, byteLength, this.length);
621
+ }
622
+ let val = this[offset];
623
+ let mul = 1;
624
+ let i = 0;
625
+ while (++i < byteLength && (mul *= 256)) {
626
+ val += this[offset + i] * mul;
627
+ }
628
+ mul *= 128;
629
+ if (val >= mul) {
630
+ val -= Math.pow(2, 8 * byteLength);
631
+ }
632
+ return val;
633
+ }
634
+ /**
635
+ * Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a
636
+ * big-endian, two's complement signed value supporting up to 48 bits of accuracy.
637
+ *
638
+ * @param offset Number of bytes to skip before starting to read.
639
+ * @param byteLength Number of bytes to read, between 0 and 6.
640
+ * @param noAssert
641
+ */
642
+ readIntBE(offset, byteLength, noAssert) {
643
+ offset = offset >>> 0;
644
+ byteLength = byteLength >>> 0;
645
+ if (!noAssert) {
646
+ Buffer._checkOffset(offset, byteLength, this.length);
647
+ }
648
+ let i = byteLength;
649
+ let mul = 1;
650
+ let val = this[offset + --i];
651
+ while (i > 0 && (mul *= 256)) {
652
+ val += this[offset + --i] * mul;
653
+ }
654
+ mul *= 128;
655
+ if (val >= mul) {
656
+ val -= Math.pow(2, 8 * byteLength);
657
+ }
658
+ return val;
659
+ }
660
+ /**
661
+ * Reads an unsigned 8-bit integer from `buf` at the specified `offset`.
662
+ *
663
+ * @param offset Number of bytes to skip before starting to read.
664
+ * @param noAssert
665
+ */
666
+ readUInt8(offset, noAssert) {
667
+ offset = offset >>> 0;
668
+ if (!noAssert) {
669
+ Buffer._checkOffset(offset, 1, this.length);
670
+ }
671
+ return this[offset];
672
+ }
673
+ /**
674
+ * Reads an unsigned, little-endian 16-bit integer from `buf` at the specified `offset`.
675
+ *
676
+ * @param offset Number of bytes to skip before starting to read.
677
+ * @param noAssert
678
+ */
679
+ readUInt16LE(offset, noAssert) {
680
+ offset = offset >>> 0;
681
+ if (!noAssert) {
682
+ Buffer._checkOffset(offset, 2, this.length);
683
+ }
684
+ return this[offset] | this[offset + 1] << 8;
685
+ }
686
+ /**
687
+ * Reads an unsigned, big-endian 16-bit integer from `buf` at the specified `offset`.
688
+ *
689
+ * @param offset Number of bytes to skip before starting to read.
690
+ * @param noAssert
691
+ */
692
+ readUInt16BE(offset, noAssert) {
693
+ offset = offset >>> 0;
694
+ if (!noAssert) {
695
+ Buffer._checkOffset(offset, 2, this.length);
696
+ }
697
+ return this[offset] << 8 | this[offset + 1];
698
+ }
699
+ /**
700
+ * Reads an unsigned, little-endian 32-bit integer from `buf` at the specified `offset`.
701
+ *
702
+ * @param offset Number of bytes to skip before starting to read.
703
+ * @param noAssert
704
+ */
705
+ readUInt32LE(offset, noAssert) {
706
+ offset = offset >>> 0;
707
+ if (!noAssert) {
708
+ Buffer._checkOffset(offset, 4, this.length);
709
+ }
710
+ return (this[offset] | this[offset + 1] << 8 | this[offset + 2] << 16) + this[offset + 3] * 16777216;
711
+ }
712
+ /**
713
+ * Reads an unsigned, big-endian 32-bit integer from `buf` at the specified `offset`.
714
+ *
715
+ * @param offset Number of bytes to skip before starting to read.
716
+ * @param noAssert
717
+ */
718
+ readUInt32BE(offset, noAssert) {
719
+ offset = offset >>> 0;
720
+ if (!noAssert) {
721
+ Buffer._checkOffset(offset, 4, this.length);
722
+ }
723
+ return this[offset] * 16777216 + (this[offset + 1] << 16 | this[offset + 2] << 8 | this[offset + 3]);
724
+ }
725
+ /**
726
+ * Reads a signed 8-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer` are interpreted
727
+ * as two's complement signed values.
728
+ *
729
+ * @param offset Number of bytes to skip before starting to read.
730
+ * @param noAssert
731
+ */
732
+ readInt8(offset, noAssert) {
733
+ offset = offset >>> 0;
734
+ if (!noAssert) {
735
+ Buffer._checkOffset(offset, 1, this.length);
736
+ }
737
+ if (!(this[offset] & 128)) {
738
+ return this[offset];
739
+ }
740
+ return (255 - this[offset] + 1) * -1;
741
+ }
742
+ /**
743
+ * Reads a signed, little-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
744
+ * are interpreted as two's complement signed values.
745
+ *
746
+ * @param offset Number of bytes to skip before starting to read.
747
+ * @param noAssert
748
+ */
749
+ readInt16LE(offset, noAssert) {
750
+ offset = offset >>> 0;
751
+ if (!noAssert) {
752
+ Buffer._checkOffset(offset, 2, this.length);
753
+ }
754
+ const val = this[offset] | this[offset + 1] << 8;
755
+ return val & 32768 ? val | 4294901760 : val;
756
+ }
757
+ /**
758
+ * Reads a signed, big-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
759
+ * are interpreted as two's complement signed values.
760
+ *
761
+ * @param offset Number of bytes to skip before starting to read.
762
+ * @param noAssert
763
+ */
764
+ readInt16BE(offset, noAssert) {
765
+ offset = offset >>> 0;
766
+ if (!noAssert) {
767
+ Buffer._checkOffset(offset, 2, this.length);
768
+ }
769
+ const val = this[offset + 1] | this[offset] << 8;
770
+ return val & 32768 ? val | 4294901760 : val;
771
+ }
772
+ /**
773
+ * Reads a signed, little-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
774
+ * are interpreted as two's complement signed values.
775
+ *
776
+ * @param offset Number of bytes to skip before starting to read.
777
+ * @param noAssert
778
+ */
779
+ readInt32LE(offset, noAssert) {
780
+ offset = offset >>> 0;
781
+ if (!noAssert) {
782
+ Buffer._checkOffset(offset, 4, this.length);
783
+ }
784
+ return this[offset] | this[offset + 1] << 8 | this[offset + 2] << 16 | this[offset + 3] << 24;
785
+ }
786
+ /**
787
+ * Reads a signed, big-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
788
+ * are interpreted as two's complement signed values.
789
+ *
790
+ * @param offset Number of bytes to skip before starting to read.
791
+ * @param noAssert
792
+ */
793
+ readInt32BE(offset, noAssert) {
794
+ offset = offset >>> 0;
795
+ if (!noAssert) {
796
+ Buffer._checkOffset(offset, 4, this.length);
797
+ }
798
+ return this[offset] << 24 | this[offset + 1] << 16 | this[offset + 2] << 8 | this[offset + 3];
799
+ }
800
+ /**
801
+ * Interprets `buf` as an array of unsigned 16-bit integers and swaps the byte order in-place.
802
+ * Throws a `RangeError` if `buf.length` is not a multiple of 2.
803
+ */
804
+ swap16() {
805
+ const len = this.length;
806
+ if (len % 2 !== 0) {
807
+ throw new RangeError("Buffer size must be a multiple of 16-bits");
808
+ }
809
+ for (let i = 0; i < len; i += 2) {
810
+ this._swap(this, i, i + 1);
811
+ }
812
+ return this;
813
+ }
814
+ /**
815
+ * Interprets `buf` as an array of unsigned 32-bit integers and swaps the byte order in-place.
816
+ * Throws a `RangeError` if `buf.length` is not a multiple of 4.
817
+ */
818
+ swap32() {
819
+ const len = this.length;
820
+ if (len % 4 !== 0) {
821
+ throw new RangeError("Buffer size must be a multiple of 32-bits");
822
+ }
823
+ for (let i = 0; i < len; i += 4) {
824
+ this._swap(this, i, i + 3);
825
+ this._swap(this, i + 1, i + 2);
826
+ }
827
+ return this;
828
+ }
829
+ /**
830
+ * Interprets `buf` as an array of unsigned 64-bit integers and swaps the byte order in-place.
831
+ * Throws a `RangeError` if `buf.length` is not a multiple of 8.
832
+ */
833
+ swap64() {
834
+ const len = this.length;
835
+ if (len % 8 !== 0) {
836
+ throw new RangeError("Buffer size must be a multiple of 64-bits");
837
+ }
838
+ for (let i = 0; i < len; i += 8) {
839
+ this._swap(this, i, i + 7);
840
+ this._swap(this, i + 1, i + 6);
841
+ this._swap(this, i + 2, i + 5);
842
+ this._swap(this, i + 3, i + 4);
843
+ }
844
+ return this;
845
+ }
846
+ /**
847
+ * Swaps two octets.
848
+ *
849
+ * @param b
850
+ * @param n
851
+ * @param m
852
+ */
853
+ _swap(b, n, m) {
854
+ const i = b[n];
855
+ b[n] = b[m];
856
+ b[m] = i;
857
+ }
858
+ /**
859
+ * Writes `value` to `buf` at the specified `offset`. The `value` must be a valid unsigned 8-bit integer.
860
+ * Behavior is undefined when `value` is anything other than an unsigned 8-bit integer.
861
+ *
862
+ * @param value Number to write.
863
+ * @param offset Number of bytes to skip before starting to write.
864
+ * @param noAssert
865
+ * @returns `offset` plus the number of bytes written.
866
+ */
867
+ writeUInt8(value, offset, noAssert) {
868
+ value = +value;
869
+ offset = offset >>> 0;
870
+ if (!noAssert) {
871
+ Buffer._checkInt(this, value, offset, 1, 255, 0);
872
+ }
873
+ this[offset] = value & 255;
874
+ return offset + 1;
875
+ }
876
+ /**
877
+ * Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 16-bit
878
+ * integer. Behavior is undefined when `value` is anything other than an unsigned 16-bit integer.
879
+ *
880
+ * @param value Number to write.
881
+ * @param offset Number of bytes to skip before starting to write.
882
+ * @param noAssert
883
+ * @returns `offset` plus the number of bytes written.
884
+ */
885
+ writeUInt16LE(value, offset, noAssert) {
886
+ value = +value;
887
+ offset = offset >>> 0;
888
+ if (!noAssert) {
889
+ Buffer._checkInt(this, value, offset, 2, 65535, 0);
890
+ }
891
+ this[offset] = value & 255;
892
+ this[offset + 1] = value >>> 8;
893
+ return offset + 2;
894
+ }
895
+ /**
896
+ * Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 16-bit
897
+ * integer. Behavior is undefined when `value` is anything other than an unsigned 16-bit integer.
898
+ *
899
+ * @param value Number to write.
900
+ * @param offset Number of bytes to skip before starting to write.
901
+ * @param noAssert
902
+ * @returns `offset` plus the number of bytes written.
903
+ */
904
+ writeUInt16BE(value, offset, noAssert) {
905
+ value = +value;
906
+ offset = offset >>> 0;
907
+ if (!noAssert) {
908
+ Buffer._checkInt(this, value, offset, 2, 65535, 0);
909
+ }
910
+ this[offset] = value >>> 8;
911
+ this[offset + 1] = value & 255;
912
+ return offset + 2;
913
+ }
914
+ /**
915
+ * Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 32-bit
916
+ * integer. Behavior is undefined when `value` is anything other than an unsigned 32-bit integer.
917
+ *
918
+ * @param value Number to write.
919
+ * @param offset Number of bytes to skip before starting to write.
920
+ * @param noAssert
921
+ * @returns `offset` plus the number of bytes written.
922
+ */
923
+ writeUInt32LE(value, offset, noAssert) {
924
+ value = +value;
925
+ offset = offset >>> 0;
926
+ if (!noAssert) {
927
+ Buffer._checkInt(this, value, offset, 4, 4294967295, 0);
928
+ }
929
+ this[offset + 3] = value >>> 24;
930
+ this[offset + 2] = value >>> 16;
931
+ this[offset + 1] = value >>> 8;
932
+ this[offset] = value & 255;
933
+ return offset + 4;
934
+ }
935
+ /**
936
+ * Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 32-bit
937
+ * integer. Behavior is undefined when `value` is anything other than an unsigned 32-bit integer.
938
+ *
939
+ * @param value Number to write.
940
+ * @param offset Number of bytes to skip before starting to write.
941
+ * @param noAssert
942
+ * @returns `offset` plus the number of bytes written.
943
+ */
944
+ writeUInt32BE(value, offset, noAssert) {
945
+ value = +value;
946
+ offset = offset >>> 0;
947
+ if (!noAssert) {
948
+ Buffer._checkInt(this, value, offset, 4, 4294967295, 0);
949
+ }
950
+ this[offset] = value >>> 24;
951
+ this[offset + 1] = value >>> 16;
952
+ this[offset + 2] = value >>> 8;
953
+ this[offset + 3] = value & 255;
954
+ return offset + 4;
955
+ }
956
+ /**
957
+ * Writes `value` to `buf` at the specified `offset`. The `value` must be a valid signed 8-bit integer.
958
+ * Behavior is undefined when `value` is anything other than a signed 8-bit integer.
959
+ *
960
+ * @param value Number to write.
961
+ * @param offset Number of bytes to skip before starting to write.
962
+ * @param noAssert
963
+ * @returns `offset` plus the number of bytes written.
964
+ */
965
+ writeInt8(value, offset, noAssert) {
966
+ value = +value;
967
+ offset = offset >>> 0;
968
+ if (!noAssert) {
969
+ Buffer._checkInt(this, value, offset, 1, 127, -128);
970
+ }
971
+ if (value < 0) {
972
+ value = 255 + value + 1;
973
+ }
974
+ this[offset] = value & 255;
975
+ return offset + 1;
976
+ }
977
+ /**
978
+ * Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 16-bit
979
+ * integer. Behavior is undefined when `value` is anything other than a signed 16-bit integer.
980
+ *
981
+ * @param value Number to write.
982
+ * @param offset Number of bytes to skip before starting to write.
983
+ * @param noAssert
984
+ * @returns `offset` plus the number of bytes written.
985
+ */
986
+ writeInt16LE(value, offset, noAssert) {
987
+ value = +value;
988
+ offset = offset >>> 0;
989
+ if (!noAssert) {
990
+ Buffer._checkInt(this, value, offset, 2, 32767, -32768);
991
+ }
992
+ this[offset] = value & 255;
993
+ this[offset + 1] = value >>> 8;
994
+ return offset + 2;
995
+ }
996
+ /**
997
+ * Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 16-bit
998
+ * integer. Behavior is undefined when `value` is anything other than a signed 16-bit integer.
999
+ *
1000
+ * @param value Number to write.
1001
+ * @param offset Number of bytes to skip before starting to write.
1002
+ * @param noAssert
1003
+ * @returns `offset` plus the number of bytes written.
1004
+ */
1005
+ writeInt16BE(value, offset, noAssert) {
1006
+ value = +value;
1007
+ offset = offset >>> 0;
1008
+ if (!noAssert) {
1009
+ Buffer._checkInt(this, value, offset, 2, 32767, -32768);
1010
+ }
1011
+ this[offset] = value >>> 8;
1012
+ this[offset + 1] = value & 255;
1013
+ return offset + 2;
1014
+ }
1015
+ /**
1016
+ * Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 32-bit
1017
+ * integer. Behavior is undefined when `value` is anything other than a signed 32-bit integer.
1018
+ *
1019
+ * @param value Number to write.
1020
+ * @param offset Number of bytes to skip before starting to write.
1021
+ * @param noAssert
1022
+ * @returns `offset` plus the number of bytes written.
1023
+ */
1024
+ writeInt32LE(value, offset, noAssert) {
1025
+ value = +value;
1026
+ offset = offset >>> 0;
1027
+ if (!noAssert) {
1028
+ Buffer._checkInt(this, value, offset, 4, 2147483647, -2147483648);
1029
+ }
1030
+ this[offset] = value & 255;
1031
+ this[offset + 1] = value >>> 8;
1032
+ this[offset + 2] = value >>> 16;
1033
+ this[offset + 3] = value >>> 24;
1034
+ return offset + 4;
1035
+ }
1036
+ /**
1037
+ * Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 32-bit
1038
+ * integer. Behavior is undefined when `value` is anything other than a signed 32-bit integer.
1039
+ *
1040
+ * @param value Number to write.
1041
+ * @param offset Number of bytes to skip before starting to write.
1042
+ * @param noAssert
1043
+ * @returns `offset` plus the number of bytes written.
1044
+ */
1045
+ writeInt32BE(value, offset, noAssert) {
1046
+ value = +value;
1047
+ offset = offset >>> 0;
1048
+ if (!noAssert) {
1049
+ Buffer._checkInt(this, value, offset, 4, 2147483647, -2147483648);
1050
+ }
1051
+ if (value < 0) {
1052
+ value = 4294967295 + value + 1;
1053
+ }
1054
+ this[offset] = value >>> 24;
1055
+ this[offset + 1] = value >>> 16;
1056
+ this[offset + 2] = value >>> 8;
1057
+ this[offset + 3] = value & 255;
1058
+ return offset + 4;
1059
+ }
1060
+ /**
1061
+ * Fills `buf` with the specified `value`. If the `offset` and `end` are not given, the entire `buf` will be
1062
+ * filled. The `value` is coerced to a `uint32` value if it is not a string, `Buffer`, or integer. If the resulting
1063
+ * integer is greater than `255` (decimal), then `buf` will be filled with `value & 255`.
1064
+ *
1065
+ * If the final write of a `fill()` operation falls on a multi-byte character, then only the bytes of that
1066
+ * character that fit into `buf` are written.
1067
+ *
1068
+ * If `value` contains invalid characters, it is truncated; if no valid fill data remains, an exception is thrown.
1069
+ *
1070
+ * @param value
1071
+ * @param encoding
1072
+ */
1073
+ fill(value, offset, end, encoding) {
1074
+ if (typeof value === "string") {
1075
+ if (typeof offset === "string") {
1076
+ encoding = offset;
1077
+ offset = 0;
1078
+ end = this.length;
1079
+ } else if (typeof end === "string") {
1080
+ encoding = end;
1081
+ end = this.length;
1082
+ }
1083
+ if (encoding !== void 0 && typeof encoding !== "string") {
1084
+ throw new TypeError("encoding must be a string");
1085
+ }
1086
+ if (typeof encoding === "string" && !Buffer.isEncoding(encoding)) {
1087
+ throw new TypeError("Unknown encoding: " + encoding);
1088
+ }
1089
+ if (value.length === 1) {
1090
+ const code = value.charCodeAt(0);
1091
+ if (encoding === "utf8" && code < 128) {
1092
+ value = code;
1093
+ }
1094
+ }
1095
+ } else if (typeof value === "number") {
1096
+ value = value & 255;
1097
+ } else if (typeof value === "boolean") {
1098
+ value = Number(value);
1099
+ }
1100
+ offset ?? (offset = 0);
1101
+ end ?? (end = this.length);
1102
+ if (offset < 0 || this.length < offset || this.length < end) {
1103
+ throw new RangeError("Out of range index");
1104
+ }
1105
+ if (end <= offset) {
1106
+ return this;
1107
+ }
1108
+ offset = offset >>> 0;
1109
+ end = end === void 0 ? this.length : end >>> 0;
1110
+ value || (value = 0);
1111
+ let i;
1112
+ if (typeof value === "number") {
1113
+ for (i = offset; i < end; ++i) {
1114
+ this[i] = value;
1115
+ }
1116
+ } else {
1117
+ const bytes = Buffer.isBuffer(value) ? value : Buffer.from(value, encoding);
1118
+ const len = bytes.length;
1119
+ if (len === 0) {
1120
+ throw new TypeError('The value "' + value + '" is invalid for argument "value"');
1121
+ }
1122
+ for (i = 0; i < end - offset; ++i) {
1123
+ this[i + offset] = bytes[i % len];
1124
+ }
1125
+ }
1126
+ return this;
1127
+ }
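// Editor's note: illustrative only (not part of the diff) — string fills repeat their bytes.
Buffer.alloc(5).fill("ab").toString();  // "ababa"
Buffer.alloc(3).fill(0xff);             // every byte set to 0xff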
1128
+ /**
1129
+ * Returns the index of the specified value.
1130
+ *
1131
+ * If `value` is:
1132
+ * - a string, `value` is interpreted according to the character encoding in `encoding`.
1133
+ * - a `Buffer` or `Uint8Array`, `value` will be used in its entirety. To compare a partial Buffer, use `slice()`.
1134
+ * - a number, `value` will be interpreted as an unsigned 8-bit integer value between `0` and `255`.
1135
+ *
1136
+ * Any other types will throw a `TypeError`.
1137
+ *
1138
+ * @param value What to search for.
1139
+ * @param byteOffset Where to begin searching in `buf`. If negative, then calculated from the end.
1140
+ * @param encoding If `value` is a string, this is the encoding used to search.
1141
+ * @returns The index of the first occurrence of `value` in `buf`, or `-1` if not found.
1142
+ */
1143
+ indexOf(value, byteOffset, encoding) {
1144
+ return this._bidirectionalIndexOf(this, value, byteOffset, encoding, true);
1145
+ }
1146
+ /**
1147
+ * Gets the last index of the specified value.
1148
+ *
1149
+ * @see indexOf()
1150
+ * @param value
1151
+ * @param byteOffset
1152
+ * @param encoding
1153
+ */
1154
+ lastIndexOf(value, byteOffset, encoding) {
1155
+ return this._bidirectionalIndexOf(this, value, byteOffset, encoding, false);
1156
+ }
1157
+ _bidirectionalIndexOf(buffer, val, byteOffset, encoding, dir) {
1158
+ if (buffer.length === 0) {
1159
+ return -1;
1160
+ }
1161
+ if (typeof byteOffset === "string") {
1162
+ encoding = byteOffset;
1163
+ byteOffset = 0;
1164
+ } else if (typeof byteOffset === "undefined") {
1165
+ byteOffset = 0;
1166
+ } else if (byteOffset > 2147483647) {
1167
+ byteOffset = 2147483647;
1168
+ } else if (byteOffset < -2147483648) {
1169
+ byteOffset = -2147483648;
1170
+ }
1171
+ byteOffset = +byteOffset;
1172
+ if (byteOffset !== byteOffset) {
1173
+ byteOffset = dir ? 0 : buffer.length - 1;
1174
+ }
1175
+ if (byteOffset < 0) {
1176
+ byteOffset = buffer.length + byteOffset;
1177
+ }
1178
+ if (byteOffset >= buffer.length) {
1179
+ if (dir) {
1180
+ return -1;
1181
+ } else {
1182
+ byteOffset = buffer.length - 1;
1183
+ }
1184
+ } else if (byteOffset < 0) {
1185
+ if (dir) {
1186
+ byteOffset = 0;
1187
+ } else {
1188
+ return -1;
1189
+ }
1190
+ }
1191
+ if (typeof val === "string") {
1192
+ val = Buffer.from(val, encoding);
1193
+ }
1194
+ if (Buffer.isBuffer(val)) {
1195
+ if (val.length === 0) {
1196
+ return -1;
1197
+ }
1198
+ return Buffer._arrayIndexOf(buffer, val, byteOffset, encoding, dir);
1199
+ } else if (typeof val === "number") {
1200
+ val = val & 255;
1201
+ if (typeof Uint8Array.prototype.indexOf === "function") {
1202
+ if (dir) {
1203
+ return Uint8Array.prototype.indexOf.call(buffer, val, byteOffset);
1204
+ } else {
1205
+ return Uint8Array.prototype.lastIndexOf.call(buffer, val, byteOffset);
1206
+ }
1207
+ }
1208
+ return Buffer._arrayIndexOf(buffer, Buffer.from([val]), byteOffset, encoding, dir);
1209
+ }
1210
+ throw new TypeError("val must be string, number or Buffer");
1211
+ }
1212
+ /**
1213
+ * Equivalent to `buf.indexOf() !== -1`.
1214
+ *
1215
+ * @param value
1216
+ * @param byteOffset
1217
+ * @param encoding
1218
+ */
1219
+ includes(value, byteOffset, encoding) {
1220
+ return this.indexOf(value, byteOffset, encoding) !== -1;
1221
+ }
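// Editor's note: illustrative only (not part of the diff).
const haystack = Buffer.from("hello world");
haystack.indexOf("world");  // 6
haystack.indexOf(0x6f);     // 4 (first 0x6f, "o")
haystack.includes("xyz");   // false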
1222
+ /**
1223
+ * Creates a new buffer from the given parameters.
1224
+ *
1225
+ * @param data
1226
+ * @param encoding
1227
+ */
1228
+ static from(a, b, c) {
1229
+ return new Buffer(a, b, c);
1230
+ }
1231
+ /**
1232
+ * Returns true if `obj` is a Buffer.
1233
+ *
1234
+ * @param obj
1235
+ */
1236
+ static isBuffer(obj) {
1237
+ return obj != null && obj !== Buffer.prototype && Buffer._isInstance(obj, Buffer);
1238
+ }
1239
+ /**
1240
+ * Returns true if `encoding` is a supported encoding.
1241
+ *
1242
+ * @param encoding
1243
+ */
1244
+ static isEncoding(encoding) {
1245
+ switch (encoding.toLowerCase()) {
1246
+ case "hex":
1247
+ case "utf8":
1248
+ case "ascii":
1249
+ case "binary":
1250
+ case "latin1":
1251
+ case "ucs2":
1252
+ case "utf16le":
1253
+ case "base64":
1254
+ return true;
1255
+ default:
1256
+ return false;
1257
+ }
1258
+ }
1259
+ /**
1260
+ * Gives the actual byte length of a string for an encoding. This is not the same as `string.length`, which
1261
+ * returns the number of UTF-16 code units in the string rather than bytes.
1262
+ *
1263
+ * @param string The string to test.
1264
+ * @param encoding The encoding to use for calculation. Default is `utf8`.
1265
+ */
1266
+ static byteLength(string, encoding) {
1267
+ if (Buffer.isBuffer(string)) {
1268
+ return string.length;
1269
+ }
1270
+ if (typeof string !== "string" && (ArrayBuffer.isView(string) || Buffer._isInstance(string, ArrayBuffer))) {
1271
+ return string.byteLength;
1272
+ }
1273
+ if (typeof string !== "string") {
1274
+ throw new TypeError(
1275
+ 'The "string" argument must be one of type string, Buffer, or ArrayBuffer. Received type ' + typeof string
1276
+ );
1277
+ }
1278
+ const len = string.length;
1279
+ const mustMatch = arguments.length > 2 && arguments[2] === true;
1280
+ if (!mustMatch && len === 0) {
1281
+ return 0;
1282
+ }
1283
+ switch (encoding?.toLowerCase()) {
1284
+ case "ascii":
1285
+ case "latin1":
1286
+ case "binary":
1287
+ return len;
1288
+ case "utf8":
1289
+ return Buffer._utf8ToBytes(string).length;
1290
+ case "hex":
1291
+ return len >>> 1;
1292
+ case "ucs2":
1293
+ case "utf16le":
1294
+ return len * 2;
1295
+ case "base64":
1296
+ return Buffer._base64ToBytes(string).length;
1297
+ default:
1298
+ return mustMatch ? -1 : Buffer._utf8ToBytes(string).length;
1299
+ }
1300
+ }
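// Editor's note: illustrative only (not part of the diff) — byte length vs. string length.
"€".length;                         // 1 (one UTF-16 code unit)
Buffer.byteLength("€", "utf8");     // 3 (three UTF-8 bytes)
Buffer.byteLength("68656c", "hex"); // 3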
1301
+ /**
1302
+ * Returns a Buffer which is the result of concatenating all the buffers in the list together.
1303
+ *
1304
+ * - If the list has no items, or if the `totalLength` is 0, then it returns a zero-length buffer.
1305
+ * - If the list has exactly one item, then the first item is returned.
1306
+ * - If the list has more than one item, then a new buffer is created.
1307
+ *
1308
+ * It is faster to provide the `totalLength` if it is known. However, if it is not provided, it will be
1309
+ * calculated at a small computational expense.
1310
+ *
1311
+ * @param list An array of Buffer objects to concatenate.
1312
+ * @param totalLength Total length of the buffers when concatenated.
1313
+ */
1314
+ static concat(list, totalLength) {
1315
+ if (!Array.isArray(list)) {
1316
+ throw new TypeError('"list" argument must be an Array of Buffers');
1317
+ }
1318
+ if (list.length === 0) {
1319
+ return Buffer.alloc(0);
1320
+ }
1321
+ let i;
1322
+ if (totalLength === void 0) {
1323
+ totalLength = 0;
1324
+ for (i = 0; i < list.length; ++i) {
1325
+ totalLength += list[i].length;
1326
+ }
1327
+ }
1328
+ const buffer = Buffer.allocUnsafe(totalLength);
1329
+ let pos = 0;
1330
+ for (i = 0; i < list.length; ++i) {
1331
+ let buf = list[i];
1332
+ if (Buffer._isInstance(buf, Uint8Array)) {
1333
+ if (pos + buf.length > buffer.length) {
1334
+ if (!Buffer.isBuffer(buf)) {
1335
+ buf = Buffer.from(buf);
1336
+ }
1337
+ buf.copy(buffer, pos);
1338
+ } else {
1339
+ Uint8Array.prototype.set.call(buffer, buf, pos);
1340
+ }
1341
+ } else if (!Buffer.isBuffer(buf)) {
1342
+ throw new TypeError('"list" argument must be an Array of Buffers');
1343
+ } else {
1344
+ buf.copy(buffer, pos);
1345
+ }
1346
+ pos += buf.length;
1347
+ }
1348
+ return buffer;
1349
+ }
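// Editor's note: illustrative only (not part of the diff).
const joined = Buffer.concat([Buffer.from("ab"), Buffer.from("cd")]);
joined.toString();                                         // "abcd"
Buffer.concat([Buffer.from("ab"), Buffer.from("cd")], 3);  // truncated to the given totalLength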
1350
+ /**
1351
+ * The same as `buf1.compare(buf2)`.
1352
+ */
1353
+ static compare(buf1, buf2) {
1354
+ if (Buffer._isInstance(buf1, Uint8Array)) {
1355
+ buf1 = Buffer.from(buf1, buf1.byteOffset, buf1.byteLength);
1356
+ }
1357
+ if (Buffer._isInstance(buf2, Uint8Array)) {
1358
+ buf2 = Buffer.from(buf2, buf2.byteOffset, buf2.byteLength);
1359
+ }
1360
+ if (!Buffer.isBuffer(buf1) || !Buffer.isBuffer(buf2)) {
1361
+ throw new TypeError('The "buf1", "buf2" arguments must be one of type Buffer or Uint8Array');
1362
+ }
1363
+ if (buf1 === buf2) {
1364
+ return 0;
1365
+ }
1366
+ let x = buf1.length;
1367
+ let y = buf2.length;
1368
+ for (let i = 0, len = Math.min(x, y); i < len; ++i) {
1369
+ if (buf1[i] !== buf2[i]) {
1370
+ x = buf1[i];
1371
+ y = buf2[i];
1372
+ break;
1373
+ }
1374
+ }
1375
+ if (x < y) {
1376
+ return -1;
1377
+ }
1378
+ if (y < x) {
1379
+ return 1;
1380
+ }
1381
+ return 0;
1382
+ }
1383
+ /**
1384
+ * Allocates a new buffer of `size` octets.
1385
+ *
1386
+ * @param size The number of octets to allocate.
1387
+ * @param fill If specified, the buffer will be initialized by calling `buf.fill(fill)`, or with zeroes otherwise.
1388
+ * @param encoding The encoding used for the call to `buf.fill()` while initializing.
1389
+ */
1390
+ static alloc(size, fill, encoding) {
1391
+ if (typeof size !== "number") {
1392
+ throw new TypeError('"size" argument must be of type number');
1393
+ } else if (size < 0) {
1394
+ throw new RangeError('The value "' + size + '" is invalid for option "size"');
1395
+ }
1396
+ if (size <= 0) {
1397
+ return new Buffer(size);
1398
+ }
1399
+ if (fill !== void 0) {
1400
+ return typeof encoding === "string" ? new Buffer(size).fill(fill, 0, size, encoding) : new Buffer(size).fill(fill);
1401
+ }
1402
+ return new Buffer(size);
1403
+ }
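// Editor's note: illustrative only (not part of the diff).
Buffer.alloc(4);                            // four zero-initialized bytes
Buffer.alloc(4, 0x61).toString();           // "aaaa"
Buffer.alloc(8, "6869", "hex").toString();  // "hihihihi" (fill pattern repeats)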
1404
+ /**
1405
+ * Allocates a new buffer of `size` octets without initializing memory. The contents of the buffer are unknown.
1406
+ *
1407
+ * @param size
1408
+ */
1409
+ static allocUnsafe(size) {
1410
+ if (typeof size !== "number") {
1411
+ throw new TypeError('"size" argument must be of type number');
1412
+ } else if (size < 0) {
1413
+ throw new RangeError('The value "' + size + '" is invalid for option "size"');
1414
+ }
1415
+ return new Buffer(size < 0 ? 0 : Buffer._checked(size) | 0);
1416
+ }
1417
+ /**
1418
+ * Returns true if the given `obj` is an instance of `type`.
1419
+ *
1420
+ * @param obj
1421
+ * @param type
1422
+ */
1423
+ static _isInstance(obj, type) {
1424
+ return obj instanceof type || obj != null && obj.constructor != null && obj.constructor.name != null && obj.constructor.name === type.name;
1425
+ }
1426
+ static _checked(length) {
1427
+ if (length >= K_MAX_LENGTH) {
1428
+ throw new RangeError(
1429
+ "Attempt to allocate Buffer larger than maximum size: 0x" + K_MAX_LENGTH.toString(16) + " bytes"
1430
+ );
1431
+ }
1432
+ return length | 0;
1433
+ }
1434
+ static _blitBuffer(src, dst, offset, length) {
1435
+ let i;
1436
+ for (i = 0; i < length; ++i) {
1437
+ if (i + offset >= dst.length || i >= src.length) {
1438
+ break;
1439
+ }
1440
+ dst[i + offset] = src[i];
1441
+ }
1442
+ return i;
1443
+ }
1444
+ static _utf8Write(buf, string, offset, length) {
1445
+ return Buffer._blitBuffer(Buffer._utf8ToBytes(string, buf.length - offset), buf, offset, length);
1446
+ }
1447
+ static _asciiWrite(buf, string, offset, length) {
1448
+ return Buffer._blitBuffer(Buffer._asciiToBytes(string), buf, offset, length);
1449
+ }
1450
+ static _base64Write(buf, string, offset, length) {
1451
+ return Buffer._blitBuffer(Buffer._base64ToBytes(string), buf, offset, length);
1452
+ }
1453
+ static _ucs2Write(buf, string, offset, length) {
1454
+ return Buffer._blitBuffer(Buffer._utf16leToBytes(string, buf.length - offset), buf, offset, length);
1455
+ }
1456
+ static _hexWrite(buf, string, offset, length) {
1457
+ offset = Number(offset) || 0;
1458
+ const remaining = buf.length - offset;
1459
+ if (!length) {
1460
+ length = remaining;
1461
+ } else {
1462
+ length = Number(length);
1463
+ if (length > remaining) {
1464
+ length = remaining;
1465
+ }
1466
+ }
1467
+ const strLen = string.length;
1468
+ if (length > strLen / 2) {
1469
+ length = strLen / 2;
1470
+ }
1471
+ let i;
1472
+ for (i = 0; i < length; ++i) {
1473
+ const parsed = parseInt(string.substr(i * 2, 2), 16);
1474
+ if (parsed !== parsed) {
1475
+ return i;
1476
+ }
1477
+ buf[offset + i] = parsed;
1478
+ }
1479
+ return i;
1480
+ }
1481
+ static _utf8ToBytes(string, units) {
1482
+ units = units || Infinity;
1483
+ const length = string.length;
1484
+ const bytes = [];
1485
+ let codePoint;
1486
+ let leadSurrogate = null;
1487
+ for (let i = 0; i < length; ++i) {
1488
+ codePoint = string.charCodeAt(i);
1489
+ if (codePoint > 55295 && codePoint < 57344) {
1490
+ if (!leadSurrogate) {
1491
+ if (codePoint > 56319) {
1492
+ if ((units -= 3) > -1) {
1493
+ bytes.push(239, 191, 189);
1494
+ }
1495
+ continue;
1496
+ } else if (i + 1 === length) {
1497
+ if ((units -= 3) > -1) {
1498
+ bytes.push(239, 191, 189);
1499
+ }
1500
+ continue;
1501
+ }
1502
+ leadSurrogate = codePoint;
1503
+ continue;
1504
+ }
1505
+ if (codePoint < 56320) {
1506
+ if ((units -= 3) > -1) {
1507
+ bytes.push(239, 191, 189);
1508
+ }
1509
+ leadSurrogate = codePoint;
1510
+ continue;
1511
+ }
1512
+ codePoint = (leadSurrogate - 55296 << 10 | codePoint - 56320) + 65536;
1513
+ } else if (leadSurrogate) {
1514
+ if ((units -= 3) > -1) {
1515
+ bytes.push(239, 191, 189);
1516
+ }
1517
+ }
1518
+ leadSurrogate = null;
1519
+ if (codePoint < 128) {
1520
+ if ((units -= 1) < 0) {
1521
+ break;
1522
+ }
1523
+ bytes.push(codePoint);
1524
+ } else if (codePoint < 2048) {
1525
+ if ((units -= 2) < 0) {
1526
+ break;
1527
+ }
1528
+ bytes.push(codePoint >> 6 | 192, codePoint & 63 | 128);
1529
+ } else if (codePoint < 65536) {
1530
+ if ((units -= 3) < 0) {
1531
+ break;
1532
+ }
1533
+ bytes.push(codePoint >> 12 | 224, codePoint >> 6 & 63 | 128, codePoint & 63 | 128);
1534
+ } else if (codePoint < 1114112) {
1535
+ if ((units -= 4) < 0) {
1536
+ break;
1537
+ }
1538
+ bytes.push(
1539
+ codePoint >> 18 | 240,
1540
+ codePoint >> 12 & 63 | 128,
1541
+ codePoint >> 6 & 63 | 128,
1542
+ codePoint & 63 | 128
1543
+ );
1544
+ } else {
1545
+ throw new Error("Invalid code point");
1546
+ }
1547
+ }
1548
+ return bytes;
1549
+ }
1550
+ static _base64ToBytes(str) {
1551
+ return toByteArray(base64clean(str));
1552
+ }
1553
+ static _asciiToBytes(str) {
1554
+ const byteArray = [];
1555
+ for (let i = 0; i < str.length; ++i) {
1556
+ byteArray.push(str.charCodeAt(i) & 255);
1557
+ }
1558
+ return byteArray;
1559
+ }
1560
+ static _utf16leToBytes(str, units) {
1561
+ let c, hi, lo;
1562
+ const byteArray = [];
1563
+ for (let i = 0; i < str.length; ++i) {
1564
+ if ((units -= 2) < 0) break;
1565
+ c = str.charCodeAt(i);
1566
+ hi = c >> 8;
1567
+ lo = c % 256;
1568
+ byteArray.push(lo);
1569
+ byteArray.push(hi);
1570
+ }
1571
+ return byteArray;
1572
+ }
1573
+ static _hexSlice(buf, start, end) {
1574
+ const len = buf.length;
1575
+ if (!start || start < 0) {
1576
+ start = 0;
1577
+ }
1578
+ if (!end || end < 0 || end > len) {
1579
+ end = len;
1580
+ }
1581
+ let out = "";
1582
+ for (let i = start; i < end; ++i) {
1583
+ out += hexSliceLookupTable[buf[i]];
1584
+ }
1585
+ return out;
1586
+ }
1587
+ static _base64Slice(buf, start, end) {
1588
+ if (start === 0 && end === buf.length) {
1589
+ return fromByteArray(buf);
1590
+ } else {
1591
+ return fromByteArray(buf.slice(start, end));
1592
+ }
1593
+ }
1594
+ static _utf8Slice(buf, start, end) {
1595
+ end = Math.min(buf.length, end);
1596
+ const res = [];
1597
+ let i = start;
1598
+ while (i < end) {
1599
+ const firstByte = buf[i];
1600
+ let codePoint = null;
1601
+ let bytesPerSequence = firstByte > 239 ? 4 : firstByte > 223 ? 3 : firstByte > 191 ? 2 : 1;
1602
+ if (i + bytesPerSequence <= end) {
1603
+ let secondByte, thirdByte, fourthByte, tempCodePoint;
1604
+ switch (bytesPerSequence) {
1605
+ case 1:
1606
+ if (firstByte < 128) {
1607
+ codePoint = firstByte;
1608
+ }
1609
+ break;
1610
+ case 2:
1611
+ secondByte = buf[i + 1];
1612
+ if ((secondByte & 192) === 128) {
1613
+ tempCodePoint = (firstByte & 31) << 6 | secondByte & 63;
1614
+ if (tempCodePoint > 127) {
1615
+ codePoint = tempCodePoint;
1616
+ }
1617
+ }
1618
+ break;
1619
+ case 3:
1620
+ secondByte = buf[i + 1];
1621
+ thirdByte = buf[i + 2];
1622
+ if ((secondByte & 192) === 128 && (thirdByte & 192) === 128) {
1623
+ tempCodePoint = (firstByte & 15) << 12 | (secondByte & 63) << 6 | thirdByte & 63;
1624
+ if (tempCodePoint > 2047 && (tempCodePoint < 55296 || tempCodePoint > 57343)) {
1625
+ codePoint = tempCodePoint;
1626
+ }
1627
+ }
1628
+ break;
1629
+ case 4:
1630
+ secondByte = buf[i + 1];
1631
+ thirdByte = buf[i + 2];
1632
+ fourthByte = buf[i + 3];
1633
+ if ((secondByte & 192) === 128 && (thirdByte & 192) === 128 && (fourthByte & 192) === 128) {
1634
+ tempCodePoint = (firstByte & 15) << 18 | (secondByte & 63) << 12 | (thirdByte & 63) << 6 | fourthByte & 63;
1635
+ if (tempCodePoint > 65535 && tempCodePoint < 1114112) {
1636
+ codePoint = tempCodePoint;
1637
+ }
1638
+ }
1639
+ }
1640
+ }
1641
+ if (codePoint === null) {
1642
+ codePoint = 65533;
1643
+ bytesPerSequence = 1;
1644
+ } else if (codePoint > 65535) {
1645
+ codePoint -= 65536;
1646
+ res.push(codePoint >>> 10 & 1023 | 55296);
1647
+ codePoint = 56320 | codePoint & 1023;
1648
+ }
1649
+ res.push(codePoint);
1650
+ i += bytesPerSequence;
1651
+ }
1652
+ return Buffer._decodeCodePointsArray(res);
1653
+ }
1654
+ static _decodeCodePointsArray(codePoints) {
1655
+ const len = codePoints.length;
1656
+ if (len <= MAX_ARGUMENTS_LENGTH) {
1657
+ return String.fromCharCode.apply(String, codePoints);
1658
+ }
1659
+ let res = "";
1660
+ let i = 0;
1661
+ while (i < len) {
1662
+ res += String.fromCharCode.apply(String, codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH));
1663
+ }
1664
+ return res;
1665
+ }
1666
+ static _asciiSlice(buf, start, end) {
1667
+ let ret = "";
1668
+ end = Math.min(buf.length, end);
1669
+ for (let i = start; i < end; ++i) {
1670
+ ret += String.fromCharCode(buf[i] & 127);
1671
+ }
1672
+ return ret;
1673
+ }
1674
+ static _latin1Slice(buf, start, end) {
1675
+ let ret = "";
1676
+ end = Math.min(buf.length, end);
1677
+ for (let i = start; i < end; ++i) {
1678
+ ret += String.fromCharCode(buf[i]);
1679
+ }
1680
+ return ret;
1681
+ }
1682
+ static _utf16leSlice(buf, start, end) {
1683
+ const bytes = buf.slice(start, end);
1684
+ let res = "";
1685
+ for (let i = 0; i < bytes.length - 1; i += 2) {
1686
+ res += String.fromCharCode(bytes[i] + bytes[i + 1] * 256);
1687
+ }
1688
+ return res;
1689
+ }
1690
+ static _arrayIndexOf(arr, val, byteOffset, encoding, dir) {
1691
+ let indexSize = 1;
1692
+ let arrLength = arr.length;
1693
+ let valLength = val.length;
1694
+ if (encoding !== void 0) {
1695
+ encoding = Buffer._getEncoding(encoding);
1696
+ if (encoding === "ucs2" || encoding === "utf16le") {
1697
+ if (arr.length < 2 || val.length < 2) {
1698
+ return -1;
1699
+ }
1700
+ indexSize = 2;
1701
+ arrLength /= 2;
1702
+ valLength /= 2;
1703
+ byteOffset /= 2;
1704
+ }
1705
+ }
1706
+ function read(buf, i2) {
1707
+ if (indexSize === 1) {
1708
+ return buf[i2];
1709
+ } else {
1710
+ return buf.readUInt16BE(i2 * indexSize);
1711
+ }
1712
+ }
1713
+ let i;
1714
+ if (dir) {
1715
+ let foundIndex = -1;
1716
+ for (i = byteOffset; i < arrLength; i++) {
1717
+ if (read(arr, i) === read(val, foundIndex === -1 ? 0 : i - foundIndex)) {
1718
+ if (foundIndex === -1) foundIndex = i;
1719
+ if (i - foundIndex + 1 === valLength) return foundIndex * indexSize;
1720
+ } else {
1721
+ if (foundIndex !== -1) i -= i - foundIndex;
1722
+ foundIndex = -1;
1723
+ }
1724
+ }
1725
+ } else {
1726
+ if (byteOffset + valLength > arrLength) {
1727
+ byteOffset = arrLength - valLength;
1728
+ }
1729
+ for (i = byteOffset; i >= 0; i--) {
1730
+ let found = true;
1731
+ for (let j = 0; j < valLength; j++) {
1732
+ if (read(arr, i + j) !== read(val, j)) {
1733
+ found = false;
1734
+ break;
1735
+ }
1736
+ }
1737
+ if (found) {
1738
+ return i;
1739
+ }
1740
+ }
1741
+ }
1742
+ return -1;
1743
+ }
1744
+ static _checkOffset(offset, ext, length) {
1745
+ if (offset % 1 !== 0 || offset < 0) throw new RangeError("offset is not uint");
1746
+ if (offset + ext > length) throw new RangeError("Trying to access beyond buffer length");
1747
+ }
1748
+ static _checkInt(buf, value, offset, ext, max, min) {
1749
+ if (!Buffer.isBuffer(buf)) throw new TypeError('"buffer" argument must be a Buffer instance');
1750
+ if (value > max || value < min) throw new RangeError('"value" argument is out of bounds');
1751
+ if (offset + ext > buf.length) throw new RangeError("Index out of range");
1752
+ }
1753
+ static _getEncoding(encoding) {
1754
+ let toLowerCase = false;
1755
+ let originalEncoding = "";
1756
+ for (; ; ) {
1757
+ switch (encoding) {
1758
+ case "hex":
1759
+ return "hex";
1760
+ case "utf8":
1761
+ return "utf8";
1762
+ case "ascii":
1763
+ return "ascii";
1764
+ case "binary":
1765
+ return "binary";
1766
+ case "latin1":
1767
+ return "latin1";
1768
+ case "ucs2":
1769
+ return "ucs2";
1770
+ case "utf16le":
1771
+ return "utf16le";
1772
+ case "base64":
1773
+ return "base64";
1774
+ default: {
1775
+ if (toLowerCase) {
1776
+ throw new TypeError("Unknown or unsupported encoding: " + originalEncoding);
1777
+ }
1778
+ toLowerCase = true;
1779
+ originalEncoding = encoding;
1780
+ encoding = encoding.toLowerCase();
1781
+ }
1782
+ }
1783
+ }
1784
+ }
1785
+ }
1786
+ const hexSliceLookupTable = function() {
1787
+ const alphabet = "0123456789abcdef";
1788
+ const table = new Array(256);
1789
+ for (let i = 0; i < 16; ++i) {
1790
+ const i16 = i * 16;
1791
+ for (let j = 0; j < 16; ++j) {
1792
+ table[i16 + j] = alphabet[i] + alphabet[j];
1793
+ }
1794
+ }
1795
+ return table;
1796
+ }();
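The hexSliceLookupTable IIFE above precomputes all 256 two-character hex strings so that _hexSlice can encode each byte with a single array lookup. A minimal, self-contained TypeScript sketch of the same idea (names are illustrative, not part of the package):

  const alphabet = "0123456789abcdef";
  const table = Array.from({ length: 256 }, (_, byte) => alphabet[byte >> 4] + alphabet[byte & 15]);
  const toHex = (bytes: Uint8Array) => Array.from(bytes, (b) => table[b]).join("");
  console.log(toHex(new Uint8Array([0, 15, 255]))); // "000fff"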
1797
+ const INVALID_BASE64_RE = /[^+/0-9A-Za-z-_]/g;
1798
+ function base64clean(str) {
1799
+ str = str.split("=")[0];
1800
+ str = str.trim().replace(INVALID_BASE64_RE, "");
1801
+ if (str.length < 2) return "";
1802
+ while (str.length % 4 !== 0) {
1803
+ str = str + "=";
1804
+ }
1805
+ return str;
1806
+ }
1807
+
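Taken together, the added lines above vendor a small Buffer-style shim into the bundle: base64 encode/decode (toByteArray/fromByteArray with base64clean), UTF-8/UTF-16LE/ASCII/latin1/hex conversions, substring search, and bounds checks, presumably so the client does not depend on a runtime-provided Buffer. For the cases it handles, the _utf8ToBytes encoder follows the usual WHATWG rules: multi-byte sequences for non-ASCII code points and the U+FFFD replacement sequence (0xEF 0xBF 0xBD) for lone surrogates. A quick self-contained comparison against the platform TextEncoder (illustrative only, not code from the package):

  const enc = new TextEncoder();
  console.log(Array.from(enc.encode("é")));      // [195, 169]            2-byte sequence
  console.log(Array.from(enc.encode("😀")));      // [240, 159, 152, 128]  4-byte sequence
  console.log(Array.from(enc.encode("\uD800"))); // [239, 191, 189]       lone surrogate becomes U+FFFD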
25
1808
  function notEmpty(value) {
26
1809
  return value !== null && value !== void 0;
27
1810
  }
@@ -225,8 +2008,7 @@ function buildPreviewBranchName({ org, branch }) {
225
2008
  function getPreviewBranch() {
226
2009
  try {
227
2010
  const { deployPreview, deployPreviewBranch, vercelGitCommitRef, vercelGitRepoOwner } = getEnvironment();
228
- if (deployPreviewBranch)
229
- return deployPreviewBranch;
2011
+ if (deployPreviewBranch) return deployPreviewBranch;
230
2012
  switch (deployPreview) {
231
2013
  case "vercel": {
232
2014
  if (!vercelGitCommitRef || !vercelGitRepoOwner) {
@@ -242,29 +2024,15 @@ function getPreviewBranch() {
242
2024
  }
243
2025
  }
244
2026
 
245
- var __accessCheck$6 = (obj, member, msg) => {
246
- if (!member.has(obj))
247
- throw TypeError("Cannot " + msg);
248
- };
249
- var __privateGet$5 = (obj, member, getter) => {
250
- __accessCheck$6(obj, member, "read from private field");
251
- return getter ? getter.call(obj) : member.get(obj);
252
- };
253
- var __privateAdd$6 = (obj, member, value) => {
254
- if (member.has(obj))
255
- throw TypeError("Cannot add the same private member more than once");
256
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
2027
+ var __typeError$7 = (msg) => {
2028
+ throw TypeError(msg);
257
2029
  };
258
- var __privateSet$4 = (obj, member, value, setter) => {
259
- __accessCheck$6(obj, member, "write to private field");
260
- setter ? setter.call(obj, value) : member.set(obj, value);
261
- return value;
262
- };
263
- var __privateMethod$4 = (obj, member, method) => {
264
- __accessCheck$6(obj, member, "access private method");
265
- return method;
266
- };
267
- var _fetch, _queue, _concurrency, _enqueue, enqueue_fn;
2030
+ var __accessCheck$7 = (obj, member, msg) => member.has(obj) || __typeError$7("Cannot " + msg);
2031
+ var __privateGet$6 = (obj, member, getter) => (__accessCheck$7(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
2032
+ var __privateAdd$7 = (obj, member, value) => member.has(obj) ? __typeError$7("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
2033
+ var __privateSet$5 = (obj, member, value, setter) => (__accessCheck$7(obj, member, "write to private field"), member.set(obj, value), value);
2034
+ var __privateMethod$4 = (obj, member, method) => (__accessCheck$7(obj, member, "access private method"), method);
2035
+ var _fetch, _queue, _concurrency, _ApiRequestPool_instances, enqueue_fn;
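The replaced helpers here are the compiled lowering of ES private class members; the new build emits the more compact expression-style form and a per-class brand WeakSet named _ApiRequestPool_instances instead of a dedicated _enqueue set, which looks like the output of a newer esbuild release. Behaviour is equivalent: reads and writes still go through an access check against a WeakMap/WeakSet. A simplified, self-contained sketch of the pattern (assumed semantics, illustrative names):

  const __typeError = (msg: string): never => { throw TypeError(msg); };
  const __accessCheck = (obj: object, member: WeakMap<object, number>, msg: string) =>
    member.has(obj) || __typeError("Cannot " + msg);
  const __privateGet = (obj: object, member: WeakMap<object, number>) =>
    (__accessCheck(obj, member, "read from private field"), member.get(obj));

  const _count = new WeakMap<object, number>();
  class Counter {
    constructor() { _count.set(this, 0); }
    value() { return __privateGet(this, _count); }
  }
  console.log(new Counter().value());  // 0
  // __privateGet({}, _count)          // would throw TypeError: Cannot read from private field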
268
2036
  const REQUEST_TIMEOUT = 5 * 60 * 1e3;
269
2037
  function getFetchImplementation(userFetch) {
270
2038
  const globalFetch = typeof fetch !== "undefined" ? fetch : void 0;
@@ -277,23 +2045,23 @@ function getFetchImplementation(userFetch) {
277
2045
  }
278
2046
  class ApiRequestPool {
279
2047
  constructor(concurrency = 10) {
280
- __privateAdd$6(this, _enqueue);
281
- __privateAdd$6(this, _fetch, void 0);
282
- __privateAdd$6(this, _queue, void 0);
283
- __privateAdd$6(this, _concurrency, void 0);
284
- __privateSet$4(this, _queue, []);
285
- __privateSet$4(this, _concurrency, concurrency);
2048
+ __privateAdd$7(this, _ApiRequestPool_instances);
2049
+ __privateAdd$7(this, _fetch);
2050
+ __privateAdd$7(this, _queue);
2051
+ __privateAdd$7(this, _concurrency);
2052
+ __privateSet$5(this, _queue, []);
2053
+ __privateSet$5(this, _concurrency, concurrency);
286
2054
  this.running = 0;
287
2055
  this.started = 0;
288
2056
  }
289
2057
  setFetch(fetch2) {
290
- __privateSet$4(this, _fetch, fetch2);
2058
+ __privateSet$5(this, _fetch, fetch2);
291
2059
  }
292
2060
  getFetch() {
293
- if (!__privateGet$5(this, _fetch)) {
2061
+ if (!__privateGet$6(this, _fetch)) {
294
2062
  throw new Error("Fetch not set");
295
2063
  }
296
- return __privateGet$5(this, _fetch);
2064
+ return __privateGet$6(this, _fetch);
297
2065
  }
298
2066
  request(url, options) {
299
2067
  const start = /* @__PURE__ */ new Date();
@@ -315,7 +2083,7 @@ class ApiRequestPool {
315
2083
  }
316
2084
  return response;
317
2085
  };
318
- return __privateMethod$4(this, _enqueue, enqueue_fn).call(this, async () => {
2086
+ return __privateMethod$4(this, _ApiRequestPool_instances, enqueue_fn).call(this, async () => {
319
2087
  return await runRequest();
320
2088
  });
321
2089
  }
@@ -323,21 +2091,21 @@ class ApiRequestPool {
323
2091
  _fetch = new WeakMap();
324
2092
  _queue = new WeakMap();
325
2093
  _concurrency = new WeakMap();
326
- _enqueue = new WeakSet();
2094
+ _ApiRequestPool_instances = new WeakSet();
327
2095
  enqueue_fn = function(task) {
328
- const promise = new Promise((resolve) => __privateGet$5(this, _queue).push(resolve)).finally(() => {
2096
+ const promise = new Promise((resolve) => __privateGet$6(this, _queue).push(resolve)).finally(() => {
329
2097
  this.started--;
330
2098
  this.running++;
331
2099
  }).then(() => task()).finally(() => {
332
2100
  this.running--;
333
- const next = __privateGet$5(this, _queue).shift();
2101
+ const next = __privateGet$6(this, _queue).shift();
334
2102
  if (next !== void 0) {
335
2103
  this.started++;
336
2104
  next();
337
2105
  }
338
2106
  });
339
- if (this.running + this.started < __privateGet$5(this, _concurrency)) {
340
- const next = __privateGet$5(this, _queue).shift();
2107
+ if (this.running + this.started < __privateGet$6(this, _concurrency)) {
2108
+ const next = __privateGet$6(this, _queue).shift();
341
2109
  if (next !== void 0) {
342
2110
  this.started++;
343
2111
  next();
@@ -526,7 +2294,7 @@ function defaultOnOpen(response) {
526
2294
  }
527
2295
  }
528
2296
 
529
- const VERSION = "0.29.2";
2297
+ const VERSION = "0.30.0";
530
2298
 
531
2299
  class ErrorWithCause extends Error {
532
2300
  constructor(message, options) {
@@ -606,35 +2374,30 @@ function parseProviderString(provider = "production") {
606
2374
  return provider;
607
2375
  }
608
2376
  const [main, workspaces] = provider.split(",");
609
- if (!main || !workspaces)
610
- return null;
2377
+ if (!main || !workspaces) return null;
611
2378
  return { main, workspaces };
612
2379
  }
613
2380
  function buildProviderString(provider) {
614
- if (isHostProviderAlias(provider))
615
- return provider;
2381
+ if (isHostProviderAlias(provider)) return provider;
616
2382
  return `${provider.main},${provider.workspaces}`;
617
2383
  }
618
2384
  function parseWorkspacesUrlParts(url) {
619
- if (!isString(url))
620
- return null;
2385
+ if (!isString(url)) return null;
621
2386
  const matches = {
622
- production: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.xata\.sh.*/),
623
- staging: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.staging-xata\.dev.*/),
624
- dev: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.dev-xata\.dev.*/),
625
- local: url.match(/(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:(\d+)/)
2387
+ production: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.xata\.sh\/db\/([^:]+):?(.*)?/),
2388
+ staging: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.staging-xata\.dev\/db\/([^:]+):?(.*)?/),
2389
+ dev: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.dev-xata\.dev\/db\/([^:]+):?(.*)?/),
2390
+ local: url.match(/(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:(?:\d+)\/db\/([^:]+):?(.*)?/)
626
2391
  };
627
2392
  const [host, match] = Object.entries(matches).find(([, match2]) => match2 !== null) ?? [];
628
- if (!isHostProviderAlias(host) || !match)
629
- return null;
630
- return { workspace: match[1], region: match[2], host };
2393
+ if (!isHostProviderAlias(host) || !match) return null;
2394
+ return { workspace: match[1], region: match[2], database: match[3], branch: match[4], host };
631
2395
  }
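Note the behavioural change in parseWorkspacesUrlParts: each host pattern now also requires a /db/<database> path segment with an optional :<branch> suffix, and the parsed result gains database and branch fields alongside workspace, region and host. A hedged sketch replaying the updated production pattern against a made-up URL (the workspace, region, database and branch values are invented):

  const productionRe = /(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.xata\.sh\/db\/([^:]+):?(.*)?/;
  const m = "https://my-workspace-abc123.us-east-1.xata.sh/db/mydb:main".match(productionRe);
  console.log(m?.slice(1)); // ["my-workspace-abc123", "us-east-1", "mydb", "main"]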
632
2396
 
633
2397
  const pool = new ApiRequestPool();
634
2398
  const resolveUrl = (url, queryParams = {}, pathParams = {}) => {
635
2399
  const cleanQueryParams = Object.entries(queryParams).reduce((acc, [key, value]) => {
636
- if (value === void 0 || value === null)
637
- return acc;
2400
+ if (value === void 0 || value === null) return acc;
638
2401
  return { ...acc, [key]: value };
639
2402
  }, {});
640
2403
  const query = new URLSearchParams(cleanQueryParams).toString();
@@ -682,8 +2445,7 @@ function hostHeader(url) {
682
2445
  return groups?.host ? { Host: groups.host } : {};
683
2446
  }
684
2447
  async function parseBody(body, headers) {
685
- if (!isDefined(body))
686
- return void 0;
2448
+ if (!isDefined(body)) return void 0;
687
2449
  if (isBlob(body) || typeof body.text === "function") {
688
2450
  return body;
689
2451
  }
@@ -738,8 +2500,6 @@ async function fetch$1({
738
2500
  "X-Xata-Client-ID": clientID ?? defaultClientID,
739
2501
  "X-Xata-Session-ID": sessionID ?? generateUUID(),
740
2502
  "X-Xata-Agent": xataAgent,
741
- // Force field rename to xata_ internal properties
742
- "X-Features": compact(["feat-internal-field-rename-api=1", customHeaders?.["X-Features"]]).join(" "),
743
2503
  ...customHeaders,
744
2504
  ...hostHeader(fullUrl),
745
2505
  Authorization: `Bearer ${apiKey}`
@@ -762,8 +2522,7 @@ async function fetch$1({
762
2522
  [TraceAttributes.CLOUDFLARE_RAY_ID]: response.headers?.get("cf-ray") ?? void 0
763
2523
  });
764
2524
  const message = response.headers?.get("x-xata-message");
765
- if (message)
766
- console.warn(message);
2525
+ if (message) console.warn(message);
767
2526
  if (response.status === 204) {
768
2527
  return {};
769
2528
  }
@@ -847,16 +2606,108 @@ function parseUrl(url) {
847
2606
 
848
2607
  const dataPlaneFetch = async (options) => fetch$1({ ...options, endpoint: "dataPlane" });
849
2608
 
850
- const applyMigration = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/apply", method: "post", ...variables, signal });
2609
+ const getTasks = (variables, signal) => dataPlaneFetch({
2610
+ url: "/tasks",
2611
+ method: "get",
2612
+ ...variables,
2613
+ signal
2614
+ });
2615
+ const getTaskStatus = (variables, signal) => dataPlaneFetch({
2616
+ url: "/tasks/{taskId}",
2617
+ method: "get",
2618
+ ...variables,
2619
+ signal
2620
+ });
2621
+ const listClusterBranches = (variables, signal) => dataPlaneFetch({
2622
+ url: "/cluster/{clusterId}/branches",
2623
+ method: "get",
2624
+ ...variables,
2625
+ signal
2626
+ });
2627
+ const listClusterExtensions = (variables, signal) => dataPlaneFetch({
2628
+ url: "/cluster/{clusterId}/extensions",
2629
+ method: "get",
2630
+ ...variables,
2631
+ signal
2632
+ });
2633
+ const installClusterExtension = (variables, signal) => dataPlaneFetch({
2634
+ url: "/cluster/{clusterId}/extensions",
2635
+ method: "post",
2636
+ ...variables,
2637
+ signal
2638
+ });
2639
+ const dropClusterExtension = (variables, signal) => dataPlaneFetch({
2640
+ url: "/cluster/{clusterId}/extensions",
2641
+ method: "delete",
2642
+ ...variables,
2643
+ signal
2644
+ });
2645
+ const getClusterMetrics = (variables, signal) => dataPlaneFetch({
2646
+ url: "/cluster/{clusterId}/metrics",
2647
+ method: "get",
2648
+ ...variables,
2649
+ signal
2650
+ });
2651
+ const applyMigration = (variables, signal) => dataPlaneFetch({
2652
+ url: "/db/{dbBranchName}/migrations/apply",
2653
+ method: "post",
2654
+ ...variables,
2655
+ signal
2656
+ });
2657
+ const startMigration = (variables, signal) => dataPlaneFetch({
2658
+ url: "/db/{dbBranchName}/migrations/start",
2659
+ method: "post",
2660
+ ...variables,
2661
+ signal
2662
+ });
2663
+ const completeMigration = (variables, signal) => dataPlaneFetch({
2664
+ url: "/db/{dbBranchName}/migrations/complete",
2665
+ method: "post",
2666
+ ...variables,
2667
+ signal
2668
+ });
2669
+ const rollbackMigration = (variables, signal) => dataPlaneFetch({
2670
+ url: "/db/{dbBranchName}/migrations/rollback",
2671
+ method: "post",
2672
+ ...variables,
2673
+ signal
2674
+ });
851
2675
  const adaptTable = (variables, signal) => dataPlaneFetch({
852
2676
  url: "/db/{dbBranchName}/migrations/adapt/{tableName}",
853
2677
  method: "post",
854
2678
  ...variables,
855
2679
  signal
856
2680
  });
857
- const getBranchMigrationJobStatus = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/status", method: "get", ...variables, signal });
858
- const getMigrationJobStatus = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/jobs/{jobId}", method: "get", ...variables, signal });
859
- const getMigrationHistory = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/history", method: "get", ...variables, signal });
2681
+ const adaptAllTables = (variables, signal) => dataPlaneFetch({
2682
+ url: "/db/{dbBranchName}/migrations/adapt",
2683
+ method: "post",
2684
+ ...variables,
2685
+ signal
2686
+ });
2687
+ const getBranchMigrationJobStatus = (variables, signal) => dataPlaneFetch({
2688
+ url: "/db/{dbBranchName}/migrations/status",
2689
+ method: "get",
2690
+ ...variables,
2691
+ signal
2692
+ });
2693
+ const getMigrationJobs = (variables, signal) => dataPlaneFetch({
2694
+ url: "/db/{dbBranchName}/migrations/jobs",
2695
+ method: "get",
2696
+ ...variables,
2697
+ signal
2698
+ });
2699
+ const getMigrationJobStatus = (variables, signal) => dataPlaneFetch({
2700
+ url: "/db/{dbBranchName}/migrations/jobs/{jobId}",
2701
+ method: "get",
2702
+ ...variables,
2703
+ signal
2704
+ });
2705
+ const getMigrationHistory = (variables, signal) => dataPlaneFetch({
2706
+ url: "/db/{dbBranchName}/migrations/history",
2707
+ method: "get",
2708
+ ...variables,
2709
+ signal
2710
+ });
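Beyond applyMigration, the data plane now exposes an asynchronous migration lifecycle (startMigration, completeMigration, rollbackMigration), adaptAllTables, and job inspection via getMigrationJobs and getMigrationJobStatus, all thin wrappers over dataPlaneFetch. A hedged polling sketch for job-style endpoints; the status values and the function wiring are placeholders, not taken from the Xata API:

  type JobStatus = { status: "pending" | "in_progress" | "completed" | "failed" };
  async function waitForJob(fetchStatus: () => Promise<JobStatus>, intervalMs = 1000): Promise<JobStatus> {
    for (;;) {
      const job = await fetchStatus(); // e.g. a getMigrationJobStatus call bound to real fetcher options
      if (job.status === "completed" || job.status === "failed") return job;
      await new Promise((resolve) => setTimeout(resolve, intervalMs));
    }
  }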
860
2711
  const getBranchList = (variables, signal) => dataPlaneFetch({
861
2712
  url: "/dbs/{dbName}",
862
2713
  method: "get",
@@ -869,82 +2720,181 @@ const getDatabaseSettings = (variables, signal) => dataPlaneFetch({
869
2720
  ...variables,
870
2721
  signal
871
2722
  });
872
- const updateDatabaseSettings = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/settings", method: "patch", ...variables, signal });
873
- const getBranchDetails = (variables, signal) => dataPlaneFetch({
874
- url: "/db/{dbBranchName}",
2723
+ const updateDatabaseSettings = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/settings", method: "patch", ...variables, signal });
2724
+ const createBranchAsync = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/async", method: "put", ...variables, signal });
2725
+ const getBranchDetails = (variables, signal) => dataPlaneFetch({
2726
+ url: "/db/{dbBranchName}",
2727
+ method: "get",
2728
+ ...variables,
2729
+ signal
2730
+ });
2731
+ const createBranch = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}", method: "put", ...variables, signal });
2732
+ const deleteBranch = (variables, signal) => dataPlaneFetch({
2733
+ url: "/db/{dbBranchName}",
2734
+ method: "delete",
2735
+ ...variables,
2736
+ signal
2737
+ });
2738
+ const getSchema = (variables, signal) => dataPlaneFetch({
2739
+ url: "/db/{dbBranchName}/schema",
2740
+ method: "get",
2741
+ ...variables,
2742
+ signal
2743
+ });
2744
+ const getSchemas = (variables, signal) => dataPlaneFetch({
2745
+ url: "/db/{dbBranchName}/schemas",
2746
+ method: "get",
2747
+ ...variables,
2748
+ signal
2749
+ });
2750
+ const copyBranch = (variables, signal) => dataPlaneFetch({
2751
+ url: "/db/{dbBranchName}/copy",
2752
+ method: "post",
2753
+ ...variables,
2754
+ signal
2755
+ });
2756
+ const getBranchMoveStatus = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/move", method: "get", ...variables, signal });
2757
+ const moveBranch = (variables, signal) => dataPlaneFetch({
2758
+ url: "/db/{dbBranchName}/move",
2759
+ method: "put",
2760
+ ...variables,
2761
+ signal
2762
+ });
2763
+ const updateBranchMetadata = (variables, signal) => dataPlaneFetch({
2764
+ url: "/db/{dbBranchName}/metadata",
2765
+ method: "put",
2766
+ ...variables,
2767
+ signal
2768
+ });
2769
+ const getBranchMetadata = (variables, signal) => dataPlaneFetch({
2770
+ url: "/db/{dbBranchName}/metadata",
2771
+ method: "get",
2772
+ ...variables,
2773
+ signal
2774
+ });
2775
+ const getBranchStats = (variables, signal) => dataPlaneFetch({
2776
+ url: "/db/{dbBranchName}/stats",
2777
+ method: "get",
2778
+ ...variables,
2779
+ signal
2780
+ });
2781
+ const getGitBranchesMapping = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "get", ...variables, signal });
2782
+ const addGitBranchesEntry = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "post", ...variables, signal });
2783
+ const removeGitBranchesEntry = (variables, signal) => dataPlaneFetch({
2784
+ url: "/dbs/{dbName}/gitBranches",
2785
+ method: "delete",
2786
+ ...variables,
2787
+ signal
2788
+ });
2789
+ const resolveBranch = (variables, signal) => dataPlaneFetch({
2790
+ url: "/dbs/{dbName}/resolveBranch",
2791
+ method: "get",
2792
+ ...variables,
2793
+ signal
2794
+ });
2795
+ const getBranchMigrationHistory = (variables, signal) => dataPlaneFetch({
2796
+ url: "/db/{dbBranchName}/migrations",
2797
+ method: "get",
2798
+ ...variables,
2799
+ signal
2800
+ });
2801
+ const getBranchMigrationPlan = (variables, signal) => dataPlaneFetch({
2802
+ url: "/db/{dbBranchName}/migrations/plan",
2803
+ method: "post",
2804
+ ...variables,
2805
+ signal
2806
+ });
2807
+ const executeBranchMigrationPlan = (variables, signal) => dataPlaneFetch({
2808
+ url: "/db/{dbBranchName}/migrations/execute",
2809
+ method: "post",
2810
+ ...variables,
2811
+ signal
2812
+ });
2813
+ const queryMigrationRequests = (variables, signal) => dataPlaneFetch({
2814
+ url: "/dbs/{dbName}/migrations/query",
2815
+ method: "post",
2816
+ ...variables,
2817
+ signal
2818
+ });
2819
+ const createMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations", method: "post", ...variables, signal });
2820
+ const getMigrationRequest = (variables, signal) => dataPlaneFetch({
2821
+ url: "/dbs/{dbName}/migrations/{mrNumber}",
2822
+ method: "get",
2823
+ ...variables,
2824
+ signal
2825
+ });
2826
+ const updateMigrationRequest = (variables, signal) => dataPlaneFetch({
2827
+ url: "/dbs/{dbName}/migrations/{mrNumber}",
2828
+ method: "patch",
2829
+ ...variables,
2830
+ signal
2831
+ });
2832
+ const listMigrationRequestsCommits = (variables, signal) => dataPlaneFetch({
2833
+ url: "/dbs/{dbName}/migrations/{mrNumber}/commits",
2834
+ method: "post",
2835
+ ...variables,
2836
+ signal
2837
+ });
2838
+ const compareMigrationRequest = (variables, signal) => dataPlaneFetch({
2839
+ url: "/dbs/{dbName}/migrations/{mrNumber}/compare",
2840
+ method: "post",
2841
+ ...variables,
2842
+ signal
2843
+ });
2844
+ const getMigrationRequestIsMerged = (variables, signal) => dataPlaneFetch({
2845
+ url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
875
2846
  method: "get",
876
2847
  ...variables,
877
2848
  signal
878
2849
  });
879
- const createBranch = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}", method: "put", ...variables, signal });
880
- const deleteBranch = (variables, signal) => dataPlaneFetch({
881
- url: "/db/{dbBranchName}",
882
- method: "delete",
2850
+ const mergeMigrationRequest = (variables, signal) => dataPlaneFetch({
2851
+ url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
2852
+ method: "post",
883
2853
  ...variables,
884
2854
  signal
885
2855
  });
886
- const getSchema = (variables, signal) => dataPlaneFetch({
887
- url: "/db/{dbBranchName}/schema",
888
- method: "get",
2856
+ const getBranchSchemaHistory = (variables, signal) => dataPlaneFetch({
2857
+ url: "/db/{dbBranchName}/schema/history",
2858
+ method: "post",
889
2859
  ...variables,
890
2860
  signal
891
2861
  });
892
- const copyBranch = (variables, signal) => dataPlaneFetch({
893
- url: "/db/{dbBranchName}/copy",
2862
+ const compareBranchWithUserSchema = (variables, signal) => dataPlaneFetch({
2863
+ url: "/db/{dbBranchName}/schema/compare",
894
2864
  method: "post",
895
2865
  ...variables,
896
2866
  signal
897
2867
  });
898
- const updateBranchMetadata = (variables, signal) => dataPlaneFetch({
899
- url: "/db/{dbBranchName}/metadata",
900
- method: "put",
2868
+ const compareBranchSchemas = (variables, signal) => dataPlaneFetch({
2869
+ url: "/db/{dbBranchName}/schema/compare/{branchName}",
2870
+ method: "post",
901
2871
  ...variables,
902
2872
  signal
903
2873
  });
904
- const getBranchMetadata = (variables, signal) => dataPlaneFetch({
905
- url: "/db/{dbBranchName}/metadata",
906
- method: "get",
2874
+ const updateBranchSchema = (variables, signal) => dataPlaneFetch({
2875
+ url: "/db/{dbBranchName}/schema/update",
2876
+ method: "post",
907
2877
  ...variables,
908
2878
  signal
909
2879
  });
910
- const getBranchStats = (variables, signal) => dataPlaneFetch({
911
- url: "/db/{dbBranchName}/stats",
912
- method: "get",
2880
+ const previewBranchSchemaEdit = (variables, signal) => dataPlaneFetch({
2881
+ url: "/db/{dbBranchName}/schema/preview",
2882
+ method: "post",
913
2883
  ...variables,
914
2884
  signal
915
2885
  });
916
- const getGitBranchesMapping = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "get", ...variables, signal });
917
- const addGitBranchesEntry = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "post", ...variables, signal });
918
- const removeGitBranchesEntry = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "delete", ...variables, signal });
919
- const resolveBranch = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/resolveBranch", method: "get", ...variables, signal });
920
- const getBranchMigrationHistory = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations", method: "get", ...variables, signal });
921
- const getBranchMigrationPlan = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/plan", method: "post", ...variables, signal });
922
- const executeBranchMigrationPlan = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/execute", method: "post", ...variables, signal });
923
- const queryMigrationRequests = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/query", method: "post", ...variables, signal });
924
- const createMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations", method: "post", ...variables, signal });
925
- const getMigrationRequest = (variables, signal) => dataPlaneFetch({
926
- url: "/dbs/{dbName}/migrations/{mrNumber}",
927
- method: "get",
2886
+ const applyBranchSchemaEdit = (variables, signal) => dataPlaneFetch({
2887
+ url: "/db/{dbBranchName}/schema/apply",
2888
+ method: "post",
928
2889
  ...variables,
929
2890
  signal
930
2891
  });
931
- const updateMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/{mrNumber}", method: "patch", ...variables, signal });
932
- const listMigrationRequestsCommits = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/{mrNumber}/commits", method: "post", ...variables, signal });
933
- const compareMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/{mrNumber}/compare", method: "post", ...variables, signal });
934
- const getMigrationRequestIsMerged = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/{mrNumber}/merge", method: "get", ...variables, signal });
935
- const mergeMigrationRequest = (variables, signal) => dataPlaneFetch({
936
- url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
2892
+ const pushBranchMigrations = (variables, signal) => dataPlaneFetch({
2893
+ url: "/db/{dbBranchName}/schema/push",
937
2894
  method: "post",
938
2895
  ...variables,
939
2896
  signal
940
2897
  });
941
- const getBranchSchemaHistory = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/history", method: "post", ...variables, signal });
942
- const compareBranchWithUserSchema = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/compare", method: "post", ...variables, signal });
943
- const compareBranchSchemas = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/compare/{branchName}", method: "post", ...variables, signal });
944
- const updateBranchSchema = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/update", method: "post", ...variables, signal });
945
- const previewBranchSchemaEdit = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/preview", method: "post", ...variables, signal });
946
- const applyBranchSchemaEdit = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/apply", method: "post", ...variables, signal });
947
- const pushBranchMigrations = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/push", method: "post", ...variables, signal });
948
2898
  const createTable = (variables, signal) => dataPlaneFetch({
949
2899
  url: "/db/{dbBranchName}/tables/{tableName}",
950
2900
  method: "put",
@@ -957,14 +2907,24 @@ const deleteTable = (variables, signal) => dataPlaneFetch({
957
2907
  ...variables,
958
2908
  signal
959
2909
  });
960
- const updateTable = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}", method: "patch", ...variables, signal });
2910
+ const updateTable = (variables, signal) => dataPlaneFetch({
2911
+ url: "/db/{dbBranchName}/tables/{tableName}",
2912
+ method: "patch",
2913
+ ...variables,
2914
+ signal
2915
+ });
961
2916
  const getTableSchema = (variables, signal) => dataPlaneFetch({
962
2917
  url: "/db/{dbBranchName}/tables/{tableName}/schema",
963
2918
  method: "get",
964
2919
  ...variables,
965
2920
  signal
966
2921
  });
967
- const setTableSchema = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/schema", method: "put", ...variables, signal });
2922
+ const setTableSchema = (variables, signal) => dataPlaneFetch({
2923
+ url: "/db/{dbBranchName}/tables/{tableName}/schema",
2924
+ method: "put",
2925
+ ...variables,
2926
+ signal
2927
+ });
968
2928
  const getTableColumns = (variables, signal) => dataPlaneFetch({
969
2929
  url: "/db/{dbBranchName}/tables/{tableName}/columns",
970
2930
  method: "get",
@@ -972,7 +2932,12 @@ const getTableColumns = (variables, signal) => dataPlaneFetch({
972
2932
  signal
973
2933
  });
974
2934
  const addTableColumn = (variables, signal) => dataPlaneFetch(
975
- { url: "/db/{dbBranchName}/tables/{tableName}/columns", method: "post", ...variables, signal }
2935
+ {
2936
+ url: "/db/{dbBranchName}/tables/{tableName}/columns",
2937
+ method: "post",
2938
+ ...variables,
2939
+ signal
2940
+ }
976
2941
  );
977
2942
  const getColumn = (variables, signal) => dataPlaneFetch({
978
2943
  url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
@@ -980,15 +2945,30 @@ const getColumn = (variables, signal) => dataPlaneFetch({
980
2945
  ...variables,
981
2946
  signal
982
2947
  });
983
- const updateColumn = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}", method: "patch", ...variables, signal });
2948
+ const updateColumn = (variables, signal) => dataPlaneFetch({
2949
+ url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
2950
+ method: "patch",
2951
+ ...variables,
2952
+ signal
2953
+ });
984
2954
  const deleteColumn = (variables, signal) => dataPlaneFetch({
985
2955
  url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
986
2956
  method: "delete",
987
2957
  ...variables,
988
2958
  signal
989
2959
  });
990
- const branchTransaction = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/transaction", method: "post", ...variables, signal });
991
- const insertRecord = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/data", method: "post", ...variables, signal });
2960
+ const branchTransaction = (variables, signal) => dataPlaneFetch({
2961
+ url: "/db/{dbBranchName}/transaction",
2962
+ method: "post",
2963
+ ...variables,
2964
+ signal
2965
+ });
2966
+ const insertRecord = (variables, signal) => dataPlaneFetch({
2967
+ url: "/db/{dbBranchName}/tables/{tableName}/data",
2968
+ method: "post",
2969
+ ...variables,
2970
+ signal
2971
+ });
992
2972
  const getFileItem = (variables, signal) => dataPlaneFetch({
993
2973
  url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}/column/{columnName}/file/{fileId}",
994
2974
  method: "get",
@@ -1031,11 +3011,36 @@ const getRecord = (variables, signal) => dataPlaneFetch({
1031
3011
  ...variables,
1032
3012
  signal
1033
3013
  });
1034
- const insertRecordWithID = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}", method: "put", ...variables, signal });
1035
- const updateRecordWithID = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}", method: "patch", ...variables, signal });
1036
- const upsertRecordWithID = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}", method: "post", ...variables, signal });
1037
- const deleteRecord = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}", method: "delete", ...variables, signal });
1038
- const bulkInsertTableRecords = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/bulk", method: "post", ...variables, signal });
3014
+ const insertRecordWithID = (variables, signal) => dataPlaneFetch({
3015
+ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
3016
+ method: "put",
3017
+ ...variables,
3018
+ signal
3019
+ });
3020
+ const updateRecordWithID = (variables, signal) => dataPlaneFetch({
3021
+ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
3022
+ method: "patch",
3023
+ ...variables,
3024
+ signal
3025
+ });
3026
+ const upsertRecordWithID = (variables, signal) => dataPlaneFetch({
3027
+ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
3028
+ method: "post",
3029
+ ...variables,
3030
+ signal
3031
+ });
3032
+ const deleteRecord = (variables, signal) => dataPlaneFetch({
3033
+ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
3034
+ method: "delete",
3035
+ ...variables,
3036
+ signal
3037
+ });
3038
+ const bulkInsertTableRecords = (variables, signal) => dataPlaneFetch({
3039
+ url: "/db/{dbBranchName}/tables/{tableName}/bulk",
3040
+ method: "post",
3041
+ ...variables,
3042
+ signal
3043
+ });
1039
3044
  const queryTable = (variables, signal) => dataPlaneFetch({
1040
3045
  url: "/db/{dbBranchName}/tables/{tableName}/query",
1041
3046
  method: "post",
@@ -1054,16 +3059,36 @@ const searchTable = (variables, signal) => dataPlaneFetch({
1054
3059
  ...variables,
1055
3060
  signal
1056
3061
  });
1057
- const vectorSearchTable = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/vectorSearch", method: "post", ...variables, signal });
3062
+ const vectorSearchTable = (variables, signal) => dataPlaneFetch({
3063
+ url: "/db/{dbBranchName}/tables/{tableName}/vectorSearch",
3064
+ method: "post",
3065
+ ...variables,
3066
+ signal
3067
+ });
1058
3068
  const askTable = (variables, signal) => dataPlaneFetch({
1059
3069
  url: "/db/{dbBranchName}/tables/{tableName}/ask",
1060
3070
  method: "post",
1061
3071
  ...variables,
1062
3072
  signal
1063
3073
  });
1064
- const askTableSession = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/ask/{sessionId}", method: "post", ...variables, signal });
1065
- const summarizeTable = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/summarize", method: "post", ...variables, signal });
1066
- const aggregateTable = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/aggregate", method: "post", ...variables, signal });
3074
+ const askTableSession = (variables, signal) => dataPlaneFetch({
3075
+ url: "/db/{dbBranchName}/tables/{tableName}/ask/{sessionId}",
3076
+ method: "post",
3077
+ ...variables,
3078
+ signal
3079
+ });
3080
+ const summarizeTable = (variables, signal) => dataPlaneFetch({
3081
+ url: "/db/{dbBranchName}/tables/{tableName}/summarize",
3082
+ method: "post",
3083
+ ...variables,
3084
+ signal
3085
+ });
3086
+ const aggregateTable = (variables, signal) => dataPlaneFetch({
3087
+ url: "/db/{dbBranchName}/tables/{tableName}/aggregate",
3088
+ method: "post",
3089
+ ...variables,
3090
+ signal
3091
+ });
1067
3092
  const fileAccess = (variables, signal) => dataPlaneFetch({
1068
3093
  url: "/file/{fileId}",
1069
3094
  method: "get",
@@ -1082,14 +3107,34 @@ const sqlQuery = (variables, signal) => dataPlaneFetch({
1082
3107
  ...variables,
1083
3108
  signal
1084
3109
  });
3110
+ const sqlBatchQuery = (variables, signal) => dataPlaneFetch({
3111
+ url: "/db/{dbBranchName}/sql/batch",
3112
+ method: "post",
3113
+ ...variables,
3114
+ signal
3115
+ });
1085
3116
  const operationsByTag$2 = {
3117
+ tasks: { getTasks, getTaskStatus },
3118
+ cluster: {
3119
+ listClusterBranches,
3120
+ listClusterExtensions,
3121
+ installClusterExtension,
3122
+ dropClusterExtension,
3123
+ getClusterMetrics
3124
+ },
1086
3125
  migrations: {
1087
3126
  applyMigration,
3127
+ startMigration,
3128
+ completeMigration,
3129
+ rollbackMigration,
1088
3130
  adaptTable,
3131
+ adaptAllTables,
1089
3132
  getBranchMigrationJobStatus,
3133
+ getMigrationJobs,
1090
3134
  getMigrationJobStatus,
1091
3135
  getMigrationHistory,
1092
3136
  getSchema,
3137
+ getSchemas,
1093
3138
  getBranchMigrationHistory,
1094
3139
  getBranchMigrationPlan,
1095
3140
  executeBranchMigrationPlan,
@@ -1103,10 +3148,13 @@ const operationsByTag$2 = {
1103
3148
  },
1104
3149
  branch: {
1105
3150
  getBranchList,
3151
+ createBranchAsync,
1106
3152
  getBranchDetails,
1107
3153
  createBranch,
1108
3154
  deleteBranch,
1109
3155
  copyBranch,
3156
+ getBranchMoveStatus,
3157
+ moveBranch,
1110
3158
  updateBranchMetadata,
1111
3159
  getBranchMetadata,
1112
3160
  getBranchStats,
@@ -1148,7 +3196,16 @@ const operationsByTag$2 = {
1148
3196
  deleteRecord,
1149
3197
  bulkInsertTableRecords
1150
3198
  },
1151
- files: { getFileItem, putFileItem, deleteFileItem, getFile, putFile, deleteFile, fileAccess, fileUpload },
3199
+ files: {
3200
+ getFileItem,
3201
+ putFileItem,
3202
+ deleteFileItem,
3203
+ getFile,
3204
+ putFile,
3205
+ deleteFile,
3206
+ fileAccess,
3207
+ fileUpload
3208
+ },
1152
3209
  searchAndFilter: {
1153
3210
  queryTable,
1154
3211
  searchBranch,
@@ -1159,7 +3216,7 @@ const operationsByTag$2 = {
1159
3216
  summarizeTable,
1160
3217
  aggregateTable
1161
3218
  },
1162
- sql: { sqlQuery }
3219
+ sql: { sqlQuery, sqlBatchQuery }
1163
3220
  };
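The operationsByTag$2 registry above picks up the new surface area in one place: tasks and cluster tags, the migration lifecycle and job endpoints, getSchemas, createBranchAsync, the branch move operations, and sqlBatchQuery next to sqlQuery. Every entry follows the same shape: a wrapper that pins url and method, then forwards whatever variables the caller supplies (path params, query, body, headers) plus an optional AbortSignal to the shared fetcher. A pattern-only sketch; the endpoint path is invented and dataPlaneFetch refers to the fetcher defined earlier in this file:

  const exampleOperation = (variables: Record<string, unknown>, signal?: AbortSignal) =>
    dataPlaneFetch({ url: "/db/{dbBranchName}/example", method: "post", ...variables, signal });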
1164
3221
 
1165
3222
  const controlPlaneFetch = async (options) => fetch$1({ ...options, endpoint: "controlPlane" });
@@ -1226,7 +3283,12 @@ const deleteOAuthAccessToken = (variables, signal) => controlPlaneFetch({
1226
3283
  ...variables,
1227
3284
  signal
1228
3285
  });
1229
- const updateOAuthAccessToken = (variables, signal) => controlPlaneFetch({ url: "/user/oauth/tokens/{token}", method: "patch", ...variables, signal });
3286
+ const updateOAuthAccessToken = (variables, signal) => controlPlaneFetch({
3287
+ url: "/user/oauth/tokens/{token}",
3288
+ method: "patch",
3289
+ ...variables,
3290
+ signal
3291
+ });
1230
3292
  const getWorkspacesList = (variables, signal) => controlPlaneFetch({
1231
3293
  url: "/workspaces",
1232
3294
  method: "get",
@@ -1257,47 +3319,150 @@ const deleteWorkspace = (variables, signal) => controlPlaneFetch({
1257
3319
  ...variables,
1258
3320
  signal
1259
3321
  });
1260
- const getWorkspaceMembersList = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/members", method: "get", ...variables, signal });
1261
- const updateWorkspaceMemberRole = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/members/{userId}", method: "put", ...variables, signal });
3322
+ const getWorkspaceSettings = (variables, signal) => controlPlaneFetch({
3323
+ url: "/workspaces/{workspaceId}/settings",
3324
+ method: "get",
3325
+ ...variables,
3326
+ signal
3327
+ });
3328
+ const updateWorkspaceSettings = (variables, signal) => controlPlaneFetch({
3329
+ url: "/workspaces/{workspaceId}/settings",
3330
+ method: "patch",
3331
+ ...variables,
3332
+ signal
3333
+ });
3334
+ const getWorkspaceMembersList = (variables, signal) => controlPlaneFetch({
3335
+ url: "/workspaces/{workspaceId}/members",
3336
+ method: "get",
3337
+ ...variables,
3338
+ signal
3339
+ });
3340
+ const updateWorkspaceMemberRole = (variables, signal) => controlPlaneFetch({
3341
+ url: "/workspaces/{workspaceId}/members/{userId}",
3342
+ method: "put",
3343
+ ...variables,
3344
+ signal
3345
+ });
1262
3346
  const removeWorkspaceMember = (variables, signal) => controlPlaneFetch({
1263
3347
  url: "/workspaces/{workspaceId}/members/{userId}",
1264
3348
  method: "delete",
1265
3349
  ...variables,
1266
3350
  signal
1267
3351
  });
1268
- const inviteWorkspaceMember = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/invites", method: "post", ...variables, signal });
1269
- const updateWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/invites/{inviteId}", method: "patch", ...variables, signal });
1270
- const cancelWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/invites/{inviteId}", method: "delete", ...variables, signal });
1271
- const acceptWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/invites/{inviteKey}/accept", method: "post", ...variables, signal });
1272
- const resendWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/invites/{inviteId}/resend", method: "post", ...variables, signal });
1273
- const listClusters = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/clusters", method: "get", ...variables, signal });
1274
- const createCluster = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/clusters", method: "post", ...variables, signal });
3352
+ const inviteWorkspaceMember = (variables, signal) => controlPlaneFetch({
3353
+ url: "/workspaces/{workspaceId}/invites",
3354
+ method: "post",
3355
+ ...variables,
3356
+ signal
3357
+ });
3358
+ const updateWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
3359
+ url: "/workspaces/{workspaceId}/invites/{inviteId}",
3360
+ method: "patch",
3361
+ ...variables,
3362
+ signal
3363
+ });
3364
+ const cancelWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
3365
+ url: "/workspaces/{workspaceId}/invites/{inviteId}",
3366
+ method: "delete",
3367
+ ...variables,
3368
+ signal
3369
+ });
3370
+ const acceptWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
3371
+ url: "/workspaces/{workspaceId}/invites/{inviteKey}/accept",
3372
+ method: "post",
3373
+ ...variables,
3374
+ signal
3375
+ });
3376
+ const resendWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
3377
+ url: "/workspaces/{workspaceId}/invites/{inviteId}/resend",
3378
+ method: "post",
3379
+ ...variables,
3380
+ signal
3381
+ });
3382
+ const listClusters = (variables, signal) => controlPlaneFetch({
3383
+ url: "/workspaces/{workspaceId}/clusters",
3384
+ method: "get",
3385
+ ...variables,
3386
+ signal
3387
+ });
3388
+ const createCluster = (variables, signal) => controlPlaneFetch({
3389
+ url: "/workspaces/{workspaceId}/clusters",
3390
+ method: "post",
3391
+ ...variables,
3392
+ signal
3393
+ });
1275
3394
  const getCluster = (variables, signal) => controlPlaneFetch({
1276
3395
  url: "/workspaces/{workspaceId}/clusters/{clusterId}",
1277
3396
  method: "get",
1278
3397
  ...variables,
1279
3398
  signal
1280
3399
  });
1281
- const updateCluster = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/clusters/{clusterId}", method: "patch", ...variables, signal });
3400
+ const updateCluster = (variables, signal) => controlPlaneFetch({
3401
+ url: "/workspaces/{workspaceId}/clusters/{clusterId}",
3402
+ method: "patch",
3403
+ ...variables,
3404
+ signal
3405
+ });
3406
+ const deleteCluster = (variables, signal) => controlPlaneFetch({
3407
+ url: "/workspaces/{workspaceId}/clusters/{clusterId}",
3408
+ method: "delete",
3409
+ ...variables,
3410
+ signal
3411
+ });
1282
3412
  const getDatabaseList = (variables, signal) => controlPlaneFetch({
1283
3413
  url: "/workspaces/{workspaceId}/dbs",
1284
3414
  method: "get",
1285
3415
  ...variables,
1286
3416
  signal
1287
3417
  });
1288
- const createDatabase = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}", method: "put", ...variables, signal });
3418
+ const createDatabase = (variables, signal) => controlPlaneFetch({
3419
+ url: "/workspaces/{workspaceId}/dbs/{dbName}",
3420
+ method: "put",
3421
+ ...variables,
3422
+ signal
3423
+ });
1289
3424
  const deleteDatabase = (variables, signal) => controlPlaneFetch({
1290
3425
  url: "/workspaces/{workspaceId}/dbs/{dbName}",
1291
3426
  method: "delete",
1292
3427
  ...variables,
1293
3428
  signal
1294
3429
  });
1295
- const getDatabaseMetadata = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}", method: "get", ...variables, signal });
1296
- const updateDatabaseMetadata = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}", method: "patch", ...variables, signal });
1297
- const renameDatabase = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}/rename", method: "post", ...variables, signal });
1298
- const getDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}/github", method: "get", ...variables, signal });
1299
- const updateDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}/github", method: "put", ...variables, signal });
1300
- const deleteDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}/github", method: "delete", ...variables, signal });
3430
+ const getDatabaseMetadata = (variables, signal) => controlPlaneFetch({
3431
+ url: "/workspaces/{workspaceId}/dbs/{dbName}",
3432
+ method: "get",
3433
+ ...variables,
3434
+ signal
3435
+ });
3436
+ const updateDatabaseMetadata = (variables, signal) => controlPlaneFetch({
3437
+ url: "/workspaces/{workspaceId}/dbs/{dbName}",
3438
+ method: "patch",
3439
+ ...variables,
3440
+ signal
3441
+ });
3442
+ const renameDatabase = (variables, signal) => controlPlaneFetch({
3443
+ url: "/workspaces/{workspaceId}/dbs/{dbName}/rename",
3444
+ method: "post",
3445
+ ...variables,
3446
+ signal
3447
+ });
3448
+ const getDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
3449
+ url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
3450
+ method: "get",
3451
+ ...variables,
3452
+ signal
3453
+ });
3454
+ const updateDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
3455
+ url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
3456
+ method: "put",
3457
+ ...variables,
3458
+ signal
3459
+ });
3460
+ const deleteDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
3461
+ url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
3462
+ method: "delete",
3463
+ ...variables,
3464
+ signal
3465
+ });
1301
3466
  const listRegions = (variables, signal) => controlPlaneFetch({
1302
3467
  url: "/workspaces/{workspaceId}/regions",
1303
3468
  method: "get",
@@ -1322,6 +3487,8 @@ const operationsByTag$1 = {
1322
3487
  getWorkspace,
1323
3488
  updateWorkspace,
1324
3489
  deleteWorkspace,
3490
+ getWorkspaceSettings,
3491
+ updateWorkspaceSettings,
1325
3492
  getWorkspaceMembersList,
1326
3493
  updateWorkspaceMemberRole,
1327
3494
  removeWorkspaceMember
@@ -1333,7 +3500,13 @@ const operationsByTag$1 = {
1333
3500
  acceptWorkspaceMemberInvite,
1334
3501
  resendWorkspaceMemberInvite
1335
3502
  },
1336
- xbcontrolOther: { listClusters, createCluster, getCluster, updateCluster },
3503
+ xbcontrolOther: {
3504
+ listClusters,
3505
+ createCluster,
3506
+ getCluster,
3507
+ updateCluster,
3508
+ deleteCluster
3509
+ },
1337
3510
  databases: {
1338
3511
  getDatabaseList,
1339
3512
  createDatabase,
@@ -1420,8 +3593,7 @@ function buildTransformString(transformations) {
1420
3593
  ).join(",");
1421
3594
  }
1422
3595
  function transformImage(url, ...transformations) {
1423
- if (!isDefined(url))
1424
- return void 0;
3596
+ if (!isDefined(url)) return void 0;
1425
3597
  const newTransformations = buildTransformString(transformations);
1426
3598
  const { hostname, pathname, search } = new URL(url);
1427
3599
  const pathParts = pathname.split("/");
@@ -1534,8 +3706,7 @@ class XataFile {
1534
3706
  }
1535
3707
  }
1536
3708
  const parseInputFileEntry = async (entry) => {
1537
- if (!isDefined(entry))
1538
- return null;
3709
+ if (!isDefined(entry)) return null;
1539
3710
  const { id, name, mediaType, base64Content, enablePublicUrl, signedUrlTimeout, uploadUrlTimeout } = await entry;
1540
3711
  return compactObject({
1541
3712
  id,
@@ -1550,24 +3721,19 @@ const parseInputFileEntry = async (entry) => {
1550
3721
  };
1551
3722
 
1552
3723
  function cleanFilter(filter) {
1553
- if (!isDefined(filter))
1554
- return void 0;
1555
- if (!isObject(filter))
1556
- return filter;
3724
+ if (!isDefined(filter)) return void 0;
3725
+ if (!isObject(filter)) return filter;
1557
3726
  const values = Object.fromEntries(
1558
3727
  Object.entries(filter).reduce((acc, [key, value]) => {
1559
- if (!isDefined(value))
1560
- return acc;
3728
+ if (!isDefined(value)) return acc;
1561
3729
  if (Array.isArray(value)) {
1562
3730
  const clean = value.map((item) => cleanFilter(item)).filter((item) => isDefined(item));
1563
- if (clean.length === 0)
1564
- return acc;
3731
+ if (clean.length === 0) return acc;
1565
3732
  return [...acc, [key, clean]];
1566
3733
  }
1567
3734
  if (isObject(value)) {
1568
3735
  const clean = cleanFilter(value);
1569
- if (!isDefined(clean))
1570
- return acc;
3736
+ if (!isDefined(clean)) return acc;
1571
3737
  return [...acc, [key, clean]];
1572
3738
  }
1573
3739
  return [...acc, [key, value]];
@@ -1577,10 +3743,8 @@ function cleanFilter(filter) {
1577
3743
  }
1578
3744
 
1579
3745
  function stringifyJson(value) {
1580
- if (!isDefined(value))
1581
- return value;
1582
- if (isString(value))
1583
- return value;
3746
+ if (!isDefined(value)) return value;
3747
+ if (isString(value)) return value;
1584
3748
  try {
1585
3749
  return JSON.stringify(value);
1586
3750
  } catch (e) {
@@ -1595,29 +3759,18 @@ function parseJson(value) {
1595
3759
  }
1596
3760
  }
1597
3761
 
1598
- var __accessCheck$5 = (obj, member, msg) => {
1599
- if (!member.has(obj))
1600
- throw TypeError("Cannot " + msg);
1601
- };
1602
- var __privateGet$4 = (obj, member, getter) => {
1603
- __accessCheck$5(obj, member, "read from private field");
1604
- return getter ? getter.call(obj) : member.get(obj);
1605
- };
1606
- var __privateAdd$5 = (obj, member, value) => {
1607
- if (member.has(obj))
1608
- throw TypeError("Cannot add the same private member more than once");
1609
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
1610
- };
1611
- var __privateSet$3 = (obj, member, value, setter) => {
1612
- __accessCheck$5(obj, member, "write to private field");
1613
- setter ? setter.call(obj, value) : member.set(obj, value);
1614
- return value;
3762
+ var __typeError$6 = (msg) => {
3763
+ throw TypeError(msg);
1615
3764
  };
3765
+ var __accessCheck$6 = (obj, member, msg) => member.has(obj) || __typeError$6("Cannot " + msg);
3766
+ var __privateGet$5 = (obj, member, getter) => (__accessCheck$6(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
3767
+ var __privateAdd$6 = (obj, member, value) => member.has(obj) ? __typeError$6("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
3768
+ var __privateSet$4 = (obj, member, value, setter) => (__accessCheck$6(obj, member, "write to private field"), member.set(obj, value), value);
1616
3769
  var _query, _page;
1617
3770
  class Page {
1618
3771
  constructor(query, meta, records = []) {
1619
- __privateAdd$5(this, _query, void 0);
1620
- __privateSet$3(this, _query, query);
3772
+ __privateAdd$6(this, _query);
3773
+ __privateSet$4(this, _query, query);
1621
3774
  this.meta = meta;
1622
3775
  this.records = new PageRecordArray(this, records);
1623
3776
  }
@@ -1628,7 +3781,7 @@ class Page {
1628
3781
  * @returns The next page or results.
1629
3782
  */
1630
3783
  async nextPage(size, offset) {
1631
- return __privateGet$4(this, _query).getPaginated({ pagination: { size, offset, after: this.meta.page.cursor } });
3784
+ return __privateGet$5(this, _query).getPaginated({ pagination: { size, offset, after: this.meta.page.cursor } });
1632
3785
  }
1633
3786
  /**
1634
3787
  * Retrieves the previous page of results.
@@ -1637,7 +3790,7 @@ class Page {
1637
3790
  * @returns The previous page or results.
1638
3791
  */
1639
3792
  async previousPage(size, offset) {
1640
- return __privateGet$4(this, _query).getPaginated({ pagination: { size, offset, before: this.meta.page.cursor } });
3793
+ return __privateGet$5(this, _query).getPaginated({ pagination: { size, offset, before: this.meta.page.cursor } });
1641
3794
  }
1642
3795
  /**
1643
3796
  * Retrieves the start page of results.
@@ -1646,7 +3799,7 @@ class Page {
1646
3799
  * @returns The start page or results.
1647
3800
  */
1648
3801
  async startPage(size, offset) {
1649
- return __privateGet$4(this, _query).getPaginated({ pagination: { size, offset, start: this.meta.page.cursor } });
3802
+ return __privateGet$5(this, _query).getPaginated({ pagination: { size, offset, start: this.meta.page.cursor } });
1650
3803
  }
1651
3804
  /**
1652
3805
  * Retrieves the end page of results.
@@ -1655,7 +3808,7 @@ class Page {
1655
3808
  * @returns The end page or results.
1656
3809
  */
1657
3810
  async endPage(size, offset) {
1658
- return __privateGet$4(this, _query).getPaginated({ pagination: { size, offset, end: this.meta.page.cursor } });
3811
+ return __privateGet$5(this, _query).getPaginated({ pagination: { size, offset, end: this.meta.page.cursor } });
1659
3812
  }
1660
3813
  /**
1661
3814
  * Shortcut method to check if there will be additional results if the next page of results is retrieved.
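
Each of the Page methods above re-runs the parent query through getPaginated, passing the current cursor as after, before, start, or end. A hypothetical walk through all pages, where xata.db.teams stands in for any repository and only members shown in this diff are used:

// Placeholder client/table names; getPaginated, nextPage and meta.page.more
// are the members defined above (hasNextPage, described in the comment above,
// is the shortcut for this check).
let page = await xata.db.teams.getPaginated({ pagination: { size: 50 } });
const all = [...page.records];
while (page.meta.page.more) {
  page = await page.nextPage(); // re-queries with after: the previous cursor
  all.push(...page.records);
}
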
@@ -1703,8 +3856,8 @@ class RecordArray extends Array {
1703
3856
  const _PageRecordArray = class _PageRecordArray extends Array {
1704
3857
  constructor(...args) {
1705
3858
  super(..._PageRecordArray.parseConstructorParams(...args));
1706
- __privateAdd$5(this, _page, void 0);
1707
- __privateSet$3(this, _page, isObject(args[0]?.meta) ? args[0] : { meta: { page: { cursor: "", more: false } }, records: [] });
3859
+ __privateAdd$6(this, _page);
3860
+ __privateSet$4(this, _page, isObject(args[0]?.meta) ? args[0] : { meta: { page: { cursor: "", more: false } }, records: [] });
1708
3861
  }
1709
3862
  static parseConstructorParams(...args) {
1710
3863
  if (args.length === 1 && typeof args[0] === "number") {
@@ -1734,7 +3887,7 @@ const _PageRecordArray = class _PageRecordArray extends Array {
1734
3887
  * @returns A new array of objects
1735
3888
  */
1736
3889
  async nextPage(size, offset) {
1737
- const newPage = await __privateGet$4(this, _page).nextPage(size, offset);
3890
+ const newPage = await __privateGet$5(this, _page).nextPage(size, offset);
1738
3891
  return new _PageRecordArray(newPage);
1739
3892
  }
1740
3893
  /**
@@ -1743,7 +3896,7 @@ const _PageRecordArray = class _PageRecordArray extends Array {
1743
3896
  * @returns A new array of objects
1744
3897
  */
1745
3898
  async previousPage(size, offset) {
1746
- const newPage = await __privateGet$4(this, _page).previousPage(size, offset);
3899
+ const newPage = await __privateGet$5(this, _page).previousPage(size, offset);
1747
3900
  return new _PageRecordArray(newPage);
1748
3901
  }
1749
3902
  /**
@@ -1752,7 +3905,7 @@ const _PageRecordArray = class _PageRecordArray extends Array {
1752
3905
  * @returns A new array of objects
1753
3906
  */
1754
3907
  async startPage(size, offset) {
1755
- const newPage = await __privateGet$4(this, _page).startPage(size, offset);
3908
+ const newPage = await __privateGet$5(this, _page).startPage(size, offset);
1756
3909
  return new _PageRecordArray(newPage);
1757
3910
  }
1758
3911
  /**
@@ -1761,68 +3914,55 @@ const _PageRecordArray = class _PageRecordArray extends Array {
1761
3914
  * @returns A new array of objects
1762
3915
  */
1763
3916
  async endPage(size, offset) {
1764
- const newPage = await __privateGet$4(this, _page).endPage(size, offset);
3917
+ const newPage = await __privateGet$5(this, _page).endPage(size, offset);
1765
3918
  return new _PageRecordArray(newPage);
1766
3919
  }
1767
3920
  /**
1768
3921
  * @returns Boolean indicating if there is a next page
1769
3922
  */
1770
3923
  hasNextPage() {
1771
- return __privateGet$4(this, _page).meta.page.more;
3924
+ return __privateGet$5(this, _page).meta.page.more;
1772
3925
  }
1773
3926
  };
1774
3927
  _page = new WeakMap();
1775
3928
  let PageRecordArray = _PageRecordArray;
1776
3929
 
1777
- var __accessCheck$4 = (obj, member, msg) => {
1778
- if (!member.has(obj))
1779
- throw TypeError("Cannot " + msg);
1780
- };
1781
- var __privateGet$3 = (obj, member, getter) => {
1782
- __accessCheck$4(obj, member, "read from private field");
1783
- return getter ? getter.call(obj) : member.get(obj);
1784
- };
1785
- var __privateAdd$4 = (obj, member, value) => {
1786
- if (member.has(obj))
1787
- throw TypeError("Cannot add the same private member more than once");
1788
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
3930
+ var __typeError$5 = (msg) => {
3931
+ throw TypeError(msg);
1789
3932
  };
1790
- var __privateSet$2 = (obj, member, value, setter) => {
1791
- __accessCheck$4(obj, member, "write to private field");
1792
- setter ? setter.call(obj, value) : member.set(obj, value);
1793
- return value;
1794
- };
1795
- var __privateMethod$3 = (obj, member, method) => {
1796
- __accessCheck$4(obj, member, "access private method");
1797
- return method;
1798
- };
1799
- var _table$1, _repository, _data, _cleanFilterConstraint, cleanFilterConstraint_fn;
3933
+ var __accessCheck$5 = (obj, member, msg) => member.has(obj) || __typeError$5("Cannot " + msg);
3934
+ var __privateGet$4 = (obj, member, getter) => (__accessCheck$5(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
3935
+ var __privateAdd$5 = (obj, member, value) => member.has(obj) ? __typeError$5("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
3936
+ var __privateSet$3 = (obj, member, value, setter) => (__accessCheck$5(obj, member, "write to private field"), member.set(obj, value), value);
3937
+ var __privateMethod$3 = (obj, member, method) => (__accessCheck$5(obj, member, "access private method"), method);
3938
+ var _table$1, _repository, _data, _Query_instances, cleanFilterConstraint_fn;
1800
3939
  const _Query = class _Query {
1801
3940
  constructor(repository, table, data, rawParent) {
1802
- __privateAdd$4(this, _cleanFilterConstraint);
1803
- __privateAdd$4(this, _table$1, void 0);
1804
- __privateAdd$4(this, _repository, void 0);
1805
- __privateAdd$4(this, _data, { filter: {} });
3941
+ __privateAdd$5(this, _Query_instances);
3942
+ __privateAdd$5(this, _table$1);
3943
+ __privateAdd$5(this, _repository);
3944
+ __privateAdd$5(this, _data, { filter: {} });
1806
3945
  // Implements pagination
1807
3946
  this.meta = { page: { cursor: "start", more: true, size: PAGINATION_DEFAULT_SIZE } };
1808
3947
  this.records = new PageRecordArray(this, []);
1809
- __privateSet$2(this, _table$1, table);
3948
+ __privateSet$3(this, _table$1, table);
1810
3949
  if (repository) {
1811
- __privateSet$2(this, _repository, repository);
3950
+ __privateSet$3(this, _repository, repository);
1812
3951
  } else {
1813
- __privateSet$2(this, _repository, this);
3952
+ __privateSet$3(this, _repository, this);
1814
3953
  }
1815
3954
  const parent = cleanParent(data, rawParent);
1816
- __privateGet$3(this, _data).filter = data.filter ?? parent?.filter ?? {};
1817
- __privateGet$3(this, _data).filter.$any = data.filter?.$any ?? parent?.filter?.$any;
1818
- __privateGet$3(this, _data).filter.$all = data.filter?.$all ?? parent?.filter?.$all;
1819
- __privateGet$3(this, _data).filter.$not = data.filter?.$not ?? parent?.filter?.$not;
1820
- __privateGet$3(this, _data).filter.$none = data.filter?.$none ?? parent?.filter?.$none;
1821
- __privateGet$3(this, _data).sort = data.sort ?? parent?.sort;
1822
- __privateGet$3(this, _data).columns = data.columns ?? parent?.columns;
1823
- __privateGet$3(this, _data).consistency = data.consistency ?? parent?.consistency;
1824
- __privateGet$3(this, _data).pagination = data.pagination ?? parent?.pagination;
1825
- __privateGet$3(this, _data).fetchOptions = data.fetchOptions ?? parent?.fetchOptions;
3955
+ __privateGet$4(this, _data).filter = data.filter ?? parent?.filter ?? {};
3956
+ __privateGet$4(this, _data).filter.$any = data.filter?.$any ?? parent?.filter?.$any;
3957
+ __privateGet$4(this, _data).filter.$all = data.filter?.$all ?? parent?.filter?.$all;
3958
+ __privateGet$4(this, _data).filter.$not = data.filter?.$not ?? parent?.filter?.$not;
3959
+ __privateGet$4(this, _data).filter.$none = data.filter?.$none ?? parent?.filter?.$none;
3960
+ __privateGet$4(this, _data).sort = data.sort ?? parent?.sort;
3961
+ __privateGet$4(this, _data).columns = data.columns ?? parent?.columns;
3962
+ __privateGet$4(this, _data).consistency = data.consistency ?? parent?.consistency;
3963
+ __privateGet$4(this, _data).pagination = data.pagination ?? parent?.pagination;
3964
+ __privateGet$4(this, _data).cache = data.cache ?? parent?.cache;
3965
+ __privateGet$4(this, _data).fetchOptions = data.fetchOptions ?? parent?.fetchOptions;
1826
3966
  this.any = this.any.bind(this);
1827
3967
  this.all = this.all.bind(this);
1828
3968
  this.not = this.not.bind(this);
@@ -1833,10 +3973,10 @@ const _Query = class _Query {
1833
3973
  Object.defineProperty(this, "repository", { enumerable: false });
1834
3974
  }
1835
3975
  getQueryOptions() {
1836
- return __privateGet$3(this, _data);
3976
+ return __privateGet$4(this, _data);
1837
3977
  }
1838
3978
  key() {
1839
- const { columns = [], filter = {}, sort = [], pagination = {} } = __privateGet$3(this, _data);
3979
+ const { columns = [], filter = {}, sort = [], pagination = {} } = __privateGet$4(this, _data);
1840
3980
  const key = JSON.stringify({ columns, filter, sort, pagination });
1841
3981
  return toBase64(key);
1842
3982
  }
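
key() above is what the query cache further down keys on: the normalized columns/filter/sort/pagination options are serialized and base64-encoded, so two structurally identical queries map to the same cache entry. An equivalent computation, using Node's Buffer as a stand-in for the bundle's toBase64 helper (an assumption for illustration only):

const options = { columns: ["name"], filter: { $all: [] }, sort: [], pagination: {} };
const queryKey = Buffer.from(JSON.stringify(options)).toString("base64");
// getCacheQuery_fn/setCacheQuery_fn later in this file prefix this with
// "query_<table>:" to build the full cache key.
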
@@ -1847,7 +3987,7 @@ const _Query = class _Query {
1847
3987
  */
1848
3988
  any(...queries) {
1849
3989
  const $any = queries.map((query) => query.getQueryOptions().filter ?? {});
1850
- return new _Query(__privateGet$3(this, _repository), __privateGet$3(this, _table$1), { filter: { $any } }, __privateGet$3(this, _data));
3990
+ return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { filter: { $any } }, __privateGet$4(this, _data));
1851
3991
  }
1852
3992
  /**
1853
3993
  * Builds a new query object representing a logical AND between the given subqueries.
@@ -1856,7 +3996,7 @@ const _Query = class _Query {
1856
3996
  */
1857
3997
  all(...queries) {
1858
3998
  const $all = queries.map((query) => query.getQueryOptions().filter ?? {});
1859
- return new _Query(__privateGet$3(this, _repository), __privateGet$3(this, _table$1), { filter: { $all } }, __privateGet$3(this, _data));
3999
+ return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { filter: { $all } }, __privateGet$4(this, _data));
1860
4000
  }
1861
4001
  /**
1862
4002
  * Builds a new query object representing a logical OR negating each subquery. In pseudo-code: !q1 OR !q2
@@ -1865,7 +4005,7 @@ const _Query = class _Query {
1865
4005
  */
1866
4006
  not(...queries) {
1867
4007
  const $not = queries.map((query) => query.getQueryOptions().filter ?? {});
1868
- return new _Query(__privateGet$3(this, _repository), __privateGet$3(this, _table$1), { filter: { $not } }, __privateGet$3(this, _data));
4008
+ return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { filter: { $not } }, __privateGet$4(this, _data));
1869
4009
  }
1870
4010
  /**
1871
4011
  * Builds a new query object representing a logical AND negating each subquery. In pseudo-code: !q1 AND !q2
@@ -1874,25 +4014,25 @@ const _Query = class _Query {
1874
4014
  */
1875
4015
  none(...queries) {
1876
4016
  const $none = queries.map((query) => query.getQueryOptions().filter ?? {});
1877
- return new _Query(__privateGet$3(this, _repository), __privateGet$3(this, _table$1), { filter: { $none } }, __privateGet$3(this, _data));
4017
+ return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { filter: { $none } }, __privateGet$4(this, _data));
1878
4018
  }
1879
4019
  filter(a, b) {
1880
4020
  if (arguments.length === 1) {
1881
4021
  const constraints = Object.entries(a ?? {}).map(([column, constraint]) => ({
1882
- [column]: __privateMethod$3(this, _cleanFilterConstraint, cleanFilterConstraint_fn).call(this, column, constraint)
4022
+ [column]: __privateMethod$3(this, _Query_instances, cleanFilterConstraint_fn).call(this, column, constraint)
1883
4023
  }));
1884
- const $all = compact([__privateGet$3(this, _data).filter?.$all].flat().concat(constraints));
1885
- return new _Query(__privateGet$3(this, _repository), __privateGet$3(this, _table$1), { filter: { $all } }, __privateGet$3(this, _data));
4024
+ const $all = compact([__privateGet$4(this, _data).filter?.$all].flat().concat(constraints));
4025
+ return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { filter: { $all } }, __privateGet$4(this, _data));
1886
4026
  } else {
1887
- const constraints = isDefined(a) && isDefined(b) ? [{ [a]: __privateMethod$3(this, _cleanFilterConstraint, cleanFilterConstraint_fn).call(this, a, b) }] : void 0;
1888
- const $all = compact([__privateGet$3(this, _data).filter?.$all].flat().concat(constraints));
1889
- return new _Query(__privateGet$3(this, _repository), __privateGet$3(this, _table$1), { filter: { $all } }, __privateGet$3(this, _data));
4027
+ const constraints = isDefined(a) && isDefined(b) ? [{ [a]: __privateMethod$3(this, _Query_instances, cleanFilterConstraint_fn).call(this, a, b) }] : void 0;
4028
+ const $all = compact([__privateGet$4(this, _data).filter?.$all].flat().concat(constraints));
4029
+ return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { filter: { $all } }, __privateGet$4(this, _data));
1890
4030
  }
1891
4031
  }
1892
4032
  sort(column, direction = "asc") {
1893
- const originalSort = [__privateGet$3(this, _data).sort ?? []].flat();
4033
+ const originalSort = [__privateGet$4(this, _data).sort ?? []].flat();
1894
4034
  const sort = [...originalSort, { column, direction }];
1895
- return new _Query(__privateGet$3(this, _repository), __privateGet$3(this, _table$1), { sort }, __privateGet$3(this, _data));
4035
+ return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { sort }, __privateGet$4(this, _data));
1896
4036
  }
1897
4037
  /**
1898
4038
  * Builds a new query specifying the set of columns to be returned in the query response.
@@ -1901,15 +4041,15 @@ const _Query = class _Query {
1901
4041
  */
1902
4042
  select(columns) {
1903
4043
  return new _Query(
1904
- __privateGet$3(this, _repository),
1905
- __privateGet$3(this, _table$1),
4044
+ __privateGet$4(this, _repository),
4045
+ __privateGet$4(this, _table$1),
1906
4046
  { columns },
1907
- __privateGet$3(this, _data)
4047
+ __privateGet$4(this, _data)
1908
4048
  );
1909
4049
  }
1910
4050
  getPaginated(options = {}) {
1911
- const query = new _Query(__privateGet$3(this, _repository), __privateGet$3(this, _table$1), options, __privateGet$3(this, _data));
1912
- return __privateGet$3(this, _repository).query(query);
4051
+ const query = new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), options, __privateGet$4(this, _data));
4052
+ return __privateGet$4(this, _repository).query(query);
1913
4053
  }
1914
4054
  /**
1915
4055
  * Get results in an iterator
@@ -1963,19 +4103,26 @@ const _Query = class _Query {
1963
4103
  }
1964
4104
  async getFirstOrThrow(options = {}) {
1965
4105
  const records = await this.getMany({ ...options, pagination: { size: 1 } });
1966
- if (records[0] === void 0)
1967
- throw new Error("No results found.");
4106
+ if (records[0] === void 0) throw new Error("No results found.");
1968
4107
  return records[0];
1969
4108
  }
1970
4109
  async summarize(params = {}) {
1971
4110
  const { summaries, summariesFilter, ...options } = params;
1972
4111
  const query = new _Query(
1973
- __privateGet$3(this, _repository),
1974
- __privateGet$3(this, _table$1),
4112
+ __privateGet$4(this, _repository),
4113
+ __privateGet$4(this, _table$1),
1975
4114
  options,
1976
- __privateGet$3(this, _data)
4115
+ __privateGet$4(this, _data)
1977
4116
  );
1978
- return __privateGet$3(this, _repository).summarizeTable(query, summaries, summariesFilter);
4117
+ return __privateGet$4(this, _repository).summarizeTable(query, summaries, summariesFilter);
4118
+ }
4119
+ /**
4120
+ * Builds a new query object adding a cache TTL in milliseconds.
4121
+ * @param ttl The cache TTL in milliseconds.
4122
+ * @returns A new Query object.
4123
+ */
4124
+ cache(ttl) {
4125
+ return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { cache: ttl }, __privateGet$4(this, _data));
1979
4126
  }
1980
4127
  /**
1981
4128
  * Retrieve next page of records
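
The cache(ttl) method added above restores the per-query cache TTL: the value is stored under the cache query option (the Query constructor above now also carries data.cache ?? parent?.cache), and the repository checks it before issuing a request. A hypothetical usage sketch; client, table and column names are placeholders, the chained methods are the ones defined on this class:

// Serve repeated runs of this query from the query cache for up to 5 minutes.
const recentPosts = await xata.db.posts
  .filter({ published: true })
  .cache(5 * 60 * 1000)
  .getMany();
// Without cache(), the cache plugin's defaultQueryTTL applies; a negative TTL
// skips the cache entirely (see getCacheQuery_fn later in this diff).
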
@@ -2019,9 +4166,9 @@ const _Query = class _Query {
2019
4166
  _table$1 = new WeakMap();
2020
4167
  _repository = new WeakMap();
2021
4168
  _data = new WeakMap();
2022
- _cleanFilterConstraint = new WeakSet();
4169
+ _Query_instances = new WeakSet();
2023
4170
  cleanFilterConstraint_fn = function(column, value) {
2024
- const columnType = __privateGet$3(this, _table$1).schema?.columns.find(({ name }) => name === column)?.type;
4171
+ const columnType = __privateGet$4(this, _table$1).schema?.columns.find(({ name }) => name === column)?.type;
2025
4172
  if (columnType === "multiple" && (isString(value) || isStringArray(value))) {
2026
4173
  return { $includes: value };
2027
4174
  }
@@ -2054,7 +4201,12 @@ const RecordColumnTypes = [
2054
4201
  "json"
2055
4202
  ];
2056
4203
  function isIdentifiable(x) {
2057
- return isObject(x) && isString(x?.xata_id);
4204
+ return isObject(x) && isString(x?.id);
4205
+ }
4206
+ function isXataRecord(x) {
4207
+ const record = x;
4208
+ const metadata = record?.getMetadata();
4209
+ return isIdentifiable(x) && isObject(metadata) && typeof metadata.version === "number";
2058
4210
  }
2059
4211
 
2060
4212
  function isValidExpandedColumn(column) {
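
The identity guards above move back from the xata_id convention to plain id, and the new isXataRecord guard additionally requires a getMetadata() result with a numeric version. Illustrative checks (the objects are made up; whether these guards are re-exported is not shown in this hunk):

isIdentifiable({ id: "rec_123" });        // true: object with a string id
isIdentifiable({ xata_id: "rec_123" });   // false in this version
isXataRecord({ id: "rec_123" });          // false: no getMetadata()
isXataRecord({ id: "rec_123", getMetadata: () => ({ version: 3 }) }); // true
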
@@ -2080,8 +4232,7 @@ function isSortFilterString(value) {
2080
4232
  }
2081
4233
  function isSortFilterBase(filter) {
2082
4234
  return isObject(filter) && Object.entries(filter).every(([key, value]) => {
2083
- if (key === "*")
2084
- return value === "random";
4235
+ if (key === "*") return value === "random";
2085
4236
  return value === "asc" || value === "desc";
2086
4237
  });
2087
4238
  }
@@ -2102,29 +4253,15 @@ function buildSortFilter(filter) {
2102
4253
  }
2103
4254
  }
2104
4255
 
2105
- var __accessCheck$3 = (obj, member, msg) => {
2106
- if (!member.has(obj))
2107
- throw TypeError("Cannot " + msg);
4256
+ var __typeError$4 = (msg) => {
4257
+ throw TypeError(msg);
2108
4258
  };
2109
- var __privateGet$2 = (obj, member, getter) => {
2110
- __accessCheck$3(obj, member, "read from private field");
2111
- return getter ? getter.call(obj) : member.get(obj);
2112
- };
2113
- var __privateAdd$3 = (obj, member, value) => {
2114
- if (member.has(obj))
2115
- throw TypeError("Cannot add the same private member more than once");
2116
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
2117
- };
2118
- var __privateSet$1 = (obj, member, value, setter) => {
2119
- __accessCheck$3(obj, member, "write to private field");
2120
- setter ? setter.call(obj, value) : member.set(obj, value);
2121
- return value;
2122
- };
2123
- var __privateMethod$2 = (obj, member, method) => {
2124
- __accessCheck$3(obj, member, "access private method");
2125
- return method;
2126
- };
2127
- var _table, _getFetchProps, _db, _schemaTables, _trace, _insertRecordWithoutId, insertRecordWithoutId_fn, _insertRecordWithId, insertRecordWithId_fn, _insertRecords, insertRecords_fn, _updateRecordWithID, updateRecordWithID_fn, _updateRecords, updateRecords_fn, _upsertRecordWithID, upsertRecordWithID_fn, _deleteRecord, deleteRecord_fn, _deleteRecords, deleteRecords_fn, _getSchemaTables, getSchemaTables_fn, _transformObjectToApi, transformObjectToApi_fn;
4259
+ var __accessCheck$4 = (obj, member, msg) => member.has(obj) || __typeError$4("Cannot " + msg);
4260
+ var __privateGet$3 = (obj, member, getter) => (__accessCheck$4(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
4261
+ var __privateAdd$4 = (obj, member, value) => member.has(obj) ? __typeError$4("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
4262
+ var __privateSet$2 = (obj, member, value, setter) => (__accessCheck$4(obj, member, "write to private field"), member.set(obj, value), value);
4263
+ var __privateMethod$2 = (obj, member, method) => (__accessCheck$4(obj, member, "access private method"), method);
4264
+ var _table, _getFetchProps, _db, _cache, _schemaTables, _trace, _RestRepository_instances, insertRecordWithoutId_fn, insertRecordWithId_fn, insertRecords_fn, updateRecordWithID_fn, updateRecords_fn, upsertRecordWithID_fn, deleteRecord_fn, deleteRecords_fn, setCacheQuery_fn, getCacheQuery_fn, getSchemaTables_fn, transformObjectToApi_fn;
2128
4265
  const BULK_OPERATION_MAX_SIZE = 1e3;
2129
4266
  class Repository extends Query {
2130
4267
  }
@@ -2135,78 +4272,64 @@ class RestRepository extends Query {
2135
4272
  { name: options.table, schema: options.schemaTables?.find((table) => table.name === options.table) },
2136
4273
  {}
2137
4274
  );
2138
- __privateAdd$3(this, _insertRecordWithoutId);
2139
- __privateAdd$3(this, _insertRecordWithId);
2140
- __privateAdd$3(this, _insertRecords);
2141
- __privateAdd$3(this, _updateRecordWithID);
2142
- __privateAdd$3(this, _updateRecords);
2143
- __privateAdd$3(this, _upsertRecordWithID);
2144
- __privateAdd$3(this, _deleteRecord);
2145
- __privateAdd$3(this, _deleteRecords);
2146
- __privateAdd$3(this, _getSchemaTables);
2147
- __privateAdd$3(this, _transformObjectToApi);
2148
- __privateAdd$3(this, _table, void 0);
2149
- __privateAdd$3(this, _getFetchProps, void 0);
2150
- __privateAdd$3(this, _db, void 0);
2151
- __privateAdd$3(this, _schemaTables, void 0);
2152
- __privateAdd$3(this, _trace, void 0);
2153
- __privateSet$1(this, _table, options.table);
2154
- __privateSet$1(this, _db, options.db);
2155
- __privateSet$1(this, _schemaTables, options.schemaTables);
2156
- __privateSet$1(this, _getFetchProps, () => ({ ...options.pluginOptions, sessionID: generateUUID() }));
4275
+ __privateAdd$4(this, _RestRepository_instances);
4276
+ __privateAdd$4(this, _table);
4277
+ __privateAdd$4(this, _getFetchProps);
4278
+ __privateAdd$4(this, _db);
4279
+ __privateAdd$4(this, _cache);
4280
+ __privateAdd$4(this, _schemaTables);
4281
+ __privateAdd$4(this, _trace);
4282
+ __privateSet$2(this, _table, options.table);
4283
+ __privateSet$2(this, _db, options.db);
4284
+ __privateSet$2(this, _cache, options.pluginOptions.cache);
4285
+ __privateSet$2(this, _schemaTables, options.schemaTables);
4286
+ __privateSet$2(this, _getFetchProps, () => ({ ...options.pluginOptions, sessionID: generateUUID() }));
2157
4287
  const trace = options.pluginOptions.trace ?? defaultTrace;
2158
- __privateSet$1(this, _trace, async (name, fn, options2 = {}) => {
4288
+ __privateSet$2(this, _trace, async (name, fn, options2 = {}) => {
2159
4289
  return trace(name, fn, {
2160
4290
  ...options2,
2161
- [TraceAttributes.TABLE]: __privateGet$2(this, _table),
4291
+ [TraceAttributes.TABLE]: __privateGet$3(this, _table),
2162
4292
  [TraceAttributes.KIND]: "sdk-operation",
2163
4293
  [TraceAttributes.VERSION]: VERSION
2164
4294
  });
2165
4295
  });
2166
4296
  }
2167
4297
  async create(a, b, c, d) {
2168
- return __privateGet$2(this, _trace).call(this, "create", async () => {
4298
+ return __privateGet$3(this, _trace).call(this, "create", async () => {
2169
4299
  const ifVersion = parseIfVersion(b, c, d);
2170
4300
  if (Array.isArray(a)) {
2171
- if (a.length === 0)
2172
- return [];
2173
- const ids = await __privateMethod$2(this, _insertRecords, insertRecords_fn).call(this, a, { ifVersion, createOnly: true });
4301
+ if (a.length === 0) return [];
4302
+ const ids = await __privateMethod$2(this, _RestRepository_instances, insertRecords_fn).call(this, a, { ifVersion, createOnly: true });
2174
4303
  const columns = isValidSelectableColumns(b) ? b : ["*"];
2175
4304
  const result = await this.read(ids, columns);
2176
4305
  return result;
2177
4306
  }
2178
4307
  if (isString(a) && isObject(b)) {
2179
- if (a === "")
2180
- throw new Error("The id can't be empty");
4308
+ if (a === "") throw new Error("The id can't be empty");
2181
4309
  const columns = isValidSelectableColumns(c) ? c : void 0;
2182
- return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: true, ifVersion });
4310
+ return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: true, ifVersion });
2183
4311
  }
2184
- if (isObject(a) && isString(a.xata_id)) {
2185
- if (a.xata_id === "")
2186
- throw new Error("The id can't be empty");
4312
+ if (isObject(a) && isString(a.id)) {
4313
+ if (a.id === "") throw new Error("The id can't be empty");
2187
4314
  const columns = isValidSelectableColumns(b) ? b : void 0;
2188
- return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, {
2189
- createOnly: true,
2190
- ifVersion
2191
- });
4315
+ return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a.id, { ...a, id: void 0 }, columns, { createOnly: true, ifVersion });
2192
4316
  }
2193
4317
  if (isObject(a)) {
2194
4318
  const columns = isValidSelectableColumns(b) ? b : void 0;
2195
- return __privateMethod$2(this, _insertRecordWithoutId, insertRecordWithoutId_fn).call(this, a, columns);
4319
+ return __privateMethod$2(this, _RestRepository_instances, insertRecordWithoutId_fn).call(this, a, columns);
2196
4320
  }
2197
4321
  throw new Error("Invalid arguments for create method");
2198
4322
  });
2199
4323
  }
2200
4324
  async read(a, b) {
2201
- return __privateGet$2(this, _trace).call(this, "read", async () => {
4325
+ return __privateGet$3(this, _trace).call(this, "read", async () => {
2202
4326
  const columns = isValidSelectableColumns(b) ? b : ["*"];
2203
4327
  if (Array.isArray(a)) {
2204
- if (a.length === 0)
2205
- return [];
4328
+ if (a.length === 0) return [];
2206
4329
  const ids = a.map((item) => extractId(item));
2207
- const finalObjects = await this.getAll({ filter: { xata_id: { $any: compact(ids) } }, columns });
4330
+ const finalObjects = await this.getAll({ filter: { id: { $any: compact(ids) } }, columns });
2208
4331
  const dictionary = finalObjects.reduce((acc, object) => {
2209
- acc[object.xata_id] = object;
4332
+ acc[object.id] = object;
2210
4333
  return acc;
2211
4334
  }, {});
2212
4335
  return ids.map((id2) => dictionary[id2 ?? ""] ?? null);
@@ -2219,17 +4342,17 @@ class RestRepository extends Query {
2219
4342
  workspace: "{workspaceId}",
2220
4343
  dbBranchName: "{dbBranch}",
2221
4344
  region: "{region}",
2222
- tableName: __privateGet$2(this, _table),
4345
+ tableName: __privateGet$3(this, _table),
2223
4346
  recordId: id
2224
4347
  },
2225
4348
  queryParams: { columns },
2226
- ...__privateGet$2(this, _getFetchProps).call(this)
4349
+ ...__privateGet$3(this, _getFetchProps).call(this)
2227
4350
  });
2228
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
4351
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
2229
4352
  return initObject(
2230
- __privateGet$2(this, _db),
4353
+ __privateGet$3(this, _db),
2231
4354
  schemaTables,
2232
- __privateGet$2(this, _table),
4355
+ __privateGet$3(this, _table),
2233
4356
  response,
2234
4357
  columns
2235
4358
  );
@@ -2244,7 +4367,7 @@ class RestRepository extends Query {
2244
4367
  });
2245
4368
  }
2246
4369
  async readOrThrow(a, b) {
2247
- return __privateGet$2(this, _trace).call(this, "readOrThrow", async () => {
4370
+ return __privateGet$3(this, _trace).call(this, "readOrThrow", async () => {
2248
4371
  const result = await this.read(a, b);
2249
4372
  if (Array.isArray(result)) {
2250
4373
  const missingIds = compact(
@@ -2263,14 +4386,13 @@ class RestRepository extends Query {
2263
4386
  });
2264
4387
  }
2265
4388
  async update(a, b, c, d) {
2266
- return __privateGet$2(this, _trace).call(this, "update", async () => {
4389
+ return __privateGet$3(this, _trace).call(this, "update", async () => {
2267
4390
  const ifVersion = parseIfVersion(b, c, d);
2268
4391
  if (Array.isArray(a)) {
2269
- if (a.length === 0)
2270
- return [];
2271
- const existing = await this.read(a, ["xata_id"]);
4392
+ if (a.length === 0) return [];
4393
+ const existing = await this.read(a, ["id"]);
2272
4394
  const updates = a.filter((_item, index) => existing[index] !== null);
2273
- await __privateMethod$2(this, _updateRecords, updateRecords_fn).call(this, updates, {
4395
+ await __privateMethod$2(this, _RestRepository_instances, updateRecords_fn).call(this, updates, {
2274
4396
  ifVersion,
2275
4397
  upsert: false
2276
4398
  });
@@ -2281,22 +4403,21 @@ class RestRepository extends Query {
2281
4403
  try {
2282
4404
  if (isString(a) && isObject(b)) {
2283
4405
  const columns = isValidSelectableColumns(c) ? c : void 0;
2284
- return await __privateMethod$2(this, _updateRecordWithID, updateRecordWithID_fn).call(this, a, b, columns, { ifVersion });
4406
+ return await __privateMethod$2(this, _RestRepository_instances, updateRecordWithID_fn).call(this, a, b, columns, { ifVersion });
2285
4407
  }
2286
- if (isObject(a) && isString(a.xata_id)) {
4408
+ if (isObject(a) && isString(a.id)) {
2287
4409
  const columns = isValidSelectableColumns(b) ? b : void 0;
2288
- return await __privateMethod$2(this, _updateRecordWithID, updateRecordWithID_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, { ifVersion });
4410
+ return await __privateMethod$2(this, _RestRepository_instances, updateRecordWithID_fn).call(this, a.id, { ...a, id: void 0 }, columns, { ifVersion });
2289
4411
  }
2290
4412
  } catch (error) {
2291
- if (error.status === 422)
2292
- return null;
4413
+ if (error.status === 422) return null;
2293
4414
  throw error;
2294
4415
  }
2295
4416
  throw new Error("Invalid arguments for update method");
2296
4417
  });
2297
4418
  }
2298
4419
  async updateOrThrow(a, b, c, d) {
2299
- return __privateGet$2(this, _trace).call(this, "updateOrThrow", async () => {
4420
+ return __privateGet$3(this, _trace).call(this, "updateOrThrow", async () => {
2300
4421
  const result = await this.update(a, b, c, d);
2301
4422
  if (Array.isArray(result)) {
2302
4423
  const missingIds = compact(
@@ -2315,12 +4436,11 @@ class RestRepository extends Query {
2315
4436
  });
2316
4437
  }
2317
4438
  async createOrUpdate(a, b, c, d) {
2318
- return __privateGet$2(this, _trace).call(this, "createOrUpdate", async () => {
4439
+ return __privateGet$3(this, _trace).call(this, "createOrUpdate", async () => {
2319
4440
  const ifVersion = parseIfVersion(b, c, d);
2320
4441
  if (Array.isArray(a)) {
2321
- if (a.length === 0)
2322
- return [];
2323
- await __privateMethod$2(this, _updateRecords, updateRecords_fn).call(this, a, {
4442
+ if (a.length === 0) return [];
4443
+ await __privateMethod$2(this, _RestRepository_instances, updateRecords_fn).call(this, a, {
2324
4444
  ifVersion,
2325
4445
  upsert: true
2326
4446
  });
@@ -2329,89 +4449,78 @@ class RestRepository extends Query {
2329
4449
  return result;
2330
4450
  }
2331
4451
  if (isString(a) && isObject(b)) {
2332
- if (a === "")
2333
- throw new Error("The id can't be empty");
4452
+ if (a === "") throw new Error("The id can't be empty");
2334
4453
  const columns = isValidSelectableColumns(c) ? c : void 0;
2335
- return await __privateMethod$2(this, _upsertRecordWithID, upsertRecordWithID_fn).call(this, a, b, columns, { ifVersion });
4454
+ return await __privateMethod$2(this, _RestRepository_instances, upsertRecordWithID_fn).call(this, a, b, columns, { ifVersion });
2336
4455
  }
2337
- if (isObject(a) && isString(a.xata_id)) {
2338
- if (a.xata_id === "")
2339
- throw new Error("The id can't be empty");
4456
+ if (isObject(a) && isString(a.id)) {
4457
+ if (a.id === "") throw new Error("The id can't be empty");
2340
4458
  const columns = isValidSelectableColumns(c) ? c : void 0;
2341
- return await __privateMethod$2(this, _upsertRecordWithID, upsertRecordWithID_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, { ifVersion });
4459
+ return await __privateMethod$2(this, _RestRepository_instances, upsertRecordWithID_fn).call(this, a.id, { ...a, id: void 0 }, columns, { ifVersion });
2342
4460
  }
2343
4461
  if (!isDefined(a) && isObject(b)) {
2344
4462
  return await this.create(b, c);
2345
4463
  }
2346
- if (isObject(a) && !isDefined(a.xata_id)) {
4464
+ if (isObject(a) && !isDefined(a.id)) {
2347
4465
  return await this.create(a, b);
2348
4466
  }
2349
4467
  throw new Error("Invalid arguments for createOrUpdate method");
2350
4468
  });
2351
4469
  }
2352
4470
  async createOrReplace(a, b, c, d) {
2353
- return __privateGet$2(this, _trace).call(this, "createOrReplace", async () => {
4471
+ return __privateGet$3(this, _trace).call(this, "createOrReplace", async () => {
2354
4472
  const ifVersion = parseIfVersion(b, c, d);
2355
4473
  if (Array.isArray(a)) {
2356
- if (a.length === 0)
2357
- return [];
2358
- const ids = await __privateMethod$2(this, _insertRecords, insertRecords_fn).call(this, a, { ifVersion, createOnly: false });
4474
+ if (a.length === 0) return [];
4475
+ const ids = await __privateMethod$2(this, _RestRepository_instances, insertRecords_fn).call(this, a, { ifVersion, createOnly: false });
2359
4476
  const columns = isValidSelectableColumns(b) ? b : ["*"];
2360
4477
  const result = await this.read(ids, columns);
2361
4478
  return result;
2362
4479
  }
2363
4480
  if (isString(a) && isObject(b)) {
2364
- if (a === "")
2365
- throw new Error("The id can't be empty");
4481
+ if (a === "") throw new Error("The id can't be empty");
2366
4482
  const columns = isValidSelectableColumns(c) ? c : void 0;
2367
- return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: false, ifVersion });
4483
+ return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: false, ifVersion });
2368
4484
  }
2369
- if (isObject(a) && isString(a.xata_id)) {
2370
- if (a.xata_id === "")
2371
- throw new Error("The id can't be empty");
4485
+ if (isObject(a) && isString(a.id)) {
4486
+ if (a.id === "") throw new Error("The id can't be empty");
2372
4487
  const columns = isValidSelectableColumns(c) ? c : void 0;
2373
- return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, {
2374
- createOnly: false,
2375
- ifVersion
2376
- });
4488
+ return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a.id, { ...a, id: void 0 }, columns, { createOnly: false, ifVersion });
2377
4489
  }
2378
4490
  if (!isDefined(a) && isObject(b)) {
2379
4491
  return await this.create(b, c);
2380
4492
  }
2381
- if (isObject(a) && !isDefined(a.xata_id)) {
4493
+ if (isObject(a) && !isDefined(a.id)) {
2382
4494
  return await this.create(a, b);
2383
4495
  }
2384
4496
  throw new Error("Invalid arguments for createOrReplace method");
2385
4497
  });
2386
4498
  }
2387
4499
  async delete(a, b) {
2388
- return __privateGet$2(this, _trace).call(this, "delete", async () => {
4500
+ return __privateGet$3(this, _trace).call(this, "delete", async () => {
2389
4501
  if (Array.isArray(a)) {
2390
- if (a.length === 0)
2391
- return [];
4502
+ if (a.length === 0) return [];
2392
4503
  const ids = a.map((o) => {
2393
- if (isString(o))
2394
- return o;
2395
- if (isString(o.xata_id))
2396
- return o.xata_id;
4504
+ if (isString(o)) return o;
4505
+ if (isString(o.id)) return o.id;
2397
4506
  throw new Error("Invalid arguments for delete method");
2398
4507
  });
2399
4508
  const columns = isValidSelectableColumns(b) ? b : ["*"];
2400
4509
  const result = await this.read(a, columns);
2401
- await __privateMethod$2(this, _deleteRecords, deleteRecords_fn).call(this, ids);
4510
+ await __privateMethod$2(this, _RestRepository_instances, deleteRecords_fn).call(this, ids);
2402
4511
  return result;
2403
4512
  }
2404
4513
  if (isString(a)) {
2405
- return __privateMethod$2(this, _deleteRecord, deleteRecord_fn).call(this, a, b);
4514
+ return __privateMethod$2(this, _RestRepository_instances, deleteRecord_fn).call(this, a, b);
2406
4515
  }
2407
- if (isObject(a) && isString(a.xata_id)) {
2408
- return __privateMethod$2(this, _deleteRecord, deleteRecord_fn).call(this, a.xata_id, b);
4516
+ if (isObject(a) && isString(a.id)) {
4517
+ return __privateMethod$2(this, _RestRepository_instances, deleteRecord_fn).call(this, a.id, b);
2409
4518
  }
2410
4519
  throw new Error("Invalid arguments for delete method");
2411
4520
  });
2412
4521
  }
2413
4522
  async deleteOrThrow(a, b) {
2414
- return __privateGet$2(this, _trace).call(this, "deleteOrThrow", async () => {
4523
+ return __privateGet$3(this, _trace).call(this, "deleteOrThrow", async () => {
2415
4524
  const result = await this.delete(a, b);
2416
4525
  if (Array.isArray(result)) {
2417
4526
  const missingIds = compact(
@@ -2429,13 +4538,13 @@ class RestRepository extends Query {
2429
4538
  });
2430
4539
  }
2431
4540
  async search(query, options = {}) {
2432
- return __privateGet$2(this, _trace).call(this, "search", async () => {
4541
+ return __privateGet$3(this, _trace).call(this, "search", async () => {
2433
4542
  const { records, totalCount } = await searchTable({
2434
4543
  pathParams: {
2435
4544
  workspace: "{workspaceId}",
2436
4545
  dbBranchName: "{dbBranch}",
2437
4546
  region: "{region}",
2438
- tableName: __privateGet$2(this, _table)
4547
+ tableName: __privateGet$3(this, _table)
2439
4548
  },
2440
4549
  body: {
2441
4550
  query,
@@ -2447,23 +4556,23 @@ class RestRepository extends Query {
2447
4556
  page: options.page,
2448
4557
  target: options.target
2449
4558
  },
2450
- ...__privateGet$2(this, _getFetchProps).call(this)
4559
+ ...__privateGet$3(this, _getFetchProps).call(this)
2451
4560
  });
2452
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
4561
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
2453
4562
  return {
2454
- records: records.map((item) => initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), item, ["*"])),
4563
+ records: records.map((item) => initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), item, ["*"])),
2455
4564
  totalCount
2456
4565
  };
2457
4566
  });
2458
4567
  }
2459
4568
  async vectorSearch(column, query, options) {
2460
- return __privateGet$2(this, _trace).call(this, "vectorSearch", async () => {
4569
+ return __privateGet$3(this, _trace).call(this, "vectorSearch", async () => {
2461
4570
  const { records, totalCount } = await vectorSearchTable({
2462
4571
  pathParams: {
2463
4572
  workspace: "{workspaceId}",
2464
4573
  dbBranchName: "{dbBranch}",
2465
4574
  region: "{region}",
2466
- tableName: __privateGet$2(this, _table)
4575
+ tableName: __privateGet$3(this, _table)
2467
4576
  },
2468
4577
  body: {
2469
4578
  column,
@@ -2472,39 +4581,41 @@ class RestRepository extends Query {
2472
4581
  size: options?.size,
2473
4582
  filter: options?.filter
2474
4583
  },
2475
- ...__privateGet$2(this, _getFetchProps).call(this)
4584
+ ...__privateGet$3(this, _getFetchProps).call(this)
2476
4585
  });
2477
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
4586
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
2478
4587
  return {
2479
- records: records.map((item) => initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), item, ["*"])),
4588
+ records: records.map((item) => initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), item, ["*"])),
2480
4589
  totalCount
2481
4590
  };
2482
4591
  });
2483
4592
  }
2484
4593
  async aggregate(aggs, filter) {
2485
- return __privateGet$2(this, _trace).call(this, "aggregate", async () => {
4594
+ return __privateGet$3(this, _trace).call(this, "aggregate", async () => {
2486
4595
  const result = await aggregateTable({
2487
4596
  pathParams: {
2488
4597
  workspace: "{workspaceId}",
2489
4598
  dbBranchName: "{dbBranch}",
2490
4599
  region: "{region}",
2491
- tableName: __privateGet$2(this, _table)
4600
+ tableName: __privateGet$3(this, _table)
2492
4601
  },
2493
4602
  body: { aggs, filter },
2494
- ...__privateGet$2(this, _getFetchProps).call(this)
4603
+ ...__privateGet$3(this, _getFetchProps).call(this)
2495
4604
  });
2496
4605
  return result;
2497
4606
  });
2498
4607
  }
2499
4608
  async query(query) {
2500
- return __privateGet$2(this, _trace).call(this, "query", async () => {
4609
+ return __privateGet$3(this, _trace).call(this, "query", async () => {
4610
+ const cacheQuery = await __privateMethod$2(this, _RestRepository_instances, getCacheQuery_fn).call(this, query);
4611
+ if (cacheQuery) return new Page(query, cacheQuery.meta, cacheQuery.records);
2501
4612
  const data = query.getQueryOptions();
2502
4613
  const { meta, records: objects } = await queryTable({
2503
4614
  pathParams: {
2504
4615
  workspace: "{workspaceId}",
2505
4616
  dbBranchName: "{dbBranch}",
2506
4617
  region: "{region}",
2507
- tableName: __privateGet$2(this, _table)
4618
+ tableName: __privateGet$3(this, _table)
2508
4619
  },
2509
4620
  body: {
2510
4621
  filter: cleanFilter(data.filter),
@@ -2514,30 +4625,31 @@ class RestRepository extends Query {
2514
4625
  consistency: data.consistency
2515
4626
  },
2516
4627
  fetchOptions: data.fetchOptions,
2517
- ...__privateGet$2(this, _getFetchProps).call(this)
4628
+ ...__privateGet$3(this, _getFetchProps).call(this)
2518
4629
  });
2519
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
4630
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
2520
4631
  const records = objects.map(
2521
4632
  (record) => initObject(
2522
- __privateGet$2(this, _db),
4633
+ __privateGet$3(this, _db),
2523
4634
  schemaTables,
2524
- __privateGet$2(this, _table),
4635
+ __privateGet$3(this, _table),
2525
4636
  record,
2526
4637
  data.columns ?? ["*"]
2527
4638
  )
2528
4639
  );
4640
+ await __privateMethod$2(this, _RestRepository_instances, setCacheQuery_fn).call(this, query, meta, records);
2529
4641
  return new Page(query, meta, records);
2530
4642
  });
2531
4643
  }
2532
4644
  async summarizeTable(query, summaries, summariesFilter) {
2533
- return __privateGet$2(this, _trace).call(this, "summarize", async () => {
4645
+ return __privateGet$3(this, _trace).call(this, "summarize", async () => {
2534
4646
  const data = query.getQueryOptions();
2535
4647
  const result = await summarizeTable({
2536
4648
  pathParams: {
2537
4649
  workspace: "{workspaceId}",
2538
4650
  dbBranchName: "{dbBranch}",
2539
4651
  region: "{region}",
2540
- tableName: __privateGet$2(this, _table)
4652
+ tableName: __privateGet$3(this, _table)
2541
4653
  },
2542
4654
  body: {
2543
4655
  filter: cleanFilter(data.filter),
@@ -2548,13 +4660,13 @@ class RestRepository extends Query {
2548
4660
  summaries,
2549
4661
  summariesFilter
2550
4662
  },
2551
- ...__privateGet$2(this, _getFetchProps).call(this)
4663
+ ...__privateGet$3(this, _getFetchProps).call(this)
2552
4664
  });
2553
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
4665
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
2554
4666
  return {
2555
4667
  ...result,
2556
4668
  summaries: result.summaries.map(
2557
- (summary) => initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), summary, data.columns ?? [])
4669
+ (summary) => initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), summary, data.columns ?? [])
2558
4670
  )
2559
4671
  };
2560
4672
  });
@@ -2566,7 +4678,7 @@ class RestRepository extends Query {
2566
4678
  workspace: "{workspaceId}",
2567
4679
  dbBranchName: "{dbBranch}",
2568
4680
  region: "{region}",
2569
- tableName: __privateGet$2(this, _table),
4681
+ tableName: __privateGet$3(this, _table),
2570
4682
  sessionId: options?.sessionId
2571
4683
  },
2572
4684
  body: {
@@ -2576,7 +4688,7 @@ class RestRepository extends Query {
2576
4688
  search: options?.searchType === "keyword" ? options?.search : void 0,
2577
4689
  vectorSearch: options?.searchType === "vector" ? options?.vectorSearch : void 0
2578
4690
  },
2579
- ...__privateGet$2(this, _getFetchProps).call(this)
4691
+ ...__privateGet$3(this, _getFetchProps).call(this)
2580
4692
  };
2581
4693
  if (options?.onMessage) {
2582
4694
  fetchSSERequest({
@@ -2596,51 +4708,48 @@ class RestRepository extends Query {
2596
4708
  _table = new WeakMap();
2597
4709
  _getFetchProps = new WeakMap();
2598
4710
  _db = new WeakMap();
4711
+ _cache = new WeakMap();
2599
4712
  _schemaTables = new WeakMap();
2600
4713
  _trace = new WeakMap();
2601
- _insertRecordWithoutId = new WeakSet();
4714
+ _RestRepository_instances = new WeakSet();
2602
4715
  insertRecordWithoutId_fn = async function(object, columns = ["*"]) {
2603
- const record = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
2604
- console.log("record", record);
4716
+ const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
2605
4717
  const response = await insertRecord({
2606
4718
  pathParams: {
2607
4719
  workspace: "{workspaceId}",
2608
4720
  dbBranchName: "{dbBranch}",
2609
4721
  region: "{region}",
2610
- tableName: __privateGet$2(this, _table)
4722
+ tableName: __privateGet$3(this, _table)
2611
4723
  },
2612
4724
  queryParams: { columns },
2613
4725
  body: record,
2614
- ...__privateGet$2(this, _getFetchProps).call(this)
4726
+ ...__privateGet$3(this, _getFetchProps).call(this)
2615
4727
  });
2616
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
2617
- return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
4728
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
4729
+ return initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), response, columns);
2618
4730
  };
2619
- _insertRecordWithId = new WeakSet();
2620
4731
  insertRecordWithId_fn = async function(recordId, object, columns = ["*"], { createOnly, ifVersion }) {
2621
- if (!recordId)
2622
- return null;
2623
- const record = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
4732
+ if (!recordId) return null;
4733
+ const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
2624
4734
  const response = await insertRecordWithID({
2625
4735
  pathParams: {
2626
4736
  workspace: "{workspaceId}",
2627
4737
  dbBranchName: "{dbBranch}",
2628
4738
  region: "{region}",
2629
- tableName: __privateGet$2(this, _table),
4739
+ tableName: __privateGet$3(this, _table),
2630
4740
  recordId
2631
4741
  },
2632
4742
  body: record,
2633
4743
  queryParams: { createOnly, columns, ifVersion },
2634
- ...__privateGet$2(this, _getFetchProps).call(this)
4744
+ ...__privateGet$3(this, _getFetchProps).call(this)
2635
4745
  });
2636
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
2637
- return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
4746
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
4747
+ return initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), response, columns);
2638
4748
  };
2639
- _insertRecords = new WeakSet();
2640
4749
  insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
2641
4750
  const operations = await promiseMap(objects, async (object) => {
2642
- const record = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
2643
- return { insert: { table: __privateGet$2(this, _table), record, createOnly, ifVersion } };
4751
+ const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
4752
+ return { insert: { table: __privateGet$3(this, _table), record, createOnly, ifVersion } };
2644
4753
  });
2645
4754
  const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
2646
4755
  const ids = [];
@@ -2652,7 +4761,7 @@ insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
2652
4761
  region: "{region}"
2653
4762
  },
2654
4763
  body: { operations: operations2 },
2655
- ...__privateGet$2(this, _getFetchProps).call(this)
4764
+ ...__privateGet$3(this, _getFetchProps).call(this)
2656
4765
  });
2657
4766
  for (const result of results) {
2658
4767
  if (result.operation === "insert") {
@@ -2664,26 +4773,24 @@ insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
2664
4773
  }
2665
4774
  return ids;
2666
4775
  };
2667
- _updateRecordWithID = new WeakSet();
2668
4776
  updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVersion }) {
2669
- if (!recordId)
2670
- return null;
2671
- const { xata_id: _id, ...record } = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
4777
+ if (!recordId) return null;
4778
+ const { id: _id, ...record } = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
2672
4779
  try {
2673
4780
  const response = await updateRecordWithID({
2674
4781
  pathParams: {
2675
4782
  workspace: "{workspaceId}",
2676
4783
  dbBranchName: "{dbBranch}",
2677
4784
  region: "{region}",
2678
- tableName: __privateGet$2(this, _table),
4785
+ tableName: __privateGet$3(this, _table),
2679
4786
  recordId
2680
4787
  },
2681
4788
  queryParams: { columns, ifVersion },
2682
4789
  body: record,
2683
- ...__privateGet$2(this, _getFetchProps).call(this)
4790
+ ...__privateGet$3(this, _getFetchProps).call(this)
2684
4791
  });
2685
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
2686
- return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
4792
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
4793
+ return initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), response, columns);
2687
4794
  } catch (e) {
2688
4795
  if (isObject(e) && e.status === 404) {
2689
4796
  return null;
@@ -2691,11 +4798,10 @@ updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
2691
4798
  throw e;
2692
4799
  }
2693
4800
  };
2694
- _updateRecords = new WeakSet();
2695
4801
  updateRecords_fn = async function(objects, { ifVersion, upsert }) {
2696
- const operations = await promiseMap(objects, async ({ xata_id, ...object }) => {
2697
- const fields = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
2698
- return { update: { table: __privateGet$2(this, _table), id: xata_id, ifVersion, upsert, fields } };
4802
+ const operations = await promiseMap(objects, async ({ id, ...object }) => {
4803
+ const fields = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
4804
+ return { update: { table: __privateGet$3(this, _table), id, ifVersion, upsert, fields } };
2699
4805
  });
2700
4806
  const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
2701
4807
  const ids = [];
@@ -2707,7 +4813,7 @@ updateRecords_fn = async function(objects, { ifVersion, upsert }) {
2707
4813
  region: "{region}"
2708
4814
  },
2709
4815
  body: { operations: operations2 },
2710
- ...__privateGet$2(this, _getFetchProps).call(this)
4816
+ ...__privateGet$3(this, _getFetchProps).call(this)
2711
4817
  });
2712
4818
  for (const result of results) {
2713
4819
  if (result.operation === "update") {
@@ -2719,43 +4825,39 @@ updateRecords_fn = async function(objects, { ifVersion, upsert }) {
2719
4825
  }
2720
4826
  return ids;
2721
4827
  };
2722
- _upsertRecordWithID = new WeakSet();
2723
4828
  upsertRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVersion }) {
2724
- if (!recordId)
2725
- return null;
4829
+ if (!recordId) return null;
2726
4830
  const response = await upsertRecordWithID({
2727
4831
  pathParams: {
2728
4832
  workspace: "{workspaceId}",
2729
4833
  dbBranchName: "{dbBranch}",
2730
4834
  region: "{region}",
2731
- tableName: __privateGet$2(this, _table),
4835
+ tableName: __privateGet$3(this, _table),
2732
4836
  recordId
2733
4837
  },
2734
4838
  queryParams: { columns, ifVersion },
2735
4839
  body: object,
2736
- ...__privateGet$2(this, _getFetchProps).call(this)
4840
+ ...__privateGet$3(this, _getFetchProps).call(this)
2737
4841
  });
2738
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
2739
- return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
4842
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
4843
+ return initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), response, columns);
2740
4844
  };
2741
- _deleteRecord = new WeakSet();
2742
4845
  deleteRecord_fn = async function(recordId, columns = ["*"]) {
2743
- if (!recordId)
2744
- return null;
4846
+ if (!recordId) return null;
2745
4847
  try {
2746
4848
  const response = await deleteRecord({
2747
4849
  pathParams: {
2748
4850
  workspace: "{workspaceId}",
2749
4851
  dbBranchName: "{dbBranch}",
2750
4852
  region: "{region}",
2751
- tableName: __privateGet$2(this, _table),
4853
+ tableName: __privateGet$3(this, _table),
2752
4854
  recordId
2753
4855
  },
2754
4856
  queryParams: { columns },
2755
- ...__privateGet$2(this, _getFetchProps).call(this)
4857
+ ...__privateGet$3(this, _getFetchProps).call(this)
2756
4858
  });
2757
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
2758
- return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
4859
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
4860
+ return initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), response, columns);
2759
4861
  } catch (e) {
2760
4862
  if (isObject(e) && e.status === 404) {
2761
4863
  return null;
@@ -2763,10 +4865,9 @@ deleteRecord_fn = async function(recordId, columns = ["*"]) {
2763
4865
  throw e;
2764
4866
  }
2765
4867
  };
2766
- _deleteRecords = new WeakSet();
2767
4868
  deleteRecords_fn = async function(recordIds) {
2768
4869
  const chunkedOperations = chunk(
2769
- compact(recordIds).map((id) => ({ delete: { table: __privateGet$2(this, _table), id } })),
4870
+ compact(recordIds).map((id) => ({ delete: { table: __privateGet$3(this, _table), id } })),
2770
4871
  BULK_OPERATION_MAX_SIZE
2771
4872
  );
2772
4873
  for (const operations of chunkedOperations) {
@@ -2777,35 +4878,43 @@ deleteRecords_fn = async function(recordIds) {
2777
4878
  region: "{region}"
2778
4879
  },
2779
4880
  body: { operations },
2780
- ...__privateGet$2(this, _getFetchProps).call(this)
4881
+ ...__privateGet$3(this, _getFetchProps).call(this)
2781
4882
  });
2782
4883
  }
2783
4884
  };
2784
- _getSchemaTables = new WeakSet();
4885
+ setCacheQuery_fn = async function(query, meta, records) {
4886
+ await __privateGet$3(this, _cache)?.set(`query_${__privateGet$3(this, _table)}:${query.key()}`, { date: /* @__PURE__ */ new Date(), meta, records });
4887
+ };
4888
+ getCacheQuery_fn = async function(query) {
4889
+ const key = `query_${__privateGet$3(this, _table)}:${query.key()}`;
4890
+ const result = await __privateGet$3(this, _cache)?.get(key);
4891
+ if (!result) return null;
4892
+ const defaultTTL = __privateGet$3(this, _cache)?.defaultQueryTTL ?? -1;
4893
+ const { cache: ttl = defaultTTL } = query.getQueryOptions();
4894
+ if (ttl < 0) return null;
4895
+ const hasExpired = result.date.getTime() + ttl < Date.now();
4896
+ return hasExpired ? null : result;
4897
+ };
2785
4898
  getSchemaTables_fn = async function() {
2786
- if (__privateGet$2(this, _schemaTables))
2787
- return __privateGet$2(this, _schemaTables);
4899
+ if (__privateGet$3(this, _schemaTables)) return __privateGet$3(this, _schemaTables);
2788
4900
  const { schema } = await getBranchDetails({
2789
4901
  pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
2790
- ...__privateGet$2(this, _getFetchProps).call(this)
4902
+ ...__privateGet$3(this, _getFetchProps).call(this)
2791
4903
  });
2792
- __privateSet$1(this, _schemaTables, schema.tables);
4904
+ __privateSet$2(this, _schemaTables, schema.tables);
2793
4905
  return schema.tables;
2794
4906
  };
2795
- _transformObjectToApi = new WeakSet();
2796
4907
  transformObjectToApi_fn = async function(object) {
2797
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
2798
- const schema = schemaTables.find((table) => table.name === __privateGet$2(this, _table));
2799
- if (!schema)
2800
- throw new Error(`Table ${__privateGet$2(this, _table)} not found in schema`);
4908
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
4909
+ const schema = schemaTables.find((table) => table.name === __privateGet$3(this, _table));
4910
+ if (!schema) throw new Error(`Table ${__privateGet$3(this, _table)} not found in schema`);
2801
4911
  const result = {};
2802
4912
  for (const [key, value] of Object.entries(object)) {
2803
- if (["xata_version", "xata_createdat", "xata_updatedat"].includes(key))
2804
- continue;
4913
+ if (key === "xata") continue;
2805
4914
  const type = schema.columns.find((column) => column.name === key)?.type;
2806
4915
  switch (type) {
2807
4916
  case "link": {
2808
- result[key] = isIdentifiable(value) ? value.xata_id : value;
4917
+ result[key] = isIdentifiable(value) ? value.id : value;
2809
4918
  break;
2810
4919
  }
2811
4920
  case "datetime": {
@@ -2829,13 +4938,12 @@ transformObjectToApi_fn = async function(object) {
2829
4938
  };
2830
4939
  const initObject = (db, schemaTables, table, object, selectedColumns) => {
2831
4940
  const data = {};
2832
- Object.assign(data, { ...object });
4941
+ const { xata, ...rest } = object ?? {};
4942
+ Object.assign(data, rest);
2833
4943
  const { columns } = schemaTables.find(({ name }) => name === table) ?? {};
2834
- if (!columns)
2835
- console.error(`Table ${table} not found in schema`);
4944
+ if (!columns) console.error(`Table ${table} not found in schema`);
2836
4945
  for (const column of columns ?? []) {
2837
- if (!isValidColumn(selectedColumns, column))
2838
- continue;
4946
+ if (!isValidColumn(selectedColumns, column)) continue;
2839
4947
  const value = data[column.name];
2840
4948
  switch (column.type) {
2841
4949
  case "datetime": {
@@ -2892,21 +5000,28 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
2892
5000
  }
2893
5001
  }
2894
5002
  const record = { ...data };
5003
+ const metadata = xata !== void 0 ? { ...xata, createdAt: new Date(xata.createdAt), updatedAt: new Date(xata.updatedAt) } : void 0;
2895
5004
  record.read = function(columns2) {
2896
- return db[table].read(record["xata_id"], columns2);
5005
+ return db[table].read(record["id"], columns2);
2897
5006
  };
2898
5007
  record.update = function(data2, b, c) {
2899
5008
  const columns2 = isValidSelectableColumns(b) ? b : ["*"];
2900
5009
  const ifVersion = parseIfVersion(b, c);
2901
- return db[table].update(record["xata_id"], data2, columns2, { ifVersion });
5010
+ return db[table].update(record["id"], data2, columns2, { ifVersion });
2902
5011
  };
2903
5012
  record.replace = function(data2, b, c) {
2904
5013
  const columns2 = isValidSelectableColumns(b) ? b : ["*"];
2905
5014
  const ifVersion = parseIfVersion(b, c);
2906
- return db[table].createOrReplace(record["xata_id"], data2, columns2, { ifVersion });
5015
+ return db[table].createOrReplace(record["id"], data2, columns2, { ifVersion });
2907
5016
  };
2908
5017
  record.delete = function() {
2909
- return db[table].delete(record["xata_id"]);
5018
+ return db[table].delete(record["id"]);
5019
+ };
5020
+ if (metadata !== void 0) {
5021
+ record.xata = Object.freeze(metadata);
5022
+ }
5023
+ record.getMetadata = function() {
5024
+ return record.xata;
2910
5025
  };
2911
5026
  record.toSerializable = function() {
2912
5027
  return JSON.parse(JSON.stringify(record));
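With the `initObject` changes above, hydrated rows expose their system data on a frozen `record.xata` object (with `createdAt`/`updatedAt` revived as `Date` instances) and via `record.getMetadata()`, while the record helpers key off `record.id` again instead of `xata_id`. A small sketch; the table and column names are illustrative:

import { BaseClient } from "@xata.io/client";

const xata = new BaseClient({
  databaseURL: process.env.XATA_DATABASE_URL,
  apiKey: process.env.XATA_API_KEY
});

const post = await xata.db.posts.read("rec_abc123");
if (post) {
  console.log(post.id);                            // record id
  console.log(post.xata.createdAt instanceof Date); // true, revived by initObject
  console.log(post.getMetadata());                  // same frozen object as post.xata
  await post.update({ title: "Updated" });          // delegates to db.posts.update(post.id, ...)
}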
@@ -2914,22 +5029,19 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
2914
5029
  record.toString = function() {
2915
5030
  return JSON.stringify(record);
2916
5031
  };
2917
- for (const prop of ["read", "update", "replace", "delete", "toSerializable", "toString"]) {
5032
+ for (const prop of ["read", "update", "replace", "delete", "getMetadata", "toSerializable", "toString"]) {
2918
5033
  Object.defineProperty(record, prop, { enumerable: false });
2919
5034
  }
2920
5035
  Object.freeze(record);
2921
5036
  return record;
2922
5037
  };
2923
5038
  function extractId(value) {
2924
- if (isString(value))
2925
- return value;
2926
- if (isObject(value) && isString(value.xata_id))
2927
- return value.xata_id;
5039
+ if (isString(value)) return value;
5040
+ if (isObject(value) && isString(value.id)) return value.id;
2928
5041
  return void 0;
2929
5042
  }
2930
5043
  function isValidColumn(columns, column) {
2931
- if (columns.includes("*"))
2932
- return true;
5044
+ if (columns.includes("*")) return true;
2933
5045
  return columns.filter((item) => isString(item) && item.startsWith(column.name)).length > 0;
2934
5046
  }
2935
5047
  function parseIfVersion(...args) {
@@ -2941,6 +5053,44 @@ function parseIfVersion(...args) {
2941
5053
  return void 0;
2942
5054
  }
2943
5055
 
5056
+ var __typeError$3 = (msg) => {
5057
+ throw TypeError(msg);
5058
+ };
5059
+ var __accessCheck$3 = (obj, member, msg) => member.has(obj) || __typeError$3("Cannot " + msg);
5060
+ var __privateGet$2 = (obj, member, getter) => (__accessCheck$3(obj, member, "read from private field"), member.get(obj));
5061
+ var __privateAdd$3 = (obj, member, value) => member.has(obj) ? __typeError$3("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
5062
+ var __privateSet$1 = (obj, member, value, setter) => (__accessCheck$3(obj, member, "write to private field"), member.set(obj, value), value);
5063
+ var _map;
5064
+ class SimpleCache {
5065
+ constructor(options = {}) {
5066
+ __privateAdd$3(this, _map);
5067
+ __privateSet$1(this, _map, /* @__PURE__ */ new Map());
5068
+ this.capacity = options.max ?? 500;
5069
+ this.defaultQueryTTL = options.defaultQueryTTL ?? 60 * 1e3;
5070
+ }
5071
+ async getAll() {
5072
+ return Object.fromEntries(__privateGet$2(this, _map));
5073
+ }
5074
+ async get(key) {
5075
+ return __privateGet$2(this, _map).get(key) ?? null;
5076
+ }
5077
+ async set(key, value) {
5078
+ await this.delete(key);
5079
+ __privateGet$2(this, _map).set(key, value);
5080
+ if (__privateGet$2(this, _map).size > this.capacity) {
5081
+ const leastRecentlyUsed = __privateGet$2(this, _map).keys().next().value;
5082
+ if (leastRecentlyUsed) await this.delete(leastRecentlyUsed);
5083
+ }
5084
+ }
5085
+ async delete(key) {
5086
+ __privateGet$2(this, _map).delete(key);
5087
+ }
5088
+ async clear() {
5089
+ return __privateGet$2(this, _map).clear();
5090
+ }
5091
+ }
5092
+ _map = new WeakMap();
5093
+
2944
5094
  const greaterThan = (value) => ({ $gt: value });
2945
5095
  const gt = greaterThan;
2946
5096
  const greaterThanEquals = (value) => ({ $ge: value });
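The `SimpleCache` added above is a Map-backed store with a `max` capacity (default 500) and a `defaultQueryTTL` (default 60 s); `set` re-inserts the key and evicts the oldest entry once capacity is exceeded. A standalone sketch of that behaviour:

import { SimpleCache } from "@xata.io/client";

const cache = new SimpleCache({ max: 2, defaultQueryTTL: 60_000 });

await cache.set("a", 1);
await cache.set("b", 2);
await cache.set("c", 3);           // capacity exceeded: oldest key "a" is evicted

console.log(await cache.get("a")); // null
console.log(await cache.get("b")); // 2
console.log(await cache.getAll()); // { b: 2, c: 3 }
await cache.clear();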
@@ -2969,19 +5119,12 @@ const includesAll = (value) => ({ $includesAll: value });
2969
5119
  const includesNone = (value) => ({ $includesNone: value });
2970
5120
  const includesAny = (value) => ({ $includesAny: value });
2971
5121
 
2972
- var __accessCheck$2 = (obj, member, msg) => {
2973
- if (!member.has(obj))
2974
- throw TypeError("Cannot " + msg);
2975
- };
2976
- var __privateGet$1 = (obj, member, getter) => {
2977
- __accessCheck$2(obj, member, "read from private field");
2978
- return getter ? getter.call(obj) : member.get(obj);
2979
- };
2980
- var __privateAdd$2 = (obj, member, value) => {
2981
- if (member.has(obj))
2982
- throw TypeError("Cannot add the same private member more than once");
2983
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
5122
+ var __typeError$2 = (msg) => {
5123
+ throw TypeError(msg);
2984
5124
  };
5125
+ var __accessCheck$2 = (obj, member, msg) => member.has(obj) || __typeError$2("Cannot " + msg);
5126
+ var __privateGet$1 = (obj, member, getter) => (__accessCheck$2(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
5127
+ var __privateAdd$2 = (obj, member, value) => member.has(obj) ? __typeError$2("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
2985
5128
  var _tables;
2986
5129
  class SchemaPlugin extends XataPlugin {
2987
5130
  constructor() {
@@ -2993,8 +5136,7 @@ class SchemaPlugin extends XataPlugin {
2993
5136
  {},
2994
5137
  {
2995
5138
  get: (_target, table) => {
2996
- if (!isString(table))
2997
- throw new Error("Invalid table name");
5139
+ if (!isString(table)) throw new Error("Invalid table name");
2998
5140
  if (__privateGet$1(this, _tables)[table] === void 0) {
2999
5141
  __privateGet$1(this, _tables)[table] = new RestRepository({ db, pluginOptions, table, schemaTables: pluginOptions.tables });
3000
5142
  }
@@ -3085,30 +5227,23 @@ function getContentType(file) {
3085
5227
  return "application/octet-stream";
3086
5228
  }
3087
5229
 
3088
- var __accessCheck$1 = (obj, member, msg) => {
3089
- if (!member.has(obj))
3090
- throw TypeError("Cannot " + msg);
3091
- };
3092
- var __privateAdd$1 = (obj, member, value) => {
3093
- if (member.has(obj))
3094
- throw TypeError("Cannot add the same private member more than once");
3095
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
3096
- };
3097
- var __privateMethod$1 = (obj, member, method) => {
3098
- __accessCheck$1(obj, member, "access private method");
3099
- return method;
5230
+ var __typeError$1 = (msg) => {
5231
+ throw TypeError(msg);
3100
5232
  };
3101
- var _search, search_fn;
5233
+ var __accessCheck$1 = (obj, member, msg) => member.has(obj) || __typeError$1("Cannot " + msg);
5234
+ var __privateAdd$1 = (obj, member, value) => member.has(obj) ? __typeError$1("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
5235
+ var __privateMethod$1 = (obj, member, method) => (__accessCheck$1(obj, member, "access private method"), method);
5236
+ var _SearchPlugin_instances, search_fn;
3102
5237
  class SearchPlugin extends XataPlugin {
3103
5238
  constructor(db) {
3104
5239
  super();
3105
5240
  this.db = db;
3106
- __privateAdd$1(this, _search);
5241
+ __privateAdd$1(this, _SearchPlugin_instances);
3107
5242
  }
3108
5243
  build(pluginOptions) {
3109
5244
  return {
3110
5245
  all: async (query, options = {}) => {
3111
- const { records, totalCount } = await __privateMethod$1(this, _search, search_fn).call(this, query, options, pluginOptions);
5246
+ const { records, totalCount } = await __privateMethod$1(this, _SearchPlugin_instances, search_fn).call(this, query, options, pluginOptions);
3112
5247
  return {
3113
5248
  totalCount,
3114
5249
  records: records.map((record) => {
@@ -3118,7 +5253,7 @@ class SearchPlugin extends XataPlugin {
3118
5253
  };
3119
5254
  },
3120
5255
  byTable: async (query, options = {}) => {
3121
- const { records: rawRecords, totalCount } = await __privateMethod$1(this, _search, search_fn).call(this, query, options, pluginOptions);
5256
+ const { records: rawRecords, totalCount } = await __privateMethod$1(this, _SearchPlugin_instances, search_fn).call(this, query, options, pluginOptions);
3122
5257
  const records = rawRecords.reduce((acc, record) => {
3123
5258
  const { table = "orphan" } = record.xata;
3124
5259
  const items = acc[table] ?? [];
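Both search entry points now surface `totalCount` alongside the hydrated results, and `byTable` groups hits by the `table` recorded in each result's `xata` metadata (as the reduce above builds). A usage sketch, assuming `byTable` returns `{ totalCount, records }` keyed by table name; the query text and table names are illustrative:

import { BaseClient } from "@xata.io/client";

const xata = new BaseClient({
  databaseURL: process.env.XATA_DATABASE_URL,
  apiKey: process.env.XATA_API_KEY
});

const { totalCount, records } = await xata.search.all("keyboard", {
  tables: ["products", "reviews"],
  fuzziness: 1
});
console.log(totalCount, records.length);

const byTable = await xata.search.byTable("keyboard", { tables: ["products"] });
console.log(byTable.totalCount, byTable.records.products?.length ?? 0);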
@@ -3130,7 +5265,7 @@ class SearchPlugin extends XataPlugin {
3130
5265
  };
3131
5266
  }
3132
5267
  }
3133
- _search = new WeakSet();
5268
+ _SearchPlugin_instances = new WeakSet();
3134
5269
  search_fn = async function(query, options, pluginOptions) {
3135
5270
  const { tables, fuzziness, highlight, prefix, page } = options ?? {};
3136
5271
  const { records, totalCount } = await searchBranch({
@@ -3166,8 +5301,7 @@ function arrayString(val) {
3166
5301
  return result;
3167
5302
  }
3168
5303
  function prepareValue(value) {
3169
- if (!isDefined(value))
3170
- return null;
5304
+ if (!isDefined(value)) return null;
3171
5305
  if (value instanceof Date) {
3172
5306
  return value.toISOString();
3173
5307
  }
@@ -3194,31 +5328,42 @@ function prepareParams(param1, param2) {
3194
5328
  return { statement, params: param2?.map((value) => prepareValue(value)) };
3195
5329
  }
3196
5330
  if (isObject(param1)) {
3197
- const { statement, params, consistency } = param1;
3198
- return { statement, params: params?.map((value) => prepareValue(value)), consistency };
5331
+ const { statement, params, consistency, responseType } = param1;
5332
+ return { statement, params: params?.map((value) => prepareValue(value)), consistency, responseType };
3199
5333
  }
3200
5334
  throw new Error("Invalid query");
3201
5335
  }
3202
5336
 
3203
5337
  class SQLPlugin extends XataPlugin {
3204
5338
  build(pluginOptions) {
3205
- return async (query, ...parameters) => {
5339
+ const sqlFunction = async (query, ...parameters) => {
3206
5340
  if (!isParamsObject(query) && (!isTemplateStringsArray(query) || !Array.isArray(parameters))) {
3207
5341
  throw new Error("Invalid usage of `xata.sql`. Please use it as a tagged template or with an object.");
3208
5342
  }
3209
- const { statement, params, consistency } = prepareParams(query, parameters);
3210
- const {
3211
- records,
3212
- rows,
3213
- warning,
3214
- columns = []
3215
- } = await sqlQuery({
5343
+ const { statement, params, consistency, responseType } = prepareParams(query, parameters);
5344
+ const { warning, columns, ...response } = await sqlQuery({
3216
5345
  pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
3217
- body: { statement, params, consistency },
5346
+ body: { statement, params, consistency, responseType },
3218
5347
  ...pluginOptions
3219
5348
  });
5349
+ const records = "records" in response ? response.records : void 0;
5350
+ const rows = "rows" in response ? response.rows : void 0;
3220
5351
  return { records, rows, warning, columns };
3221
5352
  };
5353
+ sqlFunction.connectionString = buildConnectionString(pluginOptions);
5354
+ sqlFunction.batch = async (query) => {
5355
+ const { results } = await sqlBatchQuery({
5356
+ pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
5357
+ body: {
5358
+ statements: query.statements.map(({ statement, params }) => ({ statement, params })),
5359
+ consistency: query.consistency,
5360
+ responseType: query.responseType
5361
+ },
5362
+ ...pluginOptions
5363
+ });
5364
+ return { results };
5365
+ };
5366
+ return sqlFunction;
3222
5367
  }
3223
5368
  }
3224
5369
  function isTemplateStringsArray(strings) {
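The rebuilt SQL plugin is now a callable with extra surface: `responseType` is forwarded so results come back as `records` or as `rows`, `xata.sql.connectionString` exposes a Postgres wire URL, and `xata.sql.batch` maps onto the new `sqlBatchQuery` operation. A sketch of the three call shapes; table and column names are illustrative, and the `"array"` response type is assumed from the API's documented values:

import { BaseClient } from "@xata.io/client";

const xata = new BaseClient({
  databaseURL: process.env.XATA_DATABASE_URL,
  apiKey: process.env.XATA_API_KEY,
  branch: "main"
});

// Tagged-template usage: parameters are serialized by prepareValue (Dates, arrays, JSON).
const name = "Ada";
const { records } = await xata.sql`SELECT * FROM "users" WHERE name = ${name}`;

// Object usage: an "array" responseType is assumed to return rows instead of records.
const { rows, columns } = await xata.sql({
  statement: 'SELECT id, name FROM "users" LIMIT 10',
  responseType: "array"
});

// Batch usage: several statements in one sqlBatchQuery call.
const { results } = await xata.sql.batch({
  statements: [
    { statement: 'SELECT count(*) FROM "users"', params: [] },
    { statement: 'SELECT count(*) FROM "posts"', params: [] }
  ],
  consistency: "strong"
});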
@@ -3227,6 +5372,32 @@ function isTemplateStringsArray(strings) {
3227
5372
  function isParamsObject(params) {
3228
5373
  return isObject(params) && "statement" in params;
3229
5374
  }
5375
+ function buildDomain(host, region) {
5376
+ switch (host) {
5377
+ case "production":
5378
+ return `${region}.sql.xata.sh`;
5379
+ case "staging":
5380
+ return `${region}.sql.staging-xata.dev`;
5381
+ case "dev":
5382
+ return `${region}.sql.dev-xata.dev`;
5383
+ case "local":
5384
+ return "localhost:7654";
5385
+ default:
5386
+ throw new Error("Invalid host provider");
5387
+ }
5388
+ }
5389
+ function buildConnectionString({ apiKey, workspacesApiUrl, branch }) {
5390
+ const url = isString(workspacesApiUrl) ? workspacesApiUrl : workspacesApiUrl("", {});
5391
+ const parts = parseWorkspacesUrlParts(url);
5392
+ if (!parts) throw new Error("Invalid workspaces URL");
5393
+ const { workspace: workspaceSlug, region, database, host } = parts;
5394
+ const domain = buildDomain(host, region);
5395
+ const workspace = workspaceSlug.split("-").pop();
5396
+ if (!workspace || !region || !database || !apiKey || !branch) {
5397
+ throw new Error("Unable to build xata connection string");
5398
+ }
5399
+ return `postgresql://${workspace}:${apiKey}@${domain}/${database}:${branch}?sslmode=require`;
5400
+ }
3230
5401
 
3231
5402
  class TransactionPlugin extends XataPlugin {
3232
5403
  build(pluginOptions) {
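`buildConnectionString` above derives a direct Postgres URL of the form `postgresql://<workspace>:<apiKey>@<region>.sql.xata.sh/<database>:<branch>?sslmode=require`. A sketch of feeding it to a standard Postgres driver; the `pg` package here is only an illustration of a wire-protocol client, not something the SDK depends on:

import pg from "pg";
import { BaseClient } from "@xata.io/client";

const xata = new BaseClient({
  databaseURL: process.env.XATA_DATABASE_URL,
  apiKey: process.env.XATA_API_KEY,
  branch: "main"
});

// The plugin computes this eagerly in build(), so it is a plain string property.
const client = new pg.Client({ connectionString: xata.sql.connectionString });
await client.connect();
const { rows } = await client.query("SELECT 1 AS ok");
console.log(rows[0].ok);
await client.end();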
@@ -3243,41 +5414,28 @@ class TransactionPlugin extends XataPlugin {
3243
5414
  }
3244
5415
  }
3245
5416
 
3246
- var __accessCheck = (obj, member, msg) => {
3247
- if (!member.has(obj))
3248
- throw TypeError("Cannot " + msg);
3249
- };
3250
- var __privateGet = (obj, member, getter) => {
3251
- __accessCheck(obj, member, "read from private field");
3252
- return getter ? getter.call(obj) : member.get(obj);
3253
- };
3254
- var __privateAdd = (obj, member, value) => {
3255
- if (member.has(obj))
3256
- throw TypeError("Cannot add the same private member more than once");
3257
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
3258
- };
3259
- var __privateSet = (obj, member, value, setter) => {
3260
- __accessCheck(obj, member, "write to private field");
3261
- setter ? setter.call(obj, value) : member.set(obj, value);
3262
- return value;
3263
- };
3264
- var __privateMethod = (obj, member, method) => {
3265
- __accessCheck(obj, member, "access private method");
3266
- return method;
5417
+ var __typeError = (msg) => {
5418
+ throw TypeError(msg);
3267
5419
  };
5420
+ var __accessCheck = (obj, member, msg) => member.has(obj) || __typeError("Cannot " + msg);
5421
+ var __privateGet = (obj, member, getter) => (__accessCheck(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
5422
+ var __privateAdd = (obj, member, value) => member.has(obj) ? __typeError("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
5423
+ var __privateSet = (obj, member, value, setter) => (__accessCheck(obj, member, "write to private field"), member.set(obj, value), value);
5424
+ var __privateMethod = (obj, member, method) => (__accessCheck(obj, member, "access private method"), method);
3268
5425
  const buildClient = (plugins) => {
3269
- var _options, _parseOptions, parseOptions_fn, _getFetchProps, getFetchProps_fn, _a;
5426
+ var _options, _instances, parseOptions_fn, getFetchProps_fn, _a;
3270
5427
  return _a = class {
3271
5428
  constructor(options = {}, tables) {
3272
- __privateAdd(this, _parseOptions);
3273
- __privateAdd(this, _getFetchProps);
3274
- __privateAdd(this, _options, void 0);
3275
- const safeOptions = __privateMethod(this, _parseOptions, parseOptions_fn).call(this, options);
5429
+ __privateAdd(this, _instances);
5430
+ __privateAdd(this, _options);
5431
+ const safeOptions = __privateMethod(this, _instances, parseOptions_fn).call(this, options);
3276
5432
  __privateSet(this, _options, safeOptions);
3277
5433
  const pluginOptions = {
3278
- ...__privateMethod(this, _getFetchProps, getFetchProps_fn).call(this, safeOptions),
5434
+ ...__privateMethod(this, _instances, getFetchProps_fn).call(this, safeOptions),
5435
+ cache: safeOptions.cache,
3279
5436
  host: safeOptions.host,
3280
- tables
5437
+ tables,
5438
+ branch: safeOptions.branch
3281
5439
  };
3282
5440
  const db = new SchemaPlugin().build(pluginOptions);
3283
5441
  const search = new SearchPlugin(db).build(pluginOptions);
@@ -3291,8 +5449,7 @@ const buildClient = (plugins) => {
3291
5449
  this.sql = sql;
3292
5450
  this.files = files;
3293
5451
  for (const [key, namespace] of Object.entries(plugins ?? {})) {
3294
- if (namespace === void 0)
3295
- continue;
5452
+ if (namespace === void 0) continue;
3296
5453
  this[key] = namespace.build(pluginOptions);
3297
5454
  }
3298
5455
  }
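The constructor above still wires arbitrary plugins into the client: each entry passed to `buildClient` has `build(pluginOptions)` called with the shared fetch props plus `cache`, `host`, `tables` and `branch`, and the result is attached under the plugin's key. A minimal custom-plugin sketch; the plugin name and behaviour are made up for illustration:

import { buildClient, XataPlugin } from "@xata.io/client";

class PingPlugin extends XataPlugin {
  build(pluginOptions) {
    // pluginOptions carries the fetch props plus cache/host/tables/branch.
    return { branch: () => pluginOptions.branch };
  }
}

class MyClient extends buildClient({ ping: new PingPlugin() }) {}

const xata = new MyClient({
  databaseURL: process.env.XATA_DATABASE_URL,
  apiKey: process.env.XATA_API_KEY,
  branch: "main"
});
console.log(xata.ping.branch()); // "main"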
@@ -3301,7 +5458,7 @@ const buildClient = (plugins) => {
3301
5458
  const branch = __privateGet(this, _options).branch;
3302
5459
  return { databaseURL, branch };
3303
5460
  }
3304
- }, _options = new WeakMap(), _parseOptions = new WeakSet(), parseOptions_fn = function(options) {
5461
+ }, _options = new WeakMap(), _instances = new WeakSet(), parseOptions_fn = function(options) {
3305
5462
  const enableBrowser = options?.enableBrowser ?? getEnableBrowserVariable() ?? false;
3306
5463
  const isBrowser = typeof window !== "undefined" && typeof Deno === "undefined";
3307
5464
  if (isBrowser && !enableBrowser) {
@@ -3312,6 +5469,7 @@ const buildClient = (plugins) => {
3312
5469
  const fetch = getFetchImplementation(options?.fetch);
3313
5470
  const databaseURL = options?.databaseURL || getDatabaseURL();
3314
5471
  const apiKey = options?.apiKey || getAPIKey();
5472
+ const cache = options?.cache ?? new SimpleCache({ defaultQueryTTL: 0 });
3315
5473
  const trace = options?.trace ?? defaultTrace;
3316
5474
  const clientName = options?.clientName;
3317
5475
  const host = options?.host ?? "production";
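Because the fallback above is `new SimpleCache({ defaultQueryTTL: 0 })`, cached query reads are effectively disabled unless a cache is supplied at construction time. A sketch of opting in:

import { BaseClient, SimpleCache } from "@xata.io/client";

const xata = new BaseClient({
  databaseURL: process.env.XATA_DATABASE_URL,
  apiKey: process.env.XATA_API_KEY,
  branch: "main",
  // Cache up to 1000 query results, served for 30 seconds by default.
  cache: new SimpleCache({ max: 1000, defaultQueryTTL: 30_000 })
});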
@@ -3347,6 +5505,7 @@ const buildClient = (plugins) => {
3347
5505
  databaseURL,
3348
5506
  apiKey,
3349
5507
  branch,
5508
+ cache,
3350
5509
  trace,
3351
5510
  host,
3352
5511
  clientID: generateUUID(),
@@ -3354,7 +5513,7 @@ const buildClient = (plugins) => {
3354
5513
  clientName,
3355
5514
  xataAgentExtra
3356
5515
  };
3357
- }, _getFetchProps = new WeakSet(), getFetchProps_fn = function({
5516
+ }, getFetchProps_fn = function({
3358
5517
  fetch,
3359
5518
  apiKey,
3360
5519
  databaseURL,
@@ -3395,26 +5554,19 @@ class Serializer {
3395
5554
  }
3396
5555
  toJSON(data) {
3397
5556
  function visit(obj) {
3398
- if (Array.isArray(obj))
3399
- return obj.map(visit);
5557
+ if (Array.isArray(obj)) return obj.map(visit);
3400
5558
  const type = typeof obj;
3401
- if (type === "undefined")
3402
- return { [META]: "undefined" };
3403
- if (type === "bigint")
3404
- return { [META]: "bigint", [VALUE]: obj.toString() };
3405
- if (obj === null || type !== "object")
3406
- return obj;
5559
+ if (type === "undefined") return { [META]: "undefined" };
5560
+ if (type === "bigint") return { [META]: "bigint", [VALUE]: obj.toString() };
5561
+ if (obj === null || type !== "object") return obj;
3407
5562
  const constructor = obj.constructor;
3408
5563
  const o = { [META]: constructor.name };
3409
5564
  for (const [key, value] of Object.entries(obj)) {
3410
5565
  o[key] = visit(value);
3411
5566
  }
3412
- if (constructor === Date)
3413
- o[VALUE] = obj.toISOString();
3414
- if (constructor === Map)
3415
- o[VALUE] = Object.fromEntries(obj);
3416
- if (constructor === Set)
3417
- o[VALUE] = [...obj];
5567
+ if (constructor === Date) o[VALUE] = obj.toISOString();
5568
+ if (constructor === Map) o[VALUE] = Object.fromEntries(obj);
5569
+ if (constructor === Set) o[VALUE] = [...obj];
3418
5570
  return o;
3419
5571
  }
3420
5572
  return JSON.stringify(visit(data));
@@ -3427,16 +5579,11 @@ class Serializer {
3427
5579
  if (constructor) {
3428
5580
  return Object.assign(Object.create(constructor.prototype), rest);
3429
5581
  }
3430
- if (clazz === "Date")
3431
- return new Date(val);
3432
- if (clazz === "Set")
3433
- return new Set(val);
3434
- if (clazz === "Map")
3435
- return new Map(Object.entries(val));
3436
- if (clazz === "bigint")
3437
- return BigInt(val);
3438
- if (clazz === "undefined")
3439
- return void 0;
5582
+ if (clazz === "Date") return new Date(val);
5583
+ if (clazz === "Set") return new Set(val);
5584
+ if (clazz === "Map") return new Map(Object.entries(val));
5585
+ if (clazz === "bigint") return BigInt(val);
5586
+ if (clazz === "undefined") return void 0;
3440
5587
  return rest;
3441
5588
  }
3442
5589
  return value;
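The Serializer hunks above round-trip values that plain JSON drops: `Date`, `Map`, `Set`, `bigint` and `undefined` are tagged with a meta field on the way out and revived on the way back. A sketch using the exported `serialize`/`deserialize` helpers, assuming (as in previous releases) that they wrap `Serializer#toJSON` and its inverse:

import { serialize, deserialize } from "@xata.io/client";

const payload = {
  when: new Date("2024-01-01T00:00:00.000Z"),
  tags: new Set(["a", "b"]),
  counters: new Map([["visits", 10]]),
  big: 9007199254740993n,
  missing: undefined
};

const json = serialize(payload);   // plain JSON string with type tags
const back = deserialize(json);

console.log(back.when instanceof Date);       // true
console.log([...back.tags]);                  // ["a", "b"]
console.log(back.counters.get("visits"));     // 10
console.log(back.big === 9007199254740993n);  // true
console.log("missing" in back, back.missing); // true undefined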
@@ -3458,5 +5605,5 @@ class XataError extends Error {
3458
5605
  }
3459
5606
  }
3460
5607
 
3461
- export { BaseClient, FetcherError, FilesPlugin, operationsByTag as Operations, PAGINATION_DEFAULT_OFFSET, PAGINATION_DEFAULT_SIZE, PAGINATION_MAX_OFFSET, PAGINATION_MAX_SIZE, Page, PageRecordArray, Query, RecordArray, RecordColumnTypes, Repository, RestRepository, SQLPlugin, SchemaPlugin, SearchPlugin, Serializer, TransactionPlugin, XataApiClient, XataApiPlugin, XataError, XataFile, XataPlugin, acceptWorkspaceMemberInvite, adaptTable, addGitBranchesEntry, addTableColumn, aggregateTable, applyBranchSchemaEdit, applyMigration, askTable, askTableSession, branchTransaction, buildClient, buildPreviewBranchName, buildProviderString, bulkInsertTableRecords, cancelWorkspaceMemberInvite, compareBranchSchemas, compareBranchWithUserSchema, compareMigrationRequest, contains, copyBranch, createBranch, createCluster, createDatabase, createMigrationRequest, createTable, createUserAPIKey, createWorkspace, deleteBranch, deleteColumn, deleteDatabase, deleteDatabaseGithubSettings, deleteFile, deleteFileItem, deleteOAuthAccessToken, deleteRecord, deleteTable, deleteUser, deleteUserAPIKey, deleteUserOAuthClient, deleteWorkspace, deserialize, endsWith, equals, executeBranchMigrationPlan, exists, fileAccess, fileUpload, ge, getAPIKey, getAuthorizationCode, getBranch, getBranchDetails, getBranchList, getBranchMetadata, getBranchMigrationHistory, getBranchMigrationJobStatus, getBranchMigrationPlan, getBranchSchemaHistory, getBranchStats, getCluster, getColumn, getDatabaseGithubSettings, getDatabaseList, getDatabaseMetadata, getDatabaseSettings, getDatabaseURL, getFile, getFileItem, getGitBranchesMapping, getHostUrl, getMigrationHistory, getMigrationJobStatus, getMigrationRequest, getMigrationRequestIsMerged, getPreviewBranch, getRecord, getSchema, getTableColumns, getTableSchema, getUser, getUserAPIKeys, getUserOAuthAccessTokens, getUserOAuthClients, getWorkspace, getWorkspaceMembersList, getWorkspacesList, grantAuthorizationCode, greaterEquals, greaterThan, greaterThanEquals, gt, gte, iContains, iPattern, includes, includesAll, includesAny, includesNone, insertRecord, insertRecordWithID, inviteWorkspaceMember, is, isCursorPaginationOptions, isHostProviderAlias, isHostProviderBuilder, isIdentifiable, isNot, isValidExpandedColumn, isValidSelectableColumns, le, lessEquals, lessThan, lessThanEquals, listClusters, listMigrationRequestsCommits, listRegions, lt, lte, mergeMigrationRequest, notExists, operationsByTag, parseProviderString, parseWorkspacesUrlParts, pattern, previewBranchSchemaEdit, pushBranchMigrations, putFile, putFileItem, queryMigrationRequests, queryTable, removeGitBranchesEntry, removeWorkspaceMember, renameDatabase, resendWorkspaceMemberInvite, resolveBranch, searchBranch, searchTable, serialize, setTableSchema, sqlQuery, startsWith, summarizeTable, transformImage, updateBranchMetadata, updateBranchSchema, updateCluster, updateColumn, updateDatabaseGithubSettings, updateDatabaseMetadata, updateDatabaseSettings, updateMigrationRequest, updateOAuthAccessToken, updateRecordWithID, updateTable, updateUser, updateWorkspace, updateWorkspaceMemberInvite, updateWorkspaceMemberRole, upsertRecordWithID, vectorSearchTable };
5608
+ export { BaseClient, Buffer, FetcherError, FilesPlugin, operationsByTag as Operations, PAGINATION_DEFAULT_OFFSET, PAGINATION_DEFAULT_SIZE, PAGINATION_MAX_OFFSET, PAGINATION_MAX_SIZE, Page, PageRecordArray, Query, RecordArray, RecordColumnTypes, Repository, RestRepository, SQLPlugin, SchemaPlugin, SearchPlugin, Serializer, SimpleCache, TransactionPlugin, XataApiClient, XataApiPlugin, XataError, XataFile, XataPlugin, acceptWorkspaceMemberInvite, adaptAllTables, adaptTable, addGitBranchesEntry, addTableColumn, aggregateTable, applyBranchSchemaEdit, applyMigration, askTable, askTableSession, branchTransaction, buildClient, buildPreviewBranchName, buildProviderString, bulkInsertTableRecords, cancelWorkspaceMemberInvite, compareBranchSchemas, compareBranchWithUserSchema, compareMigrationRequest, completeMigration, contains, copyBranch, createBranch, createBranchAsync, createCluster, createDatabase, createMigrationRequest, createTable, createUserAPIKey, createWorkspace, deleteBranch, deleteCluster, deleteColumn, deleteDatabase, deleteDatabaseGithubSettings, deleteFile, deleteFileItem, deleteOAuthAccessToken, deleteRecord, deleteTable, deleteUser, deleteUserAPIKey, deleteUserOAuthClient, deleteWorkspace, deserialize, dropClusterExtension, endsWith, equals, executeBranchMigrationPlan, exists, fileAccess, fileUpload, ge, getAPIKey, getAuthorizationCode, getBranch, getBranchDetails, getBranchList, getBranchMetadata, getBranchMigrationHistory, getBranchMigrationJobStatus, getBranchMigrationPlan, getBranchMoveStatus, getBranchSchemaHistory, getBranchStats, getCluster, getClusterMetrics, getColumn, getDatabaseGithubSettings, getDatabaseList, getDatabaseMetadata, getDatabaseSettings, getDatabaseURL, getFile, getFileItem, getGitBranchesMapping, getHostUrl, getMigrationHistory, getMigrationJobStatus, getMigrationJobs, getMigrationRequest, getMigrationRequestIsMerged, getPreviewBranch, getRecord, getSchema, getSchemas, getTableColumns, getTableSchema, getTaskStatus, getTasks, getUser, getUserAPIKeys, getUserOAuthAccessTokens, getUserOAuthClients, getWorkspace, getWorkspaceMembersList, getWorkspaceSettings, getWorkspacesList, grantAuthorizationCode, greaterEquals, greaterThan, greaterThanEquals, gt, gte, iContains, iPattern, includes, includesAll, includesAny, includesNone, insertRecord, insertRecordWithID, installClusterExtension, inviteWorkspaceMember, is, isCursorPaginationOptions, isHostProviderAlias, isHostProviderBuilder, isIdentifiable, isNot, isValidExpandedColumn, isValidSelectableColumns, isXataRecord, le, lessEquals, lessThan, lessThanEquals, listClusterBranches, listClusterExtensions, listClusters, listMigrationRequestsCommits, listRegions, lt, lte, mergeMigrationRequest, moveBranch, notExists, operationsByTag, parseProviderString, parseWorkspacesUrlParts, pattern, previewBranchSchemaEdit, pushBranchMigrations, putFile, putFileItem, queryMigrationRequests, queryTable, removeGitBranchesEntry, removeWorkspaceMember, renameDatabase, resendWorkspaceMemberInvite, resolveBranch, rollbackMigration, searchBranch, searchTable, serialize, setTableSchema, sqlBatchQuery, sqlQuery, startMigration, startsWith, summarizeTable, transformImage, updateBranchMetadata, updateBranchSchema, updateCluster, updateColumn, updateDatabaseGithubSettings, updateDatabaseMetadata, updateDatabaseSettings, updateMigrationRequest, updateOAuthAccessToken, updateRecordWithID, updateTable, updateUser, updateWorkspace, updateWorkspaceMemberInvite, updateWorkspaceMemberRole, updateWorkspaceSettings, upsertRecordWithID, vectorSearchTable };
3462
5609
  //# sourceMappingURL=index.mjs.map