@xata.io/client 0.0.0-alpha.vfc08a49f5b2f1e93114c76b97e7da90408e84709 → 0.0.0-alpha.vfc2160d20dff569d0f4b3272a1273ca130158619
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-add-version.log +1 -1
- package/.turbo/turbo-build.log +3 -3
- package/CHANGELOG.md +52 -6
- package/dist/index.cjs +2752 -657
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.ts +2279 -261
- package/dist/index.mjs +2725 -655
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.mjs
CHANGED
@@ -22,6 +22,1789 @@ const TraceAttributes = {
|
|
22
22
|
CLOUDFLARE_RAY_ID: "cf.ray"
|
23
23
|
};
|
24
24
|
|
25
|
+
const lookup = [];
|
26
|
+
const revLookup = [];
|
27
|
+
const code = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
|
28
|
+
for (let i = 0, len = code.length; i < len; ++i) {
|
29
|
+
lookup[i] = code[i];
|
30
|
+
revLookup[code.charCodeAt(i)] = i;
|
31
|
+
}
|
32
|
+
revLookup["-".charCodeAt(0)] = 62;
|
33
|
+
revLookup["_".charCodeAt(0)] = 63;
|
34
|
+
function getLens(b64) {
|
35
|
+
const len = b64.length;
|
36
|
+
if (len % 4 > 0) {
|
37
|
+
throw new Error("Invalid string. Length must be a multiple of 4");
|
38
|
+
}
|
39
|
+
let validLen = b64.indexOf("=");
|
40
|
+
if (validLen === -1) validLen = len;
|
41
|
+
const placeHoldersLen = validLen === len ? 0 : 4 - validLen % 4;
|
42
|
+
return [validLen, placeHoldersLen];
|
43
|
+
}
|
44
|
+
function _byteLength(_b64, validLen, placeHoldersLen) {
|
45
|
+
return (validLen + placeHoldersLen) * 3 / 4 - placeHoldersLen;
|
46
|
+
}
|
47
|
+
function toByteArray(b64) {
|
48
|
+
let tmp;
|
49
|
+
const lens = getLens(b64);
|
50
|
+
const validLen = lens[0];
|
51
|
+
const placeHoldersLen = lens[1];
|
52
|
+
const arr = new Uint8Array(_byteLength(b64, validLen, placeHoldersLen));
|
53
|
+
let curByte = 0;
|
54
|
+
const len = placeHoldersLen > 0 ? validLen - 4 : validLen;
|
55
|
+
let i;
|
56
|
+
for (i = 0; i < len; i += 4) {
|
57
|
+
tmp = revLookup[b64.charCodeAt(i)] << 18 | revLookup[b64.charCodeAt(i + 1)] << 12 | revLookup[b64.charCodeAt(i + 2)] << 6 | revLookup[b64.charCodeAt(i + 3)];
|
58
|
+
arr[curByte++] = tmp >> 16 & 255;
|
59
|
+
arr[curByte++] = tmp >> 8 & 255;
|
60
|
+
arr[curByte++] = tmp & 255;
|
61
|
+
}
|
62
|
+
if (placeHoldersLen === 2) {
|
63
|
+
tmp = revLookup[b64.charCodeAt(i)] << 2 | revLookup[b64.charCodeAt(i + 1)] >> 4;
|
64
|
+
arr[curByte++] = tmp & 255;
|
65
|
+
}
|
66
|
+
if (placeHoldersLen === 1) {
|
67
|
+
tmp = revLookup[b64.charCodeAt(i)] << 10 | revLookup[b64.charCodeAt(i + 1)] << 4 | revLookup[b64.charCodeAt(i + 2)] >> 2;
|
68
|
+
arr[curByte++] = tmp >> 8 & 255;
|
69
|
+
arr[curByte++] = tmp & 255;
|
70
|
+
}
|
71
|
+
return arr;
|
72
|
+
}
|
73
|
+
function tripletToBase64(num) {
|
74
|
+
return lookup[num >> 18 & 63] + lookup[num >> 12 & 63] + lookup[num >> 6 & 63] + lookup[num & 63];
|
75
|
+
}
|
76
|
+
function encodeChunk(uint8, start, end) {
|
77
|
+
let tmp;
|
78
|
+
const output = [];
|
79
|
+
for (let i = start; i < end; i += 3) {
|
80
|
+
tmp = (uint8[i] << 16 & 16711680) + (uint8[i + 1] << 8 & 65280) + (uint8[i + 2] & 255);
|
81
|
+
output.push(tripletToBase64(tmp));
|
82
|
+
}
|
83
|
+
return output.join("");
|
84
|
+
}
|
85
|
+
function fromByteArray(uint8) {
|
86
|
+
let tmp;
|
87
|
+
const len = uint8.length;
|
88
|
+
const extraBytes = len % 3;
|
89
|
+
const parts = [];
|
90
|
+
const maxChunkLength = 16383;
|
91
|
+
for (let i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) {
|
92
|
+
parts.push(encodeChunk(uint8, i, i + maxChunkLength > len2 ? len2 : i + maxChunkLength));
|
93
|
+
}
|
94
|
+
if (extraBytes === 1) {
|
95
|
+
tmp = uint8[len - 1];
|
96
|
+
parts.push(lookup[tmp >> 2] + lookup[tmp << 4 & 63] + "==");
|
97
|
+
} else if (extraBytes === 2) {
|
98
|
+
tmp = (uint8[len - 2] << 8) + uint8[len - 1];
|
99
|
+
parts.push(lookup[tmp >> 10] + lookup[tmp >> 4 & 63] + lookup[tmp << 2 & 63] + "=");
|
100
|
+
}
|
101
|
+
return parts.join("");
|
102
|
+
}
|
103
|
+
|
104
|
+
const K_MAX_LENGTH = 2147483647;
|
105
|
+
const MAX_ARGUMENTS_LENGTH = 4096;
|
106
|
+
class Buffer extends Uint8Array {
|
107
|
+
/**
|
108
|
+
* Constructs a new `Buffer` instance.
|
109
|
+
*
|
110
|
+
* @param value
|
111
|
+
* @param encodingOrOffset
|
112
|
+
* @param length
|
113
|
+
*/
|
114
|
+
constructor(value, encodingOrOffset, length) {
|
115
|
+
if (typeof value === "number") {
|
116
|
+
if (typeof encodingOrOffset === "string") {
|
117
|
+
throw new TypeError("The first argument must be of type string, received type number");
|
118
|
+
}
|
119
|
+
if (value < 0) {
|
120
|
+
throw new RangeError("The buffer size cannot be negative");
|
121
|
+
}
|
122
|
+
super(value < 0 ? 0 : Buffer._checked(value) | 0);
|
123
|
+
} else if (typeof value === "string") {
|
124
|
+
if (typeof encodingOrOffset !== "string") {
|
125
|
+
encodingOrOffset = "utf8";
|
126
|
+
}
|
127
|
+
if (!Buffer.isEncoding(encodingOrOffset)) {
|
128
|
+
throw new TypeError("Unknown encoding: " + encodingOrOffset);
|
129
|
+
}
|
130
|
+
const length2 = Buffer.byteLength(value, encodingOrOffset) | 0;
|
131
|
+
super(length2);
|
132
|
+
const written = this.write(value, 0, this.length, encodingOrOffset);
|
133
|
+
if (written !== length2) {
|
134
|
+
throw new TypeError(
|
135
|
+
"Number of bytes written did not match expected length (wrote " + written + ", expected " + length2 + ")"
|
136
|
+
);
|
137
|
+
}
|
138
|
+
} else if (ArrayBuffer.isView(value)) {
|
139
|
+
if (Buffer._isInstance(value, Uint8Array)) {
|
140
|
+
const copy = new Uint8Array(value);
|
141
|
+
const array = copy.buffer;
|
142
|
+
const byteOffset = copy.byteOffset;
|
143
|
+
const length2 = copy.byteLength;
|
144
|
+
if (byteOffset < 0 || array.byteLength < byteOffset) {
|
145
|
+
throw new RangeError("offset is outside of buffer bounds");
|
146
|
+
}
|
147
|
+
if (array.byteLength < byteOffset + (length2 || 0)) {
|
148
|
+
throw new RangeError("length is outside of buffer bounds");
|
149
|
+
}
|
150
|
+
super(new Uint8Array(array, byteOffset, length2));
|
151
|
+
} else {
|
152
|
+
const array = value;
|
153
|
+
const length2 = array.length < 0 ? 0 : Buffer._checked(array.length) | 0;
|
154
|
+
super(new Uint8Array(length2));
|
155
|
+
for (let i = 0; i < length2; i++) {
|
156
|
+
this[i] = array[i] & 255;
|
157
|
+
}
|
158
|
+
}
|
159
|
+
} else if (value == null) {
|
160
|
+
throw new TypeError(
|
161
|
+
"The first argument must be one of type string, Buffer, ArrayBuffer, Array, or Array-like Object. Received type " + typeof value
|
162
|
+
);
|
163
|
+
} else if (Buffer._isInstance(value, ArrayBuffer) || value && Buffer._isInstance(value.buffer, ArrayBuffer)) {
|
164
|
+
const array = value;
|
165
|
+
const byteOffset = encodingOrOffset;
|
166
|
+
if (byteOffset < 0 || array.byteLength < byteOffset) {
|
167
|
+
throw new RangeError("offset is outside of buffer bounds");
|
168
|
+
}
|
169
|
+
if (array.byteLength < byteOffset + (length || 0)) {
|
170
|
+
throw new RangeError("length is outside of buffer bounds");
|
171
|
+
}
|
172
|
+
super(new Uint8Array(array, byteOffset, length));
|
173
|
+
} else if (Array.isArray(value)) {
|
174
|
+
const array = value;
|
175
|
+
const length2 = array.length < 0 ? 0 : Buffer._checked(array.length) | 0;
|
176
|
+
super(new Uint8Array(length2));
|
177
|
+
for (let i = 0; i < length2; i++) {
|
178
|
+
this[i] = array[i] & 255;
|
179
|
+
}
|
180
|
+
} else {
|
181
|
+
throw new TypeError("Unable to determine the correct way to allocate buffer for type " + typeof value);
|
182
|
+
}
|
183
|
+
}
|
184
|
+
/**
|
185
|
+
* Return JSON representation of the buffer.
|
186
|
+
*/
|
187
|
+
toJSON() {
|
188
|
+
return {
|
189
|
+
type: "Buffer",
|
190
|
+
data: Array.prototype.slice.call(this)
|
191
|
+
};
|
192
|
+
}
|
193
|
+
/**
|
194
|
+
* Writes `string` to the buffer at `offset` according to the character encoding in `encoding`. The `length`
|
195
|
+
* parameter is the number of bytes to write. If the buffer does not contain enough space to fit the entire string,
|
196
|
+
* only part of `string` will be written. However, partially encoded characters will not be written.
|
197
|
+
*
|
198
|
+
* @param string String to write to `buf`.
|
199
|
+
* @param offset Number of bytes to skip before starting to write `string`. Default: `0`.
|
200
|
+
* @param length Maximum number of bytes to write: Default: `buf.length - offset`.
|
201
|
+
* @param encoding The character encoding of `string`. Default: `utf8`.
|
202
|
+
*/
|
203
|
+
write(string, offset, length, encoding) {
|
204
|
+
if (typeof offset === "undefined") {
|
205
|
+
encoding = "utf8";
|
206
|
+
length = this.length;
|
207
|
+
offset = 0;
|
208
|
+
} else if (typeof length === "undefined" && typeof offset === "string") {
|
209
|
+
encoding = offset;
|
210
|
+
length = this.length;
|
211
|
+
offset = 0;
|
212
|
+
} else if (typeof offset === "number" && isFinite(offset)) {
|
213
|
+
offset = offset >>> 0;
|
214
|
+
if (typeof length === "number" && isFinite(length)) {
|
215
|
+
length = length >>> 0;
|
216
|
+
encoding ?? (encoding = "utf8");
|
217
|
+
} else if (typeof length === "string") {
|
218
|
+
encoding = length;
|
219
|
+
length = void 0;
|
220
|
+
}
|
221
|
+
} else {
|
222
|
+
throw new Error("Buffer.write(string, encoding, offset[, length]) is no longer supported");
|
223
|
+
}
|
224
|
+
const remaining = this.length - offset;
|
225
|
+
if (typeof length === "undefined" || length > remaining) {
|
226
|
+
length = remaining;
|
227
|
+
}
|
228
|
+
if (string.length > 0 && (length < 0 || offset < 0) || offset > this.length) {
|
229
|
+
throw new RangeError("Attempt to write outside buffer bounds");
|
230
|
+
}
|
231
|
+
encoding || (encoding = "utf8");
|
232
|
+
switch (Buffer._getEncoding(encoding)) {
|
233
|
+
case "hex":
|
234
|
+
return Buffer._hexWrite(this, string, offset, length);
|
235
|
+
case "utf8":
|
236
|
+
return Buffer._utf8Write(this, string, offset, length);
|
237
|
+
case "ascii":
|
238
|
+
case "latin1":
|
239
|
+
case "binary":
|
240
|
+
return Buffer._asciiWrite(this, string, offset, length);
|
241
|
+
case "ucs2":
|
242
|
+
case "utf16le":
|
243
|
+
return Buffer._ucs2Write(this, string, offset, length);
|
244
|
+
case "base64":
|
245
|
+
return Buffer._base64Write(this, string, offset, length);
|
246
|
+
}
|
247
|
+
}
|
248
|
+
/**
|
249
|
+
* Decodes the buffer to a string according to the specified character encoding.
|
250
|
+
* Passing `start` and `end` will decode only a subset of the buffer.
|
251
|
+
*
|
252
|
+
* Note that if the encoding is `utf8` and a byte sequence in the input is not valid UTF-8, then each invalid byte
|
253
|
+
* will be replaced with `U+FFFD`.
|
254
|
+
*
|
255
|
+
* @param encoding
|
256
|
+
* @param start
|
257
|
+
* @param end
|
258
|
+
*/
|
259
|
+
toString(encoding, start, end) {
|
260
|
+
const length = this.length;
|
261
|
+
if (length === 0) {
|
262
|
+
return "";
|
263
|
+
}
|
264
|
+
if (arguments.length === 0) {
|
265
|
+
return Buffer._utf8Slice(this, 0, length);
|
266
|
+
}
|
267
|
+
if (typeof start === "undefined" || start < 0) {
|
268
|
+
start = 0;
|
269
|
+
}
|
270
|
+
if (start > this.length) {
|
271
|
+
return "";
|
272
|
+
}
|
273
|
+
if (typeof end === "undefined" || end > this.length) {
|
274
|
+
end = this.length;
|
275
|
+
}
|
276
|
+
if (end <= 0) {
|
277
|
+
return "";
|
278
|
+
}
|
279
|
+
end >>>= 0;
|
280
|
+
start >>>= 0;
|
281
|
+
if (end <= start) {
|
282
|
+
return "";
|
283
|
+
}
|
284
|
+
if (!encoding) {
|
285
|
+
encoding = "utf8";
|
286
|
+
}
|
287
|
+
switch (Buffer._getEncoding(encoding)) {
|
288
|
+
case "hex":
|
289
|
+
return Buffer._hexSlice(this, start, end);
|
290
|
+
case "utf8":
|
291
|
+
return Buffer._utf8Slice(this, start, end);
|
292
|
+
case "ascii":
|
293
|
+
return Buffer._asciiSlice(this, start, end);
|
294
|
+
case "latin1":
|
295
|
+
case "binary":
|
296
|
+
return Buffer._latin1Slice(this, start, end);
|
297
|
+
case "ucs2":
|
298
|
+
case "utf16le":
|
299
|
+
return Buffer._utf16leSlice(this, start, end);
|
300
|
+
case "base64":
|
301
|
+
return Buffer._base64Slice(this, start, end);
|
302
|
+
}
|
303
|
+
}
|
304
|
+
/**
|
305
|
+
* Returns true if this buffer's is equal to the provided buffer, meaning they share the same exact data.
|
306
|
+
*
|
307
|
+
* @param otherBuffer
|
308
|
+
*/
|
309
|
+
equals(otherBuffer) {
|
310
|
+
if (!Buffer.isBuffer(otherBuffer)) {
|
311
|
+
throw new TypeError("Argument must be a Buffer");
|
312
|
+
}
|
313
|
+
if (this === otherBuffer) {
|
314
|
+
return true;
|
315
|
+
}
|
316
|
+
return Buffer.compare(this, otherBuffer) === 0;
|
317
|
+
}
|
318
|
+
/**
|
319
|
+
* Compares the buffer with `otherBuffer` and returns a number indicating whether the buffer comes before, after,
|
320
|
+
* or is the same as `otherBuffer` in sort order. Comparison is based on the actual sequence of bytes in each
|
321
|
+
* buffer.
|
322
|
+
*
|
323
|
+
* - `0` is returned if `otherBuffer` is the same as this buffer.
|
324
|
+
* - `1` is returned if `otherBuffer` should come before this buffer when sorted.
|
325
|
+
* - `-1` is returned if `otherBuffer` should come after this buffer when sorted.
|
326
|
+
*
|
327
|
+
* @param otherBuffer The buffer to compare to.
|
328
|
+
* @param targetStart The offset within `otherBuffer` at which to begin comparison.
|
329
|
+
* @param targetEnd The offset within `otherBuffer` at which to end comparison (exclusive).
|
330
|
+
* @param sourceStart The offset within this buffer at which to begin comparison.
|
331
|
+
* @param sourceEnd The offset within this buffer at which to end the comparison (exclusive).
|
332
|
+
*/
|
333
|
+
compare(otherBuffer, targetStart, targetEnd, sourceStart, sourceEnd) {
|
334
|
+
if (Buffer._isInstance(otherBuffer, Uint8Array)) {
|
335
|
+
otherBuffer = Buffer.from(otherBuffer, otherBuffer.byteOffset, otherBuffer.byteLength);
|
336
|
+
}
|
337
|
+
if (!Buffer.isBuffer(otherBuffer)) {
|
338
|
+
throw new TypeError("Argument must be a Buffer or Uint8Array");
|
339
|
+
}
|
340
|
+
targetStart ?? (targetStart = 0);
|
341
|
+
targetEnd ?? (targetEnd = otherBuffer ? otherBuffer.length : 0);
|
342
|
+
sourceStart ?? (sourceStart = 0);
|
343
|
+
sourceEnd ?? (sourceEnd = this.length);
|
344
|
+
if (targetStart < 0 || targetEnd > otherBuffer.length || sourceStart < 0 || sourceEnd > this.length) {
|
345
|
+
throw new RangeError("Out of range index");
|
346
|
+
}
|
347
|
+
if (sourceStart >= sourceEnd && targetStart >= targetEnd) {
|
348
|
+
return 0;
|
349
|
+
}
|
350
|
+
if (sourceStart >= sourceEnd) {
|
351
|
+
return -1;
|
352
|
+
}
|
353
|
+
if (targetStart >= targetEnd) {
|
354
|
+
return 1;
|
355
|
+
}
|
356
|
+
targetStart >>>= 0;
|
357
|
+
targetEnd >>>= 0;
|
358
|
+
sourceStart >>>= 0;
|
359
|
+
sourceEnd >>>= 0;
|
360
|
+
if (this === otherBuffer) {
|
361
|
+
return 0;
|
362
|
+
}
|
363
|
+
let x = sourceEnd - sourceStart;
|
364
|
+
let y = targetEnd - targetStart;
|
365
|
+
const len = Math.min(x, y);
|
366
|
+
const thisCopy = this.slice(sourceStart, sourceEnd);
|
367
|
+
const targetCopy = otherBuffer.slice(targetStart, targetEnd);
|
368
|
+
for (let i = 0; i < len; ++i) {
|
369
|
+
if (thisCopy[i] !== targetCopy[i]) {
|
370
|
+
x = thisCopy[i];
|
371
|
+
y = targetCopy[i];
|
372
|
+
break;
|
373
|
+
}
|
374
|
+
}
|
375
|
+
if (x < y) return -1;
|
376
|
+
if (y < x) return 1;
|
377
|
+
return 0;
|
378
|
+
}
|
379
|
+
/**
|
380
|
+
* Copies data from a region of this buffer to a region in `targetBuffer`, even if the `targetBuffer` memory
|
381
|
+
* region overlaps with this buffer.
|
382
|
+
*
|
383
|
+
* @param targetBuffer The target buffer to copy into.
|
384
|
+
* @param targetStart The offset within `targetBuffer` at which to begin writing.
|
385
|
+
* @param sourceStart The offset within this buffer at which to begin copying.
|
386
|
+
* @param sourceEnd The offset within this buffer at which to end copying (exclusive).
|
387
|
+
*/
|
388
|
+
copy(targetBuffer, targetStart, sourceStart, sourceEnd) {
|
389
|
+
if (!Buffer.isBuffer(targetBuffer)) throw new TypeError("argument should be a Buffer");
|
390
|
+
if (!sourceStart) sourceStart = 0;
|
391
|
+
if (!targetStart) targetStart = 0;
|
392
|
+
if (!sourceEnd && sourceEnd !== 0) sourceEnd = this.length;
|
393
|
+
if (targetStart >= targetBuffer.length) targetStart = targetBuffer.length;
|
394
|
+
if (!targetStart) targetStart = 0;
|
395
|
+
if (sourceEnd > 0 && sourceEnd < sourceStart) sourceEnd = sourceStart;
|
396
|
+
if (sourceEnd === sourceStart) return 0;
|
397
|
+
if (targetBuffer.length === 0 || this.length === 0) return 0;
|
398
|
+
if (targetStart < 0) {
|
399
|
+
throw new RangeError("targetStart out of bounds");
|
400
|
+
}
|
401
|
+
if (sourceStart < 0 || sourceStart >= this.length) throw new RangeError("Index out of range");
|
402
|
+
if (sourceEnd < 0) throw new RangeError("sourceEnd out of bounds");
|
403
|
+
if (sourceEnd > this.length) sourceEnd = this.length;
|
404
|
+
if (targetBuffer.length - targetStart < sourceEnd - sourceStart) {
|
405
|
+
sourceEnd = targetBuffer.length - targetStart + sourceStart;
|
406
|
+
}
|
407
|
+
const len = sourceEnd - sourceStart;
|
408
|
+
if (this === targetBuffer && typeof Uint8Array.prototype.copyWithin === "function") {
|
409
|
+
this.copyWithin(targetStart, sourceStart, sourceEnd);
|
410
|
+
} else {
|
411
|
+
Uint8Array.prototype.set.call(targetBuffer, this.subarray(sourceStart, sourceEnd), targetStart);
|
412
|
+
}
|
413
|
+
return len;
|
414
|
+
}
|
415
|
+
/**
|
416
|
+
* Returns a new `Buffer` that references the same memory as the original, but offset and cropped by the `start`
|
417
|
+
* and `end` indices. This is the same behavior as `buf.subarray()`.
|
418
|
+
*
|
419
|
+
* This method is not compatible with the `Uint8Array.prototype.slice()`, which is a superclass of Buffer. To copy
|
420
|
+
* the slice, use `Uint8Array.prototype.slice()`.
|
421
|
+
*
|
422
|
+
* @param start
|
423
|
+
* @param end
|
424
|
+
*/
|
425
|
+
slice(start, end) {
|
426
|
+
if (!start) {
|
427
|
+
start = 0;
|
428
|
+
}
|
429
|
+
const len = this.length;
|
430
|
+
start = ~~start;
|
431
|
+
end = end === void 0 ? len : ~~end;
|
432
|
+
if (start < 0) {
|
433
|
+
start += len;
|
434
|
+
if (start < 0) {
|
435
|
+
start = 0;
|
436
|
+
}
|
437
|
+
} else if (start > len) {
|
438
|
+
start = len;
|
439
|
+
}
|
440
|
+
if (end < 0) {
|
441
|
+
end += len;
|
442
|
+
if (end < 0) {
|
443
|
+
end = 0;
|
444
|
+
}
|
445
|
+
} else if (end > len) {
|
446
|
+
end = len;
|
447
|
+
}
|
448
|
+
if (end < start) {
|
449
|
+
end = start;
|
450
|
+
}
|
451
|
+
const newBuf = this.subarray(start, end);
|
452
|
+
Object.setPrototypeOf(newBuf, Buffer.prototype);
|
453
|
+
return newBuf;
|
454
|
+
}
|
455
|
+
/**
|
456
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits
|
457
|
+
* of accuracy. Behavior is undefined when value is anything other than an unsigned integer.
|
458
|
+
*
|
459
|
+
* @param value Number to write.
|
460
|
+
* @param offset Number of bytes to skip before starting to write.
|
461
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
462
|
+
* @param noAssert
|
463
|
+
* @returns `offset` plus the number of bytes written.
|
464
|
+
*/
|
465
|
+
writeUIntLE(value, offset, byteLength, noAssert) {
|
466
|
+
value = +value;
|
467
|
+
offset = offset >>> 0;
|
468
|
+
byteLength = byteLength >>> 0;
|
469
|
+
if (!noAssert) {
|
470
|
+
const maxBytes = Math.pow(2, 8 * byteLength) - 1;
|
471
|
+
Buffer._checkInt(this, value, offset, byteLength, maxBytes, 0);
|
472
|
+
}
|
473
|
+
let mul = 1;
|
474
|
+
let i = 0;
|
475
|
+
this[offset] = value & 255;
|
476
|
+
while (++i < byteLength && (mul *= 256)) {
|
477
|
+
this[offset + i] = value / mul & 255;
|
478
|
+
}
|
479
|
+
return offset + byteLength;
|
480
|
+
}
|
481
|
+
/**
|
482
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits of
|
483
|
+
* accuracy. Behavior is undefined when `value` is anything other than an unsigned integer.
|
484
|
+
*
|
485
|
+
* @param value Number to write.
|
486
|
+
* @param offset Number of bytes to skip before starting to write.
|
487
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
488
|
+
* @param noAssert
|
489
|
+
* @returns `offset` plus the number of bytes written.
|
490
|
+
*/
|
491
|
+
writeUIntBE(value, offset, byteLength, noAssert) {
|
492
|
+
value = +value;
|
493
|
+
offset = offset >>> 0;
|
494
|
+
byteLength = byteLength >>> 0;
|
495
|
+
if (!noAssert) {
|
496
|
+
const maxBytes = Math.pow(2, 8 * byteLength) - 1;
|
497
|
+
Buffer._checkInt(this, value, offset, byteLength, maxBytes, 0);
|
498
|
+
}
|
499
|
+
let i = byteLength - 1;
|
500
|
+
let mul = 1;
|
501
|
+
this[offset + i] = value & 255;
|
502
|
+
while (--i >= 0 && (mul *= 256)) {
|
503
|
+
this[offset + i] = value / mul & 255;
|
504
|
+
}
|
505
|
+
return offset + byteLength;
|
506
|
+
}
|
507
|
+
/**
|
508
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits
|
509
|
+
* of accuracy. Behavior is undefined when `value` is anything other than a signed integer.
|
510
|
+
*
|
511
|
+
* @param value Number to write.
|
512
|
+
* @param offset Number of bytes to skip before starting to write.
|
513
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
514
|
+
* @param noAssert
|
515
|
+
* @returns `offset` plus the number of bytes written.
|
516
|
+
*/
|
517
|
+
writeIntLE(value, offset, byteLength, noAssert) {
|
518
|
+
value = +value;
|
519
|
+
offset = offset >>> 0;
|
520
|
+
if (!noAssert) {
|
521
|
+
const limit = Math.pow(2, 8 * byteLength - 1);
|
522
|
+
Buffer._checkInt(this, value, offset, byteLength, limit - 1, -limit);
|
523
|
+
}
|
524
|
+
let i = 0;
|
525
|
+
let mul = 1;
|
526
|
+
let sub = 0;
|
527
|
+
this[offset] = value & 255;
|
528
|
+
while (++i < byteLength && (mul *= 256)) {
|
529
|
+
if (value < 0 && sub === 0 && this[offset + i - 1] !== 0) {
|
530
|
+
sub = 1;
|
531
|
+
}
|
532
|
+
this[offset + i] = (value / mul >> 0) - sub & 255;
|
533
|
+
}
|
534
|
+
return offset + byteLength;
|
535
|
+
}
|
536
|
+
/**
|
537
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits
|
538
|
+
* of accuracy. Behavior is undefined when `value` is anything other than a signed integer.
|
539
|
+
*
|
540
|
+
* @param value Number to write.
|
541
|
+
* @param offset Number of bytes to skip before starting to write.
|
542
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
543
|
+
* @param noAssert
|
544
|
+
* @returns `offset` plus the number of bytes written.
|
545
|
+
*/
|
546
|
+
writeIntBE(value, offset, byteLength, noAssert) {
|
547
|
+
value = +value;
|
548
|
+
offset = offset >>> 0;
|
549
|
+
if (!noAssert) {
|
550
|
+
const limit = Math.pow(2, 8 * byteLength - 1);
|
551
|
+
Buffer._checkInt(this, value, offset, byteLength, limit - 1, -limit);
|
552
|
+
}
|
553
|
+
let i = byteLength - 1;
|
554
|
+
let mul = 1;
|
555
|
+
let sub = 0;
|
556
|
+
this[offset + i] = value & 255;
|
557
|
+
while (--i >= 0 && (mul *= 256)) {
|
558
|
+
if (value < 0 && sub === 0 && this[offset + i + 1] !== 0) {
|
559
|
+
sub = 1;
|
560
|
+
}
|
561
|
+
this[offset + i] = (value / mul >> 0) - sub & 255;
|
562
|
+
}
|
563
|
+
return offset + byteLength;
|
564
|
+
}
|
565
|
+
/**
|
566
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an
|
567
|
+
* unsigned, little-endian integer supporting up to 48 bits of accuracy.
|
568
|
+
*
|
569
|
+
* @param offset Number of bytes to skip before starting to read.
|
570
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
571
|
+
* @param noAssert
|
572
|
+
*/
|
573
|
+
readUIntLE(offset, byteLength, noAssert) {
|
574
|
+
offset = offset >>> 0;
|
575
|
+
byteLength = byteLength >>> 0;
|
576
|
+
if (!noAssert) {
|
577
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
578
|
+
}
|
579
|
+
let val = this[offset];
|
580
|
+
let mul = 1;
|
581
|
+
let i = 0;
|
582
|
+
while (++i < byteLength && (mul *= 256)) {
|
583
|
+
val += this[offset + i] * mul;
|
584
|
+
}
|
585
|
+
return val;
|
586
|
+
}
|
587
|
+
/**
|
588
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an
|
589
|
+
* unsigned, big-endian integer supporting up to 48 bits of accuracy.
|
590
|
+
*
|
591
|
+
* @param offset Number of bytes to skip before starting to read.
|
592
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
593
|
+
* @param noAssert
|
594
|
+
*/
|
595
|
+
readUIntBE(offset, byteLength, noAssert) {
|
596
|
+
offset = offset >>> 0;
|
597
|
+
byteLength = byteLength >>> 0;
|
598
|
+
if (!noAssert) {
|
599
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
600
|
+
}
|
601
|
+
let val = this[offset + --byteLength];
|
602
|
+
let mul = 1;
|
603
|
+
while (byteLength > 0 && (mul *= 256)) {
|
604
|
+
val += this[offset + --byteLength] * mul;
|
605
|
+
}
|
606
|
+
return val;
|
607
|
+
}
|
608
|
+
/**
|
609
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a
|
610
|
+
* little-endian, two's complement signed value supporting up to 48 bits of accuracy.
|
611
|
+
*
|
612
|
+
* @param offset Number of bytes to skip before starting to read.
|
613
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
614
|
+
* @param noAssert
|
615
|
+
*/
|
616
|
+
readIntLE(offset, byteLength, noAssert) {
|
617
|
+
offset = offset >>> 0;
|
618
|
+
byteLength = byteLength >>> 0;
|
619
|
+
if (!noAssert) {
|
620
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
621
|
+
}
|
622
|
+
let val = this[offset];
|
623
|
+
let mul = 1;
|
624
|
+
let i = 0;
|
625
|
+
while (++i < byteLength && (mul *= 256)) {
|
626
|
+
val += this[offset + i] * mul;
|
627
|
+
}
|
628
|
+
mul *= 128;
|
629
|
+
if (val >= mul) {
|
630
|
+
val -= Math.pow(2, 8 * byteLength);
|
631
|
+
}
|
632
|
+
return val;
|
633
|
+
}
|
634
|
+
/**
|
635
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a
|
636
|
+
* big-endian, two's complement signed value supporting up to 48 bits of accuracy.
|
637
|
+
*
|
638
|
+
* @param offset Number of bytes to skip before starting to read.
|
639
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
640
|
+
* @param noAssert
|
641
|
+
*/
|
642
|
+
readIntBE(offset, byteLength, noAssert) {
|
643
|
+
offset = offset >>> 0;
|
644
|
+
byteLength = byteLength >>> 0;
|
645
|
+
if (!noAssert) {
|
646
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
647
|
+
}
|
648
|
+
let i = byteLength;
|
649
|
+
let mul = 1;
|
650
|
+
let val = this[offset + --i];
|
651
|
+
while (i > 0 && (mul *= 256)) {
|
652
|
+
val += this[offset + --i] * mul;
|
653
|
+
}
|
654
|
+
mul *= 128;
|
655
|
+
if (val >= mul) {
|
656
|
+
val -= Math.pow(2, 8 * byteLength);
|
657
|
+
}
|
658
|
+
return val;
|
659
|
+
}
|
660
|
+
/**
|
661
|
+
* Reads an unsigned 8-bit integer from `buf` at the specified `offset`.
|
662
|
+
*
|
663
|
+
* @param offset Number of bytes to skip before starting to read.
|
664
|
+
* @param noAssert
|
665
|
+
*/
|
666
|
+
readUInt8(offset, noAssert) {
|
667
|
+
offset = offset >>> 0;
|
668
|
+
if (!noAssert) {
|
669
|
+
Buffer._checkOffset(offset, 1, this.length);
|
670
|
+
}
|
671
|
+
return this[offset];
|
672
|
+
}
|
673
|
+
/**
|
674
|
+
* Reads an unsigned, little-endian 16-bit integer from `buf` at the specified `offset`.
|
675
|
+
*
|
676
|
+
* @param offset Number of bytes to skip before starting to read.
|
677
|
+
* @param noAssert
|
678
|
+
*/
|
679
|
+
readUInt16LE(offset, noAssert) {
|
680
|
+
offset = offset >>> 0;
|
681
|
+
if (!noAssert) {
|
682
|
+
Buffer._checkOffset(offset, 2, this.length);
|
683
|
+
}
|
684
|
+
return this[offset] | this[offset + 1] << 8;
|
685
|
+
}
|
686
|
+
/**
|
687
|
+
* Reads an unsigned, big-endian 16-bit integer from `buf` at the specified `offset`.
|
688
|
+
*
|
689
|
+
* @param offset Number of bytes to skip before starting to read.
|
690
|
+
* @param noAssert
|
691
|
+
*/
|
692
|
+
readUInt16BE(offset, noAssert) {
|
693
|
+
offset = offset >>> 0;
|
694
|
+
if (!noAssert) {
|
695
|
+
Buffer._checkOffset(offset, 2, this.length);
|
696
|
+
}
|
697
|
+
return this[offset] << 8 | this[offset + 1];
|
698
|
+
}
|
699
|
+
/**
|
700
|
+
* Reads an unsigned, little-endian 32-bit integer from `buf` at the specified `offset`.
|
701
|
+
*
|
702
|
+
* @param offset Number of bytes to skip before starting to read.
|
703
|
+
* @param noAssert
|
704
|
+
*/
|
705
|
+
readUInt32LE(offset, noAssert) {
|
706
|
+
offset = offset >>> 0;
|
707
|
+
if (!noAssert) {
|
708
|
+
Buffer._checkOffset(offset, 4, this.length);
|
709
|
+
}
|
710
|
+
return (this[offset] | this[offset + 1] << 8 | this[offset + 2] << 16) + this[offset + 3] * 16777216;
|
711
|
+
}
|
712
|
+
/**
|
713
|
+
* Reads an unsigned, big-endian 32-bit integer from `buf` at the specified `offset`.
|
714
|
+
*
|
715
|
+
* @param offset Number of bytes to skip before starting to read.
|
716
|
+
* @param noAssert
|
717
|
+
*/
|
718
|
+
readUInt32BE(offset, noAssert) {
|
719
|
+
offset = offset >>> 0;
|
720
|
+
if (!noAssert) {
|
721
|
+
Buffer._checkOffset(offset, 4, this.length);
|
722
|
+
}
|
723
|
+
return this[offset] * 16777216 + (this[offset + 1] << 16 | this[offset + 2] << 8 | this[offset + 3]);
|
724
|
+
}
|
725
|
+
/**
|
726
|
+
* Reads a signed 8-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer` are interpreted
|
727
|
+
* as two's complement signed values.
|
728
|
+
*
|
729
|
+
* @param offset Number of bytes to skip before starting to read.
|
730
|
+
* @param noAssert
|
731
|
+
*/
|
732
|
+
readInt8(offset, noAssert) {
|
733
|
+
offset = offset >>> 0;
|
734
|
+
if (!noAssert) {
|
735
|
+
Buffer._checkOffset(offset, 1, this.length);
|
736
|
+
}
|
737
|
+
if (!(this[offset] & 128)) {
|
738
|
+
return this[offset];
|
739
|
+
}
|
740
|
+
return (255 - this[offset] + 1) * -1;
|
741
|
+
}
|
742
|
+
/**
|
743
|
+
* Reads a signed, little-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
744
|
+
* are interpreted as two's complement signed values.
|
745
|
+
*
|
746
|
+
* @param offset Number of bytes to skip before starting to read.
|
747
|
+
* @param noAssert
|
748
|
+
*/
|
749
|
+
readInt16LE(offset, noAssert) {
|
750
|
+
offset = offset >>> 0;
|
751
|
+
if (!noAssert) {
|
752
|
+
Buffer._checkOffset(offset, 2, this.length);
|
753
|
+
}
|
754
|
+
const val = this[offset] | this[offset + 1] << 8;
|
755
|
+
return val & 32768 ? val | 4294901760 : val;
|
756
|
+
}
|
757
|
+
/**
|
758
|
+
* Reads a signed, big-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
759
|
+
* are interpreted as two's complement signed values.
|
760
|
+
*
|
761
|
+
* @param offset Number of bytes to skip before starting to read.
|
762
|
+
* @param noAssert
|
763
|
+
*/
|
764
|
+
readInt16BE(offset, noAssert) {
|
765
|
+
offset = offset >>> 0;
|
766
|
+
if (!noAssert) {
|
767
|
+
Buffer._checkOffset(offset, 2, this.length);
|
768
|
+
}
|
769
|
+
const val = this[offset + 1] | this[offset] << 8;
|
770
|
+
return val & 32768 ? val | 4294901760 : val;
|
771
|
+
}
|
772
|
+
/**
|
773
|
+
* Reads a signed, little-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
774
|
+
* are interpreted as two's complement signed values.
|
775
|
+
*
|
776
|
+
* @param offset Number of bytes to skip before starting to read.
|
777
|
+
* @param noAssert
|
778
|
+
*/
|
779
|
+
readInt32LE(offset, noAssert) {
|
780
|
+
offset = offset >>> 0;
|
781
|
+
if (!noAssert) {
|
782
|
+
Buffer._checkOffset(offset, 4, this.length);
|
783
|
+
}
|
784
|
+
return this[offset] | this[offset + 1] << 8 | this[offset + 2] << 16 | this[offset + 3] << 24;
|
785
|
+
}
|
786
|
+
/**
|
787
|
+
* Reads a signed, big-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
788
|
+
* are interpreted as two's complement signed values.
|
789
|
+
*
|
790
|
+
* @param offset Number of bytes to skip before starting to read.
|
791
|
+
* @param noAssert
|
792
|
+
*/
|
793
|
+
readInt32BE(offset, noAssert) {
|
794
|
+
offset = offset >>> 0;
|
795
|
+
if (!noAssert) {
|
796
|
+
Buffer._checkOffset(offset, 4, this.length);
|
797
|
+
}
|
798
|
+
return this[offset] << 24 | this[offset + 1] << 16 | this[offset + 2] << 8 | this[offset + 3];
|
799
|
+
}
|
800
|
+
/**
|
801
|
+
* Interprets `buf` as an array of unsigned 16-bit integers and swaps the byte order in-place.
|
802
|
+
* Throws a `RangeError` if `buf.length` is not a multiple of 2.
|
803
|
+
*/
|
804
|
+
swap16() {
|
805
|
+
const len = this.length;
|
806
|
+
if (len % 2 !== 0) {
|
807
|
+
throw new RangeError("Buffer size must be a multiple of 16-bits");
|
808
|
+
}
|
809
|
+
for (let i = 0; i < len; i += 2) {
|
810
|
+
this._swap(this, i, i + 1);
|
811
|
+
}
|
812
|
+
return this;
|
813
|
+
}
|
814
|
+
/**
|
815
|
+
* Interprets `buf` as an array of unsigned 32-bit integers and swaps the byte order in-place.
|
816
|
+
* Throws a `RangeError` if `buf.length` is not a multiple of 4.
|
817
|
+
*/
|
818
|
+
swap32() {
|
819
|
+
const len = this.length;
|
820
|
+
if (len % 4 !== 0) {
|
821
|
+
throw new RangeError("Buffer size must be a multiple of 32-bits");
|
822
|
+
}
|
823
|
+
for (let i = 0; i < len; i += 4) {
|
824
|
+
this._swap(this, i, i + 3);
|
825
|
+
this._swap(this, i + 1, i + 2);
|
826
|
+
}
|
827
|
+
return this;
|
828
|
+
}
|
829
|
+
/**
|
830
|
+
* Interprets `buf` as an array of unsigned 64-bit integers and swaps the byte order in-place.
|
831
|
+
* Throws a `RangeError` if `buf.length` is not a multiple of 8.
|
832
|
+
*/
|
833
|
+
swap64() {
|
834
|
+
const len = this.length;
|
835
|
+
if (len % 8 !== 0) {
|
836
|
+
throw new RangeError("Buffer size must be a multiple of 64-bits");
|
837
|
+
}
|
838
|
+
for (let i = 0; i < len; i += 8) {
|
839
|
+
this._swap(this, i, i + 7);
|
840
|
+
this._swap(this, i + 1, i + 6);
|
841
|
+
this._swap(this, i + 2, i + 5);
|
842
|
+
this._swap(this, i + 3, i + 4);
|
843
|
+
}
|
844
|
+
return this;
|
845
|
+
}
|
846
|
+
/**
|
847
|
+
* Swaps two octets.
|
848
|
+
*
|
849
|
+
* @param b
|
850
|
+
* @param n
|
851
|
+
* @param m
|
852
|
+
*/
|
853
|
+
_swap(b, n, m) {
|
854
|
+
const i = b[n];
|
855
|
+
b[n] = b[m];
|
856
|
+
b[m] = i;
|
857
|
+
}
|
858
|
+
/**
|
859
|
+
* Writes `value` to `buf` at the specified `offset`. The `value` must be a valid unsigned 8-bit integer.
|
860
|
+
* Behavior is undefined when `value` is anything other than an unsigned 8-bit integer.
|
861
|
+
*
|
862
|
+
* @param value Number to write.
|
863
|
+
* @param offset Number of bytes to skip before starting to write.
|
864
|
+
* @param noAssert
|
865
|
+
* @returns `offset` plus the number of bytes written.
|
866
|
+
*/
|
867
|
+
writeUInt8(value, offset, noAssert) {
|
868
|
+
value = +value;
|
869
|
+
offset = offset >>> 0;
|
870
|
+
if (!noAssert) {
|
871
|
+
Buffer._checkInt(this, value, offset, 1, 255, 0);
|
872
|
+
}
|
873
|
+
this[offset] = value & 255;
|
874
|
+
return offset + 1;
|
875
|
+
}
|
876
|
+
/**
|
877
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 16-bit
|
878
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 16-bit integer.
|
879
|
+
*
|
880
|
+
* @param value Number to write.
|
881
|
+
* @param offset Number of bytes to skip before starting to write.
|
882
|
+
* @param noAssert
|
883
|
+
* @returns `offset` plus the number of bytes written.
|
884
|
+
*/
|
885
|
+
writeUInt16LE(value, offset, noAssert) {
|
886
|
+
value = +value;
|
887
|
+
offset = offset >>> 0;
|
888
|
+
if (!noAssert) {
|
889
|
+
Buffer._checkInt(this, value, offset, 2, 65535, 0);
|
890
|
+
}
|
891
|
+
this[offset] = value & 255;
|
892
|
+
this[offset + 1] = value >>> 8;
|
893
|
+
return offset + 2;
|
894
|
+
}
|
895
|
+
/**
|
896
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 16-bit
|
897
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 16-bit integer.
|
898
|
+
*
|
899
|
+
* @param value Number to write.
|
900
|
+
* @param offset Number of bytes to skip before starting to write.
|
901
|
+
* @param noAssert
|
902
|
+
* @returns `offset` plus the number of bytes written.
|
903
|
+
*/
|
904
|
+
writeUInt16BE(value, offset, noAssert) {
|
905
|
+
value = +value;
|
906
|
+
offset = offset >>> 0;
|
907
|
+
if (!noAssert) {
|
908
|
+
Buffer._checkInt(this, value, offset, 2, 65535, 0);
|
909
|
+
}
|
910
|
+
this[offset] = value >>> 8;
|
911
|
+
this[offset + 1] = value & 255;
|
912
|
+
return offset + 2;
|
913
|
+
}
|
914
|
+
/**
|
915
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 32-bit
|
916
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 32-bit integer.
|
917
|
+
*
|
918
|
+
* @param value Number to write.
|
919
|
+
* @param offset Number of bytes to skip before starting to write.
|
920
|
+
* @param noAssert
|
921
|
+
* @returns `offset` plus the number of bytes written.
|
922
|
+
*/
|
923
|
+
writeUInt32LE(value, offset, noAssert) {
|
924
|
+
value = +value;
|
925
|
+
offset = offset >>> 0;
|
926
|
+
if (!noAssert) {
|
927
|
+
Buffer._checkInt(this, value, offset, 4, 4294967295, 0);
|
928
|
+
}
|
929
|
+
this[offset + 3] = value >>> 24;
|
930
|
+
this[offset + 2] = value >>> 16;
|
931
|
+
this[offset + 1] = value >>> 8;
|
932
|
+
this[offset] = value & 255;
|
933
|
+
return offset + 4;
|
934
|
+
}
|
935
|
+
/**
|
936
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 32-bit
|
937
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 32-bit integer.
|
938
|
+
*
|
939
|
+
* @param value Number to write.
|
940
|
+
* @param offset Number of bytes to skip before starting to write.
|
941
|
+
* @param noAssert
|
942
|
+
* @returns `offset` plus the number of bytes written.
|
943
|
+
*/
|
944
|
+
writeUInt32BE(value, offset, noAssert) {
|
945
|
+
value = +value;
|
946
|
+
offset = offset >>> 0;
|
947
|
+
if (!noAssert) {
|
948
|
+
Buffer._checkInt(this, value, offset, 4, 4294967295, 0);
|
949
|
+
}
|
950
|
+
this[offset] = value >>> 24;
|
951
|
+
this[offset + 1] = value >>> 16;
|
952
|
+
this[offset + 2] = value >>> 8;
|
953
|
+
this[offset + 3] = value & 255;
|
954
|
+
return offset + 4;
|
955
|
+
}
|
956
|
+
/**
|
957
|
+
* Writes `value` to `buf` at the specified `offset`. The `value` must be a valid signed 8-bit integer.
|
958
|
+
* Behavior is undefined when `value` is anything other than a signed 8-bit integer.
|
959
|
+
*
|
960
|
+
* @param value Number to write.
|
961
|
+
* @param offset Number of bytes to skip before starting to write.
|
962
|
+
* @param noAssert
|
963
|
+
* @returns `offset` plus the number of bytes written.
|
964
|
+
*/
|
965
|
+
writeInt8(value, offset, noAssert) {
|
966
|
+
value = +value;
|
967
|
+
offset = offset >>> 0;
|
968
|
+
if (!noAssert) {
|
969
|
+
Buffer._checkInt(this, value, offset, 1, 127, -128);
|
970
|
+
}
|
971
|
+
if (value < 0) {
|
972
|
+
value = 255 + value + 1;
|
973
|
+
}
|
974
|
+
this[offset] = value & 255;
|
975
|
+
return offset + 1;
|
976
|
+
}
|
977
|
+
/**
|
978
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 16-bit
|
979
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 16-bit integer.
|
980
|
+
*
|
981
|
+
* @param value Number to write.
|
982
|
+
* @param offset Number of bytes to skip before starting to write.
|
983
|
+
* @param noAssert
|
984
|
+
* @returns `offset` plus the number of bytes written.
|
985
|
+
*/
|
986
|
+
writeInt16LE(value, offset, noAssert) {
|
987
|
+
value = +value;
|
988
|
+
offset = offset >>> 0;
|
989
|
+
if (!noAssert) {
|
990
|
+
Buffer._checkInt(this, value, offset, 2, 32767, -32768);
|
991
|
+
}
|
992
|
+
this[offset] = value & 255;
|
993
|
+
this[offset + 1] = value >>> 8;
|
994
|
+
return offset + 2;
|
995
|
+
}
|
996
|
+
/**
|
997
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 16-bit
|
998
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 16-bit integer.
|
999
|
+
*
|
1000
|
+
* @param value Number to write.
|
1001
|
+
* @param offset Number of bytes to skip before starting to write.
|
1002
|
+
* @param noAssert
|
1003
|
+
* @returns `offset` plus the number of bytes written.
|
1004
|
+
*/
|
1005
|
+
writeInt16BE(value, offset, noAssert) {
|
1006
|
+
value = +value;
|
1007
|
+
offset = offset >>> 0;
|
1008
|
+
if (!noAssert) {
|
1009
|
+
Buffer._checkInt(this, value, offset, 2, 32767, -32768);
|
1010
|
+
}
|
1011
|
+
this[offset] = value >>> 8;
|
1012
|
+
this[offset + 1] = value & 255;
|
1013
|
+
return offset + 2;
|
1014
|
+
}
|
1015
|
+
/**
|
1016
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 32-bit
|
1017
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 32-bit integer.
|
1018
|
+
*
|
1019
|
+
* @param value Number to write.
|
1020
|
+
* @param offset Number of bytes to skip before starting to write.
|
1021
|
+
* @param noAssert
|
1022
|
+
* @returns `offset` plus the number of bytes written.
|
1023
|
+
*/
|
1024
|
+
writeInt32LE(value, offset, noAssert) {
|
1025
|
+
value = +value;
|
1026
|
+
offset = offset >>> 0;
|
1027
|
+
if (!noAssert) {
|
1028
|
+
Buffer._checkInt(this, value, offset, 4, 2147483647, -2147483648);
|
1029
|
+
}
|
1030
|
+
this[offset] = value & 255;
|
1031
|
+
this[offset + 1] = value >>> 8;
|
1032
|
+
this[offset + 2] = value >>> 16;
|
1033
|
+
this[offset + 3] = value >>> 24;
|
1034
|
+
return offset + 4;
|
1035
|
+
}
|
1036
|
+
/**
|
1037
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 32-bit
|
1038
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 32-bit integer.
|
1039
|
+
*
|
1040
|
+
* @param value Number to write.
|
1041
|
+
* @param offset Number of bytes to skip before starting to write.
|
1042
|
+
* @param noAssert
|
1043
|
+
* @returns `offset` plus the number of bytes written.
|
1044
|
+
*/
|
1045
|
+
writeInt32BE(value, offset, noAssert) {
|
1046
|
+
value = +value;
|
1047
|
+
offset = offset >>> 0;
|
1048
|
+
if (!noAssert) {
|
1049
|
+
Buffer._checkInt(this, value, offset, 4, 2147483647, -2147483648);
|
1050
|
+
}
|
1051
|
+
if (value < 0) {
|
1052
|
+
value = 4294967295 + value + 1;
|
1053
|
+
}
|
1054
|
+
this[offset] = value >>> 24;
|
1055
|
+
this[offset + 1] = value >>> 16;
|
1056
|
+
this[offset + 2] = value >>> 8;
|
1057
|
+
this[offset + 3] = value & 255;
|
1058
|
+
return offset + 4;
|
1059
|
+
}
|
1060
|
+
/**
 * Fills `buf` with the specified `value`. If the `offset` and `end` are not given, the entire `buf` will be
 * filled. The `value` is coerced to a `uint32` value if it is not a string, `Buffer`, or integer. If the resulting
 * integer is greater than `255` (decimal), then `buf` will be filled with `value & 255`.
 *
 * If the final write of a `fill()` operation falls on a multi-byte character, then only the bytes of that
 * character that fit into `buf` are written.
 *
 * If `value` contains invalid characters, it is truncated; if no valid fill data remains, an exception is thrown.
 *
 * @param value Byte value, string, or buffer used as the fill pattern.
 * @param offset May also carry the encoding when `value` is a string (overload juggling below).
 * @param end May also carry the encoding when `value` is a string.
 * @param encoding
 * @returns This buffer.
 */
fill(value, offset, end, encoding) {
  if (typeof value === "string") {
    // Overload handling: fill(str, encoding) or fill(str, offset, encoding).
    if (typeof offset === "string") {
      encoding = offset;
      offset = 0;
      end = this.length;
    } else if (typeof end === "string") {
      encoding = end;
      end = this.length;
    }
    if (encoding !== void 0 && typeof encoding !== "string") {
      throw new TypeError("encoding must be a string");
    }
    if (typeof encoding === "string" && !Buffer.isEncoding(encoding)) {
      throw new TypeError("Unknown encoding: " + encoding);
    }
    // Fast path: a single ASCII character under utf8 becomes a plain byte fill.
    if (value.length === 1) {
      const code = value.charCodeAt(0);
      if (encoding === "utf8" && code < 128) {
        value = code;
      }
    }
  } else if (typeof value === "number") {
    value = value & 255;
  } else if (typeof value === "boolean") {
    value = Number(value);
  }
  offset ?? (offset = 0);
  end ?? (end = this.length);
  // Bounds check happens before the >>> 0 coercion below (matches upstream ordering).
  if (offset < 0 || this.length < offset || this.length < end) {
    throw new RangeError("Out of range index");
  }
  if (end <= offset) {
    return this;
  }
  offset = offset >>> 0;
  end = end === void 0 ? this.length : end >>> 0;
  value || (value = 0);
  let i;
  if (typeof value === "number") {
    // Numeric fill: write the same byte across the range.
    for (i = offset; i < end; ++i) {
      this[i] = value;
    }
  } else {
    // String/Buffer fill: repeat the encoded byte pattern across the range.
    const bytes = Buffer.isBuffer(value) ? value : Buffer.from(value, encoding);
    const len = bytes.length;
    if (len === 0) {
      throw new TypeError('The value "' + value + '" is invalid for argument "value"');
    }
    for (i = 0; i < end - offset; ++i) {
      this[i + offset] = bytes[i % len];
    }
  }
  return this;
}
|
1128
|
+
/**
 * Returns the index of the specified value.
 *
 * If `value` is:
 * - a string, `value` is interpreted according to the character encoding in `encoding`.
 * - a `Buffer` or `Uint8Array`, `value` will be used in its entirety. To compare a partial Buffer, use `slice()`.
 * - a number, `value` will be interpreted as an unsigned 8-bit integer value between `0` and `255`.
 *
 * Any other types will throw a `TypeError`.
 *
 * @param value What to search for.
 * @param byteOffset Where to begin searching in `buf`. If negative, then calculated from the end.
 * @param encoding If `value` is a string, this is the encoding used to search.
 * @returns The index of the first occurrence of `value` in `buf`, or `-1` if not found.
 */
indexOf(value, byteOffset, encoding) {
  // Delegates to the shared scanner with dir = true (forward search).
  return this._bidirectionalIndexOf(this, value, byteOffset, encoding, true);
}
|
1146
|
+
/**
 * Gets the last index of the specified value.
 *
 * @see indexOf()
 * @param value
 * @param byteOffset
 * @param encoding
 * @returns The index of the last occurrence of `value` in `buf`, or `-1` if not found.
 */
lastIndexOf(value, byteOffset, encoding) {
  // Delegates to the shared scanner with dir = false (backward search).
  return this._bidirectionalIndexOf(this, value, byteOffset, encoding, false);
}
|
1157
|
+
/**
 * Shared implementation behind indexOf()/lastIndexOf().
 *
 * @param buffer The haystack to scan.
 * @param val Needle: string, number (treated as a byte), or Buffer/Uint8Array.
 * @param byteOffset Start position; may also carry the encoding (overload juggling below).
 * @param encoding String encoding used to convert a string `val` into bytes.
 * @param dir `true` for forward (indexOf) search, `false` for backward (lastIndexOf).
 * @returns Index of the match, or -1 when not found.
 */
_bidirectionalIndexOf(buffer, val, byteOffset, encoding, dir) {
  if (buffer.length === 0) {
    return -1;
  }
  // Normalize byteOffset: it may be the encoding, undefined, or out of int32 range.
  if (typeof byteOffset === "string") {
    encoding = byteOffset;
    byteOffset = 0;
  } else if (typeof byteOffset === "undefined") {
    byteOffset = 0;
  } else if (byteOffset > 2147483647) {
    byteOffset = 2147483647;
  } else if (byteOffset < -2147483648) {
    byteOffset = -2147483648;
  }
  byteOffset = +byteOffset;
  // NaN check (x !== x): fall back to the search direction's natural start.
  if (byteOffset !== byteOffset) {
    byteOffset = dir ? 0 : buffer.length - 1;
  }
  // Negative offsets count from the end of the buffer.
  if (byteOffset < 0) {
    byteOffset = buffer.length + byteOffset;
  }
  if (byteOffset >= buffer.length) {
    if (dir) {
      return -1;
    } else {
      byteOffset = buffer.length - 1;
    }
  } else if (byteOffset < 0) {
    // Still negative after adjustment: clamp forward search, fail backward search.
    if (dir) {
      byteOffset = 0;
    } else {
      return -1;
    }
  }
  if (typeof val === "string") {
    val = Buffer.from(val, encoding);
  }
  if (Buffer.isBuffer(val)) {
    if (val.length === 0) {
      return -1;
    }
    return Buffer._arrayIndexOf(buffer, val, byteOffset, encoding, dir);
  } else if (typeof val === "number") {
    val = val & 255;
    // Prefer the native Uint8Array scan for single-byte needles when available.
    if (typeof Uint8Array.prototype.indexOf === "function") {
      if (dir) {
        return Uint8Array.prototype.indexOf.call(buffer, val, byteOffset);
      } else {
        return Uint8Array.prototype.lastIndexOf.call(buffer, val, byteOffset);
      }
    }
    return Buffer._arrayIndexOf(buffer, Buffer.from([val]), byteOffset, encoding, dir);
  }
  throw new TypeError("val must be string, number or Buffer");
}
|
1212
|
+
/**
|
1213
|
+
* Equivalent to `buf.indexOf() !== -1`.
|
1214
|
+
*
|
1215
|
+
* @param value
|
1216
|
+
* @param byteOffset
|
1217
|
+
* @param encoding
|
1218
|
+
*/
|
1219
|
+
includes(value, byteOffset, encoding) {
|
1220
|
+
return this.indexOf(value, byteOffset, encoding) !== -1;
|
1221
|
+
}
|
1222
|
+
/**
 * Creates a new buffer from the given parameters.
 *
 * @param a Source data (forwarded to the Buffer constructor).
 * @param b Second constructor argument (e.g. encoding or byte offset — see constructor).
 * @param c Third constructor argument (e.g. length — see constructor).
 */
static from(a, b, c) {
  // Thin factory: all overload resolution lives in the constructor.
  return new Buffer(a, b, c);
}
|
1231
|
+
/**
 * Returns true if `obj` is a Buffer.
 *
 * @param obj Value to test.
 */
static isBuffer(obj) {
  // Excludes Buffer.prototype itself; _isInstance also duck-types by constructor
  // name so buffers from another copy of this class are accepted.
  return obj != null && obj !== Buffer.prototype && Buffer._isInstance(obj, Buffer);
}
|
1239
|
+
/**
|
1240
|
+
* Returns true if `encoding` is a supported encoding.
|
1241
|
+
*
|
1242
|
+
* @param encoding
|
1243
|
+
*/
|
1244
|
+
static isEncoding(encoding) {
|
1245
|
+
switch (encoding.toLowerCase()) {
|
1246
|
+
case "hex":
|
1247
|
+
case "utf8":
|
1248
|
+
case "ascii":
|
1249
|
+
case "binary":
|
1250
|
+
case "latin1":
|
1251
|
+
case "ucs2":
|
1252
|
+
case "utf16le":
|
1253
|
+
case "base64":
|
1254
|
+
return true;
|
1255
|
+
default:
|
1256
|
+
return false;
|
1257
|
+
}
|
1258
|
+
}
|
1259
|
+
/**
 * Gives the actual byte length of a string for an encoding. This is not the same as `string.length` since that
 * returns the number of characters in the string.
 *
 * @param string The string (or Buffer/ArrayBuffer/TypedArray) to measure.
 * @param encoding The encoding to use for calculation. Default is `utf8`.
 * @returns Byte length; `-1` when the hidden third argument `mustMatch` is true and the encoding is unknown.
 */
static byteLength(string, encoding) {
  if (Buffer.isBuffer(string)) {
    return string.length;
  }
  // ArrayBuffers and typed-array views already know their byte length.
  if (typeof string !== "string" && (ArrayBuffer.isView(string) || Buffer._isInstance(string, ArrayBuffer))) {
    return string.byteLength;
  }
  if (typeof string !== "string") {
    throw new TypeError(
      'The "string" argument must be one of type string, Buffer, or ArrayBuffer. Received type ' + typeof string
    );
  }
  const len = string.length;
  // Hidden third argument (via `arguments`): when true, unknown encodings return -1
  // instead of falling back to utf8.
  const mustMatch = arguments.length > 2 && arguments[2] === true;
  if (!mustMatch && len === 0) {
    return 0;
  }
  switch (encoding?.toLowerCase()) {
    case "ascii":
    case "latin1":
    case "binary":
      // One byte per UTF-16 code unit.
      return len;
    case "utf8":
      return Buffer._utf8ToBytes(string).length;
    case "hex":
      // Two hex digits per byte.
      return len >>> 1;
    case "ucs2":
    case "utf16le":
      return len * 2;
    case "base64":
      return Buffer._base64ToBytes(string).length;
    default:
      return mustMatch ? -1 : Buffer._utf8ToBytes(string).length;
  }
}
|
1301
|
+
/**
 * Returns a Buffer which is the result of concatenating all the buffers in the list together.
 *
 * - If the list has no items, or if the `totalLength` is 0, then it returns a zero-length buffer.
 * - If the list has exactly one item, then the first item is returned.
 * - If the list has more than one item, then a new buffer is created.
 *
 * It is faster to provide the `totalLength` if it is known. However, it will be calculated if not provided at
 * a small computational expense.
 *
 * @param list An array of Buffer objects to concatenate.
 * @param totalLength Total length of the buffers when concatenated.
 */
static concat(list, totalLength) {
  if (!Array.isArray(list)) {
    throw new TypeError('"list" argument must be an Array of Buffers');
  }
  if (list.length === 0) {
    return Buffer.alloc(0);
  }
  let i;
  // Sum the lengths ourselves when the caller didn't supply them.
  if (totalLength === void 0) {
    totalLength = 0;
    for (i = 0; i < list.length; ++i) {
      totalLength += list[i].length;
    }
  }
  const buffer = Buffer.allocUnsafe(totalLength);
  let pos = 0;
  for (i = 0; i < list.length; ++i) {
    let buf = list[i];
    if (Buffer._isInstance(buf, Uint8Array)) {
      if (pos + buf.length > buffer.length) {
        // Source would overrun the target: route through Buffer.copy, which truncates.
        if (!Buffer.isBuffer(buf)) {
          buf = Buffer.from(buf);
        }
        buf.copy(buffer, pos);
      } else {
        // Fits entirely: use the fast native set().
        Uint8Array.prototype.set.call(buffer, buf, pos);
      }
    } else if (!Buffer.isBuffer(buf)) {
      throw new TypeError('"list" argument must be an Array of Buffers');
    } else {
      buf.copy(buffer, pos);
    }
    pos += buf.length;
  }
  return buffer;
}
|
1350
|
+
/**
 * The same as `buf1.compare(buf2)`.
 *
 * @param buf1 First buffer (Buffer or Uint8Array).
 * @param buf2 Second buffer (Buffer or Uint8Array).
 * @returns `-1` if `buf1` sorts before `buf2`, `1` if after, `0` when equal.
 */
static compare(buf1, buf2) {
  // Wrap plain Uint8Arrays so both operands expose the Buffer interface.
  if (Buffer._isInstance(buf1, Uint8Array)) {
    buf1 = Buffer.from(buf1, buf1.byteOffset, buf1.byteLength);
  }
  if (Buffer._isInstance(buf2, Uint8Array)) {
    buf2 = Buffer.from(buf2, buf2.byteOffset, buf2.byteLength);
  }
  if (!Buffer.isBuffer(buf1) || !Buffer.isBuffer(buf2)) {
    throw new TypeError('The "buf1", "buf2" arguments must be one of type Buffer or Uint8Array');
  }
  if (buf1 === buf2) {
    return 0;
  }
  // x/y start as the lengths (tie-breaker) and are replaced by the first
  // differing byte pair, if any.
  let x = buf1.length;
  let y = buf2.length;
  for (let i = 0, len = Math.min(x, y); i < len; ++i) {
    if (buf1[i] !== buf2[i]) {
      x = buf1[i];
      y = buf2[i];
      break;
    }
  }
  if (x < y) {
    return -1;
  }
  if (y < x) {
    return 1;
  }
  return 0;
}
|
1383
|
+
/**
|
1384
|
+
* Allocates a new buffer of `size` octets.
|
1385
|
+
*
|
1386
|
+
* @param size The number of octets to allocate.
|
1387
|
+
* @param fill If specified, the buffer will be initialized by calling `buf.fill(fill)`, or with zeroes otherwise.
|
1388
|
+
* @param encoding The encoding used for the call to `buf.fill()` while initializing.
|
1389
|
+
*/
|
1390
|
+
static alloc(size, fill, encoding) {
|
1391
|
+
if (typeof size !== "number") {
|
1392
|
+
throw new TypeError('"size" argument must be of type number');
|
1393
|
+
} else if (size < 0) {
|
1394
|
+
throw new RangeError('The value "' + size + '" is invalid for option "size"');
|
1395
|
+
}
|
1396
|
+
if (size <= 0) {
|
1397
|
+
return new Buffer(size);
|
1398
|
+
}
|
1399
|
+
if (fill !== void 0) {
|
1400
|
+
return typeof encoding === "string" ? new Buffer(size).fill(fill, 0, size, encoding) : new Buffer(size).fill(fill);
|
1401
|
+
}
|
1402
|
+
return new Buffer(size);
|
1403
|
+
}
|
1404
|
+
/**
|
1405
|
+
* Allocates a new buffer of `size` octets without initializing memory. The contents of the buffer are unknown.
|
1406
|
+
*
|
1407
|
+
* @param size
|
1408
|
+
*/
|
1409
|
+
static allocUnsafe(size) {
|
1410
|
+
if (typeof size !== "number") {
|
1411
|
+
throw new TypeError('"size" argument must be of type number');
|
1412
|
+
} else if (size < 0) {
|
1413
|
+
throw new RangeError('The value "' + size + '" is invalid for option "size"');
|
1414
|
+
}
|
1415
|
+
return new Buffer(size < 0 ? 0 : Buffer._checked(size) | 0);
|
1416
|
+
}
|
1417
|
+
/**
|
1418
|
+
* Returns true if the given `obj` is an instance of `type`.
|
1419
|
+
*
|
1420
|
+
* @param obj
|
1421
|
+
* @param type
|
1422
|
+
*/
|
1423
|
+
static _isInstance(obj, type) {
|
1424
|
+
return obj instanceof type || obj != null && obj.constructor != null && obj.constructor.name != null && obj.constructor.name === type.name;
|
1425
|
+
}
|
1426
|
+
static _checked(length) {
|
1427
|
+
if (length >= K_MAX_LENGTH) {
|
1428
|
+
throw new RangeError(
|
1429
|
+
"Attempt to allocate Buffer larger than maximum size: 0x" + K_MAX_LENGTH.toString(16) + " bytes"
|
1430
|
+
);
|
1431
|
+
}
|
1432
|
+
return length | 0;
|
1433
|
+
}
|
1434
|
+
static _blitBuffer(src, dst, offset, length) {
|
1435
|
+
let i;
|
1436
|
+
for (i = 0; i < length; ++i) {
|
1437
|
+
if (i + offset >= dst.length || i >= src.length) {
|
1438
|
+
break;
|
1439
|
+
}
|
1440
|
+
dst[i + offset] = src[i];
|
1441
|
+
}
|
1442
|
+
return i;
|
1443
|
+
}
|
1444
|
+
/**
 * Writes `string` into `buf` as UTF-8 starting at `offset`.
 *
 * @returns The number of bytes written.
 */
static _utf8Write(buf, string, offset, length) {
  // Encode capped at the space remaining in `buf`, then blit into place.
  return Buffer._blitBuffer(Buffer._utf8ToBytes(string, buf.length - offset), buf, offset, length);
}
|
1447
|
+
/**
 * Writes `string` into `buf` as ASCII/latin1 (low byte of each code unit) starting at `offset`.
 *
 * @returns The number of bytes written.
 */
static _asciiWrite(buf, string, offset, length) {
  return Buffer._blitBuffer(Buffer._asciiToBytes(string), buf, offset, length);
}
|
1450
|
+
/**
 * Decodes a base64 `string` and writes the bytes into `buf` starting at `offset`.
 *
 * @returns The number of bytes written.
 */
static _base64Write(buf, string, offset, length) {
  return Buffer._blitBuffer(Buffer._base64ToBytes(string), buf, offset, length);
}
|
1453
|
+
/**
 * Writes `string` into `buf` as UTF-16LE starting at `offset`.
 *
 * @returns The number of bytes written.
 */
static _ucs2Write(buf, string, offset, length) {
  // Encode capped at the space remaining in `buf`, then blit into place.
  return Buffer._blitBuffer(Buffer._utf16leToBytes(string, buf.length - offset), buf, offset, length);
}
|
1456
|
+
/**
 * Writes a hex `string` into `buf` starting at `offset`, two hex digits per byte.
 *
 * @param buf Destination byte array.
 * @param string Hex string (pairs of hex digits).
 * @param offset Byte position to start writing at.
 * @param length Maximum number of bytes to write; clamped to the remaining space.
 * @returns The number of bytes actually written (stops at the first invalid pair).
 */
static _hexWrite(buf, string, offset, length) {
  offset = Number(offset) || 0;
  const remaining = buf.length - offset;
  if (!length) {
    length = remaining;
  } else {
    length = Number(length);
    if (length > remaining) {
      length = remaining;
    }
  }
  // Never consume more than the string provides (two digits per byte).
  const strLen = string.length;
  if (length > strLen / 2) {
    length = strLen / 2;
  }
  let i;
  for (i = 0; i < length; ++i) {
    const parsed = parseInt(string.substr(i * 2, 2), 16);
    // NaN check (x !== x): an invalid hex pair truncates the write.
    if (parsed !== parsed) {
      return i;
    }
    buf[offset + i] = parsed;
  }
  return i;
}
|
1481
|
+
/**
 * Encodes a JS string into an array of UTF-8 bytes, emitting U+FFFD (EF BF BD)
 * replacement bytes for lone/invalid surrogates.
 *
 * @param string Source string.
 * @param units Maximum number of bytes to emit; defaults to unlimited.
 * @returns Array of byte values (0-255).
 */
static _utf8ToBytes(string, units) {
  units = units || Infinity;
  const length = string.length;
  const bytes = [];
  let codePoint;
  // Pending high surrogate waiting for its low half.
  let leadSurrogate = null;
  for (let i = 0; i < length; ++i) {
    codePoint = string.charCodeAt(i);
    // Surrogate range (0xD800-0xDFFF) needs pairing logic.
    if (codePoint > 55295 && codePoint < 57344) {
      if (!leadSurrogate) {
        if (codePoint > 56319) {
          // Lone low surrogate: emit the replacement character.
          if ((units -= 3) > -1) {
            bytes.push(239, 191, 189);
          }
          continue;
        } else if (i + 1 === length) {
          // High surrogate at end of string: also invalid.
          if ((units -= 3) > -1) {
            bytes.push(239, 191, 189);
          }
          continue;
        }
        leadSurrogate = codePoint;
        continue;
      }
      if (codePoint < 56320) {
        // Two high surrogates in a row: replace the first, keep the second pending.
        if ((units -= 3) > -1) {
          bytes.push(239, 191, 189);
        }
        leadSurrogate = codePoint;
        continue;
      }
      // Valid pair: combine into a supplementary-plane code point.
      codePoint = (leadSurrogate - 55296 << 10 | codePoint - 56320) + 65536;
    } else if (leadSurrogate) {
      // Pending high surrogate followed by a non-surrogate: replace it.
      if ((units -= 3) > -1) {
        bytes.push(239, 191, 189);
      }
    }
    leadSurrogate = null;
    // Emit 1-4 bytes depending on the code point range; each branch first
    // checks the remaining byte budget and stops cleanly when exhausted.
    if (codePoint < 128) {
      if ((units -= 1) < 0) {
        break;
      }
      bytes.push(codePoint);
    } else if (codePoint < 2048) {
      if ((units -= 2) < 0) {
        break;
      }
      bytes.push(codePoint >> 6 | 192, codePoint & 63 | 128);
    } else if (codePoint < 65536) {
      if ((units -= 3) < 0) {
        break;
      }
      bytes.push(codePoint >> 12 | 224, codePoint >> 6 & 63 | 128, codePoint & 63 | 128);
    } else if (codePoint < 1114112) {
      if ((units -= 4) < 0) {
        break;
      }
      bytes.push(
        codePoint >> 18 | 240,
        codePoint >> 12 & 63 | 128,
        codePoint >> 6 & 63 | 128,
        codePoint & 63 | 128
      );
    } else {
      throw new Error("Invalid code point");
    }
  }
  return bytes;
}
|
1550
|
+
/**
 * Decodes a base64 string into an array of bytes.
 *
 * @param str Base64 input; base64clean (defined elsewhere in this file) normalizes it first.
 */
static _base64ToBytes(str) {
  return toByteArray(base64clean(str));
}
|
1553
|
+
static _asciiToBytes(str) {
|
1554
|
+
const byteArray = [];
|
1555
|
+
for (let i = 0; i < str.length; ++i) {
|
1556
|
+
byteArray.push(str.charCodeAt(i) & 255);
|
1557
|
+
}
|
1558
|
+
return byteArray;
|
1559
|
+
}
|
1560
|
+
static _utf16leToBytes(str, units) {
|
1561
|
+
let c, hi, lo;
|
1562
|
+
const byteArray = [];
|
1563
|
+
for (let i = 0; i < str.length; ++i) {
|
1564
|
+
if ((units -= 2) < 0) break;
|
1565
|
+
c = str.charCodeAt(i);
|
1566
|
+
hi = c >> 8;
|
1567
|
+
lo = c % 256;
|
1568
|
+
byteArray.push(lo);
|
1569
|
+
byteArray.push(hi);
|
1570
|
+
}
|
1571
|
+
return byteArray;
|
1572
|
+
}
|
1573
|
+
static _hexSlice(buf, start, end) {
|
1574
|
+
const len = buf.length;
|
1575
|
+
if (!start || start < 0) {
|
1576
|
+
start = 0;
|
1577
|
+
}
|
1578
|
+
if (!end || end < 0 || end > len) {
|
1579
|
+
end = len;
|
1580
|
+
}
|
1581
|
+
let out = "";
|
1582
|
+
for (let i = start; i < end; ++i) {
|
1583
|
+
out += hexSliceLookupTable[buf[i]];
|
1584
|
+
}
|
1585
|
+
return out;
|
1586
|
+
}
|
1587
|
+
static _base64Slice(buf, start, end) {
|
1588
|
+
if (start === 0 && end === buf.length) {
|
1589
|
+
return fromByteArray(buf);
|
1590
|
+
} else {
|
1591
|
+
return fromByteArray(buf.slice(start, end));
|
1592
|
+
}
|
1593
|
+
}
|
1594
|
+
/**
 * Decodes buf[start, end) as UTF-8 into a JS string, substituting U+FFFD for
 * invalid or truncated sequences.
 *
 * @param buf Source byte array.
 * @param start Start index.
 * @param end End index (exclusive); clamped to buf.length.
 */
static _utf8Slice(buf, start, end) {
  end = Math.min(buf.length, end);
  // Accumulates UTF-16 code units (including surrogate pairs).
  const res = [];
  let i = start;
  while (i < end) {
    const firstByte = buf[i];
    let codePoint = null;
    // Sequence length inferred from the lead byte's range.
    let bytesPerSequence = firstByte > 239 ? 4 : firstByte > 223 ? 3 : firstByte > 191 ? 2 : 1;
    if (i + bytesPerSequence <= end) {
      let secondByte, thirdByte, fourthByte, tempCodePoint;
      switch (bytesPerSequence) {
        case 1:
          if (firstByte < 128) {
            codePoint = firstByte;
          }
          break;
        case 2:
          secondByte = buf[i + 1];
          // Continuation bytes must match 10xxxxxx (x & 0xC0 === 0x80).
          if ((secondByte & 192) === 128) {
            tempCodePoint = (firstByte & 31) << 6 | secondByte & 63;
            // Reject overlong encodings (result must not fit in fewer bytes).
            if (tempCodePoint > 127) {
              codePoint = tempCodePoint;
            }
          }
          break;
        case 3:
          secondByte = buf[i + 1];
          thirdByte = buf[i + 2];
          if ((secondByte & 192) === 128 && (thirdByte & 192) === 128) {
            tempCodePoint = (firstByte & 15) << 12 | (secondByte & 63) << 6 | thirdByte & 63;
            // Reject overlong encodings and the surrogate range U+D800-U+DFFF.
            if (tempCodePoint > 2047 && (tempCodePoint < 55296 || tempCodePoint > 57343)) {
              codePoint = tempCodePoint;
            }
          }
          break;
        case 4:
          secondByte = buf[i + 1];
          thirdByte = buf[i + 2];
          fourthByte = buf[i + 3];
          if ((secondByte & 192) === 128 && (thirdByte & 192) === 128 && (fourthByte & 192) === 128) {
            tempCodePoint = (firstByte & 15) << 18 | (secondByte & 63) << 12 | (thirdByte & 63) << 6 | fourthByte & 63;
            // Must be a supplementary-plane code point below U+110000.
            if (tempCodePoint > 65535 && tempCodePoint < 1114112) {
              codePoint = tempCodePoint;
            }
          }
          break;
      }
    }
    if (codePoint === null) {
      // Invalid sequence: emit U+FFFD and resync on the next byte.
      codePoint = 65533;
      bytesPerSequence = 1;
    } else if (codePoint > 65535) {
      // Split supplementary-plane code points into a surrogate pair.
      codePoint -= 65536;
      res.push(codePoint >>> 10 & 1023 | 55296);
      codePoint = 56320 | codePoint & 1023;
    }
    res.push(codePoint);
    i += bytesPerSequence;
  }
  return Buffer._decodeCodePointsArray(res);
}
|
1654
|
+
static _decodeCodePointsArray(codePoints) {
|
1655
|
+
const len = codePoints.length;
|
1656
|
+
if (len <= MAX_ARGUMENTS_LENGTH) {
|
1657
|
+
return String.fromCharCode.apply(String, codePoints);
|
1658
|
+
}
|
1659
|
+
let res = "";
|
1660
|
+
let i = 0;
|
1661
|
+
while (i < len) {
|
1662
|
+
res += String.fromCharCode.apply(String, codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH));
|
1663
|
+
}
|
1664
|
+
return res;
|
1665
|
+
}
|
1666
|
+
static _asciiSlice(buf, start, end) {
|
1667
|
+
let ret = "";
|
1668
|
+
end = Math.min(buf.length, end);
|
1669
|
+
for (let i = start; i < end; ++i) {
|
1670
|
+
ret += String.fromCharCode(buf[i] & 127);
|
1671
|
+
}
|
1672
|
+
return ret;
|
1673
|
+
}
|
1674
|
+
static _latin1Slice(buf, start, end) {
|
1675
|
+
let ret = "";
|
1676
|
+
end = Math.min(buf.length, end);
|
1677
|
+
for (let i = start; i < end; ++i) {
|
1678
|
+
ret += String.fromCharCode(buf[i]);
|
1679
|
+
}
|
1680
|
+
return ret;
|
1681
|
+
}
|
1682
|
+
static _utf16leSlice(buf, start, end) {
|
1683
|
+
const bytes = buf.slice(start, end);
|
1684
|
+
let res = "";
|
1685
|
+
for (let i = 0; i < bytes.length - 1; i += 2) {
|
1686
|
+
res += String.fromCharCode(bytes[i] + bytes[i + 1] * 256);
|
1687
|
+
}
|
1688
|
+
return res;
|
1689
|
+
}
|
1690
|
+
static _arrayIndexOf(arr, val, byteOffset, encoding, dir) {
|
1691
|
+
let indexSize = 1;
|
1692
|
+
let arrLength = arr.length;
|
1693
|
+
let valLength = val.length;
|
1694
|
+
if (encoding !== void 0) {
|
1695
|
+
encoding = Buffer._getEncoding(encoding);
|
1696
|
+
if (encoding === "ucs2" || encoding === "utf16le") {
|
1697
|
+
if (arr.length < 2 || val.length < 2) {
|
1698
|
+
return -1;
|
1699
|
+
}
|
1700
|
+
indexSize = 2;
|
1701
|
+
arrLength /= 2;
|
1702
|
+
valLength /= 2;
|
1703
|
+
byteOffset /= 2;
|
1704
|
+
}
|
1705
|
+
}
|
1706
|
+
function read(buf, i2) {
|
1707
|
+
if (indexSize === 1) {
|
1708
|
+
return buf[i2];
|
1709
|
+
} else {
|
1710
|
+
return buf.readUInt16BE(i2 * indexSize);
|
1711
|
+
}
|
1712
|
+
}
|
1713
|
+
let i;
|
1714
|
+
if (dir) {
|
1715
|
+
let foundIndex = -1;
|
1716
|
+
for (i = byteOffset; i < arrLength; i++) {
|
1717
|
+
if (read(arr, i) === read(val, foundIndex === -1 ? 0 : i - foundIndex)) {
|
1718
|
+
if (foundIndex === -1) foundIndex = i;
|
1719
|
+
if (i - foundIndex + 1 === valLength) return foundIndex * indexSize;
|
1720
|
+
} else {
|
1721
|
+
if (foundIndex !== -1) i -= i - foundIndex;
|
1722
|
+
foundIndex = -1;
|
1723
|
+
}
|
1724
|
+
}
|
1725
|
+
} else {
|
1726
|
+
if (byteOffset + valLength > arrLength) {
|
1727
|
+
byteOffset = arrLength - valLength;
|
1728
|
+
}
|
1729
|
+
for (i = byteOffset; i >= 0; i--) {
|
1730
|
+
let found = true;
|
1731
|
+
for (let j = 0; j < valLength; j++) {
|
1732
|
+
if (read(arr, i + j) !== read(val, j)) {
|
1733
|
+
found = false;
|
1734
|
+
break;
|
1735
|
+
}
|
1736
|
+
}
|
1737
|
+
if (found) {
|
1738
|
+
return i;
|
1739
|
+
}
|
1740
|
+
}
|
1741
|
+
}
|
1742
|
+
return -1;
|
1743
|
+
}
|
1744
|
+
static _checkOffset(offset, ext, length) {
|
1745
|
+
if (offset % 1 !== 0 || offset < 0) throw new RangeError("offset is not uint");
|
1746
|
+
if (offset + ext > length) throw new RangeError("Trying to access beyond buffer length");
|
1747
|
+
}
|
1748
|
+
static _checkInt(buf, value, offset, ext, max, min) {
|
1749
|
+
if (!Buffer.isBuffer(buf)) throw new TypeError('"buffer" argument must be a Buffer instance');
|
1750
|
+
if (value > max || value < min) throw new RangeError('"value" argument is out of bounds');
|
1751
|
+
if (offset + ext > buf.length) throw new RangeError("Index out of range");
|
1752
|
+
}
|
1753
|
+
static _getEncoding(encoding) {
|
1754
|
+
let toLowerCase = false;
|
1755
|
+
let originalEncoding = "";
|
1756
|
+
for (; ; ) {
|
1757
|
+
switch (encoding) {
|
1758
|
+
case "hex":
|
1759
|
+
return "hex";
|
1760
|
+
case "utf8":
|
1761
|
+
return "utf8";
|
1762
|
+
case "ascii":
|
1763
|
+
return "ascii";
|
1764
|
+
case "binary":
|
1765
|
+
return "binary";
|
1766
|
+
case "latin1":
|
1767
|
+
return "latin1";
|
1768
|
+
case "ucs2":
|
1769
|
+
return "ucs2";
|
1770
|
+
case "utf16le":
|
1771
|
+
return "utf16le";
|
1772
|
+
case "base64":
|
1773
|
+
return "base64";
|
1774
|
+
default: {
|
1775
|
+
if (toLowerCase) {
|
1776
|
+
throw new TypeError("Unknown or unsupported encoding: " + originalEncoding);
|
1777
|
+
}
|
1778
|
+
toLowerCase = true;
|
1779
|
+
originalEncoding = encoding;
|
1780
|
+
encoding = encoding.toLowerCase();
|
1781
|
+
}
|
1782
|
+
}
|
1783
|
+
}
|
1784
|
+
}
|
1785
|
+
}
|
1786
|
+
const hexSliceLookupTable = function() {
|
1787
|
+
const alphabet = "0123456789abcdef";
|
1788
|
+
const table = new Array(256);
|
1789
|
+
for (let i = 0; i < 16; ++i) {
|
1790
|
+
const i16 = i * 16;
|
1791
|
+
for (let j = 0; j < 16; ++j) {
|
1792
|
+
table[i16 + j] = alphabet[i] + alphabet[j];
|
1793
|
+
}
|
1794
|
+
}
|
1795
|
+
return table;
|
1796
|
+
}();
|
1797
|
+
const INVALID_BASE64_RE = /[^+/0-9A-Za-z-_]/g;
|
1798
|
+
function base64clean(str) {
|
1799
|
+
str = str.split("=")[0];
|
1800
|
+
str = str.trim().replace(INVALID_BASE64_RE, "");
|
1801
|
+
if (str.length < 2) return "";
|
1802
|
+
while (str.length % 4 !== 0) {
|
1803
|
+
str = str + "=";
|
1804
|
+
}
|
1805
|
+
return str;
|
1806
|
+
}
|
1807
|
+
|
25
1808
|
function notEmpty(value) {
|
26
1809
|
return value !== null && value !== void 0;
|
27
1810
|
}
|
@@ -225,8 +2008,7 @@ function buildPreviewBranchName({ org, branch }) {
|
|
225
2008
|
function getPreviewBranch() {
|
226
2009
|
try {
|
227
2010
|
const { deployPreview, deployPreviewBranch, vercelGitCommitRef, vercelGitRepoOwner } = getEnvironment();
|
228
|
-
if (deployPreviewBranch)
|
229
|
-
return deployPreviewBranch;
|
2011
|
+
if (deployPreviewBranch) return deployPreviewBranch;
|
230
2012
|
switch (deployPreview) {
|
231
2013
|
case "vercel": {
|
232
2014
|
if (!vercelGitCommitRef || !vercelGitRepoOwner) {
|
@@ -242,29 +2024,15 @@ function getPreviewBranch() {
|
|
242
2024
|
}
|
243
2025
|
}
|
244
2026
|
|
245
|
-
var
|
246
|
-
|
247
|
-
throw TypeError("Cannot " + msg);
|
248
|
-
};
|
249
|
-
var __privateGet$7 = (obj, member, getter) => {
|
250
|
-
__accessCheck$7(obj, member, "read from private field");
|
251
|
-
return getter ? getter.call(obj) : member.get(obj);
|
252
|
-
};
|
253
|
-
var __privateAdd$7 = (obj, member, value) => {
|
254
|
-
if (member.has(obj))
|
255
|
-
throw TypeError("Cannot add the same private member more than once");
|
256
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
257
|
-
};
|
258
|
-
var __privateSet$7 = (obj, member, value, setter) => {
|
259
|
-
__accessCheck$7(obj, member, "write to private field");
|
260
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
261
|
-
return value;
|
2027
|
+
var __typeError$7 = (msg) => {
|
2028
|
+
throw TypeError(msg);
|
262
2029
|
};
|
263
|
-
var
|
264
|
-
|
265
|
-
|
266
|
-
|
267
|
-
var
|
2030
|
+
var __accessCheck$7 = (obj, member, msg) => member.has(obj) || __typeError$7("Cannot " + msg);
|
2031
|
+
var __privateGet$6 = (obj, member, getter) => (__accessCheck$7(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
2032
|
+
var __privateAdd$7 = (obj, member, value) => member.has(obj) ? __typeError$7("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
2033
|
+
var __privateSet$5 = (obj, member, value, setter) => (__accessCheck$7(obj, member, "write to private field"), member.set(obj, value), value);
|
2034
|
+
var __privateMethod$4 = (obj, member, method) => (__accessCheck$7(obj, member, "access private method"), method);
|
2035
|
+
var _fetch, _queue, _concurrency, _ApiRequestPool_instances, enqueue_fn;
|
268
2036
|
const REQUEST_TIMEOUT = 5 * 60 * 1e3;
|
269
2037
|
function getFetchImplementation(userFetch) {
|
270
2038
|
const globalFetch = typeof fetch !== "undefined" ? fetch : void 0;
|
@@ -277,23 +2045,23 @@ function getFetchImplementation(userFetch) {
|
|
277
2045
|
}
|
278
2046
|
class ApiRequestPool {
|
279
2047
|
constructor(concurrency = 10) {
|
280
|
-
__privateAdd$7(this,
|
281
|
-
__privateAdd$7(this, _fetch
|
282
|
-
__privateAdd$7(this, _queue
|
283
|
-
__privateAdd$7(this, _concurrency
|
284
|
-
__privateSet$
|
285
|
-
__privateSet$
|
2048
|
+
__privateAdd$7(this, _ApiRequestPool_instances);
|
2049
|
+
__privateAdd$7(this, _fetch);
|
2050
|
+
__privateAdd$7(this, _queue);
|
2051
|
+
__privateAdd$7(this, _concurrency);
|
2052
|
+
__privateSet$5(this, _queue, []);
|
2053
|
+
__privateSet$5(this, _concurrency, concurrency);
|
286
2054
|
this.running = 0;
|
287
2055
|
this.started = 0;
|
288
2056
|
}
|
289
2057
|
setFetch(fetch2) {
|
290
|
-
__privateSet$
|
2058
|
+
__privateSet$5(this, _fetch, fetch2);
|
291
2059
|
}
|
292
2060
|
getFetch() {
|
293
|
-
if (!__privateGet$
|
2061
|
+
if (!__privateGet$6(this, _fetch)) {
|
294
2062
|
throw new Error("Fetch not set");
|
295
2063
|
}
|
296
|
-
return __privateGet$
|
2064
|
+
return __privateGet$6(this, _fetch);
|
297
2065
|
}
|
298
2066
|
request(url, options) {
|
299
2067
|
const start = /* @__PURE__ */ new Date();
|
@@ -315,7 +2083,7 @@ class ApiRequestPool {
|
|
315
2083
|
}
|
316
2084
|
return response;
|
317
2085
|
};
|
318
|
-
return __privateMethod$4(this,
|
2086
|
+
return __privateMethod$4(this, _ApiRequestPool_instances, enqueue_fn).call(this, async () => {
|
319
2087
|
return await runRequest();
|
320
2088
|
});
|
321
2089
|
}
|
@@ -323,21 +2091,21 @@ class ApiRequestPool {
|
|
323
2091
|
_fetch = new WeakMap();
|
324
2092
|
_queue = new WeakMap();
|
325
2093
|
_concurrency = new WeakMap();
|
326
|
-
|
2094
|
+
_ApiRequestPool_instances = new WeakSet();
|
327
2095
|
enqueue_fn = function(task) {
|
328
|
-
const promise = new Promise((resolve) => __privateGet$
|
2096
|
+
const promise = new Promise((resolve) => __privateGet$6(this, _queue).push(resolve)).finally(() => {
|
329
2097
|
this.started--;
|
330
2098
|
this.running++;
|
331
2099
|
}).then(() => task()).finally(() => {
|
332
2100
|
this.running--;
|
333
|
-
const next = __privateGet$
|
2101
|
+
const next = __privateGet$6(this, _queue).shift();
|
334
2102
|
if (next !== void 0) {
|
335
2103
|
this.started++;
|
336
2104
|
next();
|
337
2105
|
}
|
338
2106
|
});
|
339
|
-
if (this.running + this.started < __privateGet$
|
340
|
-
const next = __privateGet$
|
2107
|
+
if (this.running + this.started < __privateGet$6(this, _concurrency)) {
|
2108
|
+
const next = __privateGet$6(this, _queue).shift();
|
341
2109
|
if (next !== void 0) {
|
342
2110
|
this.started++;
|
343
2111
|
next();
|
@@ -526,7 +2294,7 @@ function defaultOnOpen(response) {
|
|
526
2294
|
}
|
527
2295
|
}
|
528
2296
|
|
529
|
-
const VERSION = "0.
|
2297
|
+
const VERSION = "0.30.0";
|
530
2298
|
|
531
2299
|
class ErrorWithCause extends Error {
|
532
2300
|
constructor(message, options) {
|
@@ -606,35 +2374,30 @@ function parseProviderString(provider = "production") {
|
|
606
2374
|
return provider;
|
607
2375
|
}
|
608
2376
|
const [main, workspaces] = provider.split(",");
|
609
|
-
if (!main || !workspaces)
|
610
|
-
return null;
|
2377
|
+
if (!main || !workspaces) return null;
|
611
2378
|
return { main, workspaces };
|
612
2379
|
}
|
613
2380
|
function buildProviderString(provider) {
|
614
|
-
if (isHostProviderAlias(provider))
|
615
|
-
return provider;
|
2381
|
+
if (isHostProviderAlias(provider)) return provider;
|
616
2382
|
return `${provider.main},${provider.workspaces}`;
|
617
2383
|
}
|
618
2384
|
function parseWorkspacesUrlParts(url) {
|
619
|
-
if (!isString(url))
|
620
|
-
return null;
|
2385
|
+
if (!isString(url)) return null;
|
621
2386
|
const matches = {
|
622
|
-
production: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.xata\.sh
|
623
|
-
staging: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.staging-xata\.dev
|
624
|
-
dev: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.dev-xata\.dev
|
625
|
-
local: url.match(/(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:(
|
2387
|
+
production: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.xata\.sh\/db\/([^:]+):?(.*)?/),
|
2388
|
+
staging: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.staging-xata\.dev\/db\/([^:]+):?(.*)?/),
|
2389
|
+
dev: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.dev-xata\.dev\/db\/([^:]+):?(.*)?/),
|
2390
|
+
local: url.match(/(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:(?:\d+)\/db\/([^:]+):?(.*)?/)
|
626
2391
|
};
|
627
2392
|
const [host, match] = Object.entries(matches).find(([, match2]) => match2 !== null) ?? [];
|
628
|
-
if (!isHostProviderAlias(host) || !match)
|
629
|
-
|
630
|
-
return { workspace: match[1], region: match[2], host };
|
2393
|
+
if (!isHostProviderAlias(host) || !match) return null;
|
2394
|
+
return { workspace: match[1], region: match[2], database: match[3], branch: match[4], host };
|
631
2395
|
}
|
632
2396
|
|
633
2397
|
const pool = new ApiRequestPool();
|
634
2398
|
const resolveUrl = (url, queryParams = {}, pathParams = {}) => {
|
635
2399
|
const cleanQueryParams = Object.entries(queryParams).reduce((acc, [key, value]) => {
|
636
|
-
if (value === void 0 || value === null)
|
637
|
-
return acc;
|
2400
|
+
if (value === void 0 || value === null) return acc;
|
638
2401
|
return { ...acc, [key]: value };
|
639
2402
|
}, {});
|
640
2403
|
const query = new URLSearchParams(cleanQueryParams).toString();
|
@@ -682,8 +2445,7 @@ function hostHeader(url) {
|
|
682
2445
|
return groups?.host ? { Host: groups.host } : {};
|
683
2446
|
}
|
684
2447
|
async function parseBody(body, headers) {
|
685
|
-
if (!isDefined(body))
|
686
|
-
return void 0;
|
2448
|
+
if (!isDefined(body)) return void 0;
|
687
2449
|
if (isBlob(body) || typeof body.text === "function") {
|
688
2450
|
return body;
|
689
2451
|
}
|
@@ -760,8 +2522,7 @@ async function fetch$1({
|
|
760
2522
|
[TraceAttributes.CLOUDFLARE_RAY_ID]: response.headers?.get("cf-ray") ?? void 0
|
761
2523
|
});
|
762
2524
|
const message = response.headers?.get("x-xata-message");
|
763
|
-
if (message)
|
764
|
-
console.warn(message);
|
2525
|
+
if (message) console.warn(message);
|
765
2526
|
if (response.status === 204) {
|
766
2527
|
return {};
|
767
2528
|
}
|
@@ -845,26 +2606,122 @@ function parseUrl(url) {
|
|
845
2606
|
|
846
2607
|
const dataPlaneFetch = async (options) => fetch$1({ ...options, endpoint: "dataPlane" });
|
847
2608
|
|
848
|
-
const
|
849
|
-
|
850
|
-
|
2609
|
+
const getTasks = (variables, signal) => dataPlaneFetch({
|
2610
|
+
url: "/tasks",
|
2611
|
+
method: "get",
|
2612
|
+
...variables,
|
2613
|
+
signal
|
2614
|
+
});
|
2615
|
+
const getTaskStatus = (variables, signal) => dataPlaneFetch({
|
2616
|
+
url: "/tasks/{taskId}",
|
2617
|
+
method: "get",
|
2618
|
+
...variables,
|
2619
|
+
signal
|
2620
|
+
});
|
2621
|
+
const listClusterBranches = (variables, signal) => dataPlaneFetch({
|
2622
|
+
url: "/cluster/{clusterId}/branches",
|
2623
|
+
method: "get",
|
2624
|
+
...variables,
|
2625
|
+
signal
|
2626
|
+
});
|
2627
|
+
const listClusterExtensions = (variables, signal) => dataPlaneFetch({
|
2628
|
+
url: "/cluster/{clusterId}/extensions",
|
2629
|
+
method: "get",
|
2630
|
+
...variables,
|
2631
|
+
signal
|
2632
|
+
});
|
2633
|
+
const installClusterExtension = (variables, signal) => dataPlaneFetch({
|
2634
|
+
url: "/cluster/{clusterId}/extensions",
|
2635
|
+
method: "post",
|
2636
|
+
...variables,
|
2637
|
+
signal
|
2638
|
+
});
|
2639
|
+
const dropClusterExtension = (variables, signal) => dataPlaneFetch({
|
2640
|
+
url: "/cluster/{clusterId}/extensions",
|
2641
|
+
method: "delete",
|
2642
|
+
...variables,
|
2643
|
+
signal
|
2644
|
+
});
|
2645
|
+
const getClusterMetrics = (variables, signal) => dataPlaneFetch({
|
2646
|
+
url: "/cluster/{clusterId}/metrics",
|
2647
|
+
method: "get",
|
2648
|
+
...variables,
|
2649
|
+
signal
|
2650
|
+
});
|
2651
|
+
const applyMigration = (variables, signal) => dataPlaneFetch({
|
2652
|
+
url: "/db/{dbBranchName}/migrations/apply",
|
2653
|
+
method: "post",
|
2654
|
+
...variables,
|
2655
|
+
signal
|
2656
|
+
});
|
2657
|
+
const startMigration = (variables, signal) => dataPlaneFetch({
|
2658
|
+
url: "/db/{dbBranchName}/migrations/start",
|
2659
|
+
method: "post",
|
2660
|
+
...variables,
|
2661
|
+
signal
|
2662
|
+
});
|
2663
|
+
const completeMigration = (variables, signal) => dataPlaneFetch({
|
2664
|
+
url: "/db/{dbBranchName}/migrations/complete",
|
2665
|
+
method: "post",
|
2666
|
+
...variables,
|
2667
|
+
signal
|
2668
|
+
});
|
2669
|
+
const rollbackMigration = (variables, signal) => dataPlaneFetch({
|
2670
|
+
url: "/db/{dbBranchName}/migrations/rollback",
|
2671
|
+
method: "post",
|
2672
|
+
...variables,
|
2673
|
+
signal
|
2674
|
+
});
|
2675
|
+
const adaptTable = (variables, signal) => dataPlaneFetch({
|
2676
|
+
url: "/db/{dbBranchName}/migrations/adapt/{tableName}",
|
2677
|
+
method: "post",
|
2678
|
+
...variables,
|
2679
|
+
signal
|
2680
|
+
});
|
2681
|
+
const adaptAllTables = (variables, signal) => dataPlaneFetch({
|
2682
|
+
url: "/db/{dbBranchName}/migrations/adapt",
|
2683
|
+
method: "post",
|
2684
|
+
...variables,
|
2685
|
+
signal
|
2686
|
+
});
|
2687
|
+
const getBranchMigrationJobStatus = (variables, signal) => dataPlaneFetch({
|
2688
|
+
url: "/db/{dbBranchName}/migrations/status",
|
2689
|
+
method: "get",
|
2690
|
+
...variables,
|
2691
|
+
signal
|
2692
|
+
});
|
2693
|
+
const getMigrationJobs = (variables, signal) => dataPlaneFetch({
|
2694
|
+
url: "/db/{dbBranchName}/migrations/jobs",
|
2695
|
+
method: "get",
|
2696
|
+
...variables,
|
2697
|
+
signal
|
2698
|
+
});
|
2699
|
+
const getMigrationJobStatus = (variables, signal) => dataPlaneFetch({
|
2700
|
+
url: "/db/{dbBranchName}/migrations/jobs/{jobId}",
|
851
2701
|
method: "get",
|
852
2702
|
...variables,
|
853
2703
|
signal
|
854
2704
|
});
|
855
|
-
const
|
856
|
-
url: "/db/{dbBranchName}/
|
2705
|
+
const getMigrationHistory = (variables, signal) => dataPlaneFetch({
|
2706
|
+
url: "/db/{dbBranchName}/migrations/history",
|
857
2707
|
method: "get",
|
858
2708
|
...variables,
|
859
2709
|
signal
|
860
2710
|
});
|
861
|
-
const pgRollMigrationHistory = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/pgroll/migrations", method: "get", ...variables, signal });
|
862
2711
|
const getBranchList = (variables, signal) => dataPlaneFetch({
|
863
2712
|
url: "/dbs/{dbName}",
|
864
2713
|
method: "get",
|
865
2714
|
...variables,
|
866
2715
|
signal
|
867
2716
|
});
|
2717
|
+
const getDatabaseSettings = (variables, signal) => dataPlaneFetch({
|
2718
|
+
url: "/dbs/{dbName}/settings",
|
2719
|
+
method: "get",
|
2720
|
+
...variables,
|
2721
|
+
signal
|
2722
|
+
});
|
2723
|
+
const updateDatabaseSettings = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/settings", method: "patch", ...variables, signal });
|
2724
|
+
const createBranchAsync = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/async", method: "put", ...variables, signal });
|
868
2725
|
const getBranchDetails = (variables, signal) => dataPlaneFetch({
|
869
2726
|
url: "/db/{dbBranchName}",
|
870
2727
|
method: "get",
|
@@ -884,12 +2741,25 @@ const getSchema = (variables, signal) => dataPlaneFetch({
|
|
884
2741
|
...variables,
|
885
2742
|
signal
|
886
2743
|
});
|
2744
|
+
const getSchemas = (variables, signal) => dataPlaneFetch({
|
2745
|
+
url: "/db/{dbBranchName}/schemas",
|
2746
|
+
method: "get",
|
2747
|
+
...variables,
|
2748
|
+
signal
|
2749
|
+
});
|
887
2750
|
const copyBranch = (variables, signal) => dataPlaneFetch({
|
888
2751
|
url: "/db/{dbBranchName}/copy",
|
889
2752
|
method: "post",
|
890
2753
|
...variables,
|
891
2754
|
signal
|
892
2755
|
});
|
2756
|
+
const getBranchMoveStatus = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/move", method: "get", ...variables, signal });
|
2757
|
+
const moveBranch = (variables, signal) => dataPlaneFetch({
|
2758
|
+
url: "/db/{dbBranchName}/move",
|
2759
|
+
method: "put",
|
2760
|
+
...variables,
|
2761
|
+
signal
|
2762
|
+
});
|
893
2763
|
const updateBranchMetadata = (variables, signal) => dataPlaneFetch({
|
894
2764
|
url: "/db/{dbBranchName}/metadata",
|
895
2765
|
method: "put",
|
@@ -910,12 +2780,42 @@ const getBranchStats = (variables, signal) => dataPlaneFetch({
|
|
910
2780
|
});
|
911
2781
|
const getGitBranchesMapping = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "get", ...variables, signal });
|
912
2782
|
const addGitBranchesEntry = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "post", ...variables, signal });
|
913
|
-
const removeGitBranchesEntry = (variables, signal) => dataPlaneFetch({
|
914
|
-
|
915
|
-
|
916
|
-
|
917
|
-
|
918
|
-
|
2783
|
+
const removeGitBranchesEntry = (variables, signal) => dataPlaneFetch({
|
2784
|
+
url: "/dbs/{dbName}/gitBranches",
|
2785
|
+
method: "delete",
|
2786
|
+
...variables,
|
2787
|
+
signal
|
2788
|
+
});
|
2789
|
+
const resolveBranch = (variables, signal) => dataPlaneFetch({
|
2790
|
+
url: "/dbs/{dbName}/resolveBranch",
|
2791
|
+
method: "get",
|
2792
|
+
...variables,
|
2793
|
+
signal
|
2794
|
+
});
|
2795
|
+
const getBranchMigrationHistory = (variables, signal) => dataPlaneFetch({
|
2796
|
+
url: "/db/{dbBranchName}/migrations",
|
2797
|
+
method: "get",
|
2798
|
+
...variables,
|
2799
|
+
signal
|
2800
|
+
});
|
2801
|
+
const getBranchMigrationPlan = (variables, signal) => dataPlaneFetch({
|
2802
|
+
url: "/db/{dbBranchName}/migrations/plan",
|
2803
|
+
method: "post",
|
2804
|
+
...variables,
|
2805
|
+
signal
|
2806
|
+
});
|
2807
|
+
const executeBranchMigrationPlan = (variables, signal) => dataPlaneFetch({
|
2808
|
+
url: "/db/{dbBranchName}/migrations/execute",
|
2809
|
+
method: "post",
|
2810
|
+
...variables,
|
2811
|
+
signal
|
2812
|
+
});
|
2813
|
+
const queryMigrationRequests = (variables, signal) => dataPlaneFetch({
|
2814
|
+
url: "/dbs/{dbName}/migrations/query",
|
2815
|
+
method: "post",
|
2816
|
+
...variables,
|
2817
|
+
signal
|
2818
|
+
});
|
919
2819
|
const createMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations", method: "post", ...variables, signal });
|
920
2820
|
const getMigrationRequest = (variables, signal) => dataPlaneFetch({
|
921
2821
|
url: "/dbs/{dbName}/migrations/{mrNumber}",
|
@@ -923,23 +2823,78 @@ const getMigrationRequest = (variables, signal) => dataPlaneFetch({
|
|
923
2823
|
...variables,
|
924
2824
|
signal
|
925
2825
|
});
|
926
|
-
const updateMigrationRequest = (variables, signal) => dataPlaneFetch({
|
927
|
-
|
928
|
-
|
929
|
-
|
2826
|
+
const updateMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2827
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}",
|
2828
|
+
method: "patch",
|
2829
|
+
...variables,
|
2830
|
+
signal
|
2831
|
+
});
|
2832
|
+
const listMigrationRequestsCommits = (variables, signal) => dataPlaneFetch({
|
2833
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/commits",
|
2834
|
+
method: "post",
|
2835
|
+
...variables,
|
2836
|
+
signal
|
2837
|
+
});
|
2838
|
+
const compareMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2839
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/compare",
|
2840
|
+
method: "post",
|
2841
|
+
...variables,
|
2842
|
+
signal
|
2843
|
+
});
|
2844
|
+
const getMigrationRequestIsMerged = (variables, signal) => dataPlaneFetch({
|
2845
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
|
2846
|
+
method: "get",
|
2847
|
+
...variables,
|
2848
|
+
signal
|
2849
|
+
});
|
930
2850
|
const mergeMigrationRequest = (variables, signal) => dataPlaneFetch({
|
931
2851
|
url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
|
932
2852
|
method: "post",
|
933
2853
|
...variables,
|
934
2854
|
signal
|
935
2855
|
});
|
936
|
-
const getBranchSchemaHistory = (variables, signal) => dataPlaneFetch({
|
937
|
-
|
938
|
-
|
939
|
-
|
940
|
-
|
941
|
-
|
942
|
-
const
|
2856
|
+
const getBranchSchemaHistory = (variables, signal) => dataPlaneFetch({
|
2857
|
+
url: "/db/{dbBranchName}/schema/history",
|
2858
|
+
method: "post",
|
2859
|
+
...variables,
|
2860
|
+
signal
|
2861
|
+
});
|
2862
|
+
const compareBranchWithUserSchema = (variables, signal) => dataPlaneFetch({
|
2863
|
+
url: "/db/{dbBranchName}/schema/compare",
|
2864
|
+
method: "post",
|
2865
|
+
...variables,
|
2866
|
+
signal
|
2867
|
+
});
|
2868
|
+
const compareBranchSchemas = (variables, signal) => dataPlaneFetch({
|
2869
|
+
url: "/db/{dbBranchName}/schema/compare/{branchName}",
|
2870
|
+
method: "post",
|
2871
|
+
...variables,
|
2872
|
+
signal
|
2873
|
+
});
|
2874
|
+
const updateBranchSchema = (variables, signal) => dataPlaneFetch({
|
2875
|
+
url: "/db/{dbBranchName}/schema/update",
|
2876
|
+
method: "post",
|
2877
|
+
...variables,
|
2878
|
+
signal
|
2879
|
+
});
|
2880
|
+
const previewBranchSchemaEdit = (variables, signal) => dataPlaneFetch({
|
2881
|
+
url: "/db/{dbBranchName}/schema/preview",
|
2882
|
+
method: "post",
|
2883
|
+
...variables,
|
2884
|
+
signal
|
2885
|
+
});
|
2886
|
+
const applyBranchSchemaEdit = (variables, signal) => dataPlaneFetch({
|
2887
|
+
url: "/db/{dbBranchName}/schema/apply",
|
2888
|
+
method: "post",
|
2889
|
+
...variables,
|
2890
|
+
signal
|
2891
|
+
});
|
2892
|
+
const pushBranchMigrations = (variables, signal) => dataPlaneFetch({
|
2893
|
+
url: "/db/{dbBranchName}/schema/push",
|
2894
|
+
method: "post",
|
2895
|
+
...variables,
|
2896
|
+
signal
|
2897
|
+
});
|
943
2898
|
const createTable = (variables, signal) => dataPlaneFetch({
|
944
2899
|
url: "/db/{dbBranchName}/tables/{tableName}",
|
945
2900
|
method: "put",
|
@@ -952,14 +2907,24 @@ const deleteTable = (variables, signal) => dataPlaneFetch({
|
|
952
2907
|
...variables,
|
953
2908
|
signal
|
954
2909
|
});
|
955
|
-
const updateTable = (variables, signal) => dataPlaneFetch({
|
2910
|
+
const updateTable = (variables, signal) => dataPlaneFetch({
|
2911
|
+
url: "/db/{dbBranchName}/tables/{tableName}",
|
2912
|
+
method: "patch",
|
2913
|
+
...variables,
|
2914
|
+
signal
|
2915
|
+
});
|
956
2916
|
const getTableSchema = (variables, signal) => dataPlaneFetch({
|
957
2917
|
url: "/db/{dbBranchName}/tables/{tableName}/schema",
|
958
2918
|
method: "get",
|
959
2919
|
...variables,
|
960
2920
|
signal
|
961
2921
|
});
|
962
|
-
const setTableSchema = (variables, signal) => dataPlaneFetch({
|
2922
|
+
const setTableSchema = (variables, signal) => dataPlaneFetch({
|
2923
|
+
url: "/db/{dbBranchName}/tables/{tableName}/schema",
|
2924
|
+
method: "put",
|
2925
|
+
...variables,
|
2926
|
+
signal
|
2927
|
+
});
|
963
2928
|
const getTableColumns = (variables, signal) => dataPlaneFetch({
|
964
2929
|
url: "/db/{dbBranchName}/tables/{tableName}/columns",
|
965
2930
|
method: "get",
|
@@ -967,23 +2932,43 @@ const getTableColumns = (variables, signal) => dataPlaneFetch({
|
|
967
2932
|
signal
|
968
2933
|
});
|
969
2934
|
const addTableColumn = (variables, signal) => dataPlaneFetch(
|
970
|
-
{
|
2935
|
+
{
|
2936
|
+
url: "/db/{dbBranchName}/tables/{tableName}/columns",
|
2937
|
+
method: "post",
|
2938
|
+
...variables,
|
2939
|
+
signal
|
2940
|
+
}
|
971
2941
|
);
|
972
2942
|
const getColumn = (variables, signal) => dataPlaneFetch({
|
973
2943
|
url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
|
974
|
-
method: "get",
|
2944
|
+
method: "get",
|
2945
|
+
...variables,
|
2946
|
+
signal
|
2947
|
+
});
|
2948
|
+
const updateColumn = (variables, signal) => dataPlaneFetch({
|
2949
|
+
url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
|
2950
|
+
method: "patch",
|
975
2951
|
...variables,
|
976
2952
|
signal
|
977
2953
|
});
|
978
|
-
const updateColumn = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}", method: "patch", ...variables, signal });
|
979
2954
|
const deleteColumn = (variables, signal) => dataPlaneFetch({
|
980
2955
|
url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
|
981
2956
|
method: "delete",
|
982
2957
|
...variables,
|
983
2958
|
signal
|
984
2959
|
});
|
985
|
-
const branchTransaction = (variables, signal) => dataPlaneFetch({
|
986
|
-
|
2960
|
+
const branchTransaction = (variables, signal) => dataPlaneFetch({
|
2961
|
+
url: "/db/{dbBranchName}/transaction",
|
2962
|
+
method: "post",
|
2963
|
+
...variables,
|
2964
|
+
signal
|
2965
|
+
});
|
2966
|
+
const insertRecord = (variables, signal) => dataPlaneFetch({
|
2967
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data",
|
2968
|
+
method: "post",
|
2969
|
+
...variables,
|
2970
|
+
signal
|
2971
|
+
});
|
987
2972
|
const getFileItem = (variables, signal) => dataPlaneFetch({
|
988
2973
|
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}/column/{columnName}/file/{fileId}",
|
989
2974
|
method: "get",
|
@@ -1026,11 +3011,36 @@ const getRecord = (variables, signal) => dataPlaneFetch({
|
|
1026
3011
|
...variables,
|
1027
3012
|
signal
|
1028
3013
|
});
|
1029
|
-
const insertRecordWithID = (variables, signal) => dataPlaneFetch({
|
1030
|
-
|
1031
|
-
|
1032
|
-
|
1033
|
-
|
3014
|
+
const insertRecordWithID = (variables, signal) => dataPlaneFetch({
|
3015
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
3016
|
+
method: "put",
|
3017
|
+
...variables,
|
3018
|
+
signal
|
3019
|
+
});
|
3020
|
+
const updateRecordWithID = (variables, signal) => dataPlaneFetch({
|
3021
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
3022
|
+
method: "patch",
|
3023
|
+
...variables,
|
3024
|
+
signal
|
3025
|
+
});
|
3026
|
+
const upsertRecordWithID = (variables, signal) => dataPlaneFetch({
|
3027
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
3028
|
+
method: "post",
|
3029
|
+
...variables,
|
3030
|
+
signal
|
3031
|
+
});
|
3032
|
+
const deleteRecord = (variables, signal) => dataPlaneFetch({
|
3033
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
3034
|
+
method: "delete",
|
3035
|
+
...variables,
|
3036
|
+
signal
|
3037
|
+
});
|
3038
|
+
const bulkInsertTableRecords = (variables, signal) => dataPlaneFetch({
|
3039
|
+
url: "/db/{dbBranchName}/tables/{tableName}/bulk",
|
3040
|
+
method: "post",
|
3041
|
+
...variables,
|
3042
|
+
signal
|
3043
|
+
});
|
1034
3044
|
const queryTable = (variables, signal) => dataPlaneFetch({
|
1035
3045
|
url: "/db/{dbBranchName}/tables/{tableName}/query",
|
1036
3046
|
method: "post",
|
@@ -1049,16 +3059,36 @@ const searchTable = (variables, signal) => dataPlaneFetch({
|
|
1049
3059
|
...variables,
|
1050
3060
|
signal
|
1051
3061
|
});
|
1052
|
-
const vectorSearchTable = (variables, signal) => dataPlaneFetch({
|
3062
|
+
const vectorSearchTable = (variables, signal) => dataPlaneFetch({
|
3063
|
+
url: "/db/{dbBranchName}/tables/{tableName}/vectorSearch",
|
3064
|
+
method: "post",
|
3065
|
+
...variables,
|
3066
|
+
signal
|
3067
|
+
});
|
1053
3068
|
const askTable = (variables, signal) => dataPlaneFetch({
|
1054
3069
|
url: "/db/{dbBranchName}/tables/{tableName}/ask",
|
1055
3070
|
method: "post",
|
1056
3071
|
...variables,
|
1057
3072
|
signal
|
1058
3073
|
});
|
1059
|
-
const askTableSession = (variables, signal) => dataPlaneFetch({
|
1060
|
-
|
1061
|
-
|
3074
|
+
const askTableSession = (variables, signal) => dataPlaneFetch({
|
3075
|
+
url: "/db/{dbBranchName}/tables/{tableName}/ask/{sessionId}",
|
3076
|
+
method: "post",
|
3077
|
+
...variables,
|
3078
|
+
signal
|
3079
|
+
});
|
3080
|
+
const summarizeTable = (variables, signal) => dataPlaneFetch({
|
3081
|
+
url: "/db/{dbBranchName}/tables/{tableName}/summarize",
|
3082
|
+
method: "post",
|
3083
|
+
...variables,
|
3084
|
+
signal
|
3085
|
+
});
|
3086
|
+
const aggregateTable = (variables, signal) => dataPlaneFetch({
|
3087
|
+
url: "/db/{dbBranchName}/tables/{tableName}/aggregate",
|
3088
|
+
method: "post",
|
3089
|
+
...variables,
|
3090
|
+
signal
|
3091
|
+
});
|
1062
3092
|
const fileAccess = (variables, signal) => dataPlaneFetch({
|
1063
3093
|
url: "/file/{fileId}",
|
1064
3094
|
method: "get",
|
@@ -1077,27 +3107,34 @@ const sqlQuery = (variables, signal) => dataPlaneFetch({
|
|
1077
3107
|
...variables,
|
1078
3108
|
signal
|
1079
3109
|
});
|
3110
|
+
const sqlBatchQuery = (variables, signal) => dataPlaneFetch({
|
3111
|
+
url: "/db/{dbBranchName}/sql/batch",
|
3112
|
+
method: "post",
|
3113
|
+
...variables,
|
3114
|
+
signal
|
3115
|
+
});
|
1080
3116
|
const operationsByTag$2 = {
|
1081
|
-
|
1082
|
-
|
1083
|
-
|
1084
|
-
|
1085
|
-
|
1086
|
-
|
1087
|
-
|
1088
|
-
createBranch,
|
1089
|
-
deleteBranch,
|
1090
|
-
copyBranch,
|
1091
|
-
updateBranchMetadata,
|
1092
|
-
getBranchMetadata,
|
1093
|
-
getBranchStats,
|
1094
|
-
getGitBranchesMapping,
|
1095
|
-
addGitBranchesEntry,
|
1096
|
-
removeGitBranchesEntry,
|
1097
|
-
resolveBranch
|
3117
|
+
tasks: { getTasks, getTaskStatus },
|
3118
|
+
cluster: {
|
3119
|
+
listClusterBranches,
|
3120
|
+
listClusterExtensions,
|
3121
|
+
installClusterExtension,
|
3122
|
+
dropClusterExtension,
|
3123
|
+
getClusterMetrics
|
1098
3124
|
},
|
1099
3125
|
migrations: {
|
3126
|
+
applyMigration,
|
3127
|
+
startMigration,
|
3128
|
+
completeMigration,
|
3129
|
+
rollbackMigration,
|
3130
|
+
adaptTable,
|
3131
|
+
adaptAllTables,
|
3132
|
+
getBranchMigrationJobStatus,
|
3133
|
+
getMigrationJobs,
|
3134
|
+
getMigrationJobStatus,
|
3135
|
+
getMigrationHistory,
|
1100
3136
|
getSchema,
|
3137
|
+
getSchemas,
|
1101
3138
|
getBranchMigrationHistory,
|
1102
3139
|
getBranchMigrationPlan,
|
1103
3140
|
executeBranchMigrationPlan,
|
@@ -1109,6 +3146,24 @@ const operationsByTag$2 = {
|
|
1109
3146
|
applyBranchSchemaEdit,
|
1110
3147
|
pushBranchMigrations
|
1111
3148
|
},
|
3149
|
+
branch: {
|
3150
|
+
getBranchList,
|
3151
|
+
createBranchAsync,
|
3152
|
+
getBranchDetails,
|
3153
|
+
createBranch,
|
3154
|
+
deleteBranch,
|
3155
|
+
copyBranch,
|
3156
|
+
getBranchMoveStatus,
|
3157
|
+
moveBranch,
|
3158
|
+
updateBranchMetadata,
|
3159
|
+
getBranchMetadata,
|
3160
|
+
getBranchStats,
|
3161
|
+
getGitBranchesMapping,
|
3162
|
+
addGitBranchesEntry,
|
3163
|
+
removeGitBranchesEntry,
|
3164
|
+
resolveBranch
|
3165
|
+
},
|
3166
|
+
database: { getDatabaseSettings, updateDatabaseSettings },
|
1112
3167
|
migrationRequests: {
|
1113
3168
|
queryMigrationRequests,
|
1114
3169
|
createMigrationRequest,
|
@@ -1141,7 +3196,16 @@ const operationsByTag$2 = {
|
|
1141
3196
|
deleteRecord,
|
1142
3197
|
bulkInsertTableRecords
|
1143
3198
|
},
|
1144
|
-
files: {
|
3199
|
+
files: {
|
3200
|
+
getFileItem,
|
3201
|
+
putFileItem,
|
3202
|
+
deleteFileItem,
|
3203
|
+
getFile,
|
3204
|
+
putFile,
|
3205
|
+
deleteFile,
|
3206
|
+
fileAccess,
|
3207
|
+
fileUpload
|
3208
|
+
},
|
1145
3209
|
searchAndFilter: {
|
1146
3210
|
queryTable,
|
1147
3211
|
searchBranch,
|
@@ -1152,7 +3216,7 @@ const operationsByTag$2 = {
|
|
1152
3216
|
summarizeTable,
|
1153
3217
|
aggregateTable
|
1154
3218
|
},
|
1155
|
-
sql: { sqlQuery }
|
3219
|
+
sql: { sqlQuery, sqlBatchQuery }
|
1156
3220
|
};
|
1157
3221
|
|
1158
3222
|
const controlPlaneFetch = async (options) => fetch$1({ ...options, endpoint: "controlPlane" });
|
@@ -1219,7 +3283,12 @@ const deleteOAuthAccessToken = (variables, signal) => controlPlaneFetch({
|
|
1219
3283
|
...variables,
|
1220
3284
|
signal
|
1221
3285
|
});
|
1222
|
-
const updateOAuthAccessToken = (variables, signal) => controlPlaneFetch({
|
3286
|
+
const updateOAuthAccessToken = (variables, signal) => controlPlaneFetch({
|
3287
|
+
url: "/user/oauth/tokens/{token}",
|
3288
|
+
method: "patch",
|
3289
|
+
...variables,
|
3290
|
+
signal
|
3291
|
+
});
|
1223
3292
|
const getWorkspacesList = (variables, signal) => controlPlaneFetch({
|
1224
3293
|
url: "/workspaces",
|
1225
3294
|
method: "get",
|
@@ -1250,47 +3319,150 @@ const deleteWorkspace = (variables, signal) => controlPlaneFetch({
|
|
1250
3319
|
...variables,
|
1251
3320
|
signal
|
1252
3321
|
});
|
1253
|
-
const
|
1254
|
-
|
3322
|
+
const getWorkspaceSettings = (variables, signal) => controlPlaneFetch({
|
3323
|
+
url: "/workspaces/{workspaceId}/settings",
|
3324
|
+
method: "get",
|
3325
|
+
...variables,
|
3326
|
+
signal
|
3327
|
+
});
|
3328
|
+
const updateWorkspaceSettings = (variables, signal) => controlPlaneFetch({
|
3329
|
+
url: "/workspaces/{workspaceId}/settings",
|
3330
|
+
method: "patch",
|
3331
|
+
...variables,
|
3332
|
+
signal
|
3333
|
+
});
|
3334
|
+
const getWorkspaceMembersList = (variables, signal) => controlPlaneFetch({
|
3335
|
+
url: "/workspaces/{workspaceId}/members",
|
3336
|
+
method: "get",
|
3337
|
+
...variables,
|
3338
|
+
signal
|
3339
|
+
});
|
3340
|
+
const updateWorkspaceMemberRole = (variables, signal) => controlPlaneFetch({
|
3341
|
+
url: "/workspaces/{workspaceId}/members/{userId}",
|
3342
|
+
method: "put",
|
3343
|
+
...variables,
|
3344
|
+
signal
|
3345
|
+
});
|
1255
3346
|
const removeWorkspaceMember = (variables, signal) => controlPlaneFetch({
|
1256
3347
|
url: "/workspaces/{workspaceId}/members/{userId}",
|
1257
3348
|
method: "delete",
|
1258
3349
|
...variables,
|
1259
3350
|
signal
|
1260
3351
|
});
|
1261
|
-
const inviteWorkspaceMember = (variables, signal) => controlPlaneFetch({
|
1262
|
-
|
1263
|
-
|
1264
|
-
|
1265
|
-
|
1266
|
-
|
1267
|
-
const
|
3352
|
+
const inviteWorkspaceMember = (variables, signal) => controlPlaneFetch({
|
3353
|
+
url: "/workspaces/{workspaceId}/invites",
|
3354
|
+
method: "post",
|
3355
|
+
...variables,
|
3356
|
+
signal
|
3357
|
+
});
|
3358
|
+
const updateWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3359
|
+
url: "/workspaces/{workspaceId}/invites/{inviteId}",
|
3360
|
+
method: "patch",
|
3361
|
+
...variables,
|
3362
|
+
signal
|
3363
|
+
});
|
3364
|
+
const cancelWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3365
|
+
url: "/workspaces/{workspaceId}/invites/{inviteId}",
|
3366
|
+
method: "delete",
|
3367
|
+
...variables,
|
3368
|
+
signal
|
3369
|
+
});
|
3370
|
+
const acceptWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3371
|
+
url: "/workspaces/{workspaceId}/invites/{inviteKey}/accept",
|
3372
|
+
method: "post",
|
3373
|
+
...variables,
|
3374
|
+
signal
|
3375
|
+
});
|
3376
|
+
const resendWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3377
|
+
url: "/workspaces/{workspaceId}/invites/{inviteId}/resend",
|
3378
|
+
method: "post",
|
3379
|
+
...variables,
|
3380
|
+
signal
|
3381
|
+
});
|
3382
|
+
const listClusters = (variables, signal) => controlPlaneFetch({
|
3383
|
+
url: "/workspaces/{workspaceId}/clusters",
|
3384
|
+
method: "get",
|
3385
|
+
...variables,
|
3386
|
+
signal
|
3387
|
+
});
|
3388
|
+
const createCluster = (variables, signal) => controlPlaneFetch({
|
3389
|
+
url: "/workspaces/{workspaceId}/clusters",
|
3390
|
+
method: "post",
|
3391
|
+
...variables,
|
3392
|
+
signal
|
3393
|
+
});
|
1268
3394
|
const getCluster = (variables, signal) => controlPlaneFetch({
|
1269
3395
|
url: "/workspaces/{workspaceId}/clusters/{clusterId}",
|
1270
3396
|
method: "get",
|
1271
3397
|
...variables,
|
1272
3398
|
signal
|
1273
3399
|
});
|
1274
|
-
const updateCluster = (variables, signal) => controlPlaneFetch({
|
3400
|
+
const updateCluster = (variables, signal) => controlPlaneFetch({
|
3401
|
+
url: "/workspaces/{workspaceId}/clusters/{clusterId}",
|
3402
|
+
method: "patch",
|
3403
|
+
...variables,
|
3404
|
+
signal
|
3405
|
+
});
|
3406
|
+
const deleteCluster = (variables, signal) => controlPlaneFetch({
|
3407
|
+
url: "/workspaces/{workspaceId}/clusters/{clusterId}",
|
3408
|
+
method: "delete",
|
3409
|
+
...variables,
|
3410
|
+
signal
|
3411
|
+
});
|
1275
3412
|
const getDatabaseList = (variables, signal) => controlPlaneFetch({
|
1276
3413
|
url: "/workspaces/{workspaceId}/dbs",
|
1277
3414
|
method: "get",
|
1278
3415
|
...variables,
|
1279
3416
|
signal
|
1280
3417
|
});
|
1281
|
-
const createDatabase = (variables, signal) => controlPlaneFetch({
|
3418
|
+
const createDatabase = (variables, signal) => controlPlaneFetch({
|
3419
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
3420
|
+
method: "put",
|
3421
|
+
...variables,
|
3422
|
+
signal
|
3423
|
+
});
|
1282
3424
|
const deleteDatabase = (variables, signal) => controlPlaneFetch({
|
1283
3425
|
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
1284
3426
|
method: "delete",
|
1285
3427
|
...variables,
|
1286
3428
|
signal
|
1287
3429
|
});
|
1288
|
-
const getDatabaseMetadata = (variables, signal) => controlPlaneFetch({
|
1289
|
-
|
1290
|
-
|
1291
|
-
|
1292
|
-
|
1293
|
-
|
3430
|
+
const getDatabaseMetadata = (variables, signal) => controlPlaneFetch({
|
3431
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
3432
|
+
method: "get",
|
3433
|
+
...variables,
|
3434
|
+
signal
|
3435
|
+
});
|
3436
|
+
const updateDatabaseMetadata = (variables, signal) => controlPlaneFetch({
|
3437
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
3438
|
+
method: "patch",
|
3439
|
+
...variables,
|
3440
|
+
signal
|
3441
|
+
});
|
3442
|
+
const renameDatabase = (variables, signal) => controlPlaneFetch({
|
3443
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/rename",
|
3444
|
+
method: "post",
|
3445
|
+
...variables,
|
3446
|
+
signal
|
3447
|
+
});
|
3448
|
+
const getDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
|
3449
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
|
3450
|
+
method: "get",
|
3451
|
+
...variables,
|
3452
|
+
signal
|
3453
|
+
});
|
3454
|
+
const updateDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
|
3455
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
|
3456
|
+
method: "put",
|
3457
|
+
...variables,
|
3458
|
+
signal
|
3459
|
+
});
|
3460
|
+
const deleteDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
|
3461
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
|
3462
|
+
method: "delete",
|
3463
|
+
...variables,
|
3464
|
+
signal
|
3465
|
+
});
|
1294
3466
|
const listRegions = (variables, signal) => controlPlaneFetch({
|
1295
3467
|
url: "/workspaces/{workspaceId}/regions",
|
1296
3468
|
method: "get",
|
@@ -1315,6 +3487,8 @@ const operationsByTag$1 = {
|
|
1315
3487
|
getWorkspace,
|
1316
3488
|
updateWorkspace,
|
1317
3489
|
deleteWorkspace,
|
3490
|
+
getWorkspaceSettings,
|
3491
|
+
updateWorkspaceSettings,
|
1318
3492
|
getWorkspaceMembersList,
|
1319
3493
|
updateWorkspaceMemberRole,
|
1320
3494
|
removeWorkspaceMember
|
@@ -1326,7 +3500,13 @@ const operationsByTag$1 = {
|
|
1326
3500
|
acceptWorkspaceMemberInvite,
|
1327
3501
|
resendWorkspaceMemberInvite
|
1328
3502
|
},
|
1329
|
-
xbcontrolOther: {
|
3503
|
+
xbcontrolOther: {
|
3504
|
+
listClusters,
|
3505
|
+
createCluster,
|
3506
|
+
getCluster,
|
3507
|
+
updateCluster,
|
3508
|
+
deleteCluster
|
3509
|
+
},
|
1330
3510
|
databases: {
|
1331
3511
|
getDatabaseList,
|
1332
3512
|
createDatabase,
|
@@ -1413,8 +3593,7 @@ function buildTransformString(transformations) {
|
|
1413
3593
|
).join(",");
|
1414
3594
|
}
|
1415
3595
|
function transformImage(url, ...transformations) {
|
1416
|
-
if (!isDefined(url))
|
1417
|
-
return void 0;
|
3596
|
+
if (!isDefined(url)) return void 0;
|
1418
3597
|
const newTransformations = buildTransformString(transformations);
|
1419
3598
|
const { hostname, pathname, search } = new URL(url);
|
1420
3599
|
const pathParts = pathname.split("/");
|
@@ -1527,8 +3706,7 @@ class XataFile {
|
|
1527
3706
|
}
|
1528
3707
|
}
|
1529
3708
|
const parseInputFileEntry = async (entry) => {
|
1530
|
-
if (!isDefined(entry))
|
1531
|
-
return null;
|
3709
|
+
if (!isDefined(entry)) return null;
|
1532
3710
|
const { id, name, mediaType, base64Content, enablePublicUrl, signedUrlTimeout, uploadUrlTimeout } = await entry;
|
1533
3711
|
return compactObject({
|
1534
3712
|
id,
|
@@ -1543,24 +3721,19 @@ const parseInputFileEntry = async (entry) => {
|
|
1543
3721
|
};
|
1544
3722
|
|
1545
3723
|
function cleanFilter(filter) {
|
1546
|
-
if (!isDefined(filter))
|
1547
|
-
|
1548
|
-
if (!isObject(filter))
|
1549
|
-
return filter;
|
3724
|
+
if (!isDefined(filter)) return void 0;
|
3725
|
+
if (!isObject(filter)) return filter;
|
1550
3726
|
const values = Object.fromEntries(
|
1551
3727
|
Object.entries(filter).reduce((acc, [key, value]) => {
|
1552
|
-
if (!isDefined(value))
|
1553
|
-
return acc;
|
3728
|
+
if (!isDefined(value)) return acc;
|
1554
3729
|
if (Array.isArray(value)) {
|
1555
3730
|
const clean = value.map((item) => cleanFilter(item)).filter((item) => isDefined(item));
|
1556
|
-
if (clean.length === 0)
|
1557
|
-
return acc;
|
3731
|
+
if (clean.length === 0) return acc;
|
1558
3732
|
return [...acc, [key, clean]];
|
1559
3733
|
}
|
1560
3734
|
if (isObject(value)) {
|
1561
3735
|
const clean = cleanFilter(value);
|
1562
|
-
if (!isDefined(clean))
|
1563
|
-
return acc;
|
3736
|
+
if (!isDefined(clean)) return acc;
|
1564
3737
|
return [...acc, [key, clean]];
|
1565
3738
|
}
|
1566
3739
|
return [...acc, [key, value]];
|
@@ -1570,10 +3743,8 @@ function cleanFilter(filter) {
|
|
1570
3743
|
}
|
1571
3744
|
|
1572
3745
|
function stringifyJson(value) {
|
1573
|
-
if (!isDefined(value))
|
1574
|
-
|
1575
|
-
if (isString(value))
|
1576
|
-
return value;
|
3746
|
+
if (!isDefined(value)) return value;
|
3747
|
+
if (isString(value)) return value;
|
1577
3748
|
try {
|
1578
3749
|
return JSON.stringify(value);
|
1579
3750
|
} catch (e) {
|
@@ -1588,31 +3759,20 @@ function parseJson(value) {
|
|
1588
3759
|
}
|
1589
3760
|
}
|
1590
3761
|
|
1591
|
-
var
|
1592
|
-
|
1593
|
-
throw TypeError("Cannot " + msg);
|
1594
|
-
};
|
1595
|
-
var __privateGet$6 = (obj, member, getter) => {
|
1596
|
-
__accessCheck$6(obj, member, "read from private field");
|
1597
|
-
return getter ? getter.call(obj) : member.get(obj);
|
1598
|
-
};
|
1599
|
-
var __privateAdd$6 = (obj, member, value) => {
|
1600
|
-
if (member.has(obj))
|
1601
|
-
throw TypeError("Cannot add the same private member more than once");
|
1602
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
1603
|
-
};
|
1604
|
-
var __privateSet$6 = (obj, member, value, setter) => {
|
1605
|
-
__accessCheck$6(obj, member, "write to private field");
|
1606
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
1607
|
-
return value;
|
3762
|
+
var __typeError$6 = (msg) => {
|
3763
|
+
throw TypeError(msg);
|
1608
3764
|
};
|
3765
|
+
var __accessCheck$6 = (obj, member, msg) => member.has(obj) || __typeError$6("Cannot " + msg);
|
3766
|
+
var __privateGet$5 = (obj, member, getter) => (__accessCheck$6(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
3767
|
+
var __privateAdd$6 = (obj, member, value) => member.has(obj) ? __typeError$6("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
3768
|
+
var __privateSet$4 = (obj, member, value, setter) => (__accessCheck$6(obj, member, "write to private field"), member.set(obj, value), value);
|
1609
3769
|
var _query, _page;
|
1610
3770
|
class Page {
|
1611
3771
|
constructor(query, meta, records = []) {
|
1612
|
-
__privateAdd$6(this, _query
|
1613
|
-
__privateSet$
|
3772
|
+
__privateAdd$6(this, _query);
|
3773
|
+
__privateSet$4(this, _query, query);
|
1614
3774
|
this.meta = meta;
|
1615
|
-
this.records = new
|
3775
|
+
this.records = new PageRecordArray(this, records);
|
1616
3776
|
}
|
1617
3777
|
/**
|
1618
3778
|
* Retrieves the next page of results.
|
@@ -1621,7 +3781,7 @@ class Page {
|
|
1621
3781
|
* @returns The next page or results.
|
1622
3782
|
*/
|
1623
3783
|
async nextPage(size, offset) {
|
1624
|
-
return __privateGet$
|
3784
|
+
return __privateGet$5(this, _query).getPaginated({ pagination: { size, offset, after: this.meta.page.cursor } });
|
1625
3785
|
}
|
1626
3786
|
/**
|
1627
3787
|
* Retrieves the previous page of results.
|
@@ -1630,7 +3790,7 @@ class Page {
|
|
1630
3790
|
* @returns The previous page or results.
|
1631
3791
|
*/
|
1632
3792
|
async previousPage(size, offset) {
|
1633
|
-
return __privateGet$
|
3793
|
+
return __privateGet$5(this, _query).getPaginated({ pagination: { size, offset, before: this.meta.page.cursor } });
|
1634
3794
|
}
|
1635
3795
|
/**
|
1636
3796
|
* Retrieves the start page of results.
|
@@ -1639,7 +3799,7 @@ class Page {
|
|
1639
3799
|
* @returns The start page or results.
|
1640
3800
|
*/
|
1641
3801
|
async startPage(size, offset) {
|
1642
|
-
return __privateGet$
|
3802
|
+
return __privateGet$5(this, _query).getPaginated({ pagination: { size, offset, start: this.meta.page.cursor } });
|
1643
3803
|
}
|
1644
3804
|
/**
|
1645
3805
|
* Retrieves the end page of results.
|
@@ -1648,7 +3808,7 @@ class Page {
|
|
1648
3808
|
* @returns The end page or results.
|
1649
3809
|
*/
|
1650
3810
|
async endPage(size, offset) {
|
1651
|
-
return __privateGet$
|
3811
|
+
return __privateGet$5(this, _query).getPaginated({ pagination: { size, offset, end: this.meta.page.cursor } });
|
1652
3812
|
}
|
1653
3813
|
/**
|
1654
3814
|
* Shortcut method to check if there will be additional results if the next page of results is retrieved.
|
@@ -1666,11 +3826,38 @@ const PAGINATION_DEFAULT_OFFSET = 0;
|
|
1666
3826
|
function isCursorPaginationOptions(options) {
|
1667
3827
|
return isDefined(options) && (isDefined(options.start) || isDefined(options.end) || isDefined(options.after) || isDefined(options.before));
|
1668
3828
|
}
|
1669
|
-
|
3829
|
+
class RecordArray extends Array {
|
3830
|
+
constructor(...args) {
|
3831
|
+
super(...RecordArray.parseConstructorParams(...args));
|
3832
|
+
}
|
3833
|
+
static parseConstructorParams(...args) {
|
3834
|
+
if (args.length === 1 && typeof args[0] === "number") {
|
3835
|
+
return new Array(args[0]);
|
3836
|
+
}
|
3837
|
+
if (args.length <= 1 && Array.isArray(args[0] ?? [])) {
|
3838
|
+
const result = args[0] ?? [];
|
3839
|
+
return new Array(...result);
|
3840
|
+
}
|
3841
|
+
return new Array(...args);
|
3842
|
+
}
|
3843
|
+
toArray() {
|
3844
|
+
return new Array(...this);
|
3845
|
+
}
|
3846
|
+
toSerializable() {
|
3847
|
+
return JSON.parse(this.toString());
|
3848
|
+
}
|
3849
|
+
toString() {
|
3850
|
+
return JSON.stringify(this.toArray());
|
3851
|
+
}
|
3852
|
+
map(callbackfn, thisArg) {
|
3853
|
+
return this.toArray().map(callbackfn, thisArg);
|
3854
|
+
}
|
3855
|
+
}
|
3856
|
+
const _PageRecordArray = class _PageRecordArray extends Array {
|
1670
3857
|
constructor(...args) {
|
1671
|
-
super(...
|
1672
|
-
__privateAdd$6(this, _page
|
1673
|
-
__privateSet$
|
3858
|
+
super(..._PageRecordArray.parseConstructorParams(...args));
|
3859
|
+
__privateAdd$6(this, _page);
|
3860
|
+
__privateSet$4(this, _page, isObject(args[0]?.meta) ? args[0] : { meta: { page: { cursor: "", more: false } }, records: [] });
|
1674
3861
|
}
|
1675
3862
|
static parseConstructorParams(...args) {
|
1676
3863
|
if (args.length === 1 && typeof args[0] === "number") {
|
@@ -1700,8 +3887,8 @@ const _RecordArray = class _RecordArray extends Array {
|
|
1700
3887
|
* @returns A new array of objects
|
1701
3888
|
*/
|
1702
3889
|
async nextPage(size, offset) {
|
1703
|
-
const newPage = await __privateGet$
|
1704
|
-
return new
|
3890
|
+
const newPage = await __privateGet$5(this, _page).nextPage(size, offset);
|
3891
|
+
return new _PageRecordArray(newPage);
|
1705
3892
|
}
|
1706
3893
|
/**
|
1707
3894
|
* Retrieve previous page of records
|
@@ -1709,8 +3896,8 @@ const _RecordArray = class _RecordArray extends Array {
|
|
1709
3896
|
* @returns A new array of objects
|
1710
3897
|
*/
|
1711
3898
|
async previousPage(size, offset) {
|
1712
|
-
const newPage = await __privateGet$
|
1713
|
-
return new
|
3899
|
+
const newPage = await __privateGet$5(this, _page).previousPage(size, offset);
|
3900
|
+
return new _PageRecordArray(newPage);
|
1714
3901
|
}
|
1715
3902
|
/**
|
1716
3903
|
* Retrieve start page of records
|
@@ -1718,8 +3905,8 @@ const _RecordArray = class _RecordArray extends Array {
|
|
1718
3905
|
* @returns A new array of objects
|
1719
3906
|
*/
|
1720
3907
|
async startPage(size, offset) {
|
1721
|
-
const newPage = await __privateGet$
|
1722
|
-
return new
|
3908
|
+
const newPage = await __privateGet$5(this, _page).startPage(size, offset);
|
3909
|
+
return new _PageRecordArray(newPage);
|
1723
3910
|
}
|
1724
3911
|
/**
|
1725
3912
|
* Retrieve end page of records
|
@@ -1727,69 +3914,55 @@ const _RecordArray = class _RecordArray extends Array {
|
|
1727
3914
|
* @returns A new array of objects
|
1728
3915
|
*/
|
1729
3916
|
async endPage(size, offset) {
|
1730
|
-
const newPage = await __privateGet$
|
1731
|
-
return new
|
3917
|
+
const newPage = await __privateGet$5(this, _page).endPage(size, offset);
|
3918
|
+
return new _PageRecordArray(newPage);
|
1732
3919
|
}
|
1733
3920
|
/**
|
1734
3921
|
* @returns Boolean indicating if there is a next page
|
1735
3922
|
*/
|
1736
3923
|
hasNextPage() {
|
1737
|
-
return __privateGet$
|
3924
|
+
return __privateGet$5(this, _page).meta.page.more;
|
1738
3925
|
}
|
1739
3926
|
};
|
1740
3927
|
_page = new WeakMap();
|
1741
|
-
let
|
3928
|
+
let PageRecordArray = _PageRecordArray;
|
1742
3929
|
|
1743
|
-
var
|
1744
|
-
|
1745
|
-
throw TypeError("Cannot " + msg);
|
1746
|
-
};
|
1747
|
-
var __privateGet$5 = (obj, member, getter) => {
|
1748
|
-
__accessCheck$5(obj, member, "read from private field");
|
1749
|
-
return getter ? getter.call(obj) : member.get(obj);
|
1750
|
-
};
|
1751
|
-
var __privateAdd$5 = (obj, member, value) => {
|
1752
|
-
if (member.has(obj))
|
1753
|
-
throw TypeError("Cannot add the same private member more than once");
|
1754
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
1755
|
-
};
|
1756
|
-
var __privateSet$5 = (obj, member, value, setter) => {
|
1757
|
-
__accessCheck$5(obj, member, "write to private field");
|
1758
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
1759
|
-
return value;
|
1760
|
-
};
|
1761
|
-
var __privateMethod$3 = (obj, member, method) => {
|
1762
|
-
__accessCheck$5(obj, member, "access private method");
|
1763
|
-
return method;
|
3930
|
+
var __typeError$5 = (msg) => {
|
3931
|
+
throw TypeError(msg);
|
1764
3932
|
};
|
1765
|
-
var
|
3933
|
+
var __accessCheck$5 = (obj, member, msg) => member.has(obj) || __typeError$5("Cannot " + msg);
|
3934
|
+
var __privateGet$4 = (obj, member, getter) => (__accessCheck$5(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
3935
|
+
var __privateAdd$5 = (obj, member, value) => member.has(obj) ? __typeError$5("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
3936
|
+
var __privateSet$3 = (obj, member, value, setter) => (__accessCheck$5(obj, member, "write to private field"), member.set(obj, value), value);
|
3937
|
+
var __privateMethod$3 = (obj, member, method) => (__accessCheck$5(obj, member, "access private method"), method);
|
3938
|
+
var _table$1, _repository, _data, _Query_instances, cleanFilterConstraint_fn;
|
1766
3939
|
const _Query = class _Query {
|
1767
3940
|
constructor(repository, table, data, rawParent) {
|
1768
|
-
__privateAdd$5(this,
|
1769
|
-
__privateAdd$5(this, _table$1
|
1770
|
-
__privateAdd$5(this, _repository
|
3941
|
+
__privateAdd$5(this, _Query_instances);
|
3942
|
+
__privateAdd$5(this, _table$1);
|
3943
|
+
__privateAdd$5(this, _repository);
|
1771
3944
|
__privateAdd$5(this, _data, { filter: {} });
|
1772
3945
|
// Implements pagination
|
1773
3946
|
this.meta = { page: { cursor: "start", more: true, size: PAGINATION_DEFAULT_SIZE } };
|
1774
|
-
this.records = new
|
1775
|
-
__privateSet$
|
3947
|
+
this.records = new PageRecordArray(this, []);
|
3948
|
+
__privateSet$3(this, _table$1, table);
|
1776
3949
|
if (repository) {
|
1777
|
-
__privateSet$
|
3950
|
+
__privateSet$3(this, _repository, repository);
|
1778
3951
|
} else {
|
1779
|
-
__privateSet$
|
3952
|
+
__privateSet$3(this, _repository, this);
|
1780
3953
|
}
|
1781
3954
|
const parent = cleanParent(data, rawParent);
|
1782
|
-
__privateGet$
|
1783
|
-
__privateGet$
|
1784
|
-
__privateGet$
|
1785
|
-
__privateGet$
|
1786
|
-
__privateGet$
|
1787
|
-
__privateGet$
|
1788
|
-
__privateGet$
|
1789
|
-
__privateGet$
|
1790
|
-
__privateGet$
|
1791
|
-
__privateGet$
|
1792
|
-
__privateGet$
|
3955
|
+
__privateGet$4(this, _data).filter = data.filter ?? parent?.filter ?? {};
|
3956
|
+
__privateGet$4(this, _data).filter.$any = data.filter?.$any ?? parent?.filter?.$any;
|
3957
|
+
__privateGet$4(this, _data).filter.$all = data.filter?.$all ?? parent?.filter?.$all;
|
3958
|
+
__privateGet$4(this, _data).filter.$not = data.filter?.$not ?? parent?.filter?.$not;
|
3959
|
+
__privateGet$4(this, _data).filter.$none = data.filter?.$none ?? parent?.filter?.$none;
|
3960
|
+
__privateGet$4(this, _data).sort = data.sort ?? parent?.sort;
|
3961
|
+
__privateGet$4(this, _data).columns = data.columns ?? parent?.columns;
|
3962
|
+
__privateGet$4(this, _data).consistency = data.consistency ?? parent?.consistency;
|
3963
|
+
__privateGet$4(this, _data).pagination = data.pagination ?? parent?.pagination;
|
3964
|
+
__privateGet$4(this, _data).cache = data.cache ?? parent?.cache;
|
3965
|
+
__privateGet$4(this, _data).fetchOptions = data.fetchOptions ?? parent?.fetchOptions;
|
1793
3966
|
this.any = this.any.bind(this);
|
1794
3967
|
this.all = this.all.bind(this);
|
1795
3968
|
this.not = this.not.bind(this);
|
@@ -1800,10 +3973,10 @@ const _Query = class _Query {
|
|
1800
3973
|
Object.defineProperty(this, "repository", { enumerable: false });
|
1801
3974
|
}
|
1802
3975
|
getQueryOptions() {
|
1803
|
-
return __privateGet$
|
3976
|
+
return __privateGet$4(this, _data);
|
1804
3977
|
}
|
1805
3978
|
key() {
|
1806
|
-
const { columns = [], filter = {}, sort = [], pagination = {} } = __privateGet$
|
3979
|
+
const { columns = [], filter = {}, sort = [], pagination = {} } = __privateGet$4(this, _data);
|
1807
3980
|
const key = JSON.stringify({ columns, filter, sort, pagination });
|
1808
3981
|
return toBase64(key);
|
1809
3982
|
}
|
@@ -1814,7 +3987,7 @@ const _Query = class _Query {
|
|
1814
3987
|
*/
|
1815
3988
|
any(...queries) {
|
1816
3989
|
const $any = queries.map((query) => query.getQueryOptions().filter ?? {});
|
1817
|
-
return new _Query(__privateGet$
|
3990
|
+
return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { filter: { $any } }, __privateGet$4(this, _data));
|
1818
3991
|
}
|
1819
3992
|
/**
|
1820
3993
|
* Builds a new query object representing a logical AND between the given subqueries.
|
@@ -1823,7 +3996,7 @@ const _Query = class _Query {
|
|
1823
3996
|
*/
|
1824
3997
|
all(...queries) {
|
1825
3998
|
const $all = queries.map((query) => query.getQueryOptions().filter ?? {});
|
1826
|
-
return new _Query(__privateGet$
|
3999
|
+
return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { filter: { $all } }, __privateGet$4(this, _data));
|
1827
4000
|
}
|
1828
4001
|
/**
|
1829
4002
|
* Builds a new query object representing a logical OR negating each subquery. In pseudo-code: !q1 OR !q2
|
@@ -1832,7 +4005,7 @@ const _Query = class _Query {
|
|
1832
4005
|
*/
|
1833
4006
|
not(...queries) {
|
1834
4007
|
const $not = queries.map((query) => query.getQueryOptions().filter ?? {});
|
1835
|
-
return new _Query(__privateGet$
|
4008
|
+
return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { filter: { $not } }, __privateGet$4(this, _data));
|
1836
4009
|
}
|
1837
4010
|
/**
|
1838
4011
|
* Builds a new query object representing a logical AND negating each subquery. In pseudo-code: !q1 AND !q2
|
@@ -1841,25 +4014,25 @@ const _Query = class _Query {
|
|
1841
4014
|
*/
|
1842
4015
|
none(...queries) {
|
1843
4016
|
const $none = queries.map((query) => query.getQueryOptions().filter ?? {});
|
1844
|
-
return new _Query(__privateGet$
|
4017
|
+
return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { filter: { $none } }, __privateGet$4(this, _data));
|
1845
4018
|
}
|
1846
4019
|
filter(a, b) {
|
1847
4020
|
if (arguments.length === 1) {
|
1848
4021
|
const constraints = Object.entries(a ?? {}).map(([column, constraint]) => ({
|
1849
|
-
[column]: __privateMethod$3(this,
|
4022
|
+
[column]: __privateMethod$3(this, _Query_instances, cleanFilterConstraint_fn).call(this, column, constraint)
|
1850
4023
|
}));
|
1851
|
-
const $all = compact([__privateGet$
|
1852
|
-
return new _Query(__privateGet$
|
4024
|
+
const $all = compact([__privateGet$4(this, _data).filter?.$all].flat().concat(constraints));
|
4025
|
+
return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { filter: { $all } }, __privateGet$4(this, _data));
|
1853
4026
|
} else {
|
1854
|
-
const constraints = isDefined(a) && isDefined(b) ? [{ [a]: __privateMethod$3(this,
|
1855
|
-
const $all = compact([__privateGet$
|
1856
|
-
return new _Query(__privateGet$
|
4027
|
+
const constraints = isDefined(a) && isDefined(b) ? [{ [a]: __privateMethod$3(this, _Query_instances, cleanFilterConstraint_fn).call(this, a, b) }] : void 0;
|
4028
|
+
const $all = compact([__privateGet$4(this, _data).filter?.$all].flat().concat(constraints));
|
4029
|
+
return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { filter: { $all } }, __privateGet$4(this, _data));
|
1857
4030
|
}
|
1858
4031
|
}
|
1859
4032
|
sort(column, direction = "asc") {
|
1860
|
-
const originalSort = [__privateGet$
|
4033
|
+
const originalSort = [__privateGet$4(this, _data).sort ?? []].flat();
|
1861
4034
|
const sort = [...originalSort, { column, direction }];
|
1862
|
-
return new _Query(__privateGet$
|
4035
|
+
return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { sort }, __privateGet$4(this, _data));
|
1863
4036
|
}
|
1864
4037
|
/**
|
1865
4038
|
* Builds a new query specifying the set of columns to be returned in the query response.
|
@@ -1868,15 +4041,15 @@ const _Query = class _Query {
|
|
1868
4041
|
*/
|
1869
4042
|
select(columns) {
|
1870
4043
|
return new _Query(
|
1871
|
-
__privateGet$
|
1872
|
-
__privateGet$
|
4044
|
+
__privateGet$4(this, _repository),
|
4045
|
+
__privateGet$4(this, _table$1),
|
1873
4046
|
{ columns },
|
1874
|
-
__privateGet$
|
4047
|
+
__privateGet$4(this, _data)
|
1875
4048
|
);
|
1876
4049
|
}
|
1877
4050
|
getPaginated(options = {}) {
|
1878
|
-
const query = new _Query(__privateGet$
|
1879
|
-
return __privateGet$
|
4051
|
+
const query = new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), options, __privateGet$4(this, _data));
|
4052
|
+
return __privateGet$4(this, _repository).query(query);
|
1880
4053
|
}
|
1881
4054
|
/**
|
1882
4055
|
* Get results in an iterator
|
@@ -1913,7 +4086,7 @@ const _Query = class _Query {
|
|
1913
4086
|
if (page.hasNextPage() && options.pagination?.size === void 0) {
|
1914
4087
|
console.trace("Calling getMany does not return all results. Paginate to get all results or call getAll.");
|
1915
4088
|
}
|
1916
|
-
const array = new
|
4089
|
+
const array = new PageRecordArray(page, results.slice(0, size));
|
1917
4090
|
return array;
|
1918
4091
|
}
|
1919
4092
|
async getAll(options = {}) {
|
@@ -1922,7 +4095,7 @@ const _Query = class _Query {
|
|
1922
4095
|
for await (const page of this.getIterator({ ...rest, batchSize })) {
|
1923
4096
|
results.push(...page);
|
1924
4097
|
}
|
1925
|
-
return results;
|
4098
|
+
return new RecordArray(results);
|
1926
4099
|
}
|
1927
4100
|
async getFirst(options = {}) {
|
1928
4101
|
const records = await this.getMany({ ...options, pagination: { size: 1 } });
|
@@ -1930,19 +4103,18 @@ const _Query = class _Query {
|
|
1930
4103
|
}
|
1931
4104
|
async getFirstOrThrow(options = {}) {
|
1932
4105
|
const records = await this.getMany({ ...options, pagination: { size: 1 } });
|
1933
|
-
if (records[0] === void 0)
|
1934
|
-
throw new Error("No results found.");
|
4106
|
+
if (records[0] === void 0) throw new Error("No results found.");
|
1935
4107
|
return records[0];
|
1936
4108
|
}
|
1937
4109
|
async summarize(params = {}) {
|
1938
4110
|
const { summaries, summariesFilter, ...options } = params;
|
1939
4111
|
const query = new _Query(
|
1940
|
-
__privateGet$
|
1941
|
-
__privateGet$
|
4112
|
+
__privateGet$4(this, _repository),
|
4113
|
+
__privateGet$4(this, _table$1),
|
1942
4114
|
options,
|
1943
|
-
__privateGet$
|
4115
|
+
__privateGet$4(this, _data)
|
1944
4116
|
);
|
1945
|
-
return __privateGet$
|
4117
|
+
return __privateGet$4(this, _repository).summarizeTable(query, summaries, summariesFilter);
|
1946
4118
|
}
|
1947
4119
|
/**
|
1948
4120
|
* Builds a new query object adding a cache TTL in milliseconds.
|
@@ -1950,7 +4122,7 @@ const _Query = class _Query {
|
|
1950
4122
|
* @returns A new Query object.
|
1951
4123
|
*/
|
1952
4124
|
cache(ttl) {
|
1953
|
-
return new _Query(__privateGet$
|
4125
|
+
return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { cache: ttl }, __privateGet$4(this, _data));
|
1954
4126
|
}
|
1955
4127
|
/**
|
1956
4128
|
* Retrieve next page of records
|
@@ -1994,9 +4166,9 @@ const _Query = class _Query {
|
|
1994
4166
|
_table$1 = new WeakMap();
|
1995
4167
|
_repository = new WeakMap();
|
1996
4168
|
_data = new WeakMap();
|
1997
|
-
|
4169
|
+
_Query_instances = new WeakSet();
|
1998
4170
|
cleanFilterConstraint_fn = function(column, value) {
|
1999
|
-
const columnType = __privateGet$
|
4171
|
+
const columnType = __privateGet$4(this, _table$1).schema?.columns.find(({ name }) => name === column)?.type;
|
2000
4172
|
if (columnType === "multiple" && (isString(value) || isStringArray(value))) {
|
2001
4173
|
return { $includes: value };
|
2002
4174
|
}
|
@@ -2060,8 +4232,7 @@ function isSortFilterString(value) {
|
|
2060
4232
|
}
|
2061
4233
|
function isSortFilterBase(filter) {
|
2062
4234
|
return isObject(filter) && Object.entries(filter).every(([key, value]) => {
|
2063
|
-
if (key === "*")
|
2064
|
-
return value === "random";
|
4235
|
+
if (key === "*") return value === "random";
|
2065
4236
|
return value === "asc" || value === "desc";
|
2066
4237
|
});
|
2067
4238
|
}
|
@@ -2082,29 +4253,15 @@ function buildSortFilter(filter) {
|
|
2082
4253
|
}
|
2083
4254
|
}
|
2084
4255
|
|
2085
|
-
var
|
2086
|
-
|
2087
|
-
throw TypeError("Cannot " + msg);
|
4256
|
+
var __typeError$4 = (msg) => {
|
4257
|
+
throw TypeError(msg);
|
2088
4258
|
};
|
2089
|
-
var
|
2090
|
-
|
2091
|
-
|
2092
|
-
|
2093
|
-
var
|
2094
|
-
|
2095
|
-
throw TypeError("Cannot add the same private member more than once");
|
2096
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
2097
|
-
};
|
2098
|
-
var __privateSet$4 = (obj, member, value, setter) => {
|
2099
|
-
__accessCheck$4(obj, member, "write to private field");
|
2100
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
2101
|
-
return value;
|
2102
|
-
};
|
2103
|
-
var __privateMethod$2 = (obj, member, method) => {
|
2104
|
-
__accessCheck$4(obj, member, "access private method");
|
2105
|
-
return method;
|
2106
|
-
};
|
2107
|
-
var _table, _getFetchProps, _db, _cache, _schemaTables$2, _trace, _insertRecordWithoutId, insertRecordWithoutId_fn, _insertRecordWithId, insertRecordWithId_fn, _insertRecords, insertRecords_fn, _updateRecordWithID, updateRecordWithID_fn, _updateRecords, updateRecords_fn, _upsertRecordWithID, upsertRecordWithID_fn, _deleteRecord, deleteRecord_fn, _deleteRecords, deleteRecords_fn, _setCacheQuery, setCacheQuery_fn, _getCacheQuery, getCacheQuery_fn, _getSchemaTables$1, getSchemaTables_fn$1, _transformObjectToApi, transformObjectToApi_fn;
|
4259
|
+
var __accessCheck$4 = (obj, member, msg) => member.has(obj) || __typeError$4("Cannot " + msg);
|
4260
|
+
var __privateGet$3 = (obj, member, getter) => (__accessCheck$4(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
4261
|
+
var __privateAdd$4 = (obj, member, value) => member.has(obj) ? __typeError$4("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
4262
|
+
var __privateSet$2 = (obj, member, value, setter) => (__accessCheck$4(obj, member, "write to private field"), member.set(obj, value), value);
|
4263
|
+
var __privateMethod$2 = (obj, member, method) => (__accessCheck$4(obj, member, "access private method"), method);
|
4264
|
+
var _table, _getFetchProps, _db, _cache, _schemaTables, _trace, _RestRepository_instances, insertRecordWithoutId_fn, insertRecordWithId_fn, insertRecords_fn, updateRecordWithID_fn, updateRecords_fn, upsertRecordWithID_fn, deleteRecord_fn, deleteRecords_fn, setCacheQuery_fn, getCacheQuery_fn, getSchemaTables_fn, transformObjectToApi_fn;
|
2108
4265
|
const BULK_OPERATION_MAX_SIZE = 1e3;
|
2109
4266
|
class Repository extends Query {
|
2110
4267
|
}
|
@@ -2115,75 +4272,60 @@ class RestRepository extends Query {
|
|
2115
4272
|
{ name: options.table, schema: options.schemaTables?.find((table) => table.name === options.table) },
|
2116
4273
|
{}
|
2117
4274
|
);
|
2118
|
-
__privateAdd$4(this,
|
2119
|
-
__privateAdd$4(this,
|
2120
|
-
__privateAdd$4(this,
|
2121
|
-
__privateAdd$4(this,
|
2122
|
-
__privateAdd$4(this,
|
2123
|
-
__privateAdd$4(this,
|
2124
|
-
__privateAdd$4(this,
|
2125
|
-
|
2126
|
-
|
2127
|
-
|
2128
|
-
|
2129
|
-
|
2130
|
-
__privateAdd$4(this, _table, void 0);
|
2131
|
-
__privateAdd$4(this, _getFetchProps, void 0);
|
2132
|
-
__privateAdd$4(this, _db, void 0);
|
2133
|
-
__privateAdd$4(this, _cache, void 0);
|
2134
|
-
__privateAdd$4(this, _schemaTables$2, void 0);
|
2135
|
-
__privateAdd$4(this, _trace, void 0);
|
2136
|
-
__privateSet$4(this, _table, options.table);
|
2137
|
-
__privateSet$4(this, _db, options.db);
|
2138
|
-
__privateSet$4(this, _cache, options.pluginOptions.cache);
|
2139
|
-
__privateSet$4(this, _schemaTables$2, options.schemaTables);
|
2140
|
-
__privateSet$4(this, _getFetchProps, () => ({ ...options.pluginOptions, sessionID: generateUUID() }));
|
4275
|
+
__privateAdd$4(this, _RestRepository_instances);
|
4276
|
+
__privateAdd$4(this, _table);
|
4277
|
+
__privateAdd$4(this, _getFetchProps);
|
4278
|
+
__privateAdd$4(this, _db);
|
4279
|
+
__privateAdd$4(this, _cache);
|
4280
|
+
__privateAdd$4(this, _schemaTables);
|
4281
|
+
__privateAdd$4(this, _trace);
|
4282
|
+
__privateSet$2(this, _table, options.table);
|
4283
|
+
__privateSet$2(this, _db, options.db);
|
4284
|
+
__privateSet$2(this, _cache, options.pluginOptions.cache);
|
4285
|
+
__privateSet$2(this, _schemaTables, options.schemaTables);
|
4286
|
+
__privateSet$2(this, _getFetchProps, () => ({ ...options.pluginOptions, sessionID: generateUUID() }));
|
2141
4287
|
const trace = options.pluginOptions.trace ?? defaultTrace;
|
2142
|
-
__privateSet$
|
4288
|
+
__privateSet$2(this, _trace, async (name, fn, options2 = {}) => {
|
2143
4289
|
return trace(name, fn, {
|
2144
4290
|
...options2,
|
2145
|
-
[TraceAttributes.TABLE]: __privateGet$
|
4291
|
+
[TraceAttributes.TABLE]: __privateGet$3(this, _table),
|
2146
4292
|
[TraceAttributes.KIND]: "sdk-operation",
|
2147
4293
|
[TraceAttributes.VERSION]: VERSION
|
2148
4294
|
});
|
2149
4295
|
});
|
2150
4296
|
}
|
2151
4297
|
async create(a, b, c, d) {
|
2152
|
-
return __privateGet$
|
4298
|
+
return __privateGet$3(this, _trace).call(this, "create", async () => {
|
2153
4299
|
const ifVersion = parseIfVersion(b, c, d);
|
2154
4300
|
if (Array.isArray(a)) {
|
2155
|
-
if (a.length === 0)
|
2156
|
-
|
2157
|
-
const ids = await __privateMethod$2(this, _insertRecords, insertRecords_fn).call(this, a, { ifVersion, createOnly: true });
|
4301
|
+
if (a.length === 0) return [];
|
4302
|
+
const ids = await __privateMethod$2(this, _RestRepository_instances, insertRecords_fn).call(this, a, { ifVersion, createOnly: true });
|
2158
4303
|
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
2159
4304
|
const result = await this.read(ids, columns);
|
2160
4305
|
return result;
|
2161
4306
|
}
|
2162
4307
|
if (isString(a) && isObject(b)) {
|
2163
|
-
if (a === "")
|
2164
|
-
throw new Error("The id can't be empty");
|
4308
|
+
if (a === "") throw new Error("The id can't be empty");
|
2165
4309
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2166
|
-
return await __privateMethod$2(this,
|
4310
|
+
return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: true, ifVersion });
|
2167
4311
|
}
|
2168
4312
|
if (isObject(a) && isString(a.id)) {
|
2169
|
-
if (a.id === "")
|
2170
|
-
throw new Error("The id can't be empty");
|
4313
|
+
if (a.id === "") throw new Error("The id can't be empty");
|
2171
4314
|
const columns = isValidSelectableColumns(b) ? b : void 0;
|
2172
|
-
return await __privateMethod$2(this,
|
4315
|
+
return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a.id, { ...a, id: void 0 }, columns, { createOnly: true, ifVersion });
|
2173
4316
|
}
|
2174
4317
|
if (isObject(a)) {
|
2175
4318
|
const columns = isValidSelectableColumns(b) ? b : void 0;
|
2176
|
-
return __privateMethod$2(this,
|
4319
|
+
return __privateMethod$2(this, _RestRepository_instances, insertRecordWithoutId_fn).call(this, a, columns);
|
2177
4320
|
}
|
2178
4321
|
throw new Error("Invalid arguments for create method");
|
2179
4322
|
});
|
2180
4323
|
}
|
2181
4324
|
async read(a, b) {
|
2182
|
-
return __privateGet$
|
4325
|
+
return __privateGet$3(this, _trace).call(this, "read", async () => {
|
2183
4326
|
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
2184
4327
|
if (Array.isArray(a)) {
|
2185
|
-
if (a.length === 0)
|
2186
|
-
return [];
|
4328
|
+
if (a.length === 0) return [];
|
2187
4329
|
const ids = a.map((item) => extractId(item));
|
2188
4330
|
const finalObjects = await this.getAll({ filter: { id: { $any: compact(ids) } }, columns });
|
2189
4331
|
const dictionary = finalObjects.reduce((acc, object) => {
|
@@ -2200,17 +4342,17 @@ class RestRepository extends Query {
|
|
2200
4342
|
workspace: "{workspaceId}",
|
2201
4343
|
dbBranchName: "{dbBranch}",
|
2202
4344
|
region: "{region}",
|
2203
|
-
tableName: __privateGet$
|
4345
|
+
tableName: __privateGet$3(this, _table),
|
2204
4346
|
recordId: id
|
2205
4347
|
},
|
2206
4348
|
queryParams: { columns },
|
2207
|
-
...__privateGet$
|
4349
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
2208
4350
|
});
|
2209
|
-
const schemaTables = await __privateMethod$2(this,
|
4351
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2210
4352
|
return initObject(
|
2211
|
-
__privateGet$
|
4353
|
+
__privateGet$3(this, _db),
|
2212
4354
|
schemaTables,
|
2213
|
-
__privateGet$
|
4355
|
+
__privateGet$3(this, _table),
|
2214
4356
|
response,
|
2215
4357
|
columns
|
2216
4358
|
);
|
@@ -2225,7 +4367,7 @@ class RestRepository extends Query {
|
|
2225
4367
|
});
|
2226
4368
|
}
|
2227
4369
|
async readOrThrow(a, b) {
|
2228
|
-
return __privateGet$
|
4370
|
+
return __privateGet$3(this, _trace).call(this, "readOrThrow", async () => {
|
2229
4371
|
const result = await this.read(a, b);
|
2230
4372
|
if (Array.isArray(result)) {
|
2231
4373
|
const missingIds = compact(
|
@@ -2244,14 +4386,13 @@ class RestRepository extends Query {
|
|
2244
4386
|
});
|
2245
4387
|
}
|
2246
4388
|
async update(a, b, c, d) {
|
2247
|
-
return __privateGet$
|
4389
|
+
return __privateGet$3(this, _trace).call(this, "update", async () => {
|
2248
4390
|
const ifVersion = parseIfVersion(b, c, d);
|
2249
4391
|
if (Array.isArray(a)) {
|
2250
|
-
if (a.length === 0)
|
2251
|
-
return [];
|
4392
|
+
if (a.length === 0) return [];
|
2252
4393
|
const existing = await this.read(a, ["id"]);
|
2253
4394
|
const updates = a.filter((_item, index) => existing[index] !== null);
|
2254
|
-
await __privateMethod$2(this,
|
4395
|
+
await __privateMethod$2(this, _RestRepository_instances, updateRecords_fn).call(this, updates, {
|
2255
4396
|
ifVersion,
|
2256
4397
|
upsert: false
|
2257
4398
|
});
|
@@ -2262,22 +4403,21 @@ class RestRepository extends Query {
|
|
2262
4403
|
try {
|
2263
4404
|
if (isString(a) && isObject(b)) {
|
2264
4405
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2265
|
-
return await __privateMethod$2(this,
|
4406
|
+
return await __privateMethod$2(this, _RestRepository_instances, updateRecordWithID_fn).call(this, a, b, columns, { ifVersion });
|
2266
4407
|
}
|
2267
4408
|
if (isObject(a) && isString(a.id)) {
|
2268
4409
|
const columns = isValidSelectableColumns(b) ? b : void 0;
|
2269
|
-
return await __privateMethod$2(this,
|
4410
|
+
return await __privateMethod$2(this, _RestRepository_instances, updateRecordWithID_fn).call(this, a.id, { ...a, id: void 0 }, columns, { ifVersion });
|
2270
4411
|
}
|
2271
4412
|
} catch (error) {
|
2272
|
-
if (error.status === 422)
|
2273
|
-
return null;
|
4413
|
+
if (error.status === 422) return null;
|
2274
4414
|
throw error;
|
2275
4415
|
}
|
2276
4416
|
throw new Error("Invalid arguments for update method");
|
2277
4417
|
});
|
2278
4418
|
}
|
2279
4419
|
async updateOrThrow(a, b, c, d) {
|
2280
|
-
return __privateGet$
|
4420
|
+
return __privateGet$3(this, _trace).call(this, "updateOrThrow", async () => {
|
2281
4421
|
const result = await this.update(a, b, c, d);
|
2282
4422
|
if (Array.isArray(result)) {
|
2283
4423
|
const missingIds = compact(
|
@@ -2296,12 +4436,11 @@ class RestRepository extends Query {
|
|
2296
4436
|
});
|
2297
4437
|
}
|
2298
4438
|
async createOrUpdate(a, b, c, d) {
|
2299
|
-
return __privateGet$
|
4439
|
+
return __privateGet$3(this, _trace).call(this, "createOrUpdate", async () => {
|
2300
4440
|
const ifVersion = parseIfVersion(b, c, d);
|
2301
4441
|
if (Array.isArray(a)) {
|
2302
|
-
if (a.length === 0)
|
2303
|
-
|
2304
|
-
await __privateMethod$2(this, _updateRecords, updateRecords_fn).call(this, a, {
|
4442
|
+
if (a.length === 0) return [];
|
4443
|
+
await __privateMethod$2(this, _RestRepository_instances, updateRecords_fn).call(this, a, {
|
2305
4444
|
ifVersion,
|
2306
4445
|
upsert: true
|
2307
4446
|
});
|
@@ -2310,16 +4449,14 @@ class RestRepository extends Query {
|
|
2310
4449
|
return result;
|
2311
4450
|
}
|
2312
4451
|
if (isString(a) && isObject(b)) {
|
2313
|
-
if (a === "")
|
2314
|
-
throw new Error("The id can't be empty");
|
4452
|
+
if (a === "") throw new Error("The id can't be empty");
|
2315
4453
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2316
|
-
return await __privateMethod$2(this,
|
4454
|
+
return await __privateMethod$2(this, _RestRepository_instances, upsertRecordWithID_fn).call(this, a, b, columns, { ifVersion });
|
2317
4455
|
}
|
2318
4456
|
if (isObject(a) && isString(a.id)) {
|
2319
|
-
if (a.id === "")
|
2320
|
-
throw new Error("The id can't be empty");
|
4457
|
+
if (a.id === "") throw new Error("The id can't be empty");
|
2321
4458
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2322
|
-
return await __privateMethod$2(this,
|
4459
|
+
return await __privateMethod$2(this, _RestRepository_instances, upsertRecordWithID_fn).call(this, a.id, { ...a, id: void 0 }, columns, { ifVersion });
|
2323
4460
|
}
|
2324
4461
|
if (!isDefined(a) && isObject(b)) {
|
2325
4462
|
return await this.create(b, c);
|
@@ -2331,27 +4468,24 @@ class RestRepository extends Query {
|
|
2331
4468
|
});
|
2332
4469
|
}
|
2333
4470
|
async createOrReplace(a, b, c, d) {
|
2334
|
-
return __privateGet$
|
4471
|
+
return __privateGet$3(this, _trace).call(this, "createOrReplace", async () => {
|
2335
4472
|
const ifVersion = parseIfVersion(b, c, d);
|
2336
4473
|
if (Array.isArray(a)) {
|
2337
|
-
if (a.length === 0)
|
2338
|
-
|
2339
|
-
const ids = await __privateMethod$2(this, _insertRecords, insertRecords_fn).call(this, a, { ifVersion, createOnly: false });
|
4474
|
+
if (a.length === 0) return [];
|
4475
|
+
const ids = await __privateMethod$2(this, _RestRepository_instances, insertRecords_fn).call(this, a, { ifVersion, createOnly: false });
|
2340
4476
|
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
2341
4477
|
const result = await this.read(ids, columns);
|
2342
4478
|
return result;
|
2343
4479
|
}
|
2344
4480
|
if (isString(a) && isObject(b)) {
|
2345
|
-
if (a === "")
|
2346
|
-
throw new Error("The id can't be empty");
|
4481
|
+
if (a === "") throw new Error("The id can't be empty");
|
2347
4482
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2348
|
-
return await __privateMethod$2(this,
|
4483
|
+
return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: false, ifVersion });
|
2349
4484
|
}
|
2350
4485
|
if (isObject(a) && isString(a.id)) {
|
2351
|
-
if (a.id === "")
|
2352
|
-
throw new Error("The id can't be empty");
|
4486
|
+
if (a.id === "") throw new Error("The id can't be empty");
|
2353
4487
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2354
|
-
return await __privateMethod$2(this,
|
4488
|
+
return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a.id, { ...a, id: void 0 }, columns, { createOnly: false, ifVersion });
|
2355
4489
|
}
|
2356
4490
|
if (!isDefined(a) && isObject(b)) {
|
2357
4491
|
return await this.create(b, c);
|
@@ -2363,33 +4497,30 @@ class RestRepository extends Query {
|
|
2363
4497
|
});
|
2364
4498
|
}
|
2365
4499
|
async delete(a, b) {
|
2366
|
-
return __privateGet$
|
4500
|
+
return __privateGet$3(this, _trace).call(this, "delete", async () => {
|
2367
4501
|
if (Array.isArray(a)) {
|
2368
|
-
if (a.length === 0)
|
2369
|
-
return [];
|
4502
|
+
if (a.length === 0) return [];
|
2370
4503
|
const ids = a.map((o) => {
|
2371
|
-
if (isString(o))
|
2372
|
-
|
2373
|
-
if (isString(o.id))
|
2374
|
-
return o.id;
|
4504
|
+
if (isString(o)) return o;
|
4505
|
+
if (isString(o.id)) return o.id;
|
2375
4506
|
throw new Error("Invalid arguments for delete method");
|
2376
4507
|
});
|
2377
4508
|
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
2378
4509
|
const result = await this.read(a, columns);
|
2379
|
-
await __privateMethod$2(this,
|
4510
|
+
await __privateMethod$2(this, _RestRepository_instances, deleteRecords_fn).call(this, ids);
|
2380
4511
|
return result;
|
2381
4512
|
}
|
2382
4513
|
if (isString(a)) {
|
2383
|
-
return __privateMethod$2(this,
|
4514
|
+
return __privateMethod$2(this, _RestRepository_instances, deleteRecord_fn).call(this, a, b);
|
2384
4515
|
}
|
2385
4516
|
if (isObject(a) && isString(a.id)) {
|
2386
|
-
return __privateMethod$2(this,
|
4517
|
+
return __privateMethod$2(this, _RestRepository_instances, deleteRecord_fn).call(this, a.id, b);
|
2387
4518
|
}
|
2388
4519
|
throw new Error("Invalid arguments for delete method");
|
2389
4520
|
});
|
2390
4521
|
}
|
2391
4522
|
async deleteOrThrow(a, b) {
|
2392
|
-
return __privateGet$
|
4523
|
+
return __privateGet$3(this, _trace).call(this, "deleteOrThrow", async () => {
|
2393
4524
|
const result = await this.delete(a, b);
|
2394
4525
|
if (Array.isArray(result)) {
|
2395
4526
|
const missingIds = compact(
|
@@ -2407,13 +4538,13 @@ class RestRepository extends Query {
|
|
2407
4538
|
});
|
2408
4539
|
}
|
2409
4540
|
async search(query, options = {}) {
|
2410
|
-
return __privateGet$
|
4541
|
+
return __privateGet$3(this, _trace).call(this, "search", async () => {
|
2411
4542
|
const { records, totalCount } = await searchTable({
|
2412
4543
|
pathParams: {
|
2413
4544
|
workspace: "{workspaceId}",
|
2414
4545
|
dbBranchName: "{dbBranch}",
|
2415
4546
|
region: "{region}",
|
2416
|
-
tableName: __privateGet$
|
4547
|
+
tableName: __privateGet$3(this, _table)
|
2417
4548
|
},
|
2418
4549
|
body: {
|
2419
4550
|
query,
|
@@ -2425,23 +4556,23 @@ class RestRepository extends Query {
|
|
2425
4556
|
page: options.page,
|
2426
4557
|
target: options.target
|
2427
4558
|
},
|
2428
|
-
...__privateGet$
|
4559
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
2429
4560
|
});
|
2430
|
-
const schemaTables = await __privateMethod$2(this,
|
4561
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2431
4562
|
return {
|
2432
|
-
records: records.map((item) => initObject(__privateGet$
|
4563
|
+
records: records.map((item) => initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), item, ["*"])),
|
2433
4564
|
totalCount
|
2434
4565
|
};
|
2435
4566
|
});
|
2436
4567
|
}
|
2437
4568
|
async vectorSearch(column, query, options) {
|
2438
|
-
return __privateGet$
|
4569
|
+
return __privateGet$3(this, _trace).call(this, "vectorSearch", async () => {
|
2439
4570
|
const { records, totalCount } = await vectorSearchTable({
|
2440
4571
|
pathParams: {
|
2441
4572
|
workspace: "{workspaceId}",
|
2442
4573
|
dbBranchName: "{dbBranch}",
|
2443
4574
|
region: "{region}",
|
2444
|
-
tableName: __privateGet$
|
4575
|
+
tableName: __privateGet$3(this, _table)
|
2445
4576
|
},
|
2446
4577
|
body: {
|
2447
4578
|
column,
|
@@ -2450,42 +4581,41 @@ class RestRepository extends Query {
|
|
2450
4581
|
size: options?.size,
|
2451
4582
|
filter: options?.filter
|
2452
4583
|
},
|
2453
|
-
...__privateGet$
|
4584
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
2454
4585
|
});
|
2455
|
-
const schemaTables = await __privateMethod$2(this,
|
4586
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2456
4587
|
return {
|
2457
|
-
records: records.map((item) => initObject(__privateGet$
|
4588
|
+
records: records.map((item) => initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), item, ["*"])),
|
2458
4589
|
totalCount
|
2459
4590
|
};
|
2460
4591
|
});
|
2461
4592
|
}
|
2462
4593
|
async aggregate(aggs, filter) {
|
2463
|
-
return __privateGet$
|
4594
|
+
return __privateGet$3(this, _trace).call(this, "aggregate", async () => {
|
2464
4595
|
const result = await aggregateTable({
|
2465
4596
|
pathParams: {
|
2466
4597
|
workspace: "{workspaceId}",
|
2467
4598
|
dbBranchName: "{dbBranch}",
|
2468
4599
|
region: "{region}",
|
2469
|
-
tableName: __privateGet$
|
4600
|
+
tableName: __privateGet$3(this, _table)
|
2470
4601
|
},
|
2471
4602
|
body: { aggs, filter },
|
2472
|
-
...__privateGet$
|
4603
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
2473
4604
|
});
|
2474
4605
|
return result;
|
2475
4606
|
});
|
2476
4607
|
}
|
2477
4608
|
async query(query) {
|
2478
|
-
return __privateGet$
|
2479
|
-
const cacheQuery = await __privateMethod$2(this,
|
2480
|
-
if (cacheQuery)
|
2481
|
-
return new Page(query, cacheQuery.meta, cacheQuery.records);
|
4609
|
+
return __privateGet$3(this, _trace).call(this, "query", async () => {
|
4610
|
+
const cacheQuery = await __privateMethod$2(this, _RestRepository_instances, getCacheQuery_fn).call(this, query);
|
4611
|
+
if (cacheQuery) return new Page(query, cacheQuery.meta, cacheQuery.records);
|
2482
4612
|
const data = query.getQueryOptions();
|
2483
4613
|
const { meta, records: objects } = await queryTable({
|
2484
4614
|
pathParams: {
|
2485
4615
|
workspace: "{workspaceId}",
|
2486
4616
|
dbBranchName: "{dbBranch}",
|
2487
4617
|
region: "{region}",
|
2488
|
-
tableName: __privateGet$
|
4618
|
+
tableName: __privateGet$3(this, _table)
|
2489
4619
|
},
|
2490
4620
|
body: {
|
2491
4621
|
filter: cleanFilter(data.filter),
|
@@ -2495,31 +4625,31 @@ class RestRepository extends Query {
|
|
2495
4625
|
consistency: data.consistency
|
2496
4626
|
},
|
2497
4627
|
fetchOptions: data.fetchOptions,
|
2498
|
-
...__privateGet$
|
4628
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
2499
4629
|
});
|
2500
|
-
const schemaTables = await __privateMethod$2(this,
|
4630
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2501
4631
|
const records = objects.map(
|
2502
4632
|
(record) => initObject(
|
2503
|
-
__privateGet$
|
4633
|
+
__privateGet$3(this, _db),
|
2504
4634
|
schemaTables,
|
2505
|
-
__privateGet$
|
4635
|
+
__privateGet$3(this, _table),
|
2506
4636
|
record,
|
2507
4637
|
data.columns ?? ["*"]
|
2508
4638
|
)
|
2509
4639
|
);
|
2510
|
-
await __privateMethod$2(this,
|
4640
|
+
await __privateMethod$2(this, _RestRepository_instances, setCacheQuery_fn).call(this, query, meta, records);
|
2511
4641
|
return new Page(query, meta, records);
|
2512
4642
|
});
|
2513
4643
|
}
|
2514
4644
|
async summarizeTable(query, summaries, summariesFilter) {
|
2515
|
-
return __privateGet$
|
4645
|
+
return __privateGet$3(this, _trace).call(this, "summarize", async () => {
|
2516
4646
|
const data = query.getQueryOptions();
|
2517
4647
|
const result = await summarizeTable({
|
2518
4648
|
pathParams: {
|
2519
4649
|
workspace: "{workspaceId}",
|
2520
4650
|
dbBranchName: "{dbBranch}",
|
2521
4651
|
region: "{region}",
|
2522
|
-
tableName: __privateGet$
|
4652
|
+
tableName: __privateGet$3(this, _table)
|
2523
4653
|
},
|
2524
4654
|
body: {
|
2525
4655
|
filter: cleanFilter(data.filter),
|
@@ -2530,13 +4660,13 @@ class RestRepository extends Query {
|
|
2530
4660
|
summaries,
|
2531
4661
|
summariesFilter
|
2532
4662
|
},
|
2533
|
-
...__privateGet$
|
4663
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
2534
4664
|
});
|
2535
|
-
const schemaTables = await __privateMethod$2(this,
|
4665
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2536
4666
|
return {
|
2537
4667
|
...result,
|
2538
4668
|
summaries: result.summaries.map(
|
2539
|
-
(summary) => initObject(__privateGet$
|
4669
|
+
(summary) => initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), summary, data.columns ?? [])
|
2540
4670
|
)
|
2541
4671
|
};
|
2542
4672
|
});
|
@@ -2548,7 +4678,7 @@ class RestRepository extends Query {
|
|
2548
4678
|
workspace: "{workspaceId}",
|
2549
4679
|
dbBranchName: "{dbBranch}",
|
2550
4680
|
region: "{region}",
|
2551
|
-
tableName: __privateGet$
|
4681
|
+
tableName: __privateGet$3(this, _table),
|
2552
4682
|
sessionId: options?.sessionId
|
2553
4683
|
},
|
2554
4684
|
body: {
|
@@ -2558,7 +4688,7 @@ class RestRepository extends Query {
|
|
2558
4688
|
search: options?.searchType === "keyword" ? options?.search : void 0,
|
2559
4689
|
vectorSearch: options?.searchType === "vector" ? options?.vectorSearch : void 0
|
2560
4690
|
},
|
2561
|
-
...__privateGet$
|
4691
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
2562
4692
|
};
|
2563
4693
|
if (options?.onMessage) {
|
2564
4694
|
fetchSSERequest({
|
@@ -2579,50 +4709,47 @@ _table = new WeakMap();
|
|
2579
4709
|
_getFetchProps = new WeakMap();
|
2580
4710
|
_db = new WeakMap();
|
2581
4711
|
_cache = new WeakMap();
|
2582
|
-
_schemaTables
|
4712
|
+
_schemaTables = new WeakMap();
|
2583
4713
|
_trace = new WeakMap();
|
2584
|
-
|
4714
|
+
_RestRepository_instances = new WeakSet();
|
2585
4715
|
insertRecordWithoutId_fn = async function(object, columns = ["*"]) {
|
2586
|
-
const record = await __privateMethod$2(this,
|
4716
|
+
const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2587
4717
|
const response = await insertRecord({
|
2588
4718
|
pathParams: {
|
2589
4719
|
workspace: "{workspaceId}",
|
2590
4720
|
dbBranchName: "{dbBranch}",
|
2591
4721
|
region: "{region}",
|
2592
|
-
tableName: __privateGet$
|
4722
|
+
tableName: __privateGet$3(this, _table)
|
2593
4723
|
},
|
2594
4724
|
queryParams: { columns },
|
2595
4725
|
body: record,
|
2596
|
-
...__privateGet$
|
4726
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
2597
4727
|
});
|
2598
|
-
const schemaTables = await __privateMethod$2(this,
|
2599
|
-
return initObject(__privateGet$
|
4728
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
4729
|
+
return initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), response, columns);
|
2600
4730
|
};
|
2601
|
-
_insertRecordWithId = new WeakSet();
|
2602
4731
|
insertRecordWithId_fn = async function(recordId, object, columns = ["*"], { createOnly, ifVersion }) {
|
2603
|
-
if (!recordId)
|
2604
|
-
|
2605
|
-
const record = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
|
4732
|
+
if (!recordId) return null;
|
4733
|
+
const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2606
4734
|
const response = await insertRecordWithID({
|
2607
4735
|
pathParams: {
|
2608
4736
|
workspace: "{workspaceId}",
|
2609
4737
|
dbBranchName: "{dbBranch}",
|
2610
4738
|
region: "{region}",
|
2611
|
-
tableName: __privateGet$
|
4739
|
+
tableName: __privateGet$3(this, _table),
|
2612
4740
|
recordId
|
2613
4741
|
},
|
2614
4742
|
body: record,
|
2615
4743
|
queryParams: { createOnly, columns, ifVersion },
|
2616
|
-
...__privateGet$
|
4744
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
2617
4745
|
});
|
2618
|
-
const schemaTables = await __privateMethod$2(this,
|
2619
|
-
return initObject(__privateGet$
|
4746
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
4747
|
+
return initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), response, columns);
|
2620
4748
|
};
|
2621
|
-
_insertRecords = new WeakSet();
|
2622
4749
|
insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
|
2623
4750
|
const operations = await promiseMap(objects, async (object) => {
|
2624
|
-
const record = await __privateMethod$2(this,
|
2625
|
-
return { insert: { table: __privateGet$
|
4751
|
+
const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
4752
|
+
return { insert: { table: __privateGet$3(this, _table), record, createOnly, ifVersion } };
|
2626
4753
|
});
|
2627
4754
|
const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
|
2628
4755
|
const ids = [];
|
@@ -2634,7 +4761,7 @@ insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
|
|
2634
4761
|
region: "{region}"
|
2635
4762
|
},
|
2636
4763
|
body: { operations: operations2 },
|
2637
|
-
...__privateGet$
|
4764
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
2638
4765
|
});
|
2639
4766
|
for (const result of results) {
|
2640
4767
|
if (result.operation === "insert") {
|
@@ -2646,26 +4773,24 @@ insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
|
|
2646
4773
|
}
|
2647
4774
|
return ids;
|
2648
4775
|
};
|
2649
|
-
_updateRecordWithID = new WeakSet();
|
2650
4776
|
updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVersion }) {
|
2651
|
-
if (!recordId)
|
2652
|
-
|
2653
|
-
const { id: _id, ...record } = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
|
4777
|
+
if (!recordId) return null;
|
4778
|
+
const { id: _id, ...record } = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2654
4779
|
try {
|
2655
4780
|
const response = await updateRecordWithID({
|
2656
4781
|
pathParams: {
|
2657
4782
|
workspace: "{workspaceId}",
|
2658
4783
|
dbBranchName: "{dbBranch}",
|
2659
4784
|
region: "{region}",
|
2660
|
-
tableName: __privateGet$
|
4785
|
+
tableName: __privateGet$3(this, _table),
|
2661
4786
|
recordId
|
2662
4787
|
},
|
2663
4788
|
queryParams: { columns, ifVersion },
|
2664
4789
|
body: record,
|
2665
|
-
...__privateGet$
|
4790
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
2666
4791
|
});
|
2667
|
-
const schemaTables = await __privateMethod$2(this,
|
2668
|
-
return initObject(__privateGet$
|
4792
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
4793
|
+
return initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), response, columns);
|
2669
4794
|
} catch (e) {
|
2670
4795
|
if (isObject(e) && e.status === 404) {
|
2671
4796
|
return null;
|
@@ -2673,11 +4798,10 @@ updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
|
|
2673
4798
|
throw e;
|
2674
4799
|
}
|
2675
4800
|
};
|
2676
|
-
_updateRecords = new WeakSet();
|
2677
4801
|
updateRecords_fn = async function(objects, { ifVersion, upsert }) {
|
2678
4802
|
const operations = await promiseMap(objects, async ({ id, ...object }) => {
|
2679
|
-
const fields = await __privateMethod$2(this,
|
2680
|
-
return { update: { table: __privateGet$
|
4803
|
+
const fields = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
4804
|
+
return { update: { table: __privateGet$3(this, _table), id, ifVersion, upsert, fields } };
|
2681
4805
|
});
|
2682
4806
|
const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
|
2683
4807
|
const ids = [];
|
@@ -2689,7 +4813,7 @@ updateRecords_fn = async function(objects, { ifVersion, upsert }) {
|
|
2689
4813
|
region: "{region}"
|
2690
4814
|
},
|
2691
4815
|
body: { operations: operations2 },
|
2692
|
-
...__privateGet$
|
4816
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
2693
4817
|
});
|
2694
4818
|
for (const result of results) {
|
2695
4819
|
if (result.operation === "update") {
|
@@ -2701,43 +4825,39 @@ updateRecords_fn = async function(objects, { ifVersion, upsert }) {
|
|
2701
4825
|
}
|
2702
4826
|
return ids;
|
2703
4827
|
};
|
2704
|
-
_upsertRecordWithID = new WeakSet();
|
2705
4828
|
upsertRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVersion }) {
|
2706
|
-
if (!recordId)
|
2707
|
-
return null;
|
4829
|
+
if (!recordId) return null;
|
2708
4830
|
const response = await upsertRecordWithID({
|
2709
4831
|
pathParams: {
|
2710
4832
|
workspace: "{workspaceId}",
|
2711
4833
|
dbBranchName: "{dbBranch}",
|
2712
4834
|
region: "{region}",
|
2713
|
-
tableName: __privateGet$
|
4835
|
+
tableName: __privateGet$3(this, _table),
|
2714
4836
|
recordId
|
2715
4837
|
},
|
2716
4838
|
queryParams: { columns, ifVersion },
|
2717
4839
|
body: object,
|
2718
|
-
...__privateGet$
|
4840
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
2719
4841
|
});
|
2720
|
-
const schemaTables = await __privateMethod$2(this,
|
2721
|
-
return initObject(__privateGet$
|
4842
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
4843
|
+
return initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), response, columns);
|
2722
4844
|
};
|
2723
|
-
_deleteRecord = new WeakSet();
|
2724
4845
|
deleteRecord_fn = async function(recordId, columns = ["*"]) {
|
2725
|
-
if (!recordId)
|
2726
|
-
return null;
|
4846
|
+
if (!recordId) return null;
|
2727
4847
|
try {
|
2728
4848
|
const response = await deleteRecord({
|
2729
4849
|
pathParams: {
|
2730
4850
|
workspace: "{workspaceId}",
|
2731
4851
|
dbBranchName: "{dbBranch}",
|
2732
4852
|
region: "{region}",
|
2733
|
-
tableName: __privateGet$
|
4853
|
+
tableName: __privateGet$3(this, _table),
|
2734
4854
|
recordId
|
2735
4855
|
},
|
2736
4856
|
queryParams: { columns },
|
2737
|
-
...__privateGet$
|
4857
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
2738
4858
|
});
|
2739
|
-
const schemaTables = await __privateMethod$2(this,
|
2740
|
-
return initObject(__privateGet$
|
4859
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
4860
|
+
return initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), response, columns);
|
2741
4861
|
} catch (e) {
|
2742
4862
|
if (isObject(e) && e.status === 404) {
|
2743
4863
|
return null;
|
@@ -2745,10 +4865,9 @@ deleteRecord_fn = async function(recordId, columns = ["*"]) {
|
|
2745
4865
|
throw e;
|
2746
4866
|
}
|
2747
4867
|
};
|
2748
|
-
_deleteRecords = new WeakSet();
|
2749
4868
|
deleteRecords_fn = async function(recordIds) {
|
2750
4869
|
const chunkedOperations = chunk(
|
2751
|
-
compact(recordIds).map((id) => ({ delete: { table: __privateGet$
|
4870
|
+
compact(recordIds).map((id) => ({ delete: { table: __privateGet$3(this, _table), id } })),
|
2752
4871
|
BULK_OPERATION_MAX_SIZE
|
2753
4872
|
);
|
2754
4873
|
for (const operations of chunkedOperations) {
|
@@ -2759,48 +4878,39 @@ deleteRecords_fn = async function(recordIds) {
|
|
2759
4878
|
region: "{region}"
|
2760
4879
|
},
|
2761
4880
|
body: { operations },
|
2762
|
-
...__privateGet$
|
4881
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
2763
4882
|
});
|
2764
4883
|
}
|
2765
4884
|
};
|
2766
|
-
_setCacheQuery = new WeakSet();
|
2767
4885
|
setCacheQuery_fn = async function(query, meta, records) {
|
2768
|
-
await __privateGet$
|
4886
|
+
await __privateGet$3(this, _cache)?.set(`query_${__privateGet$3(this, _table)}:${query.key()}`, { date: /* @__PURE__ */ new Date(), meta, records });
|
2769
4887
|
};
|
2770
|
-
_getCacheQuery = new WeakSet();
|
2771
4888
|
getCacheQuery_fn = async function(query) {
|
2772
|
-
const key = `query_${__privateGet$
|
2773
|
-
const result = await __privateGet$
|
2774
|
-
if (!result)
|
2775
|
-
|
2776
|
-
const defaultTTL = __privateGet$4(this, _cache)?.defaultQueryTTL ?? -1;
|
4889
|
+
const key = `query_${__privateGet$3(this, _table)}:${query.key()}`;
|
4890
|
+
const result = await __privateGet$3(this, _cache)?.get(key);
|
4891
|
+
if (!result) return null;
|
4892
|
+
const defaultTTL = __privateGet$3(this, _cache)?.defaultQueryTTL ?? -1;
|
2777
4893
|
const { cache: ttl = defaultTTL } = query.getQueryOptions();
|
2778
|
-
if (ttl < 0)
|
2779
|
-
return null;
|
4894
|
+
if (ttl < 0) return null;
|
2780
4895
|
const hasExpired = result.date.getTime() + ttl < Date.now();
|
2781
4896
|
return hasExpired ? null : result;
|
2782
4897
|
};
|
2783
|
-
|
2784
|
-
|
2785
|
-
if (__privateGet$4(this, _schemaTables$2))
|
2786
|
-
return __privateGet$4(this, _schemaTables$2);
|
4898
|
+
getSchemaTables_fn = async function() {
|
4899
|
+
if (__privateGet$3(this, _schemaTables)) return __privateGet$3(this, _schemaTables);
|
2787
4900
|
const { schema } = await getBranchDetails({
|
2788
4901
|
pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
|
2789
|
-
...__privateGet$
|
4902
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
2790
4903
|
});
|
2791
|
-
__privateSet$
|
4904
|
+
__privateSet$2(this, _schemaTables, schema.tables);
|
2792
4905
|
return schema.tables;
|
2793
4906
|
};
|
2794
|
-
_transformObjectToApi = new WeakSet();
|
2795
4907
|
transformObjectToApi_fn = async function(object) {
|
2796
|
-
const schemaTables = await __privateMethod$2(this,
|
2797
|
-
const schema = schemaTables.find((table) => table.name === __privateGet$
|
2798
|
-
if (!schema)
|
2799
|
-
throw new Error(`Table ${__privateGet$4(this, _table)} not found in schema`);
|
4908
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
4909
|
+
const schema = schemaTables.find((table) => table.name === __privateGet$3(this, _table));
|
4910
|
+
if (!schema) throw new Error(`Table ${__privateGet$3(this, _table)} not found in schema`);
|
2800
4911
|
const result = {};
|
2801
4912
|
for (const [key, value] of Object.entries(object)) {
|
2802
|
-
if (key === "xata")
|
2803
|
-
continue;
|
4913
|
+
if (key === "xata") continue;
|
2804
4914
|
const type = schema.columns.find((column) => column.name === key)?.type;
|
2805
4915
|
switch (type) {
|
2806
4916
|
case "link": {
|
@@ -2831,11 +4941,9 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
|
|
2831
4941
|
const { xata, ...rest } = object ?? {};
|
2832
4942
|
Object.assign(data, rest);
|
2833
4943
|
const { columns } = schemaTables.find(({ name }) => name === table) ?? {};
|
2834
|
-
if (!columns)
|
2835
|
-
console.error(`Table ${table} not found in schema`);
|
4944
|
+
if (!columns) console.error(`Table ${table} not found in schema`);
|
2836
4945
|
for (const column of columns ?? []) {
|
2837
|
-
if (!isValidColumn(selectedColumns, column))
|
2838
|
-
continue;
|
4946
|
+
if (!isValidColumn(selectedColumns, column)) continue;
|
2839
4947
|
const value = data[column.name];
|
2840
4948
|
switch (column.type) {
|
2841
4949
|
case "datetime": {
|
@@ -2928,15 +5036,12 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
|
|
2928
5036
|
return record;
|
2929
5037
|
};
|
2930
5038
|
function extractId(value) {
|
2931
|
-
if (isString(value))
|
2932
|
-
|
2933
|
-
if (isObject(value) && isString(value.id))
|
2934
|
-
return value.id;
|
5039
|
+
if (isString(value)) return value;
|
5040
|
+
if (isObject(value) && isString(value.id)) return value.id;
|
2935
5041
|
return void 0;
|
2936
5042
|
}
|
2937
5043
|
function isValidColumn(columns, column) {
|
2938
|
-
if (columns.includes("*"))
|
2939
|
-
return true;
|
5044
|
+
if (columns.includes("*")) return true;
|
2940
5045
|
return columns.filter((item) => isString(item) && item.startsWith(column.name)).length > 0;
|
2941
5046
|
}
|
2942
5047
|
function parseIfVersion(...args) {
|
@@ -2948,51 +5053,40 @@ function parseIfVersion(...args) {
|
|
2948
5053
|
return void 0;
|
2949
5054
|
}
|
2950
5055
|
|
2951
|
-
var
|
2952
|
-
|
2953
|
-
throw TypeError("Cannot " + msg);
|
2954
|
-
};
|
2955
|
-
var __privateGet$3 = (obj, member, getter) => {
|
2956
|
-
__accessCheck$3(obj, member, "read from private field");
|
2957
|
-
return getter ? getter.call(obj) : member.get(obj);
|
2958
|
-
};
|
2959
|
-
var __privateAdd$3 = (obj, member, value) => {
|
2960
|
-
if (member.has(obj))
|
2961
|
-
throw TypeError("Cannot add the same private member more than once");
|
2962
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
2963
|
-
};
|
2964
|
-
var __privateSet$3 = (obj, member, value, setter) => {
|
2965
|
-
__accessCheck$3(obj, member, "write to private field");
|
2966
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
2967
|
-
return value;
|
5056
|
+
var __typeError$3 = (msg) => {
|
5057
|
+
throw TypeError(msg);
|
2968
5058
|
};
|
5059
|
+
var __accessCheck$3 = (obj, member, msg) => member.has(obj) || __typeError$3("Cannot " + msg);
|
5060
|
+
var __privateGet$2 = (obj, member, getter) => (__accessCheck$3(obj, member, "read from private field"), member.get(obj));
|
5061
|
+
var __privateAdd$3 = (obj, member, value) => member.has(obj) ? __typeError$3("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
5062
|
+
var __privateSet$1 = (obj, member, value, setter) => (__accessCheck$3(obj, member, "write to private field"), member.set(obj, value), value);
|
2969
5063
|
var _map;
|
2970
5064
|
class SimpleCache {
|
2971
5065
|
constructor(options = {}) {
|
2972
|
-
__privateAdd$3(this, _map
|
2973
|
-
__privateSet$
|
5066
|
+
__privateAdd$3(this, _map);
|
5067
|
+
__privateSet$1(this, _map, /* @__PURE__ */ new Map());
|
2974
5068
|
this.capacity = options.max ?? 500;
|
2975
5069
|
this.defaultQueryTTL = options.defaultQueryTTL ?? 60 * 1e3;
|
2976
5070
|
}
|
2977
5071
|
async getAll() {
|
2978
|
-
return Object.fromEntries(__privateGet$
|
5072
|
+
return Object.fromEntries(__privateGet$2(this, _map));
|
2979
5073
|
}
|
2980
5074
|
async get(key) {
|
2981
|
-
return __privateGet$
|
5075
|
+
return __privateGet$2(this, _map).get(key) ?? null;
|
2982
5076
|
}
|
2983
5077
|
async set(key, value) {
|
2984
5078
|
await this.delete(key);
|
2985
|
-
__privateGet$
|
2986
|
-
if (__privateGet$
|
2987
|
-
const leastRecentlyUsed = __privateGet$
|
2988
|
-
await this.delete(leastRecentlyUsed);
|
5079
|
+
__privateGet$2(this, _map).set(key, value);
|
5080
|
+
if (__privateGet$2(this, _map).size > this.capacity) {
|
5081
|
+
const leastRecentlyUsed = __privateGet$2(this, _map).keys().next().value;
|
5082
|
+
if (leastRecentlyUsed) await this.delete(leastRecentlyUsed);
|
2989
5083
|
}
|
2990
5084
|
}
|
2991
5085
|
async delete(key) {
|
2992
|
-
__privateGet$
|
5086
|
+
__privateGet$2(this, _map).delete(key);
|
2993
5087
|
}
|
2994
5088
|
async clear() {
|
2995
|
-
return __privateGet$
|
5089
|
+
return __privateGet$2(this, _map).clear();
|
2996
5090
|
}
|
2997
5091
|
}
|
2998
5092
|
_map = new WeakMap();
|
@@ -3025,55 +5119,39 @@ const includesAll = (value) => ({ $includesAll: value });
|
|
3025
5119
|
const includesNone = (value) => ({ $includesNone: value });
|
3026
5120
|
const includesAny = (value) => ({ $includesAny: value });
|
3027
5121
|
|
3028
|
-
var
|
3029
|
-
|
3030
|
-
throw TypeError("Cannot " + msg);
|
3031
|
-
};
|
3032
|
-
var __privateGet$2 = (obj, member, getter) => {
|
3033
|
-
__accessCheck$2(obj, member, "read from private field");
|
3034
|
-
return getter ? getter.call(obj) : member.get(obj);
|
3035
|
-
};
|
3036
|
-
var __privateAdd$2 = (obj, member, value) => {
|
3037
|
-
if (member.has(obj))
|
3038
|
-
throw TypeError("Cannot add the same private member more than once");
|
3039
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
3040
|
-
};
|
3041
|
-
var __privateSet$2 = (obj, member, value, setter) => {
|
3042
|
-
__accessCheck$2(obj, member, "write to private field");
|
3043
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
3044
|
-
return value;
|
5122
|
+
var __typeError$2 = (msg) => {
|
5123
|
+
throw TypeError(msg);
|
3045
5124
|
};
|
3046
|
-
var
|
5125
|
+
var __accessCheck$2 = (obj, member, msg) => member.has(obj) || __typeError$2("Cannot " + msg);
|
5126
|
+
var __privateGet$1 = (obj, member, getter) => (__accessCheck$2(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
5127
|
+
var __privateAdd$2 = (obj, member, value) => member.has(obj) ? __typeError$2("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
5128
|
+
var _tables;
|
3047
5129
|
class SchemaPlugin extends XataPlugin {
|
3048
|
-
constructor(
|
5130
|
+
constructor() {
|
3049
5131
|
super();
|
3050
5132
|
__privateAdd$2(this, _tables, {});
|
3051
|
-
__privateAdd$2(this, _schemaTables$1, void 0);
|
3052
|
-
__privateSet$2(this, _schemaTables$1, schemaTables);
|
3053
5133
|
}
|
3054
5134
|
build(pluginOptions) {
|
3055
5135
|
const db = new Proxy(
|
3056
5136
|
{},
|
3057
5137
|
{
|
3058
5138
|
get: (_target, table) => {
|
3059
|
-
if (!isString(table))
|
3060
|
-
|
3061
|
-
|
3062
|
-
__privateGet$2(this, _tables)[table] = new RestRepository({ db, pluginOptions, table, schemaTables: __privateGet$2(this, _schemaTables$1) });
|
5139
|
+
if (!isString(table)) throw new Error("Invalid table name");
|
5140
|
+
if (__privateGet$1(this, _tables)[table] === void 0) {
|
5141
|
+
__privateGet$1(this, _tables)[table] = new RestRepository({ db, pluginOptions, table, schemaTables: pluginOptions.tables });
|
3063
5142
|
}
|
3064
|
-
return __privateGet$
|
5143
|
+
return __privateGet$1(this, _tables)[table];
|
3065
5144
|
}
|
3066
5145
|
}
|
3067
5146
|
);
|
3068
|
-
const tableNames =
|
5147
|
+
const tableNames = pluginOptions.tables?.map(({ name }) => name) ?? [];
|
3069
5148
|
for (const table of tableNames) {
|
3070
|
-
db[table] = new RestRepository({ db, pluginOptions, table, schemaTables:
|
5149
|
+
db[table] = new RestRepository({ db, pluginOptions, table, schemaTables: pluginOptions.tables });
|
3071
5150
|
}
|
3072
5151
|
return db;
|
3073
5152
|
}
|
3074
5153
|
}
|
3075
5154
|
_tables = new WeakMap();
|
3076
|
-
_schemaTables$1 = new WeakMap();
|
3077
5155
|
|
3078
5156
|
class FilesPlugin extends XataPlugin {
|
3079
5157
|
build(pluginOptions) {
|
@@ -3149,58 +5227,37 @@ function getContentType(file) {
|
|
3149
5227
|
return "application/octet-stream";
|
3150
5228
|
}
|
3151
5229
|
|
3152
|
-
var
|
3153
|
-
|
3154
|
-
throw TypeError("Cannot " + msg);
|
3155
|
-
};
|
3156
|
-
var __privateGet$1 = (obj, member, getter) => {
|
3157
|
-
__accessCheck$1(obj, member, "read from private field");
|
3158
|
-
return getter ? getter.call(obj) : member.get(obj);
|
3159
|
-
};
|
3160
|
-
var __privateAdd$1 = (obj, member, value) => {
|
3161
|
-
if (member.has(obj))
|
3162
|
-
throw TypeError("Cannot add the same private member more than once");
|
3163
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
3164
|
-
};
|
3165
|
-
var __privateSet$1 = (obj, member, value, setter) => {
|
3166
|
-
__accessCheck$1(obj, member, "write to private field");
|
3167
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
3168
|
-
return value;
|
3169
|
-
};
|
3170
|
-
var __privateMethod$1 = (obj, member, method) => {
|
3171
|
-
__accessCheck$1(obj, member, "access private method");
|
3172
|
-
return method;
|
5230
|
+
var __typeError$1 = (msg) => {
|
5231
|
+
throw TypeError(msg);
|
3173
5232
|
};
|
3174
|
-
var
|
5233
|
+
var __accessCheck$1 = (obj, member, msg) => member.has(obj) || __typeError$1("Cannot " + msg);
|
5234
|
+
var __privateAdd$1 = (obj, member, value) => member.has(obj) ? __typeError$1("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
5235
|
+
var __privateMethod$1 = (obj, member, method) => (__accessCheck$1(obj, member, "access private method"), method);
|
5236
|
+
var _SearchPlugin_instances, search_fn;
|
3175
5237
|
class SearchPlugin extends XataPlugin {
|
3176
|
-
constructor(db
|
5238
|
+
constructor(db) {
|
3177
5239
|
super();
|
3178
5240
|
this.db = db;
|
3179
|
-
__privateAdd$1(this,
|
3180
|
-
__privateAdd$1(this, _getSchemaTables);
|
3181
|
-
__privateAdd$1(this, _schemaTables, void 0);
|
3182
|
-
__privateSet$1(this, _schemaTables, schemaTables);
|
5241
|
+
__privateAdd$1(this, _SearchPlugin_instances);
|
3183
5242
|
}
|
3184
5243
|
build(pluginOptions) {
|
3185
5244
|
return {
|
3186
5245
|
all: async (query, options = {}) => {
|
3187
|
-
const { records, totalCount } = await __privateMethod$1(this,
|
3188
|
-
const schemaTables = await __privateMethod$1(this, _getSchemaTables, getSchemaTables_fn).call(this, pluginOptions);
|
5246
|
+
const { records, totalCount } = await __privateMethod$1(this, _SearchPlugin_instances, search_fn).call(this, query, options, pluginOptions);
|
3189
5247
|
return {
|
3190
5248
|
totalCount,
|
3191
5249
|
records: records.map((record) => {
|
3192
5250
|
const { table = "orphan" } = record.xata;
|
3193
|
-
return { table, record: initObject(this.db,
|
5251
|
+
return { table, record: initObject(this.db, pluginOptions.tables, table, record, ["*"]) };
|
3194
5252
|
})
|
3195
5253
|
};
|
3196
5254
|
},
|
3197
5255
|
byTable: async (query, options = {}) => {
|
3198
|
-
const { records: rawRecords, totalCount } = await __privateMethod$1(this,
|
3199
|
-
const schemaTables = await __privateMethod$1(this, _getSchemaTables, getSchemaTables_fn).call(this, pluginOptions);
|
5256
|
+
const { records: rawRecords, totalCount } = await __privateMethod$1(this, _SearchPlugin_instances, search_fn).call(this, query, options, pluginOptions);
|
3200
5257
|
const records = rawRecords.reduce((acc, record) => {
|
3201
5258
|
const { table = "orphan" } = record.xata;
|
3202
5259
|
const items = acc[table] ?? [];
|
3203
|
-
const item = initObject(this.db,
|
5260
|
+
const item = initObject(this.db, pluginOptions.tables, table, record, ["*"]);
|
3204
5261
|
return { ...acc, [table]: [...items, item] };
|
3205
5262
|
}, {});
|
3206
5263
|
return { totalCount, records };
|
@@ -3208,29 +5265,17 @@ class SearchPlugin extends XataPlugin {
|
|
3208
5265
|
};
|
3209
5266
|
}
|
3210
5267
|
}
|
3211
|
-
|
3212
|
-
_search = new WeakSet();
|
5268
|
+
_SearchPlugin_instances = new WeakSet();
|
3213
5269
|
search_fn = async function(query, options, pluginOptions) {
|
3214
5270
|
const { tables, fuzziness, highlight, prefix, page } = options ?? {};
|
3215
5271
|
const { records, totalCount } = await searchBranch({
|
3216
5272
|
pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
|
3217
|
-
// @ts-
|
5273
|
+
// @ts-expect-error Filter properties do not match inferred type
|
3218
5274
|
body: { tables, query, fuzziness, prefix, highlight, page },
|
3219
5275
|
...pluginOptions
|
3220
5276
|
});
|
3221
5277
|
return { records, totalCount };
|
3222
5278
|
};
|
3223
|
-
_getSchemaTables = new WeakSet();
|
3224
|
-
getSchemaTables_fn = async function(pluginOptions) {
|
3225
|
-
if (__privateGet$1(this, _schemaTables))
|
3226
|
-
return __privateGet$1(this, _schemaTables);
|
3227
|
-
const { schema } = await getBranchDetails({
|
3228
|
-
pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
|
3229
|
-
...pluginOptions
|
3230
|
-
});
|
3231
|
-
__privateSet$1(this, _schemaTables, schema.tables);
|
3232
|
-
return schema.tables;
|
3233
|
-
};
|
3234
5279
|
|
3235
5280
|
function escapeElement(elementRepresentation) {
|
3236
5281
|
const escaped = elementRepresentation.replace(/\\/g, "\\\\").replace(/"/g, '\\"');
|
@@ -3256,8 +5301,7 @@ function arrayString(val) {
|
|
3256
5301
|
return result;
|
3257
5302
|
}
|
3258
5303
|
function prepareValue(value) {
|
3259
|
-
if (!isDefined(value))
|
3260
|
-
return null;
|
5304
|
+
if (!isDefined(value)) return null;
|
3261
5305
|
if (value instanceof Date) {
|
3262
5306
|
return value.toISOString();
|
3263
5307
|
}
|
@@ -3284,25 +5328,76 @@ function prepareParams(param1, param2) {
|
|
3284
5328
|
return { statement, params: param2?.map((value) => prepareValue(value)) };
|
3285
5329
|
}
|
3286
5330
|
if (isObject(param1)) {
|
3287
|
-
const { statement, params, consistency } = param1;
|
3288
|
-
return { statement, params: params?.map((value) => prepareValue(value)), consistency };
|
5331
|
+
const { statement, params, consistency, responseType } = param1;
|
5332
|
+
return { statement, params: params?.map((value) => prepareValue(value)), consistency, responseType };
|
3289
5333
|
}
|
3290
5334
|
throw new Error("Invalid query");
|
3291
5335
|
}
|
3292
5336
|
|
3293
5337
|
class SQLPlugin extends XataPlugin {
|
3294
5338
|
build(pluginOptions) {
|
3295
|
-
|
3296
|
-
|
3297
|
-
|
5339
|
+
const sqlFunction = async (query, ...parameters) => {
|
5340
|
+
if (!isParamsObject(query) && (!isTemplateStringsArray(query) || !Array.isArray(parameters))) {
|
5341
|
+
throw new Error("Invalid usage of `xata.sql`. Please use it as a tagged template or with an object.");
|
5342
|
+
}
|
5343
|
+
const { statement, params, consistency, responseType } = prepareParams(query, parameters);
|
5344
|
+
const { warning, columns, ...response } = await sqlQuery({
|
5345
|
+
pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
|
5346
|
+
body: { statement, params, consistency, responseType },
|
5347
|
+
...pluginOptions
|
5348
|
+
});
|
5349
|
+
const records = "records" in response ? response.records : void 0;
|
5350
|
+
const rows = "rows" in response ? response.rows : void 0;
|
5351
|
+
return { records, rows, warning, columns };
|
5352
|
+
};
|
5353
|
+
sqlFunction.connectionString = buildConnectionString(pluginOptions);
|
5354
|
+
sqlFunction.batch = async (query) => {
|
5355
|
+
const { results } = await sqlBatchQuery({
|
3298
5356
|
pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
|
3299
|
-
body: {
|
5357
|
+
body: {
|
5358
|
+
statements: query.statements.map(({ statement, params }) => ({ statement, params })),
|
5359
|
+
consistency: query.consistency,
|
5360
|
+
responseType: query.responseType
|
5361
|
+
},
|
3300
5362
|
...pluginOptions
|
3301
5363
|
});
|
3302
|
-
return {
|
5364
|
+
return { results };
|
3303
5365
|
};
|
5366
|
+
return sqlFunction;
|
5367
|
+
}
|
5368
|
+
}
|
5369
|
+
function isTemplateStringsArray(strings) {
|
5370
|
+
return Array.isArray(strings) && "raw" in strings && Array.isArray(strings.raw);
|
5371
|
+
}
|
5372
|
+
function isParamsObject(params) {
|
5373
|
+
return isObject(params) && "statement" in params;
|
5374
|
+
}
|
5375
|
+
function buildDomain(host, region) {
|
5376
|
+
switch (host) {
|
5377
|
+
case "production":
|
5378
|
+
return `${region}.sql.xata.sh`;
|
5379
|
+
case "staging":
|
5380
|
+
return `${region}.sql.staging-xata.dev`;
|
5381
|
+
case "dev":
|
5382
|
+
return `${region}.sql.dev-xata.dev`;
|
5383
|
+
case "local":
|
5384
|
+
return "localhost:7654";
|
5385
|
+
default:
|
5386
|
+
throw new Error("Invalid host provider");
|
3304
5387
|
}
|
3305
5388
|
}
|
5389
|
+
function buildConnectionString({ apiKey, workspacesApiUrl, branch }) {
|
5390
|
+
const url = isString(workspacesApiUrl) ? workspacesApiUrl : workspacesApiUrl("", {});
|
5391
|
+
const parts = parseWorkspacesUrlParts(url);
|
5392
|
+
if (!parts) throw new Error("Invalid workspaces URL");
|
5393
|
+
const { workspace: workspaceSlug, region, database, host } = parts;
|
5394
|
+
const domain = buildDomain(host, region);
|
5395
|
+
const workspace = workspaceSlug.split("-").pop();
|
5396
|
+
if (!workspace || !region || !database || !apiKey || !branch) {
|
5397
|
+
throw new Error("Unable to build xata connection string");
|
5398
|
+
}
|
5399
|
+
return `postgresql://${workspace}:${apiKey}@${domain}/${database}:${branch}?sslmode=require`;
|
5400
|
+
}
|
3306
5401
|
|
3307
5402
|
class TransactionPlugin extends XataPlugin {
|
3308
5403
|
build(pluginOptions) {
|
@@ -3319,55 +5414,42 @@ class TransactionPlugin extends XataPlugin {
|
|
3319
5414
|
}
|
3320
5415
|
}
|
3321
5416
|
|
3322
|
-
var
|
3323
|
-
|
3324
|
-
throw TypeError("Cannot " + msg);
|
3325
|
-
};
|
3326
|
-
var __privateGet = (obj, member, getter) => {
|
3327
|
-
__accessCheck(obj, member, "read from private field");
|
3328
|
-
return getter ? getter.call(obj) : member.get(obj);
|
3329
|
-
};
|
3330
|
-
var __privateAdd = (obj, member, value) => {
|
3331
|
-
if (member.has(obj))
|
3332
|
-
throw TypeError("Cannot add the same private member more than once");
|
3333
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
3334
|
-
};
|
3335
|
-
var __privateSet = (obj, member, value, setter) => {
|
3336
|
-
__accessCheck(obj, member, "write to private field");
|
3337
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
3338
|
-
return value;
|
3339
|
-
};
|
3340
|
-
var __privateMethod = (obj, member, method) => {
|
3341
|
-
__accessCheck(obj, member, "access private method");
|
3342
|
-
return method;
|
5417
|
+
var __typeError = (msg) => {
|
5418
|
+
throw TypeError(msg);
|
3343
5419
|
};
|
5420
|
+
var __accessCheck = (obj, member, msg) => member.has(obj) || __typeError("Cannot " + msg);
|
5421
|
+
var __privateGet = (obj, member, getter) => (__accessCheck(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
5422
|
+
var __privateAdd = (obj, member, value) => member.has(obj) ? __typeError("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
5423
|
+
var __privateSet = (obj, member, value, setter) => (__accessCheck(obj, member, "write to private field"), member.set(obj, value), value);
|
5424
|
+
var __privateMethod = (obj, member, method) => (__accessCheck(obj, member, "access private method"), method);
|
3344
5425
|
const buildClient = (plugins) => {
|
3345
|
-
var _options,
|
5426
|
+
var _options, _instances, parseOptions_fn, getFetchProps_fn, _a;
|
3346
5427
|
return _a = class {
|
3347
|
-
constructor(options = {},
|
3348
|
-
__privateAdd(this,
|
3349
|
-
__privateAdd(this,
|
3350
|
-
|
3351
|
-
const safeOptions = __privateMethod(this, _parseOptions, parseOptions_fn).call(this, options);
|
5428
|
+
constructor(options = {}, tables) {
|
5429
|
+
__privateAdd(this, _instances);
|
5430
|
+
__privateAdd(this, _options);
|
5431
|
+
const safeOptions = __privateMethod(this, _instances, parseOptions_fn).call(this, options);
|
3352
5432
|
__privateSet(this, _options, safeOptions);
|
3353
5433
|
const pluginOptions = {
|
3354
|
-
...__privateMethod(this,
|
5434
|
+
...__privateMethod(this, _instances, getFetchProps_fn).call(this, safeOptions),
|
3355
5435
|
cache: safeOptions.cache,
|
3356
|
-
host: safeOptions.host
|
5436
|
+
host: safeOptions.host,
|
5437
|
+
tables,
|
5438
|
+
branch: safeOptions.branch
|
3357
5439
|
};
|
3358
|
-
const db = new SchemaPlugin(
|
3359
|
-
const search = new SearchPlugin(db
|
5440
|
+
const db = new SchemaPlugin().build(pluginOptions);
|
5441
|
+
const search = new SearchPlugin(db).build(pluginOptions);
|
3360
5442
|
const transactions = new TransactionPlugin().build(pluginOptions);
|
3361
5443
|
const sql = new SQLPlugin().build(pluginOptions);
|
3362
5444
|
const files = new FilesPlugin().build(pluginOptions);
|
5445
|
+
this.schema = { tables };
|
3363
5446
|
this.db = db;
|
3364
5447
|
this.search = search;
|
3365
5448
|
this.transactions = transactions;
|
3366
5449
|
this.sql = sql;
|
3367
5450
|
this.files = files;
|
3368
5451
|
for (const [key, namespace] of Object.entries(plugins ?? {})) {
|
3369
|
-
if (namespace === void 0)
|
3370
|
-
continue;
|
5452
|
+
if (namespace === void 0) continue;
|
3371
5453
|
this[key] = namespace.build(pluginOptions);
|
3372
5454
|
}
|
3373
5455
|
}
|
@@ -3376,12 +5458,12 @@ const buildClient = (plugins) => {
|
|
3376
5458
|
const branch = __privateGet(this, _options).branch;
|
3377
5459
|
return { databaseURL, branch };
|
3378
5460
|
}
|
3379
|
-
}, _options = new WeakMap(),
|
5461
|
+
}, _options = new WeakMap(), _instances = new WeakSet(), parseOptions_fn = function(options) {
|
3380
5462
|
const enableBrowser = options?.enableBrowser ?? getEnableBrowserVariable() ?? false;
|
3381
5463
|
const isBrowser = typeof window !== "undefined" && typeof Deno === "undefined";
|
3382
5464
|
if (isBrowser && !enableBrowser) {
|
3383
5465
|
throw new Error(
|
3384
|
-
"You are trying to use Xata from the browser, which is potentially a non-secure environment.
|
5466
|
+
"You are trying to use Xata from the browser, which is potentially a non-secure environment. How to fix: https://xata.io/docs/messages/api-key-browser-error"
|
3385
5467
|
);
|
3386
5468
|
}
|
3387
5469
|
const fetch = getFetchImplementation(options?.fetch);
|
@@ -3431,7 +5513,7 @@ const buildClient = (plugins) => {
|
|
3431
5513
|
clientName,
|
3432
5514
|
xataAgentExtra
|
3433
5515
|
};
|
3434
|
-
},
|
5516
|
+
}, getFetchProps_fn = function({
|
3435
5517
|
fetch,
|
3436
5518
|
apiKey,
|
3437
5519
|
databaseURL,
|
@@ -3472,26 +5554,19 @@ class Serializer {
|
|
3472
5554
|
}
|
3473
5555
|
toJSON(data) {
|
3474
5556
|
function visit(obj) {
|
3475
|
-
if (Array.isArray(obj))
|
3476
|
-
return obj.map(visit);
|
5557
|
+
if (Array.isArray(obj)) return obj.map(visit);
|
3477
5558
|
const type = typeof obj;
|
3478
|
-
if (type === "undefined")
|
3479
|
-
|
3480
|
-
if (
|
3481
|
-
return { [META]: "bigint", [VALUE]: obj.toString() };
|
3482
|
-
if (obj === null || type !== "object")
|
3483
|
-
return obj;
|
5559
|
+
if (type === "undefined") return { [META]: "undefined" };
|
5560
|
+
if (type === "bigint") return { [META]: "bigint", [VALUE]: obj.toString() };
|
5561
|
+
if (obj === null || type !== "object") return obj;
|
3484
5562
|
const constructor = obj.constructor;
|
3485
5563
|
const o = { [META]: constructor.name };
|
3486
5564
|
for (const [key, value] of Object.entries(obj)) {
|
3487
5565
|
o[key] = visit(value);
|
3488
5566
|
}
|
3489
|
-
if (constructor === Date)
|
3490
|
-
|
3491
|
-
if (constructor ===
|
3492
|
-
o[VALUE] = Object.fromEntries(obj);
|
3493
|
-
if (constructor === Set)
|
3494
|
-
o[VALUE] = [...obj];
|
5567
|
+
if (constructor === Date) o[VALUE] = obj.toISOString();
|
5568
|
+
if (constructor === Map) o[VALUE] = Object.fromEntries(obj);
|
5569
|
+
if (constructor === Set) o[VALUE] = [...obj];
|
3495
5570
|
return o;
|
3496
5571
|
}
|
3497
5572
|
return JSON.stringify(visit(data));
|
@@ -3504,16 +5579,11 @@ class Serializer {
|
|
3504
5579
|
if (constructor) {
|
3505
5580
|
return Object.assign(Object.create(constructor.prototype), rest);
|
3506
5581
|
}
|
3507
|
-
if (clazz === "Date")
|
3508
|
-
|
3509
|
-
if (clazz === "
|
3510
|
-
|
3511
|
-
if (clazz === "
|
3512
|
-
return new Map(Object.entries(val));
|
3513
|
-
if (clazz === "bigint")
|
3514
|
-
return BigInt(val);
|
3515
|
-
if (clazz === "undefined")
|
3516
|
-
return void 0;
|
5582
|
+
if (clazz === "Date") return new Date(val);
|
5583
|
+
if (clazz === "Set") return new Set(val);
|
5584
|
+
if (clazz === "Map") return new Map(Object.entries(val));
|
5585
|
+
if (clazz === "bigint") return BigInt(val);
|
5586
|
+
if (clazz === "undefined") return void 0;
|
3517
5587
|
return rest;
|
3518
5588
|
}
|
3519
5589
|
return value;
|
@@ -3535,5 +5605,5 @@ class XataError extends Error {
|
|
3535
5605
|
}
|
3536
5606
|
}
|
3537
5607
|
|
3538
|
-
export { BaseClient, FetcherError, FilesPlugin, operationsByTag as Operations, PAGINATION_DEFAULT_OFFSET, PAGINATION_DEFAULT_SIZE, PAGINATION_MAX_OFFSET, PAGINATION_MAX_SIZE, Page, Query, RecordArray, RecordColumnTypes, Repository, RestRepository, SQLPlugin, SchemaPlugin, SearchPlugin, Serializer, SimpleCache, TransactionPlugin, XataApiClient, XataApiPlugin, XataError, XataFile, XataPlugin, acceptWorkspaceMemberInvite, addGitBranchesEntry, addTableColumn, aggregateTable, applyBranchSchemaEdit, applyMigration, askTable, askTableSession, branchTransaction, buildClient, buildPreviewBranchName, buildProviderString, bulkInsertTableRecords, cancelWorkspaceMemberInvite, compareBranchSchemas, compareBranchWithUserSchema, compareMigrationRequest, contains, copyBranch, createBranch, createCluster, createDatabase, createMigrationRequest, createTable, createUserAPIKey, createWorkspace, deleteBranch, deleteColumn, deleteDatabase, deleteDatabaseGithubSettings, deleteFile, deleteFileItem, deleteOAuthAccessToken, deleteRecord, deleteTable, deleteUser, deleteUserAPIKey, deleteUserOAuthClient, deleteWorkspace, deserialize, endsWith, equals, executeBranchMigrationPlan, exists, fileAccess, fileUpload, ge, getAPIKey, getAuthorizationCode, getBranch, getBranchDetails, getBranchList, getBranchMetadata, getBranchMigrationHistory, getBranchMigrationPlan, getBranchSchemaHistory, getBranchStats, getCluster, getColumn, getDatabaseGithubSettings, getDatabaseList, getDatabaseMetadata, getDatabaseURL, getFile, getFileItem, getGitBranchesMapping, getHostUrl, getMigrationRequest, getMigrationRequestIsMerged, getPreviewBranch, getRecord, getSchema, getTableColumns, getTableSchema, getUser, getUserAPIKeys, getUserOAuthAccessTokens, getUserOAuthClients, getWorkspace, getWorkspaceMembersList, getWorkspacesList, grantAuthorizationCode, greaterEquals, greaterThan, greaterThanEquals, gt, gte, iContains, iPattern, includes, includesAll, includesAny, includesNone, insertRecord, insertRecordWithID, 
inviteWorkspaceMember, is, isCursorPaginationOptions, isHostProviderAlias, isHostProviderBuilder, isIdentifiable, isNot, isValidExpandedColumn, isValidSelectableColumns, isXataRecord, le, lessEquals, lessThan, lessThanEquals, listClusters, listMigrationRequestsCommits, listRegions, lt, lte, mergeMigrationRequest, notExists, operationsByTag, parseProviderString, parseWorkspacesUrlParts, pattern,
|
5608
|
+
export { BaseClient, Buffer, FetcherError, FilesPlugin, operationsByTag as Operations, PAGINATION_DEFAULT_OFFSET, PAGINATION_DEFAULT_SIZE, PAGINATION_MAX_OFFSET, PAGINATION_MAX_SIZE, Page, PageRecordArray, Query, RecordArray, RecordColumnTypes, Repository, RestRepository, SQLPlugin, SchemaPlugin, SearchPlugin, Serializer, SimpleCache, TransactionPlugin, XataApiClient, XataApiPlugin, XataError, XataFile, XataPlugin, acceptWorkspaceMemberInvite, adaptAllTables, adaptTable, addGitBranchesEntry, addTableColumn, aggregateTable, applyBranchSchemaEdit, applyMigration, askTable, askTableSession, branchTransaction, buildClient, buildPreviewBranchName, buildProviderString, bulkInsertTableRecords, cancelWorkspaceMemberInvite, compareBranchSchemas, compareBranchWithUserSchema, compareMigrationRequest, completeMigration, contains, copyBranch, createBranch, createBranchAsync, createCluster, createDatabase, createMigrationRequest, createTable, createUserAPIKey, createWorkspace, deleteBranch, deleteCluster, deleteColumn, deleteDatabase, deleteDatabaseGithubSettings, deleteFile, deleteFileItem, deleteOAuthAccessToken, deleteRecord, deleteTable, deleteUser, deleteUserAPIKey, deleteUserOAuthClient, deleteWorkspace, deserialize, dropClusterExtension, endsWith, equals, executeBranchMigrationPlan, exists, fileAccess, fileUpload, ge, getAPIKey, getAuthorizationCode, getBranch, getBranchDetails, getBranchList, getBranchMetadata, getBranchMigrationHistory, getBranchMigrationJobStatus, getBranchMigrationPlan, getBranchMoveStatus, getBranchSchemaHistory, getBranchStats, getCluster, getClusterMetrics, getColumn, getDatabaseGithubSettings, getDatabaseList, getDatabaseMetadata, getDatabaseSettings, getDatabaseURL, getFile, getFileItem, getGitBranchesMapping, getHostUrl, getMigrationHistory, getMigrationJobStatus, getMigrationJobs, getMigrationRequest, getMigrationRequestIsMerged, getPreviewBranch, getRecord, getSchema, getSchemas, getTableColumns, getTableSchema, getTaskStatus, getTasks, 
getUser, getUserAPIKeys, getUserOAuthAccessTokens, getUserOAuthClients, getWorkspace, getWorkspaceMembersList, getWorkspaceSettings, getWorkspacesList, grantAuthorizationCode, greaterEquals, greaterThan, greaterThanEquals, gt, gte, iContains, iPattern, includes, includesAll, includesAny, includesNone, insertRecord, insertRecordWithID, installClusterExtension, inviteWorkspaceMember, is, isCursorPaginationOptions, isHostProviderAlias, isHostProviderBuilder, isIdentifiable, isNot, isValidExpandedColumn, isValidSelectableColumns, isXataRecord, le, lessEquals, lessThan, lessThanEquals, listClusterBranches, listClusterExtensions, listClusters, listMigrationRequestsCommits, listRegions, lt, lte, mergeMigrationRequest, moveBranch, notExists, operationsByTag, parseProviderString, parseWorkspacesUrlParts, pattern, previewBranchSchemaEdit, pushBranchMigrations, putFile, putFileItem, queryMigrationRequests, queryTable, removeGitBranchesEntry, removeWorkspaceMember, renameDatabase, resendWorkspaceMemberInvite, resolveBranch, rollbackMigration, searchBranch, searchTable, serialize, setTableSchema, sqlBatchQuery, sqlQuery, startMigration, startsWith, summarizeTable, transformImage, updateBranchMetadata, updateBranchSchema, updateCluster, updateColumn, updateDatabaseGithubSettings, updateDatabaseMetadata, updateDatabaseSettings, updateMigrationRequest, updateOAuthAccessToken, updateRecordWithID, updateTable, updateUser, updateWorkspace, updateWorkspaceMemberInvite, updateWorkspaceMemberRole, updateWorkspaceSettings, upsertRecordWithID, vectorSearchTable };
|
3539
5609
|
//# sourceMappingURL=index.mjs.map
|