@xata.io/client 0.29.3 → 0.29.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-add-version.log +1 -1
- package/.turbo/turbo-build.log +4 -4
- package/CHANGELOG.md +14 -0
- package/dist/index.cjs +2437 -502
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.ts +1014 -23
- package/dist/index.mjs +2431 -503
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.mjs
CHANGED
@@ -22,6 +22,1789 @@ const TraceAttributes = {
|
|
22
22
|
CLOUDFLARE_RAY_ID: "cf.ray"
|
23
23
|
};
|
24
24
|
|
25
|
+
const lookup = [];
|
26
|
+
const revLookup = [];
|
27
|
+
const code = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
|
28
|
+
for (let i = 0, len = code.length; i < len; ++i) {
|
29
|
+
lookup[i] = code[i];
|
30
|
+
revLookup[code.charCodeAt(i)] = i;
|
31
|
+
}
|
32
|
+
revLookup["-".charCodeAt(0)] = 62;
|
33
|
+
revLookup["_".charCodeAt(0)] = 63;
|
34
|
+
function getLens(b64) {
|
35
|
+
const len = b64.length;
|
36
|
+
if (len % 4 > 0) {
|
37
|
+
throw new Error("Invalid string. Length must be a multiple of 4");
|
38
|
+
}
|
39
|
+
let validLen = b64.indexOf("=");
|
40
|
+
if (validLen === -1) validLen = len;
|
41
|
+
const placeHoldersLen = validLen === len ? 0 : 4 - validLen % 4;
|
42
|
+
return [validLen, placeHoldersLen];
|
43
|
+
}
|
44
|
+
function _byteLength(_b64, validLen, placeHoldersLen) {
|
45
|
+
return (validLen + placeHoldersLen) * 3 / 4 - placeHoldersLen;
|
46
|
+
}
|
47
|
+
function toByteArray(b64) {
|
48
|
+
let tmp;
|
49
|
+
const lens = getLens(b64);
|
50
|
+
const validLen = lens[0];
|
51
|
+
const placeHoldersLen = lens[1];
|
52
|
+
const arr = new Uint8Array(_byteLength(b64, validLen, placeHoldersLen));
|
53
|
+
let curByte = 0;
|
54
|
+
const len = placeHoldersLen > 0 ? validLen - 4 : validLen;
|
55
|
+
let i;
|
56
|
+
for (i = 0; i < len; i += 4) {
|
57
|
+
tmp = revLookup[b64.charCodeAt(i)] << 18 | revLookup[b64.charCodeAt(i + 1)] << 12 | revLookup[b64.charCodeAt(i + 2)] << 6 | revLookup[b64.charCodeAt(i + 3)];
|
58
|
+
arr[curByte++] = tmp >> 16 & 255;
|
59
|
+
arr[curByte++] = tmp >> 8 & 255;
|
60
|
+
arr[curByte++] = tmp & 255;
|
61
|
+
}
|
62
|
+
if (placeHoldersLen === 2) {
|
63
|
+
tmp = revLookup[b64.charCodeAt(i)] << 2 | revLookup[b64.charCodeAt(i + 1)] >> 4;
|
64
|
+
arr[curByte++] = tmp & 255;
|
65
|
+
}
|
66
|
+
if (placeHoldersLen === 1) {
|
67
|
+
tmp = revLookup[b64.charCodeAt(i)] << 10 | revLookup[b64.charCodeAt(i + 1)] << 4 | revLookup[b64.charCodeAt(i + 2)] >> 2;
|
68
|
+
arr[curByte++] = tmp >> 8 & 255;
|
69
|
+
arr[curByte++] = tmp & 255;
|
70
|
+
}
|
71
|
+
return arr;
|
72
|
+
}
|
73
|
+
function tripletToBase64(num) {
|
74
|
+
return lookup[num >> 18 & 63] + lookup[num >> 12 & 63] + lookup[num >> 6 & 63] + lookup[num & 63];
|
75
|
+
}
|
76
|
+
function encodeChunk(uint8, start, end) {
|
77
|
+
let tmp;
|
78
|
+
const output = [];
|
79
|
+
for (let i = start; i < end; i += 3) {
|
80
|
+
tmp = (uint8[i] << 16 & 16711680) + (uint8[i + 1] << 8 & 65280) + (uint8[i + 2] & 255);
|
81
|
+
output.push(tripletToBase64(tmp));
|
82
|
+
}
|
83
|
+
return output.join("");
|
84
|
+
}
|
85
|
+
function fromByteArray(uint8) {
|
86
|
+
let tmp;
|
87
|
+
const len = uint8.length;
|
88
|
+
const extraBytes = len % 3;
|
89
|
+
const parts = [];
|
90
|
+
const maxChunkLength = 16383;
|
91
|
+
for (let i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) {
|
92
|
+
parts.push(encodeChunk(uint8, i, i + maxChunkLength > len2 ? len2 : i + maxChunkLength));
|
93
|
+
}
|
94
|
+
if (extraBytes === 1) {
|
95
|
+
tmp = uint8[len - 1];
|
96
|
+
parts.push(lookup[tmp >> 2] + lookup[tmp << 4 & 63] + "==");
|
97
|
+
} else if (extraBytes === 2) {
|
98
|
+
tmp = (uint8[len - 2] << 8) + uint8[len - 1];
|
99
|
+
parts.push(lookup[tmp >> 10] + lookup[tmp >> 4 & 63] + lookup[tmp << 2 & 63] + "=");
|
100
|
+
}
|
101
|
+
return parts.join("");
|
102
|
+
}
|
103
|
+
|
104
|
+
const K_MAX_LENGTH = 2147483647;
|
105
|
+
const MAX_ARGUMENTS_LENGTH = 4096;
|
106
|
+
class Buffer extends Uint8Array {
|
107
|
+
/**
|
108
|
+
* Constructs a new `Buffer` instance.
|
109
|
+
*
|
110
|
+
* @param value
|
111
|
+
* @param encodingOrOffset
|
112
|
+
* @param length
|
113
|
+
*/
|
114
|
+
constructor(value, encodingOrOffset, length) {
|
115
|
+
if (typeof value === "number") {
|
116
|
+
if (typeof encodingOrOffset === "string") {
|
117
|
+
throw new TypeError("The first argument must be of type string, received type number");
|
118
|
+
}
|
119
|
+
if (value < 0) {
|
120
|
+
throw new RangeError("The buffer size cannot be negative");
|
121
|
+
}
|
122
|
+
super(value < 0 ? 0 : Buffer._checked(value) | 0);
|
123
|
+
} else if (typeof value === "string") {
|
124
|
+
if (typeof encodingOrOffset !== "string") {
|
125
|
+
encodingOrOffset = "utf8";
|
126
|
+
}
|
127
|
+
if (!Buffer.isEncoding(encodingOrOffset)) {
|
128
|
+
throw new TypeError("Unknown encoding: " + encodingOrOffset);
|
129
|
+
}
|
130
|
+
const length2 = Buffer.byteLength(value, encodingOrOffset) | 0;
|
131
|
+
super(length2);
|
132
|
+
const written = this.write(value, 0, this.length, encodingOrOffset);
|
133
|
+
if (written !== length2) {
|
134
|
+
throw new TypeError(
|
135
|
+
"Number of bytes written did not match expected length (wrote " + written + ", expected " + length2 + ")"
|
136
|
+
);
|
137
|
+
}
|
138
|
+
} else if (ArrayBuffer.isView(value)) {
|
139
|
+
if (Buffer._isInstance(value, Uint8Array)) {
|
140
|
+
const copy = new Uint8Array(value);
|
141
|
+
const array = copy.buffer;
|
142
|
+
const byteOffset = copy.byteOffset;
|
143
|
+
const length2 = copy.byteLength;
|
144
|
+
if (byteOffset < 0 || array.byteLength < byteOffset) {
|
145
|
+
throw new RangeError("offset is outside of buffer bounds");
|
146
|
+
}
|
147
|
+
if (array.byteLength < byteOffset + (length2 || 0)) {
|
148
|
+
throw new RangeError("length is outside of buffer bounds");
|
149
|
+
}
|
150
|
+
super(new Uint8Array(array, byteOffset, length2));
|
151
|
+
} else {
|
152
|
+
const array = value;
|
153
|
+
const length2 = array.length < 0 ? 0 : Buffer._checked(array.length) | 0;
|
154
|
+
super(new Uint8Array(length2));
|
155
|
+
for (let i = 0; i < length2; i++) {
|
156
|
+
this[i] = array[i] & 255;
|
157
|
+
}
|
158
|
+
}
|
159
|
+
} else if (value == null) {
|
160
|
+
throw new TypeError(
|
161
|
+
"The first argument must be one of type string, Buffer, ArrayBuffer, Array, or Array-like Object. Received type " + typeof value
|
162
|
+
);
|
163
|
+
} else if (Buffer._isInstance(value, ArrayBuffer) || value && Buffer._isInstance(value.buffer, ArrayBuffer)) {
|
164
|
+
const array = value;
|
165
|
+
const byteOffset = encodingOrOffset;
|
166
|
+
if (byteOffset < 0 || array.byteLength < byteOffset) {
|
167
|
+
throw new RangeError("offset is outside of buffer bounds");
|
168
|
+
}
|
169
|
+
if (array.byteLength < byteOffset + (length || 0)) {
|
170
|
+
throw new RangeError("length is outside of buffer bounds");
|
171
|
+
}
|
172
|
+
super(new Uint8Array(array, byteOffset, length));
|
173
|
+
} else if (Array.isArray(value)) {
|
174
|
+
const array = value;
|
175
|
+
const length2 = array.length < 0 ? 0 : Buffer._checked(array.length) | 0;
|
176
|
+
super(new Uint8Array(length2));
|
177
|
+
for (let i = 0; i < length2; i++) {
|
178
|
+
this[i] = array[i] & 255;
|
179
|
+
}
|
180
|
+
} else {
|
181
|
+
throw new TypeError("Unable to determine the correct way to allocate buffer for type " + typeof value);
|
182
|
+
}
|
183
|
+
}
|
184
|
+
/**
|
185
|
+
* Return JSON representation of the buffer.
|
186
|
+
*/
|
187
|
+
toJSON() {
|
188
|
+
return {
|
189
|
+
type: "Buffer",
|
190
|
+
data: Array.prototype.slice.call(this)
|
191
|
+
};
|
192
|
+
}
|
193
|
+
/**
|
194
|
+
* Writes `string` to the buffer at `offset` according to the character encoding in `encoding`. The `length`
|
195
|
+
* parameter is the number of bytes to write. If the buffer does not contain enough space to fit the entire string,
|
196
|
+
* only part of `string` will be written. However, partially encoded characters will not be written.
|
197
|
+
*
|
198
|
+
* @param string String to write to `buf`.
|
199
|
+
* @param offset Number of bytes to skip before starting to write `string`. Default: `0`.
|
200
|
+
* @param length Maximum number of bytes to write: Default: `buf.length - offset`.
|
201
|
+
* @param encoding The character encoding of `string`. Default: `utf8`.
|
202
|
+
*/
|
203
|
+
write(string, offset, length, encoding) {
|
204
|
+
if (typeof offset === "undefined") {
|
205
|
+
encoding = "utf8";
|
206
|
+
length = this.length;
|
207
|
+
offset = 0;
|
208
|
+
} else if (typeof length === "undefined" && typeof offset === "string") {
|
209
|
+
encoding = offset;
|
210
|
+
length = this.length;
|
211
|
+
offset = 0;
|
212
|
+
} else if (typeof offset === "number" && isFinite(offset)) {
|
213
|
+
offset = offset >>> 0;
|
214
|
+
if (typeof length === "number" && isFinite(length)) {
|
215
|
+
length = length >>> 0;
|
216
|
+
encoding ?? (encoding = "utf8");
|
217
|
+
} else if (typeof length === "string") {
|
218
|
+
encoding = length;
|
219
|
+
length = void 0;
|
220
|
+
}
|
221
|
+
} else {
|
222
|
+
throw new Error("Buffer.write(string, encoding, offset[, length]) is no longer supported");
|
223
|
+
}
|
224
|
+
const remaining = this.length - offset;
|
225
|
+
if (typeof length === "undefined" || length > remaining) {
|
226
|
+
length = remaining;
|
227
|
+
}
|
228
|
+
if (string.length > 0 && (length < 0 || offset < 0) || offset > this.length) {
|
229
|
+
throw new RangeError("Attempt to write outside buffer bounds");
|
230
|
+
}
|
231
|
+
encoding || (encoding = "utf8");
|
232
|
+
switch (Buffer._getEncoding(encoding)) {
|
233
|
+
case "hex":
|
234
|
+
return Buffer._hexWrite(this, string, offset, length);
|
235
|
+
case "utf8":
|
236
|
+
return Buffer._utf8Write(this, string, offset, length);
|
237
|
+
case "ascii":
|
238
|
+
case "latin1":
|
239
|
+
case "binary":
|
240
|
+
return Buffer._asciiWrite(this, string, offset, length);
|
241
|
+
case "ucs2":
|
242
|
+
case "utf16le":
|
243
|
+
return Buffer._ucs2Write(this, string, offset, length);
|
244
|
+
case "base64":
|
245
|
+
return Buffer._base64Write(this, string, offset, length);
|
246
|
+
}
|
247
|
+
}
|
248
|
+
/**
|
249
|
+
* Decodes the buffer to a string according to the specified character encoding.
|
250
|
+
* Passing `start` and `end` will decode only a subset of the buffer.
|
251
|
+
*
|
252
|
+
* Note that if the encoding is `utf8` and a byte sequence in the input is not valid UTF-8, then each invalid byte
|
253
|
+
* will be replaced with `U+FFFD`.
|
254
|
+
*
|
255
|
+
* @param encoding
|
256
|
+
* @param start
|
257
|
+
* @param end
|
258
|
+
*/
|
259
|
+
toString(encoding, start, end) {
|
260
|
+
const length = this.length;
|
261
|
+
if (length === 0) {
|
262
|
+
return "";
|
263
|
+
}
|
264
|
+
if (arguments.length === 0) {
|
265
|
+
return Buffer._utf8Slice(this, 0, length);
|
266
|
+
}
|
267
|
+
if (typeof start === "undefined" || start < 0) {
|
268
|
+
start = 0;
|
269
|
+
}
|
270
|
+
if (start > this.length) {
|
271
|
+
return "";
|
272
|
+
}
|
273
|
+
if (typeof end === "undefined" || end > this.length) {
|
274
|
+
end = this.length;
|
275
|
+
}
|
276
|
+
if (end <= 0) {
|
277
|
+
return "";
|
278
|
+
}
|
279
|
+
end >>>= 0;
|
280
|
+
start >>>= 0;
|
281
|
+
if (end <= start) {
|
282
|
+
return "";
|
283
|
+
}
|
284
|
+
if (!encoding) {
|
285
|
+
encoding = "utf8";
|
286
|
+
}
|
287
|
+
switch (Buffer._getEncoding(encoding)) {
|
288
|
+
case "hex":
|
289
|
+
return Buffer._hexSlice(this, start, end);
|
290
|
+
case "utf8":
|
291
|
+
return Buffer._utf8Slice(this, start, end);
|
292
|
+
case "ascii":
|
293
|
+
return Buffer._asciiSlice(this, start, end);
|
294
|
+
case "latin1":
|
295
|
+
case "binary":
|
296
|
+
return Buffer._latin1Slice(this, start, end);
|
297
|
+
case "ucs2":
|
298
|
+
case "utf16le":
|
299
|
+
return Buffer._utf16leSlice(this, start, end);
|
300
|
+
case "base64":
|
301
|
+
return Buffer._base64Slice(this, start, end);
|
302
|
+
}
|
303
|
+
}
|
304
|
+
/**
|
305
|
+
* Returns true if this buffer's is equal to the provided buffer, meaning they share the same exact data.
|
306
|
+
*
|
307
|
+
* @param otherBuffer
|
308
|
+
*/
|
309
|
+
equals(otherBuffer) {
|
310
|
+
if (!Buffer.isBuffer(otherBuffer)) {
|
311
|
+
throw new TypeError("Argument must be a Buffer");
|
312
|
+
}
|
313
|
+
if (this === otherBuffer) {
|
314
|
+
return true;
|
315
|
+
}
|
316
|
+
return Buffer.compare(this, otherBuffer) === 0;
|
317
|
+
}
|
318
|
+
/**
|
319
|
+
* Compares the buffer with `otherBuffer` and returns a number indicating whether the buffer comes before, after,
|
320
|
+
* or is the same as `otherBuffer` in sort order. Comparison is based on the actual sequence of bytes in each
|
321
|
+
* buffer.
|
322
|
+
*
|
323
|
+
* - `0` is returned if `otherBuffer` is the same as this buffer.
|
324
|
+
* - `1` is returned if `otherBuffer` should come before this buffer when sorted.
|
325
|
+
* - `-1` is returned if `otherBuffer` should come after this buffer when sorted.
|
326
|
+
*
|
327
|
+
* @param otherBuffer The buffer to compare to.
|
328
|
+
* @param targetStart The offset within `otherBuffer` at which to begin comparison.
|
329
|
+
* @param targetEnd The offset within `otherBuffer` at which to end comparison (exclusive).
|
330
|
+
* @param sourceStart The offset within this buffer at which to begin comparison.
|
331
|
+
* @param sourceEnd The offset within this buffer at which to end the comparison (exclusive).
|
332
|
+
*/
|
333
|
+
compare(otherBuffer, targetStart, targetEnd, sourceStart, sourceEnd) {
|
334
|
+
if (Buffer._isInstance(otherBuffer, Uint8Array)) {
|
335
|
+
otherBuffer = Buffer.from(otherBuffer, otherBuffer.byteOffset, otherBuffer.byteLength);
|
336
|
+
}
|
337
|
+
if (!Buffer.isBuffer(otherBuffer)) {
|
338
|
+
throw new TypeError("Argument must be a Buffer or Uint8Array");
|
339
|
+
}
|
340
|
+
targetStart ?? (targetStart = 0);
|
341
|
+
targetEnd ?? (targetEnd = otherBuffer ? otherBuffer.length : 0);
|
342
|
+
sourceStart ?? (sourceStart = 0);
|
343
|
+
sourceEnd ?? (sourceEnd = this.length);
|
344
|
+
if (targetStart < 0 || targetEnd > otherBuffer.length || sourceStart < 0 || sourceEnd > this.length) {
|
345
|
+
throw new RangeError("Out of range index");
|
346
|
+
}
|
347
|
+
if (sourceStart >= sourceEnd && targetStart >= targetEnd) {
|
348
|
+
return 0;
|
349
|
+
}
|
350
|
+
if (sourceStart >= sourceEnd) {
|
351
|
+
return -1;
|
352
|
+
}
|
353
|
+
if (targetStart >= targetEnd) {
|
354
|
+
return 1;
|
355
|
+
}
|
356
|
+
targetStart >>>= 0;
|
357
|
+
targetEnd >>>= 0;
|
358
|
+
sourceStart >>>= 0;
|
359
|
+
sourceEnd >>>= 0;
|
360
|
+
if (this === otherBuffer) {
|
361
|
+
return 0;
|
362
|
+
}
|
363
|
+
let x = sourceEnd - sourceStart;
|
364
|
+
let y = targetEnd - targetStart;
|
365
|
+
const len = Math.min(x, y);
|
366
|
+
const thisCopy = this.slice(sourceStart, sourceEnd);
|
367
|
+
const targetCopy = otherBuffer.slice(targetStart, targetEnd);
|
368
|
+
for (let i = 0; i < len; ++i) {
|
369
|
+
if (thisCopy[i] !== targetCopy[i]) {
|
370
|
+
x = thisCopy[i];
|
371
|
+
y = targetCopy[i];
|
372
|
+
break;
|
373
|
+
}
|
374
|
+
}
|
375
|
+
if (x < y) return -1;
|
376
|
+
if (y < x) return 1;
|
377
|
+
return 0;
|
378
|
+
}
|
379
|
+
/**
|
380
|
+
* Copies data from a region of this buffer to a region in `targetBuffer`, even if the `targetBuffer` memory
|
381
|
+
* region overlaps with this buffer.
|
382
|
+
*
|
383
|
+
* @param targetBuffer The target buffer to copy into.
|
384
|
+
* @param targetStart The offset within `targetBuffer` at which to begin writing.
|
385
|
+
* @param sourceStart The offset within this buffer at which to begin copying.
|
386
|
+
* @param sourceEnd The offset within this buffer at which to end copying (exclusive).
|
387
|
+
*/
|
388
|
+
copy(targetBuffer, targetStart, sourceStart, sourceEnd) {
|
389
|
+
if (!Buffer.isBuffer(targetBuffer)) throw new TypeError("argument should be a Buffer");
|
390
|
+
if (!sourceStart) sourceStart = 0;
|
391
|
+
if (!targetStart) targetStart = 0;
|
392
|
+
if (!sourceEnd && sourceEnd !== 0) sourceEnd = this.length;
|
393
|
+
if (targetStart >= targetBuffer.length) targetStart = targetBuffer.length;
|
394
|
+
if (!targetStart) targetStart = 0;
|
395
|
+
if (sourceEnd > 0 && sourceEnd < sourceStart) sourceEnd = sourceStart;
|
396
|
+
if (sourceEnd === sourceStart) return 0;
|
397
|
+
if (targetBuffer.length === 0 || this.length === 0) return 0;
|
398
|
+
if (targetStart < 0) {
|
399
|
+
throw new RangeError("targetStart out of bounds");
|
400
|
+
}
|
401
|
+
if (sourceStart < 0 || sourceStart >= this.length) throw new RangeError("Index out of range");
|
402
|
+
if (sourceEnd < 0) throw new RangeError("sourceEnd out of bounds");
|
403
|
+
if (sourceEnd > this.length) sourceEnd = this.length;
|
404
|
+
if (targetBuffer.length - targetStart < sourceEnd - sourceStart) {
|
405
|
+
sourceEnd = targetBuffer.length - targetStart + sourceStart;
|
406
|
+
}
|
407
|
+
const len = sourceEnd - sourceStart;
|
408
|
+
if (this === targetBuffer && typeof Uint8Array.prototype.copyWithin === "function") {
|
409
|
+
this.copyWithin(targetStart, sourceStart, sourceEnd);
|
410
|
+
} else {
|
411
|
+
Uint8Array.prototype.set.call(targetBuffer, this.subarray(sourceStart, sourceEnd), targetStart);
|
412
|
+
}
|
413
|
+
return len;
|
414
|
+
}
|
415
|
+
/**
|
416
|
+
* Returns a new `Buffer` that references the same memory as the original, but offset and cropped by the `start`
|
417
|
+
* and `end` indices. This is the same behavior as `buf.subarray()`.
|
418
|
+
*
|
419
|
+
* This method is not compatible with the `Uint8Array.prototype.slice()`, which is a superclass of Buffer. To copy
|
420
|
+
* the slice, use `Uint8Array.prototype.slice()`.
|
421
|
+
*
|
422
|
+
* @param start
|
423
|
+
* @param end
|
424
|
+
*/
|
425
|
+
slice(start, end) {
|
426
|
+
if (!start) {
|
427
|
+
start = 0;
|
428
|
+
}
|
429
|
+
const len = this.length;
|
430
|
+
start = ~~start;
|
431
|
+
end = end === void 0 ? len : ~~end;
|
432
|
+
if (start < 0) {
|
433
|
+
start += len;
|
434
|
+
if (start < 0) {
|
435
|
+
start = 0;
|
436
|
+
}
|
437
|
+
} else if (start > len) {
|
438
|
+
start = len;
|
439
|
+
}
|
440
|
+
if (end < 0) {
|
441
|
+
end += len;
|
442
|
+
if (end < 0) {
|
443
|
+
end = 0;
|
444
|
+
}
|
445
|
+
} else if (end > len) {
|
446
|
+
end = len;
|
447
|
+
}
|
448
|
+
if (end < start) {
|
449
|
+
end = start;
|
450
|
+
}
|
451
|
+
const newBuf = this.subarray(start, end);
|
452
|
+
Object.setPrototypeOf(newBuf, Buffer.prototype);
|
453
|
+
return newBuf;
|
454
|
+
}
|
455
|
+
/**
|
456
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits
|
457
|
+
* of accuracy. Behavior is undefined when value is anything other than an unsigned integer.
|
458
|
+
*
|
459
|
+
* @param value Number to write.
|
460
|
+
* @param offset Number of bytes to skip before starting to write.
|
461
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
462
|
+
* @param noAssert
|
463
|
+
* @returns `offset` plus the number of bytes written.
|
464
|
+
*/
|
465
|
+
writeUIntLE(value, offset, byteLength, noAssert) {
|
466
|
+
value = +value;
|
467
|
+
offset = offset >>> 0;
|
468
|
+
byteLength = byteLength >>> 0;
|
469
|
+
if (!noAssert) {
|
470
|
+
const maxBytes = Math.pow(2, 8 * byteLength) - 1;
|
471
|
+
Buffer._checkInt(this, value, offset, byteLength, maxBytes, 0);
|
472
|
+
}
|
473
|
+
let mul = 1;
|
474
|
+
let i = 0;
|
475
|
+
this[offset] = value & 255;
|
476
|
+
while (++i < byteLength && (mul *= 256)) {
|
477
|
+
this[offset + i] = value / mul & 255;
|
478
|
+
}
|
479
|
+
return offset + byteLength;
|
480
|
+
}
|
481
|
+
/**
|
482
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits of
|
483
|
+
* accuracy. Behavior is undefined when `value` is anything other than an unsigned integer.
|
484
|
+
*
|
485
|
+
* @param value Number to write.
|
486
|
+
* @param offset Number of bytes to skip before starting to write.
|
487
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
488
|
+
* @param noAssert
|
489
|
+
* @returns `offset` plus the number of bytes written.
|
490
|
+
*/
|
491
|
+
writeUIntBE(value, offset, byteLength, noAssert) {
|
492
|
+
value = +value;
|
493
|
+
offset = offset >>> 0;
|
494
|
+
byteLength = byteLength >>> 0;
|
495
|
+
if (!noAssert) {
|
496
|
+
const maxBytes = Math.pow(2, 8 * byteLength) - 1;
|
497
|
+
Buffer._checkInt(this, value, offset, byteLength, maxBytes, 0);
|
498
|
+
}
|
499
|
+
let i = byteLength - 1;
|
500
|
+
let mul = 1;
|
501
|
+
this[offset + i] = value & 255;
|
502
|
+
while (--i >= 0 && (mul *= 256)) {
|
503
|
+
this[offset + i] = value / mul & 255;
|
504
|
+
}
|
505
|
+
return offset + byteLength;
|
506
|
+
}
|
507
|
+
/**
|
508
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits
|
509
|
+
* of accuracy. Behavior is undefined when `value` is anything other than a signed integer.
|
510
|
+
*
|
511
|
+
* @param value Number to write.
|
512
|
+
* @param offset Number of bytes to skip before starting to write.
|
513
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
514
|
+
* @param noAssert
|
515
|
+
* @returns `offset` plus the number of bytes written.
|
516
|
+
*/
|
517
|
+
writeIntLE(value, offset, byteLength, noAssert) {
|
518
|
+
value = +value;
|
519
|
+
offset = offset >>> 0;
|
520
|
+
if (!noAssert) {
|
521
|
+
const limit = Math.pow(2, 8 * byteLength - 1);
|
522
|
+
Buffer._checkInt(this, value, offset, byteLength, limit - 1, -limit);
|
523
|
+
}
|
524
|
+
let i = 0;
|
525
|
+
let mul = 1;
|
526
|
+
let sub = 0;
|
527
|
+
this[offset] = value & 255;
|
528
|
+
while (++i < byteLength && (mul *= 256)) {
|
529
|
+
if (value < 0 && sub === 0 && this[offset + i - 1] !== 0) {
|
530
|
+
sub = 1;
|
531
|
+
}
|
532
|
+
this[offset + i] = (value / mul >> 0) - sub & 255;
|
533
|
+
}
|
534
|
+
return offset + byteLength;
|
535
|
+
}
|
536
|
+
/**
|
537
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits
|
538
|
+
* of accuracy. Behavior is undefined when `value` is anything other than a signed integer.
|
539
|
+
*
|
540
|
+
* @param value Number to write.
|
541
|
+
* @param offset Number of bytes to skip before starting to write.
|
542
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
543
|
+
* @param noAssert
|
544
|
+
* @returns `offset` plus the number of bytes written.
|
545
|
+
*/
|
546
|
+
writeIntBE(value, offset, byteLength, noAssert) {
|
547
|
+
value = +value;
|
548
|
+
offset = offset >>> 0;
|
549
|
+
if (!noAssert) {
|
550
|
+
const limit = Math.pow(2, 8 * byteLength - 1);
|
551
|
+
Buffer._checkInt(this, value, offset, byteLength, limit - 1, -limit);
|
552
|
+
}
|
553
|
+
let i = byteLength - 1;
|
554
|
+
let mul = 1;
|
555
|
+
let sub = 0;
|
556
|
+
this[offset + i] = value & 255;
|
557
|
+
while (--i >= 0 && (mul *= 256)) {
|
558
|
+
if (value < 0 && sub === 0 && this[offset + i + 1] !== 0) {
|
559
|
+
sub = 1;
|
560
|
+
}
|
561
|
+
this[offset + i] = (value / mul >> 0) - sub & 255;
|
562
|
+
}
|
563
|
+
return offset + byteLength;
|
564
|
+
}
|
565
|
+
/**
|
566
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an
|
567
|
+
* unsigned, little-endian integer supporting up to 48 bits of accuracy.
|
568
|
+
*
|
569
|
+
* @param offset Number of bytes to skip before starting to read.
|
570
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
571
|
+
* @param noAssert
|
572
|
+
*/
|
573
|
+
readUIntLE(offset, byteLength, noAssert) {
|
574
|
+
offset = offset >>> 0;
|
575
|
+
byteLength = byteLength >>> 0;
|
576
|
+
if (!noAssert) {
|
577
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
578
|
+
}
|
579
|
+
let val = this[offset];
|
580
|
+
let mul = 1;
|
581
|
+
let i = 0;
|
582
|
+
while (++i < byteLength && (mul *= 256)) {
|
583
|
+
val += this[offset + i] * mul;
|
584
|
+
}
|
585
|
+
return val;
|
586
|
+
}
|
587
|
+
/**
|
588
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an
|
589
|
+
* unsigned, big-endian integer supporting up to 48 bits of accuracy.
|
590
|
+
*
|
591
|
+
* @param offset Number of bytes to skip before starting to read.
|
592
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
593
|
+
* @param noAssert
|
594
|
+
*/
|
595
|
+
readUIntBE(offset, byteLength, noAssert) {
|
596
|
+
offset = offset >>> 0;
|
597
|
+
byteLength = byteLength >>> 0;
|
598
|
+
if (!noAssert) {
|
599
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
600
|
+
}
|
601
|
+
let val = this[offset + --byteLength];
|
602
|
+
let mul = 1;
|
603
|
+
while (byteLength > 0 && (mul *= 256)) {
|
604
|
+
val += this[offset + --byteLength] * mul;
|
605
|
+
}
|
606
|
+
return val;
|
607
|
+
}
|
608
|
+
/**
|
609
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a
|
610
|
+
* little-endian, two's complement signed value supporting up to 48 bits of accuracy.
|
611
|
+
*
|
612
|
+
* @param offset Number of bytes to skip before starting to read.
|
613
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
614
|
+
* @param noAssert
|
615
|
+
*/
|
616
|
+
readIntLE(offset, byteLength, noAssert) {
|
617
|
+
offset = offset >>> 0;
|
618
|
+
byteLength = byteLength >>> 0;
|
619
|
+
if (!noAssert) {
|
620
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
621
|
+
}
|
622
|
+
let val = this[offset];
|
623
|
+
let mul = 1;
|
624
|
+
let i = 0;
|
625
|
+
while (++i < byteLength && (mul *= 256)) {
|
626
|
+
val += this[offset + i] * mul;
|
627
|
+
}
|
628
|
+
mul *= 128;
|
629
|
+
if (val >= mul) {
|
630
|
+
val -= Math.pow(2, 8 * byteLength);
|
631
|
+
}
|
632
|
+
return val;
|
633
|
+
}
|
634
|
+
/**
|
635
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a
|
636
|
+
* big-endian, two's complement signed value supporting up to 48 bits of accuracy.
|
637
|
+
*
|
638
|
+
* @param offset Number of bytes to skip before starting to read.
|
639
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
640
|
+
* @param noAssert
|
641
|
+
*/
|
642
|
+
readIntBE(offset, byteLength, noAssert) {
|
643
|
+
offset = offset >>> 0;
|
644
|
+
byteLength = byteLength >>> 0;
|
645
|
+
if (!noAssert) {
|
646
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
647
|
+
}
|
648
|
+
let i = byteLength;
|
649
|
+
let mul = 1;
|
650
|
+
let val = this[offset + --i];
|
651
|
+
while (i > 0 && (mul *= 256)) {
|
652
|
+
val += this[offset + --i] * mul;
|
653
|
+
}
|
654
|
+
mul *= 128;
|
655
|
+
if (val >= mul) {
|
656
|
+
val -= Math.pow(2, 8 * byteLength);
|
657
|
+
}
|
658
|
+
return val;
|
659
|
+
}
|
660
|
+
/**
|
661
|
+
* Reads an unsigned 8-bit integer from `buf` at the specified `offset`.
|
662
|
+
*
|
663
|
+
* @param offset Number of bytes to skip before starting to read.
|
664
|
+
* @param noAssert
|
665
|
+
*/
|
666
|
+
readUInt8(offset, noAssert) {
|
667
|
+
offset = offset >>> 0;
|
668
|
+
if (!noAssert) {
|
669
|
+
Buffer._checkOffset(offset, 1, this.length);
|
670
|
+
}
|
671
|
+
return this[offset];
|
672
|
+
}
|
673
|
+
/**
|
674
|
+
* Reads an unsigned, little-endian 16-bit integer from `buf` at the specified `offset`.
|
675
|
+
*
|
676
|
+
* @param offset Number of bytes to skip before starting to read.
|
677
|
+
* @param noAssert
|
678
|
+
*/
|
679
|
+
readUInt16LE(offset, noAssert) {
|
680
|
+
offset = offset >>> 0;
|
681
|
+
if (!noAssert) {
|
682
|
+
Buffer._checkOffset(offset, 2, this.length);
|
683
|
+
}
|
684
|
+
return this[offset] | this[offset + 1] << 8;
|
685
|
+
}
|
686
|
+
/**
|
687
|
+
* Reads an unsigned, big-endian 16-bit integer from `buf` at the specified `offset`.
|
688
|
+
*
|
689
|
+
* @param offset Number of bytes to skip before starting to read.
|
690
|
+
* @param noAssert
|
691
|
+
*/
|
692
|
+
readUInt16BE(offset, noAssert) {
|
693
|
+
offset = offset >>> 0;
|
694
|
+
if (!noAssert) {
|
695
|
+
Buffer._checkOffset(offset, 2, this.length);
|
696
|
+
}
|
697
|
+
return this[offset] << 8 | this[offset + 1];
|
698
|
+
}
|
699
|
+
/**
|
700
|
+
* Reads an unsigned, little-endian 32-bit integer from `buf` at the specified `offset`.
|
701
|
+
*
|
702
|
+
* @param offset Number of bytes to skip before starting to read.
|
703
|
+
* @param noAssert
|
704
|
+
*/
|
705
|
+
readUInt32LE(offset, noAssert) {
|
706
|
+
offset = offset >>> 0;
|
707
|
+
if (!noAssert) {
|
708
|
+
Buffer._checkOffset(offset, 4, this.length);
|
709
|
+
}
|
710
|
+
return (this[offset] | this[offset + 1] << 8 | this[offset + 2] << 16) + this[offset + 3] * 16777216;
|
711
|
+
}
|
712
|
+
/**
|
713
|
+
* Reads an unsigned, big-endian 32-bit integer from `buf` at the specified `offset`.
|
714
|
+
*
|
715
|
+
* @param offset Number of bytes to skip before starting to read.
|
716
|
+
* @param noAssert
|
717
|
+
*/
|
718
|
+
readUInt32BE(offset, noAssert) {
|
719
|
+
offset = offset >>> 0;
|
720
|
+
if (!noAssert) {
|
721
|
+
Buffer._checkOffset(offset, 4, this.length);
|
722
|
+
}
|
723
|
+
return this[offset] * 16777216 + (this[offset + 1] << 16 | this[offset + 2] << 8 | this[offset + 3]);
|
724
|
+
}
|
725
|
+
/**
|
726
|
+
* Reads a signed 8-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer` are interpreted
|
727
|
+
* as two's complement signed values.
|
728
|
+
*
|
729
|
+
* @param offset Number of bytes to skip before starting to read.
|
730
|
+
* @param noAssert
|
731
|
+
*/
|
732
|
+
readInt8(offset, noAssert) {
|
733
|
+
offset = offset >>> 0;
|
734
|
+
if (!noAssert) {
|
735
|
+
Buffer._checkOffset(offset, 1, this.length);
|
736
|
+
}
|
737
|
+
if (!(this[offset] & 128)) {
|
738
|
+
return this[offset];
|
739
|
+
}
|
740
|
+
return (255 - this[offset] + 1) * -1;
|
741
|
+
}
|
742
|
+
/**
|
743
|
+
* Reads a signed, little-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
744
|
+
* are interpreted as two's complement signed values.
|
745
|
+
*
|
746
|
+
* @param offset Number of bytes to skip before starting to read.
|
747
|
+
* @param noAssert
|
748
|
+
*/
|
749
|
+
readInt16LE(offset, noAssert) {
|
750
|
+
offset = offset >>> 0;
|
751
|
+
if (!noAssert) {
|
752
|
+
Buffer._checkOffset(offset, 2, this.length);
|
753
|
+
}
|
754
|
+
const val = this[offset] | this[offset + 1] << 8;
|
755
|
+
return val & 32768 ? val | 4294901760 : val;
|
756
|
+
}
|
757
|
+
/**
|
758
|
+
* Reads a signed, big-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
759
|
+
* are interpreted as two's complement signed values.
|
760
|
+
*
|
761
|
+
* @param offset Number of bytes to skip before starting to read.
|
762
|
+
* @param noAssert
|
763
|
+
*/
|
764
|
+
readInt16BE(offset, noAssert) {
|
765
|
+
offset = offset >>> 0;
|
766
|
+
if (!noAssert) {
|
767
|
+
Buffer._checkOffset(offset, 2, this.length);
|
768
|
+
}
|
769
|
+
const val = this[offset + 1] | this[offset] << 8;
|
770
|
+
return val & 32768 ? val | 4294901760 : val;
|
771
|
+
}
|
772
|
+
/**
|
773
|
+
* Reads a signed, little-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
774
|
+
* are interpreted as two's complement signed values.
|
775
|
+
*
|
776
|
+
* @param offset Number of bytes to skip before starting to read.
|
777
|
+
* @param noAssert
|
778
|
+
*/
|
779
|
+
readInt32LE(offset, noAssert) {
|
780
|
+
offset = offset >>> 0;
|
781
|
+
if (!noAssert) {
|
782
|
+
Buffer._checkOffset(offset, 4, this.length);
|
783
|
+
}
|
784
|
+
return this[offset] | this[offset + 1] << 8 | this[offset + 2] << 16 | this[offset + 3] << 24;
|
785
|
+
}
|
786
|
+
/**
|
787
|
+
* Reads a signed, big-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
788
|
+
* are interpreted as two's complement signed values.
|
789
|
+
*
|
790
|
+
* @param offset Number of bytes to skip before starting to read.
|
791
|
+
* @param noAssert
|
792
|
+
*/
|
793
|
+
readInt32BE(offset, noAssert) {
|
794
|
+
offset = offset >>> 0;
|
795
|
+
if (!noAssert) {
|
796
|
+
Buffer._checkOffset(offset, 4, this.length);
|
797
|
+
}
|
798
|
+
return this[offset] << 24 | this[offset + 1] << 16 | this[offset + 2] << 8 | this[offset + 3];
|
799
|
+
}
|
800
|
+
/**
|
801
|
+
* Interprets `buf` as an array of unsigned 16-bit integers and swaps the byte order in-place.
|
802
|
+
* Throws a `RangeError` if `buf.length` is not a multiple of 2.
|
803
|
+
*/
|
804
|
+
swap16() {
|
805
|
+
const len = this.length;
|
806
|
+
if (len % 2 !== 0) {
|
807
|
+
throw new RangeError("Buffer size must be a multiple of 16-bits");
|
808
|
+
}
|
809
|
+
for (let i = 0; i < len; i += 2) {
|
810
|
+
this._swap(this, i, i + 1);
|
811
|
+
}
|
812
|
+
return this;
|
813
|
+
}
|
814
|
+
/**
|
815
|
+
* Interprets `buf` as an array of unsigned 32-bit integers and swaps the byte order in-place.
|
816
|
+
* Throws a `RangeError` if `buf.length` is not a multiple of 4.
|
817
|
+
*/
|
818
|
+
swap32() {
|
819
|
+
const len = this.length;
|
820
|
+
if (len % 4 !== 0) {
|
821
|
+
throw new RangeError("Buffer size must be a multiple of 32-bits");
|
822
|
+
}
|
823
|
+
for (let i = 0; i < len; i += 4) {
|
824
|
+
this._swap(this, i, i + 3);
|
825
|
+
this._swap(this, i + 1, i + 2);
|
826
|
+
}
|
827
|
+
return this;
|
828
|
+
}
|
829
|
+
/**
|
830
|
+
* Interprets `buf` as an array of unsigned 64-bit integers and swaps the byte order in-place.
|
831
|
+
* Throws a `RangeError` if `buf.length` is not a multiple of 8.
|
832
|
+
*/
|
833
|
+
swap64() {
|
834
|
+
const len = this.length;
|
835
|
+
if (len % 8 !== 0) {
|
836
|
+
throw new RangeError("Buffer size must be a multiple of 64-bits");
|
837
|
+
}
|
838
|
+
for (let i = 0; i < len; i += 8) {
|
839
|
+
this._swap(this, i, i + 7);
|
840
|
+
this._swap(this, i + 1, i + 6);
|
841
|
+
this._swap(this, i + 2, i + 5);
|
842
|
+
this._swap(this, i + 3, i + 4);
|
843
|
+
}
|
844
|
+
return this;
|
845
|
+
}
|
846
|
+
/**
|
847
|
+
* Swaps two octets.
|
848
|
+
*
|
849
|
+
* @param b
|
850
|
+
* @param n
|
851
|
+
* @param m
|
852
|
+
*/
|
853
|
+
_swap(b, n, m) {
|
854
|
+
const i = b[n];
|
855
|
+
b[n] = b[m];
|
856
|
+
b[m] = i;
|
857
|
+
}
|
858
|
+
/**
|
859
|
+
* Writes `value` to `buf` at the specified `offset`. The `value` must be a valid unsigned 8-bit integer.
|
860
|
+
* Behavior is undefined when `value` is anything other than an unsigned 8-bit integer.
|
861
|
+
*
|
862
|
+
* @param value Number to write.
|
863
|
+
* @param offset Number of bytes to skip before starting to write.
|
864
|
+
* @param noAssert
|
865
|
+
* @returns `offset` plus the number of bytes written.
|
866
|
+
*/
|
867
|
+
writeUInt8(value, offset, noAssert) {
|
868
|
+
value = +value;
|
869
|
+
offset = offset >>> 0;
|
870
|
+
if (!noAssert) {
|
871
|
+
Buffer._checkInt(this, value, offset, 1, 255, 0);
|
872
|
+
}
|
873
|
+
this[offset] = value & 255;
|
874
|
+
return offset + 1;
|
875
|
+
}
|
876
|
+
/**
|
877
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 16-bit
|
878
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 16-bit integer.
|
879
|
+
*
|
880
|
+
* @param value Number to write.
|
881
|
+
* @param offset Number of bytes to skip before starting to write.
|
882
|
+
* @param noAssert
|
883
|
+
* @returns `offset` plus the number of bytes written.
|
884
|
+
*/
|
885
|
+
writeUInt16LE(value, offset, noAssert) {
|
886
|
+
value = +value;
|
887
|
+
offset = offset >>> 0;
|
888
|
+
if (!noAssert) {
|
889
|
+
Buffer._checkInt(this, value, offset, 2, 65535, 0);
|
890
|
+
}
|
891
|
+
this[offset] = value & 255;
|
892
|
+
this[offset + 1] = value >>> 8;
|
893
|
+
return offset + 2;
|
894
|
+
}
|
895
|
+
/**
|
896
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 16-bit
|
897
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 16-bit integer.
|
898
|
+
*
|
899
|
+
* @param value Number to write.
|
900
|
+
* @param offset Number of bytes to skip before starting to write.
|
901
|
+
* @param noAssert
|
902
|
+
* @returns `offset` plus the number of bytes written.
|
903
|
+
*/
|
904
|
+
writeUInt16BE(value, offset, noAssert) {
|
905
|
+
value = +value;
|
906
|
+
offset = offset >>> 0;
|
907
|
+
if (!noAssert) {
|
908
|
+
Buffer._checkInt(this, value, offset, 2, 65535, 0);
|
909
|
+
}
|
910
|
+
this[offset] = value >>> 8;
|
911
|
+
this[offset + 1] = value & 255;
|
912
|
+
return offset + 2;
|
913
|
+
}
|
914
|
+
/**
|
915
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 32-bit
|
916
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 32-bit integer.
|
917
|
+
*
|
918
|
+
* @param value Number to write.
|
919
|
+
* @param offset Number of bytes to skip before starting to write.
|
920
|
+
* @param noAssert
|
921
|
+
* @returns `offset` plus the number of bytes written.
|
922
|
+
*/
|
923
|
+
writeUInt32LE(value, offset, noAssert) {
|
924
|
+
value = +value;
|
925
|
+
offset = offset >>> 0;
|
926
|
+
if (!noAssert) {
|
927
|
+
Buffer._checkInt(this, value, offset, 4, 4294967295, 0);
|
928
|
+
}
|
929
|
+
this[offset + 3] = value >>> 24;
|
930
|
+
this[offset + 2] = value >>> 16;
|
931
|
+
this[offset + 1] = value >>> 8;
|
932
|
+
this[offset] = value & 255;
|
933
|
+
return offset + 4;
|
934
|
+
}
|
935
|
+
/**
|
936
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 32-bit
|
937
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 32-bit integer.
|
938
|
+
*
|
939
|
+
* @param value Number to write.
|
940
|
+
* @param offset Number of bytes to skip before starting to write.
|
941
|
+
* @param noAssert
|
942
|
+
* @returns `offset` plus the number of bytes written.
|
943
|
+
*/
|
944
|
+
writeUInt32BE(value, offset, noAssert) {
|
945
|
+
value = +value;
|
946
|
+
offset = offset >>> 0;
|
947
|
+
if (!noAssert) {
|
948
|
+
Buffer._checkInt(this, value, offset, 4, 4294967295, 0);
|
949
|
+
}
|
950
|
+
this[offset] = value >>> 24;
|
951
|
+
this[offset + 1] = value >>> 16;
|
952
|
+
this[offset + 2] = value >>> 8;
|
953
|
+
this[offset + 3] = value & 255;
|
954
|
+
return offset + 4;
|
955
|
+
}
|
956
|
+
/**
|
957
|
+
* Writes `value` to `buf` at the specified `offset`. The `value` must be a valid signed 8-bit integer.
|
958
|
+
* Behavior is undefined when `value` is anything other than a signed 8-bit integer.
|
959
|
+
*
|
960
|
+
* @param value Number to write.
|
961
|
+
* @param offset Number of bytes to skip before starting to write.
|
962
|
+
* @param noAssert
|
963
|
+
* @returns `offset` plus the number of bytes written.
|
964
|
+
*/
|
965
|
+
writeInt8(value, offset, noAssert) {
|
966
|
+
value = +value;
|
967
|
+
offset = offset >>> 0;
|
968
|
+
if (!noAssert) {
|
969
|
+
Buffer._checkInt(this, value, offset, 1, 127, -128);
|
970
|
+
}
|
971
|
+
if (value < 0) {
|
972
|
+
value = 255 + value + 1;
|
973
|
+
}
|
974
|
+
this[offset] = value & 255;
|
975
|
+
return offset + 1;
|
976
|
+
}
|
977
|
+
/**
|
978
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 16-bit
|
979
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 16-bit integer.
|
980
|
+
*
|
981
|
+
* @param value Number to write.
|
982
|
+
* @param offset Number of bytes to skip before starting to write.
|
983
|
+
* @param noAssert
|
984
|
+
* @returns `offset` plus the number of bytes written.
|
985
|
+
*/
|
986
|
+
writeInt16LE(value, offset, noAssert) {
|
987
|
+
value = +value;
|
988
|
+
offset = offset >>> 0;
|
989
|
+
if (!noAssert) {
|
990
|
+
Buffer._checkInt(this, value, offset, 2, 32767, -32768);
|
991
|
+
}
|
992
|
+
this[offset] = value & 255;
|
993
|
+
this[offset + 1] = value >>> 8;
|
994
|
+
return offset + 2;
|
995
|
+
}
|
996
|
+
/**
|
997
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 16-bit
|
998
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 16-bit integer.
|
999
|
+
*
|
1000
|
+
* @param value Number to write.
|
1001
|
+
* @param offset Number of bytes to skip before starting to write.
|
1002
|
+
* @param noAssert
|
1003
|
+
* @returns `offset` plus the number of bytes written.
|
1004
|
+
*/
|
1005
|
+
writeInt16BE(value, offset, noAssert) {
|
1006
|
+
value = +value;
|
1007
|
+
offset = offset >>> 0;
|
1008
|
+
if (!noAssert) {
|
1009
|
+
Buffer._checkInt(this, value, offset, 2, 32767, -32768);
|
1010
|
+
}
|
1011
|
+
this[offset] = value >>> 8;
|
1012
|
+
this[offset + 1] = value & 255;
|
1013
|
+
return offset + 2;
|
1014
|
+
}
|
1015
|
+
/**
|
1016
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 32-bit
|
1017
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 32-bit integer.
|
1018
|
+
*
|
1019
|
+
* @param value Number to write.
|
1020
|
+
* @param offset Number of bytes to skip before starting to write.
|
1021
|
+
* @param noAssert
|
1022
|
+
* @returns `offset` plus the number of bytes written.
|
1023
|
+
*/
|
1024
|
+
writeInt32LE(value, offset, noAssert) {
|
1025
|
+
value = +value;
|
1026
|
+
offset = offset >>> 0;
|
1027
|
+
if (!noAssert) {
|
1028
|
+
Buffer._checkInt(this, value, offset, 4, 2147483647, -2147483648);
|
1029
|
+
}
|
1030
|
+
this[offset] = value & 255;
|
1031
|
+
this[offset + 1] = value >>> 8;
|
1032
|
+
this[offset + 2] = value >>> 16;
|
1033
|
+
this[offset + 3] = value >>> 24;
|
1034
|
+
return offset + 4;
|
1035
|
+
}
|
1036
|
+
/**
|
1037
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 32-bit
|
1038
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 32-bit integer.
|
1039
|
+
*
|
1040
|
+
* @param value Number to write.
|
1041
|
+
* @param offset Number of bytes to skip before starting to write.
|
1042
|
+
* @param noAssert
|
1043
|
+
* @returns `offset` plus the number of bytes written.
|
1044
|
+
*/
|
1045
|
+
writeInt32BE(value, offset, noAssert) {
|
1046
|
+
value = +value;
|
1047
|
+
offset = offset >>> 0;
|
1048
|
+
if (!noAssert) {
|
1049
|
+
Buffer._checkInt(this, value, offset, 4, 2147483647, -2147483648);
|
1050
|
+
}
|
1051
|
+
if (value < 0) {
|
1052
|
+
value = 4294967295 + value + 1;
|
1053
|
+
}
|
1054
|
+
this[offset] = value >>> 24;
|
1055
|
+
this[offset + 1] = value >>> 16;
|
1056
|
+
this[offset + 2] = value >>> 8;
|
1057
|
+
this[offset + 3] = value & 255;
|
1058
|
+
return offset + 4;
|
1059
|
+
}
|
1060
|
+
/**
|
1061
|
+
* Fills `buf` with the specified `value`. If the `offset` and `end` are not given, the entire `buf` will be
|
1062
|
+
* filled. The `value` is coerced to a `uint32` value if it is not a string, `Buffer`, or integer. If the resulting
|
1063
|
+
* integer is greater than `255` (decimal), then `buf` will be filled with `value & 255`.
|
1064
|
+
*
|
1065
|
+
* If the final write of a `fill()` operation falls on a multi-byte character, then only the bytes of that
|
1066
|
+
* character that fit into `buf` are written.
|
1067
|
+
*
|
1068
|
+
* If `value` contains invalid characters, it is truncated; if no valid fill data remains, an exception is thrown.
|
1069
|
+
*
|
1070
|
+
* @param value
|
1071
|
+
* @param encoding
|
1072
|
+
*/
|
1073
|
+
fill(value, offset, end, encoding) {
|
1074
|
+
if (typeof value === "string") {
|
1075
|
+
if (typeof offset === "string") {
|
1076
|
+
encoding = offset;
|
1077
|
+
offset = 0;
|
1078
|
+
end = this.length;
|
1079
|
+
} else if (typeof end === "string") {
|
1080
|
+
encoding = end;
|
1081
|
+
end = this.length;
|
1082
|
+
}
|
1083
|
+
if (encoding !== void 0 && typeof encoding !== "string") {
|
1084
|
+
throw new TypeError("encoding must be a string");
|
1085
|
+
}
|
1086
|
+
if (typeof encoding === "string" && !Buffer.isEncoding(encoding)) {
|
1087
|
+
throw new TypeError("Unknown encoding: " + encoding);
|
1088
|
+
}
|
1089
|
+
if (value.length === 1) {
|
1090
|
+
const code = value.charCodeAt(0);
|
1091
|
+
if (encoding === "utf8" && code < 128) {
|
1092
|
+
value = code;
|
1093
|
+
}
|
1094
|
+
}
|
1095
|
+
} else if (typeof value === "number") {
|
1096
|
+
value = value & 255;
|
1097
|
+
} else if (typeof value === "boolean") {
|
1098
|
+
value = Number(value);
|
1099
|
+
}
|
1100
|
+
offset ?? (offset = 0);
|
1101
|
+
end ?? (end = this.length);
|
1102
|
+
if (offset < 0 || this.length < offset || this.length < end) {
|
1103
|
+
throw new RangeError("Out of range index");
|
1104
|
+
}
|
1105
|
+
if (end <= offset) {
|
1106
|
+
return this;
|
1107
|
+
}
|
1108
|
+
offset = offset >>> 0;
|
1109
|
+
end = end === void 0 ? this.length : end >>> 0;
|
1110
|
+
value || (value = 0);
|
1111
|
+
let i;
|
1112
|
+
if (typeof value === "number") {
|
1113
|
+
for (i = offset; i < end; ++i) {
|
1114
|
+
this[i] = value;
|
1115
|
+
}
|
1116
|
+
} else {
|
1117
|
+
const bytes = Buffer.isBuffer(value) ? value : Buffer.from(value, encoding);
|
1118
|
+
const len = bytes.length;
|
1119
|
+
if (len === 0) {
|
1120
|
+
throw new TypeError('The value "' + value + '" is invalid for argument "value"');
|
1121
|
+
}
|
1122
|
+
for (i = 0; i < end - offset; ++i) {
|
1123
|
+
this[i + offset] = bytes[i % len];
|
1124
|
+
}
|
1125
|
+
}
|
1126
|
+
return this;
|
1127
|
+
}
|
1128
|
+
/**
|
1129
|
+
* Returns the index of the specified value.
|
1130
|
+
*
|
1131
|
+
* If `value` is:
|
1132
|
+
* - a string, `value` is interpreted according to the character encoding in `encoding`.
|
1133
|
+
* - a `Buffer` or `Uint8Array`, `value` will be used in its entirety. To compare a partial Buffer, use `slice()`.
|
1134
|
+
* - a number, `value` will be interpreted as an unsigned 8-bit integer value between `0` and `255`.
|
1135
|
+
*
|
1136
|
+
* Any other types will throw a `TypeError`.
|
1137
|
+
*
|
1138
|
+
* @param value What to search for.
|
1139
|
+
* @param byteOffset Where to begin searching in `buf`. If negative, then calculated from the end.
|
1140
|
+
* @param encoding If `value` is a string, this is the encoding used to search.
|
1141
|
+
* @returns The index of the first occurrence of `value` in `buf`, or `-1` if not found.
|
1142
|
+
*/
|
1143
|
+
indexOf(value, byteOffset, encoding) {
|
1144
|
+
return this._bidirectionalIndexOf(this, value, byteOffset, encoding, true);
|
1145
|
+
}
|
1146
|
+
/**
|
1147
|
+
* Gets the last index of the specified value.
|
1148
|
+
*
|
1149
|
+
* @see indexOf()
|
1150
|
+
* @param value
|
1151
|
+
* @param byteOffset
|
1152
|
+
* @param encoding
|
1153
|
+
*/
|
1154
|
+
lastIndexOf(value, byteOffset, encoding) {
|
1155
|
+
return this._bidirectionalIndexOf(this, value, byteOffset, encoding, false);
|
1156
|
+
}
|
1157
|
+
_bidirectionalIndexOf(buffer, val, byteOffset, encoding, dir) {
|
1158
|
+
if (buffer.length === 0) {
|
1159
|
+
return -1;
|
1160
|
+
}
|
1161
|
+
if (typeof byteOffset === "string") {
|
1162
|
+
encoding = byteOffset;
|
1163
|
+
byteOffset = 0;
|
1164
|
+
} else if (typeof byteOffset === "undefined") {
|
1165
|
+
byteOffset = 0;
|
1166
|
+
} else if (byteOffset > 2147483647) {
|
1167
|
+
byteOffset = 2147483647;
|
1168
|
+
} else if (byteOffset < -2147483648) {
|
1169
|
+
byteOffset = -2147483648;
|
1170
|
+
}
|
1171
|
+
byteOffset = +byteOffset;
|
1172
|
+
if (byteOffset !== byteOffset) {
|
1173
|
+
byteOffset = dir ? 0 : buffer.length - 1;
|
1174
|
+
}
|
1175
|
+
if (byteOffset < 0) {
|
1176
|
+
byteOffset = buffer.length + byteOffset;
|
1177
|
+
}
|
1178
|
+
if (byteOffset >= buffer.length) {
|
1179
|
+
if (dir) {
|
1180
|
+
return -1;
|
1181
|
+
} else {
|
1182
|
+
byteOffset = buffer.length - 1;
|
1183
|
+
}
|
1184
|
+
} else if (byteOffset < 0) {
|
1185
|
+
if (dir) {
|
1186
|
+
byteOffset = 0;
|
1187
|
+
} else {
|
1188
|
+
return -1;
|
1189
|
+
}
|
1190
|
+
}
|
1191
|
+
if (typeof val === "string") {
|
1192
|
+
val = Buffer.from(val, encoding);
|
1193
|
+
}
|
1194
|
+
if (Buffer.isBuffer(val)) {
|
1195
|
+
if (val.length === 0) {
|
1196
|
+
return -1;
|
1197
|
+
}
|
1198
|
+
return Buffer._arrayIndexOf(buffer, val, byteOffset, encoding, dir);
|
1199
|
+
} else if (typeof val === "number") {
|
1200
|
+
val = val & 255;
|
1201
|
+
if (typeof Uint8Array.prototype.indexOf === "function") {
|
1202
|
+
if (dir) {
|
1203
|
+
return Uint8Array.prototype.indexOf.call(buffer, val, byteOffset);
|
1204
|
+
} else {
|
1205
|
+
return Uint8Array.prototype.lastIndexOf.call(buffer, val, byteOffset);
|
1206
|
+
}
|
1207
|
+
}
|
1208
|
+
return Buffer._arrayIndexOf(buffer, Buffer.from([val]), byteOffset, encoding, dir);
|
1209
|
+
}
|
1210
|
+
throw new TypeError("val must be string, number or Buffer");
|
1211
|
+
}
|
1212
|
+
/**
|
1213
|
+
* Equivalent to `buf.indexOf() !== -1`.
|
1214
|
+
*
|
1215
|
+
* @param value
|
1216
|
+
* @param byteOffset
|
1217
|
+
* @param encoding
|
1218
|
+
*/
|
1219
|
+
includes(value, byteOffset, encoding) {
|
1220
|
+
return this.indexOf(value, byteOffset, encoding) !== -1;
|
1221
|
+
}
|
1222
|
+
/**
|
1223
|
+
* Creates a new buffer from the given parameters.
|
1224
|
+
*
|
1225
|
+
* @param data
|
1226
|
+
* @param encoding
|
1227
|
+
*/
|
1228
|
+
static from(a, b, c) {
|
1229
|
+
return new Buffer(a, b, c);
|
1230
|
+
}
|
1231
|
+
/**
|
1232
|
+
* Returns true if `obj` is a Buffer.
|
1233
|
+
*
|
1234
|
+
* @param obj
|
1235
|
+
*/
|
1236
|
+
static isBuffer(obj) {
|
1237
|
+
return obj != null && obj !== Buffer.prototype && Buffer._isInstance(obj, Buffer);
|
1238
|
+
}
|
1239
|
+
/**
|
1240
|
+
* Returns true if `encoding` is a supported encoding.
|
1241
|
+
*
|
1242
|
+
* @param encoding
|
1243
|
+
*/
|
1244
|
+
static isEncoding(encoding) {
|
1245
|
+
switch (encoding.toLowerCase()) {
|
1246
|
+
case "hex":
|
1247
|
+
case "utf8":
|
1248
|
+
case "ascii":
|
1249
|
+
case "binary":
|
1250
|
+
case "latin1":
|
1251
|
+
case "ucs2":
|
1252
|
+
case "utf16le":
|
1253
|
+
case "base64":
|
1254
|
+
return true;
|
1255
|
+
default:
|
1256
|
+
return false;
|
1257
|
+
}
|
1258
|
+
}
|
1259
|
+
/**
|
1260
|
+
* Gives the actual byte length of a string for an encoding. This is not the same as `string.length` since that
|
1261
|
+
* returns the number of characters in the string.
|
1262
|
+
*
|
1263
|
+
* @param string The string to test.
|
1264
|
+
* @param encoding The encoding to use for calculation. Defaults is `utf8`.
|
1265
|
+
*/
|
1266
|
+
static byteLength(string, encoding) {
|
1267
|
+
if (Buffer.isBuffer(string)) {
|
1268
|
+
return string.length;
|
1269
|
+
}
|
1270
|
+
if (typeof string !== "string" && (ArrayBuffer.isView(string) || Buffer._isInstance(string, ArrayBuffer))) {
|
1271
|
+
return string.byteLength;
|
1272
|
+
}
|
1273
|
+
if (typeof string !== "string") {
|
1274
|
+
throw new TypeError(
|
1275
|
+
'The "string" argument must be one of type string, Buffer, or ArrayBuffer. Received type ' + typeof string
|
1276
|
+
);
|
1277
|
+
}
|
1278
|
+
const len = string.length;
|
1279
|
+
const mustMatch = arguments.length > 2 && arguments[2] === true;
|
1280
|
+
if (!mustMatch && len === 0) {
|
1281
|
+
return 0;
|
1282
|
+
}
|
1283
|
+
switch (encoding?.toLowerCase()) {
|
1284
|
+
case "ascii":
|
1285
|
+
case "latin1":
|
1286
|
+
case "binary":
|
1287
|
+
return len;
|
1288
|
+
case "utf8":
|
1289
|
+
return Buffer._utf8ToBytes(string).length;
|
1290
|
+
case "hex":
|
1291
|
+
return len >>> 1;
|
1292
|
+
case "ucs2":
|
1293
|
+
case "utf16le":
|
1294
|
+
return len * 2;
|
1295
|
+
case "base64":
|
1296
|
+
return Buffer._base64ToBytes(string).length;
|
1297
|
+
default:
|
1298
|
+
return mustMatch ? -1 : Buffer._utf8ToBytes(string).length;
|
1299
|
+
}
|
1300
|
+
}
|
1301
|
+
/**
|
1302
|
+
* Returns a Buffer which is the result of concatenating all the buffers in the list together.
|
1303
|
+
*
|
1304
|
+
* - If the list has no items, or if the `totalLength` is 0, then it returns a zero-length buffer.
|
1305
|
+
* - If the list has exactly one item, then the first item is returned.
|
1306
|
+
* - If the list has more than one item, then a new buffer is created.
|
1307
|
+
*
|
1308
|
+
* It is faster to provide the `totalLength` if it is known. However, it will be calculated if not provided at
|
1309
|
+
* a small computational expense.
|
1310
|
+
*
|
1311
|
+
* @param list An array of Buffer objects to concatenate.
|
1312
|
+
* @param totalLength Total length of the buffers when concatenated.
|
1313
|
+
*/
|
1314
|
+
static concat(list, totalLength) {
|
1315
|
+
if (!Array.isArray(list)) {
|
1316
|
+
throw new TypeError('"list" argument must be an Array of Buffers');
|
1317
|
+
}
|
1318
|
+
if (list.length === 0) {
|
1319
|
+
return Buffer.alloc(0);
|
1320
|
+
}
|
1321
|
+
let i;
|
1322
|
+
if (totalLength === void 0) {
|
1323
|
+
totalLength = 0;
|
1324
|
+
for (i = 0; i < list.length; ++i) {
|
1325
|
+
totalLength += list[i].length;
|
1326
|
+
}
|
1327
|
+
}
|
1328
|
+
const buffer = Buffer.allocUnsafe(totalLength);
|
1329
|
+
let pos = 0;
|
1330
|
+
for (i = 0; i < list.length; ++i) {
|
1331
|
+
let buf = list[i];
|
1332
|
+
if (Buffer._isInstance(buf, Uint8Array)) {
|
1333
|
+
if (pos + buf.length > buffer.length) {
|
1334
|
+
if (!Buffer.isBuffer(buf)) {
|
1335
|
+
buf = Buffer.from(buf);
|
1336
|
+
}
|
1337
|
+
buf.copy(buffer, pos);
|
1338
|
+
} else {
|
1339
|
+
Uint8Array.prototype.set.call(buffer, buf, pos);
|
1340
|
+
}
|
1341
|
+
} else if (!Buffer.isBuffer(buf)) {
|
1342
|
+
throw new TypeError('"list" argument must be an Array of Buffers');
|
1343
|
+
} else {
|
1344
|
+
buf.copy(buffer, pos);
|
1345
|
+
}
|
1346
|
+
pos += buf.length;
|
1347
|
+
}
|
1348
|
+
return buffer;
|
1349
|
+
}
|
1350
|
+
/**
|
1351
|
+
* The same as `buf1.compare(buf2)`.
|
1352
|
+
*/
|
1353
|
+
static compare(buf1, buf2) {
|
1354
|
+
if (Buffer._isInstance(buf1, Uint8Array)) {
|
1355
|
+
buf1 = Buffer.from(buf1, buf1.byteOffset, buf1.byteLength);
|
1356
|
+
}
|
1357
|
+
if (Buffer._isInstance(buf2, Uint8Array)) {
|
1358
|
+
buf2 = Buffer.from(buf2, buf2.byteOffset, buf2.byteLength);
|
1359
|
+
}
|
1360
|
+
if (!Buffer.isBuffer(buf1) || !Buffer.isBuffer(buf2)) {
|
1361
|
+
throw new TypeError('The "buf1", "buf2" arguments must be one of type Buffer or Uint8Array');
|
1362
|
+
}
|
1363
|
+
if (buf1 === buf2) {
|
1364
|
+
return 0;
|
1365
|
+
}
|
1366
|
+
let x = buf1.length;
|
1367
|
+
let y = buf2.length;
|
1368
|
+
for (let i = 0, len = Math.min(x, y); i < len; ++i) {
|
1369
|
+
if (buf1[i] !== buf2[i]) {
|
1370
|
+
x = buf1[i];
|
1371
|
+
y = buf2[i];
|
1372
|
+
break;
|
1373
|
+
}
|
1374
|
+
}
|
1375
|
+
if (x < y) {
|
1376
|
+
return -1;
|
1377
|
+
}
|
1378
|
+
if (y < x) {
|
1379
|
+
return 1;
|
1380
|
+
}
|
1381
|
+
return 0;
|
1382
|
+
}
|
1383
|
+
/**
|
1384
|
+
* Allocates a new buffer of `size` octets.
|
1385
|
+
*
|
1386
|
+
* @param size The number of octets to allocate.
|
1387
|
+
* @param fill If specified, the buffer will be initialized by calling `buf.fill(fill)`, or with zeroes otherwise.
|
1388
|
+
* @param encoding The encoding used for the call to `buf.fill()` while initializing.
|
1389
|
+
*/
|
1390
|
+
static alloc(size, fill, encoding) {
|
1391
|
+
if (typeof size !== "number") {
|
1392
|
+
throw new TypeError('"size" argument must be of type number');
|
1393
|
+
} else if (size < 0) {
|
1394
|
+
throw new RangeError('The value "' + size + '" is invalid for option "size"');
|
1395
|
+
}
|
1396
|
+
if (size <= 0) {
|
1397
|
+
return new Buffer(size);
|
1398
|
+
}
|
1399
|
+
if (fill !== void 0) {
|
1400
|
+
return typeof encoding === "string" ? new Buffer(size).fill(fill, 0, size, encoding) : new Buffer(size).fill(fill);
|
1401
|
+
}
|
1402
|
+
return new Buffer(size);
|
1403
|
+
}
|
1404
|
+
/**
|
1405
|
+
* Allocates a new buffer of `size` octets without initializing memory. The contents of the buffer are unknown.
|
1406
|
+
*
|
1407
|
+
* @param size
|
1408
|
+
*/
|
1409
|
+
static allocUnsafe(size) {
|
1410
|
+
if (typeof size !== "number") {
|
1411
|
+
throw new TypeError('"size" argument must be of type number');
|
1412
|
+
} else if (size < 0) {
|
1413
|
+
throw new RangeError('The value "' + size + '" is invalid for option "size"');
|
1414
|
+
}
|
1415
|
+
return new Buffer(size < 0 ? 0 : Buffer._checked(size) | 0);
|
1416
|
+
}
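The alloc/allocUnsafe pair above mirrors Node's own split: alloc hands back initialized memory (zeroed, or filled when a fill value is given), while allocUnsafe skips initialization for speed. Node's built-in Buffer shows the intended behavior for reference; whether a given runtime hits this shim or the native Buffer depends on the bundling target:

```js
import { Buffer } from "node:buffer";

const zeroed = Buffer.alloc(4);         // <Buffer 00 00 00 00>
const filled = Buffer.alloc(4, 0xab);   // <Buffer ab ab ab ab>
const unsafe = Buffer.allocUnsafe(4);   // contents unspecified until written
unsafe.fill(0);                         // zero it explicitly before use

console.log(zeroed, filled, unsafe);
```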
|
1417
|
+
/**
|
1418
|
+
* Returns true if the given `obj` is an instance of `type`.
|
1419
|
+
*
|
1420
|
+
* @param obj
|
1421
|
+
* @param type
|
1422
|
+
*/
|
1423
|
+
static _isInstance(obj, type) {
|
1424
|
+
return obj instanceof type || obj != null && obj.constructor != null && obj.constructor.name != null && obj.constructor.name === type.name;
|
1425
|
+
}
|
1426
|
+
static _checked(length) {
|
1427
|
+
if (length >= K_MAX_LENGTH) {
|
1428
|
+
throw new RangeError(
|
1429
|
+
"Attempt to allocate Buffer larger than maximum size: 0x" + K_MAX_LENGTH.toString(16) + " bytes"
|
1430
|
+
);
|
1431
|
+
}
|
1432
|
+
return length | 0;
|
1433
|
+
}
|
1434
|
+
static _blitBuffer(src, dst, offset, length) {
|
1435
|
+
let i;
|
1436
|
+
for (i = 0; i < length; ++i) {
|
1437
|
+
if (i + offset >= dst.length || i >= src.length) {
|
1438
|
+
break;
|
1439
|
+
}
|
1440
|
+
dst[i + offset] = src[i];
|
1441
|
+
}
|
1442
|
+
return i;
|
1443
|
+
}
|
1444
|
+
static _utf8Write(buf, string, offset, length) {
|
1445
|
+
return Buffer._blitBuffer(Buffer._utf8ToBytes(string, buf.length - offset), buf, offset, length);
|
1446
|
+
}
|
1447
|
+
static _asciiWrite(buf, string, offset, length) {
|
1448
|
+
return Buffer._blitBuffer(Buffer._asciiToBytes(string), buf, offset, length);
|
1449
|
+
}
|
1450
|
+
static _base64Write(buf, string, offset, length) {
|
1451
|
+
return Buffer._blitBuffer(Buffer._base64ToBytes(string), buf, offset, length);
|
1452
|
+
}
|
1453
|
+
static _ucs2Write(buf, string, offset, length) {
|
1454
|
+
return Buffer._blitBuffer(Buffer._utf16leToBytes(string, buf.length - offset), buf, offset, length);
|
1455
|
+
}
|
1456
|
+
static _hexWrite(buf, string, offset, length) {
|
1457
|
+
offset = Number(offset) || 0;
|
1458
|
+
const remaining = buf.length - offset;
|
1459
|
+
if (!length) {
|
1460
|
+
length = remaining;
|
1461
|
+
} else {
|
1462
|
+
length = Number(length);
|
1463
|
+
if (length > remaining) {
|
1464
|
+
length = remaining;
|
1465
|
+
}
|
1466
|
+
}
|
1467
|
+
const strLen = string.length;
|
1468
|
+
if (length > strLen / 2) {
|
1469
|
+
length = strLen / 2;
|
1470
|
+
}
|
1471
|
+
let i;
|
1472
|
+
for (i = 0; i < length; ++i) {
|
1473
|
+
const parsed = parseInt(string.substr(i * 2, 2), 16);
|
1474
|
+
if (parsed !== parsed) {
|
1475
|
+
return i;
|
1476
|
+
}
|
1477
|
+
buf[offset + i] = parsed;
|
1478
|
+
}
|
1479
|
+
return i;
|
1480
|
+
}
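The hex writer above consumes two characters per byte and bails out at the first pair that fails to parse (`parsed !== parsed` is the classic NaN self-inequality check). A small standalone equivalent, with the helper name being ours:

```js
function hexToBytes(str) {
  const out = [];
  for (let i = 0; i + 2 <= str.length; i += 2) {
    const parsed = parseInt(str.slice(i, i + 2), 16);
    if (Number.isNaN(parsed)) break; // same effect as the parsed !== parsed check
    out.push(parsed);
  }
  return out;
}

console.log(hexToBytes("00ff10")); // [0, 255, 16]
console.log(hexToBytes("00zz10")); // [0] - stops at the invalid pair
```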
|
1481
|
+
static _utf8ToBytes(string, units) {
|
1482
|
+
units = units || Infinity;
|
1483
|
+
const length = string.length;
|
1484
|
+
const bytes = [];
|
1485
|
+
let codePoint;
|
1486
|
+
let leadSurrogate = null;
|
1487
|
+
for (let i = 0; i < length; ++i) {
|
1488
|
+
codePoint = string.charCodeAt(i);
|
1489
|
+
if (codePoint > 55295 && codePoint < 57344) {
|
1490
|
+
if (!leadSurrogate) {
|
1491
|
+
if (codePoint > 56319) {
|
1492
|
+
if ((units -= 3) > -1) {
|
1493
|
+
bytes.push(239, 191, 189);
|
1494
|
+
}
|
1495
|
+
continue;
|
1496
|
+
} else if (i + 1 === length) {
|
1497
|
+
if ((units -= 3) > -1) {
|
1498
|
+
bytes.push(239, 191, 189);
|
1499
|
+
}
|
1500
|
+
continue;
|
1501
|
+
}
|
1502
|
+
leadSurrogate = codePoint;
|
1503
|
+
continue;
|
1504
|
+
}
|
1505
|
+
if (codePoint < 56320) {
|
1506
|
+
if ((units -= 3) > -1) {
|
1507
|
+
bytes.push(239, 191, 189);
|
1508
|
+
}
|
1509
|
+
leadSurrogate = codePoint;
|
1510
|
+
continue;
|
1511
|
+
}
|
1512
|
+
codePoint = (leadSurrogate - 55296 << 10 | codePoint - 56320) + 65536;
|
1513
|
+
} else if (leadSurrogate) {
|
1514
|
+
if ((units -= 3) > -1) {
|
1515
|
+
bytes.push(239, 191, 189);
|
1516
|
+
}
|
1517
|
+
}
|
1518
|
+
leadSurrogate = null;
|
1519
|
+
if (codePoint < 128) {
|
1520
|
+
if ((units -= 1) < 0) {
|
1521
|
+
break;
|
1522
|
+
}
|
1523
|
+
bytes.push(codePoint);
|
1524
|
+
} else if (codePoint < 2048) {
|
1525
|
+
if ((units -= 2) < 0) {
|
1526
|
+
break;
|
1527
|
+
}
|
1528
|
+
bytes.push(codePoint >> 6 | 192, codePoint & 63 | 128);
|
1529
|
+
} else if (codePoint < 65536) {
|
1530
|
+
if ((units -= 3) < 0) {
|
1531
|
+
break;
|
1532
|
+
}
|
1533
|
+
bytes.push(codePoint >> 12 | 224, codePoint >> 6 & 63 | 128, codePoint & 63 | 128);
|
1534
|
+
} else if (codePoint < 1114112) {
|
1535
|
+
if ((units -= 4) < 0) {
|
1536
|
+
break;
|
1537
|
+
}
|
1538
|
+
bytes.push(
|
1539
|
+
codePoint >> 18 | 240,
|
1540
|
+
codePoint >> 12 & 63 | 128,
|
1541
|
+
codePoint >> 6 & 63 | 128,
|
1542
|
+
codePoint & 63 | 128
|
1543
|
+
);
|
1544
|
+
} else {
|
1545
|
+
throw new Error("Invalid code point");
|
1546
|
+
}
|
1547
|
+
}
|
1548
|
+
return bytes;
|
1549
|
+
}
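The UTF-8 encoder above also tracks a byte budget (`units`) so it can stop early when writing into a fixed-size target, but the encoding rules themselves are the standard WHATWG ones: one to four bytes per code point, with U+FFFD emitted for lone surrogates. TextEncoder follows the same rules, so it works as a quick reference:

```js
const enc = new TextEncoder();

console.log([...enc.encode("A")]);      // [65]                 1 byte
console.log([...enc.encode("é")]);      // [195, 169]           2 bytes
console.log([...enc.encode("€")]);      // [226, 130, 172]      3 bytes
console.log([...enc.encode("𝄞")]);      // [240, 157, 132, 158] 4 bytes (surrogate pair)
console.log([...enc.encode("\uD800")]); // [239, 191, 189]      lone surrogate -> U+FFFD
```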
|
1550
|
+
static _base64ToBytes(str) {
|
1551
|
+
return toByteArray(base64clean(str));
|
1552
|
+
}
|
1553
|
+
static _asciiToBytes(str) {
|
1554
|
+
const byteArray = [];
|
1555
|
+
for (let i = 0; i < str.length; ++i) {
|
1556
|
+
byteArray.push(str.charCodeAt(i) & 255);
|
1557
|
+
}
|
1558
|
+
return byteArray;
|
1559
|
+
}
|
1560
|
+
static _utf16leToBytes(str, units) {
|
1561
|
+
let c, hi, lo;
|
1562
|
+
const byteArray = [];
|
1563
|
+
for (let i = 0; i < str.length; ++i) {
|
1564
|
+
if ((units -= 2) < 0) break;
|
1565
|
+
c = str.charCodeAt(i);
|
1566
|
+
hi = c >> 8;
|
1567
|
+
lo = c % 256;
|
1568
|
+
byteArray.push(lo);
|
1569
|
+
byteArray.push(hi);
|
1570
|
+
}
|
1571
|
+
return byteArray;
|
1572
|
+
}
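The UTF-16LE helper above pushes the low byte before the high byte for each code unit. A DataView shows the same little-endian layout without any Buffer involvement:

```js
const view = new DataView(new ArrayBuffer(2));
view.setUint16(0, "A".charCodeAt(0), true); // true = little-endian
console.log([view.getUint8(0), view.getUint8(1)]); // [65, 0] - low byte first
```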
|
1573
|
+
static _hexSlice(buf, start, end) {
|
1574
|
+
const len = buf.length;
|
1575
|
+
if (!start || start < 0) {
|
1576
|
+
start = 0;
|
1577
|
+
}
|
1578
|
+
if (!end || end < 0 || end > len) {
|
1579
|
+
end = len;
|
1580
|
+
}
|
1581
|
+
let out = "";
|
1582
|
+
for (let i = start; i < end; ++i) {
|
1583
|
+
out += hexSliceLookupTable[buf[i]];
|
1584
|
+
}
|
1585
|
+
return out;
|
1586
|
+
}
|
1587
|
+
static _base64Slice(buf, start, end) {
|
1588
|
+
if (start === 0 && end === buf.length) {
|
1589
|
+
return fromByteArray(buf);
|
1590
|
+
} else {
|
1591
|
+
return fromByteArray(buf.slice(start, end));
|
1592
|
+
}
|
1593
|
+
}
|
1594
|
+
static _utf8Slice(buf, start, end) {
|
1595
|
+
end = Math.min(buf.length, end);
|
1596
|
+
const res = [];
|
1597
|
+
let i = start;
|
1598
|
+
while (i < end) {
|
1599
|
+
const firstByte = buf[i];
|
1600
|
+
let codePoint = null;
|
1601
|
+
let bytesPerSequence = firstByte > 239 ? 4 : firstByte > 223 ? 3 : firstByte > 191 ? 2 : 1;
|
1602
|
+
if (i + bytesPerSequence <= end) {
|
1603
|
+
let secondByte, thirdByte, fourthByte, tempCodePoint;
|
1604
|
+
switch (bytesPerSequence) {
|
1605
|
+
case 1:
|
1606
|
+
if (firstByte < 128) {
|
1607
|
+
codePoint = firstByte;
|
1608
|
+
}
|
1609
|
+
break;
|
1610
|
+
case 2:
|
1611
|
+
secondByte = buf[i + 1];
|
1612
|
+
if ((secondByte & 192) === 128) {
|
1613
|
+
tempCodePoint = (firstByte & 31) << 6 | secondByte & 63;
|
1614
|
+
if (tempCodePoint > 127) {
|
1615
|
+
codePoint = tempCodePoint;
|
1616
|
+
}
|
1617
|
+
}
|
1618
|
+
break;
|
1619
|
+
case 3:
|
1620
|
+
secondByte = buf[i + 1];
|
1621
|
+
thirdByte = buf[i + 2];
|
1622
|
+
if ((secondByte & 192) === 128 && (thirdByte & 192) === 128) {
|
1623
|
+
tempCodePoint = (firstByte & 15) << 12 | (secondByte & 63) << 6 | thirdByte & 63;
|
1624
|
+
if (tempCodePoint > 2047 && (tempCodePoint < 55296 || tempCodePoint > 57343)) {
|
1625
|
+
codePoint = tempCodePoint;
|
1626
|
+
}
|
1627
|
+
}
|
1628
|
+
break;
|
1629
|
+
case 4:
|
1630
|
+
secondByte = buf[i + 1];
|
1631
|
+
thirdByte = buf[i + 2];
|
1632
|
+
fourthByte = buf[i + 3];
|
1633
|
+
if ((secondByte & 192) === 128 && (thirdByte & 192) === 128 && (fourthByte & 192) === 128) {
|
1634
|
+
tempCodePoint = (firstByte & 15) << 18 | (secondByte & 63) << 12 | (thirdByte & 63) << 6 | fourthByte & 63;
|
1635
|
+
if (tempCodePoint > 65535 && tempCodePoint < 1114112) {
|
1636
|
+
codePoint = tempCodePoint;
|
1637
|
+
}
|
1638
|
+
}
|
1639
|
+
}
|
1640
|
+
}
|
1641
|
+
if (codePoint === null) {
|
1642
|
+
codePoint = 65533;
|
1643
|
+
bytesPerSequence = 1;
|
1644
|
+
} else if (codePoint > 65535) {
|
1645
|
+
codePoint -= 65536;
|
1646
|
+
res.push(codePoint >>> 10 & 1023 | 55296);
|
1647
|
+
codePoint = 56320 | codePoint & 1023;
|
1648
|
+
}
|
1649
|
+
res.push(codePoint);
|
1650
|
+
i += bytesPerSequence;
|
1651
|
+
}
|
1652
|
+
return Buffer._decodeCodePointsArray(res);
|
1653
|
+
}
|
1654
|
+
static _decodeCodePointsArray(codePoints) {
|
1655
|
+
const len = codePoints.length;
|
1656
|
+
if (len <= MAX_ARGUMENTS_LENGTH) {
|
1657
|
+
return String.fromCharCode.apply(String, codePoints);
|
1658
|
+
}
|
1659
|
+
let res = "";
|
1660
|
+
let i = 0;
|
1661
|
+
while (i < len) {
|
1662
|
+
res += String.fromCharCode.apply(String, codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH));
|
1663
|
+
}
|
1664
|
+
return res;
|
1665
|
+
}
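The chunked decode above exists because `String.fromCharCode.apply` passes every code unit as a separate argument and engines cap argument counts, so long arrays are converted in fixed-size slices. The same chunking with spread syntax; the 0x1000 limit is the value used by the upstream buffer package this shim is modeled on, but treat the exact constant as an assumption since it is defined outside the lines shown here:

```js
const MAX_ARGUMENTS_LENGTH = 0x1000; // assumed chunk size; see note above

function decodeCodeUnits(codeUnits) {
  if (codeUnits.length <= MAX_ARGUMENTS_LENGTH) {
    return String.fromCharCode(...codeUnits);
  }
  let res = "";
  let i = 0;
  while (i < codeUnits.length) {
    res += String.fromCharCode(...codeUnits.slice(i, (i += MAX_ARGUMENTS_LENGTH)));
  }
  return res;
}

console.log(decodeCodeUnits([72, 105])); // "Hi"
```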
|
1666
|
+
static _asciiSlice(buf, start, end) {
|
1667
|
+
let ret = "";
|
1668
|
+
end = Math.min(buf.length, end);
|
1669
|
+
for (let i = start; i < end; ++i) {
|
1670
|
+
ret += String.fromCharCode(buf[i] & 127);
|
1671
|
+
}
|
1672
|
+
return ret;
|
1673
|
+
}
|
1674
|
+
static _latin1Slice(buf, start, end) {
|
1675
|
+
let ret = "";
|
1676
|
+
end = Math.min(buf.length, end);
|
1677
|
+
for (let i = start; i < end; ++i) {
|
1678
|
+
ret += String.fromCharCode(buf[i]);
|
1679
|
+
}
|
1680
|
+
return ret;
|
1681
|
+
}
|
1682
|
+
static _utf16leSlice(buf, start, end) {
|
1683
|
+
const bytes = buf.slice(start, end);
|
1684
|
+
let res = "";
|
1685
|
+
for (let i = 0; i < bytes.length - 1; i += 2) {
|
1686
|
+
res += String.fromCharCode(bytes[i] + bytes[i + 1] * 256);
|
1687
|
+
}
|
1688
|
+
return res;
|
1689
|
+
}
|
1690
|
+
static _arrayIndexOf(arr, val, byteOffset, encoding, dir) {
|
1691
|
+
let indexSize = 1;
|
1692
|
+
let arrLength = arr.length;
|
1693
|
+
let valLength = val.length;
|
1694
|
+
if (encoding !== void 0) {
|
1695
|
+
encoding = Buffer._getEncoding(encoding);
|
1696
|
+
if (encoding === "ucs2" || encoding === "utf16le") {
|
1697
|
+
if (arr.length < 2 || val.length < 2) {
|
1698
|
+
return -1;
|
1699
|
+
}
|
1700
|
+
indexSize = 2;
|
1701
|
+
arrLength /= 2;
|
1702
|
+
valLength /= 2;
|
1703
|
+
byteOffset /= 2;
|
1704
|
+
}
|
1705
|
+
}
|
1706
|
+
function read(buf, i2) {
|
1707
|
+
if (indexSize === 1) {
|
1708
|
+
return buf[i2];
|
1709
|
+
} else {
|
1710
|
+
return buf.readUInt16BE(i2 * indexSize);
|
1711
|
+
}
|
1712
|
+
}
|
1713
|
+
let i;
|
1714
|
+
if (dir) {
|
1715
|
+
let foundIndex = -1;
|
1716
|
+
for (i = byteOffset; i < arrLength; i++) {
|
1717
|
+
if (read(arr, i) === read(val, foundIndex === -1 ? 0 : i - foundIndex)) {
|
1718
|
+
if (foundIndex === -1) foundIndex = i;
|
1719
|
+
if (i - foundIndex + 1 === valLength) return foundIndex * indexSize;
|
1720
|
+
} else {
|
1721
|
+
if (foundIndex !== -1) i -= i - foundIndex;
|
1722
|
+
foundIndex = -1;
|
1723
|
+
}
|
1724
|
+
}
|
1725
|
+
} else {
|
1726
|
+
if (byteOffset + valLength > arrLength) {
|
1727
|
+
byteOffset = arrLength - valLength;
|
1728
|
+
}
|
1729
|
+
for (i = byteOffset; i >= 0; i--) {
|
1730
|
+
let found = true;
|
1731
|
+
for (let j = 0; j < valLength; j++) {
|
1732
|
+
if (read(arr, i + j) !== read(val, j)) {
|
1733
|
+
found = false;
|
1734
|
+
break;
|
1735
|
+
}
|
1736
|
+
}
|
1737
|
+
if (found) {
|
1738
|
+
return i;
|
1739
|
+
}
|
1740
|
+
}
|
1741
|
+
}
|
1742
|
+
return -1;
|
1743
|
+
}
|
1744
|
+
static _checkOffset(offset, ext, length) {
|
1745
|
+
if (offset % 1 !== 0 || offset < 0) throw new RangeError("offset is not uint");
|
1746
|
+
if (offset + ext > length) throw new RangeError("Trying to access beyond buffer length");
|
1747
|
+
}
|
1748
|
+
static _checkInt(buf, value, offset, ext, max, min) {
|
1749
|
+
if (!Buffer.isBuffer(buf)) throw new TypeError('"buffer" argument must be a Buffer instance');
|
1750
|
+
if (value > max || value < min) throw new RangeError('"value" argument is out of bounds');
|
1751
|
+
if (offset + ext > buf.length) throw new RangeError("Index out of range");
|
1752
|
+
}
|
1753
|
+
static _getEncoding(encoding) {
|
1754
|
+
let toLowerCase = false;
|
1755
|
+
let originalEncoding = "";
|
1756
|
+
for (; ; ) {
|
1757
|
+
switch (encoding) {
|
1758
|
+
case "hex":
|
1759
|
+
return "hex";
|
1760
|
+
case "utf8":
|
1761
|
+
return "utf8";
|
1762
|
+
case "ascii":
|
1763
|
+
return "ascii";
|
1764
|
+
case "binary":
|
1765
|
+
return "binary";
|
1766
|
+
case "latin1":
|
1767
|
+
return "latin1";
|
1768
|
+
case "ucs2":
|
1769
|
+
return "ucs2";
|
1770
|
+
case "utf16le":
|
1771
|
+
return "utf16le";
|
1772
|
+
case "base64":
|
1773
|
+
return "base64";
|
1774
|
+
default: {
|
1775
|
+
if (toLowerCase) {
|
1776
|
+
throw new TypeError("Unknown or unsupported encoding: " + originalEncoding);
|
1777
|
+
}
|
1778
|
+
toLowerCase = true;
|
1779
|
+
originalEncoding = encoding;
|
1780
|
+
encoding = encoding.toLowerCase();
|
1781
|
+
}
|
1782
|
+
}
|
1783
|
+
}
|
1784
|
+
}
|
1785
|
+
}
|
1786
|
+
const hexSliceLookupTable = function() {
|
1787
|
+
const alphabet = "0123456789abcdef";
|
1788
|
+
const table = new Array(256);
|
1789
|
+
for (let i = 0; i < 16; ++i) {
|
1790
|
+
const i16 = i * 16;
|
1791
|
+
for (let j = 0; j < 16; ++j) {
|
1792
|
+
table[i16 + j] = alphabet[i] + alphabet[j];
|
1793
|
+
}
|
1794
|
+
}
|
1795
|
+
return table;
|
1796
|
+
}();
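The lookup table built above trades 256 precomputed two-character strings for a single array access per byte when hex-encoding. The construction in isolation, plus a quick use of it:

```js
const alphabet = "0123456789abcdef";
const table = new Array(256);
for (let i = 0; i < 16; ++i) {
  for (let j = 0; j < 16; ++j) {
    table[i * 16 + j] = alphabet[i] + alphabet[j]; // e.g. table[255] === "ff"
  }
}

let out = "";
for (const byte of Uint8Array.of(0, 15, 16, 255)) out += table[byte];
console.log(out); // "000f10ff"
```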
|
1797
|
+
const INVALID_BASE64_RE = /[^+/0-9A-Za-z-_]/g;
|
1798
|
+
function base64clean(str) {
|
1799
|
+
str = str.split("=")[0];
|
1800
|
+
str = str.trim().replace(INVALID_BASE64_RE, "");
|
1801
|
+
if (str.length < 2) return "";
|
1802
|
+
while (str.length % 4 !== 0) {
|
1803
|
+
str = str + "=";
|
1804
|
+
}
|
1805
|
+
return str;
|
1806
|
+
}
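base64clean normalizes input before toByteArray sees it: everything after the first "=" is dropped, characters outside the base64 and base64url alphabets are stripped, and the result is re-padded to a multiple of four. Restating it verbatim with a couple of worked inputs:

```js
const INVALID_BASE64_RE = /[^+/0-9A-Za-z-_]/g;

function base64clean(str) {
  str = str.split("=")[0];
  str = str.trim().replace(INVALID_BASE64_RE, "");
  if (str.length < 2) return "";
  while (str.length % 4 !== 0) {
    str = str + "=";
  }
  return str;
}

console.log(base64clean("aGVsbG8=\n")); // "aGVsbG8="
console.log(base64clean("aGVs bG8"));   // "aGVsbG8=" (whitespace stripped, re-padded)
console.log(base64clean("a"));          // ""         (too short to decode)
```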
|
1807
|
+
|
25
1808
|
function notEmpty(value) {
|
26
1809
|
return value !== null && value !== void 0;
|
27
1810
|
}
|
@@ -225,8 +2008,7 @@ function buildPreviewBranchName({ org, branch }) {
|
|
225
2008
|
function getPreviewBranch() {
|
226
2009
|
try {
|
227
2010
|
const { deployPreview, deployPreviewBranch, vercelGitCommitRef, vercelGitRepoOwner } = getEnvironment();
|
228
|
-
if (deployPreviewBranch)
|
229
|
-
return deployPreviewBranch;
|
2011
|
+
if (deployPreviewBranch) return deployPreviewBranch;
|
230
2012
|
switch (deployPreview) {
|
231
2013
|
case "vercel": {
|
232
2014
|
if (!vercelGitCommitRef || !vercelGitRepoOwner) {
|
@@ -242,29 +2024,15 @@ function getPreviewBranch() {
|
|
242
2024
|
}
|
243
2025
|
}
|
244
2026
|
|
245
|
-
var
|
246
|
-
|
247
|
-
throw TypeError("Cannot " + msg);
|
248
|
-
};
|
249
|
-
var __privateGet$7 = (obj, member, getter) => {
|
250
|
-
__accessCheck$8(obj, member, "read from private field");
|
251
|
-
return getter ? getter.call(obj) : member.get(obj);
|
252
|
-
};
|
253
|
-
var __privateAdd$8 = (obj, member, value) => {
|
254
|
-
if (member.has(obj))
|
255
|
-
throw TypeError("Cannot add the same private member more than once");
|
256
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
2027
|
+
var __typeError$8 = (msg) => {
|
2028
|
+
throw TypeError(msg);
|
257
2029
|
};
|
258
|
-
var
|
259
|
-
|
260
|
-
|
261
|
-
|
262
|
-
|
263
|
-
var
|
264
|
-
__accessCheck$8(obj, member, "access private method");
|
265
|
-
return method;
|
266
|
-
};
|
267
|
-
var _fetch, _queue, _concurrency, _enqueue, enqueue_fn;
|
2030
|
+
var __accessCheck$8 = (obj, member, msg) => member.has(obj) || __typeError$8("Cannot " + msg);
|
2031
|
+
var __privateGet$7 = (obj, member, getter) => (__accessCheck$8(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
2032
|
+
var __privateAdd$8 = (obj, member, value) => member.has(obj) ? __typeError$8("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
2033
|
+
var __privateSet$6 = (obj, member, value, setter) => (__accessCheck$8(obj, member, "write to private field"), member.set(obj, value), value);
|
2034
|
+
var __privateMethod$4 = (obj, member, method) => (__accessCheck$8(obj, member, "access private method"), method);
|
2035
|
+
var _fetch, _queue, _concurrency, _ApiRequestPool_instances, enqueue_fn;
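The removed and added blocks above are the compiler-generated private-member helpers: 0.29.5 collapses them into single-expression arrows and switches private methods to a shared per-class `_ClassName_instances` WeakSet brand instead of one WeakSet per method. The pattern in isolation, with class and method names that are illustrative rather than taken from the package:

```js
var __typeError = (msg) => {
  throw TypeError(msg);
};
var __accessCheck = (obj, member, msg) => member.has(obj) || __typeError("Cannot " + msg);
var __privateAdd = (obj, member, value) => member.has(obj) ? __typeError("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
var __privateMethod = (obj, member, method) => (__accessCheck(obj, member, "access private method"), method);

// One WeakSet brands every instance of the class; private methods live as
// plain functions and are gated through that brand.
var _Example_instances = new WeakSet();
var greet_fn = function (name) {
  return `hello ${name}`;
};

class Example {
  constructor() {
    __privateAdd(this, _Example_instances); // brand the instance
  }
  greet(name) {
    return __privateMethod(this, _Example_instances, greet_fn).call(this, name);
  }
}

console.log(new Example().greet("xata")); // "hello xata"
```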
|
268
2036
|
const REQUEST_TIMEOUT = 5 * 60 * 1e3;
|
269
2037
|
function getFetchImplementation(userFetch) {
|
270
2038
|
const globalFetch = typeof fetch !== "undefined" ? fetch : void 0;
|
@@ -277,10 +2045,10 @@ function getFetchImplementation(userFetch) {
|
|
277
2045
|
}
|
278
2046
|
class ApiRequestPool {
|
279
2047
|
constructor(concurrency = 10) {
|
280
|
-
__privateAdd$8(this,
|
281
|
-
__privateAdd$8(this, _fetch
|
282
|
-
__privateAdd$8(this, _queue
|
283
|
-
__privateAdd$8(this, _concurrency
|
2048
|
+
__privateAdd$8(this, _ApiRequestPool_instances);
|
2049
|
+
__privateAdd$8(this, _fetch);
|
2050
|
+
__privateAdd$8(this, _queue);
|
2051
|
+
__privateAdd$8(this, _concurrency);
|
284
2052
|
__privateSet$6(this, _queue, []);
|
285
2053
|
__privateSet$6(this, _concurrency, concurrency);
|
286
2054
|
this.running = 0;
|
@@ -315,7 +2083,7 @@ class ApiRequestPool {
|
|
315
2083
|
}
|
316
2084
|
return response;
|
317
2085
|
};
|
318
|
-
return __privateMethod$4(this,
|
2086
|
+
return __privateMethod$4(this, _ApiRequestPool_instances, enqueue_fn).call(this, async () => {
|
319
2087
|
return await runRequest();
|
320
2088
|
});
|
321
2089
|
}
|
@@ -323,7 +2091,7 @@ class ApiRequestPool {
|
|
323
2091
|
_fetch = new WeakMap();
|
324
2092
|
_queue = new WeakMap();
|
325
2093
|
_concurrency = new WeakMap();
|
326
|
-
|
2094
|
+
_ApiRequestPool_instances = new WeakSet();
|
327
2095
|
enqueue_fn = function(task) {
|
328
2096
|
const promise = new Promise((resolve) => __privateGet$7(this, _queue).push(resolve)).finally(() => {
|
329
2097
|
this.started--;
|
@@ -526,7 +2294,7 @@ function defaultOnOpen(response) {
|
|
526
2294
|
}
|
527
2295
|
}
|
528
2296
|
|
529
|
-
const VERSION = "0.29.
|
2297
|
+
const VERSION = "0.29.5";
|
530
2298
|
|
531
2299
|
class ErrorWithCause extends Error {
|
532
2300
|
constructor(message, options) {
|
@@ -606,35 +2374,30 @@ function parseProviderString(provider = "production") {
|
|
606
2374
|
return provider;
|
607
2375
|
}
|
608
2376
|
const [main, workspaces] = provider.split(",");
|
609
|
-
if (!main || !workspaces)
|
610
|
-
return null;
|
2377
|
+
if (!main || !workspaces) return null;
|
611
2378
|
return { main, workspaces };
|
612
2379
|
}
|
613
2380
|
function buildProviderString(provider) {
|
614
|
-
if (isHostProviderAlias(provider))
|
615
|
-
return provider;
|
2381
|
+
if (isHostProviderAlias(provider)) return provider;
|
616
2382
|
return `${provider.main},${provider.workspaces}`;
|
617
2383
|
}
|
618
2384
|
function parseWorkspacesUrlParts(url) {
|
619
|
-
if (!isString(url))
|
620
|
-
return null;
|
2385
|
+
if (!isString(url)) return null;
|
621
2386
|
const matches = {
|
622
2387
|
production: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.xata\.sh\/db\/([^:]+):?(.*)?/),
|
623
2388
|
staging: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.staging-xata\.dev\/db\/([^:]+):?(.*)?/),
|
624
2389
|
dev: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.dev-xata\.dev\/db\/([^:]+):?(.*)?/),
|
625
|
-
local: url.match(/(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:([^:]+):?(.*)?/)
|
2390
|
+
local: url.match(/(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:(?:\d+)\/db\/([^:]+):?(.*)?/)
|
626
2391
|
};
|
627
2392
|
const [host, match] = Object.entries(matches).find(([, match2]) => match2 !== null) ?? [];
|
628
|
-
if (!isHostProviderAlias(host) || !match)
|
629
|
-
return null;
|
2393
|
+
if (!isHostProviderAlias(host) || !match) return null;
|
630
2394
|
return { workspace: match[1], region: match[2], database: match[3], branch: match[4], host };
|
631
2395
|
}
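Besides the one-line `if` folding, the `local` matcher changed: it now expects a `/db/<database>` path segment after the port instead of capturing the port in the database position, which lines it up with the hosted URL shapes above. A quick check of the new expression against a made-up local URL (workspace, region, port, and database names are placeholders):

```js
const local = /(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:(?:\d+)\/db\/([^:]+):?(.*)?/;

const m = "http://my-workspace.eu-west-1.localhost:6543/db/mydb:main".match(local);
console.log(m && { workspace: m[1], region: m[2], database: m[3], branch: m[4] });
// { workspace: "my-workspace", region: "eu-west-1", database: "mydb", branch: "main" }
```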
|
632
2396
|
|
633
2397
|
const pool = new ApiRequestPool();
|
634
2398
|
const resolveUrl = (url, queryParams = {}, pathParams = {}) => {
|
635
2399
|
const cleanQueryParams = Object.entries(queryParams).reduce((acc, [key, value]) => {
|
636
|
-
if (value === void 0 || value === null)
|
637
|
-
return acc;
|
2400
|
+
if (value === void 0 || value === null) return acc;
|
638
2401
|
return { ...acc, [key]: value };
|
639
2402
|
}, {});
|
640
2403
|
const query = new URLSearchParams(cleanQueryParams).toString();
|
@@ -682,8 +2445,7 @@ function hostHeader(url) {
|
|
682
2445
|
return groups?.host ? { Host: groups.host } : {};
|
683
2446
|
}
|
684
2447
|
async function parseBody(body, headers) {
|
685
|
-
if (!isDefined(body))
|
686
|
-
return void 0;
|
2448
|
+
if (!isDefined(body)) return void 0;
|
687
2449
|
if (isBlob(body) || typeof body.text === "function") {
|
688
2450
|
return body;
|
689
2451
|
}
|
@@ -760,8 +2522,7 @@ async function fetch$1({
|
|
760
2522
|
[TraceAttributes.CLOUDFLARE_RAY_ID]: response.headers?.get("cf-ray") ?? void 0
|
761
2523
|
});
|
762
2524
|
const message = response.headers?.get("x-xata-message");
|
763
|
-
if (message)
|
764
|
-
console.warn(message);
|
2525
|
+
if (message) console.warn(message);
|
765
2526
|
if (response.status === 204) {
|
766
2527
|
return {};
|
767
2528
|
}
|
@@ -845,104 +2606,233 @@ function parseUrl(url) {
|
|
845
2606
|
|
846
2607
|
const dataPlaneFetch = async (options) => fetch$1({ ...options, endpoint: "dataPlane" });
|
847
2608
|
|
848
|
-
const applyMigration = (variables, signal) => dataPlaneFetch({
|
2609
|
+
const applyMigration = (variables, signal) => dataPlaneFetch({
|
2610
|
+
url: "/db/{dbBranchName}/migrations/apply",
|
2611
|
+
method: "post",
|
2612
|
+
...variables,
|
2613
|
+
signal
|
2614
|
+
});
|
2615
|
+
const startMigration = (variables, signal) => dataPlaneFetch({
|
2616
|
+
url: "/db/{dbBranchName}/migrations/start",
|
2617
|
+
method: "post",
|
2618
|
+
...variables,
|
2619
|
+
signal
|
2620
|
+
});
|
2621
|
+
const completeMigration = (variables, signal) => dataPlaneFetch({
|
2622
|
+
url: "/db/{dbBranchName}/migrations/complete",
|
2623
|
+
method: "post",
|
2624
|
+
...variables,
|
2625
|
+
signal
|
2626
|
+
});
|
2627
|
+
const rollbackMigration = (variables, signal) => dataPlaneFetch({
|
2628
|
+
url: "/db/{dbBranchName}/migrations/rollback",
|
2629
|
+
method: "post",
|
2630
|
+
...variables,
|
2631
|
+
signal
|
2632
|
+
});
|
849
2633
|
const adaptTable = (variables, signal) => dataPlaneFetch({
|
850
2634
|
url: "/db/{dbBranchName}/migrations/adapt/{tableName}",
|
851
2635
|
method: "post",
|
852
2636
|
...variables,
|
853
2637
|
signal
|
854
2638
|
});
|
855
|
-
const
|
856
|
-
|
857
|
-
|
858
|
-
|
859
|
-
|
2639
|
+
const adaptAllTables = (variables, signal) => dataPlaneFetch({
|
2640
|
+
url: "/db/{dbBranchName}/migrations/adapt",
|
2641
|
+
method: "post",
|
2642
|
+
...variables,
|
2643
|
+
signal
|
2644
|
+
});
|
2645
|
+
const getBranchMigrationJobStatus = (variables, signal) => dataPlaneFetch({
|
2646
|
+
url: "/db/{dbBranchName}/migrations/status",
|
2647
|
+
method: "get",
|
2648
|
+
...variables,
|
2649
|
+
signal
|
2650
|
+
});
|
2651
|
+
const getMigrationJobStatus = (variables, signal) => dataPlaneFetch({
|
2652
|
+
url: "/db/{dbBranchName}/migrations/jobs/{jobId}",
|
2653
|
+
method: "get",
|
2654
|
+
...variables,
|
2655
|
+
signal
|
2656
|
+
});
|
2657
|
+
const getMigrationHistory = (variables, signal) => dataPlaneFetch({
|
2658
|
+
url: "/db/{dbBranchName}/migrations/history",
|
2659
|
+
method: "get",
|
2660
|
+
...variables,
|
2661
|
+
signal
|
2662
|
+
});
|
2663
|
+
const getBranchList = (variables, signal) => dataPlaneFetch({
|
2664
|
+
url: "/dbs/{dbName}",
|
2665
|
+
method: "get",
|
2666
|
+
...variables,
|
2667
|
+
signal
|
2668
|
+
});
|
2669
|
+
const getDatabaseSettings = (variables, signal) => dataPlaneFetch({
|
2670
|
+
url: "/dbs/{dbName}/settings",
|
2671
|
+
method: "get",
|
2672
|
+
...variables,
|
2673
|
+
signal
|
2674
|
+
});
|
2675
|
+
const updateDatabaseSettings = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/settings", method: "patch", ...variables, signal });
|
2676
|
+
const getBranchDetails = (variables, signal) => dataPlaneFetch({
|
2677
|
+
url: "/db/{dbBranchName}",
|
2678
|
+
method: "get",
|
2679
|
+
...variables,
|
2680
|
+
signal
|
2681
|
+
});
|
2682
|
+
const createBranch = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}", method: "put", ...variables, signal });
|
2683
|
+
const deleteBranch = (variables, signal) => dataPlaneFetch({
|
2684
|
+
url: "/db/{dbBranchName}",
|
2685
|
+
method: "delete",
|
2686
|
+
...variables,
|
2687
|
+
signal
|
2688
|
+
});
|
2689
|
+
const getSchema = (variables, signal) => dataPlaneFetch({
|
2690
|
+
url: "/db/{dbBranchName}/schema",
|
2691
|
+
method: "get",
|
2692
|
+
...variables,
|
2693
|
+
signal
|
2694
|
+
});
|
2695
|
+
const copyBranch = (variables, signal) => dataPlaneFetch({
|
2696
|
+
url: "/db/{dbBranchName}/copy",
|
2697
|
+
method: "post",
|
2698
|
+
...variables,
|
2699
|
+
signal
|
2700
|
+
});
|
2701
|
+
const updateBranchMetadata = (variables, signal) => dataPlaneFetch({
|
2702
|
+
url: "/db/{dbBranchName}/metadata",
|
2703
|
+
method: "put",
|
2704
|
+
...variables,
|
2705
|
+
signal
|
2706
|
+
});
|
2707
|
+
const getBranchMetadata = (variables, signal) => dataPlaneFetch({
|
2708
|
+
url: "/db/{dbBranchName}/metadata",
|
2709
|
+
method: "get",
|
2710
|
+
...variables,
|
2711
|
+
signal
|
2712
|
+
});
|
2713
|
+
const getBranchStats = (variables, signal) => dataPlaneFetch({
|
2714
|
+
url: "/db/{dbBranchName}/stats",
|
2715
|
+
method: "get",
|
2716
|
+
...variables,
|
2717
|
+
signal
|
2718
|
+
});
|
2719
|
+
const getGitBranchesMapping = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "get", ...variables, signal });
|
2720
|
+
const addGitBranchesEntry = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "post", ...variables, signal });
|
2721
|
+
const removeGitBranchesEntry = (variables, signal) => dataPlaneFetch({
|
2722
|
+
url: "/dbs/{dbName}/gitBranches",
|
2723
|
+
method: "delete",
|
2724
|
+
...variables,
|
2725
|
+
signal
|
2726
|
+
});
|
2727
|
+
const resolveBranch = (variables, signal) => dataPlaneFetch({
|
2728
|
+
url: "/dbs/{dbName}/resolveBranch",
|
2729
|
+
method: "get",
|
2730
|
+
...variables,
|
2731
|
+
signal
|
2732
|
+
});
|
2733
|
+
const getBranchMigrationHistory = (variables, signal) => dataPlaneFetch({
|
2734
|
+
url: "/db/{dbBranchName}/migrations",
|
2735
|
+
method: "get",
|
2736
|
+
...variables,
|
2737
|
+
signal
|
2738
|
+
});
|
2739
|
+
const getBranchMigrationPlan = (variables, signal) => dataPlaneFetch({
|
2740
|
+
url: "/db/{dbBranchName}/migrations/plan",
|
2741
|
+
method: "post",
|
2742
|
+
...variables,
|
2743
|
+
signal
|
2744
|
+
});
|
2745
|
+
const executeBranchMigrationPlan = (variables, signal) => dataPlaneFetch({
|
2746
|
+
url: "/db/{dbBranchName}/migrations/execute",
|
2747
|
+
method: "post",
|
2748
|
+
...variables,
|
2749
|
+
signal
|
2750
|
+
});
|
2751
|
+
const queryMigrationRequests = (variables, signal) => dataPlaneFetch({
|
2752
|
+
url: "/dbs/{dbName}/migrations/query",
|
2753
|
+
method: "post",
|
2754
|
+
...variables,
|
2755
|
+
signal
|
2756
|
+
});
|
2757
|
+
const createMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations", method: "post", ...variables, signal });
|
2758
|
+
const getMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2759
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}",
|
860
2760
|
method: "get",
|
861
2761
|
...variables,
|
862
2762
|
signal
|
863
2763
|
});
|
864
|
-
const
|
865
|
-
url: "/dbs/{dbName}/
|
866
|
-
method: "
|
2764
|
+
const updateMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2765
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}",
|
2766
|
+
method: "patch",
|
867
2767
|
...variables,
|
868
2768
|
signal
|
869
2769
|
});
|
870
|
-
const
|
871
|
-
|
872
|
-
|
873
|
-
method: "get",
|
2770
|
+
const listMigrationRequestsCommits = (variables, signal) => dataPlaneFetch({
|
2771
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/commits",
|
2772
|
+
method: "post",
|
874
2773
|
...variables,
|
875
2774
|
signal
|
876
2775
|
});
|
877
|
-
const
|
878
|
-
|
879
|
-
|
880
|
-
method: "delete",
|
2776
|
+
const compareMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2777
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/compare",
|
2778
|
+
method: "post",
|
881
2779
|
...variables,
|
882
2780
|
signal
|
883
2781
|
});
|
884
|
-
const
|
885
|
-
url: "/
|
2782
|
+
const getMigrationRequestIsMerged = (variables, signal) => dataPlaneFetch({
|
2783
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
|
886
2784
|
method: "get",
|
887
2785
|
...variables,
|
888
2786
|
signal
|
889
2787
|
});
|
890
|
-
const
|
891
|
-
url: "/
|
2788
|
+
const mergeMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2789
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
|
892
2790
|
method: "post",
|
893
2791
|
...variables,
|
894
2792
|
signal
|
895
2793
|
});
|
896
|
-
const
|
897
|
-
url: "/db/{dbBranchName}/
|
898
|
-
method: "
|
2794
|
+
const getBranchSchemaHistory = (variables, signal) => dataPlaneFetch({
|
2795
|
+
url: "/db/{dbBranchName}/schema/history",
|
2796
|
+
method: "post",
|
899
2797
|
...variables,
|
900
2798
|
signal
|
901
2799
|
});
|
902
|
-
const
|
903
|
-
url: "/db/{dbBranchName}/
|
904
|
-
method: "
|
2800
|
+
const compareBranchWithUserSchema = (variables, signal) => dataPlaneFetch({
|
2801
|
+
url: "/db/{dbBranchName}/schema/compare",
|
2802
|
+
method: "post",
|
905
2803
|
...variables,
|
906
2804
|
signal
|
907
2805
|
});
|
908
|
-
const
|
909
|
-
url: "/db/{dbBranchName}/
|
910
|
-
method: "
|
2806
|
+
const compareBranchSchemas = (variables, signal) => dataPlaneFetch({
|
2807
|
+
url: "/db/{dbBranchName}/schema/compare/{branchName}",
|
2808
|
+
method: "post",
|
911
2809
|
...variables,
|
912
2810
|
signal
|
913
2811
|
});
|
914
|
-
const
|
915
|
-
|
916
|
-
|
917
|
-
const resolveBranch = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/resolveBranch", method: "get", ...variables, signal });
|
918
|
-
const getBranchMigrationHistory = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations", method: "get", ...variables, signal });
|
919
|
-
const getBranchMigrationPlan = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/plan", method: "post", ...variables, signal });
|
920
|
-
const executeBranchMigrationPlan = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/execute", method: "post", ...variables, signal });
|
921
|
-
const queryMigrationRequests = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/query", method: "post", ...variables, signal });
|
922
|
-
const createMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations", method: "post", ...variables, signal });
|
923
|
-
const getMigrationRequest = (variables, signal) => dataPlaneFetch({
|
924
|
-
url: "/dbs/{dbName}/migrations/{mrNumber}",
|
925
|
-
method: "get",
|
2812
|
+
const updateBranchSchema = (variables, signal) => dataPlaneFetch({
|
2813
|
+
url: "/db/{dbBranchName}/schema/update",
|
2814
|
+
method: "post",
|
926
2815
|
...variables,
|
927
2816
|
signal
|
928
2817
|
});
|
929
|
-
const
|
930
|
-
|
931
|
-
|
932
|
-
|
933
|
-
|
934
|
-
|
2818
|
+
const previewBranchSchemaEdit = (variables, signal) => dataPlaneFetch({
|
2819
|
+
url: "/db/{dbBranchName}/schema/preview",
|
2820
|
+
method: "post",
|
2821
|
+
...variables,
|
2822
|
+
signal
|
2823
|
+
});
|
2824
|
+
const applyBranchSchemaEdit = (variables, signal) => dataPlaneFetch({
|
2825
|
+
url: "/db/{dbBranchName}/schema/apply",
|
2826
|
+
method: "post",
|
2827
|
+
...variables,
|
2828
|
+
signal
|
2829
|
+
});
|
2830
|
+
const pushBranchMigrations = (variables, signal) => dataPlaneFetch({
|
2831
|
+
url: "/db/{dbBranchName}/schema/push",
|
935
2832
|
method: "post",
|
936
2833
|
...variables,
|
937
2834
|
signal
|
938
2835
|
});
|
939
|
-
const getBranchSchemaHistory = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/history", method: "post", ...variables, signal });
|
940
|
-
const compareBranchWithUserSchema = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/compare", method: "post", ...variables, signal });
|
941
|
-
const compareBranchSchemas = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/compare/{branchName}", method: "post", ...variables, signal });
|
942
|
-
const updateBranchSchema = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/update", method: "post", ...variables, signal });
|
943
|
-
const previewBranchSchemaEdit = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/preview", method: "post", ...variables, signal });
|
944
|
-
const applyBranchSchemaEdit = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/apply", method: "post", ...variables, signal });
|
945
|
-
const pushBranchMigrations = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/push", method: "post", ...variables, signal });
|
946
2836
|
const createTable = (variables, signal) => dataPlaneFetch({
|
947
2837
|
url: "/db/{dbBranchName}/tables/{tableName}",
|
948
2838
|
method: "put",
|
@@ -955,14 +2845,24 @@ const deleteTable = (variables, signal) => dataPlaneFetch({
|
|
955
2845
|
...variables,
|
956
2846
|
signal
|
957
2847
|
});
|
958
|
-
const updateTable = (variables, signal) => dataPlaneFetch({
|
2848
|
+
const updateTable = (variables, signal) => dataPlaneFetch({
|
2849
|
+
url: "/db/{dbBranchName}/tables/{tableName}",
|
2850
|
+
method: "patch",
|
2851
|
+
...variables,
|
2852
|
+
signal
|
2853
|
+
});
|
959
2854
|
const getTableSchema = (variables, signal) => dataPlaneFetch({
|
960
2855
|
url: "/db/{dbBranchName}/tables/{tableName}/schema",
|
961
2856
|
method: "get",
|
962
2857
|
...variables,
|
963
2858
|
signal
|
964
2859
|
});
|
965
|
-
const setTableSchema = (variables, signal) => dataPlaneFetch({
|
2860
|
+
const setTableSchema = (variables, signal) => dataPlaneFetch({
|
2861
|
+
url: "/db/{dbBranchName}/tables/{tableName}/schema",
|
2862
|
+
method: "put",
|
2863
|
+
...variables,
|
2864
|
+
signal
|
2865
|
+
});
|
966
2866
|
const getTableColumns = (variables, signal) => dataPlaneFetch({
|
967
2867
|
url: "/db/{dbBranchName}/tables/{tableName}/columns",
|
968
2868
|
method: "get",
|
@@ -970,7 +2870,12 @@ const getTableColumns = (variables, signal) => dataPlaneFetch({
|
|
970
2870
|
signal
|
971
2871
|
});
|
972
2872
|
const addTableColumn = (variables, signal) => dataPlaneFetch(
|
973
|
-
{
|
2873
|
+
{
|
2874
|
+
url: "/db/{dbBranchName}/tables/{tableName}/columns",
|
2875
|
+
method: "post",
|
2876
|
+
...variables,
|
2877
|
+
signal
|
2878
|
+
}
|
974
2879
|
);
|
975
2880
|
const getColumn = (variables, signal) => dataPlaneFetch({
|
976
2881
|
url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
|
@@ -978,15 +2883,30 @@ const getColumn = (variables, signal) => dataPlaneFetch({
|
|
978
2883
|
...variables,
|
979
2884
|
signal
|
980
2885
|
});
|
981
|
-
const updateColumn = (variables, signal) => dataPlaneFetch({
|
2886
|
+
const updateColumn = (variables, signal) => dataPlaneFetch({
|
2887
|
+
url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
|
2888
|
+
method: "patch",
|
2889
|
+
...variables,
|
2890
|
+
signal
|
2891
|
+
});
|
982
2892
|
const deleteColumn = (variables, signal) => dataPlaneFetch({
|
983
2893
|
url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
|
984
2894
|
method: "delete",
|
985
2895
|
...variables,
|
986
2896
|
signal
|
987
2897
|
});
|
988
|
-
const branchTransaction = (variables, signal) => dataPlaneFetch({
|
989
|
-
|
2898
|
+
const branchTransaction = (variables, signal) => dataPlaneFetch({
|
2899
|
+
url: "/db/{dbBranchName}/transaction",
|
2900
|
+
method: "post",
|
2901
|
+
...variables,
|
2902
|
+
signal
|
2903
|
+
});
|
2904
|
+
const insertRecord = (variables, signal) => dataPlaneFetch({
|
2905
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data",
|
2906
|
+
method: "post",
|
2907
|
+
...variables,
|
2908
|
+
signal
|
2909
|
+
});
|
990
2910
|
const getFileItem = (variables, signal) => dataPlaneFetch({
|
991
2911
|
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}/column/{columnName}/file/{fileId}",
|
992
2912
|
method: "get",
|
@@ -1029,11 +2949,36 @@ const getRecord = (variables, signal) => dataPlaneFetch({
|
|
1029
2949
|
...variables,
|
1030
2950
|
signal
|
1031
2951
|
});
|
1032
|
-
const insertRecordWithID = (variables, signal) => dataPlaneFetch({
|
1033
|
-
|
1034
|
-
|
1035
|
-
|
1036
|
-
|
2952
|
+
const insertRecordWithID = (variables, signal) => dataPlaneFetch({
|
2953
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2954
|
+
method: "put",
|
2955
|
+
...variables,
|
2956
|
+
signal
|
2957
|
+
});
|
2958
|
+
const updateRecordWithID = (variables, signal) => dataPlaneFetch({
|
2959
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2960
|
+
method: "patch",
|
2961
|
+
...variables,
|
2962
|
+
signal
|
2963
|
+
});
|
2964
|
+
const upsertRecordWithID = (variables, signal) => dataPlaneFetch({
|
2965
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2966
|
+
method: "post",
|
2967
|
+
...variables,
|
2968
|
+
signal
|
2969
|
+
});
|
2970
|
+
const deleteRecord = (variables, signal) => dataPlaneFetch({
|
2971
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2972
|
+
method: "delete",
|
2973
|
+
...variables,
|
2974
|
+
signal
|
2975
|
+
});
|
2976
|
+
const bulkInsertTableRecords = (variables, signal) => dataPlaneFetch({
|
2977
|
+
url: "/db/{dbBranchName}/tables/{tableName}/bulk",
|
2978
|
+
method: "post",
|
2979
|
+
...variables,
|
2980
|
+
signal
|
2981
|
+
});
|
1037
2982
|
const queryTable = (variables, signal) => dataPlaneFetch({
|
1038
2983
|
url: "/db/{dbBranchName}/tables/{tableName}/query",
|
1039
2984
|
method: "post",
|
@@ -1052,16 +2997,36 @@ const searchTable = (variables, signal) => dataPlaneFetch({
|
|
1052
2997
|
...variables,
|
1053
2998
|
signal
|
1054
2999
|
});
|
1055
|
-
const vectorSearchTable = (variables, signal) => dataPlaneFetch({
|
3000
|
+
const vectorSearchTable = (variables, signal) => dataPlaneFetch({
|
3001
|
+
url: "/db/{dbBranchName}/tables/{tableName}/vectorSearch",
|
3002
|
+
method: "post",
|
3003
|
+
...variables,
|
3004
|
+
signal
|
3005
|
+
});
|
1056
3006
|
const askTable = (variables, signal) => dataPlaneFetch({
|
1057
3007
|
url: "/db/{dbBranchName}/tables/{tableName}/ask",
|
1058
3008
|
method: "post",
|
1059
3009
|
...variables,
|
1060
3010
|
signal
|
1061
3011
|
});
|
1062
|
-
const askTableSession = (variables, signal) => dataPlaneFetch({
|
1063
|
-
|
1064
|
-
|
3012
|
+
const askTableSession = (variables, signal) => dataPlaneFetch({
|
3013
|
+
url: "/db/{dbBranchName}/tables/{tableName}/ask/{sessionId}",
|
3014
|
+
method: "post",
|
3015
|
+
...variables,
|
3016
|
+
signal
|
3017
|
+
});
|
3018
|
+
const summarizeTable = (variables, signal) => dataPlaneFetch({
|
3019
|
+
url: "/db/{dbBranchName}/tables/{tableName}/summarize",
|
3020
|
+
method: "post",
|
3021
|
+
...variables,
|
3022
|
+
signal
|
3023
|
+
});
|
3024
|
+
const aggregateTable = (variables, signal) => dataPlaneFetch({
|
3025
|
+
url: "/db/{dbBranchName}/tables/{tableName}/aggregate",
|
3026
|
+
method: "post",
|
3027
|
+
...variables,
|
3028
|
+
signal
|
3029
|
+
});
|
1065
3030
|
const fileAccess = (variables, signal) => dataPlaneFetch({
|
1066
3031
|
url: "/file/{fileId}",
|
1067
3032
|
method: "get",
|
@@ -1080,10 +3045,20 @@ const sqlQuery = (variables, signal) => dataPlaneFetch({
|
|
1080
3045
|
...variables,
|
1081
3046
|
signal
|
1082
3047
|
});
|
3048
|
+
const sqlBatchQuery = (variables, signal) => dataPlaneFetch({
|
3049
|
+
url: "/db/{dbBranchName}/sql/batch",
|
3050
|
+
method: "post",
|
3051
|
+
...variables,
|
3052
|
+
signal
|
3053
|
+
});
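Each of these generated wrappers is deliberately thin: it pins a URL template and HTTP method and forwards everything else to the shared fetcher. A tiny mock of dataPlaneFetch makes the merged request options visible; the `pathParams` key follows the resolveUrl(url, queryParams, pathParams) signature seen earlier in this diff, and the body shape of the new batch endpoint is intentionally left unspecified here:

```js
// Mock fetcher: return the options instead of performing a request.
const dataPlaneFetch = (options) => options;

const sqlBatchQuery = (variables, signal) =>
  dataPlaneFetch({ url: "/db/{dbBranchName}/sql/batch", method: "post", ...variables, signal });

console.log(sqlBatchQuery({ pathParams: { dbBranchName: "mydb:main" } }));
// { url: "/db/{dbBranchName}/sql/batch", method: "post",
//   pathParams: { dbBranchName: "mydb:main" }, signal: undefined }
```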
|
1083
3054
|
const operationsByTag$2 = {
|
1084
3055
|
migrations: {
|
1085
3056
|
applyMigration,
|
3057
|
+
startMigration,
|
3058
|
+
completeMigration,
|
3059
|
+
rollbackMigration,
|
1086
3060
|
adaptTable,
|
3061
|
+
adaptAllTables,
|
1087
3062
|
getBranchMigrationJobStatus,
|
1088
3063
|
getMigrationJobStatus,
|
1089
3064
|
getMigrationHistory,
|
@@ -1146,7 +3121,16 @@ const operationsByTag$2 = {
|
|
1146
3121
|
deleteRecord,
|
1147
3122
|
bulkInsertTableRecords
|
1148
3123
|
},
|
1149
|
-
files: {
|
3124
|
+
files: {
|
3125
|
+
getFileItem,
|
3126
|
+
putFileItem,
|
3127
|
+
deleteFileItem,
|
3128
|
+
getFile,
|
3129
|
+
putFile,
|
3130
|
+
deleteFile,
|
3131
|
+
fileAccess,
|
3132
|
+
fileUpload
|
3133
|
+
},
|
1150
3134
|
searchAndFilter: {
|
1151
3135
|
queryTable,
|
1152
3136
|
searchBranch,
|
@@ -1157,7 +3141,7 @@ const operationsByTag$2 = {
|
|
1157
3141
|
summarizeTable,
|
1158
3142
|
aggregateTable
|
1159
3143
|
},
|
1160
|
-
sql: { sqlQuery }
|
3144
|
+
sql: { sqlQuery, sqlBatchQuery }
|
1161
3145
|
};
|
1162
3146
|
|
1163
3147
|
const controlPlaneFetch = async (options) => fetch$1({ ...options, endpoint: "controlPlane" });
|
@@ -1224,7 +3208,12 @@ const deleteOAuthAccessToken = (variables, signal) => controlPlaneFetch({
|
|
1224
3208
|
...variables,
|
1225
3209
|
signal
|
1226
3210
|
});
|
1227
|
-
const updateOAuthAccessToken = (variables, signal) => controlPlaneFetch({
|
3211
|
+
const updateOAuthAccessToken = (variables, signal) => controlPlaneFetch({
|
3212
|
+
url: "/user/oauth/tokens/{token}",
|
3213
|
+
method: "patch",
|
3214
|
+
...variables,
|
3215
|
+
signal
|
3216
|
+
});
|
1228
3217
|
const getWorkspacesList = (variables, signal) => controlPlaneFetch({
|
1229
3218
|
url: "/workspaces",
|
1230
3219
|
method: "get",
|
@@ -1255,49 +3244,150 @@ const deleteWorkspace = (variables, signal) => controlPlaneFetch({
|
|
1255
3244
|
...variables,
|
1256
3245
|
signal
|
1257
3246
|
});
|
1258
|
-
const getWorkspaceSettings = (variables, signal) => controlPlaneFetch({
|
1259
|
-
|
1260
|
-
|
1261
|
-
|
3247
|
+
const getWorkspaceSettings = (variables, signal) => controlPlaneFetch({
|
3248
|
+
url: "/workspaces/{workspaceId}/settings",
|
3249
|
+
method: "get",
|
3250
|
+
...variables,
|
3251
|
+
signal
|
3252
|
+
});
|
3253
|
+
const updateWorkspaceSettings = (variables, signal) => controlPlaneFetch({
|
3254
|
+
url: "/workspaces/{workspaceId}/settings",
|
3255
|
+
method: "patch",
|
3256
|
+
...variables,
|
3257
|
+
signal
|
3258
|
+
});
|
3259
|
+
const getWorkspaceMembersList = (variables, signal) => controlPlaneFetch({
|
3260
|
+
url: "/workspaces/{workspaceId}/members",
|
3261
|
+
method: "get",
|
3262
|
+
...variables,
|
3263
|
+
signal
|
3264
|
+
});
|
3265
|
+
const updateWorkspaceMemberRole = (variables, signal) => controlPlaneFetch({
|
3266
|
+
url: "/workspaces/{workspaceId}/members/{userId}",
|
3267
|
+
method: "put",
|
3268
|
+
...variables,
|
3269
|
+
signal
|
3270
|
+
});
|
1262
3271
|
const removeWorkspaceMember = (variables, signal) => controlPlaneFetch({
|
1263
3272
|
url: "/workspaces/{workspaceId}/members/{userId}",
|
1264
3273
|
method: "delete",
|
1265
3274
|
...variables,
|
1266
3275
|
signal
|
1267
3276
|
});
|
1268
|
-
const inviteWorkspaceMember = (variables, signal) => controlPlaneFetch({
|
1269
|
-
|
1270
|
-
|
1271
|
-
|
1272
|
-
|
1273
|
-
|
1274
|
-
const
|
3277
|
+
const inviteWorkspaceMember = (variables, signal) => controlPlaneFetch({
|
3278
|
+
url: "/workspaces/{workspaceId}/invites",
|
3279
|
+
method: "post",
|
3280
|
+
...variables,
|
3281
|
+
signal
|
3282
|
+
});
|
3283
|
+
const updateWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3284
|
+
url: "/workspaces/{workspaceId}/invites/{inviteId}",
|
3285
|
+
method: "patch",
|
3286
|
+
...variables,
|
3287
|
+
signal
|
3288
|
+
});
|
3289
|
+
const cancelWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3290
|
+
url: "/workspaces/{workspaceId}/invites/{inviteId}",
|
3291
|
+
method: "delete",
|
3292
|
+
...variables,
|
3293
|
+
signal
|
3294
|
+
});
|
3295
|
+
const acceptWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3296
|
+
url: "/workspaces/{workspaceId}/invites/{inviteKey}/accept",
|
3297
|
+
method: "post",
|
3298
|
+
...variables,
|
3299
|
+
signal
|
3300
|
+
});
|
3301
|
+
const resendWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3302
|
+
url: "/workspaces/{workspaceId}/invites/{inviteId}/resend",
|
3303
|
+
method: "post",
|
3304
|
+
...variables,
|
3305
|
+
signal
|
3306
|
+
});
|
3307
|
+
const listClusters = (variables, signal) => controlPlaneFetch({
|
3308
|
+
url: "/workspaces/{workspaceId}/clusters",
|
3309
|
+
method: "get",
|
3310
|
+
...variables,
|
3311
|
+
signal
|
3312
|
+
});
|
3313
|
+
const createCluster = (variables, signal) => controlPlaneFetch({
|
3314
|
+
url: "/workspaces/{workspaceId}/clusters",
|
3315
|
+
method: "post",
|
3316
|
+
...variables,
|
3317
|
+
signal
|
3318
|
+
});
|
1275
3319
|
const getCluster = (variables, signal) => controlPlaneFetch({
|
1276
3320
|
url: "/workspaces/{workspaceId}/clusters/{clusterId}",
|
1277
3321
|
method: "get",
|
1278
3322
|
...variables,
|
1279
3323
|
signal
|
1280
3324
|
});
|
1281
|
-
const updateCluster = (variables, signal) => controlPlaneFetch({
|
3325
|
+
const updateCluster = (variables, signal) => controlPlaneFetch({
|
3326
|
+
url: "/workspaces/{workspaceId}/clusters/{clusterId}",
|
3327
|
+
method: "patch",
|
3328
|
+
...variables,
|
3329
|
+
signal
|
3330
|
+
});
|
3331
|
+
const deleteCluster = (variables, signal) => controlPlaneFetch({
|
3332
|
+
url: "/workspaces/{workspaceId}/clusters/{clusterId}",
|
3333
|
+
method: "delete",
|
3334
|
+
...variables,
|
3335
|
+
signal
|
3336
|
+
});
|
1282
3337
|
const getDatabaseList = (variables, signal) => controlPlaneFetch({
|
1283
3338
|
url: "/workspaces/{workspaceId}/dbs",
|
1284
3339
|
method: "get",
|
1285
3340
|
...variables,
|
1286
3341
|
signal
|
1287
3342
|
});
|
1288
|
-
const createDatabase = (variables, signal) => controlPlaneFetch({
|
3343
|
+
const createDatabase = (variables, signal) => controlPlaneFetch({
|
3344
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
3345
|
+
method: "put",
|
3346
|
+
...variables,
|
3347
|
+
signal
|
3348
|
+
});
|
1289
3349
|
const deleteDatabase = (variables, signal) => controlPlaneFetch({
|
1290
3350
|
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
1291
3351
|
method: "delete",
|
1292
3352
|
...variables,
|
1293
3353
|
signal
|
1294
3354
|
});
|
1295
|
-
const getDatabaseMetadata = (variables, signal) => controlPlaneFetch({
|
1296
|
-
|
1297
|
-
|
1298
|
-
|
1299
|
-
|
1300
|
-
|
3355
|
+
const getDatabaseMetadata = (variables, signal) => controlPlaneFetch({
|
3356
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
3357
|
+
method: "get",
|
3358
|
+
...variables,
|
3359
|
+
signal
|
3360
|
+
});
|
3361
|
+
const updateDatabaseMetadata = (variables, signal) => controlPlaneFetch({
|
3362
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
3363
|
+
method: "patch",
|
3364
|
+
...variables,
|
3365
|
+
signal
|
3366
|
+
});
|
3367
|
+
const renameDatabase = (variables, signal) => controlPlaneFetch({
|
3368
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/rename",
|
3369
|
+
method: "post",
|
3370
|
+
...variables,
|
3371
|
+
signal
|
3372
|
+
});
|
3373
|
+
const getDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
|
3374
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
|
3375
|
+
method: "get",
|
3376
|
+
...variables,
|
3377
|
+
signal
|
3378
|
+
});
|
3379
|
+
const updateDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
|
3380
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
|
3381
|
+
method: "put",
|
3382
|
+
...variables,
|
3383
|
+
signal
|
3384
|
+
});
|
3385
|
+
const deleteDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
|
3386
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
|
3387
|
+
method: "delete",
|
3388
|
+
...variables,
|
3389
|
+
signal
|
3390
|
+
});
|
1301
3391
|
const listRegions = (variables, signal) => controlPlaneFetch({
|
1302
3392
|
url: "/workspaces/{workspaceId}/regions",
|
1303
3393
|
method: "get",
|
@@ -1335,7 +3425,13 @@ const operationsByTag$1 = {
|
|
1335
3425
|
acceptWorkspaceMemberInvite,
|
1336
3426
|
resendWorkspaceMemberInvite
|
1337
3427
|
},
|
1338
|
-
xbcontrolOther: {
|
3428
|
+
xbcontrolOther: {
|
3429
|
+
listClusters,
|
3430
|
+
createCluster,
|
3431
|
+
getCluster,
|
3432
|
+
updateCluster,
|
3433
|
+
deleteCluster
|
3434
|
+
},
|
1339
3435
|
databases: {
|
1340
3436
|
getDatabaseList,
|
1341
3437
|
createDatabase,
|
@@ -1352,28 +3448,17 @@ const operationsByTag$1 = {
|
|
1352
3448
|
|
1353
3449
|
const operationsByTag = deepMerge(operationsByTag$2, operationsByTag$1);
|
1354
3450
|
|
1355
|
-
var
|
1356
|
-
|
1357
|
-
throw TypeError("Cannot " + msg);
|
1358
|
-
};
|
1359
|
-
var __privateGet$6 = (obj, member, getter) => {
|
1360
|
-
__accessCheck$7(obj, member, "read from private field");
|
1361
|
-
return getter ? getter.call(obj) : member.get(obj);
|
1362
|
-
};
|
1363
|
-
var __privateAdd$7 = (obj, member, value) => {
|
1364
|
-
if (member.has(obj))
|
1365
|
-
throw TypeError("Cannot add the same private member more than once");
|
1366
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
1367
|
-
};
|
1368
|
-
var __privateSet$5 = (obj, member, value, setter) => {
|
1369
|
-
__accessCheck$7(obj, member, "write to private field");
|
1370
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
1371
|
-
return value;
|
3451
|
+
var __typeError$7 = (msg) => {
|
3452
|
+
throw TypeError(msg);
|
1372
3453
|
};
|
3454
|
+
var __accessCheck$7 = (obj, member, msg) => member.has(obj) || __typeError$7("Cannot " + msg);
|
3455
|
+
var __privateGet$6 = (obj, member, getter) => (__accessCheck$7(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
3456
|
+
var __privateAdd$7 = (obj, member, value) => member.has(obj) ? __typeError$7("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
3457
|
+
var __privateSet$5 = (obj, member, value, setter) => (__accessCheck$7(obj, member, "write to private field"), member.set(obj, value), value);
|
1373
3458
|
var _extraProps, _namespaces;
|
1374
3459
|
class XataApiClient {
|
1375
3460
|
constructor(options = {}) {
|
1376
|
-
__privateAdd$7(this, _extraProps
|
3461
|
+
__privateAdd$7(this, _extraProps);
|
1377
3462
|
__privateAdd$7(this, _namespaces, {});
|
1378
3463
|
const provider = options.host ?? "production";
|
1379
3464
|
const apiKey = options.apiKey ?? getAPIKey();
|
@@ -1394,38 +3479,31 @@ class XataApiClient {
|
|
1394
3479
|
});
|
1395
3480
|
}
|
1396
3481
|
get user() {
|
1397
|
-
if (!__privateGet$6(this, _namespaces).user)
|
1398
|
-
__privateGet$6(this, _namespaces).user = new UserApi(__privateGet$6(this, _extraProps));
|
3482
|
+
if (!__privateGet$6(this, _namespaces).user) __privateGet$6(this, _namespaces).user = new UserApi(__privateGet$6(this, _extraProps));
|
1399
3483
|
return __privateGet$6(this, _namespaces).user;
|
1400
3484
|
}
|
1401
3485
|
get authentication() {
|
1402
|
-
if (!__privateGet$6(this, _namespaces).authentication)
|
1403
|
-
__privateGet$6(this, _namespaces).authentication = new AuthenticationApi(__privateGet$6(this, _extraProps));
|
3486
|
+
if (!__privateGet$6(this, _namespaces).authentication) __privateGet$6(this, _namespaces).authentication = new AuthenticationApi(__privateGet$6(this, _extraProps));
|
1404
3487
|
return __privateGet$6(this, _namespaces).authentication;
|
1405
3488
|
}
|
1406
3489
|
get workspaces() {
|
1407
|
-
if (!__privateGet$6(this, _namespaces).workspaces)
|
1408
|
-
__privateGet$6(this, _namespaces).workspaces = new WorkspaceApi(__privateGet$6(this, _extraProps));
|
3490
|
+
if (!__privateGet$6(this, _namespaces).workspaces) __privateGet$6(this, _namespaces).workspaces = new WorkspaceApi(__privateGet$6(this, _extraProps));
|
1409
3491
|
return __privateGet$6(this, _namespaces).workspaces;
|
1410
3492
|
}
|
1411
3493
|
get invites() {
|
1412
|
-
if (!__privateGet$6(this, _namespaces).invites)
|
1413
|
-
__privateGet$6(this, _namespaces).invites = new InvitesApi(__privateGet$6(this, _extraProps));
|
3494
|
+
if (!__privateGet$6(this, _namespaces).invites) __privateGet$6(this, _namespaces).invites = new InvitesApi(__privateGet$6(this, _extraProps));
|
1414
3495
|
return __privateGet$6(this, _namespaces).invites;
|
1415
3496
|
}
|
1416
3497
|
get database() {
|
1417
|
-
if (!__privateGet$6(this, _namespaces).database)
|
1418
|
-
__privateGet$6(this, _namespaces).database = new DatabaseApi(__privateGet$6(this, _extraProps));
|
3498
|
+
if (!__privateGet$6(this, _namespaces).database) __privateGet$6(this, _namespaces).database = new DatabaseApi(__privateGet$6(this, _extraProps));
|
1419
3499
|
return __privateGet$6(this, _namespaces).database;
|
1420
3500
|
}
|
1421
3501
|
get branches() {
|
1422
|
-
if (!__privateGet$6(this, _namespaces).branches)
|
1423
|
-
__privateGet$6(this, _namespaces).branches = new BranchApi(__privateGet$6(this, _extraProps));
|
3502
|
+
if (!__privateGet$6(this, _namespaces).branches) __privateGet$6(this, _namespaces).branches = new BranchApi(__privateGet$6(this, _extraProps));
|
1424
3503
|
return __privateGet$6(this, _namespaces).branches;
|
1425
3504
|
}
|
1426
3505
|
get migrations() {
|
1427
|
-
if (!__privateGet$6(this, _namespaces).migrations)
|
1428
|
-
__privateGet$6(this, _namespaces).migrations = new MigrationsApi(__privateGet$6(this, _extraProps));
|
3506
|
+
if (!__privateGet$6(this, _namespaces).migrations) __privateGet$6(this, _namespaces).migrations = new MigrationsApi(__privateGet$6(this, _extraProps));
|
1429
3507
|
return __privateGet$6(this, _namespaces).migrations;
|
1430
3508
|
}
|
1431
3509
|
get migrationRequests() {
|
@@ -1434,23 +3512,19 @@ class XataApiClient {
|
|
1434
3512
|
return __privateGet$6(this, _namespaces).migrationRequests;
|
1435
3513
|
}
|
1436
3514
|
get tables() {
|
1437
|
-
if (!__privateGet$6(this, _namespaces).tables)
|
1438
|
-
__privateGet$6(this, _namespaces).tables = new TableApi(__privateGet$6(this, _extraProps));
|
3515
|
+
if (!__privateGet$6(this, _namespaces).tables) __privateGet$6(this, _namespaces).tables = new TableApi(__privateGet$6(this, _extraProps));
|
1439
3516
|
return __privateGet$6(this, _namespaces).tables;
|
1440
3517
|
}
|
1441
3518
|
get records() {
|
1442
|
-
if (!__privateGet$6(this, _namespaces).records)
|
1443
|
-
__privateGet$6(this, _namespaces).records = new RecordsApi(__privateGet$6(this, _extraProps));
|
3519
|
+
if (!__privateGet$6(this, _namespaces).records) __privateGet$6(this, _namespaces).records = new RecordsApi(__privateGet$6(this, _extraProps));
|
1444
3520
|
return __privateGet$6(this, _namespaces).records;
|
1445
3521
|
}
|
1446
3522
|
get files() {
|
1447
|
-
if (!__privateGet$6(this, _namespaces).files)
|
1448
|
-
__privateGet$6(this, _namespaces).files = new FilesApi(__privateGet$6(this, _extraProps));
|
3523
|
+
if (!__privateGet$6(this, _namespaces).files) __privateGet$6(this, _namespaces).files = new FilesApi(__privateGet$6(this, _extraProps));
|
1449
3524
|
return __privateGet$6(this, _namespaces).files;
|
1450
3525
|
}
|
1451
3526
|
get searchAndFilter() {
|
1452
|
-
if (!__privateGet$6(this, _namespaces).searchAndFilter)
|
1453
|
-
__privateGet$6(this, _namespaces).searchAndFilter = new SearchAndFilterApi(__privateGet$6(this, _extraProps));
|
3527
|
+
if (!__privateGet$6(this, _namespaces).searchAndFilter) __privateGet$6(this, _namespaces).searchAndFilter = new SearchAndFilterApi(__privateGet$6(this, _extraProps));
|
1454
3528
|
return __privateGet$6(this, _namespaces).searchAndFilter;
|
1455
3529
|
}
|
1456
3530
|
}
|
@@ -2727,8 +4801,7 @@ function buildTransformString(transformations) {
|
|
2727
4801
|
).join(",");
|
2728
4802
|
}
|
2729
4803
|
function transformImage(url, ...transformations) {
|
2730
|
-
if (!isDefined(url))
|
2731
|
-
return void 0;
|
4804
|
+
if (!isDefined(url)) return void 0;
|
2732
4805
|
const newTransformations = buildTransformString(transformations);
|
2733
4806
|
const { hostname, pathname, search } = new URL(url);
|
2734
4807
|
const pathParts = pathname.split("/");
|
@@ -2841,8 +4914,7 @@ class XataFile {
|
|
2841
4914
|
}
|
2842
4915
|
}
|
2843
4916
|
const parseInputFileEntry = async (entry) => {
|
2844
|
-
if (!isDefined(entry))
|
2845
|
-
return null;
|
4917
|
+
if (!isDefined(entry)) return null;
|
2846
4918
|
const { id, name, mediaType, base64Content, enablePublicUrl, signedUrlTimeout, uploadUrlTimeout } = await entry;
|
2847
4919
|
return compactObject({
|
2848
4920
|
id,
|
@@ -2857,24 +4929,19 @@ const parseInputFileEntry = async (entry) => {
  };
  
  function cleanFilter(filter) {
- if (!isDefined(filter))
-
- if (!isObject(filter))
- return filter;
+ if (!isDefined(filter)) return void 0;
+ if (!isObject(filter)) return filter;
  const values = Object.fromEntries(
  Object.entries(filter).reduce((acc, [key, value]) => {
- if (!isDefined(value))
- return acc;
+ if (!isDefined(value)) return acc;
  if (Array.isArray(value)) {
  const clean = value.map((item) => cleanFilter(item)).filter((item) => isDefined(item));
- if (clean.length === 0)
- return acc;
+ if (clean.length === 0) return acc;
  return [...acc, [key, clean]];
  }
  if (isObject(value)) {
  const clean = cleanFilter(value);
- if (!isDefined(clean))
- return acc;
+ if (!isDefined(clean)) return acc;
  return [...acc, [key, clean]];
  }
  return [...acc, [key, value]];
@@ -2884,10 +4951,8 @@ function cleanFilter(filter) {
  }
  
  function stringifyJson(value) {
- if (!isDefined(value))
-
- if (isString(value))
- return value;
+ if (!isDefined(value)) return value;
+ if (isString(value)) return value;
  try {
  return JSON.stringify(value);
  } catch (e) {
@@ -2902,28 +4967,17 @@ function parseJson(value) {
  }
  }
  
- var
-
- throw TypeError("Cannot " + msg);
- };
- var __privateGet$5 = (obj, member, getter) => {
- __accessCheck$6(obj, member, "read from private field");
- return getter ? getter.call(obj) : member.get(obj);
- };
- var __privateAdd$6 = (obj, member, value) => {
- if (member.has(obj))
- throw TypeError("Cannot add the same private member more than once");
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
- };
- var __privateSet$4 = (obj, member, value, setter) => {
- __accessCheck$6(obj, member, "write to private field");
- setter ? setter.call(obj, value) : member.set(obj, value);
- return value;
+ var __typeError$6 = (msg) => {
+ throw TypeError(msg);
  };
+ var __accessCheck$6 = (obj, member, msg) => member.has(obj) || __typeError$6("Cannot " + msg);
+ var __privateGet$5 = (obj, member, getter) => (__accessCheck$6(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
+ var __privateAdd$6 = (obj, member, value) => member.has(obj) ? __typeError$6("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
+ var __privateSet$4 = (obj, member, value, setter) => (__accessCheck$6(obj, member, "write to private field"), member.set(obj, value), value);
  var _query, _page;
  class Page {
  constructor(query, meta, records = []) {
- __privateAdd$6(this, _query
+ __privateAdd$6(this, _query);
  __privateSet$4(this, _query, query);
  this.meta = meta;
  this.records = new PageRecordArray(this, records);
@@ -3010,7 +5064,7 @@ class RecordArray extends Array {
  const _PageRecordArray = class _PageRecordArray extends Array {
  constructor(...args) {
  super(..._PageRecordArray.parseConstructorParams(...args));
- __privateAdd$6(this, _page
+ __privateAdd$6(this, _page);
  __privateSet$4(this, _page, isObject(args[0]?.meta) ? args[0] : { meta: { page: { cursor: "", more: false } }, records: [] });
  }
  static parseConstructorParams(...args) {
@@ -3081,34 +5135,20 @@ const _PageRecordArray = class _PageRecordArray extends Array {
  _page = new WeakMap();
  let PageRecordArray = _PageRecordArray;
  
- var
-
- throw TypeError("Cannot " + msg);
- };
- var __privateGet$4 = (obj, member, getter) => {
- __accessCheck$5(obj, member, "read from private field");
- return getter ? getter.call(obj) : member.get(obj);
+ var __typeError$5 = (msg) => {
+ throw TypeError(msg);
  };
- var
-
-
-
-
- var
- __accessCheck$5(obj, member, "write to private field");
- setter ? setter.call(obj, value) : member.set(obj, value);
- return value;
- };
- var __privateMethod$3 = (obj, member, method) => {
- __accessCheck$5(obj, member, "access private method");
- return method;
- };
- var _table$1, _repository, _data, _cleanFilterConstraint, cleanFilterConstraint_fn;
+ var __accessCheck$5 = (obj, member, msg) => member.has(obj) || __typeError$5("Cannot " + msg);
+ var __privateGet$4 = (obj, member, getter) => (__accessCheck$5(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
+ var __privateAdd$5 = (obj, member, value) => member.has(obj) ? __typeError$5("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
+ var __privateSet$3 = (obj, member, value, setter) => (__accessCheck$5(obj, member, "write to private field"), member.set(obj, value), value);
+ var __privateMethod$3 = (obj, member, method) => (__accessCheck$5(obj, member, "access private method"), method);
+ var _table$1, _repository, _data, _Query_instances, cleanFilterConstraint_fn;
  const _Query = class _Query {
  constructor(repository, table, data, rawParent) {
- __privateAdd$5(this,
- __privateAdd$5(this, _table$1
- __privateAdd$5(this, _repository
+ __privateAdd$5(this, _Query_instances);
+ __privateAdd$5(this, _table$1);
+ __privateAdd$5(this, _repository);
  __privateAdd$5(this, _data, { filter: {} });
  // Implements pagination
  this.meta = { page: { cursor: "start", more: true, size: PAGINATION_DEFAULT_SIZE } };
@@ -3187,12 +5227,12 @@ const _Query = class _Query {
  filter(a, b) {
  if (arguments.length === 1) {
  const constraints = Object.entries(a ?? {}).map(([column, constraint]) => ({
- [column]: __privateMethod$3(this,
+ [column]: __privateMethod$3(this, _Query_instances, cleanFilterConstraint_fn).call(this, column, constraint)
  }));
  const $all = compact([__privateGet$4(this, _data).filter?.$all].flat().concat(constraints));
  return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { filter: { $all } }, __privateGet$4(this, _data));
  } else {
- const constraints = isDefined(a) && isDefined(b) ? [{ [a]: __privateMethod$3(this,
+ const constraints = isDefined(a) && isDefined(b) ? [{ [a]: __privateMethod$3(this, _Query_instances, cleanFilterConstraint_fn).call(this, a, b) }] : void 0;
  const $all = compact([__privateGet$4(this, _data).filter?.$all].flat().concat(constraints));
  return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { filter: { $all } }, __privateGet$4(this, _data));
  }
@@ -3271,8 +5311,7 @@ const _Query = class _Query {
  }
  async getFirstOrThrow(options = {}) {
  const records = await this.getMany({ ...options, pagination: { size: 1 } });
- if (records[0] === void 0)
- throw new Error("No results found.");
+ if (records[0] === void 0) throw new Error("No results found.");
  return records[0];
  }
  async summarize(params = {}) {
@@ -3335,7 +5374,7 @@ const _Query = class _Query {
  _table$1 = new WeakMap();
  _repository = new WeakMap();
  _data = new WeakMap();
-
+ _Query_instances = new WeakSet();
  cleanFilterConstraint_fn = function(column, value) {
  const columnType = __privateGet$4(this, _table$1).schema?.columns.find(({ name }) => name === column)?.type;
  if (columnType === "multiple" && (isString(value) || isStringArray(value))) {
@@ -3401,8 +5440,7 @@ function isSortFilterString(value) {
  }
  function isSortFilterBase(filter) {
  return isObject(filter) && Object.entries(filter).every(([key, value]) => {
- if (key === "*")
- return value === "random";
+ if (key === "*") return value === "random";
  return value === "asc" || value === "desc";
  });
  }
@@ -3423,29 +5461,15 @@ function buildSortFilter(filter) {
  }
  }
  
- var
-
- throw TypeError("Cannot " + msg);
- };
- var __privateGet$3 = (obj, member, getter) => {
- __accessCheck$4(obj, member, "read from private field");
- return getter ? getter.call(obj) : member.get(obj);
- };
- var __privateAdd$4 = (obj, member, value) => {
- if (member.has(obj))
- throw TypeError("Cannot add the same private member more than once");
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
- };
- var __privateSet$2 = (obj, member, value, setter) => {
- __accessCheck$4(obj, member, "write to private field");
- setter ? setter.call(obj, value) : member.set(obj, value);
- return value;
+ var __typeError$4 = (msg) => {
+ throw TypeError(msg);
  };
- var
-
-
-
- var
+ var __accessCheck$4 = (obj, member, msg) => member.has(obj) || __typeError$4("Cannot " + msg);
+ var __privateGet$3 = (obj, member, getter) => (__accessCheck$4(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
+ var __privateAdd$4 = (obj, member, value) => member.has(obj) ? __typeError$4("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
+ var __privateSet$2 = (obj, member, value, setter) => (__accessCheck$4(obj, member, "write to private field"), member.set(obj, value), value);
+ var __privateMethod$2 = (obj, member, method) => (__accessCheck$4(obj, member, "access private method"), method);
+ var _table, _getFetchProps, _db, _cache, _schemaTables, _trace, _RestRepository_instances, insertRecordWithoutId_fn, insertRecordWithId_fn, insertRecords_fn, updateRecordWithID_fn, updateRecords_fn, upsertRecordWithID_fn, deleteRecord_fn, deleteRecords_fn, setCacheQuery_fn, getCacheQuery_fn, getSchemaTables_fn, transformObjectToApi_fn;
  const BULK_OPERATION_MAX_SIZE = 1e3;
  class Repository extends Query {
  }
@@ -3456,24 +5480,13 @@ class RestRepository extends Query {
  { name: options.table, schema: options.schemaTables?.find((table) => table.name === options.table) },
  {}
  );
- __privateAdd$4(this,
- __privateAdd$4(this,
- __privateAdd$4(this,
- __privateAdd$4(this,
- __privateAdd$4(this,
- __privateAdd$4(this,
- __privateAdd$4(this,
- __privateAdd$4(this, _deleteRecords);
- __privateAdd$4(this, _setCacheQuery);
- __privateAdd$4(this, _getCacheQuery);
- __privateAdd$4(this, _getSchemaTables);
- __privateAdd$4(this, _transformObjectToApi);
- __privateAdd$4(this, _table, void 0);
- __privateAdd$4(this, _getFetchProps, void 0);
- __privateAdd$4(this, _db, void 0);
- __privateAdd$4(this, _cache, void 0);
- __privateAdd$4(this, _schemaTables, void 0);
- __privateAdd$4(this, _trace, void 0);
+ __privateAdd$4(this, _RestRepository_instances);
+ __privateAdd$4(this, _table);
+ __privateAdd$4(this, _getFetchProps);
+ __privateAdd$4(this, _db);
+ __privateAdd$4(this, _cache);
+ __privateAdd$4(this, _schemaTables);
+ __privateAdd$4(this, _trace);
  __privateSet$2(this, _table, options.table);
  __privateSet$2(this, _db, options.db);
  __privateSet$2(this, _cache, options.pluginOptions.cache);
@@ -3493,28 +5506,25 @@ class RestRepository extends Query {
  return __privateGet$3(this, _trace).call(this, "create", async () => {
  const ifVersion = parseIfVersion(b, c, d);
  if (Array.isArray(a)) {
- if (a.length === 0)
-
- const ids = await __privateMethod$2(this, _insertRecords, insertRecords_fn).call(this, a, { ifVersion, createOnly: true });
+ if (a.length === 0) return [];
+ const ids = await __privateMethod$2(this, _RestRepository_instances, insertRecords_fn).call(this, a, { ifVersion, createOnly: true });
  const columns = isValidSelectableColumns(b) ? b : ["*"];
  const result = await this.read(ids, columns);
  return result;
  }
  if (isString(a) && isObject(b)) {
- if (a === "")
- throw new Error("The id can't be empty");
+ if (a === "") throw new Error("The id can't be empty");
  const columns = isValidSelectableColumns(c) ? c : void 0;
- return await __privateMethod$2(this,
+ return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: true, ifVersion });
  }
  if (isObject(a) && isString(a.id)) {
- if (a.id === "")
- throw new Error("The id can't be empty");
+ if (a.id === "") throw new Error("The id can't be empty");
  const columns = isValidSelectableColumns(b) ? b : void 0;
- return await __privateMethod$2(this,
+ return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a.id, { ...a, id: void 0 }, columns, { createOnly: true, ifVersion });
  }
  if (isObject(a)) {
  const columns = isValidSelectableColumns(b) ? b : void 0;
- return __privateMethod$2(this,
+ return __privateMethod$2(this, _RestRepository_instances, insertRecordWithoutId_fn).call(this, a, columns);
  }
  throw new Error("Invalid arguments for create method");
  });
@@ -3523,8 +5533,7 @@ class RestRepository extends Query {
  return __privateGet$3(this, _trace).call(this, "read", async () => {
  const columns = isValidSelectableColumns(b) ? b : ["*"];
  if (Array.isArray(a)) {
- if (a.length === 0)
- return [];
+ if (a.length === 0) return [];
  const ids = a.map((item) => extractId(item));
  const finalObjects = await this.getAll({ filter: { id: { $any: compact(ids) } }, columns });
  const dictionary = finalObjects.reduce((acc, object) => {
|
|
3547
5556
|
queryParams: { columns },
|
3548
5557
|
...__privateGet$3(this, _getFetchProps).call(this)
|
3549
5558
|
});
|
3550
|
-
const schemaTables = await __privateMethod$2(this,
|
5559
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
3551
5560
|
return initObject(
|
3552
5561
|
__privateGet$3(this, _db),
|
3553
5562
|
schemaTables,
|
@@ -3588,11 +5597,10 @@ class RestRepository extends Query {
|
|
3588
5597
|
return __privateGet$3(this, _trace).call(this, "update", async () => {
|
3589
5598
|
const ifVersion = parseIfVersion(b, c, d);
|
3590
5599
|
if (Array.isArray(a)) {
|
3591
|
-
if (a.length === 0)
|
3592
|
-
return [];
|
5600
|
+
if (a.length === 0) return [];
|
3593
5601
|
const existing = await this.read(a, ["id"]);
|
3594
5602
|
const updates = a.filter((_item, index) => existing[index] !== null);
|
3595
|
-
await __privateMethod$2(this,
|
5603
|
+
await __privateMethod$2(this, _RestRepository_instances, updateRecords_fn).call(this, updates, {
|
3596
5604
|
ifVersion,
|
3597
5605
|
upsert: false
|
3598
5606
|
});
|
@@ -3603,15 +5611,14 @@ class RestRepository extends Query {
  try {
  if (isString(a) && isObject(b)) {
  const columns = isValidSelectableColumns(c) ? c : void 0;
- return await __privateMethod$2(this,
+ return await __privateMethod$2(this, _RestRepository_instances, updateRecordWithID_fn).call(this, a, b, columns, { ifVersion });
  }
  if (isObject(a) && isString(a.id)) {
  const columns = isValidSelectableColumns(b) ? b : void 0;
- return await __privateMethod$2(this,
+ return await __privateMethod$2(this, _RestRepository_instances, updateRecordWithID_fn).call(this, a.id, { ...a, id: void 0 }, columns, { ifVersion });
  }
  } catch (error) {
- if (error.status === 422)
- return null;
+ if (error.status === 422) return null;
  throw error;
  }
  throw new Error("Invalid arguments for update method");
@@ -3640,9 +5647,8 @@ class RestRepository extends Query {
  return __privateGet$3(this, _trace).call(this, "createOrUpdate", async () => {
  const ifVersion = parseIfVersion(b, c, d);
  if (Array.isArray(a)) {
- if (a.length === 0)
-
- await __privateMethod$2(this, _updateRecords, updateRecords_fn).call(this, a, {
+ if (a.length === 0) return [];
+ await __privateMethod$2(this, _RestRepository_instances, updateRecords_fn).call(this, a, {
  ifVersion,
  upsert: true
  });
@@ -3651,16 +5657,14 @@ class RestRepository extends Query {
  return result;
  }
  if (isString(a) && isObject(b)) {
- if (a === "")
- throw new Error("The id can't be empty");
+ if (a === "") throw new Error("The id can't be empty");
  const columns = isValidSelectableColumns(c) ? c : void 0;
- return await __privateMethod$2(this,
+ return await __privateMethod$2(this, _RestRepository_instances, upsertRecordWithID_fn).call(this, a, b, columns, { ifVersion });
  }
  if (isObject(a) && isString(a.id)) {
- if (a.id === "")
- throw new Error("The id can't be empty");
+ if (a.id === "") throw new Error("The id can't be empty");
  const columns = isValidSelectableColumns(c) ? c : void 0;
- return await __privateMethod$2(this,
+ return await __privateMethod$2(this, _RestRepository_instances, upsertRecordWithID_fn).call(this, a.id, { ...a, id: void 0 }, columns, { ifVersion });
  }
  if (!isDefined(a) && isObject(b)) {
  return await this.create(b, c);
|
|
3675
5679
|
return __privateGet$3(this, _trace).call(this, "createOrReplace", async () => {
|
3676
5680
|
const ifVersion = parseIfVersion(b, c, d);
|
3677
5681
|
if (Array.isArray(a)) {
|
3678
|
-
if (a.length === 0)
|
3679
|
-
|
3680
|
-
const ids = await __privateMethod$2(this, _insertRecords, insertRecords_fn).call(this, a, { ifVersion, createOnly: false });
|
5682
|
+
if (a.length === 0) return [];
|
5683
|
+
const ids = await __privateMethod$2(this, _RestRepository_instances, insertRecords_fn).call(this, a, { ifVersion, createOnly: false });
|
3681
5684
|
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
3682
5685
|
const result = await this.read(ids, columns);
|
3683
5686
|
return result;
|
3684
5687
|
}
|
3685
5688
|
if (isString(a) && isObject(b)) {
|
3686
|
-
if (a === "")
|
3687
|
-
throw new Error("The id can't be empty");
|
5689
|
+
if (a === "") throw new Error("The id can't be empty");
|
3688
5690
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
3689
|
-
return await __privateMethod$2(this,
|
5691
|
+
return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: false, ifVersion });
|
3690
5692
|
}
|
3691
5693
|
if (isObject(a) && isString(a.id)) {
|
3692
|
-
if (a.id === "")
|
3693
|
-
throw new Error("The id can't be empty");
|
5694
|
+
if (a.id === "") throw new Error("The id can't be empty");
|
3694
5695
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
3695
|
-
return await __privateMethod$2(this,
|
5696
|
+
return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a.id, { ...a, id: void 0 }, columns, { createOnly: false, ifVersion });
|
3696
5697
|
}
|
3697
5698
|
if (!isDefined(a) && isObject(b)) {
|
3698
5699
|
return await this.create(b, c);
|
@@ -3706,25 +5707,22 @@ class RestRepository extends Query {
|
|
3706
5707
|
async delete(a, b) {
|
3707
5708
|
return __privateGet$3(this, _trace).call(this, "delete", async () => {
|
3708
5709
|
if (Array.isArray(a)) {
|
3709
|
-
if (a.length === 0)
|
3710
|
-
return [];
|
5710
|
+
if (a.length === 0) return [];
|
3711
5711
|
const ids = a.map((o) => {
|
3712
|
-
if (isString(o))
|
3713
|
-
|
3714
|
-
if (isString(o.id))
|
3715
|
-
return o.id;
|
5712
|
+
if (isString(o)) return o;
|
5713
|
+
if (isString(o.id)) return o.id;
|
3716
5714
|
throw new Error("Invalid arguments for delete method");
|
3717
5715
|
});
|
3718
5716
|
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
3719
5717
|
const result = await this.read(a, columns);
|
3720
|
-
await __privateMethod$2(this,
|
5718
|
+
await __privateMethod$2(this, _RestRepository_instances, deleteRecords_fn).call(this, ids);
|
3721
5719
|
return result;
|
3722
5720
|
}
|
3723
5721
|
if (isString(a)) {
|
3724
|
-
return __privateMethod$2(this,
|
5722
|
+
return __privateMethod$2(this, _RestRepository_instances, deleteRecord_fn).call(this, a, b);
|
3725
5723
|
}
|
3726
5724
|
if (isObject(a) && isString(a.id)) {
|
3727
|
-
return __privateMethod$2(this,
|
5725
|
+
return __privateMethod$2(this, _RestRepository_instances, deleteRecord_fn).call(this, a.id, b);
|
3728
5726
|
}
|
3729
5727
|
throw new Error("Invalid arguments for delete method");
|
3730
5728
|
});
|
@@ -3768,7 +5766,7 @@ class RestRepository extends Query {
|
|
3768
5766
|
},
|
3769
5767
|
...__privateGet$3(this, _getFetchProps).call(this)
|
3770
5768
|
});
|
3771
|
-
const schemaTables = await __privateMethod$2(this,
|
5769
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
3772
5770
|
return {
|
3773
5771
|
records: records.map((item) => initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), item, ["*"])),
|
3774
5772
|
totalCount
|
@@ -3793,7 +5791,7 @@ class RestRepository extends Query {
|
|
3793
5791
|
},
|
3794
5792
|
...__privateGet$3(this, _getFetchProps).call(this)
|
3795
5793
|
});
|
3796
|
-
const schemaTables = await __privateMethod$2(this,
|
5794
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
3797
5795
|
return {
|
3798
5796
|
records: records.map((item) => initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), item, ["*"])),
|
3799
5797
|
totalCount
|
@@ -3817,9 +5815,8 @@ class RestRepository extends Query {
|
|
3817
5815
|
}
|
3818
5816
|
async query(query) {
|
3819
5817
|
return __privateGet$3(this, _trace).call(this, "query", async () => {
|
3820
|
-
const cacheQuery = await __privateMethod$2(this,
|
3821
|
-
if (cacheQuery)
|
3822
|
-
return new Page(query, cacheQuery.meta, cacheQuery.records);
|
5818
|
+
const cacheQuery = await __privateMethod$2(this, _RestRepository_instances, getCacheQuery_fn).call(this, query);
|
5819
|
+
if (cacheQuery) return new Page(query, cacheQuery.meta, cacheQuery.records);
|
3823
5820
|
const data = query.getQueryOptions();
|
3824
5821
|
const { meta, records: objects } = await queryTable({
|
3825
5822
|
pathParams: {
|
@@ -3838,7 +5835,7 @@ class RestRepository extends Query {
  fetchOptions: data.fetchOptions,
  ...__privateGet$3(this, _getFetchProps).call(this)
  });
- const schemaTables = await __privateMethod$2(this,
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
  const records = objects.map(
  (record) => initObject(
  __privateGet$3(this, _db),
@@ -3848,7 +5845,7 @@ class RestRepository extends Query {
  data.columns ?? ["*"]
  )
  );
- await __privateMethod$2(this,
+ await __privateMethod$2(this, _RestRepository_instances, setCacheQuery_fn).call(this, query, meta, records);
  return new Page(query, meta, records);
  });
  }
@@ -3873,7 +5870,7 @@ class RestRepository extends Query {
  },
  ...__privateGet$3(this, _getFetchProps).call(this)
  });
- const schemaTables = await __privateMethod$2(this,
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
  return {
  ...result,
  summaries: result.summaries.map(
@@ -3922,9 +5919,9 @@ _db = new WeakMap();
  _cache = new WeakMap();
  _schemaTables = new WeakMap();
  _trace = new WeakMap();
-
+ _RestRepository_instances = new WeakSet();
  insertRecordWithoutId_fn = async function(object, columns = ["*"]) {
- const record = await __privateMethod$2(this,
+ const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
  const response = await insertRecord({
  pathParams: {
  workspace: "{workspaceId}",
@@ -3936,14 +5933,12 @@ insertRecordWithoutId_fn = async function(object, columns = ["*"]) {
  body: record,
  ...__privateGet$3(this, _getFetchProps).call(this)
  });
- const schemaTables = await __privateMethod$2(this,
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
  return initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), response, columns);
  };
- _insertRecordWithId = new WeakSet();
  insertRecordWithId_fn = async function(recordId, object, columns = ["*"], { createOnly, ifVersion }) {
- if (!recordId)
-
- const record = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
+ if (!recordId) return null;
+ const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
  const response = await insertRecordWithID({
  pathParams: {
  workspace: "{workspaceId}",
@@ -3956,13 +5951,12 @@ insertRecordWithId_fn = async function(recordId, object, columns = ["*"], { crea
  queryParams: { createOnly, columns, ifVersion },
  ...__privateGet$3(this, _getFetchProps).call(this)
  });
- const schemaTables = await __privateMethod$2(this,
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
  return initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), response, columns);
  };
- _insertRecords = new WeakSet();
  insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
  const operations = await promiseMap(objects, async (object) => {
- const record = await __privateMethod$2(this,
+ const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
  return { insert: { table: __privateGet$3(this, _table), record, createOnly, ifVersion } };
  });
  const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
@@ -3987,11 +5981,9 @@ insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
  }
  return ids;
  };
- _updateRecordWithID = new WeakSet();
  updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVersion }) {
- if (!recordId)
-
- const { id: _id, ...record } = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
+ if (!recordId) return null;
+ const { id: _id, ...record } = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
  try {
  const response = await updateRecordWithID({
  pathParams: {
@@ -4005,7 +5997,7 @@ updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
  body: record,
  ...__privateGet$3(this, _getFetchProps).call(this)
  });
- const schemaTables = await __privateMethod$2(this,
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
  return initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), response, columns);
  } catch (e) {
  if (isObject(e) && e.status === 404) {
@@ -4014,10 +6006,9 @@ updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
  throw e;
  }
  };
- _updateRecords = new WeakSet();
  updateRecords_fn = async function(objects, { ifVersion, upsert }) {
  const operations = await promiseMap(objects, async ({ id, ...object }) => {
- const fields = await __privateMethod$2(this,
+ const fields = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
  return { update: { table: __privateGet$3(this, _table), id, ifVersion, upsert, fields } };
  });
  const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
|
@@ -4042,10 +6033,8 @@ updateRecords_fn = async function(objects, { ifVersion, upsert }) {
|
|
4042
6033
|
}
|
4043
6034
|
return ids;
|
4044
6035
|
};
|
4045
|
-
_upsertRecordWithID = new WeakSet();
|
4046
6036
|
upsertRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVersion }) {
|
4047
|
-
if (!recordId)
|
4048
|
-
return null;
|
6037
|
+
if (!recordId) return null;
|
4049
6038
|
const response = await upsertRecordWithID({
|
4050
6039
|
pathParams: {
|
4051
6040
|
workspace: "{workspaceId}",
|
@@ -4058,13 +6047,11 @@ upsertRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
|
|
4058
6047
|
body: object,
|
4059
6048
|
...__privateGet$3(this, _getFetchProps).call(this)
|
4060
6049
|
});
|
4061
|
-
const schemaTables = await __privateMethod$2(this,
|
6050
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
4062
6051
|
return initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), response, columns);
|
4063
6052
|
};
|
4064
|
-
_deleteRecord = new WeakSet();
|
4065
6053
|
deleteRecord_fn = async function(recordId, columns = ["*"]) {
|
4066
|
-
if (!recordId)
|
4067
|
-
return null;
|
6054
|
+
if (!recordId) return null;
|
4068
6055
|
try {
|
4069
6056
|
const response = await deleteRecord({
|
4070
6057
|
pathParams: {
|
@@ -4077,7 +6064,7 @@ deleteRecord_fn = async function(recordId, columns = ["*"]) {
|
|
4077
6064
|
queryParams: { columns },
|
4078
6065
|
...__privateGet$3(this, _getFetchProps).call(this)
|
4079
6066
|
});
|
4080
|
-
const schemaTables = await __privateMethod$2(this,
|
6067
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
4081
6068
|
return initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), response, columns);
|
4082
6069
|
} catch (e) {
|
4083
6070
|
if (isObject(e) && e.status === 404) {
|
@@ -4086,7 +6073,6 @@ deleteRecord_fn = async function(recordId, columns = ["*"]) {
|
|
4086
6073
|
throw e;
|
4087
6074
|
}
|
4088
6075
|
};
|
4089
|
-
_deleteRecords = new WeakSet();
|
4090
6076
|
deleteRecords_fn = async function(recordIds) {
|
4091
6077
|
const chunkedOperations = chunk(
|
4092
6078
|
compact(recordIds).map((id) => ({ delete: { table: __privateGet$3(this, _table), id } })),
|
@@ -4104,27 +6090,21 @@ deleteRecords_fn = async function(recordIds) {
  });
  }
  };
- _setCacheQuery = new WeakSet();
  setCacheQuery_fn = async function(query, meta, records) {
  await __privateGet$3(this, _cache)?.set(`query_${__privateGet$3(this, _table)}:${query.key()}`, { date: /* @__PURE__ */ new Date(), meta, records });
  };
- _getCacheQuery = new WeakSet();
  getCacheQuery_fn = async function(query) {
  const key = `query_${__privateGet$3(this, _table)}:${query.key()}`;
  const result = await __privateGet$3(this, _cache)?.get(key);
- if (!result)
- return null;
+ if (!result) return null;
  const defaultTTL = __privateGet$3(this, _cache)?.defaultQueryTTL ?? -1;
  const { cache: ttl = defaultTTL } = query.getQueryOptions();
- if (ttl < 0)
- return null;
+ if (ttl < 0) return null;
  const hasExpired = result.date.getTime() + ttl < Date.now();
  return hasExpired ? null : result;
  };
- _getSchemaTables = new WeakSet();
  getSchemaTables_fn = async function() {
- if (__privateGet$3(this, _schemaTables))
- return __privateGet$3(this, _schemaTables);
+ if (__privateGet$3(this, _schemaTables)) return __privateGet$3(this, _schemaTables);
  const { schema } = await getBranchDetails({
  pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
  ...__privateGet$3(this, _getFetchProps).call(this)
@@ -4132,16 +6112,13 @@ getSchemaTables_fn = async function() {
  __privateSet$2(this, _schemaTables, schema.tables);
  return schema.tables;
  };
- _transformObjectToApi = new WeakSet();
  transformObjectToApi_fn = async function(object) {
- const schemaTables = await __privateMethod$2(this,
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
  const schema = schemaTables.find((table) => table.name === __privateGet$3(this, _table));
- if (!schema)
- throw new Error(`Table ${__privateGet$3(this, _table)} not found in schema`);
+ if (!schema) throw new Error(`Table ${__privateGet$3(this, _table)} not found in schema`);
  const result = {};
  for (const [key, value] of Object.entries(object)) {
- if (key === "xata")
- continue;
+ if (key === "xata") continue;
  const type = schema.columns.find((column) => column.name === key)?.type;
  switch (type) {
  case "link": {
|
|
4172
6149
|
const { xata, ...rest } = object ?? {};
|
4173
6150
|
Object.assign(data, rest);
|
4174
6151
|
const { columns } = schemaTables.find(({ name }) => name === table) ?? {};
|
4175
|
-
if (!columns)
|
4176
|
-
console.error(`Table ${table} not found in schema`);
|
6152
|
+
if (!columns) console.error(`Table ${table} not found in schema`);
|
4177
6153
|
for (const column of columns ?? []) {
|
4178
|
-
if (!isValidColumn(selectedColumns, column))
|
4179
|
-
continue;
|
6154
|
+
if (!isValidColumn(selectedColumns, column)) continue;
|
4180
6155
|
const value = data[column.name];
|
4181
6156
|
switch (column.type) {
|
4182
6157
|
case "datetime": {
|
@@ -4269,15 +6244,12 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
  return record;
  };
  function extractId(value) {
- if (isString(value))
-
- if (isObject(value) && isString(value.id))
- return value.id;
+ if (isString(value)) return value;
+ if (isObject(value) && isString(value.id)) return value.id;
  return void 0;
  }
  function isValidColumn(columns, column) {
- if (columns.includes("*"))
- return true;
+ if (columns.includes("*")) return true;
  return columns.filter((item) => isString(item) && item.startsWith(column.name)).length > 0;
  }
  function parseIfVersion(...args) {
@@ -4289,28 +6261,17 @@ function parseIfVersion(...args) {
  return void 0;
  }
  
- var
-
- throw TypeError("Cannot " + msg);
- };
- var __privateGet$2 = (obj, member, getter) => {
- __accessCheck$3(obj, member, "read from private field");
- return getter ? getter.call(obj) : member.get(obj);
- };
- var __privateAdd$3 = (obj, member, value) => {
- if (member.has(obj))
- throw TypeError("Cannot add the same private member more than once");
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
- };
- var __privateSet$1 = (obj, member, value, setter) => {
- __accessCheck$3(obj, member, "write to private field");
- setter ? setter.call(obj, value) : member.set(obj, value);
- return value;
+ var __typeError$3 = (msg) => {
+ throw TypeError(msg);
  };
+ var __accessCheck$3 = (obj, member, msg) => member.has(obj) || __typeError$3("Cannot " + msg);
+ var __privateGet$2 = (obj, member, getter) => (__accessCheck$3(obj, member, "read from private field"), member.get(obj));
+ var __privateAdd$3 = (obj, member, value) => member.has(obj) ? __typeError$3("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
+ var __privateSet$1 = (obj, member, value, setter) => (__accessCheck$3(obj, member, "write to private field"), member.set(obj, value), value);
  var _map;
  class SimpleCache {
  constructor(options = {}) {
- __privateAdd$3(this, _map
+ __privateAdd$3(this, _map);
  __privateSet$1(this, _map, /* @__PURE__ */ new Map());
  this.capacity = options.max ?? 500;
  this.defaultQueryTTL = options.defaultQueryTTL ?? 60 * 1e3;
|
|
4366
6327
|
const includesNone = (value) => ({ $includesNone: value });
|
4367
6328
|
const includesAny = (value) => ({ $includesAny: value });
|
4368
6329
|
|
4369
|
-
var
|
4370
|
-
|
4371
|
-
throw TypeError("Cannot " + msg);
|
4372
|
-
};
|
4373
|
-
var __privateGet$1 = (obj, member, getter) => {
|
4374
|
-
__accessCheck$2(obj, member, "read from private field");
|
4375
|
-
return getter ? getter.call(obj) : member.get(obj);
|
4376
|
-
};
|
4377
|
-
var __privateAdd$2 = (obj, member, value) => {
|
4378
|
-
if (member.has(obj))
|
4379
|
-
throw TypeError("Cannot add the same private member more than once");
|
4380
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
6330
|
+
var __typeError$2 = (msg) => {
|
6331
|
+
throw TypeError(msg);
|
4381
6332
|
};
|
6333
|
+
var __accessCheck$2 = (obj, member, msg) => member.has(obj) || __typeError$2("Cannot " + msg);
|
6334
|
+
var __privateGet$1 = (obj, member, getter) => (__accessCheck$2(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
6335
|
+
var __privateAdd$2 = (obj, member, value) => member.has(obj) ? __typeError$2("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
4382
6336
|
var _tables;
|
4383
6337
|
class SchemaPlugin extends XataPlugin {
|
4384
6338
|
constructor() {
|
@@ -4390,8 +6344,7 @@ class SchemaPlugin extends XataPlugin {
|
|
4390
6344
|
{},
|
4391
6345
|
{
|
4392
6346
|
get: (_target, table) => {
|
4393
|
-
if (!isString(table))
|
4394
|
-
throw new Error("Invalid table name");
|
6347
|
+
if (!isString(table)) throw new Error("Invalid table name");
|
4395
6348
|
if (__privateGet$1(this, _tables)[table] === void 0) {
|
4396
6349
|
__privateGet$1(this, _tables)[table] = new RestRepository({ db, pluginOptions, table, schemaTables: pluginOptions.tables });
|
4397
6350
|
}
|
@@ -4482,30 +6435,23 @@ function getContentType(file) {
  return "application/octet-stream";
  }
  
- var
-
- throw TypeError("Cannot " + msg);
- };
- var __privateAdd$1 = (obj, member, value) => {
- if (member.has(obj))
- throw TypeError("Cannot add the same private member more than once");
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
+ var __typeError$1 = (msg) => {
+ throw TypeError(msg);
  };
- var
-
-
-
- var _search, search_fn;
+ var __accessCheck$1 = (obj, member, msg) => member.has(obj) || __typeError$1("Cannot " + msg);
+ var __privateAdd$1 = (obj, member, value) => member.has(obj) ? __typeError$1("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
+ var __privateMethod$1 = (obj, member, method) => (__accessCheck$1(obj, member, "access private method"), method);
+ var _SearchPlugin_instances, search_fn;
  class SearchPlugin extends XataPlugin {
  constructor(db) {
  super();
  this.db = db;
- __privateAdd$1(this,
+ __privateAdd$1(this, _SearchPlugin_instances);
  }
  build(pluginOptions) {
  return {
  all: async (query, options = {}) => {
- const { records, totalCount } = await __privateMethod$1(this,
+ const { records, totalCount } = await __privateMethod$1(this, _SearchPlugin_instances, search_fn).call(this, query, options, pluginOptions);
  return {
  totalCount,
  records: records.map((record) => {
@@ -4515,7 +6461,7 @@ class SearchPlugin extends XataPlugin {
  };
  },
  byTable: async (query, options = {}) => {
- const { records: rawRecords, totalCount } = await __privateMethod$1(this,
+ const { records: rawRecords, totalCount } = await __privateMethod$1(this, _SearchPlugin_instances, search_fn).call(this, query, options, pluginOptions);
  const records = rawRecords.reduce((acc, record) => {
  const { table = "orphan" } = record.xata;
  const items = acc[table] ?? [];
@@ -4527,7 +6473,7 @@ class SearchPlugin extends XataPlugin {
  };
  }
  }
-
+ _SearchPlugin_instances = new WeakSet();
  search_fn = async function(query, options, pluginOptions) {
  const { tables, fuzziness, highlight, prefix, page } = options ?? {};
  const { records, totalCount } = await searchBranch({
|
|
4563
6509
|
return result;
|
4564
6510
|
}
|
4565
6511
|
function prepareValue(value) {
|
4566
|
-
if (!isDefined(value))
|
4567
|
-
return null;
|
6512
|
+
if (!isDefined(value)) return null;
|
4568
6513
|
if (value instanceof Date) {
|
4569
6514
|
return value.toISOString();
|
4570
6515
|
}
|
@@ -4617,6 +6562,18 @@ class SQLPlugin extends XataPlugin {
  return { records, rows, warning, columns };
  };
  sqlFunction.connectionString = buildConnectionString(pluginOptions);
+ sqlFunction.batch = async (query) => {
+ const { results } = await sqlBatchQuery({
+ pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
+ body: {
+ statements: query.statements.map(({ statement, params }) => ({ statement, params })),
+ consistency: query.consistency,
+ responseType: query.responseType
+ },
+ ...pluginOptions
+ });
+ return { results };
+ };
  return sqlFunction;
  }
  }
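Note: the hunk above is the main functional addition in this diff — the SQL plugin gains a `batch` entry point that sends several statements in one `sqlBatchQuery` call. A minimal usage sketch follows, assuming the compiled shape shown above (a `statements` array of `{ statement, params }`, optional `consistency` and `responseType`, returning `{ results }`); the exact public typings live in the released `index.d.ts`, not in this hunk, and the option values shown ("strong", "json") are illustrative assumptions.

import { buildClient } from "@xata.io/client";

// Hypothetical client setup; env var names are placeholders.
const XataClient = buildClient();
const xata = new XataClient({
  apiKey: process.env.XATA_API_KEY,
  databaseURL: process.env.XATA_DATABASE_URL
});

async function main() {
  // Mirrors the compiled body above: each entry becomes one statement in the batch.
  const { results } = await xata.sql.batch({
    statements: [
      { statement: "SELECT * FROM teams WHERE name = $1", params: ["Acme"] },
      { statement: "SELECT count(*) AS total FROM users", params: [] }
    ],
    consistency: "strong", // assumed option value
    responseType: "json"   // assumed option value
  });
  console.log(results.length); // one result entry per statement
}

main().catch(console.error);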
@@ -4643,8 +6600,7 @@ function buildDomain(host, region) {
  function buildConnectionString({ apiKey, workspacesApiUrl, branch }) {
  const url = isString(workspacesApiUrl) ? workspacesApiUrl : workspacesApiUrl("", {});
  const parts = parseWorkspacesUrlParts(url);
- if (!parts)
- throw new Error("Invalid workspaces URL");
+ if (!parts) throw new Error("Invalid workspaces URL");
  const { workspace: workspaceSlug, region, database, host } = parts;
  const domain = buildDomain(host, region);
  const workspace = workspaceSlug.split("-").pop();
@@ -4669,39 +6625,24 @@ class TransactionPlugin extends XataPlugin {
  }
  }
  
- var
-
- throw TypeError("Cannot " + msg);
- };
- var __privateGet = (obj, member, getter) => {
- __accessCheck(obj, member, "read from private field");
- return getter ? getter.call(obj) : member.get(obj);
- };
- var __privateAdd = (obj, member, value) => {
- if (member.has(obj))
- throw TypeError("Cannot add the same private member more than once");
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
- };
- var __privateSet = (obj, member, value, setter) => {
- __accessCheck(obj, member, "write to private field");
- setter ? setter.call(obj, value) : member.set(obj, value);
- return value;
- };
- var __privateMethod = (obj, member, method) => {
- __accessCheck(obj, member, "access private method");
- return method;
+ var __typeError = (msg) => {
+ throw TypeError(msg);
  };
+ var __accessCheck = (obj, member, msg) => member.has(obj) || __typeError("Cannot " + msg);
+ var __privateGet = (obj, member, getter) => (__accessCheck(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
+ var __privateAdd = (obj, member, value) => member.has(obj) ? __typeError("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
+ var __privateSet = (obj, member, value, setter) => (__accessCheck(obj, member, "write to private field"), member.set(obj, value), value);
+ var __privateMethod = (obj, member, method) => (__accessCheck(obj, member, "access private method"), method);
  const buildClient = (plugins) => {
- var _options,
+ var _options, _instances, parseOptions_fn, getFetchProps_fn, _a;
  return _a = class {
  constructor(options = {}, tables) {
- __privateAdd(this,
- __privateAdd(this,
-
- const safeOptions = __privateMethod(this, _parseOptions, parseOptions_fn).call(this, options);
+ __privateAdd(this, _instances);
+ __privateAdd(this, _options);
+ const safeOptions = __privateMethod(this, _instances, parseOptions_fn).call(this, options);
  __privateSet(this, _options, safeOptions);
  const pluginOptions = {
- ...__privateMethod(this,
+ ...__privateMethod(this, _instances, getFetchProps_fn).call(this, safeOptions),
  cache: safeOptions.cache,
  host: safeOptions.host,
  tables,
@@ -4719,8 +6660,7 @@ const buildClient = (plugins) => {
  this.sql = sql;
  this.files = files;
  for (const [key, namespace] of Object.entries(plugins ?? {})) {
- if (namespace === void 0)
- continue;
+ if (namespace === void 0) continue;
  this[key] = namespace.build(pluginOptions);
  }
  }
@@ -4729,7 +6669,7 @@ const buildClient = (plugins) => {
  const branch = __privateGet(this, _options).branch;
  return { databaseURL, branch };
  }
- }, _options = new WeakMap(),
+ }, _options = new WeakMap(), _instances = new WeakSet(), parseOptions_fn = function(options) {
  const enableBrowser = options?.enableBrowser ?? getEnableBrowserVariable() ?? false;
  const isBrowser = typeof window !== "undefined" && typeof Deno === "undefined";
  if (isBrowser && !enableBrowser) {
@@ -4784,7 +6724,7 @@ const buildClient = (plugins) => {
  clientName,
  xataAgentExtra
  };
- },
+ }, getFetchProps_fn = function({
  fetch,
  apiKey,
  databaseURL,
@@ -4825,26 +6765,19 @@ class Serializer {
  }
  toJSON(data) {
  function visit(obj) {
- if (Array.isArray(obj))
- return obj.map(visit);
+ if (Array.isArray(obj)) return obj.map(visit);
  const type = typeof obj;
- if (type === "undefined")
-
- if (
- return { [META]: "bigint", [VALUE]: obj.toString() };
- if (obj === null || type !== "object")
- return obj;
+ if (type === "undefined") return { [META]: "undefined" };
+ if (type === "bigint") return { [META]: "bigint", [VALUE]: obj.toString() };
+ if (obj === null || type !== "object") return obj;
  const constructor = obj.constructor;
  const o = { [META]: constructor.name };
  for (const [key, value] of Object.entries(obj)) {
  o[key] = visit(value);
  }
- if (constructor === Date)
-
- if (constructor ===
- o[VALUE] = Object.fromEntries(obj);
- if (constructor === Set)
- o[VALUE] = [...obj];
+ if (constructor === Date) o[VALUE] = obj.toISOString();
+ if (constructor === Map) o[VALUE] = Object.fromEntries(obj);
+ if (constructor === Set) o[VALUE] = [...obj];
  return o;
  }
  return JSON.stringify(visit(data));
@@ -4857,16 +6790,11 @@ class Serializer {
  if (constructor) {
  return Object.assign(Object.create(constructor.prototype), rest);
  }
- if (clazz === "Date")
-
- if (clazz === "
-
- if (clazz === "
- return new Map(Object.entries(val));
- if (clazz === "bigint")
- return BigInt(val);
- if (clazz === "undefined")
- return void 0;
+ if (clazz === "Date") return new Date(val);
+ if (clazz === "Set") return new Set(val);
+ if (clazz === "Map") return new Map(Object.entries(val));
+ if (clazz === "bigint") return BigInt(val);
+ if (clazz === "undefined") return void 0;
  return rest;
  }
  return value;
@@ -4888,5 +6816,5 @@ class XataError extends Error {
  }
  }
  
6818
|
|
4891
|
-
export { BaseClient, FetcherError, FilesPlugin, operationsByTag as Operations, PAGINATION_DEFAULT_OFFSET, PAGINATION_DEFAULT_SIZE, PAGINATION_MAX_OFFSET, PAGINATION_MAX_SIZE, Page, PageRecordArray, Query, RecordArray, RecordColumnTypes, Repository, RestRepository, SQLPlugin, SchemaPlugin, SearchPlugin, Serializer, SimpleCache, TransactionPlugin, XataApiClient, XataApiPlugin, XataError, XataFile, XataPlugin, acceptWorkspaceMemberInvite, adaptTable, addGitBranchesEntry, addTableColumn, aggregateTable, applyBranchSchemaEdit, applyMigration, askTable, askTableSession, branchTransaction, buildClient, buildPreviewBranchName, buildProviderString, bulkInsertTableRecords, cancelWorkspaceMemberInvite, compareBranchSchemas, compareBranchWithUserSchema, compareMigrationRequest, contains, copyBranch, createBranch, createCluster, createDatabase, createMigrationRequest, createTable, createUserAPIKey, createWorkspace, deleteBranch, deleteColumn, deleteDatabase, deleteDatabaseGithubSettings, deleteFile, deleteFileItem, deleteOAuthAccessToken, deleteRecord, deleteTable, deleteUser, deleteUserAPIKey, deleteUserOAuthClient, deleteWorkspace, deserialize, endsWith, equals, executeBranchMigrationPlan, exists, fileAccess, fileUpload, ge, getAPIKey, getAuthorizationCode, getBranch, getBranchDetails, getBranchList, getBranchMetadata, getBranchMigrationHistory, getBranchMigrationJobStatus, getBranchMigrationPlan, getBranchSchemaHistory, getBranchStats, getCluster, getColumn, getDatabaseGithubSettings, getDatabaseList, getDatabaseMetadata, getDatabaseSettings, getDatabaseURL, getFile, getFileItem, getGitBranchesMapping, getHostUrl, getMigrationHistory, getMigrationJobStatus, getMigrationRequest, getMigrationRequestIsMerged, getPreviewBranch, getRecord, getSchema, getTableColumns, getTableSchema, getUser, getUserAPIKeys, getUserOAuthAccessTokens, getUserOAuthClients, getWorkspace, getWorkspaceMembersList, getWorkspaceSettings, getWorkspacesList, grantAuthorizationCode, greaterEquals, greaterThan, greaterThanEquals, gt, gte, iContains, iPattern, includes, includesAll, includesAny, includesNone, insertRecord, insertRecordWithID, inviteWorkspaceMember, is, isCursorPaginationOptions, isHostProviderAlias, isHostProviderBuilder, isIdentifiable, isNot, isValidExpandedColumn, isValidSelectableColumns, isXataRecord, le, lessEquals, lessThan, lessThanEquals, listClusters, listMigrationRequestsCommits, listRegions, lt, lte, mergeMigrationRequest, notExists, operationsByTag, parseProviderString, parseWorkspacesUrlParts, pattern, previewBranchSchemaEdit, pushBranchMigrations, putFile, putFileItem, queryMigrationRequests, queryTable, removeGitBranchesEntry, removeWorkspaceMember, renameDatabase, resendWorkspaceMemberInvite, resolveBranch, searchBranch, searchTable, serialize, setTableSchema, sqlQuery, startsWith, summarizeTable, transformImage, updateBranchMetadata, updateBranchSchema, updateCluster, updateColumn, updateDatabaseGithubSettings, updateDatabaseMetadata, updateDatabaseSettings, updateMigrationRequest, updateOAuthAccessToken, updateRecordWithID, updateTable, updateUser, updateWorkspace, updateWorkspaceMemberInvite, updateWorkspaceMemberRole, updateWorkspaceSettings, upsertRecordWithID, vectorSearchTable };
|
6819
|
+
export { BaseClient, Buffer, FetcherError, FilesPlugin, operationsByTag as Operations, PAGINATION_DEFAULT_OFFSET, PAGINATION_DEFAULT_SIZE, PAGINATION_MAX_OFFSET, PAGINATION_MAX_SIZE, Page, PageRecordArray, Query, RecordArray, RecordColumnTypes, Repository, RestRepository, SQLPlugin, SchemaPlugin, SearchPlugin, Serializer, SimpleCache, TransactionPlugin, XataApiClient, XataApiPlugin, XataError, XataFile, XataPlugin, acceptWorkspaceMemberInvite, adaptAllTables, adaptTable, addGitBranchesEntry, addTableColumn, aggregateTable, applyBranchSchemaEdit, applyMigration, askTable, askTableSession, branchTransaction, buildClient, buildPreviewBranchName, buildProviderString, bulkInsertTableRecords, cancelWorkspaceMemberInvite, compareBranchSchemas, compareBranchWithUserSchema, compareMigrationRequest, completeMigration, contains, copyBranch, createBranch, createCluster, createDatabase, createMigrationRequest, createTable, createUserAPIKey, createWorkspace, deleteBranch, deleteCluster, deleteColumn, deleteDatabase, deleteDatabaseGithubSettings, deleteFile, deleteFileItem, deleteOAuthAccessToken, deleteRecord, deleteTable, deleteUser, deleteUserAPIKey, deleteUserOAuthClient, deleteWorkspace, deserialize, endsWith, equals, executeBranchMigrationPlan, exists, fileAccess, fileUpload, ge, getAPIKey, getAuthorizationCode, getBranch, getBranchDetails, getBranchList, getBranchMetadata, getBranchMigrationHistory, getBranchMigrationJobStatus, getBranchMigrationPlan, getBranchSchemaHistory, getBranchStats, getCluster, getColumn, getDatabaseGithubSettings, getDatabaseList, getDatabaseMetadata, getDatabaseSettings, getDatabaseURL, getFile, getFileItem, getGitBranchesMapping, getHostUrl, getMigrationHistory, getMigrationJobStatus, getMigrationRequest, getMigrationRequestIsMerged, getPreviewBranch, getRecord, getSchema, getTableColumns, getTableSchema, getUser, getUserAPIKeys, getUserOAuthAccessTokens, getUserOAuthClients, getWorkspace, getWorkspaceMembersList, getWorkspaceSettings, getWorkspacesList, grantAuthorizationCode, greaterEquals, greaterThan, greaterThanEquals, gt, gte, iContains, iPattern, includes, includesAll, includesAny, includesNone, insertRecord, insertRecordWithID, inviteWorkspaceMember, is, isCursorPaginationOptions, isHostProviderAlias, isHostProviderBuilder, isIdentifiable, isNot, isValidExpandedColumn, isValidSelectableColumns, isXataRecord, le, lessEquals, lessThan, lessThanEquals, listClusters, listMigrationRequestsCommits, listRegions, lt, lte, mergeMigrationRequest, notExists, operationsByTag, parseProviderString, parseWorkspacesUrlParts, pattern, previewBranchSchemaEdit, pushBranchMigrations, putFile, putFileItem, queryMigrationRequests, queryTable, removeGitBranchesEntry, removeWorkspaceMember, renameDatabase, resendWorkspaceMemberInvite, resolveBranch, rollbackMigration, searchBranch, searchTable, serialize, setTableSchema, sqlBatchQuery, sqlQuery, startMigration, startsWith, summarizeTable, transformImage, updateBranchMetadata, updateBranchSchema, updateCluster, updateColumn, updateDatabaseGithubSettings, updateDatabaseMetadata, updateDatabaseSettings, updateMigrationRequest, updateOAuthAccessToken, updateRecordWithID, updateTable, updateUser, updateWorkspace, updateWorkspaceMemberInvite, updateWorkspaceMemberRole, updateWorkspaceSettings, upsertRecordWithID, vectorSearchTable };
|
4892
6820
|
//# sourceMappingURL=index.mjs.map
|