@xata.io/client 0.29.3 → 0.29.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-add-version.log +1 -1
- package/.turbo/turbo-build.log +4 -4
- package/CHANGELOG.md +14 -0
- package/dist/index.cjs +2437 -502
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.ts +1014 -23
- package/dist/index.mjs +2431 -503
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.cjs
CHANGED
@@ -24,6 +24,1789 @@ const TraceAttributes = {
|
|
24
24
|
CLOUDFLARE_RAY_ID: "cf.ray"
|
25
25
|
};
|
26
26
|
|
27
|
+
const lookup = [];
|
28
|
+
const revLookup = [];
|
29
|
+
const code = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
|
30
|
+
for (let i = 0, len = code.length; i < len; ++i) {
|
31
|
+
lookup[i] = code[i];
|
32
|
+
revLookup[code.charCodeAt(i)] = i;
|
33
|
+
}
|
34
|
+
revLookup["-".charCodeAt(0)] = 62;
|
35
|
+
revLookup["_".charCodeAt(0)] = 63;
|
36
|
+
function getLens(b64) {
|
37
|
+
const len = b64.length;
|
38
|
+
if (len % 4 > 0) {
|
39
|
+
throw new Error("Invalid string. Length must be a multiple of 4");
|
40
|
+
}
|
41
|
+
let validLen = b64.indexOf("=");
|
42
|
+
if (validLen === -1) validLen = len;
|
43
|
+
const placeHoldersLen = validLen === len ? 0 : 4 - validLen % 4;
|
44
|
+
return [validLen, placeHoldersLen];
|
45
|
+
}
|
46
|
+
function _byteLength(_b64, validLen, placeHoldersLen) {
|
47
|
+
return (validLen + placeHoldersLen) * 3 / 4 - placeHoldersLen;
|
48
|
+
}
|
49
|
+
function toByteArray(b64) {
|
50
|
+
let tmp;
|
51
|
+
const lens = getLens(b64);
|
52
|
+
const validLen = lens[0];
|
53
|
+
const placeHoldersLen = lens[1];
|
54
|
+
const arr = new Uint8Array(_byteLength(b64, validLen, placeHoldersLen));
|
55
|
+
let curByte = 0;
|
56
|
+
const len = placeHoldersLen > 0 ? validLen - 4 : validLen;
|
57
|
+
let i;
|
58
|
+
for (i = 0; i < len; i += 4) {
|
59
|
+
tmp = revLookup[b64.charCodeAt(i)] << 18 | revLookup[b64.charCodeAt(i + 1)] << 12 | revLookup[b64.charCodeAt(i + 2)] << 6 | revLookup[b64.charCodeAt(i + 3)];
|
60
|
+
arr[curByte++] = tmp >> 16 & 255;
|
61
|
+
arr[curByte++] = tmp >> 8 & 255;
|
62
|
+
arr[curByte++] = tmp & 255;
|
63
|
+
}
|
64
|
+
if (placeHoldersLen === 2) {
|
65
|
+
tmp = revLookup[b64.charCodeAt(i)] << 2 | revLookup[b64.charCodeAt(i + 1)] >> 4;
|
66
|
+
arr[curByte++] = tmp & 255;
|
67
|
+
}
|
68
|
+
if (placeHoldersLen === 1) {
|
69
|
+
tmp = revLookup[b64.charCodeAt(i)] << 10 | revLookup[b64.charCodeAt(i + 1)] << 4 | revLookup[b64.charCodeAt(i + 2)] >> 2;
|
70
|
+
arr[curByte++] = tmp >> 8 & 255;
|
71
|
+
arr[curByte++] = tmp & 255;
|
72
|
+
}
|
73
|
+
return arr;
|
74
|
+
}
|
75
|
+
function tripletToBase64(num) {
|
76
|
+
return lookup[num >> 18 & 63] + lookup[num >> 12 & 63] + lookup[num >> 6 & 63] + lookup[num & 63];
|
77
|
+
}
|
78
|
+
function encodeChunk(uint8, start, end) {
|
79
|
+
let tmp;
|
80
|
+
const output = [];
|
81
|
+
for (let i = start; i < end; i += 3) {
|
82
|
+
tmp = (uint8[i] << 16 & 16711680) + (uint8[i + 1] << 8 & 65280) + (uint8[i + 2] & 255);
|
83
|
+
output.push(tripletToBase64(tmp));
|
84
|
+
}
|
85
|
+
return output.join("");
|
86
|
+
}
|
87
|
+
function fromByteArray(uint8) {
|
88
|
+
let tmp;
|
89
|
+
const len = uint8.length;
|
90
|
+
const extraBytes = len % 3;
|
91
|
+
const parts = [];
|
92
|
+
const maxChunkLength = 16383;
|
93
|
+
for (let i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) {
|
94
|
+
parts.push(encodeChunk(uint8, i, i + maxChunkLength > len2 ? len2 : i + maxChunkLength));
|
95
|
+
}
|
96
|
+
if (extraBytes === 1) {
|
97
|
+
tmp = uint8[len - 1];
|
98
|
+
parts.push(lookup[tmp >> 2] + lookup[tmp << 4 & 63] + "==");
|
99
|
+
} else if (extraBytes === 2) {
|
100
|
+
tmp = (uint8[len - 2] << 8) + uint8[len - 1];
|
101
|
+
parts.push(lookup[tmp >> 10] + lookup[tmp >> 4 & 63] + lookup[tmp << 2 & 63] + "=");
|
102
|
+
}
|
103
|
+
return parts.join("");
|
104
|
+
}
|
105
|
+
|
106
|
+
const K_MAX_LENGTH = 2147483647;
|
107
|
+
const MAX_ARGUMENTS_LENGTH = 4096;
|
108
|
+
class Buffer extends Uint8Array {
|
109
|
+
/**
|
110
|
+
* Constructs a new `Buffer` instance.
|
111
|
+
*
|
112
|
+
* @param value
|
113
|
+
* @param encodingOrOffset
|
114
|
+
* @param length
|
115
|
+
*/
|
116
|
+
constructor(value, encodingOrOffset, length) {
|
117
|
+
if (typeof value === "number") {
|
118
|
+
if (typeof encodingOrOffset === "string") {
|
119
|
+
throw new TypeError("The first argument must be of type string, received type number");
|
120
|
+
}
|
121
|
+
if (value < 0) {
|
122
|
+
throw new RangeError("The buffer size cannot be negative");
|
123
|
+
}
|
124
|
+
super(value < 0 ? 0 : Buffer._checked(value) | 0);
|
125
|
+
} else if (typeof value === "string") {
|
126
|
+
if (typeof encodingOrOffset !== "string") {
|
127
|
+
encodingOrOffset = "utf8";
|
128
|
+
}
|
129
|
+
if (!Buffer.isEncoding(encodingOrOffset)) {
|
130
|
+
throw new TypeError("Unknown encoding: " + encodingOrOffset);
|
131
|
+
}
|
132
|
+
const length2 = Buffer.byteLength(value, encodingOrOffset) | 0;
|
133
|
+
super(length2);
|
134
|
+
const written = this.write(value, 0, this.length, encodingOrOffset);
|
135
|
+
if (written !== length2) {
|
136
|
+
throw new TypeError(
|
137
|
+
"Number of bytes written did not match expected length (wrote " + written + ", expected " + length2 + ")"
|
138
|
+
);
|
139
|
+
}
|
140
|
+
} else if (ArrayBuffer.isView(value)) {
|
141
|
+
if (Buffer._isInstance(value, Uint8Array)) {
|
142
|
+
const copy = new Uint8Array(value);
|
143
|
+
const array = copy.buffer;
|
144
|
+
const byteOffset = copy.byteOffset;
|
145
|
+
const length2 = copy.byteLength;
|
146
|
+
if (byteOffset < 0 || array.byteLength < byteOffset) {
|
147
|
+
throw new RangeError("offset is outside of buffer bounds");
|
148
|
+
}
|
149
|
+
if (array.byteLength < byteOffset + (length2 || 0)) {
|
150
|
+
throw new RangeError("length is outside of buffer bounds");
|
151
|
+
}
|
152
|
+
super(new Uint8Array(array, byteOffset, length2));
|
153
|
+
} else {
|
154
|
+
const array = value;
|
155
|
+
const length2 = array.length < 0 ? 0 : Buffer._checked(array.length) | 0;
|
156
|
+
super(new Uint8Array(length2));
|
157
|
+
for (let i = 0; i < length2; i++) {
|
158
|
+
this[i] = array[i] & 255;
|
159
|
+
}
|
160
|
+
}
|
161
|
+
} else if (value == null) {
|
162
|
+
throw new TypeError(
|
163
|
+
"The first argument must be one of type string, Buffer, ArrayBuffer, Array, or Array-like Object. Received type " + typeof value
|
164
|
+
);
|
165
|
+
} else if (Buffer._isInstance(value, ArrayBuffer) || value && Buffer._isInstance(value.buffer, ArrayBuffer)) {
|
166
|
+
const array = value;
|
167
|
+
const byteOffset = encodingOrOffset;
|
168
|
+
if (byteOffset < 0 || array.byteLength < byteOffset) {
|
169
|
+
throw new RangeError("offset is outside of buffer bounds");
|
170
|
+
}
|
171
|
+
if (array.byteLength < byteOffset + (length || 0)) {
|
172
|
+
throw new RangeError("length is outside of buffer bounds");
|
173
|
+
}
|
174
|
+
super(new Uint8Array(array, byteOffset, length));
|
175
|
+
} else if (Array.isArray(value)) {
|
176
|
+
const array = value;
|
177
|
+
const length2 = array.length < 0 ? 0 : Buffer._checked(array.length) | 0;
|
178
|
+
super(new Uint8Array(length2));
|
179
|
+
for (let i = 0; i < length2; i++) {
|
180
|
+
this[i] = array[i] & 255;
|
181
|
+
}
|
182
|
+
} else {
|
183
|
+
throw new TypeError("Unable to determine the correct way to allocate buffer for type " + typeof value);
|
184
|
+
}
|
185
|
+
}
|
186
|
+
/**
|
187
|
+
* Return JSON representation of the buffer.
|
188
|
+
*/
|
189
|
+
toJSON() {
|
190
|
+
return {
|
191
|
+
type: "Buffer",
|
192
|
+
data: Array.prototype.slice.call(this)
|
193
|
+
};
|
194
|
+
}
|
195
|
+
/**
|
196
|
+
* Writes `string` to the buffer at `offset` according to the character encoding in `encoding`. The `length`
|
197
|
+
* parameter is the number of bytes to write. If the buffer does not contain enough space to fit the entire string,
|
198
|
+
* only part of `string` will be written. However, partially encoded characters will not be written.
|
199
|
+
*
|
200
|
+
* @param string String to write to `buf`.
|
201
|
+
* @param offset Number of bytes to skip before starting to write `string`. Default: `0`.
|
202
|
+
* @param length Maximum number of bytes to write: Default: `buf.length - offset`.
|
203
|
+
* @param encoding The character encoding of `string`. Default: `utf8`.
|
204
|
+
*/
|
205
|
+
write(string, offset, length, encoding) {
|
206
|
+
if (typeof offset === "undefined") {
|
207
|
+
encoding = "utf8";
|
208
|
+
length = this.length;
|
209
|
+
offset = 0;
|
210
|
+
} else if (typeof length === "undefined" && typeof offset === "string") {
|
211
|
+
encoding = offset;
|
212
|
+
length = this.length;
|
213
|
+
offset = 0;
|
214
|
+
} else if (typeof offset === "number" && isFinite(offset)) {
|
215
|
+
offset = offset >>> 0;
|
216
|
+
if (typeof length === "number" && isFinite(length)) {
|
217
|
+
length = length >>> 0;
|
218
|
+
encoding ?? (encoding = "utf8");
|
219
|
+
} else if (typeof length === "string") {
|
220
|
+
encoding = length;
|
221
|
+
length = void 0;
|
222
|
+
}
|
223
|
+
} else {
|
224
|
+
throw new Error("Buffer.write(string, encoding, offset[, length]) is no longer supported");
|
225
|
+
}
|
226
|
+
const remaining = this.length - offset;
|
227
|
+
if (typeof length === "undefined" || length > remaining) {
|
228
|
+
length = remaining;
|
229
|
+
}
|
230
|
+
if (string.length > 0 && (length < 0 || offset < 0) || offset > this.length) {
|
231
|
+
throw new RangeError("Attempt to write outside buffer bounds");
|
232
|
+
}
|
233
|
+
encoding || (encoding = "utf8");
|
234
|
+
switch (Buffer._getEncoding(encoding)) {
|
235
|
+
case "hex":
|
236
|
+
return Buffer._hexWrite(this, string, offset, length);
|
237
|
+
case "utf8":
|
238
|
+
return Buffer._utf8Write(this, string, offset, length);
|
239
|
+
case "ascii":
|
240
|
+
case "latin1":
|
241
|
+
case "binary":
|
242
|
+
return Buffer._asciiWrite(this, string, offset, length);
|
243
|
+
case "ucs2":
|
244
|
+
case "utf16le":
|
245
|
+
return Buffer._ucs2Write(this, string, offset, length);
|
246
|
+
case "base64":
|
247
|
+
return Buffer._base64Write(this, string, offset, length);
|
248
|
+
}
|
249
|
+
}
|
250
|
+
/**
|
251
|
+
* Decodes the buffer to a string according to the specified character encoding.
|
252
|
+
* Passing `start` and `end` will decode only a subset of the buffer.
|
253
|
+
*
|
254
|
+
* Note that if the encoding is `utf8` and a byte sequence in the input is not valid UTF-8, then each invalid byte
|
255
|
+
* will be replaced with `U+FFFD`.
|
256
|
+
*
|
257
|
+
* @param encoding
|
258
|
+
* @param start
|
259
|
+
* @param end
|
260
|
+
*/
|
261
|
+
toString(encoding, start, end) {
|
262
|
+
const length = this.length;
|
263
|
+
if (length === 0) {
|
264
|
+
return "";
|
265
|
+
}
|
266
|
+
if (arguments.length === 0) {
|
267
|
+
return Buffer._utf8Slice(this, 0, length);
|
268
|
+
}
|
269
|
+
if (typeof start === "undefined" || start < 0) {
|
270
|
+
start = 0;
|
271
|
+
}
|
272
|
+
if (start > this.length) {
|
273
|
+
return "";
|
274
|
+
}
|
275
|
+
if (typeof end === "undefined" || end > this.length) {
|
276
|
+
end = this.length;
|
277
|
+
}
|
278
|
+
if (end <= 0) {
|
279
|
+
return "";
|
280
|
+
}
|
281
|
+
end >>>= 0;
|
282
|
+
start >>>= 0;
|
283
|
+
if (end <= start) {
|
284
|
+
return "";
|
285
|
+
}
|
286
|
+
if (!encoding) {
|
287
|
+
encoding = "utf8";
|
288
|
+
}
|
289
|
+
switch (Buffer._getEncoding(encoding)) {
|
290
|
+
case "hex":
|
291
|
+
return Buffer._hexSlice(this, start, end);
|
292
|
+
case "utf8":
|
293
|
+
return Buffer._utf8Slice(this, start, end);
|
294
|
+
case "ascii":
|
295
|
+
return Buffer._asciiSlice(this, start, end);
|
296
|
+
case "latin1":
|
297
|
+
case "binary":
|
298
|
+
return Buffer._latin1Slice(this, start, end);
|
299
|
+
case "ucs2":
|
300
|
+
case "utf16le":
|
301
|
+
return Buffer._utf16leSlice(this, start, end);
|
302
|
+
case "base64":
|
303
|
+
return Buffer._base64Slice(this, start, end);
|
304
|
+
}
|
305
|
+
}
|
306
|
+
/**
|
307
|
+
* Returns true if this buffer's is equal to the provided buffer, meaning they share the same exact data.
|
308
|
+
*
|
309
|
+
* @param otherBuffer
|
310
|
+
*/
|
311
|
+
equals(otherBuffer) {
|
312
|
+
if (!Buffer.isBuffer(otherBuffer)) {
|
313
|
+
throw new TypeError("Argument must be a Buffer");
|
314
|
+
}
|
315
|
+
if (this === otherBuffer) {
|
316
|
+
return true;
|
317
|
+
}
|
318
|
+
return Buffer.compare(this, otherBuffer) === 0;
|
319
|
+
}
|
320
|
+
/**
|
321
|
+
* Compares the buffer with `otherBuffer` and returns a number indicating whether the buffer comes before, after,
|
322
|
+
* or is the same as `otherBuffer` in sort order. Comparison is based on the actual sequence of bytes in each
|
323
|
+
* buffer.
|
324
|
+
*
|
325
|
+
* - `0` is returned if `otherBuffer` is the same as this buffer.
|
326
|
+
* - `1` is returned if `otherBuffer` should come before this buffer when sorted.
|
327
|
+
* - `-1` is returned if `otherBuffer` should come after this buffer when sorted.
|
328
|
+
*
|
329
|
+
* @param otherBuffer The buffer to compare to.
|
330
|
+
* @param targetStart The offset within `otherBuffer` at which to begin comparison.
|
331
|
+
* @param targetEnd The offset within `otherBuffer` at which to end comparison (exclusive).
|
332
|
+
* @param sourceStart The offset within this buffer at which to begin comparison.
|
333
|
+
* @param sourceEnd The offset within this buffer at which to end the comparison (exclusive).
|
334
|
+
*/
|
335
|
+
compare(otherBuffer, targetStart, targetEnd, sourceStart, sourceEnd) {
|
336
|
+
if (Buffer._isInstance(otherBuffer, Uint8Array)) {
|
337
|
+
otherBuffer = Buffer.from(otherBuffer, otherBuffer.byteOffset, otherBuffer.byteLength);
|
338
|
+
}
|
339
|
+
if (!Buffer.isBuffer(otherBuffer)) {
|
340
|
+
throw new TypeError("Argument must be a Buffer or Uint8Array");
|
341
|
+
}
|
342
|
+
targetStart ?? (targetStart = 0);
|
343
|
+
targetEnd ?? (targetEnd = otherBuffer ? otherBuffer.length : 0);
|
344
|
+
sourceStart ?? (sourceStart = 0);
|
345
|
+
sourceEnd ?? (sourceEnd = this.length);
|
346
|
+
if (targetStart < 0 || targetEnd > otherBuffer.length || sourceStart < 0 || sourceEnd > this.length) {
|
347
|
+
throw new RangeError("Out of range index");
|
348
|
+
}
|
349
|
+
if (sourceStart >= sourceEnd && targetStart >= targetEnd) {
|
350
|
+
return 0;
|
351
|
+
}
|
352
|
+
if (sourceStart >= sourceEnd) {
|
353
|
+
return -1;
|
354
|
+
}
|
355
|
+
if (targetStart >= targetEnd) {
|
356
|
+
return 1;
|
357
|
+
}
|
358
|
+
targetStart >>>= 0;
|
359
|
+
targetEnd >>>= 0;
|
360
|
+
sourceStart >>>= 0;
|
361
|
+
sourceEnd >>>= 0;
|
362
|
+
if (this === otherBuffer) {
|
363
|
+
return 0;
|
364
|
+
}
|
365
|
+
let x = sourceEnd - sourceStart;
|
366
|
+
let y = targetEnd - targetStart;
|
367
|
+
const len = Math.min(x, y);
|
368
|
+
const thisCopy = this.slice(sourceStart, sourceEnd);
|
369
|
+
const targetCopy = otherBuffer.slice(targetStart, targetEnd);
|
370
|
+
for (let i = 0; i < len; ++i) {
|
371
|
+
if (thisCopy[i] !== targetCopy[i]) {
|
372
|
+
x = thisCopy[i];
|
373
|
+
y = targetCopy[i];
|
374
|
+
break;
|
375
|
+
}
|
376
|
+
}
|
377
|
+
if (x < y) return -1;
|
378
|
+
if (y < x) return 1;
|
379
|
+
return 0;
|
380
|
+
}
|
381
|
+
/**
|
382
|
+
* Copies data from a region of this buffer to a region in `targetBuffer`, even if the `targetBuffer` memory
|
383
|
+
* region overlaps with this buffer.
|
384
|
+
*
|
385
|
+
* @param targetBuffer The target buffer to copy into.
|
386
|
+
* @param targetStart The offset within `targetBuffer` at which to begin writing.
|
387
|
+
* @param sourceStart The offset within this buffer at which to begin copying.
|
388
|
+
* @param sourceEnd The offset within this buffer at which to end copying (exclusive).
|
389
|
+
*/
|
390
|
+
copy(targetBuffer, targetStart, sourceStart, sourceEnd) {
|
391
|
+
if (!Buffer.isBuffer(targetBuffer)) throw new TypeError("argument should be a Buffer");
|
392
|
+
if (!sourceStart) sourceStart = 0;
|
393
|
+
if (!targetStart) targetStart = 0;
|
394
|
+
if (!sourceEnd && sourceEnd !== 0) sourceEnd = this.length;
|
395
|
+
if (targetStart >= targetBuffer.length) targetStart = targetBuffer.length;
|
396
|
+
if (!targetStart) targetStart = 0;
|
397
|
+
if (sourceEnd > 0 && sourceEnd < sourceStart) sourceEnd = sourceStart;
|
398
|
+
if (sourceEnd === sourceStart) return 0;
|
399
|
+
if (targetBuffer.length === 0 || this.length === 0) return 0;
|
400
|
+
if (targetStart < 0) {
|
401
|
+
throw new RangeError("targetStart out of bounds");
|
402
|
+
}
|
403
|
+
if (sourceStart < 0 || sourceStart >= this.length) throw new RangeError("Index out of range");
|
404
|
+
if (sourceEnd < 0) throw new RangeError("sourceEnd out of bounds");
|
405
|
+
if (sourceEnd > this.length) sourceEnd = this.length;
|
406
|
+
if (targetBuffer.length - targetStart < sourceEnd - sourceStart) {
|
407
|
+
sourceEnd = targetBuffer.length - targetStart + sourceStart;
|
408
|
+
}
|
409
|
+
const len = sourceEnd - sourceStart;
|
410
|
+
if (this === targetBuffer && typeof Uint8Array.prototype.copyWithin === "function") {
|
411
|
+
this.copyWithin(targetStart, sourceStart, sourceEnd);
|
412
|
+
} else {
|
413
|
+
Uint8Array.prototype.set.call(targetBuffer, this.subarray(sourceStart, sourceEnd), targetStart);
|
414
|
+
}
|
415
|
+
return len;
|
416
|
+
}
|
417
|
+
/**
|
418
|
+
* Returns a new `Buffer` that references the same memory as the original, but offset and cropped by the `start`
|
419
|
+
* and `end` indices. This is the same behavior as `buf.subarray()`.
|
420
|
+
*
|
421
|
+
* This method is not compatible with the `Uint8Array.prototype.slice()`, which is a superclass of Buffer. To copy
|
422
|
+
* the slice, use `Uint8Array.prototype.slice()`.
|
423
|
+
*
|
424
|
+
* @param start
|
425
|
+
* @param end
|
426
|
+
*/
|
427
|
+
slice(start, end) {
|
428
|
+
if (!start) {
|
429
|
+
start = 0;
|
430
|
+
}
|
431
|
+
const len = this.length;
|
432
|
+
start = ~~start;
|
433
|
+
end = end === void 0 ? len : ~~end;
|
434
|
+
if (start < 0) {
|
435
|
+
start += len;
|
436
|
+
if (start < 0) {
|
437
|
+
start = 0;
|
438
|
+
}
|
439
|
+
} else if (start > len) {
|
440
|
+
start = len;
|
441
|
+
}
|
442
|
+
if (end < 0) {
|
443
|
+
end += len;
|
444
|
+
if (end < 0) {
|
445
|
+
end = 0;
|
446
|
+
}
|
447
|
+
} else if (end > len) {
|
448
|
+
end = len;
|
449
|
+
}
|
450
|
+
if (end < start) {
|
451
|
+
end = start;
|
452
|
+
}
|
453
|
+
const newBuf = this.subarray(start, end);
|
454
|
+
Object.setPrototypeOf(newBuf, Buffer.prototype);
|
455
|
+
return newBuf;
|
456
|
+
}
|
457
|
+
/**
|
458
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits
|
459
|
+
* of accuracy. Behavior is undefined when value is anything other than an unsigned integer.
|
460
|
+
*
|
461
|
+
* @param value Number to write.
|
462
|
+
* @param offset Number of bytes to skip before starting to write.
|
463
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
464
|
+
* @param noAssert
|
465
|
+
* @returns `offset` plus the number of bytes written.
|
466
|
+
*/
|
467
|
+
writeUIntLE(value, offset, byteLength, noAssert) {
|
468
|
+
value = +value;
|
469
|
+
offset = offset >>> 0;
|
470
|
+
byteLength = byteLength >>> 0;
|
471
|
+
if (!noAssert) {
|
472
|
+
const maxBytes = Math.pow(2, 8 * byteLength) - 1;
|
473
|
+
Buffer._checkInt(this, value, offset, byteLength, maxBytes, 0);
|
474
|
+
}
|
475
|
+
let mul = 1;
|
476
|
+
let i = 0;
|
477
|
+
this[offset] = value & 255;
|
478
|
+
while (++i < byteLength && (mul *= 256)) {
|
479
|
+
this[offset + i] = value / mul & 255;
|
480
|
+
}
|
481
|
+
return offset + byteLength;
|
482
|
+
}
|
483
|
+
/**
|
484
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits of
|
485
|
+
* accuracy. Behavior is undefined when `value` is anything other than an unsigned integer.
|
486
|
+
*
|
487
|
+
* @param value Number to write.
|
488
|
+
* @param offset Number of bytes to skip before starting to write.
|
489
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
490
|
+
* @param noAssert
|
491
|
+
* @returns `offset` plus the number of bytes written.
|
492
|
+
*/
|
493
|
+
writeUIntBE(value, offset, byteLength, noAssert) {
|
494
|
+
value = +value;
|
495
|
+
offset = offset >>> 0;
|
496
|
+
byteLength = byteLength >>> 0;
|
497
|
+
if (!noAssert) {
|
498
|
+
const maxBytes = Math.pow(2, 8 * byteLength) - 1;
|
499
|
+
Buffer._checkInt(this, value, offset, byteLength, maxBytes, 0);
|
500
|
+
}
|
501
|
+
let i = byteLength - 1;
|
502
|
+
let mul = 1;
|
503
|
+
this[offset + i] = value & 255;
|
504
|
+
while (--i >= 0 && (mul *= 256)) {
|
505
|
+
this[offset + i] = value / mul & 255;
|
506
|
+
}
|
507
|
+
return offset + byteLength;
|
508
|
+
}
|
509
|
+
/**
|
510
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits
|
511
|
+
* of accuracy. Behavior is undefined when `value` is anything other than a signed integer.
|
512
|
+
*
|
513
|
+
* @param value Number to write.
|
514
|
+
* @param offset Number of bytes to skip before starting to write.
|
515
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
516
|
+
* @param noAssert
|
517
|
+
* @returns `offset` plus the number of bytes written.
|
518
|
+
*/
|
519
|
+
writeIntLE(value, offset, byteLength, noAssert) {
|
520
|
+
value = +value;
|
521
|
+
offset = offset >>> 0;
|
522
|
+
if (!noAssert) {
|
523
|
+
const limit = Math.pow(2, 8 * byteLength - 1);
|
524
|
+
Buffer._checkInt(this, value, offset, byteLength, limit - 1, -limit);
|
525
|
+
}
|
526
|
+
let i = 0;
|
527
|
+
let mul = 1;
|
528
|
+
let sub = 0;
|
529
|
+
this[offset] = value & 255;
|
530
|
+
while (++i < byteLength && (mul *= 256)) {
|
531
|
+
if (value < 0 && sub === 0 && this[offset + i - 1] !== 0) {
|
532
|
+
sub = 1;
|
533
|
+
}
|
534
|
+
this[offset + i] = (value / mul >> 0) - sub & 255;
|
535
|
+
}
|
536
|
+
return offset + byteLength;
|
537
|
+
}
|
538
|
+
/**
|
539
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits
|
540
|
+
* of accuracy. Behavior is undefined when `value` is anything other than a signed integer.
|
541
|
+
*
|
542
|
+
* @param value Number to write.
|
543
|
+
* @param offset Number of bytes to skip before starting to write.
|
544
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
545
|
+
* @param noAssert
|
546
|
+
* @returns `offset` plus the number of bytes written.
|
547
|
+
*/
|
548
|
+
writeIntBE(value, offset, byteLength, noAssert) {
|
549
|
+
value = +value;
|
550
|
+
offset = offset >>> 0;
|
551
|
+
if (!noAssert) {
|
552
|
+
const limit = Math.pow(2, 8 * byteLength - 1);
|
553
|
+
Buffer._checkInt(this, value, offset, byteLength, limit - 1, -limit);
|
554
|
+
}
|
555
|
+
let i = byteLength - 1;
|
556
|
+
let mul = 1;
|
557
|
+
let sub = 0;
|
558
|
+
this[offset + i] = value & 255;
|
559
|
+
while (--i >= 0 && (mul *= 256)) {
|
560
|
+
if (value < 0 && sub === 0 && this[offset + i + 1] !== 0) {
|
561
|
+
sub = 1;
|
562
|
+
}
|
563
|
+
this[offset + i] = (value / mul >> 0) - sub & 255;
|
564
|
+
}
|
565
|
+
return offset + byteLength;
|
566
|
+
}
|
567
|
+
/**
|
568
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an
|
569
|
+
* unsigned, little-endian integer supporting up to 48 bits of accuracy.
|
570
|
+
*
|
571
|
+
* @param offset Number of bytes to skip before starting to read.
|
572
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
573
|
+
* @param noAssert
|
574
|
+
*/
|
575
|
+
readUIntLE(offset, byteLength, noAssert) {
|
576
|
+
offset = offset >>> 0;
|
577
|
+
byteLength = byteLength >>> 0;
|
578
|
+
if (!noAssert) {
|
579
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
580
|
+
}
|
581
|
+
let val = this[offset];
|
582
|
+
let mul = 1;
|
583
|
+
let i = 0;
|
584
|
+
while (++i < byteLength && (mul *= 256)) {
|
585
|
+
val += this[offset + i] * mul;
|
586
|
+
}
|
587
|
+
return val;
|
588
|
+
}
|
589
|
+
/**
|
590
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an
|
591
|
+
* unsigned, big-endian integer supporting up to 48 bits of accuracy.
|
592
|
+
*
|
593
|
+
* @param offset Number of bytes to skip before starting to read.
|
594
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
595
|
+
* @param noAssert
|
596
|
+
*/
|
597
|
+
readUIntBE(offset, byteLength, noAssert) {
|
598
|
+
offset = offset >>> 0;
|
599
|
+
byteLength = byteLength >>> 0;
|
600
|
+
if (!noAssert) {
|
601
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
602
|
+
}
|
603
|
+
let val = this[offset + --byteLength];
|
604
|
+
let mul = 1;
|
605
|
+
while (byteLength > 0 && (mul *= 256)) {
|
606
|
+
val += this[offset + --byteLength] * mul;
|
607
|
+
}
|
608
|
+
return val;
|
609
|
+
}
|
610
|
+
/**
|
611
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a
|
612
|
+
* little-endian, two's complement signed value supporting up to 48 bits of accuracy.
|
613
|
+
*
|
614
|
+
* @param offset Number of bytes to skip before starting to read.
|
615
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
616
|
+
* @param noAssert
|
617
|
+
*/
|
618
|
+
readIntLE(offset, byteLength, noAssert) {
|
619
|
+
offset = offset >>> 0;
|
620
|
+
byteLength = byteLength >>> 0;
|
621
|
+
if (!noAssert) {
|
622
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
623
|
+
}
|
624
|
+
let val = this[offset];
|
625
|
+
let mul = 1;
|
626
|
+
let i = 0;
|
627
|
+
while (++i < byteLength && (mul *= 256)) {
|
628
|
+
val += this[offset + i] * mul;
|
629
|
+
}
|
630
|
+
mul *= 128;
|
631
|
+
if (val >= mul) {
|
632
|
+
val -= Math.pow(2, 8 * byteLength);
|
633
|
+
}
|
634
|
+
return val;
|
635
|
+
}
|
636
|
+
/**
|
637
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a
|
638
|
+
* big-endian, two's complement signed value supporting up to 48 bits of accuracy.
|
639
|
+
*
|
640
|
+
* @param offset Number of bytes to skip before starting to read.
|
641
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
642
|
+
* @param noAssert
|
643
|
+
*/
|
644
|
+
readIntBE(offset, byteLength, noAssert) {
|
645
|
+
offset = offset >>> 0;
|
646
|
+
byteLength = byteLength >>> 0;
|
647
|
+
if (!noAssert) {
|
648
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
649
|
+
}
|
650
|
+
let i = byteLength;
|
651
|
+
let mul = 1;
|
652
|
+
let val = this[offset + --i];
|
653
|
+
while (i > 0 && (mul *= 256)) {
|
654
|
+
val += this[offset + --i] * mul;
|
655
|
+
}
|
656
|
+
mul *= 128;
|
657
|
+
if (val >= mul) {
|
658
|
+
val -= Math.pow(2, 8 * byteLength);
|
659
|
+
}
|
660
|
+
return val;
|
661
|
+
}
|
662
|
+
/**
|
663
|
+
* Reads an unsigned 8-bit integer from `buf` at the specified `offset`.
|
664
|
+
*
|
665
|
+
* @param offset Number of bytes to skip before starting to read.
|
666
|
+
* @param noAssert
|
667
|
+
*/
|
668
|
+
readUInt8(offset, noAssert) {
|
669
|
+
offset = offset >>> 0;
|
670
|
+
if (!noAssert) {
|
671
|
+
Buffer._checkOffset(offset, 1, this.length);
|
672
|
+
}
|
673
|
+
return this[offset];
|
674
|
+
}
|
675
|
+
/**
|
676
|
+
* Reads an unsigned, little-endian 16-bit integer from `buf` at the specified `offset`.
|
677
|
+
*
|
678
|
+
* @param offset Number of bytes to skip before starting to read.
|
679
|
+
* @param noAssert
|
680
|
+
*/
|
681
|
+
readUInt16LE(offset, noAssert) {
|
682
|
+
offset = offset >>> 0;
|
683
|
+
if (!noAssert) {
|
684
|
+
Buffer._checkOffset(offset, 2, this.length);
|
685
|
+
}
|
686
|
+
return this[offset] | this[offset + 1] << 8;
|
687
|
+
}
|
688
|
+
/**
|
689
|
+
* Reads an unsigned, big-endian 16-bit integer from `buf` at the specified `offset`.
|
690
|
+
*
|
691
|
+
* @param offset Number of bytes to skip before starting to read.
|
692
|
+
* @param noAssert
|
693
|
+
*/
|
694
|
+
readUInt16BE(offset, noAssert) {
|
695
|
+
offset = offset >>> 0;
|
696
|
+
if (!noAssert) {
|
697
|
+
Buffer._checkOffset(offset, 2, this.length);
|
698
|
+
}
|
699
|
+
return this[offset] << 8 | this[offset + 1];
|
700
|
+
}
|
701
|
+
/**
|
702
|
+
* Reads an unsigned, little-endian 32-bit integer from `buf` at the specified `offset`.
|
703
|
+
*
|
704
|
+
* @param offset Number of bytes to skip before starting to read.
|
705
|
+
* @param noAssert
|
706
|
+
*/
|
707
|
+
readUInt32LE(offset, noAssert) {
|
708
|
+
offset = offset >>> 0;
|
709
|
+
if (!noAssert) {
|
710
|
+
Buffer._checkOffset(offset, 4, this.length);
|
711
|
+
}
|
712
|
+
return (this[offset] | this[offset + 1] << 8 | this[offset + 2] << 16) + this[offset + 3] * 16777216;
|
713
|
+
}
|
714
|
+
/**
|
715
|
+
* Reads an unsigned, big-endian 32-bit integer from `buf` at the specified `offset`.
|
716
|
+
*
|
717
|
+
* @param offset Number of bytes to skip before starting to read.
|
718
|
+
* @param noAssert
|
719
|
+
*/
|
720
|
+
readUInt32BE(offset, noAssert) {
|
721
|
+
offset = offset >>> 0;
|
722
|
+
if (!noAssert) {
|
723
|
+
Buffer._checkOffset(offset, 4, this.length);
|
724
|
+
}
|
725
|
+
return this[offset] * 16777216 + (this[offset + 1] << 16 | this[offset + 2] << 8 | this[offset + 3]);
|
726
|
+
}
|
727
|
+
/**
|
728
|
+
* Reads a signed 8-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer` are interpreted
|
729
|
+
* as two's complement signed values.
|
730
|
+
*
|
731
|
+
* @param offset Number of bytes to skip before starting to read.
|
732
|
+
* @param noAssert
|
733
|
+
*/
|
734
|
+
readInt8(offset, noAssert) {
|
735
|
+
offset = offset >>> 0;
|
736
|
+
if (!noAssert) {
|
737
|
+
Buffer._checkOffset(offset, 1, this.length);
|
738
|
+
}
|
739
|
+
if (!(this[offset] & 128)) {
|
740
|
+
return this[offset];
|
741
|
+
}
|
742
|
+
return (255 - this[offset] + 1) * -1;
|
743
|
+
}
|
744
|
+
/**
|
745
|
+
* Reads a signed, little-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
746
|
+
* are interpreted as two's complement signed values.
|
747
|
+
*
|
748
|
+
* @param offset Number of bytes to skip before starting to read.
|
749
|
+
* @param noAssert
|
750
|
+
*/
|
751
|
+
readInt16LE(offset, noAssert) {
|
752
|
+
offset = offset >>> 0;
|
753
|
+
if (!noAssert) {
|
754
|
+
Buffer._checkOffset(offset, 2, this.length);
|
755
|
+
}
|
756
|
+
const val = this[offset] | this[offset + 1] << 8;
|
757
|
+
return val & 32768 ? val | 4294901760 : val;
|
758
|
+
}
|
759
|
+
/**
|
760
|
+
* Reads a signed, big-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
761
|
+
* are interpreted as two's complement signed values.
|
762
|
+
*
|
763
|
+
* @param offset Number of bytes to skip before starting to read.
|
764
|
+
* @param noAssert
|
765
|
+
*/
|
766
|
+
readInt16BE(offset, noAssert) {
|
767
|
+
offset = offset >>> 0;
|
768
|
+
if (!noAssert) {
|
769
|
+
Buffer._checkOffset(offset, 2, this.length);
|
770
|
+
}
|
771
|
+
const val = this[offset + 1] | this[offset] << 8;
|
772
|
+
return val & 32768 ? val | 4294901760 : val;
|
773
|
+
}
|
774
|
+
/**
|
775
|
+
* Reads a signed, little-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
776
|
+
* are interpreted as two's complement signed values.
|
777
|
+
*
|
778
|
+
* @param offset Number of bytes to skip before starting to read.
|
779
|
+
* @param noAssert
|
780
|
+
*/
|
781
|
+
readInt32LE(offset, noAssert) {
|
782
|
+
offset = offset >>> 0;
|
783
|
+
if (!noAssert) {
|
784
|
+
Buffer._checkOffset(offset, 4, this.length);
|
785
|
+
}
|
786
|
+
return this[offset] | this[offset + 1] << 8 | this[offset + 2] << 16 | this[offset + 3] << 24;
|
787
|
+
}
|
788
|
+
/**
|
789
|
+
* Reads a signed, big-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
790
|
+
* are interpreted as two's complement signed values.
|
791
|
+
*
|
792
|
+
* @param offset Number of bytes to skip before starting to read.
|
793
|
+
* @param noAssert
|
794
|
+
*/
|
795
|
+
readInt32BE(offset, noAssert) {
|
796
|
+
offset = offset >>> 0;
|
797
|
+
if (!noAssert) {
|
798
|
+
Buffer._checkOffset(offset, 4, this.length);
|
799
|
+
}
|
800
|
+
return this[offset] << 24 | this[offset + 1] << 16 | this[offset + 2] << 8 | this[offset + 3];
|
801
|
+
}
|
802
|
+
/**
|
803
|
+
* Interprets `buf` as an array of unsigned 16-bit integers and swaps the byte order in-place.
|
804
|
+
* Throws a `RangeError` if `buf.length` is not a multiple of 2.
|
805
|
+
*/
|
806
|
+
swap16() {
|
807
|
+
const len = this.length;
|
808
|
+
if (len % 2 !== 0) {
|
809
|
+
throw new RangeError("Buffer size must be a multiple of 16-bits");
|
810
|
+
}
|
811
|
+
for (let i = 0; i < len; i += 2) {
|
812
|
+
this._swap(this, i, i + 1);
|
813
|
+
}
|
814
|
+
return this;
|
815
|
+
}
|
816
|
+
/**
|
817
|
+
* Interprets `buf` as an array of unsigned 32-bit integers and swaps the byte order in-place.
|
818
|
+
* Throws a `RangeError` if `buf.length` is not a multiple of 4.
|
819
|
+
*/
|
820
|
+
swap32() {
|
821
|
+
const len = this.length;
|
822
|
+
if (len % 4 !== 0) {
|
823
|
+
throw new RangeError("Buffer size must be a multiple of 32-bits");
|
824
|
+
}
|
825
|
+
for (let i = 0; i < len; i += 4) {
|
826
|
+
this._swap(this, i, i + 3);
|
827
|
+
this._swap(this, i + 1, i + 2);
|
828
|
+
}
|
829
|
+
return this;
|
830
|
+
}
|
831
|
+
/**
|
832
|
+
* Interprets `buf` as an array of unsigned 64-bit integers and swaps the byte order in-place.
|
833
|
+
* Throws a `RangeError` if `buf.length` is not a multiple of 8.
|
834
|
+
*/
|
835
|
+
swap64() {
|
836
|
+
const len = this.length;
|
837
|
+
if (len % 8 !== 0) {
|
838
|
+
throw new RangeError("Buffer size must be a multiple of 64-bits");
|
839
|
+
}
|
840
|
+
for (let i = 0; i < len; i += 8) {
|
841
|
+
this._swap(this, i, i + 7);
|
842
|
+
this._swap(this, i + 1, i + 6);
|
843
|
+
this._swap(this, i + 2, i + 5);
|
844
|
+
this._swap(this, i + 3, i + 4);
|
845
|
+
}
|
846
|
+
return this;
|
847
|
+
}
|
848
|
+
/**
|
849
|
+
* Swaps two octets.
|
850
|
+
*
|
851
|
+
* @param b
|
852
|
+
* @param n
|
853
|
+
* @param m
|
854
|
+
*/
|
855
|
+
_swap(b, n, m) {
|
856
|
+
const i = b[n];
|
857
|
+
b[n] = b[m];
|
858
|
+
b[m] = i;
|
859
|
+
}
|
860
|
+
/**
|
861
|
+
* Writes `value` to `buf` at the specified `offset`. The `value` must be a valid unsigned 8-bit integer.
|
862
|
+
* Behavior is undefined when `value` is anything other than an unsigned 8-bit integer.
|
863
|
+
*
|
864
|
+
* @param value Number to write.
|
865
|
+
* @param offset Number of bytes to skip before starting to write.
|
866
|
+
* @param noAssert
|
867
|
+
* @returns `offset` plus the number of bytes written.
|
868
|
+
*/
|
869
|
+
writeUInt8(value, offset, noAssert) {
|
870
|
+
value = +value;
|
871
|
+
offset = offset >>> 0;
|
872
|
+
if (!noAssert) {
|
873
|
+
Buffer._checkInt(this, value, offset, 1, 255, 0);
|
874
|
+
}
|
875
|
+
this[offset] = value & 255;
|
876
|
+
return offset + 1;
|
877
|
+
}
|
878
|
+
/**
|
879
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 16-bit
|
880
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 16-bit integer.
|
881
|
+
*
|
882
|
+
* @param value Number to write.
|
883
|
+
* @param offset Number of bytes to skip before starting to write.
|
884
|
+
* @param noAssert
|
885
|
+
* @returns `offset` plus the number of bytes written.
|
886
|
+
*/
|
887
|
+
writeUInt16LE(value, offset, noAssert) {
|
888
|
+
value = +value;
|
889
|
+
offset = offset >>> 0;
|
890
|
+
if (!noAssert) {
|
891
|
+
Buffer._checkInt(this, value, offset, 2, 65535, 0);
|
892
|
+
}
|
893
|
+
this[offset] = value & 255;
|
894
|
+
this[offset + 1] = value >>> 8;
|
895
|
+
return offset + 2;
|
896
|
+
}
|
897
|
+
/**
|
898
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 16-bit
|
899
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 16-bit integer.
|
900
|
+
*
|
901
|
+
* @param value Number to write.
|
902
|
+
* @param offset Number of bytes to skip before starting to write.
|
903
|
+
* @param noAssert
|
904
|
+
* @returns `offset` plus the number of bytes written.
|
905
|
+
*/
|
906
|
+
writeUInt16BE(value, offset, noAssert) {
|
907
|
+
value = +value;
|
908
|
+
offset = offset >>> 0;
|
909
|
+
if (!noAssert) {
|
910
|
+
Buffer._checkInt(this, value, offset, 2, 65535, 0);
|
911
|
+
}
|
912
|
+
this[offset] = value >>> 8;
|
913
|
+
this[offset + 1] = value & 255;
|
914
|
+
return offset + 2;
|
915
|
+
}
|
916
|
+
/**
|
917
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 32-bit
|
918
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 32-bit integer.
|
919
|
+
*
|
920
|
+
* @param value Number to write.
|
921
|
+
* @param offset Number of bytes to skip before starting to write.
|
922
|
+
* @param noAssert
|
923
|
+
* @returns `offset` plus the number of bytes written.
|
924
|
+
*/
|
925
|
+
writeUInt32LE(value, offset, noAssert) {
|
926
|
+
value = +value;
|
927
|
+
offset = offset >>> 0;
|
928
|
+
if (!noAssert) {
|
929
|
+
Buffer._checkInt(this, value, offset, 4, 4294967295, 0);
|
930
|
+
}
|
931
|
+
this[offset + 3] = value >>> 24;
|
932
|
+
this[offset + 2] = value >>> 16;
|
933
|
+
this[offset + 1] = value >>> 8;
|
934
|
+
this[offset] = value & 255;
|
935
|
+
return offset + 4;
|
936
|
+
}
|
937
|
+
/**
|
938
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 32-bit
|
939
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 32-bit integer.
|
940
|
+
*
|
941
|
+
* @param value Number to write.
|
942
|
+
* @param offset Number of bytes to skip before starting to write.
|
943
|
+
* @param noAssert
|
944
|
+
* @returns `offset` plus the number of bytes written.
|
945
|
+
*/
|
946
|
+
writeUInt32BE(value, offset, noAssert) {
|
947
|
+
value = +value;
|
948
|
+
offset = offset >>> 0;
|
949
|
+
if (!noAssert) {
|
950
|
+
Buffer._checkInt(this, value, offset, 4, 4294967295, 0);
|
951
|
+
}
|
952
|
+
this[offset] = value >>> 24;
|
953
|
+
this[offset + 1] = value >>> 16;
|
954
|
+
this[offset + 2] = value >>> 8;
|
955
|
+
this[offset + 3] = value & 255;
|
956
|
+
return offset + 4;
|
957
|
+
}
|
958
|
+
/**
|
959
|
+
* Writes `value` to `buf` at the specified `offset`. The `value` must be a valid signed 8-bit integer.
|
960
|
+
* Behavior is undefined when `value` is anything other than a signed 8-bit integer.
|
961
|
+
*
|
962
|
+
* @param value Number to write.
|
963
|
+
* @param offset Number of bytes to skip before starting to write.
|
964
|
+
* @param noAssert
|
965
|
+
* @returns `offset` plus the number of bytes written.
|
966
|
+
*/
|
967
|
+
writeInt8(value, offset, noAssert) {
|
968
|
+
value = +value;
|
969
|
+
offset = offset >>> 0;
|
970
|
+
if (!noAssert) {
|
971
|
+
Buffer._checkInt(this, value, offset, 1, 127, -128);
|
972
|
+
}
|
973
|
+
if (value < 0) {
|
974
|
+
value = 255 + value + 1;
|
975
|
+
}
|
976
|
+
this[offset] = value & 255;
|
977
|
+
return offset + 1;
|
978
|
+
}
|
979
|
+
/**
|
980
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 16-bit
|
981
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 16-bit integer.
|
982
|
+
*
|
983
|
+
* @param value Number to write.
|
984
|
+
* @param offset Number of bytes to skip before starting to write.
|
985
|
+
* @param noAssert
|
986
|
+
* @returns `offset` plus the number of bytes written.
|
987
|
+
*/
|
988
|
+
writeInt16LE(value, offset, noAssert) {
|
989
|
+
value = +value;
|
990
|
+
offset = offset >>> 0;
|
991
|
+
if (!noAssert) {
|
992
|
+
Buffer._checkInt(this, value, offset, 2, 32767, -32768);
|
993
|
+
}
|
994
|
+
this[offset] = value & 255;
|
995
|
+
this[offset + 1] = value >>> 8;
|
996
|
+
return offset + 2;
|
997
|
+
}
|
998
|
+
/**
|
999
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 16-bit
|
1000
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 16-bit integer.
|
1001
|
+
*
|
1002
|
+
* @param value Number to write.
|
1003
|
+
* @param offset Number of bytes to skip before starting to write.
|
1004
|
+
* @param noAssert
|
1005
|
+
* @returns `offset` plus the number of bytes written.
|
1006
|
+
*/
|
1007
|
+
writeInt16BE(value, offset, noAssert) {
|
1008
|
+
value = +value;
|
1009
|
+
offset = offset >>> 0;
|
1010
|
+
if (!noAssert) {
|
1011
|
+
Buffer._checkInt(this, value, offset, 2, 32767, -32768);
|
1012
|
+
}
|
1013
|
+
this[offset] = value >>> 8;
|
1014
|
+
this[offset + 1] = value & 255;
|
1015
|
+
return offset + 2;
|
1016
|
+
}
|
1017
|
+
/**
|
1018
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 32-bit
|
1019
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 32-bit integer.
|
1020
|
+
*
|
1021
|
+
* @param value Number to write.
|
1022
|
+
* @param offset Number of bytes to skip before starting to write.
|
1023
|
+
* @param noAssert
|
1024
|
+
* @returns `offset` plus the number of bytes written.
|
1025
|
+
*/
|
1026
|
+
writeInt32LE(value, offset, noAssert) {
|
1027
|
+
value = +value;
|
1028
|
+
offset = offset >>> 0;
|
1029
|
+
if (!noAssert) {
|
1030
|
+
Buffer._checkInt(this, value, offset, 4, 2147483647, -2147483648);
|
1031
|
+
}
|
1032
|
+
this[offset] = value & 255;
|
1033
|
+
this[offset + 1] = value >>> 8;
|
1034
|
+
this[offset + 2] = value >>> 16;
|
1035
|
+
this[offset + 3] = value >>> 24;
|
1036
|
+
return offset + 4;
|
1037
|
+
}
|
1038
|
+
/**
|
1039
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 32-bit
|
1040
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 32-bit integer.
|
1041
|
+
*
|
1042
|
+
* @param value Number to write.
|
1043
|
+
* @param offset Number of bytes to skip before starting to write.
|
1044
|
+
* @param noAssert
|
1045
|
+
* @returns `offset` plus the number of bytes written.
|
1046
|
+
*/
|
1047
|
+
writeInt32BE(value, offset, noAssert) {
|
1048
|
+
value = +value;
|
1049
|
+
offset = offset >>> 0;
|
1050
|
+
if (!noAssert) {
|
1051
|
+
Buffer._checkInt(this, value, offset, 4, 2147483647, -2147483648);
|
1052
|
+
}
|
1053
|
+
if (value < 0) {
|
1054
|
+
value = 4294967295 + value + 1;
|
1055
|
+
}
|
1056
|
+
this[offset] = value >>> 24;
|
1057
|
+
this[offset + 1] = value >>> 16;
|
1058
|
+
this[offset + 2] = value >>> 8;
|
1059
|
+
this[offset + 3] = value & 255;
|
1060
|
+
return offset + 4;
|
1061
|
+
}
|
1062
|
+
/**
|
1063
|
+
* Fills `buf` with the specified `value`. If the `offset` and `end` are not given, the entire `buf` will be
|
1064
|
+
* filled. The `value` is coerced to a `uint32` value if it is not a string, `Buffer`, or integer. If the resulting
|
1065
|
+
* integer is greater than `255` (decimal), then `buf` will be filled with `value & 255`.
|
1066
|
+
*
|
1067
|
+
* If the final write of a `fill()` operation falls on a multi-byte character, then only the bytes of that
|
1068
|
+
* character that fit into `buf` are written.
|
1069
|
+
*
|
1070
|
+
* If `value` contains invalid characters, it is truncated; if no valid fill data remains, an exception is thrown.
|
1071
|
+
*
|
1072
|
+
* @param value
|
1073
|
+
* @param encoding
|
1074
|
+
*/
|
1075
|
+
fill(value, offset, end, encoding) {
|
1076
|
+
if (typeof value === "string") {
|
1077
|
+
if (typeof offset === "string") {
|
1078
|
+
encoding = offset;
|
1079
|
+
offset = 0;
|
1080
|
+
end = this.length;
|
1081
|
+
} else if (typeof end === "string") {
|
1082
|
+
encoding = end;
|
1083
|
+
end = this.length;
|
1084
|
+
}
|
1085
|
+
if (encoding !== void 0 && typeof encoding !== "string") {
|
1086
|
+
throw new TypeError("encoding must be a string");
|
1087
|
+
}
|
1088
|
+
if (typeof encoding === "string" && !Buffer.isEncoding(encoding)) {
|
1089
|
+
throw new TypeError("Unknown encoding: " + encoding);
|
1090
|
+
}
|
1091
|
+
if (value.length === 1) {
|
1092
|
+
const code = value.charCodeAt(0);
|
1093
|
+
if (encoding === "utf8" && code < 128) {
|
1094
|
+
value = code;
|
1095
|
+
}
|
1096
|
+
}
|
1097
|
+
} else if (typeof value === "number") {
|
1098
|
+
value = value & 255;
|
1099
|
+
} else if (typeof value === "boolean") {
|
1100
|
+
value = Number(value);
|
1101
|
+
}
|
1102
|
+
offset ?? (offset = 0);
|
1103
|
+
end ?? (end = this.length);
|
1104
|
+
if (offset < 0 || this.length < offset || this.length < end) {
|
1105
|
+
throw new RangeError("Out of range index");
|
1106
|
+
}
|
1107
|
+
if (end <= offset) {
|
1108
|
+
return this;
|
1109
|
+
}
|
1110
|
+
offset = offset >>> 0;
|
1111
|
+
end = end === void 0 ? this.length : end >>> 0;
|
1112
|
+
value || (value = 0);
|
1113
|
+
let i;
|
1114
|
+
if (typeof value === "number") {
|
1115
|
+
for (i = offset; i < end; ++i) {
|
1116
|
+
this[i] = value;
|
1117
|
+
}
|
1118
|
+
} else {
|
1119
|
+
const bytes = Buffer.isBuffer(value) ? value : Buffer.from(value, encoding);
|
1120
|
+
const len = bytes.length;
|
1121
|
+
if (len === 0) {
|
1122
|
+
throw new TypeError('The value "' + value + '" is invalid for argument "value"');
|
1123
|
+
}
|
1124
|
+
for (i = 0; i < end - offset; ++i) {
|
1125
|
+
this[i + offset] = bytes[i % len];
|
1126
|
+
}
|
1127
|
+
}
|
1128
|
+
return this;
|
1129
|
+
}
|
1130
|
+
/**
|
1131
|
+
* Returns the index of the specified value.
|
1132
|
+
*
|
1133
|
+
* If `value` is:
|
1134
|
+
* - a string, `value` is interpreted according to the character encoding in `encoding`.
|
1135
|
+
* - a `Buffer` or `Uint8Array`, `value` will be used in its entirety. To compare a partial Buffer, use `slice()`.
|
1136
|
+
* - a number, `value` will be interpreted as an unsigned 8-bit integer value between `0` and `255`.
|
1137
|
+
*
|
1138
|
+
* Any other types will throw a `TypeError`.
|
1139
|
+
*
|
1140
|
+
* @param value What to search for.
|
1141
|
+
* @param byteOffset Where to begin searching in `buf`. If negative, then calculated from the end.
|
1142
|
+
* @param encoding If `value` is a string, this is the encoding used to search.
|
1143
|
+
* @returns The index of the first occurrence of `value` in `buf`, or `-1` if not found.
|
1144
|
+
*/
|
1145
|
+
indexOf(value, byteOffset, encoding) {
|
1146
|
+
return this._bidirectionalIndexOf(this, value, byteOffset, encoding, true);
|
1147
|
+
}
|
1148
|
+
/**
|
1149
|
+
* Gets the last index of the specified value.
|
1150
|
+
*
|
1151
|
+
* @see indexOf()
|
1152
|
+
* @param value
|
1153
|
+
* @param byteOffset
|
1154
|
+
* @param encoding
|
1155
|
+
*/
|
1156
|
+
lastIndexOf(value, byteOffset, encoding) {
|
1157
|
+
return this._bidirectionalIndexOf(this, value, byteOffset, encoding, false);
|
1158
|
+
}
|
1159
|
+
_bidirectionalIndexOf(buffer, val, byteOffset, encoding, dir) {
|
1160
|
+
if (buffer.length === 0) {
|
1161
|
+
return -1;
|
1162
|
+
}
|
1163
|
+
if (typeof byteOffset === "string") {
|
1164
|
+
encoding = byteOffset;
|
1165
|
+
byteOffset = 0;
|
1166
|
+
} else if (typeof byteOffset === "undefined") {
|
1167
|
+
byteOffset = 0;
|
1168
|
+
} else if (byteOffset > 2147483647) {
|
1169
|
+
byteOffset = 2147483647;
|
1170
|
+
} else if (byteOffset < -2147483648) {
|
1171
|
+
byteOffset = -2147483648;
|
1172
|
+
}
|
1173
|
+
byteOffset = +byteOffset;
|
1174
|
+
if (byteOffset !== byteOffset) {
|
1175
|
+
byteOffset = dir ? 0 : buffer.length - 1;
|
1176
|
+
}
|
1177
|
+
if (byteOffset < 0) {
|
1178
|
+
byteOffset = buffer.length + byteOffset;
|
1179
|
+
}
|
1180
|
+
if (byteOffset >= buffer.length) {
|
1181
|
+
if (dir) {
|
1182
|
+
return -1;
|
1183
|
+
} else {
|
1184
|
+
byteOffset = buffer.length - 1;
|
1185
|
+
}
|
1186
|
+
} else if (byteOffset < 0) {
|
1187
|
+
if (dir) {
|
1188
|
+
byteOffset = 0;
|
1189
|
+
} else {
|
1190
|
+
return -1;
|
1191
|
+
}
|
1192
|
+
}
|
1193
|
+
if (typeof val === "string") {
|
1194
|
+
val = Buffer.from(val, encoding);
|
1195
|
+
}
|
1196
|
+
if (Buffer.isBuffer(val)) {
|
1197
|
+
if (val.length === 0) {
|
1198
|
+
return -1;
|
1199
|
+
}
|
1200
|
+
return Buffer._arrayIndexOf(buffer, val, byteOffset, encoding, dir);
|
1201
|
+
} else if (typeof val === "number") {
|
1202
|
+
val = val & 255;
|
1203
|
+
if (typeof Uint8Array.prototype.indexOf === "function") {
|
1204
|
+
if (dir) {
|
1205
|
+
return Uint8Array.prototype.indexOf.call(buffer, val, byteOffset);
|
1206
|
+
} else {
|
1207
|
+
return Uint8Array.prototype.lastIndexOf.call(buffer, val, byteOffset);
|
1208
|
+
}
|
1209
|
+
}
|
1210
|
+
return Buffer._arrayIndexOf(buffer, Buffer.from([val]), byteOffset, encoding, dir);
|
1211
|
+
}
|
1212
|
+
throw new TypeError("val must be string, number or Buffer");
|
1213
|
+
}
|
1214
|
+
/**
|
1215
|
+
* Equivalent to `buf.indexOf() !== -1`.
|
1216
|
+
*
|
1217
|
+
* @param value
|
1218
|
+
* @param byteOffset
|
1219
|
+
* @param encoding
|
1220
|
+
*/
|
1221
|
+
includes(value, byteOffset, encoding) {
|
1222
|
+
return this.indexOf(value, byteOffset, encoding) !== -1;
|
1223
|
+
}
|
1224
|
+
/**
|
1225
|
+
* Creates a new buffer from the given parameters.
|
1226
|
+
*
|
1227
|
+
* @param data
|
1228
|
+
* @param encoding
|
1229
|
+
*/
|
1230
|
+
static from(a, b, c) {
|
1231
|
+
return new Buffer(a, b, c);
|
1232
|
+
}
|
1233
|
+
/**
|
1234
|
+
* Returns true if `obj` is a Buffer.
|
1235
|
+
*
|
1236
|
+
* @param obj
|
1237
|
+
*/
|
1238
|
+
static isBuffer(obj) {
|
1239
|
+
return obj != null && obj !== Buffer.prototype && Buffer._isInstance(obj, Buffer);
|
1240
|
+
}
|
1241
|
+
/**
|
1242
|
+
* Returns true if `encoding` is a supported encoding.
|
1243
|
+
*
|
1244
|
+
* @param encoding
|
1245
|
+
*/
|
1246
|
+
static isEncoding(encoding) {
|
1247
|
+
switch (encoding.toLowerCase()) {
|
1248
|
+
case "hex":
|
1249
|
+
case "utf8":
|
1250
|
+
case "ascii":
|
1251
|
+
case "binary":
|
1252
|
+
case "latin1":
|
1253
|
+
case "ucs2":
|
1254
|
+
case "utf16le":
|
1255
|
+
case "base64":
|
1256
|
+
return true;
|
1257
|
+
default:
|
1258
|
+
return false;
|
1259
|
+
}
|
1260
|
+
}
|
1261
|
+
/**
|
1262
|
+
* Gives the actual byte length of a string for an encoding. This is not the same as `string.length` since that
|
1263
|
+
* returns the number of characters in the string.
|
1264
|
+
*
|
1265
|
+
* @param string The string to test.
|
1266
|
+
* @param encoding The encoding to use for calculation. Defaults is `utf8`.
|
1267
|
+
*/
|
1268
|
+
static byteLength(string, encoding) {
|
1269
|
+
if (Buffer.isBuffer(string)) {
|
1270
|
+
return string.length;
|
1271
|
+
}
|
1272
|
+
if (typeof string !== "string" && (ArrayBuffer.isView(string) || Buffer._isInstance(string, ArrayBuffer))) {
|
1273
|
+
return string.byteLength;
|
1274
|
+
}
|
1275
|
+
if (typeof string !== "string") {
|
1276
|
+
throw new TypeError(
|
1277
|
+
'The "string" argument must be one of type string, Buffer, or ArrayBuffer. Received type ' + typeof string
|
1278
|
+
);
|
1279
|
+
}
|
1280
|
+
const len = string.length;
|
1281
|
+
const mustMatch = arguments.length > 2 && arguments[2] === true;
|
1282
|
+
if (!mustMatch && len === 0) {
|
1283
|
+
return 0;
|
1284
|
+
}
|
1285
|
+
switch (encoding?.toLowerCase()) {
|
1286
|
+
case "ascii":
|
1287
|
+
case "latin1":
|
1288
|
+
case "binary":
|
1289
|
+
return len;
|
1290
|
+
case "utf8":
|
1291
|
+
return Buffer._utf8ToBytes(string).length;
|
1292
|
+
case "hex":
|
1293
|
+
return len >>> 1;
|
1294
|
+
case "ucs2":
|
1295
|
+
case "utf16le":
|
1296
|
+
return len * 2;
|
1297
|
+
case "base64":
|
1298
|
+
return Buffer._base64ToBytes(string).length;
|
1299
|
+
default:
|
1300
|
+
return mustMatch ? -1 : Buffer._utf8ToBytes(string).length;
|
1301
|
+
}
|
1302
|
+
}
|
1303
|
+
/**
|
1304
|
+
* Returns a Buffer which is the result of concatenating all the buffers in the list together.
|
1305
|
+
*
|
1306
|
+
* - If the list has no items, or if the `totalLength` is 0, then it returns a zero-length buffer.
|
1307
|
+
* - If the list has exactly one item, then the first item is returned.
|
1308
|
+
* - If the list has more than one item, then a new buffer is created.
|
1309
|
+
*
|
1310
|
+
* It is faster to provide the `totalLength` if it is known. However, it will be calculated if not provided at
|
1311
|
+
* a small computational expense.
|
1312
|
+
*
|
1313
|
+
* @param list An array of Buffer objects to concatenate.
|
1314
|
+
* @param totalLength Total length of the buffers when concatenated.
|
1315
|
+
*/
|
1316
|
+
static concat(list, totalLength) {
|
1317
|
+
if (!Array.isArray(list)) {
|
1318
|
+
throw new TypeError('"list" argument must be an Array of Buffers');
|
1319
|
+
}
|
1320
|
+
if (list.length === 0) {
|
1321
|
+
return Buffer.alloc(0);
|
1322
|
+
}
|
1323
|
+
let i;
|
1324
|
+
if (totalLength === void 0) {
|
1325
|
+
totalLength = 0;
|
1326
|
+
for (i = 0; i < list.length; ++i) {
|
1327
|
+
totalLength += list[i].length;
|
1328
|
+
}
|
1329
|
+
}
|
1330
|
+
const buffer = Buffer.allocUnsafe(totalLength);
|
1331
|
+
let pos = 0;
|
1332
|
+
for (i = 0; i < list.length; ++i) {
|
1333
|
+
let buf = list[i];
|
1334
|
+
if (Buffer._isInstance(buf, Uint8Array)) {
|
1335
|
+
if (pos + buf.length > buffer.length) {
|
1336
|
+
if (!Buffer.isBuffer(buf)) {
|
1337
|
+
buf = Buffer.from(buf);
|
1338
|
+
}
|
1339
|
+
buf.copy(buffer, pos);
|
1340
|
+
} else {
|
1341
|
+
Uint8Array.prototype.set.call(buffer, buf, pos);
|
1342
|
+
}
|
1343
|
+
} else if (!Buffer.isBuffer(buf)) {
|
1344
|
+
throw new TypeError('"list" argument must be an Array of Buffers');
|
1345
|
+
} else {
|
1346
|
+
buf.copy(buffer, pos);
|
1347
|
+
}
|
1348
|
+
pos += buf.length;
|
1349
|
+
}
|
1350
|
+
return buffer;
|
1351
|
+
}
|
1352
|
+
/**
|
1353
|
+
* The same as `buf1.compare(buf2)`.
|
1354
|
+
*/
|
1355
|
+
static compare(buf1, buf2) {
|
1356
|
+
if (Buffer._isInstance(buf1, Uint8Array)) {
|
1357
|
+
buf1 = Buffer.from(buf1, buf1.byteOffset, buf1.byteLength);
|
1358
|
+
}
|
1359
|
+
if (Buffer._isInstance(buf2, Uint8Array)) {
|
1360
|
+
buf2 = Buffer.from(buf2, buf2.byteOffset, buf2.byteLength);
|
1361
|
+
}
|
1362
|
+
if (!Buffer.isBuffer(buf1) || !Buffer.isBuffer(buf2)) {
|
1363
|
+
throw new TypeError('The "buf1", "buf2" arguments must be one of type Buffer or Uint8Array');
|
1364
|
+
}
|
1365
|
+
if (buf1 === buf2) {
|
1366
|
+
return 0;
|
1367
|
+
}
|
1368
|
+
let x = buf1.length;
|
1369
|
+
let y = buf2.length;
|
1370
|
+
for (let i = 0, len = Math.min(x, y); i < len; ++i) {
|
1371
|
+
if (buf1[i] !== buf2[i]) {
|
1372
|
+
x = buf1[i];
|
1373
|
+
y = buf2[i];
|
1374
|
+
break;
|
1375
|
+
}
|
1376
|
+
}
|
1377
|
+
if (x < y) {
|
1378
|
+
return -1;
|
1379
|
+
}
|
1380
|
+
if (y < x) {
|
1381
|
+
return 1;
|
1382
|
+
}
|
1383
|
+
return 0;
|
1384
|
+
}
|
1385
|
+
/**
|
1386
|
+
* Allocates a new buffer of `size` octets.
|
1387
|
+
*
|
1388
|
+
* @param size The number of octets to allocate.
|
1389
|
+
* @param fill If specified, the buffer will be initialized by calling `buf.fill(fill)`, or with zeroes otherwise.
|
1390
|
+
* @param encoding The encoding used for the call to `buf.fill()` while initializing.
|
1391
|
+
*/
|
1392
|
+
static alloc(size, fill, encoding) {
|
1393
|
+
if (typeof size !== "number") {
|
1394
|
+
throw new TypeError('"size" argument must be of type number');
|
1395
|
+
} else if (size < 0) {
|
1396
|
+
throw new RangeError('The value "' + size + '" is invalid for option "size"');
|
1397
|
+
}
|
1398
|
+
if (size <= 0) {
|
1399
|
+
return new Buffer(size);
|
1400
|
+
}
|
1401
|
+
if (fill !== void 0) {
|
1402
|
+
return typeof encoding === "string" ? new Buffer(size).fill(fill, 0, size, encoding) : new Buffer(size).fill(fill);
|
1403
|
+
}
|
1404
|
+
return new Buffer(size);
|
1405
|
+
}
|
1406
|
+
/**
|
1407
|
+
* Allocates a new buffer of `size` octets without initializing memory. The contents of the buffer are unknown.
|
1408
|
+
*
|
1409
|
+
* @param size
|
1410
|
+
*/
|
1411
|
+
static allocUnsafe(size) {
|
1412
|
+
if (typeof size !== "number") {
|
1413
|
+
throw new TypeError('"size" argument must be of type number');
|
1414
|
+
} else if (size < 0) {
|
1415
|
+
throw new RangeError('The value "' + size + '" is invalid for option "size"');
|
1416
|
+
}
|
1417
|
+
return new Buffer(size < 0 ? 0 : Buffer._checked(size) | 0);
|
1418
|
+
}
|
1419
|
+
/**
|
1420
|
+
* Returns true if the given `obj` is an instance of `type`.
|
1421
|
+
*
|
1422
|
+
* @param obj
|
1423
|
+
* @param type
|
1424
|
+
*/
|
1425
|
+
static _isInstance(obj, type) {
|
1426
|
+
return obj instanceof type || obj != null && obj.constructor != null && obj.constructor.name != null && obj.constructor.name === type.name;
|
1427
|
+
}
|
1428
|
+
static _checked(length) {
|
1429
|
+
if (length >= K_MAX_LENGTH) {
|
1430
|
+
throw new RangeError(
|
1431
|
+
"Attempt to allocate Buffer larger than maximum size: 0x" + K_MAX_LENGTH.toString(16) + " bytes"
|
1432
|
+
);
|
1433
|
+
}
|
1434
|
+
return length | 0;
|
1435
|
+
}
|
1436
|
+
static _blitBuffer(src, dst, offset, length) {
|
1437
|
+
let i;
|
1438
|
+
for (i = 0; i < length; ++i) {
|
1439
|
+
if (i + offset >= dst.length || i >= src.length) {
|
1440
|
+
break;
|
1441
|
+
}
|
1442
|
+
dst[i + offset] = src[i];
|
1443
|
+
}
|
1444
|
+
return i;
|
1445
|
+
}
|
1446
|
+
static _utf8Write(buf, string, offset, length) {
|
1447
|
+
return Buffer._blitBuffer(Buffer._utf8ToBytes(string, buf.length - offset), buf, offset, length);
|
1448
|
+
}
|
1449
|
+
static _asciiWrite(buf, string, offset, length) {
|
1450
|
+
return Buffer._blitBuffer(Buffer._asciiToBytes(string), buf, offset, length);
|
1451
|
+
}
|
1452
|
+
static _base64Write(buf, string, offset, length) {
|
1453
|
+
return Buffer._blitBuffer(Buffer._base64ToBytes(string), buf, offset, length);
|
1454
|
+
}
|
1455
|
+
static _ucs2Write(buf, string, offset, length) {
|
1456
|
+
return Buffer._blitBuffer(Buffer._utf16leToBytes(string, buf.length - offset), buf, offset, length);
|
1457
|
+
}
|
1458
|
+
static _hexWrite(buf, string, offset, length) {
|
1459
|
+
offset = Number(offset) || 0;
|
1460
|
+
const remaining = buf.length - offset;
|
1461
|
+
if (!length) {
|
1462
|
+
length = remaining;
|
1463
|
+
} else {
|
1464
|
+
length = Number(length);
|
1465
|
+
if (length > remaining) {
|
1466
|
+
length = remaining;
|
1467
|
+
}
|
1468
|
+
}
|
1469
|
+
const strLen = string.length;
|
1470
|
+
if (length > strLen / 2) {
|
1471
|
+
length = strLen / 2;
|
1472
|
+
}
|
1473
|
+
let i;
|
1474
|
+
for (i = 0; i < length; ++i) {
|
1475
|
+
const parsed = parseInt(string.substr(i * 2, 2), 16);
|
1476
|
+
if (parsed !== parsed) {
|
1477
|
+
return i;
|
1478
|
+
}
|
1479
|
+
buf[offset + i] = parsed;
|
1480
|
+
}
|
1481
|
+
return i;
|
1482
|
+
}
|
1483
|
+
static _utf8ToBytes(string, units) {
|
1484
|
+
units = units || Infinity;
|
1485
|
+
const length = string.length;
|
1486
|
+
const bytes = [];
|
1487
|
+
let codePoint;
|
1488
|
+
let leadSurrogate = null;
|
1489
|
+
for (let i = 0; i < length; ++i) {
|
1490
|
+
codePoint = string.charCodeAt(i);
|
1491
|
+
if (codePoint > 55295 && codePoint < 57344) {
|
1492
|
+
if (!leadSurrogate) {
|
1493
|
+
if (codePoint > 56319) {
|
1494
|
+
if ((units -= 3) > -1) {
|
1495
|
+
bytes.push(239, 191, 189);
|
1496
|
+
}
|
1497
|
+
continue;
|
1498
|
+
} else if (i + 1 === length) {
|
1499
|
+
if ((units -= 3) > -1) {
|
1500
|
+
bytes.push(239, 191, 189);
|
1501
|
+
}
|
1502
|
+
continue;
|
1503
|
+
}
|
1504
|
+
leadSurrogate = codePoint;
|
1505
|
+
continue;
|
1506
|
+
}
|
1507
|
+
if (codePoint < 56320) {
|
1508
|
+
if ((units -= 3) > -1) {
|
1509
|
+
bytes.push(239, 191, 189);
|
1510
|
+
}
|
1511
|
+
leadSurrogate = codePoint;
|
1512
|
+
continue;
|
1513
|
+
}
|
1514
|
+
codePoint = (leadSurrogate - 55296 << 10 | codePoint - 56320) + 65536;
|
1515
|
+
} else if (leadSurrogate) {
|
1516
|
+
if ((units -= 3) > -1) {
|
1517
|
+
bytes.push(239, 191, 189);
|
1518
|
+
}
|
1519
|
+
}
|
1520
|
+
leadSurrogate = null;
|
1521
|
+
if (codePoint < 128) {
|
1522
|
+
if ((units -= 1) < 0) {
|
1523
|
+
break;
|
1524
|
+
}
|
1525
|
+
bytes.push(codePoint);
|
1526
|
+
} else if (codePoint < 2048) {
|
1527
|
+
if ((units -= 2) < 0) {
|
1528
|
+
break;
|
1529
|
+
}
|
1530
|
+
bytes.push(codePoint >> 6 | 192, codePoint & 63 | 128);
|
1531
|
+
} else if (codePoint < 65536) {
|
1532
|
+
if ((units -= 3) < 0) {
|
1533
|
+
break;
|
1534
|
+
}
|
1535
|
+
bytes.push(codePoint >> 12 | 224, codePoint >> 6 & 63 | 128, codePoint & 63 | 128);
|
1536
|
+
} else if (codePoint < 1114112) {
|
1537
|
+
if ((units -= 4) < 0) {
|
1538
|
+
break;
|
1539
|
+
}
|
1540
|
+
bytes.push(
|
1541
|
+
codePoint >> 18 | 240,
|
1542
|
+
codePoint >> 12 & 63 | 128,
|
1543
|
+
codePoint >> 6 & 63 | 128,
|
1544
|
+
codePoint & 63 | 128
|
1545
|
+
);
|
1546
|
+
} else {
|
1547
|
+
throw new Error("Invalid code point");
|
1548
|
+
}
|
1549
|
+
}
|
1550
|
+
return bytes;
|
1551
|
+
}
|
1552
|
+
static _base64ToBytes(str) {
|
1553
|
+
return toByteArray(base64clean(str));
|
1554
|
+
}
|
1555
|
+
static _asciiToBytes(str) {
|
1556
|
+
const byteArray = [];
|
1557
|
+
for (let i = 0; i < str.length; ++i) {
|
1558
|
+
byteArray.push(str.charCodeAt(i) & 255);
|
1559
|
+
}
|
1560
|
+
return byteArray;
|
1561
|
+
}
|
1562
|
+
static _utf16leToBytes(str, units) {
|
1563
|
+
let c, hi, lo;
|
1564
|
+
const byteArray = [];
|
1565
|
+
for (let i = 0; i < str.length; ++i) {
|
1566
|
+
if ((units -= 2) < 0) break;
|
1567
|
+
c = str.charCodeAt(i);
|
1568
|
+
hi = c >> 8;
|
1569
|
+
lo = c % 256;
|
1570
|
+
byteArray.push(lo);
|
1571
|
+
byteArray.push(hi);
|
1572
|
+
}
|
1573
|
+
return byteArray;
|
1574
|
+
}
|
1575
|
+
static _hexSlice(buf, start, end) {
|
1576
|
+
const len = buf.length;
|
1577
|
+
if (!start || start < 0) {
|
1578
|
+
start = 0;
|
1579
|
+
}
|
1580
|
+
if (!end || end < 0 || end > len) {
|
1581
|
+
end = len;
|
1582
|
+
}
|
1583
|
+
let out = "";
|
1584
|
+
for (let i = start; i < end; ++i) {
|
1585
|
+
out += hexSliceLookupTable[buf[i]];
|
1586
|
+
}
|
1587
|
+
return out;
|
1588
|
+
}
|
1589
|
+
static _base64Slice(buf, start, end) {
|
1590
|
+
if (start === 0 && end === buf.length) {
|
1591
|
+
return fromByteArray(buf);
|
1592
|
+
} else {
|
1593
|
+
return fromByteArray(buf.slice(start, end));
|
1594
|
+
}
|
1595
|
+
}
|
1596
|
+
static _utf8Slice(buf, start, end) {
|
1597
|
+
end = Math.min(buf.length, end);
|
1598
|
+
const res = [];
|
1599
|
+
let i = start;
|
1600
|
+
while (i < end) {
|
1601
|
+
const firstByte = buf[i];
|
1602
|
+
let codePoint = null;
|
1603
|
+
let bytesPerSequence = firstByte > 239 ? 4 : firstByte > 223 ? 3 : firstByte > 191 ? 2 : 1;
|
1604
|
+
if (i + bytesPerSequence <= end) {
|
1605
|
+
let secondByte, thirdByte, fourthByte, tempCodePoint;
|
1606
|
+
switch (bytesPerSequence) {
|
1607
|
+
case 1:
|
1608
|
+
if (firstByte < 128) {
|
1609
|
+
codePoint = firstByte;
|
1610
|
+
}
|
1611
|
+
break;
|
1612
|
+
case 2:
|
1613
|
+
secondByte = buf[i + 1];
|
1614
|
+
if ((secondByte & 192) === 128) {
|
1615
|
+
tempCodePoint = (firstByte & 31) << 6 | secondByte & 63;
|
1616
|
+
if (tempCodePoint > 127) {
|
1617
|
+
codePoint = tempCodePoint;
|
1618
|
+
}
|
1619
|
+
}
|
1620
|
+
break;
|
1621
|
+
case 3:
|
1622
|
+
secondByte = buf[i + 1];
|
1623
|
+
thirdByte = buf[i + 2];
|
1624
|
+
if ((secondByte & 192) === 128 && (thirdByte & 192) === 128) {
|
1625
|
+
tempCodePoint = (firstByte & 15) << 12 | (secondByte & 63) << 6 | thirdByte & 63;
|
1626
|
+
if (tempCodePoint > 2047 && (tempCodePoint < 55296 || tempCodePoint > 57343)) {
|
1627
|
+
codePoint = tempCodePoint;
|
1628
|
+
}
|
1629
|
+
}
|
1630
|
+
break;
|
1631
|
+
case 4:
|
1632
|
+
secondByte = buf[i + 1];
|
1633
|
+
thirdByte = buf[i + 2];
|
1634
|
+
fourthByte = buf[i + 3];
|
1635
|
+
if ((secondByte & 192) === 128 && (thirdByte & 192) === 128 && (fourthByte & 192) === 128) {
|
1636
|
+
tempCodePoint = (firstByte & 15) << 18 | (secondByte & 63) << 12 | (thirdByte & 63) << 6 | fourthByte & 63;
|
1637
|
+
if (tempCodePoint > 65535 && tempCodePoint < 1114112) {
|
1638
|
+
codePoint = tempCodePoint;
|
1639
|
+
}
|
1640
|
+
}
|
1641
|
+
}
|
1642
|
+
}
|
1643
|
+
if (codePoint === null) {
|
1644
|
+
codePoint = 65533;
|
1645
|
+
bytesPerSequence = 1;
|
1646
|
+
} else if (codePoint > 65535) {
|
1647
|
+
codePoint -= 65536;
|
1648
|
+
res.push(codePoint >>> 10 & 1023 | 55296);
|
1649
|
+
codePoint = 56320 | codePoint & 1023;
|
1650
|
+
}
|
1651
|
+
res.push(codePoint);
|
1652
|
+
i += bytesPerSequence;
|
1653
|
+
}
|
1654
|
+
return Buffer._decodeCodePointsArray(res);
|
1655
|
+
}
|
1656
|
+
static _decodeCodePointsArray(codePoints) {
|
1657
|
+
const len = codePoints.length;
|
1658
|
+
if (len <= MAX_ARGUMENTS_LENGTH) {
|
1659
|
+
return String.fromCharCode.apply(String, codePoints);
|
1660
|
+
}
|
1661
|
+
let res = "";
|
1662
|
+
let i = 0;
|
1663
|
+
while (i < len) {
|
1664
|
+
res += String.fromCharCode.apply(String, codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH));
|
1665
|
+
}
|
1666
|
+
return res;
|
1667
|
+
}
|
1668
|
+
static _asciiSlice(buf, start, end) {
|
1669
|
+
let ret = "";
|
1670
|
+
end = Math.min(buf.length, end);
|
1671
|
+
for (let i = start; i < end; ++i) {
|
1672
|
+
ret += String.fromCharCode(buf[i] & 127);
|
1673
|
+
}
|
1674
|
+
return ret;
|
1675
|
+
}
|
1676
|
+
static _latin1Slice(buf, start, end) {
|
1677
|
+
let ret = "";
|
1678
|
+
end = Math.min(buf.length, end);
|
1679
|
+
for (let i = start; i < end; ++i) {
|
1680
|
+
ret += String.fromCharCode(buf[i]);
|
1681
|
+
}
|
1682
|
+
return ret;
|
1683
|
+
}
|
1684
|
+
static _utf16leSlice(buf, start, end) {
|
1685
|
+
const bytes = buf.slice(start, end);
|
1686
|
+
let res = "";
|
1687
|
+
for (let i = 0; i < bytes.length - 1; i += 2) {
|
1688
|
+
res += String.fromCharCode(bytes[i] + bytes[i + 1] * 256);
|
1689
|
+
}
|
1690
|
+
return res;
|
1691
|
+
}
|
1692
|
+
static _arrayIndexOf(arr, val, byteOffset, encoding, dir) {
|
1693
|
+
let indexSize = 1;
|
1694
|
+
let arrLength = arr.length;
|
1695
|
+
let valLength = val.length;
|
1696
|
+
if (encoding !== void 0) {
|
1697
|
+
encoding = Buffer._getEncoding(encoding);
|
1698
|
+
if (encoding === "ucs2" || encoding === "utf16le") {
|
1699
|
+
if (arr.length < 2 || val.length < 2) {
|
1700
|
+
return -1;
|
1701
|
+
}
|
1702
|
+
indexSize = 2;
|
1703
|
+
arrLength /= 2;
|
1704
|
+
valLength /= 2;
|
1705
|
+
byteOffset /= 2;
|
1706
|
+
}
|
1707
|
+
}
|
1708
|
+
function read(buf, i2) {
|
1709
|
+
if (indexSize === 1) {
|
1710
|
+
return buf[i2];
|
1711
|
+
} else {
|
1712
|
+
return buf.readUInt16BE(i2 * indexSize);
|
1713
|
+
}
|
1714
|
+
}
|
1715
|
+
let i;
|
1716
|
+
if (dir) {
|
1717
|
+
let foundIndex = -1;
|
1718
|
+
for (i = byteOffset; i < arrLength; i++) {
|
1719
|
+
if (read(arr, i) === read(val, foundIndex === -1 ? 0 : i - foundIndex)) {
|
1720
|
+
if (foundIndex === -1) foundIndex = i;
|
1721
|
+
if (i - foundIndex + 1 === valLength) return foundIndex * indexSize;
|
1722
|
+
} else {
|
1723
|
+
if (foundIndex !== -1) i -= i - foundIndex;
|
1724
|
+
foundIndex = -1;
|
1725
|
+
}
|
1726
|
+
}
|
1727
|
+
} else {
|
1728
|
+
if (byteOffset + valLength > arrLength) {
|
1729
|
+
byteOffset = arrLength - valLength;
|
1730
|
+
}
|
1731
|
+
for (i = byteOffset; i >= 0; i--) {
|
1732
|
+
let found = true;
|
1733
|
+
for (let j = 0; j < valLength; j++) {
|
1734
|
+
if (read(arr, i + j) !== read(val, j)) {
|
1735
|
+
found = false;
|
1736
|
+
break;
|
1737
|
+
}
|
1738
|
+
}
|
1739
|
+
if (found) {
|
1740
|
+
return i;
|
1741
|
+
}
|
1742
|
+
}
|
1743
|
+
}
|
1744
|
+
return -1;
|
1745
|
+
}
|
1746
|
+
static _checkOffset(offset, ext, length) {
|
1747
|
+
if (offset % 1 !== 0 || offset < 0) throw new RangeError("offset is not uint");
|
1748
|
+
if (offset + ext > length) throw new RangeError("Trying to access beyond buffer length");
|
1749
|
+
}
|
1750
|
+
static _checkInt(buf, value, offset, ext, max, min) {
|
1751
|
+
if (!Buffer.isBuffer(buf)) throw new TypeError('"buffer" argument must be a Buffer instance');
|
1752
|
+
if (value > max || value < min) throw new RangeError('"value" argument is out of bounds');
|
1753
|
+
if (offset + ext > buf.length) throw new RangeError("Index out of range");
|
1754
|
+
}
|
1755
|
+
static _getEncoding(encoding) {
|
1756
|
+
let toLowerCase = false;
|
1757
|
+
let originalEncoding = "";
|
1758
|
+
for (; ; ) {
|
1759
|
+
switch (encoding) {
|
1760
|
+
case "hex":
|
1761
|
+
return "hex";
|
1762
|
+
case "utf8":
|
1763
|
+
return "utf8";
|
1764
|
+
case "ascii":
|
1765
|
+
return "ascii";
|
1766
|
+
case "binary":
|
1767
|
+
return "binary";
|
1768
|
+
case "latin1":
|
1769
|
+
return "latin1";
|
1770
|
+
case "ucs2":
|
1771
|
+
return "ucs2";
|
1772
|
+
case "utf16le":
|
1773
|
+
return "utf16le";
|
1774
|
+
case "base64":
|
1775
|
+
return "base64";
|
1776
|
+
default: {
|
1777
|
+
if (toLowerCase) {
|
1778
|
+
throw new TypeError("Unknown or unsupported encoding: " + originalEncoding);
|
1779
|
+
}
|
1780
|
+
toLowerCase = true;
|
1781
|
+
originalEncoding = encoding;
|
1782
|
+
encoding = encoding.toLowerCase();
|
1783
|
+
}
|
1784
|
+
}
|
1785
|
+
}
|
1786
|
+
}
|
1787
|
+
}
|
1788
|
+
const hexSliceLookupTable = function() {
|
1789
|
+
const alphabet = "0123456789abcdef";
|
1790
|
+
const table = new Array(256);
|
1791
|
+
for (let i = 0; i < 16; ++i) {
|
1792
|
+
const i16 = i * 16;
|
1793
|
+
for (let j = 0; j < 16; ++j) {
|
1794
|
+
table[i16 + j] = alphabet[i] + alphabet[j];
|
1795
|
+
}
|
1796
|
+
}
|
1797
|
+
return table;
|
1798
|
+
}();
|
1799
|
+
const INVALID_BASE64_RE = /[^+/0-9A-Za-z-_]/g;
|
1800
|
+
function base64clean(str) {
|
1801
|
+
str = str.split("=")[0];
|
1802
|
+
str = str.trim().replace(INVALID_BASE64_RE, "");
|
1803
|
+
if (str.length < 2) return "";
|
1804
|
+
while (str.length % 4 !== 0) {
|
1805
|
+
str = str + "=";
|
1806
|
+
}
|
1807
|
+
return str;
|
1808
|
+
}
|
1809
|
+
|
27
1810
|
function notEmpty(value) {
|
28
1811
|
return value !== null && value !== void 0;
|
29
1812
|
}
|
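The hunk above inlines a self-contained Buffer shim (concat, compare, alloc, allocUnsafe and the UTF-8/base64/hex codecs). A minimal sketch of how those static helpers behave, assuming the shim mirrors Node's Buffer semantics for from, fill and toString; the sample values are illustrative only and are not taken from the package:

// Sketch only; assumes the vendored shim matches Node's Buffer API for these calls.
const a = Buffer.from("xata", "utf8");   // 4 bytes: 0x78 0x61 0x74 0x61
const b = Buffer.alloc(4, 0x2e);         // 4 bytes, each filled with 0x2e (".")
const joined = Buffer.concat([a, b]);    // 8 bytes: "xata" followed by "...."
console.log(joined.toString("utf8"));    // "xata...."
console.log(Buffer.compare(a, b));       // 1, since 0x78 > 0x2e at the first differing byte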
@@ -227,8 +2010,7 @@ function buildPreviewBranchName({ org, branch }) {
|
|
227
2010
|
function getPreviewBranch() {
|
228
2011
|
try {
|
229
2012
|
const { deployPreview, deployPreviewBranch, vercelGitCommitRef, vercelGitRepoOwner } = getEnvironment();
|
230
|
-
if (deployPreviewBranch)
|
231
|
-
return deployPreviewBranch;
|
2013
|
+
if (deployPreviewBranch) return deployPreviewBranch;
|
232
2014
|
switch (deployPreview) {
|
233
2015
|
case "vercel": {
|
234
2016
|
if (!vercelGitCommitRef || !vercelGitRepoOwner) {
|
@@ -244,29 +2026,15 @@ function getPreviewBranch() {
|
|
244
2026
|
}
|
245
2027
|
}
|
246
2028
|
|
247
|
-
var __accessCheck$8 = (obj, member, msg) => {
|
248
|
-
if (!member.has(obj))
|
249
|
-
throw TypeError("Cannot " + msg);
|
250
|
-
};
|
251
|
-
var __privateGet$7 = (obj, member, getter) => {
|
252
|
-
__accessCheck$8(obj, member, "read from private field");
|
253
|
-
return getter ? getter.call(obj) : member.get(obj);
|
254
|
-
};
|
255
|
-
var __privateAdd$8 = (obj, member, value) => {
|
256
|
-
if (member.has(obj))
|
257
|
-
throw TypeError("Cannot add the same private member more than once");
|
258
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
2029
|
+
var __typeError$8 = (msg) => {
|
2030
|
+
throw TypeError(msg);
|
259
2031
|
};
|
260
|
-
var __privateSet$6 = (obj, member, value, setter) => {
|
261
|
-
__accessCheck$8(obj, member, "write to private field");
|
262
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
263
|
-
return value;
|
264
|
-
};
|
265
|
-
var __privateMethod$4 = (obj, member, method) => {
|
266
|
-
__accessCheck$8(obj, member, "access private method");
|
267
|
-
return method;
|
268
|
-
};
|
269
|
-
var _fetch, _queue, _concurrency, _enqueue, enqueue_fn;
|
2032
|
+
var __accessCheck$8 = (obj, member, msg) => member.has(obj) || __typeError$8("Cannot " + msg);
|
2033
|
+
var __privateGet$7 = (obj, member, getter) => (__accessCheck$8(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
2034
|
+
var __privateAdd$8 = (obj, member, value) => member.has(obj) ? __typeError$8("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
2035
|
+
var __privateSet$6 = (obj, member, value, setter) => (__accessCheck$8(obj, member, "write to private field"), member.set(obj, value), value);
|
2036
|
+
var __privateMethod$4 = (obj, member, method) => (__accessCheck$8(obj, member, "access private method"), method);
|
2037
|
+
var _fetch, _queue, _concurrency, _ApiRequestPool_instances, enqueue_fn;
|
270
2038
|
const REQUEST_TIMEOUT = 5 * 60 * 1e3;
|
271
2039
|
function getFetchImplementation(userFetch) {
|
272
2040
|
const globalFetch = typeof fetch !== "undefined" ? fetch : void 0;
|
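The __typeError$8/__accessCheck$8/__privateGet$7 helpers and the new _ApiRequestPool_instances WeakSet above appear to be bundler output for native #private class members: the 0.29.5 build brands instances with a single per-class WeakSet instead of one WeakSet per private method. Roughly, source along these lines produces that plumbing (a sketch of the pattern, not the SDK's actual source):

// Hypothetical source shape; the compiled helpers in the diff correspond to this pattern.
class ApiRequestPool {
  #fetch;           // lowered to __privateAdd$8(this, _fetch) plus a _fetch WeakMap
  #queue;
  #concurrency;
  #enqueue(task) {  // private methods are brand-checked via _ApiRequestPool_instances
    /* ... */
  }
}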
@@ -279,10 +2047,10 @@ function getFetchImplementation(userFetch) {
|
|
279
2047
|
}
|
280
2048
|
class ApiRequestPool {
|
281
2049
|
constructor(concurrency = 10) {
|
282
|
-
__privateAdd$8(this, _enqueue);
|
283
|
-
__privateAdd$8(this, _fetch, void 0);
|
284
|
-
__privateAdd$8(this, _queue, void 0);
|
285
|
-
__privateAdd$8(this, _concurrency, void 0);
|
2050
|
+
__privateAdd$8(this, _ApiRequestPool_instances);
|
2051
|
+
__privateAdd$8(this, _fetch);
|
2052
|
+
__privateAdd$8(this, _queue);
|
2053
|
+
__privateAdd$8(this, _concurrency);
|
286
2054
|
__privateSet$6(this, _queue, []);
|
287
2055
|
__privateSet$6(this, _concurrency, concurrency);
|
288
2056
|
this.running = 0;
|
@@ -317,7 +2085,7 @@ class ApiRequestPool {
|
|
317
2085
|
}
|
318
2086
|
return response;
|
319
2087
|
};
|
320
|
-
return __privateMethod$4(this, _enqueue, enqueue_fn).call(this, async () => {
|
2088
|
+
return __privateMethod$4(this, _ApiRequestPool_instances, enqueue_fn).call(this, async () => {
|
321
2089
|
return await runRequest();
|
322
2090
|
});
|
323
2091
|
}
|
@@ -325,7 +2093,7 @@ class ApiRequestPool {
|
|
325
2093
|
_fetch = new WeakMap();
|
326
2094
|
_queue = new WeakMap();
|
327
2095
|
_concurrency = new WeakMap();
|
328
|
-
_enqueue = new WeakSet();
|
2096
|
+
_ApiRequestPool_instances = new WeakSet();
|
329
2097
|
enqueue_fn = function(task) {
|
330
2098
|
const promise = new Promise((resolve) => __privateGet$7(this, _queue).push(resolve)).finally(() => {
|
331
2099
|
this.started--;
|
@@ -528,7 +2296,7 @@ function defaultOnOpen(response) {
|
|
528
2296
|
}
|
529
2297
|
}
|
530
2298
|
|
531
|
-
const VERSION = "0.29.
|
2299
|
+
const VERSION = "0.29.5";
|
532
2300
|
|
533
2301
|
class ErrorWithCause extends Error {
|
534
2302
|
constructor(message, options) {
|
@@ -608,35 +2376,30 @@ function parseProviderString(provider = "production") {
|
|
608
2376
|
return provider;
|
609
2377
|
}
|
610
2378
|
const [main, workspaces] = provider.split(",");
|
611
|
-
if (!main || !workspaces)
|
612
|
-
return null;
|
2379
|
+
if (!main || !workspaces) return null;
|
613
2380
|
return { main, workspaces };
|
614
2381
|
}
|
615
2382
|
function buildProviderString(provider) {
|
616
|
-
if (isHostProviderAlias(provider))
|
617
|
-
return provider;
|
2383
|
+
if (isHostProviderAlias(provider)) return provider;
|
618
2384
|
return `${provider.main},${provider.workspaces}`;
|
619
2385
|
}
|
620
2386
|
function parseWorkspacesUrlParts(url) {
|
621
|
-
if (!isString(url))
|
622
|
-
return null;
|
2387
|
+
if (!isString(url)) return null;
|
623
2388
|
const matches = {
|
624
2389
|
production: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.xata\.sh\/db\/([^:]+):?(.*)?/),
|
625
2390
|
staging: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.staging-xata\.dev\/db\/([^:]+):?(.*)?/),
|
626
2391
|
dev: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.dev-xata\.dev\/db\/([^:]+):?(.*)?/),
|
627
|
-
local: url.match(/(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:([^:]+):?(.*)?/)
|
2392
|
+
local: url.match(/(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:(?:\d+)\/db\/([^:]+):?(.*)?/)
|
628
2393
|
};
|
629
2394
|
const [host, match] = Object.entries(matches).find(([, match2]) => match2 !== null) ?? [];
|
630
|
-
if (!isHostProviderAlias(host) || !match)
|
631
|
-
return null;
|
2395
|
+
if (!isHostProviderAlias(host) || !match) return null;
|
632
2396
|
return { workspace: match[1], region: match[2], database: match[3], branch: match[4], host };
|
633
2397
|
}
|
634
2398
|
|
635
2399
|
const pool = new ApiRequestPool();
|
636
2400
|
const resolveUrl = (url, queryParams = {}, pathParams = {}) => {
|
637
2401
|
const cleanQueryParams = Object.entries(queryParams).reduce((acc, [key, value]) => {
|
638
|
-
if (value === void 0 || value === null)
|
639
|
-
return acc;
|
2402
|
+
if (value === void 0 || value === null) return acc;
|
640
2403
|
return { ...acc, [key]: value };
|
641
2404
|
}, {});
|
642
2405
|
const query = new URLSearchParams(cleanQueryParams).toString();
|
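The reworked local pattern in parseWorkspacesUrlParts above now requires a numeric port followed by a /db/ segment, matching the shape of the hosted URLs. A small sketch of what the new expression captures, using a made-up local URL (workspace, region and database names are illustrative):

// Sketch of the new "local" pattern from the hunk above.
const localRe = /(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:(?:\d+)\/db\/([^:]+):?(.*)?/;
const m = "ws-1234.us-east-1.localhost:6001/db/mydb:main".match(localRe);
// m[1] === "ws-1234"    -> workspace
// m[2] === "us-east-1"  -> region
// m[3] === "mydb"       -> database
// m[4] === "main"       -> branch
// parseWorkspacesUrlParts then returns { workspace, region, database, branch, host: "local" }.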
@@ -684,8 +2447,7 @@ function hostHeader(url) {
|
|
684
2447
|
return groups?.host ? { Host: groups.host } : {};
|
685
2448
|
}
|
686
2449
|
async function parseBody(body, headers) {
|
687
|
-
if (!isDefined(body))
|
688
|
-
return void 0;
|
2450
|
+
if (!isDefined(body)) return void 0;
|
689
2451
|
if (isBlob(body) || typeof body.text === "function") {
|
690
2452
|
return body;
|
691
2453
|
}
|
@@ -762,8 +2524,7 @@ async function fetch$1({
|
|
762
2524
|
[TraceAttributes.CLOUDFLARE_RAY_ID]: response.headers?.get("cf-ray") ?? void 0
|
763
2525
|
});
|
764
2526
|
const message = response.headers?.get("x-xata-message");
|
765
|
-
if (message)
|
766
|
-
console.warn(message);
|
2527
|
+
if (message) console.warn(message);
|
767
2528
|
if (response.status === 204) {
|
768
2529
|
return {};
|
769
2530
|
}
|
@@ -847,104 +2608,233 @@ function parseUrl(url) {
|
|
847
2608
|
|
848
2609
|
const dataPlaneFetch = async (options) => fetch$1({ ...options, endpoint: "dataPlane" });
|
849
2610
|
|
850
|
-
const applyMigration = (variables, signal) => dataPlaneFetch({
|
2611
|
+
const applyMigration = (variables, signal) => dataPlaneFetch({
|
2612
|
+
url: "/db/{dbBranchName}/migrations/apply",
|
2613
|
+
method: "post",
|
2614
|
+
...variables,
|
2615
|
+
signal
|
2616
|
+
});
|
2617
|
+
const startMigration = (variables, signal) => dataPlaneFetch({
|
2618
|
+
url: "/db/{dbBranchName}/migrations/start",
|
2619
|
+
method: "post",
|
2620
|
+
...variables,
|
2621
|
+
signal
|
2622
|
+
});
|
2623
|
+
const completeMigration = (variables, signal) => dataPlaneFetch({
|
2624
|
+
url: "/db/{dbBranchName}/migrations/complete",
|
2625
|
+
method: "post",
|
2626
|
+
...variables,
|
2627
|
+
signal
|
2628
|
+
});
|
2629
|
+
const rollbackMigration = (variables, signal) => dataPlaneFetch({
|
2630
|
+
url: "/db/{dbBranchName}/migrations/rollback",
|
2631
|
+
method: "post",
|
2632
|
+
...variables,
|
2633
|
+
signal
|
2634
|
+
});
|
851
2635
|
const adaptTable = (variables, signal) => dataPlaneFetch({
|
852
2636
|
url: "/db/{dbBranchName}/migrations/adapt/{tableName}",
|
853
2637
|
method: "post",
|
854
2638
|
...variables,
|
855
2639
|
signal
|
856
2640
|
});
|
857
|
-
const
|
858
|
-
|
859
|
-
|
860
|
-
|
861
|
-
|
2641
|
+
const adaptAllTables = (variables, signal) => dataPlaneFetch({
|
2642
|
+
url: "/db/{dbBranchName}/migrations/adapt",
|
2643
|
+
method: "post",
|
2644
|
+
...variables,
|
2645
|
+
signal
|
2646
|
+
});
|
2647
|
+
const getBranchMigrationJobStatus = (variables, signal) => dataPlaneFetch({
|
2648
|
+
url: "/db/{dbBranchName}/migrations/status",
|
2649
|
+
method: "get",
|
2650
|
+
...variables,
|
2651
|
+
signal
|
2652
|
+
});
|
2653
|
+
const getMigrationJobStatus = (variables, signal) => dataPlaneFetch({
|
2654
|
+
url: "/db/{dbBranchName}/migrations/jobs/{jobId}",
|
2655
|
+
method: "get",
|
2656
|
+
...variables,
|
2657
|
+
signal
|
2658
|
+
});
|
2659
|
+
const getMigrationHistory = (variables, signal) => dataPlaneFetch({
|
2660
|
+
url: "/db/{dbBranchName}/migrations/history",
|
2661
|
+
method: "get",
|
2662
|
+
...variables,
|
2663
|
+
signal
|
2664
|
+
});
|
2665
|
+
const getBranchList = (variables, signal) => dataPlaneFetch({
|
2666
|
+
url: "/dbs/{dbName}",
|
2667
|
+
method: "get",
|
2668
|
+
...variables,
|
2669
|
+
signal
|
2670
|
+
});
|
2671
|
+
const getDatabaseSettings = (variables, signal) => dataPlaneFetch({
|
2672
|
+
url: "/dbs/{dbName}/settings",
|
2673
|
+
method: "get",
|
2674
|
+
...variables,
|
2675
|
+
signal
|
2676
|
+
});
|
2677
|
+
const updateDatabaseSettings = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/settings", method: "patch", ...variables, signal });
|
2678
|
+
const getBranchDetails = (variables, signal) => dataPlaneFetch({
|
2679
|
+
url: "/db/{dbBranchName}",
|
2680
|
+
method: "get",
|
2681
|
+
...variables,
|
2682
|
+
signal
|
2683
|
+
});
|
2684
|
+
const createBranch = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}", method: "put", ...variables, signal });
|
2685
|
+
const deleteBranch = (variables, signal) => dataPlaneFetch({
|
2686
|
+
url: "/db/{dbBranchName}",
|
2687
|
+
method: "delete",
|
2688
|
+
...variables,
|
2689
|
+
signal
|
2690
|
+
});
|
2691
|
+
const getSchema = (variables, signal) => dataPlaneFetch({
|
2692
|
+
url: "/db/{dbBranchName}/schema",
|
2693
|
+
method: "get",
|
2694
|
+
...variables,
|
2695
|
+
signal
|
2696
|
+
});
|
2697
|
+
const copyBranch = (variables, signal) => dataPlaneFetch({
|
2698
|
+
url: "/db/{dbBranchName}/copy",
|
2699
|
+
method: "post",
|
2700
|
+
...variables,
|
2701
|
+
signal
|
2702
|
+
});
|
2703
|
+
const updateBranchMetadata = (variables, signal) => dataPlaneFetch({
|
2704
|
+
url: "/db/{dbBranchName}/metadata",
|
2705
|
+
method: "put",
|
2706
|
+
...variables,
|
2707
|
+
signal
|
2708
|
+
});
|
2709
|
+
const getBranchMetadata = (variables, signal) => dataPlaneFetch({
|
2710
|
+
url: "/db/{dbBranchName}/metadata",
|
2711
|
+
method: "get",
|
2712
|
+
...variables,
|
2713
|
+
signal
|
2714
|
+
});
|
2715
|
+
const getBranchStats = (variables, signal) => dataPlaneFetch({
|
2716
|
+
url: "/db/{dbBranchName}/stats",
|
2717
|
+
method: "get",
|
2718
|
+
...variables,
|
2719
|
+
signal
|
2720
|
+
});
|
2721
|
+
const getGitBranchesMapping = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "get", ...variables, signal });
|
2722
|
+
const addGitBranchesEntry = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "post", ...variables, signal });
|
2723
|
+
const removeGitBranchesEntry = (variables, signal) => dataPlaneFetch({
|
2724
|
+
url: "/dbs/{dbName}/gitBranches",
|
2725
|
+
method: "delete",
|
2726
|
+
...variables,
|
2727
|
+
signal
|
2728
|
+
});
|
2729
|
+
const resolveBranch = (variables, signal) => dataPlaneFetch({
|
2730
|
+
url: "/dbs/{dbName}/resolveBranch",
|
2731
|
+
method: "get",
|
2732
|
+
...variables,
|
2733
|
+
signal
|
2734
|
+
});
|
2735
|
+
const getBranchMigrationHistory = (variables, signal) => dataPlaneFetch({
|
2736
|
+
url: "/db/{dbBranchName}/migrations",
|
2737
|
+
method: "get",
|
2738
|
+
...variables,
|
2739
|
+
signal
|
2740
|
+
});
|
2741
|
+
const getBranchMigrationPlan = (variables, signal) => dataPlaneFetch({
|
2742
|
+
url: "/db/{dbBranchName}/migrations/plan",
|
2743
|
+
method: "post",
|
2744
|
+
...variables,
|
2745
|
+
signal
|
2746
|
+
});
|
2747
|
+
const executeBranchMigrationPlan = (variables, signal) => dataPlaneFetch({
|
2748
|
+
url: "/db/{dbBranchName}/migrations/execute",
|
2749
|
+
method: "post",
|
2750
|
+
...variables,
|
2751
|
+
signal
|
2752
|
+
});
|
2753
|
+
const queryMigrationRequests = (variables, signal) => dataPlaneFetch({
|
2754
|
+
url: "/dbs/{dbName}/migrations/query",
|
2755
|
+
method: "post",
|
2756
|
+
...variables,
|
2757
|
+
signal
|
2758
|
+
});
|
2759
|
+
const createMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations", method: "post", ...variables, signal });
|
2760
|
+
const getMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2761
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}",
|
862
2762
|
method: "get",
|
863
2763
|
...variables,
|
864
2764
|
signal
|
865
2765
|
});
|
866
|
-
const
|
867
|
-
url: "/dbs/{dbName}/
|
868
|
-
method: "
|
2766
|
+
const updateMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2767
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}",
|
2768
|
+
method: "patch",
|
869
2769
|
...variables,
|
870
2770
|
signal
|
871
2771
|
});
|
872
|
-
const
|
873
|
-
|
874
|
-
|
875
|
-
method: "get",
|
2772
|
+
const listMigrationRequestsCommits = (variables, signal) => dataPlaneFetch({
|
2773
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/commits",
|
2774
|
+
method: "post",
|
876
2775
|
...variables,
|
877
2776
|
signal
|
878
2777
|
});
|
879
|
-
const
|
880
|
-
|
881
|
-
|
882
|
-
method: "delete",
|
2778
|
+
const compareMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2779
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/compare",
|
2780
|
+
method: "post",
|
883
2781
|
...variables,
|
884
2782
|
signal
|
885
2783
|
});
|
886
|
-
const
|
887
|
-
url: "/
|
2784
|
+
const getMigrationRequestIsMerged = (variables, signal) => dataPlaneFetch({
|
2785
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
|
888
2786
|
method: "get",
|
889
2787
|
...variables,
|
890
2788
|
signal
|
891
2789
|
});
|
892
|
-
const
|
893
|
-
url: "/
|
2790
|
+
const mergeMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2791
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
|
894
2792
|
method: "post",
|
895
2793
|
...variables,
|
896
2794
|
signal
|
897
2795
|
});
|
898
|
-
const
|
899
|
-
url: "/db/{dbBranchName}/
|
900
|
-
method: "
|
2796
|
+
const getBranchSchemaHistory = (variables, signal) => dataPlaneFetch({
|
2797
|
+
url: "/db/{dbBranchName}/schema/history",
|
2798
|
+
method: "post",
|
901
2799
|
...variables,
|
902
2800
|
signal
|
903
2801
|
});
|
904
|
-
const
|
905
|
-
url: "/db/{dbBranchName}/
|
906
|
-
method: "
|
2802
|
+
const compareBranchWithUserSchema = (variables, signal) => dataPlaneFetch({
|
2803
|
+
url: "/db/{dbBranchName}/schema/compare",
|
2804
|
+
method: "post",
|
907
2805
|
...variables,
|
908
2806
|
signal
|
909
2807
|
});
|
910
|
-
const
|
911
|
-
url: "/db/{dbBranchName}/
|
912
|
-
method: "
|
2808
|
+
const compareBranchSchemas = (variables, signal) => dataPlaneFetch({
|
2809
|
+
url: "/db/{dbBranchName}/schema/compare/{branchName}",
|
2810
|
+
method: "post",
|
913
2811
|
...variables,
|
914
2812
|
signal
|
915
2813
|
});
|
916
|
-
const
|
917
|
-
|
918
|
-
|
919
|
-
const resolveBranch = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/resolveBranch", method: "get", ...variables, signal });
|
920
|
-
const getBranchMigrationHistory = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations", method: "get", ...variables, signal });
|
921
|
-
const getBranchMigrationPlan = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/plan", method: "post", ...variables, signal });
|
922
|
-
const executeBranchMigrationPlan = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/execute", method: "post", ...variables, signal });
|
923
|
-
const queryMigrationRequests = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/query", method: "post", ...variables, signal });
|
924
|
-
const createMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations", method: "post", ...variables, signal });
|
925
|
-
const getMigrationRequest = (variables, signal) => dataPlaneFetch({
|
926
|
-
url: "/dbs/{dbName}/migrations/{mrNumber}",
|
927
|
-
method: "get",
|
2814
|
+
const updateBranchSchema = (variables, signal) => dataPlaneFetch({
|
2815
|
+
url: "/db/{dbBranchName}/schema/update",
|
2816
|
+
method: "post",
|
928
2817
|
...variables,
|
929
2818
|
signal
|
930
2819
|
});
|
931
|
-
const
|
932
|
-
|
933
|
-
|
934
|
-
|
935
|
-
|
936
|
-
|
2820
|
+
const previewBranchSchemaEdit = (variables, signal) => dataPlaneFetch({
|
2821
|
+
url: "/db/{dbBranchName}/schema/preview",
|
2822
|
+
method: "post",
|
2823
|
+
...variables,
|
2824
|
+
signal
|
2825
|
+
});
|
2826
|
+
const applyBranchSchemaEdit = (variables, signal) => dataPlaneFetch({
|
2827
|
+
url: "/db/{dbBranchName}/schema/apply",
|
2828
|
+
method: "post",
|
2829
|
+
...variables,
|
2830
|
+
signal
|
2831
|
+
});
|
2832
|
+
const pushBranchMigrations = (variables, signal) => dataPlaneFetch({
|
2833
|
+
url: "/db/{dbBranchName}/schema/push",
|
937
2834
|
method: "post",
|
938
2835
|
...variables,
|
939
2836
|
signal
|
940
2837
|
});
|
941
|
-
const getBranchSchemaHistory = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/history", method: "post", ...variables, signal });
|
942
|
-
const compareBranchWithUserSchema = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/compare", method: "post", ...variables, signal });
|
943
|
-
const compareBranchSchemas = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/compare/{branchName}", method: "post", ...variables, signal });
|
944
|
-
const updateBranchSchema = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/update", method: "post", ...variables, signal });
|
945
|
-
const previewBranchSchemaEdit = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/preview", method: "post", ...variables, signal });
|
946
|
-
const applyBranchSchemaEdit = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/apply", method: "post", ...variables, signal });
|
947
|
-
const pushBranchMigrations = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/push", method: "post", ...variables, signal });
|
948
2838
|
const createTable = (variables, signal) => dataPlaneFetch({
|
949
2839
|
url: "/db/{dbBranchName}/tables/{tableName}",
|
950
2840
|
method: "put",
|
@@ -957,14 +2847,24 @@ const deleteTable = (variables, signal) => dataPlaneFetch({
|
|
957
2847
|
...variables,
|
958
2848
|
signal
|
959
2849
|
});
|
960
|
-
const updateTable = (variables, signal) => dataPlaneFetch({
|
2850
|
+
const updateTable = (variables, signal) => dataPlaneFetch({
|
2851
|
+
url: "/db/{dbBranchName}/tables/{tableName}",
|
2852
|
+
method: "patch",
|
2853
|
+
...variables,
|
2854
|
+
signal
|
2855
|
+
});
|
961
2856
|
const getTableSchema = (variables, signal) => dataPlaneFetch({
|
962
2857
|
url: "/db/{dbBranchName}/tables/{tableName}/schema",
|
963
2858
|
method: "get",
|
964
2859
|
...variables,
|
965
2860
|
signal
|
966
2861
|
});
|
967
|
-
const setTableSchema = (variables, signal) => dataPlaneFetch({
|
2862
|
+
const setTableSchema = (variables, signal) => dataPlaneFetch({
|
2863
|
+
url: "/db/{dbBranchName}/tables/{tableName}/schema",
|
2864
|
+
method: "put",
|
2865
|
+
...variables,
|
2866
|
+
signal
|
2867
|
+
});
|
968
2868
|
const getTableColumns = (variables, signal) => dataPlaneFetch({
|
969
2869
|
url: "/db/{dbBranchName}/tables/{tableName}/columns",
|
970
2870
|
method: "get",
|
@@ -972,7 +2872,12 @@ const getTableColumns = (variables, signal) => dataPlaneFetch({
|
|
972
2872
|
signal
|
973
2873
|
});
|
974
2874
|
const addTableColumn = (variables, signal) => dataPlaneFetch(
|
975
|
-
{
|
2875
|
+
{
|
2876
|
+
url: "/db/{dbBranchName}/tables/{tableName}/columns",
|
2877
|
+
method: "post",
|
2878
|
+
...variables,
|
2879
|
+
signal
|
2880
|
+
}
|
976
2881
|
);
|
977
2882
|
const getColumn = (variables, signal) => dataPlaneFetch({
|
978
2883
|
url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
|
@@ -980,15 +2885,30 @@ const getColumn = (variables, signal) => dataPlaneFetch({
|
|
980
2885
|
...variables,
|
981
2886
|
signal
|
982
2887
|
});
|
983
|
-
const updateColumn = (variables, signal) => dataPlaneFetch({
|
2888
|
+
const updateColumn = (variables, signal) => dataPlaneFetch({
|
2889
|
+
url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
|
2890
|
+
method: "patch",
|
2891
|
+
...variables,
|
2892
|
+
signal
|
2893
|
+
});
|
984
2894
|
const deleteColumn = (variables, signal) => dataPlaneFetch({
|
985
2895
|
url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
|
986
2896
|
method: "delete",
|
987
2897
|
...variables,
|
988
2898
|
signal
|
989
2899
|
});
|
990
|
-
const branchTransaction = (variables, signal) => dataPlaneFetch({
|
991
|
-
|
2900
|
+
const branchTransaction = (variables, signal) => dataPlaneFetch({
|
2901
|
+
url: "/db/{dbBranchName}/transaction",
|
2902
|
+
method: "post",
|
2903
|
+
...variables,
|
2904
|
+
signal
|
2905
|
+
});
|
2906
|
+
const insertRecord = (variables, signal) => dataPlaneFetch({
|
2907
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data",
|
2908
|
+
method: "post",
|
2909
|
+
...variables,
|
2910
|
+
signal
|
2911
|
+
});
|
992
2912
|
const getFileItem = (variables, signal) => dataPlaneFetch({
|
993
2913
|
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}/column/{columnName}/file/{fileId}",
|
994
2914
|
method: "get",
|
@@ -1031,11 +2951,36 @@ const getRecord = (variables, signal) => dataPlaneFetch({
|
|
1031
2951
|
...variables,
|
1032
2952
|
signal
|
1033
2953
|
});
|
1034
|
-
const insertRecordWithID = (variables, signal) => dataPlaneFetch({
|
1035
|
-
|
1036
|
-
|
1037
|
-
|
1038
|
-
|
2954
|
+
const insertRecordWithID = (variables, signal) => dataPlaneFetch({
|
2955
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2956
|
+
method: "put",
|
2957
|
+
...variables,
|
2958
|
+
signal
|
2959
|
+
});
|
2960
|
+
const updateRecordWithID = (variables, signal) => dataPlaneFetch({
|
2961
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2962
|
+
method: "patch",
|
2963
|
+
...variables,
|
2964
|
+
signal
|
2965
|
+
});
|
2966
|
+
const upsertRecordWithID = (variables, signal) => dataPlaneFetch({
|
2967
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2968
|
+
method: "post",
|
2969
|
+
...variables,
|
2970
|
+
signal
|
2971
|
+
});
|
2972
|
+
const deleteRecord = (variables, signal) => dataPlaneFetch({
|
2973
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2974
|
+
method: "delete",
|
2975
|
+
...variables,
|
2976
|
+
signal
|
2977
|
+
});
|
2978
|
+
const bulkInsertTableRecords = (variables, signal) => dataPlaneFetch({
|
2979
|
+
url: "/db/{dbBranchName}/tables/{tableName}/bulk",
|
2980
|
+
method: "post",
|
2981
|
+
...variables,
|
2982
|
+
signal
|
2983
|
+
});
|
1039
2984
|
const queryTable = (variables, signal) => dataPlaneFetch({
|
1040
2985
|
url: "/db/{dbBranchName}/tables/{tableName}/query",
|
1041
2986
|
method: "post",
|
@@ -1054,16 +2999,36 @@ const searchTable = (variables, signal) => dataPlaneFetch({
|
|
1054
2999
|
...variables,
|
1055
3000
|
signal
|
1056
3001
|
});
|
1057
|
-
const vectorSearchTable = (variables, signal) => dataPlaneFetch({
|
3002
|
+
const vectorSearchTable = (variables, signal) => dataPlaneFetch({
|
3003
|
+
url: "/db/{dbBranchName}/tables/{tableName}/vectorSearch",
|
3004
|
+
method: "post",
|
3005
|
+
...variables,
|
3006
|
+
signal
|
3007
|
+
});
|
1058
3008
|
const askTable = (variables, signal) => dataPlaneFetch({
|
1059
3009
|
url: "/db/{dbBranchName}/tables/{tableName}/ask",
|
1060
3010
|
method: "post",
|
1061
3011
|
...variables,
|
1062
3012
|
signal
|
1063
3013
|
});
|
1064
|
-
const askTableSession = (variables, signal) => dataPlaneFetch({
|
1065
|
-
|
1066
|
-
|
3014
|
+
const askTableSession = (variables, signal) => dataPlaneFetch({
|
3015
|
+
url: "/db/{dbBranchName}/tables/{tableName}/ask/{sessionId}",
|
3016
|
+
method: "post",
|
3017
|
+
...variables,
|
3018
|
+
signal
|
3019
|
+
});
|
3020
|
+
const summarizeTable = (variables, signal) => dataPlaneFetch({
|
3021
|
+
url: "/db/{dbBranchName}/tables/{tableName}/summarize",
|
3022
|
+
method: "post",
|
3023
|
+
...variables,
|
3024
|
+
signal
|
3025
|
+
});
|
3026
|
+
const aggregateTable = (variables, signal) => dataPlaneFetch({
|
3027
|
+
url: "/db/{dbBranchName}/tables/{tableName}/aggregate",
|
3028
|
+
method: "post",
|
3029
|
+
...variables,
|
3030
|
+
signal
|
3031
|
+
});
|
1067
3032
|
const fileAccess = (variables, signal) => dataPlaneFetch({
|
1068
3033
|
url: "/file/{fileId}",
|
1069
3034
|
method: "get",
|
@@ -1082,10 +3047,20 @@ const sqlQuery = (variables, signal) => dataPlaneFetch({
|
|
1082
3047
|
...variables,
|
1083
3048
|
signal
|
1084
3049
|
});
|
3050
|
+
const sqlBatchQuery = (variables, signal) => dataPlaneFetch({
|
3051
|
+
url: "/db/{dbBranchName}/sql/batch",
|
3052
|
+
method: "post",
|
3053
|
+
...variables,
|
3054
|
+
signal
|
3055
|
+
});
|
1085
3056
|
const operationsByTag$2 = {
|
1086
3057
|
migrations: {
|
1087
3058
|
applyMigration,
|
3059
|
+
startMigration,
|
3060
|
+
completeMigration,
|
3061
|
+
rollbackMigration,
|
1088
3062
|
adaptTable,
|
3063
|
+
adaptAllTables,
|
1089
3064
|
getBranchMigrationJobStatus,
|
1090
3065
|
getMigrationJobStatus,
|
1091
3066
|
getMigrationHistory,
|
@@ -1148,7 +3123,16 @@ const operationsByTag$2 = {
|
|
1148
3123
|
deleteRecord,
|
1149
3124
|
bulkInsertTableRecords
|
1150
3125
|
},
|
1151
|
-
files: {
|
3126
|
+
files: {
|
3127
|
+
getFileItem,
|
3128
|
+
putFileItem,
|
3129
|
+
deleteFileItem,
|
3130
|
+
getFile,
|
3131
|
+
putFile,
|
3132
|
+
deleteFile,
|
3133
|
+
fileAccess,
|
3134
|
+
fileUpload
|
3135
|
+
},
|
1152
3136
|
searchAndFilter: {
|
1153
3137
|
queryTable,
|
1154
3138
|
searchBranch,
|
@@ -1159,7 +3143,7 @@ const operationsByTag$2 = {
|
|
1159
3143
|
summarizeTable,
|
1160
3144
|
aggregateTable
|
1161
3145
|
},
|
1162
|
-
sql: { sqlQuery }
|
3146
|
+
sql: { sqlQuery, sqlBatchQuery }
|
1163
3147
|
};
|
1164
3148
|
|
1165
3149
|
const controlPlaneFetch = async (options) => fetch$1({ ...options, endpoint: "controlPlane" });
|
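Alongside sqlQuery, the sql tag above now exposes sqlBatchQuery, a thin wrapper that POSTs to /db/{dbBranchName}/sql/batch. A hedged sketch of a direct call; the pathParams/body field names and the statements payload shape are assumptions inferred from the other wrappers, not something this diff spells out:

// Illustrative only; the parameter shape is assumed, not taken from the diff.
const controller = new AbortController();
const results = await sqlBatchQuery(
  {
    pathParams: { workspace: "my-workspace", region: "us-east-1", dbBranchName: "mydb:main" },
    body: { statements: [{ statement: "SELECT count(*) FROM teams", params: [] }] }
    // ...plus the auth/host props the XataApiClient threads into every operation
  },
  controller.signal
);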
@@ -1226,7 +3210,12 @@ const deleteOAuthAccessToken = (variables, signal) => controlPlaneFetch({
|
|
1226
3210
|
...variables,
|
1227
3211
|
signal
|
1228
3212
|
});
|
1229
|
-
const updateOAuthAccessToken = (variables, signal) => controlPlaneFetch({
|
3213
|
+
const updateOAuthAccessToken = (variables, signal) => controlPlaneFetch({
|
3214
|
+
url: "/user/oauth/tokens/{token}",
|
3215
|
+
method: "patch",
|
3216
|
+
...variables,
|
3217
|
+
signal
|
3218
|
+
});
|
1230
3219
|
const getWorkspacesList = (variables, signal) => controlPlaneFetch({
|
1231
3220
|
url: "/workspaces",
|
1232
3221
|
method: "get",
|
@@ -1257,49 +3246,150 @@ const deleteWorkspace = (variables, signal) => controlPlaneFetch({
|
|
1257
3246
|
...variables,
|
1258
3247
|
signal
|
1259
3248
|
});
|
1260
|
-
const getWorkspaceSettings = (variables, signal) => controlPlaneFetch({
|
1261
|
-
|
1262
|
-
|
1263
|
-
|
3249
|
+
const getWorkspaceSettings = (variables, signal) => controlPlaneFetch({
|
3250
|
+
url: "/workspaces/{workspaceId}/settings",
|
3251
|
+
method: "get",
|
3252
|
+
...variables,
|
3253
|
+
signal
|
3254
|
+
});
|
3255
|
+
const updateWorkspaceSettings = (variables, signal) => controlPlaneFetch({
|
3256
|
+
url: "/workspaces/{workspaceId}/settings",
|
3257
|
+
method: "patch",
|
3258
|
+
...variables,
|
3259
|
+
signal
|
3260
|
+
});
|
3261
|
+
const getWorkspaceMembersList = (variables, signal) => controlPlaneFetch({
|
3262
|
+
url: "/workspaces/{workspaceId}/members",
|
3263
|
+
method: "get",
|
3264
|
+
...variables,
|
3265
|
+
signal
|
3266
|
+
});
|
3267
|
+
const updateWorkspaceMemberRole = (variables, signal) => controlPlaneFetch({
|
3268
|
+
url: "/workspaces/{workspaceId}/members/{userId}",
|
3269
|
+
method: "put",
|
3270
|
+
...variables,
|
3271
|
+
signal
|
3272
|
+
});
|
1264
3273
|
const removeWorkspaceMember = (variables, signal) => controlPlaneFetch({
|
1265
3274
|
url: "/workspaces/{workspaceId}/members/{userId}",
|
1266
3275
|
method: "delete",
|
1267
3276
|
...variables,
|
1268
3277
|
signal
|
1269
3278
|
});
|
1270
|
-
const inviteWorkspaceMember = (variables, signal) => controlPlaneFetch({
|
1271
|
-
|
1272
|
-
|
1273
|
-
|
1274
|
-
|
1275
|
-
|
1276
|
-
const
|
3279
|
+
const inviteWorkspaceMember = (variables, signal) => controlPlaneFetch({
|
3280
|
+
url: "/workspaces/{workspaceId}/invites",
|
3281
|
+
method: "post",
|
3282
|
+
...variables,
|
3283
|
+
signal
|
3284
|
+
});
|
3285
|
+
const updateWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3286
|
+
url: "/workspaces/{workspaceId}/invites/{inviteId}",
|
3287
|
+
method: "patch",
|
3288
|
+
...variables,
|
3289
|
+
signal
|
3290
|
+
});
|
3291
|
+
const cancelWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3292
|
+
url: "/workspaces/{workspaceId}/invites/{inviteId}",
|
3293
|
+
method: "delete",
|
3294
|
+
...variables,
|
3295
|
+
signal
|
3296
|
+
});
|
3297
|
+
const acceptWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3298
|
+
url: "/workspaces/{workspaceId}/invites/{inviteKey}/accept",
|
3299
|
+
method: "post",
|
3300
|
+
...variables,
|
3301
|
+
signal
|
3302
|
+
});
|
3303
|
+
const resendWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3304
|
+
url: "/workspaces/{workspaceId}/invites/{inviteId}/resend",
|
3305
|
+
method: "post",
|
3306
|
+
...variables,
|
3307
|
+
signal
|
3308
|
+
});
|
3309
|
+
const listClusters = (variables, signal) => controlPlaneFetch({
|
3310
|
+
url: "/workspaces/{workspaceId}/clusters",
|
3311
|
+
method: "get",
|
3312
|
+
...variables,
|
3313
|
+
signal
|
3314
|
+
});
|
3315
|
+
const createCluster = (variables, signal) => controlPlaneFetch({
|
3316
|
+
url: "/workspaces/{workspaceId}/clusters",
|
3317
|
+
method: "post",
|
3318
|
+
...variables,
|
3319
|
+
signal
|
3320
|
+
});
|
1277
3321
|
const getCluster = (variables, signal) => controlPlaneFetch({
|
1278
3322
|
url: "/workspaces/{workspaceId}/clusters/{clusterId}",
|
1279
3323
|
method: "get",
|
1280
3324
|
...variables,
|
1281
3325
|
signal
|
1282
3326
|
});
|
1283
|
-
const updateCluster = (variables, signal) => controlPlaneFetch({
|
3327
|
+
const updateCluster = (variables, signal) => controlPlaneFetch({
|
3328
|
+
url: "/workspaces/{workspaceId}/clusters/{clusterId}",
|
3329
|
+
method: "patch",
|
3330
|
+
...variables,
|
3331
|
+
signal
|
3332
|
+
});
|
3333
|
+
const deleteCluster = (variables, signal) => controlPlaneFetch({
|
3334
|
+
url: "/workspaces/{workspaceId}/clusters/{clusterId}",
|
3335
|
+
method: "delete",
|
3336
|
+
...variables,
|
3337
|
+
signal
|
3338
|
+
});
|
1284
3339
|
const getDatabaseList = (variables, signal) => controlPlaneFetch({
|
1285
3340
|
url: "/workspaces/{workspaceId}/dbs",
|
1286
3341
|
method: "get",
|
1287
3342
|
...variables,
|
1288
3343
|
signal
|
1289
3344
|
});
|
1290
|
-
const createDatabase = (variables, signal) => controlPlaneFetch({
|
3345
|
+
const createDatabase = (variables, signal) => controlPlaneFetch({
|
3346
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
3347
|
+
method: "put",
|
3348
|
+
...variables,
|
3349
|
+
signal
|
3350
|
+
});
|
1291
3351
|
const deleteDatabase = (variables, signal) => controlPlaneFetch({
|
1292
3352
|
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
1293
3353
|
method: "delete",
|
1294
3354
|
...variables,
|
1295
3355
|
signal
|
1296
3356
|
});
|
1297
|
-
const getDatabaseMetadata = (variables, signal) => controlPlaneFetch({
|
1298
|
-
|
1299
|
-
|
1300
|
-
|
1301
|
-
|
1302
|
-
|
3357
|
+
const getDatabaseMetadata = (variables, signal) => controlPlaneFetch({
|
3358
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
3359
|
+
method: "get",
|
3360
|
+
...variables,
|
3361
|
+
signal
|
3362
|
+
});
|
3363
|
+
const updateDatabaseMetadata = (variables, signal) => controlPlaneFetch({
|
3364
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
3365
|
+
method: "patch",
|
3366
|
+
...variables,
|
3367
|
+
signal
|
3368
|
+
});
|
3369
|
+
const renameDatabase = (variables, signal) => controlPlaneFetch({
|
3370
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/rename",
|
3371
|
+
method: "post",
|
3372
|
+
...variables,
|
3373
|
+
signal
|
3374
|
+
});
|
3375
|
+
const getDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
|
3376
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
|
3377
|
+
method: "get",
|
3378
|
+
...variables,
|
3379
|
+
signal
|
3380
|
+
});
|
3381
|
+
const updateDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
|
3382
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
|
3383
|
+
method: "put",
|
3384
|
+
...variables,
|
3385
|
+
signal
|
3386
|
+
});
|
3387
|
+
const deleteDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
|
3388
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
|
3389
|
+
method: "delete",
|
3390
|
+
...variables,
|
3391
|
+
signal
|
3392
|
+
});
|
1303
3393
|
const listRegions = (variables, signal) => controlPlaneFetch({
|
1304
3394
|
url: "/workspaces/{workspaceId}/regions",
|
1305
3395
|
method: "get",
|
@@ -1337,7 +3427,13 @@ const operationsByTag$1 = {
|
|
1337
3427
|
acceptWorkspaceMemberInvite,
|
1338
3428
|
resendWorkspaceMemberInvite
|
1339
3429
|
},
|
1340
|
-
xbcontrolOther: {
|
3430
|
+
xbcontrolOther: {
|
3431
|
+
listClusters,
|
3432
|
+
createCluster,
|
3433
|
+
getCluster,
|
3434
|
+
updateCluster,
|
3435
|
+
deleteCluster
|
3436
|
+
},
|
1341
3437
|
databases: {
|
1342
3438
|
getDatabaseList,
|
1343
3439
|
createDatabase,
|
@@ -1354,28 +3450,17 @@ const operationsByTag$1 = {
|
|
1354
3450
|
|
1355
3451
|
const operationsByTag = deepMerge(operationsByTag$2, operationsByTag$1);
|
1356
3452
|
|
1357
|
-
var __accessCheck$7 = (obj, member, msg) => {
|
1358
|
-
if (!member.has(obj))
|
1359
|
-
throw TypeError("Cannot " + msg);
|
1360
|
-
};
|
1361
|
-
var __privateGet$6 = (obj, member, getter) => {
|
1362
|
-
__accessCheck$7(obj, member, "read from private field");
|
1363
|
-
return getter ? getter.call(obj) : member.get(obj);
|
1364
|
-
};
|
1365
|
-
var __privateAdd$7 = (obj, member, value) => {
|
1366
|
-
if (member.has(obj))
|
1367
|
-
throw TypeError("Cannot add the same private member more than once");
|
1368
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
1369
|
-
};
|
1370
|
-
var __privateSet$5 = (obj, member, value, setter) => {
|
1371
|
-
__accessCheck$7(obj, member, "write to private field");
|
1372
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
1373
|
-
return value;
|
3453
|
+
var __typeError$7 = (msg) => {
|
3454
|
+
throw TypeError(msg);
|
1374
3455
|
};
|
3456
|
+
var __accessCheck$7 = (obj, member, msg) => member.has(obj) || __typeError$7("Cannot " + msg);
|
3457
|
+
var __privateGet$6 = (obj, member, getter) => (__accessCheck$7(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
3458
|
+
var __privateAdd$7 = (obj, member, value) => member.has(obj) ? __typeError$7("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
3459
|
+
var __privateSet$5 = (obj, member, value, setter) => (__accessCheck$7(obj, member, "write to private field"), member.set(obj, value), value);
|
1375
3460
|
var _extraProps, _namespaces;
|
1376
3461
|
class XataApiClient {
|
1377
3462
|
constructor(options = {}) {
|
1378
|
-
__privateAdd$7(this, _extraProps, void 0);
|
3463
|
+
__privateAdd$7(this, _extraProps);
|
1379
3464
|
__privateAdd$7(this, _namespaces, {});
|
1380
3465
|
const provider = options.host ?? "production";
|
1381
3466
|
const apiKey = options.apiKey ?? getAPIKey();
|
@@ -1396,38 +3481,31 @@ class XataApiClient {
|
|
1396
3481
|
});
|
1397
3482
|
}
|
1398
3483
|
get user() {
|
1399
|
-
if (!__privateGet$6(this, _namespaces).user)
|
1400
|
-
__privateGet$6(this, _namespaces).user = new UserApi(__privateGet$6(this, _extraProps));
|
3484
|
+
if (!__privateGet$6(this, _namespaces).user) __privateGet$6(this, _namespaces).user = new UserApi(__privateGet$6(this, _extraProps));
|
1401
3485
|
return __privateGet$6(this, _namespaces).user;
|
1402
3486
|
}
|
1403
3487
|
get authentication() {
|
1404
|
-
if (!__privateGet$6(this, _namespaces).authentication)
|
1405
|
-
__privateGet$6(this, _namespaces).authentication = new AuthenticationApi(__privateGet$6(this, _extraProps));
|
3488
|
+
if (!__privateGet$6(this, _namespaces).authentication) __privateGet$6(this, _namespaces).authentication = new AuthenticationApi(__privateGet$6(this, _extraProps));
|
1406
3489
|
return __privateGet$6(this, _namespaces).authentication;
|
1407
3490
|
}
|
1408
3491
|
get workspaces() {
|
1409
|
-
if (!__privateGet$6(this, _namespaces).workspaces)
|
1410
|
-
__privateGet$6(this, _namespaces).workspaces = new WorkspaceApi(__privateGet$6(this, _extraProps));
|
3492
|
+
if (!__privateGet$6(this, _namespaces).workspaces) __privateGet$6(this, _namespaces).workspaces = new WorkspaceApi(__privateGet$6(this, _extraProps));
|
1411
3493
|
return __privateGet$6(this, _namespaces).workspaces;
|
1412
3494
|
}
|
1413
3495
|
get invites() {
|
1414
|
-
if (!__privateGet$6(this, _namespaces).invites)
|
1415
|
-
__privateGet$6(this, _namespaces).invites = new InvitesApi(__privateGet$6(this, _extraProps));
|
3496
|
+
if (!__privateGet$6(this, _namespaces).invites) __privateGet$6(this, _namespaces).invites = new InvitesApi(__privateGet$6(this, _extraProps));
|
1416
3497
|
return __privateGet$6(this, _namespaces).invites;
|
1417
3498
|
}
|
1418
3499
|
get database() {
|
1419
|
-
if (!__privateGet$6(this, _namespaces).database)
|
1420
|
-
__privateGet$6(this, _namespaces).database = new DatabaseApi(__privateGet$6(this, _extraProps));
|
3500
|
+
if (!__privateGet$6(this, _namespaces).database) __privateGet$6(this, _namespaces).database = new DatabaseApi(__privateGet$6(this, _extraProps));
|
1421
3501
|
return __privateGet$6(this, _namespaces).database;
|
1422
3502
|
}
|
1423
3503
|
get branches() {
|
1424
|
-
if (!__privateGet$6(this, _namespaces).branches)
|
1425
|
-
__privateGet$6(this, _namespaces).branches = new BranchApi(__privateGet$6(this, _extraProps));
|
3504
|
+
if (!__privateGet$6(this, _namespaces).branches) __privateGet$6(this, _namespaces).branches = new BranchApi(__privateGet$6(this, _extraProps));
|
1426
3505
|
return __privateGet$6(this, _namespaces).branches;
|
1427
3506
|
}
|
1428
3507
|
get migrations() {
|
1429
|
-
if (!__privateGet$6(this, _namespaces).migrations)
|
1430
|
-
__privateGet$6(this, _namespaces).migrations = new MigrationsApi(__privateGet$6(this, _extraProps));
|
3508
|
+
if (!__privateGet$6(this, _namespaces).migrations) __privateGet$6(this, _namespaces).migrations = new MigrationsApi(__privateGet$6(this, _extraProps));
|
1431
3509
|
return __privateGet$6(this, _namespaces).migrations;
|
1432
3510
|
}
|
1433
3511
|
get migrationRequests() {
|
@@ -1436,23 +3514,19 @@ class XataApiClient {
|
|
1436
3514
|
return __privateGet$6(this, _namespaces).migrationRequests;
|
1437
3515
|
}
|
1438
3516
|
get tables() {
|
1439
|
-
if (!__privateGet$6(this, _namespaces).tables)
|
1440
|
-
__privateGet$6(this, _namespaces).tables = new TableApi(__privateGet$6(this, _extraProps));
|
3517
|
+
if (!__privateGet$6(this, _namespaces).tables) __privateGet$6(this, _namespaces).tables = new TableApi(__privateGet$6(this, _extraProps));
|
1441
3518
|
return __privateGet$6(this, _namespaces).tables;
|
1442
3519
|
}
|
1443
3520
|
get records() {
|
1444
|
-
if (!__privateGet$6(this, _namespaces).records)
|
1445
|
-
__privateGet$6(this, _namespaces).records = new RecordsApi(__privateGet$6(this, _extraProps));
|
3521
|
+
if (!__privateGet$6(this, _namespaces).records) __privateGet$6(this, _namespaces).records = new RecordsApi(__privateGet$6(this, _extraProps));
|
1446
3522
|
return __privateGet$6(this, _namespaces).records;
|
1447
3523
|
}
|
1448
3524
|
get files() {
|
1449
|
-
if (!__privateGet$6(this, _namespaces).files)
|
1450
|
-
__privateGet$6(this, _namespaces).files = new FilesApi(__privateGet$6(this, _extraProps));
|
3525
|
+
if (!__privateGet$6(this, _namespaces).files) __privateGet$6(this, _namespaces).files = new FilesApi(__privateGet$6(this, _extraProps));
|
1451
3526
|
return __privateGet$6(this, _namespaces).files;
|
1452
3527
|
}
|
1453
3528
|
get searchAndFilter() {
|
1454
|
-
if (!__privateGet$6(this, _namespaces).searchAndFilter)
|
1455
|
-
__privateGet$6(this, _namespaces).searchAndFilter = new SearchAndFilterApi(__privateGet$6(this, _extraProps));
|
3529
|
+
if (!__privateGet$6(this, _namespaces).searchAndFilter) __privateGet$6(this, _namespaces).searchAndFilter = new SearchAndFilterApi(__privateGet$6(this, _extraProps));
|
1456
3530
|
return __privateGet$6(this, _namespaces).searchAndFilter;
|
1457
3531
|
}
|
1458
3532
|
}
|
@@ -2729,8 +4803,7 @@ function buildTransformString(transformations) {
|
|
2729
4803
|
).join(",");
|
2730
4804
|
}
|
2731
4805
|
function transformImage(url, ...transformations) {
|
2732
|
-
if (!isDefined(url))
|
2733
|
-
return void 0;
|
4806
|
+
if (!isDefined(url)) return void 0;
|
2734
4807
|
const newTransformations = buildTransformString(transformations);
|
2735
4808
|
const { hostname, pathname, search } = new URL(url);
|
2736
4809
|
const pathParts = pathname.split("/");
|
@@ -2843,8 +4916,7 @@ class XataFile {
  }
  }
  const parseInputFileEntry = async (entry) => {
- if (!isDefined(entry))
- return null;
+ if (!isDefined(entry)) return null;
  const { id, name, mediaType, base64Content, enablePublicUrl, signedUrlTimeout, uploadUrlTimeout } = await entry;
  return compactObject({
  id,
@@ -2859,24 +4931,19 @@ const parseInputFileEntry = async (entry) => {
  };

  function cleanFilter(filter) {
- if (!isDefined(filter))
- return void 0;
- if (!isObject(filter))
- return filter;
+ if (!isDefined(filter)) return void 0;
+ if (!isObject(filter)) return filter;
  const values = Object.fromEntries(
  Object.entries(filter).reduce((acc, [key, value]) => {
- if (!isDefined(value))
- return acc;
+ if (!isDefined(value)) return acc;
  if (Array.isArray(value)) {
  const clean = value.map((item) => cleanFilter(item)).filter((item) => isDefined(item));
- if (clean.length === 0)
- return acc;
+ if (clean.length === 0) return acc;
  return [...acc, [key, clean]];
  }
  if (isObject(value)) {
  const clean = cleanFilter(value);
- if (!isDefined(clean))
- return acc;
+ if (!isDefined(clean)) return acc;
  return [...acc, [key, clean]];
  }
  return [...acc, [key, value]];
@@ -2886,10 +4953,8 @@ function cleanFilter(filter) {
  }

  function stringifyJson(value) {
- if (!isDefined(value))
- return value;
- if (isString(value))
- return value;
+ if (!isDefined(value)) return value;
+ if (isString(value)) return value;
  try {
  return JSON.stringify(value);
  } catch (e) {
@@ -2904,28 +4969,17 @@ function parseJson(value) {
  }
  }

- var __accessCheck$6 = (obj, member, msg) => {
- if (!member.has(obj))
- throw TypeError("Cannot " + msg);
- };
- var __privateGet$5 = (obj, member, getter) => {
- __accessCheck$6(obj, member, "read from private field");
- return getter ? getter.call(obj) : member.get(obj);
- };
- var __privateAdd$6 = (obj, member, value) => {
- if (member.has(obj))
- throw TypeError("Cannot add the same private member more than once");
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
- };
- var __privateSet$4 = (obj, member, value, setter) => {
- __accessCheck$6(obj, member, "write to private field");
- setter ? setter.call(obj, value) : member.set(obj, value);
- return value;
+ var __typeError$6 = (msg) => {
+ throw TypeError(msg);
  };
+ var __accessCheck$6 = (obj, member, msg) => member.has(obj) || __typeError$6("Cannot " + msg);
+ var __privateGet$5 = (obj, member, getter) => (__accessCheck$6(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
+ var __privateAdd$6 = (obj, member, value) => member.has(obj) ? __typeError$6("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
+ var __privateSet$4 = (obj, member, value, setter) => (__accessCheck$6(obj, member, "write to private field"), member.set(obj, value), value);
  var _query, _page;
  class Page {
  constructor(query, meta, records = []) {
- __privateAdd$6(this, _query, void 0);
+ __privateAdd$6(this, _query);
  __privateSet$4(this, _query, query);
  this.meta = meta;
  this.records = new PageRecordArray(this, records);
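Most of the churn in this file comes from an updated esbuild/tsup lowering of ECMAScript private class members: per-method WeakSets such as `_cleanFilterConstraint` or `_insertRecords` are merged into a single `_ClassName_instances` WeakSet, and the `__accessCheck`/`__privateGet`/`__privateAdd`/`__privateSet` helpers become one-line expressions that delegate to a shared `__typeError`. As a rough, assumed illustration of the source shape that produces this output (the real build configuration is not part of this diff):

```ts
// Illustrative source; the `#` private members are what the __private* helpers above are lowering.
class PageSketch<T> {
  #query: T;

  constructor(query: T) {
    // Lowered to __privateAdd(this, _query) followed by __privateSet(this, _query, query).
    this.#query = query;
  }

  #clone(): T {
    // Private methods now share one _PageSketch_instances WeakSet plus a free clone_fn function.
    return this.#query;
  }

  get query(): T {
    // Reads lower to __privateGet(this, _query); calls to __privateMethod(this, _PageSketch_instances, clone_fn).
    return this.#clone();
  }
}
```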
@@ -3012,7 +5066,7 @@ class RecordArray extends Array {
  const _PageRecordArray = class _PageRecordArray extends Array {
  constructor(...args) {
  super(..._PageRecordArray.parseConstructorParams(...args));
- __privateAdd$6(this, _page, void 0);
+ __privateAdd$6(this, _page);
  __privateSet$4(this, _page, isObject(args[0]?.meta) ? args[0] : { meta: { page: { cursor: "", more: false } }, records: [] });
  }
  static parseConstructorParams(...args) {
@@ -3083,34 +5137,20 @@ const _PageRecordArray = class _PageRecordArray extends Array {
  _page = new WeakMap();
  let PageRecordArray = _PageRecordArray;

- var __accessCheck$5 = (obj, member, msg) => {
- if (!member.has(obj))
- throw TypeError("Cannot " + msg);
- };
- var __privateGet$4 = (obj, member, getter) => {
- __accessCheck$5(obj, member, "read from private field");
- return getter ? getter.call(obj) : member.get(obj);
+ var __typeError$5 = (msg) => {
+ throw TypeError(msg);
  };
- var __privateAdd$5 = (obj, member, value) => {
- if (member.has(obj))
- throw TypeError("Cannot add the same private member more than once");
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
- };
- var __privateSet$3 = (obj, member, value, setter) => {
- __accessCheck$5(obj, member, "write to private field");
- setter ? setter.call(obj, value) : member.set(obj, value);
- return value;
- };
- var __privateMethod$3 = (obj, member, method) => {
- __accessCheck$5(obj, member, "access private method");
- return method;
- };
- var _table$1, _repository, _data, _cleanFilterConstraint, cleanFilterConstraint_fn;
+ var __accessCheck$5 = (obj, member, msg) => member.has(obj) || __typeError$5("Cannot " + msg);
+ var __privateGet$4 = (obj, member, getter) => (__accessCheck$5(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
+ var __privateAdd$5 = (obj, member, value) => member.has(obj) ? __typeError$5("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
+ var __privateSet$3 = (obj, member, value, setter) => (__accessCheck$5(obj, member, "write to private field"), member.set(obj, value), value);
+ var __privateMethod$3 = (obj, member, method) => (__accessCheck$5(obj, member, "access private method"), method);
+ var _table$1, _repository, _data, _Query_instances, cleanFilterConstraint_fn;
  const _Query = class _Query {
  constructor(repository, table, data, rawParent) {
- __privateAdd$5(this, _cleanFilterConstraint);
- __privateAdd$5(this, _table$1, void 0);
- __privateAdd$5(this, _repository, void 0);
+ __privateAdd$5(this, _Query_instances);
+ __privateAdd$5(this, _table$1);
+ __privateAdd$5(this, _repository);
  __privateAdd$5(this, _data, { filter: {} });
  // Implements pagination
  this.meta = { page: { cursor: "start", more: true, size: PAGINATION_DEFAULT_SIZE } };
@@ -3189,12 +5229,12 @@ const _Query = class _Query {
  filter(a, b) {
  if (arguments.length === 1) {
  const constraints = Object.entries(a ?? {}).map(([column, constraint]) => ({
- [column]: __privateMethod$3(this, _cleanFilterConstraint, cleanFilterConstraint_fn).call(this, column, constraint)
+ [column]: __privateMethod$3(this, _Query_instances, cleanFilterConstraint_fn).call(this, column, constraint)
  }));
  const $all = compact([__privateGet$4(this, _data).filter?.$all].flat().concat(constraints));
  return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { filter: { $all } }, __privateGet$4(this, _data));
  } else {
- const constraints = isDefined(a) && isDefined(b) ? [{ [a]: __privateMethod$3(this, _cleanFilterConstraint, cleanFilterConstraint_fn).call(this, a, b) }] : void 0;
+ const constraints = isDefined(a) && isDefined(b) ? [{ [a]: __privateMethod$3(this, _Query_instances, cleanFilterConstraint_fn).call(this, a, b) }] : void 0;
  const $all = compact([__privateGet$4(this, _data).filter?.$all].flat().concat(constraints));
  return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { filter: { $all } }, __privateGet$4(this, _data));
  }
@@ -3273,8 +5313,7 @@ const _Query = class _Query {
  }
  async getFirstOrThrow(options = {}) {
  const records = await this.getMany({ ...options, pagination: { size: 1 } });
- if (records[0] === void 0)
- throw new Error("No results found.");
+ if (records[0] === void 0) throw new Error("No results found.");
  return records[0];
  }
  async summarize(params = {}) {
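`getFirstOrThrow` keeps its behaviour: it requests a single-record page and throws `Error("No results found.")` when that page is empty. A hedged usage sketch against a plain `BaseClient`; the database URL, API key handling, and table/column names are placeholders:

```ts
import { BaseClient } from "@xata.io/client";

const xata = new BaseClient({
  apiKey: process.env.XATA_API_KEY!,
  databaseURL: "https://my-workspace-abc123.us-east-1.xata.sh/db/app"
});

// Throws new Error("No results found.") when the filter matches nothing;
// getFirst() resolves to null in that case instead.
const user = await xata.db.users.filter("email", "jane@example.com").getFirstOrThrow();
```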
@@ -3337,7 +5376,7 @@ const _Query = class _Query {
  _table$1 = new WeakMap();
  _repository = new WeakMap();
  _data = new WeakMap();
- _cleanFilterConstraint = new WeakSet();
+ _Query_instances = new WeakSet();
  cleanFilterConstraint_fn = function(column, value) {
  const columnType = __privateGet$4(this, _table$1).schema?.columns.find(({ name }) => name === column)?.type;
  if (columnType === "multiple" && (isString(value) || isStringArray(value))) {
@@ -3403,8 +5442,7 @@ function isSortFilterString(value) {
  }
  function isSortFilterBase(filter) {
  return isObject(filter) && Object.entries(filter).every(([key, value]) => {
- if (key === "*")
- return value === "random";
+ if (key === "*") return value === "random";
  return value === "asc" || value === "desc";
  });
  }
@@ -3425,29 +5463,15 @@ function buildSortFilter(filter) {
  }
  }

- var __accessCheck$4 = (obj, member, msg) => {
- if (!member.has(obj))
- throw TypeError("Cannot " + msg);
- };
- var __privateGet$3 = (obj, member, getter) => {
- __accessCheck$4(obj, member, "read from private field");
- return getter ? getter.call(obj) : member.get(obj);
- };
- var __privateAdd$4 = (obj, member, value) => {
- if (member.has(obj))
- throw TypeError("Cannot add the same private member more than once");
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
- };
- var __privateSet$2 = (obj, member, value, setter) => {
- __accessCheck$4(obj, member, "write to private field");
- setter ? setter.call(obj, value) : member.set(obj, value);
- return value;
+ var __typeError$4 = (msg) => {
+ throw TypeError(msg);
  };
- var __privateMethod$2 = (obj, member, method) => {
- __accessCheck$4(obj, member, "access private method");
- return method;
- };
- var _table, _getFetchProps, _db, _cache, _schemaTables, _trace, _insertRecordWithoutId, insertRecordWithoutId_fn, _insertRecordWithId, insertRecordWithId_fn, _insertRecords, insertRecords_fn, _updateRecordWithID, updateRecordWithID_fn, _updateRecords, updateRecords_fn, _upsertRecordWithID, upsertRecordWithID_fn, _deleteRecord, deleteRecord_fn, _deleteRecords, deleteRecords_fn, _setCacheQuery, setCacheQuery_fn, _getCacheQuery, getCacheQuery_fn, _getSchemaTables, getSchemaTables_fn, _transformObjectToApi, transformObjectToApi_fn;
+ var __accessCheck$4 = (obj, member, msg) => member.has(obj) || __typeError$4("Cannot " + msg);
+ var __privateGet$3 = (obj, member, getter) => (__accessCheck$4(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
+ var __privateAdd$4 = (obj, member, value) => member.has(obj) ? __typeError$4("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
+ var __privateSet$2 = (obj, member, value, setter) => (__accessCheck$4(obj, member, "write to private field"), member.set(obj, value), value);
+ var __privateMethod$2 = (obj, member, method) => (__accessCheck$4(obj, member, "access private method"), method);
+ var _table, _getFetchProps, _db, _cache, _schemaTables, _trace, _RestRepository_instances, insertRecordWithoutId_fn, insertRecordWithId_fn, insertRecords_fn, updateRecordWithID_fn, updateRecords_fn, upsertRecordWithID_fn, deleteRecord_fn, deleteRecords_fn, setCacheQuery_fn, getCacheQuery_fn, getSchemaTables_fn, transformObjectToApi_fn;
  const BULK_OPERATION_MAX_SIZE = 1e3;
  class Repository extends Query {
  }
@@ -3458,24 +5482,13 @@ class RestRepository extends Query {
  { name: options.table, schema: options.schemaTables?.find((table) => table.name === options.table) },
  {}
  );
- __privateAdd$4(this, _insertRecordWithoutId);
- __privateAdd$4(this, _insertRecordWithId);
- __privateAdd$4(this, _insertRecords);
- __privateAdd$4(this, _updateRecordWithID);
- __privateAdd$4(this, _updateRecords);
- __privateAdd$4(this, _upsertRecordWithID);
- __privateAdd$4(this, _deleteRecord);
- __privateAdd$4(this, _deleteRecords);
- __privateAdd$4(this, _setCacheQuery);
- __privateAdd$4(this, _getCacheQuery);
- __privateAdd$4(this, _getSchemaTables);
- __privateAdd$4(this, _transformObjectToApi);
- __privateAdd$4(this, _table, void 0);
- __privateAdd$4(this, _getFetchProps, void 0);
- __privateAdd$4(this, _db, void 0);
- __privateAdd$4(this, _cache, void 0);
- __privateAdd$4(this, _schemaTables, void 0);
- __privateAdd$4(this, _trace, void 0);
+ __privateAdd$4(this, _RestRepository_instances);
+ __privateAdd$4(this, _table);
+ __privateAdd$4(this, _getFetchProps);
+ __privateAdd$4(this, _db);
+ __privateAdd$4(this, _cache);
+ __privateAdd$4(this, _schemaTables);
+ __privateAdd$4(this, _trace);
  __privateSet$2(this, _table, options.table);
  __privateSet$2(this, _db, options.db);
  __privateSet$2(this, _cache, options.pluginOptions.cache);
@@ -3495,28 +5508,25 @@ class RestRepository extends Query {
  return __privateGet$3(this, _trace).call(this, "create", async () => {
  const ifVersion = parseIfVersion(b, c, d);
  if (Array.isArray(a)) {
- if (a.length === 0)
- return [];
- const ids = await __privateMethod$2(this, _insertRecords, insertRecords_fn).call(this, a, { ifVersion, createOnly: true });
+ if (a.length === 0) return [];
+ const ids = await __privateMethod$2(this, _RestRepository_instances, insertRecords_fn).call(this, a, { ifVersion, createOnly: true });
  const columns = isValidSelectableColumns(b) ? b : ["*"];
  const result = await this.read(ids, columns);
  return result;
  }
  if (isString(a) && isObject(b)) {
- if (a === "")
- throw new Error("The id can't be empty");
+ if (a === "") throw new Error("The id can't be empty");
  const columns = isValidSelectableColumns(c) ? c : void 0;
- return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: true, ifVersion });
+ return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: true, ifVersion });
  }
  if (isObject(a) && isString(a.id)) {
- if (a.id === "")
- throw new Error("The id can't be empty");
+ if (a.id === "") throw new Error("The id can't be empty");
  const columns = isValidSelectableColumns(b) ? b : void 0;
- return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a.id, { ...a, id: void 0 }, columns, { createOnly: true, ifVersion });
+ return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a.id, { ...a, id: void 0 }, columns, { createOnly: true, ifVersion });
  }
  if (isObject(a)) {
  const columns = isValidSelectableColumns(b) ? b : void 0;
- return __privateMethod$2(this, _insertRecordWithoutId, insertRecordWithoutId_fn).call(this, a, columns);
+ return __privateMethod$2(this, _RestRepository_instances, insertRecordWithoutId_fn).call(this, a, columns);
  }
  throw new Error("Invalid arguments for create method");
  });
@@ -3525,8 +5535,7 @@ class RestRepository extends Query {
  return __privateGet$3(this, _trace).call(this, "read", async () => {
  const columns = isValidSelectableColumns(b) ? b : ["*"];
  if (Array.isArray(a)) {
- if (a.length === 0)
- return [];
+ if (a.length === 0) return [];
  const ids = a.map((item) => extractId(item));
  const finalObjects = await this.getAll({ filter: { id: { $any: compact(ids) } }, columns });
  const dictionary = finalObjects.reduce((acc, object) => {
@@ -3549,7 +5558,7 @@ class RestRepository extends Query {
  queryParams: { columns },
  ...__privateGet$3(this, _getFetchProps).call(this)
  });
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
  return initObject(
  __privateGet$3(this, _db),
  schemaTables,
@@ -3590,11 +5599,10 @@ class RestRepository extends Query {
  return __privateGet$3(this, _trace).call(this, "update", async () => {
  const ifVersion = parseIfVersion(b, c, d);
  if (Array.isArray(a)) {
- if (a.length === 0)
- return [];
+ if (a.length === 0) return [];
  const existing = await this.read(a, ["id"]);
  const updates = a.filter((_item, index) => existing[index] !== null);
- await __privateMethod$2(this, _updateRecords, updateRecords_fn).call(this, updates, {
+ await __privateMethod$2(this, _RestRepository_instances, updateRecords_fn).call(this, updates, {
  ifVersion,
  upsert: false
  });
@@ -3605,15 +5613,14 @@ class RestRepository extends Query {
  try {
  if (isString(a) && isObject(b)) {
  const columns = isValidSelectableColumns(c) ? c : void 0;
- return await __privateMethod$2(this, _updateRecordWithID, updateRecordWithID_fn).call(this, a, b, columns, { ifVersion });
+ return await __privateMethod$2(this, _RestRepository_instances, updateRecordWithID_fn).call(this, a, b, columns, { ifVersion });
  }
  if (isObject(a) && isString(a.id)) {
  const columns = isValidSelectableColumns(b) ? b : void 0;
- return await __privateMethod$2(this, _updateRecordWithID, updateRecordWithID_fn).call(this, a.id, { ...a, id: void 0 }, columns, { ifVersion });
+ return await __privateMethod$2(this, _RestRepository_instances, updateRecordWithID_fn).call(this, a.id, { ...a, id: void 0 }, columns, { ifVersion });
  }
  } catch (error) {
- if (error.status === 422)
- return null;
+ if (error.status === 422) return null;
  throw error;
  }
  throw new Error("Invalid arguments for update method");
@@ -3642,9 +5649,8 @@ class RestRepository extends Query {
  return __privateGet$3(this, _trace).call(this, "createOrUpdate", async () => {
  const ifVersion = parseIfVersion(b, c, d);
  if (Array.isArray(a)) {
- if (a.length === 0)
- return [];
- await __privateMethod$2(this, _updateRecords, updateRecords_fn).call(this, a, {
+ if (a.length === 0) return [];
+ await __privateMethod$2(this, _RestRepository_instances, updateRecords_fn).call(this, a, {
  ifVersion,
  upsert: true
  });
@@ -3653,16 +5659,14 @@ class RestRepository extends Query {
  return result;
  }
  if (isString(a) && isObject(b)) {
- if (a === "")
- throw new Error("The id can't be empty");
+ if (a === "") throw new Error("The id can't be empty");
  const columns = isValidSelectableColumns(c) ? c : void 0;
- return await __privateMethod$2(this, _upsertRecordWithID, upsertRecordWithID_fn).call(this, a, b, columns, { ifVersion });
+ return await __privateMethod$2(this, _RestRepository_instances, upsertRecordWithID_fn).call(this, a, b, columns, { ifVersion });
  }
  if (isObject(a) && isString(a.id)) {
- if (a.id === "")
- throw new Error("The id can't be empty");
+ if (a.id === "") throw new Error("The id can't be empty");
  const columns = isValidSelectableColumns(c) ? c : void 0;
- return await __privateMethod$2(this, _upsertRecordWithID, upsertRecordWithID_fn).call(this, a.id, { ...a, id: void 0 }, columns, { ifVersion });
+ return await __privateMethod$2(this, _RestRepository_instances, upsertRecordWithID_fn).call(this, a.id, { ...a, id: void 0 }, columns, { ifVersion });
  }
  if (!isDefined(a) && isObject(b)) {
  return await this.create(b, c);
@@ -3677,24 +5681,21 @@ class RestRepository extends Query {
  return __privateGet$3(this, _trace).call(this, "createOrReplace", async () => {
  const ifVersion = parseIfVersion(b, c, d);
  if (Array.isArray(a)) {
- if (a.length === 0)
- return [];
- const ids = await __privateMethod$2(this, _insertRecords, insertRecords_fn).call(this, a, { ifVersion, createOnly: false });
+ if (a.length === 0) return [];
+ const ids = await __privateMethod$2(this, _RestRepository_instances, insertRecords_fn).call(this, a, { ifVersion, createOnly: false });
  const columns = isValidSelectableColumns(b) ? b : ["*"];
  const result = await this.read(ids, columns);
  return result;
  }
  if (isString(a) && isObject(b)) {
- if (a === "")
- throw new Error("The id can't be empty");
+ if (a === "") throw new Error("The id can't be empty");
  const columns = isValidSelectableColumns(c) ? c : void 0;
- return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: false, ifVersion });
+ return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: false, ifVersion });
  }
  if (isObject(a) && isString(a.id)) {
- if (a.id === "")
- throw new Error("The id can't be empty");
+ if (a.id === "") throw new Error("The id can't be empty");
  const columns = isValidSelectableColumns(c) ? c : void 0;
- return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a.id, { ...a, id: void 0 }, columns, { createOnly: false, ifVersion });
+ return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a.id, { ...a, id: void 0 }, columns, { createOnly: false, ifVersion });
  }
  if (!isDefined(a) && isObject(b)) {
  return await this.create(b, c);
@@ -3708,25 +5709,22 @@ class RestRepository extends Query {
  async delete(a, b) {
  return __privateGet$3(this, _trace).call(this, "delete", async () => {
  if (Array.isArray(a)) {
- if (a.length === 0)
- return [];
+ if (a.length === 0) return [];
  const ids = a.map((o) => {
- if (isString(o))
- return o;
- if (isString(o.id))
- return o.id;
+ if (isString(o)) return o;
+ if (isString(o.id)) return o.id;
  throw new Error("Invalid arguments for delete method");
  });
  const columns = isValidSelectableColumns(b) ? b : ["*"];
  const result = await this.read(a, columns);
- await __privateMethod$2(this, _deleteRecords, deleteRecords_fn).call(this, ids);
+ await __privateMethod$2(this, _RestRepository_instances, deleteRecords_fn).call(this, ids);
  return result;
  }
  if (isString(a)) {
- return __privateMethod$2(this, _deleteRecord, deleteRecord_fn).call(this, a, b);
+ return __privateMethod$2(this, _RestRepository_instances, deleteRecord_fn).call(this, a, b);
  }
  if (isObject(a) && isString(a.id)) {
- return __privateMethod$2(this, _deleteRecord, deleteRecord_fn).call(this, a.id, b);
+ return __privateMethod$2(this, _RestRepository_instances, deleteRecord_fn).call(this, a.id, b);
  }
  throw new Error("Invalid arguments for delete method");
  });
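Apart from the collapsed guards, the `delete` overloads are unchanged: a record id, an object carrying an `id`, or an array mixing both are accepted, and anything else is rejected with `"Invalid arguments for delete method"`. A sketch of the accepted shapes (the `xata` client and table name are placeholders, as in the earlier sketch):

```ts
// Stand-in typing for a client instance; see the BaseClient sketch above.
declare const xata: { db: Record<string, any> };

await xata.db.users.delete("rec_abc123");                          // single id
await xata.db.users.delete({ id: "rec_abc123" });                  // object carrying an id
await xata.db.users.delete(["rec_abc123", { id: "rec_def456" }]);  // array, deleted in bulk chunks
```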
@@ -3770,7 +5768,7 @@ class RestRepository extends Query {
  },
  ...__privateGet$3(this, _getFetchProps).call(this)
  });
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
  return {
  records: records.map((item) => initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), item, ["*"])),
  totalCount
@@ -3795,7 +5793,7 @@ class RestRepository extends Query {
  },
  ...__privateGet$3(this, _getFetchProps).call(this)
  });
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
  return {
  records: records.map((item) => initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), item, ["*"])),
  totalCount
@@ -3819,9 +5817,8 @@ class RestRepository extends Query {
  }
  async query(query) {
  return __privateGet$3(this, _trace).call(this, "query", async () => {
- const cacheQuery = await __privateMethod$2(this, _getCacheQuery, getCacheQuery_fn).call(this, query);
- if (cacheQuery)
- return new Page(query, cacheQuery.meta, cacheQuery.records);
+ const cacheQuery = await __privateMethod$2(this, _RestRepository_instances, getCacheQuery_fn).call(this, query);
+ if (cacheQuery) return new Page(query, cacheQuery.meta, cacheQuery.records);
  const data = query.getQueryOptions();
  const { meta, records: objects } = await queryTable({
  pathParams: {
@@ -3840,7 +5837,7 @@ class RestRepository extends Query {
  fetchOptions: data.fetchOptions,
  ...__privateGet$3(this, _getFetchProps).call(this)
  });
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
  const records = objects.map(
  (record) => initObject(
  __privateGet$3(this, _db),
@@ -3850,7 +5847,7 @@ class RestRepository extends Query {
  data.columns ?? ["*"]
  )
  );
- await __privateMethod$2(this, _setCacheQuery, setCacheQuery_fn).call(this, query, meta, records);
+ await __privateMethod$2(this, _RestRepository_instances, setCacheQuery_fn).call(this, query, meta, records);
  return new Page(query, meta, records);
  });
  }
@@ -3875,7 +5872,7 @@ class RestRepository extends Query {
  },
  ...__privateGet$3(this, _getFetchProps).call(this)
  });
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
  return {
  ...result,
  summaries: result.summaries.map(
@@ -3924,9 +5921,9 @@ _db = new WeakMap();
  _cache = new WeakMap();
  _schemaTables = new WeakMap();
  _trace = new WeakMap();
- _insertRecordWithoutId = new WeakSet();
+ _RestRepository_instances = new WeakSet();
  insertRecordWithoutId_fn = async function(object, columns = ["*"]) {
- const record = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
+ const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
  const response = await insertRecord({
  pathParams: {
  workspace: "{workspaceId}",
@@ -3938,14 +5935,12 @@ insertRecordWithoutId_fn = async function(object, columns = ["*"]) {
  body: record,
  ...__privateGet$3(this, _getFetchProps).call(this)
  });
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
  return initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), response, columns);
  };
- _insertRecordWithId = new WeakSet();
  insertRecordWithId_fn = async function(recordId, object, columns = ["*"], { createOnly, ifVersion }) {
- if (!recordId)
- return null;
- const record = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
+ if (!recordId) return null;
+ const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
  const response = await insertRecordWithID({
  pathParams: {
  workspace: "{workspaceId}",
@@ -3958,13 +5953,12 @@ insertRecordWithId_fn = async function(recordId, object, columns = ["*"], { crea
  queryParams: { createOnly, columns, ifVersion },
  ...__privateGet$3(this, _getFetchProps).call(this)
  });
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
  return initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), response, columns);
  };
- _insertRecords = new WeakSet();
  insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
  const operations = await promiseMap(objects, async (object) => {
- const record = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
+ const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
  return { insert: { table: __privateGet$3(this, _table), record, createOnly, ifVersion } };
  });
  const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
@@ -3989,11 +5983,9 @@ insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
  }
  return ids;
  };
- _updateRecordWithID = new WeakSet();
  updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVersion }) {
- if (!recordId)
- return null;
- const { id: _id, ...record } = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
+ if (!recordId) return null;
+ const { id: _id, ...record } = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
  try {
  const response = await updateRecordWithID({
  pathParams: {
@@ -4007,7 +5999,7 @@ updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
  body: record,
  ...__privateGet$3(this, _getFetchProps).call(this)
  });
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
  return initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), response, columns);
  } catch (e) {
  if (isObject(e) && e.status === 404) {
@@ -4016,10 +6008,9 @@ updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
  throw e;
  }
  };
- _updateRecords = new WeakSet();
  updateRecords_fn = async function(objects, { ifVersion, upsert }) {
  const operations = await promiseMap(objects, async ({ id, ...object }) => {
- const fields = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
+ const fields = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
  return { update: { table: __privateGet$3(this, _table), id, ifVersion, upsert, fields } };
  });
  const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
@@ -4044,10 +6035,8 @@ updateRecords_fn = async function(objects, { ifVersion, upsert }) {
  }
  return ids;
  };
- _upsertRecordWithID = new WeakSet();
  upsertRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVersion }) {
- if (!recordId)
- return null;
+ if (!recordId) return null;
  const response = await upsertRecordWithID({
  pathParams: {
  workspace: "{workspaceId}",
@@ -4060,13 +6049,11 @@ upsertRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
  body: object,
  ...__privateGet$3(this, _getFetchProps).call(this)
  });
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
  return initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), response, columns);
  };
- _deleteRecord = new WeakSet();
  deleteRecord_fn = async function(recordId, columns = ["*"]) {
- if (!recordId)
- return null;
+ if (!recordId) return null;
  try {
  const response = await deleteRecord({
  pathParams: {
@@ -4079,7 +6066,7 @@ deleteRecord_fn = async function(recordId, columns = ["*"]) {
  queryParams: { columns },
  ...__privateGet$3(this, _getFetchProps).call(this)
  });
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
  return initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), response, columns);
  } catch (e) {
  if (isObject(e) && e.status === 404) {
@@ -4088,7 +6075,6 @@ deleteRecord_fn = async function(recordId, columns = ["*"]) {
  throw e;
  }
  };
- _deleteRecords = new WeakSet();
  deleteRecords_fn = async function(recordIds) {
  const chunkedOperations = chunk(
  compact(recordIds).map((id) => ({ delete: { table: __privateGet$3(this, _table), id } })),
@@ -4106,27 +6092,21 @@ deleteRecords_fn = async function(recordIds) {
  });
  }
  };
- _setCacheQuery = new WeakSet();
  setCacheQuery_fn = async function(query, meta, records) {
  await __privateGet$3(this, _cache)?.set(`query_${__privateGet$3(this, _table)}:${query.key()}`, { date: /* @__PURE__ */ new Date(), meta, records });
  };
- _getCacheQuery = new WeakSet();
  getCacheQuery_fn = async function(query) {
  const key = `query_${__privateGet$3(this, _table)}:${query.key()}`;
  const result = await __privateGet$3(this, _cache)?.get(key);
- if (!result)
- return null;
+ if (!result) return null;
  const defaultTTL = __privateGet$3(this, _cache)?.defaultQueryTTL ?? -1;
  const { cache: ttl = defaultTTL } = query.getQueryOptions();
- if (ttl < 0)
- return null;
+ if (ttl < 0) return null;
  const hasExpired = result.date.getTime() + ttl < Date.now();
  return hasExpired ? null : result;
  };
- _getSchemaTables = new WeakSet();
  getSchemaTables_fn = async function() {
- if (__privateGet$3(this, _schemaTables))
- return __privateGet$3(this, _schemaTables);
+ if (__privateGet$3(this, _schemaTables)) return __privateGet$3(this, _schemaTables);
  const { schema } = await getBranchDetails({
  pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
  ...__privateGet$3(this, _getFetchProps).call(this)
@@ -4134,16 +6114,13 @@ getSchemaTables_fn = async function() {
  __privateSet$2(this, _schemaTables, schema.tables);
  return schema.tables;
  };
- _transformObjectToApi = new WeakSet();
  transformObjectToApi_fn = async function(object) {
- const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
+ const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
  const schema = schemaTables.find((table) => table.name === __privateGet$3(this, _table));
- if (!schema)
- throw new Error(`Table ${__privateGet$3(this, _table)} not found in schema`);
+ if (!schema) throw new Error(`Table ${__privateGet$3(this, _table)} not found in schema`);
  const result = {};
  for (const [key, value] of Object.entries(object)) {
- if (key === "xata")
- continue;
+ if (key === "xata") continue;
  const type = schema.columns.find((column) => column.name === key)?.type;
  switch (type) {
  case "link": {
@@ -4174,11 +6151,9 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
  const { xata, ...rest } = object ?? {};
  Object.assign(data, rest);
  const { columns } = schemaTables.find(({ name }) => name === table) ?? {};
- if (!columns)
- console.error(`Table ${table} not found in schema`);
+ if (!columns) console.error(`Table ${table} not found in schema`);
  for (const column of columns ?? []) {
- if (!isValidColumn(selectedColumns, column))
- continue;
+ if (!isValidColumn(selectedColumns, column)) continue;
  const value = data[column.name];
  switch (column.type) {
  case "datetime": {
@@ -4271,15 +6246,12 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
  return record;
  };
  function extractId(value) {
- if (isString(value))
- return value;
- if (isObject(value) && isString(value.id))
- return value.id;
+ if (isString(value)) return value;
+ if (isObject(value) && isString(value.id)) return value.id;
  return void 0;
  }
  function isValidColumn(columns, column) {
- if (columns.includes("*"))
- return true;
+ if (columns.includes("*")) return true;
  return columns.filter((item) => isString(item) && item.startsWith(column.name)).length > 0;
  }
  function parseIfVersion(...args) {
@@ -4291,28 +6263,17 @@ function parseIfVersion(...args) {
  return void 0;
  }

- var __accessCheck$3 = (obj, member, msg) => {
- if (!member.has(obj))
- throw TypeError("Cannot " + msg);
- };
- var __privateGet$2 = (obj, member, getter) => {
- __accessCheck$3(obj, member, "read from private field");
- return getter ? getter.call(obj) : member.get(obj);
- };
- var __privateAdd$3 = (obj, member, value) => {
- if (member.has(obj))
- throw TypeError("Cannot add the same private member more than once");
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
- };
- var __privateSet$1 = (obj, member, value, setter) => {
- __accessCheck$3(obj, member, "write to private field");
- setter ? setter.call(obj, value) : member.set(obj, value);
- return value;
+ var __typeError$3 = (msg) => {
+ throw TypeError(msg);
  };
+ var __accessCheck$3 = (obj, member, msg) => member.has(obj) || __typeError$3("Cannot " + msg);
+ var __privateGet$2 = (obj, member, getter) => (__accessCheck$3(obj, member, "read from private field"), member.get(obj));
+ var __privateAdd$3 = (obj, member, value) => member.has(obj) ? __typeError$3("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
+ var __privateSet$1 = (obj, member, value, setter) => (__accessCheck$3(obj, member, "write to private field"), member.set(obj, value), value);
  var _map;
  class SimpleCache {
  constructor(options = {}) {
- __privateAdd$3(this, _map, void 0);
+ __privateAdd$3(this, _map);
  __privateSet$1(this, _map, /* @__PURE__ */ new Map());
  this.capacity = options.max ?? 500;
  this.defaultQueryTTL = options.defaultQueryTTL ?? 60 * 1e3;
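`SimpleCache` keeps its defaults (`max: 500` entries, `defaultQueryTTL` of 60 seconds), and `getCacheQuery_fn` above treats a negative TTL as a disabled cache. A small construction sketch, assuming the class is re-exported from the package entry point like the other plugins; how a given setup wires it into the client is not shown in this diff:

```ts
import { SimpleCache } from "@xata.io/client";

// At most 1000 cached queries, each considered fresh for 30 seconds.
const cache = new SimpleCache({ max: 1000, defaultQueryTTL: 30_000 });
```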
@@ -4368,19 +6329,12 @@ const includesAll = (value) => ({ $includesAll: value });
  const includesNone = (value) => ({ $includesNone: value });
  const includesAny = (value) => ({ $includesAny: value });

- var __accessCheck$2 = (obj, member, msg) => {
- if (!member.has(obj))
- throw TypeError("Cannot " + msg);
- };
- var __privateGet$1 = (obj, member, getter) => {
- __accessCheck$2(obj, member, "read from private field");
- return getter ? getter.call(obj) : member.get(obj);
- };
- var __privateAdd$2 = (obj, member, value) => {
- if (member.has(obj))
- throw TypeError("Cannot add the same private member more than once");
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
+ var __typeError$2 = (msg) => {
+ throw TypeError(msg);
  };
+ var __accessCheck$2 = (obj, member, msg) => member.has(obj) || __typeError$2("Cannot " + msg);
+ var __privateGet$1 = (obj, member, getter) => (__accessCheck$2(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
+ var __privateAdd$2 = (obj, member, value) => member.has(obj) ? __typeError$2("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
  var _tables;
  class SchemaPlugin extends XataPlugin {
  constructor() {
@@ -4392,8 +6346,7 @@ class SchemaPlugin extends XataPlugin {
  {},
  {
  get: (_target, table) => {
- if (!isString(table))
- throw new Error("Invalid table name");
+ if (!isString(table)) throw new Error("Invalid table name");
  if (__privateGet$1(this, _tables)[table] === void 0) {
  __privateGet$1(this, _tables)[table] = new RestRepository({ db, pluginOptions, table, schemaTables: pluginOptions.tables });
  }
@@ -4484,30 +6437,23 @@ function getContentType(file) {
  return "application/octet-stream";
  }

- var __accessCheck$1 = (obj, member, msg) => {
- if (!member.has(obj))
- throw TypeError("Cannot " + msg);
- };
- var __privateAdd$1 = (obj, member, value) => {
- if (member.has(obj))
- throw TypeError("Cannot add the same private member more than once");
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
+ var __typeError$1 = (msg) => {
+ throw TypeError(msg);
  };
- var __privateMethod$1 = (obj, member, method) => {
- __accessCheck$1(obj, member, "access private method");
- return method;
- };
- var _search, search_fn;
+ var __accessCheck$1 = (obj, member, msg) => member.has(obj) || __typeError$1("Cannot " + msg);
+ var __privateAdd$1 = (obj, member, value) => member.has(obj) ? __typeError$1("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
+ var __privateMethod$1 = (obj, member, method) => (__accessCheck$1(obj, member, "access private method"), method);
+ var _SearchPlugin_instances, search_fn;
  class SearchPlugin extends XataPlugin {
  constructor(db) {
  super();
  this.db = db;
- __privateAdd$1(this, _search);
+ __privateAdd$1(this, _SearchPlugin_instances);
  }
  build(pluginOptions) {
  return {
  all: async (query, options = {}) => {
- const { records, totalCount } = await __privateMethod$1(this, _search, search_fn).call(this, query, options, pluginOptions);
+ const { records, totalCount } = await __privateMethod$1(this, _SearchPlugin_instances, search_fn).call(this, query, options, pluginOptions);
  return {
  totalCount,
  records: records.map((record) => {
@@ -4517,7 +6463,7 @@ class SearchPlugin extends XataPlugin {
  };
  },
  byTable: async (query, options = {}) => {
- const { records: rawRecords, totalCount } = await __privateMethod$1(this, _search, search_fn).call(this, query, options, pluginOptions);
+ const { records: rawRecords, totalCount } = await __privateMethod$1(this, _SearchPlugin_instances, search_fn).call(this, query, options, pluginOptions);
  const records = rawRecords.reduce((acc, record) => {
  const { table = "orphan" } = record.xata;
  const items = acc[table] ?? [];
@@ -4529,7 +6475,7 @@ class SearchPlugin extends XataPlugin {
  };
  }
  }
- _search = new WeakSet();
+ _SearchPlugin_instances = new WeakSet();
  search_fn = async function(query, options, pluginOptions) {
  const { tables, fuzziness, highlight, prefix, page } = options ?? {};
  const { records, totalCount } = await searchBranch({
@@ -4565,8 +6511,7 @@ function arrayString(val) {
  return result;
  }
  function prepareValue(value) {
- if (!isDefined(value))
- return null;
+ if (!isDefined(value)) return null;
  if (value instanceof Date) {
  return value.toISOString();
  }
@@ -4619,6 +6564,18 @@ class SQLPlugin extends XataPlugin {
  return { records, rows, warning, columns };
  };
  sqlFunction.connectionString = buildConnectionString(pluginOptions);
+ sqlFunction.batch = async (query) => {
+ const { results } = await sqlBatchQuery({
+ pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
+ body: {
+ statements: query.statements.map(({ statement, params }) => ({ statement, params })),
+ consistency: query.consistency,
+ responseType: query.responseType
+ },
+ ...pluginOptions
+ });
+ return { results };
+ };
  return sqlFunction;
  }
  }
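The `sqlFunction.batch` block above is the main runtime addition in this release: it forwards a list of statements to the new `sqlBatchQuery` operation (also added to the exports below) and returns the per-statement `results`. A hedged usage sketch; the exact typings live in `index.d.ts`, and the `consistency` value shown is an assumption:

```ts
// Stand-in typing for a client instance whose `sql` plugin is built by SQLPlugin above.
declare const xata: {
  sql: {
    batch(query: {
      statements: { statement: string; params?: unknown[] }[];
      consistency?: string;
      responseType?: string;
    }): Promise<{ results: unknown[] }>;
  };
};

const { results } = await xata.sql.batch({
  statements: [
    { statement: "SELECT * FROM users WHERE id = $1", params: ["rec_abc123"] },
    { statement: "SELECT count(*) AS total FROM users" }
  ],
  consistency: "strong" // forwarded as-is in the sqlBatchQuery request body
});
```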
@@ -4645,8 +6602,7 @@ function buildDomain(host, region) {
  function buildConnectionString({ apiKey, workspacesApiUrl, branch }) {
  const url = isString(workspacesApiUrl) ? workspacesApiUrl : workspacesApiUrl("", {});
  const parts = parseWorkspacesUrlParts(url);
- if (!parts)
- throw new Error("Invalid workspaces URL");
+ if (!parts) throw new Error("Invalid workspaces URL");
  const { workspace: workspaceSlug, region, database, host } = parts;
  const domain = buildDomain(host, region);
  const workspace = workspaceSlug.split("-").pop();
@@ -4671,39 +6627,24 @@ class TransactionPlugin extends XataPlugin {
  }
  }

- var __accessCheck = (obj, member, msg) => {
- if (!member.has(obj))
- throw TypeError("Cannot " + msg);
- };
- var __privateGet = (obj, member, getter) => {
- __accessCheck(obj, member, "read from private field");
- return getter ? getter.call(obj) : member.get(obj);
- };
- var __privateAdd = (obj, member, value) => {
- if (member.has(obj))
- throw TypeError("Cannot add the same private member more than once");
- member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
- };
- var __privateSet = (obj, member, value, setter) => {
- __accessCheck(obj, member, "write to private field");
- setter ? setter.call(obj, value) : member.set(obj, value);
- return value;
- };
- var __privateMethod = (obj, member, method) => {
- __accessCheck(obj, member, "access private method");
- return method;
+ var __typeError = (msg) => {
+ throw TypeError(msg);
  };
+ var __accessCheck = (obj, member, msg) => member.has(obj) || __typeError("Cannot " + msg);
+ var __privateGet = (obj, member, getter) => (__accessCheck(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
+ var __privateAdd = (obj, member, value) => member.has(obj) ? __typeError("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
+ var __privateSet = (obj, member, value, setter) => (__accessCheck(obj, member, "write to private field"), member.set(obj, value), value);
+ var __privateMethod = (obj, member, method) => (__accessCheck(obj, member, "access private method"), method);
  const buildClient = (plugins) => {
- var _options, _parseOptions, parseOptions_fn, _getFetchProps, getFetchProps_fn, _a;
+ var _options, _instances, parseOptions_fn, getFetchProps_fn, _a;
  return _a = class {
  constructor(options = {}, tables) {
- __privateAdd(this, _parseOptions);
- __privateAdd(this, _getFetchProps);
- __privateAdd(this, _options, void 0);
- const safeOptions = __privateMethod(this, _parseOptions, parseOptions_fn).call(this, options);
+ __privateAdd(this, _instances);
+ __privateAdd(this, _options);
+ const safeOptions = __privateMethod(this, _instances, parseOptions_fn).call(this, options);
  __privateSet(this, _options, safeOptions);
  const pluginOptions = {
- ...__privateMethod(this, _getFetchProps, getFetchProps_fn).call(this, safeOptions),
+ ...__privateMethod(this, _instances, getFetchProps_fn).call(this, safeOptions),
  cache: safeOptions.cache,
  host: safeOptions.host,
  tables,
@@ -4721,8 +6662,7 @@ const buildClient = (plugins) => {
  this.sql = sql;
  this.files = files;
  for (const [key, namespace] of Object.entries(plugins ?? {})) {
- if (namespace === void 0)
- continue;
+ if (namespace === void 0) continue;
  this[key] = namespace.build(pluginOptions);
  }
  }
@@ -4731,7 +6671,7 @@ const buildClient = (plugins) => {
  const branch = __privateGet(this, _options).branch;
  return { databaseURL, branch };
  }
- }, _options = new WeakMap(), _parseOptions = new WeakSet(), parseOptions_fn = function(options) {
+ }, _options = new WeakMap(), _instances = new WeakSet(), parseOptions_fn = function(options) {
  const enableBrowser = options?.enableBrowser ?? getEnableBrowserVariable() ?? false;
  const isBrowser = typeof window !== "undefined" && typeof Deno === "undefined";
  if (isBrowser && !enableBrowser) {
@@ -4786,7 +6726,7 @@ const buildClient = (plugins) => {
  clientName,
  xataAgentExtra
  };
- }, _getFetchProps = new WeakSet(), getFetchProps_fn = function({
+ }, getFetchProps_fn = function({
  fetch,
  apiKey,
  databaseURL,
@@ -4827,26 +6767,19 @@ class Serializer {
  }
  toJSON(data) {
  function visit(obj) {
- if (Array.isArray(obj))
- return obj.map(visit);
+ if (Array.isArray(obj)) return obj.map(visit);
  const type = typeof obj;
- if (type === "undefined")
- return { [META]: "undefined" };
- if (type === "bigint")
- return { [META]: "bigint", [VALUE]: obj.toString() };
- if (obj === null || type !== "object")
- return obj;
+ if (type === "undefined") return { [META]: "undefined" };
+ if (type === "bigint") return { [META]: "bigint", [VALUE]: obj.toString() };
+ if (obj === null || type !== "object") return obj;
  const constructor = obj.constructor;
  const o = { [META]: constructor.name };
  for (const [key, value] of Object.entries(obj)) {
  o[key] = visit(value);
  }
- if (constructor === Date)
- o[VALUE] = obj.toISOString();
- if (constructor === Map)
- o[VALUE] = Object.fromEntries(obj);
- if (constructor === Set)
- o[VALUE] = [...obj];
+ if (constructor === Date) o[VALUE] = obj.toISOString();
+ if (constructor === Map) o[VALUE] = Object.fromEntries(obj);
+ if (constructor === Set) o[VALUE] = [...obj];
  return o;
  }
  return JSON.stringify(visit(data));
@@ -4859,16 +6792,11 @@ class Serializer {
  if (constructor) {
  return Object.assign(Object.create(constructor.prototype), rest);
  }
- if (clazz === "Date")
- return new Date(val);
- if (clazz === "Set")
- return new Set(val);
- if (clazz === "Map")
- return new Map(Object.entries(val));
- if (clazz === "bigint")
- return BigInt(val);
- if (clazz === "undefined")
- return void 0;
+ if (clazz === "Date") return new Date(val);
+ if (clazz === "Set") return new Set(val);
+ if (clazz === "Map") return new Map(Object.entries(val));
+ if (clazz === "bigint") return BigInt(val);
+ if (clazz === "undefined") return void 0;
  return rest;
  }
  return value;
@@ -4891,6 +6819,7 @@ class XataError extends Error {
  }

  exports.BaseClient = BaseClient;
+ exports.Buffer = Buffer;
  exports.FetcherError = FetcherError;
  exports.FilesPlugin = FilesPlugin;
  exports.Operations = operationsByTag;
@@ -4917,6 +6846,7 @@ exports.XataError = XataError;
  exports.XataFile = XataFile;
  exports.XataPlugin = XataPlugin;
  exports.acceptWorkspaceMemberInvite = acceptWorkspaceMemberInvite;
+ exports.adaptAllTables = adaptAllTables;
  exports.adaptTable = adaptTable;
  exports.addGitBranchesEntry = addGitBranchesEntry;
  exports.addTableColumn = addTableColumn;
@@ -4934,6 +6864,7 @@ exports.cancelWorkspaceMemberInvite = cancelWorkspaceMemberInvite;
  exports.compareBranchSchemas = compareBranchSchemas;
  exports.compareBranchWithUserSchema = compareBranchWithUserSchema;
  exports.compareMigrationRequest = compareMigrationRequest;
+ exports.completeMigration = completeMigration;
  exports.contains = contains;
  exports.copyBranch = copyBranch;
  exports.createBranch = createBranch;
@@ -4944,6 +6875,7 @@ exports.createTable = createTable;
  exports.createUserAPIKey = createUserAPIKey;
  exports.createWorkspace = createWorkspace;
  exports.deleteBranch = deleteBranch;
+ exports.deleteCluster = deleteCluster;
  exports.deleteColumn = deleteColumn;
  exports.deleteDatabase = deleteDatabase;
  exports.deleteDatabaseGithubSettings = deleteDatabaseGithubSettings;
@@ -5053,11 +6985,14 @@ exports.removeWorkspaceMember = removeWorkspaceMember;
  exports.renameDatabase = renameDatabase;
  exports.resendWorkspaceMemberInvite = resendWorkspaceMemberInvite;
  exports.resolveBranch = resolveBranch;
+ exports.rollbackMigration = rollbackMigration;
  exports.searchBranch = searchBranch;
  exports.searchTable = searchTable;
  exports.serialize = serialize;
  exports.setTableSchema = setTableSchema;
+ exports.sqlBatchQuery = sqlBatchQuery;
  exports.sqlQuery = sqlQuery;
+ exports.startMigration = startMigration;
  exports.startsWith = startsWith;
  exports.summarizeTable = summarizeTable;
  exports.transformImage = transformImage;