@xata.io/client 0.0.0-next.v534362888c93b458fd4536898c3b687b59a5e171 → 0.0.0-next.v5bc7e82e3b5ca04db80756931b6ccba0db817c72
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-add-version.log +1 -1
- package/.turbo/turbo-build.log +4 -4
- package/CHANGELOG.md +11 -3
- package/dist/index.cjs +2426 -574
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.ts +4752 -3854
- package/dist/index.mjs +2420 -571
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.cjs
CHANGED
@@ -24,6 +24,1789 @@ const TraceAttributes = {
|
|
24
24
|
CLOUDFLARE_RAY_ID: "cf.ray"
|
25
25
|
};
|
26
26
|
|
27
|
+
const lookup = [];
|
28
|
+
const revLookup = [];
|
29
|
+
const code = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
|
30
|
+
for (let i = 0, len = code.length; i < len; ++i) {
|
31
|
+
lookup[i] = code[i];
|
32
|
+
revLookup[code.charCodeAt(i)] = i;
|
33
|
+
}
|
34
|
+
revLookup["-".charCodeAt(0)] = 62;
|
35
|
+
revLookup["_".charCodeAt(0)] = 63;
|
36
|
+
function getLens(b64) {
|
37
|
+
const len = b64.length;
|
38
|
+
if (len % 4 > 0) {
|
39
|
+
throw new Error("Invalid string. Length must be a multiple of 4");
|
40
|
+
}
|
41
|
+
let validLen = b64.indexOf("=");
|
42
|
+
if (validLen === -1) validLen = len;
|
43
|
+
const placeHoldersLen = validLen === len ? 0 : 4 - validLen % 4;
|
44
|
+
return [validLen, placeHoldersLen];
|
45
|
+
}
|
46
|
+
function _byteLength(_b64, validLen, placeHoldersLen) {
|
47
|
+
return (validLen + placeHoldersLen) * 3 / 4 - placeHoldersLen;
|
48
|
+
}
|
49
|
+
function toByteArray(b64) {
|
50
|
+
let tmp;
|
51
|
+
const lens = getLens(b64);
|
52
|
+
const validLen = lens[0];
|
53
|
+
const placeHoldersLen = lens[1];
|
54
|
+
const arr = new Uint8Array(_byteLength(b64, validLen, placeHoldersLen));
|
55
|
+
let curByte = 0;
|
56
|
+
const len = placeHoldersLen > 0 ? validLen - 4 : validLen;
|
57
|
+
let i;
|
58
|
+
for (i = 0; i < len; i += 4) {
|
59
|
+
tmp = revLookup[b64.charCodeAt(i)] << 18 | revLookup[b64.charCodeAt(i + 1)] << 12 | revLookup[b64.charCodeAt(i + 2)] << 6 | revLookup[b64.charCodeAt(i + 3)];
|
60
|
+
arr[curByte++] = tmp >> 16 & 255;
|
61
|
+
arr[curByte++] = tmp >> 8 & 255;
|
62
|
+
arr[curByte++] = tmp & 255;
|
63
|
+
}
|
64
|
+
if (placeHoldersLen === 2) {
|
65
|
+
tmp = revLookup[b64.charCodeAt(i)] << 2 | revLookup[b64.charCodeAt(i + 1)] >> 4;
|
66
|
+
arr[curByte++] = tmp & 255;
|
67
|
+
}
|
68
|
+
if (placeHoldersLen === 1) {
|
69
|
+
tmp = revLookup[b64.charCodeAt(i)] << 10 | revLookup[b64.charCodeAt(i + 1)] << 4 | revLookup[b64.charCodeAt(i + 2)] >> 2;
|
70
|
+
arr[curByte++] = tmp >> 8 & 255;
|
71
|
+
arr[curByte++] = tmp & 255;
|
72
|
+
}
|
73
|
+
return arr;
|
74
|
+
}
|
75
|
+
function tripletToBase64(num) {
|
76
|
+
return lookup[num >> 18 & 63] + lookup[num >> 12 & 63] + lookup[num >> 6 & 63] + lookup[num & 63];
|
77
|
+
}
|
78
|
+
function encodeChunk(uint8, start, end) {
|
79
|
+
let tmp;
|
80
|
+
const output = [];
|
81
|
+
for (let i = start; i < end; i += 3) {
|
82
|
+
tmp = (uint8[i] << 16 & 16711680) + (uint8[i + 1] << 8 & 65280) + (uint8[i + 2] & 255);
|
83
|
+
output.push(tripletToBase64(tmp));
|
84
|
+
}
|
85
|
+
return output.join("");
|
86
|
+
}
|
87
|
+
function fromByteArray(uint8) {
|
88
|
+
let tmp;
|
89
|
+
const len = uint8.length;
|
90
|
+
const extraBytes = len % 3;
|
91
|
+
const parts = [];
|
92
|
+
const maxChunkLength = 16383;
|
93
|
+
for (let i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) {
|
94
|
+
parts.push(encodeChunk(uint8, i, i + maxChunkLength > len2 ? len2 : i + maxChunkLength));
|
95
|
+
}
|
96
|
+
if (extraBytes === 1) {
|
97
|
+
tmp = uint8[len - 1];
|
98
|
+
parts.push(lookup[tmp >> 2] + lookup[tmp << 4 & 63] + "==");
|
99
|
+
} else if (extraBytes === 2) {
|
100
|
+
tmp = (uint8[len - 2] << 8) + uint8[len - 1];
|
101
|
+
parts.push(lookup[tmp >> 10] + lookup[tmp >> 4 & 63] + lookup[tmp << 2 & 63] + "=");
|
102
|
+
}
|
103
|
+
return parts.join("");
|
104
|
+
}
|
105
|
+
|
106
|
+
const K_MAX_LENGTH = 2147483647;
|
107
|
+
const MAX_ARGUMENTS_LENGTH = 4096;
|
108
|
+
class Buffer extends Uint8Array {
|
109
|
+
/**
|
110
|
+
* Constructs a new `Buffer` instance.
|
111
|
+
*
|
112
|
+
* @param value
|
113
|
+
* @param encodingOrOffset
|
114
|
+
* @param length
|
115
|
+
*/
|
116
|
+
constructor(value, encodingOrOffset, length) {
|
117
|
+
if (typeof value === "number") {
|
118
|
+
if (typeof encodingOrOffset === "string") {
|
119
|
+
throw new TypeError("The first argument must be of type string, received type number");
|
120
|
+
}
|
121
|
+
if (value < 0) {
|
122
|
+
throw new RangeError("The buffer size cannot be negative");
|
123
|
+
}
|
124
|
+
super(value < 0 ? 0 : Buffer._checked(value) | 0);
|
125
|
+
} else if (typeof value === "string") {
|
126
|
+
if (typeof encodingOrOffset !== "string") {
|
127
|
+
encodingOrOffset = "utf8";
|
128
|
+
}
|
129
|
+
if (!Buffer.isEncoding(encodingOrOffset)) {
|
130
|
+
throw new TypeError("Unknown encoding: " + encodingOrOffset);
|
131
|
+
}
|
132
|
+
const length2 = Buffer.byteLength(value, encodingOrOffset) | 0;
|
133
|
+
super(length2);
|
134
|
+
const written = this.write(value, 0, this.length, encodingOrOffset);
|
135
|
+
if (written !== length2) {
|
136
|
+
throw new TypeError(
|
137
|
+
"Number of bytes written did not match expected length (wrote " + written + ", expected " + length2 + ")"
|
138
|
+
);
|
139
|
+
}
|
140
|
+
} else if (ArrayBuffer.isView(value)) {
|
141
|
+
if (Buffer._isInstance(value, Uint8Array)) {
|
142
|
+
const copy = new Uint8Array(value);
|
143
|
+
const array = copy.buffer;
|
144
|
+
const byteOffset = copy.byteOffset;
|
145
|
+
const length2 = copy.byteLength;
|
146
|
+
if (byteOffset < 0 || array.byteLength < byteOffset) {
|
147
|
+
throw new RangeError("offset is outside of buffer bounds");
|
148
|
+
}
|
149
|
+
if (array.byteLength < byteOffset + (length2 || 0)) {
|
150
|
+
throw new RangeError("length is outside of buffer bounds");
|
151
|
+
}
|
152
|
+
super(new Uint8Array(array, byteOffset, length2));
|
153
|
+
} else {
|
154
|
+
const array = value;
|
155
|
+
const length2 = array.length < 0 ? 0 : Buffer._checked(array.length) | 0;
|
156
|
+
super(new Uint8Array(length2));
|
157
|
+
for (let i = 0; i < length2; i++) {
|
158
|
+
this[i] = array[i] & 255;
|
159
|
+
}
|
160
|
+
}
|
161
|
+
} else if (value == null) {
|
162
|
+
throw new TypeError(
|
163
|
+
"The first argument must be one of type string, Buffer, ArrayBuffer, Array, or Array-like Object. Received type " + typeof value
|
164
|
+
);
|
165
|
+
} else if (Buffer._isInstance(value, ArrayBuffer) || value && Buffer._isInstance(value.buffer, ArrayBuffer)) {
|
166
|
+
const array = value;
|
167
|
+
const byteOffset = encodingOrOffset;
|
168
|
+
if (byteOffset < 0 || array.byteLength < byteOffset) {
|
169
|
+
throw new RangeError("offset is outside of buffer bounds");
|
170
|
+
}
|
171
|
+
if (array.byteLength < byteOffset + (length || 0)) {
|
172
|
+
throw new RangeError("length is outside of buffer bounds");
|
173
|
+
}
|
174
|
+
super(new Uint8Array(array, byteOffset, length));
|
175
|
+
} else if (Array.isArray(value)) {
|
176
|
+
const array = value;
|
177
|
+
const length2 = array.length < 0 ? 0 : Buffer._checked(array.length) | 0;
|
178
|
+
super(new Uint8Array(length2));
|
179
|
+
for (let i = 0; i < length2; i++) {
|
180
|
+
this[i] = array[i] & 255;
|
181
|
+
}
|
182
|
+
} else {
|
183
|
+
throw new TypeError("Unable to determine the correct way to allocate buffer for type " + typeof value);
|
184
|
+
}
|
185
|
+
}
|
186
|
+
/**
|
187
|
+
* Return JSON representation of the buffer.
|
188
|
+
*/
|
189
|
+
toJSON() {
|
190
|
+
return {
|
191
|
+
type: "Buffer",
|
192
|
+
data: Array.prototype.slice.call(this)
|
193
|
+
};
|
194
|
+
}
|
195
|
+
/**
|
196
|
+
* Writes `string` to the buffer at `offset` according to the character encoding in `encoding`. The `length`
|
197
|
+
* parameter is the number of bytes to write. If the buffer does not contain enough space to fit the entire string,
|
198
|
+
* only part of `string` will be written. However, partially encoded characters will not be written.
|
199
|
+
*
|
200
|
+
* @param string String to write to `buf`.
|
201
|
+
* @param offset Number of bytes to skip before starting to write `string`. Default: `0`.
|
202
|
+
* @param length Maximum number of bytes to write: Default: `buf.length - offset`.
|
203
|
+
* @param encoding The character encoding of `string`. Default: `utf8`.
|
204
|
+
*/
|
205
|
+
write(string, offset, length, encoding) {
|
206
|
+
if (typeof offset === "undefined") {
|
207
|
+
encoding = "utf8";
|
208
|
+
length = this.length;
|
209
|
+
offset = 0;
|
210
|
+
} else if (typeof length === "undefined" && typeof offset === "string") {
|
211
|
+
encoding = offset;
|
212
|
+
length = this.length;
|
213
|
+
offset = 0;
|
214
|
+
} else if (typeof offset === "number" && isFinite(offset)) {
|
215
|
+
offset = offset >>> 0;
|
216
|
+
if (typeof length === "number" && isFinite(length)) {
|
217
|
+
length = length >>> 0;
|
218
|
+
encoding ?? (encoding = "utf8");
|
219
|
+
} else if (typeof length === "string") {
|
220
|
+
encoding = length;
|
221
|
+
length = void 0;
|
222
|
+
}
|
223
|
+
} else {
|
224
|
+
throw new Error("Buffer.write(string, encoding, offset[, length]) is no longer supported");
|
225
|
+
}
|
226
|
+
const remaining = this.length - offset;
|
227
|
+
if (typeof length === "undefined" || length > remaining) {
|
228
|
+
length = remaining;
|
229
|
+
}
|
230
|
+
if (string.length > 0 && (length < 0 || offset < 0) || offset > this.length) {
|
231
|
+
throw new RangeError("Attempt to write outside buffer bounds");
|
232
|
+
}
|
233
|
+
encoding || (encoding = "utf8");
|
234
|
+
switch (Buffer._getEncoding(encoding)) {
|
235
|
+
case "hex":
|
236
|
+
return Buffer._hexWrite(this, string, offset, length);
|
237
|
+
case "utf8":
|
238
|
+
return Buffer._utf8Write(this, string, offset, length);
|
239
|
+
case "ascii":
|
240
|
+
case "latin1":
|
241
|
+
case "binary":
|
242
|
+
return Buffer._asciiWrite(this, string, offset, length);
|
243
|
+
case "ucs2":
|
244
|
+
case "utf16le":
|
245
|
+
return Buffer._ucs2Write(this, string, offset, length);
|
246
|
+
case "base64":
|
247
|
+
return Buffer._base64Write(this, string, offset, length);
|
248
|
+
}
|
249
|
+
}
|
250
|
+
/**
|
251
|
+
* Decodes the buffer to a string according to the specified character encoding.
|
252
|
+
* Passing `start` and `end` will decode only a subset of the buffer.
|
253
|
+
*
|
254
|
+
* Note that if the encoding is `utf8` and a byte sequence in the input is not valid UTF-8, then each invalid byte
|
255
|
+
* will be replaced with `U+FFFD`.
|
256
|
+
*
|
257
|
+
* @param encoding
|
258
|
+
* @param start
|
259
|
+
* @param end
|
260
|
+
*/
|
261
|
+
toString(encoding, start, end) {
|
262
|
+
const length = this.length;
|
263
|
+
if (length === 0) {
|
264
|
+
return "";
|
265
|
+
}
|
266
|
+
if (arguments.length === 0) {
|
267
|
+
return Buffer._utf8Slice(this, 0, length);
|
268
|
+
}
|
269
|
+
if (typeof start === "undefined" || start < 0) {
|
270
|
+
start = 0;
|
271
|
+
}
|
272
|
+
if (start > this.length) {
|
273
|
+
return "";
|
274
|
+
}
|
275
|
+
if (typeof end === "undefined" || end > this.length) {
|
276
|
+
end = this.length;
|
277
|
+
}
|
278
|
+
if (end <= 0) {
|
279
|
+
return "";
|
280
|
+
}
|
281
|
+
end >>>= 0;
|
282
|
+
start >>>= 0;
|
283
|
+
if (end <= start) {
|
284
|
+
return "";
|
285
|
+
}
|
286
|
+
if (!encoding) {
|
287
|
+
encoding = "utf8";
|
288
|
+
}
|
289
|
+
switch (Buffer._getEncoding(encoding)) {
|
290
|
+
case "hex":
|
291
|
+
return Buffer._hexSlice(this, start, end);
|
292
|
+
case "utf8":
|
293
|
+
return Buffer._utf8Slice(this, start, end);
|
294
|
+
case "ascii":
|
295
|
+
return Buffer._asciiSlice(this, start, end);
|
296
|
+
case "latin1":
|
297
|
+
case "binary":
|
298
|
+
return Buffer._latin1Slice(this, start, end);
|
299
|
+
case "ucs2":
|
300
|
+
case "utf16le":
|
301
|
+
return Buffer._utf16leSlice(this, start, end);
|
302
|
+
case "base64":
|
303
|
+
return Buffer._base64Slice(this, start, end);
|
304
|
+
}
|
305
|
+
}
|
306
|
+
/**
|
307
|
+
* Returns true if this buffer's is equal to the provided buffer, meaning they share the same exact data.
|
308
|
+
*
|
309
|
+
* @param otherBuffer
|
310
|
+
*/
|
311
|
+
equals(otherBuffer) {
|
312
|
+
if (!Buffer.isBuffer(otherBuffer)) {
|
313
|
+
throw new TypeError("Argument must be a Buffer");
|
314
|
+
}
|
315
|
+
if (this === otherBuffer) {
|
316
|
+
return true;
|
317
|
+
}
|
318
|
+
return Buffer.compare(this, otherBuffer) === 0;
|
319
|
+
}
|
320
|
+
/**
|
321
|
+
* Compares the buffer with `otherBuffer` and returns a number indicating whether the buffer comes before, after,
|
322
|
+
* or is the same as `otherBuffer` in sort order. Comparison is based on the actual sequence of bytes in each
|
323
|
+
* buffer.
|
324
|
+
*
|
325
|
+
* - `0` is returned if `otherBuffer` is the same as this buffer.
|
326
|
+
* - `1` is returned if `otherBuffer` should come before this buffer when sorted.
|
327
|
+
* - `-1` is returned if `otherBuffer` should come after this buffer when sorted.
|
328
|
+
*
|
329
|
+
* @param otherBuffer The buffer to compare to.
|
330
|
+
* @param targetStart The offset within `otherBuffer` at which to begin comparison.
|
331
|
+
* @param targetEnd The offset within `otherBuffer` at which to end comparison (exclusive).
|
332
|
+
* @param sourceStart The offset within this buffer at which to begin comparison.
|
333
|
+
* @param sourceEnd The offset within this buffer at which to end the comparison (exclusive).
|
334
|
+
*/
|
335
|
+
compare(otherBuffer, targetStart, targetEnd, sourceStart, sourceEnd) {
|
336
|
+
if (Buffer._isInstance(otherBuffer, Uint8Array)) {
|
337
|
+
otherBuffer = Buffer.from(otherBuffer, otherBuffer.byteOffset, otherBuffer.byteLength);
|
338
|
+
}
|
339
|
+
if (!Buffer.isBuffer(otherBuffer)) {
|
340
|
+
throw new TypeError("Argument must be a Buffer or Uint8Array");
|
341
|
+
}
|
342
|
+
targetStart ?? (targetStart = 0);
|
343
|
+
targetEnd ?? (targetEnd = otherBuffer ? otherBuffer.length : 0);
|
344
|
+
sourceStart ?? (sourceStart = 0);
|
345
|
+
sourceEnd ?? (sourceEnd = this.length);
|
346
|
+
if (targetStart < 0 || targetEnd > otherBuffer.length || sourceStart < 0 || sourceEnd > this.length) {
|
347
|
+
throw new RangeError("Out of range index");
|
348
|
+
}
|
349
|
+
if (sourceStart >= sourceEnd && targetStart >= targetEnd) {
|
350
|
+
return 0;
|
351
|
+
}
|
352
|
+
if (sourceStart >= sourceEnd) {
|
353
|
+
return -1;
|
354
|
+
}
|
355
|
+
if (targetStart >= targetEnd) {
|
356
|
+
return 1;
|
357
|
+
}
|
358
|
+
targetStart >>>= 0;
|
359
|
+
targetEnd >>>= 0;
|
360
|
+
sourceStart >>>= 0;
|
361
|
+
sourceEnd >>>= 0;
|
362
|
+
if (this === otherBuffer) {
|
363
|
+
return 0;
|
364
|
+
}
|
365
|
+
let x = sourceEnd - sourceStart;
|
366
|
+
let y = targetEnd - targetStart;
|
367
|
+
const len = Math.min(x, y);
|
368
|
+
const thisCopy = this.slice(sourceStart, sourceEnd);
|
369
|
+
const targetCopy = otherBuffer.slice(targetStart, targetEnd);
|
370
|
+
for (let i = 0; i < len; ++i) {
|
371
|
+
if (thisCopy[i] !== targetCopy[i]) {
|
372
|
+
x = thisCopy[i];
|
373
|
+
y = targetCopy[i];
|
374
|
+
break;
|
375
|
+
}
|
376
|
+
}
|
377
|
+
if (x < y) return -1;
|
378
|
+
if (y < x) return 1;
|
379
|
+
return 0;
|
380
|
+
}
|
381
|
+
/**
|
382
|
+
* Copies data from a region of this buffer to a region in `targetBuffer`, even if the `targetBuffer` memory
|
383
|
+
* region overlaps with this buffer.
|
384
|
+
*
|
385
|
+
* @param targetBuffer The target buffer to copy into.
|
386
|
+
* @param targetStart The offset within `targetBuffer` at which to begin writing.
|
387
|
+
* @param sourceStart The offset within this buffer at which to begin copying.
|
388
|
+
* @param sourceEnd The offset within this buffer at which to end copying (exclusive).
|
389
|
+
*/
|
390
|
+
copy(targetBuffer, targetStart, sourceStart, sourceEnd) {
|
391
|
+
if (!Buffer.isBuffer(targetBuffer)) throw new TypeError("argument should be a Buffer");
|
392
|
+
if (!sourceStart) sourceStart = 0;
|
393
|
+
if (!targetStart) targetStart = 0;
|
394
|
+
if (!sourceEnd && sourceEnd !== 0) sourceEnd = this.length;
|
395
|
+
if (targetStart >= targetBuffer.length) targetStart = targetBuffer.length;
|
396
|
+
if (!targetStart) targetStart = 0;
|
397
|
+
if (sourceEnd > 0 && sourceEnd < sourceStart) sourceEnd = sourceStart;
|
398
|
+
if (sourceEnd === sourceStart) return 0;
|
399
|
+
if (targetBuffer.length === 0 || this.length === 0) return 0;
|
400
|
+
if (targetStart < 0) {
|
401
|
+
throw new RangeError("targetStart out of bounds");
|
402
|
+
}
|
403
|
+
if (sourceStart < 0 || sourceStart >= this.length) throw new RangeError("Index out of range");
|
404
|
+
if (sourceEnd < 0) throw new RangeError("sourceEnd out of bounds");
|
405
|
+
if (sourceEnd > this.length) sourceEnd = this.length;
|
406
|
+
if (targetBuffer.length - targetStart < sourceEnd - sourceStart) {
|
407
|
+
sourceEnd = targetBuffer.length - targetStart + sourceStart;
|
408
|
+
}
|
409
|
+
const len = sourceEnd - sourceStart;
|
410
|
+
if (this === targetBuffer && typeof Uint8Array.prototype.copyWithin === "function") {
|
411
|
+
this.copyWithin(targetStart, sourceStart, sourceEnd);
|
412
|
+
} else {
|
413
|
+
Uint8Array.prototype.set.call(targetBuffer, this.subarray(sourceStart, sourceEnd), targetStart);
|
414
|
+
}
|
415
|
+
return len;
|
416
|
+
}
|
417
|
+
/**
|
418
|
+
* Returns a new `Buffer` that references the same memory as the original, but offset and cropped by the `start`
|
419
|
+
* and `end` indices. This is the same behavior as `buf.subarray()`.
|
420
|
+
*
|
421
|
+
* This method is not compatible with the `Uint8Array.prototype.slice()`, which is a superclass of Buffer. To copy
|
422
|
+
* the slice, use `Uint8Array.prototype.slice()`.
|
423
|
+
*
|
424
|
+
* @param start
|
425
|
+
* @param end
|
426
|
+
*/
|
427
|
+
slice(start, end) {
|
428
|
+
if (!start) {
|
429
|
+
start = 0;
|
430
|
+
}
|
431
|
+
const len = this.length;
|
432
|
+
start = ~~start;
|
433
|
+
end = end === void 0 ? len : ~~end;
|
434
|
+
if (start < 0) {
|
435
|
+
start += len;
|
436
|
+
if (start < 0) {
|
437
|
+
start = 0;
|
438
|
+
}
|
439
|
+
} else if (start > len) {
|
440
|
+
start = len;
|
441
|
+
}
|
442
|
+
if (end < 0) {
|
443
|
+
end += len;
|
444
|
+
if (end < 0) {
|
445
|
+
end = 0;
|
446
|
+
}
|
447
|
+
} else if (end > len) {
|
448
|
+
end = len;
|
449
|
+
}
|
450
|
+
if (end < start) {
|
451
|
+
end = start;
|
452
|
+
}
|
453
|
+
const newBuf = this.subarray(start, end);
|
454
|
+
Object.setPrototypeOf(newBuf, Buffer.prototype);
|
455
|
+
return newBuf;
|
456
|
+
}
|
457
|
+
/**
|
458
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits
|
459
|
+
* of accuracy. Behavior is undefined when value is anything other than an unsigned integer.
|
460
|
+
*
|
461
|
+
* @param value Number to write.
|
462
|
+
* @param offset Number of bytes to skip before starting to write.
|
463
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
464
|
+
* @param noAssert
|
465
|
+
* @returns `offset` plus the number of bytes written.
|
466
|
+
*/
|
467
|
+
writeUIntLE(value, offset, byteLength, noAssert) {
|
468
|
+
value = +value;
|
469
|
+
offset = offset >>> 0;
|
470
|
+
byteLength = byteLength >>> 0;
|
471
|
+
if (!noAssert) {
|
472
|
+
const maxBytes = Math.pow(2, 8 * byteLength) - 1;
|
473
|
+
Buffer._checkInt(this, value, offset, byteLength, maxBytes, 0);
|
474
|
+
}
|
475
|
+
let mul = 1;
|
476
|
+
let i = 0;
|
477
|
+
this[offset] = value & 255;
|
478
|
+
while (++i < byteLength && (mul *= 256)) {
|
479
|
+
this[offset + i] = value / mul & 255;
|
480
|
+
}
|
481
|
+
return offset + byteLength;
|
482
|
+
}
|
483
|
+
/**
|
484
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits of
|
485
|
+
* accuracy. Behavior is undefined when `value` is anything other than an unsigned integer.
|
486
|
+
*
|
487
|
+
* @param value Number to write.
|
488
|
+
* @param offset Number of bytes to skip before starting to write.
|
489
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
490
|
+
* @param noAssert
|
491
|
+
* @returns `offset` plus the number of bytes written.
|
492
|
+
*/
|
493
|
+
writeUIntBE(value, offset, byteLength, noAssert) {
|
494
|
+
value = +value;
|
495
|
+
offset = offset >>> 0;
|
496
|
+
byteLength = byteLength >>> 0;
|
497
|
+
if (!noAssert) {
|
498
|
+
const maxBytes = Math.pow(2, 8 * byteLength) - 1;
|
499
|
+
Buffer._checkInt(this, value, offset, byteLength, maxBytes, 0);
|
500
|
+
}
|
501
|
+
let i = byteLength - 1;
|
502
|
+
let mul = 1;
|
503
|
+
this[offset + i] = value & 255;
|
504
|
+
while (--i >= 0 && (mul *= 256)) {
|
505
|
+
this[offset + i] = value / mul & 255;
|
506
|
+
}
|
507
|
+
return offset + byteLength;
|
508
|
+
}
|
509
|
+
/**
|
510
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits
|
511
|
+
* of accuracy. Behavior is undefined when `value` is anything other than a signed integer.
|
512
|
+
*
|
513
|
+
* @param value Number to write.
|
514
|
+
* @param offset Number of bytes to skip before starting to write.
|
515
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
516
|
+
* @param noAssert
|
517
|
+
* @returns `offset` plus the number of bytes written.
|
518
|
+
*/
|
519
|
+
writeIntLE(value, offset, byteLength, noAssert) {
|
520
|
+
value = +value;
|
521
|
+
offset = offset >>> 0;
|
522
|
+
if (!noAssert) {
|
523
|
+
const limit = Math.pow(2, 8 * byteLength - 1);
|
524
|
+
Buffer._checkInt(this, value, offset, byteLength, limit - 1, -limit);
|
525
|
+
}
|
526
|
+
let i = 0;
|
527
|
+
let mul = 1;
|
528
|
+
let sub = 0;
|
529
|
+
this[offset] = value & 255;
|
530
|
+
while (++i < byteLength && (mul *= 256)) {
|
531
|
+
if (value < 0 && sub === 0 && this[offset + i - 1] !== 0) {
|
532
|
+
sub = 1;
|
533
|
+
}
|
534
|
+
this[offset + i] = (value / mul >> 0) - sub & 255;
|
535
|
+
}
|
536
|
+
return offset + byteLength;
|
537
|
+
}
|
538
|
+
/**
|
539
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits
|
540
|
+
* of accuracy. Behavior is undefined when `value` is anything other than a signed integer.
|
541
|
+
*
|
542
|
+
* @param value Number to write.
|
543
|
+
* @param offset Number of bytes to skip before starting to write.
|
544
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
545
|
+
* @param noAssert
|
546
|
+
* @returns `offset` plus the number of bytes written.
|
547
|
+
*/
|
548
|
+
writeIntBE(value, offset, byteLength, noAssert) {
|
549
|
+
value = +value;
|
550
|
+
offset = offset >>> 0;
|
551
|
+
if (!noAssert) {
|
552
|
+
const limit = Math.pow(2, 8 * byteLength - 1);
|
553
|
+
Buffer._checkInt(this, value, offset, byteLength, limit - 1, -limit);
|
554
|
+
}
|
555
|
+
let i = byteLength - 1;
|
556
|
+
let mul = 1;
|
557
|
+
let sub = 0;
|
558
|
+
this[offset + i] = value & 255;
|
559
|
+
while (--i >= 0 && (mul *= 256)) {
|
560
|
+
if (value < 0 && sub === 0 && this[offset + i + 1] !== 0) {
|
561
|
+
sub = 1;
|
562
|
+
}
|
563
|
+
this[offset + i] = (value / mul >> 0) - sub & 255;
|
564
|
+
}
|
565
|
+
return offset + byteLength;
|
566
|
+
}
|
567
|
+
/**
|
568
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an
|
569
|
+
* unsigned, little-endian integer supporting up to 48 bits of accuracy.
|
570
|
+
*
|
571
|
+
* @param offset Number of bytes to skip before starting to read.
|
572
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
573
|
+
* @param noAssert
|
574
|
+
*/
|
575
|
+
readUIntLE(offset, byteLength, noAssert) {
|
576
|
+
offset = offset >>> 0;
|
577
|
+
byteLength = byteLength >>> 0;
|
578
|
+
if (!noAssert) {
|
579
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
580
|
+
}
|
581
|
+
let val = this[offset];
|
582
|
+
let mul = 1;
|
583
|
+
let i = 0;
|
584
|
+
while (++i < byteLength && (mul *= 256)) {
|
585
|
+
val += this[offset + i] * mul;
|
586
|
+
}
|
587
|
+
return val;
|
588
|
+
}
|
589
|
+
/**
|
590
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an
|
591
|
+
* unsigned, big-endian integer supporting up to 48 bits of accuracy.
|
592
|
+
*
|
593
|
+
* @param offset Number of bytes to skip before starting to read.
|
594
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
595
|
+
* @param noAssert
|
596
|
+
*/
|
597
|
+
readUIntBE(offset, byteLength, noAssert) {
|
598
|
+
offset = offset >>> 0;
|
599
|
+
byteLength = byteLength >>> 0;
|
600
|
+
if (!noAssert) {
|
601
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
602
|
+
}
|
603
|
+
let val = this[offset + --byteLength];
|
604
|
+
let mul = 1;
|
605
|
+
while (byteLength > 0 && (mul *= 256)) {
|
606
|
+
val += this[offset + --byteLength] * mul;
|
607
|
+
}
|
608
|
+
return val;
|
609
|
+
}
|
610
|
+
/**
|
611
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a
|
612
|
+
* little-endian, two's complement signed value supporting up to 48 bits of accuracy.
|
613
|
+
*
|
614
|
+
* @param offset Number of bytes to skip before starting to read.
|
615
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
616
|
+
* @param noAssert
|
617
|
+
*/
|
618
|
+
readIntLE(offset, byteLength, noAssert) {
|
619
|
+
offset = offset >>> 0;
|
620
|
+
byteLength = byteLength >>> 0;
|
621
|
+
if (!noAssert) {
|
622
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
623
|
+
}
|
624
|
+
let val = this[offset];
|
625
|
+
let mul = 1;
|
626
|
+
let i = 0;
|
627
|
+
while (++i < byteLength && (mul *= 256)) {
|
628
|
+
val += this[offset + i] * mul;
|
629
|
+
}
|
630
|
+
mul *= 128;
|
631
|
+
if (val >= mul) {
|
632
|
+
val -= Math.pow(2, 8 * byteLength);
|
633
|
+
}
|
634
|
+
return val;
|
635
|
+
}
|
636
|
+
/**
|
637
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a
|
638
|
+
* big-endian, two's complement signed value supporting up to 48 bits of accuracy.
|
639
|
+
*
|
640
|
+
* @param offset Number of bytes to skip before starting to read.
|
641
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
642
|
+
* @param noAssert
|
643
|
+
*/
|
644
|
+
readIntBE(offset, byteLength, noAssert) {
|
645
|
+
offset = offset >>> 0;
|
646
|
+
byteLength = byteLength >>> 0;
|
647
|
+
if (!noAssert) {
|
648
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
649
|
+
}
|
650
|
+
let i = byteLength;
|
651
|
+
let mul = 1;
|
652
|
+
let val = this[offset + --i];
|
653
|
+
while (i > 0 && (mul *= 256)) {
|
654
|
+
val += this[offset + --i] * mul;
|
655
|
+
}
|
656
|
+
mul *= 128;
|
657
|
+
if (val >= mul) {
|
658
|
+
val -= Math.pow(2, 8 * byteLength);
|
659
|
+
}
|
660
|
+
return val;
|
661
|
+
}
|
662
|
+
/**
|
663
|
+
* Reads an unsigned 8-bit integer from `buf` at the specified `offset`.
|
664
|
+
*
|
665
|
+
* @param offset Number of bytes to skip before starting to read.
|
666
|
+
* @param noAssert
|
667
|
+
*/
|
668
|
+
readUInt8(offset, noAssert) {
|
669
|
+
offset = offset >>> 0;
|
670
|
+
if (!noAssert) {
|
671
|
+
Buffer._checkOffset(offset, 1, this.length);
|
672
|
+
}
|
673
|
+
return this[offset];
|
674
|
+
}
|
675
|
+
/**
|
676
|
+
* Reads an unsigned, little-endian 16-bit integer from `buf` at the specified `offset`.
|
677
|
+
*
|
678
|
+
* @param offset Number of bytes to skip before starting to read.
|
679
|
+
* @param noAssert
|
680
|
+
*/
|
681
|
+
readUInt16LE(offset, noAssert) {
|
682
|
+
offset = offset >>> 0;
|
683
|
+
if (!noAssert) {
|
684
|
+
Buffer._checkOffset(offset, 2, this.length);
|
685
|
+
}
|
686
|
+
return this[offset] | this[offset + 1] << 8;
|
687
|
+
}
|
688
|
+
/**
|
689
|
+
* Reads an unsigned, big-endian 16-bit integer from `buf` at the specified `offset`.
|
690
|
+
*
|
691
|
+
* @param offset Number of bytes to skip before starting to read.
|
692
|
+
* @param noAssert
|
693
|
+
*/
|
694
|
+
readUInt16BE(offset, noAssert) {
|
695
|
+
offset = offset >>> 0;
|
696
|
+
if (!noAssert) {
|
697
|
+
Buffer._checkOffset(offset, 2, this.length);
|
698
|
+
}
|
699
|
+
return this[offset] << 8 | this[offset + 1];
|
700
|
+
}
|
701
|
+
/**
|
702
|
+
* Reads an unsigned, little-endian 32-bit integer from `buf` at the specified `offset`.
|
703
|
+
*
|
704
|
+
* @param offset Number of bytes to skip before starting to read.
|
705
|
+
* @param noAssert
|
706
|
+
*/
|
707
|
+
readUInt32LE(offset, noAssert) {
|
708
|
+
offset = offset >>> 0;
|
709
|
+
if (!noAssert) {
|
710
|
+
Buffer._checkOffset(offset, 4, this.length);
|
711
|
+
}
|
712
|
+
return (this[offset] | this[offset + 1] << 8 | this[offset + 2] << 16) + this[offset + 3] * 16777216;
|
713
|
+
}
|
714
|
+
/**
|
715
|
+
* Reads an unsigned, big-endian 32-bit integer from `buf` at the specified `offset`.
|
716
|
+
*
|
717
|
+
* @param offset Number of bytes to skip before starting to read.
|
718
|
+
* @param noAssert
|
719
|
+
*/
|
720
|
+
readUInt32BE(offset, noAssert) {
|
721
|
+
offset = offset >>> 0;
|
722
|
+
if (!noAssert) {
|
723
|
+
Buffer._checkOffset(offset, 4, this.length);
|
724
|
+
}
|
725
|
+
return this[offset] * 16777216 + (this[offset + 1] << 16 | this[offset + 2] << 8 | this[offset + 3]);
|
726
|
+
}
|
727
|
+
/**
|
728
|
+
* Reads a signed 8-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer` are interpreted
|
729
|
+
* as two's complement signed values.
|
730
|
+
*
|
731
|
+
* @param offset Number of bytes to skip before starting to read.
|
732
|
+
* @param noAssert
|
733
|
+
*/
|
734
|
+
readInt8(offset, noAssert) {
|
735
|
+
offset = offset >>> 0;
|
736
|
+
if (!noAssert) {
|
737
|
+
Buffer._checkOffset(offset, 1, this.length);
|
738
|
+
}
|
739
|
+
if (!(this[offset] & 128)) {
|
740
|
+
return this[offset];
|
741
|
+
}
|
742
|
+
return (255 - this[offset] + 1) * -1;
|
743
|
+
}
|
744
|
+
/**
|
745
|
+
* Reads a signed, little-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
746
|
+
* are interpreted as two's complement signed values.
|
747
|
+
*
|
748
|
+
* @param offset Number of bytes to skip before starting to read.
|
749
|
+
* @param noAssert
|
750
|
+
*/
|
751
|
+
readInt16LE(offset, noAssert) {
|
752
|
+
offset = offset >>> 0;
|
753
|
+
if (!noAssert) {
|
754
|
+
Buffer._checkOffset(offset, 2, this.length);
|
755
|
+
}
|
756
|
+
const val = this[offset] | this[offset + 1] << 8;
|
757
|
+
return val & 32768 ? val | 4294901760 : val;
|
758
|
+
}
|
759
|
+
/**
|
760
|
+
* Reads a signed, big-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
761
|
+
* are interpreted as two's complement signed values.
|
762
|
+
*
|
763
|
+
* @param offset Number of bytes to skip before starting to read.
|
764
|
+
* @param noAssert
|
765
|
+
*/
|
766
|
+
readInt16BE(offset, noAssert) {
|
767
|
+
offset = offset >>> 0;
|
768
|
+
if (!noAssert) {
|
769
|
+
Buffer._checkOffset(offset, 2, this.length);
|
770
|
+
}
|
771
|
+
const val = this[offset + 1] | this[offset] << 8;
|
772
|
+
return val & 32768 ? val | 4294901760 : val;
|
773
|
+
}
|
774
|
+
/**
|
775
|
+
* Reads a signed, little-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
776
|
+
* are interpreted as two's complement signed values.
|
777
|
+
*
|
778
|
+
* @param offset Number of bytes to skip before starting to read.
|
779
|
+
* @param noAssert
|
780
|
+
*/
|
781
|
+
readInt32LE(offset, noAssert) {
|
782
|
+
offset = offset >>> 0;
|
783
|
+
if (!noAssert) {
|
784
|
+
Buffer._checkOffset(offset, 4, this.length);
|
785
|
+
}
|
786
|
+
return this[offset] | this[offset + 1] << 8 | this[offset + 2] << 16 | this[offset + 3] << 24;
|
787
|
+
}
|
788
|
+
/**
|
789
|
+
* Reads a signed, big-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
790
|
+
* are interpreted as two's complement signed values.
|
791
|
+
*
|
792
|
+
* @param offset Number of bytes to skip before starting to read.
|
793
|
+
* @param noAssert
|
794
|
+
*/
|
795
|
+
readInt32BE(offset, noAssert) {
|
796
|
+
offset = offset >>> 0;
|
797
|
+
if (!noAssert) {
|
798
|
+
Buffer._checkOffset(offset, 4, this.length);
|
799
|
+
}
|
800
|
+
return this[offset] << 24 | this[offset + 1] << 16 | this[offset + 2] << 8 | this[offset + 3];
|
801
|
+
}
|
802
|
+
/**
|
803
|
+
* Interprets `buf` as an array of unsigned 16-bit integers and swaps the byte order in-place.
|
804
|
+
* Throws a `RangeError` if `buf.length` is not a multiple of 2.
|
805
|
+
*/
|
806
|
+
swap16() {
|
807
|
+
const len = this.length;
|
808
|
+
if (len % 2 !== 0) {
|
809
|
+
throw new RangeError("Buffer size must be a multiple of 16-bits");
|
810
|
+
}
|
811
|
+
for (let i = 0; i < len; i += 2) {
|
812
|
+
this._swap(this, i, i + 1);
|
813
|
+
}
|
814
|
+
return this;
|
815
|
+
}
|
816
|
+
/**
|
817
|
+
* Interprets `buf` as an array of unsigned 32-bit integers and swaps the byte order in-place.
|
818
|
+
* Throws a `RangeError` if `buf.length` is not a multiple of 4.
|
819
|
+
*/
|
820
|
+
swap32() {
|
821
|
+
const len = this.length;
|
822
|
+
if (len % 4 !== 0) {
|
823
|
+
throw new RangeError("Buffer size must be a multiple of 32-bits");
|
824
|
+
}
|
825
|
+
for (let i = 0; i < len; i += 4) {
|
826
|
+
this._swap(this, i, i + 3);
|
827
|
+
this._swap(this, i + 1, i + 2);
|
828
|
+
}
|
829
|
+
return this;
|
830
|
+
}
|
831
|
+
/**
|
832
|
+
* Interprets `buf` as an array of unsigned 64-bit integers and swaps the byte order in-place.
|
833
|
+
* Throws a `RangeError` if `buf.length` is not a multiple of 8.
|
834
|
+
*/
|
835
|
+
swap64() {
|
836
|
+
const len = this.length;
|
837
|
+
if (len % 8 !== 0) {
|
838
|
+
throw new RangeError("Buffer size must be a multiple of 64-bits");
|
839
|
+
}
|
840
|
+
for (let i = 0; i < len; i += 8) {
|
841
|
+
this._swap(this, i, i + 7);
|
842
|
+
this._swap(this, i + 1, i + 6);
|
843
|
+
this._swap(this, i + 2, i + 5);
|
844
|
+
this._swap(this, i + 3, i + 4);
|
845
|
+
}
|
846
|
+
return this;
|
847
|
+
}
|
848
|
+
/**
|
849
|
+
* Swaps two octets.
|
850
|
+
*
|
851
|
+
* @param b
|
852
|
+
* @param n
|
853
|
+
* @param m
|
854
|
+
*/
|
855
|
+
_swap(b, n, m) {
|
856
|
+
const i = b[n];
|
857
|
+
b[n] = b[m];
|
858
|
+
b[m] = i;
|
859
|
+
}
|
860
|
+
/**
|
861
|
+
* Writes `value` to `buf` at the specified `offset`. The `value` must be a valid unsigned 8-bit integer.
|
862
|
+
* Behavior is undefined when `value` is anything other than an unsigned 8-bit integer.
|
863
|
+
*
|
864
|
+
* @param value Number to write.
|
865
|
+
* @param offset Number of bytes to skip before starting to write.
|
866
|
+
* @param noAssert
|
867
|
+
* @returns `offset` plus the number of bytes written.
|
868
|
+
*/
|
869
|
+
writeUInt8(value, offset, noAssert) {
|
870
|
+
value = +value;
|
871
|
+
offset = offset >>> 0;
|
872
|
+
if (!noAssert) {
|
873
|
+
Buffer._checkInt(this, value, offset, 1, 255, 0);
|
874
|
+
}
|
875
|
+
this[offset] = value & 255;
|
876
|
+
return offset + 1;
|
877
|
+
}
|
878
|
+
/**
|
879
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 16-bit
|
880
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 16-bit integer.
|
881
|
+
*
|
882
|
+
* @param value Number to write.
|
883
|
+
* @param offset Number of bytes to skip before starting to write.
|
884
|
+
* @param noAssert
|
885
|
+
* @returns `offset` plus the number of bytes written.
|
886
|
+
*/
|
887
|
+
writeUInt16LE(value, offset, noAssert) {
|
888
|
+
value = +value;
|
889
|
+
offset = offset >>> 0;
|
890
|
+
if (!noAssert) {
|
891
|
+
Buffer._checkInt(this, value, offset, 2, 65535, 0);
|
892
|
+
}
|
893
|
+
this[offset] = value & 255;
|
894
|
+
this[offset + 1] = value >>> 8;
|
895
|
+
return offset + 2;
|
896
|
+
}
|
897
|
+
/**
|
898
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 16-bit
|
899
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 16-bit integer.
|
900
|
+
*
|
901
|
+
* @param value Number to write.
|
902
|
+
* @param offset Number of bytes to skip before starting to write.
|
903
|
+
* @param noAssert
|
904
|
+
* @returns `offset` plus the number of bytes written.
|
905
|
+
*/
|
906
|
+
writeUInt16BE(value, offset, noAssert) {
|
907
|
+
value = +value;
|
908
|
+
offset = offset >>> 0;
|
909
|
+
if (!noAssert) {
|
910
|
+
Buffer._checkInt(this, value, offset, 2, 65535, 0);
|
911
|
+
}
|
912
|
+
this[offset] = value >>> 8;
|
913
|
+
this[offset + 1] = value & 255;
|
914
|
+
return offset + 2;
|
915
|
+
}
|
916
|
+
/**
|
917
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 32-bit
|
918
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 32-bit integer.
|
919
|
+
*
|
920
|
+
* @param value Number to write.
|
921
|
+
* @param offset Number of bytes to skip before starting to write.
|
922
|
+
* @param noAssert
|
923
|
+
* @returns `offset` plus the number of bytes written.
|
924
|
+
*/
|
925
|
+
writeUInt32LE(value, offset, noAssert) {
|
926
|
+
value = +value;
|
927
|
+
offset = offset >>> 0;
|
928
|
+
if (!noAssert) {
|
929
|
+
Buffer._checkInt(this, value, offset, 4, 4294967295, 0);
|
930
|
+
}
|
931
|
+
this[offset + 3] = value >>> 24;
|
932
|
+
this[offset + 2] = value >>> 16;
|
933
|
+
this[offset + 1] = value >>> 8;
|
934
|
+
this[offset] = value & 255;
|
935
|
+
return offset + 4;
|
936
|
+
}
|
937
|
+
/**
|
938
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 32-bit
|
939
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 32-bit integer.
|
940
|
+
*
|
941
|
+
* @param value Number to write.
|
942
|
+
* @param offset Number of bytes to skip before starting to write.
|
943
|
+
* @param noAssert
|
944
|
+
* @returns `offset` plus the number of bytes written.
|
945
|
+
*/
|
946
|
+
writeUInt32BE(value, offset, noAssert) {
|
947
|
+
value = +value;
|
948
|
+
offset = offset >>> 0;
|
949
|
+
if (!noAssert) {
|
950
|
+
Buffer._checkInt(this, value, offset, 4, 4294967295, 0);
|
951
|
+
}
|
952
|
+
this[offset] = value >>> 24;
|
953
|
+
this[offset + 1] = value >>> 16;
|
954
|
+
this[offset + 2] = value >>> 8;
|
955
|
+
this[offset + 3] = value & 255;
|
956
|
+
return offset + 4;
|
957
|
+
}
|
958
|
+
/**
|
959
|
+
* Writes `value` to `buf` at the specified `offset`. The `value` must be a valid signed 8-bit integer.
|
960
|
+
* Behavior is undefined when `value` is anything other than a signed 8-bit integer.
|
961
|
+
*
|
962
|
+
* @param value Number to write.
|
963
|
+
* @param offset Number of bytes to skip before starting to write.
|
964
|
+
* @param noAssert
|
965
|
+
* @returns `offset` plus the number of bytes written.
|
966
|
+
*/
|
967
|
+
writeInt8(value, offset, noAssert) {
|
968
|
+
value = +value;
|
969
|
+
offset = offset >>> 0;
|
970
|
+
if (!noAssert) {
|
971
|
+
Buffer._checkInt(this, value, offset, 1, 127, -128);
|
972
|
+
}
|
973
|
+
if (value < 0) {
|
974
|
+
value = 255 + value + 1;
|
975
|
+
}
|
976
|
+
this[offset] = value & 255;
|
977
|
+
return offset + 1;
|
978
|
+
}
|
979
|
+
/**
|
980
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 16-bit
|
981
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 16-bit integer.
|
982
|
+
*
|
983
|
+
* @param value Number to write.
|
984
|
+
* @param offset Number of bytes to skip before starting to write.
|
985
|
+
* @param noAssert
|
986
|
+
* @returns `offset` plus the number of bytes written.
|
987
|
+
*/
|
988
|
+
writeInt16LE(value, offset, noAssert) {
|
989
|
+
value = +value;
|
990
|
+
offset = offset >>> 0;
|
991
|
+
if (!noAssert) {
|
992
|
+
Buffer._checkInt(this, value, offset, 2, 32767, -32768);
|
993
|
+
}
|
994
|
+
this[offset] = value & 255;
|
995
|
+
this[offset + 1] = value >>> 8;
|
996
|
+
return offset + 2;
|
997
|
+
}
|
998
|
+
/**
|
999
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 16-bit
|
1000
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 16-bit integer.
|
1001
|
+
*
|
1002
|
+
* @param value Number to write.
|
1003
|
+
* @param offset Number of bytes to skip before starting to write.
|
1004
|
+
* @param noAssert
|
1005
|
+
* @returns `offset` plus the number of bytes written.
|
1006
|
+
*/
|
1007
|
+
writeInt16BE(value, offset, noAssert) {
|
1008
|
+
value = +value;
|
1009
|
+
offset = offset >>> 0;
|
1010
|
+
if (!noAssert) {
|
1011
|
+
Buffer._checkInt(this, value, offset, 2, 32767, -32768);
|
1012
|
+
}
|
1013
|
+
this[offset] = value >>> 8;
|
1014
|
+
this[offset + 1] = value & 255;
|
1015
|
+
return offset + 2;
|
1016
|
+
}
|
1017
|
+
/**
|
1018
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 32-bit
|
1019
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 32-bit integer.
|
1020
|
+
*
|
1021
|
+
* @param value Number to write.
|
1022
|
+
* @param offset Number of bytes to skip before starting to write.
|
1023
|
+
* @param noAssert
|
1024
|
+
* @returns `offset` plus the number of bytes written.
|
1025
|
+
*/
|
1026
|
+
writeInt32LE(value, offset, noAssert) {
|
1027
|
+
value = +value;
|
1028
|
+
offset = offset >>> 0;
|
1029
|
+
if (!noAssert) {
|
1030
|
+
Buffer._checkInt(this, value, offset, 4, 2147483647, -2147483648);
|
1031
|
+
}
|
1032
|
+
this[offset] = value & 255;
|
1033
|
+
this[offset + 1] = value >>> 8;
|
1034
|
+
this[offset + 2] = value >>> 16;
|
1035
|
+
this[offset + 3] = value >>> 24;
|
1036
|
+
return offset + 4;
|
1037
|
+
}
|
1038
|
+
/**
|
1039
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 32-bit
|
1040
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 32-bit integer.
|
1041
|
+
*
|
1042
|
+
* @param value Number to write.
|
1043
|
+
* @param offset Number of bytes to skip before starting to write.
|
1044
|
+
* @param noAssert
|
1045
|
+
* @returns `offset` plus the number of bytes written.
|
1046
|
+
*/
|
1047
|
+
writeInt32BE(value, offset, noAssert) {
|
1048
|
+
value = +value;
|
1049
|
+
offset = offset >>> 0;
|
1050
|
+
if (!noAssert) {
|
1051
|
+
Buffer._checkInt(this, value, offset, 4, 2147483647, -2147483648);
|
1052
|
+
}
|
1053
|
+
if (value < 0) {
|
1054
|
+
value = 4294967295 + value + 1;
|
1055
|
+
}
|
1056
|
+
this[offset] = value >>> 24;
|
1057
|
+
this[offset + 1] = value >>> 16;
|
1058
|
+
this[offset + 2] = value >>> 8;
|
1059
|
+
this[offset + 3] = value & 255;
|
1060
|
+
return offset + 4;
|
1061
|
+
}
|
1062
|
+
/**
|
1063
|
+
* Fills `buf` with the specified `value`. If the `offset` and `end` are not given, the entire `buf` will be
|
1064
|
+
* filled. The `value` is coerced to a `uint32` value if it is not a string, `Buffer`, or integer. If the resulting
|
1065
|
+
* integer is greater than `255` (decimal), then `buf` will be filled with `value & 255`.
|
1066
|
+
*
|
1067
|
+
* If the final write of a `fill()` operation falls on a multi-byte character, then only the bytes of that
|
1068
|
+
* character that fit into `buf` are written.
|
1069
|
+
*
|
1070
|
+
* If `value` contains invalid characters, it is truncated; if no valid fill data remains, an exception is thrown.
|
1071
|
+
*
|
1072
|
+
* @param value
|
1073
|
+
* @param encoding
|
1074
|
+
*/
|
1075
|
+
fill(value, offset, end, encoding) {
|
1076
|
+
if (typeof value === "string") {
|
1077
|
+
if (typeof offset === "string") {
|
1078
|
+
encoding = offset;
|
1079
|
+
offset = 0;
|
1080
|
+
end = this.length;
|
1081
|
+
} else if (typeof end === "string") {
|
1082
|
+
encoding = end;
|
1083
|
+
end = this.length;
|
1084
|
+
}
|
1085
|
+
if (encoding !== void 0 && typeof encoding !== "string") {
|
1086
|
+
throw new TypeError("encoding must be a string");
|
1087
|
+
}
|
1088
|
+
if (typeof encoding === "string" && !Buffer.isEncoding(encoding)) {
|
1089
|
+
throw new TypeError("Unknown encoding: " + encoding);
|
1090
|
+
}
|
1091
|
+
if (value.length === 1) {
|
1092
|
+
const code = value.charCodeAt(0);
|
1093
|
+
if (encoding === "utf8" && code < 128) {
|
1094
|
+
value = code;
|
1095
|
+
}
|
1096
|
+
}
|
1097
|
+
} else if (typeof value === "number") {
|
1098
|
+
value = value & 255;
|
1099
|
+
} else if (typeof value === "boolean") {
|
1100
|
+
value = Number(value);
|
1101
|
+
}
|
1102
|
+
offset ?? (offset = 0);
|
1103
|
+
end ?? (end = this.length);
|
1104
|
+
if (offset < 0 || this.length < offset || this.length < end) {
|
1105
|
+
throw new RangeError("Out of range index");
|
1106
|
+
}
|
1107
|
+
if (end <= offset) {
|
1108
|
+
return this;
|
1109
|
+
}
|
1110
|
+
offset = offset >>> 0;
|
1111
|
+
end = end === void 0 ? this.length : end >>> 0;
|
1112
|
+
value || (value = 0);
|
1113
|
+
let i;
|
1114
|
+
if (typeof value === "number") {
|
1115
|
+
for (i = offset; i < end; ++i) {
|
1116
|
+
this[i] = value;
|
1117
|
+
}
|
1118
|
+
} else {
|
1119
|
+
const bytes = Buffer.isBuffer(value) ? value : Buffer.from(value, encoding);
|
1120
|
+
const len = bytes.length;
|
1121
|
+
if (len === 0) {
|
1122
|
+
throw new TypeError('The value "' + value + '" is invalid for argument "value"');
|
1123
|
+
}
|
1124
|
+
for (i = 0; i < end - offset; ++i) {
|
1125
|
+
this[i + offset] = bytes[i % len];
|
1126
|
+
}
|
1127
|
+
}
|
1128
|
+
return this;
|
1129
|
+
}
|
1130
|
+
/**
|
1131
|
+
* Returns the index of the specified value.
|
1132
|
+
*
|
1133
|
+
* If `value` is:
|
1134
|
+
* - a string, `value` is interpreted according to the character encoding in `encoding`.
|
1135
|
+
* - a `Buffer` or `Uint8Array`, `value` will be used in its entirety. To compare a partial Buffer, use `slice()`.
|
1136
|
+
* - a number, `value` will be interpreted as an unsigned 8-bit integer value between `0` and `255`.
|
1137
|
+
*
|
1138
|
+
* Any other types will throw a `TypeError`.
|
1139
|
+
*
|
1140
|
+
* @param value What to search for.
|
1141
|
+
* @param byteOffset Where to begin searching in `buf`. If negative, then calculated from the end.
|
1142
|
+
* @param encoding If `value` is a string, this is the encoding used to search.
|
1143
|
+
* @returns The index of the first occurrence of `value` in `buf`, or `-1` if not found.
|
1144
|
+
*/
|
1145
|
+
indexOf(value, byteOffset, encoding) {
|
1146
|
+
return this._bidirectionalIndexOf(this, value, byteOffset, encoding, true);
|
1147
|
+
}
|
1148
|
+
/**
|
1149
|
+
* Gets the last index of the specified value.
|
1150
|
+
*
|
1151
|
+
* @see indexOf()
|
1152
|
+
* @param value
|
1153
|
+
* @param byteOffset
|
1154
|
+
* @param encoding
|
1155
|
+
*/
|
1156
|
+
lastIndexOf(value, byteOffset, encoding) {
|
1157
|
+
return this._bidirectionalIndexOf(this, value, byteOffset, encoding, false);
|
1158
|
+
}
|
1159
|
+
_bidirectionalIndexOf(buffer, val, byteOffset, encoding, dir) {
|
1160
|
+
if (buffer.length === 0) {
|
1161
|
+
return -1;
|
1162
|
+
}
|
1163
|
+
if (typeof byteOffset === "string") {
|
1164
|
+
encoding = byteOffset;
|
1165
|
+
byteOffset = 0;
|
1166
|
+
} else if (typeof byteOffset === "undefined") {
|
1167
|
+
byteOffset = 0;
|
1168
|
+
} else if (byteOffset > 2147483647) {
|
1169
|
+
byteOffset = 2147483647;
|
1170
|
+
} else if (byteOffset < -2147483648) {
|
1171
|
+
byteOffset = -2147483648;
|
1172
|
+
}
|
1173
|
+
byteOffset = +byteOffset;
|
1174
|
+
if (byteOffset !== byteOffset) {
|
1175
|
+
byteOffset = dir ? 0 : buffer.length - 1;
|
1176
|
+
}
|
1177
|
+
if (byteOffset < 0) {
|
1178
|
+
byteOffset = buffer.length + byteOffset;
|
1179
|
+
}
|
1180
|
+
if (byteOffset >= buffer.length) {
|
1181
|
+
if (dir) {
|
1182
|
+
return -1;
|
1183
|
+
} else {
|
1184
|
+
byteOffset = buffer.length - 1;
|
1185
|
+
}
|
1186
|
+
} else if (byteOffset < 0) {
|
1187
|
+
if (dir) {
|
1188
|
+
byteOffset = 0;
|
1189
|
+
} else {
|
1190
|
+
return -1;
|
1191
|
+
}
|
1192
|
+
}
|
1193
|
+
if (typeof val === "string") {
|
1194
|
+
val = Buffer.from(val, encoding);
|
1195
|
+
}
|
1196
|
+
if (Buffer.isBuffer(val)) {
|
1197
|
+
if (val.length === 0) {
|
1198
|
+
return -1;
|
1199
|
+
}
|
1200
|
+
return Buffer._arrayIndexOf(buffer, val, byteOffset, encoding, dir);
|
1201
|
+
} else if (typeof val === "number") {
|
1202
|
+
val = val & 255;
|
1203
|
+
if (typeof Uint8Array.prototype.indexOf === "function") {
|
1204
|
+
if (dir) {
|
1205
|
+
return Uint8Array.prototype.indexOf.call(buffer, val, byteOffset);
|
1206
|
+
} else {
|
1207
|
+
return Uint8Array.prototype.lastIndexOf.call(buffer, val, byteOffset);
|
1208
|
+
}
|
1209
|
+
}
|
1210
|
+
return Buffer._arrayIndexOf(buffer, Buffer.from([val]), byteOffset, encoding, dir);
|
1211
|
+
}
|
1212
|
+
throw new TypeError("val must be string, number or Buffer");
|
1213
|
+
}
|
1214
|
+
/**
|
1215
|
+
* Equivalent to `buf.indexOf() !== -1`.
|
1216
|
+
*
|
1217
|
+
* @param value
|
1218
|
+
* @param byteOffset
|
1219
|
+
* @param encoding
|
1220
|
+
*/
|
1221
|
+
includes(value, byteOffset, encoding) {
|
1222
|
+
return this.indexOf(value, byteOffset, encoding) !== -1;
|
1223
|
+
}
|
1224
|
+
/**
|
1225
|
+
* Creates a new buffer from the given parameters.
|
1226
|
+
*
|
1227
|
+
* @param data
|
1228
|
+
* @param encoding
|
1229
|
+
*/
|
1230
|
+
static from(a, b, c) {
|
1231
|
+
return new Buffer(a, b, c);
|
1232
|
+
}
|
1233
|
+
/**
|
1234
|
+
* Returns true if `obj` is a Buffer.
|
1235
|
+
*
|
1236
|
+
* @param obj
|
1237
|
+
*/
|
1238
|
+
static isBuffer(obj) {
|
1239
|
+
return obj != null && obj !== Buffer.prototype && Buffer._isInstance(obj, Buffer);
|
1240
|
+
}
|
1241
|
+
/**
|
1242
|
+
* Returns true if `encoding` is a supported encoding.
|
1243
|
+
*
|
1244
|
+
* @param encoding
|
1245
|
+
*/
|
1246
|
+
static isEncoding(encoding) {
|
1247
|
+
switch (encoding.toLowerCase()) {
|
1248
|
+
case "hex":
|
1249
|
+
case "utf8":
|
1250
|
+
case "ascii":
|
1251
|
+
case "binary":
|
1252
|
+
case "latin1":
|
1253
|
+
case "ucs2":
|
1254
|
+
case "utf16le":
|
1255
|
+
case "base64":
|
1256
|
+
return true;
|
1257
|
+
default:
|
1258
|
+
return false;
|
1259
|
+
}
|
1260
|
+
}
|
1261
|
+
/**
|
1262
|
+
* Gives the actual byte length of a string for an encoding. This is not the same as `string.length` since that
|
1263
|
+
* returns the number of characters in the string.
|
1264
|
+
*
|
1265
|
+
* @param string The string to test.
|
1266
|
+
* @param encoding The encoding to use for calculation. Defaults is `utf8`.
|
1267
|
+
*/
|
1268
|
+
static byteLength(string, encoding) {
|
1269
|
+
if (Buffer.isBuffer(string)) {
|
1270
|
+
return string.length;
|
1271
|
+
}
|
1272
|
+
if (typeof string !== "string" && (ArrayBuffer.isView(string) || Buffer._isInstance(string, ArrayBuffer))) {
|
1273
|
+
return string.byteLength;
|
1274
|
+
}
|
1275
|
+
if (typeof string !== "string") {
|
1276
|
+
throw new TypeError(
|
1277
|
+
'The "string" argument must be one of type string, Buffer, or ArrayBuffer. Received type ' + typeof string
|
1278
|
+
);
|
1279
|
+
}
|
1280
|
+
const len = string.length;
|
1281
|
+
const mustMatch = arguments.length > 2 && arguments[2] === true;
|
1282
|
+
if (!mustMatch && len === 0) {
|
1283
|
+
return 0;
|
1284
|
+
}
|
1285
|
+
switch (encoding?.toLowerCase()) {
|
1286
|
+
case "ascii":
|
1287
|
+
case "latin1":
|
1288
|
+
case "binary":
|
1289
|
+
return len;
|
1290
|
+
case "utf8":
|
1291
|
+
return Buffer._utf8ToBytes(string).length;
|
1292
|
+
case "hex":
|
1293
|
+
return len >>> 1;
|
1294
|
+
case "ucs2":
|
1295
|
+
case "utf16le":
|
1296
|
+
return len * 2;
|
1297
|
+
case "base64":
|
1298
|
+
return Buffer._base64ToBytes(string).length;
|
1299
|
+
default:
|
1300
|
+
return mustMatch ? -1 : Buffer._utf8ToBytes(string).length;
|
1301
|
+
}
|
1302
|
+
}
|
1303
|
+
/**
|
1304
|
+
* Returns a Buffer which is the result of concatenating all the buffers in the list together.
|
1305
|
+
*
|
1306
|
+
* - If the list has no items, or if the `totalLength` is 0, then it returns a zero-length buffer.
|
1307
|
+
* - If the list has exactly one item, then the first item is returned.
|
1308
|
+
* - If the list has more than one item, then a new buffer is created.
|
1309
|
+
*
|
1310
|
+
* It is faster to provide the `totalLength` if it is known. However, it will be calculated if not provided at
|
1311
|
+
* a small computational expense.
|
1312
|
+
*
|
1313
|
+
* @param list An array of Buffer objects to concatenate.
|
1314
|
+
* @param totalLength Total length of the buffers when concatenated.
|
1315
|
+
*/
|
1316
|
+
static concat(list, totalLength) {
|
1317
|
+
if (!Array.isArray(list)) {
|
1318
|
+
throw new TypeError('"list" argument must be an Array of Buffers');
|
1319
|
+
}
|
1320
|
+
if (list.length === 0) {
|
1321
|
+
return Buffer.alloc(0);
|
1322
|
+
}
|
1323
|
+
let i;
|
1324
|
+
if (totalLength === void 0) {
|
1325
|
+
totalLength = 0;
|
1326
|
+
for (i = 0; i < list.length; ++i) {
|
1327
|
+
totalLength += list[i].length;
|
1328
|
+
}
|
1329
|
+
}
|
1330
|
+
const buffer = Buffer.allocUnsafe(totalLength);
|
1331
|
+
let pos = 0;
|
1332
|
+
for (i = 0; i < list.length; ++i) {
|
1333
|
+
let buf = list[i];
|
1334
|
+
if (Buffer._isInstance(buf, Uint8Array)) {
|
1335
|
+
if (pos + buf.length > buffer.length) {
|
1336
|
+
if (!Buffer.isBuffer(buf)) {
|
1337
|
+
buf = Buffer.from(buf);
|
1338
|
+
}
|
1339
|
+
buf.copy(buffer, pos);
|
1340
|
+
} else {
|
1341
|
+
Uint8Array.prototype.set.call(buffer, buf, pos);
|
1342
|
+
}
|
1343
|
+
} else if (!Buffer.isBuffer(buf)) {
|
1344
|
+
throw new TypeError('"list" argument must be an Array of Buffers');
|
1345
|
+
} else {
|
1346
|
+
buf.copy(buffer, pos);
|
1347
|
+
}
|
1348
|
+
pos += buf.length;
|
1349
|
+
}
|
1350
|
+
return buffer;
|
1351
|
+
}
|
1352
|
+
/**
|
1353
|
+
* The same as `buf1.compare(buf2)`.
|
1354
|
+
*/
|
1355
|
+
static compare(buf1, buf2) {
|
1356
|
+
if (Buffer._isInstance(buf1, Uint8Array)) {
|
1357
|
+
buf1 = Buffer.from(buf1, buf1.byteOffset, buf1.byteLength);
|
1358
|
+
}
|
1359
|
+
if (Buffer._isInstance(buf2, Uint8Array)) {
|
1360
|
+
buf2 = Buffer.from(buf2, buf2.byteOffset, buf2.byteLength);
|
1361
|
+
}
|
1362
|
+
if (!Buffer.isBuffer(buf1) || !Buffer.isBuffer(buf2)) {
|
1363
|
+
throw new TypeError('The "buf1", "buf2" arguments must be one of type Buffer or Uint8Array');
|
1364
|
+
}
|
1365
|
+
if (buf1 === buf2) {
|
1366
|
+
return 0;
|
1367
|
+
}
|
1368
|
+
let x = buf1.length;
|
1369
|
+
let y = buf2.length;
|
1370
|
+
for (let i = 0, len = Math.min(x, y); i < len; ++i) {
|
1371
|
+
if (buf1[i] !== buf2[i]) {
|
1372
|
+
x = buf1[i];
|
1373
|
+
y = buf2[i];
|
1374
|
+
break;
|
1375
|
+
}
|
1376
|
+
}
|
1377
|
+
if (x < y) {
|
1378
|
+
return -1;
|
1379
|
+
}
|
1380
|
+
if (y < x) {
|
1381
|
+
return 1;
|
1382
|
+
}
|
1383
|
+
return 0;
|
1384
|
+
}
|
1385
|
+
/**
|
1386
|
+
* Allocates a new buffer of `size` octets.
|
1387
|
+
*
|
1388
|
+
* @param size The number of octets to allocate.
|
1389
|
+
* @param fill If specified, the buffer will be initialized by calling `buf.fill(fill)`, or with zeroes otherwise.
|
1390
|
+
* @param encoding The encoding used for the call to `buf.fill()` while initializing.
|
1391
|
+
*/
|
1392
|
+
static alloc(size, fill, encoding) {
|
1393
|
+
if (typeof size !== "number") {
|
1394
|
+
throw new TypeError('"size" argument must be of type number');
|
1395
|
+
} else if (size < 0) {
|
1396
|
+
throw new RangeError('The value "' + size + '" is invalid for option "size"');
|
1397
|
+
}
|
1398
|
+
if (size <= 0) {
|
1399
|
+
return new Buffer(size);
|
1400
|
+
}
|
1401
|
+
if (fill !== void 0) {
|
1402
|
+
return typeof encoding === "string" ? new Buffer(size).fill(fill, 0, size, encoding) : new Buffer(size).fill(fill);
|
1403
|
+
}
|
1404
|
+
return new Buffer(size);
|
1405
|
+
}
|
1406
|
+
/**
|
1407
|
+
* Allocates a new buffer of `size` octets without initializing memory. The contents of the buffer are unknown.
|
1408
|
+
*
|
1409
|
+
* @param size
|
1410
|
+
*/
|
1411
|
+
static allocUnsafe(size) {
|
1412
|
+
if (typeof size !== "number") {
|
1413
|
+
throw new TypeError('"size" argument must be of type number');
|
1414
|
+
} else if (size < 0) {
|
1415
|
+
throw new RangeError('The value "' + size + '" is invalid for option "size"');
|
1416
|
+
}
|
1417
|
+
return new Buffer(size < 0 ? 0 : Buffer._checked(size) | 0);
|
1418
|
+
}
|
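alloc validates the size and then either zero-initialises or fills via buf.fill, while allocUnsafe only range-checks against K_MAX_LENGTH. A sketch of the two paths (fill itself is outside this hunk; Node-compatible fill semantics assumed):

    const zeroed = Buffer.alloc(4);        // four 0x00 bytes
    const filled = Buffer.alloc(4, 0xab);  // four 0xab bytes, produced by buf.fill(0xab)
    const quick  = Buffer.allocUnsafe(4);  // documented above as uninitialised — overwrite before reading
    // Buffer.alloc(-1) and Buffer.allocUnsafe(-1) both throw RangeError('The value "-1" is invalid for option "size"')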
1419
|
+
/**
|
1420
|
+
* Returns true if the given `obj` is an instance of `type`.
|
1421
|
+
*
|
1422
|
+
* @param obj
|
1423
|
+
* @param type
|
1424
|
+
*/
|
1425
|
+
static _isInstance(obj, type) {
|
1426
|
+
return obj instanceof type || obj != null && obj.constructor != null && obj.constructor.name != null && obj.constructor.name === type.name;
|
1427
|
+
}
|
1428
|
+
static _checked(length) {
|
1429
|
+
if (length >= K_MAX_LENGTH) {
|
1430
|
+
throw new RangeError(
|
1431
|
+
"Attempt to allocate Buffer larger than maximum size: 0x" + K_MAX_LENGTH.toString(16) + " bytes"
|
1432
|
+
);
|
1433
|
+
}
|
1434
|
+
return length | 0;
|
1435
|
+
}
|
1436
|
+
static _blitBuffer(src, dst, offset, length) {
|
1437
|
+
let i;
|
1438
|
+
for (i = 0; i < length; ++i) {
|
1439
|
+
if (i + offset >= dst.length || i >= src.length) {
|
1440
|
+
break;
|
1441
|
+
}
|
1442
|
+
dst[i + offset] = src[i];
|
1443
|
+
}
|
1444
|
+
return i;
|
1445
|
+
}
|
1446
|
+
static _utf8Write(buf, string, offset, length) {
|
1447
|
+
return Buffer._blitBuffer(Buffer._utf8ToBytes(string, buf.length - offset), buf, offset, length);
|
1448
|
+
}
|
1449
|
+
static _asciiWrite(buf, string, offset, length) {
|
1450
|
+
return Buffer._blitBuffer(Buffer._asciiToBytes(string), buf, offset, length);
|
1451
|
+
}
|
1452
|
+
static _base64Write(buf, string, offset, length) {
|
1453
|
+
return Buffer._blitBuffer(Buffer._base64ToBytes(string), buf, offset, length);
|
1454
|
+
}
|
1455
|
+
static _ucs2Write(buf, string, offset, length) {
|
1456
|
+
return Buffer._blitBuffer(Buffer._utf16leToBytes(string, buf.length - offset), buf, offset, length);
|
1457
|
+
}
|
1458
|
+
static _hexWrite(buf, string, offset, length) {
|
1459
|
+
offset = Number(offset) || 0;
|
1460
|
+
const remaining = buf.length - offset;
|
1461
|
+
if (!length) {
|
1462
|
+
length = remaining;
|
1463
|
+
} else {
|
1464
|
+
length = Number(length);
|
1465
|
+
if (length > remaining) {
|
1466
|
+
length = remaining;
|
1467
|
+
}
|
1468
|
+
}
|
1469
|
+
const strLen = string.length;
|
1470
|
+
if (length > strLen / 2) {
|
1471
|
+
length = strLen / 2;
|
1472
|
+
}
|
1473
|
+
let i;
|
1474
|
+
for (i = 0; i < length; ++i) {
|
1475
|
+
const parsed = parseInt(string.substr(i * 2, 2), 16);
|
1476
|
+
if (parsed !== parsed) {
|
1477
|
+
return i;
|
1478
|
+
}
|
1479
|
+
buf[offset + i] = parsed;
|
1480
|
+
}
|
1481
|
+
return i;
|
1482
|
+
}
|
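_hexWrite consumes two hex digits per byte, clamps the length to both the remaining buffer space and half the string length, and stops early when parseInt yields NaN (the `parsed !== parsed` check). Expected behaviour for a zero-filled 4-byte target, as an illustration:

    // _hexWrite(buf, "deadbeef", 0)   -> writes de ad be ef, returns 4
    // _hexWrite(buf, "dezz1122", 0)   -> writes de only, returns 1 ("zz" parses to NaN)
    // _hexWrite(buf, "deadbeef99", 0) -> writes de ad be ef, returns 4 (length clamped to the buffer)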
1483
|
+
static _utf8ToBytes(string, units) {
|
1484
|
+
units = units || Infinity;
|
1485
|
+
const length = string.length;
|
1486
|
+
const bytes = [];
|
1487
|
+
let codePoint;
|
1488
|
+
let leadSurrogate = null;
|
1489
|
+
for (let i = 0; i < length; ++i) {
|
1490
|
+
codePoint = string.charCodeAt(i);
|
1491
|
+
if (codePoint > 55295 && codePoint < 57344) {
|
1492
|
+
if (!leadSurrogate) {
|
1493
|
+
if (codePoint > 56319) {
|
1494
|
+
if ((units -= 3) > -1) {
|
1495
|
+
bytes.push(239, 191, 189);
|
1496
|
+
}
|
1497
|
+
continue;
|
1498
|
+
} else if (i + 1 === length) {
|
1499
|
+
if ((units -= 3) > -1) {
|
1500
|
+
bytes.push(239, 191, 189);
|
1501
|
+
}
|
1502
|
+
continue;
|
1503
|
+
}
|
1504
|
+
leadSurrogate = codePoint;
|
1505
|
+
continue;
|
1506
|
+
}
|
1507
|
+
if (codePoint < 56320) {
|
1508
|
+
if ((units -= 3) > -1) {
|
1509
|
+
bytes.push(239, 191, 189);
|
1510
|
+
}
|
1511
|
+
leadSurrogate = codePoint;
|
1512
|
+
continue;
|
1513
|
+
}
|
1514
|
+
codePoint = (leadSurrogate - 55296 << 10 | codePoint - 56320) + 65536;
|
1515
|
+
} else if (leadSurrogate) {
|
1516
|
+
if ((units -= 3) > -1) {
|
1517
|
+
bytes.push(239, 191, 189);
|
1518
|
+
}
|
1519
|
+
}
|
1520
|
+
leadSurrogate = null;
|
1521
|
+
if (codePoint < 128) {
|
1522
|
+
if ((units -= 1) < 0) {
|
1523
|
+
break;
|
1524
|
+
}
|
1525
|
+
bytes.push(codePoint);
|
1526
|
+
} else if (codePoint < 2048) {
|
1527
|
+
if ((units -= 2) < 0) {
|
1528
|
+
break;
|
1529
|
+
}
|
1530
|
+
bytes.push(codePoint >> 6 | 192, codePoint & 63 | 128);
|
1531
|
+
} else if (codePoint < 65536) {
|
1532
|
+
if ((units -= 3) < 0) {
|
1533
|
+
break;
|
1534
|
+
}
|
1535
|
+
bytes.push(codePoint >> 12 | 224, codePoint >> 6 & 63 | 128, codePoint & 63 | 128);
|
1536
|
+
} else if (codePoint < 1114112) {
|
1537
|
+
if ((units -= 4) < 0) {
|
1538
|
+
break;
|
1539
|
+
}
|
1540
|
+
bytes.push(
|
1541
|
+
codePoint >> 18 | 240,
|
1542
|
+
codePoint >> 12 & 63 | 128,
|
1543
|
+
codePoint >> 6 & 63 | 128,
|
1544
|
+
codePoint & 63 | 128
|
1545
|
+
);
|
1546
|
+
} else {
|
1547
|
+
throw new Error("Invalid code point");
|
1548
|
+
}
|
1549
|
+
}
|
1550
|
+
return bytes;
|
1551
|
+
}
|
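_utf8ToBytes is a hand-rolled UTF-8 encoder: it pairs surrogates before encoding, substitutes the U+FFFD sequence 239 191 189 for lone surrogates, and stops once the units budget is spent. Expected byte output for a few inputs (decimal, matching what TextEncoder would produce for well-formed text):

    // "A"      -> [65]                   one byte, code point below 0x80
    // "é"      -> [195, 169]             two bytes, below 0x800
    // "€"      -> [226, 130, 172]        three bytes, below 0x10000
    // "😀"     -> [240, 159, 152, 128]   four bytes, surrogate pair combined first
    // "\uD800" -> [239, 191, 189]        lone lead surrogate replaced with U+FFFD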
1552
|
+
static _base64ToBytes(str) {
|
1553
|
+
return toByteArray(base64clean(str));
|
1554
|
+
}
|
1555
|
+
static _asciiToBytes(str) {
|
1556
|
+
const byteArray = [];
|
1557
|
+
for (let i = 0; i < str.length; ++i) {
|
1558
|
+
byteArray.push(str.charCodeAt(i) & 255);
|
1559
|
+
}
|
1560
|
+
return byteArray;
|
1561
|
+
}
|
1562
|
+
static _utf16leToBytes(str, units) {
|
1563
|
+
let c, hi, lo;
|
1564
|
+
const byteArray = [];
|
1565
|
+
for (let i = 0; i < str.length; ++i) {
|
1566
|
+
if ((units -= 2) < 0) break;
|
1567
|
+
c = str.charCodeAt(i);
|
1568
|
+
hi = c >> 8;
|
1569
|
+
lo = c % 256;
|
1570
|
+
byteArray.push(lo);
|
1571
|
+
byteArray.push(hi);
|
1572
|
+
}
|
1573
|
+
return byteArray;
|
1574
|
+
}
|
1575
|
+
static _hexSlice(buf, start, end) {
|
1576
|
+
const len = buf.length;
|
1577
|
+
if (!start || start < 0) {
|
1578
|
+
start = 0;
|
1579
|
+
}
|
1580
|
+
if (!end || end < 0 || end > len) {
|
1581
|
+
end = len;
|
1582
|
+
}
|
1583
|
+
let out = "";
|
1584
|
+
for (let i = start; i < end; ++i) {
|
1585
|
+
out += hexSliceLookupTable[buf[i]];
|
1586
|
+
}
|
1587
|
+
return out;
|
1588
|
+
}
|
1589
|
+
static _base64Slice(buf, start, end) {
|
1590
|
+
if (start === 0 && end === buf.length) {
|
1591
|
+
return fromByteArray(buf);
|
1592
|
+
} else {
|
1593
|
+
return fromByteArray(buf.slice(start, end));
|
1594
|
+
}
|
1595
|
+
}
|
1596
|
+
static _utf8Slice(buf, start, end) {
|
1597
|
+
end = Math.min(buf.length, end);
|
1598
|
+
const res = [];
|
1599
|
+
let i = start;
|
1600
|
+
while (i < end) {
|
1601
|
+
const firstByte = buf[i];
|
1602
|
+
let codePoint = null;
|
1603
|
+
let bytesPerSequence = firstByte > 239 ? 4 : firstByte > 223 ? 3 : firstByte > 191 ? 2 : 1;
|
1604
|
+
if (i + bytesPerSequence <= end) {
|
1605
|
+
let secondByte, thirdByte, fourthByte, tempCodePoint;
|
1606
|
+
switch (bytesPerSequence) {
|
1607
|
+
case 1:
|
1608
|
+
if (firstByte < 128) {
|
1609
|
+
codePoint = firstByte;
|
1610
|
+
}
|
1611
|
+
break;
|
1612
|
+
case 2:
|
1613
|
+
secondByte = buf[i + 1];
|
1614
|
+
if ((secondByte & 192) === 128) {
|
1615
|
+
tempCodePoint = (firstByte & 31) << 6 | secondByte & 63;
|
1616
|
+
if (tempCodePoint > 127) {
|
1617
|
+
codePoint = tempCodePoint;
|
1618
|
+
}
|
1619
|
+
}
|
1620
|
+
break;
|
1621
|
+
case 3:
|
1622
|
+
secondByte = buf[i + 1];
|
1623
|
+
thirdByte = buf[i + 2];
|
1624
|
+
if ((secondByte & 192) === 128 && (thirdByte & 192) === 128) {
|
1625
|
+
tempCodePoint = (firstByte & 15) << 12 | (secondByte & 63) << 6 | thirdByte & 63;
|
1626
|
+
if (tempCodePoint > 2047 && (tempCodePoint < 55296 || tempCodePoint > 57343)) {
|
1627
|
+
codePoint = tempCodePoint;
|
1628
|
+
}
|
1629
|
+
}
|
1630
|
+
break;
|
1631
|
+
case 4:
|
1632
|
+
secondByte = buf[i + 1];
|
1633
|
+
thirdByte = buf[i + 2];
|
1634
|
+
fourthByte = buf[i + 3];
|
1635
|
+
if ((secondByte & 192) === 128 && (thirdByte & 192) === 128 && (fourthByte & 192) === 128) {
|
1636
|
+
tempCodePoint = (firstByte & 15) << 18 | (secondByte & 63) << 12 | (thirdByte & 63) << 6 | fourthByte & 63;
|
1637
|
+
if (tempCodePoint > 65535 && tempCodePoint < 1114112) {
|
1638
|
+
codePoint = tempCodePoint;
|
1639
|
+
}
|
1640
|
+
}
|
1641
|
+
}
|
1642
|
+
}
|
1643
|
+
if (codePoint === null) {
|
1644
|
+
codePoint = 65533;
|
1645
|
+
bytesPerSequence = 1;
|
1646
|
+
} else if (codePoint > 65535) {
|
1647
|
+
codePoint -= 65536;
|
1648
|
+
res.push(codePoint >>> 10 & 1023 | 55296);
|
1649
|
+
codePoint = 56320 | codePoint & 1023;
|
1650
|
+
}
|
1651
|
+
res.push(codePoint);
|
1652
|
+
i += bytesPerSequence;
|
1653
|
+
}
|
1654
|
+
return Buffer._decodeCodePointsArray(res);
|
1655
|
+
}
|
1656
|
+
static _decodeCodePointsArray(codePoints) {
|
1657
|
+
const len = codePoints.length;
|
1658
|
+
if (len <= MAX_ARGUMENTS_LENGTH) {
|
1659
|
+
return String.fromCharCode.apply(String, codePoints);
|
1660
|
+
}
|
1661
|
+
let res = "";
|
1662
|
+
let i = 0;
|
1663
|
+
while (i < len) {
|
1664
|
+
res += String.fromCharCode.apply(String, codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH));
|
1665
|
+
}
|
1666
|
+
return res;
|
1667
|
+
}
|
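_decodeCodePointsArray avoids overflowing the argument limit of Function.prototype.apply by feeding String.fromCharCode at most MAX_ARGUMENTS_LENGTH code units per call (the constant is defined elsewhere in the bundle). The same chunking idea in isolation, with an assumed 0x1000 chunk size:

    function fromCodeUnits(units, chunk = 0x1000) {
      let out = "";
      for (let i = 0; i < units.length; i += chunk) {
        // apply() over a bounded slice keeps the argument count safe on every engine
        out += String.fromCharCode.apply(String, units.slice(i, i + chunk));
      }
      return out;
    }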
1668
|
+
static _asciiSlice(buf, start, end) {
|
1669
|
+
let ret = "";
|
1670
|
+
end = Math.min(buf.length, end);
|
1671
|
+
for (let i = start; i < end; ++i) {
|
1672
|
+
ret += String.fromCharCode(buf[i] & 127);
|
1673
|
+
}
|
1674
|
+
return ret;
|
1675
|
+
}
|
1676
|
+
static _latin1Slice(buf, start, end) {
|
1677
|
+
let ret = "";
|
1678
|
+
end = Math.min(buf.length, end);
|
1679
|
+
for (let i = start; i < end; ++i) {
|
1680
|
+
ret += String.fromCharCode(buf[i]);
|
1681
|
+
}
|
1682
|
+
return ret;
|
1683
|
+
}
|
1684
|
+
static _utf16leSlice(buf, start, end) {
|
1685
|
+
const bytes = buf.slice(start, end);
|
1686
|
+
let res = "";
|
1687
|
+
for (let i = 0; i < bytes.length - 1; i += 2) {
|
1688
|
+
res += String.fromCharCode(bytes[i] + bytes[i + 1] * 256);
|
1689
|
+
}
|
1690
|
+
return res;
|
1691
|
+
}
|
1692
|
+
static _arrayIndexOf(arr, val, byteOffset, encoding, dir) {
|
1693
|
+
let indexSize = 1;
|
1694
|
+
let arrLength = arr.length;
|
1695
|
+
let valLength = val.length;
|
1696
|
+
if (encoding !== void 0) {
|
1697
|
+
encoding = Buffer._getEncoding(encoding);
|
1698
|
+
if (encoding === "ucs2" || encoding === "utf16le") {
|
1699
|
+
if (arr.length < 2 || val.length < 2) {
|
1700
|
+
return -1;
|
1701
|
+
}
|
1702
|
+
indexSize = 2;
|
1703
|
+
arrLength /= 2;
|
1704
|
+
valLength /= 2;
|
1705
|
+
byteOffset /= 2;
|
1706
|
+
}
|
1707
|
+
}
|
1708
|
+
function read(buf, i2) {
|
1709
|
+
if (indexSize === 1) {
|
1710
|
+
return buf[i2];
|
1711
|
+
} else {
|
1712
|
+
return buf.readUInt16BE(i2 * indexSize);
|
1713
|
+
}
|
1714
|
+
}
|
1715
|
+
let i;
|
1716
|
+
if (dir) {
|
1717
|
+
let foundIndex = -1;
|
1718
|
+
for (i = byteOffset; i < arrLength; i++) {
|
1719
|
+
if (read(arr, i) === read(val, foundIndex === -1 ? 0 : i - foundIndex)) {
|
1720
|
+
if (foundIndex === -1) foundIndex = i;
|
1721
|
+
if (i - foundIndex + 1 === valLength) return foundIndex * indexSize;
|
1722
|
+
} else {
|
1723
|
+
if (foundIndex !== -1) i -= i - foundIndex;
|
1724
|
+
foundIndex = -1;
|
1725
|
+
}
|
1726
|
+
}
|
1727
|
+
} else {
|
1728
|
+
if (byteOffset + valLength > arrLength) {
|
1729
|
+
byteOffset = arrLength - valLength;
|
1730
|
+
}
|
1731
|
+
for (i = byteOffset; i >= 0; i--) {
|
1732
|
+
let found = true;
|
1733
|
+
for (let j = 0; j < valLength; j++) {
|
1734
|
+
if (read(arr, i + j) !== read(val, j)) {
|
1735
|
+
found = false;
|
1736
|
+
break;
|
1737
|
+
}
|
1738
|
+
}
|
1739
|
+
if (found) {
|
1740
|
+
return i;
|
1741
|
+
}
|
1742
|
+
}
|
1743
|
+
}
|
1744
|
+
return -1;
|
1745
|
+
}
|
1746
|
+
static _checkOffset(offset, ext, length) {
|
1747
|
+
if (offset % 1 !== 0 || offset < 0) throw new RangeError("offset is not uint");
|
1748
|
+
if (offset + ext > length) throw new RangeError("Trying to access beyond buffer length");
|
1749
|
+
}
|
1750
|
+
static _checkInt(buf, value, offset, ext, max, min) {
|
1751
|
+
if (!Buffer.isBuffer(buf)) throw new TypeError('"buffer" argument must be a Buffer instance');
|
1752
|
+
if (value > max || value < min) throw new RangeError('"value" argument is out of bounds');
|
1753
|
+
if (offset + ext > buf.length) throw new RangeError("Index out of range");
|
1754
|
+
}
|
1755
|
+
static _getEncoding(encoding) {
|
1756
|
+
let toLowerCase = false;
|
1757
|
+
let originalEncoding = "";
|
1758
|
+
for (; ; ) {
|
1759
|
+
switch (encoding) {
|
1760
|
+
case "hex":
|
1761
|
+
return "hex";
|
1762
|
+
case "utf8":
|
1763
|
+
return "utf8";
|
1764
|
+
case "ascii":
|
1765
|
+
return "ascii";
|
1766
|
+
case "binary":
|
1767
|
+
return "binary";
|
1768
|
+
case "latin1":
|
1769
|
+
return "latin1";
|
1770
|
+
case "ucs2":
|
1771
|
+
return "ucs2";
|
1772
|
+
case "utf16le":
|
1773
|
+
return "utf16le";
|
1774
|
+
case "base64":
|
1775
|
+
return "base64";
|
1776
|
+
default: {
|
1777
|
+
if (toLowerCase) {
|
1778
|
+
throw new TypeError("Unknown or unsupported encoding: " + originalEncoding);
|
1779
|
+
}
|
1780
|
+
toLowerCase = true;
|
1781
|
+
originalEncoding = encoding;
|
1782
|
+
encoding = encoding.toLowerCase();
|
1783
|
+
}
|
1784
|
+
}
|
1785
|
+
}
|
1786
|
+
}
|
1787
|
+
}
|
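_getEncoding loops at most twice: if the first pass falls through to default it lower-cases the name and retries, and only a second miss throws, reporting the original spelling. For the switch above that means:

    // Buffer._getEncoding("UTF8")  -> second pass matches "utf8"
    // Buffer._getEncoding("UCS2")  -> second pass matches "ucs2"
    // Buffer._getEncoding("utf-8") -> TypeError: Unknown or unsupported encoding: utf-8 (the hyphenated form is not in the switch)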
1788
|
+
const hexSliceLookupTable = function() {
|
1789
|
+
const alphabet = "0123456789abcdef";
|
1790
|
+
const table = new Array(256);
|
1791
|
+
for (let i = 0; i < 16; ++i) {
|
1792
|
+
const i16 = i * 16;
|
1793
|
+
for (let j = 0; j < 16; ++j) {
|
1794
|
+
table[i16 + j] = alphabet[i] + alphabet[j];
|
1795
|
+
}
|
1796
|
+
}
|
1797
|
+
return table;
|
1798
|
+
}();
|
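hexSliceLookupTable precomputes the 256 two-character hex strings so _hexSlice can format each byte with a single array lookup instead of toString(16) plus padding:

    // hexSliceLookupTable[0]   === "00"
    // hexSliceLookupTable[15]  === "0f"
    // hexSliceLookupTable[255] === "ff"
    // so _hexSlice over the bytes [0, 171, 255] yields "00abff"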
1799
|
+
const INVALID_BASE64_RE = /[^+/0-9A-Za-z-_]/g;
|
1800
|
+
function base64clean(str) {
|
1801
|
+
str = str.split("=")[0];
|
1802
|
+
str = str.trim().replace(INVALID_BASE64_RE, "");
|
1803
|
+
if (str.length < 2) return "";
|
1804
|
+
while (str.length % 4 !== 0) {
|
1805
|
+
str = str + "=";
|
1806
|
+
}
|
1807
|
+
return str;
|
1808
|
+
}
|
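base64clean truncates at the first "=", strips anything outside the base64/base64url alphabet, and re-pads to a multiple of four so toByteArray's length check passes. Expected results:

    // base64clean("aGVsbG8=")    -> "aGVsbG8="  (split on "=", padding restored afterwards)
    // base64clean(" aGVs\nbG8 ") -> "aGVsbG8="  (whitespace stripped, then re-padded)
    // base64clean("a")           -> ""          (fewer than two usable characters)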
1809
|
+
|
27
1810
|
function notEmpty(value) {
|
28
1811
|
return value !== null && value !== void 0;
|
29
1812
|
}
|
@@ -118,155 +1901,15 @@ function promiseMap(inputValues, mapper) {
|
|
118
1901
|
return inputValues.reduce(reducer, Promise.resolve([]));
|
119
1902
|
}
|
120
1903
|
|
121
|
-
|
122
|
-
|
123
|
-
if (isDefined(process) && isDefined(process.env)) {
|
124
|
-
return {
|
125
|
-
apiKey: process.env.XATA_API_KEY ?? getGlobalApiKey(),
|
126
|
-
databaseURL: process.env.XATA_DATABASE_URL ?? getGlobalDatabaseURL(),
|
127
|
-
branch: process.env.XATA_BRANCH ?? getGlobalBranch(),
|
128
|
-
deployPreview: process.env.XATA_PREVIEW,
|
129
|
-
deployPreviewBranch: process.env.XATA_PREVIEW_BRANCH,
|
130
|
-
vercelGitCommitRef: process.env.VERCEL_GIT_COMMIT_REF,
|
131
|
-
vercelGitRepoOwner: process.env.VERCEL_GIT_REPO_OWNER
|
132
|
-
};
|
133
|
-
}
|
134
|
-
} catch (err) {
|
135
|
-
}
|
136
|
-
try {
|
137
|
-
if (isObject(Deno) && isObject(Deno.env)) {
|
138
|
-
return {
|
139
|
-
apiKey: Deno.env.get("XATA_API_KEY") ?? getGlobalApiKey(),
|
140
|
-
databaseURL: Deno.env.get("XATA_DATABASE_URL") ?? getGlobalDatabaseURL(),
|
141
|
-
branch: Deno.env.get("XATA_BRANCH") ?? getGlobalBranch(),
|
142
|
-
deployPreview: Deno.env.get("XATA_PREVIEW"),
|
143
|
-
deployPreviewBranch: Deno.env.get("XATA_PREVIEW_BRANCH"),
|
144
|
-
vercelGitCommitRef: Deno.env.get("VERCEL_GIT_COMMIT_REF"),
|
145
|
-
vercelGitRepoOwner: Deno.env.get("VERCEL_GIT_REPO_OWNER")
|
146
|
-
};
|
147
|
-
}
|
148
|
-
} catch (err) {
|
149
|
-
}
|
150
|
-
return {
|
151
|
-
apiKey: getGlobalApiKey(),
|
152
|
-
databaseURL: getGlobalDatabaseURL(),
|
153
|
-
branch: getGlobalBranch(),
|
154
|
-
deployPreview: void 0,
|
155
|
-
deployPreviewBranch: void 0,
|
156
|
-
vercelGitCommitRef: void 0,
|
157
|
-
vercelGitRepoOwner: void 0
|
158
|
-
};
|
159
|
-
}
|
160
|
-
function getEnableBrowserVariable() {
|
161
|
-
try {
|
162
|
-
if (isObject(process) && isObject(process.env) && process.env.XATA_ENABLE_BROWSER !== void 0) {
|
163
|
-
return process.env.XATA_ENABLE_BROWSER === "true";
|
164
|
-
}
|
165
|
-
} catch (err) {
|
166
|
-
}
|
167
|
-
try {
|
168
|
-
if (isObject(Deno) && isObject(Deno.env) && Deno.env.get("XATA_ENABLE_BROWSER") !== void 0) {
|
169
|
-
return Deno.env.get("XATA_ENABLE_BROWSER") === "true";
|
170
|
-
}
|
171
|
-
} catch (err) {
|
172
|
-
}
|
173
|
-
try {
|
174
|
-
return XATA_ENABLE_BROWSER === true || XATA_ENABLE_BROWSER === "true";
|
175
|
-
} catch (err) {
|
176
|
-
return void 0;
|
177
|
-
}
|
178
|
-
}
|
179
|
-
function getGlobalApiKey() {
|
180
|
-
try {
|
181
|
-
return XATA_API_KEY;
|
182
|
-
} catch (err) {
|
183
|
-
return void 0;
|
184
|
-
}
|
185
|
-
}
|
186
|
-
function getGlobalDatabaseURL() {
|
187
|
-
try {
|
188
|
-
return XATA_DATABASE_URL;
|
189
|
-
} catch (err) {
|
190
|
-
return void 0;
|
191
|
-
}
|
192
|
-
}
|
193
|
-
function getGlobalBranch() {
|
194
|
-
try {
|
195
|
-
return XATA_BRANCH;
|
196
|
-
} catch (err) {
|
197
|
-
return void 0;
|
198
|
-
}
|
199
|
-
}
|
200
|
-
function getDatabaseURL() {
|
201
|
-
try {
|
202
|
-
const { databaseURL } = getEnvironment();
|
203
|
-
return databaseURL;
|
204
|
-
} catch (err) {
|
205
|
-
return void 0;
|
206
|
-
}
|
207
|
-
}
|
208
|
-
function getAPIKey() {
|
209
|
-
try {
|
210
|
-
const { apiKey } = getEnvironment();
|
211
|
-
return apiKey;
|
212
|
-
} catch (err) {
|
213
|
-
return void 0;
|
214
|
-
}
|
215
|
-
}
|
216
|
-
function getBranch() {
|
217
|
-
try {
|
218
|
-
const { branch } = getEnvironment();
|
219
|
-
return branch;
|
220
|
-
} catch (err) {
|
221
|
-
return void 0;
|
222
|
-
}
|
223
|
-
}
|
224
|
-
function buildPreviewBranchName({ org, branch }) {
|
225
|
-
return `preview-${org}-${branch}`;
|
226
|
-
}
|
227
|
-
function getPreviewBranch() {
|
228
|
-
try {
|
229
|
-
const { deployPreview, deployPreviewBranch, vercelGitCommitRef, vercelGitRepoOwner } = getEnvironment();
|
230
|
-
if (deployPreviewBranch)
|
231
|
-
return deployPreviewBranch;
|
232
|
-
switch (deployPreview) {
|
233
|
-
case "vercel": {
|
234
|
-
if (!vercelGitCommitRef || !vercelGitRepoOwner) {
|
235
|
-
console.warn("XATA_PREVIEW=vercel but VERCEL_GIT_COMMIT_REF or VERCEL_GIT_REPO_OWNER is not valid");
|
236
|
-
return void 0;
|
237
|
-
}
|
238
|
-
return buildPreviewBranchName({ org: vercelGitRepoOwner, branch: vercelGitCommitRef });
|
239
|
-
}
|
240
|
-
}
|
241
|
-
return void 0;
|
242
|
-
} catch (err) {
|
243
|
-
return void 0;
|
244
|
-
}
|
245
|
-
}
|
246
|
-
|
247
|
-
var __accessCheck$6 = (obj, member, msg) => {
|
248
|
-
if (!member.has(obj))
|
249
|
-
throw TypeError("Cannot " + msg);
|
1904
|
+
var __typeError$6 = (msg) => {
|
1905
|
+
throw TypeError(msg);
|
250
1906
|
};
|
251
|
-
var
|
252
|
-
|
253
|
-
|
254
|
-
|
255
|
-
var
|
256
|
-
|
257
|
-
throw TypeError("Cannot add the same private member more than once");
|
258
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
259
|
-
};
|
260
|
-
var __privateSet$4 = (obj, member, value, setter) => {
|
261
|
-
__accessCheck$6(obj, member, "write to private field");
|
262
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
263
|
-
return value;
|
264
|
-
};
|
265
|
-
var __privateMethod$4 = (obj, member, method) => {
|
266
|
-
__accessCheck$6(obj, member, "access private method");
|
267
|
-
return method;
|
268
|
-
};
|
269
|
-
var _fetch, _queue, _concurrency, _enqueue, enqueue_fn;
|
1907
|
+
var __accessCheck$6 = (obj, member, msg) => member.has(obj) || __typeError$6("Cannot " + msg);
|
1908
|
+
var __privateGet$5 = (obj, member, getter) => (__accessCheck$6(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
1909
|
+
var __privateAdd$6 = (obj, member, value) => member.has(obj) ? __typeError$6("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
1910
|
+
var __privateSet$4 = (obj, member, value, setter) => (__accessCheck$6(obj, member, "write to private field"), member.set(obj, value), value);
|
1911
|
+
var __privateMethod$4 = (obj, member, method) => (__accessCheck$6(obj, member, "access private method"), method);
|
1912
|
+
var _fetch, _queue, _concurrency, _ApiRequestPool_instances, enqueue_fn;
|
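The rewritten helpers above are the newer esbuild/TypeScript down-level emit: access and duplicate-add checks become single expressions that route failures through __typeError$6, and all private methods of a class now key off one shared WeakSet (_ApiRequestPool_instances) instead of one WeakMap entry per method. A minimal sketch of the pattern using those helpers — the Counter class is illustrative, not from the bundle:

    var _count, _Counter_instances, bump_fn;
    class Counter {
      constructor() {
        __privateAdd$6(this, _Counter_instances); // registers the instance in the shared WeakSet
        __privateAdd$6(this, _count);             // reserves the private-field slot
        __privateSet$4(this, _count, 0);
      }
      increment() {
        return __privateMethod$4(this, _Counter_instances, bump_fn).call(this);
      }
    }
    _count = new WeakMap();
    _Counter_instances = new WeakSet();
    bump_fn = function() {
      return __privateSet$4(this, _count, __privateGet$5(this, _count) + 1);
    };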
270
1913
|
const REQUEST_TIMEOUT = 5 * 60 * 1e3;
|
271
1914
|
function getFetchImplementation(userFetch) {
|
272
1915
|
const globalFetch = typeof fetch !== "undefined" ? fetch : void 0;
|
@@ -279,10 +1922,10 @@ function getFetchImplementation(userFetch) {
|
|
279
1922
|
}
|
280
1923
|
class ApiRequestPool {
|
281
1924
|
constructor(concurrency = 10) {
|
282
|
-
__privateAdd$6(this,
|
283
|
-
__privateAdd$6(this, _fetch
|
284
|
-
__privateAdd$6(this, _queue
|
285
|
-
__privateAdd$6(this, _concurrency
|
1925
|
+
__privateAdd$6(this, _ApiRequestPool_instances);
|
1926
|
+
__privateAdd$6(this, _fetch);
|
1927
|
+
__privateAdd$6(this, _queue);
|
1928
|
+
__privateAdd$6(this, _concurrency);
|
286
1929
|
__privateSet$4(this, _queue, []);
|
287
1930
|
__privateSet$4(this, _concurrency, concurrency);
|
288
1931
|
this.running = 0;
|
@@ -317,7 +1960,7 @@ class ApiRequestPool {
|
|
317
1960
|
}
|
318
1961
|
return response;
|
319
1962
|
};
|
320
|
-
return __privateMethod$4(this,
|
1963
|
+
return __privateMethod$4(this, _ApiRequestPool_instances, enqueue_fn).call(this, async () => {
|
321
1964
|
return await runRequest();
|
322
1965
|
});
|
323
1966
|
}
|
@@ -325,7 +1968,7 @@ class ApiRequestPool {
|
|
325
1968
|
_fetch = new WeakMap();
|
326
1969
|
_queue = new WeakMap();
|
327
1970
|
_concurrency = new WeakMap();
|
328
|
-
|
1971
|
+
_ApiRequestPool_instances = new WeakSet();
|
329
1972
|
enqueue_fn = function(task) {
|
330
1973
|
const promise = new Promise((resolve) => __privateGet$5(this, _queue).push(resolve)).finally(() => {
|
331
1974
|
this.started--;
|
@@ -528,7 +2171,7 @@ function defaultOnOpen(response) {
|
|
528
2171
|
}
|
529
2172
|
}
|
530
2173
|
|
531
|
-
const VERSION = "0.29.
|
2174
|
+
const VERSION = "0.29.4";
|
532
2175
|
|
533
2176
|
class ErrorWithCause extends Error {
|
534
2177
|
constructor(message, options) {
|
@@ -608,35 +2251,30 @@ function parseProviderString(provider = "production") {
|
|
608
2251
|
return provider;
|
609
2252
|
}
|
610
2253
|
const [main, workspaces] = provider.split(",");
|
611
|
-
if (!main || !workspaces)
|
612
|
-
return null;
|
2254
|
+
if (!main || !workspaces) return null;
|
613
2255
|
return { main, workspaces };
|
614
2256
|
}
|
615
2257
|
function buildProviderString(provider) {
|
616
|
-
if (isHostProviderAlias(provider))
|
617
|
-
return provider;
|
2258
|
+
if (isHostProviderAlias(provider)) return provider;
|
618
2259
|
return `${provider.main},${provider.workspaces}`;
|
619
2260
|
}
|
620
2261
|
function parseWorkspacesUrlParts(url) {
|
621
|
-
if (!isString(url))
|
622
|
-
return null;
|
2262
|
+
if (!isString(url)) return null;
|
623
2263
|
const matches = {
|
624
2264
|
production: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.xata\.sh\/db\/([^:]+):?(.*)?/),
|
625
2265
|
staging: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.staging-xata\.dev\/db\/([^:]+):?(.*)?/),
|
626
2266
|
dev: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.dev-xata\.dev\/db\/([^:]+):?(.*)?/),
|
627
|
-
local: url.match(/(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:([^:]+):?(.*)?/)
|
2267
|
+
local: url.match(/(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:(?:\d+)\/db\/([^:]+):?(.*)?/)
|
628
2268
|
};
|
629
2269
|
const [host, match] = Object.entries(matches).find(([, match2]) => match2 !== null) ?? [];
|
630
|
-
if (!isHostProviderAlias(host) || !match)
|
631
|
-
return null;
|
2270
|
+
if (!isHostProviderAlias(host) || !match) return null;
|
632
2271
|
return { workspace: match[1], region: match[2], database: match[3], branch: match[4], host };
|
633
2272
|
}
|
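The only functional change in parseWorkspacesUrlParts is the local pattern: it now expects an explicit port followed by a /db/{database} path, whereas the old pattern let the greedy ([^:]+) group swallow the port and path together as the database segment. Hypothetical inputs matched against the patterns above:

    // parseWorkspacesUrlParts("https://ws-1234.eu-west-1.xata.sh/db/mydb:main")
    //   -> { workspace: "ws-1234", region: "eu-west-1", database: "mydb", branch: "main", host: "production" }
    // parseWorkspacesUrlParts("ws-1234.eu-west-1.localhost:6001/db/mydb")
    //   -> matches the updated "local" pattern, capturing "mydb" as the database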
634
2273
|
|
635
2274
|
const pool = new ApiRequestPool();
|
636
2275
|
const resolveUrl = (url, queryParams = {}, pathParams = {}) => {
|
637
2276
|
const cleanQueryParams = Object.entries(queryParams).reduce((acc, [key, value]) => {
|
638
|
-
if (value === void 0 || value === null)
|
639
|
-
return acc;
|
2277
|
+
if (value === void 0 || value === null) return acc;
|
640
2278
|
return { ...acc, [key]: value };
|
641
2279
|
}, {});
|
642
2280
|
const query = new URLSearchParams(cleanQueryParams).toString();
|
@@ -684,8 +2322,7 @@ function hostHeader(url) {
|
|
684
2322
|
return groups?.host ? { Host: groups.host } : {};
|
685
2323
|
}
|
686
2324
|
async function parseBody(body, headers) {
|
687
|
-
if (!isDefined(body))
|
688
|
-
return void 0;
|
2325
|
+
if (!isDefined(body)) return void 0;
|
689
2326
|
if (isBlob(body) || typeof body.text === "function") {
|
690
2327
|
return body;
|
691
2328
|
}
|
@@ -764,8 +2401,7 @@ async function fetch$1({
|
|
764
2401
|
[TraceAttributes.CLOUDFLARE_RAY_ID]: response.headers?.get("cf-ray") ?? void 0
|
765
2402
|
});
|
766
2403
|
const message = response.headers?.get("x-xata-message");
|
767
|
-
if (message)
|
768
|
-
console.warn(message);
|
2404
|
+
if (message) console.warn(message);
|
769
2405
|
if (response.status === 204) {
|
770
2406
|
return {};
|
771
2407
|
}
|
@@ -849,7 +2485,30 @@ function parseUrl(url) {
|
|
849
2485
|
|
850
2486
|
const dataPlaneFetch = async (options) => fetch$1({ ...options, endpoint: "dataPlane" });
|
851
2487
|
|
852
|
-
const applyMigration = (variables, signal) => dataPlaneFetch({
|
2488
|
+
const applyMigration = (variables, signal) => dataPlaneFetch({
|
2489
|
+
url: "/db/{dbBranchName}/migrations/apply",
|
2490
|
+
method: "post",
|
2491
|
+
...variables,
|
2492
|
+
signal
|
2493
|
+
});
|
2494
|
+
const startMigration = (variables, signal) => dataPlaneFetch({
|
2495
|
+
url: "/db/{dbBranchName}/migrations/start",
|
2496
|
+
method: "post",
|
2497
|
+
...variables,
|
2498
|
+
signal
|
2499
|
+
});
|
2500
|
+
const completeMigration = (variables, signal) => dataPlaneFetch({
|
2501
|
+
url: "/db/{dbBranchName}/migrations/complete",
|
2502
|
+
method: "post",
|
2503
|
+
...variables,
|
2504
|
+
signal
|
2505
|
+
});
|
2506
|
+
const rollbackMigration = (variables, signal) => dataPlaneFetch({
|
2507
|
+
url: "/db/{dbBranchName}/migrations/rollback",
|
2508
|
+
method: "post",
|
2509
|
+
...variables,
|
2510
|
+
signal
|
2511
|
+
});
|
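startMigration, completeMigration and rollbackMigration join applyMigration as thin wrappers that POST to branch-scoped migration paths. A hedged sketch of the call shape, following the pathParams/signal convention the other dataPlane operations in this bundle use; the request body schema for these endpoints is not part of this diff:

    // Illustrative only — body contents are an assumption:
    await startMigration(
      { pathParams: { dbBranchName: "mydb:main" }, body: { /* migration definition */ } },
      abortSignal // optional AbortSignal, forwarded as `signal`
    );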
853
2512
|
const adaptTable = (variables, signal) => dataPlaneFetch({
|
854
2513
|
url: "/db/{dbBranchName}/migrations/adapt/{tableName}",
|
855
2514
|
method: "post",
|
@@ -862,9 +2521,24 @@ const adaptAllTables = (variables, signal) => dataPlaneFetch({
|
|
862
2521
|
...variables,
|
863
2522
|
signal
|
864
2523
|
});
|
865
|
-
const getBranchMigrationJobStatus = (variables, signal) => dataPlaneFetch({
|
866
|
-
|
867
|
-
|
2524
|
+
const getBranchMigrationJobStatus = (variables, signal) => dataPlaneFetch({
|
2525
|
+
url: "/db/{dbBranchName}/migrations/status",
|
2526
|
+
method: "get",
|
2527
|
+
...variables,
|
2528
|
+
signal
|
2529
|
+
});
|
2530
|
+
const getMigrationJobStatus = (variables, signal) => dataPlaneFetch({
|
2531
|
+
url: "/db/{dbBranchName}/migrations/jobs/{jobId}",
|
2532
|
+
method: "get",
|
2533
|
+
...variables,
|
2534
|
+
signal
|
2535
|
+
});
|
2536
|
+
const getMigrationHistory = (variables, signal) => dataPlaneFetch({
|
2537
|
+
url: "/db/{dbBranchName}/migrations/history",
|
2538
|
+
method: "get",
|
2539
|
+
...variables,
|
2540
|
+
signal
|
2541
|
+
});
|
868
2542
|
const getBranchList = (variables, signal) => dataPlaneFetch({
|
869
2543
|
url: "/dbs/{dbName}",
|
870
2544
|
method: "get",
|
@@ -884,75 +2558,160 @@ const getBranchDetails = (variables, signal) => dataPlaneFetch({
|
|
884
2558
|
...variables,
|
885
2559
|
signal
|
886
2560
|
});
|
887
|
-
const createBranch = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}", method: "put", ...variables, signal });
|
888
|
-
const deleteBranch = (variables, signal) => dataPlaneFetch({
|
889
|
-
url: "/db/{dbBranchName}",
|
890
|
-
method: "delete",
|
2561
|
+
const createBranch = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}", method: "put", ...variables, signal });
|
2562
|
+
const deleteBranch = (variables, signal) => dataPlaneFetch({
|
2563
|
+
url: "/db/{dbBranchName}",
|
2564
|
+
method: "delete",
|
2565
|
+
...variables,
|
2566
|
+
signal
|
2567
|
+
});
|
2568
|
+
const getSchema = (variables, signal) => dataPlaneFetch({
|
2569
|
+
url: "/db/{dbBranchName}/schema",
|
2570
|
+
method: "get",
|
2571
|
+
...variables,
|
2572
|
+
signal
|
2573
|
+
});
|
2574
|
+
const copyBranch = (variables, signal) => dataPlaneFetch({
|
2575
|
+
url: "/db/{dbBranchName}/copy",
|
2576
|
+
method: "post",
|
2577
|
+
...variables,
|
2578
|
+
signal
|
2579
|
+
});
|
2580
|
+
const updateBranchMetadata = (variables, signal) => dataPlaneFetch({
|
2581
|
+
url: "/db/{dbBranchName}/metadata",
|
2582
|
+
method: "put",
|
2583
|
+
...variables,
|
2584
|
+
signal
|
2585
|
+
});
|
2586
|
+
const getBranchMetadata = (variables, signal) => dataPlaneFetch({
|
2587
|
+
url: "/db/{dbBranchName}/metadata",
|
2588
|
+
method: "get",
|
2589
|
+
...variables,
|
2590
|
+
signal
|
2591
|
+
});
|
2592
|
+
const getBranchStats = (variables, signal) => dataPlaneFetch({
|
2593
|
+
url: "/db/{dbBranchName}/stats",
|
2594
|
+
method: "get",
|
2595
|
+
...variables,
|
2596
|
+
signal
|
2597
|
+
});
|
2598
|
+
const getGitBranchesMapping = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "get", ...variables, signal });
|
2599
|
+
const addGitBranchesEntry = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "post", ...variables, signal });
|
2600
|
+
const removeGitBranchesEntry = (variables, signal) => dataPlaneFetch({
|
2601
|
+
url: "/dbs/{dbName}/gitBranches",
|
2602
|
+
method: "delete",
|
2603
|
+
...variables,
|
2604
|
+
signal
|
2605
|
+
});
|
2606
|
+
const resolveBranch = (variables, signal) => dataPlaneFetch({
|
2607
|
+
url: "/dbs/{dbName}/resolveBranch",
|
2608
|
+
method: "get",
|
2609
|
+
...variables,
|
2610
|
+
signal
|
2611
|
+
});
|
2612
|
+
const getBranchMigrationHistory = (variables, signal) => dataPlaneFetch({
|
2613
|
+
url: "/db/{dbBranchName}/migrations",
|
2614
|
+
method: "get",
|
2615
|
+
...variables,
|
2616
|
+
signal
|
2617
|
+
});
|
2618
|
+
const getBranchMigrationPlan = (variables, signal) => dataPlaneFetch({
|
2619
|
+
url: "/db/{dbBranchName}/migrations/plan",
|
2620
|
+
method: "post",
|
2621
|
+
...variables,
|
2622
|
+
signal
|
2623
|
+
});
|
2624
|
+
const executeBranchMigrationPlan = (variables, signal) => dataPlaneFetch({
|
2625
|
+
url: "/db/{dbBranchName}/migrations/execute",
|
2626
|
+
method: "post",
|
2627
|
+
...variables,
|
2628
|
+
signal
|
2629
|
+
});
|
2630
|
+
const queryMigrationRequests = (variables, signal) => dataPlaneFetch({
|
2631
|
+
url: "/dbs/{dbName}/migrations/query",
|
2632
|
+
method: "post",
|
2633
|
+
...variables,
|
2634
|
+
signal
|
2635
|
+
});
|
2636
|
+
const createMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations", method: "post", ...variables, signal });
|
2637
|
+
const getMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2638
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}",
|
2639
|
+
method: "get",
|
2640
|
+
...variables,
|
2641
|
+
signal
|
2642
|
+
});
|
2643
|
+
const updateMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2644
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}",
|
2645
|
+
method: "patch",
|
2646
|
+
...variables,
|
2647
|
+
signal
|
2648
|
+
});
|
2649
|
+
const listMigrationRequestsCommits = (variables, signal) => dataPlaneFetch({
|
2650
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/commits",
|
2651
|
+
method: "post",
|
2652
|
+
...variables,
|
2653
|
+
signal
|
2654
|
+
});
|
2655
|
+
const compareMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2656
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/compare",
|
2657
|
+
method: "post",
|
2658
|
+
...variables,
|
2659
|
+
signal
|
2660
|
+
});
|
2661
|
+
const getMigrationRequestIsMerged = (variables, signal) => dataPlaneFetch({
|
2662
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
|
2663
|
+
method: "get",
|
2664
|
+
...variables,
|
2665
|
+
signal
|
2666
|
+
});
|
2667
|
+
const mergeMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2668
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
|
2669
|
+
method: "post",
|
891
2670
|
...variables,
|
892
2671
|
signal
|
893
2672
|
});
|
894
|
-
const
|
895
|
-
url: "/db/{dbBranchName}/schema",
|
896
|
-
method: "
|
2673
|
+
const getBranchSchemaHistory = (variables, signal) => dataPlaneFetch({
|
2674
|
+
url: "/db/{dbBranchName}/schema/history",
|
2675
|
+
method: "post",
|
897
2676
|
...variables,
|
898
2677
|
signal
|
899
2678
|
});
|
900
|
-
const
|
901
|
-
url: "/db/{dbBranchName}/
|
2679
|
+
const compareBranchWithUserSchema = (variables, signal) => dataPlaneFetch({
|
2680
|
+
url: "/db/{dbBranchName}/schema/compare",
|
902
2681
|
method: "post",
|
903
2682
|
...variables,
|
904
2683
|
signal
|
905
2684
|
});
|
906
|
-
const
|
907
|
-
url: "/db/{dbBranchName}/
|
908
|
-
method: "
|
2685
|
+
const compareBranchSchemas = (variables, signal) => dataPlaneFetch({
|
2686
|
+
url: "/db/{dbBranchName}/schema/compare/{branchName}",
|
2687
|
+
method: "post",
|
909
2688
|
...variables,
|
910
2689
|
signal
|
911
2690
|
});
|
912
|
-
const
|
913
|
-
url: "/db/{dbBranchName}/
|
914
|
-
method: "
|
2691
|
+
const updateBranchSchema = (variables, signal) => dataPlaneFetch({
|
2692
|
+
url: "/db/{dbBranchName}/schema/update",
|
2693
|
+
method: "post",
|
915
2694
|
...variables,
|
916
2695
|
signal
|
917
2696
|
});
|
918
|
-
const
|
919
|
-
url: "/db/{dbBranchName}/
|
920
|
-
method: "
|
2697
|
+
const previewBranchSchemaEdit = (variables, signal) => dataPlaneFetch({
|
2698
|
+
url: "/db/{dbBranchName}/schema/preview",
|
2699
|
+
method: "post",
|
921
2700
|
...variables,
|
922
2701
|
signal
|
923
2702
|
});
|
924
|
-
const
|
925
|
-
|
926
|
-
|
927
|
-
const resolveBranch = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/resolveBranch", method: "get", ...variables, signal });
|
928
|
-
const getBranchMigrationHistory = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations", method: "get", ...variables, signal });
|
929
|
-
const getBranchMigrationPlan = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/plan", method: "post", ...variables, signal });
|
930
|
-
const executeBranchMigrationPlan = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/execute", method: "post", ...variables, signal });
|
931
|
-
const queryMigrationRequests = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/query", method: "post", ...variables, signal });
|
932
|
-
const createMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations", method: "post", ...variables, signal });
|
933
|
-
const getMigrationRequest = (variables, signal) => dataPlaneFetch({
|
934
|
-
url: "/dbs/{dbName}/migrations/{mrNumber}",
|
935
|
-
method: "get",
|
2703
|
+
const applyBranchSchemaEdit = (variables, signal) => dataPlaneFetch({
|
2704
|
+
url: "/db/{dbBranchName}/schema/apply",
|
2705
|
+
method: "post",
|
936
2706
|
...variables,
|
937
2707
|
signal
|
938
2708
|
});
|
939
|
-
const
|
940
|
-
|
941
|
-
const compareMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/{mrNumber}/compare", method: "post", ...variables, signal });
|
942
|
-
const getMigrationRequestIsMerged = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/{mrNumber}/merge", method: "get", ...variables, signal });
|
943
|
-
const mergeMigrationRequest = (variables, signal) => dataPlaneFetch({
|
944
|
-
url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
|
2709
|
+
const pushBranchMigrations = (variables, signal) => dataPlaneFetch({
|
2710
|
+
url: "/db/{dbBranchName}/schema/push",
|
945
2711
|
method: "post",
|
946
2712
|
...variables,
|
947
2713
|
signal
|
948
2714
|
});
|
949
|
-
const getBranchSchemaHistory = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/history", method: "post", ...variables, signal });
|
950
|
-
const compareBranchWithUserSchema = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/compare", method: "post", ...variables, signal });
|
951
|
-
const compareBranchSchemas = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/compare/{branchName}", method: "post", ...variables, signal });
|
952
|
-
const updateBranchSchema = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/update", method: "post", ...variables, signal });
|
953
|
-
const previewBranchSchemaEdit = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/preview", method: "post", ...variables, signal });
|
954
|
-
const applyBranchSchemaEdit = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/apply", method: "post", ...variables, signal });
|
955
|
-
const pushBranchMigrations = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/push", method: "post", ...variables, signal });
|
956
2715
|
const createTable = (variables, signal) => dataPlaneFetch({
|
957
2716
|
url: "/db/{dbBranchName}/tables/{tableName}",
|
958
2717
|
method: "put",
|
@@ -965,14 +2724,24 @@ const deleteTable = (variables, signal) => dataPlaneFetch({
|
|
965
2724
|
...variables,
|
966
2725
|
signal
|
967
2726
|
});
|
968
|
-
const updateTable = (variables, signal) => dataPlaneFetch({
|
2727
|
+
const updateTable = (variables, signal) => dataPlaneFetch({
|
2728
|
+
url: "/db/{dbBranchName}/tables/{tableName}",
|
2729
|
+
method: "patch",
|
2730
|
+
...variables,
|
2731
|
+
signal
|
2732
|
+
});
|
969
2733
|
const getTableSchema = (variables, signal) => dataPlaneFetch({
|
970
2734
|
url: "/db/{dbBranchName}/tables/{tableName}/schema",
|
971
2735
|
method: "get",
|
972
2736
|
...variables,
|
973
2737
|
signal
|
974
2738
|
});
|
975
|
-
const setTableSchema = (variables, signal) => dataPlaneFetch({
|
2739
|
+
const setTableSchema = (variables, signal) => dataPlaneFetch({
|
2740
|
+
url: "/db/{dbBranchName}/tables/{tableName}/schema",
|
2741
|
+
method: "put",
|
2742
|
+
...variables,
|
2743
|
+
signal
|
2744
|
+
});
|
976
2745
|
const getTableColumns = (variables, signal) => dataPlaneFetch({
|
977
2746
|
url: "/db/{dbBranchName}/tables/{tableName}/columns",
|
978
2747
|
method: "get",
|
@@ -980,7 +2749,12 @@ const getTableColumns = (variables, signal) => dataPlaneFetch({
|
|
980
2749
|
signal
|
981
2750
|
});
|
982
2751
|
const addTableColumn = (variables, signal) => dataPlaneFetch(
|
983
|
-
{
|
2752
|
+
{
|
2753
|
+
url: "/db/{dbBranchName}/tables/{tableName}/columns",
|
2754
|
+
method: "post",
|
2755
|
+
...variables,
|
2756
|
+
signal
|
2757
|
+
}
|
984
2758
|
);
|
985
2759
|
const getColumn = (variables, signal) => dataPlaneFetch({
|
986
2760
|
url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
|
@@ -988,15 +2762,30 @@ const getColumn = (variables, signal) => dataPlaneFetch({
|
|
988
2762
|
...variables,
|
989
2763
|
signal
|
990
2764
|
});
|
991
|
-
const updateColumn = (variables, signal) => dataPlaneFetch({
|
2765
|
+
const updateColumn = (variables, signal) => dataPlaneFetch({
|
2766
|
+
url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
|
2767
|
+
method: "patch",
|
2768
|
+
...variables,
|
2769
|
+
signal
|
2770
|
+
});
|
992
2771
|
const deleteColumn = (variables, signal) => dataPlaneFetch({
|
993
2772
|
url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
|
994
2773
|
method: "delete",
|
995
2774
|
...variables,
|
996
2775
|
signal
|
997
2776
|
});
|
998
|
-
const branchTransaction = (variables, signal) => dataPlaneFetch({
|
999
|
-
|
2777
|
+
const branchTransaction = (variables, signal) => dataPlaneFetch({
|
2778
|
+
url: "/db/{dbBranchName}/transaction",
|
2779
|
+
method: "post",
|
2780
|
+
...variables,
|
2781
|
+
signal
|
2782
|
+
});
|
2783
|
+
const insertRecord = (variables, signal) => dataPlaneFetch({
|
2784
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data",
|
2785
|
+
method: "post",
|
2786
|
+
...variables,
|
2787
|
+
signal
|
2788
|
+
});
|
1000
2789
|
const getFileItem = (variables, signal) => dataPlaneFetch({
|
1001
2790
|
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}/column/{columnName}/file/{fileId}",
|
1002
2791
|
method: "get",
|
@@ -1039,11 +2828,36 @@ const getRecord = (variables, signal) => dataPlaneFetch({
|
|
1039
2828
|
...variables,
|
1040
2829
|
signal
|
1041
2830
|
});
|
1042
|
-
const insertRecordWithID = (variables, signal) => dataPlaneFetch({
|
1043
|
-
|
1044
|
-
|
1045
|
-
|
1046
|
-
|
2831
|
+
const insertRecordWithID = (variables, signal) => dataPlaneFetch({
|
2832
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2833
|
+
method: "put",
|
2834
|
+
...variables,
|
2835
|
+
signal
|
2836
|
+
});
|
2837
|
+
const updateRecordWithID = (variables, signal) => dataPlaneFetch({
|
2838
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2839
|
+
method: "patch",
|
2840
|
+
...variables,
|
2841
|
+
signal
|
2842
|
+
});
|
2843
|
+
const upsertRecordWithID = (variables, signal) => dataPlaneFetch({
|
2844
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2845
|
+
method: "post",
|
2846
|
+
...variables,
|
2847
|
+
signal
|
2848
|
+
});
|
2849
|
+
const deleteRecord = (variables, signal) => dataPlaneFetch({
|
2850
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2851
|
+
method: "delete",
|
2852
|
+
...variables,
|
2853
|
+
signal
|
2854
|
+
});
|
2855
|
+
const bulkInsertTableRecords = (variables, signal) => dataPlaneFetch({
|
2856
|
+
url: "/db/{dbBranchName}/tables/{tableName}/bulk",
|
2857
|
+
method: "post",
|
2858
|
+
...variables,
|
2859
|
+
signal
|
2860
|
+
});
|
1047
2861
|
const queryTable = (variables, signal) => dataPlaneFetch({
|
1048
2862
|
url: "/db/{dbBranchName}/tables/{tableName}/query",
|
1049
2863
|
method: "post",
|
@@ -1062,16 +2876,36 @@ const searchTable = (variables, signal) => dataPlaneFetch({
|
|
1062
2876
|
...variables,
|
1063
2877
|
signal
|
1064
2878
|
});
|
1065
|
-
const vectorSearchTable = (variables, signal) => dataPlaneFetch({
|
2879
|
+
const vectorSearchTable = (variables, signal) => dataPlaneFetch({
|
2880
|
+
url: "/db/{dbBranchName}/tables/{tableName}/vectorSearch",
|
2881
|
+
method: "post",
|
2882
|
+
...variables,
|
2883
|
+
signal
|
2884
|
+
});
|
1066
2885
|
const askTable = (variables, signal) => dataPlaneFetch({
|
1067
2886
|
url: "/db/{dbBranchName}/tables/{tableName}/ask",
|
1068
2887
|
method: "post",
|
1069
2888
|
...variables,
|
1070
2889
|
signal
|
1071
2890
|
});
|
1072
|
-
const askTableSession = (variables, signal) => dataPlaneFetch({
|
1073
|
-
|
1074
|
-
|
2891
|
+
const askTableSession = (variables, signal) => dataPlaneFetch({
|
2892
|
+
url: "/db/{dbBranchName}/tables/{tableName}/ask/{sessionId}",
|
2893
|
+
method: "post",
|
2894
|
+
...variables,
|
2895
|
+
signal
|
2896
|
+
});
|
2897
|
+
const summarizeTable = (variables, signal) => dataPlaneFetch({
|
2898
|
+
url: "/db/{dbBranchName}/tables/{tableName}/summarize",
|
2899
|
+
method: "post",
|
2900
|
+
...variables,
|
2901
|
+
signal
|
2902
|
+
});
|
2903
|
+
const aggregateTable = (variables, signal) => dataPlaneFetch({
|
2904
|
+
url: "/db/{dbBranchName}/tables/{tableName}/aggregate",
|
2905
|
+
method: "post",
|
2906
|
+
...variables,
|
2907
|
+
signal
|
2908
|
+
});
|
1075
2909
|
const fileAccess = (variables, signal) => dataPlaneFetch({
|
1076
2910
|
url: "/file/{fileId}",
|
1077
2911
|
method: "get",
|
@@ -1090,9 +2924,18 @@ const sqlQuery = (variables, signal) => dataPlaneFetch({
|
|
1090
2924
|
...variables,
|
1091
2925
|
signal
|
1092
2926
|
});
|
2927
|
+
const sqlBatchQuery = (variables, signal) => dataPlaneFetch({
|
2928
|
+
url: "/db/{dbBranchName}/sql/batch",
|
2929
|
+
method: "post",
|
2930
|
+
...variables,
|
2931
|
+
signal
|
2932
|
+
});
|
1093
2933
|
const operationsByTag$2 = {
|
1094
2934
|
migrations: {
|
1095
2935
|
applyMigration,
|
2936
|
+
startMigration,
|
2937
|
+
completeMigration,
|
2938
|
+
rollbackMigration,
|
1096
2939
|
adaptTable,
|
1097
2940
|
adaptAllTables,
|
1098
2941
|
getBranchMigrationJobStatus,
|
@@ -1157,7 +3000,16 @@ const operationsByTag$2 = {
|
|
1157
3000
|
deleteRecord,
|
1158
3001
|
bulkInsertTableRecords
|
1159
3002
|
},
|
1160
|
-
files: {
|
3003
|
+
files: {
|
3004
|
+
getFileItem,
|
3005
|
+
putFileItem,
|
3006
|
+
deleteFileItem,
|
3007
|
+
getFile,
|
3008
|
+
putFile,
|
3009
|
+
deleteFile,
|
3010
|
+
fileAccess,
|
3011
|
+
fileUpload
|
3012
|
+
},
|
1161
3013
|
searchAndFilter: {
|
1162
3014
|
queryTable,
|
1163
3015
|
searchBranch,
|
@@ -1168,7 +3020,7 @@ const operationsByTag$2 = {
|
|
1168
3020
|
summarizeTable,
|
1169
3021
|
aggregateTable
|
1170
3022
|
},
|
1171
|
-
sql: { sqlQuery }
|
3023
|
+
sql: { sqlQuery, sqlBatchQuery }
|
1172
3024
|
};
|
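sqlBatchQuery is the single addition to the sql tag, wrapping POST /db/{dbBranchName}/sql/batch alongside the existing sqlQuery; its payload type lives in the package's type declarations rather than this file. A hedged sketch of invoking the wrapper, mirroring the variables/signal shape used by every other operation here:

    // Illustrative only — the batch body schema is not shown in this diff:
    await sqlBatchQuery(
      { pathParams: { dbBranchName: "mydb:main" }, body: { /* batched SQL statements */ } },
      signal
    );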
1173
3025
|
|
1174
3026
|
const controlPlaneFetch = async (options) => fetch$1({ ...options, endpoint: "controlPlane" });
|
@@ -1235,7 +3087,12 @@ const deleteOAuthAccessToken = (variables, signal) => controlPlaneFetch({
|
|
1235
3087
|
...variables,
|
1236
3088
|
signal
|
1237
3089
|
});
|
1238
|
-
const updateOAuthAccessToken = (variables, signal) => controlPlaneFetch({
|
3090
|
+
const updateOAuthAccessToken = (variables, signal) => controlPlaneFetch({
|
3091
|
+
url: "/user/oauth/tokens/{token}",
|
3092
|
+
method: "patch",
|
3093
|
+
...variables,
|
3094
|
+
signal
|
3095
|
+
});
|
1239
3096
|
const getWorkspacesList = (variables, signal) => controlPlaneFetch({
|
1240
3097
|
url: "/workspaces",
|
1241
3098
|
method: "get",
|
@@ -1266,49 +3123,150 @@ const deleteWorkspace = (variables, signal) => controlPlaneFetch({
|
|
1266
3123
|
...variables,
|
1267
3124
|
signal
|
1268
3125
|
});
|
1269
|
-
const getWorkspaceSettings = (variables, signal) => controlPlaneFetch({
|
1270
|
-
|
1271
|
-
|
1272
|
-
|
3126
|
+
const getWorkspaceSettings = (variables, signal) => controlPlaneFetch({
|
3127
|
+
url: "/workspaces/{workspaceId}/settings",
|
3128
|
+
method: "get",
|
3129
|
+
...variables,
|
3130
|
+
signal
|
3131
|
+
});
|
3132
|
+
const updateWorkspaceSettings = (variables, signal) => controlPlaneFetch({
|
3133
|
+
url: "/workspaces/{workspaceId}/settings",
|
3134
|
+
method: "patch",
|
3135
|
+
...variables,
|
3136
|
+
signal
|
3137
|
+
});
|
3138
|
+
const getWorkspaceMembersList = (variables, signal) => controlPlaneFetch({
|
3139
|
+
url: "/workspaces/{workspaceId}/members",
|
3140
|
+
method: "get",
|
3141
|
+
...variables,
|
3142
|
+
signal
|
3143
|
+
});
|
3144
|
+
const updateWorkspaceMemberRole = (variables, signal) => controlPlaneFetch({
|
3145
|
+
url: "/workspaces/{workspaceId}/members/{userId}",
|
3146
|
+
method: "put",
|
3147
|
+
...variables,
|
3148
|
+
signal
|
3149
|
+
});
|
1273
3150
|
const removeWorkspaceMember = (variables, signal) => controlPlaneFetch({
|
1274
3151
|
url: "/workspaces/{workspaceId}/members/{userId}",
|
1275
3152
|
method: "delete",
|
1276
3153
|
...variables,
|
1277
3154
|
signal
|
1278
3155
|
});
|
1279
|
-
const inviteWorkspaceMember = (variables, signal) => controlPlaneFetch({
|
1280
|
-
|
1281
|
-
|
1282
|
-
|
1283
|
-
|
1284
|
-
|
1285
|
-
const
|
3156
|
+
const inviteWorkspaceMember = (variables, signal) => controlPlaneFetch({
|
3157
|
+
url: "/workspaces/{workspaceId}/invites",
|
3158
|
+
method: "post",
|
3159
|
+
...variables,
|
3160
|
+
signal
|
3161
|
+
});
|
3162
|
+
const updateWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3163
|
+
url: "/workspaces/{workspaceId}/invites/{inviteId}",
|
3164
|
+
method: "patch",
|
3165
|
+
...variables,
|
3166
|
+
signal
|
3167
|
+
});
|
3168
|
+
const cancelWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3169
|
+
url: "/workspaces/{workspaceId}/invites/{inviteId}",
|
3170
|
+
method: "delete",
|
3171
|
+
...variables,
|
3172
|
+
signal
|
3173
|
+
});
|
3174
|
+
const acceptWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3175
|
+
url: "/workspaces/{workspaceId}/invites/{inviteKey}/accept",
|
3176
|
+
method: "post",
|
3177
|
+
...variables,
|
3178
|
+
signal
|
3179
|
+
});
|
3180
|
+
const resendWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3181
|
+
url: "/workspaces/{workspaceId}/invites/{inviteId}/resend",
|
3182
|
+
method: "post",
|
3183
|
+
...variables,
|
3184
|
+
signal
|
3185
|
+
});
|
3186
|
+
const listClusters = (variables, signal) => controlPlaneFetch({
|
3187
|
+
url: "/workspaces/{workspaceId}/clusters",
|
3188
|
+
method: "get",
|
3189
|
+
...variables,
|
3190
|
+
signal
|
3191
|
+
});
|
3192
|
+
const createCluster = (variables, signal) => controlPlaneFetch({
|
3193
|
+
url: "/workspaces/{workspaceId}/clusters",
|
3194
|
+
method: "post",
|
3195
|
+
...variables,
|
3196
|
+
signal
|
3197
|
+
});
|
1286
3198
|
const getCluster = (variables, signal) => controlPlaneFetch({
|
1287
3199
|
url: "/workspaces/{workspaceId}/clusters/{clusterId}",
|
1288
3200
|
method: "get",
|
1289
3201
|
...variables,
|
1290
3202
|
signal
|
1291
3203
|
});
|
1292
|
-
const updateCluster = (variables, signal) => controlPlaneFetch({
|
3204
|
+
const updateCluster = (variables, signal) => controlPlaneFetch({
|
3205
|
+
url: "/workspaces/{workspaceId}/clusters/{clusterId}",
|
3206
|
+
method: "patch",
|
3207
|
+
...variables,
|
3208
|
+
signal
|
3209
|
+
});
|
3210
|
+
const deleteCluster = (variables, signal) => controlPlaneFetch({
|
3211
|
+
url: "/workspaces/{workspaceId}/clusters/{clusterId}",
|
3212
|
+
method: "delete",
|
3213
|
+
...variables,
|
3214
|
+
signal
|
3215
|
+
});
|
1293
3216
|
const getDatabaseList = (variables, signal) => controlPlaneFetch({
|
1294
3217
|
url: "/workspaces/{workspaceId}/dbs",
|
1295
3218
|
method: "get",
|
1296
3219
|
...variables,
|
1297
3220
|
signal
|
1298
3221
|
});
|
1299
|
-
const createDatabase = (variables, signal) => controlPlaneFetch({
|
3222
|
+
const createDatabase = (variables, signal) => controlPlaneFetch({
|
3223
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
3224
|
+
method: "put",
|
3225
|
+
...variables,
|
3226
|
+
signal
|
3227
|
+
});
|
1300
3228
|
const deleteDatabase = (variables, signal) => controlPlaneFetch({
|
1301
3229
|
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
1302
3230
|
method: "delete",
|
1303
3231
|
...variables,
|
1304
3232
|
signal
|
1305
3233
|
});
|
1306
|
-
const getDatabaseMetadata = (variables, signal) => controlPlaneFetch({
|
1307
|
-
|
1308
|
-
|
1309
|
-
|
1310
|
-
|
1311
|
-
|
3234
|
+
const getDatabaseMetadata = (variables, signal) => controlPlaneFetch({
|
3235
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
3236
|
+
method: "get",
|
3237
|
+
...variables,
|
3238
|
+
signal
|
3239
|
+
});
|
3240
|
+
const updateDatabaseMetadata = (variables, signal) => controlPlaneFetch({
|
3241
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
3242
|
+
method: "patch",
|
3243
|
+
...variables,
|
3244
|
+
signal
|
3245
|
+
});
|
3246
|
+
const renameDatabase = (variables, signal) => controlPlaneFetch({
|
3247
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/rename",
|
3248
|
+
method: "post",
|
3249
|
+
...variables,
|
3250
|
+
signal
|
3251
|
+
});
|
3252
|
+
const getDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
|
3253
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
|
3254
|
+
method: "get",
|
3255
|
+
...variables,
|
3256
|
+
signal
|
3257
|
+
});
|
3258
|
+
const updateDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
|
3259
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
|
3260
|
+
method: "put",
|
3261
|
+
...variables,
|
3262
|
+
signal
|
3263
|
+
});
|
3264
|
+
const deleteDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
|
3265
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
|
3266
|
+
method: "delete",
|
3267
|
+
...variables,
|
3268
|
+
signal
|
3269
|
+
});
|
1312
3270
|
const listRegions = (variables, signal) => controlPlaneFetch({
|
1313
3271
|
url: "/workspaces/{workspaceId}/regions",
|
1314
3272
|
method: "get",
|
@@ -1346,7 +3304,13 @@ const operationsByTag$1 = {
|
|
1346
3304
|
acceptWorkspaceMemberInvite,
|
1347
3305
|
resendWorkspaceMemberInvite
|
1348
3306
|
},
|
1349
|
-
xbcontrolOther: {
|
3307
|
+
xbcontrolOther: {
|
3308
|
+
listClusters,
|
3309
|
+
createCluster,
|
3310
|
+
getCluster,
|
3311
|
+
updateCluster,
|
3312
|
+
deleteCluster
|
3313
|
+
},
|
1350
3314
|
databases: {
|
1351
3315
|
getDatabaseList,
|
1352
3316
|
createDatabase,
|
@@ -1366,7 +3330,7 @@ const operationsByTag = deepMerge(operationsByTag$2, operationsByTag$1);
|
|
1366
3330
|
const buildApiClient = () => class {
|
1367
3331
|
constructor(options = {}) {
|
1368
3332
|
const provider = options.host ?? "production";
|
1369
|
-
const apiKey = options.apiKey
|
3333
|
+
const apiKey = options.apiKey;
|
1370
3334
|
const trace = options.trace ?? defaultTrace;
|
1371
3335
|
const clientID = generateUUID();
|
1372
3336
|
if (!apiKey) {
|
@@ -1433,8 +3397,7 @@ function buildTransformString(transformations) {
|
|
1433
3397
|
).join(",");
|
1434
3398
|
}
|
1435
3399
|
function transformImage(url, ...transformations) {
|
1436
|
-
if (!isDefined(url))
|
1437
|
-
return void 0;
|
3400
|
+
if (!isDefined(url)) return void 0;
|
1438
3401
|
const newTransformations = buildTransformString(transformations);
|
1439
3402
|
const { hostname, pathname, search } = new URL(url);
|
1440
3403
|
const pathParts = pathname.split("/");
|
@@ -1547,8 +3510,7 @@ class XataFile {
|
|
1547
3510
|
}
|
1548
3511
|
}
|
1549
3512
|
const parseInputFileEntry = async (entry) => {
|
1550
|
-
if (!isDefined(entry))
|
1551
|
-
return null;
|
3513
|
+
if (!isDefined(entry)) return null;
|
1552
3514
|
const { id, name, mediaType, base64Content, enablePublicUrl, signedUrlTimeout, uploadUrlTimeout } = await entry;
|
1553
3515
|
return compactObject({
|
1554
3516
|
id,
|
@@ -1563,24 +3525,19 @@ const parseInputFileEntry = async (entry) => {
|
|
1563
3525
|
};
|
1564
3526
|
|
1565
3527
|
function cleanFilter(filter) {
|
1566
|
-
if (!isDefined(filter))
|
1567
|
-
|
1568
|
-
if (!isObject(filter))
|
1569
|
-
return filter;
|
3528
|
+
if (!isDefined(filter)) return void 0;
|
3529
|
+
if (!isObject(filter)) return filter;
|
1570
3530
|
const values = Object.fromEntries(
|
1571
3531
|
Object.entries(filter).reduce((acc, [key, value]) => {
|
1572
|
-
if (!isDefined(value))
|
1573
|
-
return acc;
|
3532
|
+
if (!isDefined(value)) return acc;
|
1574
3533
|
if (Array.isArray(value)) {
|
1575
3534
|
const clean = value.map((item) => cleanFilter(item)).filter((item) => isDefined(item));
|
1576
|
-
if (clean.length === 0)
|
1577
|
-
return acc;
|
3535
|
+
if (clean.length === 0) return acc;
|
1578
3536
|
return [...acc, [key, clean]];
|
1579
3537
|
}
|
1580
3538
|
if (isObject(value)) {
|
1581
3539
|
const clean = cleanFilter(value);
|
1582
|
-
if (!isDefined(clean))
|
1583
|
-
return acc;
|
3540
|
+
if (!isDefined(clean)) return acc;
|
1584
3541
|
return [...acc, [key, clean]];
|
1585
3542
|
}
|
1586
3543
|
return [...acc, [key, value]];
|
@@ -1590,10 +3547,8 @@ function cleanFilter(filter) {
|
|
1590
3547
|
}
|
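cleanFilter's reducer (in the hunk above) skips undefined entries, cleans array items and discards arrays that clean down to nothing, and recurses into nested objects; the function's final return falls between the two hunks. For the reducer logic that is visible:

    // cleanFilter({ a: 1, b: undefined, c: [undefined, 2], d: [undefined] })
    //   keeps a: 1          plain values pass through
    //   drops b             undefined entries are skipped
    //   keeps c as [2]      array items are cleaned, defined ones kept
    //   drops d             the array cleans to [], so the key is dropped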
1591
3548
|
|
1592
3549
|
function stringifyJson(value) {
|
1593
|
-
if (!isDefined(value))
|
1594
|
-
|
1595
|
-
if (isString(value))
|
1596
|
-
return value;
|
3550
|
+
if (!isDefined(value)) return value;
|
3551
|
+
if (isString(value)) return value;
|
1597
3552
|
try {
|
1598
3553
|
return JSON.stringify(value);
|
1599
3554
|
} catch (e) {
|
@@ -1608,28 +3563,17 @@ function parseJson(value) {
|
|
1608
3563
|
}
|
1609
3564
|
}
|
1610
3565
|
|
1611
|
-
var
|
1612
|
-
|
1613
|
-
throw TypeError("Cannot " + msg);
|
1614
|
-
};
|
1615
|
-
var __privateGet$4 = (obj, member, getter) => {
|
1616
|
-
__accessCheck$5(obj, member, "read from private field");
|
1617
|
-
return getter ? getter.call(obj) : member.get(obj);
|
1618
|
-
};
|
1619
|
-
var __privateAdd$5 = (obj, member, value) => {
|
1620
|
-
if (member.has(obj))
|
1621
|
-
throw TypeError("Cannot add the same private member more than once");
|
1622
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
1623
|
-
};
|
1624
|
-
var __privateSet$3 = (obj, member, value, setter) => {
|
1625
|
-
__accessCheck$5(obj, member, "write to private field");
|
1626
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
1627
|
-
return value;
|
3566
|
+
var __typeError$5 = (msg) => {
|
3567
|
+
throw TypeError(msg);
|
1628
3568
|
};
|
3569
|
+
var __accessCheck$5 = (obj, member, msg) => member.has(obj) || __typeError$5("Cannot " + msg);
|
3570
|
+
var __privateGet$4 = (obj, member, getter) => (__accessCheck$5(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
3571
|
+
var __privateAdd$5 = (obj, member, value) => member.has(obj) ? __typeError$5("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
3572
|
+
var __privateSet$3 = (obj, member, value, setter) => (__accessCheck$5(obj, member, "write to private field"), member.set(obj, value), value);
|
1629
3573
|
var _query, _page;
|
1630
3574
|
class Page {
|
1631
3575
|
constructor(query, meta, records = []) {
|
1632
|
-
__privateAdd$5(this, _query, void 0);
|
3576
|
+
__privateAdd$5(this, _query);
|
1633
3577
|
__privateSet$3(this, _query, query);
|
1634
3578
|
this.meta = meta;
|
1635
3579
|
this.records = new PageRecordArray(this, records);
|
@@ -1716,7 +3660,7 @@ class RecordArray extends Array {
|
|
1716
3660
|
const _PageRecordArray = class _PageRecordArray extends Array {
|
1717
3661
|
constructor(...args) {
|
1718
3662
|
super(..._PageRecordArray.parseConstructorParams(...args));
|
1719
|
-
__privateAdd$5(this, _page, void 0);
|
3663
|
+
__privateAdd$5(this, _page);
|
1720
3664
|
__privateSet$3(this, _page, isObject(args[0]?.meta) ? args[0] : { meta: { page: { cursor: "", more: false } }, records: [] });
|
1721
3665
|
}
|
1722
3666
|
static parseConstructorParams(...args) {
|
@@ -1787,34 +3731,20 @@ const _PageRecordArray = class _PageRecordArray extends Array {
|
|
1787
3731
|
_page = new WeakMap();
|
1788
3732
|
let PageRecordArray = _PageRecordArray;
|
1789
3733
|
|
1790
|
-
var __accessCheck$4 = (obj, member, msg) => {
|
1791
|
-
if (!member.has(obj))
|
1792
|
-
throw TypeError("Cannot " + msg);
|
1793
|
-
};
|
1794
|
-
var __privateGet$3 = (obj, member, getter) => {
|
1795
|
-
__accessCheck$4(obj, member, "read from private field");
|
1796
|
-
return getter ? getter.call(obj) : member.get(obj);
|
1797
|
-
};
|
1798
|
-
var __privateAdd$4 = (obj, member, value) => {
|
1799
|
-
if (member.has(obj))
|
1800
|
-
throw TypeError("Cannot add the same private member more than once");
|
1801
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
1802
|
-
};
|
1803
|
-
var __privateSet$2 = (obj, member, value, setter) => {
|
1804
|
-
__accessCheck$4(obj, member, "write to private field");
|
1805
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
1806
|
-
return value;
|
1807
|
-
};
|
1808
|
-
var __privateMethod$3 = (obj, member, method) => {
|
1809
|
-
__accessCheck$4(obj, member, "access private method");
|
1810
|
-
return method;
|
3734
|
+
var __typeError$4 = (msg) => {
|
3735
|
+
throw TypeError(msg);
|
1811
3736
|
};
|
1812
|
-
var _table$1, _repository, _data, _cleanFilterConstraint, cleanFilterConstraint_fn;
|
3737
|
+
var __accessCheck$4 = (obj, member, msg) => member.has(obj) || __typeError$4("Cannot " + msg);
|
3738
|
+
var __privateGet$3 = (obj, member, getter) => (__accessCheck$4(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
3739
|
+
var __privateAdd$4 = (obj, member, value) => member.has(obj) ? __typeError$4("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
3740
|
+
var __privateSet$2 = (obj, member, value, setter) => (__accessCheck$4(obj, member, "write to private field"), member.set(obj, value), value);
|
3741
|
+
var __privateMethod$3 = (obj, member, method) => (__accessCheck$4(obj, member, "access private method"), method);
|
3742
|
+
var _table$1, _repository, _data, _Query_instances, cleanFilterConstraint_fn;
|
1813
3743
|
const _Query = class _Query {
|
1814
3744
|
constructor(repository, table, data, rawParent) {
|
1815
|
-
__privateAdd$4(this, _cleanFilterConstraint);
|
1816
|
-
__privateAdd$4(this, _table$1, void 0);
|
1817
|
-
__privateAdd$4(this, _repository, void 0);
|
3745
|
+
__privateAdd$4(this, _Query_instances);
|
3746
|
+
__privateAdd$4(this, _table$1);
|
3747
|
+
__privateAdd$4(this, _repository);
|
1818
3748
|
__privateAdd$4(this, _data, { filter: {} });
|
1819
3749
|
// Implements pagination
|
1820
3750
|
this.meta = { page: { cursor: "start", more: true, size: PAGINATION_DEFAULT_SIZE } };
|
@@ -1892,12 +3822,12 @@ const _Query = class _Query {
|
|
1892
3822
|
filter(a, b) {
|
1893
3823
|
if (arguments.length === 1) {
|
1894
3824
|
const constraints = Object.entries(a ?? {}).map(([column, constraint]) => ({
|
1895
|
-
[column]: __privateMethod$3(this, _cleanFilterConstraint, cleanFilterConstraint_fn).call(this, column, constraint)
|
3825
|
+
[column]: __privateMethod$3(this, _Query_instances, cleanFilterConstraint_fn).call(this, column, constraint)
|
1896
3826
|
}));
|
1897
3827
|
const $all = compact([__privateGet$3(this, _data).filter?.$all].flat().concat(constraints));
|
1898
3828
|
return new _Query(__privateGet$3(this, _repository), __privateGet$3(this, _table$1), { filter: { $all } }, __privateGet$3(this, _data));
|
1899
3829
|
} else {
|
1900
|
-
const constraints = isDefined(a) && isDefined(b) ? [{ [a]: __privateMethod$3(this, _cleanFilterConstraint, cleanFilterConstraint_fn).call(this, a, b) }] : void 0;
|
3830
|
+
const constraints = isDefined(a) && isDefined(b) ? [{ [a]: __privateMethod$3(this, _Query_instances, cleanFilterConstraint_fn).call(this, a, b) }] : void 0;
|
1901
3831
|
const $all = compact([__privateGet$3(this, _data).filter?.$all].flat().concat(constraints));
|
1902
3832
|
return new _Query(__privateGet$3(this, _repository), __privateGet$3(this, _table$1), { filter: { $all } }, __privateGet$3(this, _data));
|
1903
3833
|
}
|
@@ -1976,8 +3906,7 @@ const _Query = class _Query {
|
|
1976
3906
|
}
|
1977
3907
|
async getFirstOrThrow(options = {}) {
|
1978
3908
|
const records = await this.getMany({ ...options, pagination: { size: 1 } });
|
1979
|
-
if (records[0] === void 0)
|
1980
|
-
throw new Error("No results found.");
|
3909
|
+
if (records[0] === void 0) throw new Error("No results found.");
|
1981
3910
|
return records[0];
|
1982
3911
|
}
|
1983
3912
|
async summarize(params = {}) {
|
@@ -2032,7 +3961,7 @@ const _Query = class _Query {
|
|
2032
3961
|
_table$1 = new WeakMap();
|
2033
3962
|
_repository = new WeakMap();
|
2034
3963
|
_data = new WeakMap();
|
2035
|
-
_cleanFilterConstraint = new WeakSet();
|
3964
|
+
_Query_instances = new WeakSet();
|
2036
3965
|
cleanFilterConstraint_fn = function(column, value) {
|
2037
3966
|
const columnType = __privateGet$3(this, _table$1).schema?.columns.find(({ name }) => name === column)?.type;
|
2038
3967
|
if (columnType === "multiple" && (isString(value) || isStringArray(value))) {
|
@@ -2093,8 +4022,7 @@ function isSortFilterString(value) {
|
|
2093
4022
|
}
|
2094
4023
|
function isSortFilterBase(filter) {
|
2095
4024
|
return isObject(filter) && Object.entries(filter).every(([key, value]) => {
|
2096
|
-
if (key === "*")
|
2097
|
-
return value === "random";
|
4025
|
+
if (key === "*") return value === "random";
|
2098
4026
|
return value === "asc" || value === "desc";
|
2099
4027
|
});
|
2100
4028
|
}
|
@@ -2115,29 +4043,15 @@ function buildSortFilter(filter) {
|
|
2115
4043
|
}
|
2116
4044
|
}
|
2117
4045
|
|
2118
|
-
var __accessCheck$3 = (obj, member, msg) => {
|
2119
|
-
if (!member.has(obj))
|
2120
|
-
throw TypeError("Cannot " + msg);
|
2121
|
-
};
|
2122
|
-
var __privateGet$2 = (obj, member, getter) => {
|
2123
|
-
__accessCheck$3(obj, member, "read from private field");
|
2124
|
-
return getter ? getter.call(obj) : member.get(obj);
|
2125
|
-
};
|
2126
|
-
var __privateAdd$3 = (obj, member, value) => {
|
2127
|
-
if (member.has(obj))
|
2128
|
-
throw TypeError("Cannot add the same private member more than once");
|
2129
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
4046
|
+
var __typeError$3 = (msg) => {
|
4047
|
+
throw TypeError(msg);
|
2130
4048
|
};
|
2131
|
-
var __privateSet$1 = (obj, member, value, setter) => {
|
2132
|
-
__accessCheck$3(obj, member, "write to private field");
|
2133
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
2134
|
-
return value;
|
2135
|
-
};
|
2136
|
-
var __privateMethod$2 = (obj, member, method) => {
|
2137
|
-
__accessCheck$3(obj, member, "access private method");
|
2138
|
-
return method;
|
2139
|
-
};
|
2140
|
-
var _table, _getFetchProps, _db, _schemaTables, _trace, _insertRecordWithoutId, insertRecordWithoutId_fn, _insertRecordWithId, insertRecordWithId_fn, _insertRecords, insertRecords_fn, _updateRecordWithID, updateRecordWithID_fn, _updateRecords, updateRecords_fn, _upsertRecordWithID, upsertRecordWithID_fn, _deleteRecord, deleteRecord_fn, _deleteRecords, deleteRecords_fn, _getSchemaTables, getSchemaTables_fn, _transformObjectToApi, transformObjectToApi_fn;
|
4049
|
+
var __accessCheck$3 = (obj, member, msg) => member.has(obj) || __typeError$3("Cannot " + msg);
|
4050
|
+
var __privateGet$2 = (obj, member, getter) => (__accessCheck$3(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
4051
|
+
var __privateAdd$3 = (obj, member, value) => member.has(obj) ? __typeError$3("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
4052
|
+
var __privateSet$1 = (obj, member, value, setter) => (__accessCheck$3(obj, member, "write to private field"), member.set(obj, value), value);
|
4053
|
+
var __privateMethod$2 = (obj, member, method) => (__accessCheck$3(obj, member, "access private method"), method);
|
4054
|
+
var _table, _getFetchProps, _db, _schemaTables, _trace, _RestRepository_instances, insertRecordWithoutId_fn, insertRecordWithId_fn, insertRecords_fn, updateRecordWithID_fn, updateRecords_fn, upsertRecordWithID_fn, deleteRecord_fn, deleteRecords_fn, getSchemaTables_fn, transformObjectToApi_fn;
|
2141
4055
|
const BULK_OPERATION_MAX_SIZE = 1e3;
|
2142
4056
|
class Repository extends Query {
|
2143
4057
|
}
|
@@ -2148,21 +4062,12 @@ class RestRepository extends Query {
|
|
2148
4062
|
{ name: options.table, schema: options.schemaTables?.find((table) => table.name === options.table) },
|
2149
4063
|
{}
|
2150
4064
|
);
|
2151
|
-
__privateAdd$3(this, _insertRecordWithoutId);
|
2152
|
-
__privateAdd$3(this, _insertRecordWithId);
|
2153
|
-
__privateAdd$3(this, _insertRecords);
|
2154
|
-
__privateAdd$3(this, _updateRecordWithID);
|
2155
|
-
__privateAdd$3(this, _updateRecords);
|
2156
|
-
__privateAdd$3(this, _upsertRecordWithID);
|
2157
|
-
__privateAdd$3(this, _deleteRecord);
|
2158
|
-
__privateAdd$3(this, _deleteRecords);
|
2159
|
-
__privateAdd$3(this, _getSchemaTables);
|
2160
|
-
__privateAdd$3(this, _transformObjectToApi);
|
2161
|
-
__privateAdd$3(this, _table, void 0);
|
2162
|
-
__privateAdd$3(this, _getFetchProps, void 0);
|
2163
|
-
__privateAdd$3(this, _db, void 0);
|
2164
|
-
__privateAdd$3(this, _schemaTables, void 0);
|
2165
|
-
__privateAdd$3(this, _trace, void 0);
|
4065
|
+
__privateAdd$3(this, _RestRepository_instances);
|
4066
|
+
__privateAdd$3(this, _table);
|
4067
|
+
__privateAdd$3(this, _getFetchProps);
|
4068
|
+
__privateAdd$3(this, _db);
|
4069
|
+
__privateAdd$3(this, _schemaTables);
|
4070
|
+
__privateAdd$3(this, _trace);
|
2166
4071
|
__privateSet$1(this, _table, options.table);
|
2167
4072
|
__privateSet$1(this, _db, options.db);
|
2168
4073
|
__privateSet$1(this, _schemaTables, options.schemaTables);
|
@@ -2181,31 +4086,28 @@ class RestRepository extends Query {
|
|
2181
4086
|
return __privateGet$2(this, _trace).call(this, "create", async () => {
|
2182
4087
|
const ifVersion = parseIfVersion(b, c, d);
|
2183
4088
|
if (Array.isArray(a)) {
|
2184
|
-
if (a.length === 0)
|
2185
|
-
return [];
|
2186
|
-
const ids = await __privateMethod$2(this, _insertRecords, insertRecords_fn).call(this, a, { ifVersion, createOnly: true });
|
4089
|
+
if (a.length === 0) return [];
|
4090
|
+
const ids = await __privateMethod$2(this, _RestRepository_instances, insertRecords_fn).call(this, a, { ifVersion, createOnly: true });
|
2187
4091
|
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
2188
4092
|
const result = await this.read(ids, columns);
|
2189
4093
|
return result;
|
2190
4094
|
}
|
2191
4095
|
if (isString(a) && isObject(b)) {
|
2192
|
-
if (a === "")
|
2193
|
-
throw new Error("The id can't be empty");
|
4096
|
+
if (a === "") throw new Error("The id can't be empty");
|
2194
4097
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2195
|
-
return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: true, ifVersion });
|
4098
|
+
return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: true, ifVersion });
|
2196
4099
|
}
|
2197
4100
|
if (isObject(a) && isString(a.xata_id)) {
|
2198
|
-
if (a.xata_id === "")
|
2199
|
-
throw new Error("The id can't be empty");
|
4101
|
+
if (a.xata_id === "") throw new Error("The id can't be empty");
|
2200
4102
|
const columns = isValidSelectableColumns(b) ? b : void 0;
|
2201
|
-
return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, {
|
4103
|
+
return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, {
|
2202
4104
|
createOnly: true,
|
2203
4105
|
ifVersion
|
2204
4106
|
});
|
2205
4107
|
}
|
2206
4108
|
if (isObject(a)) {
|
2207
4109
|
const columns = isValidSelectableColumns(b) ? b : void 0;
|
2208
|
-
return __privateMethod$2(this, _insertRecordWithoutId, insertRecordWithoutId_fn).call(this, a, columns);
|
4110
|
+
return __privateMethod$2(this, _RestRepository_instances, insertRecordWithoutId_fn).call(this, a, columns);
|
2209
4111
|
}
|
2210
4112
|
throw new Error("Invalid arguments for create method");
|
2211
4113
|
});
|
@@ -2214,8 +4116,7 @@ class RestRepository extends Query {
|
|
2214
4116
|
return __privateGet$2(this, _trace).call(this, "read", async () => {
|
2215
4117
|
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
2216
4118
|
if (Array.isArray(a)) {
|
2217
|
-
if (a.length === 0)
|
2218
|
-
return [];
|
4119
|
+
if (a.length === 0) return [];
|
2219
4120
|
const ids = a.map((item) => extractId(item));
|
2220
4121
|
const finalObjects = await this.getAll({ filter: { xata_id: { $any: compact(ids) } }, columns });
|
2221
4122
|
const dictionary = finalObjects.reduce((acc, object) => {
|
@@ -2238,7 +4139,7 @@ class RestRepository extends Query {
|
|
2238
4139
|
queryParams: { columns },
|
2239
4140
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2240
4141
|
});
|
2241
|
-
const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
|
4142
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2242
4143
|
return initObject(
|
2243
4144
|
__privateGet$2(this, _db),
|
2244
4145
|
schemaTables,
|
@@ -2279,11 +4180,10 @@ class RestRepository extends Query {
|
|
2279
4180
|
return __privateGet$2(this, _trace).call(this, "update", async () => {
|
2280
4181
|
const ifVersion = parseIfVersion(b, c, d);
|
2281
4182
|
if (Array.isArray(a)) {
|
2282
|
-
if (a.length === 0)
|
2283
|
-
return [];
|
4183
|
+
if (a.length === 0) return [];
|
2284
4184
|
const existing = await this.read(a, ["xata_id"]);
|
2285
4185
|
const updates = a.filter((_item, index) => existing[index] !== null);
|
2286
|
-
await __privateMethod$2(this, _updateRecords, updateRecords_fn).call(this, updates, {
|
4186
|
+
await __privateMethod$2(this, _RestRepository_instances, updateRecords_fn).call(this, updates, {
|
2287
4187
|
ifVersion,
|
2288
4188
|
upsert: false
|
2289
4189
|
});
|
@@ -2294,15 +4194,14 @@ class RestRepository extends Query {
|
|
2294
4194
|
try {
|
2295
4195
|
if (isString(a) && isObject(b)) {
|
2296
4196
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2297
|
-
return await __privateMethod$2(this, _updateRecordWithID, updateRecordWithID_fn).call(this, a, b, columns, { ifVersion });
|
4197
|
+
return await __privateMethod$2(this, _RestRepository_instances, updateRecordWithID_fn).call(this, a, b, columns, { ifVersion });
|
2298
4198
|
}
|
2299
4199
|
if (isObject(a) && isString(a.xata_id)) {
|
2300
4200
|
const columns = isValidSelectableColumns(b) ? b : void 0;
|
2301
|
-
return await __privateMethod$2(this, _updateRecordWithID, updateRecordWithID_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, { ifVersion });
|
4201
|
+
return await __privateMethod$2(this, _RestRepository_instances, updateRecordWithID_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, { ifVersion });
|
2302
4202
|
}
|
2303
4203
|
} catch (error) {
|
2304
|
-
if (error.status === 422)
|
2305
|
-
return null;
|
4204
|
+
if (error.status === 422) return null;
|
2306
4205
|
throw error;
|
2307
4206
|
}
|
2308
4207
|
throw new Error("Invalid arguments for update method");
|
@@ -2331,9 +4230,8 @@ class RestRepository extends Query {
|
|
2331
4230
|
return __privateGet$2(this, _trace).call(this, "createOrUpdate", async () => {
|
2332
4231
|
const ifVersion = parseIfVersion(b, c, d);
|
2333
4232
|
if (Array.isArray(a)) {
|
2334
|
-
if (a.length === 0)
|
2335
|
-
return [];
|
2336
|
-
await __privateMethod$2(this, _updateRecords, updateRecords_fn).call(this, a, {
|
4233
|
+
if (a.length === 0) return [];
|
4234
|
+
await __privateMethod$2(this, _RestRepository_instances, updateRecords_fn).call(this, a, {
|
2337
4235
|
ifVersion,
|
2338
4236
|
upsert: true
|
2339
4237
|
});
|
@@ -2342,16 +4240,14 @@ class RestRepository extends Query {
|
|
2342
4240
|
return result;
|
2343
4241
|
}
|
2344
4242
|
if (isString(a) && isObject(b)) {
|
2345
|
-
if (a === "")
|
2346
|
-
throw new Error("The id can't be empty");
|
4243
|
+
if (a === "") throw new Error("The id can't be empty");
|
2347
4244
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2348
|
-
return await __privateMethod$2(this, _upsertRecordWithID, upsertRecordWithID_fn).call(this, a, b, columns, { ifVersion });
|
4245
|
+
return await __privateMethod$2(this, _RestRepository_instances, upsertRecordWithID_fn).call(this, a, b, columns, { ifVersion });
|
2349
4246
|
}
|
2350
4247
|
if (isObject(a) && isString(a.xata_id)) {
|
2351
|
-
if (a.xata_id === "")
|
2352
|
-
throw new Error("The id can't be empty");
|
4248
|
+
if (a.xata_id === "") throw new Error("The id can't be empty");
|
2353
4249
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2354
|
-
return await __privateMethod$2(this, _upsertRecordWithID, upsertRecordWithID_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, { ifVersion });
|
4250
|
+
return await __privateMethod$2(this, _RestRepository_instances, upsertRecordWithID_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, { ifVersion });
|
2355
4251
|
}
|
2356
4252
|
if (!isDefined(a) && isObject(b)) {
|
2357
4253
|
return await this.create(b, c);
|
@@ -2366,24 +4262,21 @@ class RestRepository extends Query {
|
|
2366
4262
|
return __privateGet$2(this, _trace).call(this, "createOrReplace", async () => {
|
2367
4263
|
const ifVersion = parseIfVersion(b, c, d);
|
2368
4264
|
if (Array.isArray(a)) {
|
2369
|
-
if (a.length === 0)
|
2370
|
-
return [];
|
2371
|
-
const ids = await __privateMethod$2(this, _insertRecords, insertRecords_fn).call(this, a, { ifVersion, createOnly: false });
|
4265
|
+
if (a.length === 0) return [];
|
4266
|
+
const ids = await __privateMethod$2(this, _RestRepository_instances, insertRecords_fn).call(this, a, { ifVersion, createOnly: false });
|
2372
4267
|
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
2373
4268
|
const result = await this.read(ids, columns);
|
2374
4269
|
return result;
|
2375
4270
|
}
|
2376
4271
|
if (isString(a) && isObject(b)) {
|
2377
|
-
if (a === "")
|
2378
|
-
throw new Error("The id can't be empty");
|
4272
|
+
if (a === "") throw new Error("The id can't be empty");
|
2379
4273
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2380
|
-
return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: false, ifVersion });
|
4274
|
+
return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: false, ifVersion });
|
2381
4275
|
}
|
2382
4276
|
if (isObject(a) && isString(a.xata_id)) {
|
2383
|
-
if (a.xata_id === "")
|
2384
|
-
throw new Error("The id can't be empty");
|
4277
|
+
if (a.xata_id === "") throw new Error("The id can't be empty");
|
2385
4278
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2386
|
-
return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, {
|
4279
|
+
return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, {
|
2387
4280
|
createOnly: false,
|
2388
4281
|
ifVersion
|
2389
4282
|
});
|
@@ -2400,25 +4293,22 @@ class RestRepository extends Query {
|
|
2400
4293
|
async delete(a, b) {
|
2401
4294
|
return __privateGet$2(this, _trace).call(this, "delete", async () => {
|
2402
4295
|
if (Array.isArray(a)) {
|
2403
|
-
if (a.length === 0)
|
2404
|
-
return [];
|
4296
|
+
if (a.length === 0) return [];
|
2405
4297
|
const ids = a.map((o) => {
|
2406
|
-
if (isString(o))
|
2407
|
-
return o;
|
2408
|
-
if (isString(o.xata_id))
|
2409
|
-
return o.xata_id;
|
4298
|
+
if (isString(o)) return o;
|
4299
|
+
if (isString(o.xata_id)) return o.xata_id;
|
2410
4300
|
throw new Error("Invalid arguments for delete method");
|
2411
4301
|
});
|
2412
4302
|
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
2413
4303
|
const result = await this.read(a, columns);
|
2414
|
-
await __privateMethod$2(this, _deleteRecords, deleteRecords_fn).call(this, ids);
|
4304
|
+
await __privateMethod$2(this, _RestRepository_instances, deleteRecords_fn).call(this, ids);
|
2415
4305
|
return result;
|
2416
4306
|
}
|
2417
4307
|
if (isString(a)) {
|
2418
|
-
return __privateMethod$2(this, _deleteRecord, deleteRecord_fn).call(this, a, b);
|
4308
|
+
return __privateMethod$2(this, _RestRepository_instances, deleteRecord_fn).call(this, a, b);
|
2419
4309
|
}
|
2420
4310
|
if (isObject(a) && isString(a.xata_id)) {
|
2421
|
-
return __privateMethod$2(this, _deleteRecord, deleteRecord_fn).call(this, a.xata_id, b);
|
4311
|
+
return __privateMethod$2(this, _RestRepository_instances, deleteRecord_fn).call(this, a.xata_id, b);
|
2422
4312
|
}
|
2423
4313
|
throw new Error("Invalid arguments for delete method");
|
2424
4314
|
});
|
@@ -2462,7 +4352,7 @@ class RestRepository extends Query {
|
|
2462
4352
|
},
|
2463
4353
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2464
4354
|
});
|
2465
|
-
const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
|
4355
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2466
4356
|
return {
|
2467
4357
|
records: records.map((item) => initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), item, ["*"])),
|
2468
4358
|
totalCount
|
@@ -2487,7 +4377,7 @@ class RestRepository extends Query {
|
|
2487
4377
|
},
|
2488
4378
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2489
4379
|
});
|
2490
|
-
const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
|
4380
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2491
4381
|
return {
|
2492
4382
|
records: records.map((item) => initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), item, ["*"])),
|
2493
4383
|
totalCount
|
@@ -2529,7 +4419,7 @@ class RestRepository extends Query {
|
|
2529
4419
|
fetchOptions: data.fetchOptions,
|
2530
4420
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2531
4421
|
});
|
2532
|
-
const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
|
4422
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2533
4423
|
const records = objects.map(
|
2534
4424
|
(record) => initObject(
|
2535
4425
|
__privateGet$2(this, _db),
|
@@ -2563,7 +4453,7 @@ class RestRepository extends Query {
|
|
2563
4453
|
},
|
2564
4454
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2565
4455
|
});
|
2566
|
-
const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
|
4456
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2567
4457
|
return {
|
2568
4458
|
...result,
|
2569
4459
|
summaries: result.summaries.map(
|
@@ -2611,9 +4501,9 @@ _getFetchProps = new WeakMap();
|
|
2611
4501
|
_db = new WeakMap();
|
2612
4502
|
_schemaTables = new WeakMap();
|
2613
4503
|
_trace = new WeakMap();
|
2614
|
-
_insertRecordWithoutId = new WeakSet();
|
4504
|
+
_RestRepository_instances = new WeakSet();
|
2615
4505
|
insertRecordWithoutId_fn = async function(object, columns = ["*"]) {
|
2616
|
-
const record = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
|
4506
|
+
const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2617
4507
|
const response = await insertRecord({
|
2618
4508
|
pathParams: {
|
2619
4509
|
workspace: "{workspaceId}",
|
@@ -2625,14 +4515,12 @@ insertRecordWithoutId_fn = async function(object, columns = ["*"]) {
|
|
2625
4515
|
body: record,
|
2626
4516
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2627
4517
|
});
|
2628
|
-
const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
|
4518
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2629
4519
|
return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
|
2630
4520
|
};
|
2631
|
-
_insertRecordWithId = new WeakSet();
|
2632
4521
|
insertRecordWithId_fn = async function(recordId, object, columns = ["*"], { createOnly, ifVersion }) {
|
2633
|
-
if (!recordId)
|
2634
|
-
return null;
|
2635
|
-
const record = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
|
4522
|
+
if (!recordId) return null;
|
4523
|
+
const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2636
4524
|
const response = await insertRecordWithID({
|
2637
4525
|
pathParams: {
|
2638
4526
|
workspace: "{workspaceId}",
|
@@ -2645,13 +4533,12 @@ insertRecordWithId_fn = async function(recordId, object, columns = ["*"], { crea
|
|
2645
4533
|
queryParams: { createOnly, columns, ifVersion },
|
2646
4534
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2647
4535
|
});
|
2648
|
-
const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
|
4536
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2649
4537
|
return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
|
2650
4538
|
};
|
2651
|
-
_insertRecords = new WeakSet();
|
2652
4539
|
insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
|
2653
4540
|
const operations = await promiseMap(objects, async (object) => {
|
2654
|
-
const record = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
|
4541
|
+
const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2655
4542
|
return { insert: { table: __privateGet$2(this, _table), record, createOnly, ifVersion } };
|
2656
4543
|
});
|
2657
4544
|
const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
|
@@ -2676,11 +4563,9 @@ insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
|
|
2676
4563
|
}
|
2677
4564
|
return ids;
|
2678
4565
|
};
|
2679
|
-
_updateRecordWithID = new WeakSet();
|
2680
4566
|
updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVersion }) {
|
2681
|
-
if (!recordId)
|
2682
|
-
return null;
|
2683
|
-
const { xata_id: _id, ...record } = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
|
4567
|
+
if (!recordId) return null;
|
4568
|
+
const { xata_id: _id, ...record } = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2684
4569
|
try {
|
2685
4570
|
const response = await updateRecordWithID({
|
2686
4571
|
pathParams: {
|
@@ -2694,7 +4579,7 @@ updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
|
|
2694
4579
|
body: record,
|
2695
4580
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2696
4581
|
});
|
2697
|
-
const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
|
4582
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2698
4583
|
return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
|
2699
4584
|
} catch (e) {
|
2700
4585
|
if (isObject(e) && e.status === 404) {
|
@@ -2703,10 +4588,9 @@ updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
|
|
2703
4588
|
throw e;
|
2704
4589
|
}
|
2705
4590
|
};
|
2706
|
-
_updateRecords = new WeakSet();
|
2707
4591
|
updateRecords_fn = async function(objects, { ifVersion, upsert }) {
|
2708
4592
|
const operations = await promiseMap(objects, async ({ xata_id, ...object }) => {
|
2709
|
-
const fields = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
|
4593
|
+
const fields = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2710
4594
|
return { update: { table: __privateGet$2(this, _table), id: xata_id, ifVersion, upsert, fields } };
|
2711
4595
|
});
|
2712
4596
|
const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
|
@@ -2731,10 +4615,8 @@ updateRecords_fn = async function(objects, { ifVersion, upsert }) {
|
|
2731
4615
|
}
|
2732
4616
|
return ids;
|
2733
4617
|
};
|
2734
|
-
_upsertRecordWithID = new WeakSet();
|
2735
4618
|
upsertRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVersion }) {
|
2736
|
-
if (!recordId)
|
2737
|
-
return null;
|
4619
|
+
if (!recordId) return null;
|
2738
4620
|
const response = await upsertRecordWithID({
|
2739
4621
|
pathParams: {
|
2740
4622
|
workspace: "{workspaceId}",
|
@@ -2747,13 +4629,11 @@ upsertRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
|
|
2747
4629
|
body: object,
|
2748
4630
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2749
4631
|
});
|
2750
|
-
const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
|
4632
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2751
4633
|
return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
|
2752
4634
|
};
|
2753
|
-
_deleteRecord = new WeakSet();
|
2754
4635
|
deleteRecord_fn = async function(recordId, columns = ["*"]) {
|
2755
|
-
if (!recordId)
|
2756
|
-
return null;
|
4636
|
+
if (!recordId) return null;
|
2757
4637
|
try {
|
2758
4638
|
const response = await deleteRecord({
|
2759
4639
|
pathParams: {
|
@@ -2766,7 +4646,7 @@ deleteRecord_fn = async function(recordId, columns = ["*"]) {
|
|
2766
4646
|
queryParams: { columns },
|
2767
4647
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2768
4648
|
});
|
2769
|
-
const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
|
4649
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2770
4650
|
return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
|
2771
4651
|
} catch (e) {
|
2772
4652
|
if (isObject(e) && e.status === 404) {
|
@@ -2775,7 +4655,6 @@ deleteRecord_fn = async function(recordId, columns = ["*"]) {
|
|
2775
4655
|
throw e;
|
2776
4656
|
}
|
2777
4657
|
};
|
2778
|
-
_deleteRecords = new WeakSet();
|
2779
4658
|
deleteRecords_fn = async function(recordIds) {
|
2780
4659
|
const chunkedOperations = chunk(
|
2781
4660
|
compact(recordIds).map((id) => ({ delete: { table: __privateGet$2(this, _table), id } })),
|
@@ -2793,10 +4672,8 @@ deleteRecords_fn = async function(recordIds) {
|
|
2793
4672
|
});
|
2794
4673
|
}
|
2795
4674
|
};
|
2796
|
-
_getSchemaTables = new WeakSet();
|
2797
4675
|
getSchemaTables_fn = async function() {
|
2798
|
-
if (__privateGet$2(this, _schemaTables))
|
2799
|
-
return __privateGet$2(this, _schemaTables);
|
4676
|
+
if (__privateGet$2(this, _schemaTables)) return __privateGet$2(this, _schemaTables);
|
2800
4677
|
const { schema } = await getBranchDetails({
|
2801
4678
|
pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
|
2802
4679
|
...__privateGet$2(this, _getFetchProps).call(this)
|
@@ -2804,16 +4681,13 @@ getSchemaTables_fn = async function() {
|
|
2804
4681
|
__privateSet$1(this, _schemaTables, schema.tables);
|
2805
4682
|
return schema.tables;
|
2806
4683
|
};
|
2807
|
-
_transformObjectToApi = new WeakSet();
|
2808
4684
|
transformObjectToApi_fn = async function(object) {
|
2809
|
-
const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
|
4685
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2810
4686
|
const schema = schemaTables.find((table) => table.name === __privateGet$2(this, _table));
|
2811
|
-
if (!schema)
|
2812
|
-
throw new Error(`Table ${__privateGet$2(this, _table)} not found in schema`);
|
4687
|
+
if (!schema) throw new Error(`Table ${__privateGet$2(this, _table)} not found in schema`);
|
2813
4688
|
const result = {};
|
2814
4689
|
for (const [key, value] of Object.entries(object)) {
|
2815
|
-
if (["xata_version", "xata_createdat", "xata_updatedat"].includes(key))
|
2816
|
-
continue;
|
4690
|
+
if (["xata_version", "xata_createdat", "xata_updatedat"].includes(key)) continue;
|
2817
4691
|
const type = schema.columns.find((column) => column.name === key)?.type;
|
2818
4692
|
switch (type) {
|
2819
4693
|
case "link": {
|
@@ -2843,11 +4717,9 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
|
|
2843
4717
|
const data = {};
|
2844
4718
|
Object.assign(data, { ...object });
|
2845
4719
|
const { columns } = schemaTables.find(({ name }) => name === table) ?? {};
|
2846
|
-
if (!columns)
|
2847
|
-
console.error(`Table ${table} not found in schema`);
|
4720
|
+
if (!columns) console.error(`Table ${table} not found in schema`);
|
2848
4721
|
for (const column of columns ?? []) {
|
2849
|
-
if (!isValidColumn(selectedColumns, column))
|
2850
|
-
continue;
|
4722
|
+
if (!isValidColumn(selectedColumns, column)) continue;
|
2851
4723
|
const value = data[column.name];
|
2852
4724
|
switch (column.type) {
|
2853
4725
|
case "datetime": {
|
@@ -2933,15 +4805,12 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
|
|
2933
4805
|
return record;
|
2934
4806
|
};
|
2935
4807
|
function extractId(value) {
|
2936
|
-
if (isString(value))
|
2937
|
-
return value;
|
2938
|
-
if (isObject(value) && isString(value.xata_id))
|
2939
|
-
return value.xata_id;
|
4808
|
+
if (isString(value)) return value;
|
4809
|
+
if (isObject(value) && isString(value.xata_id)) return value.xata_id;
|
2940
4810
|
return void 0;
|
2941
4811
|
}
|
2942
4812
|
function isValidColumn(columns, column) {
|
2943
|
-
if (columns.includes("*"))
|
2944
|
-
return true;
|
4813
|
+
if (columns.includes("*")) return true;
|
2945
4814
|
return columns.filter((item) => isString(item) && item.startsWith(column.name)).length > 0;
|
2946
4815
|
}
|
2947
4816
|
function parseIfVersion(...args) {
|
@@ -2981,19 +4850,12 @@ const includesAll = (value) => ({ $includesAll: value });
|
|
2981
4850
|
const includesNone = (value) => ({ $includesNone: value });
|
2982
4851
|
const includesAny = (value) => ({ $includesAny: value });
|
2983
4852
|
|
2984
|
-
var __accessCheck$2 = (obj, member, msg) => {
|
2985
|
-
if (!member.has(obj))
|
2986
|
-
throw TypeError("Cannot " + msg);
|
2987
|
-
};
|
2988
|
-
var __privateGet$1 = (obj, member, getter) => {
|
2989
|
-
__accessCheck$2(obj, member, "read from private field");
|
2990
|
-
return getter ? getter.call(obj) : member.get(obj);
|
2991
|
-
};
|
2992
|
-
var __privateAdd$2 = (obj, member, value) => {
|
2993
|
-
if (member.has(obj))
|
2994
|
-
throw TypeError("Cannot add the same private member more than once");
|
2995
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
4853
|
+
var __typeError$2 = (msg) => {
|
4854
|
+
throw TypeError(msg);
|
2996
4855
|
};
|
4856
|
+
var __accessCheck$2 = (obj, member, msg) => member.has(obj) || __typeError$2("Cannot " + msg);
|
4857
|
+
var __privateGet$1 = (obj, member, getter) => (__accessCheck$2(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
4858
|
+
var __privateAdd$2 = (obj, member, value) => member.has(obj) ? __typeError$2("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
2997
4859
|
var _tables;
|
2998
4860
|
class SchemaPlugin extends XataPlugin {
|
2999
4861
|
constructor() {
|
@@ -3005,8 +4867,7 @@ class SchemaPlugin extends XataPlugin {
|
|
3005
4867
|
{},
|
3006
4868
|
{
|
3007
4869
|
get: (_target, table) => {
|
3008
|
-
if (!isString(table))
|
3009
|
-
throw new Error("Invalid table name");
|
4870
|
+
if (!isString(table)) throw new Error("Invalid table name");
|
3010
4871
|
if (__privateGet$1(this, _tables)[table] === void 0) {
|
3011
4872
|
__privateGet$1(this, _tables)[table] = new RestRepository({ db, pluginOptions, table, schemaTables: pluginOptions.tables });
|
3012
4873
|
}
|
@@ -3097,30 +4958,23 @@ function getContentType(file) {
|
|
3097
4958
|
return "application/octet-stream";
|
3098
4959
|
}
|
3099
4960
|
|
3100
|
-
var __accessCheck$1 = (obj, member, msg) => {
|
3101
|
-
if (!member.has(obj))
|
3102
|
-
throw TypeError("Cannot " + msg);
|
3103
|
-
};
|
3104
|
-
var __privateAdd$1 = (obj, member, value) => {
|
3105
|
-
if (member.has(obj))
|
3106
|
-
throw TypeError("Cannot add the same private member more than once");
|
3107
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
4961
|
+
var __typeError$1 = (msg) => {
|
4962
|
+
throw TypeError(msg);
|
3108
4963
|
};
|
3109
|
-
var __privateMethod$1 = (obj, member, method) => {
|
3110
|
-
__accessCheck$1(obj, member, "access private method");
|
3111
|
-
return method;
|
3112
|
-
};
|
3113
|
-
var _search, search_fn;
|
4964
|
+
var __accessCheck$1 = (obj, member, msg) => member.has(obj) || __typeError$1("Cannot " + msg);
|
4965
|
+
var __privateAdd$1 = (obj, member, value) => member.has(obj) ? __typeError$1("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
4966
|
+
var __privateMethod$1 = (obj, member, method) => (__accessCheck$1(obj, member, "access private method"), method);
|
4967
|
+
var _SearchPlugin_instances, search_fn;
|
3114
4968
|
class SearchPlugin extends XataPlugin {
|
3115
4969
|
constructor(db) {
|
3116
4970
|
super();
|
3117
4971
|
this.db = db;
|
3118
|
-
__privateAdd$1(this, _search);
|
4972
|
+
__privateAdd$1(this, _SearchPlugin_instances);
|
3119
4973
|
}
|
3120
4974
|
build(pluginOptions) {
|
3121
4975
|
return {
|
3122
4976
|
all: async (query, options = {}) => {
|
3123
|
-
const { records, totalCount } = await __privateMethod$1(this, _search, search_fn).call(this, query, options, pluginOptions);
|
4977
|
+
const { records, totalCount } = await __privateMethod$1(this, _SearchPlugin_instances, search_fn).call(this, query, options, pluginOptions);
|
3124
4978
|
return {
|
3125
4979
|
totalCount,
|
3126
4980
|
records: records.map((record) => {
|
@@ -3130,7 +4984,7 @@ class SearchPlugin extends XataPlugin {
|
|
3130
4984
|
};
|
3131
4985
|
},
|
3132
4986
|
byTable: async (query, options = {}) => {
|
3133
|
-
const { records: rawRecords, totalCount } = await __privateMethod$1(this, _search, search_fn).call(this, query, options, pluginOptions);
|
4987
|
+
const { records: rawRecords, totalCount } = await __privateMethod$1(this, _SearchPlugin_instances, search_fn).call(this, query, options, pluginOptions);
|
3134
4988
|
const records = rawRecords.reduce((acc, record) => {
|
3135
4989
|
const table = record.xata_table;
|
3136
4990
|
const items = acc[table] ?? [];
|
@@ -3142,7 +4996,7 @@ class SearchPlugin extends XataPlugin {
|
|
3142
4996
|
};
|
3143
4997
|
}
|
3144
4998
|
}
|
3145
|
-
_search = new WeakSet();
|
4999
|
+
_SearchPlugin_instances = new WeakSet();
|
3146
5000
|
search_fn = async function(query, options, pluginOptions) {
|
3147
5001
|
const { tables, fuzziness, highlight, prefix, page } = options ?? {};
|
3148
5002
|
const { records, totalCount } = await searchBranch({
|
@@ -3178,8 +5032,7 @@ function arrayString(val) {
|
|
3178
5032
|
return result;
|
3179
5033
|
}
|
3180
5034
|
function prepareValue(value) {
|
3181
|
-
if (!isDefined(value))
|
3182
|
-
return null;
|
5035
|
+
if (!isDefined(value)) return null;
|
3183
5036
|
if (value instanceof Date) {
|
3184
5037
|
return value.toISOString();
|
3185
5038
|
}
|
@@ -3258,8 +5111,7 @@ function buildDomain(host, region) {
|
|
3258
5111
|
function buildConnectionString({ apiKey, workspacesApiUrl, branch }) {
|
3259
5112
|
const url = isString(workspacesApiUrl) ? workspacesApiUrl : workspacesApiUrl("", {});
|
3260
5113
|
const parts = parseWorkspacesUrlParts(url);
|
3261
|
-
if (!parts)
|
3262
|
-
throw new Error("Invalid workspaces URL");
|
5114
|
+
if (!parts) throw new Error("Invalid workspaces URL");
|
3263
5115
|
const { workspace: workspaceSlug, region, database, host } = parts;
|
3264
5116
|
const domain = buildDomain(host, region);
|
3265
5117
|
const workspace = workspaceSlug.split("-").pop();
|
@@ -3284,39 +5136,24 @@ class TransactionPlugin extends XataPlugin {
|
|
3284
5136
|
}
|
3285
5137
|
}
|
3286
5138
|
|
3287
|
-
var __accessCheck = (obj, member, msg) => {
|
3288
|
-
if (!member.has(obj))
|
3289
|
-
throw TypeError("Cannot " + msg);
|
3290
|
-
};
|
3291
|
-
var __privateGet = (obj, member, getter) => {
|
3292
|
-
__accessCheck(obj, member, "read from private field");
|
3293
|
-
return getter ? getter.call(obj) : member.get(obj);
|
3294
|
-
};
|
3295
|
-
var __privateAdd = (obj, member, value) => {
|
3296
|
-
if (member.has(obj))
|
3297
|
-
throw TypeError("Cannot add the same private member more than once");
|
3298
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
3299
|
-
};
|
3300
|
-
var __privateSet = (obj, member, value, setter) => {
|
3301
|
-
__accessCheck(obj, member, "write to private field");
|
3302
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
3303
|
-
return value;
|
3304
|
-
};
|
3305
|
-
var __privateMethod = (obj, member, method) => {
|
3306
|
-
__accessCheck(obj, member, "access private method");
|
3307
|
-
return method;
|
5139
|
+
var __typeError = (msg) => {
|
5140
|
+
throw TypeError(msg);
|
3308
5141
|
};
|
5142
|
+
var __accessCheck = (obj, member, msg) => member.has(obj) || __typeError("Cannot " + msg);
|
5143
|
+
var __privateGet = (obj, member, getter) => (__accessCheck(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
5144
|
+
var __privateAdd = (obj, member, value) => member.has(obj) ? __typeError("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
5145
|
+
var __privateSet = (obj, member, value, setter) => (__accessCheck(obj, member, "write to private field"), member.set(obj, value), value);
|
5146
|
+
var __privateMethod = (obj, member, method) => (__accessCheck(obj, member, "access private method"), method);
|
3309
5147
|
const buildClient = (plugins) => {
|
3310
|
-
var _options, _parseOptions, parseOptions_fn, _getFetchProps, getFetchProps_fn, _a;
|
5148
|
+
var _options, _instances, parseOptions_fn, getFetchProps_fn, _a;
|
3311
5149
|
return _a = class {
|
3312
5150
|
constructor(options = {}, tables) {
|
3313
|
-
__privateAdd(this, _parseOptions);
|
3314
|
-
__privateAdd(this, _getFetchProps);
|
3315
|
-
__privateAdd(this, _options, void 0);
|
3316
|
-
const safeOptions = __privateMethod(this, _parseOptions, parseOptions_fn).call(this, options);
|
5151
|
+
__privateAdd(this, _instances);
|
5152
|
+
__privateAdd(this, _options);
|
5153
|
+
const safeOptions = __privateMethod(this, _instances, parseOptions_fn).call(this, options);
|
3317
5154
|
__privateSet(this, _options, safeOptions);
|
3318
5155
|
const pluginOptions = {
|
3319
|
-
...__privateMethod(this, _getFetchProps, getFetchProps_fn).call(this, safeOptions),
|
5156
|
+
...__privateMethod(this, _instances, getFetchProps_fn).call(this, safeOptions),
|
3320
5157
|
host: safeOptions.host,
|
3321
5158
|
tables,
|
3322
5159
|
branch: safeOptions.branch
|
@@ -3333,8 +5170,7 @@ const buildClient = (plugins) => {
|
|
3333
5170
|
this.sql = sql;
|
3334
5171
|
this.files = files;
|
3335
5172
|
for (const [key, namespace] of Object.entries(plugins ?? {})) {
|
3336
|
-
if (namespace === void 0)
|
3337
|
-
continue;
|
5173
|
+
if (namespace === void 0) continue;
|
3338
5174
|
this[key] = namespace.build(pluginOptions);
|
3339
5175
|
}
|
3340
5176
|
}
|
@@ -3343,8 +5179,8 @@ const buildClient = (plugins) => {
|
|
3343
5179
|
const branch = __privateGet(this, _options).branch;
|
3344
5180
|
return { databaseURL, branch };
|
3345
5181
|
}
|
3346
|
-
}, _options = new WeakMap(), _parseOptions = new WeakSet(), parseOptions_fn = function(options) {
|
3347
|
-
const enableBrowser = options?.enableBrowser ??
|
5182
|
+
}, _options = new WeakMap(), _instances = new WeakSet(), parseOptions_fn = function(options) {
|
5183
|
+
const enableBrowser = options?.enableBrowser ?? false;
|
3348
5184
|
const isBrowser = typeof window !== "undefined" && typeof Deno === "undefined";
|
3349
5185
|
if (isBrowser && !enableBrowser) {
|
3350
5186
|
throw new Error(
|
@@ -3352,8 +5188,9 @@ const buildClient = (plugins) => {
|
|
3352
5188
|
);
|
3353
5189
|
}
|
3354
5190
|
const fetch = getFetchImplementation(options?.fetch);
|
3355
|
-
const databaseURL = options?.databaseURL
|
3356
|
-
const apiKey = options?.apiKey
|
5191
|
+
const databaseURL = options?.databaseURL;
|
5192
|
+
const apiKey = options?.apiKey;
|
5193
|
+
const branch = options?.branch;
|
3357
5194
|
const trace = options?.trace ?? defaultTrace;
|
3358
5195
|
const clientName = options?.clientName;
|
3359
5196
|
const host = options?.host ?? "production";
|
@@ -3364,25 +5201,8 @@ const buildClient = (plugins) => {
|
|
3364
5201
|
if (!databaseURL) {
|
3365
5202
|
throw new Error("Option databaseURL is required");
|
3366
5203
|
}
|
3367
|
-
const previewBranch = getPreviewBranch();
|
3368
|
-
const envBranch = getBranch();
|
3369
|
-
const branch = options?.branch || previewBranch || envBranch || "main";
|
3370
|
-
if (!!previewBranch && branch !== previewBranch) {
|
3371
|
-
console.warn(
|
3372
|
-
`Ignoring preview branch ${previewBranch} because branch option was passed to the client constructor with value ${branch}`
|
3373
|
-
);
|
3374
|
-
} else if (!!envBranch && branch !== envBranch) {
|
3375
|
-
console.warn(
|
3376
|
-
`Ignoring branch ${envBranch} because branch option was passed to the client constructor with value ${branch}`
|
3377
|
-
);
|
3378
|
-
} else if (!!previewBranch && !!envBranch && previewBranch !== envBranch) {
|
3379
|
-
console.warn(
|
3380
|
-
`Ignoring preview branch ${previewBranch} and branch ${envBranch} because branch option was passed to the client constructor with value ${branch}`
|
3381
|
-
);
|
3382
|
-
} else if (!previewBranch && !envBranch && options?.branch === void 0) {
|
3383
|
-
console.warn(
|
3384
|
-
`No branch was passed to the client constructor. Using default branch ${branch}. You can set the branch with the environment variable XATA_BRANCH or by passing the branch option to the client constructor.`
|
3385
|
-
);
|
5204
|
+
if (!branch) {
|
5205
|
+
throw new Error("Option branch is required");
|
3386
5206
|
}
|
3387
5207
|
return {
|
3388
5208
|
fetch,
|
@@ -3396,7 +5216,7 @@ const buildClient = (plugins) => {
|
|
3396
5216
|
clientName,
|
3397
5217
|
xataAgentExtra
|
3398
5218
|
};
|
3399
|
-
}, _getFetchProps = new WeakSet(), getFetchProps_fn = function({
|
5219
|
+
}, getFetchProps_fn = function({
|
3400
5220
|
fetch,
|
3401
5221
|
apiKey,
|
3402
5222
|
databaseURL,
|
@@ -3437,26 +5257,19 @@ class Serializer {
|
|
3437
5257
|
}
|
3438
5258
|
toJSON(data) {
|
3439
5259
|
function visit(obj) {
|
3440
|
-
if (Array.isArray(obj))
|
3441
|
-
return obj.map(visit);
|
5260
|
+
if (Array.isArray(obj)) return obj.map(visit);
|
3442
5261
|
const type = typeof obj;
|
3443
|
-
if (type === "undefined")
|
3444
|
-
return { [META]: "undefined" };
|
3445
|
-
if (type === "bigint")
|
3446
|
-
return { [META]: "bigint", [VALUE]: obj.toString() };
|
3447
|
-
if (obj === null || type !== "object")
|
3448
|
-
return obj;
|
5262
|
+
if (type === "undefined") return { [META]: "undefined" };
|
5263
|
+
if (type === "bigint") return { [META]: "bigint", [VALUE]: obj.toString() };
|
5264
|
+
if (obj === null || type !== "object") return obj;
|
3449
5265
|
const constructor = obj.constructor;
|
3450
5266
|
const o = { [META]: constructor.name };
|
3451
5267
|
for (const [key, value] of Object.entries(obj)) {
|
3452
5268
|
o[key] = visit(value);
|
3453
5269
|
}
|
3454
|
-
if (constructor === Date)
|
3455
|
-
o[VALUE] = obj.toISOString();
|
3456
|
-
if (constructor === Map)
|
3457
|
-
o[VALUE] = Object.fromEntries(obj);
|
3458
|
-
if (constructor === Set)
|
3459
|
-
o[VALUE] = [...obj];
|
5270
|
+
if (constructor === Date) o[VALUE] = obj.toISOString();
|
5271
|
+
if (constructor === Map) o[VALUE] = Object.fromEntries(obj);
|
5272
|
+
if (constructor === Set) o[VALUE] = [...obj];
|
3460
5273
|
return o;
|
3461
5274
|
}
|
3462
5275
|
return JSON.stringify(visit(data));
|
@@ -3469,16 +5282,11 @@ class Serializer {
|
|
3469
5282
|
if (constructor) {
|
3470
5283
|
return Object.assign(Object.create(constructor.prototype), rest);
|
3471
5284
|
}
|
3472
|
-
if (clazz === "Date")
|
3473
|
-
return new Date(val);
|
3474
|
-
if (clazz === "
|
3475
|
-
return new Set(val);
|
3476
|
-
if (clazz === "
|
3477
|
-
return new Map(Object.entries(val));
|
3478
|
-
if (clazz === "bigint")
|
3479
|
-
return BigInt(val);
|
3480
|
-
if (clazz === "undefined")
|
3481
|
-
return void 0;
|
5285
|
+
if (clazz === "Date") return new Date(val);
|
5286
|
+
if (clazz === "Set") return new Set(val);
|
5287
|
+
if (clazz === "Map") return new Map(Object.entries(val));
|
5288
|
+
if (clazz === "bigint") return BigInt(val);
|
5289
|
+
if (clazz === "undefined") return void 0;
|
3482
5290
|
return rest;
|
3483
5291
|
}
|
3484
5292
|
return value;
|
@@ -3493,6 +5301,47 @@ const deserialize = (json) => {
|
|
3493
5301
|
return defaultSerializer.fromJSON(json);
|
3494
5302
|
};
|
3495
5303
|
|
5304
|
+
function parseEnvironment(environment) {
|
5305
|
+
try {
|
5306
|
+
if (typeof environment === "function") {
|
5307
|
+
return new Proxy(
|
5308
|
+
{},
|
5309
|
+
{
|
5310
|
+
get(target) {
|
5311
|
+
return environment(target);
|
5312
|
+
}
|
5313
|
+
}
|
5314
|
+
);
|
5315
|
+
}
|
5316
|
+
if (isObject(environment)) {
|
5317
|
+
return environment;
|
5318
|
+
}
|
5319
|
+
} catch (error) {
|
5320
|
+
}
|
5321
|
+
return {};
|
5322
|
+
}
|
5323
|
+
function buildPreviewBranchName({ org, branch }) {
|
5324
|
+
return `preview-${org}-${branch}`;
|
5325
|
+
}
|
5326
|
+
function getDeployPreviewBranch(environment) {
|
5327
|
+
try {
|
5328
|
+
const { deployPreview, deployPreviewBranch, vercelGitCommitRef, vercelGitRepoOwner } = parseEnvironment(environment);
|
5329
|
+
if (deployPreviewBranch) return deployPreviewBranch;
|
5330
|
+
switch (deployPreview) {
|
5331
|
+
case "vercel": {
|
5332
|
+
if (!vercelGitCommitRef || !vercelGitRepoOwner) {
|
5333
|
+
console.warn("XATA_PREVIEW=vercel but VERCEL_GIT_COMMIT_REF or VERCEL_GIT_REPO_OWNER is not valid");
|
5334
|
+
return void 0;
|
5335
|
+
}
|
5336
|
+
return buildPreviewBranchName({ org: vercelGitRepoOwner, branch: vercelGitCommitRef });
|
5337
|
+
}
|
5338
|
+
}
|
5339
|
+
return void 0;
|
5340
|
+
} catch (err) {
|
5341
|
+
return void 0;
|
5342
|
+
}
|
5343
|
+
}
|
5344
|
+
|
3496
5345
|
class XataError extends Error {
|
3497
5346
|
constructor(message, status) {
|
3498
5347
|
super(message);
|
@@ -3501,6 +5350,7 @@ class XataError extends Error {
|
|
3501
5350
|
}
|
3502
5351
|
|
3503
5352
|
exports.BaseClient = BaseClient;
|
5353
|
+
exports.Buffer = Buffer;
|
3504
5354
|
exports.FetcherError = FetcherError;
|
3505
5355
|
exports.FilesPlugin = FilesPlugin;
|
3506
5356
|
exports.Operations = operationsByTag;
|
@@ -3544,6 +5394,7 @@ exports.cancelWorkspaceMemberInvite = cancelWorkspaceMemberInvite;
|
|
3544
5394
|
exports.compareBranchSchemas = compareBranchSchemas;
|
3545
5395
|
exports.compareBranchWithUserSchema = compareBranchWithUserSchema;
|
3546
5396
|
exports.compareMigrationRequest = compareMigrationRequest;
|
5397
|
+
exports.completeMigration = completeMigration;
|
3547
5398
|
exports.contains = contains;
|
3548
5399
|
exports.copyBranch = copyBranch;
|
3549
5400
|
exports.createBranch = createBranch;
|
@@ -3554,6 +5405,7 @@ exports.createTable = createTable;
|
|
3554
5405
|
exports.createUserAPIKey = createUserAPIKey;
|
3555
5406
|
exports.createWorkspace = createWorkspace;
|
3556
5407
|
exports.deleteBranch = deleteBranch;
|
5408
|
+
exports.deleteCluster = deleteCluster;
|
3557
5409
|
exports.deleteColumn = deleteColumn;
|
3558
5410
|
exports.deleteDatabase = deleteDatabase;
|
3559
5411
|
exports.deleteDatabaseGithubSettings = deleteDatabaseGithubSettings;
|
@@ -3574,9 +5426,7 @@ exports.exists = exists;
|
|
3574
5426
|
exports.fileAccess = fileAccess;
|
3575
5427
|
exports.fileUpload = fileUpload;
|
3576
5428
|
exports.ge = ge;
|
3577
|
-
exports.getAPIKey = getAPIKey;
|
3578
5429
|
exports.getAuthorizationCode = getAuthorizationCode;
|
3579
|
-
exports.getBranch = getBranch;
|
3580
5430
|
exports.getBranchDetails = getBranchDetails;
|
3581
5431
|
exports.getBranchList = getBranchList;
|
3582
5432
|
exports.getBranchMetadata = getBranchMetadata;
|
@@ -3591,7 +5441,7 @@ exports.getDatabaseGithubSettings = getDatabaseGithubSettings;
|
|
3591
5441
|
exports.getDatabaseList = getDatabaseList;
|
3592
5442
|
exports.getDatabaseMetadata = getDatabaseMetadata;
|
3593
5443
|
exports.getDatabaseSettings = getDatabaseSettings;
|
3594
|
-
exports.getDatabaseURL = getDatabaseURL;
|
5444
|
+
exports.getDeployPreviewBranch = getDeployPreviewBranch;
|
3595
5445
|
exports.getFile = getFile;
|
3596
5446
|
exports.getFileItem = getFileItem;
|
3597
5447
|
exports.getGitBranchesMapping = getGitBranchesMapping;
|
@@ -3600,7 +5450,6 @@ exports.getMigrationHistory = getMigrationHistory;
|
|
3600
5450
|
exports.getMigrationJobStatus = getMigrationJobStatus;
|
3601
5451
|
exports.getMigrationRequest = getMigrationRequest;
|
3602
5452
|
exports.getMigrationRequestIsMerged = getMigrationRequestIsMerged;
|
3603
|
-
exports.getPreviewBranch = getPreviewBranch;
|
3604
5453
|
exports.getRecord = getRecord;
|
3605
5454
|
exports.getSchema = getSchema;
|
3606
5455
|
exports.getTableColumns = getTableColumns;
|
@@ -3662,11 +5511,14 @@ exports.removeWorkspaceMember = removeWorkspaceMember;
|
|
3662
5511
|
exports.renameDatabase = renameDatabase;
|
3663
5512
|
exports.resendWorkspaceMemberInvite = resendWorkspaceMemberInvite;
|
3664
5513
|
exports.resolveBranch = resolveBranch;
|
5514
|
+
exports.rollbackMigration = rollbackMigration;
|
3665
5515
|
exports.searchBranch = searchBranch;
|
3666
5516
|
exports.searchTable = searchTable;
|
3667
5517
|
exports.serialize = serialize;
|
3668
5518
|
exports.setTableSchema = setTableSchema;
|
5519
|
+
exports.sqlBatchQuery = sqlBatchQuery;
|
3669
5520
|
exports.sqlQuery = sqlQuery;
|
5521
|
+
exports.startMigration = startMigration;
|
3670
5522
|
exports.startsWith = startsWith;
|
3671
5523
|
exports.summarizeTable = summarizeTable;
|
3672
5524
|
exports.transformImage = transformImage;
|