@xata.io/client 0.0.0-next.v534362888c93b458fd4536898c3b687b59a5e171 → 0.0.0-next.v5bc7e82e3b5ca04db80756931b6ccba0db817c72
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their respective public registries.
- package/.turbo/turbo-add-version.log +1 -1
- package/.turbo/turbo-build.log +4 -4
- package/CHANGELOG.md +11 -3
- package/dist/index.cjs +2426 -574
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.ts +4752 -3854
- package/dist/index.mjs +2420 -571
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.mjs
CHANGED
@@ -22,6 +22,1789 @@ const TraceAttributes = {
   CLOUDFLARE_RAY_ID: "cf.ray"
 };
 
+const lookup = [];
+const revLookup = [];
+const code = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
+for (let i = 0, len = code.length; i < len; ++i) {
+  lookup[i] = code[i];
+  revLookup[code.charCodeAt(i)] = i;
+}
+revLookup["-".charCodeAt(0)] = 62;
+revLookup["_".charCodeAt(0)] = 63;
+function getLens(b64) {
+  const len = b64.length;
+  if (len % 4 > 0) {
+    throw new Error("Invalid string. Length must be a multiple of 4");
+  }
+  let validLen = b64.indexOf("=");
+  if (validLen === -1) validLen = len;
+  const placeHoldersLen = validLen === len ? 0 : 4 - validLen % 4;
+  return [validLen, placeHoldersLen];
+}
+function _byteLength(_b64, validLen, placeHoldersLen) {
+  return (validLen + placeHoldersLen) * 3 / 4 - placeHoldersLen;
+}
+function toByteArray(b64) {
+  let tmp;
+  const lens = getLens(b64);
+  const validLen = lens[0];
+  const placeHoldersLen = lens[1];
+  const arr = new Uint8Array(_byteLength(b64, validLen, placeHoldersLen));
+  let curByte = 0;
+  const len = placeHoldersLen > 0 ? validLen - 4 : validLen;
+  let i;
+  for (i = 0; i < len; i += 4) {
+    tmp = revLookup[b64.charCodeAt(i)] << 18 | revLookup[b64.charCodeAt(i + 1)] << 12 | revLookup[b64.charCodeAt(i + 2)] << 6 | revLookup[b64.charCodeAt(i + 3)];
+    arr[curByte++] = tmp >> 16 & 255;
+    arr[curByte++] = tmp >> 8 & 255;
+    arr[curByte++] = tmp & 255;
+  }
+  if (placeHoldersLen === 2) {
+    tmp = revLookup[b64.charCodeAt(i)] << 2 | revLookup[b64.charCodeAt(i + 1)] >> 4;
+    arr[curByte++] = tmp & 255;
+  }
+  if (placeHoldersLen === 1) {
+    tmp = revLookup[b64.charCodeAt(i)] << 10 | revLookup[b64.charCodeAt(i + 1)] << 4 | revLookup[b64.charCodeAt(i + 2)] >> 2;
+    arr[curByte++] = tmp >> 8 & 255;
+    arr[curByte++] = tmp & 255;
+  }
+  return arr;
+}
+function tripletToBase64(num) {
+  return lookup[num >> 18 & 63] + lookup[num >> 12 & 63] + lookup[num >> 6 & 63] + lookup[num & 63];
+}
+function encodeChunk(uint8, start, end) {
+  let tmp;
+  const output = [];
+  for (let i = start; i < end; i += 3) {
+    tmp = (uint8[i] << 16 & 16711680) + (uint8[i + 1] << 8 & 65280) + (uint8[i + 2] & 255);
+    output.push(tripletToBase64(tmp));
+  }
+  return output.join("");
+}
+function fromByteArray(uint8) {
+  let tmp;
+  const len = uint8.length;
+  const extraBytes = len % 3;
+  const parts = [];
+  const maxChunkLength = 16383;
+  for (let i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) {
+    parts.push(encodeChunk(uint8, i, i + maxChunkLength > len2 ? len2 : i + maxChunkLength));
+  }
+  if (extraBytes === 1) {
+    tmp = uint8[len - 1];
+    parts.push(lookup[tmp >> 2] + lookup[tmp << 4 & 63] + "==");
+  } else if (extraBytes === 2) {
+    tmp = (uint8[len - 2] << 8) + uint8[len - 1];
+    parts.push(lookup[tmp >> 10] + lookup[tmp >> 4 & 63] + lookup[tmp << 2 & 63] + "=");
+  }
+  return parts.join("");
+}
+
+const K_MAX_LENGTH = 2147483647;
+const MAX_ARGUMENTS_LENGTH = 4096;
+class Buffer extends Uint8Array {
[… ~1,400 further added lines: a vendored Buffer polyfill extending Uint8Array — constructor, toJSON, write/toString, equals/compare/copy/slice, little- and big-endian signed and unsigned integer read/write methods, swap16/swap32/swap64, fill, indexOf/lastIndexOf/includes, static from/isBuffer/isEncoding/byteLength/concat/compare/alloc/allocUnsafe, and internal utf8/utf16le/ascii/hex/base64 codec helpers …]
|
1510
|
+
continue;
|
1511
|
+
}
|
1512
|
+
codePoint = (leadSurrogate - 55296 << 10 | codePoint - 56320) + 65536;
|
1513
|
+
} else if (leadSurrogate) {
|
1514
|
+
if ((units -= 3) > -1) {
|
1515
|
+
bytes.push(239, 191, 189);
|
1516
|
+
}
|
1517
|
+
}
|
1518
|
+
leadSurrogate = null;
|
1519
|
+
if (codePoint < 128) {
|
1520
|
+
if ((units -= 1) < 0) {
|
1521
|
+
break;
|
1522
|
+
}
|
1523
|
+
bytes.push(codePoint);
|
1524
|
+
} else if (codePoint < 2048) {
|
1525
|
+
if ((units -= 2) < 0) {
|
1526
|
+
break;
|
1527
|
+
}
|
1528
|
+
bytes.push(codePoint >> 6 | 192, codePoint & 63 | 128);
|
1529
|
+
} else if (codePoint < 65536) {
|
1530
|
+
if ((units -= 3) < 0) {
|
1531
|
+
break;
|
1532
|
+
}
|
1533
|
+
bytes.push(codePoint >> 12 | 224, codePoint >> 6 & 63 | 128, codePoint & 63 | 128);
|
1534
|
+
} else if (codePoint < 1114112) {
|
1535
|
+
if ((units -= 4) < 0) {
|
1536
|
+
break;
|
1537
|
+
}
|
1538
|
+
bytes.push(
|
1539
|
+
codePoint >> 18 | 240,
|
1540
|
+
codePoint >> 12 & 63 | 128,
|
1541
|
+
codePoint >> 6 & 63 | 128,
|
1542
|
+
codePoint & 63 | 128
|
1543
|
+
);
|
1544
|
+
} else {
|
1545
|
+
throw new Error("Invalid code point");
|
1546
|
+
}
|
1547
|
+
}
|
1548
|
+
return bytes;
|
1549
|
+
}
|
1550
|
+
static _base64ToBytes(str) {
|
1551
|
+
return toByteArray(base64clean(str));
|
1552
|
+
}
|
1553
|
+
static _asciiToBytes(str) {
|
1554
|
+
const byteArray = [];
|
1555
|
+
for (let i = 0; i < str.length; ++i) {
|
1556
|
+
byteArray.push(str.charCodeAt(i) & 255);
|
1557
|
+
}
|
1558
|
+
return byteArray;
|
1559
|
+
}
|
1560
|
+
static _utf16leToBytes(str, units) {
|
1561
|
+
let c, hi, lo;
|
1562
|
+
const byteArray = [];
|
1563
|
+
for (let i = 0; i < str.length; ++i) {
|
1564
|
+
if ((units -= 2) < 0) break;
|
1565
|
+
c = str.charCodeAt(i);
|
1566
|
+
hi = c >> 8;
|
1567
|
+
lo = c % 256;
|
1568
|
+
byteArray.push(lo);
|
1569
|
+
byteArray.push(hi);
|
1570
|
+
}
|
1571
|
+
return byteArray;
|
1572
|
+
}
|
1573
|
+
static _hexSlice(buf, start, end) {
|
1574
|
+
const len = buf.length;
|
1575
|
+
if (!start || start < 0) {
|
1576
|
+
start = 0;
|
1577
|
+
}
|
1578
|
+
if (!end || end < 0 || end > len) {
|
1579
|
+
end = len;
|
1580
|
+
}
|
1581
|
+
let out = "";
|
1582
|
+
for (let i = start; i < end; ++i) {
|
1583
|
+
out += hexSliceLookupTable[buf[i]];
|
1584
|
+
}
|
1585
|
+
return out;
|
1586
|
+
}
|
1587
|
+
static _base64Slice(buf, start, end) {
|
1588
|
+
if (start === 0 && end === buf.length) {
|
1589
|
+
return fromByteArray(buf);
|
1590
|
+
} else {
|
1591
|
+
return fromByteArray(buf.slice(start, end));
|
1592
|
+
}
|
1593
|
+
}
|
1594
|
+
static _utf8Slice(buf, start, end) {
|
1595
|
+
end = Math.min(buf.length, end);
|
1596
|
+
const res = [];
|
1597
|
+
let i = start;
|
1598
|
+
while (i < end) {
|
1599
|
+
const firstByte = buf[i];
|
1600
|
+
let codePoint = null;
|
1601
|
+
let bytesPerSequence = firstByte > 239 ? 4 : firstByte > 223 ? 3 : firstByte > 191 ? 2 : 1;
|
1602
|
+
if (i + bytesPerSequence <= end) {
|
1603
|
+
let secondByte, thirdByte, fourthByte, tempCodePoint;
|
1604
|
+
switch (bytesPerSequence) {
|
1605
|
+
case 1:
|
1606
|
+
if (firstByte < 128) {
|
1607
|
+
codePoint = firstByte;
|
1608
|
+
}
|
1609
|
+
break;
|
1610
|
+
case 2:
|
1611
|
+
secondByte = buf[i + 1];
|
1612
|
+
if ((secondByte & 192) === 128) {
|
1613
|
+
tempCodePoint = (firstByte & 31) << 6 | secondByte & 63;
|
1614
|
+
if (tempCodePoint > 127) {
|
1615
|
+
codePoint = tempCodePoint;
|
1616
|
+
}
|
1617
|
+
}
|
1618
|
+
break;
|
1619
|
+
case 3:
|
1620
|
+
secondByte = buf[i + 1];
|
1621
|
+
thirdByte = buf[i + 2];
|
1622
|
+
if ((secondByte & 192) === 128 && (thirdByte & 192) === 128) {
|
1623
|
+
tempCodePoint = (firstByte & 15) << 12 | (secondByte & 63) << 6 | thirdByte & 63;
|
1624
|
+
if (tempCodePoint > 2047 && (tempCodePoint < 55296 || tempCodePoint > 57343)) {
|
1625
|
+
codePoint = tempCodePoint;
|
1626
|
+
}
|
1627
|
+
}
|
1628
|
+
break;
|
1629
|
+
case 4:
|
1630
|
+
secondByte = buf[i + 1];
|
1631
|
+
thirdByte = buf[i + 2];
|
1632
|
+
fourthByte = buf[i + 3];
|
1633
|
+
if ((secondByte & 192) === 128 && (thirdByte & 192) === 128 && (fourthByte & 192) === 128) {
|
1634
|
+
tempCodePoint = (firstByte & 15) << 18 | (secondByte & 63) << 12 | (thirdByte & 63) << 6 | fourthByte & 63;
|
1635
|
+
if (tempCodePoint > 65535 && tempCodePoint < 1114112) {
|
1636
|
+
codePoint = tempCodePoint;
|
1637
|
+
}
|
1638
|
+
}
|
1639
|
+
}
|
1640
|
+
}
|
1641
|
+
if (codePoint === null) {
|
1642
|
+
codePoint = 65533;
|
1643
|
+
bytesPerSequence = 1;
|
1644
|
+
} else if (codePoint > 65535) {
|
1645
|
+
codePoint -= 65536;
|
1646
|
+
res.push(codePoint >>> 10 & 1023 | 55296);
|
1647
|
+
codePoint = 56320 | codePoint & 1023;
|
1648
|
+
}
|
1649
|
+
res.push(codePoint);
|
1650
|
+
i += bytesPerSequence;
|
1651
|
+
}
|
1652
|
+
return Buffer._decodeCodePointsArray(res);
|
1653
|
+
}
|
1654
|
+
static _decodeCodePointsArray(codePoints) {
|
1655
|
+
const len = codePoints.length;
|
1656
|
+
if (len <= MAX_ARGUMENTS_LENGTH) {
|
1657
|
+
return String.fromCharCode.apply(String, codePoints);
|
1658
|
+
}
|
1659
|
+
let res = "";
|
1660
|
+
let i = 0;
|
1661
|
+
while (i < len) {
|
1662
|
+
res += String.fromCharCode.apply(String, codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH));
|
1663
|
+
}
|
1664
|
+
return res;
|
1665
|
+
}
|
1666
|
+
static _asciiSlice(buf, start, end) {
|
1667
|
+
let ret = "";
|
1668
|
+
end = Math.min(buf.length, end);
|
1669
|
+
for (let i = start; i < end; ++i) {
|
1670
|
+
ret += String.fromCharCode(buf[i] & 127);
|
1671
|
+
}
|
1672
|
+
return ret;
|
1673
|
+
}
|
1674
|
+
static _latin1Slice(buf, start, end) {
|
1675
|
+
let ret = "";
|
1676
|
+
end = Math.min(buf.length, end);
|
1677
|
+
for (let i = start; i < end; ++i) {
|
1678
|
+
ret += String.fromCharCode(buf[i]);
|
1679
|
+
}
|
1680
|
+
return ret;
|
1681
|
+
}
|
1682
|
+
static _utf16leSlice(buf, start, end) {
|
1683
|
+
const bytes = buf.slice(start, end);
|
1684
|
+
let res = "";
|
1685
|
+
for (let i = 0; i < bytes.length - 1; i += 2) {
|
1686
|
+
res += String.fromCharCode(bytes[i] + bytes[i + 1] * 256);
|
1687
|
+
}
|
1688
|
+
return res;
|
1689
|
+
}
|
1690
|
+
static _arrayIndexOf(arr, val, byteOffset, encoding, dir) {
|
1691
|
+
let indexSize = 1;
|
1692
|
+
let arrLength = arr.length;
|
1693
|
+
let valLength = val.length;
|
1694
|
+
if (encoding !== void 0) {
|
1695
|
+
encoding = Buffer._getEncoding(encoding);
|
1696
|
+
if (encoding === "ucs2" || encoding === "utf16le") {
|
1697
|
+
if (arr.length < 2 || val.length < 2) {
|
1698
|
+
return -1;
|
1699
|
+
}
|
1700
|
+
indexSize = 2;
|
1701
|
+
arrLength /= 2;
|
1702
|
+
valLength /= 2;
|
1703
|
+
byteOffset /= 2;
|
1704
|
+
}
|
1705
|
+
}
|
1706
|
+
function read(buf, i2) {
|
1707
|
+
if (indexSize === 1) {
|
1708
|
+
return buf[i2];
|
1709
|
+
} else {
|
1710
|
+
return buf.readUInt16BE(i2 * indexSize);
|
1711
|
+
}
|
1712
|
+
}
|
1713
|
+
let i;
|
1714
|
+
if (dir) {
|
1715
|
+
let foundIndex = -1;
|
1716
|
+
for (i = byteOffset; i < arrLength; i++) {
|
1717
|
+
if (read(arr, i) === read(val, foundIndex === -1 ? 0 : i - foundIndex)) {
|
1718
|
+
if (foundIndex === -1) foundIndex = i;
|
1719
|
+
if (i - foundIndex + 1 === valLength) return foundIndex * indexSize;
|
1720
|
+
} else {
|
1721
|
+
if (foundIndex !== -1) i -= i - foundIndex;
|
1722
|
+
foundIndex = -1;
|
1723
|
+
}
|
1724
|
+
}
|
1725
|
+
} else {
|
1726
|
+
if (byteOffset + valLength > arrLength) {
|
1727
|
+
byteOffset = arrLength - valLength;
|
1728
|
+
}
|
1729
|
+
for (i = byteOffset; i >= 0; i--) {
|
1730
|
+
let found = true;
|
1731
|
+
for (let j = 0; j < valLength; j++) {
|
1732
|
+
if (read(arr, i + j) !== read(val, j)) {
|
1733
|
+
found = false;
|
1734
|
+
break;
|
1735
|
+
}
|
1736
|
+
}
|
1737
|
+
if (found) {
|
1738
|
+
return i;
|
1739
|
+
}
|
1740
|
+
}
|
1741
|
+
}
|
1742
|
+
return -1;
|
1743
|
+
}
|
1744
|
+
static _checkOffset(offset, ext, length) {
|
1745
|
+
if (offset % 1 !== 0 || offset < 0) throw new RangeError("offset is not uint");
|
1746
|
+
if (offset + ext > length) throw new RangeError("Trying to access beyond buffer length");
|
1747
|
+
}
|
1748
|
+
static _checkInt(buf, value, offset, ext, max, min) {
|
1749
|
+
if (!Buffer.isBuffer(buf)) throw new TypeError('"buffer" argument must be a Buffer instance');
|
1750
|
+
if (value > max || value < min) throw new RangeError('"value" argument is out of bounds');
|
1751
|
+
if (offset + ext > buf.length) throw new RangeError("Index out of range");
|
1752
|
+
}
|
1753
|
+
static _getEncoding(encoding) {
|
1754
|
+
let toLowerCase = false;
|
1755
|
+
let originalEncoding = "";
|
1756
|
+
for (; ; ) {
|
1757
|
+
switch (encoding) {
|
1758
|
+
case "hex":
|
1759
|
+
return "hex";
|
1760
|
+
case "utf8":
|
1761
|
+
return "utf8";
|
1762
|
+
case "ascii":
|
1763
|
+
return "ascii";
|
1764
|
+
case "binary":
|
1765
|
+
return "binary";
|
1766
|
+
case "latin1":
|
1767
|
+
return "latin1";
|
1768
|
+
case "ucs2":
|
1769
|
+
return "ucs2";
|
1770
|
+
case "utf16le":
|
1771
|
+
return "utf16le";
|
1772
|
+
case "base64":
|
1773
|
+
return "base64";
|
1774
|
+
default: {
|
1775
|
+
if (toLowerCase) {
|
1776
|
+
throw new TypeError("Unknown or unsupported encoding: " + originalEncoding);
|
1777
|
+
}
|
1778
|
+
toLowerCase = true;
|
1779
|
+
originalEncoding = encoding;
|
1780
|
+
encoding = encoding.toLowerCase();
|
1781
|
+
}
|
1782
|
+
}
|
1783
|
+
}
|
1784
|
+
}
|
1785
|
+
}
|
1786
|
+
const hexSliceLookupTable = function() {
|
1787
|
+
const alphabet = "0123456789abcdef";
|
1788
|
+
const table = new Array(256);
|
1789
|
+
for (let i = 0; i < 16; ++i) {
|
1790
|
+
const i16 = i * 16;
|
1791
|
+
for (let j = 0; j < 16; ++j) {
|
1792
|
+
table[i16 + j] = alphabet[i] + alphabet[j];
|
1793
|
+
}
|
1794
|
+
}
|
1795
|
+
return table;
|
1796
|
+
}();
|
1797
|
+
const INVALID_BASE64_RE = /[^+/0-9A-Za-z-_]/g;
|
1798
|
+
function base64clean(str) {
|
1799
|
+
str = str.split("=")[0];
|
1800
|
+
str = str.trim().replace(INVALID_BASE64_RE, "");
|
1801
|
+
if (str.length < 2) return "";
|
1802
|
+
while (str.length % 4 !== 0) {
|
1803
|
+
str = str + "=";
|
1804
|
+
}
|
1805
|
+
return str;
|
1806
|
+
}
|
1807
|
+
|
25
1808
|
function notEmpty(value) {
|
26
1809
|
return value !== null && value !== void 0;
|
27
1810
|
}
|
@@ -116,155 +1899,15 @@ function promiseMap(inputValues, mapper) {
|
|
116
1899
|
return inputValues.reduce(reducer, Promise.resolve([]));
|
117
1900
|
}
|
118
1901
|
|
119
|
-
|
120
|
-
|
121
|
-
if (isDefined(process) && isDefined(process.env)) {
|
122
|
-
return {
|
123
|
-
apiKey: process.env.XATA_API_KEY ?? getGlobalApiKey(),
|
124
|
-
databaseURL: process.env.XATA_DATABASE_URL ?? getGlobalDatabaseURL(),
|
125
|
-
branch: process.env.XATA_BRANCH ?? getGlobalBranch(),
|
126
|
-
deployPreview: process.env.XATA_PREVIEW,
|
127
|
-
deployPreviewBranch: process.env.XATA_PREVIEW_BRANCH,
|
128
|
-
vercelGitCommitRef: process.env.VERCEL_GIT_COMMIT_REF,
|
129
|
-
vercelGitRepoOwner: process.env.VERCEL_GIT_REPO_OWNER
|
130
|
-
};
|
131
|
-
}
|
132
|
-
} catch (err) {
|
133
|
-
}
|
134
|
-
try {
|
135
|
-
if (isObject(Deno) && isObject(Deno.env)) {
|
136
|
-
return {
|
137
|
-
apiKey: Deno.env.get("XATA_API_KEY") ?? getGlobalApiKey(),
|
138
|
-
databaseURL: Deno.env.get("XATA_DATABASE_URL") ?? getGlobalDatabaseURL(),
|
139
|
-
branch: Deno.env.get("XATA_BRANCH") ?? getGlobalBranch(),
|
140
|
-
deployPreview: Deno.env.get("XATA_PREVIEW"),
|
141
|
-
deployPreviewBranch: Deno.env.get("XATA_PREVIEW_BRANCH"),
|
142
|
-
vercelGitCommitRef: Deno.env.get("VERCEL_GIT_COMMIT_REF"),
|
143
|
-
vercelGitRepoOwner: Deno.env.get("VERCEL_GIT_REPO_OWNER")
|
144
|
-
};
|
145
|
-
}
|
146
|
-
} catch (err) {
|
147
|
-
}
|
148
|
-
return {
|
149
|
-
apiKey: getGlobalApiKey(),
|
150
|
-
databaseURL: getGlobalDatabaseURL(),
|
151
|
-
branch: getGlobalBranch(),
|
152
|
-
deployPreview: void 0,
|
153
|
-
deployPreviewBranch: void 0,
|
154
|
-
vercelGitCommitRef: void 0,
|
155
|
-
vercelGitRepoOwner: void 0
|
156
|
-
};
|
157
|
-
}
|
158
|
-
function getEnableBrowserVariable() {
|
159
|
-
try {
|
160
|
-
if (isObject(process) && isObject(process.env) && process.env.XATA_ENABLE_BROWSER !== void 0) {
|
161
|
-
return process.env.XATA_ENABLE_BROWSER === "true";
|
162
|
-
}
|
163
|
-
} catch (err) {
|
164
|
-
}
|
165
|
-
try {
|
166
|
-
if (isObject(Deno) && isObject(Deno.env) && Deno.env.get("XATA_ENABLE_BROWSER") !== void 0) {
|
167
|
-
return Deno.env.get("XATA_ENABLE_BROWSER") === "true";
|
168
|
-
}
|
169
|
-
} catch (err) {
|
170
|
-
}
|
171
|
-
try {
|
172
|
-
return XATA_ENABLE_BROWSER === true || XATA_ENABLE_BROWSER === "true";
|
173
|
-
} catch (err) {
|
174
|
-
return void 0;
|
175
|
-
}
|
176
|
-
}
|
177
|
-
function getGlobalApiKey() {
|
178
|
-
try {
|
179
|
-
return XATA_API_KEY;
|
180
|
-
} catch (err) {
|
181
|
-
return void 0;
|
182
|
-
}
|
183
|
-
}
|
184
|
-
function getGlobalDatabaseURL() {
|
185
|
-
try {
|
186
|
-
return XATA_DATABASE_URL;
|
187
|
-
} catch (err) {
|
188
|
-
return void 0;
|
189
|
-
}
|
190
|
-
}
|
191
|
-
function getGlobalBranch() {
|
192
|
-
try {
|
193
|
-
return XATA_BRANCH;
|
194
|
-
} catch (err) {
|
195
|
-
return void 0;
|
196
|
-
}
|
197
|
-
}
|
198
|
-
function getDatabaseURL() {
|
199
|
-
try {
|
200
|
-
const { databaseURL } = getEnvironment();
|
201
|
-
return databaseURL;
|
202
|
-
} catch (err) {
|
203
|
-
return void 0;
|
204
|
-
}
|
205
|
-
}
|
206
|
-
function getAPIKey() {
|
207
|
-
try {
|
208
|
-
const { apiKey } = getEnvironment();
|
209
|
-
return apiKey;
|
210
|
-
} catch (err) {
|
211
|
-
return void 0;
|
212
|
-
}
|
213
|
-
}
|
214
|
-
function getBranch() {
|
215
|
-
try {
|
216
|
-
const { branch } = getEnvironment();
|
217
|
-
return branch;
|
218
|
-
} catch (err) {
|
219
|
-
return void 0;
|
220
|
-
}
|
221
|
-
}
|
222
|
-
function buildPreviewBranchName({ org, branch }) {
|
223
|
-
return `preview-${org}-${branch}`;
|
224
|
-
}
|
225
|
-
function getPreviewBranch() {
|
226
|
-
try {
|
227
|
-
const { deployPreview, deployPreviewBranch, vercelGitCommitRef, vercelGitRepoOwner } = getEnvironment();
|
228
|
-
if (deployPreviewBranch)
|
229
|
-
return deployPreviewBranch;
|
230
|
-
switch (deployPreview) {
|
231
|
-
case "vercel": {
|
232
|
-
if (!vercelGitCommitRef || !vercelGitRepoOwner) {
|
233
|
-
console.warn("XATA_PREVIEW=vercel but VERCEL_GIT_COMMIT_REF or VERCEL_GIT_REPO_OWNER is not valid");
|
234
|
-
return void 0;
|
235
|
-
}
|
236
|
-
return buildPreviewBranchName({ org: vercelGitRepoOwner, branch: vercelGitCommitRef });
|
237
|
-
}
|
238
|
-
}
|
239
|
-
return void 0;
|
240
|
-
} catch (err) {
|
241
|
-
return void 0;
|
242
|
-
}
|
243
|
-
}
|
244
|
-
|
245
|
-
var __accessCheck$6 = (obj, member, msg) => {
|
246
|
-
if (!member.has(obj))
|
247
|
-
throw TypeError("Cannot " + msg);
|
1902
|
+
var __typeError$6 = (msg) => {
|
1903
|
+
throw TypeError(msg);
|
248
1904
|
};
|
249
|
-
var
|
250
|
-
|
251
|
-
|
252
|
-
|
253
|
-
var
|
254
|
-
|
255
|
-
throw TypeError("Cannot add the same private member more than once");
|
256
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
257
|
-
};
|
258
|
-
var __privateSet$4 = (obj, member, value, setter) => {
|
259
|
-
__accessCheck$6(obj, member, "write to private field");
|
260
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
261
|
-
return value;
|
262
|
-
};
|
263
|
-
var __privateMethod$4 = (obj, member, method) => {
|
264
|
-
__accessCheck$6(obj, member, "access private method");
|
265
|
-
return method;
|
266
|
-
};
|
267
|
-
var _fetch, _queue, _concurrency, _enqueue, enqueue_fn;
|
1905
|
+
var __accessCheck$6 = (obj, member, msg) => member.has(obj) || __typeError$6("Cannot " + msg);
|
1906
|
+
var __privateGet$5 = (obj, member, getter) => (__accessCheck$6(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
1907
|
+
var __privateAdd$6 = (obj, member, value) => member.has(obj) ? __typeError$6("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
1908
|
+
var __privateSet$4 = (obj, member, value, setter) => (__accessCheck$6(obj, member, "write to private field"), member.set(obj, value), value);
|
1909
|
+
var __privateMethod$4 = (obj, member, method) => (__accessCheck$6(obj, member, "access private method"), method);
|
1910
|
+
var _fetch, _queue, _concurrency, _ApiRequestPool_instances, enqueue_fn;
|
268
1911
|
const REQUEST_TIMEOUT = 5 * 60 * 1e3;
|
269
1912
|
function getFetchImplementation(userFetch) {
|
270
1913
|
const globalFetch = typeof fetch !== "undefined" ? fetch : void 0;
|
@@ -277,10 +1920,10 @@ function getFetchImplementation(userFetch) {
|
|
277
1920
|
}
|
278
1921
|
class ApiRequestPool {
|
279
1922
|
constructor(concurrency = 10) {
|
280
|
-
__privateAdd$6(this,
|
281
|
-
__privateAdd$6(this, _fetch
|
282
|
-
__privateAdd$6(this, _queue
|
283
|
-
__privateAdd$6(this, _concurrency
|
1923
|
+
__privateAdd$6(this, _ApiRequestPool_instances);
|
1924
|
+
__privateAdd$6(this, _fetch);
|
1925
|
+
__privateAdd$6(this, _queue);
|
1926
|
+
__privateAdd$6(this, _concurrency);
|
284
1927
|
__privateSet$4(this, _queue, []);
|
285
1928
|
__privateSet$4(this, _concurrency, concurrency);
|
286
1929
|
this.running = 0;
|
@@ -315,7 +1958,7 @@ class ApiRequestPool {
|
|
315
1958
|
}
|
316
1959
|
return response;
|
317
1960
|
};
|
318
|
-
return __privateMethod$4(this,
|
1961
|
+
return __privateMethod$4(this, _ApiRequestPool_instances, enqueue_fn).call(this, async () => {
|
319
1962
|
return await runRequest();
|
320
1963
|
});
|
321
1964
|
}
|
@@ -323,7 +1966,7 @@ class ApiRequestPool {
|
|
323
1966
|
_fetch = new WeakMap();
|
324
1967
|
_queue = new WeakMap();
|
325
1968
|
_concurrency = new WeakMap();
|
326
|
-
|
1969
|
+
_ApiRequestPool_instances = new WeakSet();
|
327
1970
|
enqueue_fn = function(task) {
|
328
1971
|
const promise = new Promise((resolve) => __privateGet$5(this, _queue).push(resolve)).finally(() => {
|
329
1972
|
this.started--;
|
@@ -526,7 +2169,7 @@ function defaultOnOpen(response) {
|
|
526
2169
|
}
|
527
2170
|
}
|
528
2171
|
|
529
|
-
const VERSION = "0.29.
|
2172
|
+
const VERSION = "0.29.4";
|
530
2173
|
|
531
2174
|
class ErrorWithCause extends Error {
|
532
2175
|
constructor(message, options) {
|
@@ -606,35 +2249,30 @@ function parseProviderString(provider = "production") {
|
|
606
2249
|
return provider;
|
607
2250
|
}
|
608
2251
|
const [main, workspaces] = provider.split(",");
|
609
|
-
if (!main || !workspaces)
|
610
|
-
return null;
|
2252
|
+
if (!main || !workspaces) return null;
|
611
2253
|
return { main, workspaces };
|
612
2254
|
}
|
613
2255
|
function buildProviderString(provider) {
|
614
|
-
if (isHostProviderAlias(provider))
|
615
|
-
return provider;
|
2256
|
+
if (isHostProviderAlias(provider)) return provider;
|
616
2257
|
return `${provider.main},${provider.workspaces}`;
|
617
2258
|
}
|
618
2259
|
function parseWorkspacesUrlParts(url) {
|
619
|
-
if (!isString(url))
|
620
|
-
return null;
|
2260
|
+
if (!isString(url)) return null;
|
621
2261
|
const matches = {
|
622
2262
|
production: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.xata\.sh\/db\/([^:]+):?(.*)?/),
|
623
2263
|
staging: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.staging-xata\.dev\/db\/([^:]+):?(.*)?/),
|
624
2264
|
dev: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.dev-xata\.dev\/db\/([^:]+):?(.*)?/),
|
625
|
-
local: url.match(/(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:([^:]+):?(.*)?/)
|
2265
|
+
local: url.match(/(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:(?:\d+)\/db\/([^:]+):?(.*)?/)
|
626
2266
|
};
|
627
2267
|
const [host, match] = Object.entries(matches).find(([, match2]) => match2 !== null) ?? [];
|
628
|
-
if (!isHostProviderAlias(host) || !match)
|
629
|
-
return null;
|
2268
|
+
if (!isHostProviderAlias(host) || !match) return null;
|
630
2269
|
return { workspace: match[1], region: match[2], database: match[3], branch: match[4], host };
|
631
2270
|
}
|
632
2271
|
|
633
2272
|
const pool = new ApiRequestPool();
|
634
2273
|
const resolveUrl = (url, queryParams = {}, pathParams = {}) => {
|
635
2274
|
const cleanQueryParams = Object.entries(queryParams).reduce((acc, [key, value]) => {
|
636
|
-
if (value === void 0 || value === null)
|
637
|
-
return acc;
|
2275
|
+
if (value === void 0 || value === null) return acc;
|
638
2276
|
return { ...acc, [key]: value };
|
639
2277
|
}, {});
|
640
2278
|
const query = new URLSearchParams(cleanQueryParams).toString();
|
@@ -682,8 +2320,7 @@ function hostHeader(url) {
|
|
682
2320
|
return groups?.host ? { Host: groups.host } : {};
|
683
2321
|
}
|
684
2322
|
async function parseBody(body, headers) {
|
685
|
-
if (!isDefined(body))
|
686
|
-
return void 0;
|
2323
|
+
if (!isDefined(body)) return void 0;
|
687
2324
|
if (isBlob(body) || typeof body.text === "function") {
|
688
2325
|
return body;
|
689
2326
|
}
|
@@ -762,8 +2399,7 @@ async function fetch$1({
|
|
762
2399
|
[TraceAttributes.CLOUDFLARE_RAY_ID]: response.headers?.get("cf-ray") ?? void 0
|
763
2400
|
});
|
764
2401
|
const message = response.headers?.get("x-xata-message");
|
765
|
-
if (message)
|
766
|
-
console.warn(message);
|
2402
|
+
if (message) console.warn(message);
|
767
2403
|
if (response.status === 204) {
|
768
2404
|
return {};
|
769
2405
|
}
|
@@ -847,7 +2483,30 @@ function parseUrl(url) {
|
|
847
2483
|
|
848
2484
|
const dataPlaneFetch = async (options) => fetch$1({ ...options, endpoint: "dataPlane" });
|
849
2485
|
|
850
|
-
const applyMigration = (variables, signal) => dataPlaneFetch({
|
2486
|
+
const applyMigration = (variables, signal) => dataPlaneFetch({
|
2487
|
+
url: "/db/{dbBranchName}/migrations/apply",
|
2488
|
+
method: "post",
|
2489
|
+
...variables,
|
2490
|
+
signal
|
2491
|
+
});
|
2492
|
+
const startMigration = (variables, signal) => dataPlaneFetch({
|
2493
|
+
url: "/db/{dbBranchName}/migrations/start",
|
2494
|
+
method: "post",
|
2495
|
+
...variables,
|
2496
|
+
signal
|
2497
|
+
});
|
2498
|
+
const completeMigration = (variables, signal) => dataPlaneFetch({
|
2499
|
+
url: "/db/{dbBranchName}/migrations/complete",
|
2500
|
+
method: "post",
|
2501
|
+
...variables,
|
2502
|
+
signal
|
2503
|
+
});
|
2504
|
+
const rollbackMigration = (variables, signal) => dataPlaneFetch({
|
2505
|
+
url: "/db/{dbBranchName}/migrations/rollback",
|
2506
|
+
method: "post",
|
2507
|
+
...variables,
|
2508
|
+
signal
|
2509
|
+
});
|
851
2510
|
const adaptTable = (variables, signal) => dataPlaneFetch({
|
852
2511
|
url: "/db/{dbBranchName}/migrations/adapt/{tableName}",
|
853
2512
|
method: "post",
|
@@ -860,9 +2519,24 @@ const adaptAllTables = (variables, signal) => dataPlaneFetch({
|
|
860
2519
|
...variables,
|
861
2520
|
signal
|
862
2521
|
});
|
863
|
-
const getBranchMigrationJobStatus = (variables, signal) => dataPlaneFetch({
|
864
|
-
|
865
|
-
|
2522
|
+
const getBranchMigrationJobStatus = (variables, signal) => dataPlaneFetch({
|
2523
|
+
url: "/db/{dbBranchName}/migrations/status",
|
2524
|
+
method: "get",
|
2525
|
+
...variables,
|
2526
|
+
signal
|
2527
|
+
});
|
2528
|
+
const getMigrationJobStatus = (variables, signal) => dataPlaneFetch({
|
2529
|
+
url: "/db/{dbBranchName}/migrations/jobs/{jobId}",
|
2530
|
+
method: "get",
|
2531
|
+
...variables,
|
2532
|
+
signal
|
2533
|
+
});
|
2534
|
+
const getMigrationHistory = (variables, signal) => dataPlaneFetch({
|
2535
|
+
url: "/db/{dbBranchName}/migrations/history",
|
2536
|
+
method: "get",
|
2537
|
+
...variables,
|
2538
|
+
signal
|
2539
|
+
});
|
866
2540
|
const getBranchList = (variables, signal) => dataPlaneFetch({
|
867
2541
|
url: "/dbs/{dbName}",
|
868
2542
|
method: "get",
|
@@ -882,75 +2556,160 @@ const getBranchDetails = (variables, signal) => dataPlaneFetch({
|
|
882
2556
|
...variables,
|
883
2557
|
signal
|
884
2558
|
});
|
885
|
-
const createBranch = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}", method: "put", ...variables, signal });
|
886
|
-
const deleteBranch = (variables, signal) => dataPlaneFetch({
|
887
|
-
url: "/db/{dbBranchName}",
|
888
|
-
method: "delete",
|
2559
|
+
const createBranch = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}", method: "put", ...variables, signal });
|
2560
|
+
const deleteBranch = (variables, signal) => dataPlaneFetch({
|
2561
|
+
url: "/db/{dbBranchName}",
|
2562
|
+
method: "delete",
|
2563
|
+
...variables,
|
2564
|
+
signal
|
2565
|
+
});
|
2566
|
+
const getSchema = (variables, signal) => dataPlaneFetch({
|
2567
|
+
url: "/db/{dbBranchName}/schema",
|
2568
|
+
method: "get",
|
2569
|
+
...variables,
|
2570
|
+
signal
|
2571
|
+
});
|
2572
|
+
const copyBranch = (variables, signal) => dataPlaneFetch({
|
2573
|
+
url: "/db/{dbBranchName}/copy",
|
2574
|
+
method: "post",
|
2575
|
+
...variables,
|
2576
|
+
signal
|
2577
|
+
});
|
2578
|
+
const updateBranchMetadata = (variables, signal) => dataPlaneFetch({
|
2579
|
+
url: "/db/{dbBranchName}/metadata",
|
2580
|
+
method: "put",
|
2581
|
+
...variables,
|
2582
|
+
signal
|
2583
|
+
});
|
2584
|
+
const getBranchMetadata = (variables, signal) => dataPlaneFetch({
|
2585
|
+
url: "/db/{dbBranchName}/metadata",
|
2586
|
+
method: "get",
|
2587
|
+
...variables,
|
2588
|
+
signal
|
2589
|
+
});
|
2590
|
+
const getBranchStats = (variables, signal) => dataPlaneFetch({
|
2591
|
+
url: "/db/{dbBranchName}/stats",
|
2592
|
+
method: "get",
|
2593
|
+
...variables,
|
2594
|
+
signal
|
2595
|
+
});
|
2596
|
+
const getGitBranchesMapping = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "get", ...variables, signal });
|
2597
|
+
const addGitBranchesEntry = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "post", ...variables, signal });
|
2598
|
+
const removeGitBranchesEntry = (variables, signal) => dataPlaneFetch({
|
2599
|
+
url: "/dbs/{dbName}/gitBranches",
|
2600
|
+
method: "delete",
|
2601
|
+
...variables,
|
2602
|
+
signal
|
2603
|
+
});
|
2604
|
+
const resolveBranch = (variables, signal) => dataPlaneFetch({
|
2605
|
+
url: "/dbs/{dbName}/resolveBranch",
|
2606
|
+
method: "get",
|
2607
|
+
...variables,
|
2608
|
+
signal
|
2609
|
+
});
|
2610
|
+
const getBranchMigrationHistory = (variables, signal) => dataPlaneFetch({
|
2611
|
+
url: "/db/{dbBranchName}/migrations",
|
2612
|
+
method: "get",
|
2613
|
+
...variables,
|
2614
|
+
signal
|
2615
|
+
});
|
2616
|
+
const getBranchMigrationPlan = (variables, signal) => dataPlaneFetch({
|
2617
|
+
url: "/db/{dbBranchName}/migrations/plan",
|
2618
|
+
method: "post",
|
2619
|
+
...variables,
|
2620
|
+
signal
|
2621
|
+
});
|
2622
|
+
const executeBranchMigrationPlan = (variables, signal) => dataPlaneFetch({
|
2623
|
+
url: "/db/{dbBranchName}/migrations/execute",
|
2624
|
+
method: "post",
|
2625
|
+
...variables,
|
2626
|
+
signal
|
2627
|
+
});
|
2628
|
+
const queryMigrationRequests = (variables, signal) => dataPlaneFetch({
|
2629
|
+
url: "/dbs/{dbName}/migrations/query",
|
2630
|
+
method: "post",
|
2631
|
+
...variables,
|
2632
|
+
signal
|
2633
|
+
});
|
2634
|
+
const createMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations", method: "post", ...variables, signal });
|
2635
|
+
const getMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2636
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}",
|
2637
|
+
method: "get",
|
2638
|
+
...variables,
|
2639
|
+
signal
|
2640
|
+
});
|
2641
|
+
const updateMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2642
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}",
|
2643
|
+
method: "patch",
|
2644
|
+
...variables,
|
2645
|
+
signal
|
2646
|
+
});
|
2647
|
+
const listMigrationRequestsCommits = (variables, signal) => dataPlaneFetch({
|
2648
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/commits",
|
2649
|
+
method: "post",
|
2650
|
+
...variables,
|
2651
|
+
signal
|
2652
|
+
});
|
2653
|
+
const compareMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2654
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/compare",
|
2655
|
+
method: "post",
|
2656
|
+
...variables,
|
2657
|
+
signal
|
2658
|
+
});
|
2659
|
+
const getMigrationRequestIsMerged = (variables, signal) => dataPlaneFetch({
|
2660
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
|
2661
|
+
method: "get",
|
2662
|
+
...variables,
|
2663
|
+
signal
|
2664
|
+
});
|
2665
|
+
const mergeMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2666
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
|
2667
|
+
method: "post",
|
889
2668
|
...variables,
|
890
2669
|
signal
|
891
2670
|
});
|
892
|
-
const
|
893
|
-
url: "/db/{dbBranchName}/schema",
|
894
|
-
method: "
|
2671
|
+
const getBranchSchemaHistory = (variables, signal) => dataPlaneFetch({
|
2672
|
+
url: "/db/{dbBranchName}/schema/history",
|
2673
|
+
method: "post",
|
895
2674
|
...variables,
|
896
2675
|
signal
|
897
2676
|
});
|
898
|
-
const
|
899
|
-
url: "/db/{dbBranchName}/
|
2677
|
+
const compareBranchWithUserSchema = (variables, signal) => dataPlaneFetch({
|
2678
|
+
url: "/db/{dbBranchName}/schema/compare",
|
900
2679
|
method: "post",
|
901
2680
|
...variables,
|
902
2681
|
signal
|
903
2682
|
});
|
904
|
-
const
|
905
|
-
url: "/db/{dbBranchName}/
|
906
|
-
method: "
|
2683
|
+
const compareBranchSchemas = (variables, signal) => dataPlaneFetch({
|
2684
|
+
url: "/db/{dbBranchName}/schema/compare/{branchName}",
|
2685
|
+
method: "post",
|
907
2686
|
...variables,
|
908
2687
|
signal
|
909
2688
|
});
|
910
|
-
const
|
911
|
-
url: "/db/{dbBranchName}/
|
912
|
-
method: "
|
2689
|
+
const updateBranchSchema = (variables, signal) => dataPlaneFetch({
|
2690
|
+
url: "/db/{dbBranchName}/schema/update",
|
2691
|
+
method: "post",
|
913
2692
|
...variables,
|
914
2693
|
signal
|
915
2694
|
});
|
916
|
-
const
|
917
|
-
url: "/db/{dbBranchName}/
|
918
|
-
method: "
|
2695
|
+
const previewBranchSchemaEdit = (variables, signal) => dataPlaneFetch({
|
2696
|
+
url: "/db/{dbBranchName}/schema/preview",
|
2697
|
+
method: "post",
|
919
2698
|
...variables,
|
920
2699
|
signal
|
921
2700
|
});
|
922
|
-
const
|
923
|
-
|
924
|
-
|
925
|
-
const resolveBranch = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/resolveBranch", method: "get", ...variables, signal });
|
926
|
-
const getBranchMigrationHistory = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations", method: "get", ...variables, signal });
|
927
|
-
const getBranchMigrationPlan = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/plan", method: "post", ...variables, signal });
|
928
|
-
const executeBranchMigrationPlan = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/execute", method: "post", ...variables, signal });
|
929
|
-
const queryMigrationRequests = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/query", method: "post", ...variables, signal });
|
930
|
-
const createMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations", method: "post", ...variables, signal });
|
931
|
-
const getMigrationRequest = (variables, signal) => dataPlaneFetch({
|
932
|
-
url: "/dbs/{dbName}/migrations/{mrNumber}",
|
933
|
-
method: "get",
|
2701
|
+
const applyBranchSchemaEdit = (variables, signal) => dataPlaneFetch({
|
2702
|
+
url: "/db/{dbBranchName}/schema/apply",
|
2703
|
+
method: "post",
|
934
2704
|
...variables,
|
935
2705
|
signal
|
936
2706
|
});
|
937
|
-
const
|
938
|
-
|
939
|
-
const compareMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/{mrNumber}/compare", method: "post", ...variables, signal });
|
940
|
-
const getMigrationRequestIsMerged = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/{mrNumber}/merge", method: "get", ...variables, signal });
|
941
|
-
const mergeMigrationRequest = (variables, signal) => dataPlaneFetch({
|
942
|
-
url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
|
2707
|
+
const pushBranchMigrations = (variables, signal) => dataPlaneFetch({
|
2708
|
+
url: "/db/{dbBranchName}/schema/push",
|
943
2709
|
method: "post",
|
944
2710
|
...variables,
|
945
2711
|
signal
|
946
2712
|
});
|
947
|
-
const getBranchSchemaHistory = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/history", method: "post", ...variables, signal });
|
948
|
-
const compareBranchWithUserSchema = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/compare", method: "post", ...variables, signal });
|
949
|
-
const compareBranchSchemas = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/compare/{branchName}", method: "post", ...variables, signal });
|
950
|
-
const updateBranchSchema = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/update", method: "post", ...variables, signal });
|
951
|
-
const previewBranchSchemaEdit = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/preview", method: "post", ...variables, signal });
|
952
|
-
const applyBranchSchemaEdit = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/apply", method: "post", ...variables, signal });
|
953
|
-
const pushBranchMigrations = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/push", method: "post", ...variables, signal });
|
954
2713
|
const createTable = (variables, signal) => dataPlaneFetch({
|
955
2714
|
url: "/db/{dbBranchName}/tables/{tableName}",
|
956
2715
|
method: "put",
|
@@ -963,14 +2722,24 @@ const deleteTable = (variables, signal) => dataPlaneFetch({
|
|
963
2722
|
...variables,
|
964
2723
|
signal
|
965
2724
|
});
|
966
|
-
const updateTable = (variables, signal) => dataPlaneFetch({
|
2725
|
+
const updateTable = (variables, signal) => dataPlaneFetch({
|
2726
|
+
url: "/db/{dbBranchName}/tables/{tableName}",
|
2727
|
+
method: "patch",
|
2728
|
+
...variables,
|
2729
|
+
signal
|
2730
|
+
});
|
967
2731
|
const getTableSchema = (variables, signal) => dataPlaneFetch({
|
968
2732
|
url: "/db/{dbBranchName}/tables/{tableName}/schema",
|
969
2733
|
method: "get",
|
970
2734
|
...variables,
|
971
2735
|
signal
|
972
2736
|
});
|
973
|
-
const setTableSchema = (variables, signal) => dataPlaneFetch({
|
2737
|
+
const setTableSchema = (variables, signal) => dataPlaneFetch({
|
2738
|
+
url: "/db/{dbBranchName}/tables/{tableName}/schema",
|
2739
|
+
method: "put",
|
2740
|
+
...variables,
|
2741
|
+
signal
|
2742
|
+
});
|
974
2743
|
const getTableColumns = (variables, signal) => dataPlaneFetch({
|
975
2744
|
url: "/db/{dbBranchName}/tables/{tableName}/columns",
|
976
2745
|
method: "get",
|
@@ -978,7 +2747,12 @@ const getTableColumns = (variables, signal) => dataPlaneFetch({
|
|
978
2747
|
signal
|
979
2748
|
});
|
980
2749
|
const addTableColumn = (variables, signal) => dataPlaneFetch(
|
981
|
-
{
|
2750
|
+
{
|
2751
|
+
url: "/db/{dbBranchName}/tables/{tableName}/columns",
|
2752
|
+
method: "post",
|
2753
|
+
...variables,
|
2754
|
+
signal
|
2755
|
+
}
|
982
2756
|
);
|
983
2757
|
const getColumn = (variables, signal) => dataPlaneFetch({
|
984
2758
|
url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
|
@@ -986,15 +2760,30 @@ const getColumn = (variables, signal) => dataPlaneFetch({
|
|
986
2760
|
...variables,
|
987
2761
|
signal
|
988
2762
|
});
|
989
|
-
const updateColumn = (variables, signal) => dataPlaneFetch({
|
2763
|
+
const updateColumn = (variables, signal) => dataPlaneFetch({
|
2764
|
+
url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
|
2765
|
+
method: "patch",
|
2766
|
+
...variables,
|
2767
|
+
signal
|
2768
|
+
});
|
990
2769
|
const deleteColumn = (variables, signal) => dataPlaneFetch({
|
991
2770
|
url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
|
992
2771
|
method: "delete",
|
993
2772
|
...variables,
|
994
2773
|
signal
|
995
2774
|
});
|
996
|
-
const branchTransaction = (variables, signal) => dataPlaneFetch({
|
997
|
-
|
2775
|
+
const branchTransaction = (variables, signal) => dataPlaneFetch({
|
2776
|
+
url: "/db/{dbBranchName}/transaction",
|
2777
|
+
method: "post",
|
2778
|
+
...variables,
|
2779
|
+
signal
|
2780
|
+
});
|
2781
|
+
const insertRecord = (variables, signal) => dataPlaneFetch({
|
2782
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data",
|
2783
|
+
method: "post",
|
2784
|
+
...variables,
|
2785
|
+
signal
|
2786
|
+
});
|
998
2787
|
const getFileItem = (variables, signal) => dataPlaneFetch({
|
999
2788
|
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}/column/{columnName}/file/{fileId}",
|
1000
2789
|
method: "get",
|
@@ -1037,11 +2826,36 @@ const getRecord = (variables, signal) => dataPlaneFetch({
|
|
1037
2826
|
...variables,
|
1038
2827
|
signal
|
1039
2828
|
});
|
1040
|
-
const insertRecordWithID = (variables, signal) => dataPlaneFetch({
|
1041
|
-
|
1042
|
-
|
1043
|
-
|
1044
|
-
|
2829
|
+
const insertRecordWithID = (variables, signal) => dataPlaneFetch({
|
2830
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2831
|
+
method: "put",
|
2832
|
+
...variables,
|
2833
|
+
signal
|
2834
|
+
});
|
2835
|
+
const updateRecordWithID = (variables, signal) => dataPlaneFetch({
|
2836
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2837
|
+
method: "patch",
|
2838
|
+
...variables,
|
2839
|
+
signal
|
2840
|
+
});
|
2841
|
+
const upsertRecordWithID = (variables, signal) => dataPlaneFetch({
|
2842
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2843
|
+
method: "post",
|
2844
|
+
...variables,
|
2845
|
+
signal
|
2846
|
+
});
|
2847
|
+
const deleteRecord = (variables, signal) => dataPlaneFetch({
|
2848
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2849
|
+
method: "delete",
|
2850
|
+
...variables,
|
2851
|
+
signal
|
2852
|
+
});
|
2853
|
+
const bulkInsertTableRecords = (variables, signal) => dataPlaneFetch({
|
2854
|
+
url: "/db/{dbBranchName}/tables/{tableName}/bulk",
|
2855
|
+
method: "post",
|
2856
|
+
...variables,
|
2857
|
+
signal
|
2858
|
+
});
|
1045
2859
|
const queryTable = (variables, signal) => dataPlaneFetch({
|
1046
2860
|
url: "/db/{dbBranchName}/tables/{tableName}/query",
|
1047
2861
|
method: "post",
|
@@ -1060,16 +2874,36 @@ const searchTable = (variables, signal) => dataPlaneFetch({
|
|
1060
2874
|
...variables,
|
1061
2875
|
signal
|
1062
2876
|
});
|
1063
|
-
const vectorSearchTable = (variables, signal) => dataPlaneFetch({
|
2877
|
+
const vectorSearchTable = (variables, signal) => dataPlaneFetch({
|
2878
|
+
url: "/db/{dbBranchName}/tables/{tableName}/vectorSearch",
|
2879
|
+
method: "post",
|
2880
|
+
...variables,
|
2881
|
+
signal
|
2882
|
+
});
|
1064
2883
|
const askTable = (variables, signal) => dataPlaneFetch({
|
1065
2884
|
url: "/db/{dbBranchName}/tables/{tableName}/ask",
|
1066
2885
|
method: "post",
|
1067
2886
|
...variables,
|
1068
2887
|
signal
|
1069
2888
|
});
|
1070
|
-
const askTableSession = (variables, signal) => dataPlaneFetch({
|
1071
|
-
|
1072
|
-
|
2889
|
+
const askTableSession = (variables, signal) => dataPlaneFetch({
|
2890
|
+
url: "/db/{dbBranchName}/tables/{tableName}/ask/{sessionId}",
|
2891
|
+
method: "post",
|
2892
|
+
...variables,
|
2893
|
+
signal
|
2894
|
+
});
|
2895
|
+
const summarizeTable = (variables, signal) => dataPlaneFetch({
|
2896
|
+
url: "/db/{dbBranchName}/tables/{tableName}/summarize",
|
2897
|
+
method: "post",
|
2898
|
+
...variables,
|
2899
|
+
signal
|
2900
|
+
});
|
2901
|
+
const aggregateTable = (variables, signal) => dataPlaneFetch({
|
2902
|
+
url: "/db/{dbBranchName}/tables/{tableName}/aggregate",
|
2903
|
+
method: "post",
|
2904
|
+
...variables,
|
2905
|
+
signal
|
2906
|
+
});
|
1073
2907
|
const fileAccess = (variables, signal) => dataPlaneFetch({
|
1074
2908
|
url: "/file/{fileId}",
|
1075
2909
|
method: "get",
|
@@ -1088,9 +2922,18 @@ const sqlQuery = (variables, signal) => dataPlaneFetch({
|
|
1088
2922
|
...variables,
|
1089
2923
|
signal
|
1090
2924
|
});
|
2925
|
+
const sqlBatchQuery = (variables, signal) => dataPlaneFetch({
|
2926
|
+
url: "/db/{dbBranchName}/sql/batch",
|
2927
|
+
method: "post",
|
2928
|
+
...variables,
|
2929
|
+
signal
|
2930
|
+
});
|
1091
2931
|
const operationsByTag$2 = {
|
1092
2932
|
migrations: {
|
1093
2933
|
applyMigration,
|
2934
|
+
startMigration,
|
2935
|
+
completeMigration,
|
2936
|
+
rollbackMigration,
|
1094
2937
|
adaptTable,
|
1095
2938
|
adaptAllTables,
|
1096
2939
|
getBranchMigrationJobStatus,
|
@@ -1155,7 +2998,16 @@ const operationsByTag$2 = {
|
|
1155
2998
|
deleteRecord,
|
1156
2999
|
bulkInsertTableRecords
|
1157
3000
|
},
|
1158
|
-
files: {
|
3001
|
+
files: {
|
3002
|
+
getFileItem,
|
3003
|
+
putFileItem,
|
3004
|
+
deleteFileItem,
|
3005
|
+
getFile,
|
3006
|
+
putFile,
|
3007
|
+
deleteFile,
|
3008
|
+
fileAccess,
|
3009
|
+
fileUpload
|
3010
|
+
},
|
1159
3011
|
searchAndFilter: {
|
1160
3012
|
queryTable,
|
1161
3013
|
searchBranch,
|
@@ -1166,7 +3018,7 @@ const operationsByTag$2 = {
|
|
1166
3018
|
summarizeTable,
|
1167
3019
|
aggregateTable
|
1168
3020
|
},
|
1169
|
-
sql: { sqlQuery }
|
3021
|
+
sql: { sqlQuery, sqlBatchQuery }
|
1170
3022
|
};
|
1171
3023
|
|
1172
3024
|
const controlPlaneFetch = async (options) => fetch$1({ ...options, endpoint: "controlPlane" });
|
@@ -1233,7 +3085,12 @@ const deleteOAuthAccessToken = (variables, signal) => controlPlaneFetch({
|
|
1233
3085
|
...variables,
|
1234
3086
|
signal
|
1235
3087
|
});
|
1236
|
-
const updateOAuthAccessToken = (variables, signal) => controlPlaneFetch({
|
3088
|
+
const updateOAuthAccessToken = (variables, signal) => controlPlaneFetch({
|
3089
|
+
url: "/user/oauth/tokens/{token}",
|
3090
|
+
method: "patch",
|
3091
|
+
...variables,
|
3092
|
+
signal
|
3093
|
+
});
|
1237
3094
|
const getWorkspacesList = (variables, signal) => controlPlaneFetch({
|
1238
3095
|
url: "/workspaces",
|
1239
3096
|
method: "get",
|
@@ -1264,49 +3121,150 @@ const deleteWorkspace = (variables, signal) => controlPlaneFetch({
|
|
1264
3121
|
...variables,
|
1265
3122
|
signal
|
1266
3123
|
});
|
1267
|
-
const getWorkspaceSettings = (variables, signal) => controlPlaneFetch({
|
1268
|
-
|
1269
|
-
|
1270
|
-
|
3124
|
+
const getWorkspaceSettings = (variables, signal) => controlPlaneFetch({
|
3125
|
+
url: "/workspaces/{workspaceId}/settings",
|
3126
|
+
method: "get",
|
3127
|
+
...variables,
|
3128
|
+
signal
|
3129
|
+
});
|
3130
|
+
const updateWorkspaceSettings = (variables, signal) => controlPlaneFetch({
|
3131
|
+
url: "/workspaces/{workspaceId}/settings",
|
3132
|
+
method: "patch",
|
3133
|
+
...variables,
|
3134
|
+
signal
|
3135
|
+
});
|
3136
|
+
const getWorkspaceMembersList = (variables, signal) => controlPlaneFetch({
|
3137
|
+
url: "/workspaces/{workspaceId}/members",
|
3138
|
+
method: "get",
|
3139
|
+
...variables,
|
3140
|
+
signal
|
3141
|
+
});
|
3142
|
+
const updateWorkspaceMemberRole = (variables, signal) => controlPlaneFetch({
|
3143
|
+
url: "/workspaces/{workspaceId}/members/{userId}",
|
3144
|
+
method: "put",
|
3145
|
+
...variables,
|
3146
|
+
signal
|
3147
|
+
});
|
1271
3148
|
const removeWorkspaceMember = (variables, signal) => controlPlaneFetch({
|
1272
3149
|
url: "/workspaces/{workspaceId}/members/{userId}",
|
1273
3150
|
method: "delete",
|
1274
3151
|
...variables,
|
1275
3152
|
signal
|
1276
3153
|
});
|
1277
|
-
const inviteWorkspaceMember = (variables, signal) => controlPlaneFetch({
|
1278
|
-
|
1279
|
-
|
1280
|
-
|
1281
|
-
|
1282
|
-
|
1283
|
-
const
|
3154
|
+
const inviteWorkspaceMember = (variables, signal) => controlPlaneFetch({
|
3155
|
+
url: "/workspaces/{workspaceId}/invites",
|
3156
|
+
method: "post",
|
3157
|
+
...variables,
|
3158
|
+
signal
|
3159
|
+
});
|
3160
|
+
const updateWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3161
|
+
url: "/workspaces/{workspaceId}/invites/{inviteId}",
|
3162
|
+
method: "patch",
|
3163
|
+
...variables,
|
3164
|
+
signal
|
3165
|
+
});
|
3166
|
+
const cancelWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3167
|
+
url: "/workspaces/{workspaceId}/invites/{inviteId}",
|
3168
|
+
method: "delete",
|
3169
|
+
...variables,
|
3170
|
+
signal
|
3171
|
+
});
|
3172
|
+
const acceptWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3173
|
+
url: "/workspaces/{workspaceId}/invites/{inviteKey}/accept",
|
3174
|
+
method: "post",
|
3175
|
+
...variables,
|
3176
|
+
signal
|
3177
|
+
});
|
3178
|
+
const resendWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3179
|
+
url: "/workspaces/{workspaceId}/invites/{inviteId}/resend",
|
3180
|
+
method: "post",
|
3181
|
+
...variables,
|
3182
|
+
signal
|
3183
|
+
});
|
3184
|
+
const listClusters = (variables, signal) => controlPlaneFetch({
|
3185
|
+
url: "/workspaces/{workspaceId}/clusters",
|
3186
|
+
method: "get",
|
3187
|
+
...variables,
|
3188
|
+
signal
|
3189
|
+
});
|
3190
|
+
const createCluster = (variables, signal) => controlPlaneFetch({
|
3191
|
+
url: "/workspaces/{workspaceId}/clusters",
|
3192
|
+
method: "post",
|
3193
|
+
...variables,
|
3194
|
+
signal
|
3195
|
+
});
|
1284
3196
|
const getCluster = (variables, signal) => controlPlaneFetch({
|
1285
3197
|
url: "/workspaces/{workspaceId}/clusters/{clusterId}",
|
1286
3198
|
method: "get",
|
1287
3199
|
...variables,
|
1288
3200
|
signal
|
1289
3201
|
});
|
1290
|
-
const updateCluster = (variables, signal) => controlPlaneFetch({
|
3202
|
+
const updateCluster = (variables, signal) => controlPlaneFetch({
|
3203
|
+
url: "/workspaces/{workspaceId}/clusters/{clusterId}",
|
3204
|
+
method: "patch",
|
3205
|
+
...variables,
|
3206
|
+
signal
|
3207
|
+
});
|
3208
|
+
const deleteCluster = (variables, signal) => controlPlaneFetch({
|
3209
|
+
url: "/workspaces/{workspaceId}/clusters/{clusterId}",
|
3210
|
+
method: "delete",
|
3211
|
+
...variables,
|
3212
|
+
signal
|
3213
|
+
});
|
1291
3214
|
const getDatabaseList = (variables, signal) => controlPlaneFetch({
|
1292
3215
|
url: "/workspaces/{workspaceId}/dbs",
|
1293
3216
|
method: "get",
|
1294
3217
|
...variables,
|
1295
3218
|
signal
|
1296
3219
|
});
|
1297
|
-
const createDatabase = (variables, signal) => controlPlaneFetch({
|
3220
|
+
const createDatabase = (variables, signal) => controlPlaneFetch({
|
3221
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
3222
|
+
method: "put",
|
3223
|
+
...variables,
|
3224
|
+
signal
|
3225
|
+
});
|
1298
3226
|
const deleteDatabase = (variables, signal) => controlPlaneFetch({
|
1299
3227
|
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
1300
3228
|
method: "delete",
|
1301
3229
|
...variables,
|
1302
3230
|
signal
|
1303
3231
|
});
|
1304
|
-
const getDatabaseMetadata = (variables, signal) => controlPlaneFetch({
|
1305
|
-
|
1306
|
-
|
1307
|
-
|
1308
|
-
|
1309
|
-
|
3232
|
+
const getDatabaseMetadata = (variables, signal) => controlPlaneFetch({
|
3233
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
3234
|
+
method: "get",
|
3235
|
+
...variables,
|
3236
|
+
signal
|
3237
|
+
});
|
3238
|
+
const updateDatabaseMetadata = (variables, signal) => controlPlaneFetch({
|
3239
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
3240
|
+
method: "patch",
|
3241
|
+
...variables,
|
3242
|
+
signal
|
3243
|
+
});
|
3244
|
+
const renameDatabase = (variables, signal) => controlPlaneFetch({
|
3245
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/rename",
|
3246
|
+
method: "post",
|
3247
|
+
...variables,
|
3248
|
+
signal
|
3249
|
+
});
|
3250
|
+
const getDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
|
3251
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
|
3252
|
+
method: "get",
|
3253
|
+
...variables,
|
3254
|
+
signal
|
3255
|
+
});
|
3256
|
+
const updateDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
|
3257
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
|
3258
|
+
method: "put",
|
3259
|
+
...variables,
|
3260
|
+
signal
|
3261
|
+
});
|
3262
|
+
const deleteDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
|
3263
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
|
3264
|
+
method: "delete",
|
3265
|
+
...variables,
|
3266
|
+
signal
|
3267
|
+
});
|
1310
3268
|
const listRegions = (variables, signal) => controlPlaneFetch({
|
1311
3269
|
url: "/workspaces/{workspaceId}/regions",
|
1312
3270
|
method: "get",
|
@@ -1344,7 +3302,13 @@ const operationsByTag$1 = {
|
|
1344
3302
|
acceptWorkspaceMemberInvite,
|
1345
3303
|
resendWorkspaceMemberInvite
|
1346
3304
|
},
|
1347
|
-
xbcontrolOther: {
|
3305
|
+
xbcontrolOther: {
|
3306
|
+
listClusters,
|
3307
|
+
createCluster,
|
3308
|
+
getCluster,
|
3309
|
+
updateCluster,
|
3310
|
+
deleteCluster
|
3311
|
+
},
|
1348
3312
|
databases: {
|
1349
3313
|
getDatabaseList,
|
1350
3314
|
createDatabase,
|
@@ -1364,7 +3328,7 @@ const operationsByTag = deepMerge(operationsByTag$2, operationsByTag$1);
|
|
1364
3328
|
const buildApiClient = () => class {
|
1365
3329
|
constructor(options = {}) {
|
1366
3330
|
const provider = options.host ?? "production";
|
1367
|
-
const apiKey = options.apiKey
|
3331
|
+
const apiKey = options.apiKey;
|
1368
3332
|
const trace = options.trace ?? defaultTrace;
|
1369
3333
|
const clientID = generateUUID();
|
1370
3334
|
if (!apiKey) {
|
@@ -1431,8 +3395,7 @@ function buildTransformString(transformations) {
|
|
1431
3395
|
).join(",");
|
1432
3396
|
}
|
1433
3397
|
function transformImage(url, ...transformations) {
|
1434
|
-
if (!isDefined(url))
|
1435
|
-
return void 0;
|
3398
|
+
if (!isDefined(url)) return void 0;
|
1436
3399
|
const newTransformations = buildTransformString(transformations);
|
1437
3400
|
const { hostname, pathname, search } = new URL(url);
|
1438
3401
|
const pathParts = pathname.split("/");
|
@@ -1545,8 +3508,7 @@ class XataFile {
|
|
1545
3508
|
}
|
1546
3509
|
}
|
1547
3510
|
const parseInputFileEntry = async (entry) => {
|
1548
|
-
if (!isDefined(entry))
|
1549
|
-
return null;
|
3511
|
+
if (!isDefined(entry)) return null;
|
1550
3512
|
const { id, name, mediaType, base64Content, enablePublicUrl, signedUrlTimeout, uploadUrlTimeout } = await entry;
|
1551
3513
|
return compactObject({
|
1552
3514
|
id,
|
@@ -1561,24 +3523,19 @@ const parseInputFileEntry = async (entry) => {
 };
 
 function cleanFilter(filter) {
-  if (!isDefined(filter))
-    return void 0;
-  if (!isObject(filter))
-    return filter;
+  if (!isDefined(filter)) return void 0;
+  if (!isObject(filter)) return filter;
   const values = Object.fromEntries(
     Object.entries(filter).reduce((acc, [key, value]) => {
-      if (!isDefined(value))
-        return acc;
+      if (!isDefined(value)) return acc;
       if (Array.isArray(value)) {
         const clean = value.map((item) => cleanFilter(item)).filter((item) => isDefined(item));
-        if (clean.length === 0)
-          return acc;
+        if (clean.length === 0) return acc;
         return [...acc, [key, clean]];
       }
       if (isObject(value)) {
         const clean = cleanFilter(value);
-        if (!isDefined(clean))
-          return acc;
+        if (!isDefined(clean)) return acc;
         return [...acc, [key, clean]];
       }
       return [...acc, [key, value]];
@@ -1588,10 +3545,8 @@ function cleanFilter(filter) {
 }
 
 function stringifyJson(value) {
-  if (!isDefined(value))
-    return value;
-  if (isString(value))
-    return value;
+  if (!isDefined(value)) return value;
+  if (isString(value)) return value;
   try {
     return JSON.stringify(value);
   } catch (e) {
@@ -1606,28 +3561,17 @@ function parseJson(value) {
   }
 }
 
-var __accessCheck$5 = (obj, member, msg) => {
-  if (!member.has(obj))
-    throw TypeError("Cannot " + msg);
-};
-var __privateGet$4 = (obj, member, getter) => {
-  __accessCheck$5(obj, member, "read from private field");
-  return getter ? getter.call(obj) : member.get(obj);
-};
-var __privateAdd$5 = (obj, member, value) => {
-  if (member.has(obj))
-    throw TypeError("Cannot add the same private member more than once");
-  member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
-};
-var __privateSet$3 = (obj, member, value, setter) => {
-  __accessCheck$5(obj, member, "write to private field");
-  setter ? setter.call(obj, value) : member.set(obj, value);
-  return value;
+var __typeError$5 = (msg) => {
+  throw TypeError(msg);
 };
+var __accessCheck$5 = (obj, member, msg) => member.has(obj) || __typeError$5("Cannot " + msg);
+var __privateGet$4 = (obj, member, getter) => (__accessCheck$5(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
+var __privateAdd$5 = (obj, member, value) => member.has(obj) ? __typeError$5("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
+var __privateSet$3 = (obj, member, value, setter) => (__accessCheck$5(obj, member, "write to private field"), member.set(obj, value), value);
 var _query, _page;
 class Page {
   constructor(query, meta, records = []) {
-    __privateAdd$5(this, _query, void 0);
+    __privateAdd$5(this, _query);
     __privateSet$3(this, _query, query);
     this.meta = meta;
     this.records = new PageRecordArray(this, records);
@@ -1714,7 +3658,7 @@ class RecordArray extends Array {
 const _PageRecordArray = class _PageRecordArray extends Array {
   constructor(...args) {
     super(..._PageRecordArray.parseConstructorParams(...args));
-    __privateAdd$5(this, _page, void 0);
+    __privateAdd$5(this, _page);
     __privateSet$3(this, _page, isObject(args[0]?.meta) ? args[0] : { meta: { page: { cursor: "", more: false } }, records: [] });
   }
   static parseConstructorParams(...args) {
@@ -1785,34 +3729,20 @@ const _PageRecordArray = class _PageRecordArray extends Array {
 _page = new WeakMap();
 let PageRecordArray = _PageRecordArray;
 
-var __accessCheck$4 = (obj, member, msg) => {
-  if (!member.has(obj))
-    throw TypeError("Cannot " + msg);
-};
-var __privateGet$3 = (obj, member, getter) => {
-  __accessCheck$4(obj, member, "read from private field");
-  return getter ? getter.call(obj) : member.get(obj);
-};
-var __privateAdd$4 = (obj, member, value) => {
-  if (member.has(obj))
-    throw TypeError("Cannot add the same private member more than once");
-  member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
-};
-var __privateSet$2 = (obj, member, value, setter) => {
-  __accessCheck$4(obj, member, "write to private field");
-  setter ? setter.call(obj, value) : member.set(obj, value);
-  return value;
-};
-var __privateMethod$3 = (obj, member, method) => {
-  __accessCheck$4(obj, member, "access private method");
-  return method;
+var __typeError$4 = (msg) => {
+  throw TypeError(msg);
 };
-var _table$1, _repository, _data, _cleanFilterConstraint, cleanFilterConstraint_fn;
+var __accessCheck$4 = (obj, member, msg) => member.has(obj) || __typeError$4("Cannot " + msg);
+var __privateGet$3 = (obj, member, getter) => (__accessCheck$4(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
+var __privateAdd$4 = (obj, member, value) => member.has(obj) ? __typeError$4("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
+var __privateSet$2 = (obj, member, value, setter) => (__accessCheck$4(obj, member, "write to private field"), member.set(obj, value), value);
+var __privateMethod$3 = (obj, member, method) => (__accessCheck$4(obj, member, "access private method"), method);
+var _table$1, _repository, _data, _Query_instances, cleanFilterConstraint_fn;
 const _Query = class _Query {
   constructor(repository, table, data, rawParent) {
-    __privateAdd$4(this, _cleanFilterConstraint);
-    __privateAdd$4(this, _table$1, void 0);
-    __privateAdd$4(this, _repository, void 0);
+    __privateAdd$4(this, _Query_instances);
+    __privateAdd$4(this, _table$1);
+    __privateAdd$4(this, _repository);
     __privateAdd$4(this, _data, { filter: {} });
     // Implements pagination
     this.meta = { page: { cursor: "start", more: true, size: PAGINATION_DEFAULT_SIZE } };
@@ -1890,12 +3820,12 @@ const _Query = class _Query {
   filter(a, b) {
     if (arguments.length === 1) {
       const constraints = Object.entries(a ?? {}).map(([column, constraint]) => ({
-        [column]: __privateMethod$3(this, _cleanFilterConstraint, cleanFilterConstraint_fn).call(this, column, constraint)
+        [column]: __privateMethod$3(this, _Query_instances, cleanFilterConstraint_fn).call(this, column, constraint)
       }));
       const $all = compact([__privateGet$3(this, _data).filter?.$all].flat().concat(constraints));
       return new _Query(__privateGet$3(this, _repository), __privateGet$3(this, _table$1), { filter: { $all } }, __privateGet$3(this, _data));
     } else {
-      const constraints = isDefined(a) && isDefined(b) ? [{ [a]: __privateMethod$3(this, _cleanFilterConstraint, cleanFilterConstraint_fn).call(this, a, b) }] : void 0;
+      const constraints = isDefined(a) && isDefined(b) ? [{ [a]: __privateMethod$3(this, _Query_instances, cleanFilterConstraint_fn).call(this, a, b) }] : void 0;
       const $all = compact([__privateGet$3(this, _data).filter?.$all].flat().concat(constraints));
       return new _Query(__privateGet$3(this, _repository), __privateGet$3(this, _table$1), { filter: { $all } }, __privateGet$3(this, _data));
     }
@@ -1974,8 +3904,7 @@ const _Query = class _Query {
   }
   async getFirstOrThrow(options = {}) {
     const records = await this.getMany({ ...options, pagination: { size: 1 } });
-    if (records[0] === void 0)
-      throw new Error("No results found.");
+    if (records[0] === void 0) throw new Error("No results found.");
     return records[0];
   }
   async summarize(params = {}) {
@@ -2030,7 +3959,7 @@ const _Query = class _Query {
 _table$1 = new WeakMap();
 _repository = new WeakMap();
 _data = new WeakMap();
-_cleanFilterConstraint = new WeakSet();
+_Query_instances = new WeakSet();
 cleanFilterConstraint_fn = function(column, value) {
   const columnType = __privateGet$3(this, _table$1).schema?.columns.find(({ name }) => name === column)?.type;
   if (columnType === "multiple" && (isString(value) || isStringArray(value))) {
@@ -2091,8 +4020,7 @@ function isSortFilterString(value) {
 }
 function isSortFilterBase(filter) {
   return isObject(filter) && Object.entries(filter).every(([key, value]) => {
-    if (key === "*")
-      return value === "random";
+    if (key === "*") return value === "random";
     return value === "asc" || value === "desc";
   });
 }
@@ -2113,29 +4041,15 @@ function buildSortFilter(filter) {
   }
 }
 
-var __accessCheck$3 = (obj, member, msg) => {
-  if (!member.has(obj))
-    throw TypeError("Cannot " + msg);
-};
-var __privateGet$2 = (obj, member, getter) => {
-  __accessCheck$3(obj, member, "read from private field");
-  return getter ? getter.call(obj) : member.get(obj);
-};
-var __privateAdd$3 = (obj, member, value) => {
-  if (member.has(obj))
-    throw TypeError("Cannot add the same private member more than once");
-  member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
+var __typeError$3 = (msg) => {
+  throw TypeError(msg);
 };
-var __privateSet$1 = (obj, member, value, setter) => {
-  __accessCheck$3(obj, member, "write to private field");
-  setter ? setter.call(obj, value) : member.set(obj, value);
-  return value;
-};
-var __privateMethod$2 = (obj, member, method) => {
-  __accessCheck$3(obj, member, "access private method");
-  return method;
-};
-var _table, _getFetchProps, _db, _schemaTables, _trace, _insertRecordWithoutId, insertRecordWithoutId_fn, _insertRecordWithId, insertRecordWithId_fn, _insertRecords, insertRecords_fn, _updateRecordWithID, updateRecordWithID_fn, _updateRecords, updateRecords_fn, _upsertRecordWithID, upsertRecordWithID_fn, _deleteRecord, deleteRecord_fn, _deleteRecords, deleteRecords_fn, _getSchemaTables, getSchemaTables_fn, _transformObjectToApi, transformObjectToApi_fn;
+var __accessCheck$3 = (obj, member, msg) => member.has(obj) || __typeError$3("Cannot " + msg);
+var __privateGet$2 = (obj, member, getter) => (__accessCheck$3(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
+var __privateAdd$3 = (obj, member, value) => member.has(obj) ? __typeError$3("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
+var __privateSet$1 = (obj, member, value, setter) => (__accessCheck$3(obj, member, "write to private field"), member.set(obj, value), value);
+var __privateMethod$2 = (obj, member, method) => (__accessCheck$3(obj, member, "access private method"), method);
+var _table, _getFetchProps, _db, _schemaTables, _trace, _RestRepository_instances, insertRecordWithoutId_fn, insertRecordWithId_fn, insertRecords_fn, updateRecordWithID_fn, updateRecords_fn, upsertRecordWithID_fn, deleteRecord_fn, deleteRecords_fn, getSchemaTables_fn, transformObjectToApi_fn;
 const BULK_OPERATION_MAX_SIZE = 1e3;
 class Repository extends Query {
 }
@@ -2146,21 +4060,12 @@ class RestRepository extends Query {
       { name: options.table, schema: options.schemaTables?.find((table) => table.name === options.table) },
       {}
     );
-    __privateAdd$3(this, _insertRecordWithoutId);
-    __privateAdd$3(this, _insertRecordWithId);
-    __privateAdd$3(this, _insertRecords);
-    __privateAdd$3(this, _updateRecordWithID);
-    __privateAdd$3(this, _updateRecords);
-    __privateAdd$3(this, _upsertRecordWithID);
-    __privateAdd$3(this, _deleteRecord);
-    __privateAdd$3(this, _deleteRecords);
-    __privateAdd$3(this, _getSchemaTables);
-    __privateAdd$3(this, _transformObjectToApi);
-    __privateAdd$3(this, _table, void 0);
-    __privateAdd$3(this, _getFetchProps, void 0);
-    __privateAdd$3(this, _db, void 0);
-    __privateAdd$3(this, _schemaTables, void 0);
-    __privateAdd$3(this, _trace, void 0);
+    __privateAdd$3(this, _RestRepository_instances);
+    __privateAdd$3(this, _table);
+    __privateAdd$3(this, _getFetchProps);
+    __privateAdd$3(this, _db);
+    __privateAdd$3(this, _schemaTables);
+    __privateAdd$3(this, _trace);
     __privateSet$1(this, _table, options.table);
     __privateSet$1(this, _db, options.db);
     __privateSet$1(this, _schemaTables, options.schemaTables);
@@ -2179,31 +4084,28 @@ class RestRepository extends Query {
     return __privateGet$2(this, _trace).call(this, "create", async () => {
       const ifVersion = parseIfVersion(b, c, d);
       if (Array.isArray(a)) {
-        if (a.length === 0)
-          return [];
-        const ids = await __privateMethod$2(this, _insertRecords, insertRecords_fn).call(this, a, { ifVersion, createOnly: true });
+        if (a.length === 0) return [];
+        const ids = await __privateMethod$2(this, _RestRepository_instances, insertRecords_fn).call(this, a, { ifVersion, createOnly: true });
         const columns = isValidSelectableColumns(b) ? b : ["*"];
         const result = await this.read(ids, columns);
         return result;
       }
       if (isString(a) && isObject(b)) {
-        if (a === "")
-          throw new Error("The id can't be empty");
+        if (a === "") throw new Error("The id can't be empty");
         const columns = isValidSelectableColumns(c) ? c : void 0;
-        return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: true, ifVersion });
+        return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: true, ifVersion });
       }
       if (isObject(a) && isString(a.xata_id)) {
-        if (a.xata_id === "")
-          throw new Error("The id can't be empty");
+        if (a.xata_id === "") throw new Error("The id can't be empty");
         const columns = isValidSelectableColumns(b) ? b : void 0;
-        return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, {
+        return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, {
           createOnly: true,
           ifVersion
         });
       }
       if (isObject(a)) {
         const columns = isValidSelectableColumns(b) ? b : void 0;
-        return __privateMethod$2(this, _insertRecordWithoutId, insertRecordWithoutId_fn).call(this, a, columns);
+        return __privateMethod$2(this, _RestRepository_instances, insertRecordWithoutId_fn).call(this, a, columns);
       }
       throw new Error("Invalid arguments for create method");
     });
@@ -2212,8 +4114,7 @@ class RestRepository extends Query {
     return __privateGet$2(this, _trace).call(this, "read", async () => {
       const columns = isValidSelectableColumns(b) ? b : ["*"];
       if (Array.isArray(a)) {
-        if (a.length === 0)
-          return [];
+        if (a.length === 0) return [];
         const ids = a.map((item) => extractId(item));
         const finalObjects = await this.getAll({ filter: { xata_id: { $any: compact(ids) } }, columns });
         const dictionary = finalObjects.reduce((acc, object) => {
@@ -2236,7 +4137,7 @@ class RestRepository extends Query {
           queryParams: { columns },
           ...__privateGet$2(this, _getFetchProps).call(this)
         });
-        const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
+        const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
         return initObject(
           __privateGet$2(this, _db),
           schemaTables,
@@ -2277,11 +4178,10 @@ class RestRepository extends Query {
     return __privateGet$2(this, _trace).call(this, "update", async () => {
       const ifVersion = parseIfVersion(b, c, d);
       if (Array.isArray(a)) {
-        if (a.length === 0)
-          return [];
+        if (a.length === 0) return [];
         const existing = await this.read(a, ["xata_id"]);
         const updates = a.filter((_item, index) => existing[index] !== null);
-        await __privateMethod$2(this, _updateRecords, updateRecords_fn).call(this, updates, {
+        await __privateMethod$2(this, _RestRepository_instances, updateRecords_fn).call(this, updates, {
           ifVersion,
           upsert: false
         });
@@ -2292,15 +4192,14 @@ class RestRepository extends Query {
       try {
         if (isString(a) && isObject(b)) {
           const columns = isValidSelectableColumns(c) ? c : void 0;
-          return await __privateMethod$2(this, _updateRecordWithID, updateRecordWithID_fn).call(this, a, b, columns, { ifVersion });
+          return await __privateMethod$2(this, _RestRepository_instances, updateRecordWithID_fn).call(this, a, b, columns, { ifVersion });
         }
         if (isObject(a) && isString(a.xata_id)) {
           const columns = isValidSelectableColumns(b) ? b : void 0;
-          return await __privateMethod$2(this, _updateRecordWithID, updateRecordWithID_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, { ifVersion });
+          return await __privateMethod$2(this, _RestRepository_instances, updateRecordWithID_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, { ifVersion });
         }
       } catch (error) {
-        if (error.status === 422)
-          return null;
+        if (error.status === 422) return null;
         throw error;
       }
       throw new Error("Invalid arguments for update method");
@@ -2329,9 +4228,8 @@ class RestRepository extends Query {
     return __privateGet$2(this, _trace).call(this, "createOrUpdate", async () => {
       const ifVersion = parseIfVersion(b, c, d);
       if (Array.isArray(a)) {
-        if (a.length === 0)
-          return [];
-        await __privateMethod$2(this, _updateRecords, updateRecords_fn).call(this, a, {
+        if (a.length === 0) return [];
+        await __privateMethod$2(this, _RestRepository_instances, updateRecords_fn).call(this, a, {
          ifVersion,
          upsert: true
        });
@@ -2340,16 +4238,14 @@ class RestRepository extends Query {
         return result;
       }
       if (isString(a) && isObject(b)) {
-        if (a === "")
-          throw new Error("The id can't be empty");
+        if (a === "") throw new Error("The id can't be empty");
         const columns = isValidSelectableColumns(c) ? c : void 0;
-        return await __privateMethod$2(this, _upsertRecordWithID, upsertRecordWithID_fn).call(this, a, b, columns, { ifVersion });
+        return await __privateMethod$2(this, _RestRepository_instances, upsertRecordWithID_fn).call(this, a, b, columns, { ifVersion });
       }
       if (isObject(a) && isString(a.xata_id)) {
-        if (a.xata_id === "")
-          throw new Error("The id can't be empty");
+        if (a.xata_id === "") throw new Error("The id can't be empty");
         const columns = isValidSelectableColumns(c) ? c : void 0;
-        return await __privateMethod$2(this, _upsertRecordWithID, upsertRecordWithID_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, { ifVersion });
+        return await __privateMethod$2(this, _RestRepository_instances, upsertRecordWithID_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, { ifVersion });
       }
       if (!isDefined(a) && isObject(b)) {
         return await this.create(b, c);
@@ -2364,24 +4260,21 @@ class RestRepository extends Query {
     return __privateGet$2(this, _trace).call(this, "createOrReplace", async () => {
       const ifVersion = parseIfVersion(b, c, d);
       if (Array.isArray(a)) {
-        if (a.length === 0)
-          return [];
-        const ids = await __privateMethod$2(this, _insertRecords, insertRecords_fn).call(this, a, { ifVersion, createOnly: false });
+        if (a.length === 0) return [];
+        const ids = await __privateMethod$2(this, _RestRepository_instances, insertRecords_fn).call(this, a, { ifVersion, createOnly: false });
         const columns = isValidSelectableColumns(b) ? b : ["*"];
         const result = await this.read(ids, columns);
         return result;
       }
       if (isString(a) && isObject(b)) {
-        if (a === "")
-          throw new Error("The id can't be empty");
+        if (a === "") throw new Error("The id can't be empty");
         const columns = isValidSelectableColumns(c) ? c : void 0;
-        return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: false, ifVersion });
+        return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: false, ifVersion });
       }
       if (isObject(a) && isString(a.xata_id)) {
-        if (a.xata_id === "")
-          throw new Error("The id can't be empty");
+        if (a.xata_id === "") throw new Error("The id can't be empty");
         const columns = isValidSelectableColumns(c) ? c : void 0;
-        return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, {
+        return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, {
           createOnly: false,
           ifVersion
         });
@@ -2398,25 +4291,22 @@ class RestRepository extends Query {
   async delete(a, b) {
     return __privateGet$2(this, _trace).call(this, "delete", async () => {
       if (Array.isArray(a)) {
-        if (a.length === 0)
-          return [];
+        if (a.length === 0) return [];
         const ids = a.map((o) => {
-          if (isString(o))
-            return o;
-          if (isString(o.xata_id))
-            return o.xata_id;
+          if (isString(o)) return o;
+          if (isString(o.xata_id)) return o.xata_id;
           throw new Error("Invalid arguments for delete method");
         });
         const columns = isValidSelectableColumns(b) ? b : ["*"];
         const result = await this.read(a, columns);
-        await __privateMethod$2(this, _deleteRecords, deleteRecords_fn).call(this, ids);
+        await __privateMethod$2(this, _RestRepository_instances, deleteRecords_fn).call(this, ids);
         return result;
       }
       if (isString(a)) {
-        return __privateMethod$2(this, _deleteRecord, deleteRecord_fn).call(this, a, b);
+        return __privateMethod$2(this, _RestRepository_instances, deleteRecord_fn).call(this, a, b);
       }
       if (isObject(a) && isString(a.xata_id)) {
-        return __privateMethod$2(this, _deleteRecord, deleteRecord_fn).call(this, a.xata_id, b);
+        return __privateMethod$2(this, _RestRepository_instances, deleteRecord_fn).call(this, a.xata_id, b);
       }
       throw new Error("Invalid arguments for delete method");
     });
@@ -2460,7 +4350,7 @@ class RestRepository extends Query {
         },
         ...__privateGet$2(this, _getFetchProps).call(this)
       });
-      const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
+      const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
       return {
         records: records.map((item) => initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), item, ["*"])),
         totalCount
@@ -2485,7 +4375,7 @@ class RestRepository extends Query {
         },
         ...__privateGet$2(this, _getFetchProps).call(this)
       });
-      const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
+      const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
       return {
         records: records.map((item) => initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), item, ["*"])),
        totalCount
@@ -2527,7 +4417,7 @@ class RestRepository extends Query {
        fetchOptions: data.fetchOptions,
        ...__privateGet$2(this, _getFetchProps).call(this)
      });
-      const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
+      const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
      const records = objects.map(
        (record) => initObject(
          __privateGet$2(this, _db),
@@ -2561,7 +4451,7 @@ class RestRepository extends Query {
        },
        ...__privateGet$2(this, _getFetchProps).call(this)
      });
-      const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
+      const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
      return {
        ...result,
        summaries: result.summaries.map(
@@ -2609,9 +4499,9 @@ _getFetchProps = new WeakMap();
 _db = new WeakMap();
 _schemaTables = new WeakMap();
 _trace = new WeakMap();
-_insertRecordWithoutId = new WeakSet();
+_RestRepository_instances = new WeakSet();
 insertRecordWithoutId_fn = async function(object, columns = ["*"]) {
-  const record = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
+  const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
   const response = await insertRecord({
     pathParams: {
       workspace: "{workspaceId}",
@@ -2623,14 +4513,12 @@ insertRecordWithoutId_fn = async function(object, columns = ["*"]) {
     body: record,
     ...__privateGet$2(this, _getFetchProps).call(this)
   });
-  const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
+  const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
   return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
 };
-_insertRecordWithId = new WeakSet();
 insertRecordWithId_fn = async function(recordId, object, columns = ["*"], { createOnly, ifVersion }) {
-  if (!recordId)
-    return null;
-  const record = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
+  if (!recordId) return null;
+  const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
   const response = await insertRecordWithID({
     pathParams: {
       workspace: "{workspaceId}",
@@ -2643,13 +4531,12 @@ insertRecordWithId_fn = async function(recordId, object, columns = ["*"], { crea
     queryParams: { createOnly, columns, ifVersion },
     ...__privateGet$2(this, _getFetchProps).call(this)
   });
-  const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
+  const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
   return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
 };
-_insertRecords = new WeakSet();
 insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
   const operations = await promiseMap(objects, async (object) => {
-    const record = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
+    const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
     return { insert: { table: __privateGet$2(this, _table), record, createOnly, ifVersion } };
   });
   const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
@@ -2674,11 +4561,9 @@ insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
   }
   return ids;
 };
-_updateRecordWithID = new WeakSet();
 updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVersion }) {
-  if (!recordId)
-    return null;
-  const { xata_id: _id, ...record } = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
+  if (!recordId) return null;
+  const { xata_id: _id, ...record } = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
   try {
     const response = await updateRecordWithID({
       pathParams: {
@@ -2692,7 +4577,7 @@ updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
       body: record,
       ...__privateGet$2(this, _getFetchProps).call(this)
     });
-    const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
+    const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
     return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
   } catch (e) {
     if (isObject(e) && e.status === 404) {
@@ -2701,10 +4586,9 @@ updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
     throw e;
   }
 };
-_updateRecords = new WeakSet();
 updateRecords_fn = async function(objects, { ifVersion, upsert }) {
   const operations = await promiseMap(objects, async ({ xata_id, ...object }) => {
-    const fields = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
+    const fields = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
     return { update: { table: __privateGet$2(this, _table), id: xata_id, ifVersion, upsert, fields } };
   });
   const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
@@ -2729,10 +4613,8 @@ updateRecords_fn = async function(objects, { ifVersion, upsert }) {
   }
   return ids;
 };
-_upsertRecordWithID = new WeakSet();
 upsertRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVersion }) {
-  if (!recordId)
-    return null;
+  if (!recordId) return null;
   const response = await upsertRecordWithID({
     pathParams: {
       workspace: "{workspaceId}",
@@ -2745,13 +4627,11 @@ upsertRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
     body: object,
     ...__privateGet$2(this, _getFetchProps).call(this)
   });
-  const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
+  const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
   return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
 };
-_deleteRecord = new WeakSet();
 deleteRecord_fn = async function(recordId, columns = ["*"]) {
-  if (!recordId)
-    return null;
+  if (!recordId) return null;
   try {
     const response = await deleteRecord({
       pathParams: {
@@ -2764,7 +4644,7 @@ deleteRecord_fn = async function(recordId, columns = ["*"]) {
       queryParams: { columns },
      ...__privateGet$2(this, _getFetchProps).call(this)
    });
-    const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
+    const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
    return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
  } catch (e) {
    if (isObject(e) && e.status === 404) {
@@ -2773,7 +4653,6 @@ deleteRecord_fn = async function(recordId, columns = ["*"]) {
    throw e;
  }
 };
-_deleteRecords = new WeakSet();
 deleteRecords_fn = async function(recordIds) {
   const chunkedOperations = chunk(
     compact(recordIds).map((id) => ({ delete: { table: __privateGet$2(this, _table), id } })),
@@ -2791,10 +4670,8 @@ deleteRecords_fn = async function(recordIds) {
     });
   }
 };
-_getSchemaTables = new WeakSet();
 getSchemaTables_fn = async function() {
-  if (__privateGet$2(this, _schemaTables))
-    return __privateGet$2(this, _schemaTables);
+  if (__privateGet$2(this, _schemaTables)) return __privateGet$2(this, _schemaTables);
   const { schema } = await getBranchDetails({
     pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
     ...__privateGet$2(this, _getFetchProps).call(this)
@@ -2802,16 +4679,13 @@ getSchemaTables_fn = async function() {
   __privateSet$1(this, _schemaTables, schema.tables);
   return schema.tables;
 };
-_transformObjectToApi = new WeakSet();
 transformObjectToApi_fn = async function(object) {
-  const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
+  const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
   const schema = schemaTables.find((table) => table.name === __privateGet$2(this, _table));
-  if (!schema)
-    throw new Error(`Table ${__privateGet$2(this, _table)} not found in schema`);
+  if (!schema) throw new Error(`Table ${__privateGet$2(this, _table)} not found in schema`);
   const result = {};
   for (const [key, value] of Object.entries(object)) {
-    if (["xata_version", "xata_createdat", "xata_updatedat"].includes(key))
-      continue;
+    if (["xata_version", "xata_createdat", "xata_updatedat"].includes(key)) continue;
     const type = schema.columns.find((column) => column.name === key)?.type;
     switch (type) {
       case "link": {
@@ -2841,11 +4715,9 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
   const data = {};
   Object.assign(data, { ...object });
   const { columns } = schemaTables.find(({ name }) => name === table) ?? {};
-  if (!columns)
-    console.error(`Table ${table} not found in schema`);
+  if (!columns) console.error(`Table ${table} not found in schema`);
   for (const column of columns ?? []) {
-    if (!isValidColumn(selectedColumns, column))
-      continue;
+    if (!isValidColumn(selectedColumns, column)) continue;
     const value = data[column.name];
     switch (column.type) {
       case "datetime": {
@@ -2931,15 +4803,12 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
   return record;
 };
 function extractId(value) {
-  if (isString(value))
-    return value;
-  if (isObject(value) && isString(value.xata_id))
-    return value.xata_id;
+  if (isString(value)) return value;
+  if (isObject(value) && isString(value.xata_id)) return value.xata_id;
   return void 0;
 }
 function isValidColumn(columns, column) {
-  if (columns.includes("*"))
-    return true;
+  if (columns.includes("*")) return true;
   return columns.filter((item) => isString(item) && item.startsWith(column.name)).length > 0;
 }
 function parseIfVersion(...args) {
@@ -2979,19 +4848,12 @@ const includesAll = (value) => ({ $includesAll: value });
 const includesNone = (value) => ({ $includesNone: value });
 const includesAny = (value) => ({ $includesAny: value });
 
-var __accessCheck$2 = (obj, member, msg) => {
-  if (!member.has(obj))
-    throw TypeError("Cannot " + msg);
-};
-var __privateGet$1 = (obj, member, getter) => {
-  __accessCheck$2(obj, member, "read from private field");
-  return getter ? getter.call(obj) : member.get(obj);
-};
-var __privateAdd$2 = (obj, member, value) => {
-  if (member.has(obj))
-    throw TypeError("Cannot add the same private member more than once");
-  member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
+var __typeError$2 = (msg) => {
+  throw TypeError(msg);
 };
+var __accessCheck$2 = (obj, member, msg) => member.has(obj) || __typeError$2("Cannot " + msg);
+var __privateGet$1 = (obj, member, getter) => (__accessCheck$2(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
+var __privateAdd$2 = (obj, member, value) => member.has(obj) ? __typeError$2("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
 var _tables;
 class SchemaPlugin extends XataPlugin {
   constructor() {
@@ -3003,8 +4865,7 @@ class SchemaPlugin extends XataPlugin {
       {},
       {
         get: (_target, table) => {
-          if (!isString(table))
-            throw new Error("Invalid table name");
+          if (!isString(table)) throw new Error("Invalid table name");
           if (__privateGet$1(this, _tables)[table] === void 0) {
             __privateGet$1(this, _tables)[table] = new RestRepository({ db, pluginOptions, table, schemaTables: pluginOptions.tables });
           }
@@ -3095,30 +4956,23 @@ function getContentType(file) {
   return "application/octet-stream";
 }
 
-var __accessCheck$1 = (obj, member, msg) => {
-  if (!member.has(obj))
-    throw TypeError("Cannot " + msg);
-};
-var __privateAdd$1 = (obj, member, value) => {
-  if (member.has(obj))
-    throw TypeError("Cannot add the same private member more than once");
-  member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
+var __typeError$1 = (msg) => {
+  throw TypeError(msg);
 };
-var __privateMethod$1 = (obj, member, method) => {
-  __accessCheck$1(obj, member, "access private method");
-  return method;
-};
-var _search, search_fn;
+var __accessCheck$1 = (obj, member, msg) => member.has(obj) || __typeError$1("Cannot " + msg);
+var __privateAdd$1 = (obj, member, value) => member.has(obj) ? __typeError$1("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
+var __privateMethod$1 = (obj, member, method) => (__accessCheck$1(obj, member, "access private method"), method);
+var _SearchPlugin_instances, search_fn;
 class SearchPlugin extends XataPlugin {
   constructor(db) {
     super();
     this.db = db;
-    __privateAdd$1(this, _search);
+    __privateAdd$1(this, _SearchPlugin_instances);
   }
   build(pluginOptions) {
     return {
       all: async (query, options = {}) => {
-        const { records, totalCount } = await __privateMethod$1(this, _search, search_fn).call(this, query, options, pluginOptions);
+        const { records, totalCount } = await __privateMethod$1(this, _SearchPlugin_instances, search_fn).call(this, query, options, pluginOptions);
         return {
           totalCount,
           records: records.map((record) => {
@@ -3128,7 +4982,7 @@ class SearchPlugin extends XataPlugin {
         };
       },
       byTable: async (query, options = {}) => {
-        const { records: rawRecords, totalCount } = await __privateMethod$1(this, _search, search_fn).call(this, query, options, pluginOptions);
+        const { records: rawRecords, totalCount } = await __privateMethod$1(this, _SearchPlugin_instances, search_fn).call(this, query, options, pluginOptions);
         const records = rawRecords.reduce((acc, record) => {
           const table = record.xata_table;
           const items = acc[table] ?? [];
@@ -3140,7 +4994,7 @@ class SearchPlugin extends XataPlugin {
     };
   }
 }
-_search = new WeakSet();
+_SearchPlugin_instances = new WeakSet();
 search_fn = async function(query, options, pluginOptions) {
   const { tables, fuzziness, highlight, prefix, page } = options ?? {};
   const { records, totalCount } = await searchBranch({
@@ -3176,8 +5030,7 @@ function arrayString(val) {
   return result;
 }
 function prepareValue(value) {
-  if (!isDefined(value))
-    return null;
+  if (!isDefined(value)) return null;
   if (value instanceof Date) {
     return value.toISOString();
   }
@@ -3256,8 +5109,7 @@ function buildDomain(host, region) {
 function buildConnectionString({ apiKey, workspacesApiUrl, branch }) {
   const url = isString(workspacesApiUrl) ? workspacesApiUrl : workspacesApiUrl("", {});
   const parts = parseWorkspacesUrlParts(url);
-  if (!parts)
-    throw new Error("Invalid workspaces URL");
+  if (!parts) throw new Error("Invalid workspaces URL");
   const { workspace: workspaceSlug, region, database, host } = parts;
   const domain = buildDomain(host, region);
   const workspace = workspaceSlug.split("-").pop();
@@ -3282,39 +5134,24 @@ class TransactionPlugin extends XataPlugin {
   }
 }
 
-var __accessCheck = (obj, member, msg) => {
-  if (!member.has(obj))
-    throw TypeError("Cannot " + msg);
-};
-var __privateGet = (obj, member, getter) => {
-  __accessCheck(obj, member, "read from private field");
-  return getter ? getter.call(obj) : member.get(obj);
-};
-var __privateAdd = (obj, member, value) => {
-  if (member.has(obj))
-    throw TypeError("Cannot add the same private member more than once");
-  member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
-};
-var __privateSet = (obj, member, value, setter) => {
-  __accessCheck(obj, member, "write to private field");
-  setter ? setter.call(obj, value) : member.set(obj, value);
-  return value;
-};
-var __privateMethod = (obj, member, method) => {
-  __accessCheck(obj, member, "access private method");
-  return method;
+var __typeError = (msg) => {
+  throw TypeError(msg);
 };
+var __accessCheck = (obj, member, msg) => member.has(obj) || __typeError("Cannot " + msg);
+var __privateGet = (obj, member, getter) => (__accessCheck(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
+var __privateAdd = (obj, member, value) => member.has(obj) ? __typeError("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
+var __privateSet = (obj, member, value, setter) => (__accessCheck(obj, member, "write to private field"), member.set(obj, value), value);
+var __privateMethod = (obj, member, method) => (__accessCheck(obj, member, "access private method"), method);
 const buildClient = (plugins) => {
-  var _options, _parseOptions, parseOptions_fn, _getFetchProps, getFetchProps_fn, _a;
+  var _options, _instances, parseOptions_fn, getFetchProps_fn, _a;
   return _a = class {
     constructor(options = {}, tables) {
-      __privateAdd(this, _parseOptions);
-      __privateAdd(this, _getFetchProps);
-      __privateAdd(this, _options, void 0);
-      const safeOptions = __privateMethod(this, _parseOptions, parseOptions_fn).call(this, options);
+      __privateAdd(this, _instances);
+      __privateAdd(this, _options);
+      const safeOptions = __privateMethod(this, _instances, parseOptions_fn).call(this, options);
       __privateSet(this, _options, safeOptions);
       const pluginOptions = {
-        ...__privateMethod(this, _getFetchProps, getFetchProps_fn).call(this, safeOptions),
+        ...__privateMethod(this, _instances, getFetchProps_fn).call(this, safeOptions),
         host: safeOptions.host,
         tables,
        branch: safeOptions.branch
@@ -3331,8 +5168,7 @@ const buildClient = (plugins) => {
       this.sql = sql;
       this.files = files;
       for (const [key, namespace] of Object.entries(plugins ?? {})) {
-        if (namespace === void 0)
-          continue;
+        if (namespace === void 0) continue;
         this[key] = namespace.build(pluginOptions);
       }
     }
@@ -3341,8 +5177,8 @@ const buildClient = (plugins) => {
       const branch = __privateGet(this, _options).branch;
       return { databaseURL, branch };
     }
-  }, _options = new WeakMap(), _parseOptions = new WeakSet(), parseOptions_fn = function(options) {
-    const enableBrowser = options?.enableBrowser ??
+  }, _options = new WeakMap(), _instances = new WeakSet(), parseOptions_fn = function(options) {
+    const enableBrowser = options?.enableBrowser ?? false;
     const isBrowser = typeof window !== "undefined" && typeof Deno === "undefined";
     if (isBrowser && !enableBrowser) {
       throw new Error(
@@ -3350,8 +5186,9 @@ const buildClient = (plugins) => {
       );
     }
     const fetch = getFetchImplementation(options?.fetch);
-    const databaseURL = options?.databaseURL
-    const apiKey = options?.apiKey
+    const databaseURL = options?.databaseURL;
+    const apiKey = options?.apiKey;
+    const branch = options?.branch;
     const trace = options?.trace ?? defaultTrace;
     const clientName = options?.clientName;
     const host = options?.host ?? "production";
@@ -3362,25 +5199,8 @@ const buildClient = (plugins) => {
     if (!databaseURL) {
       throw new Error("Option databaseURL is required");
     }
-
-
-    const branch = options?.branch || previewBranch || envBranch || "main";
-    if (!!previewBranch && branch !== previewBranch) {
-      console.warn(
-        `Ignoring preview branch ${previewBranch} because branch option was passed to the client constructor with value ${branch}`
-      );
-    } else if (!!envBranch && branch !== envBranch) {
-      console.warn(
-        `Ignoring branch ${envBranch} because branch option was passed to the client constructor with value ${branch}`
-      );
-    } else if (!!previewBranch && !!envBranch && previewBranch !== envBranch) {
-      console.warn(
-        `Ignoring preview branch ${previewBranch} and branch ${envBranch} because branch option was passed to the client constructor with value ${branch}`
-      );
-    } else if (!previewBranch && !envBranch && options?.branch === void 0) {
-      console.warn(
-        `No branch was passed to the client constructor. Using default branch ${branch}. You can set the branch with the environment variable XATA_BRANCH or by passing the branch option to the client constructor.`
-      );
+    if (!branch) {
+      throw new Error("Option branch is required");
     }
     return {
       fetch,
@@ -3394,7 +5214,7 @@ const buildClient = (plugins) => {
       clientName,
       xataAgentExtra
     };
-  }, _getFetchProps = new WeakSet(), getFetchProps_fn = function({
+  }, getFetchProps_fn = function({
     fetch,
     apiKey,
     databaseURL,
@@ -3435,26 +5255,19 @@ class Serializer {
   }
   toJSON(data) {
     function visit(obj) {
-      if (Array.isArray(obj))
-        return obj.map(visit);
+      if (Array.isArray(obj)) return obj.map(visit);
       const type = typeof obj;
-      if (type === "undefined")
-        return { [META]: "undefined" };
-      if (type === "bigint")
-        return { [META]: "bigint", [VALUE]: obj.toString() };
-      if (obj === null || type !== "object")
-        return obj;
+      if (type === "undefined") return { [META]: "undefined" };
+      if (type === "bigint") return { [META]: "bigint", [VALUE]: obj.toString() };
+      if (obj === null || type !== "object") return obj;
       const constructor = obj.constructor;
       const o = { [META]: constructor.name };
       for (const [key, value] of Object.entries(obj)) {
         o[key] = visit(value);
       }
-      if (constructor === Date)
-        o[VALUE] = obj.toISOString();
-      if (constructor === Map)
-        o[VALUE] = Object.fromEntries(obj);
-      if (constructor === Set)
-        o[VALUE] = [...obj];
+      if (constructor === Date) o[VALUE] = obj.toISOString();
+      if (constructor === Map) o[VALUE] = Object.fromEntries(obj);
+      if (constructor === Set) o[VALUE] = [...obj];
       return o;
     }
     return JSON.stringify(visit(data));
@@ -3467,16 +5280,11 @@ class Serializer {
       if (constructor) {
         return Object.assign(Object.create(constructor.prototype), rest);
       }
-      if (clazz === "Date")
-        return new Date(val);
-      if (clazz === "Set")
-        return new Set(val);
-      if (clazz === "Map")
-        return new Map(Object.entries(val));
-      if (clazz === "bigint")
-        return BigInt(val);
-      if (clazz === "undefined")
-        return void 0;
+      if (clazz === "Date") return new Date(val);
+      if (clazz === "Set") return new Set(val);
+      if (clazz === "Map") return new Map(Object.entries(val));
+      if (clazz === "bigint") return BigInt(val);
+      if (clazz === "undefined") return void 0;
       return rest;
     }
     return value;
@@ -3491,6 +5299,47 @@ const deserialize = (json) => {
   return defaultSerializer.fromJSON(json);
 };
 
+function parseEnvironment(environment) {
+  try {
+    if (typeof environment === "function") {
+      return new Proxy(
+        {},
+        {
+          get(target) {
+            return environment(target);
+          }
+        }
+      );
+    }
+    if (isObject(environment)) {
+      return environment;
+    }
+  } catch (error) {
+  }
+  return {};
+}
+function buildPreviewBranchName({ org, branch }) {
+  return `preview-${org}-${branch}`;
+}
+function getDeployPreviewBranch(environment) {
+  try {
+    const { deployPreview, deployPreviewBranch, vercelGitCommitRef, vercelGitRepoOwner } = parseEnvironment(environment);
+    if (deployPreviewBranch) return deployPreviewBranch;
+    switch (deployPreview) {
+      case "vercel": {
+        if (!vercelGitCommitRef || !vercelGitRepoOwner) {
+          console.warn("XATA_PREVIEW=vercel but VERCEL_GIT_COMMIT_REF or VERCEL_GIT_REPO_OWNER is not valid");
+          return void 0;
+        }
+        return buildPreviewBranchName({ org: vercelGitRepoOwner, branch: vercelGitCommitRef });
+      }
+    }
+    return void 0;
+  } catch (err) {
+    return void 0;
+  }
+}
+
 class XataError extends Error {
   constructor(message, status) {
     super(message);
@@ -3498,5 +5347,5 @@ class XataError extends Error {
   }
 }
 
-export { BaseClient, FetcherError, FilesPlugin, operationsByTag as Operations, PAGINATION_DEFAULT_OFFSET, PAGINATION_DEFAULT_SIZE, PAGINATION_MAX_OFFSET, PAGINATION_MAX_SIZE, Page, PageRecordArray, Query, RecordArray, RecordColumnTypes, Repository, RestRepository, SQLPlugin, SchemaPlugin, SearchPlugin, Serializer, TransactionPlugin, XataApiClient, XataApiPlugin, XataError, XataFile, XataPlugin, acceptWorkspaceMemberInvite, adaptAllTables, adaptTable, addGitBranchesEntry, addTableColumn, aggregateTable, applyBranchSchemaEdit, applyMigration, askTable, askTableSession, branchTransaction, buildClient, buildPreviewBranchName, buildProviderString, bulkInsertTableRecords, cancelWorkspaceMemberInvite, compareBranchSchemas, compareBranchWithUserSchema, compareMigrationRequest, contains, copyBranch, createBranch, createCluster, createDatabase, createMigrationRequest, createTable, createUserAPIKey, createWorkspace, deleteBranch, deleteColumn, deleteDatabase, deleteDatabaseGithubSettings, deleteFile, deleteFileItem, deleteOAuthAccessToken, deleteRecord, deleteTable, deleteUser, deleteUserAPIKey, deleteUserOAuthClient, deleteWorkspace, deserialize, endsWith, equals, executeBranchMigrationPlan, exists, fileAccess, fileUpload, ge,
+export { BaseClient, Buffer, FetcherError, FilesPlugin, operationsByTag as Operations, PAGINATION_DEFAULT_OFFSET, PAGINATION_DEFAULT_SIZE, PAGINATION_MAX_OFFSET, PAGINATION_MAX_SIZE, Page, PageRecordArray, Query, RecordArray, RecordColumnTypes, Repository, RestRepository, SQLPlugin, SchemaPlugin, SearchPlugin, Serializer, TransactionPlugin, XataApiClient, XataApiPlugin, XataError, XataFile, XataPlugin, acceptWorkspaceMemberInvite, adaptAllTables, adaptTable, addGitBranchesEntry, addTableColumn, aggregateTable, applyBranchSchemaEdit, applyMigration, askTable, askTableSession, branchTransaction, buildClient, buildPreviewBranchName, buildProviderString, bulkInsertTableRecords, cancelWorkspaceMemberInvite, compareBranchSchemas, compareBranchWithUserSchema, compareMigrationRequest, completeMigration, contains, copyBranch, createBranch, createCluster, createDatabase, createMigrationRequest, createTable, createUserAPIKey, createWorkspace, deleteBranch, deleteCluster, deleteColumn, deleteDatabase, deleteDatabaseGithubSettings, deleteFile, deleteFileItem, deleteOAuthAccessToken, deleteRecord, deleteTable, deleteUser, deleteUserAPIKey, deleteUserOAuthClient, deleteWorkspace, deserialize, endsWith, equals, executeBranchMigrationPlan, exists, fileAccess, fileUpload, ge, getAuthorizationCode, getBranchDetails, getBranchList, getBranchMetadata, getBranchMigrationHistory, getBranchMigrationJobStatus, getBranchMigrationPlan, getBranchSchemaHistory, getBranchStats, getCluster, getColumn, getDatabaseGithubSettings, getDatabaseList, getDatabaseMetadata, getDatabaseSettings, getDeployPreviewBranch, getFile, getFileItem, getGitBranchesMapping, getHostUrl, getMigrationHistory, getMigrationJobStatus, getMigrationRequest, getMigrationRequestIsMerged, getRecord, getSchema, getTableColumns, getTableSchema, getUser, getUserAPIKeys, getUserOAuthAccessTokens, getUserOAuthClients, getWorkspace, getWorkspaceMembersList, getWorkspaceSettings, getWorkspacesList, grantAuthorizationCode, greaterEquals, greaterThan, greaterThanEquals, gt, gte, iContains, iPattern, includes, includesAll, includesAny, includesNone, insertRecord, insertRecordWithID, inviteWorkspaceMember, is, isCursorPaginationOptions, isHostProviderAlias, isHostProviderBuilder, isIdentifiable, isNot, isValidExpandedColumn, isValidSelectableColumns, le, lessEquals, lessThan, lessThanEquals, listClusters, listMigrationRequestsCommits, listRegions, lt, lte, mergeMigrationRequest, notExists, operationsByTag, parseProviderString, parseWorkspacesUrlParts, pattern, previewBranchSchemaEdit, pushBranchMigrations, putFile, putFileItem, queryMigrationRequests, queryTable, removeGitBranchesEntry, removeWorkspaceMember, renameDatabase, resendWorkspaceMemberInvite, resolveBranch, rollbackMigration, searchBranch, searchTable, serialize, setTableSchema, sqlBatchQuery, sqlQuery, startMigration, startsWith, summarizeTable, transformImage, updateBranchMetadata, updateBranchSchema, updateCluster, updateColumn, updateDatabaseGithubSettings, updateDatabaseMetadata, updateDatabaseSettings, updateMigrationRequest, updateOAuthAccessToken, updateRecordWithID, updateTable, updateUser, updateWorkspace, updateWorkspaceMemberInvite, updateWorkspaceMemberRole, updateWorkspaceSettings, upsertRecordWithID, vectorSearchTable };
 //# sourceMappingURL=index.mjs.map