@xata.io/client 0.0.0-next.vdf3a7b8c70c130a3e9c73decc8494a3f8c8febcb → 0.0.0-next.ve109eeeef360444eb9f061f3d745eed662493f78
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-add-version.log +1 -1
- package/.turbo/turbo-build.log +4 -4
- package/CHANGELOG.md +15 -3
- package/dist/index.cjs +2417 -580
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.ts +4687 -4010
- package/dist/index.mjs +2413 -577
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.cjs
CHANGED
@@ -24,6 +24,1789 @@ const TraceAttributes = {
   CLOUDFLARE_RAY_ID: "cf.ray"
 };
 
+const lookup = [];
+const revLookup = [];
+const code = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
+for (let i = 0, len = code.length; i < len; ++i) {
+  lookup[i] = code[i];
+  revLookup[code.charCodeAt(i)] = i;
+}
+revLookup["-".charCodeAt(0)] = 62;
+revLookup["_".charCodeAt(0)] = 63;
+function getLens(b64) {
+  const len = b64.length;
+  if (len % 4 > 0) {
+    throw new Error("Invalid string. Length must be a multiple of 4");
+  }
+  let validLen = b64.indexOf("=");
+  if (validLen === -1) validLen = len;
+  const placeHoldersLen = validLen === len ? 0 : 4 - validLen % 4;
+  return [validLen, placeHoldersLen];
+}
+function _byteLength(_b64, validLen, placeHoldersLen) {
+  return (validLen + placeHoldersLen) * 3 / 4 - placeHoldersLen;
+}
+function toByteArray(b64) {
+  let tmp;
+  const lens = getLens(b64);
+  const validLen = lens[0];
+  const placeHoldersLen = lens[1];
+  const arr = new Uint8Array(_byteLength(b64, validLen, placeHoldersLen));
+  let curByte = 0;
+  const len = placeHoldersLen > 0 ? validLen - 4 : validLen;
+  let i;
+  for (i = 0; i < len; i += 4) {
+    tmp = revLookup[b64.charCodeAt(i)] << 18 | revLookup[b64.charCodeAt(i + 1)] << 12 | revLookup[b64.charCodeAt(i + 2)] << 6 | revLookup[b64.charCodeAt(i + 3)];
+    arr[curByte++] = tmp >> 16 & 255;
+    arr[curByte++] = tmp >> 8 & 255;
+    arr[curByte++] = tmp & 255;
+  }
+  if (placeHoldersLen === 2) {
+    tmp = revLookup[b64.charCodeAt(i)] << 2 | revLookup[b64.charCodeAt(i + 1)] >> 4;
+    arr[curByte++] = tmp & 255;
+  }
+  if (placeHoldersLen === 1) {
+    tmp = revLookup[b64.charCodeAt(i)] << 10 | revLookup[b64.charCodeAt(i + 1)] << 4 | revLookup[b64.charCodeAt(i + 2)] >> 2;
+    arr[curByte++] = tmp >> 8 & 255;
+    arr[curByte++] = tmp & 255;
+  }
+  return arr;
+}
+function tripletToBase64(num) {
+  return lookup[num >> 18 & 63] + lookup[num >> 12 & 63] + lookup[num >> 6 & 63] + lookup[num & 63];
+}
+function encodeChunk(uint8, start, end) {
+  let tmp;
+  const output = [];
+  for (let i = start; i < end; i += 3) {
+    tmp = (uint8[i] << 16 & 16711680) + (uint8[i + 1] << 8 & 65280) + (uint8[i + 2] & 255);
+    output.push(tripletToBase64(tmp));
+  }
+  return output.join("");
+}
+function fromByteArray(uint8) {
+  let tmp;
+  const len = uint8.length;
+  const extraBytes = len % 3;
+  const parts = [];
+  const maxChunkLength = 16383;
+  for (let i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) {
+    parts.push(encodeChunk(uint8, i, i + maxChunkLength > len2 ? len2 : i + maxChunkLength));
+  }
+  if (extraBytes === 1) {
+    tmp = uint8[len - 1];
+    parts.push(lookup[tmp >> 2] + lookup[tmp << 4 & 63] + "==");
+  } else if (extraBytes === 2) {
+    tmp = (uint8[len - 2] << 8) + uint8[len - 1];
+    parts.push(lookup[tmp >> 10] + lookup[tmp >> 4 & 63] + lookup[tmp << 2 & 63] + "=");
+  }
+  return parts.join("");
+}
+
+const K_MAX_LENGTH = 2147483647;
+const MAX_ARGUMENTS_LENGTH = 4096;
+class Buffer extends Uint8Array {
+  /**
+   * Constructs a new `Buffer` instance.
+   *
+   * @param value
+   * @param encodingOrOffset
+   * @param length
+   */
+  constructor(value, encodingOrOffset, length) {
+    if (typeof value === "number") {
+      if (typeof encodingOrOffset === "string") {
+        throw new TypeError("The first argument must be of type string, received type number");
+      }
+      if (value < 0) {
+        throw new RangeError("The buffer size cannot be negative");
+      }
+      super(value < 0 ? 0 : Buffer._checked(value) | 0);
+    } else if (typeof value === "string") {
+      if (typeof encodingOrOffset !== "string") {
+        encodingOrOffset = "utf8";
+      }
+      if (!Buffer.isEncoding(encodingOrOffset)) {
+        throw new TypeError("Unknown encoding: " + encodingOrOffset);
+      }
+      const length2 = Buffer.byteLength(value, encodingOrOffset) | 0;
+      super(length2);
+      const written = this.write(value, 0, this.length, encodingOrOffset);
+      if (written !== length2) {
+        throw new TypeError(
+          "Number of bytes written did not match expected length (wrote " + written + ", expected " + length2 + ")"
+        );
+      }
+    } else if (ArrayBuffer.isView(value)) {
+      if (Buffer._isInstance(value, Uint8Array)) {
+        const copy = new Uint8Array(value);
+        const array = copy.buffer;
+        const byteOffset = copy.byteOffset;
+        const length2 = copy.byteLength;
+        if (byteOffset < 0 || array.byteLength < byteOffset) {
+          throw new RangeError("offset is outside of buffer bounds");
+        }
+        if (array.byteLength < byteOffset + (length2 || 0)) {
+          throw new RangeError("length is outside of buffer bounds");
+        }
+        super(new Uint8Array(array, byteOffset, length2));
+      } else {
+        const array = value;
+        const length2 = array.length < 0 ? 0 : Buffer._checked(array.length) | 0;
+        super(new Uint8Array(length2));
+        for (let i = 0; i < length2; i++) {
+          this[i] = array[i] & 255;
+        }
+      }
+    } else if (value == null) {
+      throw new TypeError(
+        "The first argument must be one of type string, Buffer, ArrayBuffer, Array, or Array-like Object. Received type " + typeof value
+      );
+    } else if (Buffer._isInstance(value, ArrayBuffer) || value && Buffer._isInstance(value.buffer, ArrayBuffer)) {
+      const array = value;
+      const byteOffset = encodingOrOffset;
+      if (byteOffset < 0 || array.byteLength < byteOffset) {
+        throw new RangeError("offset is outside of buffer bounds");
+      }
+      if (array.byteLength < byteOffset + (length || 0)) {
+        throw new RangeError("length is outside of buffer bounds");
+      }
+      super(new Uint8Array(array, byteOffset, length));
+    } else if (Array.isArray(value)) {
+      const array = value;
+      const length2 = array.length < 0 ? 0 : Buffer._checked(array.length) | 0;
+      super(new Uint8Array(length2));
+      for (let i = 0; i < length2; i++) {
+        this[i] = array[i] & 255;
+      }
+    } else {
+      throw new TypeError("Unable to determine the correct way to allocate buffer for type " + typeof value);
+    }
+  }
+  /**
+   * Return JSON representation of the buffer.
+   */
+  toJSON() {
+    return {
+      type: "Buffer",
+      data: Array.prototype.slice.call(this)
+    };
+  }
+  /**
+   * Writes `string` to the buffer at `offset` according to the character encoding in `encoding`. The `length`
+   * parameter is the number of bytes to write. If the buffer does not contain enough space to fit the entire string,
+   * only part of `string` will be written. However, partially encoded characters will not be written.
+   *
+   * @param string String to write to `buf`.
+   * @param offset Number of bytes to skip before starting to write `string`. Default: `0`.
+   * @param length Maximum number of bytes to write: Default: `buf.length - offset`.
+   * @param encoding The character encoding of `string`. Default: `utf8`.
+   */
+  write(string, offset, length, encoding) {
+    if (typeof offset === "undefined") {
+      encoding = "utf8";
+      length = this.length;
+      offset = 0;
+    } else if (typeof length === "undefined" && typeof offset === "string") {
+      encoding = offset;
+      length = this.length;
+      offset = 0;
+    } else if (typeof offset === "number" && isFinite(offset)) {
+      offset = offset >>> 0;
+      if (typeof length === "number" && isFinite(length)) {
+        length = length >>> 0;
+        encoding ?? (encoding = "utf8");
+      } else if (typeof length === "string") {
+        encoding = length;
+        length = void 0;
+      }
+    } else {
+      throw new Error("Buffer.write(string, encoding, offset[, length]) is no longer supported");
+    }
+    const remaining = this.length - offset;
+    if (typeof length === "undefined" || length > remaining) {
+      length = remaining;
+    }
+    if (string.length > 0 && (length < 0 || offset < 0) || offset > this.length) {
+      throw new RangeError("Attempt to write outside buffer bounds");
+    }
+    encoding || (encoding = "utf8");
+    switch (Buffer._getEncoding(encoding)) {
+      case "hex":
+        return Buffer._hexWrite(this, string, offset, length);
+      case "utf8":
+        return Buffer._utf8Write(this, string, offset, length);
+      case "ascii":
+      case "latin1":
+      case "binary":
+        return Buffer._asciiWrite(this, string, offset, length);
+      case "ucs2":
+      case "utf16le":
+        return Buffer._ucs2Write(this, string, offset, length);
+      case "base64":
+        return Buffer._base64Write(this, string, offset, length);
+    }
+  }
+  /**
+   * Decodes the buffer to a string according to the specified character encoding.
+   * Passing `start` and `end` will decode only a subset of the buffer.
+   *
+   * Note that if the encoding is `utf8` and a byte sequence in the input is not valid UTF-8, then each invalid byte
+   * will be replaced with `U+FFFD`.
+   *
+   * @param encoding
+   * @param start
+   * @param end
+   */
+  toString(encoding, start, end) {
+    const length = this.length;
+    if (length === 0) {
+      return "";
+    }
+    if (arguments.length === 0) {
+      return Buffer._utf8Slice(this, 0, length);
+    }
+    if (typeof start === "undefined" || start < 0) {
+      start = 0;
+    }
+    if (start > this.length) {
+      return "";
+    }
+    if (typeof end === "undefined" || end > this.length) {
+      end = this.length;
+    }
+    if (end <= 0) {
+      return "";
+    }
+    end >>>= 0;
+    start >>>= 0;
+    if (end <= start) {
+      return "";
+    }
+    if (!encoding) {
+      encoding = "utf8";
+    }
+    switch (Buffer._getEncoding(encoding)) {
+      case "hex":
+        return Buffer._hexSlice(this, start, end);
+      case "utf8":
+        return Buffer._utf8Slice(this, start, end);
+      case "ascii":
+        return Buffer._asciiSlice(this, start, end);
+      case "latin1":
+      case "binary":
+        return Buffer._latin1Slice(this, start, end);
+      case "ucs2":
+      case "utf16le":
+        return Buffer._utf16leSlice(this, start, end);
+      case "base64":
+        return Buffer._base64Slice(this, start, end);
+    }
+  }
+  /**
+   * Returns true if this buffer's is equal to the provided buffer, meaning they share the same exact data.
+   *
+   * @param otherBuffer
+   */
+  equals(otherBuffer) {
+    if (!Buffer.isBuffer(otherBuffer)) {
+      throw new TypeError("Argument must be a Buffer");
+    }
+    if (this === otherBuffer) {
+      return true;
+    }
+    return Buffer.compare(this, otherBuffer) === 0;
+  }
+  /**
+   * Compares the buffer with `otherBuffer` and returns a number indicating whether the buffer comes before, after,
+   * or is the same as `otherBuffer` in sort order. Comparison is based on the actual sequence of bytes in each
+   * buffer.
+   *
+   * - `0` is returned if `otherBuffer` is the same as this buffer.
+   * - `1` is returned if `otherBuffer` should come before this buffer when sorted.
+   * - `-1` is returned if `otherBuffer` should come after this buffer when sorted.
+   *
+   * @param otherBuffer The buffer to compare to.
+   * @param targetStart The offset within `otherBuffer` at which to begin comparison.
+   * @param targetEnd The offset within `otherBuffer` at which to end comparison (exclusive).
+   * @param sourceStart The offset within this buffer at which to begin comparison.
+   * @param sourceEnd The offset within this buffer at which to end the comparison (exclusive).
+   */
+  compare(otherBuffer, targetStart, targetEnd, sourceStart, sourceEnd) {
+    if (Buffer._isInstance(otherBuffer, Uint8Array)) {
+      otherBuffer = Buffer.from(otherBuffer, otherBuffer.byteOffset, otherBuffer.byteLength);
+    }
+    if (!Buffer.isBuffer(otherBuffer)) {
+      throw new TypeError("Argument must be a Buffer or Uint8Array");
+    }
+    targetStart ?? (targetStart = 0);
+    targetEnd ?? (targetEnd = otherBuffer ? otherBuffer.length : 0);
+    sourceStart ?? (sourceStart = 0);
+    sourceEnd ?? (sourceEnd = this.length);
+    if (targetStart < 0 || targetEnd > otherBuffer.length || sourceStart < 0 || sourceEnd > this.length) {
+      throw new RangeError("Out of range index");
+    }
+    if (sourceStart >= sourceEnd && targetStart >= targetEnd) {
+      return 0;
+    }
+    if (sourceStart >= sourceEnd) {
+      return -1;
+    }
+    if (targetStart >= targetEnd) {
+      return 1;
+    }
+    targetStart >>>= 0;
+    targetEnd >>>= 0;
+    sourceStart >>>= 0;
+    sourceEnd >>>= 0;
+    if (this === otherBuffer) {
+      return 0;
+    }
+    let x = sourceEnd - sourceStart;
+    let y = targetEnd - targetStart;
+    const len = Math.min(x, y);
+    const thisCopy = this.slice(sourceStart, sourceEnd);
+    const targetCopy = otherBuffer.slice(targetStart, targetEnd);
+    for (let i = 0; i < len; ++i) {
+      if (thisCopy[i] !== targetCopy[i]) {
+        x = thisCopy[i];
+        y = targetCopy[i];
+        break;
+      }
+    }
+    if (x < y) return -1;
+    if (y < x) return 1;
+    return 0;
+  }
+  /**
+   * Copies data from a region of this buffer to a region in `targetBuffer`, even if the `targetBuffer` memory
+   * region overlaps with this buffer.
+   *
+   * @param targetBuffer The target buffer to copy into.
+   * @param targetStart The offset within `targetBuffer` at which to begin writing.
+   * @param sourceStart The offset within this buffer at which to begin copying.
+   * @param sourceEnd The offset within this buffer at which to end copying (exclusive).
+   */
+  copy(targetBuffer, targetStart, sourceStart, sourceEnd) {
+    if (!Buffer.isBuffer(targetBuffer)) throw new TypeError("argument should be a Buffer");
+    if (!sourceStart) sourceStart = 0;
+    if (!targetStart) targetStart = 0;
+    if (!sourceEnd && sourceEnd !== 0) sourceEnd = this.length;
+    if (targetStart >= targetBuffer.length) targetStart = targetBuffer.length;
+    if (!targetStart) targetStart = 0;
+    if (sourceEnd > 0 && sourceEnd < sourceStart) sourceEnd = sourceStart;
+    if (sourceEnd === sourceStart) return 0;
+    if (targetBuffer.length === 0 || this.length === 0) return 0;
+    if (targetStart < 0) {
+      throw new RangeError("targetStart out of bounds");
+    }
+    if (sourceStart < 0 || sourceStart >= this.length) throw new RangeError("Index out of range");
+    if (sourceEnd < 0) throw new RangeError("sourceEnd out of bounds");
+    if (sourceEnd > this.length) sourceEnd = this.length;
+    if (targetBuffer.length - targetStart < sourceEnd - sourceStart) {
+      sourceEnd = targetBuffer.length - targetStart + sourceStart;
+    }
+    const len = sourceEnd - sourceStart;
+    if (this === targetBuffer && typeof Uint8Array.prototype.copyWithin === "function") {
+      this.copyWithin(targetStart, sourceStart, sourceEnd);
+    } else {
+      Uint8Array.prototype.set.call(targetBuffer, this.subarray(sourceStart, sourceEnd), targetStart);
+    }
+    return len;
+  }
+  /**
+   * Returns a new `Buffer` that references the same memory as the original, but offset and cropped by the `start`
+   * and `end` indices. This is the same behavior as `buf.subarray()`.
+   *
+   * This method is not compatible with the `Uint8Array.prototype.slice()`, which is a superclass of Buffer. To copy
+   * the slice, use `Uint8Array.prototype.slice()`.
+   *
+   * @param start
+   * @param end
+   */
+  slice(start, end) {
+    if (!start) {
+      start = 0;
+    }
+    const len = this.length;
+    start = ~~start;
+    end = end === void 0 ? len : ~~end;
+    if (start < 0) {
+      start += len;
+      if (start < 0) {
+        start = 0;
+      }
+    } else if (start > len) {
+      start = len;
+    }
+    if (end < 0) {
+      end += len;
+      if (end < 0) {
+        end = 0;
+      }
+    } else if (end > len) {
+      end = len;
+    }
+    if (end < start) {
+      end = start;
+    }
+    const newBuf = this.subarray(start, end);
+    Object.setPrototypeOf(newBuf, Buffer.prototype);
+    return newBuf;
+  }
+  /**
+   * Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits
+   * of accuracy. Behavior is undefined when value is anything other than an unsigned integer.
+   *
+   * @param value Number to write.
+   * @param offset Number of bytes to skip before starting to write.
+   * @param byteLength Number of bytes to write, between 0 and 6.
+   * @param noAssert
+   * @returns `offset` plus the number of bytes written.
+   */
+  writeUIntLE(value, offset, byteLength, noAssert) {
+    value = +value;
+    offset = offset >>> 0;
+    byteLength = byteLength >>> 0;
+    if (!noAssert) {
+      const maxBytes = Math.pow(2, 8 * byteLength) - 1;
+      Buffer._checkInt(this, value, offset, byteLength, maxBytes, 0);
+    }
+    let mul = 1;
+    let i = 0;
+    this[offset] = value & 255;
+    while (++i < byteLength && (mul *= 256)) {
+      this[offset + i] = value / mul & 255;
+    }
+    return offset + byteLength;
+  }
+  /**
+   * Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits of
+   * accuracy. Behavior is undefined when `value` is anything other than an unsigned integer.
+   *
+   * @param value Number to write.
+   * @param offset Number of bytes to skip before starting to write.
+   * @param byteLength Number of bytes to write, between 0 and 6.
+   * @param noAssert
+   * @returns `offset` plus the number of bytes written.
+   */
+  writeUIntBE(value, offset, byteLength, noAssert) {
+    value = +value;
+    offset = offset >>> 0;
+    byteLength = byteLength >>> 0;
+    if (!noAssert) {
+      const maxBytes = Math.pow(2, 8 * byteLength) - 1;
+      Buffer._checkInt(this, value, offset, byteLength, maxBytes, 0);
+    }
+    let i = byteLength - 1;
+    let mul = 1;
+    this[offset + i] = value & 255;
+    while (--i >= 0 && (mul *= 256)) {
+      this[offset + i] = value / mul & 255;
+    }
+    return offset + byteLength;
+  }
+  /**
+   * Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits
+   * of accuracy. Behavior is undefined when `value` is anything other than a signed integer.
+   *
+   * @param value Number to write.
+   * @param offset Number of bytes to skip before starting to write.
+   * @param byteLength Number of bytes to write, between 0 and 6.
+   * @param noAssert
+   * @returns `offset` plus the number of bytes written.
+   */
+  writeIntLE(value, offset, byteLength, noAssert) {
+    value = +value;
+    offset = offset >>> 0;
+    if (!noAssert) {
+      const limit = Math.pow(2, 8 * byteLength - 1);
+      Buffer._checkInt(this, value, offset, byteLength, limit - 1, -limit);
+    }
+    let i = 0;
+    let mul = 1;
+    let sub = 0;
+    this[offset] = value & 255;
+    while (++i < byteLength && (mul *= 256)) {
+      if (value < 0 && sub === 0 && this[offset + i - 1] !== 0) {
+        sub = 1;
+      }
+      this[offset + i] = (value / mul >> 0) - sub & 255;
+    }
+    return offset + byteLength;
+  }
+  /**
+   * Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits
+   * of accuracy. Behavior is undefined when `value` is anything other than a signed integer.
+   *
+   * @param value Number to write.
+   * @param offset Number of bytes to skip before starting to write.
+   * @param byteLength Number of bytes to write, between 0 and 6.
+   * @param noAssert
+   * @returns `offset` plus the number of bytes written.
+   */
+  writeIntBE(value, offset, byteLength, noAssert) {
+    value = +value;
+    offset = offset >>> 0;
+    if (!noAssert) {
+      const limit = Math.pow(2, 8 * byteLength - 1);
+      Buffer._checkInt(this, value, offset, byteLength, limit - 1, -limit);
+    }
+    let i = byteLength - 1;
+    let mul = 1;
+    let sub = 0;
+    this[offset + i] = value & 255;
+    while (--i >= 0 && (mul *= 256)) {
+      if (value < 0 && sub === 0 && this[offset + i + 1] !== 0) {
+        sub = 1;
+      }
+      this[offset + i] = (value / mul >> 0) - sub & 255;
+    }
+    return offset + byteLength;
+  }
+  /**
+   * Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an
+   * unsigned, little-endian integer supporting up to 48 bits of accuracy.
+   *
+   * @param offset Number of bytes to skip before starting to read.
+   * @param byteLength Number of bytes to read, between 0 and 6.
+   * @param noAssert
+   */
+  readUIntLE(offset, byteLength, noAssert) {
+    offset = offset >>> 0;
+    byteLength = byteLength >>> 0;
+    if (!noAssert) {
+      Buffer._checkOffset(offset, byteLength, this.length);
+    }
+    let val = this[offset];
+    let mul = 1;
+    let i = 0;
+    while (++i < byteLength && (mul *= 256)) {
+      val += this[offset + i] * mul;
+    }
+    return val;
+  }
+  /**
+   * Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an
+   * unsigned, big-endian integer supporting up to 48 bits of accuracy.
+   *
+   * @param offset Number of bytes to skip before starting to read.
+   * @param byteLength Number of bytes to read, between 0 and 6.
+   * @param noAssert
+   */
+  readUIntBE(offset, byteLength, noAssert) {
+    offset = offset >>> 0;
+    byteLength = byteLength >>> 0;
+    if (!noAssert) {
+      Buffer._checkOffset(offset, byteLength, this.length);
+    }
+    let val = this[offset + --byteLength];
+    let mul = 1;
+    while (byteLength > 0 && (mul *= 256)) {
+      val += this[offset + --byteLength] * mul;
+    }
+    return val;
+  }
+  /**
+   * Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a
+   * little-endian, two's complement signed value supporting up to 48 bits of accuracy.
+   *
+   * @param offset Number of bytes to skip before starting to read.
+   * @param byteLength Number of bytes to read, between 0 and 6.
+   * @param noAssert
+   */
+  readIntLE(offset, byteLength, noAssert) {
+    offset = offset >>> 0;
+    byteLength = byteLength >>> 0;
+    if (!noAssert) {
+      Buffer._checkOffset(offset, byteLength, this.length);
+    }
+    let val = this[offset];
+    let mul = 1;
+    let i = 0;
+    while (++i < byteLength && (mul *= 256)) {
+      val += this[offset + i] * mul;
+    }
+    mul *= 128;
+    if (val >= mul) {
+      val -= Math.pow(2, 8 * byteLength);
+    }
+    return val;
+  }
+  /**
+   * Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a
+   * big-endian, two's complement signed value supporting up to 48 bits of accuracy.
+   *
+   * @param offset Number of bytes to skip before starting to read.
+   * @param byteLength Number of bytes to read, between 0 and 6.
+   * @param noAssert
+   */
+  readIntBE(offset, byteLength, noAssert) {
+    offset = offset >>> 0;
+    byteLength = byteLength >>> 0;
+    if (!noAssert) {
+      Buffer._checkOffset(offset, byteLength, this.length);
+    }
+    let i = byteLength;
+    let mul = 1;
+    let val = this[offset + --i];
+    while (i > 0 && (mul *= 256)) {
+      val += this[offset + --i] * mul;
+    }
+    mul *= 128;
+    if (val >= mul) {
+      val -= Math.pow(2, 8 * byteLength);
+    }
+    return val;
+  }
+  /**
+   * Reads an unsigned 8-bit integer from `buf` at the specified `offset`.
+   *
+   * @param offset Number of bytes to skip before starting to read.
+   * @param noAssert
+   */
+  readUInt8(offset, noAssert) {
+    offset = offset >>> 0;
+    if (!noAssert) {
+      Buffer._checkOffset(offset, 1, this.length);
+    }
+    return this[offset];
+  }
+  /**
+   * Reads an unsigned, little-endian 16-bit integer from `buf` at the specified `offset`.
+   *
+   * @param offset Number of bytes to skip before starting to read.
+   * @param noAssert
+   */
+  readUInt16LE(offset, noAssert) {
+    offset = offset >>> 0;
+    if (!noAssert) {
+      Buffer._checkOffset(offset, 2, this.length);
+    }
+    return this[offset] | this[offset + 1] << 8;
+  }
+  /**
+   * Reads an unsigned, big-endian 16-bit integer from `buf` at the specified `offset`.
+   *
+   * @param offset Number of bytes to skip before starting to read.
+   * @param noAssert
+   */
+  readUInt16BE(offset, noAssert) {
+    offset = offset >>> 0;
+    if (!noAssert) {
+      Buffer._checkOffset(offset, 2, this.length);
+    }
+    return this[offset] << 8 | this[offset + 1];
+  }
+  /**
+   * Reads an unsigned, little-endian 32-bit integer from `buf` at the specified `offset`.
+   *
+   * @param offset Number of bytes to skip before starting to read.
+   * @param noAssert
+   */
+  readUInt32LE(offset, noAssert) {
+    offset = offset >>> 0;
+    if (!noAssert) {
+      Buffer._checkOffset(offset, 4, this.length);
+    }
+    return (this[offset] | this[offset + 1] << 8 | this[offset + 2] << 16) + this[offset + 3] * 16777216;
+  }
+  /**
+   * Reads an unsigned, big-endian 32-bit integer from `buf` at the specified `offset`.
+   *
+   * @param offset Number of bytes to skip before starting to read.
+   * @param noAssert
+   */
+  readUInt32BE(offset, noAssert) {
+    offset = offset >>> 0;
+    if (!noAssert) {
+      Buffer._checkOffset(offset, 4, this.length);
+    }
+    return this[offset] * 16777216 + (this[offset + 1] << 16 | this[offset + 2] << 8 | this[offset + 3]);
+  }
+  /**
+   * Reads a signed 8-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer` are interpreted
+   * as two's complement signed values.
+   *
+   * @param offset Number of bytes to skip before starting to read.
+   * @param noAssert
+   */
+  readInt8(offset, noAssert) {
+    offset = offset >>> 0;
+    if (!noAssert) {
+      Buffer._checkOffset(offset, 1, this.length);
+    }
+    if (!(this[offset] & 128)) {
+      return this[offset];
+    }
+    return (255 - this[offset] + 1) * -1;
+  }
+  /**
+   * Reads a signed, little-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
+   * are interpreted as two's complement signed values.
+   *
+   * @param offset Number of bytes to skip before starting to read.
+   * @param noAssert
+   */
+  readInt16LE(offset, noAssert) {
+    offset = offset >>> 0;
+    if (!noAssert) {
+      Buffer._checkOffset(offset, 2, this.length);
+    }
+    const val = this[offset] | this[offset + 1] << 8;
+    return val & 32768 ? val | 4294901760 : val;
+  }
+  /**
+   * Reads a signed, big-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
+   * are interpreted as two's complement signed values.
+   *
+   * @param offset Number of bytes to skip before starting to read.
+   * @param noAssert
+   */
+  readInt16BE(offset, noAssert) {
+    offset = offset >>> 0;
+    if (!noAssert) {
+      Buffer._checkOffset(offset, 2, this.length);
+    }
+    const val = this[offset + 1] | this[offset] << 8;
+    return val & 32768 ? val | 4294901760 : val;
+  }
+  /**
+   * Reads a signed, little-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
+   * are interpreted as two's complement signed values.
+   *
+   * @param offset Number of bytes to skip before starting to read.
+   * @param noAssert
+   */
+  readInt32LE(offset, noAssert) {
+    offset = offset >>> 0;
+    if (!noAssert) {
+      Buffer._checkOffset(offset, 4, this.length);
+    }
+    return this[offset] | this[offset + 1] << 8 | this[offset + 2] << 16 | this[offset + 3] << 24;
+  }
+  /**
+   * Reads a signed, big-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
+   * are interpreted as two's complement signed values.
+   *
+   * @param offset Number of bytes to skip before starting to read.
+   * @param noAssert
+   */
+  readInt32BE(offset, noAssert) {
+    offset = offset >>> 0;
+    if (!noAssert) {
+      Buffer._checkOffset(offset, 4, this.length);
+    }
+    return this[offset] << 24 | this[offset + 1] << 16 | this[offset + 2] << 8 | this[offset + 3];
+  }
+  /**
+   * Interprets `buf` as an array of unsigned 16-bit integers and swaps the byte order in-place.
+   * Throws a `RangeError` if `buf.length` is not a multiple of 2.
+   */
+  swap16() {
+    const len = this.length;
+    if (len % 2 !== 0) {
+      throw new RangeError("Buffer size must be a multiple of 16-bits");
+    }
+    for (let i = 0; i < len; i += 2) {
+      this._swap(this, i, i + 1);
+    }
+    return this;
+  }
+  /**
+   * Interprets `buf` as an array of unsigned 32-bit integers and swaps the byte order in-place.
+   * Throws a `RangeError` if `buf.length` is not a multiple of 4.
+   */
+  swap32() {
+    const len = this.length;
+    if (len % 4 !== 0) {
+      throw new RangeError("Buffer size must be a multiple of 32-bits");
+    }
+    for (let i = 0; i < len; i += 4) {
+      this._swap(this, i, i + 3);
+      this._swap(this, i + 1, i + 2);
+    }
+    return this;
+  }
+  /**
+   * Interprets `buf` as an array of unsigned 64-bit integers and swaps the byte order in-place.
+   * Throws a `RangeError` if `buf.length` is not a multiple of 8.
+   */
+  swap64() {
+    const len = this.length;
+    if (len % 8 !== 0) {
+      throw new RangeError("Buffer size must be a multiple of 64-bits");
+    }
+    for (let i = 0; i < len; i += 8) {
+      this._swap(this, i, i + 7);
+      this._swap(this, i + 1, i + 6);
+      this._swap(this, i + 2, i + 5);
+      this._swap(this, i + 3, i + 4);
+    }
+    return this;
+  }
+  /**
+   * Swaps two octets.
+   *
+   * @param b
+   * @param n
+   * @param m
+   */
+  _swap(b, n, m) {
+    const i = b[n];
+    b[n] = b[m];
+    b[m] = i;
+  }
+  /**
+   * Writes `value` to `buf` at the specified `offset`. The `value` must be a valid unsigned 8-bit integer.
+   * Behavior is undefined when `value` is anything other than an unsigned 8-bit integer.
+   *
+   * @param value Number to write.
+   * @param offset Number of bytes to skip before starting to write.
+   * @param noAssert
+   * @returns `offset` plus the number of bytes written.
+   */
+  writeUInt8(value, offset, noAssert) {
+    value = +value;
+    offset = offset >>> 0;
+    if (!noAssert) {
+      Buffer._checkInt(this, value, offset, 1, 255, 0);
+    }
+    this[offset] = value & 255;
+    return offset + 1;
+  }
+  /**
+   * Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 16-bit
+   * integer. Behavior is undefined when `value` is anything other than an unsigned 16-bit integer.
+   *
+   * @param value Number to write.
+   * @param offset Number of bytes to skip before starting to write.
+   * @param noAssert
+   * @returns `offset` plus the number of bytes written.
+   */
+  writeUInt16LE(value, offset, noAssert) {
+    value = +value;
+    offset = offset >>> 0;
+    if (!noAssert) {
+      Buffer._checkInt(this, value, offset, 2, 65535, 0);
+    }
+    this[offset] = value & 255;
+    this[offset + 1] = value >>> 8;
+    return offset + 2;
+  }
+  /**
+   * Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 16-bit
+   * integer. Behavior is undefined when `value` is anything other than an unsigned 16-bit integer.
+   *
+   * @param value Number to write.
+   * @param offset Number of bytes to skip before starting to write.
+   * @param noAssert
+   * @returns `offset` plus the number of bytes written.
+   */
+  writeUInt16BE(value, offset, noAssert) {
+    value = +value;
+    offset = offset >>> 0;
+    if (!noAssert) {
+      Buffer._checkInt(this, value, offset, 2, 65535, 0);
+    }
+    this[offset] = value >>> 8;
+    this[offset + 1] = value & 255;
+    return offset + 2;
+  }
+  /**
+   * Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 32-bit
+   * integer. Behavior is undefined when `value` is anything other than an unsigned 32-bit integer.
+   *
+   * @param value Number to write.
+   * @param offset Number of bytes to skip before starting to write.
+   * @param noAssert
+   * @returns `offset` plus the number of bytes written.
+   */
+  writeUInt32LE(value, offset, noAssert) {
+    value = +value;
+    offset = offset >>> 0;
+    if (!noAssert) {
+      Buffer._checkInt(this, value, offset, 4, 4294967295, 0);
+    }
+    this[offset + 3] = value >>> 24;
+    this[offset + 2] = value >>> 16;
+    this[offset + 1] = value >>> 8;
+    this[offset] = value & 255;
+    return offset + 4;
+  }
+  /**
+   * Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 32-bit
+   * integer. Behavior is undefined when `value` is anything other than an unsigned 32-bit integer.
+   *
+   * @param value Number to write.
+   * @param offset Number of bytes to skip before starting to write.
+   * @param noAssert
+   * @returns `offset` plus the number of bytes written.
+   */
+  writeUInt32BE(value, offset, noAssert) {
+    value = +value;
+    offset = offset >>> 0;
+    if (!noAssert) {
+      Buffer._checkInt(this, value, offset, 4, 4294967295, 0);
+    }
+    this[offset] = value >>> 24;
+    this[offset + 1] = value >>> 16;
+    this[offset + 2] = value >>> 8;
+    this[offset + 3] = value & 255;
+    return offset + 4;
+  }
+  /**
+   * Writes `value` to `buf` at the specified `offset`. The `value` must be a valid signed 8-bit integer.
+   * Behavior is undefined when `value` is anything other than a signed 8-bit integer.
+   *
+   * @param value Number to write.
+   * @param offset Number of bytes to skip before starting to write.
+   * @param noAssert
+   * @returns `offset` plus the number of bytes written.
+   */
+  writeInt8(value, offset, noAssert) {
+    value = +value;
+    offset = offset >>> 0;
+    if (!noAssert) {
+      Buffer._checkInt(this, value, offset, 1, 127, -128);
+    }
+    if (value < 0) {
+      value = 255 + value + 1;
+    }
+    this[offset] = value & 255;
+    return offset + 1;
+  }
+  /**
+   * Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 16-bit
+   * integer. Behavior is undefined when `value` is anything other than a signed 16-bit integer.
+   *
+   * @param value Number to write.
+   * @param offset Number of bytes to skip before starting to write.
+   * @param noAssert
+   * @returns `offset` plus the number of bytes written.
+   */
+  writeInt16LE(value, offset, noAssert) {
+    value = +value;
+    offset = offset >>> 0;
+    if (!noAssert) {
+      Buffer._checkInt(this, value, offset, 2, 32767, -32768);
+    }
+    this[offset] = value & 255;
+    this[offset + 1] = value >>> 8;
+    return offset + 2;
+  }
+  /**
+   * Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 16-bit
+   * integer. Behavior is undefined when `value` is anything other than a signed 16-bit integer.
+   *
+   * @param value Number to write.
+   * @param offset Number of bytes to skip before starting to write.
+   * @param noAssert
+   * @returns `offset` plus the number of bytes written.
+   */
+  writeInt16BE(value, offset, noAssert) {
+    value = +value;
+    offset = offset >>> 0;
+    if (!noAssert) {
+      Buffer._checkInt(this, value, offset, 2, 32767, -32768);
+    }
+    this[offset] = value >>> 8;
+    this[offset + 1] = value & 255;
+    return offset + 2;
+  }
+  /**
+   * Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 32-bit
+   * integer. Behavior is undefined when `value` is anything other than a signed 32-bit integer.
+   *
+   * @param value Number to write.
+   * @param offset Number of bytes to skip before starting to write.
+   * @param noAssert
+   * @returns `offset` plus the number of bytes written.
+   */
+  writeInt32LE(value, offset, noAssert) {
+    value = +value;
+    offset = offset >>> 0;
+    if (!noAssert) {
+      Buffer._checkInt(this, value, offset, 4, 2147483647, -2147483648);
+    }
+    this[offset] = value & 255;
+    this[offset + 1] = value >>> 8;
+    this[offset + 2] = value >>> 16;
+    this[offset + 3] = value >>> 24;
+    return offset + 4;
+  }
+  /**
+   * Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 32-bit
+   * integer. Behavior is undefined when `value` is anything other than a signed 32-bit integer.
+   *
+   * @param value Number to write.
+   * @param offset Number of bytes to skip before starting to write.
+   * @param noAssert
+   * @returns `offset` plus the number of bytes written.
+   */
+  writeInt32BE(value, offset, noAssert) {
+    value = +value;
+    offset = offset >>> 0;
+    if (!noAssert) {
+      Buffer._checkInt(this, value, offset, 4, 2147483647, -2147483648);
+    }
+    if (value < 0) {
+      value = 4294967295 + value + 1;
+    }
+    this[offset] = value >>> 24;
+    this[offset + 1] = value >>> 16;
+    this[offset + 2] = value >>> 8;
+    this[offset + 3] = value & 255;
+    return offset + 4;
+  }
+  /**
+   * Fills `buf` with the specified `value`. If the `offset` and `end` are not given, the entire `buf` will be
+   * filled. The `value` is coerced to a `uint32` value if it is not a string, `Buffer`, or integer. If the resulting
+   * integer is greater than `255` (decimal), then `buf` will be filled with `value & 255`.
+   *
+   * If the final write of a `fill()` operation falls on a multi-byte character, then only the bytes of that
+   * character that fit into `buf` are written.
+   *
+   * If `value` contains invalid characters, it is truncated; if no valid fill data remains, an exception is thrown.
+   *
+   * @param value
+   * @param encoding
+   */
+  fill(value, offset, end, encoding) {
+    if (typeof value === "string") {
+      if (typeof offset === "string") {
+        encoding = offset;
+        offset = 0;
+        end = this.length;
+      } else if (typeof end === "string") {
+        encoding = end;
+        end = this.length;
+      }
+      if (encoding !== void 0 && typeof encoding !== "string") {
+        throw new TypeError("encoding must be a string");
+      }
+      if (typeof encoding === "string" && !Buffer.isEncoding(encoding)) {
+        throw new TypeError("Unknown encoding: " + encoding);
+      }
+      if (value.length === 1) {
+        const code = value.charCodeAt(0);
+        if (encoding === "utf8" && code < 128) {
+          value = code;
+        }
+      }
+    } else if (typeof value === "number") {
+      value = value & 255;
+    } else if (typeof value === "boolean") {
+      value = Number(value);
+    }
+    offset ?? (offset = 0);
+    end ?? (end = this.length);
+    if (offset < 0 || this.length < offset || this.length < end) {
+      throw new RangeError("Out of range index");
+    }
+    if (end <= offset) {
+      return this;
+    }
+    offset = offset >>> 0;
+    end = end === void 0 ? this.length : end >>> 0;
+    value || (value = 0);
+    let i;
+    if (typeof value === "number") {
+      for (i = offset; i < end; ++i) {
+        this[i] = value;
+      }
+    } else {
+      const bytes = Buffer.isBuffer(value) ? value : Buffer.from(value, encoding);
+      const len = bytes.length;
+      if (len === 0) {
+        throw new TypeError('The value "' + value + '" is invalid for argument "value"');
+      }
+      for (i = 0; i < end - offset; ++i) {
+        this[i + offset] = bytes[i % len];
+      }
+    }
+    return this;
+  }
+  /**
+   * Returns the index of the specified value.
+   *
+   * If `value` is:
+   * - a string, `value` is interpreted according to the character encoding in `encoding`.
+   * - a `Buffer` or `Uint8Array`, `value` will be used in its entirety. To compare a partial Buffer, use `slice()`.
+   * - a number, `value` will be interpreted as an unsigned 8-bit integer value between `0` and `255`.
+   *
+   * Any other types will throw a `TypeError`.
+   *
+   * @param value What to search for.
+   * @param byteOffset Where to begin searching in `buf`. If negative, then calculated from the end.
+   * @param encoding If `value` is a string, this is the encoding used to search.
+   * @returns The index of the first occurrence of `value` in `buf`, or `-1` if not found.
+   */
+  indexOf(value, byteOffset, encoding) {
+    return this._bidirectionalIndexOf(this, value, byteOffset, encoding, true);
+  }
+  /**
+   * Gets the last index of the specified value.
+   *
+   * @see indexOf()
+   * @param value
+   * @param byteOffset
+   * @param encoding
+   */
+  lastIndexOf(value, byteOffset, encoding) {
+    return this._bidirectionalIndexOf(this, value, byteOffset, encoding, false);
+  }
+  _bidirectionalIndexOf(buffer, val, byteOffset, encoding, dir) {
+    if (buffer.length === 0) {
+      return -1;
+    }
+    if (typeof byteOffset === "string") {
+      encoding = byteOffset;
+      byteOffset = 0;
+    } else if (typeof byteOffset === "undefined") {
+      byteOffset = 0;
+    } else if (byteOffset > 2147483647) {
+      byteOffset = 2147483647;
+    } else if (byteOffset < -2147483648) {
+      byteOffset = -2147483648;
+    }
+    byteOffset = +byteOffset;
+    if (byteOffset !== byteOffset) {
+      byteOffset = dir ? 0 : buffer.length - 1;
+    }
+    if (byteOffset < 0) {
+      byteOffset = buffer.length + byteOffset;
+    }
+    if (byteOffset >= buffer.length) {
+      if (dir) {
+        return -1;
+      } else {
+        byteOffset = buffer.length - 1;
+      }
+    } else if (byteOffset < 0) {
+      if (dir) {
+        byteOffset = 0;
+      } else {
+        return -1;
+      }
+    }
+    if (typeof val === "string") {
+      val = Buffer.from(val, encoding);
+    }
+    if (Buffer.isBuffer(val)) {
+      if (val.length === 0) {
+        return -1;
+      }
+      return Buffer._arrayIndexOf(buffer, val, byteOffset, encoding, dir);
+    } else if (typeof val === "number") {
+      val = val & 255;
+      if (typeof Uint8Array.prototype.indexOf === "function") {
+        if (dir) {
+          return Uint8Array.prototype.indexOf.call(buffer, val, byteOffset);
+        } else {
+          return Uint8Array.prototype.lastIndexOf.call(buffer, val, byteOffset);
+        }
+      }
+      return Buffer._arrayIndexOf(buffer, Buffer.from([val]), byteOffset, encoding, dir);
+    }
+    throw new TypeError("val must be string, number or Buffer");
+  }
+  /**
+   * Equivalent to `buf.indexOf() !== -1`.
+   *
+   * @param value
+   * @param byteOffset
+   * @param encoding
+   */
+  includes(value, byteOffset, encoding) {
+    return this.indexOf(value, byteOffset, encoding) !== -1;
+  }
+  /**
+   * Creates a new buffer from the given parameters.
+   *
+   * @param data
+   * @param encoding
+   */
+  static from(a, b, c) {
+    return new Buffer(a, b, c);
+  }
+  /**
+   * Returns true if `obj` is a Buffer.
+   *
+   * @param obj
+   */
+  static isBuffer(obj) {
+    return obj != null && obj !== Buffer.prototype && Buffer._isInstance(obj, Buffer);
+  }
+  /**
+   * Returns true if `encoding` is a supported encoding.
+   *
+   * @param encoding
+   */
+  static isEncoding(encoding) {
+    switch (encoding.toLowerCase()) {
+      case "hex":
+      case "utf8":
+      case "ascii":
+      case "binary":
+      case "latin1":
+      case "ucs2":
+      case "utf16le":
+      case "base64":
+        return true;
+      default:
+        return false;
+    }
+  }
+  /**
+   * Gives the actual byte length of a string for an encoding. This is not the same as `string.length` since that
+   * returns the number of characters in the string.
+   *
+   * @param string The string to test.
+   * @param encoding The encoding to use for calculation. Defaults is `utf8`.
+   */
+  static byteLength(string, encoding) {
+    if (Buffer.isBuffer(string)) {
+      return string.length;
+    }
+    if (typeof string !== "string" && (ArrayBuffer.isView(string) || Buffer._isInstance(string, ArrayBuffer))) {
+      return string.byteLength;
+    }
+    if (typeof string !== "string") {
+      throw new TypeError(
+        'The "string" argument must be one of type string, Buffer, or ArrayBuffer. Received type ' + typeof string
+      );
+    }
+    const len = string.length;
+    const mustMatch = arguments.length > 2 && arguments[2] === true;
+    if (!mustMatch && len === 0) {
+      return 0;
+    }
+    switch (encoding?.toLowerCase()) {
+      case "ascii":
+      case "latin1":
+      case "binary":
+        return len;
+      case "utf8":
+        return Buffer._utf8ToBytes(string).length;
+      case "hex":
+        return len >>> 1;
+      case "ucs2":
+      case "utf16le":
+        return len * 2;
+      case "base64":
+        return Buffer._base64ToBytes(string).length;
+      default:
+        return mustMatch ? -1 : Buffer._utf8ToBytes(string).length;
+    }
+  }
+  /**
+   * Returns a Buffer which is the result of concatenating all the buffers in the list together.
+   *
+   * - If the list has no items, or if the `totalLength` is 0, then it returns a zero-length buffer.
+   * - If the list has exactly one item, then the first item is returned.
+   * - If the list has more than one item, then a new buffer is created.
+   *
+   * It is faster to provide the `totalLength` if it is known. However, it will be calculated if not provided at
+   * a small computational expense.
+   *
+   * @param list An array of Buffer objects to concatenate.
+   * @param totalLength Total length of the buffers when concatenated.
+   */
+  static concat(list, totalLength) {
+    if (!Array.isArray(list)) {
+      throw new TypeError('"list" argument must be an Array of Buffers');
+    }
+    if (list.length === 0) {
+      return Buffer.alloc(0);
+    }
+    let i;
+    if (totalLength === void 0) {
+      totalLength = 0;
+      for (i = 0; i < list.length; ++i) {
+        totalLength += list[i].length;
+      }
+    }
+    const buffer = Buffer.allocUnsafe(totalLength);
+    let pos = 0;
+    for (i = 0; i < list.length; ++i) {
+      let buf = list[i];
+      if (Buffer._isInstance(buf, Uint8Array)) {
+        if (pos + buf.length > buffer.length) {
+          if (!Buffer.isBuffer(buf)) {
+            buf = Buffer.from(buf);
+          }
+          buf.copy(buffer, pos);
+        } else {
+          Uint8Array.prototype.set.call(buffer, buf, pos);
+        }
+      } else if (!Buffer.isBuffer(buf)) {
+        throw new TypeError('"list" argument must be an Array of Buffers');
+      } else {
+        buf.copy(buffer, pos);
+      }
+      pos += buf.length;
+    }
+    return buffer;
+  }
+  /**
+   * The same as `buf1.compare(buf2)`.
+   */
+  static compare(buf1, buf2) {
+    if (Buffer._isInstance(buf1, Uint8Array)) {
+      buf1 = Buffer.from(buf1, buf1.byteOffset, buf1.byteLength);
+    }
+    if (Buffer._isInstance(buf2, Uint8Array)) {
+      buf2 = Buffer.from(buf2, buf2.byteOffset, buf2.byteLength);
+    }
+    if (!Buffer.isBuffer(buf1) || !Buffer.isBuffer(buf2)) {
+      throw new TypeError('The "buf1", "buf2" arguments must be one of type Buffer or Uint8Array');
+    }
+    if (buf1 === buf2) {
+      return 0;
+    }
+    let x = buf1.length;
+    let y = buf2.length;
+    for (let i = 0, len = Math.min(x, y); i < len; ++i) {
+      if (buf1[i] !== buf2[i]) {
+        x = buf1[i];
+        y = buf2[i];
+        break;
+      }
+    }
+    if (x < y) {
+      return -1;
+    }
+    if (y < x) {
+      return 1;
+    }
+    return 0;
+  }
+  /**
+   * Allocates a new buffer of `size` octets.
+   *
+   * @param size The number of octets to allocate.
+   * @param fill If specified, the buffer will be initialized by calling `buf.fill(fill)`, or with zeroes otherwise.
+   * @param encoding The encoding used for the call to `buf.fill()` while initializing.
+   */
+  static alloc(size, fill, encoding) {
+    if (typeof size !== "number") {
+      throw new TypeError('"size" argument must be of type number');
+    } else if (size < 0) {
+      throw new RangeError('The value "' + size + '" is invalid for option "size"');
+    }
+    if (size <= 0) {
+      return new Buffer(size);
+    }
+    if (fill !== void 0) {
+      return typeof encoding === "string" ? new Buffer(size).fill(fill, 0, size, encoding) : new Buffer(size).fill(fill);
+    }
+    return new Buffer(size);
+  }
+  /**
+   * Allocates a new buffer of `size` octets without initializing memory. The contents of the buffer are unknown.
+   *
+   * @param size
+   */
+  static allocUnsafe(size) {
+    if (typeof size !== "number") {
+      throw new TypeError('"size" argument must be of type number');
+    } else if (size < 0) {
+      throw new RangeError('The value "' + size + '" is invalid for option "size"');
+    }
+    return new Buffer(size < 0 ? 0 : Buffer._checked(size) | 0);
+  }
+  /**
+   * Returns true if the given `obj` is an instance of `type`.
+   *
+   * @param obj
+   * @param type
+   */
+  static _isInstance(obj, type) {
+    return obj instanceof type || obj != null && obj.constructor != null && obj.constructor.name != null && obj.constructor.name === type.name;
+  }
+  static _checked(length) {
+    if (length >= K_MAX_LENGTH) {
+      throw new RangeError(
+        "Attempt to allocate Buffer larger than maximum size: 0x" + K_MAX_LENGTH.toString(16) + " bytes"
+      );
+    }
+    return length | 0;
+  }
+  static _blitBuffer(src, dst, offset, length) {
+    let i;
+    for (i = 0; i < length; ++i) {
+      if (i + offset >= dst.length || i >= src.length) {
+        break;
+      }
+      dst[i + offset] = src[i];
+    }
+    return i;
+  }
+  static _utf8Write(buf, string, offset, length) {
+    return Buffer._blitBuffer(Buffer._utf8ToBytes(string, buf.length - offset), buf, offset, length);
+  }
+  static _asciiWrite(buf, string, offset, length) {
+    return Buffer._blitBuffer(Buffer._asciiToBytes(string), buf, offset, length);
+  }
+  static _base64Write(buf, string, offset, length) {
+    return Buffer._blitBuffer(Buffer._base64ToBytes(string), buf, offset, length);
+  }
+  static _ucs2Write(buf, string, offset, length) {
+    return Buffer._blitBuffer(Buffer._utf16leToBytes(string, buf.length - offset), buf, offset, length);
+  }
+  static _hexWrite(buf, string, offset, length) {
+    offset = Number(offset) || 0;
+    const remaining = buf.length - offset;
+    if (!length) {
+      length = remaining;
+    } else {
+      length = Number(length);
+      if (length > remaining) {
+        length = remaining;
+      }
+    }
+    const strLen = string.length;
+    if (length > strLen / 2) {
+      length = strLen / 2;
+    }
+    let i;
+    for (i = 0; i < length; ++i) {
+      const parsed = parseInt(string.substr(i * 2, 2), 16);
+      if (parsed !== parsed) {
+        return i;
+      }
+      buf[offset + i] = parsed;
+    }
+    return i;
+  }
+  static _utf8ToBytes(string, units) {
+    units = units || Infinity;
+    const length = string.length;
+    const bytes = [];
+    let codePoint;
+    let leadSurrogate = null;
+    for (let i = 0; i < length; ++i) {
+      codePoint = string.charCodeAt(i);
+      if (codePoint > 55295 && codePoint < 57344) {
+        if (!leadSurrogate) {
+          if (codePoint > 56319) {
+            if ((units -= 3) > -1) {
+              bytes.push(239, 191, 189);
+            }
+            continue;
+          } else if (i + 1 === length) {
+            if ((units -= 3) > -1) {
+              bytes.push(239, 191, 189);
+            }
+            continue;
+          }
+          leadSurrogate = codePoint;
+          continue;
+        }
+        if (codePoint < 56320) {
+          if ((units -= 3) > -1) {
+            bytes.push(239, 191, 189);
+          }
+          leadSurrogate = codePoint;
|
1512
|
+
continue;
|
1513
|
+
}
|
1514
|
+
codePoint = (leadSurrogate - 55296 << 10 | codePoint - 56320) + 65536;
|
1515
|
+
} else if (leadSurrogate) {
|
1516
|
+
if ((units -= 3) > -1) {
|
1517
|
+
bytes.push(239, 191, 189);
|
1518
|
+
}
|
1519
|
+
}
|
1520
|
+
leadSurrogate = null;
|
1521
|
+
if (codePoint < 128) {
|
1522
|
+
if ((units -= 1) < 0) {
|
1523
|
+
break;
|
1524
|
+
}
|
1525
|
+
bytes.push(codePoint);
|
1526
|
+
} else if (codePoint < 2048) {
|
1527
|
+
if ((units -= 2) < 0) {
|
1528
|
+
break;
|
1529
|
+
}
|
1530
|
+
bytes.push(codePoint >> 6 | 192, codePoint & 63 | 128);
|
1531
|
+
} else if (codePoint < 65536) {
|
1532
|
+
if ((units -= 3) < 0) {
|
1533
|
+
break;
|
1534
|
+
}
|
1535
|
+
bytes.push(codePoint >> 12 | 224, codePoint >> 6 & 63 | 128, codePoint & 63 | 128);
|
1536
|
+
} else if (codePoint < 1114112) {
|
1537
|
+
if ((units -= 4) < 0) {
|
1538
|
+
break;
|
1539
|
+
}
|
1540
|
+
bytes.push(
|
1541
|
+
codePoint >> 18 | 240,
|
1542
|
+
codePoint >> 12 & 63 | 128,
|
1543
|
+
codePoint >> 6 & 63 | 128,
|
1544
|
+
codePoint & 63 | 128
|
1545
|
+
);
|
1546
|
+
} else {
|
1547
|
+
throw new Error("Invalid code point");
|
1548
|
+
}
|
1549
|
+
}
|
1550
|
+
return bytes;
|
1551
|
+
}
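`_utf8ToBytes` above is a hand-rolled UTF-8 encoder: it pairs surrogates into a single code point, emits the replacement sequence `239 191 189` for lone surrogates, and stops once the byte budget (`units`) is exhausted. In modern runtimes the same output (minus the byte budget) can be cross-checked against the platform encoder; a small comparison sketch, not taken from the package:

// Sketch: compare the polyfill's expected output with the platform TextEncoder.
const sample = "héllo 🚀"; // includes a 2-byte character and a surrogate pair
console.log(Array.from(new TextEncoder().encode(sample))); // 1-4 bytes per code point, as in the branches above

// A lone surrogate encodes to the replacement character U+FFFD -> bytes 239, 191, 189:
console.log(Array.from(new TextEncoder().encode("\uD800"))); // [239, 191, 189]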
|
1552
|
+
static _base64ToBytes(str) {
|
1553
|
+
return toByteArray(base64clean(str));
|
1554
|
+
}
|
1555
|
+
static _asciiToBytes(str) {
|
1556
|
+
const byteArray = [];
|
1557
|
+
for (let i = 0; i < str.length; ++i) {
|
1558
|
+
byteArray.push(str.charCodeAt(i) & 255);
|
1559
|
+
}
|
1560
|
+
return byteArray;
|
1561
|
+
}
|
1562
|
+
static _utf16leToBytes(str, units) {
|
1563
|
+
let c, hi, lo;
|
1564
|
+
const byteArray = [];
|
1565
|
+
for (let i = 0; i < str.length; ++i) {
|
1566
|
+
if ((units -= 2) < 0) break;
|
1567
|
+
c = str.charCodeAt(i);
|
1568
|
+
hi = c >> 8;
|
1569
|
+
lo = c % 256;
|
1570
|
+
byteArray.push(lo);
|
1571
|
+
byteArray.push(hi);
|
1572
|
+
}
|
1573
|
+
return byteArray;
|
1574
|
+
}
|
1575
|
+
static _hexSlice(buf, start, end) {
|
1576
|
+
const len = buf.length;
|
1577
|
+
if (!start || start < 0) {
|
1578
|
+
start = 0;
|
1579
|
+
}
|
1580
|
+
if (!end || end < 0 || end > len) {
|
1581
|
+
end = len;
|
1582
|
+
}
|
1583
|
+
let out = "";
|
1584
|
+
for (let i = start; i < end; ++i) {
|
1585
|
+
out += hexSliceLookupTable[buf[i]];
|
1586
|
+
}
|
1587
|
+
return out;
|
1588
|
+
}
|
1589
|
+
static _base64Slice(buf, start, end) {
|
1590
|
+
if (start === 0 && end === buf.length) {
|
1591
|
+
return fromByteArray(buf);
|
1592
|
+
} else {
|
1593
|
+
return fromByteArray(buf.slice(start, end));
|
1594
|
+
}
|
1595
|
+
}
|
1596
|
+
static _utf8Slice(buf, start, end) {
|
1597
|
+
end = Math.min(buf.length, end);
|
1598
|
+
const res = [];
|
1599
|
+
let i = start;
|
1600
|
+
while (i < end) {
|
1601
|
+
const firstByte = buf[i];
|
1602
|
+
let codePoint = null;
|
1603
|
+
let bytesPerSequence = firstByte > 239 ? 4 : firstByte > 223 ? 3 : firstByte > 191 ? 2 : 1;
|
1604
|
+
if (i + bytesPerSequence <= end) {
|
1605
|
+
let secondByte, thirdByte, fourthByte, tempCodePoint;
|
1606
|
+
switch (bytesPerSequence) {
|
1607
|
+
case 1:
|
1608
|
+
if (firstByte < 128) {
|
1609
|
+
codePoint = firstByte;
|
1610
|
+
}
|
1611
|
+
break;
|
1612
|
+
case 2:
|
1613
|
+
secondByte = buf[i + 1];
|
1614
|
+
if ((secondByte & 192) === 128) {
|
1615
|
+
tempCodePoint = (firstByte & 31) << 6 | secondByte & 63;
|
1616
|
+
if (tempCodePoint > 127) {
|
1617
|
+
codePoint = tempCodePoint;
|
1618
|
+
}
|
1619
|
+
}
|
1620
|
+
break;
|
1621
|
+
case 3:
|
1622
|
+
secondByte = buf[i + 1];
|
1623
|
+
thirdByte = buf[i + 2];
|
1624
|
+
if ((secondByte & 192) === 128 && (thirdByte & 192) === 128) {
|
1625
|
+
tempCodePoint = (firstByte & 15) << 12 | (secondByte & 63) << 6 | thirdByte & 63;
|
1626
|
+
if (tempCodePoint > 2047 && (tempCodePoint < 55296 || tempCodePoint > 57343)) {
|
1627
|
+
codePoint = tempCodePoint;
|
1628
|
+
}
|
1629
|
+
}
|
1630
|
+
break;
|
1631
|
+
case 4:
|
1632
|
+
secondByte = buf[i + 1];
|
1633
|
+
thirdByte = buf[i + 2];
|
1634
|
+
fourthByte = buf[i + 3];
|
1635
|
+
if ((secondByte & 192) === 128 && (thirdByte & 192) === 128 && (fourthByte & 192) === 128) {
|
1636
|
+
tempCodePoint = (firstByte & 15) << 18 | (secondByte & 63) << 12 | (thirdByte & 63) << 6 | fourthByte & 63;
|
1637
|
+
if (tempCodePoint > 65535 && tempCodePoint < 1114112) {
|
1638
|
+
codePoint = tempCodePoint;
|
1639
|
+
}
|
1640
|
+
}
|
1641
|
+
}
|
1642
|
+
}
|
1643
|
+
if (codePoint === null) {
|
1644
|
+
codePoint = 65533;
|
1645
|
+
bytesPerSequence = 1;
|
1646
|
+
} else if (codePoint > 65535) {
|
1647
|
+
codePoint -= 65536;
|
1648
|
+
res.push(codePoint >>> 10 & 1023 | 55296);
|
1649
|
+
codePoint = 56320 | codePoint & 1023;
|
1650
|
+
}
|
1651
|
+
res.push(codePoint);
|
1652
|
+
i += bytesPerSequence;
|
1653
|
+
}
|
1654
|
+
return Buffer._decodeCodePointsArray(res);
|
1655
|
+
}
|
1656
|
+
static _decodeCodePointsArray(codePoints) {
|
1657
|
+
const len = codePoints.length;
|
1658
|
+
if (len <= MAX_ARGUMENTS_LENGTH) {
|
1659
|
+
return String.fromCharCode.apply(String, codePoints);
|
1660
|
+
}
|
1661
|
+
let res = "";
|
1662
|
+
let i = 0;
|
1663
|
+
while (i < len) {
|
1664
|
+
res += String.fromCharCode.apply(String, codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH));
|
1665
|
+
}
|
1666
|
+
return res;
|
1667
|
+
}
|
1668
|
+
static _asciiSlice(buf, start, end) {
|
1669
|
+
let ret = "";
|
1670
|
+
end = Math.min(buf.length, end);
|
1671
|
+
for (let i = start; i < end; ++i) {
|
1672
|
+
ret += String.fromCharCode(buf[i] & 127);
|
1673
|
+
}
|
1674
|
+
return ret;
|
1675
|
+
}
|
1676
|
+
static _latin1Slice(buf, start, end) {
|
1677
|
+
let ret = "";
|
1678
|
+
end = Math.min(buf.length, end);
|
1679
|
+
for (let i = start; i < end; ++i) {
|
1680
|
+
ret += String.fromCharCode(buf[i]);
|
1681
|
+
}
|
1682
|
+
return ret;
|
1683
|
+
}
|
1684
|
+
static _utf16leSlice(buf, start, end) {
|
1685
|
+
const bytes = buf.slice(start, end);
|
1686
|
+
let res = "";
|
1687
|
+
for (let i = 0; i < bytes.length - 1; i += 2) {
|
1688
|
+
res += String.fromCharCode(bytes[i] + bytes[i + 1] * 256);
|
1689
|
+
}
|
1690
|
+
return res;
|
1691
|
+
}
|
1692
|
+
static _arrayIndexOf(arr, val, byteOffset, encoding, dir) {
|
1693
|
+
let indexSize = 1;
|
1694
|
+
let arrLength = arr.length;
|
1695
|
+
let valLength = val.length;
|
1696
|
+
if (encoding !== void 0) {
|
1697
|
+
encoding = Buffer._getEncoding(encoding);
|
1698
|
+
if (encoding === "ucs2" || encoding === "utf16le") {
|
1699
|
+
if (arr.length < 2 || val.length < 2) {
|
1700
|
+
return -1;
|
1701
|
+
}
|
1702
|
+
indexSize = 2;
|
1703
|
+
arrLength /= 2;
|
1704
|
+
valLength /= 2;
|
1705
|
+
byteOffset /= 2;
|
1706
|
+
}
|
1707
|
+
}
|
1708
|
+
function read(buf, i2) {
|
1709
|
+
if (indexSize === 1) {
|
1710
|
+
return buf[i2];
|
1711
|
+
} else {
|
1712
|
+
return buf.readUInt16BE(i2 * indexSize);
|
1713
|
+
}
|
1714
|
+
}
|
1715
|
+
let i;
|
1716
|
+
if (dir) {
|
1717
|
+
let foundIndex = -1;
|
1718
|
+
for (i = byteOffset; i < arrLength; i++) {
|
1719
|
+
if (read(arr, i) === read(val, foundIndex === -1 ? 0 : i - foundIndex)) {
|
1720
|
+
if (foundIndex === -1) foundIndex = i;
|
1721
|
+
if (i - foundIndex + 1 === valLength) return foundIndex * indexSize;
|
1722
|
+
} else {
|
1723
|
+
if (foundIndex !== -1) i -= i - foundIndex;
|
1724
|
+
foundIndex = -1;
|
1725
|
+
}
|
1726
|
+
}
|
1727
|
+
} else {
|
1728
|
+
if (byteOffset + valLength > arrLength) {
|
1729
|
+
byteOffset = arrLength - valLength;
|
1730
|
+
}
|
1731
|
+
for (i = byteOffset; i >= 0; i--) {
|
1732
|
+
let found = true;
|
1733
|
+
for (let j = 0; j < valLength; j++) {
|
1734
|
+
if (read(arr, i + j) !== read(val, j)) {
|
1735
|
+
found = false;
|
1736
|
+
break;
|
1737
|
+
}
|
1738
|
+
}
|
1739
|
+
if (found) {
|
1740
|
+
return i;
|
1741
|
+
}
|
1742
|
+
}
|
1743
|
+
}
|
1744
|
+
return -1;
|
1745
|
+
}
|
1746
|
+
static _checkOffset(offset, ext, length) {
|
1747
|
+
if (offset % 1 !== 0 || offset < 0) throw new RangeError("offset is not uint");
|
1748
|
+
if (offset + ext > length) throw new RangeError("Trying to access beyond buffer length");
|
1749
|
+
}
|
1750
|
+
static _checkInt(buf, value, offset, ext, max, min) {
|
1751
|
+
if (!Buffer.isBuffer(buf)) throw new TypeError('"buffer" argument must be a Buffer instance');
|
1752
|
+
if (value > max || value < min) throw new RangeError('"value" argument is out of bounds');
|
1753
|
+
if (offset + ext > buf.length) throw new RangeError("Index out of range");
|
1754
|
+
}
|
1755
|
+
static _getEncoding(encoding) {
|
1756
|
+
let toLowerCase = false;
|
1757
|
+
let originalEncoding = "";
|
1758
|
+
for (; ; ) {
|
1759
|
+
switch (encoding) {
|
1760
|
+
case "hex":
|
1761
|
+
return "hex";
|
1762
|
+
case "utf8":
|
1763
|
+
return "utf8";
|
1764
|
+
case "ascii":
|
1765
|
+
return "ascii";
|
1766
|
+
case "binary":
|
1767
|
+
return "binary";
|
1768
|
+
case "latin1":
|
1769
|
+
return "latin1";
|
1770
|
+
case "ucs2":
|
1771
|
+
return "ucs2";
|
1772
|
+
case "utf16le":
|
1773
|
+
return "utf16le";
|
1774
|
+
case "base64":
|
1775
|
+
return "base64";
|
1776
|
+
default: {
|
1777
|
+
if (toLowerCase) {
|
1778
|
+
throw new TypeError("Unknown or unsupported encoding: " + originalEncoding);
|
1779
|
+
}
|
1780
|
+
toLowerCase = true;
|
1781
|
+
originalEncoding = encoding;
|
1782
|
+
encoding = encoding.toLowerCase();
|
1783
|
+
}
|
1784
|
+
}
|
1785
|
+
}
|
1786
|
+
}
|
1787
|
+
}
|
1788
|
+
const hexSliceLookupTable = function() {
|
1789
|
+
const alphabet = "0123456789abcdef";
|
1790
|
+
const table = new Array(256);
|
1791
|
+
for (let i = 0; i < 16; ++i) {
|
1792
|
+
const i16 = i * 16;
|
1793
|
+
for (let j = 0; j < 16; ++j) {
|
1794
|
+
table[i16 + j] = alphabet[i] + alphabet[j];
|
1795
|
+
}
|
1796
|
+
}
|
1797
|
+
return table;
|
1798
|
+
}();
|
1799
|
+
const INVALID_BASE64_RE = /[^+/0-9A-Za-z-_]/g;
|
1800
|
+
function base64clean(str) {
|
1801
|
+
str = str.split("=")[0];
|
1802
|
+
str = str.trim().replace(INVALID_BASE64_RE, "");
|
1803
|
+
if (str.length < 2) return "";
|
1804
|
+
while (str.length % 4 !== 0) {
|
1805
|
+
str = str + "=";
|
1806
|
+
}
|
1807
|
+
return str;
|
1808
|
+
}
|
1809
|
+
|
27
1810
|
function notEmpty(value) {
|
28
1811
|
return value !== null && value !== void 0;
|
29
1812
|
}
|
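This closes the inlined polyfill hunk. Two small helpers finish it: `hexSliceLookupTable` precomputes all 256 two-character hex strings so `_hexSlice` is one table lookup per byte, and `base64clean` strips padding and invalid characters before re-padding the string to a multiple of four for decoding. A standalone sketch of the clean-and-repad step (illustrative; the real decoder is the `toByteArray` added earlier):

// Sketch: normalise a base64-ish string the way base64clean does.
const INVALID_B64 = /[^+/0-9A-Za-z-_]/g;
function cleanBase64(str: string): string {
  let s = str.split("=")[0].trim().replace(INVALID_B64, "");
  if (s.length < 2) return "";
  while (s.length % 4 !== 0) s += "="; // re-pad to a multiple of 4
  return s;
}
// cleanBase64("aGVsbG8") -> "aGVsbG8="  (length 7 padded to 8)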
@@ -118,155 +1901,15 @@ function promiseMap(inputValues, mapper) {
|
|
118
1901
|
return inputValues.reduce(reducer, Promise.resolve([]));
|
119
1902
|
}
|
120
1903
|
|
121
|
-
|
122
|
-
|
123
|
-
if (isDefined(process) && isDefined(process.env)) {
|
124
|
-
return {
|
125
|
-
apiKey: process.env.XATA_API_KEY ?? getGlobalApiKey(),
|
126
|
-
databaseURL: process.env.XATA_DATABASE_URL ?? getGlobalDatabaseURL(),
|
127
|
-
branch: process.env.XATA_BRANCH ?? getGlobalBranch(),
|
128
|
-
deployPreview: process.env.XATA_PREVIEW,
|
129
|
-
deployPreviewBranch: process.env.XATA_PREVIEW_BRANCH,
|
130
|
-
vercelGitCommitRef: process.env.VERCEL_GIT_COMMIT_REF,
|
131
|
-
vercelGitRepoOwner: process.env.VERCEL_GIT_REPO_OWNER
|
132
|
-
};
|
133
|
-
}
|
134
|
-
} catch (err) {
|
135
|
-
}
|
136
|
-
try {
|
137
|
-
if (isObject(Deno) && isObject(Deno.env)) {
|
138
|
-
return {
|
139
|
-
apiKey: Deno.env.get("XATA_API_KEY") ?? getGlobalApiKey(),
|
140
|
-
databaseURL: Deno.env.get("XATA_DATABASE_URL") ?? getGlobalDatabaseURL(),
|
141
|
-
branch: Deno.env.get("XATA_BRANCH") ?? getGlobalBranch(),
|
142
|
-
deployPreview: Deno.env.get("XATA_PREVIEW"),
|
143
|
-
deployPreviewBranch: Deno.env.get("XATA_PREVIEW_BRANCH"),
|
144
|
-
vercelGitCommitRef: Deno.env.get("VERCEL_GIT_COMMIT_REF"),
|
145
|
-
vercelGitRepoOwner: Deno.env.get("VERCEL_GIT_REPO_OWNER")
|
146
|
-
};
|
147
|
-
}
|
148
|
-
} catch (err) {
|
149
|
-
}
|
150
|
-
return {
|
151
|
-
apiKey: getGlobalApiKey(),
|
152
|
-
databaseURL: getGlobalDatabaseURL(),
|
153
|
-
branch: getGlobalBranch(),
|
154
|
-
deployPreview: void 0,
|
155
|
-
deployPreviewBranch: void 0,
|
156
|
-
vercelGitCommitRef: void 0,
|
157
|
-
vercelGitRepoOwner: void 0
|
158
|
-
};
|
159
|
-
}
|
160
|
-
function getEnableBrowserVariable() {
|
161
|
-
try {
|
162
|
-
if (isObject(process) && isObject(process.env) && process.env.XATA_ENABLE_BROWSER !== void 0) {
|
163
|
-
return process.env.XATA_ENABLE_BROWSER === "true";
|
164
|
-
}
|
165
|
-
} catch (err) {
|
166
|
-
}
|
167
|
-
try {
|
168
|
-
if (isObject(Deno) && isObject(Deno.env) && Deno.env.get("XATA_ENABLE_BROWSER") !== void 0) {
|
169
|
-
return Deno.env.get("XATA_ENABLE_BROWSER") === "true";
|
170
|
-
}
|
171
|
-
} catch (err) {
|
172
|
-
}
|
173
|
-
try {
|
174
|
-
return XATA_ENABLE_BROWSER === true || XATA_ENABLE_BROWSER === "true";
|
175
|
-
} catch (err) {
|
176
|
-
return void 0;
|
177
|
-
}
|
178
|
-
}
|
179
|
-
function getGlobalApiKey() {
|
180
|
-
try {
|
181
|
-
return XATA_API_KEY;
|
182
|
-
} catch (err) {
|
183
|
-
return void 0;
|
184
|
-
}
|
185
|
-
}
|
186
|
-
function getGlobalDatabaseURL() {
|
187
|
-
try {
|
188
|
-
return XATA_DATABASE_URL;
|
189
|
-
} catch (err) {
|
190
|
-
return void 0;
|
191
|
-
}
|
192
|
-
}
|
193
|
-
function getGlobalBranch() {
|
194
|
-
try {
|
195
|
-
return XATA_BRANCH;
|
196
|
-
} catch (err) {
|
197
|
-
return void 0;
|
198
|
-
}
|
199
|
-
}
|
200
|
-
function getDatabaseURL() {
|
201
|
-
try {
|
202
|
-
const { databaseURL } = getEnvironment();
|
203
|
-
return databaseURL;
|
204
|
-
} catch (err) {
|
205
|
-
return void 0;
|
206
|
-
}
|
207
|
-
}
|
208
|
-
function getAPIKey() {
|
209
|
-
try {
|
210
|
-
const { apiKey } = getEnvironment();
|
211
|
-
return apiKey;
|
212
|
-
} catch (err) {
|
213
|
-
return void 0;
|
214
|
-
}
|
215
|
-
}
|
216
|
-
function getBranch() {
|
217
|
-
try {
|
218
|
-
const { branch } = getEnvironment();
|
219
|
-
return branch;
|
220
|
-
} catch (err) {
|
221
|
-
return void 0;
|
222
|
-
}
|
223
|
-
}
|
224
|
-
function buildPreviewBranchName({ org, branch }) {
|
225
|
-
return `preview-${org}-${branch}`;
|
226
|
-
}
|
227
|
-
function getPreviewBranch() {
|
228
|
-
try {
|
229
|
-
const { deployPreview, deployPreviewBranch, vercelGitCommitRef, vercelGitRepoOwner } = getEnvironment();
|
230
|
-
if (deployPreviewBranch)
|
231
|
-
return deployPreviewBranch;
|
232
|
-
switch (deployPreview) {
|
233
|
-
case "vercel": {
|
234
|
-
if (!vercelGitCommitRef || !vercelGitRepoOwner) {
|
235
|
-
console.warn("XATA_PREVIEW=vercel but VERCEL_GIT_COMMIT_REF or VERCEL_GIT_REPO_OWNER is not valid");
|
236
|
-
return void 0;
|
237
|
-
}
|
238
|
-
return buildPreviewBranchName({ org: vercelGitRepoOwner, branch: vercelGitCommitRef });
|
239
|
-
}
|
240
|
-
}
|
241
|
-
return void 0;
|
242
|
-
} catch (err) {
|
243
|
-
return void 0;
|
244
|
-
}
|
245
|
-
}
|
246
|
-
|
247
|
-
var __accessCheck$6 = (obj, member, msg) => {
|
248
|
-
if (!member.has(obj))
|
249
|
-
throw TypeError("Cannot " + msg);
|
250
|
-
};
|
251
|
-
var __privateGet$5 = (obj, member, getter) => {
|
252
|
-
__accessCheck$6(obj, member, "read from private field");
|
253
|
-
return getter ? getter.call(obj) : member.get(obj);
|
254
|
-
};
|
255
|
-
var __privateAdd$6 = (obj, member, value) => {
|
256
|
-
if (member.has(obj))
|
257
|
-
throw TypeError("Cannot add the same private member more than once");
|
258
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
1904
|
+
var __typeError$6 = (msg) => {
|
1905
|
+
throw TypeError(msg);
|
259
1906
|
};
|
260
|
-
var
|
261
|
-
|
262
|
-
|
263
|
-
|
264
|
-
|
265
|
-
var
|
266
|
-
__accessCheck$6(obj, member, "access private method");
|
267
|
-
return method;
|
268
|
-
};
|
269
|
-
var _fetch, _queue, _concurrency, _enqueue, enqueue_fn;
|
1907
|
+
var __accessCheck$6 = (obj, member, msg) => member.has(obj) || __typeError$6("Cannot " + msg);
|
1908
|
+
var __privateGet$5 = (obj, member, getter) => (__accessCheck$6(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
1909
|
+
var __privateAdd$6 = (obj, member, value) => member.has(obj) ? __typeError$6("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
1910
|
+
var __privateSet$4 = (obj, member, value, setter) => (__accessCheck$6(obj, member, "write to private field"), member.set(obj, value), value);
|
1911
|
+
var __privateMethod$4 = (obj, member, method) => (__accessCheck$6(obj, member, "access private method"), method);
|
1912
|
+
var _fetch, _queue, _concurrency, _ApiRequestPool_instances, enqueue_fn;
|
270
1913
|
const REQUEST_TIMEOUT = 5 * 60 * 1e3;
|
271
1914
|
function getFetchImplementation(userFetch) {
|
272
1915
|
const globalFetch = typeof fetch !== "undefined" ? fetch : void 0;
|
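The hunk above (and the matching ones later in the file) swaps the multi-line `__accessCheck`/`__privateGet`/`__privateAdd` helpers for one-liners built on a shared `__typeError$6`, and folds the per-method WeakSets into a single `_ApiRequestPool_instances` set. These helpers are the usual down-levelling pattern for `#private` class members: a WeakMap per field keyed by the instance, plus a brand check before every access. A minimal sketch of that pattern, separate from the bundle's own helpers:

// Sketch: how #private fields lower to WeakMap + brand-check helpers.
const typeError = (msg: string): never => { throw new TypeError(msg); };
const accessCheck = (obj: object, member: { has(key: object): boolean }, msg: string) =>
  member.has(obj) || typeError("Cannot " + msg);

const _count = new WeakMap<object, number>();
class Counter {
  constructor() { _count.set(this, 0); }                     // __privateAdd
  increment() {
    accessCheck(this, _count, "read from private field");    // __accessCheck
    _count.set(this, (_count.get(this) ?? 0) + 1);           // __privateSet
    return _count.get(this);
  }
}
console.log(new Counter().increment()); // 1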
@@ -279,10 +1922,10 @@ function getFetchImplementation(userFetch) {
|
|
279
1922
|
}
|
280
1923
|
class ApiRequestPool {
|
281
1924
|
constructor(concurrency = 10) {
|
282
|
-
__privateAdd$6(this,
|
283
|
-
__privateAdd$6(this, _fetch
|
284
|
-
__privateAdd$6(this, _queue
|
285
|
-
__privateAdd$6(this, _concurrency
|
1925
|
+
__privateAdd$6(this, _ApiRequestPool_instances);
|
1926
|
+
__privateAdd$6(this, _fetch);
|
1927
|
+
__privateAdd$6(this, _queue);
|
1928
|
+
__privateAdd$6(this, _concurrency);
|
286
1929
|
__privateSet$4(this, _queue, []);
|
287
1930
|
__privateSet$4(this, _concurrency, concurrency);
|
288
1931
|
this.running = 0;
|
@@ -317,7 +1960,7 @@ class ApiRequestPool {
|
|
317
1960
|
}
|
318
1961
|
return response;
|
319
1962
|
};
|
320
|
-
return __privateMethod$4(this,
|
1963
|
+
return __privateMethod$4(this, _ApiRequestPool_instances, enqueue_fn).call(this, async () => {
|
321
1964
|
return await runRequest();
|
322
1965
|
});
|
323
1966
|
}
|
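For context on `_queue`/`_concurrency` above: the pool caps in-flight requests at `concurrency` and parks extra callers as resolver functions in `_queue`, waking one whenever a request settles (see `enqueue_fn` just below). A small self-contained sketch of that pattern; the names here are illustrative, not the bundle's:

// Sketch: a minimal concurrency-limited task pool.
class TaskPool {
  private queue: Array<() => void> = [];
  private running = 0;
  constructor(private concurrency = 10) {}

  async run<T>(task: () => Promise<T>): Promise<T> {
    if (this.running >= this.concurrency) {
      await new Promise<void>((resolve) => this.queue.push(resolve)); // wait for a free slot
    }
    this.running++;
    try {
      return await task();
    } finally {
      this.running--;
      this.queue.shift()?.(); // wake the next waiter, if any
    }
  }
}
// new TaskPool(2).run(() => fetch("https://example.com")) — at most 2 requests in flight.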
@@ -325,7 +1968,7 @@ class ApiRequestPool {
|
|
325
1968
|
_fetch = new WeakMap();
|
326
1969
|
_queue = new WeakMap();
|
327
1970
|
_concurrency = new WeakMap();
|
328
|
-
|
1971
|
+
_ApiRequestPool_instances = new WeakSet();
|
329
1972
|
enqueue_fn = function(task) {
|
330
1973
|
const promise = new Promise((resolve) => __privateGet$5(this, _queue).push(resolve)).finally(() => {
|
331
1974
|
this.started--;
|
@@ -528,7 +2171,7 @@ function defaultOnOpen(response) {
|
|
528
2171
|
}
|
529
2172
|
}
|
530
2173
|
|
531
|
-
const VERSION = "0.29.
|
2174
|
+
const VERSION = "0.29.4";
|
532
2175
|
|
533
2176
|
class ErrorWithCause extends Error {
|
534
2177
|
constructor(message, options) {
|
@@ -608,35 +2251,30 @@ function parseProviderString(provider = "production") {
|
|
608
2251
|
return provider;
|
609
2252
|
}
|
610
2253
|
const [main, workspaces] = provider.split(",");
|
611
|
-
if (!main || !workspaces)
|
612
|
-
return null;
|
2254
|
+
if (!main || !workspaces) return null;
|
613
2255
|
return { main, workspaces };
|
614
2256
|
}
|
615
2257
|
function buildProviderString(provider) {
|
616
|
-
if (isHostProviderAlias(provider))
|
617
|
-
return provider;
|
2258
|
+
if (isHostProviderAlias(provider)) return provider;
|
618
2259
|
return `${provider.main},${provider.workspaces}`;
|
619
2260
|
}
|
620
2261
|
function parseWorkspacesUrlParts(url) {
|
621
|
-
if (!isString(url))
|
622
|
-
return null;
|
2262
|
+
if (!isString(url)) return null;
|
623
2263
|
const matches = {
|
624
2264
|
production: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.xata\.sh\/db\/([^:]+):?(.*)?/),
|
625
2265
|
staging: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.staging-xata\.dev\/db\/([^:]+):?(.*)?/),
|
626
2266
|
dev: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.dev-xata\.dev\/db\/([^:]+):?(.*)?/),
|
627
|
-
local: url.match(/(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:([^:]+):?(.*)?/)
|
2267
|
+
local: url.match(/(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:(?:\d+)\/db\/([^:]+):?(.*)?/)
|
628
2268
|
};
|
629
2269
|
const [host, match] = Object.entries(matches).find(([, match2]) => match2 !== null) ?? [];
|
630
|
-
if (!isHostProviderAlias(host) || !match)
|
631
|
-
return null;
|
2270
|
+
if (!isHostProviderAlias(host) || !match) return null;
|
632
2271
|
return { workspace: match[1], region: match[2], database: match[3], branch: match[4], host };
|
633
2272
|
}
|
634
2273
|
|
635
2274
|
const pool = new ApiRequestPool();
|
636
2275
|
const resolveUrl = (url, queryParams = {}, pathParams = {}) => {
|
637
2276
|
const cleanQueryParams = Object.entries(queryParams).reduce((acc, [key, value]) => {
|
638
|
-
if (value === void 0 || value === null)
|
639
|
-
return acc;
|
2277
|
+
if (value === void 0 || value === null) return acc;
|
640
2278
|
return { ...acc, [key]: value };
|
641
2279
|
}, {});
|
642
2280
|
const query = new URLSearchParams(cleanQueryParams).toString();
|
@@ -684,8 +2322,7 @@ function hostHeader(url) {
|
|
684
2322
|
return groups?.host ? { Host: groups.host } : {};
|
685
2323
|
}
|
686
2324
|
async function parseBody(body, headers) {
|
687
|
-
if (!isDefined(body))
|
688
|
-
return void 0;
|
2325
|
+
if (!isDefined(body)) return void 0;
|
689
2326
|
if (isBlob(body) || typeof body.text === "function") {
|
690
2327
|
return body;
|
691
2328
|
}
|
@@ -764,8 +2401,7 @@ async function fetch$1({
|
|
764
2401
|
[TraceAttributes.CLOUDFLARE_RAY_ID]: response.headers?.get("cf-ray") ?? void 0
|
765
2402
|
});
|
766
2403
|
const message = response.headers?.get("x-xata-message");
|
767
|
-
if (message)
|
768
|
-
console.warn(message);
|
2404
|
+
if (message) console.warn(message);
|
769
2405
|
if (response.status === 204) {
|
770
2406
|
return {};
|
771
2407
|
}
|
@@ -849,16 +2485,48 @@ function parseUrl(url) {
|
|
849
2485
|
|
850
2486
|
const dataPlaneFetch = async (options) => fetch$1({ ...options, endpoint: "dataPlane" });
|
851
2487
|
|
852
|
-
const applyMigration = (variables, signal) => dataPlaneFetch({
|
2488
|
+
const applyMigration = (variables, signal) => dataPlaneFetch({
|
2489
|
+
url: "/db/{dbBranchName}/migrations/apply",
|
2490
|
+
method: "post",
|
2491
|
+
...variables,
|
2492
|
+
signal
|
2493
|
+
});
|
2494
|
+
const startMigration = (variables, signal) => dataPlaneFetch({
|
2495
|
+
url: "/db/{dbBranchName}/migrations/start",
|
2496
|
+
method: "post",
|
2497
|
+
...variables,
|
2498
|
+
signal
|
2499
|
+
});
|
853
2500
|
const adaptTable = (variables, signal) => dataPlaneFetch({
|
854
2501
|
url: "/db/{dbBranchName}/migrations/adapt/{tableName}",
|
855
2502
|
method: "post",
|
856
2503
|
...variables,
|
857
2504
|
signal
|
858
2505
|
});
|
859
|
-
const
|
860
|
-
|
861
|
-
|
2506
|
+
const adaptAllTables = (variables, signal) => dataPlaneFetch({
|
2507
|
+
url: "/db/{dbBranchName}/migrations/adapt",
|
2508
|
+
method: "post",
|
2509
|
+
...variables,
|
2510
|
+
signal
|
2511
|
+
});
|
2512
|
+
const getBranchMigrationJobStatus = (variables, signal) => dataPlaneFetch({
|
2513
|
+
url: "/db/{dbBranchName}/migrations/status",
|
2514
|
+
method: "get",
|
2515
|
+
...variables,
|
2516
|
+
signal
|
2517
|
+
});
|
2518
|
+
const getMigrationJobStatus = (variables, signal) => dataPlaneFetch({
|
2519
|
+
url: "/db/{dbBranchName}/migrations/jobs/{jobId}",
|
2520
|
+
method: "get",
|
2521
|
+
...variables,
|
2522
|
+
signal
|
2523
|
+
});
|
2524
|
+
const getMigrationHistory = (variables, signal) => dataPlaneFetch({
|
2525
|
+
url: "/db/{dbBranchName}/migrations/history",
|
2526
|
+
method: "get",
|
2527
|
+
...variables,
|
2528
|
+
signal
|
2529
|
+
});
|
862
2530
|
const getBranchList = (variables, signal) => dataPlaneFetch({
|
863
2531
|
url: "/dbs/{dbName}",
|
864
2532
|
method: "get",
|
@@ -871,82 +2539,167 @@ const getDatabaseSettings = (variables, signal) => dataPlaneFetch({
|
|
871
2539
|
...variables,
|
872
2540
|
signal
|
873
2541
|
});
|
874
|
-
const updateDatabaseSettings = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/settings", method: "patch", ...variables, signal });
|
875
|
-
const getBranchDetails = (variables, signal) => dataPlaneFetch({
|
876
|
-
url: "/db/{dbBranchName}",
|
2542
|
+
const updateDatabaseSettings = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/settings", method: "patch", ...variables, signal });
|
2543
|
+
const getBranchDetails = (variables, signal) => dataPlaneFetch({
|
2544
|
+
url: "/db/{dbBranchName}",
|
2545
|
+
method: "get",
|
2546
|
+
...variables,
|
2547
|
+
signal
|
2548
|
+
});
|
2549
|
+
const createBranch = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}", method: "put", ...variables, signal });
|
2550
|
+
const deleteBranch = (variables, signal) => dataPlaneFetch({
|
2551
|
+
url: "/db/{dbBranchName}",
|
2552
|
+
method: "delete",
|
2553
|
+
...variables,
|
2554
|
+
signal
|
2555
|
+
});
|
2556
|
+
const getSchema = (variables, signal) => dataPlaneFetch({
|
2557
|
+
url: "/db/{dbBranchName}/schema",
|
2558
|
+
method: "get",
|
2559
|
+
...variables,
|
2560
|
+
signal
|
2561
|
+
});
|
2562
|
+
const copyBranch = (variables, signal) => dataPlaneFetch({
|
2563
|
+
url: "/db/{dbBranchName}/copy",
|
2564
|
+
method: "post",
|
2565
|
+
...variables,
|
2566
|
+
signal
|
2567
|
+
});
|
2568
|
+
const updateBranchMetadata = (variables, signal) => dataPlaneFetch({
|
2569
|
+
url: "/db/{dbBranchName}/metadata",
|
2570
|
+
method: "put",
|
2571
|
+
...variables,
|
2572
|
+
signal
|
2573
|
+
});
|
2574
|
+
const getBranchMetadata = (variables, signal) => dataPlaneFetch({
|
2575
|
+
url: "/db/{dbBranchName}/metadata",
|
2576
|
+
method: "get",
|
2577
|
+
...variables,
|
2578
|
+
signal
|
2579
|
+
});
|
2580
|
+
const getBranchStats = (variables, signal) => dataPlaneFetch({
|
2581
|
+
url: "/db/{dbBranchName}/stats",
|
2582
|
+
method: "get",
|
2583
|
+
...variables,
|
2584
|
+
signal
|
2585
|
+
});
|
2586
|
+
const getGitBranchesMapping = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "get", ...variables, signal });
|
2587
|
+
const addGitBranchesEntry = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "post", ...variables, signal });
|
2588
|
+
const removeGitBranchesEntry = (variables, signal) => dataPlaneFetch({
|
2589
|
+
url: "/dbs/{dbName}/gitBranches",
|
2590
|
+
method: "delete",
|
2591
|
+
...variables,
|
2592
|
+
signal
|
2593
|
+
});
|
2594
|
+
const resolveBranch = (variables, signal) => dataPlaneFetch({
|
2595
|
+
url: "/dbs/{dbName}/resolveBranch",
|
2596
|
+
method: "get",
|
2597
|
+
...variables,
|
2598
|
+
signal
|
2599
|
+
});
|
2600
|
+
const getBranchMigrationHistory = (variables, signal) => dataPlaneFetch({
|
2601
|
+
url: "/db/{dbBranchName}/migrations",
|
2602
|
+
method: "get",
|
2603
|
+
...variables,
|
2604
|
+
signal
|
2605
|
+
});
|
2606
|
+
const getBranchMigrationPlan = (variables, signal) => dataPlaneFetch({
|
2607
|
+
url: "/db/{dbBranchName}/migrations/plan",
|
2608
|
+
method: "post",
|
2609
|
+
...variables,
|
2610
|
+
signal
|
2611
|
+
});
|
2612
|
+
const executeBranchMigrationPlan = (variables, signal) => dataPlaneFetch({
|
2613
|
+
url: "/db/{dbBranchName}/migrations/execute",
|
2614
|
+
method: "post",
|
2615
|
+
...variables,
|
2616
|
+
signal
|
2617
|
+
});
|
2618
|
+
const queryMigrationRequests = (variables, signal) => dataPlaneFetch({
|
2619
|
+
url: "/dbs/{dbName}/migrations/query",
|
2620
|
+
method: "post",
|
2621
|
+
...variables,
|
2622
|
+
signal
|
2623
|
+
});
|
2624
|
+
const createMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations", method: "post", ...variables, signal });
|
2625
|
+
const getMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2626
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}",
|
2627
|
+
method: "get",
|
2628
|
+
...variables,
|
2629
|
+
signal
|
2630
|
+
});
|
2631
|
+
const updateMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2632
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}",
|
2633
|
+
method: "patch",
|
2634
|
+
...variables,
|
2635
|
+
signal
|
2636
|
+
});
|
2637
|
+
const listMigrationRequestsCommits = (variables, signal) => dataPlaneFetch({
|
2638
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/commits",
|
2639
|
+
method: "post",
|
2640
|
+
...variables,
|
2641
|
+
signal
|
2642
|
+
});
|
2643
|
+
const compareMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2644
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/compare",
|
2645
|
+
method: "post",
|
2646
|
+
...variables,
|
2647
|
+
signal
|
2648
|
+
});
|
2649
|
+
const getMigrationRequestIsMerged = (variables, signal) => dataPlaneFetch({
|
2650
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
|
877
2651
|
method: "get",
|
878
2652
|
...variables,
|
879
2653
|
signal
|
880
2654
|
});
|
881
|
-
const
|
882
|
-
|
883
|
-
|
884
|
-
method: "delete",
|
2655
|
+
const mergeMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2656
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
|
2657
|
+
method: "post",
|
885
2658
|
...variables,
|
886
2659
|
signal
|
887
2660
|
});
|
888
|
-
const
|
889
|
-
url: "/db/{dbBranchName}/schema",
|
890
|
-
method: "
|
2661
|
+
const getBranchSchemaHistory = (variables, signal) => dataPlaneFetch({
|
2662
|
+
url: "/db/{dbBranchName}/schema/history",
|
2663
|
+
method: "post",
|
891
2664
|
...variables,
|
892
2665
|
signal
|
893
2666
|
});
|
894
|
-
const
|
895
|
-
url: "/db/{dbBranchName}/
|
2667
|
+
const compareBranchWithUserSchema = (variables, signal) => dataPlaneFetch({
|
2668
|
+
url: "/db/{dbBranchName}/schema/compare",
|
896
2669
|
method: "post",
|
897
2670
|
...variables,
|
898
2671
|
signal
|
899
2672
|
});
|
900
|
-
const
|
901
|
-
url: "/db/{dbBranchName}/
|
902
|
-
method: "
|
2673
|
+
const compareBranchSchemas = (variables, signal) => dataPlaneFetch({
|
2674
|
+
url: "/db/{dbBranchName}/schema/compare/{branchName}",
|
2675
|
+
method: "post",
|
903
2676
|
...variables,
|
904
2677
|
signal
|
905
2678
|
});
|
906
|
-
const
|
907
|
-
url: "/db/{dbBranchName}/
|
908
|
-
method: "
|
2679
|
+
const updateBranchSchema = (variables, signal) => dataPlaneFetch({
|
2680
|
+
url: "/db/{dbBranchName}/schema/update",
|
2681
|
+
method: "post",
|
909
2682
|
...variables,
|
910
2683
|
signal
|
911
2684
|
});
|
912
|
-
const
|
913
|
-
url: "/db/{dbBranchName}/
|
914
|
-
method: "
|
2685
|
+
const previewBranchSchemaEdit = (variables, signal) => dataPlaneFetch({
|
2686
|
+
url: "/db/{dbBranchName}/schema/preview",
|
2687
|
+
method: "post",
|
915
2688
|
...variables,
|
916
2689
|
signal
|
917
2690
|
});
|
918
|
-
const
|
919
|
-
|
920
|
-
|
921
|
-
const resolveBranch = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/resolveBranch", method: "get", ...variables, signal });
|
922
|
-
const getBranchMigrationHistory = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations", method: "get", ...variables, signal });
|
923
|
-
const getBranchMigrationPlan = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/plan", method: "post", ...variables, signal });
|
924
|
-
const executeBranchMigrationPlan = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/execute", method: "post", ...variables, signal });
|
925
|
-
const queryMigrationRequests = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/query", method: "post", ...variables, signal });
|
926
|
-
const createMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations", method: "post", ...variables, signal });
|
927
|
-
const getMigrationRequest = (variables, signal) => dataPlaneFetch({
|
928
|
-
url: "/dbs/{dbName}/migrations/{mrNumber}",
|
929
|
-
method: "get",
|
2691
|
+
const applyBranchSchemaEdit = (variables, signal) => dataPlaneFetch({
|
2692
|
+
url: "/db/{dbBranchName}/schema/apply",
|
2693
|
+
method: "post",
|
930
2694
|
...variables,
|
931
2695
|
signal
|
932
2696
|
});
|
933
|
-
const
|
934
|
-
|
935
|
-
const compareMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/{mrNumber}/compare", method: "post", ...variables, signal });
|
936
|
-
const getMigrationRequestIsMerged = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/{mrNumber}/merge", method: "get", ...variables, signal });
|
937
|
-
const mergeMigrationRequest = (variables, signal) => dataPlaneFetch({
|
938
|
-
url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
|
2697
|
+
const pushBranchMigrations = (variables, signal) => dataPlaneFetch({
|
2698
|
+
url: "/db/{dbBranchName}/schema/push",
|
939
2699
|
method: "post",
|
940
2700
|
...variables,
|
941
2701
|
signal
|
942
2702
|
});
|
943
|
-
const getBranchSchemaHistory = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/history", method: "post", ...variables, signal });
|
944
|
-
const compareBranchWithUserSchema = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/compare", method: "post", ...variables, signal });
|
945
|
-
const compareBranchSchemas = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/compare/{branchName}", method: "post", ...variables, signal });
|
946
|
-
const updateBranchSchema = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/update", method: "post", ...variables, signal });
|
947
|
-
const previewBranchSchemaEdit = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/preview", method: "post", ...variables, signal });
|
948
|
-
const applyBranchSchemaEdit = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/apply", method: "post", ...variables, signal });
|
949
|
-
const pushBranchMigrations = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/push", method: "post", ...variables, signal });
|
950
2703
|
const createTable = (variables, signal) => dataPlaneFetch({
|
951
2704
|
url: "/db/{dbBranchName}/tables/{tableName}",
|
952
2705
|
method: "put",
|
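The long hunk above mostly reflows existing endpoint constants onto multiple lines, but it also adds `startMigration` and `adaptAllTables`. Every constant follows the same shape: a `(variables, signal)` closure over a fixed URL template and HTTP method, delegating to `dataPlaneFetch`. A sketch of that wrapper pattern with a stand-in fetcher (not the package's exact types):

// Sketch: the endpoint-constant pattern used throughout this file.
type Variables = Record<string, unknown>;
const doFetch = (options: Variables) => Promise.resolve(options); // stand-in for dataPlaneFetch

const endpoint =
  (url: string, method: string) =>
  (variables: Variables = {}, signal?: AbortSignal) =>
    doFetch({ url, method, ...variables, signal });

// e.g. the shape of the startMigration constant added in this hunk:
const startMigrationSketch = endpoint("/db/{dbBranchName}/migrations/start", "post");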
@@ -959,14 +2712,24 @@ const deleteTable = (variables, signal) => dataPlaneFetch({
|
|
959
2712
|
...variables,
|
960
2713
|
signal
|
961
2714
|
});
|
962
|
-
const updateTable = (variables, signal) => dataPlaneFetch({
|
2715
|
+
const updateTable = (variables, signal) => dataPlaneFetch({
|
2716
|
+
url: "/db/{dbBranchName}/tables/{tableName}",
|
2717
|
+
method: "patch",
|
2718
|
+
...variables,
|
2719
|
+
signal
|
2720
|
+
});
|
963
2721
|
const getTableSchema = (variables, signal) => dataPlaneFetch({
|
964
2722
|
url: "/db/{dbBranchName}/tables/{tableName}/schema",
|
965
2723
|
method: "get",
|
966
2724
|
...variables,
|
967
2725
|
signal
|
968
2726
|
});
|
969
|
-
const setTableSchema = (variables, signal) => dataPlaneFetch({
|
2727
|
+
const setTableSchema = (variables, signal) => dataPlaneFetch({
|
2728
|
+
url: "/db/{dbBranchName}/tables/{tableName}/schema",
|
2729
|
+
method: "put",
|
2730
|
+
...variables,
|
2731
|
+
signal
|
2732
|
+
});
|
970
2733
|
const getTableColumns = (variables, signal) => dataPlaneFetch({
|
971
2734
|
url: "/db/{dbBranchName}/tables/{tableName}/columns",
|
972
2735
|
method: "get",
|
@@ -974,7 +2737,12 @@ const getTableColumns = (variables, signal) => dataPlaneFetch({
|
|
974
2737
|
signal
|
975
2738
|
});
|
976
2739
|
const addTableColumn = (variables, signal) => dataPlaneFetch(
|
977
|
-
{
|
2740
|
+
{
|
2741
|
+
url: "/db/{dbBranchName}/tables/{tableName}/columns",
|
2742
|
+
method: "post",
|
2743
|
+
...variables,
|
2744
|
+
signal
|
2745
|
+
}
|
978
2746
|
);
|
979
2747
|
const getColumn = (variables, signal) => dataPlaneFetch({
|
980
2748
|
url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
|
@@ -982,15 +2750,30 @@ const getColumn = (variables, signal) => dataPlaneFetch({
|
|
982
2750
|
...variables,
|
983
2751
|
signal
|
984
2752
|
});
|
985
|
-
const updateColumn = (variables, signal) => dataPlaneFetch({
|
2753
|
+
const updateColumn = (variables, signal) => dataPlaneFetch({
|
2754
|
+
url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
|
2755
|
+
method: "patch",
|
2756
|
+
...variables,
|
2757
|
+
signal
|
2758
|
+
});
|
986
2759
|
const deleteColumn = (variables, signal) => dataPlaneFetch({
|
987
2760
|
url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
|
988
2761
|
method: "delete",
|
989
2762
|
...variables,
|
990
2763
|
signal
|
991
2764
|
});
|
992
|
-
const branchTransaction = (variables, signal) => dataPlaneFetch({
|
993
|
-
|
2765
|
+
const branchTransaction = (variables, signal) => dataPlaneFetch({
|
2766
|
+
url: "/db/{dbBranchName}/transaction",
|
2767
|
+
method: "post",
|
2768
|
+
...variables,
|
2769
|
+
signal
|
2770
|
+
});
|
2771
|
+
const insertRecord = (variables, signal) => dataPlaneFetch({
|
2772
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data",
|
2773
|
+
method: "post",
|
2774
|
+
...variables,
|
2775
|
+
signal
|
2776
|
+
});
|
994
2777
|
const getFileItem = (variables, signal) => dataPlaneFetch({
|
995
2778
|
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}/column/{columnName}/file/{fileId}",
|
996
2779
|
method: "get",
|
@@ -1033,11 +2816,36 @@ const getRecord = (variables, signal) => dataPlaneFetch({
|
|
1033
2816
|
...variables,
|
1034
2817
|
signal
|
1035
2818
|
});
|
1036
|
-
const insertRecordWithID = (variables, signal) => dataPlaneFetch({
|
1037
|
-
|
1038
|
-
|
1039
|
-
|
1040
|
-
|
2819
|
+
const insertRecordWithID = (variables, signal) => dataPlaneFetch({
|
2820
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2821
|
+
method: "put",
|
2822
|
+
...variables,
|
2823
|
+
signal
|
2824
|
+
});
|
2825
|
+
const updateRecordWithID = (variables, signal) => dataPlaneFetch({
|
2826
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2827
|
+
method: "patch",
|
2828
|
+
...variables,
|
2829
|
+
signal
|
2830
|
+
});
|
2831
|
+
const upsertRecordWithID = (variables, signal) => dataPlaneFetch({
|
2832
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2833
|
+
method: "post",
|
2834
|
+
...variables,
|
2835
|
+
signal
|
2836
|
+
});
|
2837
|
+
const deleteRecord = (variables, signal) => dataPlaneFetch({
|
2838
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2839
|
+
method: "delete",
|
2840
|
+
...variables,
|
2841
|
+
signal
|
2842
|
+
});
|
2843
|
+
const bulkInsertTableRecords = (variables, signal) => dataPlaneFetch({
|
2844
|
+
url: "/db/{dbBranchName}/tables/{tableName}/bulk",
|
2845
|
+
method: "post",
|
2846
|
+
...variables,
|
2847
|
+
signal
|
2848
|
+
});
|
1041
2849
|
const queryTable = (variables, signal) => dataPlaneFetch({
|
1042
2850
|
url: "/db/{dbBranchName}/tables/{tableName}/query",
|
1043
2851
|
method: "post",
|
@@ -1056,16 +2864,36 @@ const searchTable = (variables, signal) => dataPlaneFetch({
|
|
1056
2864
|
...variables,
|
1057
2865
|
signal
|
1058
2866
|
});
|
1059
|
-
const vectorSearchTable = (variables, signal) => dataPlaneFetch({
|
2867
|
+
const vectorSearchTable = (variables, signal) => dataPlaneFetch({
|
2868
|
+
url: "/db/{dbBranchName}/tables/{tableName}/vectorSearch",
|
2869
|
+
method: "post",
|
2870
|
+
...variables,
|
2871
|
+
signal
|
2872
|
+
});
|
1060
2873
|
const askTable = (variables, signal) => dataPlaneFetch({
|
1061
2874
|
url: "/db/{dbBranchName}/tables/{tableName}/ask",
|
1062
2875
|
method: "post",
|
1063
2876
|
...variables,
|
1064
2877
|
signal
|
1065
2878
|
});
|
1066
|
-
const askTableSession = (variables, signal) => dataPlaneFetch({
|
1067
|
-
|
1068
|
-
|
2879
|
+
const askTableSession = (variables, signal) => dataPlaneFetch({
|
2880
|
+
url: "/db/{dbBranchName}/tables/{tableName}/ask/{sessionId}",
|
2881
|
+
method: "post",
|
2882
|
+
...variables,
|
2883
|
+
signal
|
2884
|
+
});
|
2885
|
+
const summarizeTable = (variables, signal) => dataPlaneFetch({
|
2886
|
+
url: "/db/{dbBranchName}/tables/{tableName}/summarize",
|
2887
|
+
method: "post",
|
2888
|
+
...variables,
|
2889
|
+
signal
|
2890
|
+
});
|
2891
|
+
const aggregateTable = (variables, signal) => dataPlaneFetch({
|
2892
|
+
url: "/db/{dbBranchName}/tables/{tableName}/aggregate",
|
2893
|
+
method: "post",
|
2894
|
+
...variables,
|
2895
|
+
signal
|
2896
|
+
});
|
1069
2897
|
const fileAccess = (variables, signal) => dataPlaneFetch({
|
1070
2898
|
url: "/file/{fileId}",
|
1071
2899
|
method: "get",
|
@@ -1087,7 +2915,9 @@ const sqlQuery = (variables, signal) => dataPlaneFetch({
|
|
1087
2915
|
const operationsByTag$2 = {
|
1088
2916
|
migrations: {
|
1089
2917
|
applyMigration,
|
2918
|
+
startMigration,
|
1090
2919
|
adaptTable,
|
2920
|
+
adaptAllTables,
|
1091
2921
|
getBranchMigrationJobStatus,
|
1092
2922
|
getMigrationJobStatus,
|
1093
2923
|
getMigrationHistory,
|
@@ -1150,7 +2980,16 @@ const operationsByTag$2 = {
|
|
1150
2980
|
deleteRecord,
|
1151
2981
|
bulkInsertTableRecords
|
1152
2982
|
},
|
1153
|
-
files: {
|
2983
|
+
files: {
|
2984
|
+
getFileItem,
|
2985
|
+
putFileItem,
|
2986
|
+
deleteFileItem,
|
2987
|
+
getFile,
|
2988
|
+
putFile,
|
2989
|
+
deleteFile,
|
2990
|
+
fileAccess,
|
2991
|
+
fileUpload
|
2992
|
+
},
|
1154
2993
|
searchAndFilter: {
|
1155
2994
|
queryTable,
|
1156
2995
|
searchBranch,
|
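The `operationsByTag$2` object being edited here groups the endpoint constants by API tag (`migrations`, `files`, `searchAndFilter`, ...); further down it is merged with the control-plane map via `deepMerge(operationsByTag$2, operationsByTag$1)`. A tiny sketch of that grouping-and-merging idea, with a plain one-level merge standing in for the bundle's `deepMerge`:

// Sketch: group operations by tag, then merge data-plane and control-plane maps.
type OpMap = { [tag: string]: { [op: string]: unknown } };
const dataPlaneOps: OpMap = { migrations: { applyMigration: () => {} } };
const controlPlaneOps: OpMap = { workspaces: { getWorkspacesList: () => {} } };

const merge = (a: OpMap, b: OpMap): OpMap => {
  const out: OpMap = { ...a };
  for (const [tag, ops] of Object.entries(b)) out[tag] = { ...out[tag], ...ops };
  return out;
};
console.log(Object.keys(merge(dataPlaneOps, controlPlaneOps))); // [ 'migrations', 'workspaces' ]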
@@ -1228,7 +3067,12 @@ const deleteOAuthAccessToken = (variables, signal) => controlPlaneFetch({
|
|
1228
3067
|
...variables,
|
1229
3068
|
signal
|
1230
3069
|
});
|
1231
|
-
const updateOAuthAccessToken = (variables, signal) => controlPlaneFetch({
|
3070
|
+
const updateOAuthAccessToken = (variables, signal) => controlPlaneFetch({
|
3071
|
+
url: "/user/oauth/tokens/{token}",
|
3072
|
+
method: "patch",
|
3073
|
+
...variables,
|
3074
|
+
signal
|
3075
|
+
});
|
1232
3076
|
const getWorkspacesList = (variables, signal) => controlPlaneFetch({
|
1233
3077
|
url: "/workspaces",
|
1234
3078
|
method: "get",
|
@@ -1259,49 +3103,150 @@ const deleteWorkspace = (variables, signal) => controlPlaneFetch({
|
|
1259
3103
|
...variables,
|
1260
3104
|
signal
|
1261
3105
|
});
|
1262
|
-
const getWorkspaceSettings = (variables, signal) => controlPlaneFetch({
|
1263
|
-
|
1264
|
-
|
1265
|
-
|
3106
|
+
const getWorkspaceSettings = (variables, signal) => controlPlaneFetch({
|
3107
|
+
url: "/workspaces/{workspaceId}/settings",
|
3108
|
+
method: "get",
|
3109
|
+
...variables,
|
3110
|
+
signal
|
3111
|
+
});
|
3112
|
+
const updateWorkspaceSettings = (variables, signal) => controlPlaneFetch({
|
3113
|
+
url: "/workspaces/{workspaceId}/settings",
|
3114
|
+
method: "patch",
|
3115
|
+
...variables,
|
3116
|
+
signal
|
3117
|
+
});
|
3118
|
+
const getWorkspaceMembersList = (variables, signal) => controlPlaneFetch({
|
3119
|
+
url: "/workspaces/{workspaceId}/members",
|
3120
|
+
method: "get",
|
3121
|
+
...variables,
|
3122
|
+
signal
|
3123
|
+
});
|
3124
|
+
const updateWorkspaceMemberRole = (variables, signal) => controlPlaneFetch({
|
3125
|
+
url: "/workspaces/{workspaceId}/members/{userId}",
|
3126
|
+
method: "put",
|
3127
|
+
...variables,
|
3128
|
+
signal
|
3129
|
+
});
|
1266
3130
|
const removeWorkspaceMember = (variables, signal) => controlPlaneFetch({
|
1267
3131
|
url: "/workspaces/{workspaceId}/members/{userId}",
|
1268
3132
|
method: "delete",
|
1269
3133
|
...variables,
|
1270
3134
|
signal
|
1271
3135
|
});
|
1272
|
-
const inviteWorkspaceMember = (variables, signal) => controlPlaneFetch({
|
1273
|
-
|
1274
|
-
|
1275
|
-
|
1276
|
-
|
1277
|
-
|
1278
|
-
const
|
3136
|
+
const inviteWorkspaceMember = (variables, signal) => controlPlaneFetch({
|
3137
|
+
url: "/workspaces/{workspaceId}/invites",
|
3138
|
+
method: "post",
|
3139
|
+
...variables,
|
3140
|
+
signal
|
3141
|
+
});
|
3142
|
+
const updateWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3143
|
+
url: "/workspaces/{workspaceId}/invites/{inviteId}",
|
3144
|
+
method: "patch",
|
3145
|
+
...variables,
|
3146
|
+
signal
|
3147
|
+
});
|
3148
|
+
const cancelWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3149
|
+
url: "/workspaces/{workspaceId}/invites/{inviteId}",
|
3150
|
+
method: "delete",
|
3151
|
+
...variables,
|
3152
|
+
signal
|
3153
|
+
});
|
3154
|
+
const acceptWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3155
|
+
url: "/workspaces/{workspaceId}/invites/{inviteKey}/accept",
|
3156
|
+
method: "post",
|
3157
|
+
...variables,
|
3158
|
+
signal
|
3159
|
+
});
|
3160
|
+
const resendWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3161
|
+
url: "/workspaces/{workspaceId}/invites/{inviteId}/resend",
|
3162
|
+
method: "post",
|
3163
|
+
...variables,
|
3164
|
+
signal
|
3165
|
+
});
|
3166
|
+
const listClusters = (variables, signal) => controlPlaneFetch({
|
3167
|
+
url: "/workspaces/{workspaceId}/clusters",
|
3168
|
+
method: "get",
|
3169
|
+
...variables,
|
3170
|
+
signal
|
3171
|
+
});
|
3172
|
+
const createCluster = (variables, signal) => controlPlaneFetch({
|
3173
|
+
url: "/workspaces/{workspaceId}/clusters",
|
3174
|
+
method: "post",
|
3175
|
+
...variables,
|
3176
|
+
signal
|
3177
|
+
});
|
1279
3178
|
const getCluster = (variables, signal) => controlPlaneFetch({
|
1280
3179
|
url: "/workspaces/{workspaceId}/clusters/{clusterId}",
|
1281
3180
|
method: "get",
|
1282
3181
|
...variables,
|
1283
3182
|
signal
|
1284
3183
|
});
|
1285
|
-
const updateCluster = (variables, signal) => controlPlaneFetch({
|
3184
|
+
const updateCluster = (variables, signal) => controlPlaneFetch({
|
3185
|
+
url: "/workspaces/{workspaceId}/clusters/{clusterId}",
|
3186
|
+
method: "patch",
|
3187
|
+
...variables,
|
3188
|
+
signal
|
3189
|
+
});
|
3190
|
+
const deleteCluster = (variables, signal) => controlPlaneFetch({
|
3191
|
+
url: "/workspaces/{workspaceId}/clusters/{clusterId}",
|
3192
|
+
method: "delete",
|
3193
|
+
...variables,
|
3194
|
+
signal
|
3195
|
+
});
|
1286
3196
|
const getDatabaseList = (variables, signal) => controlPlaneFetch({
|
1287
3197
|
url: "/workspaces/{workspaceId}/dbs",
|
1288
3198
|
method: "get",
|
1289
3199
|
...variables,
|
1290
3200
|
signal
|
1291
3201
|
});
|
1292
|
-
const createDatabase = (variables, signal) => controlPlaneFetch({
|
3202
|
+
const createDatabase = (variables, signal) => controlPlaneFetch({
|
3203
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
3204
|
+
method: "put",
|
3205
|
+
...variables,
|
3206
|
+
signal
|
3207
|
+
});
|
1293
3208
|
const deleteDatabase = (variables, signal) => controlPlaneFetch({
|
1294
3209
|
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
1295
3210
|
method: "delete",
|
1296
3211
|
...variables,
|
1297
3212
|
signal
|
1298
3213
|
});
|
1299
|
-
const getDatabaseMetadata = (variables, signal) => controlPlaneFetch({
|
1300
|
-
|
1301
|
-
|
1302
|
-
|
1303
|
-
|
1304
|
-
|
3214
|
+
const getDatabaseMetadata = (variables, signal) => controlPlaneFetch({
|
3215
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
3216
|
+
method: "get",
|
3217
|
+
...variables,
|
3218
|
+
signal
|
3219
|
+
});
|
3220
|
+
const updateDatabaseMetadata = (variables, signal) => controlPlaneFetch({
|
3221
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
3222
|
+
method: "patch",
|
3223
|
+
...variables,
|
3224
|
+
signal
|
3225
|
+
});
|
3226
|
+
const renameDatabase = (variables, signal) => controlPlaneFetch({
|
3227
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/rename",
|
3228
|
+
method: "post",
|
3229
|
+
...variables,
|
3230
|
+
signal
|
3231
|
+
});
|
3232
|
+
const getDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
|
3233
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
|
3234
|
+
method: "get",
|
3235
|
+
...variables,
|
3236
|
+
signal
|
3237
|
+
});
|
3238
|
+
const updateDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
|
3239
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
|
3240
|
+
method: "put",
|
3241
|
+
...variables,
|
3242
|
+
signal
|
3243
|
+
});
|
3244
|
+
const deleteDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
|
3245
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
|
3246
|
+
method: "delete",
|
3247
|
+
...variables,
|
3248
|
+
signal
|
3249
|
+
});
|
1305
3250
|
const listRegions = (variables, signal) => controlPlaneFetch({
|
1306
3251
|
url: "/workspaces/{workspaceId}/regions",
|
1307
3252
|
method: "get",
|
@@ -1339,7 +3284,13 @@ const operationsByTag$1 = {
|
|
1339
3284
|
acceptWorkspaceMemberInvite,
|
1340
3285
|
resendWorkspaceMemberInvite
|
1341
3286
|
},
|
1342
|
-
xbcontrolOther: {
|
3287
|
+
xbcontrolOther: {
|
3288
|
+
listClusters,
|
3289
|
+
createCluster,
|
3290
|
+
getCluster,
|
3291
|
+
updateCluster,
|
3292
|
+
deleteCluster
|
3293
|
+
},
|
1343
3294
|
databases: {
|
1344
3295
|
getDatabaseList,
|
1345
3296
|
createDatabase,
|
@@ -1359,7 +3310,7 @@ const operationsByTag = deepMerge(operationsByTag$2, operationsByTag$1);
|
|
1359
3310
|
const buildApiClient = () => class {
|
1360
3311
|
constructor(options = {}) {
|
1361
3312
|
const provider = options.host ?? "production";
|
1362
|
-
const apiKey = options.apiKey
|
3313
|
+
const apiKey = options.apiKey;
|
1363
3314
|
const trace = options.trace ?? defaultTrace;
|
1364
3315
|
const clientID = generateUUID();
|
1365
3316
|
if (!apiKey) {
|
@@ -1426,8 +3377,7 @@ function buildTransformString(transformations) {
|
|
1426
3377
|
).join(",");
|
1427
3378
|
}
|
1428
3379
|
function transformImage(url, ...transformations) {
|
1429
|
-
if (!isDefined(url))
|
1430
|
-
return void 0;
|
3380
|
+
if (!isDefined(url)) return void 0;
|
1431
3381
|
const newTransformations = buildTransformString(transformations);
|
1432
3382
|
const { hostname, pathname, search } = new URL(url);
|
1433
3383
|
const pathParts = pathname.split("/");
|
@@ -1540,8 +3490,7 @@ class XataFile {
|
|
1540
3490
|
}
|
1541
3491
|
}
|
1542
3492
|
const parseInputFileEntry = async (entry) => {
|
1543
|
-
if (!isDefined(entry))
|
1544
|
-
return null;
|
3493
|
+
if (!isDefined(entry)) return null;
|
1545
3494
|
const { id, name, mediaType, base64Content, enablePublicUrl, signedUrlTimeout, uploadUrlTimeout } = await entry;
|
1546
3495
|
return compactObject({
|
1547
3496
|
id,
|
@@ -1556,24 +3505,19 @@ const parseInputFileEntry = async (entry) => {
|
|
1556
3505
|
};
|
1557
3506
|
|
1558
3507
|
function cleanFilter(filter) {
|
1559
|
-
if (!isDefined(filter))
|
1560
|
-
|
1561
|
-
if (!isObject(filter))
|
1562
|
-
return filter;
|
3508
|
+
if (!isDefined(filter)) return void 0;
|
3509
|
+
if (!isObject(filter)) return filter;
|
1563
3510
|
const values = Object.fromEntries(
|
1564
3511
|
Object.entries(filter).reduce((acc, [key, value]) => {
|
1565
|
-
if (!isDefined(value))
|
1566
|
-
return acc;
|
3512
|
+
if (!isDefined(value)) return acc;
|
1567
3513
|
if (Array.isArray(value)) {
|
1568
3514
|
const clean = value.map((item) => cleanFilter(item)).filter((item) => isDefined(item));
|
1569
|
-
if (clean.length === 0)
|
1570
|
-
return acc;
|
3515
|
+
if (clean.length === 0) return acc;
|
1571
3516
|
return [...acc, [key, clean]];
|
1572
3517
|
}
|
1573
3518
|
if (isObject(value)) {
|
1574
3519
|
const clean = cleanFilter(value);
|
1575
|
-
if (!isDefined(clean))
|
1576
|
-
return acc;
|
3520
|
+
if (!isDefined(clean)) return acc;
|
1577
3521
|
return [...acc, [key, clean]];
|
1578
3522
|
}
|
1579
3523
|
return [...acc, [key, value]];
|
@@ -1583,10 +3527,8 @@ function cleanFilter(filter) {
|
|
1583
3527
|
}
|
1584
3528
|
|
1585
3529
|
function stringifyJson(value) {
|
1586
|
-
if (!isDefined(value))
|
1587
|
-
|
1588
|
-
if (isString(value))
|
1589
|
-
return value;
|
3530
|
+
if (!isDefined(value)) return value;
|
3531
|
+
if (isString(value)) return value;
|
1590
3532
|
try {
|
1591
3533
|
return JSON.stringify(value);
|
1592
3534
|
} catch (e) {
|
@@ -1601,28 +3543,17 @@ function parseJson(value) {
|
|
1601
3543
|
}
|
1602
3544
|
}
|
1603
3545
|
|
1604
|
-
var
|
1605
|
-
|
1606
|
-
throw TypeError("Cannot " + msg);
|
1607
|
-
};
|
1608
|
-
var __privateGet$4 = (obj, member, getter) => {
|
1609
|
-
__accessCheck$5(obj, member, "read from private field");
|
1610
|
-
return getter ? getter.call(obj) : member.get(obj);
|
1611
|
-
};
|
1612
|
-
var __privateAdd$5 = (obj, member, value) => {
|
1613
|
-
if (member.has(obj))
|
1614
|
-
throw TypeError("Cannot add the same private member more than once");
|
1615
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
1616
|
-
};
|
1617
|
-
var __privateSet$3 = (obj, member, value, setter) => {
|
1618
|
-
__accessCheck$5(obj, member, "write to private field");
|
1619
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
1620
|
-
return value;
|
3546
|
+
var __typeError$5 = (msg) => {
|
3547
|
+
throw TypeError(msg);
|
1621
3548
|
};
|
3549
|
+
var __accessCheck$5 = (obj, member, msg) => member.has(obj) || __typeError$5("Cannot " + msg);
|
3550
|
+
var __privateGet$4 = (obj, member, getter) => (__accessCheck$5(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
3551
|
+
var __privateAdd$5 = (obj, member, value) => member.has(obj) ? __typeError$5("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
3552
|
+
var __privateSet$3 = (obj, member, value, setter) => (__accessCheck$5(obj, member, "write to private field"), member.set(obj, value), value);
|
1622
3553
|
var _query, _page;
|
1623
3554
|
class Page {
|
1624
3555
|
constructor(query, meta, records = []) {
|
1625
|
-
__privateAdd$5(this, _query
|
3556
|
+
__privateAdd$5(this, _query);
|
1626
3557
|
__privateSet$3(this, _query, query);
|
1627
3558
|
this.meta = meta;
|
1628
3559
|
this.records = new PageRecordArray(this, records);
|
@@ -1709,7 +3640,7 @@ class RecordArray extends Array {
|
|
1709
3640
|
const _PageRecordArray = class _PageRecordArray extends Array {
|
1710
3641
|
constructor(...args) {
|
1711
3642
|
super(..._PageRecordArray.parseConstructorParams(...args));
|
1712
|
-
__privateAdd$5(this, _page
|
3643
|
+
__privateAdd$5(this, _page);
|
1713
3644
|
__privateSet$3(this, _page, isObject(args[0]?.meta) ? args[0] : { meta: { page: { cursor: "", more: false } }, records: [] });
|
1714
3645
|
}
|
1715
3646
|
static parseConstructorParams(...args) {
|
@@ -1780,34 +3711,20 @@ const _PageRecordArray = class _PageRecordArray extends Array {
|
|
1780
3711
|
_page = new WeakMap();
|
1781
3712
|
let PageRecordArray = _PageRecordArray;
|
1782
3713
|
|
1783
|
-
var
|
1784
|
-
|
1785
|
-
throw TypeError("Cannot " + msg);
|
1786
|
-
};
|
1787
|
-
var __privateGet$3 = (obj, member, getter) => {
|
1788
|
-
__accessCheck$4(obj, member, "read from private field");
|
1789
|
-
return getter ? getter.call(obj) : member.get(obj);
|
1790
|
-
};
|
1791
|
-
var __privateAdd$4 = (obj, member, value) => {
|
1792
|
-
if (member.has(obj))
|
1793
|
-
throw TypeError("Cannot add the same private member more than once");
|
1794
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
1795
|
-
};
|
1796
|
-
var __privateSet$2 = (obj, member, value, setter) => {
|
1797
|
-
__accessCheck$4(obj, member, "write to private field");
|
1798
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
1799
|
-
return value;
|
1800
|
-
};
|
1801
|
-
var __privateMethod$3 = (obj, member, method) => {
|
1802
|
-
__accessCheck$4(obj, member, "access private method");
|
1803
|
-
return method;
|
3714
|
+
var __typeError$4 = (msg) => {
|
3715
|
+
throw TypeError(msg);
|
1804
3716
|
};
|
1805
|
-
var
|
3717
|
+
var __accessCheck$4 = (obj, member, msg) => member.has(obj) || __typeError$4("Cannot " + msg);
|
3718
|
+
var __privateGet$3 = (obj, member, getter) => (__accessCheck$4(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
3719
|
+
var __privateAdd$4 = (obj, member, value) => member.has(obj) ? __typeError$4("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
3720
|
+
var __privateSet$2 = (obj, member, value, setter) => (__accessCheck$4(obj, member, "write to private field"), member.set(obj, value), value);
|
3721
|
+
var __privateMethod$3 = (obj, member, method) => (__accessCheck$4(obj, member, "access private method"), method);
|
3722
|
+
var _table$1, _repository, _data, _Query_instances, cleanFilterConstraint_fn;
|
1806
3723
|
const _Query = class _Query {
|
1807
3724
|
constructor(repository, table, data, rawParent) {
|
1808
|
-
__privateAdd$4(this,
|
1809
|
-
__privateAdd$4(this, _table$1
|
1810
|
-
__privateAdd$4(this, _repository
|
3725
|
+
__privateAdd$4(this, _Query_instances);
|
3726
|
+
__privateAdd$4(this, _table$1);
|
3727
|
+
__privateAdd$4(this, _repository);
|
1811
3728
|
__privateAdd$4(this, _data, { filter: {} });
|
1812
3729
|
// Implements pagination
|
1813
3730
|
this.meta = { page: { cursor: "start", more: true, size: PAGINATION_DEFAULT_SIZE } };
|
@@ -1885,12 +3802,12 @@ const _Query = class _Query {
|
|
1885
3802
|
filter(a, b) {
|
1886
3803
|
if (arguments.length === 1) {
|
1887
3804
|
const constraints = Object.entries(a ?? {}).map(([column, constraint]) => ({
|
1888
|
-
[column]: __privateMethod$3(this,
|
3805
|
+
[column]: __privateMethod$3(this, _Query_instances, cleanFilterConstraint_fn).call(this, column, constraint)
|
1889
3806
|
}));
|
1890
3807
|
const $all = compact([__privateGet$3(this, _data).filter?.$all].flat().concat(constraints));
|
1891
3808
|
return new _Query(__privateGet$3(this, _repository), __privateGet$3(this, _table$1), { filter: { $all } }, __privateGet$3(this, _data));
|
1892
3809
|
} else {
|
1893
|
-
const constraints = isDefined(a) && isDefined(b) ? [{ [a]: __privateMethod$3(this,
|
3810
|
+
const constraints = isDefined(a) && isDefined(b) ? [{ [a]: __privateMethod$3(this, _Query_instances, cleanFilterConstraint_fn).call(this, a, b) }] : void 0;
|
1894
3811
|
const $all = compact([__privateGet$3(this, _data).filter?.$all].flat().concat(constraints));
|
1895
3812
|
return new _Query(__privateGet$3(this, _repository), __privateGet$3(this, _table$1), { filter: { $all } }, __privateGet$3(this, _data));
|
1896
3813
|
}
|
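filter() above now reaches cleanFilterConstraint_fn through the shared _Query_instances brand; the public call shapes are unchanged. For reference (table, column and constraint values below are illustrative):

// A single constraints object...
const adults = await xata.db.users.filter({ age: { $ge: 18 }, team: "core" }).getMany();

// ...or a column/value pair; successive calls are merged into one $all clause.
const active = await xata.db.users
  .filter("status", "active")
  .filter("age", { $ge: 18 })
  .getMany();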
@@ -1969,8 +3886,7 @@ const _Query = class _Query {
|
|
1969
3886
|
}
|
1970
3887
|
async getFirstOrThrow(options = {}) {
|
1971
3888
|
const records = await this.getMany({ ...options, pagination: { size: 1 } });
|
1972
|
-
if (records[0] === void 0)
|
1973
|
-
throw new Error("No results found.");
|
3889
|
+
if (records[0] === void 0) throw new Error("No results found.");
|
1974
3890
|
return records[0];
|
1975
3891
|
}
|
1976
3892
|
async summarize(params = {}) {
|
@@ -2025,7 +3941,7 @@ const _Query = class _Query {
|
|
2025
3941
|
_table$1 = new WeakMap();
|
2026
3942
|
_repository = new WeakMap();
|
2027
3943
|
_data = new WeakMap();
|
2028
|
-
|
3944
|
+
_Query_instances = new WeakSet();
|
2029
3945
|
cleanFilterConstraint_fn = function(column, value) {
|
2030
3946
|
const columnType = __privateGet$3(this, _table$1).schema?.columns.find(({ name }) => name === column)?.type;
|
2031
3947
|
if (columnType === "multiple" && (isString(value) || isStringArray(value))) {
|
@@ -2086,8 +4002,7 @@ function isSortFilterString(value) {
|
|
2086
4002
|
}
|
2087
4003
|
function isSortFilterBase(filter) {
|
2088
4004
|
return isObject(filter) && Object.entries(filter).every(([key, value]) => {
|
2089
|
-
if (key === "*")
|
2090
|
-
return value === "random";
|
4005
|
+
if (key === "*") return value === "random";
|
2091
4006
|
return value === "asc" || value === "desc";
|
2092
4007
|
});
|
2093
4008
|
}
|
@@ -2108,29 +4023,15 @@ function buildSortFilter(filter) {
|
|
2108
4023
|
}
|
2109
4024
|
}
|
2110
4025
|
|
2111
|
-
var
|
2112
|
-
|
2113
|
-
throw TypeError("Cannot " + msg);
|
2114
|
-
};
|
2115
|
-
var __privateGet$2 = (obj, member, getter) => {
|
2116
|
-
__accessCheck$3(obj, member, "read from private field");
|
2117
|
-
return getter ? getter.call(obj) : member.get(obj);
|
2118
|
-
};
|
2119
|
-
var __privateAdd$3 = (obj, member, value) => {
|
2120
|
-
if (member.has(obj))
|
2121
|
-
throw TypeError("Cannot add the same private member more than once");
|
2122
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
4026
|
+
var __typeError$3 = (msg) => {
|
4027
|
+
throw TypeError(msg);
|
2123
4028
|
};
|
2124
|
-
var
|
2125
|
-
|
2126
|
-
|
2127
|
-
|
2128
|
-
|
2129
|
-
var
|
2130
|
-
__accessCheck$3(obj, member, "access private method");
|
2131
|
-
return method;
|
2132
|
-
};
|
2133
|
-
var _table, _getFetchProps, _db, _schemaTables, _trace, _insertRecordWithoutId, insertRecordWithoutId_fn, _insertRecordWithId, insertRecordWithId_fn, _insertRecords, insertRecords_fn, _updateRecordWithID, updateRecordWithID_fn, _updateRecords, updateRecords_fn, _upsertRecordWithID, upsertRecordWithID_fn, _deleteRecord, deleteRecord_fn, _deleteRecords, deleteRecords_fn, _getSchemaTables, getSchemaTables_fn, _transformObjectToApi, transformObjectToApi_fn;
|
4029
|
+
var __accessCheck$3 = (obj, member, msg) => member.has(obj) || __typeError$3("Cannot " + msg);
|
4030
|
+
var __privateGet$2 = (obj, member, getter) => (__accessCheck$3(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
4031
|
+
var __privateAdd$3 = (obj, member, value) => member.has(obj) ? __typeError$3("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
4032
|
+
var __privateSet$1 = (obj, member, value, setter) => (__accessCheck$3(obj, member, "write to private field"), member.set(obj, value), value);
|
4033
|
+
var __privateMethod$2 = (obj, member, method) => (__accessCheck$3(obj, member, "access private method"), method);
|
4034
|
+
var _table, _getFetchProps, _db, _schemaTables, _trace, _RestRepository_instances, insertRecordWithoutId_fn, insertRecordWithId_fn, insertRecords_fn, updateRecordWithID_fn, updateRecords_fn, upsertRecordWithID_fn, deleteRecord_fn, deleteRecords_fn, getSchemaTables_fn, transformObjectToApi_fn;
|
2134
4035
|
const BULK_OPERATION_MAX_SIZE = 1e3;
|
2135
4036
|
class Repository extends Query {
|
2136
4037
|
}
|
@@ -2141,21 +4042,12 @@ class RestRepository extends Query {
|
|
2141
4042
|
{ name: options.table, schema: options.schemaTables?.find((table) => table.name === options.table) },
|
2142
4043
|
{}
|
2143
4044
|
);
|
2144
|
-
__privateAdd$3(this,
|
2145
|
-
__privateAdd$3(this,
|
2146
|
-
__privateAdd$3(this,
|
2147
|
-
__privateAdd$3(this,
|
2148
|
-
__privateAdd$3(this,
|
2149
|
-
__privateAdd$3(this,
|
2150
|
-
__privateAdd$3(this, _deleteRecord);
|
2151
|
-
__privateAdd$3(this, _deleteRecords);
|
2152
|
-
__privateAdd$3(this, _getSchemaTables);
|
2153
|
-
__privateAdd$3(this, _transformObjectToApi);
|
2154
|
-
__privateAdd$3(this, _table, void 0);
|
2155
|
-
__privateAdd$3(this, _getFetchProps, void 0);
|
2156
|
-
__privateAdd$3(this, _db, void 0);
|
2157
|
-
__privateAdd$3(this, _schemaTables, void 0);
|
2158
|
-
__privateAdd$3(this, _trace, void 0);
|
4045
|
+
__privateAdd$3(this, _RestRepository_instances);
|
4046
|
+
__privateAdd$3(this, _table);
|
4047
|
+
__privateAdd$3(this, _getFetchProps);
|
4048
|
+
__privateAdd$3(this, _db);
|
4049
|
+
__privateAdd$3(this, _schemaTables);
|
4050
|
+
__privateAdd$3(this, _trace);
|
2159
4051
|
__privateSet$1(this, _table, options.table);
|
2160
4052
|
__privateSet$1(this, _db, options.db);
|
2161
4053
|
__privateSet$1(this, _schemaTables, options.schemaTables);
|
@@ -2174,31 +4066,28 @@ class RestRepository extends Query {
|
|
2174
4066
|
return __privateGet$2(this, _trace).call(this, "create", async () => {
|
2175
4067
|
const ifVersion = parseIfVersion(b, c, d);
|
2176
4068
|
if (Array.isArray(a)) {
|
2177
|
-
if (a.length === 0)
|
2178
|
-
|
2179
|
-
const ids = await __privateMethod$2(this, _insertRecords, insertRecords_fn).call(this, a, { ifVersion, createOnly: true });
|
4069
|
+
if (a.length === 0) return [];
|
4070
|
+
const ids = await __privateMethod$2(this, _RestRepository_instances, insertRecords_fn).call(this, a, { ifVersion, createOnly: true });
|
2180
4071
|
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
2181
4072
|
const result = await this.read(ids, columns);
|
2182
4073
|
return result;
|
2183
4074
|
}
|
2184
4075
|
if (isString(a) && isObject(b)) {
|
2185
|
-
if (a === "")
|
2186
|
-
throw new Error("The id can't be empty");
|
4076
|
+
if (a === "") throw new Error("The id can't be empty");
|
2187
4077
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2188
|
-
return await __privateMethod$2(this,
|
4078
|
+
return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: true, ifVersion });
|
2189
4079
|
}
|
2190
4080
|
if (isObject(a) && isString(a.xata_id)) {
|
2191
|
-
if (a.xata_id === "")
|
2192
|
-
throw new Error("The id can't be empty");
|
4081
|
+
if (a.xata_id === "") throw new Error("The id can't be empty");
|
2193
4082
|
const columns = isValidSelectableColumns(b) ? b : void 0;
|
2194
|
-
return await __privateMethod$2(this,
|
4083
|
+
return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, {
|
2195
4084
|
createOnly: true,
|
2196
4085
|
ifVersion
|
2197
4086
|
});
|
2198
4087
|
}
|
2199
4088
|
if (isObject(a)) {
|
2200
4089
|
const columns = isValidSelectableColumns(b) ? b : void 0;
|
2201
|
-
return __privateMethod$2(this,
|
4090
|
+
return __privateMethod$2(this, _RestRepository_instances, insertRecordWithoutId_fn).call(this, a, columns);
|
2202
4091
|
}
|
2203
4092
|
throw new Error("Invalid arguments for create method");
|
2204
4093
|
});
|
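The create() rewrite above is mechanical (per-method WeakSets replaced by the _RestRepository_instances brand, guards compacted); the accepted argument shapes stay the same. Illustrative calls, assuming a generated users table:

// Single record, id generated by Xata.
const one = await xata.db.users.create({ name: "Ada" });

// Explicit, non-empty id with create-only semantics.
const two = await xata.db.users.create("usr_1", { name: "Grace" });

// Bulk create; an empty array short-circuits to [].
const many = await xata.db.users.create([{ name: "Alan" }, { name: "Edsger" }]);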
@@ -2207,8 +4096,7 @@ class RestRepository extends Query {
|
|
2207
4096
|
return __privateGet$2(this, _trace).call(this, "read", async () => {
|
2208
4097
|
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
2209
4098
|
if (Array.isArray(a)) {
|
2210
|
-
if (a.length === 0)
|
2211
|
-
return [];
|
4099
|
+
if (a.length === 0) return [];
|
2212
4100
|
const ids = a.map((item) => extractId(item));
|
2213
4101
|
const finalObjects = await this.getAll({ filter: { xata_id: { $any: compact(ids) } }, columns });
|
2214
4102
|
const dictionary = finalObjects.reduce((acc, object) => {
|
@@ -2231,7 +4119,7 @@ class RestRepository extends Query {
|
|
2231
4119
|
queryParams: { columns },
|
2232
4120
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2233
4121
|
});
|
2234
|
-
const schemaTables = await __privateMethod$2(this,
|
4122
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2235
4123
|
return initObject(
|
2236
4124
|
__privateGet$2(this, _db),
|
2237
4125
|
schemaTables,
|
@@ -2272,11 +4160,10 @@ class RestRepository extends Query {
|
|
2272
4160
|
return __privateGet$2(this, _trace).call(this, "update", async () => {
|
2273
4161
|
const ifVersion = parseIfVersion(b, c, d);
|
2274
4162
|
if (Array.isArray(a)) {
|
2275
|
-
if (a.length === 0)
|
2276
|
-
return [];
|
4163
|
+
if (a.length === 0) return [];
|
2277
4164
|
const existing = await this.read(a, ["xata_id"]);
|
2278
4165
|
const updates = a.filter((_item, index) => existing[index] !== null);
|
2279
|
-
await __privateMethod$2(this,
|
4166
|
+
await __privateMethod$2(this, _RestRepository_instances, updateRecords_fn).call(this, updates, {
|
2280
4167
|
ifVersion,
|
2281
4168
|
upsert: false
|
2282
4169
|
});
|
@@ -2287,15 +4174,14 @@ class RestRepository extends Query {
|
|
2287
4174
|
try {
|
2288
4175
|
if (isString(a) && isObject(b)) {
|
2289
4176
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2290
|
-
return await __privateMethod$2(this,
|
4177
|
+
return await __privateMethod$2(this, _RestRepository_instances, updateRecordWithID_fn).call(this, a, b, columns, { ifVersion });
|
2291
4178
|
}
|
2292
4179
|
if (isObject(a) && isString(a.xata_id)) {
|
2293
4180
|
const columns = isValidSelectableColumns(b) ? b : void 0;
|
2294
|
-
return await __privateMethod$2(this,
|
4181
|
+
return await __privateMethod$2(this, _RestRepository_instances, updateRecordWithID_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, { ifVersion });
|
2295
4182
|
}
|
2296
4183
|
} catch (error) {
|
2297
|
-
if (error.status === 422)
|
2298
|
-
return null;
|
4184
|
+
if (error.status === 422) return null;
|
2299
4185
|
throw error;
|
2300
4186
|
}
|
2301
4187
|
throw new Error("Invalid arguments for update method");
|
@@ -2324,9 +4210,8 @@ class RestRepository extends Query {
|
|
2324
4210
|
return __privateGet$2(this, _trace).call(this, "createOrUpdate", async () => {
|
2325
4211
|
const ifVersion = parseIfVersion(b, c, d);
|
2326
4212
|
if (Array.isArray(a)) {
|
2327
|
-
if (a.length === 0)
|
2328
|
-
|
2329
|
-
await __privateMethod$2(this, _updateRecords, updateRecords_fn).call(this, a, {
|
4213
|
+
if (a.length === 0) return [];
|
4214
|
+
await __privateMethod$2(this, _RestRepository_instances, updateRecords_fn).call(this, a, {
|
2330
4215
|
ifVersion,
|
2331
4216
|
upsert: true
|
2332
4217
|
});
|
@@ -2335,16 +4220,14 @@ class RestRepository extends Query {
|
|
2335
4220
|
return result;
|
2336
4221
|
}
|
2337
4222
|
if (isString(a) && isObject(b)) {
|
2338
|
-
if (a === "")
|
2339
|
-
throw new Error("The id can't be empty");
|
4223
|
+
if (a === "") throw new Error("The id can't be empty");
|
2340
4224
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2341
|
-
return await __privateMethod$2(this,
|
4225
|
+
return await __privateMethod$2(this, _RestRepository_instances, upsertRecordWithID_fn).call(this, a, b, columns, { ifVersion });
|
2342
4226
|
}
|
2343
4227
|
if (isObject(a) && isString(a.xata_id)) {
|
2344
|
-
if (a.xata_id === "")
|
2345
|
-
throw new Error("The id can't be empty");
|
4228
|
+
if (a.xata_id === "") throw new Error("The id can't be empty");
|
2346
4229
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2347
|
-
return await __privateMethod$2(this,
|
4230
|
+
return await __privateMethod$2(this, _RestRepository_instances, upsertRecordWithID_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, { ifVersion });
|
2348
4231
|
}
|
2349
4232
|
if (!isDefined(a) && isObject(b)) {
|
2350
4233
|
return await this.create(b, c);
|
@@ -2359,24 +4242,21 @@ class RestRepository extends Query {
|
|
2359
4242
|
return __privateGet$2(this, _trace).call(this, "createOrReplace", async () => {
|
2360
4243
|
const ifVersion = parseIfVersion(b, c, d);
|
2361
4244
|
if (Array.isArray(a)) {
|
2362
|
-
if (a.length === 0)
|
2363
|
-
|
2364
|
-
const ids = await __privateMethod$2(this, _insertRecords, insertRecords_fn).call(this, a, { ifVersion, createOnly: false });
|
4245
|
+
if (a.length === 0) return [];
|
4246
|
+
const ids = await __privateMethod$2(this, _RestRepository_instances, insertRecords_fn).call(this, a, { ifVersion, createOnly: false });
|
2365
4247
|
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
2366
4248
|
const result = await this.read(ids, columns);
|
2367
4249
|
return result;
|
2368
4250
|
}
|
2369
4251
|
if (isString(a) && isObject(b)) {
|
2370
|
-
if (a === "")
|
2371
|
-
throw new Error("The id can't be empty");
|
4252
|
+
if (a === "") throw new Error("The id can't be empty");
|
2372
4253
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2373
|
-
return await __privateMethod$2(this,
|
4254
|
+
return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: false, ifVersion });
|
2374
4255
|
}
|
2375
4256
|
if (isObject(a) && isString(a.xata_id)) {
|
2376
|
-
if (a.xata_id === "")
|
2377
|
-
throw new Error("The id can't be empty");
|
4257
|
+
if (a.xata_id === "") throw new Error("The id can't be empty");
|
2378
4258
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2379
|
-
return await __privateMethod$2(this,
|
4259
|
+
return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, {
|
2380
4260
|
createOnly: false,
|
2381
4261
|
ifVersion
|
2382
4262
|
});
|
@@ -2393,25 +4273,22 @@ class RestRepository extends Query {
|
|
2393
4273
|
async delete(a, b) {
|
2394
4274
|
return __privateGet$2(this, _trace).call(this, "delete", async () => {
|
2395
4275
|
if (Array.isArray(a)) {
|
2396
|
-
if (a.length === 0)
|
2397
|
-
return [];
|
4276
|
+
if (a.length === 0) return [];
|
2398
4277
|
const ids = a.map((o) => {
|
2399
|
-
if (isString(o))
|
2400
|
-
|
2401
|
-
if (isString(o.xata_id))
|
2402
|
-
return o.xata_id;
|
4278
|
+
if (isString(o)) return o;
|
4279
|
+
if (isString(o.xata_id)) return o.xata_id;
|
2403
4280
|
throw new Error("Invalid arguments for delete method");
|
2404
4281
|
});
|
2405
4282
|
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
2406
4283
|
const result = await this.read(a, columns);
|
2407
|
-
await __privateMethod$2(this,
|
4284
|
+
await __privateMethod$2(this, _RestRepository_instances, deleteRecords_fn).call(this, ids);
|
2408
4285
|
return result;
|
2409
4286
|
}
|
2410
4287
|
if (isString(a)) {
|
2411
|
-
return __privateMethod$2(this,
|
4288
|
+
return __privateMethod$2(this, _RestRepository_instances, deleteRecord_fn).call(this, a, b);
|
2412
4289
|
}
|
2413
4290
|
if (isObject(a) && isString(a.xata_id)) {
|
2414
|
-
return __privateMethod$2(this,
|
4291
|
+
return __privateMethod$2(this, _RestRepository_instances, deleteRecord_fn).call(this, a.xata_id, b);
|
2415
4292
|
}
|
2416
4293
|
throw new Error("Invalid arguments for delete method");
|
2417
4294
|
});
|
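delete() keeps accepting an id string, a record carrying xata_id, or an array mixing both, and an empty array still returns [] immediately. Illustrative calls with made-up ids:

await xata.db.users.delete("usr_1");
await xata.db.users.delete({ xata_id: "usr_2" });

// Array form reads the records first (for the return value), then issues a bulk delete.
const removed = await xata.db.users.delete(["usr_3", { xata_id: "usr_4" }]);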
@@ -2455,7 +4332,7 @@ class RestRepository extends Query {
|
|
2455
4332
|
},
|
2456
4333
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2457
4334
|
});
|
2458
|
-
const schemaTables = await __privateMethod$2(this,
|
4335
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2459
4336
|
return {
|
2460
4337
|
records: records.map((item) => initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), item, ["*"])),
|
2461
4338
|
totalCount
|
@@ -2480,7 +4357,7 @@ class RestRepository extends Query {
|
|
2480
4357
|
},
|
2481
4358
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2482
4359
|
});
|
2483
|
-
const schemaTables = await __privateMethod$2(this,
|
4360
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2484
4361
|
return {
|
2485
4362
|
records: records.map((item) => initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), item, ["*"])),
|
2486
4363
|
totalCount
|
@@ -2522,7 +4399,7 @@ class RestRepository extends Query {
|
|
2522
4399
|
fetchOptions: data.fetchOptions,
|
2523
4400
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2524
4401
|
});
|
2525
|
-
const schemaTables = await __privateMethod$2(this,
|
4402
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2526
4403
|
const records = objects.map(
|
2527
4404
|
(record) => initObject(
|
2528
4405
|
__privateGet$2(this, _db),
|
@@ -2556,7 +4433,7 @@ class RestRepository extends Query {
|
|
2556
4433
|
},
|
2557
4434
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2558
4435
|
});
|
2559
|
-
const schemaTables = await __privateMethod$2(this,
|
4436
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2560
4437
|
return {
|
2561
4438
|
...result,
|
2562
4439
|
summaries: result.summaries.map(
|
@@ -2604,9 +4481,9 @@ _getFetchProps = new WeakMap();
|
|
2604
4481
|
_db = new WeakMap();
|
2605
4482
|
_schemaTables = new WeakMap();
|
2606
4483
|
_trace = new WeakMap();
|
2607
|
-
|
4484
|
+
_RestRepository_instances = new WeakSet();
|
2608
4485
|
insertRecordWithoutId_fn = async function(object, columns = ["*"]) {
|
2609
|
-
const record = await __privateMethod$2(this,
|
4486
|
+
const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2610
4487
|
const response = await insertRecord({
|
2611
4488
|
pathParams: {
|
2612
4489
|
workspace: "{workspaceId}",
|
@@ -2618,14 +4495,12 @@ insertRecordWithoutId_fn = async function(object, columns = ["*"]) {
|
|
2618
4495
|
body: record,
|
2619
4496
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2620
4497
|
});
|
2621
|
-
const schemaTables = await __privateMethod$2(this,
|
4498
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2622
4499
|
return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
|
2623
4500
|
};
|
2624
|
-
_insertRecordWithId = new WeakSet();
|
2625
4501
|
insertRecordWithId_fn = async function(recordId, object, columns = ["*"], { createOnly, ifVersion }) {
|
2626
|
-
if (!recordId)
|
2627
|
-
|
2628
|
-
const record = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
|
4502
|
+
if (!recordId) return null;
|
4503
|
+
const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2629
4504
|
const response = await insertRecordWithID({
|
2630
4505
|
pathParams: {
|
2631
4506
|
workspace: "{workspaceId}",
|
@@ -2638,13 +4513,12 @@ insertRecordWithId_fn = async function(recordId, object, columns = ["*"], { crea
|
|
2638
4513
|
queryParams: { createOnly, columns, ifVersion },
|
2639
4514
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2640
4515
|
});
|
2641
|
-
const schemaTables = await __privateMethod$2(this,
|
4516
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2642
4517
|
return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
|
2643
4518
|
};
|
2644
|
-
_insertRecords = new WeakSet();
|
2645
4519
|
insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
|
2646
4520
|
const operations = await promiseMap(objects, async (object) => {
|
2647
|
-
const record = await __privateMethod$2(this,
|
4521
|
+
const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2648
4522
|
return { insert: { table: __privateGet$2(this, _table), record, createOnly, ifVersion } };
|
2649
4523
|
});
|
2650
4524
|
const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
|
@@ -2669,11 +4543,9 @@ insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
|
|
2669
4543
|
}
|
2670
4544
|
return ids;
|
2671
4545
|
};
|
2672
|
-
_updateRecordWithID = new WeakSet();
|
2673
4546
|
updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVersion }) {
|
2674
|
-
if (!recordId)
|
2675
|
-
|
2676
|
-
const { xata_id: _id, ...record } = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
|
4547
|
+
if (!recordId) return null;
|
4548
|
+
const { xata_id: _id, ...record } = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2677
4549
|
try {
|
2678
4550
|
const response = await updateRecordWithID({
|
2679
4551
|
pathParams: {
|
@@ -2687,7 +4559,7 @@ updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
|
|
2687
4559
|
body: record,
|
2688
4560
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2689
4561
|
});
|
2690
|
-
const schemaTables = await __privateMethod$2(this,
|
4562
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2691
4563
|
return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
|
2692
4564
|
} catch (e) {
|
2693
4565
|
if (isObject(e) && e.status === 404) {
|
@@ -2696,10 +4568,9 @@ updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
|
|
2696
4568
|
throw e;
|
2697
4569
|
}
|
2698
4570
|
};
|
2699
|
-
_updateRecords = new WeakSet();
|
2700
4571
|
updateRecords_fn = async function(objects, { ifVersion, upsert }) {
|
2701
4572
|
const operations = await promiseMap(objects, async ({ xata_id, ...object }) => {
|
2702
|
-
const fields = await __privateMethod$2(this,
|
4573
|
+
const fields = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2703
4574
|
return { update: { table: __privateGet$2(this, _table), id: xata_id, ifVersion, upsert, fields } };
|
2704
4575
|
});
|
2705
4576
|
const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
|
@@ -2724,10 +4595,8 @@ updateRecords_fn = async function(objects, { ifVersion, upsert }) {
|
|
2724
4595
|
}
|
2725
4596
|
return ids;
|
2726
4597
|
};
|
2727
|
-
_upsertRecordWithID = new WeakSet();
|
2728
4598
|
upsertRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVersion }) {
|
2729
|
-
if (!recordId)
|
2730
|
-
return null;
|
4599
|
+
if (!recordId) return null;
|
2731
4600
|
const response = await upsertRecordWithID({
|
2732
4601
|
pathParams: {
|
2733
4602
|
workspace: "{workspaceId}",
|
@@ -2740,13 +4609,11 @@ upsertRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
|
|
2740
4609
|
body: object,
|
2741
4610
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2742
4611
|
});
|
2743
|
-
const schemaTables = await __privateMethod$2(this,
|
4612
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2744
4613
|
return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
|
2745
4614
|
};
|
2746
|
-
_deleteRecord = new WeakSet();
|
2747
4615
|
deleteRecord_fn = async function(recordId, columns = ["*"]) {
|
2748
|
-
if (!recordId)
|
2749
|
-
return null;
|
4616
|
+
if (!recordId) return null;
|
2750
4617
|
try {
|
2751
4618
|
const response = await deleteRecord({
|
2752
4619
|
pathParams: {
|
@@ -2759,7 +4626,7 @@ deleteRecord_fn = async function(recordId, columns = ["*"]) {
|
|
2759
4626
|
queryParams: { columns },
|
2760
4627
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2761
4628
|
});
|
2762
|
-
const schemaTables = await __privateMethod$2(this,
|
4629
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2763
4630
|
return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
|
2764
4631
|
} catch (e) {
|
2765
4632
|
if (isObject(e) && e.status === 404) {
|
@@ -2768,7 +4635,6 @@ deleteRecord_fn = async function(recordId, columns = ["*"]) {
|
|
2768
4635
|
throw e;
|
2769
4636
|
}
|
2770
4637
|
};
|
2771
|
-
_deleteRecords = new WeakSet();
|
2772
4638
|
deleteRecords_fn = async function(recordIds) {
|
2773
4639
|
const chunkedOperations = chunk(
|
2774
4640
|
compact(recordIds).map((id) => ({ delete: { table: __privateGet$2(this, _table), id } })),
|
@@ -2786,10 +4652,8 @@ deleteRecords_fn = async function(recordIds) {
|
|
2786
4652
|
});
|
2787
4653
|
}
|
2788
4654
|
};
|
2789
|
-
_getSchemaTables = new WeakSet();
|
2790
4655
|
getSchemaTables_fn = async function() {
|
2791
|
-
if (__privateGet$2(this, _schemaTables))
|
2792
|
-
return __privateGet$2(this, _schemaTables);
|
4656
|
+
if (__privateGet$2(this, _schemaTables)) return __privateGet$2(this, _schemaTables);
|
2793
4657
|
const { schema } = await getBranchDetails({
|
2794
4658
|
pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
|
2795
4659
|
...__privateGet$2(this, _getFetchProps).call(this)
|
@@ -2797,16 +4661,13 @@ getSchemaTables_fn = async function() {
|
|
2797
4661
|
__privateSet$1(this, _schemaTables, schema.tables);
|
2798
4662
|
return schema.tables;
|
2799
4663
|
};
|
2800
|
-
_transformObjectToApi = new WeakSet();
|
2801
4664
|
transformObjectToApi_fn = async function(object) {
|
2802
|
-
const schemaTables = await __privateMethod$2(this,
|
4665
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2803
4666
|
const schema = schemaTables.find((table) => table.name === __privateGet$2(this, _table));
|
2804
|
-
if (!schema)
|
2805
|
-
throw new Error(`Table ${__privateGet$2(this, _table)} not found in schema`);
|
4667
|
+
if (!schema) throw new Error(`Table ${__privateGet$2(this, _table)} not found in schema`);
|
2806
4668
|
const result = {};
|
2807
4669
|
for (const [key, value] of Object.entries(object)) {
|
2808
|
-
if (["xata_version", "xata_createdat", "xata_updatedat"].includes(key))
|
2809
|
-
continue;
|
4670
|
+
if (["xata_version", "xata_createdat", "xata_updatedat"].includes(key)) continue;
|
2810
4671
|
const type = schema.columns.find((column) => column.name === key)?.type;
|
2811
4672
|
switch (type) {
|
2812
4673
|
case "link": {
|
@@ -2836,11 +4697,9 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
|
|
2836
4697
|
const data = {};
|
2837
4698
|
Object.assign(data, { ...object });
|
2838
4699
|
const { columns } = schemaTables.find(({ name }) => name === table) ?? {};
|
2839
|
-
if (!columns)
|
2840
|
-
console.error(`Table ${table} not found in schema`);
|
4700
|
+
if (!columns) console.error(`Table ${table} not found in schema`);
|
2841
4701
|
for (const column of columns ?? []) {
|
2842
|
-
if (!isValidColumn(selectedColumns, column))
|
2843
|
-
continue;
|
4702
|
+
if (!isValidColumn(selectedColumns, column)) continue;
|
2844
4703
|
const value = data[column.name];
|
2845
4704
|
switch (column.type) {
|
2846
4705
|
case "datetime": {
|
@@ -2926,15 +4785,12 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
|
|
2926
4785
|
return record;
|
2927
4786
|
};
|
2928
4787
|
function extractId(value) {
|
2929
|
-
if (isString(value))
|
2930
|
-
|
2931
|
-
if (isObject(value) && isString(value.xata_id))
|
2932
|
-
return value.xata_id;
|
4788
|
+
if (isString(value)) return value;
|
4789
|
+
if (isObject(value) && isString(value.xata_id)) return value.xata_id;
|
2933
4790
|
return void 0;
|
2934
4791
|
}
|
2935
4792
|
function isValidColumn(columns, column) {
|
2936
|
-
if (columns.includes("*"))
|
2937
|
-
return true;
|
4793
|
+
if (columns.includes("*")) return true;
|
2938
4794
|
return columns.filter((item) => isString(item) && item.startsWith(column.name)).length > 0;
|
2939
4795
|
}
|
2940
4796
|
function parseIfVersion(...args) {
|
@@ -2974,19 +4830,12 @@ const includesAll = (value) => ({ $includesAll: value });
|
|
2974
4830
|
const includesNone = (value) => ({ $includesNone: value });
|
2975
4831
|
const includesAny = (value) => ({ $includesAny: value });
|
2976
4832
|
|
2977
|
-
var
|
2978
|
-
|
2979
|
-
throw TypeError("Cannot " + msg);
|
2980
|
-
};
|
2981
|
-
var __privateGet$1 = (obj, member, getter) => {
|
2982
|
-
__accessCheck$2(obj, member, "read from private field");
|
2983
|
-
return getter ? getter.call(obj) : member.get(obj);
|
2984
|
-
};
|
2985
|
-
var __privateAdd$2 = (obj, member, value) => {
|
2986
|
-
if (member.has(obj))
|
2987
|
-
throw TypeError("Cannot add the same private member more than once");
|
2988
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
4833
|
+
var __typeError$2 = (msg) => {
|
4834
|
+
throw TypeError(msg);
|
2989
4835
|
};
|
4836
|
+
var __accessCheck$2 = (obj, member, msg) => member.has(obj) || __typeError$2("Cannot " + msg);
|
4837
|
+
var __privateGet$1 = (obj, member, getter) => (__accessCheck$2(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
4838
|
+
var __privateAdd$2 = (obj, member, value) => member.has(obj) ? __typeError$2("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
2990
4839
|
var _tables;
|
2991
4840
|
class SchemaPlugin extends XataPlugin {
|
2992
4841
|
constructor() {
|
@@ -2998,8 +4847,7 @@ class SchemaPlugin extends XataPlugin {
|
|
2998
4847
|
{},
|
2999
4848
|
{
|
3000
4849
|
get: (_target, table) => {
|
3001
|
-
if (!isString(table))
|
3002
|
-
throw new Error("Invalid table name");
|
4850
|
+
if (!isString(table)) throw new Error("Invalid table name");
|
3003
4851
|
if (__privateGet$1(this, _tables)[table] === void 0) {
|
3004
4852
|
__privateGet$1(this, _tables)[table] = new RestRepository({ db, pluginOptions, table, schemaTables: pluginOptions.tables });
|
3005
4853
|
}
|
@@ -3090,30 +4938,23 @@ function getContentType(file) {
|
|
3090
4938
|
return "application/octet-stream";
|
3091
4939
|
}
|
3092
4940
|
|
3093
|
-
var
|
3094
|
-
|
3095
|
-
throw TypeError("Cannot " + msg);
|
3096
|
-
};
|
3097
|
-
var __privateAdd$1 = (obj, member, value) => {
|
3098
|
-
if (member.has(obj))
|
3099
|
-
throw TypeError("Cannot add the same private member more than once");
|
3100
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
4941
|
+
var __typeError$1 = (msg) => {
|
4942
|
+
throw TypeError(msg);
|
3101
4943
|
};
|
3102
|
-
var
|
3103
|
-
|
3104
|
-
|
3105
|
-
|
3106
|
-
var _search, search_fn;
|
4944
|
+
var __accessCheck$1 = (obj, member, msg) => member.has(obj) || __typeError$1("Cannot " + msg);
|
4945
|
+
var __privateAdd$1 = (obj, member, value) => member.has(obj) ? __typeError$1("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
4946
|
+
var __privateMethod$1 = (obj, member, method) => (__accessCheck$1(obj, member, "access private method"), method);
|
4947
|
+
var _SearchPlugin_instances, search_fn;
|
3107
4948
|
class SearchPlugin extends XataPlugin {
|
3108
4949
|
constructor(db) {
|
3109
4950
|
super();
|
3110
4951
|
this.db = db;
|
3111
|
-
__privateAdd$1(this,
|
4952
|
+
__privateAdd$1(this, _SearchPlugin_instances);
|
3112
4953
|
}
|
3113
4954
|
build(pluginOptions) {
|
3114
4955
|
return {
|
3115
4956
|
all: async (query, options = {}) => {
|
3116
|
-
const { records, totalCount } = await __privateMethod$1(this,
|
4957
|
+
const { records, totalCount } = await __privateMethod$1(this, _SearchPlugin_instances, search_fn).call(this, query, options, pluginOptions);
|
3117
4958
|
return {
|
3118
4959
|
totalCount,
|
3119
4960
|
records: records.map((record) => {
|
@@ -3123,7 +4964,7 @@ class SearchPlugin extends XataPlugin {
|
|
3123
4964
|
};
|
3124
4965
|
},
|
3125
4966
|
byTable: async (query, options = {}) => {
|
3126
|
-
const { records: rawRecords, totalCount } = await __privateMethod$1(this,
|
4967
|
+
const { records: rawRecords, totalCount } = await __privateMethod$1(this, _SearchPlugin_instances, search_fn).call(this, query, options, pluginOptions);
|
3127
4968
|
const records = rawRecords.reduce((acc, record) => {
|
3128
4969
|
const table = record.xata_table;
|
3129
4970
|
const items = acc[table] ?? [];
|
@@ -3135,7 +4976,7 @@ class SearchPlugin extends XataPlugin {
|
|
3135
4976
|
};
|
3136
4977
|
}
|
3137
4978
|
}
|
3138
|
-
|
4979
|
+
_SearchPlugin_instances = new WeakSet();
|
3139
4980
|
search_fn = async function(query, options, pluginOptions) {
|
3140
4981
|
const { tables, fuzziness, highlight, prefix, page } = options ?? {};
|
3141
4982
|
const { records, totalCount } = await searchBranch({
|
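The search plugin changes above are again only the _SearchPlugin_instances brand; all() and byTable() still return totalCount alongside the records. Illustrative usage, with an assumed users table:

const { totalCount, records } = await xata.search.all("ada lovelace", { tables: ["users"], fuzziness: 1 });

const grouped = await xata.search.byTable("ada lovelace");
// grouped.records is keyed by table name, e.g. grouped.records.users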
@@ -3171,8 +5012,7 @@ function arrayString(val) {
|
|
3171
5012
|
return result;
|
3172
5013
|
}
|
3173
5014
|
function prepareValue(value) {
|
3174
|
-
if (!isDefined(value))
|
3175
|
-
return null;
|
5015
|
+
if (!isDefined(value)) return null;
|
3176
5016
|
if (value instanceof Date) {
|
3177
5017
|
return value.toISOString();
|
3178
5018
|
}
|
@@ -3199,8 +5039,8 @@ function prepareParams(param1, param2) {
|
|
3199
5039
|
return { statement, params: param2?.map((value) => prepareValue(value)) };
|
3200
5040
|
}
|
3201
5041
|
if (isObject(param1)) {
|
3202
|
-
const { statement, params, consistency } = param1;
|
3203
|
-
return { statement, params: params?.map((value) => prepareValue(value)), consistency };
|
5042
|
+
const { statement, params, consistency, responseType } = param1;
|
5043
|
+
return { statement, params: params?.map((value) => prepareValue(value)), consistency, responseType };
|
3204
5044
|
}
|
3205
5045
|
throw new Error("Invalid query");
|
3206
5046
|
}
|
@@ -3211,7 +5051,7 @@ class SQLPlugin extends XataPlugin {
|
|
3211
5051
|
if (!isParamsObject(query) && (!isTemplateStringsArray(query) || !Array.isArray(parameters))) {
|
3212
5052
|
throw new Error("Invalid usage of `xata.sql`. Please use it as a tagged template or with an object.");
|
3213
5053
|
}
|
3214
|
-
const { statement, params, consistency } = prepareParams(query, parameters);
|
5054
|
+
const { statement, params, consistency, responseType } = prepareParams(query, parameters);
|
3215
5055
|
const {
|
3216
5056
|
records,
|
3217
5057
|
rows,
|
@@ -3219,7 +5059,7 @@ class SQLPlugin extends XataPlugin {
|
|
3219
5059
|
columns = []
|
3220
5060
|
} = await sqlQuery({
|
3221
5061
|
pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
|
3222
|
-
body: { statement, params, consistency },
|
5062
|
+
body: { statement, params, consistency, responseType },
|
3223
5063
|
...pluginOptions
|
3224
5064
|
});
|
3225
5065
|
return { records, rows, warning, columns };
|
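prepareParams and the SQL plugin now forward a responseType field from the params object all the way into the /sql request body. The accepted values are not visible in this diff; assuming the HTTP API's "json" (object records) and "array" (positional rows) values, usage would look like:

// Object records (existing behaviour).
const { records } = await xata.sql({
  statement: 'SELECT * FROM "users" WHERE name = $1',
  params: ["Ada"]
});

// "array" is assumed to be the value that fills rows/columns instead of records.
const { rows, columns } = await xata.sql({
  statement: 'SELECT name, email FROM "users"',
  responseType: "array"
});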
@@ -3251,8 +5091,7 @@ function buildDomain(host, region) {
|
|
3251
5091
|
function buildConnectionString({ apiKey, workspacesApiUrl, branch }) {
|
3252
5092
|
const url = isString(workspacesApiUrl) ? workspacesApiUrl : workspacesApiUrl("", {});
|
3253
5093
|
const parts = parseWorkspacesUrlParts(url);
|
3254
|
-
if (!parts)
|
3255
|
-
throw new Error("Invalid workspaces URL");
|
5094
|
+
if (!parts) throw new Error("Invalid workspaces URL");
|
3256
5095
|
const { workspace: workspaceSlug, region, database, host } = parts;
|
3257
5096
|
const domain = buildDomain(host, region);
|
3258
5097
|
const workspace = workspaceSlug.split("-").pop();
|
@@ -3277,39 +5116,24 @@ class TransactionPlugin extends XataPlugin {
|
|
3277
5116
|
}
|
3278
5117
|
}
|
3279
5118
|
|
3280
|
-
var
|
3281
|
-
|
3282
|
-
throw TypeError("Cannot " + msg);
|
3283
|
-
};
|
3284
|
-
var __privateGet = (obj, member, getter) => {
|
3285
|
-
__accessCheck(obj, member, "read from private field");
|
3286
|
-
return getter ? getter.call(obj) : member.get(obj);
|
3287
|
-
};
|
3288
|
-
var __privateAdd = (obj, member, value) => {
|
3289
|
-
if (member.has(obj))
|
3290
|
-
throw TypeError("Cannot add the same private member more than once");
|
3291
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
3292
|
-
};
|
3293
|
-
var __privateSet = (obj, member, value, setter) => {
|
3294
|
-
__accessCheck(obj, member, "write to private field");
|
3295
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
3296
|
-
return value;
|
3297
|
-
};
|
3298
|
-
var __privateMethod = (obj, member, method) => {
|
3299
|
-
__accessCheck(obj, member, "access private method");
|
3300
|
-
return method;
|
5119
|
+
var __typeError = (msg) => {
|
5120
|
+
throw TypeError(msg);
|
3301
5121
|
};
|
5122
|
+
var __accessCheck = (obj, member, msg) => member.has(obj) || __typeError("Cannot " + msg);
|
5123
|
+
var __privateGet = (obj, member, getter) => (__accessCheck(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
5124
|
+
var __privateAdd = (obj, member, value) => member.has(obj) ? __typeError("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
5125
|
+
var __privateSet = (obj, member, value, setter) => (__accessCheck(obj, member, "write to private field"), member.set(obj, value), value);
|
5126
|
+
var __privateMethod = (obj, member, method) => (__accessCheck(obj, member, "access private method"), method);
|
3302
5127
|
const buildClient = (plugins) => {
|
3303
|
-
var _options,
|
5128
|
+
var _options, _instances, parseOptions_fn, getFetchProps_fn, _a;
|
3304
5129
|
return _a = class {
|
3305
5130
|
constructor(options = {}, tables) {
|
3306
|
-
__privateAdd(this,
|
3307
|
-
__privateAdd(this,
|
3308
|
-
|
3309
|
-
const safeOptions = __privateMethod(this, _parseOptions, parseOptions_fn).call(this, options);
|
5131
|
+
__privateAdd(this, _instances);
|
5132
|
+
__privateAdd(this, _options);
|
5133
|
+
const safeOptions = __privateMethod(this, _instances, parseOptions_fn).call(this, options);
|
3310
5134
|
__privateSet(this, _options, safeOptions);
|
3311
5135
|
const pluginOptions = {
|
3312
|
-
...__privateMethod(this,
|
5136
|
+
...__privateMethod(this, _instances, getFetchProps_fn).call(this, safeOptions),
|
3313
5137
|
host: safeOptions.host,
|
3314
5138
|
tables,
|
3315
5139
|
branch: safeOptions.branch
|
@@ -3326,8 +5150,7 @@ const buildClient = (plugins) => {
|
|
3326
5150
|
this.sql = sql;
|
3327
5151
|
this.files = files;
|
3328
5152
|
for (const [key, namespace] of Object.entries(plugins ?? {})) {
|
3329
|
-
if (namespace === void 0)
|
3330
|
-
continue;
|
5153
|
+
if (namespace === void 0) continue;
|
3331
5154
|
this[key] = namespace.build(pluginOptions);
|
3332
5155
|
}
|
3333
5156
|
}
|
@@ -3336,8 +5159,8 @@ const buildClient = (plugins) => {
|
|
3336
5159
|
const branch = __privateGet(this, _options).branch;
|
3337
5160
|
return { databaseURL, branch };
|
3338
5161
|
}
|
3339
|
-
}, _options = new WeakMap(),
|
3340
|
-
const enableBrowser = options?.enableBrowser ??
|
5162
|
+
}, _options = new WeakMap(), _instances = new WeakSet(), parseOptions_fn = function(options) {
|
5163
|
+
const enableBrowser = options?.enableBrowser ?? false;
|
3341
5164
|
const isBrowser = typeof window !== "undefined" && typeof Deno === "undefined";
|
3342
5165
|
if (isBrowser && !enableBrowser) {
|
3343
5166
|
throw new Error(
|
@@ -3345,8 +5168,9 @@ const buildClient = (plugins) => {
|
|
3345
5168
|
);
|
3346
5169
|
}
|
3347
5170
|
const fetch = getFetchImplementation(options?.fetch);
|
3348
|
-
const databaseURL = options?.databaseURL
|
3349
|
-
const apiKey = options?.apiKey
|
5171
|
+
const databaseURL = options?.databaseURL;
|
5172
|
+
const apiKey = options?.apiKey;
|
5173
|
+
const branch = options?.branch;
|
3350
5174
|
const trace = options?.trace ?? defaultTrace;
|
3351
5175
|
const clientName = options?.clientName;
|
3352
5176
|
const host = options?.host ?? "production";
|
@@ -3357,25 +5181,8 @@ const buildClient = (plugins) => {
|
|
3357
5181
|
if (!databaseURL) {
|
3358
5182
|
throw new Error("Option databaseURL is required");
|
3359
5183
|
}
|
3360
|
-
|
3361
|
-
|
3362
|
-
const branch = options?.branch || previewBranch || envBranch || "main";
|
3363
|
-
if (!!previewBranch && branch !== previewBranch) {
|
3364
|
-
console.warn(
|
3365
|
-
`Ignoring preview branch ${previewBranch} because branch option was passed to the client constructor with value ${branch}`
|
3366
|
-
);
|
3367
|
-
} else if (!!envBranch && branch !== envBranch) {
|
3368
|
-
console.warn(
|
3369
|
-
`Ignoring branch ${envBranch} because branch option was passed to the client constructor with value ${branch}`
|
3370
|
-
);
|
3371
|
-
} else if (!!previewBranch && !!envBranch && previewBranch !== envBranch) {
|
3372
|
-
console.warn(
|
3373
|
-
`Ignoring preview branch ${previewBranch} and branch ${envBranch} because branch option was passed to the client constructor with value ${branch}`
|
3374
|
-
);
|
3375
|
-
} else if (!previewBranch && !envBranch && options?.branch === void 0) {
|
3376
|
-
console.warn(
|
3377
|
-
`No branch was passed to the client constructor. Using default branch ${branch}. You can set the branch with the environment variable XATA_BRANCH or by passing the branch option to the client constructor.`
|
3378
|
-
);
|
5184
|
+
if (!branch) {
|
5185
|
+
throw new Error("Option branch is required");
|
3379
5186
|
}
|
3380
5187
|
return {
|
3381
5188
|
fetch,
|
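This is the main behavioural change in the client constructor. parseOptions_fn no longer derives the branch from preview-branch or environment variables (the console.warn fallbacks are gone) and no longer defaults to main; databaseURL, apiKey and branch are read straight from the options object, and a missing branch now throws "Option branch is required". An explicit construction therefore looks like this (class and environment variable names are illustrative; generated clients typically inject these values):

const xata = new XataClient({
  apiKey: process.env.XATA_API_KEY,
  databaseURL: "https://my-workspace-abc123.us-east-1.xata.sh/db/mydb",
  branch: process.env.XATA_BRANCH ?? "main" // must now be supplied explicitly
});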
@@ -3389,7 +5196,7 @@ const buildClient = (plugins) => {
|
|
3389
5196
|
clientName,
|
3390
5197
|
xataAgentExtra
|
3391
5198
|
};
|
3392
|
-
},
|
5199
|
+
}, getFetchProps_fn = function({
|
3393
5200
|
fetch,
|
3394
5201
|
apiKey,
|
3395
5202
|
databaseURL,
|
@@ -3430,26 +5237,19 @@ class Serializer {
|
|
3430
5237
|
}
|
3431
5238
|
toJSON(data) {
|
3432
5239
|
function visit(obj) {
|
3433
|
-
if (Array.isArray(obj))
|
3434
|
-
return obj.map(visit);
|
5240
|
+
if (Array.isArray(obj)) return obj.map(visit);
|
3435
5241
|
const type = typeof obj;
|
3436
|
-
if (type === "undefined")
|
3437
|
-
|
3438
|
-
if (
|
3439
|
-
return { [META]: "bigint", [VALUE]: obj.toString() };
|
3440
|
-
if (obj === null || type !== "object")
|
3441
|
-
return obj;
|
5242
|
+
if (type === "undefined") return { [META]: "undefined" };
|
5243
|
+
if (type === "bigint") return { [META]: "bigint", [VALUE]: obj.toString() };
|
5244
|
+
if (obj === null || type !== "object") return obj;
|
3442
5245
|
const constructor = obj.constructor;
|
3443
5246
|
const o = { [META]: constructor.name };
|
3444
5247
|
for (const [key, value] of Object.entries(obj)) {
|
3445
5248
|
o[key] = visit(value);
|
3446
5249
|
}
|
3447
|
-
if (constructor === Date)
|
3448
|
-
|
3449
|
-
if (constructor ===
|
3450
|
-
o[VALUE] = Object.fromEntries(obj);
|
3451
|
-
if (constructor === Set)
|
3452
|
-
o[VALUE] = [...obj];
|
5250
|
+
if (constructor === Date) o[VALUE] = obj.toISOString();
|
5251
|
+
if (constructor === Map) o[VALUE] = Object.fromEntries(obj);
|
5252
|
+
if (constructor === Set) o[VALUE] = [...obj];
|
3453
5253
|
return o;
|
3454
5254
|
}
|
3455
5255
|
return JSON.stringify(visit(data));
|
@@ -3462,16 +5262,11 @@ class Serializer {
|
|
3462
5262
|
if (constructor) {
|
3463
5263
|
return Object.assign(Object.create(constructor.prototype), rest);
|
3464
5264
|
}
|
3465
|
-
if (clazz === "Date")
|
3466
|
-
|
3467
|
-
if (clazz === "
|
3468
|
-
|
3469
|
-
if (clazz === "
|
3470
|
-
return new Map(Object.entries(val));
|
3471
|
-
if (clazz === "bigint")
|
3472
|
-
return BigInt(val);
|
3473
|
-
if (clazz === "undefined")
|
3474
|
-
return void 0;
|
5265
|
+
if (clazz === "Date") return new Date(val);
|
5266
|
+
if (clazz === "Set") return new Set(val);
|
5267
|
+
if (clazz === "Map") return new Map(Object.entries(val));
|
5268
|
+
if (clazz === "bigint") return BigInt(val);
|
5269
|
+
if (clazz === "undefined") return void 0;
|
3475
5270
|
return rest;
|
3476
5271
|
}
|
3477
5272
|
return value;
|
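The Serializer hunks above are formatting only; the class still tags Date, Map, Set, bigint and undefined values with its internal META/VALUE keys so they survive a JSON round trip. Through the exported helpers (deserialize is assumed to be exported alongside the serialize export visible further down):

import { serialize, deserialize } from "@xata.io/client";

const payload = serialize({ when: new Date("2024-01-01"), tags: new Set(["a", "b"]), big: 9007199254740993n });

const restored: any = deserialize(payload);
console.log(restored.when instanceof Date); // true
console.log(restored.tags.has("a"));        // true
console.log(typeof restored.big);           // "bigint"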
@@ -3486,6 +5281,47 @@ const deserialize = (json) => {
|
|
3486
5281
|
return defaultSerializer.fromJSON(json);
|
3487
5282
|
};
|
3488
5283
|
|
5284
|
+
function parseEnvironment(environment) {
|
5285
|
+
try {
|
5286
|
+
if (typeof environment === "function") {
|
5287
|
+
return new Proxy(
|
5288
|
+
{},
|
5289
|
+
{
|
5290
|
+
get(target) {
|
5291
|
+
return environment(target);
|
5292
|
+
}
|
5293
|
+
}
|
5294
|
+
);
|
5295
|
+
}
|
5296
|
+
if (isObject(environment)) {
|
5297
|
+
return environment;
|
5298
|
+
}
|
5299
|
+
} catch (error) {
|
5300
|
+
}
|
5301
|
+
return {};
|
5302
|
+
}
|
5303
|
+
function buildPreviewBranchName({ org, branch }) {
|
5304
|
+
return `preview-${org}-${branch}`;
|
5305
|
+
}
|
5306
|
+
function getDeployPreviewBranch(environment) {
|
5307
|
+
try {
|
5308
|
+
const { deployPreview, deployPreviewBranch, vercelGitCommitRef, vercelGitRepoOwner } = parseEnvironment(environment);
|
5309
|
+
if (deployPreviewBranch) return deployPreviewBranch;
|
5310
|
+
switch (deployPreview) {
|
5311
|
+
case "vercel": {
|
5312
|
+
if (!vercelGitCommitRef || !vercelGitRepoOwner) {
|
5313
|
+
console.warn("XATA_PREVIEW=vercel but VERCEL_GIT_COMMIT_REF or VERCEL_GIT_REPO_OWNER is not valid");
|
5314
|
+
return void 0;
|
5315
|
+
}
|
5316
|
+
return buildPreviewBranchName({ org: vercelGitRepoOwner, branch: vercelGitCommitRef });
|
5317
|
+
}
|
5318
|
+
}
|
5319
|
+
return void 0;
|
5320
|
+
} catch (err) {
|
5321
|
+
return void 0;
|
5322
|
+
}
|
5323
|
+
}
|
5324
|
+
|
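getDeployPreviewBranch above replaces the getPreviewBranch export that disappears from the export list further down. It reads deploy-preview variables from whatever environment bag it is handed (a plain object, or a lookup function wrapped in a Proxy) and builds a preview-{owner}-{ref} branch name for Vercel previews. Illustrative call with made-up values:

import { getDeployPreviewBranch } from "@xata.io/client";

const branch = getDeployPreviewBranch({
  deployPreview: "vercel",
  vercelGitRepoOwner: "acme",
  vercelGitCommitRef: "feat/search"
});
// -> "preview-acme-feat/search"
// Returns undefined when deployPreview is not "vercel" or the Vercel variables
// are missing (the latter case also logs a console warning).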
3489
5325
|
class XataError extends Error {
|
3490
5326
|
constructor(message, status) {
|
3491
5327
|
super(message);
|
@@ -3494,6 +5330,7 @@ class XataError extends Error {
|
|
3494
5330
|
}
|
3495
5331
|
|
3496
5332
|
exports.BaseClient = BaseClient;
|
5333
|
+
exports.Buffer = Buffer;
|
3497
5334
|
exports.FetcherError = FetcherError;
|
3498
5335
|
exports.FilesPlugin = FilesPlugin;
|
3499
5336
|
exports.Operations = operationsByTag;
|
@@ -3519,6 +5356,7 @@ exports.XataError = XataError;
|
|
3519
5356
|
exports.XataFile = XataFile;
|
3520
5357
|
exports.XataPlugin = XataPlugin;
|
3521
5358
|
exports.acceptWorkspaceMemberInvite = acceptWorkspaceMemberInvite;
|
5359
|
+
exports.adaptAllTables = adaptAllTables;
|
3522
5360
|
exports.adaptTable = adaptTable;
|
3523
5361
|
exports.addGitBranchesEntry = addGitBranchesEntry;
|
3524
5362
|
exports.addTableColumn = addTableColumn;
|
@@ -3546,6 +5384,7 @@ exports.createTable = createTable;
|
|
3546
5384
|
exports.createUserAPIKey = createUserAPIKey;
|
3547
5385
|
exports.createWorkspace = createWorkspace;
|
3548
5386
|
exports.deleteBranch = deleteBranch;
|
5387
|
+
exports.deleteCluster = deleteCluster;
|
3549
5388
|
exports.deleteColumn = deleteColumn;
|
3550
5389
|
exports.deleteDatabase = deleteDatabase;
|
3551
5390
|
exports.deleteDatabaseGithubSettings = deleteDatabaseGithubSettings;
|
@@ -3566,9 +5405,7 @@ exports.exists = exists;
|
|
3566
5405
|
exports.fileAccess = fileAccess;
|
3567
5406
|
exports.fileUpload = fileUpload;
|
3568
5407
|
exports.ge = ge;
|
3569
|
-
exports.getAPIKey = getAPIKey;
|
3570
5408
|
exports.getAuthorizationCode = getAuthorizationCode;
|
3571
|
-
exports.getBranch = getBranch;
|
3572
5409
|
exports.getBranchDetails = getBranchDetails;
|
3573
5410
|
exports.getBranchList = getBranchList;
|
3574
5411
|
exports.getBranchMetadata = getBranchMetadata;
|
@@ -3583,7 +5420,7 @@ exports.getDatabaseGithubSettings = getDatabaseGithubSettings;
|
|
3583
5420
|
exports.getDatabaseList = getDatabaseList;
|
3584
5421
|
exports.getDatabaseMetadata = getDatabaseMetadata;
|
3585
5422
|
exports.getDatabaseSettings = getDatabaseSettings;
|
3586
|
-
exports.
|
5423
|
+
exports.getDeployPreviewBranch = getDeployPreviewBranch;
|
3587
5424
|
exports.getFile = getFile;
|
3588
5425
|
exports.getFileItem = getFileItem;
|
3589
5426
|
exports.getGitBranchesMapping = getGitBranchesMapping;
|
@@ -3592,7 +5429,6 @@ exports.getMigrationHistory = getMigrationHistory;
|
|
3592
5429
|
exports.getMigrationJobStatus = getMigrationJobStatus;
|
3593
5430
|
exports.getMigrationRequest = getMigrationRequest;
|
3594
5431
|
exports.getMigrationRequestIsMerged = getMigrationRequestIsMerged;
|
3595
|
-
exports.getPreviewBranch = getPreviewBranch;
|
3596
5432
|
exports.getRecord = getRecord;
|
3597
5433
|
exports.getSchema = getSchema;
|
3598
5434
|
exports.getTableColumns = getTableColumns;
|
@@ -3659,6 +5495,7 @@ exports.searchTable = searchTable;
|
|
3659
5495
|
exports.serialize = serialize;
|
3660
5496
|
exports.setTableSchema = setTableSchema;
|
3661
5497
|
exports.sqlQuery = sqlQuery;
|
5498
|
+
exports.startMigration = startMigration;
|
3662
5499
|
exports.startsWith = startsWith;
|
3663
5500
|
exports.summarizeTable = summarizeTable;
|
3664
5501
|
exports.transformImage = transformImage;
|