@xata.io/client 0.0.0-next.vdf3a7b8c70c130a3e9c73decc8494a3f8c8febcb → 0.0.0-next.ve109eeeef360444eb9f061f3d745eed662493f78
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-add-version.log +1 -1
- package/.turbo/turbo-build.log +4 -4
- package/CHANGELOG.md +15 -3
- package/dist/index.cjs +2417 -580
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.ts +4687 -4010
- package/dist/index.mjs +2413 -577
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.mjs
CHANGED
@@ -22,6 +22,1789 @@ const TraceAttributes = {
|
|
22
22
|
CLOUDFLARE_RAY_ID: "cf.ray"
|
23
23
|
};
|
24
24
|
|
25
|
+
const lookup = [];
|
26
|
+
const revLookup = [];
|
27
|
+
const code = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
|
28
|
+
for (let i = 0, len = code.length; i < len; ++i) {
|
29
|
+
lookup[i] = code[i];
|
30
|
+
revLookup[code.charCodeAt(i)] = i;
|
31
|
+
}
|
32
|
+
revLookup["-".charCodeAt(0)] = 62;
|
33
|
+
revLookup["_".charCodeAt(0)] = 63;
|
34
|
+
function getLens(b64) {
|
35
|
+
const len = b64.length;
|
36
|
+
if (len % 4 > 0) {
|
37
|
+
throw new Error("Invalid string. Length must be a multiple of 4");
|
38
|
+
}
|
39
|
+
let validLen = b64.indexOf("=");
|
40
|
+
if (validLen === -1) validLen = len;
|
41
|
+
const placeHoldersLen = validLen === len ? 0 : 4 - validLen % 4;
|
42
|
+
return [validLen, placeHoldersLen];
|
43
|
+
}
|
44
|
+
function _byteLength(_b64, validLen, placeHoldersLen) {
|
45
|
+
return (validLen + placeHoldersLen) * 3 / 4 - placeHoldersLen;
|
46
|
+
}
|
47
|
+
function toByteArray(b64) {
|
48
|
+
let tmp;
|
49
|
+
const lens = getLens(b64);
|
50
|
+
const validLen = lens[0];
|
51
|
+
const placeHoldersLen = lens[1];
|
52
|
+
const arr = new Uint8Array(_byteLength(b64, validLen, placeHoldersLen));
|
53
|
+
let curByte = 0;
|
54
|
+
const len = placeHoldersLen > 0 ? validLen - 4 : validLen;
|
55
|
+
let i;
|
56
|
+
for (i = 0; i < len; i += 4) {
|
57
|
+
tmp = revLookup[b64.charCodeAt(i)] << 18 | revLookup[b64.charCodeAt(i + 1)] << 12 | revLookup[b64.charCodeAt(i + 2)] << 6 | revLookup[b64.charCodeAt(i + 3)];
|
58
|
+
arr[curByte++] = tmp >> 16 & 255;
|
59
|
+
arr[curByte++] = tmp >> 8 & 255;
|
60
|
+
arr[curByte++] = tmp & 255;
|
61
|
+
}
|
62
|
+
if (placeHoldersLen === 2) {
|
63
|
+
tmp = revLookup[b64.charCodeAt(i)] << 2 | revLookup[b64.charCodeAt(i + 1)] >> 4;
|
64
|
+
arr[curByte++] = tmp & 255;
|
65
|
+
}
|
66
|
+
if (placeHoldersLen === 1) {
|
67
|
+
tmp = revLookup[b64.charCodeAt(i)] << 10 | revLookup[b64.charCodeAt(i + 1)] << 4 | revLookup[b64.charCodeAt(i + 2)] >> 2;
|
68
|
+
arr[curByte++] = tmp >> 8 & 255;
|
69
|
+
arr[curByte++] = tmp & 255;
|
70
|
+
}
|
71
|
+
return arr;
|
72
|
+
}
|
73
|
+
function tripletToBase64(num) {
|
74
|
+
return lookup[num >> 18 & 63] + lookup[num >> 12 & 63] + lookup[num >> 6 & 63] + lookup[num & 63];
|
75
|
+
}
|
76
|
+
function encodeChunk(uint8, start, end) {
|
77
|
+
let tmp;
|
78
|
+
const output = [];
|
79
|
+
for (let i = start; i < end; i += 3) {
|
80
|
+
tmp = (uint8[i] << 16 & 16711680) + (uint8[i + 1] << 8 & 65280) + (uint8[i + 2] & 255);
|
81
|
+
output.push(tripletToBase64(tmp));
|
82
|
+
}
|
83
|
+
return output.join("");
|
84
|
+
}
|
85
|
+
function fromByteArray(uint8) {
|
86
|
+
let tmp;
|
87
|
+
const len = uint8.length;
|
88
|
+
const extraBytes = len % 3;
|
89
|
+
const parts = [];
|
90
|
+
const maxChunkLength = 16383;
|
91
|
+
for (let i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) {
|
92
|
+
parts.push(encodeChunk(uint8, i, i + maxChunkLength > len2 ? len2 : i + maxChunkLength));
|
93
|
+
}
|
94
|
+
if (extraBytes === 1) {
|
95
|
+
tmp = uint8[len - 1];
|
96
|
+
parts.push(lookup[tmp >> 2] + lookup[tmp << 4 & 63] + "==");
|
97
|
+
} else if (extraBytes === 2) {
|
98
|
+
tmp = (uint8[len - 2] << 8) + uint8[len - 1];
|
99
|
+
parts.push(lookup[tmp >> 10] + lookup[tmp >> 4 & 63] + lookup[tmp << 2 & 63] + "=");
|
100
|
+
}
|
101
|
+
return parts.join("");
|
102
|
+
}
|
103
|
+
|
104
|
+
const K_MAX_LENGTH = 2147483647;
|
105
|
+
const MAX_ARGUMENTS_LENGTH = 4096;
|
106
|
+
class Buffer extends Uint8Array {
|
107
|
+
/**
|
108
|
+
* Constructs a new `Buffer` instance.
|
109
|
+
*
|
110
|
+
* @param value
|
111
|
+
* @param encodingOrOffset
|
112
|
+
* @param length
|
113
|
+
*/
|
114
|
+
constructor(value, encodingOrOffset, length) {
|
115
|
+
if (typeof value === "number") {
|
116
|
+
if (typeof encodingOrOffset === "string") {
|
117
|
+
throw new TypeError("The first argument must be of type string, received type number");
|
118
|
+
}
|
119
|
+
if (value < 0) {
|
120
|
+
throw new RangeError("The buffer size cannot be negative");
|
121
|
+
}
|
122
|
+
super(value < 0 ? 0 : Buffer._checked(value) | 0);
|
123
|
+
} else if (typeof value === "string") {
|
124
|
+
if (typeof encodingOrOffset !== "string") {
|
125
|
+
encodingOrOffset = "utf8";
|
126
|
+
}
|
127
|
+
if (!Buffer.isEncoding(encodingOrOffset)) {
|
128
|
+
throw new TypeError("Unknown encoding: " + encodingOrOffset);
|
129
|
+
}
|
130
|
+
const length2 = Buffer.byteLength(value, encodingOrOffset) | 0;
|
131
|
+
super(length2);
|
132
|
+
const written = this.write(value, 0, this.length, encodingOrOffset);
|
133
|
+
if (written !== length2) {
|
134
|
+
throw new TypeError(
|
135
|
+
"Number of bytes written did not match expected length (wrote " + written + ", expected " + length2 + ")"
|
136
|
+
);
|
137
|
+
}
|
138
|
+
} else if (ArrayBuffer.isView(value)) {
|
139
|
+
if (Buffer._isInstance(value, Uint8Array)) {
|
140
|
+
const copy = new Uint8Array(value);
|
141
|
+
const array = copy.buffer;
|
142
|
+
const byteOffset = copy.byteOffset;
|
143
|
+
const length2 = copy.byteLength;
|
144
|
+
if (byteOffset < 0 || array.byteLength < byteOffset) {
|
145
|
+
throw new RangeError("offset is outside of buffer bounds");
|
146
|
+
}
|
147
|
+
if (array.byteLength < byteOffset + (length2 || 0)) {
|
148
|
+
throw new RangeError("length is outside of buffer bounds");
|
149
|
+
}
|
150
|
+
super(new Uint8Array(array, byteOffset, length2));
|
151
|
+
} else {
|
152
|
+
const array = value;
|
153
|
+
const length2 = array.length < 0 ? 0 : Buffer._checked(array.length) | 0;
|
154
|
+
super(new Uint8Array(length2));
|
155
|
+
for (let i = 0; i < length2; i++) {
|
156
|
+
this[i] = array[i] & 255;
|
157
|
+
}
|
158
|
+
}
|
159
|
+
} else if (value == null) {
|
160
|
+
throw new TypeError(
|
161
|
+
"The first argument must be one of type string, Buffer, ArrayBuffer, Array, or Array-like Object. Received type " + typeof value
|
162
|
+
);
|
163
|
+
} else if (Buffer._isInstance(value, ArrayBuffer) || value && Buffer._isInstance(value.buffer, ArrayBuffer)) {
|
164
|
+
const array = value;
|
165
|
+
const byteOffset = encodingOrOffset;
|
166
|
+
if (byteOffset < 0 || array.byteLength < byteOffset) {
|
167
|
+
throw new RangeError("offset is outside of buffer bounds");
|
168
|
+
}
|
169
|
+
if (array.byteLength < byteOffset + (length || 0)) {
|
170
|
+
throw new RangeError("length is outside of buffer bounds");
|
171
|
+
}
|
172
|
+
super(new Uint8Array(array, byteOffset, length));
|
173
|
+
} else if (Array.isArray(value)) {
|
174
|
+
const array = value;
|
175
|
+
const length2 = array.length < 0 ? 0 : Buffer._checked(array.length) | 0;
|
176
|
+
super(new Uint8Array(length2));
|
177
|
+
for (let i = 0; i < length2; i++) {
|
178
|
+
this[i] = array[i] & 255;
|
179
|
+
}
|
180
|
+
} else {
|
181
|
+
throw new TypeError("Unable to determine the correct way to allocate buffer for type " + typeof value);
|
182
|
+
}
|
183
|
+
}
|
184
|
+
/**
|
185
|
+
* Return JSON representation of the buffer.
|
186
|
+
*/
|
187
|
+
toJSON() {
|
188
|
+
return {
|
189
|
+
type: "Buffer",
|
190
|
+
data: Array.prototype.slice.call(this)
|
191
|
+
};
|
192
|
+
}
|
193
|
+
/**
|
194
|
+
* Writes `string` to the buffer at `offset` according to the character encoding in `encoding`. The `length`
|
195
|
+
* parameter is the number of bytes to write. If the buffer does not contain enough space to fit the entire string,
|
196
|
+
* only part of `string` will be written. However, partially encoded characters will not be written.
|
197
|
+
*
|
198
|
+
* @param string String to write to `buf`.
|
199
|
+
* @param offset Number of bytes to skip before starting to write `string`. Default: `0`.
|
200
|
+
* @param length Maximum number of bytes to write: Default: `buf.length - offset`.
|
201
|
+
* @param encoding The character encoding of `string`. Default: `utf8`.
|
202
|
+
*/
|
203
|
+
write(string, offset, length, encoding) {
|
204
|
+
if (typeof offset === "undefined") {
|
205
|
+
encoding = "utf8";
|
206
|
+
length = this.length;
|
207
|
+
offset = 0;
|
208
|
+
} else if (typeof length === "undefined" && typeof offset === "string") {
|
209
|
+
encoding = offset;
|
210
|
+
length = this.length;
|
211
|
+
offset = 0;
|
212
|
+
} else if (typeof offset === "number" && isFinite(offset)) {
|
213
|
+
offset = offset >>> 0;
|
214
|
+
if (typeof length === "number" && isFinite(length)) {
|
215
|
+
length = length >>> 0;
|
216
|
+
encoding ?? (encoding = "utf8");
|
217
|
+
} else if (typeof length === "string") {
|
218
|
+
encoding = length;
|
219
|
+
length = void 0;
|
220
|
+
}
|
221
|
+
} else {
|
222
|
+
throw new Error("Buffer.write(string, encoding, offset[, length]) is no longer supported");
|
223
|
+
}
|
224
|
+
const remaining = this.length - offset;
|
225
|
+
if (typeof length === "undefined" || length > remaining) {
|
226
|
+
length = remaining;
|
227
|
+
}
|
228
|
+
if (string.length > 0 && (length < 0 || offset < 0) || offset > this.length) {
|
229
|
+
throw new RangeError("Attempt to write outside buffer bounds");
|
230
|
+
}
|
231
|
+
encoding || (encoding = "utf8");
|
232
|
+
switch (Buffer._getEncoding(encoding)) {
|
233
|
+
case "hex":
|
234
|
+
return Buffer._hexWrite(this, string, offset, length);
|
235
|
+
case "utf8":
|
236
|
+
return Buffer._utf8Write(this, string, offset, length);
|
237
|
+
case "ascii":
|
238
|
+
case "latin1":
|
239
|
+
case "binary":
|
240
|
+
return Buffer._asciiWrite(this, string, offset, length);
|
241
|
+
case "ucs2":
|
242
|
+
case "utf16le":
|
243
|
+
return Buffer._ucs2Write(this, string, offset, length);
|
244
|
+
case "base64":
|
245
|
+
return Buffer._base64Write(this, string, offset, length);
|
246
|
+
}
|
247
|
+
}
|
248
|
+
/**
|
249
|
+
* Decodes the buffer to a string according to the specified character encoding.
|
250
|
+
* Passing `start` and `end` will decode only a subset of the buffer.
|
251
|
+
*
|
252
|
+
* Note that if the encoding is `utf8` and a byte sequence in the input is not valid UTF-8, then each invalid byte
|
253
|
+
* will be replaced with `U+FFFD`.
|
254
|
+
*
|
255
|
+
* @param encoding
|
256
|
+
* @param start
|
257
|
+
* @param end
|
258
|
+
*/
|
259
|
+
toString(encoding, start, end) {
|
260
|
+
const length = this.length;
|
261
|
+
if (length === 0) {
|
262
|
+
return "";
|
263
|
+
}
|
264
|
+
if (arguments.length === 0) {
|
265
|
+
return Buffer._utf8Slice(this, 0, length);
|
266
|
+
}
|
267
|
+
if (typeof start === "undefined" || start < 0) {
|
268
|
+
start = 0;
|
269
|
+
}
|
270
|
+
if (start > this.length) {
|
271
|
+
return "";
|
272
|
+
}
|
273
|
+
if (typeof end === "undefined" || end > this.length) {
|
274
|
+
end = this.length;
|
275
|
+
}
|
276
|
+
if (end <= 0) {
|
277
|
+
return "";
|
278
|
+
}
|
279
|
+
end >>>= 0;
|
280
|
+
start >>>= 0;
|
281
|
+
if (end <= start) {
|
282
|
+
return "";
|
283
|
+
}
|
284
|
+
if (!encoding) {
|
285
|
+
encoding = "utf8";
|
286
|
+
}
|
287
|
+
switch (Buffer._getEncoding(encoding)) {
|
288
|
+
case "hex":
|
289
|
+
return Buffer._hexSlice(this, start, end);
|
290
|
+
case "utf8":
|
291
|
+
return Buffer._utf8Slice(this, start, end);
|
292
|
+
case "ascii":
|
293
|
+
return Buffer._asciiSlice(this, start, end);
|
294
|
+
case "latin1":
|
295
|
+
case "binary":
|
296
|
+
return Buffer._latin1Slice(this, start, end);
|
297
|
+
case "ucs2":
|
298
|
+
case "utf16le":
|
299
|
+
return Buffer._utf16leSlice(this, start, end);
|
300
|
+
case "base64":
|
301
|
+
return Buffer._base64Slice(this, start, end);
|
302
|
+
}
|
303
|
+
}
|
304
|
+
/**
|
305
|
+
* Returns true if this buffer's is equal to the provided buffer, meaning they share the same exact data.
|
306
|
+
*
|
307
|
+
* @param otherBuffer
|
308
|
+
*/
|
309
|
+
equals(otherBuffer) {
|
310
|
+
if (!Buffer.isBuffer(otherBuffer)) {
|
311
|
+
throw new TypeError("Argument must be a Buffer");
|
312
|
+
}
|
313
|
+
if (this === otherBuffer) {
|
314
|
+
return true;
|
315
|
+
}
|
316
|
+
return Buffer.compare(this, otherBuffer) === 0;
|
317
|
+
}
|
318
|
+
/**
|
319
|
+
* Compares the buffer with `otherBuffer` and returns a number indicating whether the buffer comes before, after,
|
320
|
+
* or is the same as `otherBuffer` in sort order. Comparison is based on the actual sequence of bytes in each
|
321
|
+
* buffer.
|
322
|
+
*
|
323
|
+
* - `0` is returned if `otherBuffer` is the same as this buffer.
|
324
|
+
* - `1` is returned if `otherBuffer` should come before this buffer when sorted.
|
325
|
+
* - `-1` is returned if `otherBuffer` should come after this buffer when sorted.
|
326
|
+
*
|
327
|
+
* @param otherBuffer The buffer to compare to.
|
328
|
+
* @param targetStart The offset within `otherBuffer` at which to begin comparison.
|
329
|
+
* @param targetEnd The offset within `otherBuffer` at which to end comparison (exclusive).
|
330
|
+
* @param sourceStart The offset within this buffer at which to begin comparison.
|
331
|
+
* @param sourceEnd The offset within this buffer at which to end the comparison (exclusive).
|
332
|
+
*/
|
333
|
+
compare(otherBuffer, targetStart, targetEnd, sourceStart, sourceEnd) {
|
334
|
+
if (Buffer._isInstance(otherBuffer, Uint8Array)) {
|
335
|
+
otherBuffer = Buffer.from(otherBuffer, otherBuffer.byteOffset, otherBuffer.byteLength);
|
336
|
+
}
|
337
|
+
if (!Buffer.isBuffer(otherBuffer)) {
|
338
|
+
throw new TypeError("Argument must be a Buffer or Uint8Array");
|
339
|
+
}
|
340
|
+
targetStart ?? (targetStart = 0);
|
341
|
+
targetEnd ?? (targetEnd = otherBuffer ? otherBuffer.length : 0);
|
342
|
+
sourceStart ?? (sourceStart = 0);
|
343
|
+
sourceEnd ?? (sourceEnd = this.length);
|
344
|
+
if (targetStart < 0 || targetEnd > otherBuffer.length || sourceStart < 0 || sourceEnd > this.length) {
|
345
|
+
throw new RangeError("Out of range index");
|
346
|
+
}
|
347
|
+
if (sourceStart >= sourceEnd && targetStart >= targetEnd) {
|
348
|
+
return 0;
|
349
|
+
}
|
350
|
+
if (sourceStart >= sourceEnd) {
|
351
|
+
return -1;
|
352
|
+
}
|
353
|
+
if (targetStart >= targetEnd) {
|
354
|
+
return 1;
|
355
|
+
}
|
356
|
+
targetStart >>>= 0;
|
357
|
+
targetEnd >>>= 0;
|
358
|
+
sourceStart >>>= 0;
|
359
|
+
sourceEnd >>>= 0;
|
360
|
+
if (this === otherBuffer) {
|
361
|
+
return 0;
|
362
|
+
}
|
363
|
+
let x = sourceEnd - sourceStart;
|
364
|
+
let y = targetEnd - targetStart;
|
365
|
+
const len = Math.min(x, y);
|
366
|
+
const thisCopy = this.slice(sourceStart, sourceEnd);
|
367
|
+
const targetCopy = otherBuffer.slice(targetStart, targetEnd);
|
368
|
+
for (let i = 0; i < len; ++i) {
|
369
|
+
if (thisCopy[i] !== targetCopy[i]) {
|
370
|
+
x = thisCopy[i];
|
371
|
+
y = targetCopy[i];
|
372
|
+
break;
|
373
|
+
}
|
374
|
+
}
|
375
|
+
if (x < y) return -1;
|
376
|
+
if (y < x) return 1;
|
377
|
+
return 0;
|
378
|
+
}
|
379
|
+
/**
|
380
|
+
* Copies data from a region of this buffer to a region in `targetBuffer`, even if the `targetBuffer` memory
|
381
|
+
* region overlaps with this buffer.
|
382
|
+
*
|
383
|
+
* @param targetBuffer The target buffer to copy into.
|
384
|
+
* @param targetStart The offset within `targetBuffer` at which to begin writing.
|
385
|
+
* @param sourceStart The offset within this buffer at which to begin copying.
|
386
|
+
* @param sourceEnd The offset within this buffer at which to end copying (exclusive).
|
387
|
+
*/
|
388
|
+
copy(targetBuffer, targetStart, sourceStart, sourceEnd) {
|
389
|
+
if (!Buffer.isBuffer(targetBuffer)) throw new TypeError("argument should be a Buffer");
|
390
|
+
if (!sourceStart) sourceStart = 0;
|
391
|
+
if (!targetStart) targetStart = 0;
|
392
|
+
if (!sourceEnd && sourceEnd !== 0) sourceEnd = this.length;
|
393
|
+
if (targetStart >= targetBuffer.length) targetStart = targetBuffer.length;
|
394
|
+
if (!targetStart) targetStart = 0;
|
395
|
+
if (sourceEnd > 0 && sourceEnd < sourceStart) sourceEnd = sourceStart;
|
396
|
+
if (sourceEnd === sourceStart) return 0;
|
397
|
+
if (targetBuffer.length === 0 || this.length === 0) return 0;
|
398
|
+
if (targetStart < 0) {
|
399
|
+
throw new RangeError("targetStart out of bounds");
|
400
|
+
}
|
401
|
+
if (sourceStart < 0 || sourceStart >= this.length) throw new RangeError("Index out of range");
|
402
|
+
if (sourceEnd < 0) throw new RangeError("sourceEnd out of bounds");
|
403
|
+
if (sourceEnd > this.length) sourceEnd = this.length;
|
404
|
+
if (targetBuffer.length - targetStart < sourceEnd - sourceStart) {
|
405
|
+
sourceEnd = targetBuffer.length - targetStart + sourceStart;
|
406
|
+
}
|
407
|
+
const len = sourceEnd - sourceStart;
|
408
|
+
if (this === targetBuffer && typeof Uint8Array.prototype.copyWithin === "function") {
|
409
|
+
this.copyWithin(targetStart, sourceStart, sourceEnd);
|
410
|
+
} else {
|
411
|
+
Uint8Array.prototype.set.call(targetBuffer, this.subarray(sourceStart, sourceEnd), targetStart);
|
412
|
+
}
|
413
|
+
return len;
|
414
|
+
}
|
415
|
+
/**
|
416
|
+
* Returns a new `Buffer` that references the same memory as the original, but offset and cropped by the `start`
|
417
|
+
* and `end` indices. This is the same behavior as `buf.subarray()`.
|
418
|
+
*
|
419
|
+
* This method is not compatible with the `Uint8Array.prototype.slice()`, which is a superclass of Buffer. To copy
|
420
|
+
* the slice, use `Uint8Array.prototype.slice()`.
|
421
|
+
*
|
422
|
+
* @param start
|
423
|
+
* @param end
|
424
|
+
*/
|
425
|
+
slice(start, end) {
|
426
|
+
if (!start) {
|
427
|
+
start = 0;
|
428
|
+
}
|
429
|
+
const len = this.length;
|
430
|
+
start = ~~start;
|
431
|
+
end = end === void 0 ? len : ~~end;
|
432
|
+
if (start < 0) {
|
433
|
+
start += len;
|
434
|
+
if (start < 0) {
|
435
|
+
start = 0;
|
436
|
+
}
|
437
|
+
} else if (start > len) {
|
438
|
+
start = len;
|
439
|
+
}
|
440
|
+
if (end < 0) {
|
441
|
+
end += len;
|
442
|
+
if (end < 0) {
|
443
|
+
end = 0;
|
444
|
+
}
|
445
|
+
} else if (end > len) {
|
446
|
+
end = len;
|
447
|
+
}
|
448
|
+
if (end < start) {
|
449
|
+
end = start;
|
450
|
+
}
|
451
|
+
const newBuf = this.subarray(start, end);
|
452
|
+
Object.setPrototypeOf(newBuf, Buffer.prototype);
|
453
|
+
return newBuf;
|
454
|
+
}
|
455
|
+
/**
|
456
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits
|
457
|
+
* of accuracy. Behavior is undefined when value is anything other than an unsigned integer.
|
458
|
+
*
|
459
|
+
* @param value Number to write.
|
460
|
+
* @param offset Number of bytes to skip before starting to write.
|
461
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
462
|
+
* @param noAssert
|
463
|
+
* @returns `offset` plus the number of bytes written.
|
464
|
+
*/
|
465
|
+
writeUIntLE(value, offset, byteLength, noAssert) {
|
466
|
+
value = +value;
|
467
|
+
offset = offset >>> 0;
|
468
|
+
byteLength = byteLength >>> 0;
|
469
|
+
if (!noAssert) {
|
470
|
+
const maxBytes = Math.pow(2, 8 * byteLength) - 1;
|
471
|
+
Buffer._checkInt(this, value, offset, byteLength, maxBytes, 0);
|
472
|
+
}
|
473
|
+
let mul = 1;
|
474
|
+
let i = 0;
|
475
|
+
this[offset] = value & 255;
|
476
|
+
while (++i < byteLength && (mul *= 256)) {
|
477
|
+
this[offset + i] = value / mul & 255;
|
478
|
+
}
|
479
|
+
return offset + byteLength;
|
480
|
+
}
|
481
|
+
/**
|
482
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits of
|
483
|
+
* accuracy. Behavior is undefined when `value` is anything other than an unsigned integer.
|
484
|
+
*
|
485
|
+
* @param value Number to write.
|
486
|
+
* @param offset Number of bytes to skip before starting to write.
|
487
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
488
|
+
* @param noAssert
|
489
|
+
* @returns `offset` plus the number of bytes written.
|
490
|
+
*/
|
491
|
+
writeUIntBE(value, offset, byteLength, noAssert) {
|
492
|
+
value = +value;
|
493
|
+
offset = offset >>> 0;
|
494
|
+
byteLength = byteLength >>> 0;
|
495
|
+
if (!noAssert) {
|
496
|
+
const maxBytes = Math.pow(2, 8 * byteLength) - 1;
|
497
|
+
Buffer._checkInt(this, value, offset, byteLength, maxBytes, 0);
|
498
|
+
}
|
499
|
+
let i = byteLength - 1;
|
500
|
+
let mul = 1;
|
501
|
+
this[offset + i] = value & 255;
|
502
|
+
while (--i >= 0 && (mul *= 256)) {
|
503
|
+
this[offset + i] = value / mul & 255;
|
504
|
+
}
|
505
|
+
return offset + byteLength;
|
506
|
+
}
|
507
|
+
/**
|
508
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits
|
509
|
+
* of accuracy. Behavior is undefined when `value` is anything other than a signed integer.
|
510
|
+
*
|
511
|
+
* @param value Number to write.
|
512
|
+
* @param offset Number of bytes to skip before starting to write.
|
513
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
514
|
+
* @param noAssert
|
515
|
+
* @returns `offset` plus the number of bytes written.
|
516
|
+
*/
|
517
|
+
writeIntLE(value, offset, byteLength, noAssert) {
|
518
|
+
value = +value;
|
519
|
+
offset = offset >>> 0;
|
520
|
+
if (!noAssert) {
|
521
|
+
const limit = Math.pow(2, 8 * byteLength - 1);
|
522
|
+
Buffer._checkInt(this, value, offset, byteLength, limit - 1, -limit);
|
523
|
+
}
|
524
|
+
let i = 0;
|
525
|
+
let mul = 1;
|
526
|
+
let sub = 0;
|
527
|
+
this[offset] = value & 255;
|
528
|
+
while (++i < byteLength && (mul *= 256)) {
|
529
|
+
if (value < 0 && sub === 0 && this[offset + i - 1] !== 0) {
|
530
|
+
sub = 1;
|
531
|
+
}
|
532
|
+
this[offset + i] = (value / mul >> 0) - sub & 255;
|
533
|
+
}
|
534
|
+
return offset + byteLength;
|
535
|
+
}
|
536
|
+
/**
|
537
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits
|
538
|
+
* of accuracy. Behavior is undefined when `value` is anything other than a signed integer.
|
539
|
+
*
|
540
|
+
* @param value Number to write.
|
541
|
+
* @param offset Number of bytes to skip before starting to write.
|
542
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
543
|
+
* @param noAssert
|
544
|
+
* @returns `offset` plus the number of bytes written.
|
545
|
+
*/
|
546
|
+
writeIntBE(value, offset, byteLength, noAssert) {
|
547
|
+
value = +value;
|
548
|
+
offset = offset >>> 0;
|
549
|
+
if (!noAssert) {
|
550
|
+
const limit = Math.pow(2, 8 * byteLength - 1);
|
551
|
+
Buffer._checkInt(this, value, offset, byteLength, limit - 1, -limit);
|
552
|
+
}
|
553
|
+
let i = byteLength - 1;
|
554
|
+
let mul = 1;
|
555
|
+
let sub = 0;
|
556
|
+
this[offset + i] = value & 255;
|
557
|
+
while (--i >= 0 && (mul *= 256)) {
|
558
|
+
if (value < 0 && sub === 0 && this[offset + i + 1] !== 0) {
|
559
|
+
sub = 1;
|
560
|
+
}
|
561
|
+
this[offset + i] = (value / mul >> 0) - sub & 255;
|
562
|
+
}
|
563
|
+
return offset + byteLength;
|
564
|
+
}
|
565
|
+
/**
|
566
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an
|
567
|
+
* unsigned, little-endian integer supporting up to 48 bits of accuracy.
|
568
|
+
*
|
569
|
+
* @param offset Number of bytes to skip before starting to read.
|
570
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
571
|
+
* @param noAssert
|
572
|
+
*/
|
573
|
+
readUIntLE(offset, byteLength, noAssert) {
|
574
|
+
offset = offset >>> 0;
|
575
|
+
byteLength = byteLength >>> 0;
|
576
|
+
if (!noAssert) {
|
577
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
578
|
+
}
|
579
|
+
let val = this[offset];
|
580
|
+
let mul = 1;
|
581
|
+
let i = 0;
|
582
|
+
while (++i < byteLength && (mul *= 256)) {
|
583
|
+
val += this[offset + i] * mul;
|
584
|
+
}
|
585
|
+
return val;
|
586
|
+
}
|
587
|
+
/**
|
588
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an
|
589
|
+
* unsigned, big-endian integer supporting up to 48 bits of accuracy.
|
590
|
+
*
|
591
|
+
* @param offset Number of bytes to skip before starting to read.
|
592
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
593
|
+
* @param noAssert
|
594
|
+
*/
|
595
|
+
readUIntBE(offset, byteLength, noAssert) {
|
596
|
+
offset = offset >>> 0;
|
597
|
+
byteLength = byteLength >>> 0;
|
598
|
+
if (!noAssert) {
|
599
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
600
|
+
}
|
601
|
+
let val = this[offset + --byteLength];
|
602
|
+
let mul = 1;
|
603
|
+
while (byteLength > 0 && (mul *= 256)) {
|
604
|
+
val += this[offset + --byteLength] * mul;
|
605
|
+
}
|
606
|
+
return val;
|
607
|
+
}
|
608
|
+
/**
|
609
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a
|
610
|
+
* little-endian, two's complement signed value supporting up to 48 bits of accuracy.
|
611
|
+
*
|
612
|
+
* @param offset Number of bytes to skip before starting to read.
|
613
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
614
|
+
* @param noAssert
|
615
|
+
*/
|
616
|
+
readIntLE(offset, byteLength, noAssert) {
|
617
|
+
offset = offset >>> 0;
|
618
|
+
byteLength = byteLength >>> 0;
|
619
|
+
if (!noAssert) {
|
620
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
621
|
+
}
|
622
|
+
let val = this[offset];
|
623
|
+
let mul = 1;
|
624
|
+
let i = 0;
|
625
|
+
while (++i < byteLength && (mul *= 256)) {
|
626
|
+
val += this[offset + i] * mul;
|
627
|
+
}
|
628
|
+
mul *= 128;
|
629
|
+
if (val >= mul) {
|
630
|
+
val -= Math.pow(2, 8 * byteLength);
|
631
|
+
}
|
632
|
+
return val;
|
633
|
+
}
|
634
|
+
/**
|
635
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a
|
636
|
+
* big-endian, two's complement signed value supporting up to 48 bits of accuracy.
|
637
|
+
*
|
638
|
+
* @param offset Number of bytes to skip before starting to read.
|
639
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
640
|
+
* @param noAssert
|
641
|
+
*/
|
642
|
+
readIntBE(offset, byteLength, noAssert) {
|
643
|
+
offset = offset >>> 0;
|
644
|
+
byteLength = byteLength >>> 0;
|
645
|
+
if (!noAssert) {
|
646
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
647
|
+
}
|
648
|
+
let i = byteLength;
|
649
|
+
let mul = 1;
|
650
|
+
let val = this[offset + --i];
|
651
|
+
while (i > 0 && (mul *= 256)) {
|
652
|
+
val += this[offset + --i] * mul;
|
653
|
+
}
|
654
|
+
mul *= 128;
|
655
|
+
if (val >= mul) {
|
656
|
+
val -= Math.pow(2, 8 * byteLength);
|
657
|
+
}
|
658
|
+
return val;
|
659
|
+
}
|
660
|
+
/**
|
661
|
+
* Reads an unsigned 8-bit integer from `buf` at the specified `offset`.
|
662
|
+
*
|
663
|
+
* @param offset Number of bytes to skip before starting to read.
|
664
|
+
* @param noAssert
|
665
|
+
*/
|
666
|
+
readUInt8(offset, noAssert) {
|
667
|
+
offset = offset >>> 0;
|
668
|
+
if (!noAssert) {
|
669
|
+
Buffer._checkOffset(offset, 1, this.length);
|
670
|
+
}
|
671
|
+
return this[offset];
|
672
|
+
}
|
673
|
+
/**
|
674
|
+
* Reads an unsigned, little-endian 16-bit integer from `buf` at the specified `offset`.
|
675
|
+
*
|
676
|
+
* @param offset Number of bytes to skip before starting to read.
|
677
|
+
* @param noAssert
|
678
|
+
*/
|
679
|
+
readUInt16LE(offset, noAssert) {
|
680
|
+
offset = offset >>> 0;
|
681
|
+
if (!noAssert) {
|
682
|
+
Buffer._checkOffset(offset, 2, this.length);
|
683
|
+
}
|
684
|
+
return this[offset] | this[offset + 1] << 8;
|
685
|
+
}
|
686
|
+
/**
|
687
|
+
* Reads an unsigned, big-endian 16-bit integer from `buf` at the specified `offset`.
|
688
|
+
*
|
689
|
+
* @param offset Number of bytes to skip before starting to read.
|
690
|
+
* @param noAssert
|
691
|
+
*/
|
692
|
+
readUInt16BE(offset, noAssert) {
|
693
|
+
offset = offset >>> 0;
|
694
|
+
if (!noAssert) {
|
695
|
+
Buffer._checkOffset(offset, 2, this.length);
|
696
|
+
}
|
697
|
+
return this[offset] << 8 | this[offset + 1];
|
698
|
+
}
|
699
|
+
/**
|
700
|
+
* Reads an unsigned, little-endian 32-bit integer from `buf` at the specified `offset`.
|
701
|
+
*
|
702
|
+
* @param offset Number of bytes to skip before starting to read.
|
703
|
+
* @param noAssert
|
704
|
+
*/
|
705
|
+
readUInt32LE(offset, noAssert) {
|
706
|
+
offset = offset >>> 0;
|
707
|
+
if (!noAssert) {
|
708
|
+
Buffer._checkOffset(offset, 4, this.length);
|
709
|
+
}
|
710
|
+
return (this[offset] | this[offset + 1] << 8 | this[offset + 2] << 16) + this[offset + 3] * 16777216;
|
711
|
+
}
|
712
|
+
/**
|
713
|
+
* Reads an unsigned, big-endian 32-bit integer from `buf` at the specified `offset`.
|
714
|
+
*
|
715
|
+
* @param offset Number of bytes to skip before starting to read.
|
716
|
+
* @param noAssert
|
717
|
+
*/
|
718
|
+
readUInt32BE(offset, noAssert) {
|
719
|
+
offset = offset >>> 0;
|
720
|
+
if (!noAssert) {
|
721
|
+
Buffer._checkOffset(offset, 4, this.length);
|
722
|
+
}
|
723
|
+
return this[offset] * 16777216 + (this[offset + 1] << 16 | this[offset + 2] << 8 | this[offset + 3]);
|
724
|
+
}
|
725
|
+
/**
|
726
|
+
* Reads a signed 8-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer` are interpreted
|
727
|
+
* as two's complement signed values.
|
728
|
+
*
|
729
|
+
* @param offset Number of bytes to skip before starting to read.
|
730
|
+
* @param noAssert
|
731
|
+
*/
|
732
|
+
readInt8(offset, noAssert) {
|
733
|
+
offset = offset >>> 0;
|
734
|
+
if (!noAssert) {
|
735
|
+
Buffer._checkOffset(offset, 1, this.length);
|
736
|
+
}
|
737
|
+
if (!(this[offset] & 128)) {
|
738
|
+
return this[offset];
|
739
|
+
}
|
740
|
+
return (255 - this[offset] + 1) * -1;
|
741
|
+
}
|
742
|
+
/**
|
743
|
+
* Reads a signed, little-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
744
|
+
* are interpreted as two's complement signed values.
|
745
|
+
*
|
746
|
+
* @param offset Number of bytes to skip before starting to read.
|
747
|
+
* @param noAssert
|
748
|
+
*/
|
749
|
+
readInt16LE(offset, noAssert) {
|
750
|
+
offset = offset >>> 0;
|
751
|
+
if (!noAssert) {
|
752
|
+
Buffer._checkOffset(offset, 2, this.length);
|
753
|
+
}
|
754
|
+
const val = this[offset] | this[offset + 1] << 8;
|
755
|
+
return val & 32768 ? val | 4294901760 : val;
|
756
|
+
}
|
757
|
+
/**
|
758
|
+
* Reads a signed, big-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
759
|
+
* are interpreted as two's complement signed values.
|
760
|
+
*
|
761
|
+
* @param offset Number of bytes to skip before starting to read.
|
762
|
+
* @param noAssert
|
763
|
+
*/
|
764
|
+
readInt16BE(offset, noAssert) {
|
765
|
+
offset = offset >>> 0;
|
766
|
+
if (!noAssert) {
|
767
|
+
Buffer._checkOffset(offset, 2, this.length);
|
768
|
+
}
|
769
|
+
const val = this[offset + 1] | this[offset] << 8;
|
770
|
+
return val & 32768 ? val | 4294901760 : val;
|
771
|
+
}
|
772
|
+
/**
|
773
|
+
* Reads a signed, little-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
774
|
+
* are interpreted as two's complement signed values.
|
775
|
+
*
|
776
|
+
* @param offset Number of bytes to skip before starting to read.
|
777
|
+
* @param noAssert
|
778
|
+
*/
|
779
|
+
readInt32LE(offset, noAssert) {
|
780
|
+
offset = offset >>> 0;
|
781
|
+
if (!noAssert) {
|
782
|
+
Buffer._checkOffset(offset, 4, this.length);
|
783
|
+
}
|
784
|
+
return this[offset] | this[offset + 1] << 8 | this[offset + 2] << 16 | this[offset + 3] << 24;
|
785
|
+
}
|
786
|
+
/**
|
787
|
+
* Reads a signed, big-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
788
|
+
* are interpreted as two's complement signed values.
|
789
|
+
*
|
790
|
+
* @param offset Number of bytes to skip before starting to read.
|
791
|
+
* @param noAssert
|
792
|
+
*/
|
793
|
+
readInt32BE(offset, noAssert) {
|
794
|
+
offset = offset >>> 0;
|
795
|
+
if (!noAssert) {
|
796
|
+
Buffer._checkOffset(offset, 4, this.length);
|
797
|
+
}
|
798
|
+
return this[offset] << 24 | this[offset + 1] << 16 | this[offset + 2] << 8 | this[offset + 3];
|
799
|
+
}
|
800
|
+
/**
|
801
|
+
* Interprets `buf` as an array of unsigned 16-bit integers and swaps the byte order in-place.
|
802
|
+
* Throws a `RangeError` if `buf.length` is not a multiple of 2.
|
803
|
+
*/
|
804
|
+
swap16() {
|
805
|
+
const len = this.length;
|
806
|
+
if (len % 2 !== 0) {
|
807
|
+
throw new RangeError("Buffer size must be a multiple of 16-bits");
|
808
|
+
}
|
809
|
+
for (let i = 0; i < len; i += 2) {
|
810
|
+
this._swap(this, i, i + 1);
|
811
|
+
}
|
812
|
+
return this;
|
813
|
+
}
|
814
|
+
/**
|
815
|
+
* Interprets `buf` as an array of unsigned 32-bit integers and swaps the byte order in-place.
|
816
|
+
* Throws a `RangeError` if `buf.length` is not a multiple of 4.
|
817
|
+
*/
|
818
|
+
swap32() {
|
819
|
+
const len = this.length;
|
820
|
+
if (len % 4 !== 0) {
|
821
|
+
throw new RangeError("Buffer size must be a multiple of 32-bits");
|
822
|
+
}
|
823
|
+
for (let i = 0; i < len; i += 4) {
|
824
|
+
this._swap(this, i, i + 3);
|
825
|
+
this._swap(this, i + 1, i + 2);
|
826
|
+
}
|
827
|
+
return this;
|
828
|
+
}
|
829
|
+
/**
|
830
|
+
* Interprets `buf` as an array of unsigned 64-bit integers and swaps the byte order in-place.
|
831
|
+
* Throws a `RangeError` if `buf.length` is not a multiple of 8.
|
832
|
+
*/
|
833
|
+
swap64() {
|
834
|
+
const len = this.length;
|
835
|
+
if (len % 8 !== 0) {
|
836
|
+
throw new RangeError("Buffer size must be a multiple of 64-bits");
|
837
|
+
}
|
838
|
+
for (let i = 0; i < len; i += 8) {
|
839
|
+
this._swap(this, i, i + 7);
|
840
|
+
this._swap(this, i + 1, i + 6);
|
841
|
+
this._swap(this, i + 2, i + 5);
|
842
|
+
this._swap(this, i + 3, i + 4);
|
843
|
+
}
|
844
|
+
return this;
|
845
|
+
}
|
846
|
+
/**
|
847
|
+
* Swaps two octets.
|
848
|
+
*
|
849
|
+
* @param b
|
850
|
+
* @param n
|
851
|
+
* @param m
|
852
|
+
*/
|
853
|
+
_swap(b, n, m) {
|
854
|
+
const i = b[n];
|
855
|
+
b[n] = b[m];
|
856
|
+
b[m] = i;
|
857
|
+
}
|
858
|
+
/**
|
859
|
+
* Writes `value` to `buf` at the specified `offset`. The `value` must be a valid unsigned 8-bit integer.
|
860
|
+
* Behavior is undefined when `value` is anything other than an unsigned 8-bit integer.
|
861
|
+
*
|
862
|
+
* @param value Number to write.
|
863
|
+
* @param offset Number of bytes to skip before starting to write.
|
864
|
+
* @param noAssert
|
865
|
+
* @returns `offset` plus the number of bytes written.
|
866
|
+
*/
|
867
|
+
writeUInt8(value, offset, noAssert) {
|
868
|
+
value = +value;
|
869
|
+
offset = offset >>> 0;
|
870
|
+
if (!noAssert) {
|
871
|
+
Buffer._checkInt(this, value, offset, 1, 255, 0);
|
872
|
+
}
|
873
|
+
this[offset] = value & 255;
|
874
|
+
return offset + 1;
|
875
|
+
}
|
876
|
+
/**
|
877
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 16-bit
|
878
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 16-bit integer.
|
879
|
+
*
|
880
|
+
* @param value Number to write.
|
881
|
+
* @param offset Number of bytes to skip before starting to write.
|
882
|
+
* @param noAssert
|
883
|
+
* @returns `offset` plus the number of bytes written.
|
884
|
+
*/
|
885
|
+
writeUInt16LE(value, offset, noAssert) {
|
886
|
+
value = +value;
|
887
|
+
offset = offset >>> 0;
|
888
|
+
if (!noAssert) {
|
889
|
+
Buffer._checkInt(this, value, offset, 2, 65535, 0);
|
890
|
+
}
|
891
|
+
this[offset] = value & 255;
|
892
|
+
this[offset + 1] = value >>> 8;
|
893
|
+
return offset + 2;
|
894
|
+
}
|
895
|
+
/**
|
896
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 16-bit
|
897
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 16-bit integer.
|
898
|
+
*
|
899
|
+
* @param value Number to write.
|
900
|
+
* @param offset Number of bytes to skip before starting to write.
|
901
|
+
* @param noAssert
|
902
|
+
* @returns `offset` plus the number of bytes written.
|
903
|
+
*/
|
904
|
+
writeUInt16BE(value, offset, noAssert) {
|
905
|
+
value = +value;
|
906
|
+
offset = offset >>> 0;
|
907
|
+
if (!noAssert) {
|
908
|
+
Buffer._checkInt(this, value, offset, 2, 65535, 0);
|
909
|
+
}
|
910
|
+
this[offset] = value >>> 8;
|
911
|
+
this[offset + 1] = value & 255;
|
912
|
+
return offset + 2;
|
913
|
+
}
|
914
|
+
/**
|
915
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 32-bit
|
916
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 32-bit integer.
|
917
|
+
*
|
918
|
+
* @param value Number to write.
|
919
|
+
* @param offset Number of bytes to skip before starting to write.
|
920
|
+
* @param noAssert
|
921
|
+
* @returns `offset` plus the number of bytes written.
|
922
|
+
*/
|
923
|
+
writeUInt32LE(value, offset, noAssert) {
|
924
|
+
value = +value;
|
925
|
+
offset = offset >>> 0;
|
926
|
+
if (!noAssert) {
|
927
|
+
Buffer._checkInt(this, value, offset, 4, 4294967295, 0);
|
928
|
+
}
|
929
|
+
this[offset + 3] = value >>> 24;
|
930
|
+
this[offset + 2] = value >>> 16;
|
931
|
+
this[offset + 1] = value >>> 8;
|
932
|
+
this[offset] = value & 255;
|
933
|
+
return offset + 4;
|
934
|
+
}
|
935
|
+
/**
|
936
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 32-bit
|
937
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 32-bit integer.
|
938
|
+
*
|
939
|
+
* @param value Number to write.
|
940
|
+
* @param offset Number of bytes to skip before starting to write.
|
941
|
+
* @param noAssert
|
942
|
+
* @returns `offset` plus the number of bytes written.
|
943
|
+
*/
|
944
|
+
writeUInt32BE(value, offset, noAssert) {
|
945
|
+
value = +value;
|
946
|
+
offset = offset >>> 0;
|
947
|
+
if (!noAssert) {
|
948
|
+
Buffer._checkInt(this, value, offset, 4, 4294967295, 0);
|
949
|
+
}
|
950
|
+
this[offset] = value >>> 24;
|
951
|
+
this[offset + 1] = value >>> 16;
|
952
|
+
this[offset + 2] = value >>> 8;
|
953
|
+
this[offset + 3] = value & 255;
|
954
|
+
return offset + 4;
|
955
|
+
}
|
956
|
+
/**
|
957
|
+
* Writes `value` to `buf` at the specified `offset`. The `value` must be a valid signed 8-bit integer.
|
958
|
+
* Behavior is undefined when `value` is anything other than a signed 8-bit integer.
|
959
|
+
*
|
960
|
+
* @param value Number to write.
|
961
|
+
* @param offset Number of bytes to skip before starting to write.
|
962
|
+
* @param noAssert
|
963
|
+
* @returns `offset` plus the number of bytes written.
|
964
|
+
*/
|
965
|
+
writeInt8(value, offset, noAssert) {
|
966
|
+
value = +value;
|
967
|
+
offset = offset >>> 0;
|
968
|
+
if (!noAssert) {
|
969
|
+
Buffer._checkInt(this, value, offset, 1, 127, -128);
|
970
|
+
}
|
971
|
+
if (value < 0) {
|
972
|
+
value = 255 + value + 1;
|
973
|
+
}
|
974
|
+
this[offset] = value & 255;
|
975
|
+
return offset + 1;
|
976
|
+
}
|
977
|
+
/**
|
978
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 16-bit
|
979
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 16-bit integer.
|
980
|
+
*
|
981
|
+
* @param value Number to write.
|
982
|
+
* @param offset Number of bytes to skip before starting to write.
|
983
|
+
* @param noAssert
|
984
|
+
* @returns `offset` plus the number of bytes written.
|
985
|
+
*/
|
986
|
+
writeInt16LE(value, offset, noAssert) {
|
987
|
+
value = +value;
|
988
|
+
offset = offset >>> 0;
|
989
|
+
if (!noAssert) {
|
990
|
+
Buffer._checkInt(this, value, offset, 2, 32767, -32768);
|
991
|
+
}
|
992
|
+
this[offset] = value & 255;
|
993
|
+
this[offset + 1] = value >>> 8;
|
994
|
+
return offset + 2;
|
995
|
+
}
|
996
|
+
/**
|
997
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 16-bit
|
998
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 16-bit integer.
|
999
|
+
*
|
1000
|
+
* @param value Number to write.
|
1001
|
+
* @param offset Number of bytes to skip before starting to write.
|
1002
|
+
* @param noAssert
|
1003
|
+
* @returns `offset` plus the number of bytes written.
|
1004
|
+
*/
|
1005
|
+
writeInt16BE(value, offset, noAssert) {
|
1006
|
+
value = +value;
|
1007
|
+
offset = offset >>> 0;
|
1008
|
+
if (!noAssert) {
|
1009
|
+
Buffer._checkInt(this, value, offset, 2, 32767, -32768);
|
1010
|
+
}
|
1011
|
+
this[offset] = value >>> 8;
|
1012
|
+
this[offset + 1] = value & 255;
|
1013
|
+
return offset + 2;
|
1014
|
+
}
|
1015
|
+
/**
|
1016
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 32-bit
|
1017
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 32-bit integer.
|
1018
|
+
*
|
1019
|
+
* @param value Number to write.
|
1020
|
+
* @param offset Number of bytes to skip before starting to write.
|
1021
|
+
* @param noAssert
|
1022
|
+
* @returns `offset` plus the number of bytes written.
|
1023
|
+
*/
|
1024
|
+
writeInt32LE(value, offset, noAssert) {
|
1025
|
+
value = +value;
|
1026
|
+
offset = offset >>> 0;
|
1027
|
+
if (!noAssert) {
|
1028
|
+
Buffer._checkInt(this, value, offset, 4, 2147483647, -2147483648);
|
1029
|
+
}
|
1030
|
+
this[offset] = value & 255;
|
1031
|
+
this[offset + 1] = value >>> 8;
|
1032
|
+
this[offset + 2] = value >>> 16;
|
1033
|
+
this[offset + 3] = value >>> 24;
|
1034
|
+
return offset + 4;
|
1035
|
+
}
|
1036
|
+
/**
|
1037
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 32-bit
|
1038
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 32-bit integer.
|
1039
|
+
*
|
1040
|
+
* @param value Number to write.
|
1041
|
+
* @param offset Number of bytes to skip before starting to write.
|
1042
|
+
* @param noAssert
|
1043
|
+
* @returns `offset` plus the number of bytes written.
|
1044
|
+
*/
|
1045
|
+
writeInt32BE(value, offset, noAssert) {
|
1046
|
+
value = +value;
|
1047
|
+
offset = offset >>> 0;
|
1048
|
+
if (!noAssert) {
|
1049
|
+
Buffer._checkInt(this, value, offset, 4, 2147483647, -2147483648);
|
1050
|
+
}
|
1051
|
+
if (value < 0) {
|
1052
|
+
value = 4294967295 + value + 1;
|
1053
|
+
}
|
1054
|
+
this[offset] = value >>> 24;
|
1055
|
+
this[offset + 1] = value >>> 16;
|
1056
|
+
this[offset + 2] = value >>> 8;
|
1057
|
+
this[offset + 3] = value & 255;
|
1058
|
+
return offset + 4;
|
1059
|
+
}
|
1060
|
+
/**
|
1061
|
+
* Fills `buf` with the specified `value`. If the `offset` and `end` are not given, the entire `buf` will be
|
1062
|
+
* filled. The `value` is coerced to a `uint32` value if it is not a string, `Buffer`, or integer. If the resulting
|
1063
|
+
* integer is greater than `255` (decimal), then `buf` will be filled with `value & 255`.
|
1064
|
+
*
|
1065
|
+
* If the final write of a `fill()` operation falls on a multi-byte character, then only the bytes of that
|
1066
|
+
* character that fit into `buf` are written.
|
1067
|
+
*
|
1068
|
+
* If `value` contains invalid characters, it is truncated; if no valid fill data remains, an exception is thrown.
|
1069
|
+
*
|
1070
|
+
* @param value
|
1071
|
+
* @param encoding
|
1072
|
+
*/
|
1073
|
+
fill(value, offset, end, encoding) {
|
1074
|
+
if (typeof value === "string") {
|
1075
|
+
if (typeof offset === "string") {
|
1076
|
+
encoding = offset;
|
1077
|
+
offset = 0;
|
1078
|
+
end = this.length;
|
1079
|
+
} else if (typeof end === "string") {
|
1080
|
+
encoding = end;
|
1081
|
+
end = this.length;
|
1082
|
+
}
|
1083
|
+
if (encoding !== void 0 && typeof encoding !== "string") {
|
1084
|
+
throw new TypeError("encoding must be a string");
|
1085
|
+
}
|
1086
|
+
if (typeof encoding === "string" && !Buffer.isEncoding(encoding)) {
|
1087
|
+
throw new TypeError("Unknown encoding: " + encoding);
|
1088
|
+
}
|
1089
|
+
if (value.length === 1) {
|
1090
|
+
const code = value.charCodeAt(0);
|
1091
|
+
if (encoding === "utf8" && code < 128) {
|
1092
|
+
value = code;
|
1093
|
+
}
|
1094
|
+
}
|
1095
|
+
} else if (typeof value === "number") {
|
1096
|
+
value = value & 255;
|
1097
|
+
} else if (typeof value === "boolean") {
|
1098
|
+
value = Number(value);
|
1099
|
+
}
|
1100
|
+
offset ?? (offset = 0);
|
1101
|
+
end ?? (end = this.length);
|
1102
|
+
if (offset < 0 || this.length < offset || this.length < end) {
|
1103
|
+
throw new RangeError("Out of range index");
|
1104
|
+
}
|
1105
|
+
if (end <= offset) {
|
1106
|
+
return this;
|
1107
|
+
}
|
1108
|
+
offset = offset >>> 0;
|
1109
|
+
end = end === void 0 ? this.length : end >>> 0;
|
1110
|
+
value || (value = 0);
|
1111
|
+
let i;
|
1112
|
+
if (typeof value === "number") {
|
1113
|
+
for (i = offset; i < end; ++i) {
|
1114
|
+
this[i] = value;
|
1115
|
+
}
|
1116
|
+
} else {
|
1117
|
+
const bytes = Buffer.isBuffer(value) ? value : Buffer.from(value, encoding);
|
1118
|
+
const len = bytes.length;
|
1119
|
+
if (len === 0) {
|
1120
|
+
throw new TypeError('The value "' + value + '" is invalid for argument "value"');
|
1121
|
+
}
|
1122
|
+
for (i = 0; i < end - offset; ++i) {
|
1123
|
+
this[i + offset] = bytes[i % len];
|
1124
|
+
}
|
1125
|
+
}
|
1126
|
+
return this;
|
1127
|
+
}
|
1128
|
+
/**
|
1129
|
+
* Returns the index of the specified value.
|
1130
|
+
*
|
1131
|
+
* If `value` is:
|
1132
|
+
* - a string, `value` is interpreted according to the character encoding in `encoding`.
|
1133
|
+
* - a `Buffer` or `Uint8Array`, `value` will be used in its entirety. To compare a partial Buffer, use `slice()`.
|
1134
|
+
* - a number, `value` will be interpreted as an unsigned 8-bit integer value between `0` and `255`.
|
1135
|
+
*
|
1136
|
+
* Any other types will throw a `TypeError`.
|
1137
|
+
*
|
1138
|
+
* @param value What to search for.
|
1139
|
+
* @param byteOffset Where to begin searching in `buf`. If negative, then calculated from the end.
|
1140
|
+
* @param encoding If `value` is a string, this is the encoding used to search.
|
1141
|
+
* @returns The index of the first occurrence of `value` in `buf`, or `-1` if not found.
|
1142
|
+
*/
|
1143
|
+
indexOf(value, byteOffset, encoding) {
|
1144
|
+
return this._bidirectionalIndexOf(this, value, byteOffset, encoding, true);
|
1145
|
+
}
|
1146
|
+
/**
|
1147
|
+
* Gets the last index of the specified value.
|
1148
|
+
*
|
1149
|
+
* @see indexOf()
|
1150
|
+
* @param value
|
1151
|
+
* @param byteOffset
|
1152
|
+
* @param encoding
|
1153
|
+
*/
|
1154
|
+
lastIndexOf(value, byteOffset, encoding) {
|
1155
|
+
return this._bidirectionalIndexOf(this, value, byteOffset, encoding, false);
|
1156
|
+
}
|
1157
|
+
_bidirectionalIndexOf(buffer, val, byteOffset, encoding, dir) {
|
1158
|
+
if (buffer.length === 0) {
|
1159
|
+
return -1;
|
1160
|
+
}
|
1161
|
+
if (typeof byteOffset === "string") {
|
1162
|
+
encoding = byteOffset;
|
1163
|
+
byteOffset = 0;
|
1164
|
+
} else if (typeof byteOffset === "undefined") {
|
1165
|
+
byteOffset = 0;
|
1166
|
+
} else if (byteOffset > 2147483647) {
|
1167
|
+
byteOffset = 2147483647;
|
1168
|
+
} else if (byteOffset < -2147483648) {
|
1169
|
+
byteOffset = -2147483648;
|
1170
|
+
}
|
1171
|
+
byteOffset = +byteOffset;
|
1172
|
+
if (byteOffset !== byteOffset) {
|
1173
|
+
byteOffset = dir ? 0 : buffer.length - 1;
|
1174
|
+
}
|
1175
|
+
if (byteOffset < 0) {
|
1176
|
+
byteOffset = buffer.length + byteOffset;
|
1177
|
+
}
|
1178
|
+
if (byteOffset >= buffer.length) {
|
1179
|
+
if (dir) {
|
1180
|
+
return -1;
|
1181
|
+
} else {
|
1182
|
+
byteOffset = buffer.length - 1;
|
1183
|
+
}
|
1184
|
+
} else if (byteOffset < 0) {
|
1185
|
+
if (dir) {
|
1186
|
+
byteOffset = 0;
|
1187
|
+
} else {
|
1188
|
+
return -1;
|
1189
|
+
}
|
1190
|
+
}
|
1191
|
+
if (typeof val === "string") {
|
1192
|
+
val = Buffer.from(val, encoding);
|
1193
|
+
}
|
1194
|
+
if (Buffer.isBuffer(val)) {
|
1195
|
+
if (val.length === 0) {
|
1196
|
+
return -1;
|
1197
|
+
}
|
1198
|
+
return Buffer._arrayIndexOf(buffer, val, byteOffset, encoding, dir);
|
1199
|
+
} else if (typeof val === "number") {
|
1200
|
+
val = val & 255;
|
1201
|
+
if (typeof Uint8Array.prototype.indexOf === "function") {
|
1202
|
+
if (dir) {
|
1203
|
+
return Uint8Array.prototype.indexOf.call(buffer, val, byteOffset);
|
1204
|
+
} else {
|
1205
|
+
return Uint8Array.prototype.lastIndexOf.call(buffer, val, byteOffset);
|
1206
|
+
}
|
1207
|
+
}
|
1208
|
+
return Buffer._arrayIndexOf(buffer, Buffer.from([val]), byteOffset, encoding, dir);
|
1209
|
+
}
|
1210
|
+
throw new TypeError("val must be string, number or Buffer");
|
1211
|
+
}
|
1212
|
+
/**
|
1213
|
+
* Equivalent to `buf.indexOf() !== -1`.
|
1214
|
+
*
|
1215
|
+
* @param value
|
1216
|
+
* @param byteOffset
|
1217
|
+
* @param encoding
|
1218
|
+
*/
|
1219
|
+
includes(value, byteOffset, encoding) {
|
1220
|
+
return this.indexOf(value, byteOffset, encoding) !== -1;
|
1221
|
+
}
|
1222
|
+
/**
|
1223
|
+
* Creates a new buffer from the given parameters.
|
1224
|
+
*
|
1225
|
+
* @param data
|
1226
|
+
* @param encoding
|
1227
|
+
*/
|
1228
|
+
static from(a, b, c) {
|
1229
|
+
return new Buffer(a, b, c);
|
1230
|
+
}
|
1231
|
+
/**
|
1232
|
+
* Returns true if `obj` is a Buffer.
|
1233
|
+
*
|
1234
|
+
* @param obj
|
1235
|
+
*/
|
1236
|
+
static isBuffer(obj) {
|
1237
|
+
return obj != null && obj !== Buffer.prototype && Buffer._isInstance(obj, Buffer);
|
1238
|
+
}
|
1239
|
+
/**
|
1240
|
+
* Returns true if `encoding` is a supported encoding.
|
1241
|
+
*
|
1242
|
+
* @param encoding
|
1243
|
+
*/
|
1244
|
+
static isEncoding(encoding) {
|
1245
|
+
switch (encoding.toLowerCase()) {
|
1246
|
+
case "hex":
|
1247
|
+
case "utf8":
|
1248
|
+
case "ascii":
|
1249
|
+
case "binary":
|
1250
|
+
case "latin1":
|
1251
|
+
case "ucs2":
|
1252
|
+
case "utf16le":
|
1253
|
+
case "base64":
|
1254
|
+
return true;
|
1255
|
+
default:
|
1256
|
+
return false;
|
1257
|
+
}
|
1258
|
+
}
|
1259
|
+
/**
|
1260
|
+
* Gives the actual byte length of a string for an encoding. This is not the same as `string.length` since that
|
1261
|
+
* returns the number of characters in the string.
|
1262
|
+
*
|
1263
|
+
* @param string The string to test.
|
1264
|
+
* @param encoding The encoding to use for calculation. Defaults is `utf8`.
|
1265
|
+
*/
|
1266
|
+
static byteLength(string, encoding) {
|
1267
|
+
if (Buffer.isBuffer(string)) {
|
1268
|
+
return string.length;
|
1269
|
+
}
|
1270
|
+
if (typeof string !== "string" && (ArrayBuffer.isView(string) || Buffer._isInstance(string, ArrayBuffer))) {
|
1271
|
+
return string.byteLength;
|
1272
|
+
}
|
1273
|
+
if (typeof string !== "string") {
|
1274
|
+
throw new TypeError(
|
1275
|
+
'The "string" argument must be one of type string, Buffer, or ArrayBuffer. Received type ' + typeof string
|
1276
|
+
);
|
1277
|
+
}
|
1278
|
+
const len = string.length;
|
1279
|
+
const mustMatch = arguments.length > 2 && arguments[2] === true;
|
1280
|
+
if (!mustMatch && len === 0) {
|
1281
|
+
return 0;
|
1282
|
+
}
|
1283
|
+
switch (encoding?.toLowerCase()) {
|
1284
|
+
case "ascii":
|
1285
|
+
case "latin1":
|
1286
|
+
case "binary":
|
1287
|
+
return len;
|
1288
|
+
case "utf8":
|
1289
|
+
return Buffer._utf8ToBytes(string).length;
|
1290
|
+
case "hex":
|
1291
|
+
return len >>> 1;
|
1292
|
+
case "ucs2":
|
1293
|
+
case "utf16le":
|
1294
|
+
return len * 2;
|
1295
|
+
case "base64":
|
1296
|
+
return Buffer._base64ToBytes(string).length;
|
1297
|
+
default:
|
1298
|
+
return mustMatch ? -1 : Buffer._utf8ToBytes(string).length;
|
1299
|
+
}
|
1300
|
+
}
|
1301
|
+
/**
|
1302
|
+
* Returns a Buffer which is the result of concatenating all the buffers in the list together.
|
1303
|
+
*
|
1304
|
+
* - If the list has no items, or if the `totalLength` is 0, then it returns a zero-length buffer.
|
1305
|
+
* - If the list has exactly one item, then the first item is returned.
|
1306
|
+
* - If the list has more than one item, then a new buffer is created.
|
1307
|
+
*
|
1308
|
+
* It is faster to provide the `totalLength` if it is known. However, it will be calculated if not provided at
|
1309
|
+
* a small computational expense.
|
1310
|
+
*
|
1311
|
+
* @param list An array of Buffer objects to concatenate.
|
1312
|
+
* @param totalLength Total length of the buffers when concatenated.
|
1313
|
+
*/
|
1314
|
+
static concat(list, totalLength) {
|
1315
|
+
if (!Array.isArray(list)) {
|
1316
|
+
throw new TypeError('"list" argument must be an Array of Buffers');
|
1317
|
+
}
|
1318
|
+
if (list.length === 0) {
|
1319
|
+
return Buffer.alloc(0);
|
1320
|
+
}
|
1321
|
+
let i;
|
1322
|
+
if (totalLength === void 0) {
|
1323
|
+
totalLength = 0;
|
1324
|
+
for (i = 0; i < list.length; ++i) {
|
1325
|
+
totalLength += list[i].length;
|
1326
|
+
}
|
1327
|
+
}
|
1328
|
+
const buffer = Buffer.allocUnsafe(totalLength);
|
1329
|
+
let pos = 0;
|
1330
|
+
for (i = 0; i < list.length; ++i) {
|
1331
|
+
let buf = list[i];
|
1332
|
+
if (Buffer._isInstance(buf, Uint8Array)) {
|
1333
|
+
if (pos + buf.length > buffer.length) {
|
1334
|
+
if (!Buffer.isBuffer(buf)) {
|
1335
|
+
buf = Buffer.from(buf);
|
1336
|
+
}
|
1337
|
+
buf.copy(buffer, pos);
|
1338
|
+
} else {
|
1339
|
+
Uint8Array.prototype.set.call(buffer, buf, pos);
|
1340
|
+
}
|
1341
|
+
} else if (!Buffer.isBuffer(buf)) {
|
1342
|
+
throw new TypeError('"list" argument must be an Array of Buffers');
|
1343
|
+
} else {
|
1344
|
+
buf.copy(buffer, pos);
|
1345
|
+
}
|
1346
|
+
pos += buf.length;
|
1347
|
+
}
|
1348
|
+
return buffer;
|
1349
|
+
}
|
1350
|
+
/**
|
1351
|
+
* The same as `buf1.compare(buf2)`.
|
1352
|
+
*/
|
1353
|
+
static compare(buf1, buf2) {
|
1354
|
+
if (Buffer._isInstance(buf1, Uint8Array)) {
|
1355
|
+
buf1 = Buffer.from(buf1, buf1.byteOffset, buf1.byteLength);
|
1356
|
+
}
|
1357
|
+
if (Buffer._isInstance(buf2, Uint8Array)) {
|
1358
|
+
buf2 = Buffer.from(buf2, buf2.byteOffset, buf2.byteLength);
|
1359
|
+
}
|
1360
|
+
if (!Buffer.isBuffer(buf1) || !Buffer.isBuffer(buf2)) {
|
1361
|
+
throw new TypeError('The "buf1", "buf2" arguments must be one of type Buffer or Uint8Array');
|
1362
|
+
}
|
1363
|
+
if (buf1 === buf2) {
|
1364
|
+
return 0;
|
1365
|
+
}
|
1366
|
+
let x = buf1.length;
|
1367
|
+
let y = buf2.length;
|
1368
|
+
for (let i = 0, len = Math.min(x, y); i < len; ++i) {
|
1369
|
+
if (buf1[i] !== buf2[i]) {
|
1370
|
+
x = buf1[i];
|
1371
|
+
y = buf2[i];
|
1372
|
+
break;
|
1373
|
+
}
|
1374
|
+
}
|
1375
|
+
if (x < y) {
|
1376
|
+
return -1;
|
1377
|
+
}
|
1378
|
+
if (y < x) {
|
1379
|
+
return 1;
|
1380
|
+
}
|
1381
|
+
return 0;
|
1382
|
+
}
|
1383
|
+
/**
|
1384
|
+
* Allocates a new buffer of `size` octets.
|
1385
|
+
*
|
1386
|
+
* @param size The number of octets to allocate.
|
1387
|
+
* @param fill If specified, the buffer will be initialized by calling `buf.fill(fill)`, or with zeroes otherwise.
|
1388
|
+
* @param encoding The encoding used for the call to `buf.fill()` while initializing.
|
1389
|
+
*/
|
1390
|
+
static alloc(size, fill, encoding) {
|
1391
|
+
if (typeof size !== "number") {
|
1392
|
+
throw new TypeError('"size" argument must be of type number');
|
1393
|
+
} else if (size < 0) {
|
1394
|
+
throw new RangeError('The value "' + size + '" is invalid for option "size"');
|
1395
|
+
}
|
1396
|
+
if (size <= 0) {
|
1397
|
+
return new Buffer(size);
|
1398
|
+
}
|
1399
|
+
if (fill !== void 0) {
|
1400
|
+
return typeof encoding === "string" ? new Buffer(size).fill(fill, 0, size, encoding) : new Buffer(size).fill(fill);
|
1401
|
+
}
|
1402
|
+
return new Buffer(size);
|
1403
|
+
}
|
1404
|
+
/**
|
1405
|
+
* Allocates a new buffer of `size` octets without initializing memory. The contents of the buffer are unknown.
|
1406
|
+
*
|
1407
|
+
* @param size
|
1408
|
+
*/
|
1409
|
+
static allocUnsafe(size) {
|
1410
|
+
if (typeof size !== "number") {
|
1411
|
+
throw new TypeError('"size" argument must be of type number');
|
1412
|
+
} else if (size < 0) {
|
1413
|
+
throw new RangeError('The value "' + size + '" is invalid for option "size"');
|
1414
|
+
}
|
1415
|
+
return new Buffer(size < 0 ? 0 : Buffer._checked(size) | 0);
|
1416
|
+
}
|
1417
|
+
/**
|
1418
|
+
* Returns true if the given `obj` is an instance of `type`.
|
1419
|
+
*
|
1420
|
+
* @param obj
|
1421
|
+
* @param type
|
1422
|
+
*/
|
1423
|
+
static _isInstance(obj, type) {
|
1424
|
+
return obj instanceof type || obj != null && obj.constructor != null && obj.constructor.name != null && obj.constructor.name === type.name;
|
1425
|
+
}
|
1426
|
+
static _checked(length) {
|
1427
|
+
if (length >= K_MAX_LENGTH) {
|
1428
|
+
throw new RangeError(
|
1429
|
+
"Attempt to allocate Buffer larger than maximum size: 0x" + K_MAX_LENGTH.toString(16) + " bytes"
|
1430
|
+
);
|
1431
|
+
}
|
1432
|
+
return length | 0;
|
1433
|
+
}
|
1434
|
+
static _blitBuffer(src, dst, offset, length) {
|
1435
|
+
let i;
|
1436
|
+
for (i = 0; i < length; ++i) {
|
1437
|
+
if (i + offset >= dst.length || i >= src.length) {
|
1438
|
+
break;
|
1439
|
+
}
|
1440
|
+
dst[i + offset] = src[i];
|
1441
|
+
}
|
1442
|
+
return i;
|
1443
|
+
}
|
1444
|
+
static _utf8Write(buf, string, offset, length) {
|
1445
|
+
return Buffer._blitBuffer(Buffer._utf8ToBytes(string, buf.length - offset), buf, offset, length);
|
1446
|
+
}
|
1447
|
+
static _asciiWrite(buf, string, offset, length) {
|
1448
|
+
return Buffer._blitBuffer(Buffer._asciiToBytes(string), buf, offset, length);
|
1449
|
+
}
|
1450
|
+
static _base64Write(buf, string, offset, length) {
|
1451
|
+
return Buffer._blitBuffer(Buffer._base64ToBytes(string), buf, offset, length);
|
1452
|
+
}
|
1453
|
+
static _ucs2Write(buf, string, offset, length) {
|
1454
|
+
return Buffer._blitBuffer(Buffer._utf16leToBytes(string, buf.length - offset), buf, offset, length);
|
1455
|
+
}
|
1456
|
+
static _hexWrite(buf, string, offset, length) {
|
1457
|
+
offset = Number(offset) || 0;
|
1458
|
+
const remaining = buf.length - offset;
|
1459
|
+
if (!length) {
|
1460
|
+
length = remaining;
|
1461
|
+
} else {
|
1462
|
+
length = Number(length);
|
1463
|
+
if (length > remaining) {
|
1464
|
+
length = remaining;
|
1465
|
+
}
|
1466
|
+
}
|
1467
|
+
const strLen = string.length;
|
1468
|
+
if (length > strLen / 2) {
|
1469
|
+
length = strLen / 2;
|
1470
|
+
}
|
1471
|
+
let i;
|
1472
|
+
for (i = 0; i < length; ++i) {
|
1473
|
+
const parsed = parseInt(string.substr(i * 2, 2), 16);
|
1474
|
+
if (parsed !== parsed) {
|
1475
|
+
return i;
|
1476
|
+
}
|
1477
|
+
buf[offset + i] = parsed;
|
1478
|
+
}
|
1479
|
+
return i;
|
1480
|
+
}
|
1481
|
+
static _utf8ToBytes(string, units) {
|
1482
|
+
units = units || Infinity;
|
1483
|
+
const length = string.length;
|
1484
|
+
const bytes = [];
|
1485
|
+
let codePoint;
|
1486
|
+
let leadSurrogate = null;
|
1487
|
+
for (let i = 0; i < length; ++i) {
|
1488
|
+
codePoint = string.charCodeAt(i);
|
1489
|
+
if (codePoint > 55295 && codePoint < 57344) {
|
1490
|
+
if (!leadSurrogate) {
|
1491
|
+
if (codePoint > 56319) {
|
1492
|
+
if ((units -= 3) > -1) {
|
1493
|
+
bytes.push(239, 191, 189);
|
1494
|
+
}
|
1495
|
+
continue;
|
1496
|
+
} else if (i + 1 === length) {
|
1497
|
+
if ((units -= 3) > -1) {
|
1498
|
+
bytes.push(239, 191, 189);
|
1499
|
+
}
|
1500
|
+
continue;
|
1501
|
+
}
|
1502
|
+
leadSurrogate = codePoint;
|
1503
|
+
continue;
|
1504
|
+
}
|
1505
|
+
if (codePoint < 56320) {
|
1506
|
+
if ((units -= 3) > -1) {
|
1507
|
+
bytes.push(239, 191, 189);
|
1508
|
+
}
|
1509
|
+
leadSurrogate = codePoint;
|
1510
|
+
continue;
|
1511
|
+
}
|
1512
|
+
codePoint = (leadSurrogate - 55296 << 10 | codePoint - 56320) + 65536;
|
1513
|
+
} else if (leadSurrogate) {
|
1514
|
+
if ((units -= 3) > -1) {
|
1515
|
+
bytes.push(239, 191, 189);
|
1516
|
+
}
|
1517
|
+
}
|
1518
|
+
leadSurrogate = null;
|
1519
|
+
if (codePoint < 128) {
|
1520
|
+
if ((units -= 1) < 0) {
|
1521
|
+
break;
|
1522
|
+
}
|
1523
|
+
bytes.push(codePoint);
|
1524
|
+
} else if (codePoint < 2048) {
|
1525
|
+
if ((units -= 2) < 0) {
|
1526
|
+
break;
|
1527
|
+
}
|
1528
|
+
bytes.push(codePoint >> 6 | 192, codePoint & 63 | 128);
|
1529
|
+
} else if (codePoint < 65536) {
|
1530
|
+
if ((units -= 3) < 0) {
|
1531
|
+
break;
|
1532
|
+
}
|
1533
|
+
bytes.push(codePoint >> 12 | 224, codePoint >> 6 & 63 | 128, codePoint & 63 | 128);
|
1534
|
+
} else if (codePoint < 1114112) {
|
1535
|
+
if ((units -= 4) < 0) {
|
1536
|
+
break;
|
1537
|
+
}
|
1538
|
+
bytes.push(
|
1539
|
+
codePoint >> 18 | 240,
|
1540
|
+
codePoint >> 12 & 63 | 128,
|
1541
|
+
codePoint >> 6 & 63 | 128,
|
1542
|
+
codePoint & 63 | 128
|
1543
|
+
);
|
1544
|
+
} else {
|
1545
|
+
throw new Error("Invalid code point");
|
1546
|
+
}
|
1547
|
+
}
|
1548
|
+
return bytes;
|
1549
|
+
}
|
1550
|
+
static _base64ToBytes(str) {
|
1551
|
+
return toByteArray(base64clean(str));
|
1552
|
+
}
|
1553
|
+
static _asciiToBytes(str) {
|
1554
|
+
const byteArray = [];
|
1555
|
+
for (let i = 0; i < str.length; ++i) {
|
1556
|
+
byteArray.push(str.charCodeAt(i) & 255);
|
1557
|
+
}
|
1558
|
+
return byteArray;
|
1559
|
+
}
|
1560
|
+
static _utf16leToBytes(str, units) {
|
1561
|
+
let c, hi, lo;
|
1562
|
+
const byteArray = [];
|
1563
|
+
for (let i = 0; i < str.length; ++i) {
|
1564
|
+
if ((units -= 2) < 0) break;
|
1565
|
+
c = str.charCodeAt(i);
|
1566
|
+
hi = c >> 8;
|
1567
|
+
lo = c % 256;
|
1568
|
+
byteArray.push(lo);
|
1569
|
+
byteArray.push(hi);
|
1570
|
+
}
|
1571
|
+
return byteArray;
|
1572
|
+
}
|
1573
|
+
static _hexSlice(buf, start, end) {
|
1574
|
+
const len = buf.length;
|
1575
|
+
if (!start || start < 0) {
|
1576
|
+
start = 0;
|
1577
|
+
}
|
1578
|
+
if (!end || end < 0 || end > len) {
|
1579
|
+
end = len;
|
1580
|
+
}
|
1581
|
+
let out = "";
|
1582
|
+
for (let i = start; i < end; ++i) {
|
1583
|
+
out += hexSliceLookupTable[buf[i]];
|
1584
|
+
}
|
1585
|
+
return out;
|
1586
|
+
}
|
1587
|
+
static _base64Slice(buf, start, end) {
|
1588
|
+
if (start === 0 && end === buf.length) {
|
1589
|
+
return fromByteArray(buf);
|
1590
|
+
} else {
|
1591
|
+
return fromByteArray(buf.slice(start, end));
|
1592
|
+
}
|
1593
|
+
}
|
1594
|
+
static _utf8Slice(buf, start, end) {
|
1595
|
+
end = Math.min(buf.length, end);
|
1596
|
+
const res = [];
|
1597
|
+
let i = start;
|
1598
|
+
while (i < end) {
|
1599
|
+
const firstByte = buf[i];
|
1600
|
+
let codePoint = null;
|
1601
|
+
let bytesPerSequence = firstByte > 239 ? 4 : firstByte > 223 ? 3 : firstByte > 191 ? 2 : 1;
|
1602
|
+
if (i + bytesPerSequence <= end) {
|
1603
|
+
let secondByte, thirdByte, fourthByte, tempCodePoint;
|
1604
|
+
switch (bytesPerSequence) {
|
1605
|
+
case 1:
|
1606
|
+
if (firstByte < 128) {
|
1607
|
+
codePoint = firstByte;
|
1608
|
+
}
|
1609
|
+
break;
|
1610
|
+
case 2:
|
1611
|
+
secondByte = buf[i + 1];
|
1612
|
+
if ((secondByte & 192) === 128) {
|
1613
|
+
tempCodePoint = (firstByte & 31) << 6 | secondByte & 63;
|
1614
|
+
if (tempCodePoint > 127) {
|
1615
|
+
codePoint = tempCodePoint;
|
1616
|
+
}
|
1617
|
+
}
|
1618
|
+
break;
|
1619
|
+
case 3:
|
1620
|
+
secondByte = buf[i + 1];
|
1621
|
+
thirdByte = buf[i + 2];
|
1622
|
+
if ((secondByte & 192) === 128 && (thirdByte & 192) === 128) {
|
1623
|
+
tempCodePoint = (firstByte & 15) << 12 | (secondByte & 63) << 6 | thirdByte & 63;
|
1624
|
+
if (tempCodePoint > 2047 && (tempCodePoint < 55296 || tempCodePoint > 57343)) {
|
1625
|
+
codePoint = tempCodePoint;
|
1626
|
+
}
|
1627
|
+
}
|
1628
|
+
break;
|
1629
|
+
case 4:
|
1630
|
+
secondByte = buf[i + 1];
|
1631
|
+
thirdByte = buf[i + 2];
|
1632
|
+
fourthByte = buf[i + 3];
|
1633
|
+
if ((secondByte & 192) === 128 && (thirdByte & 192) === 128 && (fourthByte & 192) === 128) {
|
1634
|
+
tempCodePoint = (firstByte & 15) << 18 | (secondByte & 63) << 12 | (thirdByte & 63) << 6 | fourthByte & 63;
|
1635
|
+
if (tempCodePoint > 65535 && tempCodePoint < 1114112) {
|
1636
|
+
codePoint = tempCodePoint;
|
1637
|
+
}
|
1638
|
+
}
|
1639
|
+
}
|
1640
|
+
}
|
1641
|
+
if (codePoint === null) {
|
1642
|
+
codePoint = 65533;
|
1643
|
+
bytesPerSequence = 1;
|
1644
|
+
} else if (codePoint > 65535) {
|
1645
|
+
codePoint -= 65536;
|
1646
|
+
res.push(codePoint >>> 10 & 1023 | 55296);
|
1647
|
+
codePoint = 56320 | codePoint & 1023;
|
1648
|
+
}
|
1649
|
+
res.push(codePoint);
|
1650
|
+
i += bytesPerSequence;
|
1651
|
+
}
|
1652
|
+
return Buffer._decodeCodePointsArray(res);
|
1653
|
+
}
|
1654
|
+
static _decodeCodePointsArray(codePoints) {
|
1655
|
+
const len = codePoints.length;
|
1656
|
+
if (len <= MAX_ARGUMENTS_LENGTH) {
|
1657
|
+
return String.fromCharCode.apply(String, codePoints);
|
1658
|
+
}
|
1659
|
+
let res = "";
|
1660
|
+
let i = 0;
|
1661
|
+
while (i < len) {
|
1662
|
+
res += String.fromCharCode.apply(String, codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH));
|
1663
|
+
}
|
1664
|
+
return res;
|
1665
|
+
}
|
1666
|
+
static _asciiSlice(buf, start, end) {
|
1667
|
+
let ret = "";
|
1668
|
+
end = Math.min(buf.length, end);
|
1669
|
+
for (let i = start; i < end; ++i) {
|
1670
|
+
ret += String.fromCharCode(buf[i] & 127);
|
1671
|
+
}
|
1672
|
+
return ret;
|
1673
|
+
}
|
1674
|
+
static _latin1Slice(buf, start, end) {
|
1675
|
+
let ret = "";
|
1676
|
+
end = Math.min(buf.length, end);
|
1677
|
+
for (let i = start; i < end; ++i) {
|
1678
|
+
ret += String.fromCharCode(buf[i]);
|
1679
|
+
}
|
1680
|
+
return ret;
|
1681
|
+
}
|
1682
|
+
static _utf16leSlice(buf, start, end) {
|
1683
|
+
const bytes = buf.slice(start, end);
|
1684
|
+
let res = "";
|
1685
|
+
for (let i = 0; i < bytes.length - 1; i += 2) {
|
1686
|
+
res += String.fromCharCode(bytes[i] + bytes[i + 1] * 256);
|
1687
|
+
}
|
1688
|
+
return res;
|
1689
|
+
}
|
1690
|
+
static _arrayIndexOf(arr, val, byteOffset, encoding, dir) {
|
1691
|
+
let indexSize = 1;
|
1692
|
+
let arrLength = arr.length;
|
1693
|
+
let valLength = val.length;
|
1694
|
+
if (encoding !== void 0) {
|
1695
|
+
encoding = Buffer._getEncoding(encoding);
|
1696
|
+
if (encoding === "ucs2" || encoding === "utf16le") {
|
1697
|
+
if (arr.length < 2 || val.length < 2) {
|
1698
|
+
return -1;
|
1699
|
+
}
|
1700
|
+
indexSize = 2;
|
1701
|
+
arrLength /= 2;
|
1702
|
+
valLength /= 2;
|
1703
|
+
byteOffset /= 2;
|
1704
|
+
}
|
1705
|
+
}
|
1706
|
+
function read(buf, i2) {
|
1707
|
+
if (indexSize === 1) {
|
1708
|
+
return buf[i2];
|
1709
|
+
} else {
|
1710
|
+
return buf.readUInt16BE(i2 * indexSize);
|
1711
|
+
}
|
1712
|
+
}
|
1713
|
+
let i;
|
1714
|
+
if (dir) {
|
1715
|
+
let foundIndex = -1;
|
1716
|
+
for (i = byteOffset; i < arrLength; i++) {
|
1717
|
+
if (read(arr, i) === read(val, foundIndex === -1 ? 0 : i - foundIndex)) {
|
1718
|
+
if (foundIndex === -1) foundIndex = i;
|
1719
|
+
if (i - foundIndex + 1 === valLength) return foundIndex * indexSize;
|
1720
|
+
} else {
|
1721
|
+
if (foundIndex !== -1) i -= i - foundIndex;
|
1722
|
+
foundIndex = -1;
|
1723
|
+
}
|
1724
|
+
}
|
1725
|
+
} else {
|
1726
|
+
if (byteOffset + valLength > arrLength) {
|
1727
|
+
byteOffset = arrLength - valLength;
|
1728
|
+
}
|
1729
|
+
for (i = byteOffset; i >= 0; i--) {
|
1730
|
+
let found = true;
|
1731
|
+
for (let j = 0; j < valLength; j++) {
|
1732
|
+
if (read(arr, i + j) !== read(val, j)) {
|
1733
|
+
found = false;
|
1734
|
+
break;
|
1735
|
+
}
|
1736
|
+
}
|
1737
|
+
if (found) {
|
1738
|
+
return i;
|
1739
|
+
}
|
1740
|
+
}
|
1741
|
+
}
|
1742
|
+
return -1;
|
1743
|
+
}
|
1744
|
+
static _checkOffset(offset, ext, length) {
|
1745
|
+
if (offset % 1 !== 0 || offset < 0) throw new RangeError("offset is not uint");
|
1746
|
+
if (offset + ext > length) throw new RangeError("Trying to access beyond buffer length");
|
1747
|
+
}
|
1748
|
+
static _checkInt(buf, value, offset, ext, max, min) {
|
1749
|
+
if (!Buffer.isBuffer(buf)) throw new TypeError('"buffer" argument must be a Buffer instance');
|
1750
|
+
if (value > max || value < min) throw new RangeError('"value" argument is out of bounds');
|
1751
|
+
if (offset + ext > buf.length) throw new RangeError("Index out of range");
|
1752
|
+
}
|
1753
|
+
static _getEncoding(encoding) {
|
1754
|
+
let toLowerCase = false;
|
1755
|
+
let originalEncoding = "";
|
1756
|
+
for (; ; ) {
|
1757
|
+
switch (encoding) {
|
1758
|
+
case "hex":
|
1759
|
+
return "hex";
|
1760
|
+
case "utf8":
|
1761
|
+
return "utf8";
|
1762
|
+
case "ascii":
|
1763
|
+
return "ascii";
|
1764
|
+
case "binary":
|
1765
|
+
return "binary";
|
1766
|
+
case "latin1":
|
1767
|
+
return "latin1";
|
1768
|
+
case "ucs2":
|
1769
|
+
return "ucs2";
|
1770
|
+
case "utf16le":
|
1771
|
+
return "utf16le";
|
1772
|
+
case "base64":
|
1773
|
+
return "base64";
|
1774
|
+
default: {
|
1775
|
+
if (toLowerCase) {
|
1776
|
+
throw new TypeError("Unknown or unsupported encoding: " + originalEncoding);
|
1777
|
+
}
|
1778
|
+
toLowerCase = true;
|
1779
|
+
originalEncoding = encoding;
|
1780
|
+
encoding = encoding.toLowerCase();
|
1781
|
+
}
|
1782
|
+
}
|
1783
|
+
}
|
1784
|
+
}
|
1785
|
+
}
|
1786
|
+
const hexSliceLookupTable = function() {
|
1787
|
+
const alphabet = "0123456789abcdef";
|
1788
|
+
const table = new Array(256);
|
1789
|
+
for (let i = 0; i < 16; ++i) {
|
1790
|
+
const i16 = i * 16;
|
1791
|
+
for (let j = 0; j < 16; ++j) {
|
1792
|
+
table[i16 + j] = alphabet[i] + alphabet[j];
|
1793
|
+
}
|
1794
|
+
}
|
1795
|
+
return table;
|
1796
|
+
}();
|
1797
|
+
const INVALID_BASE64_RE = /[^+/0-9A-Za-z-_]/g;
|
1798
|
+
function base64clean(str) {
|
1799
|
+
str = str.split("=")[0];
|
1800
|
+
str = str.trim().replace(INVALID_BASE64_RE, "");
|
1801
|
+
if (str.length < 2) return "";
|
1802
|
+
while (str.length % 4 !== 0) {
|
1803
|
+
str = str + "=";
|
1804
|
+
}
|
1805
|
+
return str;
|
1806
|
+
}
|
1807
|
+
|
25
1808
|
function notEmpty(value) {
|
26
1809
|
return value !== null && value !== void 0;
|
27
1810
|
}
|
@@ -116,155 +1899,15 @@ function promiseMap(inputValues, mapper) {
|
|
116
1899
|
return inputValues.reduce(reducer, Promise.resolve([]));
|
117
1900
|
}
|
118
1901
|
|
119
|
-
|
120
|
-
|
121
|
-
if (isDefined(process) && isDefined(process.env)) {
|
122
|
-
return {
|
123
|
-
apiKey: process.env.XATA_API_KEY ?? getGlobalApiKey(),
|
124
|
-
databaseURL: process.env.XATA_DATABASE_URL ?? getGlobalDatabaseURL(),
|
125
|
-
branch: process.env.XATA_BRANCH ?? getGlobalBranch(),
|
126
|
-
deployPreview: process.env.XATA_PREVIEW,
|
127
|
-
deployPreviewBranch: process.env.XATA_PREVIEW_BRANCH,
|
128
|
-
vercelGitCommitRef: process.env.VERCEL_GIT_COMMIT_REF,
|
129
|
-
vercelGitRepoOwner: process.env.VERCEL_GIT_REPO_OWNER
|
130
|
-
};
|
131
|
-
}
|
132
|
-
} catch (err) {
|
133
|
-
}
|
134
|
-
try {
|
135
|
-
if (isObject(Deno) && isObject(Deno.env)) {
|
136
|
-
return {
|
137
|
-
apiKey: Deno.env.get("XATA_API_KEY") ?? getGlobalApiKey(),
|
138
|
-
databaseURL: Deno.env.get("XATA_DATABASE_URL") ?? getGlobalDatabaseURL(),
|
139
|
-
branch: Deno.env.get("XATA_BRANCH") ?? getGlobalBranch(),
|
140
|
-
deployPreview: Deno.env.get("XATA_PREVIEW"),
|
141
|
-
deployPreviewBranch: Deno.env.get("XATA_PREVIEW_BRANCH"),
|
142
|
-
vercelGitCommitRef: Deno.env.get("VERCEL_GIT_COMMIT_REF"),
|
143
|
-
vercelGitRepoOwner: Deno.env.get("VERCEL_GIT_REPO_OWNER")
|
144
|
-
};
|
145
|
-
}
|
146
|
-
} catch (err) {
|
147
|
-
}
|
148
|
-
return {
|
149
|
-
apiKey: getGlobalApiKey(),
|
150
|
-
databaseURL: getGlobalDatabaseURL(),
|
151
|
-
branch: getGlobalBranch(),
|
152
|
-
deployPreview: void 0,
|
153
|
-
deployPreviewBranch: void 0,
|
154
|
-
vercelGitCommitRef: void 0,
|
155
|
-
vercelGitRepoOwner: void 0
|
156
|
-
};
|
157
|
-
}
|
158
|
-
function getEnableBrowserVariable() {
|
159
|
-
try {
|
160
|
-
if (isObject(process) && isObject(process.env) && process.env.XATA_ENABLE_BROWSER !== void 0) {
|
161
|
-
return process.env.XATA_ENABLE_BROWSER === "true";
|
162
|
-
}
|
163
|
-
} catch (err) {
|
164
|
-
}
|
165
|
-
try {
|
166
|
-
if (isObject(Deno) && isObject(Deno.env) && Deno.env.get("XATA_ENABLE_BROWSER") !== void 0) {
|
167
|
-
return Deno.env.get("XATA_ENABLE_BROWSER") === "true";
|
168
|
-
}
|
169
|
-
} catch (err) {
|
170
|
-
}
|
171
|
-
try {
|
172
|
-
return XATA_ENABLE_BROWSER === true || XATA_ENABLE_BROWSER === "true";
|
173
|
-
} catch (err) {
|
174
|
-
return void 0;
|
175
|
-
}
|
176
|
-
}
|
177
|
-
function getGlobalApiKey() {
|
178
|
-
try {
|
179
|
-
return XATA_API_KEY;
|
180
|
-
} catch (err) {
|
181
|
-
return void 0;
|
182
|
-
}
|
183
|
-
}
|
184
|
-
function getGlobalDatabaseURL() {
|
185
|
-
try {
|
186
|
-
return XATA_DATABASE_URL;
|
187
|
-
} catch (err) {
|
188
|
-
return void 0;
|
189
|
-
}
|
190
|
-
}
|
191
|
-
function getGlobalBranch() {
|
192
|
-
try {
|
193
|
-
return XATA_BRANCH;
|
194
|
-
} catch (err) {
|
195
|
-
return void 0;
|
196
|
-
}
|
197
|
-
}
|
198
|
-
function getDatabaseURL() {
|
199
|
-
try {
|
200
|
-
const { databaseURL } = getEnvironment();
|
201
|
-
return databaseURL;
|
202
|
-
} catch (err) {
|
203
|
-
return void 0;
|
204
|
-
}
|
205
|
-
}
|
206
|
-
function getAPIKey() {
|
207
|
-
try {
|
208
|
-
const { apiKey } = getEnvironment();
|
209
|
-
return apiKey;
|
210
|
-
} catch (err) {
|
211
|
-
return void 0;
|
212
|
-
}
|
213
|
-
}
|
214
|
-
function getBranch() {
|
215
|
-
try {
|
216
|
-
const { branch } = getEnvironment();
|
217
|
-
return branch;
|
218
|
-
} catch (err) {
|
219
|
-
return void 0;
|
220
|
-
}
|
221
|
-
}
|
222
|
-
function buildPreviewBranchName({ org, branch }) {
|
223
|
-
return `preview-${org}-${branch}`;
|
224
|
-
}
|
225
|
-
function getPreviewBranch() {
|
226
|
-
try {
|
227
|
-
const { deployPreview, deployPreviewBranch, vercelGitCommitRef, vercelGitRepoOwner } = getEnvironment();
|
228
|
-
if (deployPreviewBranch)
|
229
|
-
return deployPreviewBranch;
|
230
|
-
switch (deployPreview) {
|
231
|
-
case "vercel": {
|
232
|
-
if (!vercelGitCommitRef || !vercelGitRepoOwner) {
|
233
|
-
console.warn("XATA_PREVIEW=vercel but VERCEL_GIT_COMMIT_REF or VERCEL_GIT_REPO_OWNER is not valid");
|
234
|
-
return void 0;
|
235
|
-
}
|
236
|
-
return buildPreviewBranchName({ org: vercelGitRepoOwner, branch: vercelGitCommitRef });
|
237
|
-
}
|
238
|
-
}
|
239
|
-
return void 0;
|
240
|
-
} catch (err) {
|
241
|
-
return void 0;
|
242
|
-
}
|
243
|
-
}
|
244
|
-
|
245
|
-
var __accessCheck$6 = (obj, member, msg) => {
|
246
|
-
if (!member.has(obj))
|
247
|
-
throw TypeError("Cannot " + msg);
|
248
|
-
};
|
249
|
-
var __privateGet$5 = (obj, member, getter) => {
|
250
|
-
__accessCheck$6(obj, member, "read from private field");
|
251
|
-
return getter ? getter.call(obj) : member.get(obj);
|
252
|
-
};
|
253
|
-
var __privateAdd$6 = (obj, member, value) => {
|
254
|
-
if (member.has(obj))
|
255
|
-
throw TypeError("Cannot add the same private member more than once");
|
256
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
1902
|
+
var __typeError$6 = (msg) => {
|
1903
|
+
throw TypeError(msg);
|
257
1904
|
};
|
258
|
-
var
|
259
|
-
|
260
|
-
|
261
|
-
|
262
|
-
|
263
|
-
var
|
264
|
-
__accessCheck$6(obj, member, "access private method");
|
265
|
-
return method;
|
266
|
-
};
|
267
|
-
var _fetch, _queue, _concurrency, _enqueue, enqueue_fn;
|
1905
|
+
var __accessCheck$6 = (obj, member, msg) => member.has(obj) || __typeError$6("Cannot " + msg);
|
1906
|
+
var __privateGet$5 = (obj, member, getter) => (__accessCheck$6(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
1907
|
+
var __privateAdd$6 = (obj, member, value) => member.has(obj) ? __typeError$6("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
1908
|
+
var __privateSet$4 = (obj, member, value, setter) => (__accessCheck$6(obj, member, "write to private field"), member.set(obj, value), value);
|
1909
|
+
var __privateMethod$4 = (obj, member, method) => (__accessCheck$6(obj, member, "access private method"), method);
|
1910
|
+
var _fetch, _queue, _concurrency, _ApiRequestPool_instances, enqueue_fn;
|
268
1911
|
const REQUEST_TIMEOUT = 5 * 60 * 1e3;
|
269
1912
|
function getFetchImplementation(userFetch) {
|
270
1913
|
const globalFetch = typeof fetch !== "undefined" ? fetch : void 0;
|
@@ -277,10 +1920,10 @@ function getFetchImplementation(userFetch) {
|
|
277
1920
|
}
|
278
1921
|
class ApiRequestPool {
|
279
1922
|
constructor(concurrency = 10) {
|
280
|
-
__privateAdd$6(this,
|
281
|
-
__privateAdd$6(this, _fetch
|
282
|
-
__privateAdd$6(this, _queue
|
283
|
-
__privateAdd$6(this, _concurrency
|
1923
|
+
__privateAdd$6(this, _ApiRequestPool_instances);
|
1924
|
+
__privateAdd$6(this, _fetch);
|
1925
|
+
__privateAdd$6(this, _queue);
|
1926
|
+
__privateAdd$6(this, _concurrency);
|
284
1927
|
__privateSet$4(this, _queue, []);
|
285
1928
|
__privateSet$4(this, _concurrency, concurrency);
|
286
1929
|
this.running = 0;
|
@@ -315,7 +1958,7 @@ class ApiRequestPool {
|
|
315
1958
|
}
|
316
1959
|
return response;
|
317
1960
|
};
|
318
|
-
return __privateMethod$4(this,
|
1961
|
+
return __privateMethod$4(this, _ApiRequestPool_instances, enqueue_fn).call(this, async () => {
|
319
1962
|
return await runRequest();
|
320
1963
|
});
|
321
1964
|
}
|
@@ -323,7 +1966,7 @@ class ApiRequestPool {
|
|
323
1966
|
_fetch = new WeakMap();
|
324
1967
|
_queue = new WeakMap();
|
325
1968
|
_concurrency = new WeakMap();
|
326
|
-
|
1969
|
+
_ApiRequestPool_instances = new WeakSet();
|
327
1970
|
enqueue_fn = function(task) {
|
328
1971
|
const promise = new Promise((resolve) => __privateGet$5(this, _queue).push(resolve)).finally(() => {
|
329
1972
|
this.started--;
|
@@ -526,7 +2169,7 @@ function defaultOnOpen(response) {
|
|
526
2169
|
}
|
527
2170
|
}
|
528
2171
|
|
529
|
-
const VERSION = "0.29.
|
2172
|
+
const VERSION = "0.29.4";
|
530
2173
|
|
531
2174
|
class ErrorWithCause extends Error {
|
532
2175
|
constructor(message, options) {
|
@@ -606,35 +2249,30 @@ function parseProviderString(provider = "production") {
|
|
606
2249
|
return provider;
|
607
2250
|
}
|
608
2251
|
const [main, workspaces] = provider.split(",");
|
609
|
-
if (!main || !workspaces)
|
610
|
-
return null;
|
2252
|
+
if (!main || !workspaces) return null;
|
611
2253
|
return { main, workspaces };
|
612
2254
|
}
|
613
2255
|
function buildProviderString(provider) {
|
614
|
-
if (isHostProviderAlias(provider))
|
615
|
-
return provider;
|
2256
|
+
if (isHostProviderAlias(provider)) return provider;
|
616
2257
|
return `${provider.main},${provider.workspaces}`;
|
617
2258
|
}
|
618
2259
|
function parseWorkspacesUrlParts(url) {
|
619
|
-
if (!isString(url))
|
620
|
-
return null;
|
2260
|
+
if (!isString(url)) return null;
|
621
2261
|
const matches = {
|
622
2262
|
production: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.xata\.sh\/db\/([^:]+):?(.*)?/),
|
623
2263
|
staging: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.staging-xata\.dev\/db\/([^:]+):?(.*)?/),
|
624
2264
|
dev: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.dev-xata\.dev\/db\/([^:]+):?(.*)?/),
|
625
|
-
local: url.match(/(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:([^:]+):?(.*)?/)
|
2265
|
+
local: url.match(/(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:(?:\d+)\/db\/([^:]+):?(.*)?/)
|
626
2266
|
};
|
627
2267
|
const [host, match] = Object.entries(matches).find(([, match2]) => match2 !== null) ?? [];
|
628
|
-
if (!isHostProviderAlias(host) || !match)
|
629
|
-
return null;
|
2268
|
+
if (!isHostProviderAlias(host) || !match) return null;
|
630
2269
|
return { workspace: match[1], region: match[2], database: match[3], branch: match[4], host };
|
631
2270
|
}
|
632
2271
|
|
633
2272
|
const pool = new ApiRequestPool();
|
634
2273
|
const resolveUrl = (url, queryParams = {}, pathParams = {}) => {
|
635
2274
|
const cleanQueryParams = Object.entries(queryParams).reduce((acc, [key, value]) => {
|
636
|
-
if (value === void 0 || value === null)
|
637
|
-
return acc;
|
2275
|
+
if (value === void 0 || value === null) return acc;
|
638
2276
|
return { ...acc, [key]: value };
|
639
2277
|
}, {});
|
640
2278
|
const query = new URLSearchParams(cleanQueryParams).toString();
|
@@ -682,8 +2320,7 @@ function hostHeader(url) {
|
|
682
2320
|
return groups?.host ? { Host: groups.host } : {};
|
683
2321
|
}
|
684
2322
|
async function parseBody(body, headers) {
|
685
|
-
if (!isDefined(body))
|
686
|
-
return void 0;
|
2323
|
+
if (!isDefined(body)) return void 0;
|
687
2324
|
if (isBlob(body) || typeof body.text === "function") {
|
688
2325
|
return body;
|
689
2326
|
}
|
@@ -762,8 +2399,7 @@ async function fetch$1({
|
|
762
2399
|
[TraceAttributes.CLOUDFLARE_RAY_ID]: response.headers?.get("cf-ray") ?? void 0
|
763
2400
|
});
|
764
2401
|
const message = response.headers?.get("x-xata-message");
|
765
|
-
if (message)
|
766
|
-
console.warn(message);
|
2402
|
+
if (message) console.warn(message);
|
767
2403
|
if (response.status === 204) {
|
768
2404
|
return {};
|
769
2405
|
}
|
@@ -847,16 +2483,48 @@ function parseUrl(url) {
|
|
847
2483
|
|
848
2484
|
const dataPlaneFetch = async (options) => fetch$1({ ...options, endpoint: "dataPlane" });
|
849
2485
|
|
850
|
-
const applyMigration = (variables, signal) => dataPlaneFetch({
|
2486
|
+
const applyMigration = (variables, signal) => dataPlaneFetch({
|
2487
|
+
url: "/db/{dbBranchName}/migrations/apply",
|
2488
|
+
method: "post",
|
2489
|
+
...variables,
|
2490
|
+
signal
|
2491
|
+
});
|
2492
|
+
const startMigration = (variables, signal) => dataPlaneFetch({
|
2493
|
+
url: "/db/{dbBranchName}/migrations/start",
|
2494
|
+
method: "post",
|
2495
|
+
...variables,
|
2496
|
+
signal
|
2497
|
+
});
|
851
2498
|
const adaptTable = (variables, signal) => dataPlaneFetch({
|
852
2499
|
url: "/db/{dbBranchName}/migrations/adapt/{tableName}",
|
853
2500
|
method: "post",
|
854
2501
|
...variables,
|
855
2502
|
signal
|
856
2503
|
});
|
857
|
-
const
|
858
|
-
|
859
|
-
|
2504
|
+
const adaptAllTables = (variables, signal) => dataPlaneFetch({
|
2505
|
+
url: "/db/{dbBranchName}/migrations/adapt",
|
2506
|
+
method: "post",
|
2507
|
+
...variables,
|
2508
|
+
signal
|
2509
|
+
});
|
2510
|
+
const getBranchMigrationJobStatus = (variables, signal) => dataPlaneFetch({
|
2511
|
+
url: "/db/{dbBranchName}/migrations/status",
|
2512
|
+
method: "get",
|
2513
|
+
...variables,
|
2514
|
+
signal
|
2515
|
+
});
|
2516
|
+
const getMigrationJobStatus = (variables, signal) => dataPlaneFetch({
|
2517
|
+
url: "/db/{dbBranchName}/migrations/jobs/{jobId}",
|
2518
|
+
method: "get",
|
2519
|
+
...variables,
|
2520
|
+
signal
|
2521
|
+
});
|
2522
|
+
const getMigrationHistory = (variables, signal) => dataPlaneFetch({
|
2523
|
+
url: "/db/{dbBranchName}/migrations/history",
|
2524
|
+
method: "get",
|
2525
|
+
...variables,
|
2526
|
+
signal
|
2527
|
+
});
|
860
2528
|
const getBranchList = (variables, signal) => dataPlaneFetch({
|
861
2529
|
url: "/dbs/{dbName}",
|
862
2530
|
method: "get",
|
@@ -869,82 +2537,167 @@ const getDatabaseSettings = (variables, signal) => dataPlaneFetch({
|
|
869
2537
|
...variables,
|
870
2538
|
signal
|
871
2539
|
});
|
872
|
-
const updateDatabaseSettings = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/settings", method: "patch", ...variables, signal });
|
873
|
-
const getBranchDetails = (variables, signal) => dataPlaneFetch({
|
874
|
-
url: "/db/{dbBranchName}",
|
2540
|
+
const updateDatabaseSettings = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/settings", method: "patch", ...variables, signal });
|
2541
|
+
const getBranchDetails = (variables, signal) => dataPlaneFetch({
|
2542
|
+
url: "/db/{dbBranchName}",
|
2543
|
+
method: "get",
|
2544
|
+
...variables,
|
2545
|
+
signal
|
2546
|
+
});
|
2547
|
+
const createBranch = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}", method: "put", ...variables, signal });
|
2548
|
+
const deleteBranch = (variables, signal) => dataPlaneFetch({
|
2549
|
+
url: "/db/{dbBranchName}",
|
2550
|
+
method: "delete",
|
2551
|
+
...variables,
|
2552
|
+
signal
|
2553
|
+
});
|
2554
|
+
const getSchema = (variables, signal) => dataPlaneFetch({
|
2555
|
+
url: "/db/{dbBranchName}/schema",
|
2556
|
+
method: "get",
|
2557
|
+
...variables,
|
2558
|
+
signal
|
2559
|
+
});
|
2560
|
+
const copyBranch = (variables, signal) => dataPlaneFetch({
|
2561
|
+
url: "/db/{dbBranchName}/copy",
|
2562
|
+
method: "post",
|
2563
|
+
...variables,
|
2564
|
+
signal
|
2565
|
+
});
|
2566
|
+
const updateBranchMetadata = (variables, signal) => dataPlaneFetch({
|
2567
|
+
url: "/db/{dbBranchName}/metadata",
|
2568
|
+
method: "put",
|
2569
|
+
...variables,
|
2570
|
+
signal
|
2571
|
+
});
|
2572
|
+
const getBranchMetadata = (variables, signal) => dataPlaneFetch({
|
2573
|
+
url: "/db/{dbBranchName}/metadata",
|
2574
|
+
method: "get",
|
2575
|
+
...variables,
|
2576
|
+
signal
|
2577
|
+
});
|
2578
|
+
const getBranchStats = (variables, signal) => dataPlaneFetch({
|
2579
|
+
url: "/db/{dbBranchName}/stats",
|
2580
|
+
method: "get",
|
2581
|
+
...variables,
|
2582
|
+
signal
|
2583
|
+
});
|
2584
|
+
const getGitBranchesMapping = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "get", ...variables, signal });
|
2585
|
+
const addGitBranchesEntry = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "post", ...variables, signal });
|
2586
|
+
const removeGitBranchesEntry = (variables, signal) => dataPlaneFetch({
|
2587
|
+
url: "/dbs/{dbName}/gitBranches",
|
2588
|
+
method: "delete",
|
2589
|
+
...variables,
|
2590
|
+
signal
|
2591
|
+
});
|
2592
|
+
const resolveBranch = (variables, signal) => dataPlaneFetch({
|
2593
|
+
url: "/dbs/{dbName}/resolveBranch",
|
2594
|
+
method: "get",
|
2595
|
+
...variables,
|
2596
|
+
signal
|
2597
|
+
});
|
2598
|
+
const getBranchMigrationHistory = (variables, signal) => dataPlaneFetch({
|
2599
|
+
url: "/db/{dbBranchName}/migrations",
|
2600
|
+
method: "get",
|
2601
|
+
...variables,
|
2602
|
+
signal
|
2603
|
+
});
|
2604
|
+
const getBranchMigrationPlan = (variables, signal) => dataPlaneFetch({
|
2605
|
+
url: "/db/{dbBranchName}/migrations/plan",
|
2606
|
+
method: "post",
|
2607
|
+
...variables,
|
2608
|
+
signal
|
2609
|
+
});
|
2610
|
+
const executeBranchMigrationPlan = (variables, signal) => dataPlaneFetch({
|
2611
|
+
url: "/db/{dbBranchName}/migrations/execute",
|
2612
|
+
method: "post",
|
2613
|
+
...variables,
|
2614
|
+
signal
|
2615
|
+
});
|
2616
|
+
const queryMigrationRequests = (variables, signal) => dataPlaneFetch({
|
2617
|
+
url: "/dbs/{dbName}/migrations/query",
|
2618
|
+
method: "post",
|
2619
|
+
...variables,
|
2620
|
+
signal
|
2621
|
+
});
|
2622
|
+
const createMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations", method: "post", ...variables, signal });
|
2623
|
+
const getMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2624
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}",
|
2625
|
+
method: "get",
|
2626
|
+
...variables,
|
2627
|
+
signal
|
2628
|
+
});
|
2629
|
+
const updateMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2630
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}",
|
2631
|
+
method: "patch",
|
2632
|
+
...variables,
|
2633
|
+
signal
|
2634
|
+
});
|
2635
|
+
const listMigrationRequestsCommits = (variables, signal) => dataPlaneFetch({
|
2636
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/commits",
|
2637
|
+
method: "post",
|
2638
|
+
...variables,
|
2639
|
+
signal
|
2640
|
+
});
|
2641
|
+
const compareMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2642
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/compare",
|
2643
|
+
method: "post",
|
2644
|
+
...variables,
|
2645
|
+
signal
|
2646
|
+
});
|
2647
|
+
const getMigrationRequestIsMerged = (variables, signal) => dataPlaneFetch({
|
2648
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
|
875
2649
|
method: "get",
|
876
2650
|
...variables,
|
877
2651
|
signal
|
878
2652
|
});
|
879
|
-
const
|
880
|
-
|
881
|
-
|
882
|
-
method: "delete",
|
2653
|
+
const mergeMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2654
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
|
2655
|
+
method: "post",
|
883
2656
|
...variables,
|
884
2657
|
signal
|
885
2658
|
});
|
886
|
-
const
|
887
|
-
url: "/db/{dbBranchName}/schema",
|
888
|
-
method: "
|
2659
|
+
const getBranchSchemaHistory = (variables, signal) => dataPlaneFetch({
|
2660
|
+
url: "/db/{dbBranchName}/schema/history",
|
2661
|
+
method: "post",
|
889
2662
|
...variables,
|
890
2663
|
signal
|
891
2664
|
});
|
892
|
-
const
|
893
|
-
url: "/db/{dbBranchName}/
|
2665
|
+
const compareBranchWithUserSchema = (variables, signal) => dataPlaneFetch({
|
2666
|
+
url: "/db/{dbBranchName}/schema/compare",
|
894
2667
|
method: "post",
|
895
2668
|
...variables,
|
896
2669
|
signal
|
897
2670
|
});
|
898
|
-
const
|
899
|
-
url: "/db/{dbBranchName}/
|
900
|
-
method: "
|
2671
|
+
const compareBranchSchemas = (variables, signal) => dataPlaneFetch({
|
2672
|
+
url: "/db/{dbBranchName}/schema/compare/{branchName}",
|
2673
|
+
method: "post",
|
901
2674
|
...variables,
|
902
2675
|
signal
|
903
2676
|
});
|
904
|
-
const
|
905
|
-
url: "/db/{dbBranchName}/
|
906
|
-
method: "
|
2677
|
+
const updateBranchSchema = (variables, signal) => dataPlaneFetch({
|
2678
|
+
url: "/db/{dbBranchName}/schema/update",
|
2679
|
+
method: "post",
|
907
2680
|
...variables,
|
908
2681
|
signal
|
909
2682
|
});
|
910
|
-
const
|
911
|
-
url: "/db/{dbBranchName}/
|
912
|
-
method: "
|
2683
|
+
const previewBranchSchemaEdit = (variables, signal) => dataPlaneFetch({
|
2684
|
+
url: "/db/{dbBranchName}/schema/preview",
|
2685
|
+
method: "post",
|
913
2686
|
...variables,
|
914
2687
|
signal
|
915
2688
|
});
|
916
|
-
const
|
917
|
-
|
918
|
-
|
919
|
-
const resolveBranch = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/resolveBranch", method: "get", ...variables, signal });
|
920
|
-
const getBranchMigrationHistory = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations", method: "get", ...variables, signal });
|
921
|
-
const getBranchMigrationPlan = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/plan", method: "post", ...variables, signal });
|
922
|
-
const executeBranchMigrationPlan = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/execute", method: "post", ...variables, signal });
|
923
|
-
const queryMigrationRequests = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/query", method: "post", ...variables, signal });
|
924
|
-
const createMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations", method: "post", ...variables, signal });
|
925
|
-
const getMigrationRequest = (variables, signal) => dataPlaneFetch({
|
926
|
-
url: "/dbs/{dbName}/migrations/{mrNumber}",
|
927
|
-
method: "get",
|
2689
|
+
const applyBranchSchemaEdit = (variables, signal) => dataPlaneFetch({
|
2690
|
+
url: "/db/{dbBranchName}/schema/apply",
|
2691
|
+
method: "post",
|
928
2692
|
...variables,
|
929
2693
|
signal
|
930
2694
|
});
|
931
|
-
const
|
932
|
-
|
933
|
-
const compareMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/{mrNumber}/compare", method: "post", ...variables, signal });
|
934
|
-
const getMigrationRequestIsMerged = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/{mrNumber}/merge", method: "get", ...variables, signal });
|
935
|
-
const mergeMigrationRequest = (variables, signal) => dataPlaneFetch({
|
936
|
-
url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
|
2695
|
+
const pushBranchMigrations = (variables, signal) => dataPlaneFetch({
|
2696
|
+
url: "/db/{dbBranchName}/schema/push",
|
937
2697
|
method: "post",
|
938
2698
|
...variables,
|
939
2699
|
signal
|
940
2700
|
});
|
941
|
-
const getBranchSchemaHistory = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/history", method: "post", ...variables, signal });
|
942
|
-
const compareBranchWithUserSchema = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/compare", method: "post", ...variables, signal });
|
943
|
-
const compareBranchSchemas = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/compare/{branchName}", method: "post", ...variables, signal });
|
944
|
-
const updateBranchSchema = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/update", method: "post", ...variables, signal });
|
945
|
-
const previewBranchSchemaEdit = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/preview", method: "post", ...variables, signal });
|
946
|
-
const applyBranchSchemaEdit = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/apply", method: "post", ...variables, signal });
|
947
|
-
const pushBranchMigrations = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/push", method: "post", ...variables, signal });
|
948
2701
|
const createTable = (variables, signal) => dataPlaneFetch({
|
949
2702
|
url: "/db/{dbBranchName}/tables/{tableName}",
|
950
2703
|
method: "put",
|
@@ -957,14 +2710,24 @@ const deleteTable = (variables, signal) => dataPlaneFetch({
|
|
957
2710
|
...variables,
|
958
2711
|
signal
|
959
2712
|
});
|
960
|
-
const updateTable = (variables, signal) => dataPlaneFetch({
|
2713
|
+
const updateTable = (variables, signal) => dataPlaneFetch({
|
2714
|
+
url: "/db/{dbBranchName}/tables/{tableName}",
|
2715
|
+
method: "patch",
|
2716
|
+
...variables,
|
2717
|
+
signal
|
2718
|
+
});
|
961
2719
|
const getTableSchema = (variables, signal) => dataPlaneFetch({
|
962
2720
|
url: "/db/{dbBranchName}/tables/{tableName}/schema",
|
963
2721
|
method: "get",
|
964
2722
|
...variables,
|
965
2723
|
signal
|
966
2724
|
});
|
967
|
-
const setTableSchema = (variables, signal) => dataPlaneFetch({
|
2725
|
+
const setTableSchema = (variables, signal) => dataPlaneFetch({
|
2726
|
+
url: "/db/{dbBranchName}/tables/{tableName}/schema",
|
2727
|
+
method: "put",
|
2728
|
+
...variables,
|
2729
|
+
signal
|
2730
|
+
});
|
968
2731
|
const getTableColumns = (variables, signal) => dataPlaneFetch({
|
969
2732
|
url: "/db/{dbBranchName}/tables/{tableName}/columns",
|
970
2733
|
method: "get",
|
@@ -972,7 +2735,12 @@ const getTableColumns = (variables, signal) => dataPlaneFetch({
|
|
972
2735
|
signal
|
973
2736
|
});
|
974
2737
|
const addTableColumn = (variables, signal) => dataPlaneFetch(
|
975
|
-
{
|
2738
|
+
{
|
2739
|
+
url: "/db/{dbBranchName}/tables/{tableName}/columns",
|
2740
|
+
method: "post",
|
2741
|
+
...variables,
|
2742
|
+
signal
|
2743
|
+
}
|
976
2744
|
);
|
977
2745
|
const getColumn = (variables, signal) => dataPlaneFetch({
|
978
2746
|
url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
|
@@ -980,15 +2748,30 @@ const getColumn = (variables, signal) => dataPlaneFetch({
|
|
980
2748
|
...variables,
|
981
2749
|
signal
|
982
2750
|
});
|
983
|
-
const updateColumn = (variables, signal) => dataPlaneFetch({
|
2751
|
+
const updateColumn = (variables, signal) => dataPlaneFetch({
|
2752
|
+
url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
|
2753
|
+
method: "patch",
|
2754
|
+
...variables,
|
2755
|
+
signal
|
2756
|
+
});
|
984
2757
|
const deleteColumn = (variables, signal) => dataPlaneFetch({
|
985
2758
|
url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
|
986
2759
|
method: "delete",
|
987
2760
|
...variables,
|
988
2761
|
signal
|
989
2762
|
});
|
990
|
-
const branchTransaction = (variables, signal) => dataPlaneFetch({
|
991
|
-
|
2763
|
+
const branchTransaction = (variables, signal) => dataPlaneFetch({
|
2764
|
+
url: "/db/{dbBranchName}/transaction",
|
2765
|
+
method: "post",
|
2766
|
+
...variables,
|
2767
|
+
signal
|
2768
|
+
});
|
2769
|
+
const insertRecord = (variables, signal) => dataPlaneFetch({
|
2770
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data",
|
2771
|
+
method: "post",
|
2772
|
+
...variables,
|
2773
|
+
signal
|
2774
|
+
});
|
992
2775
|
const getFileItem = (variables, signal) => dataPlaneFetch({
|
993
2776
|
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}/column/{columnName}/file/{fileId}",
|
994
2777
|
method: "get",
|
@@ -1031,11 +2814,36 @@ const getRecord = (variables, signal) => dataPlaneFetch({
|
|
1031
2814
|
...variables,
|
1032
2815
|
signal
|
1033
2816
|
});
|
1034
|
-
const insertRecordWithID = (variables, signal) => dataPlaneFetch({
|
1035
|
-
|
1036
|
-
|
1037
|
-
|
1038
|
-
|
2817
|
+
const insertRecordWithID = (variables, signal) => dataPlaneFetch({
|
2818
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2819
|
+
method: "put",
|
2820
|
+
...variables,
|
2821
|
+
signal
|
2822
|
+
});
|
2823
|
+
const updateRecordWithID = (variables, signal) => dataPlaneFetch({
|
2824
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2825
|
+
method: "patch",
|
2826
|
+
...variables,
|
2827
|
+
signal
|
2828
|
+
});
|
2829
|
+
const upsertRecordWithID = (variables, signal) => dataPlaneFetch({
|
2830
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2831
|
+
method: "post",
|
2832
|
+
...variables,
|
2833
|
+
signal
|
2834
|
+
});
|
2835
|
+
const deleteRecord = (variables, signal) => dataPlaneFetch({
|
2836
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2837
|
+
method: "delete",
|
2838
|
+
...variables,
|
2839
|
+
signal
|
2840
|
+
});
|
2841
|
+
const bulkInsertTableRecords = (variables, signal) => dataPlaneFetch({
|
2842
|
+
url: "/db/{dbBranchName}/tables/{tableName}/bulk",
|
2843
|
+
method: "post",
|
2844
|
+
...variables,
|
2845
|
+
signal
|
2846
|
+
});
|
1039
2847
|
const queryTable = (variables, signal) => dataPlaneFetch({
|
1040
2848
|
url: "/db/{dbBranchName}/tables/{tableName}/query",
|
1041
2849
|
method: "post",
|
@@ -1054,16 +2862,36 @@ const searchTable = (variables, signal) => dataPlaneFetch({
|
|
1054
2862
|
...variables,
|
1055
2863
|
signal
|
1056
2864
|
});
|
1057
|
-
const vectorSearchTable = (variables, signal) => dataPlaneFetch({
|
2865
|
+
const vectorSearchTable = (variables, signal) => dataPlaneFetch({
|
2866
|
+
url: "/db/{dbBranchName}/tables/{tableName}/vectorSearch",
|
2867
|
+
method: "post",
|
2868
|
+
...variables,
|
2869
|
+
signal
|
2870
|
+
});
|
1058
2871
|
const askTable = (variables, signal) => dataPlaneFetch({
|
1059
2872
|
url: "/db/{dbBranchName}/tables/{tableName}/ask",
|
1060
2873
|
method: "post",
|
1061
2874
|
...variables,
|
1062
2875
|
signal
|
1063
2876
|
});
|
1064
|
-
const askTableSession = (variables, signal) => dataPlaneFetch({
|
1065
|
-
|
1066
|
-
|
2877
|
+
const askTableSession = (variables, signal) => dataPlaneFetch({
|
2878
|
+
url: "/db/{dbBranchName}/tables/{tableName}/ask/{sessionId}",
|
2879
|
+
method: "post",
|
2880
|
+
...variables,
|
2881
|
+
signal
|
2882
|
+
});
|
2883
|
+
const summarizeTable = (variables, signal) => dataPlaneFetch({
|
2884
|
+
url: "/db/{dbBranchName}/tables/{tableName}/summarize",
|
2885
|
+
method: "post",
|
2886
|
+
...variables,
|
2887
|
+
signal
|
2888
|
+
});
|
2889
|
+
const aggregateTable = (variables, signal) => dataPlaneFetch({
|
2890
|
+
url: "/db/{dbBranchName}/tables/{tableName}/aggregate",
|
2891
|
+
method: "post",
|
2892
|
+
...variables,
|
2893
|
+
signal
|
2894
|
+
});
|
1067
2895
|
const fileAccess = (variables, signal) => dataPlaneFetch({
|
1068
2896
|
url: "/file/{fileId}",
|
1069
2897
|
method: "get",
|
@@ -1085,7 +2913,9 @@ const sqlQuery = (variables, signal) => dataPlaneFetch({
|
|
1085
2913
|
const operationsByTag$2 = {
|
1086
2914
|
migrations: {
|
1087
2915
|
applyMigration,
|
2916
|
+
startMigration,
|
1088
2917
|
adaptTable,
|
2918
|
+
adaptAllTables,
|
1089
2919
|
getBranchMigrationJobStatus,
|
1090
2920
|
getMigrationJobStatus,
|
1091
2921
|
getMigrationHistory,
|
@@ -1148,7 +2978,16 @@ const operationsByTag$2 = {
|
|
1148
2978
|
deleteRecord,
|
1149
2979
|
bulkInsertTableRecords
|
1150
2980
|
},
|
1151
|
-
files: {
|
2981
|
+
files: {
|
2982
|
+
getFileItem,
|
2983
|
+
putFileItem,
|
2984
|
+
deleteFileItem,
|
2985
|
+
getFile,
|
2986
|
+
putFile,
|
2987
|
+
deleteFile,
|
2988
|
+
fileAccess,
|
2989
|
+
fileUpload
|
2990
|
+
},
|
1152
2991
|
searchAndFilter: {
|
1153
2992
|
queryTable,
|
1154
2993
|
searchBranch,
|
@@ -1226,7 +3065,12 @@ const deleteOAuthAccessToken = (variables, signal) => controlPlaneFetch({
|
|
1226
3065
|
...variables,
|
1227
3066
|
signal
|
1228
3067
|
});
|
1229
|
-
const updateOAuthAccessToken = (variables, signal) => controlPlaneFetch({
|
3068
|
+
const updateOAuthAccessToken = (variables, signal) => controlPlaneFetch({
|
3069
|
+
url: "/user/oauth/tokens/{token}",
|
3070
|
+
method: "patch",
|
3071
|
+
...variables,
|
3072
|
+
signal
|
3073
|
+
});
|
1230
3074
|
const getWorkspacesList = (variables, signal) => controlPlaneFetch({
|
1231
3075
|
url: "/workspaces",
|
1232
3076
|
method: "get",
|
@@ -1257,49 +3101,150 @@ const deleteWorkspace = (variables, signal) => controlPlaneFetch({
|
|
1257
3101
|
...variables,
|
1258
3102
|
signal
|
1259
3103
|
});
|
1260
|
-
const getWorkspaceSettings = (variables, signal) => controlPlaneFetch({
|
1261
|
-
|
1262
|
-
|
1263
|
-
|
3104
|
+
const getWorkspaceSettings = (variables, signal) => controlPlaneFetch({
|
3105
|
+
url: "/workspaces/{workspaceId}/settings",
|
3106
|
+
method: "get",
|
3107
|
+
...variables,
|
3108
|
+
signal
|
3109
|
+
});
|
3110
|
+
const updateWorkspaceSettings = (variables, signal) => controlPlaneFetch({
|
3111
|
+
url: "/workspaces/{workspaceId}/settings",
|
3112
|
+
method: "patch",
|
3113
|
+
...variables,
|
3114
|
+
signal
|
3115
|
+
});
|
3116
|
+
const getWorkspaceMembersList = (variables, signal) => controlPlaneFetch({
|
3117
|
+
url: "/workspaces/{workspaceId}/members",
|
3118
|
+
method: "get",
|
3119
|
+
...variables,
|
3120
|
+
signal
|
3121
|
+
});
|
3122
|
+
const updateWorkspaceMemberRole = (variables, signal) => controlPlaneFetch({
|
3123
|
+
url: "/workspaces/{workspaceId}/members/{userId}",
|
3124
|
+
method: "put",
|
3125
|
+
...variables,
|
3126
|
+
signal
|
3127
|
+
});
|
1264
3128
|
const removeWorkspaceMember = (variables, signal) => controlPlaneFetch({
|
1265
3129
|
url: "/workspaces/{workspaceId}/members/{userId}",
|
1266
3130
|
method: "delete",
|
1267
3131
|
...variables,
|
1268
3132
|
signal
|
1269
3133
|
});
|
1270
|
-
const inviteWorkspaceMember = (variables, signal) => controlPlaneFetch({
|
1271
|
-
|
1272
|
-
|
1273
|
-
|
1274
|
-
|
1275
|
-
|
1276
|
-
const
|
3134
|
+
const inviteWorkspaceMember = (variables, signal) => controlPlaneFetch({
|
3135
|
+
url: "/workspaces/{workspaceId}/invites",
|
3136
|
+
method: "post",
|
3137
|
+
...variables,
|
3138
|
+
signal
|
3139
|
+
});
|
3140
|
+
const updateWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3141
|
+
url: "/workspaces/{workspaceId}/invites/{inviteId}",
|
3142
|
+
method: "patch",
|
3143
|
+
...variables,
|
3144
|
+
signal
|
3145
|
+
});
|
3146
|
+
const cancelWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3147
|
+
url: "/workspaces/{workspaceId}/invites/{inviteId}",
|
3148
|
+
method: "delete",
|
3149
|
+
...variables,
|
3150
|
+
signal
|
3151
|
+
});
|
3152
|
+
const acceptWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3153
|
+
url: "/workspaces/{workspaceId}/invites/{inviteKey}/accept",
|
3154
|
+
method: "post",
|
3155
|
+
...variables,
|
3156
|
+
signal
|
3157
|
+
});
|
3158
|
+
const resendWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3159
|
+
url: "/workspaces/{workspaceId}/invites/{inviteId}/resend",
|
3160
|
+
method: "post",
|
3161
|
+
...variables,
|
3162
|
+
signal
|
3163
|
+
});
|
3164
|
+
const listClusters = (variables, signal) => controlPlaneFetch({
|
3165
|
+
url: "/workspaces/{workspaceId}/clusters",
|
3166
|
+
method: "get",
|
3167
|
+
...variables,
|
3168
|
+
signal
|
3169
|
+
});
|
3170
|
+
const createCluster = (variables, signal) => controlPlaneFetch({
|
3171
|
+
url: "/workspaces/{workspaceId}/clusters",
|
3172
|
+
method: "post",
|
3173
|
+
...variables,
|
3174
|
+
signal
|
3175
|
+
});
|
1277
3176
|
const getCluster = (variables, signal) => controlPlaneFetch({
|
1278
3177
|
url: "/workspaces/{workspaceId}/clusters/{clusterId}",
|
1279
3178
|
method: "get",
|
1280
3179
|
...variables,
|
1281
3180
|
signal
|
1282
3181
|
});
|
1283
|
-
const updateCluster = (variables, signal) => controlPlaneFetch({
|
3182
|
+
const updateCluster = (variables, signal) => controlPlaneFetch({
|
3183
|
+
url: "/workspaces/{workspaceId}/clusters/{clusterId}",
|
3184
|
+
method: "patch",
|
3185
|
+
...variables,
|
3186
|
+
signal
|
3187
|
+
});
|
3188
|
+
const deleteCluster = (variables, signal) => controlPlaneFetch({
|
3189
|
+
url: "/workspaces/{workspaceId}/clusters/{clusterId}",
|
3190
|
+
method: "delete",
|
3191
|
+
...variables,
|
3192
|
+
signal
|
3193
|
+
});
|
1284
3194
|
const getDatabaseList = (variables, signal) => controlPlaneFetch({
|
1285
3195
|
url: "/workspaces/{workspaceId}/dbs",
|
1286
3196
|
method: "get",
|
1287
3197
|
...variables,
|
1288
3198
|
signal
|
1289
3199
|
});
|
1290
|
-
const createDatabase = (variables, signal) => controlPlaneFetch({
|
3200
|
+
const createDatabase = (variables, signal) => controlPlaneFetch({
|
3201
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
3202
|
+
method: "put",
|
3203
|
+
...variables,
|
3204
|
+
signal
|
3205
|
+
});
|
1291
3206
|
const deleteDatabase = (variables, signal) => controlPlaneFetch({
|
1292
3207
|
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
1293
3208
|
method: "delete",
|
1294
3209
|
...variables,
|
1295
3210
|
signal
|
1296
3211
|
});
|
1297
|
-
const getDatabaseMetadata = (variables, signal) => controlPlaneFetch({
|
1298
|
-
|
1299
|
-
|
1300
|
-
|
1301
|
-
|
1302
|
-
|
3212
|
+
const getDatabaseMetadata = (variables, signal) => controlPlaneFetch({
|
3213
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
3214
|
+
method: "get",
|
3215
|
+
...variables,
|
3216
|
+
signal
|
3217
|
+
});
|
3218
|
+
const updateDatabaseMetadata = (variables, signal) => controlPlaneFetch({
|
3219
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
3220
|
+
method: "patch",
|
3221
|
+
...variables,
|
3222
|
+
signal
|
3223
|
+
});
|
3224
|
+
const renameDatabase = (variables, signal) => controlPlaneFetch({
|
3225
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/rename",
|
3226
|
+
method: "post",
|
3227
|
+
...variables,
|
3228
|
+
signal
|
3229
|
+
});
|
3230
|
+
const getDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
|
3231
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
|
3232
|
+
method: "get",
|
3233
|
+
...variables,
|
3234
|
+
signal
|
3235
|
+
});
|
3236
|
+
const updateDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
|
3237
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
|
3238
|
+
method: "put",
|
3239
|
+
...variables,
|
3240
|
+
signal
|
3241
|
+
});
|
3242
|
+
const deleteDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
|
3243
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
|
3244
|
+
method: "delete",
|
3245
|
+
...variables,
|
3246
|
+
signal
|
3247
|
+
});
|
1303
3248
|
const listRegions = (variables, signal) => controlPlaneFetch({
|
1304
3249
|
url: "/workspaces/{workspaceId}/regions",
|
1305
3250
|
method: "get",
|
@@ -1337,7 +3282,13 @@ const operationsByTag$1 = {
|
|
1337
3282
|
acceptWorkspaceMemberInvite,
|
1338
3283
|
resendWorkspaceMemberInvite
|
1339
3284
|
},
|
1340
|
-
xbcontrolOther: {
|
3285
|
+
xbcontrolOther: {
|
3286
|
+
listClusters,
|
3287
|
+
createCluster,
|
3288
|
+
getCluster,
|
3289
|
+
updateCluster,
|
3290
|
+
deleteCluster
|
3291
|
+
},
|
1341
3292
|
databases: {
|
1342
3293
|
getDatabaseList,
|
1343
3294
|
createDatabase,
|
@@ -1357,7 +3308,7 @@ const operationsByTag = deepMerge(operationsByTag$2, operationsByTag$1);
|
|
1357
3308
|
const buildApiClient = () => class {
|
1358
3309
|
constructor(options = {}) {
|
1359
3310
|
const provider = options.host ?? "production";
|
1360
|
-
const apiKey = options.apiKey
|
3311
|
+
const apiKey = options.apiKey;
|
1361
3312
|
const trace = options.trace ?? defaultTrace;
|
1362
3313
|
const clientID = generateUUID();
|
1363
3314
|
if (!apiKey) {
|
@@ -1424,8 +3375,7 @@ function buildTransformString(transformations) {
|
|
1424
3375
|
).join(",");
|
1425
3376
|
}
|
1426
3377
|
function transformImage(url, ...transformations) {
|
1427
|
-
if (!isDefined(url))
|
1428
|
-
return void 0;
|
3378
|
+
if (!isDefined(url)) return void 0;
|
1429
3379
|
const newTransformations = buildTransformString(transformations);
|
1430
3380
|
const { hostname, pathname, search } = new URL(url);
|
1431
3381
|
const pathParts = pathname.split("/");
|
@@ -1538,8 +3488,7 @@ class XataFile {
|
|
1538
3488
|
}
|
1539
3489
|
}
|
1540
3490
|
const parseInputFileEntry = async (entry) => {
|
1541
|
-
if (!isDefined(entry))
|
1542
|
-
return null;
|
3491
|
+
if (!isDefined(entry)) return null;
|
1543
3492
|
const { id, name, mediaType, base64Content, enablePublicUrl, signedUrlTimeout, uploadUrlTimeout } = await entry;
|
1544
3493
|
return compactObject({
|
1545
3494
|
id,
|
@@ -1554,24 +3503,19 @@ const parseInputFileEntry = async (entry) => {
|
|
1554
3503
|
};
|
1555
3504
|
|
1556
3505
|
function cleanFilter(filter) {
|
1557
|
-
if (!isDefined(filter))
|
1558
|
-
|
1559
|
-
if (!isObject(filter))
|
1560
|
-
return filter;
|
3506
|
+
if (!isDefined(filter)) return void 0;
|
3507
|
+
if (!isObject(filter)) return filter;
|
1561
3508
|
const values = Object.fromEntries(
|
1562
3509
|
Object.entries(filter).reduce((acc, [key, value]) => {
|
1563
|
-
if (!isDefined(value))
|
1564
|
-
return acc;
|
3510
|
+
if (!isDefined(value)) return acc;
|
1565
3511
|
if (Array.isArray(value)) {
|
1566
3512
|
const clean = value.map((item) => cleanFilter(item)).filter((item) => isDefined(item));
|
1567
|
-
if (clean.length === 0)
|
1568
|
-
return acc;
|
3513
|
+
if (clean.length === 0) return acc;
|
1569
3514
|
return [...acc, [key, clean]];
|
1570
3515
|
}
|
1571
3516
|
if (isObject(value)) {
|
1572
3517
|
const clean = cleanFilter(value);
|
1573
|
-
if (!isDefined(clean))
|
1574
|
-
return acc;
|
3518
|
+
if (!isDefined(clean)) return acc;
|
1575
3519
|
return [...acc, [key, clean]];
|
1576
3520
|
}
|
1577
3521
|
return [...acc, [key, value]];
|
@@ -1581,10 +3525,8 @@ function cleanFilter(filter) {
|
|
1581
3525
|
}
|
1582
3526
|
|
1583
3527
|
function stringifyJson(value) {
|
1584
|
-
if (!isDefined(value))
|
1585
|
-
|
1586
|
-
if (isString(value))
|
1587
|
-
return value;
|
3528
|
+
if (!isDefined(value)) return value;
|
3529
|
+
if (isString(value)) return value;
|
1588
3530
|
try {
|
1589
3531
|
return JSON.stringify(value);
|
1590
3532
|
} catch (e) {
|
@@ -1599,28 +3541,17 @@ function parseJson(value) {
|
|
1599
3541
|
}
|
1600
3542
|
}
|
1601
3543
|
|
1602
|
-
var
|
1603
|
-
|
1604
|
-
throw TypeError("Cannot " + msg);
|
1605
|
-
};
|
1606
|
-
var __privateGet$4 = (obj, member, getter) => {
|
1607
|
-
__accessCheck$5(obj, member, "read from private field");
|
1608
|
-
return getter ? getter.call(obj) : member.get(obj);
|
1609
|
-
};
|
1610
|
-
var __privateAdd$5 = (obj, member, value) => {
|
1611
|
-
if (member.has(obj))
|
1612
|
-
throw TypeError("Cannot add the same private member more than once");
|
1613
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
1614
|
-
};
|
1615
|
-
var __privateSet$3 = (obj, member, value, setter) => {
|
1616
|
-
__accessCheck$5(obj, member, "write to private field");
|
1617
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
1618
|
-
return value;
|
3544
|
+
var __typeError$5 = (msg) => {
|
3545
|
+
throw TypeError(msg);
|
1619
3546
|
};
|
3547
|
+
var __accessCheck$5 = (obj, member, msg) => member.has(obj) || __typeError$5("Cannot " + msg);
|
3548
|
+
var __privateGet$4 = (obj, member, getter) => (__accessCheck$5(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
3549
|
+
var __privateAdd$5 = (obj, member, value) => member.has(obj) ? __typeError$5("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
3550
|
+
var __privateSet$3 = (obj, member, value, setter) => (__accessCheck$5(obj, member, "write to private field"), member.set(obj, value), value);
|
1620
3551
|
var _query, _page;
|
1621
3552
|
class Page {
|
1622
3553
|
constructor(query, meta, records = []) {
|
1623
|
-
__privateAdd$5(this, _query
|
3554
|
+
__privateAdd$5(this, _query);
|
1624
3555
|
__privateSet$3(this, _query, query);
|
1625
3556
|
this.meta = meta;
|
1626
3557
|
this.records = new PageRecordArray(this, records);
|
@@ -1707,7 +3638,7 @@ class RecordArray extends Array {
 const _PageRecordArray = class _PageRecordArray extends Array {
   constructor(...args) {
     super(..._PageRecordArray.parseConstructorParams(...args));
-    __privateAdd$5(this, _page, void 0);
+    __privateAdd$5(this, _page);
     __privateSet$3(this, _page, isObject(args[0]?.meta) ? args[0] : { meta: { page: { cursor: "", more: false } }, records: [] });
   }
   static parseConstructorParams(...args) {
@@ -1778,34 +3709,20 @@ const _PageRecordArray = class _PageRecordArray extends Array {
 _page = new WeakMap();
 let PageRecordArray = _PageRecordArray;
 
-var __accessCheck$4 = (obj, member, msg) => {
-  if (!member.has(obj))
-    throw TypeError("Cannot " + msg);
-};
-var __privateGet$3 = (obj, member, getter) => {
-  __accessCheck$4(obj, member, "read from private field");
-  return getter ? getter.call(obj) : member.get(obj);
-};
-var __privateAdd$4 = (obj, member, value) => {
-  if (member.has(obj))
-    throw TypeError("Cannot add the same private member more than once");
-  member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
-};
-var __privateSet$2 = (obj, member, value, setter) => {
-  __accessCheck$4(obj, member, "write to private field");
-  setter ? setter.call(obj, value) : member.set(obj, value);
-  return value;
-};
-var __privateMethod$3 = (obj, member, method) => {
-  __accessCheck$4(obj, member, "access private method");
-  return method;
+var __typeError$4 = (msg) => {
+  throw TypeError(msg);
 };
-var _table$1, _repository, _data, _cleanFilterConstraint, cleanFilterConstraint_fn;
+var __accessCheck$4 = (obj, member, msg) => member.has(obj) || __typeError$4("Cannot " + msg);
+var __privateGet$3 = (obj, member, getter) => (__accessCheck$4(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
+var __privateAdd$4 = (obj, member, value) => member.has(obj) ? __typeError$4("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
+var __privateSet$2 = (obj, member, value, setter) => (__accessCheck$4(obj, member, "write to private field"), member.set(obj, value), value);
+var __privateMethod$3 = (obj, member, method) => (__accessCheck$4(obj, member, "access private method"), method);
+var _table$1, _repository, _data, _Query_instances, cleanFilterConstraint_fn;
 const _Query = class _Query {
   constructor(repository, table, data, rawParent) {
-    __privateAdd$4(this, _cleanFilterConstraint);
-    __privateAdd$4(this, _table$1, void 0);
-    __privateAdd$4(this, _repository, void 0);
+    __privateAdd$4(this, _Query_instances);
+    __privateAdd$4(this, _table$1);
+    __privateAdd$4(this, _repository);
     __privateAdd$4(this, _data, { filter: {} });
     // Implements pagination
     this.meta = { page: { cursor: "start", more: true, size: PAGINATION_DEFAULT_SIZE } };
@@ -1883,12 +3800,12 @@ const _Query = class _Query {
   filter(a, b) {
     if (arguments.length === 1) {
       const constraints = Object.entries(a ?? {}).map(([column, constraint]) => ({
-        [column]: __privateMethod$3(this, _cleanFilterConstraint, cleanFilterConstraint_fn).call(this, column, constraint)
+        [column]: __privateMethod$3(this, _Query_instances, cleanFilterConstraint_fn).call(this, column, constraint)
       }));
       const $all = compact([__privateGet$3(this, _data).filter?.$all].flat().concat(constraints));
       return new _Query(__privateGet$3(this, _repository), __privateGet$3(this, _table$1), { filter: { $all } }, __privateGet$3(this, _data));
     } else {
-      const constraints = isDefined(a) && isDefined(b) ? [{ [a]: __privateMethod$3(this, _cleanFilterConstraint, cleanFilterConstraint_fn).call(this, a, b) }] : void 0;
+      const constraints = isDefined(a) && isDefined(b) ? [{ [a]: __privateMethod$3(this, _Query_instances, cleanFilterConstraint_fn).call(this, a, b) }] : void 0;
       const $all = compact([__privateGet$3(this, _data).filter?.$all].flat().concat(constraints));
       return new _Query(__privateGet$3(this, _repository), __privateGet$3(this, _table$1), { filter: { $all } }, __privateGet$3(this, _data));
     }
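Both `filter` overloads rewritten here (a single constraints object, or a column plus constraint) pass each constraint through `cleanFilterConstraint_fn` before merging it into the `$all` list. A short usage sketch of that public surface, assuming a hypothetical `users` table on the generated client (the operator spelling follows Xata's filter syntax and is not shown in this hunk):

    // One-argument form: an object of column constraints.
    const adults = await xata.db.users.filter({ age: { $ge: 18 } }).getMany();
    // Two-argument form: column name plus constraint value.
    const alice = await xata.db.users.filter("name", "Alice").getFirst();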
@@ -1967,8 +3884,7 @@ const _Query = class _Query {
   }
   async getFirstOrThrow(options = {}) {
     const records = await this.getMany({ ...options, pagination: { size: 1 } });
-    if (records[0] === void 0)
-      throw new Error("No results found.");
+    if (records[0] === void 0) throw new Error("No results found.");
     return records[0];
   }
   async summarize(params = {}) {
@@ -2023,7 +3939,7 @@ const _Query = class _Query {
 _table$1 = new WeakMap();
 _repository = new WeakMap();
 _data = new WeakMap();
-_cleanFilterConstraint = new WeakSet();
+_Query_instances = new WeakSet();
 cleanFilterConstraint_fn = function(column, value) {
   const columnType = __privateGet$3(this, _table$1).schema?.columns.find(({ name }) => name === column)?.type;
   if (columnType === "multiple" && (isString(value) || isStringArray(value))) {
@@ -2084,8 +4000,7 @@ function isSortFilterString(value) {
 }
 function isSortFilterBase(filter) {
   return isObject(filter) && Object.entries(filter).every(([key, value]) => {
-    if (key === "*")
-      return value === "random";
+    if (key === "*") return value === "random";
     return value === "asc" || value === "desc";
   });
 }
@@ -2106,29 +4021,15 @@ function buildSortFilter(filter) {
   }
 }
 
-var __accessCheck$3 = (obj, member, msg) => {
-  if (!member.has(obj))
-    throw TypeError("Cannot " + msg);
-};
-var __privateGet$2 = (obj, member, getter) => {
-  __accessCheck$3(obj, member, "read from private field");
-  return getter ? getter.call(obj) : member.get(obj);
-};
-var __privateAdd$3 = (obj, member, value) => {
-  if (member.has(obj))
-    throw TypeError("Cannot add the same private member more than once");
-  member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
+var __typeError$3 = (msg) => {
+  throw TypeError(msg);
 };
-var __privateSet$1 = (obj, member, value, setter) => {
-  __accessCheck$3(obj, member, "write to private field");
-  setter ? setter.call(obj, value) : member.set(obj, value);
-  return value;
-};
-var __privateMethod$2 = (obj, member, method) => {
-  __accessCheck$3(obj, member, "access private method");
-  return method;
-};
-var _table, _getFetchProps, _db, _schemaTables, _trace, _insertRecordWithoutId, insertRecordWithoutId_fn, _insertRecordWithId, insertRecordWithId_fn, _insertRecords, insertRecords_fn, _updateRecordWithID, updateRecordWithID_fn, _updateRecords, updateRecords_fn, _upsertRecordWithID, upsertRecordWithID_fn, _deleteRecord, deleteRecord_fn, _deleteRecords, deleteRecords_fn, _getSchemaTables, getSchemaTables_fn, _transformObjectToApi, transformObjectToApi_fn;
+var __accessCheck$3 = (obj, member, msg) => member.has(obj) || __typeError$3("Cannot " + msg);
+var __privateGet$2 = (obj, member, getter) => (__accessCheck$3(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
+var __privateAdd$3 = (obj, member, value) => member.has(obj) ? __typeError$3("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
+var __privateSet$1 = (obj, member, value, setter) => (__accessCheck$3(obj, member, "write to private field"), member.set(obj, value), value);
+var __privateMethod$2 = (obj, member, method) => (__accessCheck$3(obj, member, "access private method"), method);
+var _table, _getFetchProps, _db, _schemaTables, _trace, _RestRepository_instances, insertRecordWithoutId_fn, insertRecordWithId_fn, insertRecords_fn, updateRecordWithID_fn, updateRecords_fn, upsertRecordWithID_fn, deleteRecord_fn, deleteRecords_fn, getSchemaTables_fn, transformObjectToApi_fn;
 const BULK_OPERATION_MAX_SIZE = 1e3;
 class Repository extends Query {
 }
@@ -2139,21 +4040,12 @@ class RestRepository extends Query {
       { name: options.table, schema: options.schemaTables?.find((table) => table.name === options.table) },
       {}
     );
-    __privateAdd$3(this, _insertRecordWithoutId);
-    __privateAdd$3(this, _insertRecordWithId);
-    __privateAdd$3(this, _insertRecords);
-    __privateAdd$3(this, _updateRecordWithID);
-    __privateAdd$3(this, _updateRecords);
-    __privateAdd$3(this, _upsertRecordWithID);
-    __privateAdd$3(this, _deleteRecord);
-    __privateAdd$3(this, _deleteRecords);
-    __privateAdd$3(this, _getSchemaTables);
-    __privateAdd$3(this, _transformObjectToApi);
-    __privateAdd$3(this, _table, void 0);
-    __privateAdd$3(this, _getFetchProps, void 0);
-    __privateAdd$3(this, _db, void 0);
-    __privateAdd$3(this, _schemaTables, void 0);
-    __privateAdd$3(this, _trace, void 0);
+    __privateAdd$3(this, _RestRepository_instances);
+    __privateAdd$3(this, _table);
+    __privateAdd$3(this, _getFetchProps);
+    __privateAdd$3(this, _db);
+    __privateAdd$3(this, _schemaTables);
+    __privateAdd$3(this, _trace);
     __privateSet$1(this, _table, options.table);
     __privateSet$1(this, _db, options.db);
     __privateSet$1(this, _schemaTables, options.schemaTables);
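The hunks that follow rewrite the repository's CRUD entry points (`create`, `read`, `update`, `createOrUpdate`, `createOrReplace`, `delete`) so their private implementations are reached through the single `_RestRepository_instances` brand instead of one WeakSet per method. A usage sketch of the public surface those methods implement, assuming a hypothetical `users` table:

    // create() accepts an object, an explicit id plus object, or an array of objects.
    const ada = await xata.db.users.create({ name: "Ada" });
    await xata.db.users.create("user_1", { name: "Grace" }); // an empty id throws
    const found = await xata.db.users.read(ada.xata_id);
    await xata.db.users.update(ada.xata_id, { name: "Ada L." });
    await xata.db.users.delete(ada.xata_id);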
@@ -2172,31 +4064,28 @@ class RestRepository extends Query {
|
|
2172
4064
|
return __privateGet$2(this, _trace).call(this, "create", async () => {
|
2173
4065
|
const ifVersion = parseIfVersion(b, c, d);
|
2174
4066
|
if (Array.isArray(a)) {
|
2175
|
-
if (a.length === 0)
|
2176
|
-
|
2177
|
-
const ids = await __privateMethod$2(this, _insertRecords, insertRecords_fn).call(this, a, { ifVersion, createOnly: true });
|
4067
|
+
if (a.length === 0) return [];
|
4068
|
+
const ids = await __privateMethod$2(this, _RestRepository_instances, insertRecords_fn).call(this, a, { ifVersion, createOnly: true });
|
2178
4069
|
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
2179
4070
|
const result = await this.read(ids, columns);
|
2180
4071
|
return result;
|
2181
4072
|
}
|
2182
4073
|
if (isString(a) && isObject(b)) {
|
2183
|
-
if (a === "")
|
2184
|
-
throw new Error("The id can't be empty");
|
4074
|
+
if (a === "") throw new Error("The id can't be empty");
|
2185
4075
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2186
|
-
return await __privateMethod$2(this,
|
4076
|
+
return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: true, ifVersion });
|
2187
4077
|
}
|
2188
4078
|
if (isObject(a) && isString(a.xata_id)) {
|
2189
|
-
if (a.xata_id === "")
|
2190
|
-
throw new Error("The id can't be empty");
|
4079
|
+
if (a.xata_id === "") throw new Error("The id can't be empty");
|
2191
4080
|
const columns = isValidSelectableColumns(b) ? b : void 0;
|
2192
|
-
return await __privateMethod$2(this,
|
4081
|
+
return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, {
|
2193
4082
|
createOnly: true,
|
2194
4083
|
ifVersion
|
2195
4084
|
});
|
2196
4085
|
}
|
2197
4086
|
if (isObject(a)) {
|
2198
4087
|
const columns = isValidSelectableColumns(b) ? b : void 0;
|
2199
|
-
return __privateMethod$2(this,
|
4088
|
+
return __privateMethod$2(this, _RestRepository_instances, insertRecordWithoutId_fn).call(this, a, columns);
|
2200
4089
|
}
|
2201
4090
|
throw new Error("Invalid arguments for create method");
|
2202
4091
|
});
|
@@ -2205,8 +4094,7 @@ class RestRepository extends Query {
|
|
2205
4094
|
return __privateGet$2(this, _trace).call(this, "read", async () => {
|
2206
4095
|
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
2207
4096
|
if (Array.isArray(a)) {
|
2208
|
-
if (a.length === 0)
|
2209
|
-
return [];
|
4097
|
+
if (a.length === 0) return [];
|
2210
4098
|
const ids = a.map((item) => extractId(item));
|
2211
4099
|
const finalObjects = await this.getAll({ filter: { xata_id: { $any: compact(ids) } }, columns });
|
2212
4100
|
const dictionary = finalObjects.reduce((acc, object) => {
|
@@ -2229,7 +4117,7 @@ class RestRepository extends Query {
|
|
2229
4117
|
queryParams: { columns },
|
2230
4118
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2231
4119
|
});
|
2232
|
-
const schemaTables = await __privateMethod$2(this,
|
4120
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2233
4121
|
return initObject(
|
2234
4122
|
__privateGet$2(this, _db),
|
2235
4123
|
schemaTables,
|
@@ -2270,11 +4158,10 @@ class RestRepository extends Query {
|
|
2270
4158
|
return __privateGet$2(this, _trace).call(this, "update", async () => {
|
2271
4159
|
const ifVersion = parseIfVersion(b, c, d);
|
2272
4160
|
if (Array.isArray(a)) {
|
2273
|
-
if (a.length === 0)
|
2274
|
-
return [];
|
4161
|
+
if (a.length === 0) return [];
|
2275
4162
|
const existing = await this.read(a, ["xata_id"]);
|
2276
4163
|
const updates = a.filter((_item, index) => existing[index] !== null);
|
2277
|
-
await __privateMethod$2(this,
|
4164
|
+
await __privateMethod$2(this, _RestRepository_instances, updateRecords_fn).call(this, updates, {
|
2278
4165
|
ifVersion,
|
2279
4166
|
upsert: false
|
2280
4167
|
});
|
@@ -2285,15 +4172,14 @@ class RestRepository extends Query {
|
|
2285
4172
|
try {
|
2286
4173
|
if (isString(a) && isObject(b)) {
|
2287
4174
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2288
|
-
return await __privateMethod$2(this,
|
4175
|
+
return await __privateMethod$2(this, _RestRepository_instances, updateRecordWithID_fn).call(this, a, b, columns, { ifVersion });
|
2289
4176
|
}
|
2290
4177
|
if (isObject(a) && isString(a.xata_id)) {
|
2291
4178
|
const columns = isValidSelectableColumns(b) ? b : void 0;
|
2292
|
-
return await __privateMethod$2(this,
|
4179
|
+
return await __privateMethod$2(this, _RestRepository_instances, updateRecordWithID_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, { ifVersion });
|
2293
4180
|
}
|
2294
4181
|
} catch (error) {
|
2295
|
-
if (error.status === 422)
|
2296
|
-
return null;
|
4182
|
+
if (error.status === 422) return null;
|
2297
4183
|
throw error;
|
2298
4184
|
}
|
2299
4185
|
throw new Error("Invalid arguments for update method");
|
@@ -2322,9 +4208,8 @@ class RestRepository extends Query {
|
|
2322
4208
|
return __privateGet$2(this, _trace).call(this, "createOrUpdate", async () => {
|
2323
4209
|
const ifVersion = parseIfVersion(b, c, d);
|
2324
4210
|
if (Array.isArray(a)) {
|
2325
|
-
if (a.length === 0)
|
2326
|
-
|
2327
|
-
await __privateMethod$2(this, _updateRecords, updateRecords_fn).call(this, a, {
|
4211
|
+
if (a.length === 0) return [];
|
4212
|
+
await __privateMethod$2(this, _RestRepository_instances, updateRecords_fn).call(this, a, {
|
2328
4213
|
ifVersion,
|
2329
4214
|
upsert: true
|
2330
4215
|
});
|
@@ -2333,16 +4218,14 @@ class RestRepository extends Query {
|
|
2333
4218
|
return result;
|
2334
4219
|
}
|
2335
4220
|
if (isString(a) && isObject(b)) {
|
2336
|
-
if (a === "")
|
2337
|
-
throw new Error("The id can't be empty");
|
4221
|
+
if (a === "") throw new Error("The id can't be empty");
|
2338
4222
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2339
|
-
return await __privateMethod$2(this,
|
4223
|
+
return await __privateMethod$2(this, _RestRepository_instances, upsertRecordWithID_fn).call(this, a, b, columns, { ifVersion });
|
2340
4224
|
}
|
2341
4225
|
if (isObject(a) && isString(a.xata_id)) {
|
2342
|
-
if (a.xata_id === "")
|
2343
|
-
throw new Error("The id can't be empty");
|
4226
|
+
if (a.xata_id === "") throw new Error("The id can't be empty");
|
2344
4227
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2345
|
-
return await __privateMethod$2(this,
|
4228
|
+
return await __privateMethod$2(this, _RestRepository_instances, upsertRecordWithID_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, { ifVersion });
|
2346
4229
|
}
|
2347
4230
|
if (!isDefined(a) && isObject(b)) {
|
2348
4231
|
return await this.create(b, c);
|
@@ -2357,24 +4240,21 @@ class RestRepository extends Query {
|
|
2357
4240
|
return __privateGet$2(this, _trace).call(this, "createOrReplace", async () => {
|
2358
4241
|
const ifVersion = parseIfVersion(b, c, d);
|
2359
4242
|
if (Array.isArray(a)) {
|
2360
|
-
if (a.length === 0)
|
2361
|
-
|
2362
|
-
const ids = await __privateMethod$2(this, _insertRecords, insertRecords_fn).call(this, a, { ifVersion, createOnly: false });
|
4243
|
+
if (a.length === 0) return [];
|
4244
|
+
const ids = await __privateMethod$2(this, _RestRepository_instances, insertRecords_fn).call(this, a, { ifVersion, createOnly: false });
|
2363
4245
|
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
2364
4246
|
const result = await this.read(ids, columns);
|
2365
4247
|
return result;
|
2366
4248
|
}
|
2367
4249
|
if (isString(a) && isObject(b)) {
|
2368
|
-
if (a === "")
|
2369
|
-
throw new Error("The id can't be empty");
|
4250
|
+
if (a === "") throw new Error("The id can't be empty");
|
2370
4251
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2371
|
-
return await __privateMethod$2(this,
|
4252
|
+
return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: false, ifVersion });
|
2372
4253
|
}
|
2373
4254
|
if (isObject(a) && isString(a.xata_id)) {
|
2374
|
-
if (a.xata_id === "")
|
2375
|
-
throw new Error("The id can't be empty");
|
4255
|
+
if (a.xata_id === "") throw new Error("The id can't be empty");
|
2376
4256
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2377
|
-
return await __privateMethod$2(this,
|
4257
|
+
return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, {
|
2378
4258
|
createOnly: false,
|
2379
4259
|
ifVersion
|
2380
4260
|
});
|
@@ -2391,25 +4271,22 @@ class RestRepository extends Query {
|
|
2391
4271
|
async delete(a, b) {
|
2392
4272
|
return __privateGet$2(this, _trace).call(this, "delete", async () => {
|
2393
4273
|
if (Array.isArray(a)) {
|
2394
|
-
if (a.length === 0)
|
2395
|
-
return [];
|
4274
|
+
if (a.length === 0) return [];
|
2396
4275
|
const ids = a.map((o) => {
|
2397
|
-
if (isString(o))
|
2398
|
-
|
2399
|
-
if (isString(o.xata_id))
|
2400
|
-
return o.xata_id;
|
4276
|
+
if (isString(o)) return o;
|
4277
|
+
if (isString(o.xata_id)) return o.xata_id;
|
2401
4278
|
throw new Error("Invalid arguments for delete method");
|
2402
4279
|
});
|
2403
4280
|
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
2404
4281
|
const result = await this.read(a, columns);
|
2405
|
-
await __privateMethod$2(this,
|
4282
|
+
await __privateMethod$2(this, _RestRepository_instances, deleteRecords_fn).call(this, ids);
|
2406
4283
|
return result;
|
2407
4284
|
}
|
2408
4285
|
if (isString(a)) {
|
2409
|
-
return __privateMethod$2(this,
|
4286
|
+
return __privateMethod$2(this, _RestRepository_instances, deleteRecord_fn).call(this, a, b);
|
2410
4287
|
}
|
2411
4288
|
if (isObject(a) && isString(a.xata_id)) {
|
2412
|
-
return __privateMethod$2(this,
|
4289
|
+
return __privateMethod$2(this, _RestRepository_instances, deleteRecord_fn).call(this, a.xata_id, b);
|
2413
4290
|
}
|
2414
4291
|
throw new Error("Invalid arguments for delete method");
|
2415
4292
|
});
|
@@ -2453,7 +4330,7 @@ class RestRepository extends Query {
|
|
2453
4330
|
},
|
2454
4331
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2455
4332
|
});
|
2456
|
-
const schemaTables = await __privateMethod$2(this,
|
4333
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2457
4334
|
return {
|
2458
4335
|
records: records.map((item) => initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), item, ["*"])),
|
2459
4336
|
totalCount
|
@@ -2478,7 +4355,7 @@ class RestRepository extends Query {
|
|
2478
4355
|
},
|
2479
4356
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2480
4357
|
});
|
2481
|
-
const schemaTables = await __privateMethod$2(this,
|
4358
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2482
4359
|
return {
|
2483
4360
|
records: records.map((item) => initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), item, ["*"])),
|
2484
4361
|
totalCount
|
@@ -2520,7 +4397,7 @@ class RestRepository extends Query {
|
|
2520
4397
|
fetchOptions: data.fetchOptions,
|
2521
4398
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2522
4399
|
});
|
2523
|
-
const schemaTables = await __privateMethod$2(this,
|
4400
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2524
4401
|
const records = objects.map(
|
2525
4402
|
(record) => initObject(
|
2526
4403
|
__privateGet$2(this, _db),
|
@@ -2554,7 +4431,7 @@ class RestRepository extends Query {
|
|
2554
4431
|
},
|
2555
4432
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2556
4433
|
});
|
2557
|
-
const schemaTables = await __privateMethod$2(this,
|
4434
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2558
4435
|
return {
|
2559
4436
|
...result,
|
2560
4437
|
summaries: result.summaries.map(
|
@@ -2602,9 +4479,9 @@ _getFetchProps = new WeakMap();
|
|
2602
4479
|
_db = new WeakMap();
|
2603
4480
|
_schemaTables = new WeakMap();
|
2604
4481
|
_trace = new WeakMap();
|
2605
|
-
|
4482
|
+
_RestRepository_instances = new WeakSet();
|
2606
4483
|
insertRecordWithoutId_fn = async function(object, columns = ["*"]) {
|
2607
|
-
const record = await __privateMethod$2(this,
|
4484
|
+
const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2608
4485
|
const response = await insertRecord({
|
2609
4486
|
pathParams: {
|
2610
4487
|
workspace: "{workspaceId}",
|
@@ -2616,14 +4493,12 @@ insertRecordWithoutId_fn = async function(object, columns = ["*"]) {
|
|
2616
4493
|
body: record,
|
2617
4494
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2618
4495
|
});
|
2619
|
-
const schemaTables = await __privateMethod$2(this,
|
4496
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2620
4497
|
return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
|
2621
4498
|
};
|
2622
|
-
_insertRecordWithId = new WeakSet();
|
2623
4499
|
insertRecordWithId_fn = async function(recordId, object, columns = ["*"], { createOnly, ifVersion }) {
|
2624
|
-
if (!recordId)
|
2625
|
-
|
2626
|
-
const record = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
|
4500
|
+
if (!recordId) return null;
|
4501
|
+
const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2627
4502
|
const response = await insertRecordWithID({
|
2628
4503
|
pathParams: {
|
2629
4504
|
workspace: "{workspaceId}",
|
@@ -2636,13 +4511,12 @@ insertRecordWithId_fn = async function(recordId, object, columns = ["*"], { crea
|
|
2636
4511
|
queryParams: { createOnly, columns, ifVersion },
|
2637
4512
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2638
4513
|
});
|
2639
|
-
const schemaTables = await __privateMethod$2(this,
|
4514
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2640
4515
|
return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
|
2641
4516
|
};
|
2642
|
-
_insertRecords = new WeakSet();
|
2643
4517
|
insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
|
2644
4518
|
const operations = await promiseMap(objects, async (object) => {
|
2645
|
-
const record = await __privateMethod$2(this,
|
4519
|
+
const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2646
4520
|
return { insert: { table: __privateGet$2(this, _table), record, createOnly, ifVersion } };
|
2647
4521
|
});
|
2648
4522
|
const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
|
@@ -2667,11 +4541,9 @@ insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
|
|
2667
4541
|
}
|
2668
4542
|
return ids;
|
2669
4543
|
};
|
2670
|
-
_updateRecordWithID = new WeakSet();
|
2671
4544
|
updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVersion }) {
|
2672
|
-
if (!recordId)
|
2673
|
-
|
2674
|
-
const { xata_id: _id, ...record } = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
|
4545
|
+
if (!recordId) return null;
|
4546
|
+
const { xata_id: _id, ...record } = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2675
4547
|
try {
|
2676
4548
|
const response = await updateRecordWithID({
|
2677
4549
|
pathParams: {
|
@@ -2685,7 +4557,7 @@ updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
|
|
2685
4557
|
body: record,
|
2686
4558
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2687
4559
|
});
|
2688
|
-
const schemaTables = await __privateMethod$2(this,
|
4560
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2689
4561
|
return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
|
2690
4562
|
} catch (e) {
|
2691
4563
|
if (isObject(e) && e.status === 404) {
|
@@ -2694,10 +4566,9 @@ updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
|
|
2694
4566
|
throw e;
|
2695
4567
|
}
|
2696
4568
|
};
|
2697
|
-
_updateRecords = new WeakSet();
|
2698
4569
|
updateRecords_fn = async function(objects, { ifVersion, upsert }) {
|
2699
4570
|
const operations = await promiseMap(objects, async ({ xata_id, ...object }) => {
|
2700
|
-
const fields = await __privateMethod$2(this,
|
4571
|
+
const fields = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2701
4572
|
return { update: { table: __privateGet$2(this, _table), id: xata_id, ifVersion, upsert, fields } };
|
2702
4573
|
});
|
2703
4574
|
const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
|
@@ -2722,10 +4593,8 @@ updateRecords_fn = async function(objects, { ifVersion, upsert }) {
|
|
2722
4593
|
}
|
2723
4594
|
return ids;
|
2724
4595
|
};
|
2725
|
-
_upsertRecordWithID = new WeakSet();
|
2726
4596
|
upsertRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVersion }) {
|
2727
|
-
if (!recordId)
|
2728
|
-
return null;
|
4597
|
+
if (!recordId) return null;
|
2729
4598
|
const response = await upsertRecordWithID({
|
2730
4599
|
pathParams: {
|
2731
4600
|
workspace: "{workspaceId}",
|
@@ -2738,13 +4607,11 @@ upsertRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
|
|
2738
4607
|
body: object,
|
2739
4608
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2740
4609
|
});
|
2741
|
-
const schemaTables = await __privateMethod$2(this,
|
4610
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2742
4611
|
return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
|
2743
4612
|
};
|
2744
|
-
_deleteRecord = new WeakSet();
|
2745
4613
|
deleteRecord_fn = async function(recordId, columns = ["*"]) {
|
2746
|
-
if (!recordId)
|
2747
|
-
return null;
|
4614
|
+
if (!recordId) return null;
|
2748
4615
|
try {
|
2749
4616
|
const response = await deleteRecord({
|
2750
4617
|
pathParams: {
|
@@ -2757,7 +4624,7 @@ deleteRecord_fn = async function(recordId, columns = ["*"]) {
|
|
2757
4624
|
queryParams: { columns },
|
2758
4625
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2759
4626
|
});
|
2760
|
-
const schemaTables = await __privateMethod$2(this,
|
4627
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2761
4628
|
return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
|
2762
4629
|
} catch (e) {
|
2763
4630
|
if (isObject(e) && e.status === 404) {
|
@@ -2766,7 +4633,6 @@ deleteRecord_fn = async function(recordId, columns = ["*"]) {
|
|
2766
4633
|
throw e;
|
2767
4634
|
}
|
2768
4635
|
};
|
2769
|
-
_deleteRecords = new WeakSet();
|
2770
4636
|
deleteRecords_fn = async function(recordIds) {
|
2771
4637
|
const chunkedOperations = chunk(
|
2772
4638
|
compact(recordIds).map((id) => ({ delete: { table: __privateGet$2(this, _table), id } })),
|
@@ -2784,10 +4650,8 @@ deleteRecords_fn = async function(recordIds) {
|
|
2784
4650
|
});
|
2785
4651
|
}
|
2786
4652
|
};
|
2787
|
-
_getSchemaTables = new WeakSet();
|
2788
4653
|
getSchemaTables_fn = async function() {
|
2789
|
-
if (__privateGet$2(this, _schemaTables))
|
2790
|
-
return __privateGet$2(this, _schemaTables);
|
4654
|
+
if (__privateGet$2(this, _schemaTables)) return __privateGet$2(this, _schemaTables);
|
2791
4655
|
const { schema } = await getBranchDetails({
|
2792
4656
|
pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
|
2793
4657
|
...__privateGet$2(this, _getFetchProps).call(this)
|
@@ -2795,16 +4659,13 @@ getSchemaTables_fn = async function() {
|
|
2795
4659
|
__privateSet$1(this, _schemaTables, schema.tables);
|
2796
4660
|
return schema.tables;
|
2797
4661
|
};
|
2798
|
-
_transformObjectToApi = new WeakSet();
|
2799
4662
|
transformObjectToApi_fn = async function(object) {
|
2800
|
-
const schemaTables = await __privateMethod$2(this,
|
4663
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2801
4664
|
const schema = schemaTables.find((table) => table.name === __privateGet$2(this, _table));
|
2802
|
-
if (!schema)
|
2803
|
-
throw new Error(`Table ${__privateGet$2(this, _table)} not found in schema`);
|
4665
|
+
if (!schema) throw new Error(`Table ${__privateGet$2(this, _table)} not found in schema`);
|
2804
4666
|
const result = {};
|
2805
4667
|
for (const [key, value] of Object.entries(object)) {
|
2806
|
-
if (["xata_version", "xata_createdat", "xata_updatedat"].includes(key))
|
2807
|
-
continue;
|
4668
|
+
if (["xata_version", "xata_createdat", "xata_updatedat"].includes(key)) continue;
|
2808
4669
|
const type = schema.columns.find((column) => column.name === key)?.type;
|
2809
4670
|
switch (type) {
|
2810
4671
|
case "link": {
|
@@ -2834,11 +4695,9 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
|
|
2834
4695
|
const data = {};
|
2835
4696
|
Object.assign(data, { ...object });
|
2836
4697
|
const { columns } = schemaTables.find(({ name }) => name === table) ?? {};
|
2837
|
-
if (!columns)
|
2838
|
-
console.error(`Table ${table} not found in schema`);
|
4698
|
+
if (!columns) console.error(`Table ${table} not found in schema`);
|
2839
4699
|
for (const column of columns ?? []) {
|
2840
|
-
if (!isValidColumn(selectedColumns, column))
|
2841
|
-
continue;
|
4700
|
+
if (!isValidColumn(selectedColumns, column)) continue;
|
2842
4701
|
const value = data[column.name];
|
2843
4702
|
switch (column.type) {
|
2844
4703
|
case "datetime": {
|
@@ -2924,15 +4783,12 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
|
|
2924
4783
|
return record;
|
2925
4784
|
};
|
2926
4785
|
function extractId(value) {
|
2927
|
-
if (isString(value))
|
2928
|
-
|
2929
|
-
if (isObject(value) && isString(value.xata_id))
|
2930
|
-
return value.xata_id;
|
4786
|
+
if (isString(value)) return value;
|
4787
|
+
if (isObject(value) && isString(value.xata_id)) return value.xata_id;
|
2931
4788
|
return void 0;
|
2932
4789
|
}
|
2933
4790
|
function isValidColumn(columns, column) {
|
2934
|
-
if (columns.includes("*"))
|
2935
|
-
return true;
|
4791
|
+
if (columns.includes("*")) return true;
|
2936
4792
|
return columns.filter((item) => isString(item) && item.startsWith(column.name)).length > 0;
|
2937
4793
|
}
|
2938
4794
|
function parseIfVersion(...args) {
|
@@ -2972,19 +4828,12 @@ const includesAll = (value) => ({ $includesAll: value });
|
|
2972
4828
|
const includesNone = (value) => ({ $includesNone: value });
|
2973
4829
|
const includesAny = (value) => ({ $includesAny: value });
|
2974
4830
|
|
2975
|
-
var
|
2976
|
-
|
2977
|
-
throw TypeError("Cannot " + msg);
|
2978
|
-
};
|
2979
|
-
var __privateGet$1 = (obj, member, getter) => {
|
2980
|
-
__accessCheck$2(obj, member, "read from private field");
|
2981
|
-
return getter ? getter.call(obj) : member.get(obj);
|
2982
|
-
};
|
2983
|
-
var __privateAdd$2 = (obj, member, value) => {
|
2984
|
-
if (member.has(obj))
|
2985
|
-
throw TypeError("Cannot add the same private member more than once");
|
2986
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
4831
|
+
var __typeError$2 = (msg) => {
|
4832
|
+
throw TypeError(msg);
|
2987
4833
|
};
|
4834
|
+
var __accessCheck$2 = (obj, member, msg) => member.has(obj) || __typeError$2("Cannot " + msg);
|
4835
|
+
var __privateGet$1 = (obj, member, getter) => (__accessCheck$2(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
4836
|
+
var __privateAdd$2 = (obj, member, value) => member.has(obj) ? __typeError$2("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
2988
4837
|
var _tables;
|
2989
4838
|
class SchemaPlugin extends XataPlugin {
|
2990
4839
|
constructor() {
|
@@ -2996,8 +4845,7 @@ class SchemaPlugin extends XataPlugin {
|
|
2996
4845
|
{},
|
2997
4846
|
{
|
2998
4847
|
get: (_target, table) => {
|
2999
|
-
if (!isString(table))
|
3000
|
-
throw new Error("Invalid table name");
|
4848
|
+
if (!isString(table)) throw new Error("Invalid table name");
|
3001
4849
|
if (__privateGet$1(this, _tables)[table] === void 0) {
|
3002
4850
|
__privateGet$1(this, _tables)[table] = new RestRepository({ db, pluginOptions, table, schemaTables: pluginOptions.tables });
|
3003
4851
|
}
|
@@ -3088,30 +4936,23 @@ function getContentType(file) {
|
|
3088
4936
|
return "application/octet-stream";
|
3089
4937
|
}
|
3090
4938
|
|
3091
|
-
var
|
3092
|
-
|
3093
|
-
throw TypeError("Cannot " + msg);
|
3094
|
-
};
|
3095
|
-
var __privateAdd$1 = (obj, member, value) => {
|
3096
|
-
if (member.has(obj))
|
3097
|
-
throw TypeError("Cannot add the same private member more than once");
|
3098
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
4939
|
+
var __typeError$1 = (msg) => {
|
4940
|
+
throw TypeError(msg);
|
3099
4941
|
};
|
3100
|
-
var
|
3101
|
-
|
3102
|
-
|
3103
|
-
|
3104
|
-
var _search, search_fn;
|
4942
|
+
var __accessCheck$1 = (obj, member, msg) => member.has(obj) || __typeError$1("Cannot " + msg);
|
4943
|
+
var __privateAdd$1 = (obj, member, value) => member.has(obj) ? __typeError$1("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
4944
|
+
var __privateMethod$1 = (obj, member, method) => (__accessCheck$1(obj, member, "access private method"), method);
|
4945
|
+
var _SearchPlugin_instances, search_fn;
|
3105
4946
|
class SearchPlugin extends XataPlugin {
|
3106
4947
|
constructor(db) {
|
3107
4948
|
super();
|
3108
4949
|
this.db = db;
|
3109
|
-
__privateAdd$1(this,
|
4950
|
+
__privateAdd$1(this, _SearchPlugin_instances);
|
3110
4951
|
}
|
3111
4952
|
build(pluginOptions) {
|
3112
4953
|
return {
|
3113
4954
|
all: async (query, options = {}) => {
|
3114
|
-
const { records, totalCount } = await __privateMethod$1(this,
|
4955
|
+
const { records, totalCount } = await __privateMethod$1(this, _SearchPlugin_instances, search_fn).call(this, query, options, pluginOptions);
|
3115
4956
|
return {
|
3116
4957
|
totalCount,
|
3117
4958
|
records: records.map((record) => {
|
@@ -3121,7 +4962,7 @@ class SearchPlugin extends XataPlugin {
|
|
3121
4962
|
};
|
3122
4963
|
},
|
3123
4964
|
byTable: async (query, options = {}) => {
|
3124
|
-
const { records: rawRecords, totalCount } = await __privateMethod$1(this,
|
4965
|
+
const { records: rawRecords, totalCount } = await __privateMethod$1(this, _SearchPlugin_instances, search_fn).call(this, query, options, pluginOptions);
|
3125
4966
|
const records = rawRecords.reduce((acc, record) => {
|
3126
4967
|
const table = record.xata_table;
|
3127
4968
|
const items = acc[table] ?? [];
|
@@ -3133,7 +4974,7 @@ class SearchPlugin extends XataPlugin {
|
|
3133
4974
|
};
|
3134
4975
|
}
|
3135
4976
|
}
|
3136
|
-
|
4977
|
+
_SearchPlugin_instances = new WeakSet();
|
3137
4978
|
search_fn = async function(query, options, pluginOptions) {
|
3138
4979
|
const { tables, fuzziness, highlight, prefix, page } = options ?? {};
|
3139
4980
|
const { records, totalCount } = await searchBranch({
|
@@ -3169,8 +5010,7 @@ function arrayString(val) {
   return result;
 }
 function prepareValue(value) {
-  if (!isDefined(value))
-    return null;
+  if (!isDefined(value)) return null;
   if (value instanceof Date) {
     return value.toISOString();
   }
@@ -3197,8 +5037,8 @@ function prepareParams(param1, param2) {
     return { statement, params: param2?.map((value) => prepareValue(value)) };
   }
   if (isObject(param1)) {
-    const { statement, params, consistency } = param1;
-    return { statement, params: params?.map((value) => prepareValue(value)), consistency };
+    const { statement, params, consistency, responseType } = param1;
+    return { statement, params: params?.map((value) => prepareValue(value)), consistency, responseType };
   }
   throw new Error("Invalid query");
 }
@@ -3209,7 +5049,7 @@ class SQLPlugin extends XataPlugin {
       if (!isParamsObject(query) && (!isTemplateStringsArray(query) || !Array.isArray(parameters))) {
        throw new Error("Invalid usage of `xata.sql`. Please use it as a tagged template or with an object.");
       }
-      const { statement, params, consistency } = prepareParams(query, parameters);
+      const { statement, params, consistency, responseType } = prepareParams(query, parameters);
       const {
         records,
         rows,
@@ -3217,7 +5057,7 @@ class SQLPlugin extends XataPlugin {
         columns = []
       } = await sqlQuery({
         pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
-        body: { statement, params, consistency },
+        body: { statement, params, consistency, responseType },
         ...pluginOptions
       });
       return { records, rows, warning, columns };
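The three hunks above thread a new `responseType` option from the `xata.sql` params object through `prepareParams` and into the `sqlQuery` request body alongside `consistency`. A minimal sketch of both call forms (the accepted `responseType` and `consistency` values are not listed in these hunks, so the ones below are assumptions):

    // Tagged-template form, unchanged by this diff:
    const { records } = await xata.sql`SELECT * FROM "users" WHERE name = ${name}`;
    // Object form, which now forwards responseType to the request body:
    const result = await xata.sql({
      statement: 'SELECT * FROM "users" WHERE name = $1',
      params: [name],
      consistency: "strong",   // assumed value
      responseType: "json"     // assumed value
    });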
@@ -3249,8 +5089,7 @@ function buildDomain(host, region) {
 function buildConnectionString({ apiKey, workspacesApiUrl, branch }) {
   const url = isString(workspacesApiUrl) ? workspacesApiUrl : workspacesApiUrl("", {});
   const parts = parseWorkspacesUrlParts(url);
-  if (!parts)
-    throw new Error("Invalid workspaces URL");
+  if (!parts) throw new Error("Invalid workspaces URL");
   const { workspace: workspaceSlug, region, database, host } = parts;
   const domain = buildDomain(host, region);
   const workspace = workspaceSlug.split("-").pop();
@@ -3275,39 +5114,24 @@ class TransactionPlugin extends XataPlugin {
   }
 }
 
-var __accessCheck = (obj, member, msg) => {
-  if (!member.has(obj))
-    throw TypeError("Cannot " + msg);
-};
-var __privateGet = (obj, member, getter) => {
-  __accessCheck(obj, member, "read from private field");
-  return getter ? getter.call(obj) : member.get(obj);
-};
-var __privateAdd = (obj, member, value) => {
-  if (member.has(obj))
-    throw TypeError("Cannot add the same private member more than once");
-  member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
-};
-var __privateSet = (obj, member, value, setter) => {
-  __accessCheck(obj, member, "write to private field");
-  setter ? setter.call(obj, value) : member.set(obj, value);
-  return value;
-};
-var __privateMethod = (obj, member, method) => {
-  __accessCheck(obj, member, "access private method");
-  return method;
+var __typeError = (msg) => {
+  throw TypeError(msg);
 };
+var __accessCheck = (obj, member, msg) => member.has(obj) || __typeError("Cannot " + msg);
+var __privateGet = (obj, member, getter) => (__accessCheck(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
+var __privateAdd = (obj, member, value) => member.has(obj) ? __typeError("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
+var __privateSet = (obj, member, value, setter) => (__accessCheck(obj, member, "write to private field"), member.set(obj, value), value);
+var __privateMethod = (obj, member, method) => (__accessCheck(obj, member, "access private method"), method);
 const buildClient = (plugins) => {
-  var _options, _parseOptions, parseOptions_fn, _getFetchProps, getFetchProps_fn, _a;
+  var _options, _instances, parseOptions_fn, getFetchProps_fn, _a;
   return _a = class {
     constructor(options = {}, tables) {
-      __privateAdd(this, _parseOptions);
-      __privateAdd(this, _getFetchProps);
-      __privateAdd(this, _options, void 0);
-      const safeOptions = __privateMethod(this, _parseOptions, parseOptions_fn).call(this, options);
+      __privateAdd(this, _instances);
+      __privateAdd(this, _options);
+      const safeOptions = __privateMethod(this, _instances, parseOptions_fn).call(this, options);
       __privateSet(this, _options, safeOptions);
       const pluginOptions = {
-        ...__privateMethod(this, _getFetchProps, getFetchProps_fn).call(this, safeOptions),
+        ...__privateMethod(this, _instances, getFetchProps_fn).call(this, safeOptions),
         host: safeOptions.host,
         tables,
         branch: safeOptions.branch
@@ -3324,8 +5148,7 @@ const buildClient = (plugins) => {
       this.sql = sql;
       this.files = files;
       for (const [key, namespace] of Object.entries(plugins ?? {})) {
-        if (namespace === void 0)
-          continue;
+        if (namespace === void 0) continue;
         this[key] = namespace.build(pluginOptions);
       }
     }
@@ -3334,8 +5157,8 @@ const buildClient = (plugins) => {
       const branch = __privateGet(this, _options).branch;
       return { databaseURL, branch };
     }
-  }, _options = new WeakMap(), _parseOptions = new WeakSet(), parseOptions_fn = function(options) {
-    const enableBrowser = options?.enableBrowser ??
+  }, _options = new WeakMap(), _instances = new WeakSet(), parseOptions_fn = function(options) {
+    const enableBrowser = options?.enableBrowser ?? false;
     const isBrowser = typeof window !== "undefined" && typeof Deno === "undefined";
     if (isBrowser && !enableBrowser) {
       throw new Error(
@@ -3343,8 +5166,9 @@ const buildClient = (plugins) => {
       );
     }
     const fetch = getFetchImplementation(options?.fetch);
-    const databaseURL = options?.databaseURL
-    const apiKey = options?.apiKey
+    const databaseURL = options?.databaseURL;
+    const apiKey = options?.apiKey;
+    const branch = options?.branch;
     const trace = options?.trace ?? defaultTrace;
     const clientName = options?.clientName;
     const host = options?.host ?? "production";
@@ -3355,25 +5179,8 @@ const buildClient = (plugins) => {
     if (!databaseURL) {
       throw new Error("Option databaseURL is required");
     }
-
-
-    const branch = options?.branch || previewBranch || envBranch || "main";
-    if (!!previewBranch && branch !== previewBranch) {
-      console.warn(
-        `Ignoring preview branch ${previewBranch} because branch option was passed to the client constructor with value ${branch}`
-      );
-    } else if (!!envBranch && branch !== envBranch) {
-      console.warn(
-        `Ignoring branch ${envBranch} because branch option was passed to the client constructor with value ${branch}`
-      );
-    } else if (!!previewBranch && !!envBranch && previewBranch !== envBranch) {
-      console.warn(
-        `Ignoring preview branch ${previewBranch} and branch ${envBranch} because branch option was passed to the client constructor with value ${branch}`
-      );
-    } else if (!previewBranch && !envBranch && options?.branch === void 0) {
-      console.warn(
-        `No branch was passed to the client constructor. Using default branch ${branch}. You can set the branch with the environment variable XATA_BRANCH or by passing the branch option to the client constructor.`
-      );
+    if (!branch) {
+      throw new Error("Option branch is required");
     }
     return {
       fetch,
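This hunk drops the old fallback chain (preview branch, environment branch, then "main") together with its console warnings; the branch is now read only from the constructor options, and a missing `branch` throws just like a missing `databaseURL`. A minimal construction sketch, assuming a client class produced by `buildClient()` (the class name and URL below are illustrative):

    const xata = new XataClient({
      apiKey: process.env.XATA_API_KEY,
      databaseURL: "https://example-workspace.us-east-1.xata.sh/db/mydb", // illustrative
      branch: "main" // now required: omitting it throws "Option branch is required"
    });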
@@ -3387,7 +5194,7 @@ const buildClient = (plugins) => {
       clientName,
       xataAgentExtra
     };
-  }, _getFetchProps = new WeakSet(), getFetchProps_fn = function({
+  }, getFetchProps_fn = function({
     fetch,
     apiKey,
     databaseURL,
@@ -3428,26 +5235,19 @@ class Serializer {
   }
   toJSON(data) {
     function visit(obj) {
-      if (Array.isArray(obj))
-        return obj.map(visit);
+      if (Array.isArray(obj)) return obj.map(visit);
       const type = typeof obj;
-      if (type === "undefined")
-        return { [META]: "undefined" };
-      if (type === "bigint")
-        return { [META]: "bigint", [VALUE]: obj.toString() };
-      if (obj === null || type !== "object")
-        return obj;
+      if (type === "undefined") return { [META]: "undefined" };
+      if (type === "bigint") return { [META]: "bigint", [VALUE]: obj.toString() };
+      if (obj === null || type !== "object") return obj;
       const constructor = obj.constructor;
       const o = { [META]: constructor.name };
       for (const [key, value] of Object.entries(obj)) {
         o[key] = visit(value);
       }
-      if (constructor === Date)
-        o[VALUE] = obj.toISOString();
-      if (constructor === Map)
-        o[VALUE] = Object.fromEntries(obj);
-      if (constructor === Set)
-        o[VALUE] = [...obj];
+      if (constructor === Date) o[VALUE] = obj.toISOString();
+      if (constructor === Map) o[VALUE] = Object.fromEntries(obj);
+      if (constructor === Set) o[VALUE] = [...obj];
       return o;
     }
     return JSON.stringify(visit(data));
@@ -3460,16 +5260,11 @@ class Serializer {
       if (constructor) {
         return Object.assign(Object.create(constructor.prototype), rest);
       }
-      if (clazz === "Date")
-        return new Date(val);
-      if (clazz === "Set")
-        return new Set(val);
-      if (clazz === "Map")
-        return new Map(Object.entries(val));
-      if (clazz === "bigint")
-        return BigInt(val);
-      if (clazz === "undefined")
-        return void 0;
+      if (clazz === "Date") return new Date(val);
+      if (clazz === "Set") return new Set(val);
+      if (clazz === "Map") return new Map(Object.entries(val));
+      if (clazz === "bigint") return BigInt(val);
+      if (clazz === "undefined") return void 0;
       return rest;
     }
     return value;
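The `toJSON`/`fromJSON` pair above tags values with `[META]`/`[VALUE]` markers so `Date`, `Map`, `Set`, `bigint` and `undefined` survive a JSON round trip; this diff only collapses the single-statement `if` bodies onto one line. A small round-trip sketch using the exported `serialize`/`deserialize` wrappers (the wrapper names come from this module's export list):

    const payload = { when: new Date(), tags: new Set(["a", "b"]), count: 10n };
    const json = serialize(payload);   // JSON string carrying META/VALUE markers
    const back = deserialize(json);    // back.when is a Date again, back.tags a Set, back.count a BigInt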
@@ -3484,6 +5279,47 @@ const deserialize = (json) => {
   return defaultSerializer.fromJSON(json);
 };
 
+function parseEnvironment(environment) {
+  try {
+    if (typeof environment === "function") {
+      return new Proxy(
+        {},
+        {
+          get(target) {
+            return environment(target);
+          }
+        }
+      );
+    }
+    if (isObject(environment)) {
+      return environment;
+    }
+  } catch (error) {
+  }
+  return {};
+}
+function buildPreviewBranchName({ org, branch }) {
+  return `preview-${org}-${branch}`;
+}
+function getDeployPreviewBranch(environment) {
+  try {
+    const { deployPreview, deployPreviewBranch, vercelGitCommitRef, vercelGitRepoOwner } = parseEnvironment(environment);
+    if (deployPreviewBranch) return deployPreviewBranch;
+    switch (deployPreview) {
+      case "vercel": {
+        if (!vercelGitCommitRef || !vercelGitRepoOwner) {
+          console.warn("XATA_PREVIEW=vercel but VERCEL_GIT_COMMIT_REF or VERCEL_GIT_REPO_OWNER is not valid");
+          return void 0;
+        }
+        return buildPreviewBranchName({ org: vercelGitRepoOwner, branch: vercelGitCommitRef });
+      }
+    }
+    return void 0;
+  } catch (err) {
+    return void 0;
+  }
+}
+
 class XataError extends Error {
   constructor(message, status) {
     super(message);
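`getDeployPreviewBranch` is new in this version: it reads deploy-preview settings from an environment object (or accessor function) via `parseEnvironment` and, for Vercel previews, derives a branch name with `buildPreviewBranchName`. A short sketch based directly on the code above:

    // Explicit values; the keys mirror the destructuring in getDeployPreviewBranch.
    getDeployPreviewBranch({
      deployPreview: "vercel",
      vercelGitRepoOwner: "acme",        // illustrative owner
      vercelGitCommitRef: "feature-x"    // illustrative branch
    });
    // => "preview-acme-feature-x" (via buildPreviewBranchName)

    // Missing Vercel metadata logs a warning and returns undefined:
    getDeployPreviewBranch({ deployPreview: "vercel" }); // => undefined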
@@ -3491,5 +5327,5 @@ class XataError extends Error {
|
|
3491
5327
|
}
|
3492
5328
|
}
|
3493
5329
|
|
3494
|
-
export { BaseClient, FetcherError, FilesPlugin, operationsByTag as Operations, PAGINATION_DEFAULT_OFFSET, PAGINATION_DEFAULT_SIZE, PAGINATION_MAX_OFFSET, PAGINATION_MAX_SIZE, Page, PageRecordArray, Query, RecordArray, RecordColumnTypes, Repository, RestRepository, SQLPlugin, SchemaPlugin, SearchPlugin, Serializer, TransactionPlugin, XataApiClient, XataApiPlugin, XataError, XataFile, XataPlugin, acceptWorkspaceMemberInvite, adaptTable, addGitBranchesEntry, addTableColumn, aggregateTable, applyBranchSchemaEdit, applyMigration, askTable, askTableSession, branchTransaction, buildClient, buildPreviewBranchName, buildProviderString, bulkInsertTableRecords, cancelWorkspaceMemberInvite, compareBranchSchemas, compareBranchWithUserSchema, compareMigrationRequest, contains, copyBranch, createBranch, createCluster, createDatabase, createMigrationRequest, createTable, createUserAPIKey, createWorkspace, deleteBranch, deleteColumn, deleteDatabase, deleteDatabaseGithubSettings, deleteFile, deleteFileItem, deleteOAuthAccessToken, deleteRecord, deleteTable, deleteUser, deleteUserAPIKey, deleteUserOAuthClient, deleteWorkspace, deserialize, endsWith, equals, executeBranchMigrationPlan, exists, fileAccess, fileUpload, ge,
|
5330
|
+
export { BaseClient, Buffer, FetcherError, FilesPlugin, operationsByTag as Operations, PAGINATION_DEFAULT_OFFSET, PAGINATION_DEFAULT_SIZE, PAGINATION_MAX_OFFSET, PAGINATION_MAX_SIZE, Page, PageRecordArray, Query, RecordArray, RecordColumnTypes, Repository, RestRepository, SQLPlugin, SchemaPlugin, SearchPlugin, Serializer, TransactionPlugin, XataApiClient, XataApiPlugin, XataError, XataFile, XataPlugin, acceptWorkspaceMemberInvite, adaptAllTables, adaptTable, addGitBranchesEntry, addTableColumn, aggregateTable, applyBranchSchemaEdit, applyMigration, askTable, askTableSession, branchTransaction, buildClient, buildPreviewBranchName, buildProviderString, bulkInsertTableRecords, cancelWorkspaceMemberInvite, compareBranchSchemas, compareBranchWithUserSchema, compareMigrationRequest, contains, copyBranch, createBranch, createCluster, createDatabase, createMigrationRequest, createTable, createUserAPIKey, createWorkspace, deleteBranch, deleteCluster, deleteColumn, deleteDatabase, deleteDatabaseGithubSettings, deleteFile, deleteFileItem, deleteOAuthAccessToken, deleteRecord, deleteTable, deleteUser, deleteUserAPIKey, deleteUserOAuthClient, deleteWorkspace, deserialize, endsWith, equals, executeBranchMigrationPlan, exists, fileAccess, fileUpload, ge, getAuthorizationCode, getBranchDetails, getBranchList, getBranchMetadata, getBranchMigrationHistory, getBranchMigrationJobStatus, getBranchMigrationPlan, getBranchSchemaHistory, getBranchStats, getCluster, getColumn, getDatabaseGithubSettings, getDatabaseList, getDatabaseMetadata, getDatabaseSettings, getDeployPreviewBranch, getFile, getFileItem, getGitBranchesMapping, getHostUrl, getMigrationHistory, getMigrationJobStatus, getMigrationRequest, getMigrationRequestIsMerged, getRecord, getSchema, getTableColumns, getTableSchema, getUser, getUserAPIKeys, getUserOAuthAccessTokens, getUserOAuthClients, getWorkspace, getWorkspaceMembersList, getWorkspaceSettings, getWorkspacesList, grantAuthorizationCode, greaterEquals, greaterThan, greaterThanEquals, gt, gte, iContains, iPattern, includes, includesAll, includesAny, includesNone, insertRecord, insertRecordWithID, inviteWorkspaceMember, is, isCursorPaginationOptions, isHostProviderAlias, isHostProviderBuilder, isIdentifiable, isNot, isValidExpandedColumn, isValidSelectableColumns, le, lessEquals, lessThan, lessThanEquals, listClusters, listMigrationRequestsCommits, listRegions, lt, lte, mergeMigrationRequest, notExists, operationsByTag, parseProviderString, parseWorkspacesUrlParts, pattern, previewBranchSchemaEdit, pushBranchMigrations, putFile, putFileItem, queryMigrationRequests, queryTable, removeGitBranchesEntry, removeWorkspaceMember, renameDatabase, resendWorkspaceMemberInvite, resolveBranch, searchBranch, searchTable, serialize, setTableSchema, sqlQuery, startMigration, startsWith, summarizeTable, transformImage, updateBranchMetadata, updateBranchSchema, updateCluster, updateColumn, updateDatabaseGithubSettings, updateDatabaseMetadata, updateDatabaseSettings, updateMigrationRequest, updateOAuthAccessToken, updateRecordWithID, updateTable, updateUser, updateWorkspace, updateWorkspaceMemberInvite, updateWorkspaceMemberRole, updateWorkspaceSettings, upsertRecordWithID, vectorSearchTable };
|
3495
5331
|
//# sourceMappingURL=index.mjs.map
|