@xata.io/client 0.0.0-next.v5cfac065298489e56b1435ad10e8a947642693de → 0.0.0-next.v64f10283eb9a098c575590d3ea17600e350321c9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-add-version.log +1 -1
- package/.turbo/turbo-build.log +3 -3
- package/CHANGELOG.md +19 -3
- package/dist/index.cjs +2506 -576
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.ts +5193 -3746
- package/dist/index.mjs +2491 -573
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.mjs
CHANGED
@@ -22,6 +22,1789 @@ const TraceAttributes = {
|
|
22
22
|
CLOUDFLARE_RAY_ID: "cf.ray"
|
23
23
|
};
|
24
24
|
|
25
|
+
// ---------------------------------------------------------------------------
// Base64 codec (inlined base64-js equivalent).
//
// `lookup` maps a 6-bit value (0..63) to its base64 character; `revLookup`
// maps a character code back to its 6-bit value. The URL-safe variants '-'
// and '_' decode like '+' and '/' respectively, so both alphabets are
// accepted on input while the standard alphabet is produced on output.
// ---------------------------------------------------------------------------
const lookup = [];
const revLookup = [];
const code = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
Array.from(code).forEach((ch, idx) => {
  lookup[idx] = ch;
  revLookup[ch.charCodeAt(0)] = idx;
});
revLookup["-".charCodeAt(0)] = 62;
revLookup["_".charCodeAt(0)] = 63;

/**
 * Validates a base64 string and locates its padding.
 *
 * @param {string} b64 - Input string; its length must be a multiple of 4.
 * @returns {[number, number]} `[validLen, placeHoldersLen]` where `validLen`
 *   is the index of the first '=' (or the full length when unpadded) and
 *   `placeHoldersLen` is the number of '=' placeholder characters (0, 1 or 2).
 * @throws {Error} When the length is not a multiple of 4.
 */
function getLens(b64) {
  if (b64.length % 4 > 0) {
    throw new Error("Invalid string. Length must be a multiple of 4");
  }
  const padIndex = b64.indexOf("=");
  const validLen = padIndex === -1 ? b64.length : padIndex;
  const placeHoldersLen = validLen === b64.length ? 0 : 4 - (validLen % 4);
  return [validLen, placeHoldersLen];
}

/**
 * Number of decoded bytes for a base64 string with the given valid/padding
 * split. Every 4 input characters yield 3 bytes, minus one byte per '='.
 * The first parameter is unused and kept only for signature compatibility.
 */
function _byteLength(_b64, validLen, placeHoldersLen) {
  return ((validLen + placeHoldersLen) * 3) / 4 - placeHoldersLen;
}

/**
 * Decodes a base64 (or base64url) string into a `Uint8Array`.
 *
 * @param {string} b64 - Padded base64 input (length divisible by 4).
 * @returns {Uint8Array} Decoded bytes.
 * @throws {Error} When the input length is not a multiple of 4.
 */
function toByteArray(b64) {
  const [validLen, placeHoldersLen] = getLens(b64);
  const arr = new Uint8Array(_byteLength(b64, validLen, placeHoldersLen));
  let curByte = 0;
  // Full 4-char groups decode to 3 bytes each; a trailing padded group is
  // handled separately below.
  const fullGroupEnd = placeHoldersLen > 0 ? validLen - 4 : validLen;
  let i = 0;
  for (; i < fullGroupEnd; i += 4) {
    const tmp =
      (revLookup[b64.charCodeAt(i)] << 18) |
      (revLookup[b64.charCodeAt(i + 1)] << 12) |
      (revLookup[b64.charCodeAt(i + 2)] << 6) |
      revLookup[b64.charCodeAt(i + 3)];
    arr[curByte++] = (tmp >> 16) & 255;
    arr[curByte++] = (tmp >> 8) & 255;
    arr[curByte++] = tmp & 255;
  }
  if (placeHoldersLen === 2) {
    // "XX==" → one byte from two characters.
    const tmp = (revLookup[b64.charCodeAt(i)] << 2) | (revLookup[b64.charCodeAt(i + 1)] >> 4);
    arr[curByte++] = tmp & 255;
  } else if (placeHoldersLen === 1) {
    // "XXX=" → two bytes from three characters.
    const tmp =
      (revLookup[b64.charCodeAt(i)] << 10) |
      (revLookup[b64.charCodeAt(i + 1)] << 4) |
      (revLookup[b64.charCodeAt(i + 2)] >> 2);
    arr[curByte++] = (tmp >> 8) & 255;
    arr[curByte++] = tmp & 255;
  }
  return arr;
}

/**
 * Encodes a 24-bit integer (three bytes) as four base64 characters.
 */
function tripletToBase64(num) {
  return (
    lookup[(num >> 18) & 63] +
    lookup[(num >> 12) & 63] +
    lookup[(num >> 6) & 63] +
    lookup[num & 63]
  );
}

/**
 * Encodes bytes `[start, end)` of `uint8` — a range whose length is a
 * multiple of 3 — into a base64 string fragment.
 */
function encodeChunk(uint8, start, end) {
  const output = [];
  for (let i = start; i < end; i += 3) {
    const triplet =
      ((uint8[i] << 16) & 16711680) + ((uint8[i + 1] << 8) & 65280) + (uint8[i + 2] & 255);
    output.push(tripletToBase64(triplet));
  }
  return output.join("");
}

/**
 * Encodes a byte array as a padded base64 string (standard alphabet).
 *
 * @param {Uint8Array} uint8 - Bytes to encode.
 * @returns {string} Base64 text, '='-padded when the length is not a
 *   multiple of 3.
 */
function fromByteArray(uint8) {
  const len = uint8.length;
  const extraBytes = len % 3;
  const parts = [];
  // Encode in bounded chunks so intermediate strings stay small.
  const maxChunkLength = 16383;
  for (let i = 0, mainLen = len - extraBytes; i < mainLen; i += maxChunkLength) {
    parts.push(encodeChunk(uint8, i, Math.min(i + maxChunkLength, mainLen)));
  }
  if (extraBytes === 1) {
    // One leftover byte → two characters plus "==".
    const tmp = uint8[len - 1];
    parts.push(lookup[tmp >> 2] + lookup[(tmp << 4) & 63] + "==");
  } else if (extraBytes === 2) {
    // Two leftover bytes → three characters plus "=".
    const tmp = (uint8[len - 2] << 8) + uint8[len - 1];
    parts.push(lookup[tmp >> 10] + lookup[(tmp >> 4) & 63] + lookup[(tmp << 2) & 63] + "=");
  }
  return parts.join("");
}
|
103
|
+
|
104
|
+
// Largest representable buffer size: 2**31 - 1 bytes.
// NOTE(review): presumably used as the allocation cap by the Buffer class
// below — the consuming code is not fully visible in this chunk; confirm.
const K_MAX_LENGTH = 2147483647;
// NOTE(review): looks like a cap on how many char codes are passed to
// String.fromCharCode at once during decoding — usage is outside this view;
// confirm against the string-slice helpers.
const MAX_ARGUMENTS_LENGTH = 4096;
|
106
|
+
class Buffer extends Uint8Array {
|
107
|
+
/**
|
108
|
+
* Constructs a new `Buffer` instance.
|
109
|
+
*
|
110
|
+
* @param value
|
111
|
+
* @param encodingOrOffset
|
112
|
+
* @param length
|
113
|
+
*/
|
114
|
+
constructor(value, encodingOrOffset, length) {
|
115
|
+
if (typeof value === "number") {
|
116
|
+
if (typeof encodingOrOffset === "string") {
|
117
|
+
throw new TypeError("The first argument must be of type string, received type number");
|
118
|
+
}
|
119
|
+
if (value < 0) {
|
120
|
+
throw new RangeError("The buffer size cannot be negative");
|
121
|
+
}
|
122
|
+
super(value < 0 ? 0 : Buffer._checked(value) | 0);
|
123
|
+
} else if (typeof value === "string") {
|
124
|
+
if (typeof encodingOrOffset !== "string") {
|
125
|
+
encodingOrOffset = "utf8";
|
126
|
+
}
|
127
|
+
if (!Buffer.isEncoding(encodingOrOffset)) {
|
128
|
+
throw new TypeError("Unknown encoding: " + encodingOrOffset);
|
129
|
+
}
|
130
|
+
const length2 = Buffer.byteLength(value, encodingOrOffset) | 0;
|
131
|
+
super(length2);
|
132
|
+
const written = this.write(value, 0, this.length, encodingOrOffset);
|
133
|
+
if (written !== length2) {
|
134
|
+
throw new TypeError(
|
135
|
+
"Number of bytes written did not match expected length (wrote " + written + ", expected " + length2 + ")"
|
136
|
+
);
|
137
|
+
}
|
138
|
+
} else if (ArrayBuffer.isView(value)) {
|
139
|
+
if (Buffer._isInstance(value, Uint8Array)) {
|
140
|
+
const copy = new Uint8Array(value);
|
141
|
+
const array = copy.buffer;
|
142
|
+
const byteOffset = copy.byteOffset;
|
143
|
+
const length2 = copy.byteLength;
|
144
|
+
if (byteOffset < 0 || array.byteLength < byteOffset) {
|
145
|
+
throw new RangeError("offset is outside of buffer bounds");
|
146
|
+
}
|
147
|
+
if (array.byteLength < byteOffset + (length2 || 0)) {
|
148
|
+
throw new RangeError("length is outside of buffer bounds");
|
149
|
+
}
|
150
|
+
super(new Uint8Array(array, byteOffset, length2));
|
151
|
+
} else {
|
152
|
+
const array = value;
|
153
|
+
const length2 = array.length < 0 ? 0 : Buffer._checked(array.length) | 0;
|
154
|
+
super(new Uint8Array(length2));
|
155
|
+
for (let i = 0; i < length2; i++) {
|
156
|
+
this[i] = array[i] & 255;
|
157
|
+
}
|
158
|
+
}
|
159
|
+
} else if (value == null) {
|
160
|
+
throw new TypeError(
|
161
|
+
"The first argument must be one of type string, Buffer, ArrayBuffer, Array, or Array-like Object. Received type " + typeof value
|
162
|
+
);
|
163
|
+
} else if (Buffer._isInstance(value, ArrayBuffer) || value && Buffer._isInstance(value.buffer, ArrayBuffer)) {
|
164
|
+
const array = value;
|
165
|
+
const byteOffset = encodingOrOffset;
|
166
|
+
if (byteOffset < 0 || array.byteLength < byteOffset) {
|
167
|
+
throw new RangeError("offset is outside of buffer bounds");
|
168
|
+
}
|
169
|
+
if (array.byteLength < byteOffset + (length || 0)) {
|
170
|
+
throw new RangeError("length is outside of buffer bounds");
|
171
|
+
}
|
172
|
+
super(new Uint8Array(array, byteOffset, length));
|
173
|
+
} else if (Array.isArray(value)) {
|
174
|
+
const array = value;
|
175
|
+
const length2 = array.length < 0 ? 0 : Buffer._checked(array.length) | 0;
|
176
|
+
super(new Uint8Array(length2));
|
177
|
+
for (let i = 0; i < length2; i++) {
|
178
|
+
this[i] = array[i] & 255;
|
179
|
+
}
|
180
|
+
} else {
|
181
|
+
throw new TypeError("Unable to determine the correct way to allocate buffer for type " + typeof value);
|
182
|
+
}
|
183
|
+
}
|
184
|
+
/**
|
185
|
+
* Return JSON representation of the buffer.
|
186
|
+
*/
|
187
|
+
toJSON() {
|
188
|
+
return {
|
189
|
+
type: "Buffer",
|
190
|
+
data: Array.prototype.slice.call(this)
|
191
|
+
};
|
192
|
+
}
|
193
|
+
/**
|
194
|
+
* Writes `string` to the buffer at `offset` according to the character encoding in `encoding`. The `length`
|
195
|
+
* parameter is the number of bytes to write. If the buffer does not contain enough space to fit the entire string,
|
196
|
+
* only part of `string` will be written. However, partially encoded characters will not be written.
|
197
|
+
*
|
198
|
+
* @param string String to write to `buf`.
|
199
|
+
* @param offset Number of bytes to skip before starting to write `string`. Default: `0`.
|
200
|
+
* @param length Maximum number of bytes to write: Default: `buf.length - offset`.
|
201
|
+
* @param encoding The character encoding of `string`. Default: `utf8`.
|
202
|
+
*/
|
203
|
+
write(string, offset, length, encoding) {
|
204
|
+
if (typeof offset === "undefined") {
|
205
|
+
encoding = "utf8";
|
206
|
+
length = this.length;
|
207
|
+
offset = 0;
|
208
|
+
} else if (typeof length === "undefined" && typeof offset === "string") {
|
209
|
+
encoding = offset;
|
210
|
+
length = this.length;
|
211
|
+
offset = 0;
|
212
|
+
} else if (typeof offset === "number" && isFinite(offset)) {
|
213
|
+
offset = offset >>> 0;
|
214
|
+
if (typeof length === "number" && isFinite(length)) {
|
215
|
+
length = length >>> 0;
|
216
|
+
encoding ?? (encoding = "utf8");
|
217
|
+
} else if (typeof length === "string") {
|
218
|
+
encoding = length;
|
219
|
+
length = void 0;
|
220
|
+
}
|
221
|
+
} else {
|
222
|
+
throw new Error("Buffer.write(string, encoding, offset[, length]) is no longer supported");
|
223
|
+
}
|
224
|
+
const remaining = this.length - offset;
|
225
|
+
if (typeof length === "undefined" || length > remaining) {
|
226
|
+
length = remaining;
|
227
|
+
}
|
228
|
+
if (string.length > 0 && (length < 0 || offset < 0) || offset > this.length) {
|
229
|
+
throw new RangeError("Attempt to write outside buffer bounds");
|
230
|
+
}
|
231
|
+
encoding || (encoding = "utf8");
|
232
|
+
switch (Buffer._getEncoding(encoding)) {
|
233
|
+
case "hex":
|
234
|
+
return Buffer._hexWrite(this, string, offset, length);
|
235
|
+
case "utf8":
|
236
|
+
return Buffer._utf8Write(this, string, offset, length);
|
237
|
+
case "ascii":
|
238
|
+
case "latin1":
|
239
|
+
case "binary":
|
240
|
+
return Buffer._asciiWrite(this, string, offset, length);
|
241
|
+
case "ucs2":
|
242
|
+
case "utf16le":
|
243
|
+
return Buffer._ucs2Write(this, string, offset, length);
|
244
|
+
case "base64":
|
245
|
+
return Buffer._base64Write(this, string, offset, length);
|
246
|
+
}
|
247
|
+
}
|
248
|
+
/**
|
249
|
+
* Decodes the buffer to a string according to the specified character encoding.
|
250
|
+
* Passing `start` and `end` will decode only a subset of the buffer.
|
251
|
+
*
|
252
|
+
* Note that if the encoding is `utf8` and a byte sequence in the input is not valid UTF-8, then each invalid byte
|
253
|
+
* will be replaced with `U+FFFD`.
|
254
|
+
*
|
255
|
+
* @param encoding
|
256
|
+
* @param start
|
257
|
+
* @param end
|
258
|
+
*/
|
259
|
+
toString(encoding, start, end) {
|
260
|
+
const length = this.length;
|
261
|
+
if (length === 0) {
|
262
|
+
return "";
|
263
|
+
}
|
264
|
+
if (arguments.length === 0) {
|
265
|
+
return Buffer._utf8Slice(this, 0, length);
|
266
|
+
}
|
267
|
+
if (typeof start === "undefined" || start < 0) {
|
268
|
+
start = 0;
|
269
|
+
}
|
270
|
+
if (start > this.length) {
|
271
|
+
return "";
|
272
|
+
}
|
273
|
+
if (typeof end === "undefined" || end > this.length) {
|
274
|
+
end = this.length;
|
275
|
+
}
|
276
|
+
if (end <= 0) {
|
277
|
+
return "";
|
278
|
+
}
|
279
|
+
end >>>= 0;
|
280
|
+
start >>>= 0;
|
281
|
+
if (end <= start) {
|
282
|
+
return "";
|
283
|
+
}
|
284
|
+
if (!encoding) {
|
285
|
+
encoding = "utf8";
|
286
|
+
}
|
287
|
+
switch (Buffer._getEncoding(encoding)) {
|
288
|
+
case "hex":
|
289
|
+
return Buffer._hexSlice(this, start, end);
|
290
|
+
case "utf8":
|
291
|
+
return Buffer._utf8Slice(this, start, end);
|
292
|
+
case "ascii":
|
293
|
+
return Buffer._asciiSlice(this, start, end);
|
294
|
+
case "latin1":
|
295
|
+
case "binary":
|
296
|
+
return Buffer._latin1Slice(this, start, end);
|
297
|
+
case "ucs2":
|
298
|
+
case "utf16le":
|
299
|
+
return Buffer._utf16leSlice(this, start, end);
|
300
|
+
case "base64":
|
301
|
+
return Buffer._base64Slice(this, start, end);
|
302
|
+
}
|
303
|
+
}
|
304
|
+
/**
|
305
|
+
* Returns true if this buffer's is equal to the provided buffer, meaning they share the same exact data.
|
306
|
+
*
|
307
|
+
* @param otherBuffer
|
308
|
+
*/
|
309
|
+
equals(otherBuffer) {
|
310
|
+
if (!Buffer.isBuffer(otherBuffer)) {
|
311
|
+
throw new TypeError("Argument must be a Buffer");
|
312
|
+
}
|
313
|
+
if (this === otherBuffer) {
|
314
|
+
return true;
|
315
|
+
}
|
316
|
+
return Buffer.compare(this, otherBuffer) === 0;
|
317
|
+
}
|
318
|
+
/**
|
319
|
+
* Compares the buffer with `otherBuffer` and returns a number indicating whether the buffer comes before, after,
|
320
|
+
* or is the same as `otherBuffer` in sort order. Comparison is based on the actual sequence of bytes in each
|
321
|
+
* buffer.
|
322
|
+
*
|
323
|
+
* - `0` is returned if `otherBuffer` is the same as this buffer.
|
324
|
+
* - `1` is returned if `otherBuffer` should come before this buffer when sorted.
|
325
|
+
* - `-1` is returned if `otherBuffer` should come after this buffer when sorted.
|
326
|
+
*
|
327
|
+
* @param otherBuffer The buffer to compare to.
|
328
|
+
* @param targetStart The offset within `otherBuffer` at which to begin comparison.
|
329
|
+
* @param targetEnd The offset within `otherBuffer` at which to end comparison (exclusive).
|
330
|
+
* @param sourceStart The offset within this buffer at which to begin comparison.
|
331
|
+
* @param sourceEnd The offset within this buffer at which to end the comparison (exclusive).
|
332
|
+
*/
|
333
|
+
compare(otherBuffer, targetStart, targetEnd, sourceStart, sourceEnd) {
|
334
|
+
if (Buffer._isInstance(otherBuffer, Uint8Array)) {
|
335
|
+
otherBuffer = Buffer.from(otherBuffer, otherBuffer.byteOffset, otherBuffer.byteLength);
|
336
|
+
}
|
337
|
+
if (!Buffer.isBuffer(otherBuffer)) {
|
338
|
+
throw new TypeError("Argument must be a Buffer or Uint8Array");
|
339
|
+
}
|
340
|
+
targetStart ?? (targetStart = 0);
|
341
|
+
targetEnd ?? (targetEnd = otherBuffer ? otherBuffer.length : 0);
|
342
|
+
sourceStart ?? (sourceStart = 0);
|
343
|
+
sourceEnd ?? (sourceEnd = this.length);
|
344
|
+
if (targetStart < 0 || targetEnd > otherBuffer.length || sourceStart < 0 || sourceEnd > this.length) {
|
345
|
+
throw new RangeError("Out of range index");
|
346
|
+
}
|
347
|
+
if (sourceStart >= sourceEnd && targetStart >= targetEnd) {
|
348
|
+
return 0;
|
349
|
+
}
|
350
|
+
if (sourceStart >= sourceEnd) {
|
351
|
+
return -1;
|
352
|
+
}
|
353
|
+
if (targetStart >= targetEnd) {
|
354
|
+
return 1;
|
355
|
+
}
|
356
|
+
targetStart >>>= 0;
|
357
|
+
targetEnd >>>= 0;
|
358
|
+
sourceStart >>>= 0;
|
359
|
+
sourceEnd >>>= 0;
|
360
|
+
if (this === otherBuffer) {
|
361
|
+
return 0;
|
362
|
+
}
|
363
|
+
let x = sourceEnd - sourceStart;
|
364
|
+
let y = targetEnd - targetStart;
|
365
|
+
const len = Math.min(x, y);
|
366
|
+
const thisCopy = this.slice(sourceStart, sourceEnd);
|
367
|
+
const targetCopy = otherBuffer.slice(targetStart, targetEnd);
|
368
|
+
for (let i = 0; i < len; ++i) {
|
369
|
+
if (thisCopy[i] !== targetCopy[i]) {
|
370
|
+
x = thisCopy[i];
|
371
|
+
y = targetCopy[i];
|
372
|
+
break;
|
373
|
+
}
|
374
|
+
}
|
375
|
+
if (x < y) return -1;
|
376
|
+
if (y < x) return 1;
|
377
|
+
return 0;
|
378
|
+
}
|
379
|
+
/**
|
380
|
+
* Copies data from a region of this buffer to a region in `targetBuffer`, even if the `targetBuffer` memory
|
381
|
+
* region overlaps with this buffer.
|
382
|
+
*
|
383
|
+
* @param targetBuffer The target buffer to copy into.
|
384
|
+
* @param targetStart The offset within `targetBuffer` at which to begin writing.
|
385
|
+
* @param sourceStart The offset within this buffer at which to begin copying.
|
386
|
+
* @param sourceEnd The offset within this buffer at which to end copying (exclusive).
|
387
|
+
*/
|
388
|
+
copy(targetBuffer, targetStart, sourceStart, sourceEnd) {
|
389
|
+
if (!Buffer.isBuffer(targetBuffer)) throw new TypeError("argument should be a Buffer");
|
390
|
+
if (!sourceStart) sourceStart = 0;
|
391
|
+
if (!targetStart) targetStart = 0;
|
392
|
+
if (!sourceEnd && sourceEnd !== 0) sourceEnd = this.length;
|
393
|
+
if (targetStart >= targetBuffer.length) targetStart = targetBuffer.length;
|
394
|
+
if (!targetStart) targetStart = 0;
|
395
|
+
if (sourceEnd > 0 && sourceEnd < sourceStart) sourceEnd = sourceStart;
|
396
|
+
if (sourceEnd === sourceStart) return 0;
|
397
|
+
if (targetBuffer.length === 0 || this.length === 0) return 0;
|
398
|
+
if (targetStart < 0) {
|
399
|
+
throw new RangeError("targetStart out of bounds");
|
400
|
+
}
|
401
|
+
if (sourceStart < 0 || sourceStart >= this.length) throw new RangeError("Index out of range");
|
402
|
+
if (sourceEnd < 0) throw new RangeError("sourceEnd out of bounds");
|
403
|
+
if (sourceEnd > this.length) sourceEnd = this.length;
|
404
|
+
if (targetBuffer.length - targetStart < sourceEnd - sourceStart) {
|
405
|
+
sourceEnd = targetBuffer.length - targetStart + sourceStart;
|
406
|
+
}
|
407
|
+
const len = sourceEnd - sourceStart;
|
408
|
+
if (this === targetBuffer && typeof Uint8Array.prototype.copyWithin === "function") {
|
409
|
+
this.copyWithin(targetStart, sourceStart, sourceEnd);
|
410
|
+
} else {
|
411
|
+
Uint8Array.prototype.set.call(targetBuffer, this.subarray(sourceStart, sourceEnd), targetStart);
|
412
|
+
}
|
413
|
+
return len;
|
414
|
+
}
|
415
|
+
/**
|
416
|
+
* Returns a new `Buffer` that references the same memory as the original, but offset and cropped by the `start`
|
417
|
+
* and `end` indices. This is the same behavior as `buf.subarray()`.
|
418
|
+
*
|
419
|
+
* This method is not compatible with the `Uint8Array.prototype.slice()`, which is a superclass of Buffer. To copy
|
420
|
+
* the slice, use `Uint8Array.prototype.slice()`.
|
421
|
+
*
|
422
|
+
* @param start
|
423
|
+
* @param end
|
424
|
+
*/
|
425
|
+
slice(start, end) {
|
426
|
+
if (!start) {
|
427
|
+
start = 0;
|
428
|
+
}
|
429
|
+
const len = this.length;
|
430
|
+
start = ~~start;
|
431
|
+
end = end === void 0 ? len : ~~end;
|
432
|
+
if (start < 0) {
|
433
|
+
start += len;
|
434
|
+
if (start < 0) {
|
435
|
+
start = 0;
|
436
|
+
}
|
437
|
+
} else if (start > len) {
|
438
|
+
start = len;
|
439
|
+
}
|
440
|
+
if (end < 0) {
|
441
|
+
end += len;
|
442
|
+
if (end < 0) {
|
443
|
+
end = 0;
|
444
|
+
}
|
445
|
+
} else if (end > len) {
|
446
|
+
end = len;
|
447
|
+
}
|
448
|
+
if (end < start) {
|
449
|
+
end = start;
|
450
|
+
}
|
451
|
+
const newBuf = this.subarray(start, end);
|
452
|
+
Object.setPrototypeOf(newBuf, Buffer.prototype);
|
453
|
+
return newBuf;
|
454
|
+
}
|
455
|
+
/**
|
456
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits
|
457
|
+
* of accuracy. Behavior is undefined when value is anything other than an unsigned integer.
|
458
|
+
*
|
459
|
+
* @param value Number to write.
|
460
|
+
* @param offset Number of bytes to skip before starting to write.
|
461
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
462
|
+
* @param noAssert
|
463
|
+
* @returns `offset` plus the number of bytes written.
|
464
|
+
*/
|
465
|
+
writeUIntLE(value, offset, byteLength, noAssert) {
|
466
|
+
value = +value;
|
467
|
+
offset = offset >>> 0;
|
468
|
+
byteLength = byteLength >>> 0;
|
469
|
+
if (!noAssert) {
|
470
|
+
const maxBytes = Math.pow(2, 8 * byteLength) - 1;
|
471
|
+
Buffer._checkInt(this, value, offset, byteLength, maxBytes, 0);
|
472
|
+
}
|
473
|
+
let mul = 1;
|
474
|
+
let i = 0;
|
475
|
+
this[offset] = value & 255;
|
476
|
+
while (++i < byteLength && (mul *= 256)) {
|
477
|
+
this[offset + i] = value / mul & 255;
|
478
|
+
}
|
479
|
+
return offset + byteLength;
|
480
|
+
}
|
481
|
+
/**
|
482
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits of
|
483
|
+
* accuracy. Behavior is undefined when `value` is anything other than an unsigned integer.
|
484
|
+
*
|
485
|
+
* @param value Number to write.
|
486
|
+
* @param offset Number of bytes to skip before starting to write.
|
487
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
488
|
+
* @param noAssert
|
489
|
+
* @returns `offset` plus the number of bytes written.
|
490
|
+
*/
|
491
|
+
writeUIntBE(value, offset, byteLength, noAssert) {
|
492
|
+
value = +value;
|
493
|
+
offset = offset >>> 0;
|
494
|
+
byteLength = byteLength >>> 0;
|
495
|
+
if (!noAssert) {
|
496
|
+
const maxBytes = Math.pow(2, 8 * byteLength) - 1;
|
497
|
+
Buffer._checkInt(this, value, offset, byteLength, maxBytes, 0);
|
498
|
+
}
|
499
|
+
let i = byteLength - 1;
|
500
|
+
let mul = 1;
|
501
|
+
this[offset + i] = value & 255;
|
502
|
+
while (--i >= 0 && (mul *= 256)) {
|
503
|
+
this[offset + i] = value / mul & 255;
|
504
|
+
}
|
505
|
+
return offset + byteLength;
|
506
|
+
}
|
507
|
+
/**
|
508
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits
|
509
|
+
* of accuracy. Behavior is undefined when `value` is anything other than a signed integer.
|
510
|
+
*
|
511
|
+
* @param value Number to write.
|
512
|
+
* @param offset Number of bytes to skip before starting to write.
|
513
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
514
|
+
* @param noAssert
|
515
|
+
* @returns `offset` plus the number of bytes written.
|
516
|
+
*/
|
517
|
+
writeIntLE(value, offset, byteLength, noAssert) {
|
518
|
+
value = +value;
|
519
|
+
offset = offset >>> 0;
|
520
|
+
if (!noAssert) {
|
521
|
+
const limit = Math.pow(2, 8 * byteLength - 1);
|
522
|
+
Buffer._checkInt(this, value, offset, byteLength, limit - 1, -limit);
|
523
|
+
}
|
524
|
+
let i = 0;
|
525
|
+
let mul = 1;
|
526
|
+
let sub = 0;
|
527
|
+
this[offset] = value & 255;
|
528
|
+
while (++i < byteLength && (mul *= 256)) {
|
529
|
+
if (value < 0 && sub === 0 && this[offset + i - 1] !== 0) {
|
530
|
+
sub = 1;
|
531
|
+
}
|
532
|
+
this[offset + i] = (value / mul >> 0) - sub & 255;
|
533
|
+
}
|
534
|
+
return offset + byteLength;
|
535
|
+
}
|
536
|
+
/**
|
537
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits
|
538
|
+
* of accuracy. Behavior is undefined when `value` is anything other than a signed integer.
|
539
|
+
*
|
540
|
+
* @param value Number to write.
|
541
|
+
* @param offset Number of bytes to skip before starting to write.
|
542
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
543
|
+
* @param noAssert
|
544
|
+
* @returns `offset` plus the number of bytes written.
|
545
|
+
*/
|
546
|
+
writeIntBE(value, offset, byteLength, noAssert) {
|
547
|
+
value = +value;
|
548
|
+
offset = offset >>> 0;
|
549
|
+
if (!noAssert) {
|
550
|
+
const limit = Math.pow(2, 8 * byteLength - 1);
|
551
|
+
Buffer._checkInt(this, value, offset, byteLength, limit - 1, -limit);
|
552
|
+
}
|
553
|
+
let i = byteLength - 1;
|
554
|
+
let mul = 1;
|
555
|
+
let sub = 0;
|
556
|
+
this[offset + i] = value & 255;
|
557
|
+
while (--i >= 0 && (mul *= 256)) {
|
558
|
+
if (value < 0 && sub === 0 && this[offset + i + 1] !== 0) {
|
559
|
+
sub = 1;
|
560
|
+
}
|
561
|
+
this[offset + i] = (value / mul >> 0) - sub & 255;
|
562
|
+
}
|
563
|
+
return offset + byteLength;
|
564
|
+
}
|
565
|
+
/**
|
566
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an
|
567
|
+
* unsigned, little-endian integer supporting up to 48 bits of accuracy.
|
568
|
+
*
|
569
|
+
* @param offset Number of bytes to skip before starting to read.
|
570
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
571
|
+
* @param noAssert
|
572
|
+
*/
|
573
|
+
readUIntLE(offset, byteLength, noAssert) {
|
574
|
+
offset = offset >>> 0;
|
575
|
+
byteLength = byteLength >>> 0;
|
576
|
+
if (!noAssert) {
|
577
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
578
|
+
}
|
579
|
+
let val = this[offset];
|
580
|
+
let mul = 1;
|
581
|
+
let i = 0;
|
582
|
+
while (++i < byteLength && (mul *= 256)) {
|
583
|
+
val += this[offset + i] * mul;
|
584
|
+
}
|
585
|
+
return val;
|
586
|
+
}
|
587
|
+
/**
|
588
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an
|
589
|
+
* unsigned, big-endian integer supporting up to 48 bits of accuracy.
|
590
|
+
*
|
591
|
+
* @param offset Number of bytes to skip before starting to read.
|
592
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
593
|
+
* @param noAssert
|
594
|
+
*/
|
595
|
+
readUIntBE(offset, byteLength, noAssert) {
|
596
|
+
offset = offset >>> 0;
|
597
|
+
byteLength = byteLength >>> 0;
|
598
|
+
if (!noAssert) {
|
599
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
600
|
+
}
|
601
|
+
let val = this[offset + --byteLength];
|
602
|
+
let mul = 1;
|
603
|
+
while (byteLength > 0 && (mul *= 256)) {
|
604
|
+
val += this[offset + --byteLength] * mul;
|
605
|
+
}
|
606
|
+
return val;
|
607
|
+
}
|
608
|
+
/**
|
609
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a
|
610
|
+
* little-endian, two's complement signed value supporting up to 48 bits of accuracy.
|
611
|
+
*
|
612
|
+
* @param offset Number of bytes to skip before starting to read.
|
613
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
614
|
+
* @param noAssert
|
615
|
+
*/
|
616
|
+
readIntLE(offset, byteLength, noAssert) {
|
617
|
+
offset = offset >>> 0;
|
618
|
+
byteLength = byteLength >>> 0;
|
619
|
+
if (!noAssert) {
|
620
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
621
|
+
}
|
622
|
+
let val = this[offset];
|
623
|
+
let mul = 1;
|
624
|
+
let i = 0;
|
625
|
+
while (++i < byteLength && (mul *= 256)) {
|
626
|
+
val += this[offset + i] * mul;
|
627
|
+
}
|
628
|
+
mul *= 128;
|
629
|
+
if (val >= mul) {
|
630
|
+
val -= Math.pow(2, 8 * byteLength);
|
631
|
+
}
|
632
|
+
return val;
|
633
|
+
}
|
634
|
+
/**
|
635
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a
|
636
|
+
* big-endian, two's complement signed value supporting up to 48 bits of accuracy.
|
637
|
+
*
|
638
|
+
* @param offset Number of bytes to skip before starting to read.
|
639
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
640
|
+
* @param noAssert
|
641
|
+
*/
|
642
|
+
readIntBE(offset, byteLength, noAssert) {
|
643
|
+
offset = offset >>> 0;
|
644
|
+
byteLength = byteLength >>> 0;
|
645
|
+
if (!noAssert) {
|
646
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
647
|
+
}
|
648
|
+
let i = byteLength;
|
649
|
+
let mul = 1;
|
650
|
+
let val = this[offset + --i];
|
651
|
+
while (i > 0 && (mul *= 256)) {
|
652
|
+
val += this[offset + --i] * mul;
|
653
|
+
}
|
654
|
+
mul *= 128;
|
655
|
+
if (val >= mul) {
|
656
|
+
val -= Math.pow(2, 8 * byteLength);
|
657
|
+
}
|
658
|
+
return val;
|
659
|
+
}
|
660
|
+
/**
|
661
|
+
* Reads an unsigned 8-bit integer from `buf` at the specified `offset`.
|
662
|
+
*
|
663
|
+
* @param offset Number of bytes to skip before starting to read.
|
664
|
+
* @param noAssert
|
665
|
+
*/
|
666
|
+
readUInt8(offset, noAssert) {
|
667
|
+
offset = offset >>> 0;
|
668
|
+
if (!noAssert) {
|
669
|
+
Buffer._checkOffset(offset, 1, this.length);
|
670
|
+
}
|
671
|
+
return this[offset];
|
672
|
+
}
|
673
|
+
/**
|
674
|
+
* Reads an unsigned, little-endian 16-bit integer from `buf` at the specified `offset`.
|
675
|
+
*
|
676
|
+
* @param offset Number of bytes to skip before starting to read.
|
677
|
+
* @param noAssert
|
678
|
+
*/
|
679
|
+
readUInt16LE(offset, noAssert) {
|
680
|
+
offset = offset >>> 0;
|
681
|
+
if (!noAssert) {
|
682
|
+
Buffer._checkOffset(offset, 2, this.length);
|
683
|
+
}
|
684
|
+
return this[offset] | this[offset + 1] << 8;
|
685
|
+
}
|
686
|
+
/**
|
687
|
+
* Reads an unsigned, big-endian 16-bit integer from `buf` at the specified `offset`.
|
688
|
+
*
|
689
|
+
* @param offset Number of bytes to skip before starting to read.
|
690
|
+
* @param noAssert
|
691
|
+
*/
|
692
|
+
readUInt16BE(offset, noAssert) {
|
693
|
+
offset = offset >>> 0;
|
694
|
+
if (!noAssert) {
|
695
|
+
Buffer._checkOffset(offset, 2, this.length);
|
696
|
+
}
|
697
|
+
return this[offset] << 8 | this[offset + 1];
|
698
|
+
}
|
699
|
+
/**
|
700
|
+
* Reads an unsigned, little-endian 32-bit integer from `buf` at the specified `offset`.
|
701
|
+
*
|
702
|
+
* @param offset Number of bytes to skip before starting to read.
|
703
|
+
* @param noAssert
|
704
|
+
*/
|
705
|
+
readUInt32LE(offset, noAssert) {
|
706
|
+
offset = offset >>> 0;
|
707
|
+
if (!noAssert) {
|
708
|
+
Buffer._checkOffset(offset, 4, this.length);
|
709
|
+
}
|
710
|
+
return (this[offset] | this[offset + 1] << 8 | this[offset + 2] << 16) + this[offset + 3] * 16777216;
|
711
|
+
}
|
712
|
+
/**
|
713
|
+
* Reads an unsigned, big-endian 32-bit integer from `buf` at the specified `offset`.
|
714
|
+
*
|
715
|
+
* @param offset Number of bytes to skip before starting to read.
|
716
|
+
* @param noAssert
|
717
|
+
*/
|
718
|
+
readUInt32BE(offset, noAssert) {
|
719
|
+
offset = offset >>> 0;
|
720
|
+
if (!noAssert) {
|
721
|
+
Buffer._checkOffset(offset, 4, this.length);
|
722
|
+
}
|
723
|
+
return this[offset] * 16777216 + (this[offset + 1] << 16 | this[offset + 2] << 8 | this[offset + 3]);
|
724
|
+
}
|
725
|
+
/**
|
726
|
+
* Reads a signed 8-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer` are interpreted
|
727
|
+
* as two's complement signed values.
|
728
|
+
*
|
729
|
+
* @param offset Number of bytes to skip before starting to read.
|
730
|
+
* @param noAssert
|
731
|
+
*/
|
732
|
+
readInt8(offset, noAssert) {
|
733
|
+
offset = offset >>> 0;
|
734
|
+
if (!noAssert) {
|
735
|
+
Buffer._checkOffset(offset, 1, this.length);
|
736
|
+
}
|
737
|
+
if (!(this[offset] & 128)) {
|
738
|
+
return this[offset];
|
739
|
+
}
|
740
|
+
return (255 - this[offset] + 1) * -1;
|
741
|
+
}
|
742
|
+
/**
|
743
|
+
* Reads a signed, little-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
744
|
+
* are interpreted as two's complement signed values.
|
745
|
+
*
|
746
|
+
* @param offset Number of bytes to skip before starting to read.
|
747
|
+
* @param noAssert
|
748
|
+
*/
|
749
|
+
readInt16LE(offset, noAssert) {
|
750
|
+
offset = offset >>> 0;
|
751
|
+
if (!noAssert) {
|
752
|
+
Buffer._checkOffset(offset, 2, this.length);
|
753
|
+
}
|
754
|
+
const val = this[offset] | this[offset + 1] << 8;
|
755
|
+
return val & 32768 ? val | 4294901760 : val;
|
756
|
+
}
|
757
|
+
/**
|
758
|
+
* Reads a signed, big-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
759
|
+
* are interpreted as two's complement signed values.
|
760
|
+
*
|
761
|
+
* @param offset Number of bytes to skip before starting to read.
|
762
|
+
* @param noAssert
|
763
|
+
*/
|
764
|
+
readInt16BE(offset, noAssert) {
|
765
|
+
offset = offset >>> 0;
|
766
|
+
if (!noAssert) {
|
767
|
+
Buffer._checkOffset(offset, 2, this.length);
|
768
|
+
}
|
769
|
+
const val = this[offset + 1] | this[offset] << 8;
|
770
|
+
return val & 32768 ? val | 4294901760 : val;
|
771
|
+
}
|
772
|
+
/**
|
773
|
+
* Reads a signed, little-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
774
|
+
* are interpreted as two's complement signed values.
|
775
|
+
*
|
776
|
+
* @param offset Number of bytes to skip before starting to read.
|
777
|
+
* @param noAssert
|
778
|
+
*/
|
779
|
+
readInt32LE(offset, noAssert) {
|
780
|
+
offset = offset >>> 0;
|
781
|
+
if (!noAssert) {
|
782
|
+
Buffer._checkOffset(offset, 4, this.length);
|
783
|
+
}
|
784
|
+
return this[offset] | this[offset + 1] << 8 | this[offset + 2] << 16 | this[offset + 3] << 24;
|
785
|
+
}
|
786
|
+
/**
|
787
|
+
* Reads a signed, big-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
788
|
+
* are interpreted as two's complement signed values.
|
789
|
+
*
|
790
|
+
* @param offset Number of bytes to skip before starting to read.
|
791
|
+
* @param noAssert
|
792
|
+
*/
|
793
|
+
readInt32BE(offset, noAssert) {
|
794
|
+
offset = offset >>> 0;
|
795
|
+
if (!noAssert) {
|
796
|
+
Buffer._checkOffset(offset, 4, this.length);
|
797
|
+
}
|
798
|
+
return this[offset] << 24 | this[offset + 1] << 16 | this[offset + 2] << 8 | this[offset + 3];
|
799
|
+
}
|
800
|
+
/**
|
801
|
+
* Interprets `buf` as an array of unsigned 16-bit integers and swaps the byte order in-place.
|
802
|
+
* Throws a `RangeError` if `buf.length` is not a multiple of 2.
|
803
|
+
*/
|
804
|
+
swap16() {
|
805
|
+
const len = this.length;
|
806
|
+
if (len % 2 !== 0) {
|
807
|
+
throw new RangeError("Buffer size must be a multiple of 16-bits");
|
808
|
+
}
|
809
|
+
for (let i = 0; i < len; i += 2) {
|
810
|
+
this._swap(this, i, i + 1);
|
811
|
+
}
|
812
|
+
return this;
|
813
|
+
}
|
814
|
+
/**
|
815
|
+
* Interprets `buf` as an array of unsigned 32-bit integers and swaps the byte order in-place.
|
816
|
+
* Throws a `RangeError` if `buf.length` is not a multiple of 4.
|
817
|
+
*/
|
818
|
+
swap32() {
|
819
|
+
const len = this.length;
|
820
|
+
if (len % 4 !== 0) {
|
821
|
+
throw new RangeError("Buffer size must be a multiple of 32-bits");
|
822
|
+
}
|
823
|
+
for (let i = 0; i < len; i += 4) {
|
824
|
+
this._swap(this, i, i + 3);
|
825
|
+
this._swap(this, i + 1, i + 2);
|
826
|
+
}
|
827
|
+
return this;
|
828
|
+
}
|
829
|
+
/**
|
830
|
+
* Interprets `buf` as an array of unsigned 64-bit integers and swaps the byte order in-place.
|
831
|
+
* Throws a `RangeError` if `buf.length` is not a multiple of 8.
|
832
|
+
*/
|
833
|
+
swap64() {
|
834
|
+
const len = this.length;
|
835
|
+
if (len % 8 !== 0) {
|
836
|
+
throw new RangeError("Buffer size must be a multiple of 64-bits");
|
837
|
+
}
|
838
|
+
for (let i = 0; i < len; i += 8) {
|
839
|
+
this._swap(this, i, i + 7);
|
840
|
+
this._swap(this, i + 1, i + 6);
|
841
|
+
this._swap(this, i + 2, i + 5);
|
842
|
+
this._swap(this, i + 3, i + 4);
|
843
|
+
}
|
844
|
+
return this;
|
845
|
+
}
|
846
|
+
/**
|
847
|
+
* Swaps two octets.
|
848
|
+
*
|
849
|
+
* @param b
|
850
|
+
* @param n
|
851
|
+
* @param m
|
852
|
+
*/
|
853
|
+
_swap(b, n, m) {
|
854
|
+
const i = b[n];
|
855
|
+
b[n] = b[m];
|
856
|
+
b[m] = i;
|
857
|
+
}
|
858
|
+
/**
|
859
|
+
* Writes `value` to `buf` at the specified `offset`. The `value` must be a valid unsigned 8-bit integer.
|
860
|
+
* Behavior is undefined when `value` is anything other than an unsigned 8-bit integer.
|
861
|
+
*
|
862
|
+
* @param value Number to write.
|
863
|
+
* @param offset Number of bytes to skip before starting to write.
|
864
|
+
* @param noAssert
|
865
|
+
* @returns `offset` plus the number of bytes written.
|
866
|
+
*/
|
867
|
+
writeUInt8(value, offset, noAssert) {
|
868
|
+
value = +value;
|
869
|
+
offset = offset >>> 0;
|
870
|
+
if (!noAssert) {
|
871
|
+
Buffer._checkInt(this, value, offset, 1, 255, 0);
|
872
|
+
}
|
873
|
+
this[offset] = value & 255;
|
874
|
+
return offset + 1;
|
875
|
+
}
|
876
|
+
/**
|
877
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 16-bit
|
878
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 16-bit integer.
|
879
|
+
*
|
880
|
+
* @param value Number to write.
|
881
|
+
* @param offset Number of bytes to skip before starting to write.
|
882
|
+
* @param noAssert
|
883
|
+
* @returns `offset` plus the number of bytes written.
|
884
|
+
*/
|
885
|
+
writeUInt16LE(value, offset, noAssert) {
|
886
|
+
value = +value;
|
887
|
+
offset = offset >>> 0;
|
888
|
+
if (!noAssert) {
|
889
|
+
Buffer._checkInt(this, value, offset, 2, 65535, 0);
|
890
|
+
}
|
891
|
+
this[offset] = value & 255;
|
892
|
+
this[offset + 1] = value >>> 8;
|
893
|
+
return offset + 2;
|
894
|
+
}
|
895
|
+
/**
|
896
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 16-bit
|
897
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 16-bit integer.
|
898
|
+
*
|
899
|
+
* @param value Number to write.
|
900
|
+
* @param offset Number of bytes to skip before starting to write.
|
901
|
+
* @param noAssert
|
902
|
+
* @returns `offset` plus the number of bytes written.
|
903
|
+
*/
|
904
|
+
writeUInt16BE(value, offset, noAssert) {
|
905
|
+
value = +value;
|
906
|
+
offset = offset >>> 0;
|
907
|
+
if (!noAssert) {
|
908
|
+
Buffer._checkInt(this, value, offset, 2, 65535, 0);
|
909
|
+
}
|
910
|
+
this[offset] = value >>> 8;
|
911
|
+
this[offset + 1] = value & 255;
|
912
|
+
return offset + 2;
|
913
|
+
}
|
914
|
+
/**
|
915
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 32-bit
|
916
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 32-bit integer.
|
917
|
+
*
|
918
|
+
* @param value Number to write.
|
919
|
+
* @param offset Number of bytes to skip before starting to write.
|
920
|
+
* @param noAssert
|
921
|
+
* @returns `offset` plus the number of bytes written.
|
922
|
+
*/
|
923
|
+
writeUInt32LE(value, offset, noAssert) {
|
924
|
+
value = +value;
|
925
|
+
offset = offset >>> 0;
|
926
|
+
if (!noAssert) {
|
927
|
+
Buffer._checkInt(this, value, offset, 4, 4294967295, 0);
|
928
|
+
}
|
929
|
+
this[offset + 3] = value >>> 24;
|
930
|
+
this[offset + 2] = value >>> 16;
|
931
|
+
this[offset + 1] = value >>> 8;
|
932
|
+
this[offset] = value & 255;
|
933
|
+
return offset + 4;
|
934
|
+
}
|
935
|
+
/**
|
936
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 32-bit
|
937
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 32-bit integer.
|
938
|
+
*
|
939
|
+
* @param value Number to write.
|
940
|
+
* @param offset Number of bytes to skip before starting to write.
|
941
|
+
* @param noAssert
|
942
|
+
* @returns `offset` plus the number of bytes written.
|
943
|
+
*/
|
944
|
+
writeUInt32BE(value, offset, noAssert) {
|
945
|
+
value = +value;
|
946
|
+
offset = offset >>> 0;
|
947
|
+
if (!noAssert) {
|
948
|
+
Buffer._checkInt(this, value, offset, 4, 4294967295, 0);
|
949
|
+
}
|
950
|
+
this[offset] = value >>> 24;
|
951
|
+
this[offset + 1] = value >>> 16;
|
952
|
+
this[offset + 2] = value >>> 8;
|
953
|
+
this[offset + 3] = value & 255;
|
954
|
+
return offset + 4;
|
955
|
+
}
|
956
|
+
/**
|
957
|
+
* Writes `value` to `buf` at the specified `offset`. The `value` must be a valid signed 8-bit integer.
|
958
|
+
* Behavior is undefined when `value` is anything other than a signed 8-bit integer.
|
959
|
+
*
|
960
|
+
* @param value Number to write.
|
961
|
+
* @param offset Number of bytes to skip before starting to write.
|
962
|
+
* @param noAssert
|
963
|
+
* @returns `offset` plus the number of bytes written.
|
964
|
+
*/
|
965
|
+
writeInt8(value, offset, noAssert) {
|
966
|
+
value = +value;
|
967
|
+
offset = offset >>> 0;
|
968
|
+
if (!noAssert) {
|
969
|
+
Buffer._checkInt(this, value, offset, 1, 127, -128);
|
970
|
+
}
|
971
|
+
if (value < 0) {
|
972
|
+
value = 255 + value + 1;
|
973
|
+
}
|
974
|
+
this[offset] = value & 255;
|
975
|
+
return offset + 1;
|
976
|
+
}
|
977
|
+
/**
|
978
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 16-bit
|
979
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 16-bit integer.
|
980
|
+
*
|
981
|
+
* @param value Number to write.
|
982
|
+
* @param offset Number of bytes to skip before starting to write.
|
983
|
+
* @param noAssert
|
984
|
+
* @returns `offset` plus the number of bytes written.
|
985
|
+
*/
|
986
|
+
writeInt16LE(value, offset, noAssert) {
|
987
|
+
value = +value;
|
988
|
+
offset = offset >>> 0;
|
989
|
+
if (!noAssert) {
|
990
|
+
Buffer._checkInt(this, value, offset, 2, 32767, -32768);
|
991
|
+
}
|
992
|
+
this[offset] = value & 255;
|
993
|
+
this[offset + 1] = value >>> 8;
|
994
|
+
return offset + 2;
|
995
|
+
}
|
996
|
+
/**
|
997
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 16-bit
|
998
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 16-bit integer.
|
999
|
+
*
|
1000
|
+
* @param value Number to write.
|
1001
|
+
* @param offset Number of bytes to skip before starting to write.
|
1002
|
+
* @param noAssert
|
1003
|
+
* @returns `offset` plus the number of bytes written.
|
1004
|
+
*/
|
1005
|
+
writeInt16BE(value, offset, noAssert) {
|
1006
|
+
value = +value;
|
1007
|
+
offset = offset >>> 0;
|
1008
|
+
if (!noAssert) {
|
1009
|
+
Buffer._checkInt(this, value, offset, 2, 32767, -32768);
|
1010
|
+
}
|
1011
|
+
this[offset] = value >>> 8;
|
1012
|
+
this[offset + 1] = value & 255;
|
1013
|
+
return offset + 2;
|
1014
|
+
}
|
1015
|
+
/**
|
1016
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 32-bit
|
1017
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 32-bit integer.
|
1018
|
+
*
|
1019
|
+
* @param value Number to write.
|
1020
|
+
* @param offset Number of bytes to skip before starting to write.
|
1021
|
+
* @param noAssert
|
1022
|
+
* @returns `offset` plus the number of bytes written.
|
1023
|
+
*/
|
1024
|
+
writeInt32LE(value, offset, noAssert) {
|
1025
|
+
value = +value;
|
1026
|
+
offset = offset >>> 0;
|
1027
|
+
if (!noAssert) {
|
1028
|
+
Buffer._checkInt(this, value, offset, 4, 2147483647, -2147483648);
|
1029
|
+
}
|
1030
|
+
this[offset] = value & 255;
|
1031
|
+
this[offset + 1] = value >>> 8;
|
1032
|
+
this[offset + 2] = value >>> 16;
|
1033
|
+
this[offset + 3] = value >>> 24;
|
1034
|
+
return offset + 4;
|
1035
|
+
}
|
1036
|
+
/**
|
1037
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 32-bit
|
1038
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 32-bit integer.
|
1039
|
+
*
|
1040
|
+
* @param value Number to write.
|
1041
|
+
* @param offset Number of bytes to skip before starting to write.
|
1042
|
+
* @param noAssert
|
1043
|
+
* @returns `offset` plus the number of bytes written.
|
1044
|
+
*/
|
1045
|
+
writeInt32BE(value, offset, noAssert) {
|
1046
|
+
value = +value;
|
1047
|
+
offset = offset >>> 0;
|
1048
|
+
if (!noAssert) {
|
1049
|
+
Buffer._checkInt(this, value, offset, 4, 2147483647, -2147483648);
|
1050
|
+
}
|
1051
|
+
if (value < 0) {
|
1052
|
+
value = 4294967295 + value + 1;
|
1053
|
+
}
|
1054
|
+
this[offset] = value >>> 24;
|
1055
|
+
this[offset + 1] = value >>> 16;
|
1056
|
+
this[offset + 2] = value >>> 8;
|
1057
|
+
this[offset + 3] = value & 255;
|
1058
|
+
return offset + 4;
|
1059
|
+
}
|
/**
 * Fills `buf` with the specified `value`. If the `offset` and `end` are not given, the entire `buf` will be
 * filled. The `value` is coerced to a `uint32` value if it is not a string, `Buffer`, or integer. If the resulting
 * integer is greater than `255` (decimal), then `buf` will be filled with `value & 255`.
 *
 * If the final write of a `fill()` operation falls on a multi-byte character, then only the bytes of that
 * character that fit into `buf` are written.
 *
 * If `value` contains invalid characters, it is truncated; if no valid fill data remains, an exception is thrown.
 *
 * @param value
 * @param encoding
 */
fill(value, offset, end, encoding) {
  if (typeof value === "string") {
    // Support the fill(value, encoding) and fill(value, offset, encoding) call shapes.
    if (typeof offset === "string") {
      encoding = offset;
      offset = 0;
      end = this.length;
    } else if (typeof end === "string") {
      encoding = end;
      end = this.length;
    }
    if (encoding !== void 0 && typeof encoding !== "string") {
      throw new TypeError("encoding must be a string");
    }
    if (typeof encoding === "string" && !Buffer.isEncoding(encoding)) {
      throw new TypeError("Unknown encoding: " + encoding);
    }
    // Fast path: a single ASCII character fills byte-by-byte below.
    if (value.length === 1) {
      const code = value.charCodeAt(0);
      if (encoding === "utf8" && code < 128) {
        value = code;
      }
    }
  } else if (typeof value === "number") {
    value = value & 255;
  } else if (typeof value === "boolean") {
    value = Number(value);
  }
  // Default the range only when offset/end are null/undefined.
  offset ?? (offset = 0);
  end ?? (end = this.length);
  if (offset < 0 || this.length < offset || this.length < end) {
    throw new RangeError("Out of range index");
  }
  // Empty range: nothing to do.
  if (end <= offset) {
    return this;
  }
  offset = offset >>> 0;
  // NOTE(review): `end` can no longer be undefined here (defaulted above), so the
  // void-0 branch looks vestigial — kept as-is to preserve behavior.
  end = end === void 0 ? this.length : end >>> 0;
  value || (value = 0);
  let i;
  if (typeof value === "number") {
    // Numeric fill: write the same byte across the range.
    for (i = offset; i < end; ++i) {
      this[i] = value;
    }
  } else {
    // String/Buffer fill: repeat the encoded bytes cyclically across the range.
    const bytes = Buffer.isBuffer(value) ? value : Buffer.from(value, encoding);
    const len = bytes.length;
    if (len === 0) {
      throw new TypeError('The value "' + value + '" is invalid for argument "value"');
    }
    for (i = 0; i < end - offset; ++i) {
      this[i + offset] = bytes[i % len];
    }
  }
  return this;
}
1128
|
+
/**
|
1129
|
+
* Returns the index of the specified value.
|
1130
|
+
*
|
1131
|
+
* If `value` is:
|
1132
|
+
* - a string, `value` is interpreted according to the character encoding in `encoding`.
|
1133
|
+
* - a `Buffer` or `Uint8Array`, `value` will be used in its entirety. To compare a partial Buffer, use `slice()`.
|
1134
|
+
* - a number, `value` will be interpreted as an unsigned 8-bit integer value between `0` and `255`.
|
1135
|
+
*
|
1136
|
+
* Any other types will throw a `TypeError`.
|
1137
|
+
*
|
1138
|
+
* @param value What to search for.
|
1139
|
+
* @param byteOffset Where to begin searching in `buf`. If negative, then calculated from the end.
|
1140
|
+
* @param encoding If `value` is a string, this is the encoding used to search.
|
1141
|
+
* @returns The index of the first occurrence of `value` in `buf`, or `-1` if not found.
|
1142
|
+
*/
|
1143
|
+
indexOf(value, byteOffset, encoding) {
|
1144
|
+
return this._bidirectionalIndexOf(this, value, byteOffset, encoding, true);
|
1145
|
+
}
|
1146
|
+
/**
|
1147
|
+
* Gets the last index of the specified value.
|
1148
|
+
*
|
1149
|
+
* @see indexOf()
|
1150
|
+
* @param value
|
1151
|
+
* @param byteOffset
|
1152
|
+
* @param encoding
|
1153
|
+
*/
|
1154
|
+
lastIndexOf(value, byteOffset, encoding) {
|
1155
|
+
return this._bidirectionalIndexOf(this, value, byteOffset, encoding, false);
|
1156
|
+
}
|
/**
 * Shared implementation behind `indexOf` / `lastIndexOf`.
 *
 * @param buffer The buffer to search in.
 * @param val What to search for (string, number, or Buffer).
 * @param byteOffset Starting position; may be a string (treated as `encoding`), negative, or NaN.
 * @param encoding Encoding used when `val` is a string.
 * @param dir `true` for a forward search (indexOf), `false` for backward (lastIndexOf).
 * @returns Index of the match, or -1 if not found.
 */
_bidirectionalIndexOf(buffer, val, byteOffset, encoding, dir) {
  if (buffer.length === 0) {
    return -1;
  }
  // Normalize byteOffset: accept the (val, encoding) call shape, default to 0,
  // and clamp to the signed 32-bit range.
  if (typeof byteOffset === "string") {
    encoding = byteOffset;
    byteOffset = 0;
  } else if (typeof byteOffset === "undefined") {
    byteOffset = 0;
  } else if (byteOffset > 2147483647) {
    byteOffset = 2147483647;
  } else if (byteOffset < -2147483648) {
    byteOffset = -2147483648;
  }
  byteOffset = +byteOffset;
  // Self-inequality is the NaN check: NaN offsets start at the natural end.
  if (byteOffset !== byteOffset) {
    byteOffset = dir ? 0 : buffer.length - 1;
  }
  // Negative offsets count back from the end of the buffer.
  if (byteOffset < 0) {
    byteOffset = buffer.length + byteOffset;
  }
  if (byteOffset >= buffer.length) {
    if (dir) {
      return -1;
    } else {
      byteOffset = buffer.length - 1;
    }
  } else if (byteOffset < 0) {
    if (dir) {
      byteOffset = 0;
    } else {
      return -1;
    }
  }
  // String needles are converted to bytes before matching.
  if (typeof val === "string") {
    val = Buffer.from(val, encoding);
  }
  if (Buffer.isBuffer(val)) {
    // An empty needle never matches (mirrors Node's behavior here).
    if (val.length === 0) {
      return -1;
    }
    return Buffer._arrayIndexOf(buffer, val, byteOffset, encoding, dir);
  } else if (typeof val === "number") {
    val = val & 255;
    // Prefer the native typed-array scan when available.
    if (typeof Uint8Array.prototype.indexOf === "function") {
      if (dir) {
        return Uint8Array.prototype.indexOf.call(buffer, val, byteOffset);
      } else {
        return Uint8Array.prototype.lastIndexOf.call(buffer, val, byteOffset);
      }
    }
    return Buffer._arrayIndexOf(buffer, Buffer.from([val]), byteOffset, encoding, dir);
  }
  throw new TypeError("val must be string, number or Buffer");
}
1212
|
+
/**
|
1213
|
+
* Equivalent to `buf.indexOf() !== -1`.
|
1214
|
+
*
|
1215
|
+
* @param value
|
1216
|
+
* @param byteOffset
|
1217
|
+
* @param encoding
|
1218
|
+
*/
|
1219
|
+
includes(value, byteOffset, encoding) {
|
1220
|
+
return this.indexOf(value, byteOffset, encoding) !== -1;
|
1221
|
+
}
|
1222
|
+
/**
|
1223
|
+
* Creates a new buffer from the given parameters.
|
1224
|
+
*
|
1225
|
+
* @param data
|
1226
|
+
* @param encoding
|
1227
|
+
*/
|
1228
|
+
static from(a, b, c) {
|
1229
|
+
return new Buffer(a, b, c);
|
1230
|
+
}
|
1231
|
+
/**
|
1232
|
+
* Returns true if `obj` is a Buffer.
|
1233
|
+
*
|
1234
|
+
* @param obj
|
1235
|
+
*/
|
1236
|
+
static isBuffer(obj) {
|
1237
|
+
return obj != null && obj !== Buffer.prototype && Buffer._isInstance(obj, Buffer);
|
1238
|
+
}
|
1239
|
+
/**
|
1240
|
+
* Returns true if `encoding` is a supported encoding.
|
1241
|
+
*
|
1242
|
+
* @param encoding
|
1243
|
+
*/
|
1244
|
+
static isEncoding(encoding) {
|
1245
|
+
switch (encoding.toLowerCase()) {
|
1246
|
+
case "hex":
|
1247
|
+
case "utf8":
|
1248
|
+
case "ascii":
|
1249
|
+
case "binary":
|
1250
|
+
case "latin1":
|
1251
|
+
case "ucs2":
|
1252
|
+
case "utf16le":
|
1253
|
+
case "base64":
|
1254
|
+
return true;
|
1255
|
+
default:
|
1256
|
+
return false;
|
1257
|
+
}
|
1258
|
+
}
|
/**
 * Gives the actual byte length of a string for an encoding. This is not the same as `string.length` since that
 * returns the number of characters in the string.
 *
 * @param string The string to test.
 * @param encoding The encoding to use for calculation. Defaults is `utf8`.
 */
static byteLength(string, encoding) {
  // Buffers already know their byte length.
  if (Buffer.isBuffer(string)) {
    return string.length;
  }
  // Typed arrays / DataViews / ArrayBuffers carry an explicit byteLength.
  if (typeof string !== "string" && (ArrayBuffer.isView(string) || Buffer._isInstance(string, ArrayBuffer))) {
    return string.byteLength;
  }
  if (typeof string !== "string") {
    throw new TypeError(
      'The "string" argument must be one of type string, Buffer, or ArrayBuffer. Received type ' + typeof string
    );
  }
  const len = string.length;
  // Undocumented third argument: when true, unknown encodings yield -1 below.
  const mustMatch = arguments.length > 2 && arguments[2] === true;
  if (!mustMatch && len === 0) {
    return 0;
  }
  switch (encoding?.toLowerCase()) {
    case "ascii":
    case "latin1":
    case "binary":
      // One byte per character.
      return len;
    case "utf8":
      return Buffer._utf8ToBytes(string).length;
    case "hex":
      // Two hex digits per byte.
      return len >>> 1;
    case "ucs2":
    case "utf16le":
      // Two bytes per UTF-16 code unit.
      return len * 2;
    case "base64":
      return Buffer._base64ToBytes(string).length;
    default:
      // Unknown encoding: -1 in strict mode, otherwise fall back to UTF-8.
      return mustMatch ? -1 : Buffer._utf8ToBytes(string).length;
  }
}
/**
 * Returns a Buffer which is the result of concatenating all the buffers in the list together.
 *
 * - If the list has no items, or if the `totalLength` is 0, then it returns a zero-length buffer.
 * - If the list has exactly one item, then the first item is returned.
 * - If the list has more than one item, then a new buffer is created.
 *
 * It is faster to provide the `totalLength` if it is known. However, it will be calculated if not provided at
 * a small computational expense.
 *
 * @param list An array of Buffer objects to concatenate.
 * @param totalLength Total length of the buffers when concatenated.
 */
static concat(list, totalLength) {
  if (!Array.isArray(list)) {
    throw new TypeError('"list" argument must be an Array of Buffers');
  }
  if (list.length === 0) {
    return Buffer.alloc(0);
  }
  let i;
  // Sum the lengths when the caller did not provide totalLength.
  if (totalLength === void 0) {
    totalLength = 0;
    for (i = 0; i < list.length; ++i) {
      totalLength += list[i].length;
    }
  }
  const buffer = Buffer.allocUnsafe(totalLength);
  let pos = 0;
  for (i = 0; i < list.length; ++i) {
    let buf = list[i];
    if (Buffer._isInstance(buf, Uint8Array)) {
      if (pos + buf.length > buffer.length) {
        // Source would overrun the destination; copy() truncates safely,
        // so upgrade plain Uint8Arrays to Buffers to get that behavior.
        if (!Buffer.isBuffer(buf)) {
          buf = Buffer.from(buf);
        }
        buf.copy(buffer, pos);
      } else {
        // Fast path: bulk copy via the native typed-array set().
        Uint8Array.prototype.set.call(buffer, buf, pos);
      }
    } else if (!Buffer.isBuffer(buf)) {
      throw new TypeError('"list" argument must be an Array of Buffers');
    } else {
      buf.copy(buffer, pos);
    }
    pos += buf.length;
  }
  return buffer;
}
1350
|
+
/**
|
1351
|
+
* The same as `buf1.compare(buf2)`.
|
1352
|
+
*/
|
1353
|
+
static compare(buf1, buf2) {
|
1354
|
+
if (Buffer._isInstance(buf1, Uint8Array)) {
|
1355
|
+
buf1 = Buffer.from(buf1, buf1.byteOffset, buf1.byteLength);
|
1356
|
+
}
|
1357
|
+
if (Buffer._isInstance(buf2, Uint8Array)) {
|
1358
|
+
buf2 = Buffer.from(buf2, buf2.byteOffset, buf2.byteLength);
|
1359
|
+
}
|
1360
|
+
if (!Buffer.isBuffer(buf1) || !Buffer.isBuffer(buf2)) {
|
1361
|
+
throw new TypeError('The "buf1", "buf2" arguments must be one of type Buffer or Uint8Array');
|
1362
|
+
}
|
1363
|
+
if (buf1 === buf2) {
|
1364
|
+
return 0;
|
1365
|
+
}
|
1366
|
+
let x = buf1.length;
|
1367
|
+
let y = buf2.length;
|
1368
|
+
for (let i = 0, len = Math.min(x, y); i < len; ++i) {
|
1369
|
+
if (buf1[i] !== buf2[i]) {
|
1370
|
+
x = buf1[i];
|
1371
|
+
y = buf2[i];
|
1372
|
+
break;
|
1373
|
+
}
|
1374
|
+
}
|
1375
|
+
if (x < y) {
|
1376
|
+
return -1;
|
1377
|
+
}
|
1378
|
+
if (y < x) {
|
1379
|
+
return 1;
|
1380
|
+
}
|
1381
|
+
return 0;
|
1382
|
+
}
|
1383
|
+
/**
|
1384
|
+
* Allocates a new buffer of `size` octets.
|
1385
|
+
*
|
1386
|
+
* @param size The number of octets to allocate.
|
1387
|
+
* @param fill If specified, the buffer will be initialized by calling `buf.fill(fill)`, or with zeroes otherwise.
|
1388
|
+
* @param encoding The encoding used for the call to `buf.fill()` while initializing.
|
1389
|
+
*/
|
1390
|
+
static alloc(size, fill, encoding) {
|
1391
|
+
if (typeof size !== "number") {
|
1392
|
+
throw new TypeError('"size" argument must be of type number');
|
1393
|
+
} else if (size < 0) {
|
1394
|
+
throw new RangeError('The value "' + size + '" is invalid for option "size"');
|
1395
|
+
}
|
1396
|
+
if (size <= 0) {
|
1397
|
+
return new Buffer(size);
|
1398
|
+
}
|
1399
|
+
if (fill !== void 0) {
|
1400
|
+
return typeof encoding === "string" ? new Buffer(size).fill(fill, 0, size, encoding) : new Buffer(size).fill(fill);
|
1401
|
+
}
|
1402
|
+
return new Buffer(size);
|
1403
|
+
}
|
1404
|
+
/**
|
1405
|
+
* Allocates a new buffer of `size` octets without initializing memory. The contents of the buffer are unknown.
|
1406
|
+
*
|
1407
|
+
* @param size
|
1408
|
+
*/
|
1409
|
+
static allocUnsafe(size) {
|
1410
|
+
if (typeof size !== "number") {
|
1411
|
+
throw new TypeError('"size" argument must be of type number');
|
1412
|
+
} else if (size < 0) {
|
1413
|
+
throw new RangeError('The value "' + size + '" is invalid for option "size"');
|
1414
|
+
}
|
1415
|
+
return new Buffer(size < 0 ? 0 : Buffer._checked(size) | 0);
|
1416
|
+
}
|
1417
|
+
/**
|
1418
|
+
* Returns true if the given `obj` is an instance of `type`.
|
1419
|
+
*
|
1420
|
+
* @param obj
|
1421
|
+
* @param type
|
1422
|
+
*/
|
1423
|
+
static _isInstance(obj, type) {
|
1424
|
+
return obj instanceof type || obj != null && obj.constructor != null && obj.constructor.name != null && obj.constructor.name === type.name;
|
1425
|
+
}
|
1426
|
+
static _checked(length) {
|
1427
|
+
if (length >= K_MAX_LENGTH) {
|
1428
|
+
throw new RangeError(
|
1429
|
+
"Attempt to allocate Buffer larger than maximum size: 0x" + K_MAX_LENGTH.toString(16) + " bytes"
|
1430
|
+
);
|
1431
|
+
}
|
1432
|
+
return length | 0;
|
1433
|
+
}
|
1434
|
+
static _blitBuffer(src, dst, offset, length) {
|
1435
|
+
let i;
|
1436
|
+
for (i = 0; i < length; ++i) {
|
1437
|
+
if (i + offset >= dst.length || i >= src.length) {
|
1438
|
+
break;
|
1439
|
+
}
|
1440
|
+
dst[i + offset] = src[i];
|
1441
|
+
}
|
1442
|
+
return i;
|
1443
|
+
}
|
1444
|
+
static _utf8Write(buf, string, offset, length) {
|
1445
|
+
return Buffer._blitBuffer(Buffer._utf8ToBytes(string, buf.length - offset), buf, offset, length);
|
1446
|
+
}
|
1447
|
+
static _asciiWrite(buf, string, offset, length) {
|
1448
|
+
return Buffer._blitBuffer(Buffer._asciiToBytes(string), buf, offset, length);
|
1449
|
+
}
|
1450
|
+
static _base64Write(buf, string, offset, length) {
|
1451
|
+
return Buffer._blitBuffer(Buffer._base64ToBytes(string), buf, offset, length);
|
1452
|
+
}
|
1453
|
+
static _ucs2Write(buf, string, offset, length) {
|
1454
|
+
return Buffer._blitBuffer(Buffer._utf16leToBytes(string, buf.length - offset), buf, offset, length);
|
1455
|
+
}
|
1456
|
+
static _hexWrite(buf, string, offset, length) {
|
1457
|
+
offset = Number(offset) || 0;
|
1458
|
+
const remaining = buf.length - offset;
|
1459
|
+
if (!length) {
|
1460
|
+
length = remaining;
|
1461
|
+
} else {
|
1462
|
+
length = Number(length);
|
1463
|
+
if (length > remaining) {
|
1464
|
+
length = remaining;
|
1465
|
+
}
|
1466
|
+
}
|
1467
|
+
const strLen = string.length;
|
1468
|
+
if (length > strLen / 2) {
|
1469
|
+
length = strLen / 2;
|
1470
|
+
}
|
1471
|
+
let i;
|
1472
|
+
for (i = 0; i < length; ++i) {
|
1473
|
+
const parsed = parseInt(string.substr(i * 2, 2), 16);
|
1474
|
+
if (parsed !== parsed) {
|
1475
|
+
return i;
|
1476
|
+
}
|
1477
|
+
buf[offset + i] = parsed;
|
1478
|
+
}
|
1479
|
+
return i;
|
1480
|
+
}
|
/**
 * Encodes a JS string as an array of UTF-8 bytes, emitting U+FFFD replacement
 * bytes (239, 191, 189) for lone/invalid surrogates.
 *
 * @param string Input string.
 * @param units Optional byte budget; encoding stops once it would be exceeded.
 * @returns Array of byte values.
 */
static _utf8ToBytes(string, units) {
  units = units || Infinity;
  const length = string.length;
  const bytes = [];
  let codePoint;
  // Pending high surrogate awaiting its low half.
  let leadSurrogate = null;
  for (let i = 0; i < length; ++i) {
    codePoint = string.charCodeAt(i);
    // Surrogate range (0xD800-0xDFFF).
    if (codePoint > 55295 && codePoint < 57344) {
      if (!leadSurrogate) {
        if (codePoint > 56319) {
          // Lone low surrogate: emit replacement char if budget allows.
          if ((units -= 3) > -1) {
            bytes.push(239, 191, 189);
          }
          continue;
        } else if (i + 1 === length) {
          // High surrogate at end of string: also invalid.
          if ((units -= 3) > -1) {
            bytes.push(239, 191, 189);
          }
          continue;
        }
        // Remember the high surrogate and look at the next unit.
        leadSurrogate = codePoint;
        continue;
      }
      if (codePoint < 56320) {
        // Two high surrogates in a row: replace the first, retry with this one.
        if ((units -= 3) > -1) {
          bytes.push(239, 191, 189);
        }
        leadSurrogate = codePoint;
        continue;
      }
      // Valid pair: combine into a single code point above U+FFFF.
      codePoint = (leadSurrogate - 55296 << 10 | codePoint - 56320) + 65536;
    } else if (leadSurrogate) {
      // High surrogate followed by a non-surrogate: replace it.
      if ((units -= 3) > -1) {
        bytes.push(239, 191, 189);
      }
    }
    leadSurrogate = null;
    // Emit 1-4 bytes depending on the code point's magnitude.
    if (codePoint < 128) {
      if ((units -= 1) < 0) {
        break;
      }
      bytes.push(codePoint);
    } else if (codePoint < 2048) {
      if ((units -= 2) < 0) {
        break;
      }
      bytes.push(codePoint >> 6 | 192, codePoint & 63 | 128);
    } else if (codePoint < 65536) {
      if ((units -= 3) < 0) {
        break;
      }
      bytes.push(codePoint >> 12 | 224, codePoint >> 6 & 63 | 128, codePoint & 63 | 128);
    } else if (codePoint < 1114112) {
      if ((units -= 4) < 0) {
        break;
      }
      bytes.push(
        codePoint >> 18 | 240,
        codePoint >> 12 & 63 | 128,
        codePoint >> 6 & 63 | 128,
        codePoint & 63 | 128
      );
    } else {
      throw new Error("Invalid code point");
    }
  }
  return bytes;
}
1550
|
+
static _base64ToBytes(str) {
|
1551
|
+
return toByteArray(base64clean(str));
|
1552
|
+
}
|
1553
|
+
static _asciiToBytes(str) {
|
1554
|
+
const byteArray = [];
|
1555
|
+
for (let i = 0; i < str.length; ++i) {
|
1556
|
+
byteArray.push(str.charCodeAt(i) & 255);
|
1557
|
+
}
|
1558
|
+
return byteArray;
|
1559
|
+
}
|
1560
|
+
static _utf16leToBytes(str, units) {
|
1561
|
+
let c, hi, lo;
|
1562
|
+
const byteArray = [];
|
1563
|
+
for (let i = 0; i < str.length; ++i) {
|
1564
|
+
if ((units -= 2) < 0) break;
|
1565
|
+
c = str.charCodeAt(i);
|
1566
|
+
hi = c >> 8;
|
1567
|
+
lo = c % 256;
|
1568
|
+
byteArray.push(lo);
|
1569
|
+
byteArray.push(hi);
|
1570
|
+
}
|
1571
|
+
return byteArray;
|
1572
|
+
}
|
1573
|
+
static _hexSlice(buf, start, end) {
|
1574
|
+
const len = buf.length;
|
1575
|
+
if (!start || start < 0) {
|
1576
|
+
start = 0;
|
1577
|
+
}
|
1578
|
+
if (!end || end < 0 || end > len) {
|
1579
|
+
end = len;
|
1580
|
+
}
|
1581
|
+
let out = "";
|
1582
|
+
for (let i = start; i < end; ++i) {
|
1583
|
+
out += hexSliceLookupTable[buf[i]];
|
1584
|
+
}
|
1585
|
+
return out;
|
1586
|
+
}
|
1587
|
+
static _base64Slice(buf, start, end) {
|
1588
|
+
if (start === 0 && end === buf.length) {
|
1589
|
+
return fromByteArray(buf);
|
1590
|
+
} else {
|
1591
|
+
return fromByteArray(buf.slice(start, end));
|
1592
|
+
}
|
1593
|
+
}
|
/**
 * Decodes buf[start, end) as UTF-8 into a JS string. Invalid sequences
 * decode to U+FFFD; code points above U+FFFF become surrogate pairs.
 */
static _utf8Slice(buf, start, end) {
  end = Math.min(buf.length, end);
  // Accumulates UTF-16 code units before stringification.
  const res = [];
  let i = start;
  while (i < end) {
    const firstByte = buf[i];
    let codePoint = null;
    // Sequence length from the lead byte: 0xF0+ -> 4, 0xE0+ -> 3, 0xC0+ -> 2, else 1.
    let bytesPerSequence = firstByte > 239 ? 4 : firstByte > 223 ? 3 : firstByte > 191 ? 2 : 1;
    if (i + bytesPerSequence <= end) {
      let secondByte, thirdByte, fourthByte, tempCodePoint;
      switch (bytesPerSequence) {
        case 1:
          if (firstByte < 128) {
            codePoint = firstByte;
          }
          break;
        case 2:
          secondByte = buf[i + 1];
          // Continuation bytes must look like 10xxxxxx.
          if ((secondByte & 192) === 128) {
            tempCodePoint = (firstByte & 31) << 6 | secondByte & 63;
            // Reject overlong encodings of ASCII.
            if (tempCodePoint > 127) {
              codePoint = tempCodePoint;
            }
          }
          break;
        case 3:
          secondByte = buf[i + 1];
          thirdByte = buf[i + 2];
          if ((secondByte & 192) === 128 && (thirdByte & 192) === 128) {
            tempCodePoint = (firstByte & 15) << 12 | (secondByte & 63) << 6 | thirdByte & 63;
            // Reject overlong forms and the surrogate range.
            if (tempCodePoint > 2047 && (tempCodePoint < 55296 || tempCodePoint > 57343)) {
              codePoint = tempCodePoint;
            }
          }
          break;
        case 4:
          secondByte = buf[i + 1];
          thirdByte = buf[i + 2];
          fourthByte = buf[i + 3];
          if ((secondByte & 192) === 128 && (thirdByte & 192) === 128 && (fourthByte & 192) === 128) {
            tempCodePoint = (firstByte & 15) << 18 | (secondByte & 63) << 12 | (thirdByte & 63) << 6 | fourthByte & 63;
            // Accept only code points above the BMP and within Unicode's range.
            if (tempCodePoint > 65535 && tempCodePoint < 1114112) {
              codePoint = tempCodePoint;
            }
          }
      }
    }
    if (codePoint === null) {
      // Invalid or truncated sequence: emit U+FFFD and advance one byte.
      codePoint = 65533;
      bytesPerSequence = 1;
    } else if (codePoint > 65535) {
      // Split supplementary-plane code points into a surrogate pair.
      codePoint -= 65536;
      res.push(codePoint >>> 10 & 1023 | 55296);
      codePoint = 56320 | codePoint & 1023;
    }
    res.push(codePoint);
    i += bytesPerSequence;
  }
  return Buffer._decodeCodePointsArray(res);
}
1654
|
+
static _decodeCodePointsArray(codePoints) {
|
1655
|
+
const len = codePoints.length;
|
1656
|
+
if (len <= MAX_ARGUMENTS_LENGTH) {
|
1657
|
+
return String.fromCharCode.apply(String, codePoints);
|
1658
|
+
}
|
1659
|
+
let res = "";
|
1660
|
+
let i = 0;
|
1661
|
+
while (i < len) {
|
1662
|
+
res += String.fromCharCode.apply(String, codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH));
|
1663
|
+
}
|
1664
|
+
return res;
|
1665
|
+
}
|
1666
|
+
static _asciiSlice(buf, start, end) {
|
1667
|
+
let ret = "";
|
1668
|
+
end = Math.min(buf.length, end);
|
1669
|
+
for (let i = start; i < end; ++i) {
|
1670
|
+
ret += String.fromCharCode(buf[i] & 127);
|
1671
|
+
}
|
1672
|
+
return ret;
|
1673
|
+
}
|
1674
|
+
static _latin1Slice(buf, start, end) {
|
1675
|
+
let ret = "";
|
1676
|
+
end = Math.min(buf.length, end);
|
1677
|
+
for (let i = start; i < end; ++i) {
|
1678
|
+
ret += String.fromCharCode(buf[i]);
|
1679
|
+
}
|
1680
|
+
return ret;
|
1681
|
+
}
|
1682
|
+
static _utf16leSlice(buf, start, end) {
|
1683
|
+
const bytes = buf.slice(start, end);
|
1684
|
+
let res = "";
|
1685
|
+
for (let i = 0; i < bytes.length - 1; i += 2) {
|
1686
|
+
res += String.fromCharCode(bytes[i] + bytes[i + 1] * 256);
|
1687
|
+
}
|
1688
|
+
return res;
|
1689
|
+
}
|
1690
|
+
static _arrayIndexOf(arr, val, byteOffset, encoding, dir) {
|
1691
|
+
let indexSize = 1;
|
1692
|
+
let arrLength = arr.length;
|
1693
|
+
let valLength = val.length;
|
1694
|
+
if (encoding !== void 0) {
|
1695
|
+
encoding = Buffer._getEncoding(encoding);
|
1696
|
+
if (encoding === "ucs2" || encoding === "utf16le") {
|
1697
|
+
if (arr.length < 2 || val.length < 2) {
|
1698
|
+
return -1;
|
1699
|
+
}
|
1700
|
+
indexSize = 2;
|
1701
|
+
arrLength /= 2;
|
1702
|
+
valLength /= 2;
|
1703
|
+
byteOffset /= 2;
|
1704
|
+
}
|
1705
|
+
}
|
1706
|
+
function read(buf, i2) {
|
1707
|
+
if (indexSize === 1) {
|
1708
|
+
return buf[i2];
|
1709
|
+
} else {
|
1710
|
+
return buf.readUInt16BE(i2 * indexSize);
|
1711
|
+
}
|
1712
|
+
}
|
1713
|
+
let i;
|
1714
|
+
if (dir) {
|
1715
|
+
let foundIndex = -1;
|
1716
|
+
for (i = byteOffset; i < arrLength; i++) {
|
1717
|
+
if (read(arr, i) === read(val, foundIndex === -1 ? 0 : i - foundIndex)) {
|
1718
|
+
if (foundIndex === -1) foundIndex = i;
|
1719
|
+
if (i - foundIndex + 1 === valLength) return foundIndex * indexSize;
|
1720
|
+
} else {
|
1721
|
+
if (foundIndex !== -1) i -= i - foundIndex;
|
1722
|
+
foundIndex = -1;
|
1723
|
+
}
|
1724
|
+
}
|
1725
|
+
} else {
|
1726
|
+
if (byteOffset + valLength > arrLength) {
|
1727
|
+
byteOffset = arrLength - valLength;
|
1728
|
+
}
|
1729
|
+
for (i = byteOffset; i >= 0; i--) {
|
1730
|
+
let found = true;
|
1731
|
+
for (let j = 0; j < valLength; j++) {
|
1732
|
+
if (read(arr, i + j) !== read(val, j)) {
|
1733
|
+
found = false;
|
1734
|
+
break;
|
1735
|
+
}
|
1736
|
+
}
|
1737
|
+
if (found) {
|
1738
|
+
return i;
|
1739
|
+
}
|
1740
|
+
}
|
1741
|
+
}
|
1742
|
+
return -1;
|
1743
|
+
}
|
1744
|
+
static _checkOffset(offset, ext, length) {
|
1745
|
+
if (offset % 1 !== 0 || offset < 0) throw new RangeError("offset is not uint");
|
1746
|
+
if (offset + ext > length) throw new RangeError("Trying to access beyond buffer length");
|
1747
|
+
}
|
1748
|
+
static _checkInt(buf, value, offset, ext, max, min) {
|
1749
|
+
if (!Buffer.isBuffer(buf)) throw new TypeError('"buffer" argument must be a Buffer instance');
|
1750
|
+
if (value > max || value < min) throw new RangeError('"value" argument is out of bounds');
|
1751
|
+
if (offset + ext > buf.length) throw new RangeError("Index out of range");
|
1752
|
+
}
|
1753
|
+
static _getEncoding(encoding) {
|
1754
|
+
let toLowerCase = false;
|
1755
|
+
let originalEncoding = "";
|
1756
|
+
for (; ; ) {
|
1757
|
+
switch (encoding) {
|
1758
|
+
case "hex":
|
1759
|
+
return "hex";
|
1760
|
+
case "utf8":
|
1761
|
+
return "utf8";
|
1762
|
+
case "ascii":
|
1763
|
+
return "ascii";
|
1764
|
+
case "binary":
|
1765
|
+
return "binary";
|
1766
|
+
case "latin1":
|
1767
|
+
return "latin1";
|
1768
|
+
case "ucs2":
|
1769
|
+
return "ucs2";
|
1770
|
+
case "utf16le":
|
1771
|
+
return "utf16le";
|
1772
|
+
case "base64":
|
1773
|
+
return "base64";
|
1774
|
+
default: {
|
1775
|
+
if (toLowerCase) {
|
1776
|
+
throw new TypeError("Unknown or unsupported encoding: " + originalEncoding);
|
1777
|
+
}
|
1778
|
+
toLowerCase = true;
|
1779
|
+
originalEncoding = encoding;
|
1780
|
+
encoding = encoding.toLowerCase();
|
1781
|
+
}
|
1782
|
+
}
|
1783
|
+
}
|
1784
|
+
}
|
1785
|
+
}
|
1786
|
+
const hexSliceLookupTable = function() {
|
1787
|
+
const alphabet = "0123456789abcdef";
|
1788
|
+
const table = new Array(256);
|
1789
|
+
for (let i = 0; i < 16; ++i) {
|
1790
|
+
const i16 = i * 16;
|
1791
|
+
for (let j = 0; j < 16; ++j) {
|
1792
|
+
table[i16 + j] = alphabet[i] + alphabet[j];
|
1793
|
+
}
|
1794
|
+
}
|
1795
|
+
return table;
|
1796
|
+
}();
|
1797
|
+
const INVALID_BASE64_RE = /[^+/0-9A-Za-z-_]/g;
|
1798
|
+
function base64clean(str) {
|
1799
|
+
str = str.split("=")[0];
|
1800
|
+
str = str.trim().replace(INVALID_BASE64_RE, "");
|
1801
|
+
if (str.length < 2) return "";
|
1802
|
+
while (str.length % 4 !== 0) {
|
1803
|
+
str = str + "=";
|
1804
|
+
}
|
1805
|
+
return str;
|
1806
|
+
}
|
1807
|
+
|
25
1808
|
function notEmpty(value) {
|
26
1809
|
return value !== null && value !== void 0;
|
27
1810
|
}
|
@@ -116,155 +1899,15 @@ function promiseMap(inputValues, mapper) {
|
|
116
1899
|
return inputValues.reduce(reducer, Promise.resolve([]));
|
117
1900
|
}
|
118
1901
|
|
119
|
-
|
120
|
-
|
121
|
-
if (isDefined(process) && isDefined(process.env)) {
|
122
|
-
return {
|
123
|
-
apiKey: process.env.XATA_API_KEY ?? getGlobalApiKey(),
|
124
|
-
databaseURL: process.env.XATA_DATABASE_URL ?? getGlobalDatabaseURL(),
|
125
|
-
branch: process.env.XATA_BRANCH ?? getGlobalBranch(),
|
126
|
-
deployPreview: process.env.XATA_PREVIEW,
|
127
|
-
deployPreviewBranch: process.env.XATA_PREVIEW_BRANCH,
|
128
|
-
vercelGitCommitRef: process.env.VERCEL_GIT_COMMIT_REF,
|
129
|
-
vercelGitRepoOwner: process.env.VERCEL_GIT_REPO_OWNER
|
130
|
-
};
|
131
|
-
}
|
132
|
-
} catch (err) {
|
133
|
-
}
|
134
|
-
try {
|
135
|
-
if (isObject(Deno) && isObject(Deno.env)) {
|
136
|
-
return {
|
137
|
-
apiKey: Deno.env.get("XATA_API_KEY") ?? getGlobalApiKey(),
|
138
|
-
databaseURL: Deno.env.get("XATA_DATABASE_URL") ?? getGlobalDatabaseURL(),
|
139
|
-
branch: Deno.env.get("XATA_BRANCH") ?? getGlobalBranch(),
|
140
|
-
deployPreview: Deno.env.get("XATA_PREVIEW"),
|
141
|
-
deployPreviewBranch: Deno.env.get("XATA_PREVIEW_BRANCH"),
|
142
|
-
vercelGitCommitRef: Deno.env.get("VERCEL_GIT_COMMIT_REF"),
|
143
|
-
vercelGitRepoOwner: Deno.env.get("VERCEL_GIT_REPO_OWNER")
|
144
|
-
};
|
145
|
-
}
|
146
|
-
} catch (err) {
|
147
|
-
}
|
148
|
-
return {
|
149
|
-
apiKey: getGlobalApiKey(),
|
150
|
-
databaseURL: getGlobalDatabaseURL(),
|
151
|
-
branch: getGlobalBranch(),
|
152
|
-
deployPreview: void 0,
|
153
|
-
deployPreviewBranch: void 0,
|
154
|
-
vercelGitCommitRef: void 0,
|
155
|
-
vercelGitRepoOwner: void 0
|
156
|
-
};
|
157
|
-
}
|
158
|
-
function getEnableBrowserVariable() {
|
159
|
-
try {
|
160
|
-
if (isObject(process) && isObject(process.env) && process.env.XATA_ENABLE_BROWSER !== void 0) {
|
161
|
-
return process.env.XATA_ENABLE_BROWSER === "true";
|
162
|
-
}
|
163
|
-
} catch (err) {
|
164
|
-
}
|
165
|
-
try {
|
166
|
-
if (isObject(Deno) && isObject(Deno.env) && Deno.env.get("XATA_ENABLE_BROWSER") !== void 0) {
|
167
|
-
return Deno.env.get("XATA_ENABLE_BROWSER") === "true";
|
168
|
-
}
|
169
|
-
} catch (err) {
|
170
|
-
}
|
171
|
-
try {
|
172
|
-
return XATA_ENABLE_BROWSER === true || XATA_ENABLE_BROWSER === "true";
|
173
|
-
} catch (err) {
|
174
|
-
return void 0;
|
175
|
-
}
|
176
|
-
}
|
177
|
-
function getGlobalApiKey() {
|
178
|
-
try {
|
179
|
-
return XATA_API_KEY;
|
180
|
-
} catch (err) {
|
181
|
-
return void 0;
|
182
|
-
}
|
183
|
-
}
|
184
|
-
function getGlobalDatabaseURL() {
|
185
|
-
try {
|
186
|
-
return XATA_DATABASE_URL;
|
187
|
-
} catch (err) {
|
188
|
-
return void 0;
|
189
|
-
}
|
190
|
-
}
|
191
|
-
function getGlobalBranch() {
|
192
|
-
try {
|
193
|
-
return XATA_BRANCH;
|
194
|
-
} catch (err) {
|
195
|
-
return void 0;
|
196
|
-
}
|
197
|
-
}
|
198
|
-
function getDatabaseURL() {
|
199
|
-
try {
|
200
|
-
const { databaseURL } = getEnvironment();
|
201
|
-
return databaseURL;
|
202
|
-
} catch (err) {
|
203
|
-
return void 0;
|
204
|
-
}
|
205
|
-
}
|
206
|
-
function getAPIKey() {
|
207
|
-
try {
|
208
|
-
const { apiKey } = getEnvironment();
|
209
|
-
return apiKey;
|
210
|
-
} catch (err) {
|
211
|
-
return void 0;
|
212
|
-
}
|
213
|
-
}
|
214
|
-
function getBranch() {
|
215
|
-
try {
|
216
|
-
const { branch } = getEnvironment();
|
217
|
-
return branch;
|
218
|
-
} catch (err) {
|
219
|
-
return void 0;
|
220
|
-
}
|
221
|
-
}
|
222
|
-
function buildPreviewBranchName({ org, branch }) {
|
223
|
-
return `preview-${org}-${branch}`;
|
224
|
-
}
|
225
|
-
function getPreviewBranch() {
|
226
|
-
try {
|
227
|
-
const { deployPreview, deployPreviewBranch, vercelGitCommitRef, vercelGitRepoOwner } = getEnvironment();
|
228
|
-
if (deployPreviewBranch)
|
229
|
-
return deployPreviewBranch;
|
230
|
-
switch (deployPreview) {
|
231
|
-
case "vercel": {
|
232
|
-
if (!vercelGitCommitRef || !vercelGitRepoOwner) {
|
233
|
-
console.warn("XATA_PREVIEW=vercel but VERCEL_GIT_COMMIT_REF or VERCEL_GIT_REPO_OWNER is not valid");
|
234
|
-
return void 0;
|
235
|
-
}
|
236
|
-
return buildPreviewBranchName({ org: vercelGitRepoOwner, branch: vercelGitCommitRef });
|
237
|
-
}
|
238
|
-
}
|
239
|
-
return void 0;
|
240
|
-
} catch (err) {
|
241
|
-
return void 0;
|
242
|
-
}
|
243
|
-
}
|
244
|
-
|
245
|
-
var __accessCheck$6 = (obj, member, msg) => {
|
246
|
-
if (!member.has(obj))
|
247
|
-
throw TypeError("Cannot " + msg);
|
248
|
-
};
|
249
|
-
var __privateGet$5 = (obj, member, getter) => {
|
250
|
-
__accessCheck$6(obj, member, "read from private field");
|
251
|
-
return getter ? getter.call(obj) : member.get(obj);
|
252
|
-
};
|
253
|
-
var __privateAdd$6 = (obj, member, value) => {
|
254
|
-
if (member.has(obj))
|
255
|
-
throw TypeError("Cannot add the same private member more than once");
|
256
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
257
|
-
};
|
258
|
-
var __privateSet$4 = (obj, member, value, setter) => {
|
259
|
-
__accessCheck$6(obj, member, "write to private field");
|
260
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
261
|
-
return value;
|
262
|
-
};
|
263
|
-
var __privateMethod$4 = (obj, member, method) => {
|
264
|
-
__accessCheck$6(obj, member, "access private method");
|
265
|
-
return method;
|
1902
|
+
var __typeError$6 = (msg) => {
|
1903
|
+
throw TypeError(msg);
|
266
1904
|
};
|
267
|
-
var
|
1905
|
+
var __accessCheck$6 = (obj, member, msg) => member.has(obj) || __typeError$6("Cannot " + msg);
|
1906
|
+
var __privateGet$5 = (obj, member, getter) => (__accessCheck$6(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
1907
|
+
var __privateAdd$6 = (obj, member, value) => member.has(obj) ? __typeError$6("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
1908
|
+
var __privateSet$4 = (obj, member, value, setter) => (__accessCheck$6(obj, member, "write to private field"), member.set(obj, value), value);
|
1909
|
+
var __privateMethod$4 = (obj, member, method) => (__accessCheck$6(obj, member, "access private method"), method);
|
1910
|
+
var _fetch, _queue, _concurrency, _ApiRequestPool_instances, enqueue_fn;
|
268
1911
|
const REQUEST_TIMEOUT = 5 * 60 * 1e3;
|
269
1912
|
function getFetchImplementation(userFetch) {
|
270
1913
|
const globalFetch = typeof fetch !== "undefined" ? fetch : void 0;
|
@@ -277,10 +1920,10 @@ function getFetchImplementation(userFetch) {
|
|
277
1920
|
}
|
278
1921
|
class ApiRequestPool {
|
279
1922
|
constructor(concurrency = 10) {
|
280
|
-
__privateAdd$6(this,
|
281
|
-
__privateAdd$6(this, _fetch
|
282
|
-
__privateAdd$6(this, _queue
|
283
|
-
__privateAdd$6(this, _concurrency
|
1923
|
+
__privateAdd$6(this, _ApiRequestPool_instances);
|
1924
|
+
__privateAdd$6(this, _fetch);
|
1925
|
+
__privateAdd$6(this, _queue);
|
1926
|
+
__privateAdd$6(this, _concurrency);
|
284
1927
|
__privateSet$4(this, _queue, []);
|
285
1928
|
__privateSet$4(this, _concurrency, concurrency);
|
286
1929
|
this.running = 0;
|
@@ -315,7 +1958,7 @@ class ApiRequestPool {
|
|
315
1958
|
}
|
316
1959
|
return response;
|
317
1960
|
};
|
318
|
-
return __privateMethod$4(this,
|
1961
|
+
return __privateMethod$4(this, _ApiRequestPool_instances, enqueue_fn).call(this, async () => {
|
319
1962
|
return await runRequest();
|
320
1963
|
});
|
321
1964
|
}
|
@@ -323,7 +1966,7 @@ class ApiRequestPool {
|
|
323
1966
|
_fetch = new WeakMap();
|
324
1967
|
_queue = new WeakMap();
|
325
1968
|
_concurrency = new WeakMap();
|
326
|
-
|
1969
|
+
_ApiRequestPool_instances = new WeakSet();
|
327
1970
|
enqueue_fn = function(task) {
|
328
1971
|
const promise = new Promise((resolve) => __privateGet$5(this, _queue).push(resolve)).finally(() => {
|
329
1972
|
this.started--;
|
@@ -526,7 +2169,7 @@ function defaultOnOpen(response) {
|
|
526
2169
|
}
|
527
2170
|
}
|
528
2171
|
|
529
|
-
const VERSION = "0.29.
|
2172
|
+
const VERSION = "0.29.5";
|
530
2173
|
|
531
2174
|
class ErrorWithCause extends Error {
|
532
2175
|
constructor(message, options) {
|
@@ -606,35 +2249,30 @@ function parseProviderString(provider = "production") {
|
|
606
2249
|
return provider;
|
607
2250
|
}
|
608
2251
|
const [main, workspaces] = provider.split(",");
|
609
|
-
if (!main || !workspaces)
|
610
|
-
return null;
|
2252
|
+
if (!main || !workspaces) return null;
|
611
2253
|
return { main, workspaces };
|
612
2254
|
}
|
613
2255
|
function buildProviderString(provider) {
|
614
|
-
if (isHostProviderAlias(provider))
|
615
|
-
return provider;
|
2256
|
+
if (isHostProviderAlias(provider)) return provider;
|
616
2257
|
return `${provider.main},${provider.workspaces}`;
|
617
2258
|
}
|
618
2259
|
function parseWorkspacesUrlParts(url) {
|
619
|
-
if (!isString(url))
|
620
|
-
return null;
|
2260
|
+
if (!isString(url)) return null;
|
621
2261
|
const matches = {
|
622
2262
|
production: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.xata\.sh\/db\/([^:]+):?(.*)?/),
|
623
2263
|
staging: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.staging-xata\.dev\/db\/([^:]+):?(.*)?/),
|
624
2264
|
dev: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.dev-xata\.dev\/db\/([^:]+):?(.*)?/),
|
625
|
-
local: url.match(/(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:([^:]+):?(.*)?/)
|
2265
|
+
local: url.match(/(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:(?:\d+)\/db\/([^:]+):?(.*)?/)
|
626
2266
|
};
|
627
2267
|
const [host, match] = Object.entries(matches).find(([, match2]) => match2 !== null) ?? [];
|
628
|
-
if (!isHostProviderAlias(host) || !match)
|
629
|
-
return null;
|
2268
|
+
if (!isHostProviderAlias(host) || !match) return null;
|
630
2269
|
return { workspace: match[1], region: match[2], database: match[3], branch: match[4], host };
|
631
2270
|
}
|
632
2271
|
|
633
2272
|
const pool = new ApiRequestPool();
|
634
2273
|
const resolveUrl = (url, queryParams = {}, pathParams = {}) => {
|
635
2274
|
const cleanQueryParams = Object.entries(queryParams).reduce((acc, [key, value]) => {
|
636
|
-
if (value === void 0 || value === null)
|
637
|
-
return acc;
|
2275
|
+
if (value === void 0 || value === null) return acc;
|
638
2276
|
return { ...acc, [key]: value };
|
639
2277
|
}, {});
|
640
2278
|
const query = new URLSearchParams(cleanQueryParams).toString();
|
@@ -682,8 +2320,7 @@ function hostHeader(url) {
|
|
682
2320
|
return groups?.host ? { Host: groups.host } : {};
|
683
2321
|
}
|
684
2322
|
async function parseBody(body, headers) {
|
685
|
-
if (!isDefined(body))
|
686
|
-
return void 0;
|
2323
|
+
if (!isDefined(body)) return void 0;
|
687
2324
|
if (isBlob(body) || typeof body.text === "function") {
|
688
2325
|
return body;
|
689
2326
|
}
|
@@ -762,8 +2399,7 @@ async function fetch$1({
|
|
762
2399
|
[TraceAttributes.CLOUDFLARE_RAY_ID]: response.headers?.get("cf-ray") ?? void 0
|
763
2400
|
});
|
764
2401
|
const message = response.headers?.get("x-xata-message");
|
765
|
-
if (message)
|
766
|
-
console.warn(message);
|
2402
|
+
if (message) console.warn(message);
|
767
2403
|
if (response.status === 204) {
|
768
2404
|
return {};
|
769
2405
|
}
|
@@ -847,7 +2483,60 @@ function parseUrl(url) {
|
|
847
2483
|
|
848
2484
|
const dataPlaneFetch = async (options) => fetch$1({ ...options, endpoint: "dataPlane" });
|
849
2485
|
|
850
|
-
const
|
2486
|
+
const listClusterBranches = (variables, signal) => dataPlaneFetch({
|
2487
|
+
url: "/cluster/{clusterId}/branches",
|
2488
|
+
method: "get",
|
2489
|
+
...variables,
|
2490
|
+
signal
|
2491
|
+
});
|
2492
|
+
const listClusterExtensions = (variables, signal) => dataPlaneFetch({
|
2493
|
+
url: "/cluster/{clusterId}/extensions",
|
2494
|
+
method: "get",
|
2495
|
+
...variables,
|
2496
|
+
signal
|
2497
|
+
});
|
2498
|
+
const installClusterExtension = (variables, signal) => dataPlaneFetch({
|
2499
|
+
url: "/cluster/{clusterId}/extensions",
|
2500
|
+
method: "post",
|
2501
|
+
...variables,
|
2502
|
+
signal
|
2503
|
+
});
|
2504
|
+
const dropClusterExtension = (variables, signal) => dataPlaneFetch({
|
2505
|
+
url: "/cluster/{clusterId}/extensions",
|
2506
|
+
method: "delete",
|
2507
|
+
...variables,
|
2508
|
+
signal
|
2509
|
+
});
|
2510
|
+
const getClusterMetrics = (variables, signal) => dataPlaneFetch({
|
2511
|
+
url: "/cluster/{clusterId}/metrics",
|
2512
|
+
method: "get",
|
2513
|
+
...variables,
|
2514
|
+
signal
|
2515
|
+
});
|
2516
|
+
const applyMigration = (variables, signal) => dataPlaneFetch({
|
2517
|
+
url: "/db/{dbBranchName}/migrations/apply",
|
2518
|
+
method: "post",
|
2519
|
+
...variables,
|
2520
|
+
signal
|
2521
|
+
});
|
2522
|
+
const startMigration = (variables, signal) => dataPlaneFetch({
|
2523
|
+
url: "/db/{dbBranchName}/migrations/start",
|
2524
|
+
method: "post",
|
2525
|
+
...variables,
|
2526
|
+
signal
|
2527
|
+
});
|
2528
|
+
const completeMigration = (variables, signal) => dataPlaneFetch({
|
2529
|
+
url: "/db/{dbBranchName}/migrations/complete",
|
2530
|
+
method: "post",
|
2531
|
+
...variables,
|
2532
|
+
signal
|
2533
|
+
});
|
2534
|
+
const rollbackMigration = (variables, signal) => dataPlaneFetch({
|
2535
|
+
url: "/db/{dbBranchName}/migrations/rollback",
|
2536
|
+
method: "post",
|
2537
|
+
...variables,
|
2538
|
+
signal
|
2539
|
+
});
|
851
2540
|
const adaptTable = (variables, signal) => dataPlaneFetch({
|
852
2541
|
url: "/db/{dbBranchName}/migrations/adapt/{tableName}",
|
853
2542
|
method: "post",
|
@@ -860,9 +2549,30 @@ const adaptAllTables = (variables, signal) => dataPlaneFetch({
|
|
860
2549
|
...variables,
|
861
2550
|
signal
|
862
2551
|
});
|
863
|
-
const getBranchMigrationJobStatus = (variables, signal) => dataPlaneFetch({
|
864
|
-
|
865
|
-
|
2552
|
+
const getBranchMigrationJobStatus = (variables, signal) => dataPlaneFetch({
|
2553
|
+
url: "/db/{dbBranchName}/migrations/status",
|
2554
|
+
method: "get",
|
2555
|
+
...variables,
|
2556
|
+
signal
|
2557
|
+
});
|
2558
|
+
const getMigrationJobs = (variables, signal) => dataPlaneFetch({
|
2559
|
+
url: "/db/{dbBranchName}/migrations/jobs",
|
2560
|
+
method: "get",
|
2561
|
+
...variables,
|
2562
|
+
signal
|
2563
|
+
});
|
2564
|
+
const getMigrationJobStatus = (variables, signal) => dataPlaneFetch({
|
2565
|
+
url: "/db/{dbBranchName}/migrations/jobs/{jobId}",
|
2566
|
+
method: "get",
|
2567
|
+
...variables,
|
2568
|
+
signal
|
2569
|
+
});
|
2570
|
+
const getMigrationHistory = (variables, signal) => dataPlaneFetch({
|
2571
|
+
url: "/db/{dbBranchName}/migrations/history",
|
2572
|
+
method: "get",
|
2573
|
+
...variables,
|
2574
|
+
signal
|
2575
|
+
});
|
866
2576
|
const getBranchList = (variables, signal) => dataPlaneFetch({
|
867
2577
|
url: "/dbs/{dbName}",
|
868
2578
|
method: "get",
|
@@ -889,68 +2599,166 @@ const deleteBranch = (variables, signal) => dataPlaneFetch({
|
|
889
2599
|
...variables,
|
890
2600
|
signal
|
891
2601
|
});
|
892
|
-
const getSchema = (variables, signal) => dataPlaneFetch({
|
893
|
-
url: "/db/{dbBranchName}/schema",
|
894
|
-
method: "get",
|
2602
|
+
const getSchema = (variables, signal) => dataPlaneFetch({
|
2603
|
+
url: "/db/{dbBranchName}/schema",
|
2604
|
+
method: "get",
|
2605
|
+
...variables,
|
2606
|
+
signal
|
2607
|
+
});
|
2608
|
+
const getSchemas = (variables, signal) => dataPlaneFetch({
|
2609
|
+
url: "/db/{dbBranchName}/schemas",
|
2610
|
+
method: "get",
|
2611
|
+
...variables,
|
2612
|
+
signal
|
2613
|
+
});
|
2614
|
+
const copyBranch = (variables, signal) => dataPlaneFetch({
|
2615
|
+
url: "/db/{dbBranchName}/copy",
|
2616
|
+
method: "post",
|
2617
|
+
...variables,
|
2618
|
+
signal
|
2619
|
+
});
|
2620
|
+
const getBranchMoveStatus = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/move", method: "get", ...variables, signal });
|
2621
|
+
const moveBranch = (variables, signal) => dataPlaneFetch({
|
2622
|
+
url: "/db/{dbBranchName}/move",
|
2623
|
+
method: "put",
|
2624
|
+
...variables,
|
2625
|
+
signal
|
2626
|
+
});
|
2627
|
+
const updateBranchMetadata = (variables, signal) => dataPlaneFetch({
|
2628
|
+
url: "/db/{dbBranchName}/metadata",
|
2629
|
+
method: "put",
|
2630
|
+
...variables,
|
2631
|
+
signal
|
2632
|
+
});
|
2633
|
+
const getBranchMetadata = (variables, signal) => dataPlaneFetch({
|
2634
|
+
url: "/db/{dbBranchName}/metadata",
|
2635
|
+
method: "get",
|
2636
|
+
...variables,
|
2637
|
+
signal
|
2638
|
+
});
|
2639
|
+
const getBranchStats = (variables, signal) => dataPlaneFetch({
|
2640
|
+
url: "/db/{dbBranchName}/stats",
|
2641
|
+
method: "get",
|
2642
|
+
...variables,
|
2643
|
+
signal
|
2644
|
+
});
|
2645
|
+
const getGitBranchesMapping = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "get", ...variables, signal });
|
2646
|
+
const addGitBranchesEntry = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "post", ...variables, signal });
|
2647
|
+
const removeGitBranchesEntry = (variables, signal) => dataPlaneFetch({
|
2648
|
+
url: "/dbs/{dbName}/gitBranches",
|
2649
|
+
method: "delete",
|
2650
|
+
...variables,
|
2651
|
+
signal
|
2652
|
+
});
|
2653
|
+
const resolveBranch = (variables, signal) => dataPlaneFetch({
|
2654
|
+
url: "/dbs/{dbName}/resolveBranch",
|
2655
|
+
method: "get",
|
2656
|
+
...variables,
|
2657
|
+
signal
|
2658
|
+
});
|
2659
|
+
const getBranchMigrationHistory = (variables, signal) => dataPlaneFetch({
|
2660
|
+
url: "/db/{dbBranchName}/migrations",
|
2661
|
+
method: "get",
|
2662
|
+
...variables,
|
2663
|
+
signal
|
2664
|
+
});
|
2665
|
+
const getBranchMigrationPlan = (variables, signal) => dataPlaneFetch({
|
2666
|
+
url: "/db/{dbBranchName}/migrations/plan",
|
2667
|
+
method: "post",
|
2668
|
+
...variables,
|
2669
|
+
signal
|
2670
|
+
});
|
2671
|
+
const executeBranchMigrationPlan = (variables, signal) => dataPlaneFetch({
|
2672
|
+
url: "/db/{dbBranchName}/migrations/execute",
|
2673
|
+
method: "post",
|
2674
|
+
...variables,
|
2675
|
+
signal
|
2676
|
+
});
|
2677
|
+
const queryMigrationRequests = (variables, signal) => dataPlaneFetch({
|
2678
|
+
url: "/dbs/{dbName}/migrations/query",
|
2679
|
+
method: "post",
|
2680
|
+
...variables,
|
2681
|
+
signal
|
2682
|
+
});
|
2683
|
+
const createMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations", method: "post", ...variables, signal });
|
2684
|
+
const getMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2685
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}",
|
2686
|
+
method: "get",
|
2687
|
+
...variables,
|
2688
|
+
signal
|
2689
|
+
});
|
2690
|
+
const updateMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2691
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}",
|
2692
|
+
method: "patch",
|
2693
|
+
...variables,
|
2694
|
+
signal
|
2695
|
+
});
|
2696
|
+
const listMigrationRequestsCommits = (variables, signal) => dataPlaneFetch({
|
2697
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/commits",
|
2698
|
+
method: "post",
|
2699
|
+
...variables,
|
2700
|
+
signal
|
2701
|
+
});
|
2702
|
+
const compareMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2703
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/compare",
|
2704
|
+
method: "post",
|
2705
|
+
...variables,
|
2706
|
+
signal
|
2707
|
+
});
|
2708
|
+
const getMigrationRequestIsMerged = (variables, signal) => dataPlaneFetch({
|
2709
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
|
2710
|
+
method: "get",
|
2711
|
+
...variables,
|
2712
|
+
signal
|
2713
|
+
});
|
2714
|
+
const mergeMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2715
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
|
2716
|
+
method: "post",
|
2717
|
+
...variables,
|
2718
|
+
signal
|
2719
|
+
});
|
2720
|
+
const getBranchSchemaHistory = (variables, signal) => dataPlaneFetch({
|
2721
|
+
url: "/db/{dbBranchName}/schema/history",
|
2722
|
+
method: "post",
|
895
2723
|
...variables,
|
896
2724
|
signal
|
897
2725
|
});
|
898
|
-
const
|
899
|
-
url: "/db/{dbBranchName}/
|
2726
|
+
const compareBranchWithUserSchema = (variables, signal) => dataPlaneFetch({
|
2727
|
+
url: "/db/{dbBranchName}/schema/compare",
|
900
2728
|
method: "post",
|
901
2729
|
...variables,
|
902
2730
|
signal
|
903
2731
|
});
|
904
|
-
const
|
905
|
-
url: "/db/{dbBranchName}/
|
906
|
-
method: "
|
2732
|
+
const compareBranchSchemas = (variables, signal) => dataPlaneFetch({
|
2733
|
+
url: "/db/{dbBranchName}/schema/compare/{branchName}",
|
2734
|
+
method: "post",
|
907
2735
|
...variables,
|
908
2736
|
signal
|
909
2737
|
});
|
910
|
-
const
|
911
|
-
url: "/db/{dbBranchName}/
|
912
|
-
method: "
|
2738
|
+
const updateBranchSchema = (variables, signal) => dataPlaneFetch({
|
2739
|
+
url: "/db/{dbBranchName}/schema/update",
|
2740
|
+
method: "post",
|
913
2741
|
...variables,
|
914
2742
|
signal
|
915
2743
|
});
|
916
|
-
const
|
917
|
-
url: "/db/{dbBranchName}/
|
918
|
-
method: "
|
2744
|
+
const previewBranchSchemaEdit = (variables, signal) => dataPlaneFetch({
|
2745
|
+
url: "/db/{dbBranchName}/schema/preview",
|
2746
|
+
method: "post",
|
919
2747
|
...variables,
|
920
2748
|
signal
|
921
2749
|
});
|
922
|
-
const
|
923
|
-
|
924
|
-
|
925
|
-
const resolveBranch = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/resolveBranch", method: "get", ...variables, signal });
|
926
|
-
const getBranchMigrationHistory = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations", method: "get", ...variables, signal });
|
927
|
-
const getBranchMigrationPlan = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/plan", method: "post", ...variables, signal });
|
928
|
-
const executeBranchMigrationPlan = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/execute", method: "post", ...variables, signal });
|
929
|
-
const queryMigrationRequests = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/query", method: "post", ...variables, signal });
|
930
|
-
const createMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations", method: "post", ...variables, signal });
|
931
|
-
const getMigrationRequest = (variables, signal) => dataPlaneFetch({
|
932
|
-
url: "/dbs/{dbName}/migrations/{mrNumber}",
|
933
|
-
method: "get",
|
2750
|
+
const applyBranchSchemaEdit = (variables, signal) => dataPlaneFetch({
|
2751
|
+
url: "/db/{dbBranchName}/schema/apply",
|
2752
|
+
method: "post",
|
934
2753
|
...variables,
|
935
2754
|
signal
|
936
2755
|
});
|
937
|
-
const
|
938
|
-
|
939
|
-
const compareMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/{mrNumber}/compare", method: "post", ...variables, signal });
|
940
|
-
const getMigrationRequestIsMerged = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/{mrNumber}/merge", method: "get", ...variables, signal });
|
941
|
-
const mergeMigrationRequest = (variables, signal) => dataPlaneFetch({
|
942
|
-
url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
|
2756
|
+
const pushBranchMigrations = (variables, signal) => dataPlaneFetch({
|
2757
|
+
url: "/db/{dbBranchName}/schema/push",
|
943
2758
|
method: "post",
|
944
2759
|
...variables,
|
945
2760
|
signal
|
946
2761
|
});
|
947
|
-
const getBranchSchemaHistory = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/history", method: "post", ...variables, signal });
|
948
|
-
const compareBranchWithUserSchema = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/compare", method: "post", ...variables, signal });
|
949
|
-
const compareBranchSchemas = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/compare/{branchName}", method: "post", ...variables, signal });
|
950
|
-
const updateBranchSchema = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/update", method: "post", ...variables, signal });
|
951
|
-
const previewBranchSchemaEdit = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/preview", method: "post", ...variables, signal });
|
952
|
-
const applyBranchSchemaEdit = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/apply", method: "post", ...variables, signal });
|
953
|
-
const pushBranchMigrations = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/push", method: "post", ...variables, signal });
|
954
2762
|
const createTable = (variables, signal) => dataPlaneFetch({
|
955
2763
|
url: "/db/{dbBranchName}/tables/{tableName}",
|
956
2764
|
method: "put",
|
@@ -963,14 +2771,24 @@ const deleteTable = (variables, signal) => dataPlaneFetch({
|
|
963
2771
|
...variables,
|
964
2772
|
signal
|
965
2773
|
});
|
966
|
-
const updateTable = (variables, signal) => dataPlaneFetch({
|
2774
|
+
const updateTable = (variables, signal) => dataPlaneFetch({
|
2775
|
+
url: "/db/{dbBranchName}/tables/{tableName}",
|
2776
|
+
method: "patch",
|
2777
|
+
...variables,
|
2778
|
+
signal
|
2779
|
+
});
|
967
2780
|
const getTableSchema = (variables, signal) => dataPlaneFetch({
|
968
2781
|
url: "/db/{dbBranchName}/tables/{tableName}/schema",
|
969
2782
|
method: "get",
|
970
2783
|
...variables,
|
971
2784
|
signal
|
972
2785
|
});
|
973
|
-
const setTableSchema = (variables, signal) => dataPlaneFetch({
|
2786
|
+
const setTableSchema = (variables, signal) => dataPlaneFetch({
|
2787
|
+
url: "/db/{dbBranchName}/tables/{tableName}/schema",
|
2788
|
+
method: "put",
|
2789
|
+
...variables,
|
2790
|
+
signal
|
2791
|
+
});
|
974
2792
|
const getTableColumns = (variables, signal) => dataPlaneFetch({
|
975
2793
|
url: "/db/{dbBranchName}/tables/{tableName}/columns",
|
976
2794
|
method: "get",
|
@@ -978,7 +2796,12 @@ const getTableColumns = (variables, signal) => dataPlaneFetch({
|
|
978
2796
|
signal
|
979
2797
|
});
|
980
2798
|
const addTableColumn = (variables, signal) => dataPlaneFetch(
|
981
|
-
{
|
2799
|
+
{
|
2800
|
+
url: "/db/{dbBranchName}/tables/{tableName}/columns",
|
2801
|
+
method: "post",
|
2802
|
+
...variables,
|
2803
|
+
signal
|
2804
|
+
}
|
982
2805
|
);
|
983
2806
|
const getColumn = (variables, signal) => dataPlaneFetch({
|
984
2807
|
url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
|
@@ -986,15 +2809,30 @@ const getColumn = (variables, signal) => dataPlaneFetch({
|
|
986
2809
|
...variables,
|
987
2810
|
signal
|
988
2811
|
});
|
989
|
-
const updateColumn = (variables, signal) => dataPlaneFetch({
|
2812
|
+
const updateColumn = (variables, signal) => dataPlaneFetch({
|
2813
|
+
url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
|
2814
|
+
method: "patch",
|
2815
|
+
...variables,
|
2816
|
+
signal
|
2817
|
+
});
|
990
2818
|
const deleteColumn = (variables, signal) => dataPlaneFetch({
|
991
2819
|
url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
|
992
2820
|
method: "delete",
|
993
2821
|
...variables,
|
994
2822
|
signal
|
995
2823
|
});
|
996
|
-
const branchTransaction = (variables, signal) => dataPlaneFetch({
|
997
|
-
|
2824
|
+
const branchTransaction = (variables, signal) => dataPlaneFetch({
|
2825
|
+
url: "/db/{dbBranchName}/transaction",
|
2826
|
+
method: "post",
|
2827
|
+
...variables,
|
2828
|
+
signal
|
2829
|
+
});
|
2830
|
+
const insertRecord = (variables, signal) => dataPlaneFetch({
|
2831
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data",
|
2832
|
+
method: "post",
|
2833
|
+
...variables,
|
2834
|
+
signal
|
2835
|
+
});
|
998
2836
|
const getFileItem = (variables, signal) => dataPlaneFetch({
|
999
2837
|
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}/column/{columnName}/file/{fileId}",
|
1000
2838
|
method: "get",
|
@@ -1037,11 +2875,36 @@ const getRecord = (variables, signal) => dataPlaneFetch({
|
|
1037
2875
|
...variables,
|
1038
2876
|
signal
|
1039
2877
|
});
|
1040
|
-
const insertRecordWithID = (variables, signal) => dataPlaneFetch({
|
1041
|
-
|
1042
|
-
|
1043
|
-
|
1044
|
-
|
2878
|
+
const insertRecordWithID = (variables, signal) => dataPlaneFetch({
|
2879
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2880
|
+
method: "put",
|
2881
|
+
...variables,
|
2882
|
+
signal
|
2883
|
+
});
|
2884
|
+
const updateRecordWithID = (variables, signal) => dataPlaneFetch({
|
2885
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2886
|
+
method: "patch",
|
2887
|
+
...variables,
|
2888
|
+
signal
|
2889
|
+
});
|
2890
|
+
const upsertRecordWithID = (variables, signal) => dataPlaneFetch({
|
2891
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2892
|
+
method: "post",
|
2893
|
+
...variables,
|
2894
|
+
signal
|
2895
|
+
});
|
2896
|
+
const deleteRecord = (variables, signal) => dataPlaneFetch({
|
2897
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2898
|
+
method: "delete",
|
2899
|
+
...variables,
|
2900
|
+
signal
|
2901
|
+
});
|
2902
|
+
const bulkInsertTableRecords = (variables, signal) => dataPlaneFetch({
|
2903
|
+
url: "/db/{dbBranchName}/tables/{tableName}/bulk",
|
2904
|
+
method: "post",
|
2905
|
+
...variables,
|
2906
|
+
signal
|
2907
|
+
});
|
1045
2908
|
const queryTable = (variables, signal) => dataPlaneFetch({
|
1046
2909
|
url: "/db/{dbBranchName}/tables/{tableName}/query",
|
1047
2910
|
method: "post",
|
@@ -1060,16 +2923,36 @@ const searchTable = (variables, signal) => dataPlaneFetch({
|
|
1060
2923
|
...variables,
|
1061
2924
|
signal
|
1062
2925
|
});
|
1063
|
-
const vectorSearchTable = (variables, signal) => dataPlaneFetch({
|
2926
|
+
const vectorSearchTable = (variables, signal) => dataPlaneFetch({
|
2927
|
+
url: "/db/{dbBranchName}/tables/{tableName}/vectorSearch",
|
2928
|
+
method: "post",
|
2929
|
+
...variables,
|
2930
|
+
signal
|
2931
|
+
});
|
1064
2932
|
const askTable = (variables, signal) => dataPlaneFetch({
|
1065
2933
|
url: "/db/{dbBranchName}/tables/{tableName}/ask",
|
1066
2934
|
method: "post",
|
1067
2935
|
...variables,
|
1068
2936
|
signal
|
1069
2937
|
});
|
1070
|
-
const askTableSession = (variables, signal) => dataPlaneFetch({
|
1071
|
-
|
1072
|
-
|
2938
|
+
const askTableSession = (variables, signal) => dataPlaneFetch({
|
2939
|
+
url: "/db/{dbBranchName}/tables/{tableName}/ask/{sessionId}",
|
2940
|
+
method: "post",
|
2941
|
+
...variables,
|
2942
|
+
signal
|
2943
|
+
});
|
2944
|
+
const summarizeTable = (variables, signal) => dataPlaneFetch({
|
2945
|
+
url: "/db/{dbBranchName}/tables/{tableName}/summarize",
|
2946
|
+
method: "post",
|
2947
|
+
...variables,
|
2948
|
+
signal
|
2949
|
+
});
|
2950
|
+
const aggregateTable = (variables, signal) => dataPlaneFetch({
|
2951
|
+
url: "/db/{dbBranchName}/tables/{tableName}/aggregate",
|
2952
|
+
method: "post",
|
2953
|
+
...variables,
|
2954
|
+
signal
|
2955
|
+
});
|
1073
2956
|
const fileAccess = (variables, signal) => dataPlaneFetch({
|
1074
2957
|
url: "/file/{fileId}",
|
1075
2958
|
method: "get",
|
@@ -1088,15 +2971,33 @@ const sqlQuery = (variables, signal) => dataPlaneFetch({
|
|
1088
2971
|
...variables,
|
1089
2972
|
signal
|
1090
2973
|
});
|
2974
|
+
const sqlBatchQuery = (variables, signal) => dataPlaneFetch({
|
2975
|
+
url: "/db/{dbBranchName}/sql/batch",
|
2976
|
+
method: "post",
|
2977
|
+
...variables,
|
2978
|
+
signal
|
2979
|
+
});
|
1091
2980
|
const operationsByTag$2 = {
|
2981
|
+
cluster: {
|
2982
|
+
listClusterBranches,
|
2983
|
+
listClusterExtensions,
|
2984
|
+
installClusterExtension,
|
2985
|
+
dropClusterExtension,
|
2986
|
+
getClusterMetrics
|
2987
|
+
},
|
1092
2988
|
migrations: {
|
1093
2989
|
applyMigration,
|
2990
|
+
startMigration,
|
2991
|
+
completeMigration,
|
2992
|
+
rollbackMigration,
|
1094
2993
|
adaptTable,
|
1095
2994
|
adaptAllTables,
|
1096
2995
|
getBranchMigrationJobStatus,
|
2996
|
+
getMigrationJobs,
|
1097
2997
|
getMigrationJobStatus,
|
1098
2998
|
getMigrationHistory,
|
1099
2999
|
getSchema,
|
3000
|
+
getSchemas,
|
1100
3001
|
getBranchMigrationHistory,
|
1101
3002
|
getBranchMigrationPlan,
|
1102
3003
|
executeBranchMigrationPlan,
|
@@ -1114,6 +3015,8 @@ const operationsByTag$2 = {
|
|
1114
3015
|
createBranch,
|
1115
3016
|
deleteBranch,
|
1116
3017
|
copyBranch,
|
3018
|
+
getBranchMoveStatus,
|
3019
|
+
moveBranch,
|
1117
3020
|
updateBranchMetadata,
|
1118
3021
|
getBranchMetadata,
|
1119
3022
|
getBranchStats,
|
@@ -1155,7 +3058,16 @@ const operationsByTag$2 = {
|
|
1155
3058
|
deleteRecord,
|
1156
3059
|
bulkInsertTableRecords
|
1157
3060
|
},
|
1158
|
-
files: {
|
3061
|
+
files: {
|
3062
|
+
getFileItem,
|
3063
|
+
putFileItem,
|
3064
|
+
deleteFileItem,
|
3065
|
+
getFile,
|
3066
|
+
putFile,
|
3067
|
+
deleteFile,
|
3068
|
+
fileAccess,
|
3069
|
+
fileUpload
|
3070
|
+
},
|
1159
3071
|
searchAndFilter: {
|
1160
3072
|
queryTable,
|
1161
3073
|
searchBranch,
|
@@ -1166,7 +3078,7 @@ const operationsByTag$2 = {
|
|
1166
3078
|
summarizeTable,
|
1167
3079
|
aggregateTable
|
1168
3080
|
},
|
1169
|
-
sql: { sqlQuery }
|
3081
|
+
sql: { sqlQuery, sqlBatchQuery }
|
1170
3082
|
};
|
1171
3083
|
|
1172
3084
|
const controlPlaneFetch = async (options) => fetch$1({ ...options, endpoint: "controlPlane" });
|
@@ -1233,7 +3145,12 @@ const deleteOAuthAccessToken = (variables, signal) => controlPlaneFetch({
|
|
1233
3145
|
...variables,
|
1234
3146
|
signal
|
1235
3147
|
});
|
1236
|
-
const updateOAuthAccessToken = (variables, signal) => controlPlaneFetch({
|
3148
|
+
const updateOAuthAccessToken = (variables, signal) => controlPlaneFetch({
|
3149
|
+
url: "/user/oauth/tokens/{token}",
|
3150
|
+
method: "patch",
|
3151
|
+
...variables,
|
3152
|
+
signal
|
3153
|
+
});
|
1237
3154
|
const getWorkspacesList = (variables, signal) => controlPlaneFetch({
|
1238
3155
|
url: "/workspaces",
|
1239
3156
|
method: "get",
|
@@ -1264,49 +3181,150 @@ const deleteWorkspace = (variables, signal) => controlPlaneFetch({
|
|
1264
3181
|
...variables,
|
1265
3182
|
signal
|
1266
3183
|
});
|
1267
|
-
const getWorkspaceSettings = (variables, signal) => controlPlaneFetch({
|
1268
|
-
|
1269
|
-
|
1270
|
-
|
3184
|
+
const getWorkspaceSettings = (variables, signal) => controlPlaneFetch({
|
3185
|
+
url: "/workspaces/{workspaceId}/settings",
|
3186
|
+
method: "get",
|
3187
|
+
...variables,
|
3188
|
+
signal
|
3189
|
+
});
|
3190
|
+
const updateWorkspaceSettings = (variables, signal) => controlPlaneFetch({
|
3191
|
+
url: "/workspaces/{workspaceId}/settings",
|
3192
|
+
method: "patch",
|
3193
|
+
...variables,
|
3194
|
+
signal
|
3195
|
+
});
|
3196
|
+
const getWorkspaceMembersList = (variables, signal) => controlPlaneFetch({
|
3197
|
+
url: "/workspaces/{workspaceId}/members",
|
3198
|
+
method: "get",
|
3199
|
+
...variables,
|
3200
|
+
signal
|
3201
|
+
});
|
3202
|
+
const updateWorkspaceMemberRole = (variables, signal) => controlPlaneFetch({
|
3203
|
+
url: "/workspaces/{workspaceId}/members/{userId}",
|
3204
|
+
method: "put",
|
3205
|
+
...variables,
|
3206
|
+
signal
|
3207
|
+
});
|
1271
3208
|
const removeWorkspaceMember = (variables, signal) => controlPlaneFetch({
|
1272
3209
|
url: "/workspaces/{workspaceId}/members/{userId}",
|
1273
3210
|
method: "delete",
|
1274
3211
|
...variables,
|
1275
3212
|
signal
|
1276
3213
|
});
|
1277
|
-
const inviteWorkspaceMember = (variables, signal) => controlPlaneFetch({
|
1278
|
-
|
1279
|
-
|
1280
|
-
|
1281
|
-
|
1282
|
-
|
1283
|
-
const
|
3214
|
+
const inviteWorkspaceMember = (variables, signal) => controlPlaneFetch({
|
3215
|
+
url: "/workspaces/{workspaceId}/invites",
|
3216
|
+
method: "post",
|
3217
|
+
...variables,
|
3218
|
+
signal
|
3219
|
+
});
|
3220
|
+
const updateWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3221
|
+
url: "/workspaces/{workspaceId}/invites/{inviteId}",
|
3222
|
+
method: "patch",
|
3223
|
+
...variables,
|
3224
|
+
signal
|
3225
|
+
});
|
3226
|
+
const cancelWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3227
|
+
url: "/workspaces/{workspaceId}/invites/{inviteId}",
|
3228
|
+
method: "delete",
|
3229
|
+
...variables,
|
3230
|
+
signal
|
3231
|
+
});
|
3232
|
+
const acceptWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3233
|
+
url: "/workspaces/{workspaceId}/invites/{inviteKey}/accept",
|
3234
|
+
method: "post",
|
3235
|
+
...variables,
|
3236
|
+
signal
|
3237
|
+
});
|
3238
|
+
const resendWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3239
|
+
url: "/workspaces/{workspaceId}/invites/{inviteId}/resend",
|
3240
|
+
method: "post",
|
3241
|
+
...variables,
|
3242
|
+
signal
|
3243
|
+
});
|
3244
|
+
const listClusters = (variables, signal) => controlPlaneFetch({
|
3245
|
+
url: "/workspaces/{workspaceId}/clusters",
|
3246
|
+
method: "get",
|
3247
|
+
...variables,
|
3248
|
+
signal
|
3249
|
+
});
|
3250
|
+
const createCluster = (variables, signal) => controlPlaneFetch({
|
3251
|
+
url: "/workspaces/{workspaceId}/clusters",
|
3252
|
+
method: "post",
|
3253
|
+
...variables,
|
3254
|
+
signal
|
3255
|
+
});
|
1284
3256
|
const getCluster = (variables, signal) => controlPlaneFetch({
|
1285
3257
|
url: "/workspaces/{workspaceId}/clusters/{clusterId}",
|
1286
3258
|
method: "get",
|
1287
3259
|
...variables,
|
1288
3260
|
signal
|
1289
3261
|
});
|
1290
|
-
const updateCluster = (variables, signal) => controlPlaneFetch({
|
3262
|
+
const updateCluster = (variables, signal) => controlPlaneFetch({
|
3263
|
+
url: "/workspaces/{workspaceId}/clusters/{clusterId}",
|
3264
|
+
method: "patch",
|
3265
|
+
...variables,
|
3266
|
+
signal
|
3267
|
+
});
|
3268
|
+
const deleteCluster = (variables, signal) => controlPlaneFetch({
|
3269
|
+
url: "/workspaces/{workspaceId}/clusters/{clusterId}",
|
3270
|
+
method: "delete",
|
3271
|
+
...variables,
|
3272
|
+
signal
|
3273
|
+
});
|
1291
3274
|
const getDatabaseList = (variables, signal) => controlPlaneFetch({
|
1292
3275
|
url: "/workspaces/{workspaceId}/dbs",
|
1293
3276
|
method: "get",
|
1294
3277
|
...variables,
|
1295
3278
|
signal
|
1296
3279
|
});
|
1297
|
-
const createDatabase = (variables, signal) => controlPlaneFetch({
|
3280
|
+
const createDatabase = (variables, signal) => controlPlaneFetch({
|
3281
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
3282
|
+
method: "put",
|
3283
|
+
...variables,
|
3284
|
+
signal
|
3285
|
+
});
|
1298
3286
|
const deleteDatabase = (variables, signal) => controlPlaneFetch({
|
1299
3287
|
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
1300
3288
|
method: "delete",
|
1301
3289
|
...variables,
|
1302
3290
|
signal
|
1303
3291
|
});
|
1304
|
-
const getDatabaseMetadata = (variables, signal) => controlPlaneFetch({
|
1305
|
-
|
1306
|
-
|
1307
|
-
|
1308
|
-
|
1309
|
-
|
3292
|
+
const getDatabaseMetadata = (variables, signal) => controlPlaneFetch({
|
3293
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
3294
|
+
method: "get",
|
3295
|
+
...variables,
|
3296
|
+
signal
|
3297
|
+
});
|
3298
|
+
const updateDatabaseMetadata = (variables, signal) => controlPlaneFetch({
|
3299
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
3300
|
+
method: "patch",
|
3301
|
+
...variables,
|
3302
|
+
signal
|
3303
|
+
});
|
3304
|
+
const renameDatabase = (variables, signal) => controlPlaneFetch({
|
3305
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/rename",
|
3306
|
+
method: "post",
|
3307
|
+
...variables,
|
3308
|
+
signal
|
3309
|
+
});
|
3310
|
+
const getDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
|
3311
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
|
3312
|
+
method: "get",
|
3313
|
+
...variables,
|
3314
|
+
signal
|
3315
|
+
});
|
3316
|
+
const updateDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
|
3317
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
|
3318
|
+
method: "put",
|
3319
|
+
...variables,
|
3320
|
+
signal
|
3321
|
+
});
|
3322
|
+
const deleteDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
|
3323
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
|
3324
|
+
method: "delete",
|
3325
|
+
...variables,
|
3326
|
+
signal
|
3327
|
+
});
|
1310
3328
|
const listRegions = (variables, signal) => controlPlaneFetch({
|
1311
3329
|
url: "/workspaces/{workspaceId}/regions",
|
1312
3330
|
method: "get",
|
@@ -1344,7 +3362,13 @@ const operationsByTag$1 = {
|
|
1344
3362
|
acceptWorkspaceMemberInvite,
|
1345
3363
|
resendWorkspaceMemberInvite
|
1346
3364
|
},
|
1347
|
-
xbcontrolOther: {
|
3365
|
+
xbcontrolOther: {
|
3366
|
+
listClusters,
|
3367
|
+
createCluster,
|
3368
|
+
getCluster,
|
3369
|
+
updateCluster,
|
3370
|
+
deleteCluster
|
3371
|
+
},
|
1348
3372
|
databases: {
|
1349
3373
|
getDatabaseList,
|
1350
3374
|
createDatabase,
|
@@ -1364,7 +3388,7 @@ const operationsByTag = deepMerge(operationsByTag$2, operationsByTag$1);
|
|
1364
3388
|
const buildApiClient = () => class {
|
1365
3389
|
constructor(options = {}) {
|
1366
3390
|
const provider = options.host ?? "production";
|
1367
|
-
const apiKey = options.apiKey
|
3391
|
+
const apiKey = options.apiKey;
|
1368
3392
|
const trace = options.trace ?? defaultTrace;
|
1369
3393
|
const clientID = generateUUID();
|
1370
3394
|
if (!apiKey) {
|
@@ -1431,8 +3455,7 @@ function buildTransformString(transformations) {
|
|
1431
3455
|
).join(",");
|
1432
3456
|
}
|
1433
3457
|
function transformImage(url, ...transformations) {
|
1434
|
-
if (!isDefined(url))
|
1435
|
-
return void 0;
|
3458
|
+
if (!isDefined(url)) return void 0;
|
1436
3459
|
const newTransformations = buildTransformString(transformations);
|
1437
3460
|
const { hostname, pathname, search } = new URL(url);
|
1438
3461
|
const pathParts = pathname.split("/");
|
@@ -1545,8 +3568,7 @@ class XataFile {
|
|
1545
3568
|
}
|
1546
3569
|
}
|
1547
3570
|
const parseInputFileEntry = async (entry) => {
|
1548
|
-
if (!isDefined(entry))
|
1549
|
-
return null;
|
3571
|
+
if (!isDefined(entry)) return null;
|
1550
3572
|
const { id, name, mediaType, base64Content, enablePublicUrl, signedUrlTimeout, uploadUrlTimeout } = await entry;
|
1551
3573
|
return compactObject({
|
1552
3574
|
id,
|
@@ -1561,24 +3583,19 @@ const parseInputFileEntry = async (entry) => {
|
|
1561
3583
|
};
|
1562
3584
|
|
1563
3585
|
function cleanFilter(filter) {
|
1564
|
-
if (!isDefined(filter))
|
1565
|
-
|
1566
|
-
if (!isObject(filter))
|
1567
|
-
return filter;
|
3586
|
+
if (!isDefined(filter)) return void 0;
|
3587
|
+
if (!isObject(filter)) return filter;
|
1568
3588
|
const values = Object.fromEntries(
|
1569
3589
|
Object.entries(filter).reduce((acc, [key, value]) => {
|
1570
|
-
if (!isDefined(value))
|
1571
|
-
return acc;
|
3590
|
+
if (!isDefined(value)) return acc;
|
1572
3591
|
if (Array.isArray(value)) {
|
1573
3592
|
const clean = value.map((item) => cleanFilter(item)).filter((item) => isDefined(item));
|
1574
|
-
if (clean.length === 0)
|
1575
|
-
return acc;
|
3593
|
+
if (clean.length === 0) return acc;
|
1576
3594
|
return [...acc, [key, clean]];
|
1577
3595
|
}
|
1578
3596
|
if (isObject(value)) {
|
1579
3597
|
const clean = cleanFilter(value);
|
1580
|
-
if (!isDefined(clean))
|
1581
|
-
return acc;
|
3598
|
+
if (!isDefined(clean)) return acc;
|
1582
3599
|
return [...acc, [key, clean]];
|
1583
3600
|
}
|
1584
3601
|
return [...acc, [key, value]];
|
@@ -1588,10 +3605,8 @@ function cleanFilter(filter) {
|
|
1588
3605
|
}
|
1589
3606
|
|
1590
3607
|
function stringifyJson(value) {
|
1591
|
-
if (!isDefined(value))
|
1592
|
-
|
1593
|
-
if (isString(value))
|
1594
|
-
return value;
|
3608
|
+
if (!isDefined(value)) return value;
|
3609
|
+
if (isString(value)) return value;
|
1595
3610
|
try {
|
1596
3611
|
return JSON.stringify(value);
|
1597
3612
|
} catch (e) {
|
@@ -1606,28 +3621,17 @@ function parseJson(value) {
|
|
1606
3621
|
}
|
1607
3622
|
}
|
1608
3623
|
|
1609
|
-
var
|
1610
|
-
|
1611
|
-
throw TypeError("Cannot " + msg);
|
1612
|
-
};
|
1613
|
-
var __privateGet$4 = (obj, member, getter) => {
|
1614
|
-
__accessCheck$5(obj, member, "read from private field");
|
1615
|
-
return getter ? getter.call(obj) : member.get(obj);
|
1616
|
-
};
|
1617
|
-
var __privateAdd$5 = (obj, member, value) => {
|
1618
|
-
if (member.has(obj))
|
1619
|
-
throw TypeError("Cannot add the same private member more than once");
|
1620
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
1621
|
-
};
|
1622
|
-
var __privateSet$3 = (obj, member, value, setter) => {
|
1623
|
-
__accessCheck$5(obj, member, "write to private field");
|
1624
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
1625
|
-
return value;
|
3624
|
+
var __typeError$5 = (msg) => {
|
3625
|
+
throw TypeError(msg);
|
1626
3626
|
};
|
3627
|
+
var __accessCheck$5 = (obj, member, msg) => member.has(obj) || __typeError$5("Cannot " + msg);
|
3628
|
+
var __privateGet$4 = (obj, member, getter) => (__accessCheck$5(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
3629
|
+
var __privateAdd$5 = (obj, member, value) => member.has(obj) ? __typeError$5("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
3630
|
+
var __privateSet$3 = (obj, member, value, setter) => (__accessCheck$5(obj, member, "write to private field"), member.set(obj, value), value);
|
1627
3631
|
var _query, _page;
|
1628
3632
|
class Page {
|
1629
3633
|
constructor(query, meta, records = []) {
|
1630
|
-
__privateAdd$5(this, _query
|
3634
|
+
__privateAdd$5(this, _query);
|
1631
3635
|
__privateSet$3(this, _query, query);
|
1632
3636
|
this.meta = meta;
|
1633
3637
|
this.records = new PageRecordArray(this, records);
|
@@ -1714,7 +3718,7 @@ class RecordArray extends Array {
|
|
1714
3718
|
const _PageRecordArray = class _PageRecordArray extends Array {
|
1715
3719
|
constructor(...args) {
|
1716
3720
|
super(..._PageRecordArray.parseConstructorParams(...args));
|
1717
|
-
__privateAdd$5(this, _page
|
3721
|
+
__privateAdd$5(this, _page);
|
1718
3722
|
__privateSet$3(this, _page, isObject(args[0]?.meta) ? args[0] : { meta: { page: { cursor: "", more: false } }, records: [] });
|
1719
3723
|
}
|
1720
3724
|
static parseConstructorParams(...args) {
|
@@ -1785,34 +3789,20 @@ const _PageRecordArray = class _PageRecordArray extends Array {
|
|
1785
3789
|
_page = new WeakMap();
|
1786
3790
|
let PageRecordArray = _PageRecordArray;
|
1787
3791
|
|
1788
|
-
var
|
1789
|
-
|
1790
|
-
throw TypeError("Cannot " + msg);
|
1791
|
-
};
|
1792
|
-
var __privateGet$3 = (obj, member, getter) => {
|
1793
|
-
__accessCheck$4(obj, member, "read from private field");
|
1794
|
-
return getter ? getter.call(obj) : member.get(obj);
|
1795
|
-
};
|
1796
|
-
var __privateAdd$4 = (obj, member, value) => {
|
1797
|
-
if (member.has(obj))
|
1798
|
-
throw TypeError("Cannot add the same private member more than once");
|
1799
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
1800
|
-
};
|
1801
|
-
var __privateSet$2 = (obj, member, value, setter) => {
|
1802
|
-
__accessCheck$4(obj, member, "write to private field");
|
1803
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
1804
|
-
return value;
|
1805
|
-
};
|
1806
|
-
var __privateMethod$3 = (obj, member, method) => {
|
1807
|
-
__accessCheck$4(obj, member, "access private method");
|
1808
|
-
return method;
|
3792
|
+
var __typeError$4 = (msg) => {
|
3793
|
+
throw TypeError(msg);
|
1809
3794
|
};
|
1810
|
-
var
|
3795
|
+
var __accessCheck$4 = (obj, member, msg) => member.has(obj) || __typeError$4("Cannot " + msg);
|
3796
|
+
var __privateGet$3 = (obj, member, getter) => (__accessCheck$4(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
3797
|
+
var __privateAdd$4 = (obj, member, value) => member.has(obj) ? __typeError$4("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
3798
|
+
var __privateSet$2 = (obj, member, value, setter) => (__accessCheck$4(obj, member, "write to private field"), member.set(obj, value), value);
|
3799
|
+
var __privateMethod$3 = (obj, member, method) => (__accessCheck$4(obj, member, "access private method"), method);
|
3800
|
+
var _table$1, _repository, _data, _Query_instances, cleanFilterConstraint_fn;
|
1811
3801
|
const _Query = class _Query {
|
1812
3802
|
constructor(repository, table, data, rawParent) {
|
1813
|
-
__privateAdd$4(this,
|
1814
|
-
__privateAdd$4(this, _table$1
|
1815
|
-
__privateAdd$4(this, _repository
|
3803
|
+
__privateAdd$4(this, _Query_instances);
|
3804
|
+
__privateAdd$4(this, _table$1);
|
3805
|
+
__privateAdd$4(this, _repository);
|
1816
3806
|
__privateAdd$4(this, _data, { filter: {} });
|
1817
3807
|
// Implements pagination
|
1818
3808
|
this.meta = { page: { cursor: "start", more: true, size: PAGINATION_DEFAULT_SIZE } };
|
@@ -1890,12 +3880,12 @@ const _Query = class _Query {
|
|
1890
3880
|
filter(a, b) {
|
1891
3881
|
if (arguments.length === 1) {
|
1892
3882
|
const constraints = Object.entries(a ?? {}).map(([column, constraint]) => ({
|
1893
|
-
[column]: __privateMethod$3(this,
|
3883
|
+
[column]: __privateMethod$3(this, _Query_instances, cleanFilterConstraint_fn).call(this, column, constraint)
|
1894
3884
|
}));
|
1895
3885
|
const $all = compact([__privateGet$3(this, _data).filter?.$all].flat().concat(constraints));
|
1896
3886
|
return new _Query(__privateGet$3(this, _repository), __privateGet$3(this, _table$1), { filter: { $all } }, __privateGet$3(this, _data));
|
1897
3887
|
} else {
|
1898
|
-
const constraints = isDefined(a) && isDefined(b) ? [{ [a]: __privateMethod$3(this,
|
3888
|
+
const constraints = isDefined(a) && isDefined(b) ? [{ [a]: __privateMethod$3(this, _Query_instances, cleanFilterConstraint_fn).call(this, a, b) }] : void 0;
|
1899
3889
|
const $all = compact([__privateGet$3(this, _data).filter?.$all].flat().concat(constraints));
|
1900
3890
|
return new _Query(__privateGet$3(this, _repository), __privateGet$3(this, _table$1), { filter: { $all } }, __privateGet$3(this, _data));
|
1901
3891
|
}
|
@@ -1974,8 +3964,7 @@ const _Query = class _Query {
|
|
1974
3964
|
}
|
1975
3965
|
async getFirstOrThrow(options = {}) {
|
1976
3966
|
const records = await this.getMany({ ...options, pagination: { size: 1 } });
|
1977
|
-
if (records[0] === void 0)
|
1978
|
-
throw new Error("No results found.");
|
3967
|
+
if (records[0] === void 0) throw new Error("No results found.");
|
1979
3968
|
return records[0];
|
1980
3969
|
}
|
1981
3970
|
async summarize(params = {}) {
|
@@ -2030,7 +4019,7 @@ const _Query = class _Query {
|
|
2030
4019
|
_table$1 = new WeakMap();
|
2031
4020
|
_repository = new WeakMap();
|
2032
4021
|
_data = new WeakMap();
|
2033
|
-
|
4022
|
+
_Query_instances = new WeakSet();
|
2034
4023
|
cleanFilterConstraint_fn = function(column, value) {
|
2035
4024
|
const columnType = __privateGet$3(this, _table$1).schema?.columns.find(({ name }) => name === column)?.type;
|
2036
4025
|
if (columnType === "multiple" && (isString(value) || isStringArray(value))) {
|
@@ -2091,8 +4080,7 @@ function isSortFilterString(value) {
|
|
2091
4080
|
}
|
2092
4081
|
function isSortFilterBase(filter) {
|
2093
4082
|
return isObject(filter) && Object.entries(filter).every(([key, value]) => {
|
2094
|
-
if (key === "*")
|
2095
|
-
return value === "random";
|
4083
|
+
if (key === "*") return value === "random";
|
2096
4084
|
return value === "asc" || value === "desc";
|
2097
4085
|
});
|
2098
4086
|
}
|
@@ -2113,29 +4101,15 @@ function buildSortFilter(filter) {
|
|
2113
4101
|
}
|
2114
4102
|
}
|
2115
4103
|
|
2116
|
-
var
|
2117
|
-
|
2118
|
-
throw TypeError("Cannot " + msg);
|
4104
|
+
var __typeError$3 = (msg) => {
|
4105
|
+
throw TypeError(msg);
|
2119
4106
|
};
|
2120
|
-
var
|
2121
|
-
|
2122
|
-
|
2123
|
-
|
2124
|
-
var
|
2125
|
-
|
2126
|
-
throw TypeError("Cannot add the same private member more than once");
|
2127
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
2128
|
-
};
|
2129
|
-
var __privateSet$1 = (obj, member, value, setter) => {
|
2130
|
-
__accessCheck$3(obj, member, "write to private field");
|
2131
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
2132
|
-
return value;
|
2133
|
-
};
|
2134
|
-
var __privateMethod$2 = (obj, member, method) => {
|
2135
|
-
__accessCheck$3(obj, member, "access private method");
|
2136
|
-
return method;
|
2137
|
-
};
|
2138
|
-
var _table, _getFetchProps, _db, _schemaTables, _trace, _insertRecordWithoutId, insertRecordWithoutId_fn, _insertRecordWithId, insertRecordWithId_fn, _insertRecords, insertRecords_fn, _updateRecordWithID, updateRecordWithID_fn, _updateRecords, updateRecords_fn, _upsertRecordWithID, upsertRecordWithID_fn, _deleteRecord, deleteRecord_fn, _deleteRecords, deleteRecords_fn, _getSchemaTables, getSchemaTables_fn, _transformObjectToApi, transformObjectToApi_fn;
|
4107
|
+
var __accessCheck$3 = (obj, member, msg) => member.has(obj) || __typeError$3("Cannot " + msg);
|
4108
|
+
var __privateGet$2 = (obj, member, getter) => (__accessCheck$3(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
4109
|
+
var __privateAdd$3 = (obj, member, value) => member.has(obj) ? __typeError$3("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
4110
|
+
var __privateSet$1 = (obj, member, value, setter) => (__accessCheck$3(obj, member, "write to private field"), member.set(obj, value), value);
|
4111
|
+
var __privateMethod$2 = (obj, member, method) => (__accessCheck$3(obj, member, "access private method"), method);
|
4112
|
+
var _table, _getFetchProps, _db, _schemaTables, _trace, _RestRepository_instances, insertRecordWithoutId_fn, insertRecordWithId_fn, insertRecords_fn, updateRecordWithID_fn, updateRecords_fn, upsertRecordWithID_fn, deleteRecord_fn, deleteRecords_fn, getSchemaTables_fn, transformObjectToApi_fn;
|
2139
4113
|
const BULK_OPERATION_MAX_SIZE = 1e3;
|
2140
4114
|
class Repository extends Query {
|
2141
4115
|
}
|
@@ -2146,21 +4120,12 @@ class RestRepository extends Query {
|
|
2146
4120
|
{ name: options.table, schema: options.schemaTables?.find((table) => table.name === options.table) },
|
2147
4121
|
{}
|
2148
4122
|
);
|
2149
|
-
__privateAdd$3(this,
|
2150
|
-
__privateAdd$3(this,
|
2151
|
-
__privateAdd$3(this,
|
2152
|
-
__privateAdd$3(this,
|
2153
|
-
__privateAdd$3(this,
|
2154
|
-
__privateAdd$3(this,
|
2155
|
-
__privateAdd$3(this, _deleteRecord);
|
2156
|
-
__privateAdd$3(this, _deleteRecords);
|
2157
|
-
__privateAdd$3(this, _getSchemaTables);
|
2158
|
-
__privateAdd$3(this, _transformObjectToApi);
|
2159
|
-
__privateAdd$3(this, _table, void 0);
|
2160
|
-
__privateAdd$3(this, _getFetchProps, void 0);
|
2161
|
-
__privateAdd$3(this, _db, void 0);
|
2162
|
-
__privateAdd$3(this, _schemaTables, void 0);
|
2163
|
-
__privateAdd$3(this, _trace, void 0);
|
4123
|
+
__privateAdd$3(this, _RestRepository_instances);
|
4124
|
+
__privateAdd$3(this, _table);
|
4125
|
+
__privateAdd$3(this, _getFetchProps);
|
4126
|
+
__privateAdd$3(this, _db);
|
4127
|
+
__privateAdd$3(this, _schemaTables);
|
4128
|
+
__privateAdd$3(this, _trace);
|
2164
4129
|
__privateSet$1(this, _table, options.table);
|
2165
4130
|
__privateSet$1(this, _db, options.db);
|
2166
4131
|
__privateSet$1(this, _schemaTables, options.schemaTables);
|
@@ -2179,31 +4144,28 @@ class RestRepository extends Query {
|
|
2179
4144
|
return __privateGet$2(this, _trace).call(this, "create", async () => {
|
2180
4145
|
const ifVersion = parseIfVersion(b, c, d);
|
2181
4146
|
if (Array.isArray(a)) {
|
2182
|
-
if (a.length === 0)
|
2183
|
-
|
2184
|
-
const ids = await __privateMethod$2(this, _insertRecords, insertRecords_fn).call(this, a, { ifVersion, createOnly: true });
|
4147
|
+
if (a.length === 0) return [];
|
4148
|
+
const ids = await __privateMethod$2(this, _RestRepository_instances, insertRecords_fn).call(this, a, { ifVersion, createOnly: true });
|
2185
4149
|
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
2186
4150
|
const result = await this.read(ids, columns);
|
2187
4151
|
return result;
|
2188
4152
|
}
|
2189
4153
|
if (isString(a) && isObject(b)) {
|
2190
|
-
if (a === "")
|
2191
|
-
throw new Error("The id can't be empty");
|
4154
|
+
if (a === "") throw new Error("The id can't be empty");
|
2192
4155
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2193
|
-
return await __privateMethod$2(this,
|
4156
|
+
return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: true, ifVersion });
|
2194
4157
|
}
|
2195
4158
|
if (isObject(a) && isString(a.xata_id)) {
|
2196
|
-
if (a.xata_id === "")
|
2197
|
-
throw new Error("The id can't be empty");
|
4159
|
+
if (a.xata_id === "") throw new Error("The id can't be empty");
|
2198
4160
|
const columns = isValidSelectableColumns(b) ? b : void 0;
|
2199
|
-
return await __privateMethod$2(this,
|
4161
|
+
return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, {
|
2200
4162
|
createOnly: true,
|
2201
4163
|
ifVersion
|
2202
4164
|
});
|
2203
4165
|
}
|
2204
4166
|
if (isObject(a)) {
|
2205
4167
|
const columns = isValidSelectableColumns(b) ? b : void 0;
|
2206
|
-
return __privateMethod$2(this,
|
4168
|
+
return __privateMethod$2(this, _RestRepository_instances, insertRecordWithoutId_fn).call(this, a, columns);
|
2207
4169
|
}
|
2208
4170
|
throw new Error("Invalid arguments for create method");
|
2209
4171
|
});
|
@@ -2212,8 +4174,7 @@ class RestRepository extends Query {
|
|
2212
4174
|
return __privateGet$2(this, _trace).call(this, "read", async () => {
|
2213
4175
|
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
2214
4176
|
if (Array.isArray(a)) {
|
2215
|
-
if (a.length === 0)
|
2216
|
-
return [];
|
4177
|
+
if (a.length === 0) return [];
|
2217
4178
|
const ids = a.map((item) => extractId(item));
|
2218
4179
|
const finalObjects = await this.getAll({ filter: { xata_id: { $any: compact(ids) } }, columns });
|
2219
4180
|
const dictionary = finalObjects.reduce((acc, object) => {
|
@@ -2236,7 +4197,7 @@ class RestRepository extends Query {
|
|
2236
4197
|
queryParams: { columns },
|
2237
4198
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2238
4199
|
});
|
2239
|
-
const schemaTables = await __privateMethod$2(this,
|
4200
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2240
4201
|
return initObject(
|
2241
4202
|
__privateGet$2(this, _db),
|
2242
4203
|
schemaTables,
|
@@ -2277,11 +4238,10 @@ class RestRepository extends Query {
|
|
2277
4238
|
return __privateGet$2(this, _trace).call(this, "update", async () => {
|
2278
4239
|
const ifVersion = parseIfVersion(b, c, d);
|
2279
4240
|
if (Array.isArray(a)) {
|
2280
|
-
if (a.length === 0)
|
2281
|
-
return [];
|
4241
|
+
if (a.length === 0) return [];
|
2282
4242
|
const existing = await this.read(a, ["xata_id"]);
|
2283
4243
|
const updates = a.filter((_item, index) => existing[index] !== null);
|
2284
|
-
await __privateMethod$2(this,
|
4244
|
+
await __privateMethod$2(this, _RestRepository_instances, updateRecords_fn).call(this, updates, {
|
2285
4245
|
ifVersion,
|
2286
4246
|
upsert: false
|
2287
4247
|
});
|
@@ -2292,15 +4252,14 @@ class RestRepository extends Query {
|
|
2292
4252
|
try {
|
2293
4253
|
if (isString(a) && isObject(b)) {
|
2294
4254
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2295
|
-
return await __privateMethod$2(this,
|
4255
|
+
return await __privateMethod$2(this, _RestRepository_instances, updateRecordWithID_fn).call(this, a, b, columns, { ifVersion });
|
2296
4256
|
}
|
2297
4257
|
if (isObject(a) && isString(a.xata_id)) {
|
2298
4258
|
const columns = isValidSelectableColumns(b) ? b : void 0;
|
2299
|
-
return await __privateMethod$2(this,
|
4259
|
+
return await __privateMethod$2(this, _RestRepository_instances, updateRecordWithID_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, { ifVersion });
|
2300
4260
|
}
|
2301
4261
|
} catch (error) {
|
2302
|
-
if (error.status === 422)
|
2303
|
-
return null;
|
4262
|
+
if (error.status === 422) return null;
|
2304
4263
|
throw error;
|
2305
4264
|
}
|
2306
4265
|
throw new Error("Invalid arguments for update method");
|
@@ -2329,9 +4288,8 @@ class RestRepository extends Query {
|
|
2329
4288
|
return __privateGet$2(this, _trace).call(this, "createOrUpdate", async () => {
|
2330
4289
|
const ifVersion = parseIfVersion(b, c, d);
|
2331
4290
|
if (Array.isArray(a)) {
|
2332
|
-
if (a.length === 0)
|
2333
|
-
|
2334
|
-
await __privateMethod$2(this, _updateRecords, updateRecords_fn).call(this, a, {
|
4291
|
+
if (a.length === 0) return [];
|
4292
|
+
await __privateMethod$2(this, _RestRepository_instances, updateRecords_fn).call(this, a, {
|
2335
4293
|
ifVersion,
|
2336
4294
|
upsert: true
|
2337
4295
|
});
|
@@ -2340,16 +4298,14 @@ class RestRepository extends Query {
|
|
2340
4298
|
return result;
|
2341
4299
|
}
|
2342
4300
|
if (isString(a) && isObject(b)) {
|
2343
|
-
if (a === "")
|
2344
|
-
throw new Error("The id can't be empty");
|
4301
|
+
if (a === "") throw new Error("The id can't be empty");
|
2345
4302
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2346
|
-
return await __privateMethod$2(this,
|
4303
|
+
return await __privateMethod$2(this, _RestRepository_instances, upsertRecordWithID_fn).call(this, a, b, columns, { ifVersion });
|
2347
4304
|
}
|
2348
4305
|
if (isObject(a) && isString(a.xata_id)) {
|
2349
|
-
if (a.xata_id === "")
|
2350
|
-
throw new Error("The id can't be empty");
|
4306
|
+
if (a.xata_id === "") throw new Error("The id can't be empty");
|
2351
4307
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2352
|
-
return await __privateMethod$2(this,
|
4308
|
+
return await __privateMethod$2(this, _RestRepository_instances, upsertRecordWithID_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, { ifVersion });
|
2353
4309
|
}
|
2354
4310
|
if (!isDefined(a) && isObject(b)) {
|
2355
4311
|
return await this.create(b, c);
|
@@ -2364,24 +4320,21 @@ class RestRepository extends Query {
|
|
2364
4320
|
return __privateGet$2(this, _trace).call(this, "createOrReplace", async () => {
|
2365
4321
|
const ifVersion = parseIfVersion(b, c, d);
|
2366
4322
|
if (Array.isArray(a)) {
|
2367
|
-
if (a.length === 0)
|
2368
|
-
|
2369
|
-
const ids = await __privateMethod$2(this, _insertRecords, insertRecords_fn).call(this, a, { ifVersion, createOnly: false });
|
4323
|
+
if (a.length === 0) return [];
|
4324
|
+
const ids = await __privateMethod$2(this, _RestRepository_instances, insertRecords_fn).call(this, a, { ifVersion, createOnly: false });
|
2370
4325
|
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
2371
4326
|
const result = await this.read(ids, columns);
|
2372
4327
|
return result;
|
2373
4328
|
}
|
2374
4329
|
if (isString(a) && isObject(b)) {
|
2375
|
-
if (a === "")
|
2376
|
-
throw new Error("The id can't be empty");
|
4330
|
+
if (a === "") throw new Error("The id can't be empty");
|
2377
4331
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2378
|
-
return await __privateMethod$2(this,
|
4332
|
+
return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: false, ifVersion });
|
2379
4333
|
}
|
2380
4334
|
if (isObject(a) && isString(a.xata_id)) {
|
2381
|
-
if (a.xata_id === "")
|
2382
|
-
throw new Error("The id can't be empty");
|
4335
|
+
if (a.xata_id === "") throw new Error("The id can't be empty");
|
2383
4336
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2384
|
-
return await __privateMethod$2(this,
|
4337
|
+
return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, {
|
2385
4338
|
createOnly: false,
|
2386
4339
|
ifVersion
|
2387
4340
|
});
|
@@ -2398,25 +4351,22 @@ class RestRepository extends Query {
|
|
2398
4351
|
async delete(a, b) {
|
2399
4352
|
return __privateGet$2(this, _trace).call(this, "delete", async () => {
|
2400
4353
|
if (Array.isArray(a)) {
|
2401
|
-
if (a.length === 0)
|
2402
|
-
return [];
|
4354
|
+
if (a.length === 0) return [];
|
2403
4355
|
const ids = a.map((o) => {
|
2404
|
-
if (isString(o))
|
2405
|
-
|
2406
|
-
if (isString(o.xata_id))
|
2407
|
-
return o.xata_id;
|
4356
|
+
if (isString(o)) return o;
|
4357
|
+
if (isString(o.xata_id)) return o.xata_id;
|
2408
4358
|
throw new Error("Invalid arguments for delete method");
|
2409
4359
|
});
|
2410
4360
|
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
2411
4361
|
const result = await this.read(a, columns);
|
2412
|
-
await __privateMethod$2(this,
|
4362
|
+
await __privateMethod$2(this, _RestRepository_instances, deleteRecords_fn).call(this, ids);
|
2413
4363
|
return result;
|
2414
4364
|
}
|
2415
4365
|
if (isString(a)) {
|
2416
|
-
return __privateMethod$2(this,
|
4366
|
+
return __privateMethod$2(this, _RestRepository_instances, deleteRecord_fn).call(this, a, b);
|
2417
4367
|
}
|
2418
4368
|
if (isObject(a) && isString(a.xata_id)) {
|
2419
|
-
return __privateMethod$2(this,
|
4369
|
+
return __privateMethod$2(this, _RestRepository_instances, deleteRecord_fn).call(this, a.xata_id, b);
|
2420
4370
|
}
|
2421
4371
|
throw new Error("Invalid arguments for delete method");
|
2422
4372
|
});
|
@@ -2460,7 +4410,7 @@ class RestRepository extends Query {
|
|
2460
4410
|
},
|
2461
4411
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2462
4412
|
});
|
2463
|
-
const schemaTables = await __privateMethod$2(this,
|
4413
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2464
4414
|
return {
|
2465
4415
|
records: records.map((item) => initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), item, ["*"])),
|
2466
4416
|
totalCount
|
@@ -2485,7 +4435,7 @@ class RestRepository extends Query {
|
|
2485
4435
|
},
|
2486
4436
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2487
4437
|
});
|
2488
|
-
const schemaTables = await __privateMethod$2(this,
|
4438
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2489
4439
|
return {
|
2490
4440
|
records: records.map((item) => initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), item, ["*"])),
|
2491
4441
|
totalCount
|
@@ -2527,7 +4477,7 @@ class RestRepository extends Query {
|
|
2527
4477
|
fetchOptions: data.fetchOptions,
|
2528
4478
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2529
4479
|
});
|
2530
|
-
const schemaTables = await __privateMethod$2(this,
|
4480
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2531
4481
|
const records = objects.map(
|
2532
4482
|
(record) => initObject(
|
2533
4483
|
__privateGet$2(this, _db),
|
@@ -2561,7 +4511,7 @@ class RestRepository extends Query {
|
|
2561
4511
|
},
|
2562
4512
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2563
4513
|
});
|
2564
|
-
const schemaTables = await __privateMethod$2(this,
|
4514
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2565
4515
|
return {
|
2566
4516
|
...result,
|
2567
4517
|
summaries: result.summaries.map(
|
@@ -2609,9 +4559,9 @@ _getFetchProps = new WeakMap();
|
|
2609
4559
|
_db = new WeakMap();
|
2610
4560
|
_schemaTables = new WeakMap();
|
2611
4561
|
_trace = new WeakMap();
|
2612
|
-
|
4562
|
+
_RestRepository_instances = new WeakSet();
|
2613
4563
|
insertRecordWithoutId_fn = async function(object, columns = ["*"]) {
|
2614
|
-
const record = await __privateMethod$2(this,
|
4564
|
+
const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2615
4565
|
const response = await insertRecord({
|
2616
4566
|
pathParams: {
|
2617
4567
|
workspace: "{workspaceId}",
|
@@ -2623,14 +4573,12 @@ insertRecordWithoutId_fn = async function(object, columns = ["*"]) {
|
|
2623
4573
|
body: record,
|
2624
4574
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2625
4575
|
});
|
2626
|
-
const schemaTables = await __privateMethod$2(this,
|
4576
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2627
4577
|
return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
|
2628
4578
|
};
|
2629
|
-
_insertRecordWithId = new WeakSet();
|
2630
4579
|
insertRecordWithId_fn = async function(recordId, object, columns = ["*"], { createOnly, ifVersion }) {
|
2631
|
-
if (!recordId)
|
2632
|
-
|
2633
|
-
const record = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
|
4580
|
+
if (!recordId) return null;
|
4581
|
+
const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2634
4582
|
const response = await insertRecordWithID({
|
2635
4583
|
pathParams: {
|
2636
4584
|
workspace: "{workspaceId}",
|
@@ -2643,13 +4591,12 @@ insertRecordWithId_fn = async function(recordId, object, columns = ["*"], { crea
|
|
2643
4591
|
queryParams: { createOnly, columns, ifVersion },
|
2644
4592
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2645
4593
|
});
|
2646
|
-
const schemaTables = await __privateMethod$2(this,
|
4594
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2647
4595
|
return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
|
2648
4596
|
};
|
2649
|
-
_insertRecords = new WeakSet();
|
2650
4597
|
insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
|
2651
4598
|
const operations = await promiseMap(objects, async (object) => {
|
2652
|
-
const record = await __privateMethod$2(this,
|
4599
|
+
const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2653
4600
|
return { insert: { table: __privateGet$2(this, _table), record, createOnly, ifVersion } };
|
2654
4601
|
});
|
2655
4602
|
const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
|
@@ -2674,11 +4621,9 @@ insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
|
|
2674
4621
|
}
|
2675
4622
|
return ids;
|
2676
4623
|
};
|
2677
|
-
_updateRecordWithID = new WeakSet();
|
2678
4624
|
updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVersion }) {
|
2679
|
-
if (!recordId)
|
2680
|
-
|
2681
|
-
const { xata_id: _id, ...record } = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
|
4625
|
+
if (!recordId) return null;
|
4626
|
+
const { xata_id: _id, ...record } = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2682
4627
|
try {
|
2683
4628
|
const response = await updateRecordWithID({
|
2684
4629
|
pathParams: {
|
@@ -2692,7 +4637,7 @@ updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
|
|
2692
4637
|
body: record,
|
2693
4638
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2694
4639
|
});
|
2695
|
-
const schemaTables = await __privateMethod$2(this,
|
4640
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2696
4641
|
return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
|
2697
4642
|
} catch (e) {
|
2698
4643
|
if (isObject(e) && e.status === 404) {
|
@@ -2701,10 +4646,9 @@ updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
|
|
2701
4646
|
throw e;
|
2702
4647
|
}
|
2703
4648
|
};
|
2704
|
-
_updateRecords = new WeakSet();
|
2705
4649
|
updateRecords_fn = async function(objects, { ifVersion, upsert }) {
|
2706
4650
|
const operations = await promiseMap(objects, async ({ xata_id, ...object }) => {
|
2707
|
-
const fields = await __privateMethod$2(this,
|
4651
|
+
const fields = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2708
4652
|
return { update: { table: __privateGet$2(this, _table), id: xata_id, ifVersion, upsert, fields } };
|
2709
4653
|
});
|
2710
4654
|
const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
|
@@ -2729,10 +4673,8 @@ updateRecords_fn = async function(objects, { ifVersion, upsert }) {
|
|
2729
4673
|
}
|
2730
4674
|
return ids;
|
2731
4675
|
};
|
2732
|
-
_upsertRecordWithID = new WeakSet();
|
2733
4676
|
upsertRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVersion }) {
|
2734
|
-
if (!recordId)
|
2735
|
-
return null;
|
4677
|
+
if (!recordId) return null;
|
2736
4678
|
const response = await upsertRecordWithID({
|
2737
4679
|
pathParams: {
|
2738
4680
|
workspace: "{workspaceId}",
|
@@ -2745,13 +4687,11 @@ upsertRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
|
|
2745
4687
|
body: object,
|
2746
4688
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2747
4689
|
});
|
2748
|
-
const schemaTables = await __privateMethod$2(this,
|
4690
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2749
4691
|
return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
|
2750
4692
|
};
|
2751
|
-
_deleteRecord = new WeakSet();
|
2752
4693
|
deleteRecord_fn = async function(recordId, columns = ["*"]) {
|
2753
|
-
if (!recordId)
|
2754
|
-
return null;
|
4694
|
+
if (!recordId) return null;
|
2755
4695
|
try {
|
2756
4696
|
const response = await deleteRecord({
|
2757
4697
|
pathParams: {
|
@@ -2764,7 +4704,7 @@ deleteRecord_fn = async function(recordId, columns = ["*"]) {
|
|
2764
4704
|
queryParams: { columns },
|
2765
4705
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2766
4706
|
});
|
2767
|
-
const schemaTables = await __privateMethod$2(this,
|
4707
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2768
4708
|
return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
|
2769
4709
|
} catch (e) {
|
2770
4710
|
if (isObject(e) && e.status === 404) {
|
@@ -2773,7 +4713,6 @@ deleteRecord_fn = async function(recordId, columns = ["*"]) {
|
|
2773
4713
|
throw e;
|
2774
4714
|
}
|
2775
4715
|
};
|
2776
|
-
_deleteRecords = new WeakSet();
|
2777
4716
|
deleteRecords_fn = async function(recordIds) {
|
2778
4717
|
const chunkedOperations = chunk(
|
2779
4718
|
compact(recordIds).map((id) => ({ delete: { table: __privateGet$2(this, _table), id } })),
|
@@ -2791,10 +4730,8 @@ deleteRecords_fn = async function(recordIds) {
|
|
2791
4730
|
});
|
2792
4731
|
}
|
2793
4732
|
};
|
2794
|
-
_getSchemaTables = new WeakSet();
|
2795
4733
|
getSchemaTables_fn = async function() {
|
2796
|
-
if (__privateGet$2(this, _schemaTables))
|
2797
|
-
return __privateGet$2(this, _schemaTables);
|
4734
|
+
if (__privateGet$2(this, _schemaTables)) return __privateGet$2(this, _schemaTables);
|
2798
4735
|
const { schema } = await getBranchDetails({
|
2799
4736
|
pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
|
2800
4737
|
...__privateGet$2(this, _getFetchProps).call(this)
|
@@ -2802,16 +4739,13 @@ getSchemaTables_fn = async function() {
|
|
2802
4739
|
__privateSet$1(this, _schemaTables, schema.tables);
|
2803
4740
|
return schema.tables;
|
2804
4741
|
};
|
2805
|
-
_transformObjectToApi = new WeakSet();
|
2806
4742
|
transformObjectToApi_fn = async function(object) {
|
2807
|
-
const schemaTables = await __privateMethod$2(this,
|
4743
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2808
4744
|
const schema = schemaTables.find((table) => table.name === __privateGet$2(this, _table));
|
2809
|
-
if (!schema)
|
2810
|
-
throw new Error(`Table ${__privateGet$2(this, _table)} not found in schema`);
|
4745
|
+
if (!schema) throw new Error(`Table ${__privateGet$2(this, _table)} not found in schema`);
|
2811
4746
|
const result = {};
|
2812
4747
|
for (const [key, value] of Object.entries(object)) {
|
2813
|
-
if (["xata_version", "xata_createdat", "xata_updatedat"].includes(key))
|
2814
|
-
continue;
|
4748
|
+
if (["xata_version", "xata_createdat", "xata_updatedat"].includes(key)) continue;
|
2815
4749
|
const type = schema.columns.find((column) => column.name === key)?.type;
|
2816
4750
|
switch (type) {
|
2817
4751
|
case "link": {
|
@@ -2841,11 +4775,9 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
|
|
2841
4775
|
const data = {};
|
2842
4776
|
Object.assign(data, { ...object });
|
2843
4777
|
const { columns } = schemaTables.find(({ name }) => name === table) ?? {};
|
2844
|
-
if (!columns)
|
2845
|
-
console.error(`Table ${table} not found in schema`);
|
4778
|
+
if (!columns) console.error(`Table ${table} not found in schema`);
|
2846
4779
|
for (const column of columns ?? []) {
|
2847
|
-
if (!isValidColumn(selectedColumns, column))
|
2848
|
-
continue;
|
4780
|
+
if (!isValidColumn(selectedColumns, column)) continue;
|
2849
4781
|
const value = data[column.name];
|
2850
4782
|
switch (column.type) {
|
2851
4783
|
case "datetime": {
|
@@ -2931,15 +4863,12 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
|
|
2931
4863
|
return record;
|
2932
4864
|
};
|
2933
4865
|
function extractId(value) {
|
2934
|
-
if (isString(value))
|
2935
|
-
|
2936
|
-
if (isObject(value) && isString(value.xata_id))
|
2937
|
-
return value.xata_id;
|
4866
|
+
if (isString(value)) return value;
|
4867
|
+
if (isObject(value) && isString(value.xata_id)) return value.xata_id;
|
2938
4868
|
return void 0;
|
2939
4869
|
}
|
2940
4870
|
function isValidColumn(columns, column) {
|
2941
|
-
if (columns.includes("*"))
|
2942
|
-
return true;
|
4871
|
+
if (columns.includes("*")) return true;
|
2943
4872
|
return columns.filter((item) => isString(item) && item.startsWith(column.name)).length > 0;
|
2944
4873
|
}
|
2945
4874
|
function parseIfVersion(...args) {
|
@@ -2979,19 +4908,12 @@ const includesAll = (value) => ({ $includesAll: value });
|
|
2979
4908
|
const includesNone = (value) => ({ $includesNone: value });
|
2980
4909
|
const includesAny = (value) => ({ $includesAny: value });
|
2981
4910
|
|
2982
|
-
var
|
2983
|
-
|
2984
|
-
throw TypeError("Cannot " + msg);
|
2985
|
-
};
|
2986
|
-
var __privateGet$1 = (obj, member, getter) => {
|
2987
|
-
__accessCheck$2(obj, member, "read from private field");
|
2988
|
-
return getter ? getter.call(obj) : member.get(obj);
|
2989
|
-
};
|
2990
|
-
var __privateAdd$2 = (obj, member, value) => {
|
2991
|
-
if (member.has(obj))
|
2992
|
-
throw TypeError("Cannot add the same private member more than once");
|
2993
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
4911
|
+
var __typeError$2 = (msg) => {
|
4912
|
+
throw TypeError(msg);
|
2994
4913
|
};
|
4914
|
+
var __accessCheck$2 = (obj, member, msg) => member.has(obj) || __typeError$2("Cannot " + msg);
|
4915
|
+
var __privateGet$1 = (obj, member, getter) => (__accessCheck$2(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
4916
|
+
var __privateAdd$2 = (obj, member, value) => member.has(obj) ? __typeError$2("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
2995
4917
|
var _tables;
|
2996
4918
|
class SchemaPlugin extends XataPlugin {
|
2997
4919
|
constructor() {
|
@@ -3003,8 +4925,7 @@ class SchemaPlugin extends XataPlugin {
|
|
3003
4925
|
{},
|
3004
4926
|
{
|
3005
4927
|
get: (_target, table) => {
|
3006
|
-
if (!isString(table))
|
3007
|
-
throw new Error("Invalid table name");
|
4928
|
+
if (!isString(table)) throw new Error("Invalid table name");
|
3008
4929
|
if (__privateGet$1(this, _tables)[table] === void 0) {
|
3009
4930
|
__privateGet$1(this, _tables)[table] = new RestRepository({ db, pluginOptions, table, schemaTables: pluginOptions.tables });
|
3010
4931
|
}
|
@@ -3095,30 +5016,23 @@ function getContentType(file) {
|
|
3095
5016
|
return "application/octet-stream";
|
3096
5017
|
}
|
3097
5018
|
|
3098
|
-
var
|
3099
|
-
|
3100
|
-
throw TypeError("Cannot " + msg);
|
3101
|
-
};
|
3102
|
-
var __privateAdd$1 = (obj, member, value) => {
|
3103
|
-
if (member.has(obj))
|
3104
|
-
throw TypeError("Cannot add the same private member more than once");
|
3105
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
5019
|
+
var __typeError$1 = (msg) => {
|
5020
|
+
throw TypeError(msg);
|
3106
5021
|
};
|
3107
|
-
var
|
3108
|
-
|
3109
|
-
|
3110
|
-
|
3111
|
-
var _search, search_fn;
|
5022
|
+
var __accessCheck$1 = (obj, member, msg) => member.has(obj) || __typeError$1("Cannot " + msg);
|
5023
|
+
var __privateAdd$1 = (obj, member, value) => member.has(obj) ? __typeError$1("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
5024
|
+
var __privateMethod$1 = (obj, member, method) => (__accessCheck$1(obj, member, "access private method"), method);
|
5025
|
+
var _SearchPlugin_instances, search_fn;
|
3112
5026
|
class SearchPlugin extends XataPlugin {
|
3113
5027
|
constructor(db) {
|
3114
5028
|
super();
|
3115
5029
|
this.db = db;
|
3116
|
-
__privateAdd$1(this,
|
5030
|
+
__privateAdd$1(this, _SearchPlugin_instances);
|
3117
5031
|
}
|
3118
5032
|
build(pluginOptions) {
|
3119
5033
|
return {
|
3120
5034
|
all: async (query, options = {}) => {
|
3121
|
-
const { records, totalCount } = await __privateMethod$1(this,
|
5035
|
+
const { records, totalCount } = await __privateMethod$1(this, _SearchPlugin_instances, search_fn).call(this, query, options, pluginOptions);
|
3122
5036
|
return {
|
3123
5037
|
totalCount,
|
3124
5038
|
records: records.map((record) => {
|
@@ -3128,7 +5042,7 @@ class SearchPlugin extends XataPlugin {
|
|
3128
5042
|
};
|
3129
5043
|
},
|
3130
5044
|
byTable: async (query, options = {}) => {
|
3131
|
-
const { records: rawRecords, totalCount } = await __privateMethod$1(this,
|
5045
|
+
const { records: rawRecords, totalCount } = await __privateMethod$1(this, _SearchPlugin_instances, search_fn).call(this, query, options, pluginOptions);
|
3132
5046
|
const records = rawRecords.reduce((acc, record) => {
|
3133
5047
|
const table = record.xata_table;
|
3134
5048
|
const items = acc[table] ?? [];
|
@@ -3140,7 +5054,7 @@ class SearchPlugin extends XataPlugin {
|
|
3140
5054
|
};
|
3141
5055
|
}
|
3142
5056
|
}
|
3143
|
-
|
5057
|
+
_SearchPlugin_instances = new WeakSet();
|
3144
5058
|
search_fn = async function(query, options, pluginOptions) {
|
3145
5059
|
const { tables, fuzziness, highlight, prefix, page } = options ?? {};
|
3146
5060
|
const { records, totalCount } = await searchBranch({
|
@@ -3176,8 +5090,7 @@ function arrayString(val) {
|
|
3176
5090
|
return result;
|
3177
5091
|
}
|
3178
5092
|
function prepareValue(value) {
|
3179
|
-
if (!isDefined(value))
|
3180
|
-
return null;
|
5093
|
+
if (!isDefined(value)) return null;
|
3181
5094
|
if (value instanceof Date) {
|
3182
5095
|
return value.toISOString();
|
3183
5096
|
}
|
@@ -3217,19 +5130,28 @@ class SQLPlugin extends XataPlugin {
|
|
3217
5130
|
throw new Error("Invalid usage of `xata.sql`. Please use it as a tagged template or with an object.");
|
3218
5131
|
}
|
3219
5132
|
const { statement, params, consistency, responseType } = prepareParams(query, parameters);
|
3220
|
-
const {
|
3221
|
-
records,
|
3222
|
-
rows,
|
3223
|
-
warning,
|
3224
|
-
columns = []
|
3225
|
-
} = await sqlQuery({
|
5133
|
+
const { warning, columns, ...response } = await sqlQuery({
|
3226
5134
|
pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
|
3227
5135
|
body: { statement, params, consistency, responseType },
|
3228
5136
|
...pluginOptions
|
3229
5137
|
});
|
5138
|
+
const records = "records" in response ? response.records : void 0;
|
5139
|
+
const rows = "rows" in response ? response.rows : void 0;
|
3230
5140
|
return { records, rows, warning, columns };
|
3231
5141
|
};
|
3232
5142
|
sqlFunction.connectionString = buildConnectionString(pluginOptions);
|
5143
|
+
sqlFunction.batch = async (query) => {
|
5144
|
+
const { results } = await sqlBatchQuery({
|
5145
|
+
pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
|
5146
|
+
body: {
|
5147
|
+
statements: query.statements.map(({ statement, params }) => ({ statement, params })),
|
5148
|
+
consistency: query.consistency,
|
5149
|
+
responseType: query.responseType
|
5150
|
+
},
|
5151
|
+
...pluginOptions
|
5152
|
+
});
|
5153
|
+
return { results };
|
5154
|
+
};
|
3233
5155
|
return sqlFunction;
|
3234
5156
|
}
|
3235
5157
|
}
|
@@ -3256,8 +5178,7 @@ function buildDomain(host, region) {
|
|
3256
5178
|
function buildConnectionString({ apiKey, workspacesApiUrl, branch }) {
|
3257
5179
|
const url = isString(workspacesApiUrl) ? workspacesApiUrl : workspacesApiUrl("", {});
|
3258
5180
|
const parts = parseWorkspacesUrlParts(url);
|
3259
|
-
if (!parts)
|
3260
|
-
throw new Error("Invalid workspaces URL");
|
5181
|
+
if (!parts) throw new Error("Invalid workspaces URL");
|
3261
5182
|
const { workspace: workspaceSlug, region, database, host } = parts;
|
3262
5183
|
const domain = buildDomain(host, region);
|
3263
5184
|
const workspace = workspaceSlug.split("-").pop();
|
@@ -3282,39 +5203,24 @@ class TransactionPlugin extends XataPlugin {
|
|
3282
5203
|
}
|
3283
5204
|
}
|
3284
5205
|
|
3285
|
-
var
|
3286
|
-
|
3287
|
-
throw TypeError("Cannot " + msg);
|
3288
|
-
};
|
3289
|
-
var __privateGet = (obj, member, getter) => {
|
3290
|
-
__accessCheck(obj, member, "read from private field");
|
3291
|
-
return getter ? getter.call(obj) : member.get(obj);
|
3292
|
-
};
|
3293
|
-
var __privateAdd = (obj, member, value) => {
|
3294
|
-
if (member.has(obj))
|
3295
|
-
throw TypeError("Cannot add the same private member more than once");
|
3296
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
3297
|
-
};
|
3298
|
-
var __privateSet = (obj, member, value, setter) => {
|
3299
|
-
__accessCheck(obj, member, "write to private field");
|
3300
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
3301
|
-
return value;
|
3302
|
-
};
|
3303
|
-
var __privateMethod = (obj, member, method) => {
|
3304
|
-
__accessCheck(obj, member, "access private method");
|
3305
|
-
return method;
|
5206
|
+
var __typeError = (msg) => {
|
5207
|
+
throw TypeError(msg);
|
3306
5208
|
};
|
5209
|
+
var __accessCheck = (obj, member, msg) => member.has(obj) || __typeError("Cannot " + msg);
|
5210
|
+
var __privateGet = (obj, member, getter) => (__accessCheck(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
5211
|
+
var __privateAdd = (obj, member, value) => member.has(obj) ? __typeError("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
5212
|
+
var __privateSet = (obj, member, value, setter) => (__accessCheck(obj, member, "write to private field"), member.set(obj, value), value);
|
5213
|
+
var __privateMethod = (obj, member, method) => (__accessCheck(obj, member, "access private method"), method);
|
3307
5214
|
const buildClient = (plugins) => {
|
3308
|
-
var _options,
|
5215
|
+
var _options, _instances, parseOptions_fn, getFetchProps_fn, _a;
|
3309
5216
|
return _a = class {
|
3310
5217
|
constructor(options = {}, tables) {
|
3311
|
-
__privateAdd(this,
|
3312
|
-
__privateAdd(this,
|
3313
|
-
|
3314
|
-
const safeOptions = __privateMethod(this, _parseOptions, parseOptions_fn).call(this, options);
|
5218
|
+
__privateAdd(this, _instances);
|
5219
|
+
__privateAdd(this, _options);
|
5220
|
+
const safeOptions = __privateMethod(this, _instances, parseOptions_fn).call(this, options);
|
3315
5221
|
__privateSet(this, _options, safeOptions);
|
3316
5222
|
const pluginOptions = {
|
3317
|
-
...__privateMethod(this,
|
5223
|
+
...__privateMethod(this, _instances, getFetchProps_fn).call(this, safeOptions),
|
3318
5224
|
host: safeOptions.host,
|
3319
5225
|
tables,
|
3320
5226
|
branch: safeOptions.branch
|
@@ -3331,8 +5237,7 @@ const buildClient = (plugins) => {
|
|
3331
5237
|
this.sql = sql;
|
3332
5238
|
this.files = files;
|
3333
5239
|
for (const [key, namespace] of Object.entries(plugins ?? {})) {
|
3334
|
-
if (namespace === void 0)
|
3335
|
-
continue;
|
5240
|
+
if (namespace === void 0) continue;
|
3336
5241
|
this[key] = namespace.build(pluginOptions);
|
3337
5242
|
}
|
3338
5243
|
}
|
@@ -3341,8 +5246,8 @@ const buildClient = (plugins) => {
|
|
3341
5246
|
const branch = __privateGet(this, _options).branch;
|
3342
5247
|
return { databaseURL, branch };
|
3343
5248
|
}
|
3344
|
-
}, _options = new WeakMap(),
|
3345
|
-
const enableBrowser = options?.enableBrowser ??
|
5249
|
+
}, _options = new WeakMap(), _instances = new WeakSet(), parseOptions_fn = function(options) {
|
5250
|
+
const enableBrowser = options?.enableBrowser ?? false;
|
3346
5251
|
const isBrowser = typeof window !== "undefined" && typeof Deno === "undefined";
|
3347
5252
|
if (isBrowser && !enableBrowser) {
|
3348
5253
|
throw new Error(
|
@@ -3350,8 +5255,9 @@ const buildClient = (plugins) => {
|
|
3350
5255
|
);
|
3351
5256
|
}
|
3352
5257
|
const fetch = getFetchImplementation(options?.fetch);
|
3353
|
-
const databaseURL = options?.databaseURL
|
3354
|
-
const apiKey = options?.apiKey
|
5258
|
+
const databaseURL = options?.databaseURL;
|
5259
|
+
const apiKey = options?.apiKey;
|
5260
|
+
const branch = options?.branch;
|
3355
5261
|
const trace = options?.trace ?? defaultTrace;
|
3356
5262
|
const clientName = options?.clientName;
|
3357
5263
|
const host = options?.host ?? "production";
|
@@ -3362,25 +5268,8 @@ const buildClient = (plugins) => {
|
|
3362
5268
|
if (!databaseURL) {
|
3363
5269
|
throw new Error("Option databaseURL is required");
|
3364
5270
|
}
|
3365
|
-
|
3366
|
-
|
3367
|
-
const branch = options?.branch || previewBranch || envBranch || "main";
|
3368
|
-
if (!!previewBranch && branch !== previewBranch) {
|
3369
|
-
console.warn(
|
3370
|
-
`Ignoring preview branch ${previewBranch} because branch option was passed to the client constructor with value ${branch}`
|
3371
|
-
);
|
3372
|
-
} else if (!!envBranch && branch !== envBranch) {
|
3373
|
-
console.warn(
|
3374
|
-
`Ignoring branch ${envBranch} because branch option was passed to the client constructor with value ${branch}`
|
3375
|
-
);
|
3376
|
-
} else if (!!previewBranch && !!envBranch && previewBranch !== envBranch) {
|
3377
|
-
console.warn(
|
3378
|
-
`Ignoring preview branch ${previewBranch} and branch ${envBranch} because branch option was passed to the client constructor with value ${branch}`
|
3379
|
-
);
|
3380
|
-
} else if (!previewBranch && !envBranch && options?.branch === void 0) {
|
3381
|
-
console.warn(
|
3382
|
-
`No branch was passed to the client constructor. Using default branch ${branch}. You can set the branch with the environment variable XATA_BRANCH or by passing the branch option to the client constructor.`
|
3383
|
-
);
|
5271
|
+
if (!branch) {
|
5272
|
+
throw new Error("Option branch is required");
|
3384
5273
|
}
|
3385
5274
|
return {
|
3386
5275
|
fetch,
|
@@ -3394,7 +5283,7 @@ const buildClient = (plugins) => {
|
|
3394
5283
|
clientName,
|
3395
5284
|
xataAgentExtra
|
3396
5285
|
};
|
3397
|
-
},
|
5286
|
+
}, getFetchProps_fn = function({
|
3398
5287
|
fetch,
|
3399
5288
|
apiKey,
|
3400
5289
|
databaseURL,
|
@@ -3435,26 +5324,19 @@ class Serializer {
|
|
3435
5324
|
}
|
3436
5325
|
toJSON(data) {
|
3437
5326
|
function visit(obj) {
|
3438
|
-
if (Array.isArray(obj))
|
3439
|
-
return obj.map(visit);
|
5327
|
+
if (Array.isArray(obj)) return obj.map(visit);
|
3440
5328
|
const type = typeof obj;
|
3441
|
-
if (type === "undefined")
|
3442
|
-
|
3443
|
-
if (
|
3444
|
-
return { [META]: "bigint", [VALUE]: obj.toString() };
|
3445
|
-
if (obj === null || type !== "object")
|
3446
|
-
return obj;
|
5329
|
+
if (type === "undefined") return { [META]: "undefined" };
|
5330
|
+
if (type === "bigint") return { [META]: "bigint", [VALUE]: obj.toString() };
|
5331
|
+
if (obj === null || type !== "object") return obj;
|
3447
5332
|
const constructor = obj.constructor;
|
3448
5333
|
const o = { [META]: constructor.name };
|
3449
5334
|
for (const [key, value] of Object.entries(obj)) {
|
3450
5335
|
o[key] = visit(value);
|
3451
5336
|
}
|
3452
|
-
if (constructor === Date)
|
3453
|
-
|
3454
|
-
if (constructor ===
|
3455
|
-
o[VALUE] = Object.fromEntries(obj);
|
3456
|
-
if (constructor === Set)
|
3457
|
-
o[VALUE] = [...obj];
|
5337
|
+
if (constructor === Date) o[VALUE] = obj.toISOString();
|
5338
|
+
if (constructor === Map) o[VALUE] = Object.fromEntries(obj);
|
5339
|
+
if (constructor === Set) o[VALUE] = [...obj];
|
3458
5340
|
return o;
|
3459
5341
|
}
|
3460
5342
|
return JSON.stringify(visit(data));
|
@@ -3467,16 +5349,11 @@ class Serializer {
|
|
3467
5349
|
if (constructor) {
|
3468
5350
|
return Object.assign(Object.create(constructor.prototype), rest);
|
3469
5351
|
}
|
3470
|
-
if (clazz === "Date")
|
3471
|
-
|
3472
|
-
if (clazz === "
|
3473
|
-
|
3474
|
-
if (clazz === "
|
3475
|
-
return new Map(Object.entries(val));
|
3476
|
-
if (clazz === "bigint")
|
3477
|
-
return BigInt(val);
|
3478
|
-
if (clazz === "undefined")
|
3479
|
-
return void 0;
|
5352
|
+
if (clazz === "Date") return new Date(val);
|
5353
|
+
if (clazz === "Set") return new Set(val);
|
5354
|
+
if (clazz === "Map") return new Map(Object.entries(val));
|
5355
|
+
if (clazz === "bigint") return BigInt(val);
|
5356
|
+
if (clazz === "undefined") return void 0;
|
3480
5357
|
return rest;
|
3481
5358
|
}
|
3482
5359
|
return value;
|
@@ -3491,6 +5368,47 @@ const deserialize = (json) => {
|
|
3491
5368
|
return defaultSerializer.fromJSON(json);
|
3492
5369
|
};
|
3493
5370
|
|
5371
|
+
function parseEnvironment(environment) {
|
5372
|
+
try {
|
5373
|
+
if (typeof environment === "function") {
|
5374
|
+
return new Proxy(
|
5375
|
+
{},
|
5376
|
+
{
|
5377
|
+
get(target) {
|
5378
|
+
return environment(target);
|
5379
|
+
}
|
5380
|
+
}
|
5381
|
+
);
|
5382
|
+
}
|
5383
|
+
if (isObject(environment)) {
|
5384
|
+
return environment;
|
5385
|
+
}
|
5386
|
+
} catch (error) {
|
5387
|
+
}
|
5388
|
+
return {};
|
5389
|
+
}
|
5390
|
+
function buildPreviewBranchName({ org, branch }) {
|
5391
|
+
return `preview-${org}-${branch}`;
|
5392
|
+
}
|
5393
|
+
function getDeployPreviewBranch(environment) {
|
5394
|
+
try {
|
5395
|
+
const { deployPreview, deployPreviewBranch, vercelGitCommitRef, vercelGitRepoOwner } = parseEnvironment(environment);
|
5396
|
+
if (deployPreviewBranch) return deployPreviewBranch;
|
5397
|
+
switch (deployPreview) {
|
5398
|
+
case "vercel": {
|
5399
|
+
if (!vercelGitCommitRef || !vercelGitRepoOwner) {
|
5400
|
+
console.warn("XATA_PREVIEW=vercel but VERCEL_GIT_COMMIT_REF or VERCEL_GIT_REPO_OWNER is not valid");
|
5401
|
+
return void 0;
|
5402
|
+
}
|
5403
|
+
return buildPreviewBranchName({ org: vercelGitRepoOwner, branch: vercelGitCommitRef });
|
5404
|
+
}
|
5405
|
+
}
|
5406
|
+
return void 0;
|
5407
|
+
} catch (err) {
|
5408
|
+
return void 0;
|
5409
|
+
}
|
5410
|
+
}
|
5411
|
+
|
3494
5412
|
class XataError extends Error {
|
3495
5413
|
constructor(message, status) {
|
3496
5414
|
super(message);
|
@@ -3498,5 +5416,5 @@ class XataError extends Error {
|
|
3498
5416
|
}
|
3499
5417
|
}
|
3500
5418
|
|
3501
|
-
export { BaseClient, FetcherError, FilesPlugin, operationsByTag as Operations, PAGINATION_DEFAULT_OFFSET, PAGINATION_DEFAULT_SIZE, PAGINATION_MAX_OFFSET, PAGINATION_MAX_SIZE, Page, PageRecordArray, Query, RecordArray, RecordColumnTypes, Repository, RestRepository, SQLPlugin, SchemaPlugin, SearchPlugin, Serializer, TransactionPlugin, XataApiClient, XataApiPlugin, XataError, XataFile, XataPlugin, acceptWorkspaceMemberInvite, adaptAllTables, adaptTable, addGitBranchesEntry, addTableColumn, aggregateTable, applyBranchSchemaEdit, applyMigration, askTable, askTableSession, branchTransaction, buildClient, buildPreviewBranchName, buildProviderString, bulkInsertTableRecords, cancelWorkspaceMemberInvite, compareBranchSchemas, compareBranchWithUserSchema, compareMigrationRequest, contains, copyBranch, createBranch, createCluster, createDatabase, createMigrationRequest, createTable, createUserAPIKey, createWorkspace, deleteBranch, deleteColumn, deleteDatabase, deleteDatabaseGithubSettings, deleteFile, deleteFileItem, deleteOAuthAccessToken, deleteRecord, deleteTable, deleteUser, deleteUserAPIKey, deleteUserOAuthClient, deleteWorkspace, deserialize, endsWith, equals, executeBranchMigrationPlan, exists, fileAccess, fileUpload, ge,
|
5419
|
+
export { BaseClient, Buffer, FetcherError, FilesPlugin, operationsByTag as Operations, PAGINATION_DEFAULT_OFFSET, PAGINATION_DEFAULT_SIZE, PAGINATION_MAX_OFFSET, PAGINATION_MAX_SIZE, Page, PageRecordArray, Query, RecordArray, RecordColumnTypes, Repository, RestRepository, SQLPlugin, SchemaPlugin, SearchPlugin, Serializer, TransactionPlugin, XataApiClient, XataApiPlugin, XataError, XataFile, XataPlugin, acceptWorkspaceMemberInvite, adaptAllTables, adaptTable, addGitBranchesEntry, addTableColumn, aggregateTable, applyBranchSchemaEdit, applyMigration, askTable, askTableSession, branchTransaction, buildClient, buildPreviewBranchName, buildProviderString, bulkInsertTableRecords, cancelWorkspaceMemberInvite, compareBranchSchemas, compareBranchWithUserSchema, compareMigrationRequest, completeMigration, contains, copyBranch, createBranch, createCluster, createDatabase, createMigrationRequest, createTable, createUserAPIKey, createWorkspace, deleteBranch, deleteCluster, deleteColumn, deleteDatabase, deleteDatabaseGithubSettings, deleteFile, deleteFileItem, deleteOAuthAccessToken, deleteRecord, deleteTable, deleteUser, deleteUserAPIKey, deleteUserOAuthClient, deleteWorkspace, deserialize, dropClusterExtension, endsWith, equals, executeBranchMigrationPlan, exists, fileAccess, fileUpload, ge, getAuthorizationCode, getBranchDetails, getBranchList, getBranchMetadata, getBranchMigrationHistory, getBranchMigrationJobStatus, getBranchMigrationPlan, getBranchMoveStatus, getBranchSchemaHistory, getBranchStats, getCluster, getClusterMetrics, getColumn, getDatabaseGithubSettings, getDatabaseList, getDatabaseMetadata, getDatabaseSettings, getDeployPreviewBranch, getFile, getFileItem, getGitBranchesMapping, getHostUrl, getMigrationHistory, getMigrationJobStatus, getMigrationJobs, getMigrationRequest, getMigrationRequestIsMerged, getRecord, getSchema, getSchemas, getTableColumns, getTableSchema, getUser, getUserAPIKeys, getUserOAuthAccessTokens, getUserOAuthClients, getWorkspace, 
getWorkspaceMembersList, getWorkspaceSettings, getWorkspacesList, grantAuthorizationCode, greaterEquals, greaterThan, greaterThanEquals, gt, gte, iContains, iPattern, includes, includesAll, includesAny, includesNone, insertRecord, insertRecordWithID, installClusterExtension, inviteWorkspaceMember, is, isCursorPaginationOptions, isHostProviderAlias, isHostProviderBuilder, isIdentifiable, isNot, isValidExpandedColumn, isValidSelectableColumns, le, lessEquals, lessThan, lessThanEquals, listClusterBranches, listClusterExtensions, listClusters, listMigrationRequestsCommits, listRegions, lt, lte, mergeMigrationRequest, moveBranch, notExists, operationsByTag, parseProviderString, parseWorkspacesUrlParts, pattern, previewBranchSchemaEdit, pushBranchMigrations, putFile, putFileItem, queryMigrationRequests, queryTable, removeGitBranchesEntry, removeWorkspaceMember, renameDatabase, resendWorkspaceMemberInvite, resolveBranch, rollbackMigration, searchBranch, searchTable, serialize, setTableSchema, sqlBatchQuery, sqlQuery, startMigration, startsWith, summarizeTable, transformImage, updateBranchMetadata, updateBranchSchema, updateCluster, updateColumn, updateDatabaseGithubSettings, updateDatabaseMetadata, updateDatabaseSettings, updateMigrationRequest, updateOAuthAccessToken, updateRecordWithID, updateTable, updateUser, updateWorkspace, updateWorkspaceMemberInvite, updateWorkspaceMemberRole, updateWorkspaceSettings, upsertRecordWithID, vectorSearchTable };
|
3502
5420
|
//# sourceMappingURL=index.mjs.map
|