@xata.io/client 0.0.0-next.v43b83f3e3d703ba85a9c6790259cc93a43f69e98 → 0.0.0-next.v49febb1f1353c1a1b68b8e090dbc9c500cc77be3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-add-version.log +1 -1
- package/.turbo/turbo-build.log +4 -4
- package/CHANGELOG.md +29 -3
- package/dist/index.cjs +2603 -617
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.ts +5633 -3919
- package/dist/index.mjs +2582 -613
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.mjs
CHANGED
@@ -22,6 +22,1789 @@ const TraceAttributes = {
|
|
22
22
|
CLOUDFLARE_RAY_ID: "cf.ray"
|
23
23
|
};
|
24
24
|
|
25
|
+
const lookup = [];
|
26
|
+
const revLookup = [];
|
27
|
+
const code = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
|
28
|
+
for (let i = 0, len = code.length; i < len; ++i) {
|
29
|
+
lookup[i] = code[i];
|
30
|
+
revLookup[code.charCodeAt(i)] = i;
|
31
|
+
}
|
32
|
+
revLookup["-".charCodeAt(0)] = 62;
|
33
|
+
revLookup["_".charCodeAt(0)] = 63;
|
34
|
+
function getLens(b64) {
|
35
|
+
const len = b64.length;
|
36
|
+
if (len % 4 > 0) {
|
37
|
+
throw new Error("Invalid string. Length must be a multiple of 4");
|
38
|
+
}
|
39
|
+
let validLen = b64.indexOf("=");
|
40
|
+
if (validLen === -1) validLen = len;
|
41
|
+
const placeHoldersLen = validLen === len ? 0 : 4 - validLen % 4;
|
42
|
+
return [validLen, placeHoldersLen];
|
43
|
+
}
|
44
|
+
function _byteLength(_b64, validLen, placeHoldersLen) {
|
45
|
+
return (validLen + placeHoldersLen) * 3 / 4 - placeHoldersLen;
|
46
|
+
}
|
47
|
+
function toByteArray(b64) {
|
48
|
+
let tmp;
|
49
|
+
const lens = getLens(b64);
|
50
|
+
const validLen = lens[0];
|
51
|
+
const placeHoldersLen = lens[1];
|
52
|
+
const arr = new Uint8Array(_byteLength(b64, validLen, placeHoldersLen));
|
53
|
+
let curByte = 0;
|
54
|
+
const len = placeHoldersLen > 0 ? validLen - 4 : validLen;
|
55
|
+
let i;
|
56
|
+
for (i = 0; i < len; i += 4) {
|
57
|
+
tmp = revLookup[b64.charCodeAt(i)] << 18 | revLookup[b64.charCodeAt(i + 1)] << 12 | revLookup[b64.charCodeAt(i + 2)] << 6 | revLookup[b64.charCodeAt(i + 3)];
|
58
|
+
arr[curByte++] = tmp >> 16 & 255;
|
59
|
+
arr[curByte++] = tmp >> 8 & 255;
|
60
|
+
arr[curByte++] = tmp & 255;
|
61
|
+
}
|
62
|
+
if (placeHoldersLen === 2) {
|
63
|
+
tmp = revLookup[b64.charCodeAt(i)] << 2 | revLookup[b64.charCodeAt(i + 1)] >> 4;
|
64
|
+
arr[curByte++] = tmp & 255;
|
65
|
+
}
|
66
|
+
if (placeHoldersLen === 1) {
|
67
|
+
tmp = revLookup[b64.charCodeAt(i)] << 10 | revLookup[b64.charCodeAt(i + 1)] << 4 | revLookup[b64.charCodeAt(i + 2)] >> 2;
|
68
|
+
arr[curByte++] = tmp >> 8 & 255;
|
69
|
+
arr[curByte++] = tmp & 255;
|
70
|
+
}
|
71
|
+
return arr;
|
72
|
+
}
|
73
|
+
function tripletToBase64(num) {
|
74
|
+
return lookup[num >> 18 & 63] + lookup[num >> 12 & 63] + lookup[num >> 6 & 63] + lookup[num & 63];
|
75
|
+
}
|
76
|
+
function encodeChunk(uint8, start, end) {
|
77
|
+
let tmp;
|
78
|
+
const output = [];
|
79
|
+
for (let i = start; i < end; i += 3) {
|
80
|
+
tmp = (uint8[i] << 16 & 16711680) + (uint8[i + 1] << 8 & 65280) + (uint8[i + 2] & 255);
|
81
|
+
output.push(tripletToBase64(tmp));
|
82
|
+
}
|
83
|
+
return output.join("");
|
84
|
+
}
|
85
|
+
function fromByteArray(uint8) {
|
86
|
+
let tmp;
|
87
|
+
const len = uint8.length;
|
88
|
+
const extraBytes = len % 3;
|
89
|
+
const parts = [];
|
90
|
+
const maxChunkLength = 16383;
|
91
|
+
for (let i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) {
|
92
|
+
parts.push(encodeChunk(uint8, i, i + maxChunkLength > len2 ? len2 : i + maxChunkLength));
|
93
|
+
}
|
94
|
+
if (extraBytes === 1) {
|
95
|
+
tmp = uint8[len - 1];
|
96
|
+
parts.push(lookup[tmp >> 2] + lookup[tmp << 4 & 63] + "==");
|
97
|
+
} else if (extraBytes === 2) {
|
98
|
+
tmp = (uint8[len - 2] << 8) + uint8[len - 1];
|
99
|
+
parts.push(lookup[tmp >> 10] + lookup[tmp >> 4 & 63] + lookup[tmp << 2 & 63] + "=");
|
100
|
+
}
|
101
|
+
return parts.join("");
|
102
|
+
}
|
103
|
+
|
104
|
+
const K_MAX_LENGTH = 2147483647;
|
105
|
+
const MAX_ARGUMENTS_LENGTH = 4096;
|
106
|
+
class Buffer extends Uint8Array {
|
107
|
+
/**
|
108
|
+
* Constructs a new `Buffer` instance.
|
109
|
+
*
|
110
|
+
* @param value
|
111
|
+
* @param encodingOrOffset
|
112
|
+
* @param length
|
113
|
+
*/
|
114
|
+
constructor(value, encodingOrOffset, length) {
|
115
|
+
if (typeof value === "number") {
|
116
|
+
if (typeof encodingOrOffset === "string") {
|
117
|
+
throw new TypeError("The first argument must be of type string, received type number");
|
118
|
+
}
|
119
|
+
if (value < 0) {
|
120
|
+
throw new RangeError("The buffer size cannot be negative");
|
121
|
+
}
|
122
|
+
super(value < 0 ? 0 : Buffer._checked(value) | 0);
|
123
|
+
} else if (typeof value === "string") {
|
124
|
+
if (typeof encodingOrOffset !== "string") {
|
125
|
+
encodingOrOffset = "utf8";
|
126
|
+
}
|
127
|
+
if (!Buffer.isEncoding(encodingOrOffset)) {
|
128
|
+
throw new TypeError("Unknown encoding: " + encodingOrOffset);
|
129
|
+
}
|
130
|
+
const length2 = Buffer.byteLength(value, encodingOrOffset) | 0;
|
131
|
+
super(length2);
|
132
|
+
const written = this.write(value, 0, this.length, encodingOrOffset);
|
133
|
+
if (written !== length2) {
|
134
|
+
throw new TypeError(
|
135
|
+
"Number of bytes written did not match expected length (wrote " + written + ", expected " + length2 + ")"
|
136
|
+
);
|
137
|
+
}
|
138
|
+
} else if (ArrayBuffer.isView(value)) {
|
139
|
+
if (Buffer._isInstance(value, Uint8Array)) {
|
140
|
+
const copy = new Uint8Array(value);
|
141
|
+
const array = copy.buffer;
|
142
|
+
const byteOffset = copy.byteOffset;
|
143
|
+
const length2 = copy.byteLength;
|
144
|
+
if (byteOffset < 0 || array.byteLength < byteOffset) {
|
145
|
+
throw new RangeError("offset is outside of buffer bounds");
|
146
|
+
}
|
147
|
+
if (array.byteLength < byteOffset + (length2 || 0)) {
|
148
|
+
throw new RangeError("length is outside of buffer bounds");
|
149
|
+
}
|
150
|
+
super(new Uint8Array(array, byteOffset, length2));
|
151
|
+
} else {
|
152
|
+
const array = value;
|
153
|
+
const length2 = array.length < 0 ? 0 : Buffer._checked(array.length) | 0;
|
154
|
+
super(new Uint8Array(length2));
|
155
|
+
for (let i = 0; i < length2; i++) {
|
156
|
+
this[i] = array[i] & 255;
|
157
|
+
}
|
158
|
+
}
|
159
|
+
} else if (value == null) {
|
160
|
+
throw new TypeError(
|
161
|
+
"The first argument must be one of type string, Buffer, ArrayBuffer, Array, or Array-like Object. Received type " + typeof value
|
162
|
+
);
|
163
|
+
} else if (Buffer._isInstance(value, ArrayBuffer) || value && Buffer._isInstance(value.buffer, ArrayBuffer)) {
|
164
|
+
const array = value;
|
165
|
+
const byteOffset = encodingOrOffset;
|
166
|
+
if (byteOffset < 0 || array.byteLength < byteOffset) {
|
167
|
+
throw new RangeError("offset is outside of buffer bounds");
|
168
|
+
}
|
169
|
+
if (array.byteLength < byteOffset + (length || 0)) {
|
170
|
+
throw new RangeError("length is outside of buffer bounds");
|
171
|
+
}
|
172
|
+
super(new Uint8Array(array, byteOffset, length));
|
173
|
+
} else if (Array.isArray(value)) {
|
174
|
+
const array = value;
|
175
|
+
const length2 = array.length < 0 ? 0 : Buffer._checked(array.length) | 0;
|
176
|
+
super(new Uint8Array(length2));
|
177
|
+
for (let i = 0; i < length2; i++) {
|
178
|
+
this[i] = array[i] & 255;
|
179
|
+
}
|
180
|
+
} else {
|
181
|
+
throw new TypeError("Unable to determine the correct way to allocate buffer for type " + typeof value);
|
182
|
+
}
|
183
|
+
}
|
184
|
+
/**
|
185
|
+
* Return JSON representation of the buffer.
|
186
|
+
*/
|
187
|
+
toJSON() {
|
188
|
+
return {
|
189
|
+
type: "Buffer",
|
190
|
+
data: Array.prototype.slice.call(this)
|
191
|
+
};
|
192
|
+
}
|
193
|
+
/**
|
194
|
+
* Writes `string` to the buffer at `offset` according to the character encoding in `encoding`. The `length`
|
195
|
+
* parameter is the number of bytes to write. If the buffer does not contain enough space to fit the entire string,
|
196
|
+
* only part of `string` will be written. However, partially encoded characters will not be written.
|
197
|
+
*
|
198
|
+
* @param string String to write to `buf`.
|
199
|
+
* @param offset Number of bytes to skip before starting to write `string`. Default: `0`.
|
200
|
+
* @param length Maximum number of bytes to write: Default: `buf.length - offset`.
|
201
|
+
* @param encoding The character encoding of `string`. Default: `utf8`.
|
202
|
+
*/
|
203
|
+
write(string, offset, length, encoding) {
|
204
|
+
if (typeof offset === "undefined") {
|
205
|
+
encoding = "utf8";
|
206
|
+
length = this.length;
|
207
|
+
offset = 0;
|
208
|
+
} else if (typeof length === "undefined" && typeof offset === "string") {
|
209
|
+
encoding = offset;
|
210
|
+
length = this.length;
|
211
|
+
offset = 0;
|
212
|
+
} else if (typeof offset === "number" && isFinite(offset)) {
|
213
|
+
offset = offset >>> 0;
|
214
|
+
if (typeof length === "number" && isFinite(length)) {
|
215
|
+
length = length >>> 0;
|
216
|
+
encoding ?? (encoding = "utf8");
|
217
|
+
} else if (typeof length === "string") {
|
218
|
+
encoding = length;
|
219
|
+
length = void 0;
|
220
|
+
}
|
221
|
+
} else {
|
222
|
+
throw new Error("Buffer.write(string, encoding, offset[, length]) is no longer supported");
|
223
|
+
}
|
224
|
+
const remaining = this.length - offset;
|
225
|
+
if (typeof length === "undefined" || length > remaining) {
|
226
|
+
length = remaining;
|
227
|
+
}
|
228
|
+
if (string.length > 0 && (length < 0 || offset < 0) || offset > this.length) {
|
229
|
+
throw new RangeError("Attempt to write outside buffer bounds");
|
230
|
+
}
|
231
|
+
encoding || (encoding = "utf8");
|
232
|
+
switch (Buffer._getEncoding(encoding)) {
|
233
|
+
case "hex":
|
234
|
+
return Buffer._hexWrite(this, string, offset, length);
|
235
|
+
case "utf8":
|
236
|
+
return Buffer._utf8Write(this, string, offset, length);
|
237
|
+
case "ascii":
|
238
|
+
case "latin1":
|
239
|
+
case "binary":
|
240
|
+
return Buffer._asciiWrite(this, string, offset, length);
|
241
|
+
case "ucs2":
|
242
|
+
case "utf16le":
|
243
|
+
return Buffer._ucs2Write(this, string, offset, length);
|
244
|
+
case "base64":
|
245
|
+
return Buffer._base64Write(this, string, offset, length);
|
246
|
+
}
|
247
|
+
}
|
248
|
+
/**
|
249
|
+
* Decodes the buffer to a string according to the specified character encoding.
|
250
|
+
* Passing `start` and `end` will decode only a subset of the buffer.
|
251
|
+
*
|
252
|
+
* Note that if the encoding is `utf8` and a byte sequence in the input is not valid UTF-8, then each invalid byte
|
253
|
+
* will be replaced with `U+FFFD`.
|
254
|
+
*
|
255
|
+
* @param encoding
|
256
|
+
* @param start
|
257
|
+
* @param end
|
258
|
+
*/
|
259
|
+
toString(encoding, start, end) {
|
260
|
+
const length = this.length;
|
261
|
+
if (length === 0) {
|
262
|
+
return "";
|
263
|
+
}
|
264
|
+
if (arguments.length === 0) {
|
265
|
+
return Buffer._utf8Slice(this, 0, length);
|
266
|
+
}
|
267
|
+
if (typeof start === "undefined" || start < 0) {
|
268
|
+
start = 0;
|
269
|
+
}
|
270
|
+
if (start > this.length) {
|
271
|
+
return "";
|
272
|
+
}
|
273
|
+
if (typeof end === "undefined" || end > this.length) {
|
274
|
+
end = this.length;
|
275
|
+
}
|
276
|
+
if (end <= 0) {
|
277
|
+
return "";
|
278
|
+
}
|
279
|
+
end >>>= 0;
|
280
|
+
start >>>= 0;
|
281
|
+
if (end <= start) {
|
282
|
+
return "";
|
283
|
+
}
|
284
|
+
if (!encoding) {
|
285
|
+
encoding = "utf8";
|
286
|
+
}
|
287
|
+
switch (Buffer._getEncoding(encoding)) {
|
288
|
+
case "hex":
|
289
|
+
return Buffer._hexSlice(this, start, end);
|
290
|
+
case "utf8":
|
291
|
+
return Buffer._utf8Slice(this, start, end);
|
292
|
+
case "ascii":
|
293
|
+
return Buffer._asciiSlice(this, start, end);
|
294
|
+
case "latin1":
|
295
|
+
case "binary":
|
296
|
+
return Buffer._latin1Slice(this, start, end);
|
297
|
+
case "ucs2":
|
298
|
+
case "utf16le":
|
299
|
+
return Buffer._utf16leSlice(this, start, end);
|
300
|
+
case "base64":
|
301
|
+
return Buffer._base64Slice(this, start, end);
|
302
|
+
}
|
303
|
+
}
|
304
|
+
/**
|
305
|
+
* Returns true if this buffer's is equal to the provided buffer, meaning they share the same exact data.
|
306
|
+
*
|
307
|
+
* @param otherBuffer
|
308
|
+
*/
|
309
|
+
equals(otherBuffer) {
|
310
|
+
if (!Buffer.isBuffer(otherBuffer)) {
|
311
|
+
throw new TypeError("Argument must be a Buffer");
|
312
|
+
}
|
313
|
+
if (this === otherBuffer) {
|
314
|
+
return true;
|
315
|
+
}
|
316
|
+
return Buffer.compare(this, otherBuffer) === 0;
|
317
|
+
}
|
318
|
+
/**
|
319
|
+
* Compares the buffer with `otherBuffer` and returns a number indicating whether the buffer comes before, after,
|
320
|
+
* or is the same as `otherBuffer` in sort order. Comparison is based on the actual sequence of bytes in each
|
321
|
+
* buffer.
|
322
|
+
*
|
323
|
+
* - `0` is returned if `otherBuffer` is the same as this buffer.
|
324
|
+
* - `1` is returned if `otherBuffer` should come before this buffer when sorted.
|
325
|
+
* - `-1` is returned if `otherBuffer` should come after this buffer when sorted.
|
326
|
+
*
|
327
|
+
* @param otherBuffer The buffer to compare to.
|
328
|
+
* @param targetStart The offset within `otherBuffer` at which to begin comparison.
|
329
|
+
* @param targetEnd The offset within `otherBuffer` at which to end comparison (exclusive).
|
330
|
+
* @param sourceStart The offset within this buffer at which to begin comparison.
|
331
|
+
* @param sourceEnd The offset within this buffer at which to end the comparison (exclusive).
|
332
|
+
*/
|
333
|
+
compare(otherBuffer, targetStart, targetEnd, sourceStart, sourceEnd) {
|
334
|
+
if (Buffer._isInstance(otherBuffer, Uint8Array)) {
|
335
|
+
otherBuffer = Buffer.from(otherBuffer, otherBuffer.byteOffset, otherBuffer.byteLength);
|
336
|
+
}
|
337
|
+
if (!Buffer.isBuffer(otherBuffer)) {
|
338
|
+
throw new TypeError("Argument must be a Buffer or Uint8Array");
|
339
|
+
}
|
340
|
+
targetStart ?? (targetStart = 0);
|
341
|
+
targetEnd ?? (targetEnd = otherBuffer ? otherBuffer.length : 0);
|
342
|
+
sourceStart ?? (sourceStart = 0);
|
343
|
+
sourceEnd ?? (sourceEnd = this.length);
|
344
|
+
if (targetStart < 0 || targetEnd > otherBuffer.length || sourceStart < 0 || sourceEnd > this.length) {
|
345
|
+
throw new RangeError("Out of range index");
|
346
|
+
}
|
347
|
+
if (sourceStart >= sourceEnd && targetStart >= targetEnd) {
|
348
|
+
return 0;
|
349
|
+
}
|
350
|
+
if (sourceStart >= sourceEnd) {
|
351
|
+
return -1;
|
352
|
+
}
|
353
|
+
if (targetStart >= targetEnd) {
|
354
|
+
return 1;
|
355
|
+
}
|
356
|
+
targetStart >>>= 0;
|
357
|
+
targetEnd >>>= 0;
|
358
|
+
sourceStart >>>= 0;
|
359
|
+
sourceEnd >>>= 0;
|
360
|
+
if (this === otherBuffer) {
|
361
|
+
return 0;
|
362
|
+
}
|
363
|
+
let x = sourceEnd - sourceStart;
|
364
|
+
let y = targetEnd - targetStart;
|
365
|
+
const len = Math.min(x, y);
|
366
|
+
const thisCopy = this.slice(sourceStart, sourceEnd);
|
367
|
+
const targetCopy = otherBuffer.slice(targetStart, targetEnd);
|
368
|
+
for (let i = 0; i < len; ++i) {
|
369
|
+
if (thisCopy[i] !== targetCopy[i]) {
|
370
|
+
x = thisCopy[i];
|
371
|
+
y = targetCopy[i];
|
372
|
+
break;
|
373
|
+
}
|
374
|
+
}
|
375
|
+
if (x < y) return -1;
|
376
|
+
if (y < x) return 1;
|
377
|
+
return 0;
|
378
|
+
}
|
379
|
+
/**
|
380
|
+
* Copies data from a region of this buffer to a region in `targetBuffer`, even if the `targetBuffer` memory
|
381
|
+
* region overlaps with this buffer.
|
382
|
+
*
|
383
|
+
* @param targetBuffer The target buffer to copy into.
|
384
|
+
* @param targetStart The offset within `targetBuffer` at which to begin writing.
|
385
|
+
* @param sourceStart The offset within this buffer at which to begin copying.
|
386
|
+
* @param sourceEnd The offset within this buffer at which to end copying (exclusive).
|
387
|
+
*/
|
388
|
+
copy(targetBuffer, targetStart, sourceStart, sourceEnd) {
|
389
|
+
if (!Buffer.isBuffer(targetBuffer)) throw new TypeError("argument should be a Buffer");
|
390
|
+
if (!sourceStart) sourceStart = 0;
|
391
|
+
if (!targetStart) targetStart = 0;
|
392
|
+
if (!sourceEnd && sourceEnd !== 0) sourceEnd = this.length;
|
393
|
+
if (targetStart >= targetBuffer.length) targetStart = targetBuffer.length;
|
394
|
+
if (!targetStart) targetStart = 0;
|
395
|
+
if (sourceEnd > 0 && sourceEnd < sourceStart) sourceEnd = sourceStart;
|
396
|
+
if (sourceEnd === sourceStart) return 0;
|
397
|
+
if (targetBuffer.length === 0 || this.length === 0) return 0;
|
398
|
+
if (targetStart < 0) {
|
399
|
+
throw new RangeError("targetStart out of bounds");
|
400
|
+
}
|
401
|
+
if (sourceStart < 0 || sourceStart >= this.length) throw new RangeError("Index out of range");
|
402
|
+
if (sourceEnd < 0) throw new RangeError("sourceEnd out of bounds");
|
403
|
+
if (sourceEnd > this.length) sourceEnd = this.length;
|
404
|
+
if (targetBuffer.length - targetStart < sourceEnd - sourceStart) {
|
405
|
+
sourceEnd = targetBuffer.length - targetStart + sourceStart;
|
406
|
+
}
|
407
|
+
const len = sourceEnd - sourceStart;
|
408
|
+
if (this === targetBuffer && typeof Uint8Array.prototype.copyWithin === "function") {
|
409
|
+
this.copyWithin(targetStart, sourceStart, sourceEnd);
|
410
|
+
} else {
|
411
|
+
Uint8Array.prototype.set.call(targetBuffer, this.subarray(sourceStart, sourceEnd), targetStart);
|
412
|
+
}
|
413
|
+
return len;
|
414
|
+
}
|
415
|
+
/**
|
416
|
+
* Returns a new `Buffer` that references the same memory as the original, but offset and cropped by the `start`
|
417
|
+
* and `end` indices. This is the same behavior as `buf.subarray()`.
|
418
|
+
*
|
419
|
+
* This method is not compatible with the `Uint8Array.prototype.slice()`, which is a superclass of Buffer. To copy
|
420
|
+
* the slice, use `Uint8Array.prototype.slice()`.
|
421
|
+
*
|
422
|
+
* @param start
|
423
|
+
* @param end
|
424
|
+
*/
|
425
|
+
slice(start, end) {
|
426
|
+
if (!start) {
|
427
|
+
start = 0;
|
428
|
+
}
|
429
|
+
const len = this.length;
|
430
|
+
start = ~~start;
|
431
|
+
end = end === void 0 ? len : ~~end;
|
432
|
+
if (start < 0) {
|
433
|
+
start += len;
|
434
|
+
if (start < 0) {
|
435
|
+
start = 0;
|
436
|
+
}
|
437
|
+
} else if (start > len) {
|
438
|
+
start = len;
|
439
|
+
}
|
440
|
+
if (end < 0) {
|
441
|
+
end += len;
|
442
|
+
if (end < 0) {
|
443
|
+
end = 0;
|
444
|
+
}
|
445
|
+
} else if (end > len) {
|
446
|
+
end = len;
|
447
|
+
}
|
448
|
+
if (end < start) {
|
449
|
+
end = start;
|
450
|
+
}
|
451
|
+
const newBuf = this.subarray(start, end);
|
452
|
+
Object.setPrototypeOf(newBuf, Buffer.prototype);
|
453
|
+
return newBuf;
|
454
|
+
}
|
455
|
+
/**
|
456
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits
|
457
|
+
* of accuracy. Behavior is undefined when value is anything other than an unsigned integer.
|
458
|
+
*
|
459
|
+
* @param value Number to write.
|
460
|
+
* @param offset Number of bytes to skip before starting to write.
|
461
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
462
|
+
* @param noAssert
|
463
|
+
* @returns `offset` plus the number of bytes written.
|
464
|
+
*/
|
465
|
+
writeUIntLE(value, offset, byteLength, noAssert) {
|
466
|
+
value = +value;
|
467
|
+
offset = offset >>> 0;
|
468
|
+
byteLength = byteLength >>> 0;
|
469
|
+
if (!noAssert) {
|
470
|
+
const maxBytes = Math.pow(2, 8 * byteLength) - 1;
|
471
|
+
Buffer._checkInt(this, value, offset, byteLength, maxBytes, 0);
|
472
|
+
}
|
473
|
+
let mul = 1;
|
474
|
+
let i = 0;
|
475
|
+
this[offset] = value & 255;
|
476
|
+
while (++i < byteLength && (mul *= 256)) {
|
477
|
+
this[offset + i] = value / mul & 255;
|
478
|
+
}
|
479
|
+
return offset + byteLength;
|
480
|
+
}
|
481
|
+
/**
|
482
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits of
|
483
|
+
* accuracy. Behavior is undefined when `value` is anything other than an unsigned integer.
|
484
|
+
*
|
485
|
+
* @param value Number to write.
|
486
|
+
* @param offset Number of bytes to skip before starting to write.
|
487
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
488
|
+
* @param noAssert
|
489
|
+
* @returns `offset` plus the number of bytes written.
|
490
|
+
*/
|
491
|
+
writeUIntBE(value, offset, byteLength, noAssert) {
|
492
|
+
value = +value;
|
493
|
+
offset = offset >>> 0;
|
494
|
+
byteLength = byteLength >>> 0;
|
495
|
+
if (!noAssert) {
|
496
|
+
const maxBytes = Math.pow(2, 8 * byteLength) - 1;
|
497
|
+
Buffer._checkInt(this, value, offset, byteLength, maxBytes, 0);
|
498
|
+
}
|
499
|
+
let i = byteLength - 1;
|
500
|
+
let mul = 1;
|
501
|
+
this[offset + i] = value & 255;
|
502
|
+
while (--i >= 0 && (mul *= 256)) {
|
503
|
+
this[offset + i] = value / mul & 255;
|
504
|
+
}
|
505
|
+
return offset + byteLength;
|
506
|
+
}
|
507
|
+
/**
|
508
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits
|
509
|
+
* of accuracy. Behavior is undefined when `value` is anything other than a signed integer.
|
510
|
+
*
|
511
|
+
* @param value Number to write.
|
512
|
+
* @param offset Number of bytes to skip before starting to write.
|
513
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
514
|
+
* @param noAssert
|
515
|
+
* @returns `offset` plus the number of bytes written.
|
516
|
+
*/
|
517
|
+
writeIntLE(value, offset, byteLength, noAssert) {
|
518
|
+
value = +value;
|
519
|
+
offset = offset >>> 0;
|
520
|
+
if (!noAssert) {
|
521
|
+
const limit = Math.pow(2, 8 * byteLength - 1);
|
522
|
+
Buffer._checkInt(this, value, offset, byteLength, limit - 1, -limit);
|
523
|
+
}
|
524
|
+
let i = 0;
|
525
|
+
let mul = 1;
|
526
|
+
let sub = 0;
|
527
|
+
this[offset] = value & 255;
|
528
|
+
while (++i < byteLength && (mul *= 256)) {
|
529
|
+
if (value < 0 && sub === 0 && this[offset + i - 1] !== 0) {
|
530
|
+
sub = 1;
|
531
|
+
}
|
532
|
+
this[offset + i] = (value / mul >> 0) - sub & 255;
|
533
|
+
}
|
534
|
+
return offset + byteLength;
|
535
|
+
}
|
536
|
+
/**
|
537
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits
|
538
|
+
* of accuracy. Behavior is undefined when `value` is anything other than a signed integer.
|
539
|
+
*
|
540
|
+
* @param value Number to write.
|
541
|
+
* @param offset Number of bytes to skip before starting to write.
|
542
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
543
|
+
* @param noAssert
|
544
|
+
* @returns `offset` plus the number of bytes written.
|
545
|
+
*/
|
546
|
+
writeIntBE(value, offset, byteLength, noAssert) {
|
547
|
+
value = +value;
|
548
|
+
offset = offset >>> 0;
|
549
|
+
if (!noAssert) {
|
550
|
+
const limit = Math.pow(2, 8 * byteLength - 1);
|
551
|
+
Buffer._checkInt(this, value, offset, byteLength, limit - 1, -limit);
|
552
|
+
}
|
553
|
+
let i = byteLength - 1;
|
554
|
+
let mul = 1;
|
555
|
+
let sub = 0;
|
556
|
+
this[offset + i] = value & 255;
|
557
|
+
while (--i >= 0 && (mul *= 256)) {
|
558
|
+
if (value < 0 && sub === 0 && this[offset + i + 1] !== 0) {
|
559
|
+
sub = 1;
|
560
|
+
}
|
561
|
+
this[offset + i] = (value / mul >> 0) - sub & 255;
|
562
|
+
}
|
563
|
+
return offset + byteLength;
|
564
|
+
}
|
565
|
+
/**
|
566
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an
|
567
|
+
* unsigned, little-endian integer supporting up to 48 bits of accuracy.
|
568
|
+
*
|
569
|
+
* @param offset Number of bytes to skip before starting to read.
|
570
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
571
|
+
* @param noAssert
|
572
|
+
*/
|
573
|
+
readUIntLE(offset, byteLength, noAssert) {
|
574
|
+
offset = offset >>> 0;
|
575
|
+
byteLength = byteLength >>> 0;
|
576
|
+
if (!noAssert) {
|
577
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
578
|
+
}
|
579
|
+
let val = this[offset];
|
580
|
+
let mul = 1;
|
581
|
+
let i = 0;
|
582
|
+
while (++i < byteLength && (mul *= 256)) {
|
583
|
+
val += this[offset + i] * mul;
|
584
|
+
}
|
585
|
+
return val;
|
586
|
+
}
|
587
|
+
/**
|
588
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an
|
589
|
+
* unsigned, big-endian integer supporting up to 48 bits of accuracy.
|
590
|
+
*
|
591
|
+
* @param offset Number of bytes to skip before starting to read.
|
592
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
593
|
+
* @param noAssert
|
594
|
+
*/
|
595
|
+
readUIntBE(offset, byteLength, noAssert) {
|
596
|
+
offset = offset >>> 0;
|
597
|
+
byteLength = byteLength >>> 0;
|
598
|
+
if (!noAssert) {
|
599
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
600
|
+
}
|
601
|
+
let val = this[offset + --byteLength];
|
602
|
+
let mul = 1;
|
603
|
+
while (byteLength > 0 && (mul *= 256)) {
|
604
|
+
val += this[offset + --byteLength] * mul;
|
605
|
+
}
|
606
|
+
return val;
|
607
|
+
}
|
608
|
+
/**
|
609
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a
|
610
|
+
* little-endian, two's complement signed value supporting up to 48 bits of accuracy.
|
611
|
+
*
|
612
|
+
* @param offset Number of bytes to skip before starting to read.
|
613
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
614
|
+
* @param noAssert
|
615
|
+
*/
|
616
|
+
readIntLE(offset, byteLength, noAssert) {
|
617
|
+
offset = offset >>> 0;
|
618
|
+
byteLength = byteLength >>> 0;
|
619
|
+
if (!noAssert) {
|
620
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
621
|
+
}
|
622
|
+
let val = this[offset];
|
623
|
+
let mul = 1;
|
624
|
+
let i = 0;
|
625
|
+
while (++i < byteLength && (mul *= 256)) {
|
626
|
+
val += this[offset + i] * mul;
|
627
|
+
}
|
628
|
+
mul *= 128;
|
629
|
+
if (val >= mul) {
|
630
|
+
val -= Math.pow(2, 8 * byteLength);
|
631
|
+
}
|
632
|
+
return val;
|
633
|
+
}
|
634
|
+
/**
|
635
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a
|
636
|
+
* big-endian, two's complement signed value supporting up to 48 bits of accuracy.
|
637
|
+
*
|
638
|
+
* @param offset Number of bytes to skip before starting to read.
|
639
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
640
|
+
* @param noAssert
|
641
|
+
*/
|
642
|
+
readIntBE(offset, byteLength, noAssert) {
|
643
|
+
offset = offset >>> 0;
|
644
|
+
byteLength = byteLength >>> 0;
|
645
|
+
if (!noAssert) {
|
646
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
647
|
+
}
|
648
|
+
let i = byteLength;
|
649
|
+
let mul = 1;
|
650
|
+
let val = this[offset + --i];
|
651
|
+
while (i > 0 && (mul *= 256)) {
|
652
|
+
val += this[offset + --i] * mul;
|
653
|
+
}
|
654
|
+
mul *= 128;
|
655
|
+
if (val >= mul) {
|
656
|
+
val -= Math.pow(2, 8 * byteLength);
|
657
|
+
}
|
658
|
+
return val;
|
659
|
+
}
|
660
|
+
/**
|
661
|
+
* Reads an unsigned 8-bit integer from `buf` at the specified `offset`.
|
662
|
+
*
|
663
|
+
* @param offset Number of bytes to skip before starting to read.
|
664
|
+
* @param noAssert
|
665
|
+
*/
|
666
|
+
readUInt8(offset, noAssert) {
|
667
|
+
offset = offset >>> 0;
|
668
|
+
if (!noAssert) {
|
669
|
+
Buffer._checkOffset(offset, 1, this.length);
|
670
|
+
}
|
671
|
+
return this[offset];
|
672
|
+
}
|
673
|
+
/**
|
674
|
+
* Reads an unsigned, little-endian 16-bit integer from `buf` at the specified `offset`.
|
675
|
+
*
|
676
|
+
* @param offset Number of bytes to skip before starting to read.
|
677
|
+
* @param noAssert
|
678
|
+
*/
|
679
|
+
readUInt16LE(offset, noAssert) {
|
680
|
+
offset = offset >>> 0;
|
681
|
+
if (!noAssert) {
|
682
|
+
Buffer._checkOffset(offset, 2, this.length);
|
683
|
+
}
|
684
|
+
return this[offset] | this[offset + 1] << 8;
|
685
|
+
}
|
686
|
+
/**
|
687
|
+
* Reads an unsigned, big-endian 16-bit integer from `buf` at the specified `offset`.
|
688
|
+
*
|
689
|
+
* @param offset Number of bytes to skip before starting to read.
|
690
|
+
* @param noAssert
|
691
|
+
*/
|
692
|
+
readUInt16BE(offset, noAssert) {
|
693
|
+
offset = offset >>> 0;
|
694
|
+
if (!noAssert) {
|
695
|
+
Buffer._checkOffset(offset, 2, this.length);
|
696
|
+
}
|
697
|
+
return this[offset] << 8 | this[offset + 1];
|
698
|
+
}
|
699
|
+
/**
|
700
|
+
* Reads an unsigned, little-endian 32-bit integer from `buf` at the specified `offset`.
|
701
|
+
*
|
702
|
+
* @param offset Number of bytes to skip before starting to read.
|
703
|
+
* @param noAssert
|
704
|
+
*/
|
705
|
+
readUInt32LE(offset, noAssert) {
|
706
|
+
offset = offset >>> 0;
|
707
|
+
if (!noAssert) {
|
708
|
+
Buffer._checkOffset(offset, 4, this.length);
|
709
|
+
}
|
710
|
+
return (this[offset] | this[offset + 1] << 8 | this[offset + 2] << 16) + this[offset + 3] * 16777216;
|
711
|
+
}
|
712
|
+
/**
|
713
|
+
* Reads an unsigned, big-endian 32-bit integer from `buf` at the specified `offset`.
|
714
|
+
*
|
715
|
+
* @param offset Number of bytes to skip before starting to read.
|
716
|
+
* @param noAssert
|
717
|
+
*/
|
718
|
+
readUInt32BE(offset, noAssert) {
|
719
|
+
offset = offset >>> 0;
|
720
|
+
if (!noAssert) {
|
721
|
+
Buffer._checkOffset(offset, 4, this.length);
|
722
|
+
}
|
723
|
+
return this[offset] * 16777216 + (this[offset + 1] << 16 | this[offset + 2] << 8 | this[offset + 3]);
|
724
|
+
}
|
725
|
+
/**
|
726
|
+
* Reads a signed 8-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer` are interpreted
|
727
|
+
* as two's complement signed values.
|
728
|
+
*
|
729
|
+
* @param offset Number of bytes to skip before starting to read.
|
730
|
+
* @param noAssert
|
731
|
+
*/
|
732
|
+
readInt8(offset, noAssert) {
|
733
|
+
offset = offset >>> 0;
|
734
|
+
if (!noAssert) {
|
735
|
+
Buffer._checkOffset(offset, 1, this.length);
|
736
|
+
}
|
737
|
+
if (!(this[offset] & 128)) {
|
738
|
+
return this[offset];
|
739
|
+
}
|
740
|
+
return (255 - this[offset] + 1) * -1;
|
741
|
+
}
|
742
|
+
/**
|
743
|
+
* Reads a signed, little-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
744
|
+
* are interpreted as two's complement signed values.
|
745
|
+
*
|
746
|
+
* @param offset Number of bytes to skip before starting to read.
|
747
|
+
* @param noAssert
|
748
|
+
*/
|
749
|
+
readInt16LE(offset, noAssert) {
|
750
|
+
offset = offset >>> 0;
|
751
|
+
if (!noAssert) {
|
752
|
+
Buffer._checkOffset(offset, 2, this.length);
|
753
|
+
}
|
754
|
+
const val = this[offset] | this[offset + 1] << 8;
|
755
|
+
return val & 32768 ? val | 4294901760 : val;
|
756
|
+
}
|
757
|
+
/**
|
758
|
+
* Reads a signed, big-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
759
|
+
* are interpreted as two's complement signed values.
|
760
|
+
*
|
761
|
+
* @param offset Number of bytes to skip before starting to read.
|
762
|
+
* @param noAssert
|
763
|
+
*/
|
764
|
+
readInt16BE(offset, noAssert) {
|
765
|
+
offset = offset >>> 0;
|
766
|
+
if (!noAssert) {
|
767
|
+
Buffer._checkOffset(offset, 2, this.length);
|
768
|
+
}
|
769
|
+
const val = this[offset + 1] | this[offset] << 8;
|
770
|
+
return val & 32768 ? val | 4294901760 : val;
|
771
|
+
}
|
772
|
+
/**
|
773
|
+
* Reads a signed, little-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
774
|
+
* are interpreted as two's complement signed values.
|
775
|
+
*
|
776
|
+
* @param offset Number of bytes to skip before starting to read.
|
777
|
+
* @param noAssert
|
778
|
+
*/
|
779
|
+
readInt32LE(offset, noAssert) {
|
780
|
+
offset = offset >>> 0;
|
781
|
+
if (!noAssert) {
|
782
|
+
Buffer._checkOffset(offset, 4, this.length);
|
783
|
+
}
|
784
|
+
return this[offset] | this[offset + 1] << 8 | this[offset + 2] << 16 | this[offset + 3] << 24;
|
785
|
+
}
|
786
|
+
/**
|
787
|
+
* Reads a signed, big-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
788
|
+
* are interpreted as two's complement signed values.
|
789
|
+
*
|
790
|
+
* @param offset Number of bytes to skip before starting to read.
|
791
|
+
* @param noAssert
|
792
|
+
*/
|
793
|
+
readInt32BE(offset, noAssert) {
|
794
|
+
offset = offset >>> 0;
|
795
|
+
if (!noAssert) {
|
796
|
+
Buffer._checkOffset(offset, 4, this.length);
|
797
|
+
}
|
798
|
+
return this[offset] << 24 | this[offset + 1] << 16 | this[offset + 2] << 8 | this[offset + 3];
|
799
|
+
}
|
800
|
+
/**
|
801
|
+
* Interprets `buf` as an array of unsigned 16-bit integers and swaps the byte order in-place.
|
802
|
+
* Throws a `RangeError` if `buf.length` is not a multiple of 2.
|
803
|
+
*/
|
804
|
+
swap16() {
|
805
|
+
const len = this.length;
|
806
|
+
if (len % 2 !== 0) {
|
807
|
+
throw new RangeError("Buffer size must be a multiple of 16-bits");
|
808
|
+
}
|
809
|
+
for (let i = 0; i < len; i += 2) {
|
810
|
+
this._swap(this, i, i + 1);
|
811
|
+
}
|
812
|
+
return this;
|
813
|
+
}
|
814
|
+
/**
|
815
|
+
* Interprets `buf` as an array of unsigned 32-bit integers and swaps the byte order in-place.
|
816
|
+
* Throws a `RangeError` if `buf.length` is not a multiple of 4.
|
817
|
+
*/
|
818
|
+
swap32() {
|
819
|
+
const len = this.length;
|
820
|
+
if (len % 4 !== 0) {
|
821
|
+
throw new RangeError("Buffer size must be a multiple of 32-bits");
|
822
|
+
}
|
823
|
+
for (let i = 0; i < len; i += 4) {
|
824
|
+
this._swap(this, i, i + 3);
|
825
|
+
this._swap(this, i + 1, i + 2);
|
826
|
+
}
|
827
|
+
return this;
|
828
|
+
}
|
829
|
+
/**
|
830
|
+
* Interprets `buf` as an array of unsigned 64-bit integers and swaps the byte order in-place.
|
831
|
+
* Throws a `RangeError` if `buf.length` is not a multiple of 8.
|
832
|
+
*/
|
833
|
+
swap64() {
|
834
|
+
const len = this.length;
|
835
|
+
if (len % 8 !== 0) {
|
836
|
+
throw new RangeError("Buffer size must be a multiple of 64-bits");
|
837
|
+
}
|
838
|
+
for (let i = 0; i < len; i += 8) {
|
839
|
+
this._swap(this, i, i + 7);
|
840
|
+
this._swap(this, i + 1, i + 6);
|
841
|
+
this._swap(this, i + 2, i + 5);
|
842
|
+
this._swap(this, i + 3, i + 4);
|
843
|
+
}
|
844
|
+
return this;
|
845
|
+
}
|
846
|
+
/**
|
847
|
+
* Swaps two octets.
|
848
|
+
*
|
849
|
+
* @param b
|
850
|
+
* @param n
|
851
|
+
* @param m
|
852
|
+
*/
|
853
|
+
_swap(b, n, m) {
|
854
|
+
const i = b[n];
|
855
|
+
b[n] = b[m];
|
856
|
+
b[m] = i;
|
857
|
+
}
|
858
|
+
/**
|
859
|
+
* Writes `value` to `buf` at the specified `offset`. The `value` must be a valid unsigned 8-bit integer.
|
860
|
+
* Behavior is undefined when `value` is anything other than an unsigned 8-bit integer.
|
861
|
+
*
|
862
|
+
* @param value Number to write.
|
863
|
+
* @param offset Number of bytes to skip before starting to write.
|
864
|
+
* @param noAssert
|
865
|
+
* @returns `offset` plus the number of bytes written.
|
866
|
+
*/
|
867
|
+
writeUInt8(value, offset, noAssert) {
|
868
|
+
value = +value;
|
869
|
+
offset = offset >>> 0;
|
870
|
+
if (!noAssert) {
|
871
|
+
Buffer._checkInt(this, value, offset, 1, 255, 0);
|
872
|
+
}
|
873
|
+
this[offset] = value & 255;
|
874
|
+
return offset + 1;
|
875
|
+
}
|
876
|
+
/**
|
877
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 16-bit
|
878
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 16-bit integer.
|
879
|
+
*
|
880
|
+
* @param value Number to write.
|
881
|
+
* @param offset Number of bytes to skip before starting to write.
|
882
|
+
* @param noAssert
|
883
|
+
* @returns `offset` plus the number of bytes written.
|
884
|
+
*/
|
885
|
+
writeUInt16LE(value, offset, noAssert) {
|
886
|
+
value = +value;
|
887
|
+
offset = offset >>> 0;
|
888
|
+
if (!noAssert) {
|
889
|
+
Buffer._checkInt(this, value, offset, 2, 65535, 0);
|
890
|
+
}
|
891
|
+
this[offset] = value & 255;
|
892
|
+
this[offset + 1] = value >>> 8;
|
893
|
+
return offset + 2;
|
894
|
+
}
|
895
|
+
/**
|
896
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 16-bit
|
897
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 16-bit integer.
|
898
|
+
*
|
899
|
+
* @param value Number to write.
|
900
|
+
* @param offset Number of bytes to skip before starting to write.
|
901
|
+
* @param noAssert
|
902
|
+
* @returns `offset` plus the number of bytes written.
|
903
|
+
*/
|
904
|
+
writeUInt16BE(value, offset, noAssert) {
|
905
|
+
value = +value;
|
906
|
+
offset = offset >>> 0;
|
907
|
+
if (!noAssert) {
|
908
|
+
Buffer._checkInt(this, value, offset, 2, 65535, 0);
|
909
|
+
}
|
910
|
+
this[offset] = value >>> 8;
|
911
|
+
this[offset + 1] = value & 255;
|
912
|
+
return offset + 2;
|
913
|
+
}
|
914
|
+
/**
|
915
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 32-bit
|
916
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 32-bit integer.
|
917
|
+
*
|
918
|
+
* @param value Number to write.
|
919
|
+
* @param offset Number of bytes to skip before starting to write.
|
920
|
+
* @param noAssert
|
921
|
+
* @returns `offset` plus the number of bytes written.
|
922
|
+
*/
|
923
|
+
writeUInt32LE(value, offset, noAssert) {
|
924
|
+
value = +value;
|
925
|
+
offset = offset >>> 0;
|
926
|
+
if (!noAssert) {
|
927
|
+
Buffer._checkInt(this, value, offset, 4, 4294967295, 0);
|
928
|
+
}
|
929
|
+
this[offset + 3] = value >>> 24;
|
930
|
+
this[offset + 2] = value >>> 16;
|
931
|
+
this[offset + 1] = value >>> 8;
|
932
|
+
this[offset] = value & 255;
|
933
|
+
return offset + 4;
|
934
|
+
}
|
935
|
+
/**
|
936
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 32-bit
|
937
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 32-bit integer.
|
938
|
+
*
|
939
|
+
* @param value Number to write.
|
940
|
+
* @param offset Number of bytes to skip before starting to write.
|
941
|
+
* @param noAssert
|
942
|
+
* @returns `offset` plus the number of bytes written.
|
943
|
+
*/
|
944
|
+
writeUInt32BE(value, offset, noAssert) {
|
945
|
+
value = +value;
|
946
|
+
offset = offset >>> 0;
|
947
|
+
if (!noAssert) {
|
948
|
+
Buffer._checkInt(this, value, offset, 4, 4294967295, 0);
|
949
|
+
}
|
950
|
+
this[offset] = value >>> 24;
|
951
|
+
this[offset + 1] = value >>> 16;
|
952
|
+
this[offset + 2] = value >>> 8;
|
953
|
+
this[offset + 3] = value & 255;
|
954
|
+
return offset + 4;
|
955
|
+
}
|
956
|
+
/**
|
957
|
+
* Writes `value` to `buf` at the specified `offset`. The `value` must be a valid signed 8-bit integer.
|
958
|
+
* Behavior is undefined when `value` is anything other than a signed 8-bit integer.
|
959
|
+
*
|
960
|
+
* @param value Number to write.
|
961
|
+
* @param offset Number of bytes to skip before starting to write.
|
962
|
+
* @param noAssert
|
963
|
+
* @returns `offset` plus the number of bytes written.
|
964
|
+
*/
|
965
|
+
writeInt8(value, offset, noAssert) {
|
966
|
+
value = +value;
|
967
|
+
offset = offset >>> 0;
|
968
|
+
if (!noAssert) {
|
969
|
+
Buffer._checkInt(this, value, offset, 1, 127, -128);
|
970
|
+
}
|
971
|
+
if (value < 0) {
|
972
|
+
value = 255 + value + 1;
|
973
|
+
}
|
974
|
+
this[offset] = value & 255;
|
975
|
+
return offset + 1;
|
976
|
+
}
|
977
|
+
/**
|
978
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 16-bit
|
979
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 16-bit integer.
|
980
|
+
*
|
981
|
+
* @param value Number to write.
|
982
|
+
* @param offset Number of bytes to skip before starting to write.
|
983
|
+
* @param noAssert
|
984
|
+
* @returns `offset` plus the number of bytes written.
|
985
|
+
*/
|
986
|
+
writeInt16LE(value, offset, noAssert) {
|
987
|
+
value = +value;
|
988
|
+
offset = offset >>> 0;
|
989
|
+
if (!noAssert) {
|
990
|
+
Buffer._checkInt(this, value, offset, 2, 32767, -32768);
|
991
|
+
}
|
992
|
+
this[offset] = value & 255;
|
993
|
+
this[offset + 1] = value >>> 8;
|
994
|
+
return offset + 2;
|
995
|
+
}
|
996
|
+
/**
|
997
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 16-bit
|
998
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 16-bit integer.
|
999
|
+
*
|
1000
|
+
* @param value Number to write.
|
1001
|
+
* @param offset Number of bytes to skip before starting to write.
|
1002
|
+
* @param noAssert
|
1003
|
+
* @returns `offset` plus the number of bytes written.
|
1004
|
+
*/
|
1005
|
+
writeInt16BE(value, offset, noAssert) {
|
1006
|
+
value = +value;
|
1007
|
+
offset = offset >>> 0;
|
1008
|
+
if (!noAssert) {
|
1009
|
+
Buffer._checkInt(this, value, offset, 2, 32767, -32768);
|
1010
|
+
}
|
1011
|
+
this[offset] = value >>> 8;
|
1012
|
+
this[offset + 1] = value & 255;
|
1013
|
+
return offset + 2;
|
1014
|
+
}
|
1015
|
+
/**
|
1016
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 32-bit
|
1017
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 32-bit integer.
|
1018
|
+
*
|
1019
|
+
* @param value Number to write.
|
1020
|
+
* @param offset Number of bytes to skip before starting to write.
|
1021
|
+
* @param noAssert
|
1022
|
+
* @returns `offset` plus the number of bytes written.
|
1023
|
+
*/
|
1024
|
+
writeInt32LE(value, offset, noAssert) {
|
1025
|
+
value = +value;
|
1026
|
+
offset = offset >>> 0;
|
1027
|
+
if (!noAssert) {
|
1028
|
+
Buffer._checkInt(this, value, offset, 4, 2147483647, -2147483648);
|
1029
|
+
}
|
1030
|
+
this[offset] = value & 255;
|
1031
|
+
this[offset + 1] = value >>> 8;
|
1032
|
+
this[offset + 2] = value >>> 16;
|
1033
|
+
this[offset + 3] = value >>> 24;
|
1034
|
+
return offset + 4;
|
1035
|
+
}
|
1036
|
+
/**
|
1037
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 32-bit
|
1038
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 32-bit integer.
|
1039
|
+
*
|
1040
|
+
* @param value Number to write.
|
1041
|
+
* @param offset Number of bytes to skip before starting to write.
|
1042
|
+
* @param noAssert
|
1043
|
+
* @returns `offset` plus the number of bytes written.
|
1044
|
+
*/
|
1045
|
+
writeInt32BE(value, offset, noAssert) {
|
1046
|
+
value = +value;
|
1047
|
+
offset = offset >>> 0;
|
1048
|
+
if (!noAssert) {
|
1049
|
+
Buffer._checkInt(this, value, offset, 4, 2147483647, -2147483648);
|
1050
|
+
}
|
1051
|
+
if (value < 0) {
|
1052
|
+
value = 4294967295 + value + 1;
|
1053
|
+
}
|
1054
|
+
this[offset] = value >>> 24;
|
1055
|
+
this[offset + 1] = value >>> 16;
|
1056
|
+
this[offset + 2] = value >>> 8;
|
1057
|
+
this[offset + 3] = value & 255;
|
1058
|
+
return offset + 4;
|
1059
|
+
}
|
1060
|
+
/**
 * Fills `buf` with the specified `value`. If `offset` and `end` are not given, the entire `buf` will be
 * filled. The `value` is coerced to a `uint32` value if it is not a string, `Buffer`, or integer. If the resulting
 * integer is greater than `255` (decimal), then `buf` will be filled with `value & 255`.
 *
 * If the final write of a `fill()` operation falls on a multi-byte character, then only the bytes of that
 * character that fit into `buf` are written.
 *
 * If `value` contains invalid characters, it is truncated; if no valid fill data remains, an exception is thrown.
 *
 * Supports the overloads `fill(value)`, `fill(value, encoding)`,
 * `fill(value, offset[, end][, encoding])` — the argument-shuffling at the
 * top normalizes them.
 *
 * @param value Byte, string, or Buffer to repeat into the target range.
 * @param offset Start of the range (default 0); may carry `encoding` in the 2-arg overload.
 * @param end End of the range, exclusive (default `this.length`); may carry `encoding`.
 * @param encoding Character encoding when `value` is a string.
 * @returns `this` for chaining.
 * @throws {TypeError} On a non-string encoding, unknown encoding, or an empty fill pattern.
 * @throws {RangeError} When `offset`/`end` fall outside the buffer.
 */
fill(value, offset, end, encoding) {
  if (typeof value === "string") {
    // Shift arguments left for the fill(value, encoding) overloads.
    if (typeof offset === "string") {
      encoding = offset;
      offset = 0;
      end = this.length;
    } else if (typeof end === "string") {
      encoding = end;
      end = this.length;
    }
    if (encoding !== void 0 && typeof encoding !== "string") {
      throw new TypeError("encoding must be a string");
    }
    if (typeof encoding === "string" && !Buffer.isEncoding(encoding)) {
      throw new TypeError("Unknown encoding: " + encoding);
    }
    // Fast path: a single ASCII character fills byte-by-byte below.
    if (value.length === 1) {
      const code = value.charCodeAt(0);
      if (encoding === "utf8" && code < 128) {
        value = code;
      }
    }
  } else if (typeof value === "number") {
    value = value & 255;
  } else if (typeof value === "boolean") {
    value = Number(value);
  }
  offset ?? (offset = 0);
  end ?? (end = this.length);
  if (offset < 0 || this.length < offset || this.length < end) {
    throw new RangeError("Out of range index");
  }
  // Empty range: nothing to do.
  if (end <= offset) {
    return this;
  }
  offset = offset >>> 0;
  end = end === void 0 ? this.length : end >>> 0;
  // Falsy value (0, '', etc.) falls back to zero-fill.
  value || (value = 0);
  let i;
  if (typeof value === "number") {
    for (i = offset; i < end; ++i) {
      this[i] = value;
    }
  } else {
    // String/Buffer pattern: repeat its bytes cyclically across the range.
    const bytes = Buffer.isBuffer(value) ? value : Buffer.from(value, encoding);
    const len = bytes.length;
    if (len === 0) {
      throw new TypeError('The value "' + value + '" is invalid for argument "value"');
    }
    for (i = 0; i < end - offset; ++i) {
      this[i + offset] = bytes[i % len];
    }
  }
  return this;
}
|
1128
|
+
/**
|
1129
|
+
* Returns the index of the specified value.
|
1130
|
+
*
|
1131
|
+
* If `value` is:
|
1132
|
+
* - a string, `value` is interpreted according to the character encoding in `encoding`.
|
1133
|
+
* - a `Buffer` or `Uint8Array`, `value` will be used in its entirety. To compare a partial Buffer, use `slice()`.
|
1134
|
+
* - a number, `value` will be interpreted as an unsigned 8-bit integer value between `0` and `255`.
|
1135
|
+
*
|
1136
|
+
* Any other types will throw a `TypeError`.
|
1137
|
+
*
|
1138
|
+
* @param value What to search for.
|
1139
|
+
* @param byteOffset Where to begin searching in `buf`. If negative, then calculated from the end.
|
1140
|
+
* @param encoding If `value` is a string, this is the encoding used to search.
|
1141
|
+
* @returns The index of the first occurrence of `value` in `buf`, or `-1` if not found.
|
1142
|
+
*/
|
1143
|
+
indexOf(value, byteOffset, encoding) {
|
1144
|
+
return this._bidirectionalIndexOf(this, value, byteOffset, encoding, true);
|
1145
|
+
}
|
1146
|
+
/**
|
1147
|
+
* Gets the last index of the specified value.
|
1148
|
+
*
|
1149
|
+
* @see indexOf()
|
1150
|
+
* @param value
|
1151
|
+
* @param byteOffset
|
1152
|
+
* @param encoding
|
1153
|
+
*/
|
1154
|
+
lastIndexOf(value, byteOffset, encoding) {
|
1155
|
+
return this._bidirectionalIndexOf(this, value, byteOffset, encoding, false);
|
1156
|
+
}
|
1157
|
+
/**
 * Shared implementation behind `indexOf` / `lastIndexOf`.
 *
 * @param buffer Haystack to search.
 * @param val Needle: string, number (single byte), or Buffer.
 * @param byteOffset Starting position; normalized and clamped below.
 * @param encoding Encoding used when `val` is a string; may arrive in `byteOffset`'s slot.
 * @param dir `true` for a forward search, `false` for backward.
 * @returns Index of the match, or `-1`.
 * @throws {TypeError} When `val` is not a string, number, or Buffer.
 */
_bidirectionalIndexOf(buffer, val, byteOffset, encoding, dir) {
  if (buffer.length === 0) {
    return -1;
  }
  // Overload shuffle: indexOf(val, encoding) puts the encoding where the
  // offset belongs. Also clamp the offset into signed 32-bit range.
  if (typeof byteOffset === "string") {
    encoding = byteOffset;
    byteOffset = 0;
  } else if (typeof byteOffset === "undefined") {
    byteOffset = 0;
  } else if (byteOffset > 2147483647) {
    byteOffset = 2147483647;
  } else if (byteOffset < -2147483648) {
    byteOffset = -2147483648;
  }
  byteOffset = +byteOffset;
  // Self-inequality is the NaN check: default to the search's natural start.
  if (byteOffset !== byteOffset) {
    byteOffset = dir ? 0 : buffer.length - 1;
  }
  // Negative offsets count from the end.
  if (byteOffset < 0) {
    byteOffset = buffer.length + byteOffset;
  }
  if (byteOffset >= buffer.length) {
    if (dir) {
      return -1;
    } else {
      byteOffset = buffer.length - 1;
    }
  } else if (byteOffset < 0) {
    // Still negative after the end-relative adjustment.
    if (dir) {
      byteOffset = 0;
    } else {
      return -1;
    }
  }
  if (typeof val === "string") {
    val = Buffer.from(val, encoding);
  }
  if (Buffer.isBuffer(val)) {
    // An empty needle never matches (mirrors the original semantics).
    if (val.length === 0) {
      return -1;
    }
    return Buffer._arrayIndexOf(buffer, val, byteOffset, encoding, dir);
  } else if (typeof val === "number") {
    val = val & 255;
    // Prefer the native typed-array scan when the runtime provides it.
    if (typeof Uint8Array.prototype.indexOf === "function") {
      if (dir) {
        return Uint8Array.prototype.indexOf.call(buffer, val, byteOffset);
      } else {
        return Uint8Array.prototype.lastIndexOf.call(buffer, val, byteOffset);
      }
    }
    return Buffer._arrayIndexOf(buffer, Buffer.from([val]), byteOffset, encoding, dir);
  }
  throw new TypeError("val must be string, number or Buffer");
}
|
1212
|
+
/**
|
1213
|
+
* Equivalent to `buf.indexOf() !== -1`.
|
1214
|
+
*
|
1215
|
+
* @param value
|
1216
|
+
* @param byteOffset
|
1217
|
+
* @param encoding
|
1218
|
+
*/
|
1219
|
+
includes(value, byteOffset, encoding) {
|
1220
|
+
return this.indexOf(value, byteOffset, encoding) !== -1;
|
1221
|
+
}
|
1222
|
+
/**
|
1223
|
+
* Creates a new buffer from the given parameters.
|
1224
|
+
*
|
1225
|
+
* @param data
|
1226
|
+
* @param encoding
|
1227
|
+
*/
|
1228
|
+
static from(a, b, c) {
|
1229
|
+
return new Buffer(a, b, c);
|
1230
|
+
}
|
1231
|
+
/**
|
1232
|
+
* Returns true if `obj` is a Buffer.
|
1233
|
+
*
|
1234
|
+
* @param obj
|
1235
|
+
*/
|
1236
|
+
static isBuffer(obj) {
|
1237
|
+
return obj != null && obj !== Buffer.prototype && Buffer._isInstance(obj, Buffer);
|
1238
|
+
}
|
1239
|
+
/**
|
1240
|
+
* Returns true if `encoding` is a supported encoding.
|
1241
|
+
*
|
1242
|
+
* @param encoding
|
1243
|
+
*/
|
1244
|
+
static isEncoding(encoding) {
|
1245
|
+
switch (encoding.toLowerCase()) {
|
1246
|
+
case "hex":
|
1247
|
+
case "utf8":
|
1248
|
+
case "ascii":
|
1249
|
+
case "binary":
|
1250
|
+
case "latin1":
|
1251
|
+
case "ucs2":
|
1252
|
+
case "utf16le":
|
1253
|
+
case "base64":
|
1254
|
+
return true;
|
1255
|
+
default:
|
1256
|
+
return false;
|
1257
|
+
}
|
1258
|
+
}
|
1259
|
+
/**
 * Gives the actual byte length of a string for an encoding. This is not the same as `string.length` since that
 * returns the number of characters in the string.
 *
 * Buffers report their own length; ArrayBuffers and views report `byteLength`.
 *
 * @param string The string (or Buffer/ArrayBuffer/TypedArray) to measure.
 * @param encoding The encoding to use for calculation. Default is `utf8`.
 * @returns Byte length; `-1` when the undocumented third `mustMatch` argument
 *          is `true` and the encoding is unrecognized.
 * @throws {TypeError} When `string` is none of string, Buffer, or ArrayBuffer.
 */
static byteLength(string, encoding) {
  if (Buffer.isBuffer(string)) {
    return string.length;
  }
  if (typeof string !== "string" && (ArrayBuffer.isView(string) || Buffer._isInstance(string, ArrayBuffer))) {
    return string.byteLength;
  }
  if (typeof string !== "string") {
    throw new TypeError(
      'The "string" argument must be one of type string, Buffer, or ArrayBuffer. Received type ' + typeof string
    );
  }
  const len = string.length;
  // Hidden third argument: when true, an unknown encoding yields -1 instead
  // of falling back to utf8.
  const mustMatch = arguments.length > 2 && arguments[2] === true;
  if (!mustMatch && len === 0) {
    return 0;
  }
  switch (encoding?.toLowerCase()) {
    case "ascii":
    case "latin1":
    case "binary":
      // One byte per UTF-16 code unit.
      return len;
    case "utf8":
      return Buffer._utf8ToBytes(string).length;
    case "hex":
      // Two hex digits per byte.
      return len >>> 1;
    case "ucs2":
    case "utf16le":
      return len * 2;
    case "base64":
      return Buffer._base64ToBytes(string).length;
    default:
      return mustMatch ? -1 : Buffer._utf8ToBytes(string).length;
  }
}
|
1301
|
+
/**
 * Returns a Buffer which is the result of concatenating all the buffers in the list together.
 *
 * - If the list has no items, or if the `totalLength` is 0, then it returns a zero-length buffer.
 * - If the list has exactly one item, then the first item is returned.
 * - If the list has more than one item, then a new buffer is created.
 *
 * It is faster to provide the `totalLength` if it is known. However, it will be calculated if not provided at
 * a small computational expense.
 *
 * @param list An array of Buffer (or Uint8Array) objects to concatenate.
 * @param totalLength Total length of the buffers when concatenated; computed when omitted.
 * @throws {TypeError} When `list` is not an array, or an element is neither Buffer nor Uint8Array.
 */
static concat(list, totalLength) {
  if (!Array.isArray(list)) {
    throw new TypeError('"list" argument must be an Array of Buffers');
  }
  if (list.length === 0) {
    return Buffer.alloc(0);
  }
  let i;
  if (totalLength === void 0) {
    totalLength = 0;
    for (i = 0; i < list.length; ++i) {
      totalLength += list[i].length;
    }
  }
  const buffer = Buffer.allocUnsafe(totalLength);
  let pos = 0;
  for (i = 0; i < list.length; ++i) {
    let buf = list[i];
    if (Buffer._isInstance(buf, Uint8Array)) {
      if (pos + buf.length > buffer.length) {
        // Source overruns the destination: use Buffer.copy, which clamps,
        // instead of TypedArray.set, which would throw.
        if (!Buffer.isBuffer(buf)) {
          buf = Buffer.from(buf);
        }
        buf.copy(buffer, pos);
      } else {
        Uint8Array.prototype.set.call(buffer, buf, pos);
      }
    } else if (!Buffer.isBuffer(buf)) {
      throw new TypeError('"list" argument must be an Array of Buffers');
    } else {
      buf.copy(buffer, pos);
    }
    pos += buf.length;
  }
  return buffer;
}
|
1350
|
+
/**
|
1351
|
+
* The same as `buf1.compare(buf2)`.
|
1352
|
+
*/
|
1353
|
+
static compare(buf1, buf2) {
|
1354
|
+
if (Buffer._isInstance(buf1, Uint8Array)) {
|
1355
|
+
buf1 = Buffer.from(buf1, buf1.byteOffset, buf1.byteLength);
|
1356
|
+
}
|
1357
|
+
if (Buffer._isInstance(buf2, Uint8Array)) {
|
1358
|
+
buf2 = Buffer.from(buf2, buf2.byteOffset, buf2.byteLength);
|
1359
|
+
}
|
1360
|
+
if (!Buffer.isBuffer(buf1) || !Buffer.isBuffer(buf2)) {
|
1361
|
+
throw new TypeError('The "buf1", "buf2" arguments must be one of type Buffer or Uint8Array');
|
1362
|
+
}
|
1363
|
+
if (buf1 === buf2) {
|
1364
|
+
return 0;
|
1365
|
+
}
|
1366
|
+
let x = buf1.length;
|
1367
|
+
let y = buf2.length;
|
1368
|
+
for (let i = 0, len = Math.min(x, y); i < len; ++i) {
|
1369
|
+
if (buf1[i] !== buf2[i]) {
|
1370
|
+
x = buf1[i];
|
1371
|
+
y = buf2[i];
|
1372
|
+
break;
|
1373
|
+
}
|
1374
|
+
}
|
1375
|
+
if (x < y) {
|
1376
|
+
return -1;
|
1377
|
+
}
|
1378
|
+
if (y < x) {
|
1379
|
+
return 1;
|
1380
|
+
}
|
1381
|
+
return 0;
|
1382
|
+
}
|
1383
|
+
/**
|
1384
|
+
* Allocates a new buffer of `size` octets.
|
1385
|
+
*
|
1386
|
+
* @param size The number of octets to allocate.
|
1387
|
+
* @param fill If specified, the buffer will be initialized by calling `buf.fill(fill)`, or with zeroes otherwise.
|
1388
|
+
* @param encoding The encoding used for the call to `buf.fill()` while initializing.
|
1389
|
+
*/
|
1390
|
+
static alloc(size, fill, encoding) {
|
1391
|
+
if (typeof size !== "number") {
|
1392
|
+
throw new TypeError('"size" argument must be of type number');
|
1393
|
+
} else if (size < 0) {
|
1394
|
+
throw new RangeError('The value "' + size + '" is invalid for option "size"');
|
1395
|
+
}
|
1396
|
+
if (size <= 0) {
|
1397
|
+
return new Buffer(size);
|
1398
|
+
}
|
1399
|
+
if (fill !== void 0) {
|
1400
|
+
return typeof encoding === "string" ? new Buffer(size).fill(fill, 0, size, encoding) : new Buffer(size).fill(fill);
|
1401
|
+
}
|
1402
|
+
return new Buffer(size);
|
1403
|
+
}
|
1404
|
+
/**
|
1405
|
+
* Allocates a new buffer of `size` octets without initializing memory. The contents of the buffer are unknown.
|
1406
|
+
*
|
1407
|
+
* @param size
|
1408
|
+
*/
|
1409
|
+
static allocUnsafe(size) {
|
1410
|
+
if (typeof size !== "number") {
|
1411
|
+
throw new TypeError('"size" argument must be of type number');
|
1412
|
+
} else if (size < 0) {
|
1413
|
+
throw new RangeError('The value "' + size + '" is invalid for option "size"');
|
1414
|
+
}
|
1415
|
+
return new Buffer(size < 0 ? 0 : Buffer._checked(size) | 0);
|
1416
|
+
}
|
1417
|
+
/**
|
1418
|
+
* Returns true if the given `obj` is an instance of `type`.
|
1419
|
+
*
|
1420
|
+
* @param obj
|
1421
|
+
* @param type
|
1422
|
+
*/
|
1423
|
+
static _isInstance(obj, type) {
|
1424
|
+
return obj instanceof type || obj != null && obj.constructor != null && obj.constructor.name != null && obj.constructor.name === type.name;
|
1425
|
+
}
|
1426
|
+
static _checked(length) {
|
1427
|
+
if (length >= K_MAX_LENGTH) {
|
1428
|
+
throw new RangeError(
|
1429
|
+
"Attempt to allocate Buffer larger than maximum size: 0x" + K_MAX_LENGTH.toString(16) + " bytes"
|
1430
|
+
);
|
1431
|
+
}
|
1432
|
+
return length | 0;
|
1433
|
+
}
|
1434
|
+
static _blitBuffer(src, dst, offset, length) {
|
1435
|
+
let i;
|
1436
|
+
for (i = 0; i < length; ++i) {
|
1437
|
+
if (i + offset >= dst.length || i >= src.length) {
|
1438
|
+
break;
|
1439
|
+
}
|
1440
|
+
dst[i + offset] = src[i];
|
1441
|
+
}
|
1442
|
+
return i;
|
1443
|
+
}
|
1444
|
+
static _utf8Write(buf, string, offset, length) {
|
1445
|
+
return Buffer._blitBuffer(Buffer._utf8ToBytes(string, buf.length - offset), buf, offset, length);
|
1446
|
+
}
|
1447
|
+
static _asciiWrite(buf, string, offset, length) {
|
1448
|
+
return Buffer._blitBuffer(Buffer._asciiToBytes(string), buf, offset, length);
|
1449
|
+
}
|
1450
|
+
static _base64Write(buf, string, offset, length) {
|
1451
|
+
return Buffer._blitBuffer(Buffer._base64ToBytes(string), buf, offset, length);
|
1452
|
+
}
|
1453
|
+
static _ucs2Write(buf, string, offset, length) {
|
1454
|
+
return Buffer._blitBuffer(Buffer._utf16leToBytes(string, buf.length - offset), buf, offset, length);
|
1455
|
+
}
|
1456
|
+
/**
 * Decodes hex digit pairs from `string` into `buf` starting at `offset`.
 *
 * @param buf Destination byte container.
 * @param string Hex string; each two characters become one byte.
 * @param offset Start position in `buf` (coerced; falsy becomes 0).
 * @param length Maximum number of bytes to write; clamped to the remaining
 *        room in `buf` and to the number of complete digit pairs.
 * @returns The number of bytes written; stops early at the first non-hex pair.
 */
static _hexWrite(buf, string, offset, length) {
  offset = Number(offset) || 0;
  const remaining = buf.length - offset;
  if (!length) {
    length = remaining;
  } else {
    length = Number(length);
    if (length > remaining) {
      length = remaining;
    }
  }
  const strLen = string.length;
  // Never write more bytes than the string has digit pairs.
  if (length > strLen / 2) {
    length = strLen / 2;
  }
  let i;
  for (i = 0; i < length; ++i) {
    const parsed = parseInt(string.substr(i * 2, 2), 16);
    // Self-inequality is the NaN check: bail out on the first invalid pair.
    if (parsed !== parsed) {
      return i;
    }
    buf[offset + i] = parsed;
  }
  return i;
}
|
1481
|
+
/**
 * Encodes a JS string into an array of UTF-8 bytes, writing at most `units`
 * bytes. Lone/invalid surrogates are replaced with U+FFFD (EF BF BD).
 *
 * @param string Source string (iterated per UTF-16 code unit).
 * @param units Byte budget; defaults to Infinity.
 * @returns Array of byte values (0–255).
 * @throws {Error} If a computed code point exceeds the Unicode range.
 */
static _utf8ToBytes(string, units) {
  units = units || Infinity;
  const length = string.length;
  const bytes = [];
  let codePoint;
  // Pending high surrogate awaiting its low half.
  let leadSurrogate = null;
  for (let i = 0; i < length; ++i) {
    codePoint = string.charCodeAt(i);
    // Surrogate range: D800–DFFF.
    if (codePoint > 55295 && codePoint < 57344) {
      if (!leadSurrogate) {
        if (codePoint > 56319) {
          // Lone low surrogate: emit the replacement character.
          if ((units -= 3) > -1) {
            bytes.push(239, 191, 189);
          }
          continue;
        } else if (i + 1 === length) {
          // High surrogate at end of string: also replaced.
          if ((units -= 3) > -1) {
            bytes.push(239, 191, 189);
          }
          continue;
        }
        leadSurrogate = codePoint;
        continue;
      }
      if (codePoint < 56320) {
        // Two high surrogates in a row: replace the first, keep the second pending.
        if ((units -= 3) > -1) {
          bytes.push(239, 191, 189);
        }
        leadSurrogate = codePoint;
        continue;
      }
      // Valid pair: combine into a supplementary-plane code point.
      codePoint = (leadSurrogate - 55296 << 10 | codePoint - 56320) + 65536;
    } else if (leadSurrogate) {
      // Pending high surrogate followed by a non-surrogate: replace it.
      if ((units -= 3) > -1) {
        bytes.push(239, 191, 189);
      }
    }
    leadSurrogate = null;
    // Emit 1–4 bytes depending on the code point's magnitude; stop when the
    // byte budget would be exceeded.
    if (codePoint < 128) {
      if ((units -= 1) < 0) {
        break;
      }
      bytes.push(codePoint);
    } else if (codePoint < 2048) {
      if ((units -= 2) < 0) {
        break;
      }
      bytes.push(codePoint >> 6 | 192, codePoint & 63 | 128);
    } else if (codePoint < 65536) {
      if ((units -= 3) < 0) {
        break;
      }
      bytes.push(codePoint >> 12 | 224, codePoint >> 6 & 63 | 128, codePoint & 63 | 128);
    } else if (codePoint < 1114112) {
      if ((units -= 4) < 0) {
        break;
      }
      bytes.push(
        codePoint >> 18 | 240,
        codePoint >> 12 & 63 | 128,
        codePoint >> 6 & 63 | 128,
        codePoint & 63 | 128
      );
    } else {
      throw new Error("Invalid code point");
    }
  }
  return bytes;
}
|
1550
|
+
static _base64ToBytes(str) {
|
1551
|
+
return toByteArray(base64clean(str));
|
1552
|
+
}
|
1553
|
+
static _asciiToBytes(str) {
|
1554
|
+
const byteArray = [];
|
1555
|
+
for (let i = 0; i < str.length; ++i) {
|
1556
|
+
byteArray.push(str.charCodeAt(i) & 255);
|
1557
|
+
}
|
1558
|
+
return byteArray;
|
1559
|
+
}
|
1560
|
+
static _utf16leToBytes(str, units) {
|
1561
|
+
let c, hi, lo;
|
1562
|
+
const byteArray = [];
|
1563
|
+
for (let i = 0; i < str.length; ++i) {
|
1564
|
+
if ((units -= 2) < 0) break;
|
1565
|
+
c = str.charCodeAt(i);
|
1566
|
+
hi = c >> 8;
|
1567
|
+
lo = c % 256;
|
1568
|
+
byteArray.push(lo);
|
1569
|
+
byteArray.push(hi);
|
1570
|
+
}
|
1571
|
+
return byteArray;
|
1572
|
+
}
|
1573
|
+
static _hexSlice(buf, start, end) {
|
1574
|
+
const len = buf.length;
|
1575
|
+
if (!start || start < 0) {
|
1576
|
+
start = 0;
|
1577
|
+
}
|
1578
|
+
if (!end || end < 0 || end > len) {
|
1579
|
+
end = len;
|
1580
|
+
}
|
1581
|
+
let out = "";
|
1582
|
+
for (let i = start; i < end; ++i) {
|
1583
|
+
out += hexSliceLookupTable[buf[i]];
|
1584
|
+
}
|
1585
|
+
return out;
|
1586
|
+
}
|
1587
|
+
static _base64Slice(buf, start, end) {
|
1588
|
+
if (start === 0 && end === buf.length) {
|
1589
|
+
return fromByteArray(buf);
|
1590
|
+
} else {
|
1591
|
+
return fromByteArray(buf.slice(start, end));
|
1592
|
+
}
|
1593
|
+
}
|
1594
|
+
/**
 * Decodes bytes of `buf` in the range [start, end) as UTF-8 into a string.
 * Invalid sequences decode to U+FFFD; supplementary-plane code points are
 * split back into surrogate pairs for the JS string.
 *
 * @param buf Source bytes.
 * @param start Start index.
 * @param end End index (exclusive); clamped to `buf.length`.
 */
static _utf8Slice(buf, start, end) {
  end = Math.min(buf.length, end);
  // Accumulates UTF-16 code units, flushed in _decodeCodePointsArray.
  const res = [];
  let i = start;
  while (i < end) {
    const firstByte = buf[i];
    let codePoint = null;
    // Expected sequence length from the lead byte: F0.. -> 4, E0.. -> 3,
    // C0.. -> 2, otherwise 1.
    let bytesPerSequence = firstByte > 239 ? 4 : firstByte > 223 ? 3 : firstByte > 191 ? 2 : 1;
    if (i + bytesPerSequence <= end) {
      let secondByte, thirdByte, fourthByte, tempCodePoint;
      switch (bytesPerSequence) {
        case 1:
          if (firstByte < 128) {
            codePoint = firstByte;
          }
          break;
        case 2:
          secondByte = buf[i + 1];
          // Continuation bytes must match 10xxxxxx.
          if ((secondByte & 192) === 128) {
            tempCodePoint = (firstByte & 31) << 6 | secondByte & 63;
            // Reject overlong encodings of ASCII.
            if (tempCodePoint > 127) {
              codePoint = tempCodePoint;
            }
          }
          break;
        case 3:
          secondByte = buf[i + 1];
          thirdByte = buf[i + 2];
          if ((secondByte & 192) === 128 && (thirdByte & 192) === 128) {
            tempCodePoint = (firstByte & 15) << 12 | (secondByte & 63) << 6 | thirdByte & 63;
            // Reject overlong encodings and the surrogate range D800–DFFF.
            if (tempCodePoint > 2047 && (tempCodePoint < 55296 || tempCodePoint > 57343)) {
              codePoint = tempCodePoint;
            }
          }
          break;
        case 4:
          secondByte = buf[i + 1];
          thirdByte = buf[i + 2];
          fourthByte = buf[i + 3];
          if ((secondByte & 192) === 128 && (thirdByte & 192) === 128 && (fourthByte & 192) === 128) {
            tempCodePoint = (firstByte & 15) << 18 | (secondByte & 63) << 12 | (thirdByte & 63) << 6 | fourthByte & 63;
            // Accept only supplementary-plane code points within Unicode.
            if (tempCodePoint > 65535 && tempCodePoint < 1114112) {
              codePoint = tempCodePoint;
            }
          }
      }
    }
    if (codePoint === null) {
      // Invalid or truncated sequence: emit U+FFFD and advance one byte.
      codePoint = 65533;
      bytesPerSequence = 1;
    } else if (codePoint > 65535) {
      // Split supplementary code points into a UTF-16 surrogate pair.
      codePoint -= 65536;
      res.push(codePoint >>> 10 & 1023 | 55296);
      codePoint = 56320 | codePoint & 1023;
    }
    res.push(codePoint);
    i += bytesPerSequence;
  }
  return Buffer._decodeCodePointsArray(res);
}
|
1654
|
+
static _decodeCodePointsArray(codePoints) {
|
1655
|
+
const len = codePoints.length;
|
1656
|
+
if (len <= MAX_ARGUMENTS_LENGTH) {
|
1657
|
+
return String.fromCharCode.apply(String, codePoints);
|
1658
|
+
}
|
1659
|
+
let res = "";
|
1660
|
+
let i = 0;
|
1661
|
+
while (i < len) {
|
1662
|
+
res += String.fromCharCode.apply(String, codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH));
|
1663
|
+
}
|
1664
|
+
return res;
|
1665
|
+
}
|
1666
|
+
static _asciiSlice(buf, start, end) {
|
1667
|
+
let ret = "";
|
1668
|
+
end = Math.min(buf.length, end);
|
1669
|
+
for (let i = start; i < end; ++i) {
|
1670
|
+
ret += String.fromCharCode(buf[i] & 127);
|
1671
|
+
}
|
1672
|
+
return ret;
|
1673
|
+
}
|
1674
|
+
static _latin1Slice(buf, start, end) {
|
1675
|
+
let ret = "";
|
1676
|
+
end = Math.min(buf.length, end);
|
1677
|
+
for (let i = start; i < end; ++i) {
|
1678
|
+
ret += String.fromCharCode(buf[i]);
|
1679
|
+
}
|
1680
|
+
return ret;
|
1681
|
+
}
|
1682
|
+
static _utf16leSlice(buf, start, end) {
|
1683
|
+
const bytes = buf.slice(start, end);
|
1684
|
+
let res = "";
|
1685
|
+
for (let i = 0; i < bytes.length - 1; i += 2) {
|
1686
|
+
res += String.fromCharCode(bytes[i] + bytes[i + 1] * 256);
|
1687
|
+
}
|
1688
|
+
return res;
|
1689
|
+
}
|
1690
|
+
static _arrayIndexOf(arr, val, byteOffset, encoding, dir) {
|
1691
|
+
let indexSize = 1;
|
1692
|
+
let arrLength = arr.length;
|
1693
|
+
let valLength = val.length;
|
1694
|
+
if (encoding !== void 0) {
|
1695
|
+
encoding = Buffer._getEncoding(encoding);
|
1696
|
+
if (encoding === "ucs2" || encoding === "utf16le") {
|
1697
|
+
if (arr.length < 2 || val.length < 2) {
|
1698
|
+
return -1;
|
1699
|
+
}
|
1700
|
+
indexSize = 2;
|
1701
|
+
arrLength /= 2;
|
1702
|
+
valLength /= 2;
|
1703
|
+
byteOffset /= 2;
|
1704
|
+
}
|
1705
|
+
}
|
1706
|
+
function read(buf, i2) {
|
1707
|
+
if (indexSize === 1) {
|
1708
|
+
return buf[i2];
|
1709
|
+
} else {
|
1710
|
+
return buf.readUInt16BE(i2 * indexSize);
|
1711
|
+
}
|
1712
|
+
}
|
1713
|
+
let i;
|
1714
|
+
if (dir) {
|
1715
|
+
let foundIndex = -1;
|
1716
|
+
for (i = byteOffset; i < arrLength; i++) {
|
1717
|
+
if (read(arr, i) === read(val, foundIndex === -1 ? 0 : i - foundIndex)) {
|
1718
|
+
if (foundIndex === -1) foundIndex = i;
|
1719
|
+
if (i - foundIndex + 1 === valLength) return foundIndex * indexSize;
|
1720
|
+
} else {
|
1721
|
+
if (foundIndex !== -1) i -= i - foundIndex;
|
1722
|
+
foundIndex = -1;
|
1723
|
+
}
|
1724
|
+
}
|
1725
|
+
} else {
|
1726
|
+
if (byteOffset + valLength > arrLength) {
|
1727
|
+
byteOffset = arrLength - valLength;
|
1728
|
+
}
|
1729
|
+
for (i = byteOffset; i >= 0; i--) {
|
1730
|
+
let found = true;
|
1731
|
+
for (let j = 0; j < valLength; j++) {
|
1732
|
+
if (read(arr, i + j) !== read(val, j)) {
|
1733
|
+
found = false;
|
1734
|
+
break;
|
1735
|
+
}
|
1736
|
+
}
|
1737
|
+
if (found) {
|
1738
|
+
return i;
|
1739
|
+
}
|
1740
|
+
}
|
1741
|
+
}
|
1742
|
+
return -1;
|
1743
|
+
}
|
1744
|
+
static _checkOffset(offset, ext, length) {
|
1745
|
+
if (offset % 1 !== 0 || offset < 0) throw new RangeError("offset is not uint");
|
1746
|
+
if (offset + ext > length) throw new RangeError("Trying to access beyond buffer length");
|
1747
|
+
}
|
1748
|
+
static _checkInt(buf, value, offset, ext, max, min) {
|
1749
|
+
if (!Buffer.isBuffer(buf)) throw new TypeError('"buffer" argument must be a Buffer instance');
|
1750
|
+
if (value > max || value < min) throw new RangeError('"value" argument is out of bounds');
|
1751
|
+
if (offset + ext > buf.length) throw new RangeError("Index out of range");
|
1752
|
+
}
|
1753
|
+
static _getEncoding(encoding) {
|
1754
|
+
let toLowerCase = false;
|
1755
|
+
let originalEncoding = "";
|
1756
|
+
for (; ; ) {
|
1757
|
+
switch (encoding) {
|
1758
|
+
case "hex":
|
1759
|
+
return "hex";
|
1760
|
+
case "utf8":
|
1761
|
+
return "utf8";
|
1762
|
+
case "ascii":
|
1763
|
+
return "ascii";
|
1764
|
+
case "binary":
|
1765
|
+
return "binary";
|
1766
|
+
case "latin1":
|
1767
|
+
return "latin1";
|
1768
|
+
case "ucs2":
|
1769
|
+
return "ucs2";
|
1770
|
+
case "utf16le":
|
1771
|
+
return "utf16le";
|
1772
|
+
case "base64":
|
1773
|
+
return "base64";
|
1774
|
+
default: {
|
1775
|
+
if (toLowerCase) {
|
1776
|
+
throw new TypeError("Unknown or unsupported encoding: " + originalEncoding);
|
1777
|
+
}
|
1778
|
+
toLowerCase = true;
|
1779
|
+
originalEncoding = encoding;
|
1780
|
+
encoding = encoding.toLowerCase();
|
1781
|
+
}
|
1782
|
+
}
|
1783
|
+
}
|
1784
|
+
}
|
1785
|
+
}
|
1786
|
+
const hexSliceLookupTable = function() {
|
1787
|
+
const alphabet = "0123456789abcdef";
|
1788
|
+
const table = new Array(256);
|
1789
|
+
for (let i = 0; i < 16; ++i) {
|
1790
|
+
const i16 = i * 16;
|
1791
|
+
for (let j = 0; j < 16; ++j) {
|
1792
|
+
table[i16 + j] = alphabet[i] + alphabet[j];
|
1793
|
+
}
|
1794
|
+
}
|
1795
|
+
return table;
|
1796
|
+
}();
|
1797
|
+
const INVALID_BASE64_RE = /[^+/0-9A-Za-z-_]/g;
|
1798
|
+
function base64clean(str) {
|
1799
|
+
str = str.split("=")[0];
|
1800
|
+
str = str.trim().replace(INVALID_BASE64_RE, "");
|
1801
|
+
if (str.length < 2) return "";
|
1802
|
+
while (str.length % 4 !== 0) {
|
1803
|
+
str = str + "=";
|
1804
|
+
}
|
1805
|
+
return str;
|
1806
|
+
}
|
1807
|
+
|
25
1808
|
function notEmpty(value) {
|
26
1809
|
return value !== null && value !== void 0;
|
27
1810
|
}
|
@@ -116,155 +1899,15 @@ function promiseMap(inputValues, mapper) {
|
|
116
1899
|
return inputValues.reduce(reducer, Promise.resolve([]));
|
117
1900
|
}
|
118
1901
|
|
119
|
-
|
120
|
-
|
121
|
-
if (isDefined(process) && isDefined(process.env)) {
|
122
|
-
return {
|
123
|
-
apiKey: process.env.XATA_API_KEY ?? getGlobalApiKey(),
|
124
|
-
databaseURL: process.env.XATA_DATABASE_URL ?? getGlobalDatabaseURL(),
|
125
|
-
branch: process.env.XATA_BRANCH ?? getGlobalBranch(),
|
126
|
-
deployPreview: process.env.XATA_PREVIEW,
|
127
|
-
deployPreviewBranch: process.env.XATA_PREVIEW_BRANCH,
|
128
|
-
vercelGitCommitRef: process.env.VERCEL_GIT_COMMIT_REF,
|
129
|
-
vercelGitRepoOwner: process.env.VERCEL_GIT_REPO_OWNER
|
130
|
-
};
|
131
|
-
}
|
132
|
-
} catch (err) {
|
133
|
-
}
|
134
|
-
try {
|
135
|
-
if (isObject(Deno) && isObject(Deno.env)) {
|
136
|
-
return {
|
137
|
-
apiKey: Deno.env.get("XATA_API_KEY") ?? getGlobalApiKey(),
|
138
|
-
databaseURL: Deno.env.get("XATA_DATABASE_URL") ?? getGlobalDatabaseURL(),
|
139
|
-
branch: Deno.env.get("XATA_BRANCH") ?? getGlobalBranch(),
|
140
|
-
deployPreview: Deno.env.get("XATA_PREVIEW"),
|
141
|
-
deployPreviewBranch: Deno.env.get("XATA_PREVIEW_BRANCH"),
|
142
|
-
vercelGitCommitRef: Deno.env.get("VERCEL_GIT_COMMIT_REF"),
|
143
|
-
vercelGitRepoOwner: Deno.env.get("VERCEL_GIT_REPO_OWNER")
|
144
|
-
};
|
145
|
-
}
|
146
|
-
} catch (err) {
|
147
|
-
}
|
148
|
-
return {
|
149
|
-
apiKey: getGlobalApiKey(),
|
150
|
-
databaseURL: getGlobalDatabaseURL(),
|
151
|
-
branch: getGlobalBranch(),
|
152
|
-
deployPreview: void 0,
|
153
|
-
deployPreviewBranch: void 0,
|
154
|
-
vercelGitCommitRef: void 0,
|
155
|
-
vercelGitRepoOwner: void 0
|
156
|
-
};
|
157
|
-
}
|
158
|
-
function getEnableBrowserVariable() {
|
159
|
-
try {
|
160
|
-
if (isObject(process) && isObject(process.env) && process.env.XATA_ENABLE_BROWSER !== void 0) {
|
161
|
-
return process.env.XATA_ENABLE_BROWSER === "true";
|
162
|
-
}
|
163
|
-
} catch (err) {
|
164
|
-
}
|
165
|
-
try {
|
166
|
-
if (isObject(Deno) && isObject(Deno.env) && Deno.env.get("XATA_ENABLE_BROWSER") !== void 0) {
|
167
|
-
return Deno.env.get("XATA_ENABLE_BROWSER") === "true";
|
168
|
-
}
|
169
|
-
} catch (err) {
|
170
|
-
}
|
171
|
-
try {
|
172
|
-
return XATA_ENABLE_BROWSER === true || XATA_ENABLE_BROWSER === "true";
|
173
|
-
} catch (err) {
|
174
|
-
return void 0;
|
175
|
-
}
|
176
|
-
}
|
177
|
-
function getGlobalApiKey() {
|
178
|
-
try {
|
179
|
-
return XATA_API_KEY;
|
180
|
-
} catch (err) {
|
181
|
-
return void 0;
|
182
|
-
}
|
183
|
-
}
|
184
|
-
function getGlobalDatabaseURL() {
|
185
|
-
try {
|
186
|
-
return XATA_DATABASE_URL;
|
187
|
-
} catch (err) {
|
188
|
-
return void 0;
|
189
|
-
}
|
190
|
-
}
|
191
|
-
function getGlobalBranch() {
|
192
|
-
try {
|
193
|
-
return XATA_BRANCH;
|
194
|
-
} catch (err) {
|
195
|
-
return void 0;
|
196
|
-
}
|
197
|
-
}
|
198
|
-
function getDatabaseURL() {
|
199
|
-
try {
|
200
|
-
const { databaseURL } = getEnvironment();
|
201
|
-
return databaseURL;
|
202
|
-
} catch (err) {
|
203
|
-
return void 0;
|
204
|
-
}
|
205
|
-
}
|
206
|
-
function getAPIKey() {
|
207
|
-
try {
|
208
|
-
const { apiKey } = getEnvironment();
|
209
|
-
return apiKey;
|
210
|
-
} catch (err) {
|
211
|
-
return void 0;
|
212
|
-
}
|
213
|
-
}
|
214
|
-
function getBranch() {
|
215
|
-
try {
|
216
|
-
const { branch } = getEnvironment();
|
217
|
-
return branch;
|
218
|
-
} catch (err) {
|
219
|
-
return void 0;
|
220
|
-
}
|
221
|
-
}
|
222
|
-
function buildPreviewBranchName({ org, branch }) {
|
223
|
-
return `preview-${org}-${branch}`;
|
224
|
-
}
|
225
|
-
function getPreviewBranch() {
|
226
|
-
try {
|
227
|
-
const { deployPreview, deployPreviewBranch, vercelGitCommitRef, vercelGitRepoOwner } = getEnvironment();
|
228
|
-
if (deployPreviewBranch)
|
229
|
-
return deployPreviewBranch;
|
230
|
-
switch (deployPreview) {
|
231
|
-
case "vercel": {
|
232
|
-
if (!vercelGitCommitRef || !vercelGitRepoOwner) {
|
233
|
-
console.warn("XATA_PREVIEW=vercel but VERCEL_GIT_COMMIT_REF or VERCEL_GIT_REPO_OWNER is not valid");
|
234
|
-
return void 0;
|
235
|
-
}
|
236
|
-
return buildPreviewBranchName({ org: vercelGitRepoOwner, branch: vercelGitCommitRef });
|
237
|
-
}
|
238
|
-
}
|
239
|
-
return void 0;
|
240
|
-
} catch (err) {
|
241
|
-
return void 0;
|
242
|
-
}
|
243
|
-
}
|
244
|
-
|
245
|
-
var __accessCheck$6 = (obj, member, msg) => {
|
246
|
-
if (!member.has(obj))
|
247
|
-
throw TypeError("Cannot " + msg);
|
248
|
-
};
|
249
|
-
var __privateGet$5 = (obj, member, getter) => {
|
250
|
-
__accessCheck$6(obj, member, "read from private field");
|
251
|
-
return getter ? getter.call(obj) : member.get(obj);
|
252
|
-
};
|
253
|
-
var __privateAdd$6 = (obj, member, value) => {
|
254
|
-
if (member.has(obj))
|
255
|
-
throw TypeError("Cannot add the same private member more than once");
|
256
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
257
|
-
};
|
258
|
-
var __privateSet$4 = (obj, member, value, setter) => {
|
259
|
-
__accessCheck$6(obj, member, "write to private field");
|
260
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
261
|
-
return value;
|
1902
|
+
var __typeError$6 = (msg) => {
|
1903
|
+
throw TypeError(msg);
|
262
1904
|
};
|
263
|
-
var
|
264
|
-
|
265
|
-
|
266
|
-
|
267
|
-
var
|
1905
|
+
var __accessCheck$6 = (obj, member, msg) => member.has(obj) || __typeError$6("Cannot " + msg);
|
1906
|
+
var __privateGet$5 = (obj, member, getter) => (__accessCheck$6(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
1907
|
+
var __privateAdd$6 = (obj, member, value) => member.has(obj) ? __typeError$6("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
1908
|
+
var __privateSet$4 = (obj, member, value, setter) => (__accessCheck$6(obj, member, "write to private field"), member.set(obj, value), value);
|
1909
|
+
var __privateMethod$4 = (obj, member, method) => (__accessCheck$6(obj, member, "access private method"), method);
|
1910
|
+
var _fetch, _queue, _concurrency, _ApiRequestPool_instances, enqueue_fn;
|
268
1911
|
const REQUEST_TIMEOUT = 5 * 60 * 1e3;
|
269
1912
|
function getFetchImplementation(userFetch) {
|
270
1913
|
const globalFetch = typeof fetch !== "undefined" ? fetch : void 0;
|
@@ -277,10 +1920,10 @@ function getFetchImplementation(userFetch) {
|
|
277
1920
|
}
|
278
1921
|
class ApiRequestPool {
|
279
1922
|
constructor(concurrency = 10) {
|
280
|
-
__privateAdd$6(this,
|
281
|
-
__privateAdd$6(this, _fetch
|
282
|
-
__privateAdd$6(this, _queue
|
283
|
-
__privateAdd$6(this, _concurrency
|
1923
|
+
__privateAdd$6(this, _ApiRequestPool_instances);
|
1924
|
+
__privateAdd$6(this, _fetch);
|
1925
|
+
__privateAdd$6(this, _queue);
|
1926
|
+
__privateAdd$6(this, _concurrency);
|
284
1927
|
__privateSet$4(this, _queue, []);
|
285
1928
|
__privateSet$4(this, _concurrency, concurrency);
|
286
1929
|
this.running = 0;
|
@@ -315,7 +1958,7 @@ class ApiRequestPool {
|
|
315
1958
|
}
|
316
1959
|
return response;
|
317
1960
|
};
|
318
|
-
return __privateMethod$4(this,
|
1961
|
+
return __privateMethod$4(this, _ApiRequestPool_instances, enqueue_fn).call(this, async () => {
|
319
1962
|
return await runRequest();
|
320
1963
|
});
|
321
1964
|
}
|
@@ -323,7 +1966,7 @@ class ApiRequestPool {
|
|
323
1966
|
_fetch = new WeakMap();
|
324
1967
|
_queue = new WeakMap();
|
325
1968
|
_concurrency = new WeakMap();
|
326
|
-
|
1969
|
+
_ApiRequestPool_instances = new WeakSet();
|
327
1970
|
enqueue_fn = function(task) {
|
328
1971
|
const promise = new Promise((resolve) => __privateGet$5(this, _queue).push(resolve)).finally(() => {
|
329
1972
|
this.started--;
|
@@ -526,7 +2169,7 @@ function defaultOnOpen(response) {
|
|
526
2169
|
}
|
527
2170
|
}
|
528
2171
|
|
529
|
-
const VERSION = "0.
|
2172
|
+
const VERSION = "0.30.0";
|
530
2173
|
|
531
2174
|
class ErrorWithCause extends Error {
|
532
2175
|
constructor(message, options) {
|
@@ -606,35 +2249,30 @@ function parseProviderString(provider = "production") {
|
|
606
2249
|
return provider;
|
607
2250
|
}
|
608
2251
|
const [main, workspaces] = provider.split(",");
|
609
|
-
if (!main || !workspaces)
|
610
|
-
return null;
|
2252
|
+
if (!main || !workspaces) return null;
|
611
2253
|
return { main, workspaces };
|
612
2254
|
}
|
613
2255
|
function buildProviderString(provider) {
|
614
|
-
if (isHostProviderAlias(provider))
|
615
|
-
return provider;
|
2256
|
+
if (isHostProviderAlias(provider)) return provider;
|
616
2257
|
return `${provider.main},${provider.workspaces}`;
|
617
2258
|
}
|
618
2259
|
function parseWorkspacesUrlParts(url) {
|
619
|
-
if (!isString(url))
|
620
|
-
return null;
|
2260
|
+
if (!isString(url)) return null;
|
621
2261
|
const matches = {
|
622
|
-
production: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.xata\.sh
|
623
|
-
staging: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.staging-xata\.dev
|
624
|
-
dev: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.dev-xata\.dev
|
625
|
-
local: url.match(/(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:(
|
2262
|
+
production: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.xata\.sh\/db\/([^:]+):?(.*)?/),
|
2263
|
+
staging: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.staging-xata\.dev\/db\/([^:]+):?(.*)?/),
|
2264
|
+
dev: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.dev-xata\.dev\/db\/([^:]+):?(.*)?/),
|
2265
|
+
local: url.match(/(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:(?:\d+)\/db\/([^:]+):?(.*)?/)
|
626
2266
|
};
|
627
2267
|
const [host, match] = Object.entries(matches).find(([, match2]) => match2 !== null) ?? [];
|
628
|
-
if (!isHostProviderAlias(host) || !match)
|
629
|
-
|
630
|
-
return { workspace: match[1], region: match[2], host };
|
2268
|
+
if (!isHostProviderAlias(host) || !match) return null;
|
2269
|
+
return { workspace: match[1], region: match[2], database: match[3], branch: match[4], host };
|
631
2270
|
}
|
632
2271
|
|
633
2272
|
const pool = new ApiRequestPool();
|
634
2273
|
const resolveUrl = (url, queryParams = {}, pathParams = {}) => {
|
635
2274
|
const cleanQueryParams = Object.entries(queryParams).reduce((acc, [key, value]) => {
|
636
|
-
if (value === void 0 || value === null)
|
637
|
-
return acc;
|
2275
|
+
if (value === void 0 || value === null) return acc;
|
638
2276
|
return { ...acc, [key]: value };
|
639
2277
|
}, {});
|
640
2278
|
const query = new URLSearchParams(cleanQueryParams).toString();
|
@@ -682,8 +2320,7 @@ function hostHeader(url) {
|
|
682
2320
|
return groups?.host ? { Host: groups.host } : {};
|
683
2321
|
}
|
684
2322
|
async function parseBody(body, headers) {
|
685
|
-
if (!isDefined(body))
|
686
|
-
return void 0;
|
2323
|
+
if (!isDefined(body)) return void 0;
|
687
2324
|
if (isBlob(body) || typeof body.text === "function") {
|
688
2325
|
return body;
|
689
2326
|
}
|
@@ -738,6 +2375,8 @@ async function fetch$1({
|
|
738
2375
|
"X-Xata-Client-ID": clientID ?? defaultClientID,
|
739
2376
|
"X-Xata-Session-ID": sessionID ?? generateUUID(),
|
740
2377
|
"X-Xata-Agent": xataAgent,
|
2378
|
+
// Force field rename to xata_ internal properties
|
2379
|
+
"X-Features": compact(["feat-internal-field-rename-api=1", customHeaders?.["X-Features"]]).join(" "),
|
741
2380
|
...customHeaders,
|
742
2381
|
...hostHeader(fullUrl),
|
743
2382
|
Authorization: `Bearer ${apiKey}`
|
@@ -760,8 +2399,7 @@ async function fetch$1({
|
|
760
2399
|
[TraceAttributes.CLOUDFLARE_RAY_ID]: response.headers?.get("cf-ray") ?? void 0
|
761
2400
|
});
|
762
2401
|
const message = response.headers?.get("x-xata-message");
|
763
|
-
if (message)
|
764
|
-
console.warn(message);
|
2402
|
+
if (message) console.warn(message);
|
765
2403
|
if (response.status === 204) {
|
766
2404
|
return {};
|
767
2405
|
}
|
@@ -845,16 +2483,108 @@ function parseUrl(url) {
|
|
845
2483
|
|
846
2484
|
const dataPlaneFetch = async (options) => fetch$1({ ...options, endpoint: "dataPlane" });
|
847
2485
|
|
848
|
-
const
|
2486
|
+
const getTasks = (variables, signal) => dataPlaneFetch({
|
2487
|
+
url: "/tasks",
|
2488
|
+
method: "get",
|
2489
|
+
...variables,
|
2490
|
+
signal
|
2491
|
+
});
|
2492
|
+
const getTaskStatus = (variables, signal) => dataPlaneFetch({
|
2493
|
+
url: "/tasks/{taskId}",
|
2494
|
+
method: "get",
|
2495
|
+
...variables,
|
2496
|
+
signal
|
2497
|
+
});
|
2498
|
+
const listClusterBranches = (variables, signal) => dataPlaneFetch({
|
2499
|
+
url: "/cluster/{clusterId}/branches",
|
2500
|
+
method: "get",
|
2501
|
+
...variables,
|
2502
|
+
signal
|
2503
|
+
});
|
2504
|
+
const listClusterExtensions = (variables, signal) => dataPlaneFetch({
|
2505
|
+
url: "/cluster/{clusterId}/extensions",
|
2506
|
+
method: "get",
|
2507
|
+
...variables,
|
2508
|
+
signal
|
2509
|
+
});
|
2510
|
+
const installClusterExtension = (variables, signal) => dataPlaneFetch({
|
2511
|
+
url: "/cluster/{clusterId}/extensions",
|
2512
|
+
method: "post",
|
2513
|
+
...variables,
|
2514
|
+
signal
|
2515
|
+
});
|
2516
|
+
const dropClusterExtension = (variables, signal) => dataPlaneFetch({
|
2517
|
+
url: "/cluster/{clusterId}/extensions",
|
2518
|
+
method: "delete",
|
2519
|
+
...variables,
|
2520
|
+
signal
|
2521
|
+
});
|
2522
|
+
const getClusterMetrics = (variables, signal) => dataPlaneFetch({
|
2523
|
+
url: "/cluster/{clusterId}/metrics",
|
2524
|
+
method: "get",
|
2525
|
+
...variables,
|
2526
|
+
signal
|
2527
|
+
});
|
2528
|
+
const applyMigration = (variables, signal) => dataPlaneFetch({
|
2529
|
+
url: "/db/{dbBranchName}/migrations/apply",
|
2530
|
+
method: "post",
|
2531
|
+
...variables,
|
2532
|
+
signal
|
2533
|
+
});
|
2534
|
+
const startMigration = (variables, signal) => dataPlaneFetch({
|
2535
|
+
url: "/db/{dbBranchName}/migrations/start",
|
2536
|
+
method: "post",
|
2537
|
+
...variables,
|
2538
|
+
signal
|
2539
|
+
});
|
2540
|
+
const completeMigration = (variables, signal) => dataPlaneFetch({
|
2541
|
+
url: "/db/{dbBranchName}/migrations/complete",
|
2542
|
+
method: "post",
|
2543
|
+
...variables,
|
2544
|
+
signal
|
2545
|
+
});
|
2546
|
+
const rollbackMigration = (variables, signal) => dataPlaneFetch({
|
2547
|
+
url: "/db/{dbBranchName}/migrations/rollback",
|
2548
|
+
method: "post",
|
2549
|
+
...variables,
|
2550
|
+
signal
|
2551
|
+
});
|
849
2552
|
const adaptTable = (variables, signal) => dataPlaneFetch({
|
850
2553
|
url: "/db/{dbBranchName}/migrations/adapt/{tableName}",
|
851
2554
|
method: "post",
|
852
2555
|
...variables,
|
853
2556
|
signal
|
854
2557
|
});
|
855
|
-
const
|
856
|
-
|
857
|
-
|
2558
|
+
const adaptAllTables = (variables, signal) => dataPlaneFetch({
|
2559
|
+
url: "/db/{dbBranchName}/migrations/adapt",
|
2560
|
+
method: "post",
|
2561
|
+
...variables,
|
2562
|
+
signal
|
2563
|
+
});
|
2564
|
+
const getBranchMigrationJobStatus = (variables, signal) => dataPlaneFetch({
|
2565
|
+
url: "/db/{dbBranchName}/migrations/status",
|
2566
|
+
method: "get",
|
2567
|
+
...variables,
|
2568
|
+
signal
|
2569
|
+
});
|
2570
|
+
const getMigrationJobs = (variables, signal) => dataPlaneFetch({
|
2571
|
+
url: "/db/{dbBranchName}/migrations/jobs",
|
2572
|
+
method: "get",
|
2573
|
+
...variables,
|
2574
|
+
signal
|
2575
|
+
});
|
2576
|
+
const getMigrationJobStatus = (variables, signal) => dataPlaneFetch({
|
2577
|
+
url: "/db/{dbBranchName}/migrations/jobs/{jobId}",
|
2578
|
+
method: "get",
|
2579
|
+
...variables,
|
2580
|
+
signal
|
2581
|
+
});
|
2582
|
+
const getMigrationHistory = (variables, signal) => dataPlaneFetch({
|
2583
|
+
url: "/db/{dbBranchName}/migrations/history",
|
2584
|
+
method: "get",
|
2585
|
+
...variables,
|
2586
|
+
signal
|
2587
|
+
});
|
858
2588
|
const getBranchList = (variables, signal) => dataPlaneFetch({
|
859
2589
|
url: "/dbs/{dbName}",
|
860
2590
|
method: "get",
|
@@ -868,6 +2598,7 @@ const getDatabaseSettings = (variables, signal) => dataPlaneFetch({
|
|
868
2598
|
signal
|
869
2599
|
});
|
870
2600
|
const updateDatabaseSettings = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/settings", method: "patch", ...variables, signal });
|
2601
|
+
const createBranchAsync = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/async", method: "put", ...variables, signal });
|
871
2602
|
const getBranchDetails = (variables, signal) => dataPlaneFetch({
|
872
2603
|
url: "/db/{dbBranchName}",
|
873
2604
|
method: "get",
|
@@ -887,62 +2618,160 @@ const getSchema = (variables, signal) => dataPlaneFetch({
|
|
887
2618
|
...variables,
|
888
2619
|
signal
|
889
2620
|
});
|
890
|
-
const
|
891
|
-
url: "/db/{dbBranchName}/
|
2621
|
+
const getSchemas = (variables, signal) => dataPlaneFetch({
|
2622
|
+
url: "/db/{dbBranchName}/schemas",
|
2623
|
+
method: "get",
|
2624
|
+
...variables,
|
2625
|
+
signal
|
2626
|
+
});
|
2627
|
+
const copyBranch = (variables, signal) => dataPlaneFetch({
|
2628
|
+
url: "/db/{dbBranchName}/copy",
|
2629
|
+
method: "post",
|
2630
|
+
...variables,
|
2631
|
+
signal
|
2632
|
+
});
|
2633
|
+
const getBranchMoveStatus = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/move", method: "get", ...variables, signal });
|
2634
|
+
const moveBranch = (variables, signal) => dataPlaneFetch({
|
2635
|
+
url: "/db/{dbBranchName}/move",
|
2636
|
+
method: "put",
|
2637
|
+
...variables,
|
2638
|
+
signal
|
2639
|
+
});
|
2640
|
+
const updateBranchMetadata = (variables, signal) => dataPlaneFetch({
|
2641
|
+
url: "/db/{dbBranchName}/metadata",
|
2642
|
+
method: "put",
|
2643
|
+
...variables,
|
2644
|
+
signal
|
2645
|
+
});
|
2646
|
+
const getBranchMetadata = (variables, signal) => dataPlaneFetch({
|
2647
|
+
url: "/db/{dbBranchName}/metadata",
|
2648
|
+
method: "get",
|
2649
|
+
...variables,
|
2650
|
+
signal
|
2651
|
+
});
|
2652
|
+
const getBranchStats = (variables, signal) => dataPlaneFetch({
|
2653
|
+
url: "/db/{dbBranchName}/stats",
|
2654
|
+
method: "get",
|
2655
|
+
...variables,
|
2656
|
+
signal
|
2657
|
+
});
|
2658
|
+
const getGitBranchesMapping = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "get", ...variables, signal });
|
2659
|
+
const addGitBranchesEntry = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "post", ...variables, signal });
|
2660
|
+
const removeGitBranchesEntry = (variables, signal) => dataPlaneFetch({
|
2661
|
+
url: "/dbs/{dbName}/gitBranches",
|
2662
|
+
method: "delete",
|
2663
|
+
...variables,
|
2664
|
+
signal
|
2665
|
+
});
|
2666
|
+
const resolveBranch = (variables, signal) => dataPlaneFetch({
|
2667
|
+
url: "/dbs/{dbName}/resolveBranch",
|
2668
|
+
method: "get",
|
2669
|
+
...variables,
|
2670
|
+
signal
|
2671
|
+
});
|
2672
|
+
const getBranchMigrationHistory = (variables, signal) => dataPlaneFetch({
|
2673
|
+
url: "/db/{dbBranchName}/migrations",
|
2674
|
+
method: "get",
|
2675
|
+
...variables,
|
2676
|
+
signal
|
2677
|
+
});
|
2678
|
+
const getBranchMigrationPlan = (variables, signal) => dataPlaneFetch({
|
2679
|
+
url: "/db/{dbBranchName}/migrations/plan",
|
2680
|
+
method: "post",
|
2681
|
+
...variables,
|
2682
|
+
signal
|
2683
|
+
});
|
2684
|
+
const executeBranchMigrationPlan = (variables, signal) => dataPlaneFetch({
|
2685
|
+
url: "/db/{dbBranchName}/migrations/execute",
|
2686
|
+
method: "post",
|
2687
|
+
...variables,
|
2688
|
+
signal
|
2689
|
+
});
|
2690
|
+
const queryMigrationRequests = (variables, signal) => dataPlaneFetch({
|
2691
|
+
url: "/dbs/{dbName}/migrations/query",
|
2692
|
+
method: "post",
|
2693
|
+
...variables,
|
2694
|
+
signal
|
2695
|
+
});
|
2696
|
+
const createMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations", method: "post", ...variables, signal });
|
2697
|
+
const getMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2698
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}",
|
2699
|
+
method: "get",
|
2700
|
+
...variables,
|
2701
|
+
signal
|
2702
|
+
});
|
2703
|
+
const updateMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2704
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}",
|
2705
|
+
method: "patch",
|
2706
|
+
...variables,
|
2707
|
+
signal
|
2708
|
+
});
|
2709
|
+
const listMigrationRequestsCommits = (variables, signal) => dataPlaneFetch({
|
2710
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/commits",
|
2711
|
+
method: "post",
|
2712
|
+
...variables,
|
2713
|
+
signal
|
2714
|
+
});
|
2715
|
+
const compareMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2716
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/compare",
|
2717
|
+
method: "post",
|
2718
|
+
...variables,
|
2719
|
+
signal
|
2720
|
+
});
|
2721
|
+
const getMigrationRequestIsMerged = (variables, signal) => dataPlaneFetch({
|
2722
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
|
2723
|
+
method: "get",
|
2724
|
+
...variables,
|
2725
|
+
signal
|
2726
|
+
});
|
2727
|
+
const mergeMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2728
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
|
2729
|
+
method: "post",
|
2730
|
+
...variables,
|
2731
|
+
signal
|
2732
|
+
});
|
2733
|
+
const getBranchSchemaHistory = (variables, signal) => dataPlaneFetch({
|
2734
|
+
url: "/db/{dbBranchName}/schema/history",
|
2735
|
+
method: "post",
|
2736
|
+
...variables,
|
2737
|
+
signal
|
2738
|
+
});
|
2739
|
+
const compareBranchWithUserSchema = (variables, signal) => dataPlaneFetch({
|
2740
|
+
url: "/db/{dbBranchName}/schema/compare",
|
892
2741
|
method: "post",
|
893
2742
|
...variables,
|
894
2743
|
signal
|
895
2744
|
});
|
896
|
-
const
|
897
|
-
url: "/db/{dbBranchName}/
|
898
|
-
method: "
|
2745
|
+
const compareBranchSchemas = (variables, signal) => dataPlaneFetch({
|
2746
|
+
url: "/db/{dbBranchName}/schema/compare/{branchName}",
|
2747
|
+
method: "post",
|
899
2748
|
...variables,
|
900
2749
|
signal
|
901
2750
|
});
|
902
|
-
const
|
903
|
-
url: "/db/{dbBranchName}/
|
904
|
-
method: "
|
2751
|
+
const updateBranchSchema = (variables, signal) => dataPlaneFetch({
|
2752
|
+
url: "/db/{dbBranchName}/schema/update",
|
2753
|
+
method: "post",
|
905
2754
|
...variables,
|
906
2755
|
signal
|
907
2756
|
});
|
908
|
-
const
|
909
|
-
url: "/db/{dbBranchName}/
|
910
|
-
method: "
|
2757
|
+
const previewBranchSchemaEdit = (variables, signal) => dataPlaneFetch({
|
2758
|
+
url: "/db/{dbBranchName}/schema/preview",
|
2759
|
+
method: "post",
|
911
2760
|
...variables,
|
912
2761
|
signal
|
913
2762
|
});
|
914
|
-
const
|
915
|
-
|
916
|
-
|
917
|
-
const resolveBranch = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/resolveBranch", method: "get", ...variables, signal });
|
918
|
-
const getBranchMigrationHistory = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations", method: "get", ...variables, signal });
|
919
|
-
const getBranchMigrationPlan = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/plan", method: "post", ...variables, signal });
|
920
|
-
const executeBranchMigrationPlan = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/execute", method: "post", ...variables, signal });
|
921
|
-
const queryMigrationRequests = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/query", method: "post", ...variables, signal });
|
922
|
-
const createMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations", method: "post", ...variables, signal });
|
923
|
-
const getMigrationRequest = (variables, signal) => dataPlaneFetch({
|
924
|
-
url: "/dbs/{dbName}/migrations/{mrNumber}",
|
925
|
-
method: "get",
|
2763
|
+
const applyBranchSchemaEdit = (variables, signal) => dataPlaneFetch({
|
2764
|
+
url: "/db/{dbBranchName}/schema/apply",
|
2765
|
+
method: "post",
|
926
2766
|
...variables,
|
927
2767
|
signal
|
928
2768
|
});
|
929
|
-
const
|
930
|
-
|
931
|
-
const compareMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/{mrNumber}/compare", method: "post", ...variables, signal });
|
932
|
-
const getMigrationRequestIsMerged = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/{mrNumber}/merge", method: "get", ...variables, signal });
|
933
|
-
const mergeMigrationRequest = (variables, signal) => dataPlaneFetch({
|
934
|
-
url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
|
2769
|
+
const pushBranchMigrations = (variables, signal) => dataPlaneFetch({
|
2770
|
+
url: "/db/{dbBranchName}/schema/push",
|
935
2771
|
method: "post",
|
936
2772
|
...variables,
|
937
2773
|
signal
|
938
2774
|
});
|
939
|
-
const getBranchSchemaHistory = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/history", method: "post", ...variables, signal });
|
940
|
-
const compareBranchWithUserSchema = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/compare", method: "post", ...variables, signal });
|
941
|
-
const compareBranchSchemas = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/compare/{branchName}", method: "post", ...variables, signal });
|
942
|
-
const updateBranchSchema = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/update", method: "post", ...variables, signal });
|
943
|
-
const previewBranchSchemaEdit = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/preview", method: "post", ...variables, signal });
|
944
|
-
const applyBranchSchemaEdit = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/apply", method: "post", ...variables, signal });
|
945
|
-
const pushBranchMigrations = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/push", method: "post", ...variables, signal });
|
946
2775
|
const createTable = (variables, signal) => dataPlaneFetch({
|
947
2776
|
url: "/db/{dbBranchName}/tables/{tableName}",
|
948
2777
|
method: "put",
|
@@ -955,14 +2784,24 @@ const deleteTable = (variables, signal) => dataPlaneFetch({
|
|
955
2784
|
...variables,
|
956
2785
|
signal
|
957
2786
|
});
|
958
|
-
const updateTable = (variables, signal) => dataPlaneFetch({
|
2787
|
+
const updateTable = (variables, signal) => dataPlaneFetch({
|
2788
|
+
url: "/db/{dbBranchName}/tables/{tableName}",
|
2789
|
+
method: "patch",
|
2790
|
+
...variables,
|
2791
|
+
signal
|
2792
|
+
});
|
959
2793
|
const getTableSchema = (variables, signal) => dataPlaneFetch({
|
960
2794
|
url: "/db/{dbBranchName}/tables/{tableName}/schema",
|
961
2795
|
method: "get",
|
962
2796
|
...variables,
|
963
2797
|
signal
|
964
2798
|
});
|
965
|
-
const setTableSchema = (variables, signal) => dataPlaneFetch({
|
2799
|
+
const setTableSchema = (variables, signal) => dataPlaneFetch({
|
2800
|
+
url: "/db/{dbBranchName}/tables/{tableName}/schema",
|
2801
|
+
method: "put",
|
2802
|
+
...variables,
|
2803
|
+
signal
|
2804
|
+
});
|
966
2805
|
const getTableColumns = (variables, signal) => dataPlaneFetch({
|
967
2806
|
url: "/db/{dbBranchName}/tables/{tableName}/columns",
|
968
2807
|
method: "get",
|
@@ -970,7 +2809,12 @@ const getTableColumns = (variables, signal) => dataPlaneFetch({
|
|
970
2809
|
signal
|
971
2810
|
});
|
972
2811
|
const addTableColumn = (variables, signal) => dataPlaneFetch(
|
973
|
-
{
|
2812
|
+
{
|
2813
|
+
url: "/db/{dbBranchName}/tables/{tableName}/columns",
|
2814
|
+
method: "post",
|
2815
|
+
...variables,
|
2816
|
+
signal
|
2817
|
+
}
|
974
2818
|
);
|
975
2819
|
const getColumn = (variables, signal) => dataPlaneFetch({
|
976
2820
|
url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
|
@@ -978,15 +2822,30 @@ const getColumn = (variables, signal) => dataPlaneFetch({
|
|
978
2822
|
...variables,
|
979
2823
|
signal
|
980
2824
|
});
|
981
|
-
const updateColumn = (variables, signal) => dataPlaneFetch({
|
2825
|
+
const updateColumn = (variables, signal) => dataPlaneFetch({
|
2826
|
+
url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
|
2827
|
+
method: "patch",
|
2828
|
+
...variables,
|
2829
|
+
signal
|
2830
|
+
});
|
982
2831
|
const deleteColumn = (variables, signal) => dataPlaneFetch({
|
983
2832
|
url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
|
984
2833
|
method: "delete",
|
985
2834
|
...variables,
|
986
2835
|
signal
|
987
2836
|
});
|
988
|
-
const branchTransaction = (variables, signal) => dataPlaneFetch({
|
989
|
-
|
2837
|
+
const branchTransaction = (variables, signal) => dataPlaneFetch({
|
2838
|
+
url: "/db/{dbBranchName}/transaction",
|
2839
|
+
method: "post",
|
2840
|
+
...variables,
|
2841
|
+
signal
|
2842
|
+
});
|
2843
|
+
const insertRecord = (variables, signal) => dataPlaneFetch({
|
2844
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data",
|
2845
|
+
method: "post",
|
2846
|
+
...variables,
|
2847
|
+
signal
|
2848
|
+
});
|
990
2849
|
const getFileItem = (variables, signal) => dataPlaneFetch({
|
991
2850
|
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}/column/{columnName}/file/{fileId}",
|
992
2851
|
method: "get",
|
@@ -1029,11 +2888,36 @@ const getRecord = (variables, signal) => dataPlaneFetch({
|
|
1029
2888
|
...variables,
|
1030
2889
|
signal
|
1031
2890
|
});
|
1032
|
-
const insertRecordWithID = (variables, signal) => dataPlaneFetch({
|
1033
|
-
|
1034
|
-
|
1035
|
-
|
1036
|
-
|
2891
|
+
const insertRecordWithID = (variables, signal) => dataPlaneFetch({
|
2892
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2893
|
+
method: "put",
|
2894
|
+
...variables,
|
2895
|
+
signal
|
2896
|
+
});
|
2897
|
+
const updateRecordWithID = (variables, signal) => dataPlaneFetch({
|
2898
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2899
|
+
method: "patch",
|
2900
|
+
...variables,
|
2901
|
+
signal
|
2902
|
+
});
|
2903
|
+
const upsertRecordWithID = (variables, signal) => dataPlaneFetch({
|
2904
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2905
|
+
method: "post",
|
2906
|
+
...variables,
|
2907
|
+
signal
|
2908
|
+
});
|
2909
|
+
const deleteRecord = (variables, signal) => dataPlaneFetch({
|
2910
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2911
|
+
method: "delete",
|
2912
|
+
...variables,
|
2913
|
+
signal
|
2914
|
+
});
|
2915
|
+
const bulkInsertTableRecords = (variables, signal) => dataPlaneFetch({
|
2916
|
+
url: "/db/{dbBranchName}/tables/{tableName}/bulk",
|
2917
|
+
method: "post",
|
2918
|
+
...variables,
|
2919
|
+
signal
|
2920
|
+
});
|
1037
2921
|
const queryTable = (variables, signal) => dataPlaneFetch({
|
1038
2922
|
url: "/db/{dbBranchName}/tables/{tableName}/query",
|
1039
2923
|
method: "post",
|
@@ -1052,16 +2936,36 @@ const searchTable = (variables, signal) => dataPlaneFetch({
|
|
1052
2936
|
...variables,
|
1053
2937
|
signal
|
1054
2938
|
});
|
1055
|
-
const vectorSearchTable = (variables, signal) => dataPlaneFetch({
|
2939
|
+
const vectorSearchTable = (variables, signal) => dataPlaneFetch({
|
2940
|
+
url: "/db/{dbBranchName}/tables/{tableName}/vectorSearch",
|
2941
|
+
method: "post",
|
2942
|
+
...variables,
|
2943
|
+
signal
|
2944
|
+
});
|
1056
2945
|
const askTable = (variables, signal) => dataPlaneFetch({
|
1057
2946
|
url: "/db/{dbBranchName}/tables/{tableName}/ask",
|
1058
2947
|
method: "post",
|
1059
2948
|
...variables,
|
1060
2949
|
signal
|
1061
2950
|
});
|
1062
|
-
const askTableSession = (variables, signal) => dataPlaneFetch({
|
1063
|
-
|
1064
|
-
|
2951
|
+
const askTableSession = (variables, signal) => dataPlaneFetch({
|
2952
|
+
url: "/db/{dbBranchName}/tables/{tableName}/ask/{sessionId}",
|
2953
|
+
method: "post",
|
2954
|
+
...variables,
|
2955
|
+
signal
|
2956
|
+
});
|
2957
|
+
const summarizeTable = (variables, signal) => dataPlaneFetch({
|
2958
|
+
url: "/db/{dbBranchName}/tables/{tableName}/summarize",
|
2959
|
+
method: "post",
|
2960
|
+
...variables,
|
2961
|
+
signal
|
2962
|
+
});
|
2963
|
+
const aggregateTable = (variables, signal) => dataPlaneFetch({
|
2964
|
+
url: "/db/{dbBranchName}/tables/{tableName}/aggregate",
|
2965
|
+
method: "post",
|
2966
|
+
...variables,
|
2967
|
+
signal
|
2968
|
+
});
|
1065
2969
|
const fileAccess = (variables, signal) => dataPlaneFetch({
|
1066
2970
|
url: "/file/{fileId}",
|
1067
2971
|
method: "get",
|
@@ -1080,14 +2984,34 @@ const sqlQuery = (variables, signal) => dataPlaneFetch({
|
|
1080
2984
|
...variables,
|
1081
2985
|
signal
|
1082
2986
|
});
|
2987
|
+
const sqlBatchQuery = (variables, signal) => dataPlaneFetch({
|
2988
|
+
url: "/db/{dbBranchName}/sql/batch",
|
2989
|
+
method: "post",
|
2990
|
+
...variables,
|
2991
|
+
signal
|
2992
|
+
});
|
1083
2993
|
const operationsByTag$2 = {
|
2994
|
+
tasks: { getTasks, getTaskStatus },
|
2995
|
+
cluster: {
|
2996
|
+
listClusterBranches,
|
2997
|
+
listClusterExtensions,
|
2998
|
+
installClusterExtension,
|
2999
|
+
dropClusterExtension,
|
3000
|
+
getClusterMetrics
|
3001
|
+
},
|
1084
3002
|
migrations: {
|
1085
3003
|
applyMigration,
|
3004
|
+
startMigration,
|
3005
|
+
completeMigration,
|
3006
|
+
rollbackMigration,
|
1086
3007
|
adaptTable,
|
3008
|
+
adaptAllTables,
|
1087
3009
|
getBranchMigrationJobStatus,
|
3010
|
+
getMigrationJobs,
|
1088
3011
|
getMigrationJobStatus,
|
1089
3012
|
getMigrationHistory,
|
1090
3013
|
getSchema,
|
3014
|
+
getSchemas,
|
1091
3015
|
getBranchMigrationHistory,
|
1092
3016
|
getBranchMigrationPlan,
|
1093
3017
|
executeBranchMigrationPlan,
|
@@ -1101,10 +3025,13 @@ const operationsByTag$2 = {
|
|
1101
3025
|
},
|
1102
3026
|
branch: {
|
1103
3027
|
getBranchList,
|
3028
|
+
createBranchAsync,
|
1104
3029
|
getBranchDetails,
|
1105
3030
|
createBranch,
|
1106
3031
|
deleteBranch,
|
1107
3032
|
copyBranch,
|
3033
|
+
getBranchMoveStatus,
|
3034
|
+
moveBranch,
|
1108
3035
|
updateBranchMetadata,
|
1109
3036
|
getBranchMetadata,
|
1110
3037
|
getBranchStats,
|
@@ -1146,7 +3073,16 @@ const operationsByTag$2 = {
|
|
1146
3073
|
deleteRecord,
|
1147
3074
|
bulkInsertTableRecords
|
1148
3075
|
},
|
1149
|
-
files: {
|
3076
|
+
files: {
|
3077
|
+
getFileItem,
|
3078
|
+
putFileItem,
|
3079
|
+
deleteFileItem,
|
3080
|
+
getFile,
|
3081
|
+
putFile,
|
3082
|
+
deleteFile,
|
3083
|
+
fileAccess,
|
3084
|
+
fileUpload
|
3085
|
+
},
|
1150
3086
|
searchAndFilter: {
|
1151
3087
|
queryTable,
|
1152
3088
|
searchBranch,
|
@@ -1157,7 +3093,7 @@ const operationsByTag$2 = {
|
|
1157
3093
|
summarizeTable,
|
1158
3094
|
aggregateTable
|
1159
3095
|
},
|
1160
|
-
sql: { sqlQuery }
|
3096
|
+
sql: { sqlQuery, sqlBatchQuery }
|
1161
3097
|
};
|
1162
3098
|
|
1163
3099
|
const controlPlaneFetch = async (options) => fetch$1({ ...options, endpoint: "controlPlane" });
|
@@ -1224,7 +3160,12 @@ const deleteOAuthAccessToken = (variables, signal) => controlPlaneFetch({
|
|
1224
3160
|
...variables,
|
1225
3161
|
signal
|
1226
3162
|
});
|
1227
|
-
const updateOAuthAccessToken = (variables, signal) => controlPlaneFetch({
|
3163
|
+
const updateOAuthAccessToken = (variables, signal) => controlPlaneFetch({
|
3164
|
+
url: "/user/oauth/tokens/{token}",
|
3165
|
+
method: "patch",
|
3166
|
+
...variables,
|
3167
|
+
signal
|
3168
|
+
});
|
1228
3169
|
const getWorkspacesList = (variables, signal) => controlPlaneFetch({
|
1229
3170
|
url: "/workspaces",
|
1230
3171
|
method: "get",
|
@@ -1255,47 +3196,150 @@ const deleteWorkspace = (variables, signal) => controlPlaneFetch({
|
|
1255
3196
|
...variables,
|
1256
3197
|
signal
|
1257
3198
|
});
|
1258
|
-
const
|
1259
|
-
|
3199
|
+
const getWorkspaceSettings = (variables, signal) => controlPlaneFetch({
|
3200
|
+
url: "/workspaces/{workspaceId}/settings",
|
3201
|
+
method: "get",
|
3202
|
+
...variables,
|
3203
|
+
signal
|
3204
|
+
});
|
3205
|
+
const updateWorkspaceSettings = (variables, signal) => controlPlaneFetch({
|
3206
|
+
url: "/workspaces/{workspaceId}/settings",
|
3207
|
+
method: "patch",
|
3208
|
+
...variables,
|
3209
|
+
signal
|
3210
|
+
});
|
3211
|
+
const getWorkspaceMembersList = (variables, signal) => controlPlaneFetch({
|
3212
|
+
url: "/workspaces/{workspaceId}/members",
|
3213
|
+
method: "get",
|
3214
|
+
...variables,
|
3215
|
+
signal
|
3216
|
+
});
|
3217
|
+
const updateWorkspaceMemberRole = (variables, signal) => controlPlaneFetch({
|
3218
|
+
url: "/workspaces/{workspaceId}/members/{userId}",
|
3219
|
+
method: "put",
|
3220
|
+
...variables,
|
3221
|
+
signal
|
3222
|
+
});
|
1260
3223
|
const removeWorkspaceMember = (variables, signal) => controlPlaneFetch({
|
1261
3224
|
url: "/workspaces/{workspaceId}/members/{userId}",
|
1262
3225
|
method: "delete",
|
1263
3226
|
...variables,
|
1264
3227
|
signal
|
1265
3228
|
});
|
1266
|
-
const inviteWorkspaceMember = (variables, signal) => controlPlaneFetch({
|
1267
|
-
|
1268
|
-
|
1269
|
-
|
1270
|
-
|
1271
|
-
|
1272
|
-
const
|
3229
|
+
const inviteWorkspaceMember = (variables, signal) => controlPlaneFetch({
|
3230
|
+
url: "/workspaces/{workspaceId}/invites",
|
3231
|
+
method: "post",
|
3232
|
+
...variables,
|
3233
|
+
signal
|
3234
|
+
});
|
3235
|
+
const updateWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3236
|
+
url: "/workspaces/{workspaceId}/invites/{inviteId}",
|
3237
|
+
method: "patch",
|
3238
|
+
...variables,
|
3239
|
+
signal
|
3240
|
+
});
|
3241
|
+
const cancelWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3242
|
+
url: "/workspaces/{workspaceId}/invites/{inviteId}",
|
3243
|
+
method: "delete",
|
3244
|
+
...variables,
|
3245
|
+
signal
|
3246
|
+
});
|
3247
|
+
const acceptWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3248
|
+
url: "/workspaces/{workspaceId}/invites/{inviteKey}/accept",
|
3249
|
+
method: "post",
|
3250
|
+
...variables,
|
3251
|
+
signal
|
3252
|
+
});
|
3253
|
+
const resendWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3254
|
+
url: "/workspaces/{workspaceId}/invites/{inviteId}/resend",
|
3255
|
+
method: "post",
|
3256
|
+
...variables,
|
3257
|
+
signal
|
3258
|
+
});
|
3259
|
+
const listClusters = (variables, signal) => controlPlaneFetch({
|
3260
|
+
url: "/workspaces/{workspaceId}/clusters",
|
3261
|
+
method: "get",
|
3262
|
+
...variables,
|
3263
|
+
signal
|
3264
|
+
});
|
3265
|
+
const createCluster = (variables, signal) => controlPlaneFetch({
|
3266
|
+
url: "/workspaces/{workspaceId}/clusters",
|
3267
|
+
method: "post",
|
3268
|
+
...variables,
|
3269
|
+
signal
|
3270
|
+
});
|
1273
3271
|
const getCluster = (variables, signal) => controlPlaneFetch({
|
1274
3272
|
url: "/workspaces/{workspaceId}/clusters/{clusterId}",
|
1275
3273
|
method: "get",
|
1276
3274
|
...variables,
|
1277
3275
|
signal
|
1278
3276
|
});
|
1279
|
-
const updateCluster = (variables, signal) => controlPlaneFetch({
|
3277
|
+
const updateCluster = (variables, signal) => controlPlaneFetch({
|
3278
|
+
url: "/workspaces/{workspaceId}/clusters/{clusterId}",
|
3279
|
+
method: "patch",
|
3280
|
+
...variables,
|
3281
|
+
signal
|
3282
|
+
});
|
3283
|
+
const deleteCluster = (variables, signal) => controlPlaneFetch({
|
3284
|
+
url: "/workspaces/{workspaceId}/clusters/{clusterId}",
|
3285
|
+
method: "delete",
|
3286
|
+
...variables,
|
3287
|
+
signal
|
3288
|
+
});
|
1280
3289
|
const getDatabaseList = (variables, signal) => controlPlaneFetch({
|
1281
3290
|
url: "/workspaces/{workspaceId}/dbs",
|
1282
3291
|
method: "get",
|
1283
3292
|
...variables,
|
1284
3293
|
signal
|
1285
3294
|
});
|
1286
|
-
const createDatabase = (variables, signal) => controlPlaneFetch({
|
3295
|
+
const createDatabase = (variables, signal) => controlPlaneFetch({
|
3296
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
3297
|
+
method: "put",
|
3298
|
+
...variables,
|
3299
|
+
signal
|
3300
|
+
});
|
1287
3301
|
const deleteDatabase = (variables, signal) => controlPlaneFetch({
|
1288
3302
|
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
1289
3303
|
method: "delete",
|
1290
3304
|
...variables,
|
1291
3305
|
signal
|
1292
3306
|
});
|
1293
|
-
const getDatabaseMetadata = (variables, signal) => controlPlaneFetch({
|
1294
|
-
|
1295
|
-
|
1296
|
-
|
1297
|
-
|
1298
|
-
|
3307
|
+
const getDatabaseMetadata = (variables, signal) => controlPlaneFetch({
|
3308
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
3309
|
+
method: "get",
|
3310
|
+
...variables,
|
3311
|
+
signal
|
3312
|
+
});
|
3313
|
+
const updateDatabaseMetadata = (variables, signal) => controlPlaneFetch({
|
3314
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
3315
|
+
method: "patch",
|
3316
|
+
...variables,
|
3317
|
+
signal
|
3318
|
+
});
|
3319
|
+
const renameDatabase = (variables, signal) => controlPlaneFetch({
|
3320
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/rename",
|
3321
|
+
method: "post",
|
3322
|
+
...variables,
|
3323
|
+
signal
|
3324
|
+
});
|
3325
|
+
const getDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
|
3326
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
|
3327
|
+
method: "get",
|
3328
|
+
...variables,
|
3329
|
+
signal
|
3330
|
+
});
|
3331
|
+
const updateDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
|
3332
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
|
3333
|
+
method: "put",
|
3334
|
+
...variables,
|
3335
|
+
signal
|
3336
|
+
});
|
3337
|
+
const deleteDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
|
3338
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
|
3339
|
+
method: "delete",
|
3340
|
+
...variables,
|
3341
|
+
signal
|
3342
|
+
});
|
1299
3343
|
const listRegions = (variables, signal) => controlPlaneFetch({
|
1300
3344
|
url: "/workspaces/{workspaceId}/regions",
|
1301
3345
|
method: "get",
|
@@ -1320,6 +3364,8 @@ const operationsByTag$1 = {
|
|
1320
3364
|
getWorkspace,
|
1321
3365
|
updateWorkspace,
|
1322
3366
|
deleteWorkspace,
|
3367
|
+
getWorkspaceSettings,
|
3368
|
+
updateWorkspaceSettings,
|
1323
3369
|
getWorkspaceMembersList,
|
1324
3370
|
updateWorkspaceMemberRole,
|
1325
3371
|
removeWorkspaceMember
|
@@ -1331,7 +3377,13 @@ const operationsByTag$1 = {
|
|
1331
3377
|
acceptWorkspaceMemberInvite,
|
1332
3378
|
resendWorkspaceMemberInvite
|
1333
3379
|
},
|
1334
|
-
xbcontrolOther: {
|
3380
|
+
xbcontrolOther: {
|
3381
|
+
listClusters,
|
3382
|
+
createCluster,
|
3383
|
+
getCluster,
|
3384
|
+
updateCluster,
|
3385
|
+
deleteCluster
|
3386
|
+
},
|
1335
3387
|
databases: {
|
1336
3388
|
getDatabaseList,
|
1337
3389
|
createDatabase,
|
@@ -1351,7 +3403,7 @@ const operationsByTag = deepMerge(operationsByTag$2, operationsByTag$1);
|
|
1351
3403
|
const buildApiClient = () => class {
|
1352
3404
|
constructor(options = {}) {
|
1353
3405
|
const provider = options.host ?? "production";
|
1354
|
-
const apiKey = options.apiKey
|
3406
|
+
const apiKey = options.apiKey;
|
1355
3407
|
const trace = options.trace ?? defaultTrace;
|
1356
3408
|
const clientID = generateUUID();
|
1357
3409
|
if (!apiKey) {
|
@@ -1418,8 +3470,7 @@ function buildTransformString(transformations) {
|
|
1418
3470
|
).join(",");
|
1419
3471
|
}
|
1420
3472
|
function transformImage(url, ...transformations) {
|
1421
|
-
if (!isDefined(url))
|
1422
|
-
return void 0;
|
3473
|
+
if (!isDefined(url)) return void 0;
|
1423
3474
|
const newTransformations = buildTransformString(transformations);
|
1424
3475
|
const { hostname, pathname, search } = new URL(url);
|
1425
3476
|
const pathParts = pathname.split("/");
|
@@ -1532,8 +3583,7 @@ class XataFile {
|
|
1532
3583
|
}
|
1533
3584
|
}
|
1534
3585
|
const parseInputFileEntry = async (entry) => {
|
1535
|
-
if (!isDefined(entry))
|
1536
|
-
return null;
|
3586
|
+
if (!isDefined(entry)) return null;
|
1537
3587
|
const { id, name, mediaType, base64Content, enablePublicUrl, signedUrlTimeout, uploadUrlTimeout } = await entry;
|
1538
3588
|
return compactObject({
|
1539
3589
|
id,
|
@@ -1548,24 +3598,19 @@ const parseInputFileEntry = async (entry) => {
|
|
1548
3598
|
};
|
1549
3599
|
|
1550
3600
|
function cleanFilter(filter) {
|
1551
|
-
if (!isDefined(filter))
|
1552
|
-
|
1553
|
-
if (!isObject(filter))
|
1554
|
-
return filter;
|
3601
|
+
if (!isDefined(filter)) return void 0;
|
3602
|
+
if (!isObject(filter)) return filter;
|
1555
3603
|
const values = Object.fromEntries(
|
1556
3604
|
Object.entries(filter).reduce((acc, [key, value]) => {
|
1557
|
-
if (!isDefined(value))
|
1558
|
-
return acc;
|
3605
|
+
if (!isDefined(value)) return acc;
|
1559
3606
|
if (Array.isArray(value)) {
|
1560
3607
|
const clean = value.map((item) => cleanFilter(item)).filter((item) => isDefined(item));
|
1561
|
-
if (clean.length === 0)
|
1562
|
-
return acc;
|
3608
|
+
if (clean.length === 0) return acc;
|
1563
3609
|
return [...acc, [key, clean]];
|
1564
3610
|
}
|
1565
3611
|
if (isObject(value)) {
|
1566
3612
|
const clean = cleanFilter(value);
|
1567
|
-
if (!isDefined(clean))
|
1568
|
-
return acc;
|
3613
|
+
if (!isDefined(clean)) return acc;
|
1569
3614
|
return [...acc, [key, clean]];
|
1570
3615
|
}
|
1571
3616
|
return [...acc, [key, value]];
|
@@ -1575,10 +3620,8 @@ function cleanFilter(filter) {
|
|
1575
3620
|
}
|
1576
3621
|
|
1577
3622
|
function stringifyJson(value) {
|
1578
|
-
if (!isDefined(value))
|
1579
|
-
|
1580
|
-
if (isString(value))
|
1581
|
-
return value;
|
3623
|
+
if (!isDefined(value)) return value;
|
3624
|
+
if (isString(value)) return value;
|
1582
3625
|
try {
|
1583
3626
|
return JSON.stringify(value);
|
1584
3627
|
} catch (e) {
|
@@ -1593,28 +3636,17 @@ function parseJson(value) {
|
|
1593
3636
|
}
|
1594
3637
|
}
|
1595
3638
|
|
1596
|
-
var
|
1597
|
-
|
1598
|
-
throw TypeError("Cannot " + msg);
|
1599
|
-
};
|
1600
|
-
var __privateGet$4 = (obj, member, getter) => {
|
1601
|
-
__accessCheck$5(obj, member, "read from private field");
|
1602
|
-
return getter ? getter.call(obj) : member.get(obj);
|
1603
|
-
};
|
1604
|
-
var __privateAdd$5 = (obj, member, value) => {
|
1605
|
-
if (member.has(obj))
|
1606
|
-
throw TypeError("Cannot add the same private member more than once");
|
1607
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
1608
|
-
};
|
1609
|
-
var __privateSet$3 = (obj, member, value, setter) => {
|
1610
|
-
__accessCheck$5(obj, member, "write to private field");
|
1611
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
1612
|
-
return value;
|
3639
|
+
var __typeError$5 = (msg) => {
|
3640
|
+
throw TypeError(msg);
|
1613
3641
|
};
|
3642
|
+
var __accessCheck$5 = (obj, member, msg) => member.has(obj) || __typeError$5("Cannot " + msg);
|
3643
|
+
var __privateGet$4 = (obj, member, getter) => (__accessCheck$5(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
3644
|
+
var __privateAdd$5 = (obj, member, value) => member.has(obj) ? __typeError$5("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
3645
|
+
var __privateSet$3 = (obj, member, value, setter) => (__accessCheck$5(obj, member, "write to private field"), member.set(obj, value), value);
|
1614
3646
|
var _query, _page;
|
1615
3647
|
class Page {
|
1616
3648
|
constructor(query, meta, records = []) {
|
1617
|
-
__privateAdd$5(this, _query
|
3649
|
+
__privateAdd$5(this, _query);
|
1618
3650
|
__privateSet$3(this, _query, query);
|
1619
3651
|
this.meta = meta;
|
1620
3652
|
this.records = new PageRecordArray(this, records);
|
@@ -1701,7 +3733,7 @@ class RecordArray extends Array {
|
|
1701
3733
|
const _PageRecordArray = class _PageRecordArray extends Array {
|
1702
3734
|
constructor(...args) {
|
1703
3735
|
super(..._PageRecordArray.parseConstructorParams(...args));
|
1704
|
-
__privateAdd$5(this, _page
|
3736
|
+
__privateAdd$5(this, _page);
|
1705
3737
|
__privateSet$3(this, _page, isObject(args[0]?.meta) ? args[0] : { meta: { page: { cursor: "", more: false } }, records: [] });
|
1706
3738
|
}
|
1707
3739
|
static parseConstructorParams(...args) {
|
@@ -1772,34 +3804,20 @@ const _PageRecordArray = class _PageRecordArray extends Array {
|
|
1772
3804
|
_page = new WeakMap();
|
1773
3805
|
let PageRecordArray = _PageRecordArray;
|
1774
3806
|
|
1775
|
-
var
|
1776
|
-
|
1777
|
-
throw TypeError("Cannot " + msg);
|
1778
|
-
};
|
1779
|
-
var __privateGet$3 = (obj, member, getter) => {
|
1780
|
-
__accessCheck$4(obj, member, "read from private field");
|
1781
|
-
return getter ? getter.call(obj) : member.get(obj);
|
1782
|
-
};
|
1783
|
-
var __privateAdd$4 = (obj, member, value) => {
|
1784
|
-
if (member.has(obj))
|
1785
|
-
throw TypeError("Cannot add the same private member more than once");
|
1786
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
1787
|
-
};
|
1788
|
-
var __privateSet$2 = (obj, member, value, setter) => {
|
1789
|
-
__accessCheck$4(obj, member, "write to private field");
|
1790
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
1791
|
-
return value;
|
3807
|
+
var __typeError$4 = (msg) => {
|
3808
|
+
throw TypeError(msg);
|
1792
3809
|
};
|
1793
|
-
var
|
1794
|
-
|
1795
|
-
|
1796
|
-
|
1797
|
-
var
|
3810
|
+
var __accessCheck$4 = (obj, member, msg) => member.has(obj) || __typeError$4("Cannot " + msg);
|
3811
|
+
var __privateGet$3 = (obj, member, getter) => (__accessCheck$4(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
3812
|
+
var __privateAdd$4 = (obj, member, value) => member.has(obj) ? __typeError$4("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
3813
|
+
var __privateSet$2 = (obj, member, value, setter) => (__accessCheck$4(obj, member, "write to private field"), member.set(obj, value), value);
|
3814
|
+
var __privateMethod$3 = (obj, member, method) => (__accessCheck$4(obj, member, "access private method"), method);
|
3815
|
+
var _table$1, _repository, _data, _Query_instances, cleanFilterConstraint_fn;
|
1798
3816
|
const _Query = class _Query {
|
1799
3817
|
constructor(repository, table, data, rawParent) {
|
1800
|
-
__privateAdd$4(this,
|
1801
|
-
__privateAdd$4(this, _table$1
|
1802
|
-
__privateAdd$4(this, _repository
|
3818
|
+
__privateAdd$4(this, _Query_instances);
|
3819
|
+
__privateAdd$4(this, _table$1);
|
3820
|
+
__privateAdd$4(this, _repository);
|
1803
3821
|
__privateAdd$4(this, _data, { filter: {} });
|
1804
3822
|
// Implements pagination
|
1805
3823
|
this.meta = { page: { cursor: "start", more: true, size: PAGINATION_DEFAULT_SIZE } };
|
@@ -1877,12 +3895,12 @@ const _Query = class _Query {
|
|
1877
3895
|
filter(a, b) {
|
1878
3896
|
if (arguments.length === 1) {
|
1879
3897
|
const constraints = Object.entries(a ?? {}).map(([column, constraint]) => ({
|
1880
|
-
[column]: __privateMethod$3(this,
|
3898
|
+
[column]: __privateMethod$3(this, _Query_instances, cleanFilterConstraint_fn).call(this, column, constraint)
|
1881
3899
|
}));
|
1882
3900
|
const $all = compact([__privateGet$3(this, _data).filter?.$all].flat().concat(constraints));
|
1883
3901
|
return new _Query(__privateGet$3(this, _repository), __privateGet$3(this, _table$1), { filter: { $all } }, __privateGet$3(this, _data));
|
1884
3902
|
} else {
|
1885
|
-
const constraints = isDefined(a) && isDefined(b) ? [{ [a]: __privateMethod$3(this,
|
3903
|
+
const constraints = isDefined(a) && isDefined(b) ? [{ [a]: __privateMethod$3(this, _Query_instances, cleanFilterConstraint_fn).call(this, a, b) }] : void 0;
|
1886
3904
|
const $all = compact([__privateGet$3(this, _data).filter?.$all].flat().concat(constraints));
|
1887
3905
|
return new _Query(__privateGet$3(this, _repository), __privateGet$3(this, _table$1), { filter: { $all } }, __privateGet$3(this, _data));
|
1888
3906
|
}
|
@@ -1961,8 +3979,7 @@ const _Query = class _Query {
|
|
1961
3979
|
}
|
1962
3980
|
async getFirstOrThrow(options = {}) {
|
1963
3981
|
const records = await this.getMany({ ...options, pagination: { size: 1 } });
|
1964
|
-
if (records[0] === void 0)
|
1965
|
-
throw new Error("No results found.");
|
3982
|
+
if (records[0] === void 0) throw new Error("No results found.");
|
1966
3983
|
return records[0];
|
1967
3984
|
}
|
1968
3985
|
async summarize(params = {}) {
|
@@ -2017,14 +4034,14 @@ const _Query = class _Query {
|
|
2017
4034
|
_table$1 = new WeakMap();
|
2018
4035
|
_repository = new WeakMap();
|
2019
4036
|
_data = new WeakMap();
|
2020
|
-
|
4037
|
+
_Query_instances = new WeakSet();
|
2021
4038
|
cleanFilterConstraint_fn = function(column, value) {
|
2022
4039
|
const columnType = __privateGet$3(this, _table$1).schema?.columns.find(({ name }) => name === column)?.type;
|
2023
4040
|
if (columnType === "multiple" && (isString(value) || isStringArray(value))) {
|
2024
4041
|
return { $includes: value };
|
2025
4042
|
}
|
2026
|
-
if (columnType === "link" && isObject(value) && isString(value.
|
2027
|
-
return value.
|
4043
|
+
if (columnType === "link" && isObject(value) && isString(value.xata_id)) {
|
4044
|
+
return value.xata_id;
|
2028
4045
|
}
|
2029
4046
|
return value;
|
2030
4047
|
};
|
@@ -2052,12 +4069,7 @@ const RecordColumnTypes = [
|
|
2052
4069
|
"json"
|
2053
4070
|
];
|
2054
4071
|
function isIdentifiable(x) {
|
2055
|
-
return isObject(x) && isString(x?.
|
2056
|
-
}
|
2057
|
-
function isXataRecord(x) {
|
2058
|
-
const record = x;
|
2059
|
-
const metadata = record?.getMetadata();
|
2060
|
-
return isIdentifiable(x) && isObject(metadata) && typeof metadata.version === "number";
|
4072
|
+
return isObject(x) && isString(x?.xata_id);
|
2061
4073
|
}
|
2062
4074
|
|
2063
4075
|
function isValidExpandedColumn(column) {
|
@@ -2083,8 +4095,7 @@ function isSortFilterString(value) {
|
|
2083
4095
|
}
|
2084
4096
|
function isSortFilterBase(filter) {
|
2085
4097
|
return isObject(filter) && Object.entries(filter).every(([key, value]) => {
|
2086
|
-
if (key === "*")
|
2087
|
-
return value === "random";
|
4098
|
+
if (key === "*") return value === "random";
|
2088
4099
|
return value === "asc" || value === "desc";
|
2089
4100
|
});
|
2090
4101
|
}
|
@@ -2105,29 +4116,15 @@ function buildSortFilter(filter) {
|
|
2105
4116
|
}
|
2106
4117
|
}
|
2107
4118
|
|
2108
|
-
var
|
2109
|
-
|
2110
|
-
throw TypeError("Cannot " + msg);
|
2111
|
-
};
|
2112
|
-
var __privateGet$2 = (obj, member, getter) => {
|
2113
|
-
__accessCheck$3(obj, member, "read from private field");
|
2114
|
-
return getter ? getter.call(obj) : member.get(obj);
|
2115
|
-
};
|
2116
|
-
var __privateAdd$3 = (obj, member, value) => {
|
2117
|
-
if (member.has(obj))
|
2118
|
-
throw TypeError("Cannot add the same private member more than once");
|
2119
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
4119
|
+
var __typeError$3 = (msg) => {
|
4120
|
+
throw TypeError(msg);
|
2120
4121
|
};
|
2121
|
-
var
|
2122
|
-
|
2123
|
-
|
2124
|
-
|
2125
|
-
|
2126
|
-
var
|
2127
|
-
__accessCheck$3(obj, member, "access private method");
|
2128
|
-
return method;
|
2129
|
-
};
|
2130
|
-
var _table, _getFetchProps, _db, _schemaTables, _trace, _insertRecordWithoutId, insertRecordWithoutId_fn, _insertRecordWithId, insertRecordWithId_fn, _insertRecords, insertRecords_fn, _updateRecordWithID, updateRecordWithID_fn, _updateRecords, updateRecords_fn, _upsertRecordWithID, upsertRecordWithID_fn, _deleteRecord, deleteRecord_fn, _deleteRecords, deleteRecords_fn, _getSchemaTables, getSchemaTables_fn, _transformObjectToApi, transformObjectToApi_fn;
|
4122
|
+
var __accessCheck$3 = (obj, member, msg) => member.has(obj) || __typeError$3("Cannot " + msg);
|
4123
|
+
var __privateGet$2 = (obj, member, getter) => (__accessCheck$3(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
4124
|
+
var __privateAdd$3 = (obj, member, value) => member.has(obj) ? __typeError$3("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
4125
|
+
var __privateSet$1 = (obj, member, value, setter) => (__accessCheck$3(obj, member, "write to private field"), member.set(obj, value), value);
|
4126
|
+
var __privateMethod$2 = (obj, member, method) => (__accessCheck$3(obj, member, "access private method"), method);
|
4127
|
+
var _table, _getFetchProps, _db, _schemaTables, _trace, _RestRepository_instances, insertRecordWithoutId_fn, insertRecordWithId_fn, insertRecords_fn, updateRecordWithID_fn, updateRecords_fn, upsertRecordWithID_fn, deleteRecord_fn, deleteRecords_fn, getSchemaTables_fn, transformObjectToApi_fn;
|
2131
4128
|
const BULK_OPERATION_MAX_SIZE = 1e3;
|
2132
4129
|
class Repository extends Query {
|
2133
4130
|
}
|
@@ -2138,21 +4135,12 @@ class RestRepository extends Query {
|
|
2138
4135
|
{ name: options.table, schema: options.schemaTables?.find((table) => table.name === options.table) },
|
2139
4136
|
{}
|
2140
4137
|
);
|
2141
|
-
__privateAdd$3(this,
|
2142
|
-
__privateAdd$3(this,
|
2143
|
-
__privateAdd$3(this,
|
2144
|
-
__privateAdd$3(this,
|
2145
|
-
__privateAdd$3(this,
|
2146
|
-
__privateAdd$3(this,
|
2147
|
-
__privateAdd$3(this, _deleteRecord);
|
2148
|
-
__privateAdd$3(this, _deleteRecords);
|
2149
|
-
__privateAdd$3(this, _getSchemaTables);
|
2150
|
-
__privateAdd$3(this, _transformObjectToApi);
|
2151
|
-
__privateAdd$3(this, _table, void 0);
|
2152
|
-
__privateAdd$3(this, _getFetchProps, void 0);
|
2153
|
-
__privateAdd$3(this, _db, void 0);
|
2154
|
-
__privateAdd$3(this, _schemaTables, void 0);
|
2155
|
-
__privateAdd$3(this, _trace, void 0);
|
4138
|
+
__privateAdd$3(this, _RestRepository_instances);
|
4139
|
+
__privateAdd$3(this, _table);
|
4140
|
+
__privateAdd$3(this, _getFetchProps);
|
4141
|
+
__privateAdd$3(this, _db);
|
4142
|
+
__privateAdd$3(this, _schemaTables);
|
4143
|
+
__privateAdd$3(this, _trace);
|
2156
4144
|
__privateSet$1(this, _table, options.table);
|
2157
4145
|
__privateSet$1(this, _db, options.db);
|
2158
4146
|
__privateSet$1(this, _schemaTables, options.schemaTables);
|
@@ -2171,28 +4159,28 @@ class RestRepository extends Query {
|
|
2171
4159
|
return __privateGet$2(this, _trace).call(this, "create", async () => {
|
2172
4160
|
const ifVersion = parseIfVersion(b, c, d);
|
2173
4161
|
if (Array.isArray(a)) {
|
2174
|
-
if (a.length === 0)
|
2175
|
-
|
2176
|
-
const ids = await __privateMethod$2(this, _insertRecords, insertRecords_fn).call(this, a, { ifVersion, createOnly: true });
|
4162
|
+
if (a.length === 0) return [];
|
4163
|
+
const ids = await __privateMethod$2(this, _RestRepository_instances, insertRecords_fn).call(this, a, { ifVersion, createOnly: true });
|
2177
4164
|
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
2178
4165
|
const result = await this.read(ids, columns);
|
2179
4166
|
return result;
|
2180
4167
|
}
|
2181
4168
|
if (isString(a) && isObject(b)) {
|
2182
|
-
if (a === "")
|
2183
|
-
throw new Error("The id can't be empty");
|
4169
|
+
if (a === "") throw new Error("The id can't be empty");
|
2184
4170
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2185
|
-
return await __privateMethod$2(this,
|
4171
|
+
return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: true, ifVersion });
|
2186
4172
|
}
|
2187
|
-
if (isObject(a) && isString(a.
|
2188
|
-
if (a.
|
2189
|
-
throw new Error("The id can't be empty");
|
4173
|
+
if (isObject(a) && isString(a.xata_id)) {
|
4174
|
+
if (a.xata_id === "") throw new Error("The id can't be empty");
|
2190
4175
|
const columns = isValidSelectableColumns(b) ? b : void 0;
|
2191
|
-
return await __privateMethod$2(this,
|
4176
|
+
return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, {
|
4177
|
+
createOnly: true,
|
4178
|
+
ifVersion
|
4179
|
+
});
|
2192
4180
|
}
|
2193
4181
|
if (isObject(a)) {
|
2194
4182
|
const columns = isValidSelectableColumns(b) ? b : void 0;
|
2195
|
-
return __privateMethod$2(this,
|
4183
|
+
return __privateMethod$2(this, _RestRepository_instances, insertRecordWithoutId_fn).call(this, a, columns);
|
2196
4184
|
}
|
2197
4185
|
throw new Error("Invalid arguments for create method");
|
2198
4186
|
});
|
@@ -2201,12 +4189,11 @@ class RestRepository extends Query {
|
|
2201
4189
|
return __privateGet$2(this, _trace).call(this, "read", async () => {
|
2202
4190
|
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
2203
4191
|
if (Array.isArray(a)) {
|
2204
|
-
if (a.length === 0)
|
2205
|
-
return [];
|
4192
|
+
if (a.length === 0) return [];
|
2206
4193
|
const ids = a.map((item) => extractId(item));
|
2207
|
-
const finalObjects = await this.getAll({ filter: {
|
4194
|
+
const finalObjects = await this.getAll({ filter: { xata_id: { $any: compact(ids) } }, columns });
|
2208
4195
|
const dictionary = finalObjects.reduce((acc, object) => {
|
2209
|
-
acc[object.
|
4196
|
+
acc[object.xata_id] = object;
|
2210
4197
|
return acc;
|
2211
4198
|
}, {});
|
2212
4199
|
return ids.map((id2) => dictionary[id2 ?? ""] ?? null);
|
@@ -2225,7 +4212,7 @@ class RestRepository extends Query {
|
|
2225
4212
|
queryParams: { columns },
|
2226
4213
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2227
4214
|
});
|
2228
|
-
const schemaTables = await __privateMethod$2(this,
|
4215
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2229
4216
|
return initObject(
|
2230
4217
|
__privateGet$2(this, _db),
|
2231
4218
|
schemaTables,
|
@@ -2266,11 +4253,10 @@ class RestRepository extends Query {
|
|
2266
4253
|
return __privateGet$2(this, _trace).call(this, "update", async () => {
|
2267
4254
|
const ifVersion = parseIfVersion(b, c, d);
|
2268
4255
|
if (Array.isArray(a)) {
|
2269
|
-
if (a.length === 0)
|
2270
|
-
|
2271
|
-
const existing = await this.read(a, ["id"]);
|
4256
|
+
if (a.length === 0) return [];
|
4257
|
+
const existing = await this.read(a, ["xata_id"]);
|
2272
4258
|
const updates = a.filter((_item, index) => existing[index] !== null);
|
2273
|
-
await __privateMethod$2(this,
|
4259
|
+
await __privateMethod$2(this, _RestRepository_instances, updateRecords_fn).call(this, updates, {
|
2274
4260
|
ifVersion,
|
2275
4261
|
upsert: false
|
2276
4262
|
});
|
@@ -2281,15 +4267,14 @@ class RestRepository extends Query {
|
|
2281
4267
|
try {
|
2282
4268
|
if (isString(a) && isObject(b)) {
|
2283
4269
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2284
|
-
return await __privateMethod$2(this,
|
4270
|
+
return await __privateMethod$2(this, _RestRepository_instances, updateRecordWithID_fn).call(this, a, b, columns, { ifVersion });
|
2285
4271
|
}
|
2286
|
-
if (isObject(a) && isString(a.
|
4272
|
+
if (isObject(a) && isString(a.xata_id)) {
|
2287
4273
|
const columns = isValidSelectableColumns(b) ? b : void 0;
|
2288
|
-
return await __privateMethod$2(this,
|
4274
|
+
return await __privateMethod$2(this, _RestRepository_instances, updateRecordWithID_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, { ifVersion });
|
2289
4275
|
}
|
2290
4276
|
} catch (error) {
|
2291
|
-
if (error.status === 422)
|
2292
|
-
return null;
|
4277
|
+
if (error.status === 422) return null;
|
2293
4278
|
throw error;
|
2294
4279
|
}
|
2295
4280
|
throw new Error("Invalid arguments for update method");
|
@@ -2318,9 +4303,8 @@ class RestRepository extends Query {
|
|
2318
4303
|
return __privateGet$2(this, _trace).call(this, "createOrUpdate", async () => {
|
2319
4304
|
const ifVersion = parseIfVersion(b, c, d);
|
2320
4305
|
if (Array.isArray(a)) {
|
2321
|
-
if (a.length === 0)
|
2322
|
-
|
2323
|
-
await __privateMethod$2(this, _updateRecords, updateRecords_fn).call(this, a, {
|
4306
|
+
if (a.length === 0) return [];
|
4307
|
+
await __privateMethod$2(this, _RestRepository_instances, updateRecords_fn).call(this, a, {
|
2324
4308
|
ifVersion,
|
2325
4309
|
upsert: true
|
2326
4310
|
});
|
@@ -2329,21 +4313,19 @@ class RestRepository extends Query {
|
|
2329
4313
|
return result;
|
2330
4314
|
}
|
2331
4315
|
if (isString(a) && isObject(b)) {
|
2332
|
-
if (a === "")
|
2333
|
-
throw new Error("The id can't be empty");
|
4316
|
+
if (a === "") throw new Error("The id can't be empty");
|
2334
4317
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2335
|
-
return await __privateMethod$2(this,
|
4318
|
+
return await __privateMethod$2(this, _RestRepository_instances, upsertRecordWithID_fn).call(this, a, b, columns, { ifVersion });
|
2336
4319
|
}
|
2337
|
-
if (isObject(a) && isString(a.
|
2338
|
-
if (a.
|
2339
|
-
throw new Error("The id can't be empty");
|
4320
|
+
if (isObject(a) && isString(a.xata_id)) {
|
4321
|
+
if (a.xata_id === "") throw new Error("The id can't be empty");
|
2340
4322
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2341
|
-
return await __privateMethod$2(this,
|
4323
|
+
return await __privateMethod$2(this, _RestRepository_instances, upsertRecordWithID_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, { ifVersion });
|
2342
4324
|
}
|
2343
4325
|
if (!isDefined(a) && isObject(b)) {
|
2344
4326
|
return await this.create(b, c);
|
2345
4327
|
}
|
2346
|
-
if (isObject(a) && !isDefined(a.
|
4328
|
+
if (isObject(a) && !isDefined(a.xata_id)) {
|
2347
4329
|
return await this.create(a, b);
|
2348
4330
|
}
|
2349
4331
|
throw new Error("Invalid arguments for createOrUpdate method");
|
@@ -2353,29 +4335,29 @@ class RestRepository extends Query {
|
|
2353
4335
|
return __privateGet$2(this, _trace).call(this, "createOrReplace", async () => {
|
2354
4336
|
const ifVersion = parseIfVersion(b, c, d);
|
2355
4337
|
if (Array.isArray(a)) {
|
2356
|
-
if (a.length === 0)
|
2357
|
-
|
2358
|
-
const ids = await __privateMethod$2(this, _insertRecords, insertRecords_fn).call(this, a, { ifVersion, createOnly: false });
|
4338
|
+
if (a.length === 0) return [];
|
4339
|
+
const ids = await __privateMethod$2(this, _RestRepository_instances, insertRecords_fn).call(this, a, { ifVersion, createOnly: false });
|
2359
4340
|
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
2360
4341
|
const result = await this.read(ids, columns);
|
2361
4342
|
return result;
|
2362
4343
|
}
|
2363
4344
|
if (isString(a) && isObject(b)) {
|
2364
|
-
if (a === "")
|
2365
|
-
throw new Error("The id can't be empty");
|
4345
|
+
if (a === "") throw new Error("The id can't be empty");
|
2366
4346
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2367
|
-
return await __privateMethod$2(this,
|
4347
|
+
return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: false, ifVersion });
|
2368
4348
|
}
|
2369
|
-
if (isObject(a) && isString(a.
|
2370
|
-
if (a.
|
2371
|
-
throw new Error("The id can't be empty");
|
4349
|
+
if (isObject(a) && isString(a.xata_id)) {
|
4350
|
+
if (a.xata_id === "") throw new Error("The id can't be empty");
|
2372
4351
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2373
|
-
return await __privateMethod$2(this,
|
4352
|
+
return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, {
|
4353
|
+
createOnly: false,
|
4354
|
+
ifVersion
|
4355
|
+
});
|
2374
4356
|
}
|
2375
4357
|
if (!isDefined(a) && isObject(b)) {
|
2376
4358
|
return await this.create(b, c);
|
2377
4359
|
}
|
2378
|
-
if (isObject(a) && !isDefined(a.
|
4360
|
+
if (isObject(a) && !isDefined(a.xata_id)) {
|
2379
4361
|
return await this.create(a, b);
|
2380
4362
|
}
|
2381
4363
|
throw new Error("Invalid arguments for createOrReplace method");
|
@@ -2384,25 +4366,22 @@ class RestRepository extends Query {
|
|
2384
4366
|
async delete(a, b) {
|
2385
4367
|
return __privateGet$2(this, _trace).call(this, "delete", async () => {
|
2386
4368
|
if (Array.isArray(a)) {
|
2387
|
-
if (a.length === 0)
|
2388
|
-
return [];
|
4369
|
+
if (a.length === 0) return [];
|
2389
4370
|
const ids = a.map((o) => {
|
2390
|
-
if (isString(o))
|
2391
|
-
|
2392
|
-
if (isString(o.id))
|
2393
|
-
return o.id;
|
4371
|
+
if (isString(o)) return o;
|
4372
|
+
if (isString(o.xata_id)) return o.xata_id;
|
2394
4373
|
throw new Error("Invalid arguments for delete method");
|
2395
4374
|
});
|
2396
4375
|
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
2397
4376
|
const result = await this.read(a, columns);
|
2398
|
-
await __privateMethod$2(this,
|
4377
|
+
await __privateMethod$2(this, _RestRepository_instances, deleteRecords_fn).call(this, ids);
|
2399
4378
|
return result;
|
2400
4379
|
}
|
2401
4380
|
if (isString(a)) {
|
2402
|
-
return __privateMethod$2(this,
|
4381
|
+
return __privateMethod$2(this, _RestRepository_instances, deleteRecord_fn).call(this, a, b);
|
2403
4382
|
}
|
2404
|
-
if (isObject(a) && isString(a.
|
2405
|
-
return __privateMethod$2(this,
|
4383
|
+
if (isObject(a) && isString(a.xata_id)) {
|
4384
|
+
return __privateMethod$2(this, _RestRepository_instances, deleteRecord_fn).call(this, a.xata_id, b);
|
2406
4385
|
}
|
2407
4386
|
throw new Error("Invalid arguments for delete method");
|
2408
4387
|
});
|
@@ -2446,7 +4425,7 @@ class RestRepository extends Query {
|
|
2446
4425
|
},
|
2447
4426
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2448
4427
|
});
|
2449
|
-
const schemaTables = await __privateMethod$2(this,
|
4428
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2450
4429
|
return {
|
2451
4430
|
records: records.map((item) => initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), item, ["*"])),
|
2452
4431
|
totalCount
|
@@ -2471,7 +4450,7 @@ class RestRepository extends Query {
|
|
2471
4450
|
},
|
2472
4451
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2473
4452
|
});
|
2474
|
-
const schemaTables = await __privateMethod$2(this,
|
4453
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2475
4454
|
return {
|
2476
4455
|
records: records.map((item) => initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), item, ["*"])),
|
2477
4456
|
totalCount
|
@@ -2513,7 +4492,7 @@ class RestRepository extends Query {
|
|
2513
4492
|
fetchOptions: data.fetchOptions,
|
2514
4493
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2515
4494
|
});
|
2516
|
-
const schemaTables = await __privateMethod$2(this,
|
4495
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2517
4496
|
const records = objects.map(
|
2518
4497
|
(record) => initObject(
|
2519
4498
|
__privateGet$2(this, _db),
|
@@ -2547,7 +4526,7 @@ class RestRepository extends Query {
|
|
2547
4526
|
},
|
2548
4527
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2549
4528
|
});
|
2550
|
-
const schemaTables = await __privateMethod$2(this,
|
4529
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2551
4530
|
return {
|
2552
4531
|
...result,
|
2553
4532
|
summaries: result.summaries.map(
|
@@ -2595,9 +4574,9 @@ _getFetchProps = new WeakMap();
|
|
2595
4574
|
_db = new WeakMap();
|
2596
4575
|
_schemaTables = new WeakMap();
|
2597
4576
|
_trace = new WeakMap();
|
2598
|
-
|
4577
|
+
_RestRepository_instances = new WeakSet();
|
2599
4578
|
insertRecordWithoutId_fn = async function(object, columns = ["*"]) {
|
2600
|
-
const record = await __privateMethod$2(this,
|
4579
|
+
const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2601
4580
|
const response = await insertRecord({
|
2602
4581
|
pathParams: {
|
2603
4582
|
workspace: "{workspaceId}",
|
@@ -2609,14 +4588,12 @@ insertRecordWithoutId_fn = async function(object, columns = ["*"]) {
|
|
2609
4588
|
body: record,
|
2610
4589
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2611
4590
|
});
|
2612
|
-
const schemaTables = await __privateMethod$2(this,
|
4591
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2613
4592
|
return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
|
2614
4593
|
};
|
2615
|
-
_insertRecordWithId = new WeakSet();
|
2616
4594
|
insertRecordWithId_fn = async function(recordId, object, columns = ["*"], { createOnly, ifVersion }) {
|
2617
|
-
if (!recordId)
|
2618
|
-
|
2619
|
-
const record = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
|
4595
|
+
if (!recordId) return null;
|
4596
|
+
const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2620
4597
|
const response = await insertRecordWithID({
|
2621
4598
|
pathParams: {
|
2622
4599
|
workspace: "{workspaceId}",
|
@@ -2629,13 +4606,12 @@ insertRecordWithId_fn = async function(recordId, object, columns = ["*"], { crea
|
|
2629
4606
|
queryParams: { createOnly, columns, ifVersion },
|
2630
4607
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2631
4608
|
});
|
2632
|
-
const schemaTables = await __privateMethod$2(this,
|
4609
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2633
4610
|
return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
|
2634
4611
|
};
|
2635
|
-
_insertRecords = new WeakSet();
|
2636
4612
|
insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
|
2637
4613
|
const operations = await promiseMap(objects, async (object) => {
|
2638
|
-
const record = await __privateMethod$2(this,
|
4614
|
+
const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2639
4615
|
return { insert: { table: __privateGet$2(this, _table), record, createOnly, ifVersion } };
|
2640
4616
|
});
|
2641
4617
|
const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
|
@@ -2660,11 +4636,9 @@ insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
|
|
2660
4636
|
}
|
2661
4637
|
return ids;
|
2662
4638
|
};
|
2663
|
-
_updateRecordWithID = new WeakSet();
|
2664
4639
|
updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVersion }) {
|
2665
|
-
if (!recordId)
|
2666
|
-
|
2667
|
-
const { id: _id, ...record } = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
|
4640
|
+
if (!recordId) return null;
|
4641
|
+
const { xata_id: _id, ...record } = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2668
4642
|
try {
|
2669
4643
|
const response = await updateRecordWithID({
|
2670
4644
|
pathParams: {
|
@@ -2678,7 +4652,7 @@ updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
|
|
2678
4652
|
body: record,
|
2679
4653
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2680
4654
|
});
|
2681
|
-
const schemaTables = await __privateMethod$2(this,
|
4655
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2682
4656
|
return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
|
2683
4657
|
} catch (e) {
|
2684
4658
|
if (isObject(e) && e.status === 404) {
|
@@ -2687,11 +4661,10 @@ updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
|
|
2687
4661
|
throw e;
|
2688
4662
|
}
|
2689
4663
|
};
|
2690
|
-
_updateRecords = new WeakSet();
|
2691
4664
|
updateRecords_fn = async function(objects, { ifVersion, upsert }) {
|
2692
|
-
const operations = await promiseMap(objects, async ({
|
2693
|
-
const fields = await __privateMethod$2(this,
|
2694
|
-
return { update: { table: __privateGet$2(this, _table), id, ifVersion, upsert, fields } };
|
4665
|
+
const operations = await promiseMap(objects, async ({ xata_id, ...object }) => {
|
4666
|
+
const fields = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
4667
|
+
return { update: { table: __privateGet$2(this, _table), id: xata_id, ifVersion, upsert, fields } };
|
2695
4668
|
});
|
2696
4669
|
const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
|
2697
4670
|
const ids = [];
|
@@ -2715,10 +4688,8 @@ updateRecords_fn = async function(objects, { ifVersion, upsert }) {
|
|
2715
4688
|
}
|
2716
4689
|
return ids;
|
2717
4690
|
};
|
2718
|
-
_upsertRecordWithID = new WeakSet();
|
2719
4691
|
upsertRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVersion }) {
|
2720
|
-
if (!recordId)
|
2721
|
-
return null;
|
4692
|
+
if (!recordId) return null;
|
2722
4693
|
const response = await upsertRecordWithID({
|
2723
4694
|
pathParams: {
|
2724
4695
|
workspace: "{workspaceId}",
|
@@ -2731,13 +4702,11 @@ upsertRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
|
|
2731
4702
|
body: object,
|
2732
4703
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2733
4704
|
});
|
2734
|
-
const schemaTables = await __privateMethod$2(this,
|
4705
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2735
4706
|
return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
|
2736
4707
|
};
|
2737
|
-
_deleteRecord = new WeakSet();
|
2738
4708
|
deleteRecord_fn = async function(recordId, columns = ["*"]) {
|
2739
|
-
if (!recordId)
|
2740
|
-
return null;
|
4709
|
+
if (!recordId) return null;
|
2741
4710
|
try {
|
2742
4711
|
const response = await deleteRecord({
|
2743
4712
|
pathParams: {
|
@@ -2750,7 +4719,7 @@ deleteRecord_fn = async function(recordId, columns = ["*"]) {
|
|
2750
4719
|
queryParams: { columns },
|
2751
4720
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2752
4721
|
});
|
2753
|
-
const schemaTables = await __privateMethod$2(this,
|
4722
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2754
4723
|
return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
|
2755
4724
|
} catch (e) {
|
2756
4725
|
if (isObject(e) && e.status === 404) {
|
@@ -2759,7 +4728,6 @@ deleteRecord_fn = async function(recordId, columns = ["*"]) {
|
|
2759
4728
|
throw e;
|
2760
4729
|
}
|
2761
4730
|
};
|
2762
|
-
_deleteRecords = new WeakSet();
|
2763
4731
|
deleteRecords_fn = async function(recordIds) {
|
2764
4732
|
const chunkedOperations = chunk(
|
2765
4733
|
compact(recordIds).map((id) => ({ delete: { table: __privateGet$2(this, _table), id } })),
|
@@ -2777,10 +4745,8 @@ deleteRecords_fn = async function(recordIds) {
|
|
2777
4745
|
});
|
2778
4746
|
}
|
2779
4747
|
};
|
2780
|
-
_getSchemaTables = new WeakSet();
|
2781
4748
|
getSchemaTables_fn = async function() {
|
2782
|
-
if (__privateGet$2(this, _schemaTables))
|
2783
|
-
return __privateGet$2(this, _schemaTables);
|
4749
|
+
if (__privateGet$2(this, _schemaTables)) return __privateGet$2(this, _schemaTables);
|
2784
4750
|
const { schema } = await getBranchDetails({
|
2785
4751
|
pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
|
2786
4752
|
...__privateGet$2(this, _getFetchProps).call(this)
|
@@ -2788,20 +4754,17 @@ getSchemaTables_fn = async function() {
|
|
2788
4754
|
__privateSet$1(this, _schemaTables, schema.tables);
|
2789
4755
|
return schema.tables;
|
2790
4756
|
};
|
2791
|
-
_transformObjectToApi = new WeakSet();
|
2792
4757
|
transformObjectToApi_fn = async function(object) {
|
2793
|
-
const schemaTables = await __privateMethod$2(this,
|
4758
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2794
4759
|
const schema = schemaTables.find((table) => table.name === __privateGet$2(this, _table));
|
2795
|
-
if (!schema)
|
2796
|
-
throw new Error(`Table ${__privateGet$2(this, _table)} not found in schema`);
|
4760
|
+
if (!schema) throw new Error(`Table ${__privateGet$2(this, _table)} not found in schema`);
|
2797
4761
|
const result = {};
|
2798
4762
|
for (const [key, value] of Object.entries(object)) {
|
2799
|
-
if (
|
2800
|
-
continue;
|
4763
|
+
if (["xata_version", "xata_createdat", "xata_updatedat"].includes(key)) continue;
|
2801
4764
|
const type = schema.columns.find((column) => column.name === key)?.type;
|
2802
4765
|
switch (type) {
|
2803
4766
|
case "link": {
|
2804
|
-
result[key] = isIdentifiable(value) ? value.
|
4767
|
+
result[key] = isIdentifiable(value) ? value.xata_id : value;
|
2805
4768
|
break;
|
2806
4769
|
}
|
2807
4770
|
case "datetime": {
|
@@ -2825,14 +4788,11 @@ transformObjectToApi_fn = async function(object) {
|
|
2825
4788
|
};
|
2826
4789
|
const initObject = (db, schemaTables, table, object, selectedColumns) => {
|
2827
4790
|
const data = {};
|
2828
|
-
|
2829
|
-
Object.assign(data, rest);
|
4791
|
+
Object.assign(data, { ...object });
|
2830
4792
|
const { columns } = schemaTables.find(({ name }) => name === table) ?? {};
|
2831
|
-
if (!columns)
|
2832
|
-
console.error(`Table ${table} not found in schema`);
|
4793
|
+
if (!columns) console.error(`Table ${table} not found in schema`);
|
2833
4794
|
for (const column of columns ?? []) {
|
2834
|
-
if (!isValidColumn(selectedColumns, column))
|
2835
|
-
continue;
|
4795
|
+
if (!isValidColumn(selectedColumns, column)) continue;
|
2836
4796
|
const value = data[column.name];
|
2837
4797
|
switch (column.type) {
|
2838
4798
|
case "datetime": {
|
@@ -2889,28 +4849,21 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
|
|
2889
4849
|
}
|
2890
4850
|
}
|
2891
4851
|
const record = { ...data };
|
2892
|
-
const metadata = xata !== void 0 ? { ...xata, createdAt: new Date(xata.createdAt), updatedAt: new Date(xata.updatedAt) } : void 0;
|
2893
4852
|
record.read = function(columns2) {
|
2894
|
-
return db[table].read(record["
|
4853
|
+
return db[table].read(record["xata_id"], columns2);
|
2895
4854
|
};
|
2896
4855
|
record.update = function(data2, b, c) {
|
2897
4856
|
const columns2 = isValidSelectableColumns(b) ? b : ["*"];
|
2898
4857
|
const ifVersion = parseIfVersion(b, c);
|
2899
|
-
return db[table].update(record["
|
4858
|
+
return db[table].update(record["xata_id"], data2, columns2, { ifVersion });
|
2900
4859
|
};
|
2901
4860
|
record.replace = function(data2, b, c) {
|
2902
4861
|
const columns2 = isValidSelectableColumns(b) ? b : ["*"];
|
2903
4862
|
const ifVersion = parseIfVersion(b, c);
|
2904
|
-
return db[table].createOrReplace(record["
|
4863
|
+
return db[table].createOrReplace(record["xata_id"], data2, columns2, { ifVersion });
|
2905
4864
|
};
|
2906
4865
|
record.delete = function() {
|
2907
|
-
return db[table].delete(record["
|
2908
|
-
};
|
2909
|
-
if (metadata !== void 0) {
|
2910
|
-
record.xata = Object.freeze(metadata);
|
2911
|
-
}
|
2912
|
-
record.getMetadata = function() {
|
2913
|
-
return record.xata;
|
4866
|
+
return db[table].delete(record["xata_id"]);
|
2914
4867
|
};
|
2915
4868
|
record.toSerializable = function() {
|
2916
4869
|
return JSON.parse(JSON.stringify(record));
|
@@ -2918,22 +4871,19 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
|
|
2918
4871
|
record.toString = function() {
|
2919
4872
|
return JSON.stringify(record);
|
2920
4873
|
};
|
2921
|
-
for (const prop of ["read", "update", "replace", "delete", "
|
4874
|
+
for (const prop of ["read", "update", "replace", "delete", "toSerializable", "toString"]) {
|
2922
4875
|
Object.defineProperty(record, prop, { enumerable: false });
|
2923
4876
|
}
|
2924
4877
|
Object.freeze(record);
|
2925
4878
|
return record;
|
2926
4879
|
};
|
2927
4880
|
function extractId(value) {
|
2928
|
-
if (isString(value))
|
2929
|
-
|
2930
|
-
if (isObject(value) && isString(value.id))
|
2931
|
-
return value.id;
|
4881
|
+
if (isString(value)) return value;
|
4882
|
+
if (isObject(value) && isString(value.xata_id)) return value.xata_id;
|
2932
4883
|
return void 0;
|
2933
4884
|
}
|
2934
4885
|
function isValidColumn(columns, column) {
|
2935
|
-
if (columns.includes("*"))
|
2936
|
-
return true;
|
4886
|
+
if (columns.includes("*")) return true;
|
2937
4887
|
return columns.filter((item) => isString(item) && item.startsWith(column.name)).length > 0;
|
2938
4888
|
}
|
2939
4889
|
function parseIfVersion(...args) {
|
@@ -2973,19 +4923,12 @@ const includesAll = (value) => ({ $includesAll: value });
|
|
2973
4923
|
const includesNone = (value) => ({ $includesNone: value });
|
2974
4924
|
const includesAny = (value) => ({ $includesAny: value });
|
2975
4925
|
|
2976
|
-
var
|
2977
|
-
|
2978
|
-
throw TypeError("Cannot " + msg);
|
2979
|
-
};
|
2980
|
-
var __privateGet$1 = (obj, member, getter) => {
|
2981
|
-
__accessCheck$2(obj, member, "read from private field");
|
2982
|
-
return getter ? getter.call(obj) : member.get(obj);
|
2983
|
-
};
|
2984
|
-
var __privateAdd$2 = (obj, member, value) => {
|
2985
|
-
if (member.has(obj))
|
2986
|
-
throw TypeError("Cannot add the same private member more than once");
|
2987
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
4926
|
+
var __typeError$2 = (msg) => {
|
4927
|
+
throw TypeError(msg);
|
2988
4928
|
};
|
4929
|
+
var __accessCheck$2 = (obj, member, msg) => member.has(obj) || __typeError$2("Cannot " + msg);
|
4930
|
+
var __privateGet$1 = (obj, member, getter) => (__accessCheck$2(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
4931
|
+
var __privateAdd$2 = (obj, member, value) => member.has(obj) ? __typeError$2("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
2989
4932
|
var _tables;
|
2990
4933
|
class SchemaPlugin extends XataPlugin {
|
2991
4934
|
constructor() {
|
@@ -2997,8 +4940,7 @@ class SchemaPlugin extends XataPlugin {
|
|
2997
4940
|
{},
|
2998
4941
|
{
|
2999
4942
|
get: (_target, table) => {
|
3000
|
-
if (!isString(table))
|
3001
|
-
throw new Error("Invalid table name");
|
4943
|
+
if (!isString(table)) throw new Error("Invalid table name");
|
3002
4944
|
if (__privateGet$1(this, _tables)[table] === void 0) {
|
3003
4945
|
__privateGet$1(this, _tables)[table] = new RestRepository({ db, pluginOptions, table, schemaTables: pluginOptions.tables });
|
3004
4946
|
}
|
@@ -3089,42 +5031,35 @@ function getContentType(file) {
|
|
3089
5031
|
return "application/octet-stream";
|
3090
5032
|
}
|
3091
5033
|
|
3092
|
-
var
|
3093
|
-
|
3094
|
-
throw TypeError("Cannot " + msg);
|
3095
|
-
};
|
3096
|
-
var __privateAdd$1 = (obj, member, value) => {
|
3097
|
-
if (member.has(obj))
|
3098
|
-
throw TypeError("Cannot add the same private member more than once");
|
3099
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
3100
|
-
};
|
3101
|
-
var __privateMethod$1 = (obj, member, method) => {
|
3102
|
-
__accessCheck$1(obj, member, "access private method");
|
3103
|
-
return method;
|
5034
|
+
var __typeError$1 = (msg) => {
|
5035
|
+
throw TypeError(msg);
|
3104
5036
|
};
|
3105
|
-
var
|
5037
|
+
var __accessCheck$1 = (obj, member, msg) => member.has(obj) || __typeError$1("Cannot " + msg);
|
5038
|
+
var __privateAdd$1 = (obj, member, value) => member.has(obj) ? __typeError$1("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
5039
|
+
var __privateMethod$1 = (obj, member, method) => (__accessCheck$1(obj, member, "access private method"), method);
|
5040
|
+
var _SearchPlugin_instances, search_fn;
|
3106
5041
|
class SearchPlugin extends XataPlugin {
|
3107
5042
|
constructor(db) {
|
3108
5043
|
super();
|
3109
5044
|
this.db = db;
|
3110
|
-
__privateAdd$1(this,
|
5045
|
+
__privateAdd$1(this, _SearchPlugin_instances);
|
3111
5046
|
}
|
3112
5047
|
build(pluginOptions) {
|
3113
5048
|
return {
|
3114
5049
|
all: async (query, options = {}) => {
|
3115
|
-
const { records, totalCount } = await __privateMethod$1(this,
|
5050
|
+
const { records, totalCount } = await __privateMethod$1(this, _SearchPlugin_instances, search_fn).call(this, query, options, pluginOptions);
|
3116
5051
|
return {
|
3117
5052
|
totalCount,
|
3118
5053
|
records: records.map((record) => {
|
3119
|
-
const
|
5054
|
+
const table = record.xata_table;
|
3120
5055
|
return { table, record: initObject(this.db, pluginOptions.tables, table, record, ["*"]) };
|
3121
5056
|
})
|
3122
5057
|
};
|
3123
5058
|
},
|
3124
5059
|
byTable: async (query, options = {}) => {
|
3125
|
-
const { records: rawRecords, totalCount } = await __privateMethod$1(this,
|
5060
|
+
const { records: rawRecords, totalCount } = await __privateMethod$1(this, _SearchPlugin_instances, search_fn).call(this, query, options, pluginOptions);
|
3126
5061
|
const records = rawRecords.reduce((acc, record) => {
|
3127
|
-
const
|
5062
|
+
const table = record.xata_table;
|
3128
5063
|
const items = acc[table] ?? [];
|
3129
5064
|
const item = initObject(this.db, pluginOptions.tables, table, record, ["*"]);
|
3130
5065
|
return { ...acc, [table]: [...items, item] };
|
@@ -3134,7 +5069,7 @@ class SearchPlugin extends XataPlugin {
|
|
3134
5069
|
};
|
3135
5070
|
}
|
3136
5071
|
}
|
3137
|
-
|
5072
|
+
_SearchPlugin_instances = new WeakSet();
|
3138
5073
|
search_fn = async function(query, options, pluginOptions) {
|
3139
5074
|
const { tables, fuzziness, highlight, prefix, page } = options ?? {};
|
3140
5075
|
const { records, totalCount } = await searchBranch({
|
@@ -3170,8 +5105,7 @@ function arrayString(val) {
|
|
3170
5105
|
return result;
|
3171
5106
|
}
|
3172
5107
|
function prepareValue(value) {
|
3173
|
-
if (!isDefined(value))
|
3174
|
-
return null;
|
5108
|
+
if (!isDefined(value)) return null;
|
3175
5109
|
if (value instanceof Date) {
|
3176
5110
|
return value.toISOString();
|
3177
5111
|
}
|
@@ -3198,31 +5132,42 @@ function prepareParams(param1, param2) {
|
|
3198
5132
|
return { statement, params: param2?.map((value) => prepareValue(value)) };
|
3199
5133
|
}
|
3200
5134
|
if (isObject(param1)) {
|
3201
|
-
const { statement, params, consistency } = param1;
|
3202
|
-
return { statement, params: params?.map((value) => prepareValue(value)), consistency };
|
5135
|
+
const { statement, params, consistency, responseType } = param1;
|
5136
|
+
return { statement, params: params?.map((value) => prepareValue(value)), consistency, responseType };
|
3203
5137
|
}
|
3204
5138
|
throw new Error("Invalid query");
|
3205
5139
|
}
|
3206
5140
|
|
3207
5141
|
class SQLPlugin extends XataPlugin {
|
3208
5142
|
build(pluginOptions) {
|
3209
|
-
|
5143
|
+
const sqlFunction = async (query, ...parameters) => {
|
3210
5144
|
if (!isParamsObject(query) && (!isTemplateStringsArray(query) || !Array.isArray(parameters))) {
|
3211
5145
|
throw new Error("Invalid usage of `xata.sql`. Please use it as a tagged template or with an object.");
|
3212
5146
|
}
|
3213
|
-
const { statement, params, consistency } = prepareParams(query, parameters);
|
3214
|
-
const {
|
3215
|
-
records,
|
3216
|
-
rows,
|
3217
|
-
warning,
|
3218
|
-
columns = []
|
3219
|
-
} = await sqlQuery({
|
5147
|
+
const { statement, params, consistency, responseType } = prepareParams(query, parameters);
|
5148
|
+
const { warning, columns, ...response } = await sqlQuery({
|
3220
5149
|
pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
|
3221
|
-
body: { statement, params, consistency },
|
5150
|
+
body: { statement, params, consistency, responseType },
|
3222
5151
|
...pluginOptions
|
3223
5152
|
});
|
5153
|
+
const records = "records" in response ? response.records : void 0;
|
5154
|
+
const rows = "rows" in response ? response.rows : void 0;
|
3224
5155
|
return { records, rows, warning, columns };
|
3225
5156
|
};
|
5157
|
+
sqlFunction.connectionString = buildConnectionString(pluginOptions);
|
5158
|
+
sqlFunction.batch = async (query) => {
|
5159
|
+
const { results } = await sqlBatchQuery({
|
5160
|
+
pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
|
5161
|
+
body: {
|
5162
|
+
statements: query.statements.map(({ statement, params }) => ({ statement, params })),
|
5163
|
+
consistency: query.consistency,
|
5164
|
+
responseType: query.responseType
|
5165
|
+
},
|
5166
|
+
...pluginOptions
|
5167
|
+
});
|
5168
|
+
return { results };
|
5169
|
+
};
|
5170
|
+
return sqlFunction;
|
3226
5171
|
}
|
3227
5172
|
}
|
3228
5173
|
function isTemplateStringsArray(strings) {
|
@@ -3231,6 +5176,32 @@ function isTemplateStringsArray(strings) {
|
|
3231
5176
|
function isParamsObject(params) {
|
3232
5177
|
return isObject(params) && "statement" in params;
|
3233
5178
|
}
|
5179
|
+
function buildDomain(host, region) {
|
5180
|
+
switch (host) {
|
5181
|
+
case "production":
|
5182
|
+
return `${region}.sql.xata.sh`;
|
5183
|
+
case "staging":
|
5184
|
+
return `${region}.sql.staging-xata.dev`;
|
5185
|
+
case "dev":
|
5186
|
+
return `${region}.sql.dev-xata.dev`;
|
5187
|
+
case "local":
|
5188
|
+
return "localhost:7654";
|
5189
|
+
default:
|
5190
|
+
throw new Error("Invalid host provider");
|
5191
|
+
}
|
5192
|
+
}
|
5193
|
+
function buildConnectionString({ apiKey, workspacesApiUrl, branch }) {
|
5194
|
+
const url = isString(workspacesApiUrl) ? workspacesApiUrl : workspacesApiUrl("", {});
|
5195
|
+
const parts = parseWorkspacesUrlParts(url);
|
5196
|
+
if (!parts) throw new Error("Invalid workspaces URL");
|
5197
|
+
const { workspace: workspaceSlug, region, database, host } = parts;
|
5198
|
+
const domain = buildDomain(host, region);
|
5199
|
+
const workspace = workspaceSlug.split("-").pop();
|
5200
|
+
if (!workspace || !region || !database || !apiKey || !branch) {
|
5201
|
+
throw new Error("Unable to build xata connection string");
|
5202
|
+
}
|
5203
|
+
return `postgresql://${workspace}:${apiKey}@${domain}/${database}:${branch}?sslmode=require`;
|
5204
|
+
}
|
3234
5205
|
|
3235
5206
|
class TransactionPlugin extends XataPlugin {
|
3236
5207
|
build(pluginOptions) {
|
@@ -3247,41 +5218,27 @@ class TransactionPlugin extends XataPlugin {
|
|
3247
5218
|
}
|
3248
5219
|
}
|
3249
5220
|
|
3250
|
-
var
|
3251
|
-
|
3252
|
-
throw TypeError("Cannot " + msg);
|
3253
|
-
};
|
3254
|
-
var __privateGet = (obj, member, getter) => {
|
3255
|
-
__accessCheck(obj, member, "read from private field");
|
3256
|
-
return getter ? getter.call(obj) : member.get(obj);
|
3257
|
-
};
|
3258
|
-
var __privateAdd = (obj, member, value) => {
|
3259
|
-
if (member.has(obj))
|
3260
|
-
throw TypeError("Cannot add the same private member more than once");
|
3261
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
3262
|
-
};
|
3263
|
-
var __privateSet = (obj, member, value, setter) => {
|
3264
|
-
__accessCheck(obj, member, "write to private field");
|
3265
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
3266
|
-
return value;
|
3267
|
-
};
|
3268
|
-
var __privateMethod = (obj, member, method) => {
|
3269
|
-
__accessCheck(obj, member, "access private method");
|
3270
|
-
return method;
|
5221
|
+
var __typeError = (msg) => {
|
5222
|
+
throw TypeError(msg);
|
3271
5223
|
};
|
5224
|
+
var __accessCheck = (obj, member, msg) => member.has(obj) || __typeError("Cannot " + msg);
|
5225
|
+
var __privateGet = (obj, member, getter) => (__accessCheck(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
5226
|
+
var __privateAdd = (obj, member, value) => member.has(obj) ? __typeError("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
5227
|
+
var __privateSet = (obj, member, value, setter) => (__accessCheck(obj, member, "write to private field"), member.set(obj, value), value);
|
5228
|
+
var __privateMethod = (obj, member, method) => (__accessCheck(obj, member, "access private method"), method);
|
3272
5229
|
const buildClient = (plugins) => {
|
3273
|
-
var _options,
|
5230
|
+
var _options, _instances, parseOptions_fn, getFetchProps_fn, _a;
|
3274
5231
|
return _a = class {
|
3275
5232
|
constructor(options = {}, tables) {
|
3276
|
-
__privateAdd(this,
|
3277
|
-
__privateAdd(this,
|
3278
|
-
|
3279
|
-
const safeOptions = __privateMethod(this, _parseOptions, parseOptions_fn).call(this, options);
|
5233
|
+
__privateAdd(this, _instances);
|
5234
|
+
__privateAdd(this, _options);
|
5235
|
+
const safeOptions = __privateMethod(this, _instances, parseOptions_fn).call(this, options);
|
3280
5236
|
__privateSet(this, _options, safeOptions);
|
3281
5237
|
const pluginOptions = {
|
3282
|
-
...__privateMethod(this,
|
5238
|
+
...__privateMethod(this, _instances, getFetchProps_fn).call(this, safeOptions),
|
3283
5239
|
host: safeOptions.host,
|
3284
|
-
tables
|
5240
|
+
tables,
|
5241
|
+
branch: safeOptions.branch
|
3285
5242
|
};
|
3286
5243
|
const db = new SchemaPlugin().build(pluginOptions);
|
3287
5244
|
const search = new SearchPlugin(db).build(pluginOptions);
|
@@ -3295,8 +5252,7 @@ const buildClient = (plugins) => {
|
|
3295
5252
|
this.sql = sql;
|
3296
5253
|
this.files = files;
|
3297
5254
|
for (const [key, namespace] of Object.entries(plugins ?? {})) {
|
3298
|
-
if (namespace === void 0)
|
3299
|
-
continue;
|
5255
|
+
if (namespace === void 0) continue;
|
3300
5256
|
this[key] = namespace.build(pluginOptions);
|
3301
5257
|
}
|
3302
5258
|
}
|
@@ -3305,8 +5261,8 @@ const buildClient = (plugins) => {
|
|
3305
5261
|
const branch = __privateGet(this, _options).branch;
|
3306
5262
|
return { databaseURL, branch };
|
3307
5263
|
}
|
3308
|
-
}, _options = new WeakMap(),
|
3309
|
-
const enableBrowser = options?.enableBrowser ??
|
5264
|
+
}, _options = new WeakMap(), _instances = new WeakSet(), parseOptions_fn = function(options) {
|
5265
|
+
const enableBrowser = options?.enableBrowser ?? false;
|
3310
5266
|
const isBrowser = typeof window !== "undefined" && typeof Deno === "undefined";
|
3311
5267
|
if (isBrowser && !enableBrowser) {
|
3312
5268
|
throw new Error(
|
@@ -3314,8 +5270,9 @@ const buildClient = (plugins) => {
|
|
3314
5270
|
);
|
3315
5271
|
}
|
3316
5272
|
const fetch = getFetchImplementation(options?.fetch);
|
3317
|
-
const databaseURL = options?.databaseURL
|
3318
|
-
const apiKey = options?.apiKey
|
5273
|
+
const databaseURL = options?.databaseURL;
|
5274
|
+
const apiKey = options?.apiKey;
|
5275
|
+
const branch = options?.branch;
|
3319
5276
|
const trace = options?.trace ?? defaultTrace;
|
3320
5277
|
const clientName = options?.clientName;
|
3321
5278
|
const host = options?.host ?? "production";
|
@@ -3326,25 +5283,8 @@ const buildClient = (plugins) => {
|
|
3326
5283
|
if (!databaseURL) {
|
3327
5284
|
throw new Error("Option databaseURL is required");
|
3328
5285
|
}
|
3329
|
-
|
3330
|
-
|
3331
|
-
const branch = options?.branch || previewBranch || envBranch || "main";
|
3332
|
-
if (!!previewBranch && branch !== previewBranch) {
|
3333
|
-
console.warn(
|
3334
|
-
`Ignoring preview branch ${previewBranch} because branch option was passed to the client constructor with value ${branch}`
|
3335
|
-
);
|
3336
|
-
} else if (!!envBranch && branch !== envBranch) {
|
3337
|
-
console.warn(
|
3338
|
-
`Ignoring branch ${envBranch} because branch option was passed to the client constructor with value ${branch}`
|
3339
|
-
);
|
3340
|
-
} else if (!!previewBranch && !!envBranch && previewBranch !== envBranch) {
|
3341
|
-
console.warn(
|
3342
|
-
`Ignoring preview branch ${previewBranch} and branch ${envBranch} because branch option was passed to the client constructor with value ${branch}`
|
3343
|
-
);
|
3344
|
-
} else if (!previewBranch && !envBranch && options?.branch === void 0) {
|
3345
|
-
console.warn(
|
3346
|
-
`No branch was passed to the client constructor. Using default branch ${branch}. You can set the branch with the environment variable XATA_BRANCH or by passing the branch option to the client constructor.`
|
3347
|
-
);
|
5286
|
+
if (!branch) {
|
5287
|
+
throw new Error("Option branch is required");
|
3348
5288
|
}
|
3349
5289
|
return {
|
3350
5290
|
fetch,
|
@@ -3358,7 +5298,7 @@ const buildClient = (plugins) => {
|
|
3358
5298
|
clientName,
|
3359
5299
|
xataAgentExtra
|
3360
5300
|
};
|
3361
|
-
},
|
5301
|
+
}, getFetchProps_fn = function({
|
3362
5302
|
fetch,
|
3363
5303
|
apiKey,
|
3364
5304
|
databaseURL,
|
@@ -3399,26 +5339,19 @@ class Serializer {
|
|
3399
5339
|
}
|
3400
5340
|
toJSON(data) {
|
3401
5341
|
function visit(obj) {
|
3402
|
-
if (Array.isArray(obj))
|
3403
|
-
return obj.map(visit);
|
5342
|
+
if (Array.isArray(obj)) return obj.map(visit);
|
3404
5343
|
const type = typeof obj;
|
3405
|
-
if (type === "undefined")
|
3406
|
-
|
3407
|
-
if (
|
3408
|
-
return { [META]: "bigint", [VALUE]: obj.toString() };
|
3409
|
-
if (obj === null || type !== "object")
|
3410
|
-
return obj;
|
5344
|
+
if (type === "undefined") return { [META]: "undefined" };
|
5345
|
+
if (type === "bigint") return { [META]: "bigint", [VALUE]: obj.toString() };
|
5346
|
+
if (obj === null || type !== "object") return obj;
|
3411
5347
|
const constructor = obj.constructor;
|
3412
5348
|
const o = { [META]: constructor.name };
|
3413
5349
|
for (const [key, value] of Object.entries(obj)) {
|
3414
5350
|
o[key] = visit(value);
|
3415
5351
|
}
|
3416
|
-
if (constructor === Date)
|
3417
|
-
|
3418
|
-
if (constructor ===
|
3419
|
-
o[VALUE] = Object.fromEntries(obj);
|
3420
|
-
if (constructor === Set)
|
3421
|
-
o[VALUE] = [...obj];
|
5352
|
+
if (constructor === Date) o[VALUE] = obj.toISOString();
|
5353
|
+
if (constructor === Map) o[VALUE] = Object.fromEntries(obj);
|
5354
|
+
if (constructor === Set) o[VALUE] = [...obj];
|
3422
5355
|
return o;
|
3423
5356
|
}
|
3424
5357
|
return JSON.stringify(visit(data));
|
@@ -3431,16 +5364,11 @@ class Serializer {
|
|
3431
5364
|
if (constructor) {
|
3432
5365
|
return Object.assign(Object.create(constructor.prototype), rest);
|
3433
5366
|
}
|
3434
|
-
if (clazz === "Date")
|
3435
|
-
|
3436
|
-
if (clazz === "
|
3437
|
-
|
3438
|
-
if (clazz === "
|
3439
|
-
return new Map(Object.entries(val));
|
3440
|
-
if (clazz === "bigint")
|
3441
|
-
return BigInt(val);
|
3442
|
-
if (clazz === "undefined")
|
3443
|
-
return void 0;
|
5367
|
+
if (clazz === "Date") return new Date(val);
|
5368
|
+
if (clazz === "Set") return new Set(val);
|
5369
|
+
if (clazz === "Map") return new Map(Object.entries(val));
|
5370
|
+
if (clazz === "bigint") return BigInt(val);
|
5371
|
+
if (clazz === "undefined") return void 0;
|
3444
5372
|
return rest;
|
3445
5373
|
}
|
3446
5374
|
return value;
|
@@ -3455,6 +5383,47 @@ const deserialize = (json) => {
|
|
3455
5383
|
return defaultSerializer.fromJSON(json);
|
3456
5384
|
};
|
3457
5385
|
|
5386
|
+
function parseEnvironment(environment) {
|
5387
|
+
try {
|
5388
|
+
if (typeof environment === "function") {
|
5389
|
+
return new Proxy(
|
5390
|
+
{},
|
5391
|
+
{
|
5392
|
+
get(target) {
|
5393
|
+
return environment(target);
|
5394
|
+
}
|
5395
|
+
}
|
5396
|
+
);
|
5397
|
+
}
|
5398
|
+
if (isObject(environment)) {
|
5399
|
+
return environment;
|
5400
|
+
}
|
5401
|
+
} catch (error) {
|
5402
|
+
}
|
5403
|
+
return {};
|
5404
|
+
}
|
5405
|
+
function buildPreviewBranchName({ org, branch }) {
|
5406
|
+
return `preview-${org}-${branch}`;
|
5407
|
+
}
|
5408
|
+
function getDeployPreviewBranch(environment) {
|
5409
|
+
try {
|
5410
|
+
const { deployPreview, deployPreviewBranch, vercelGitCommitRef, vercelGitRepoOwner } = parseEnvironment(environment);
|
5411
|
+
if (deployPreviewBranch) return deployPreviewBranch;
|
5412
|
+
switch (deployPreview) {
|
5413
|
+
case "vercel": {
|
5414
|
+
if (!vercelGitCommitRef || !vercelGitRepoOwner) {
|
5415
|
+
console.warn("XATA_PREVIEW=vercel but VERCEL_GIT_COMMIT_REF or VERCEL_GIT_REPO_OWNER is not valid");
|
5416
|
+
return void 0;
|
5417
|
+
}
|
5418
|
+
return buildPreviewBranchName({ org: vercelGitRepoOwner, branch: vercelGitCommitRef });
|
5419
|
+
}
|
5420
|
+
}
|
5421
|
+
return void 0;
|
5422
|
+
} catch (err) {
|
5423
|
+
return void 0;
|
5424
|
+
}
|
5425
|
+
}
|
5426
|
+
|
3458
5427
|
class XataError extends Error {
|
3459
5428
|
constructor(message, status) {
|
3460
5429
|
super(message);
|
@@ -3462,5 +5431,5 @@ class XataError extends Error {
|
|
3462
5431
|
}
|
3463
5432
|
}
|
3464
5433
|
|
3465
|
-
export { BaseClient, FetcherError, FilesPlugin, operationsByTag as Operations, PAGINATION_DEFAULT_OFFSET, PAGINATION_DEFAULT_SIZE, PAGINATION_MAX_OFFSET, PAGINATION_MAX_SIZE, Page, PageRecordArray, Query, RecordArray, RecordColumnTypes, Repository, RestRepository, SQLPlugin, SchemaPlugin, SearchPlugin, Serializer, TransactionPlugin, XataApiClient, XataApiPlugin, XataError, XataFile, XataPlugin, acceptWorkspaceMemberInvite, adaptTable, addGitBranchesEntry, addTableColumn, aggregateTable, applyBranchSchemaEdit, applyMigration, askTable, askTableSession, branchTransaction, buildClient, buildPreviewBranchName, buildProviderString, bulkInsertTableRecords, cancelWorkspaceMemberInvite, compareBranchSchemas, compareBranchWithUserSchema, compareMigrationRequest, contains, copyBranch, createBranch, createCluster, createDatabase, createMigrationRequest, createTable, createUserAPIKey, createWorkspace, deleteBranch, deleteColumn, deleteDatabase, deleteDatabaseGithubSettings, deleteFile, deleteFileItem, deleteOAuthAccessToken, deleteRecord, deleteTable, deleteUser, deleteUserAPIKey, deleteUserOAuthClient, deleteWorkspace, deserialize, endsWith, equals, executeBranchMigrationPlan, exists, fileAccess, fileUpload, ge,
|
5434
|
+
export { BaseClient, Buffer, FetcherError, FilesPlugin, operationsByTag as Operations, PAGINATION_DEFAULT_OFFSET, PAGINATION_DEFAULT_SIZE, PAGINATION_MAX_OFFSET, PAGINATION_MAX_SIZE, Page, PageRecordArray, Query, RecordArray, RecordColumnTypes, Repository, RestRepository, SQLPlugin, SchemaPlugin, SearchPlugin, Serializer, TransactionPlugin, XataApiClient, XataApiPlugin, XataError, XataFile, XataPlugin, acceptWorkspaceMemberInvite, adaptAllTables, adaptTable, addGitBranchesEntry, addTableColumn, aggregateTable, applyBranchSchemaEdit, applyMigration, askTable, askTableSession, branchTransaction, buildClient, buildPreviewBranchName, buildProviderString, bulkInsertTableRecords, cancelWorkspaceMemberInvite, compareBranchSchemas, compareBranchWithUserSchema, compareMigrationRequest, completeMigration, contains, copyBranch, createBranch, createBranchAsync, createCluster, createDatabase, createMigrationRequest, createTable, createUserAPIKey, createWorkspace, deleteBranch, deleteCluster, deleteColumn, deleteDatabase, deleteDatabaseGithubSettings, deleteFile, deleteFileItem, deleteOAuthAccessToken, deleteRecord, deleteTable, deleteUser, deleteUserAPIKey, deleteUserOAuthClient, deleteWorkspace, deserialize, dropClusterExtension, endsWith, equals, executeBranchMigrationPlan, exists, fileAccess, fileUpload, ge, getAuthorizationCode, getBranchDetails, getBranchList, getBranchMetadata, getBranchMigrationHistory, getBranchMigrationJobStatus, getBranchMigrationPlan, getBranchMoveStatus, getBranchSchemaHistory, getBranchStats, getCluster, getClusterMetrics, getColumn, getDatabaseGithubSettings, getDatabaseList, getDatabaseMetadata, getDatabaseSettings, getDeployPreviewBranch, getFile, getFileItem, getGitBranchesMapping, getHostUrl, getMigrationHistory, getMigrationJobStatus, getMigrationJobs, getMigrationRequest, getMigrationRequestIsMerged, getRecord, getSchema, getSchemas, getTableColumns, getTableSchema, getTaskStatus, getTasks, getUser, getUserAPIKeys, getUserOAuthAccessTokens, 
getUserOAuthClients, getWorkspace, getWorkspaceMembersList, getWorkspaceSettings, getWorkspacesList, grantAuthorizationCode, greaterEquals, greaterThan, greaterThanEquals, gt, gte, iContains, iPattern, includes, includesAll, includesAny, includesNone, insertRecord, insertRecordWithID, installClusterExtension, inviteWorkspaceMember, is, isCursorPaginationOptions, isHostProviderAlias, isHostProviderBuilder, isIdentifiable, isNot, isValidExpandedColumn, isValidSelectableColumns, le, lessEquals, lessThan, lessThanEquals, listClusterBranches, listClusterExtensions, listClusters, listMigrationRequestsCommits, listRegions, lt, lte, mergeMigrationRequest, moveBranch, notExists, operationsByTag, parseProviderString, parseWorkspacesUrlParts, pattern, previewBranchSchemaEdit, pushBranchMigrations, putFile, putFileItem, queryMigrationRequests, queryTable, removeGitBranchesEntry, removeWorkspaceMember, renameDatabase, resendWorkspaceMemberInvite, resolveBranch, rollbackMigration, searchBranch, searchTable, serialize, setTableSchema, sqlBatchQuery, sqlQuery, startMigration, startsWith, summarizeTable, transformImage, updateBranchMetadata, updateBranchSchema, updateCluster, updateColumn, updateDatabaseGithubSettings, updateDatabaseMetadata, updateDatabaseSettings, updateMigrationRequest, updateOAuthAccessToken, updateRecordWithID, updateTable, updateUser, updateWorkspace, updateWorkspaceMemberInvite, updateWorkspaceMemberRole, updateWorkspaceSettings, upsertRecordWithID, vectorSearchTable };
|
3466
5435
|
//# sourceMappingURL=index.mjs.map
|