@xata.io/client 0.0.0-next.v8f0b7eb0f7b8e6548d2d4a1f2d5c089e8e46863e → 0.0.0-next.v93343b9646f57a1e5c51c35eccf0767c2bb80baa
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-add-version.log +1 -1
- package/.turbo/turbo-build.log +4 -4
- package/CHANGELOG.md +33 -3
- package/dist/index.cjs +2571 -585
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.ts +5557 -3884
- package/dist/index.mjs +2552 -582
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.cjs
CHANGED
@@ -24,6 +24,1789 @@ const TraceAttributes = {
|
|
24
24
|
CLOUDFLARE_RAY_ID: "cf.ray"
|
25
25
|
};
|
26
26
|
|
27
|
+
const lookup = [];
|
28
|
+
const revLookup = [];
|
29
|
+
const code = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
|
30
|
+
for (let i = 0, len = code.length; i < len; ++i) {
|
31
|
+
lookup[i] = code[i];
|
32
|
+
revLookup[code.charCodeAt(i)] = i;
|
33
|
+
}
|
34
|
+
revLookup["-".charCodeAt(0)] = 62;
|
35
|
+
revLookup["_".charCodeAt(0)] = 63;
|
36
|
+
function getLens(b64) {
|
37
|
+
const len = b64.length;
|
38
|
+
if (len % 4 > 0) {
|
39
|
+
throw new Error("Invalid string. Length must be a multiple of 4");
|
40
|
+
}
|
41
|
+
let validLen = b64.indexOf("=");
|
42
|
+
if (validLen === -1) validLen = len;
|
43
|
+
const placeHoldersLen = validLen === len ? 0 : 4 - validLen % 4;
|
44
|
+
return [validLen, placeHoldersLen];
|
45
|
+
}
|
46
|
+
function _byteLength(_b64, validLen, placeHoldersLen) {
|
47
|
+
return (validLen + placeHoldersLen) * 3 / 4 - placeHoldersLen;
|
48
|
+
}
|
49
|
+
function toByteArray(b64) {
|
50
|
+
let tmp;
|
51
|
+
const lens = getLens(b64);
|
52
|
+
const validLen = lens[0];
|
53
|
+
const placeHoldersLen = lens[1];
|
54
|
+
const arr = new Uint8Array(_byteLength(b64, validLen, placeHoldersLen));
|
55
|
+
let curByte = 0;
|
56
|
+
const len = placeHoldersLen > 0 ? validLen - 4 : validLen;
|
57
|
+
let i;
|
58
|
+
for (i = 0; i < len; i += 4) {
|
59
|
+
tmp = revLookup[b64.charCodeAt(i)] << 18 | revLookup[b64.charCodeAt(i + 1)] << 12 | revLookup[b64.charCodeAt(i + 2)] << 6 | revLookup[b64.charCodeAt(i + 3)];
|
60
|
+
arr[curByte++] = tmp >> 16 & 255;
|
61
|
+
arr[curByte++] = tmp >> 8 & 255;
|
62
|
+
arr[curByte++] = tmp & 255;
|
63
|
+
}
|
64
|
+
if (placeHoldersLen === 2) {
|
65
|
+
tmp = revLookup[b64.charCodeAt(i)] << 2 | revLookup[b64.charCodeAt(i + 1)] >> 4;
|
66
|
+
arr[curByte++] = tmp & 255;
|
67
|
+
}
|
68
|
+
if (placeHoldersLen === 1) {
|
69
|
+
tmp = revLookup[b64.charCodeAt(i)] << 10 | revLookup[b64.charCodeAt(i + 1)] << 4 | revLookup[b64.charCodeAt(i + 2)] >> 2;
|
70
|
+
arr[curByte++] = tmp >> 8 & 255;
|
71
|
+
arr[curByte++] = tmp & 255;
|
72
|
+
}
|
73
|
+
return arr;
|
74
|
+
}
|
75
|
+
function tripletToBase64(num) {
|
76
|
+
return lookup[num >> 18 & 63] + lookup[num >> 12 & 63] + lookup[num >> 6 & 63] + lookup[num & 63];
|
77
|
+
}
|
78
|
+
function encodeChunk(uint8, start, end) {
|
79
|
+
let tmp;
|
80
|
+
const output = [];
|
81
|
+
for (let i = start; i < end; i += 3) {
|
82
|
+
tmp = (uint8[i] << 16 & 16711680) + (uint8[i + 1] << 8 & 65280) + (uint8[i + 2] & 255);
|
83
|
+
output.push(tripletToBase64(tmp));
|
84
|
+
}
|
85
|
+
return output.join("");
|
86
|
+
}
|
87
|
+
function fromByteArray(uint8) {
|
88
|
+
let tmp;
|
89
|
+
const len = uint8.length;
|
90
|
+
const extraBytes = len % 3;
|
91
|
+
const parts = [];
|
92
|
+
const maxChunkLength = 16383;
|
93
|
+
for (let i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) {
|
94
|
+
parts.push(encodeChunk(uint8, i, i + maxChunkLength > len2 ? len2 : i + maxChunkLength));
|
95
|
+
}
|
96
|
+
if (extraBytes === 1) {
|
97
|
+
tmp = uint8[len - 1];
|
98
|
+
parts.push(lookup[tmp >> 2] + lookup[tmp << 4 & 63] + "==");
|
99
|
+
} else if (extraBytes === 2) {
|
100
|
+
tmp = (uint8[len - 2] << 8) + uint8[len - 1];
|
101
|
+
parts.push(lookup[tmp >> 10] + lookup[tmp >> 4 & 63] + lookup[tmp << 2 & 63] + "=");
|
102
|
+
}
|
103
|
+
return parts.join("");
|
104
|
+
}
|
105
|
+
|
106
|
+
const K_MAX_LENGTH = 2147483647;
|
107
|
+
const MAX_ARGUMENTS_LENGTH = 4096;
|
108
|
+
class Buffer extends Uint8Array {
|
109
|
+
/**
|
110
|
+
* Constructs a new `Buffer` instance.
|
111
|
+
*
|
112
|
+
* @param value
|
113
|
+
* @param encodingOrOffset
|
114
|
+
* @param length
|
115
|
+
*/
|
116
|
+
constructor(value, encodingOrOffset, length) {
|
117
|
+
if (typeof value === "number") {
|
118
|
+
if (typeof encodingOrOffset === "string") {
|
119
|
+
throw new TypeError("The first argument must be of type string, received type number");
|
120
|
+
}
|
121
|
+
if (value < 0) {
|
122
|
+
throw new RangeError("The buffer size cannot be negative");
|
123
|
+
}
|
124
|
+
super(value < 0 ? 0 : Buffer._checked(value) | 0);
|
125
|
+
} else if (typeof value === "string") {
|
126
|
+
if (typeof encodingOrOffset !== "string") {
|
127
|
+
encodingOrOffset = "utf8";
|
128
|
+
}
|
129
|
+
if (!Buffer.isEncoding(encodingOrOffset)) {
|
130
|
+
throw new TypeError("Unknown encoding: " + encodingOrOffset);
|
131
|
+
}
|
132
|
+
const length2 = Buffer.byteLength(value, encodingOrOffset) | 0;
|
133
|
+
super(length2);
|
134
|
+
const written = this.write(value, 0, this.length, encodingOrOffset);
|
135
|
+
if (written !== length2) {
|
136
|
+
throw new TypeError(
|
137
|
+
"Number of bytes written did not match expected length (wrote " + written + ", expected " + length2 + ")"
|
138
|
+
);
|
139
|
+
}
|
140
|
+
} else if (ArrayBuffer.isView(value)) {
|
141
|
+
if (Buffer._isInstance(value, Uint8Array)) {
|
142
|
+
const copy = new Uint8Array(value);
|
143
|
+
const array = copy.buffer;
|
144
|
+
const byteOffset = copy.byteOffset;
|
145
|
+
const length2 = copy.byteLength;
|
146
|
+
if (byteOffset < 0 || array.byteLength < byteOffset) {
|
147
|
+
throw new RangeError("offset is outside of buffer bounds");
|
148
|
+
}
|
149
|
+
if (array.byteLength < byteOffset + (length2 || 0)) {
|
150
|
+
throw new RangeError("length is outside of buffer bounds");
|
151
|
+
}
|
152
|
+
super(new Uint8Array(array, byteOffset, length2));
|
153
|
+
} else {
|
154
|
+
const array = value;
|
155
|
+
const length2 = array.length < 0 ? 0 : Buffer._checked(array.length) | 0;
|
156
|
+
super(new Uint8Array(length2));
|
157
|
+
for (let i = 0; i < length2; i++) {
|
158
|
+
this[i] = array[i] & 255;
|
159
|
+
}
|
160
|
+
}
|
161
|
+
} else if (value == null) {
|
162
|
+
throw new TypeError(
|
163
|
+
"The first argument must be one of type string, Buffer, ArrayBuffer, Array, or Array-like Object. Received type " + typeof value
|
164
|
+
);
|
165
|
+
} else if (Buffer._isInstance(value, ArrayBuffer) || value && Buffer._isInstance(value.buffer, ArrayBuffer)) {
|
166
|
+
const array = value;
|
167
|
+
const byteOffset = encodingOrOffset;
|
168
|
+
if (byteOffset < 0 || array.byteLength < byteOffset) {
|
169
|
+
throw new RangeError("offset is outside of buffer bounds");
|
170
|
+
}
|
171
|
+
if (array.byteLength < byteOffset + (length || 0)) {
|
172
|
+
throw new RangeError("length is outside of buffer bounds");
|
173
|
+
}
|
174
|
+
super(new Uint8Array(array, byteOffset, length));
|
175
|
+
} else if (Array.isArray(value)) {
|
176
|
+
const array = value;
|
177
|
+
const length2 = array.length < 0 ? 0 : Buffer._checked(array.length) | 0;
|
178
|
+
super(new Uint8Array(length2));
|
179
|
+
for (let i = 0; i < length2; i++) {
|
180
|
+
this[i] = array[i] & 255;
|
181
|
+
}
|
182
|
+
} else {
|
183
|
+
throw new TypeError("Unable to determine the correct way to allocate buffer for type " + typeof value);
|
184
|
+
}
|
185
|
+
}
|
186
|
+
/**
|
187
|
+
* Return JSON representation of the buffer.
|
188
|
+
*/
|
189
|
+
toJSON() {
|
190
|
+
return {
|
191
|
+
type: "Buffer",
|
192
|
+
data: Array.prototype.slice.call(this)
|
193
|
+
};
|
194
|
+
}
|
195
|
+
/**
|
196
|
+
* Writes `string` to the buffer at `offset` according to the character encoding in `encoding`. The `length`
|
197
|
+
* parameter is the number of bytes to write. If the buffer does not contain enough space to fit the entire string,
|
198
|
+
* only part of `string` will be written. However, partially encoded characters will not be written.
|
199
|
+
*
|
200
|
+
* @param string String to write to `buf`.
|
201
|
+
* @param offset Number of bytes to skip before starting to write `string`. Default: `0`.
|
202
|
+
* @param length Maximum number of bytes to write: Default: `buf.length - offset`.
|
203
|
+
* @param encoding The character encoding of `string`. Default: `utf8`.
|
204
|
+
*/
|
205
|
+
write(string, offset, length, encoding) {
|
206
|
+
if (typeof offset === "undefined") {
|
207
|
+
encoding = "utf8";
|
208
|
+
length = this.length;
|
209
|
+
offset = 0;
|
210
|
+
} else if (typeof length === "undefined" && typeof offset === "string") {
|
211
|
+
encoding = offset;
|
212
|
+
length = this.length;
|
213
|
+
offset = 0;
|
214
|
+
} else if (typeof offset === "number" && isFinite(offset)) {
|
215
|
+
offset = offset >>> 0;
|
216
|
+
if (typeof length === "number" && isFinite(length)) {
|
217
|
+
length = length >>> 0;
|
218
|
+
encoding ?? (encoding = "utf8");
|
219
|
+
} else if (typeof length === "string") {
|
220
|
+
encoding = length;
|
221
|
+
length = void 0;
|
222
|
+
}
|
223
|
+
} else {
|
224
|
+
throw new Error("Buffer.write(string, encoding, offset[, length]) is no longer supported");
|
225
|
+
}
|
226
|
+
const remaining = this.length - offset;
|
227
|
+
if (typeof length === "undefined" || length > remaining) {
|
228
|
+
length = remaining;
|
229
|
+
}
|
230
|
+
if (string.length > 0 && (length < 0 || offset < 0) || offset > this.length) {
|
231
|
+
throw new RangeError("Attempt to write outside buffer bounds");
|
232
|
+
}
|
233
|
+
encoding || (encoding = "utf8");
|
234
|
+
switch (Buffer._getEncoding(encoding)) {
|
235
|
+
case "hex":
|
236
|
+
return Buffer._hexWrite(this, string, offset, length);
|
237
|
+
case "utf8":
|
238
|
+
return Buffer._utf8Write(this, string, offset, length);
|
239
|
+
case "ascii":
|
240
|
+
case "latin1":
|
241
|
+
case "binary":
|
242
|
+
return Buffer._asciiWrite(this, string, offset, length);
|
243
|
+
case "ucs2":
|
244
|
+
case "utf16le":
|
245
|
+
return Buffer._ucs2Write(this, string, offset, length);
|
246
|
+
case "base64":
|
247
|
+
return Buffer._base64Write(this, string, offset, length);
|
248
|
+
}
|
249
|
+
}
|
250
|
+
/**
|
251
|
+
* Decodes the buffer to a string according to the specified character encoding.
|
252
|
+
* Passing `start` and `end` will decode only a subset of the buffer.
|
253
|
+
*
|
254
|
+
* Note that if the encoding is `utf8` and a byte sequence in the input is not valid UTF-8, then each invalid byte
|
255
|
+
* will be replaced with `U+FFFD`.
|
256
|
+
*
|
257
|
+
* @param encoding
|
258
|
+
* @param start
|
259
|
+
* @param end
|
260
|
+
*/
|
261
|
+
toString(encoding, start, end) {
|
262
|
+
const length = this.length;
|
263
|
+
if (length === 0) {
|
264
|
+
return "";
|
265
|
+
}
|
266
|
+
if (arguments.length === 0) {
|
267
|
+
return Buffer._utf8Slice(this, 0, length);
|
268
|
+
}
|
269
|
+
if (typeof start === "undefined" || start < 0) {
|
270
|
+
start = 0;
|
271
|
+
}
|
272
|
+
if (start > this.length) {
|
273
|
+
return "";
|
274
|
+
}
|
275
|
+
if (typeof end === "undefined" || end > this.length) {
|
276
|
+
end = this.length;
|
277
|
+
}
|
278
|
+
if (end <= 0) {
|
279
|
+
return "";
|
280
|
+
}
|
281
|
+
end >>>= 0;
|
282
|
+
start >>>= 0;
|
283
|
+
if (end <= start) {
|
284
|
+
return "";
|
285
|
+
}
|
286
|
+
if (!encoding) {
|
287
|
+
encoding = "utf8";
|
288
|
+
}
|
289
|
+
switch (Buffer._getEncoding(encoding)) {
|
290
|
+
case "hex":
|
291
|
+
return Buffer._hexSlice(this, start, end);
|
292
|
+
case "utf8":
|
293
|
+
return Buffer._utf8Slice(this, start, end);
|
294
|
+
case "ascii":
|
295
|
+
return Buffer._asciiSlice(this, start, end);
|
296
|
+
case "latin1":
|
297
|
+
case "binary":
|
298
|
+
return Buffer._latin1Slice(this, start, end);
|
299
|
+
case "ucs2":
|
300
|
+
case "utf16le":
|
301
|
+
return Buffer._utf16leSlice(this, start, end);
|
302
|
+
case "base64":
|
303
|
+
return Buffer._base64Slice(this, start, end);
|
304
|
+
}
|
305
|
+
}
|
306
|
+
/**
|
307
|
+
* Returns true if this buffer's is equal to the provided buffer, meaning they share the same exact data.
|
308
|
+
*
|
309
|
+
* @param otherBuffer
|
310
|
+
*/
|
311
|
+
equals(otherBuffer) {
|
312
|
+
if (!Buffer.isBuffer(otherBuffer)) {
|
313
|
+
throw new TypeError("Argument must be a Buffer");
|
314
|
+
}
|
315
|
+
if (this === otherBuffer) {
|
316
|
+
return true;
|
317
|
+
}
|
318
|
+
return Buffer.compare(this, otherBuffer) === 0;
|
319
|
+
}
|
320
|
+
/**
|
321
|
+
* Compares the buffer with `otherBuffer` and returns a number indicating whether the buffer comes before, after,
|
322
|
+
* or is the same as `otherBuffer` in sort order. Comparison is based on the actual sequence of bytes in each
|
323
|
+
* buffer.
|
324
|
+
*
|
325
|
+
* - `0` is returned if `otherBuffer` is the same as this buffer.
|
326
|
+
* - `1` is returned if `otherBuffer` should come before this buffer when sorted.
|
327
|
+
* - `-1` is returned if `otherBuffer` should come after this buffer when sorted.
|
328
|
+
*
|
329
|
+
* @param otherBuffer The buffer to compare to.
|
330
|
+
* @param targetStart The offset within `otherBuffer` at which to begin comparison.
|
331
|
+
* @param targetEnd The offset within `otherBuffer` at which to end comparison (exclusive).
|
332
|
+
* @param sourceStart The offset within this buffer at which to begin comparison.
|
333
|
+
* @param sourceEnd The offset within this buffer at which to end the comparison (exclusive).
|
334
|
+
*/
|
335
|
+
compare(otherBuffer, targetStart, targetEnd, sourceStart, sourceEnd) {
|
336
|
+
if (Buffer._isInstance(otherBuffer, Uint8Array)) {
|
337
|
+
otherBuffer = Buffer.from(otherBuffer, otherBuffer.byteOffset, otherBuffer.byteLength);
|
338
|
+
}
|
339
|
+
if (!Buffer.isBuffer(otherBuffer)) {
|
340
|
+
throw new TypeError("Argument must be a Buffer or Uint8Array");
|
341
|
+
}
|
342
|
+
targetStart ?? (targetStart = 0);
|
343
|
+
targetEnd ?? (targetEnd = otherBuffer ? otherBuffer.length : 0);
|
344
|
+
sourceStart ?? (sourceStart = 0);
|
345
|
+
sourceEnd ?? (sourceEnd = this.length);
|
346
|
+
if (targetStart < 0 || targetEnd > otherBuffer.length || sourceStart < 0 || sourceEnd > this.length) {
|
347
|
+
throw new RangeError("Out of range index");
|
348
|
+
}
|
349
|
+
if (sourceStart >= sourceEnd && targetStart >= targetEnd) {
|
350
|
+
return 0;
|
351
|
+
}
|
352
|
+
if (sourceStart >= sourceEnd) {
|
353
|
+
return -1;
|
354
|
+
}
|
355
|
+
if (targetStart >= targetEnd) {
|
356
|
+
return 1;
|
357
|
+
}
|
358
|
+
targetStart >>>= 0;
|
359
|
+
targetEnd >>>= 0;
|
360
|
+
sourceStart >>>= 0;
|
361
|
+
sourceEnd >>>= 0;
|
362
|
+
if (this === otherBuffer) {
|
363
|
+
return 0;
|
364
|
+
}
|
365
|
+
let x = sourceEnd - sourceStart;
|
366
|
+
let y = targetEnd - targetStart;
|
367
|
+
const len = Math.min(x, y);
|
368
|
+
const thisCopy = this.slice(sourceStart, sourceEnd);
|
369
|
+
const targetCopy = otherBuffer.slice(targetStart, targetEnd);
|
370
|
+
for (let i = 0; i < len; ++i) {
|
371
|
+
if (thisCopy[i] !== targetCopy[i]) {
|
372
|
+
x = thisCopy[i];
|
373
|
+
y = targetCopy[i];
|
374
|
+
break;
|
375
|
+
}
|
376
|
+
}
|
377
|
+
if (x < y) return -1;
|
378
|
+
if (y < x) return 1;
|
379
|
+
return 0;
|
380
|
+
}
|
381
|
+
/**
|
382
|
+
* Copies data from a region of this buffer to a region in `targetBuffer`, even if the `targetBuffer` memory
|
383
|
+
* region overlaps with this buffer.
|
384
|
+
*
|
385
|
+
* @param targetBuffer The target buffer to copy into.
|
386
|
+
* @param targetStart The offset within `targetBuffer` at which to begin writing.
|
387
|
+
* @param sourceStart The offset within this buffer at which to begin copying.
|
388
|
+
* @param sourceEnd The offset within this buffer at which to end copying (exclusive).
|
389
|
+
*/
|
390
|
+
copy(targetBuffer, targetStart, sourceStart, sourceEnd) {
|
391
|
+
if (!Buffer.isBuffer(targetBuffer)) throw new TypeError("argument should be a Buffer");
|
392
|
+
if (!sourceStart) sourceStart = 0;
|
393
|
+
if (!targetStart) targetStart = 0;
|
394
|
+
if (!sourceEnd && sourceEnd !== 0) sourceEnd = this.length;
|
395
|
+
if (targetStart >= targetBuffer.length) targetStart = targetBuffer.length;
|
396
|
+
if (!targetStart) targetStart = 0;
|
397
|
+
if (sourceEnd > 0 && sourceEnd < sourceStart) sourceEnd = sourceStart;
|
398
|
+
if (sourceEnd === sourceStart) return 0;
|
399
|
+
if (targetBuffer.length === 0 || this.length === 0) return 0;
|
400
|
+
if (targetStart < 0) {
|
401
|
+
throw new RangeError("targetStart out of bounds");
|
402
|
+
}
|
403
|
+
if (sourceStart < 0 || sourceStart >= this.length) throw new RangeError("Index out of range");
|
404
|
+
if (sourceEnd < 0) throw new RangeError("sourceEnd out of bounds");
|
405
|
+
if (sourceEnd > this.length) sourceEnd = this.length;
|
406
|
+
if (targetBuffer.length - targetStart < sourceEnd - sourceStart) {
|
407
|
+
sourceEnd = targetBuffer.length - targetStart + sourceStart;
|
408
|
+
}
|
409
|
+
const len = sourceEnd - sourceStart;
|
410
|
+
if (this === targetBuffer && typeof Uint8Array.prototype.copyWithin === "function") {
|
411
|
+
this.copyWithin(targetStart, sourceStart, sourceEnd);
|
412
|
+
} else {
|
413
|
+
Uint8Array.prototype.set.call(targetBuffer, this.subarray(sourceStart, sourceEnd), targetStart);
|
414
|
+
}
|
415
|
+
return len;
|
416
|
+
}
|
417
|
+
/**
|
418
|
+
* Returns a new `Buffer` that references the same memory as the original, but offset and cropped by the `start`
|
419
|
+
* and `end` indices. This is the same behavior as `buf.subarray()`.
|
420
|
+
*
|
421
|
+
* This method is not compatible with the `Uint8Array.prototype.slice()`, which is a superclass of Buffer. To copy
|
422
|
+
* the slice, use `Uint8Array.prototype.slice()`.
|
423
|
+
*
|
424
|
+
* @param start
|
425
|
+
* @param end
|
426
|
+
*/
|
427
|
+
slice(start, end) {
|
428
|
+
if (!start) {
|
429
|
+
start = 0;
|
430
|
+
}
|
431
|
+
const len = this.length;
|
432
|
+
start = ~~start;
|
433
|
+
end = end === void 0 ? len : ~~end;
|
434
|
+
if (start < 0) {
|
435
|
+
start += len;
|
436
|
+
if (start < 0) {
|
437
|
+
start = 0;
|
438
|
+
}
|
439
|
+
} else if (start > len) {
|
440
|
+
start = len;
|
441
|
+
}
|
442
|
+
if (end < 0) {
|
443
|
+
end += len;
|
444
|
+
if (end < 0) {
|
445
|
+
end = 0;
|
446
|
+
}
|
447
|
+
} else if (end > len) {
|
448
|
+
end = len;
|
449
|
+
}
|
450
|
+
if (end < start) {
|
451
|
+
end = start;
|
452
|
+
}
|
453
|
+
const newBuf = this.subarray(start, end);
|
454
|
+
Object.setPrototypeOf(newBuf, Buffer.prototype);
|
455
|
+
return newBuf;
|
456
|
+
}
|
457
|
+
/**
|
458
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits
|
459
|
+
* of accuracy. Behavior is undefined when value is anything other than an unsigned integer.
|
460
|
+
*
|
461
|
+
* @param value Number to write.
|
462
|
+
* @param offset Number of bytes to skip before starting to write.
|
463
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
464
|
+
* @param noAssert
|
465
|
+
* @returns `offset` plus the number of bytes written.
|
466
|
+
*/
|
467
|
+
writeUIntLE(value, offset, byteLength, noAssert) {
|
468
|
+
value = +value;
|
469
|
+
offset = offset >>> 0;
|
470
|
+
byteLength = byteLength >>> 0;
|
471
|
+
if (!noAssert) {
|
472
|
+
const maxBytes = Math.pow(2, 8 * byteLength) - 1;
|
473
|
+
Buffer._checkInt(this, value, offset, byteLength, maxBytes, 0);
|
474
|
+
}
|
475
|
+
let mul = 1;
|
476
|
+
let i = 0;
|
477
|
+
this[offset] = value & 255;
|
478
|
+
while (++i < byteLength && (mul *= 256)) {
|
479
|
+
this[offset + i] = value / mul & 255;
|
480
|
+
}
|
481
|
+
return offset + byteLength;
|
482
|
+
}
|
483
|
+
/**
|
484
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits of
|
485
|
+
* accuracy. Behavior is undefined when `value` is anything other than an unsigned integer.
|
486
|
+
*
|
487
|
+
* @param value Number to write.
|
488
|
+
* @param offset Number of bytes to skip before starting to write.
|
489
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
490
|
+
* @param noAssert
|
491
|
+
* @returns `offset` plus the number of bytes written.
|
492
|
+
*/
|
493
|
+
writeUIntBE(value, offset, byteLength, noAssert) {
|
494
|
+
value = +value;
|
495
|
+
offset = offset >>> 0;
|
496
|
+
byteLength = byteLength >>> 0;
|
497
|
+
if (!noAssert) {
|
498
|
+
const maxBytes = Math.pow(2, 8 * byteLength) - 1;
|
499
|
+
Buffer._checkInt(this, value, offset, byteLength, maxBytes, 0);
|
500
|
+
}
|
501
|
+
let i = byteLength - 1;
|
502
|
+
let mul = 1;
|
503
|
+
this[offset + i] = value & 255;
|
504
|
+
while (--i >= 0 && (mul *= 256)) {
|
505
|
+
this[offset + i] = value / mul & 255;
|
506
|
+
}
|
507
|
+
return offset + byteLength;
|
508
|
+
}
|
509
|
+
/**
|
510
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits
|
511
|
+
* of accuracy. Behavior is undefined when `value` is anything other than a signed integer.
|
512
|
+
*
|
513
|
+
* @param value Number to write.
|
514
|
+
* @param offset Number of bytes to skip before starting to write.
|
515
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
516
|
+
* @param noAssert
|
517
|
+
* @returns `offset` plus the number of bytes written.
|
518
|
+
*/
|
519
|
+
writeIntLE(value, offset, byteLength, noAssert) {
|
520
|
+
value = +value;
|
521
|
+
offset = offset >>> 0;
|
522
|
+
if (!noAssert) {
|
523
|
+
const limit = Math.pow(2, 8 * byteLength - 1);
|
524
|
+
Buffer._checkInt(this, value, offset, byteLength, limit - 1, -limit);
|
525
|
+
}
|
526
|
+
let i = 0;
|
527
|
+
let mul = 1;
|
528
|
+
let sub = 0;
|
529
|
+
this[offset] = value & 255;
|
530
|
+
while (++i < byteLength && (mul *= 256)) {
|
531
|
+
if (value < 0 && sub === 0 && this[offset + i - 1] !== 0) {
|
532
|
+
sub = 1;
|
533
|
+
}
|
534
|
+
this[offset + i] = (value / mul >> 0) - sub & 255;
|
535
|
+
}
|
536
|
+
return offset + byteLength;
|
537
|
+
}
|
538
|
+
/**
|
539
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits
|
540
|
+
* of accuracy. Behavior is undefined when `value` is anything other than a signed integer.
|
541
|
+
*
|
542
|
+
* @param value Number to write.
|
543
|
+
* @param offset Number of bytes to skip before starting to write.
|
544
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
545
|
+
* @param noAssert
|
546
|
+
* @returns `offset` plus the number of bytes written.
|
547
|
+
*/
|
548
|
+
writeIntBE(value, offset, byteLength, noAssert) {
|
549
|
+
value = +value;
|
550
|
+
offset = offset >>> 0;
|
551
|
+
if (!noAssert) {
|
552
|
+
const limit = Math.pow(2, 8 * byteLength - 1);
|
553
|
+
Buffer._checkInt(this, value, offset, byteLength, limit - 1, -limit);
|
554
|
+
}
|
555
|
+
let i = byteLength - 1;
|
556
|
+
let mul = 1;
|
557
|
+
let sub = 0;
|
558
|
+
this[offset + i] = value & 255;
|
559
|
+
while (--i >= 0 && (mul *= 256)) {
|
560
|
+
if (value < 0 && sub === 0 && this[offset + i + 1] !== 0) {
|
561
|
+
sub = 1;
|
562
|
+
}
|
563
|
+
this[offset + i] = (value / mul >> 0) - sub & 255;
|
564
|
+
}
|
565
|
+
return offset + byteLength;
|
566
|
+
}
|
567
|
+
/**
|
568
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an
|
569
|
+
* unsigned, little-endian integer supporting up to 48 bits of accuracy.
|
570
|
+
*
|
571
|
+
* @param offset Number of bytes to skip before starting to read.
|
572
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
573
|
+
* @param noAssert
|
574
|
+
*/
|
575
|
+
readUIntLE(offset, byteLength, noAssert) {
|
576
|
+
offset = offset >>> 0;
|
577
|
+
byteLength = byteLength >>> 0;
|
578
|
+
if (!noAssert) {
|
579
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
580
|
+
}
|
581
|
+
let val = this[offset];
|
582
|
+
let mul = 1;
|
583
|
+
let i = 0;
|
584
|
+
while (++i < byteLength && (mul *= 256)) {
|
585
|
+
val += this[offset + i] * mul;
|
586
|
+
}
|
587
|
+
return val;
|
588
|
+
}
|
589
|
+
/**
|
590
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an
|
591
|
+
* unsigned, big-endian integer supporting up to 48 bits of accuracy.
|
592
|
+
*
|
593
|
+
* @param offset Number of bytes to skip before starting to read.
|
594
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
595
|
+
* @param noAssert
|
596
|
+
*/
|
597
|
+
readUIntBE(offset, byteLength, noAssert) {
|
598
|
+
offset = offset >>> 0;
|
599
|
+
byteLength = byteLength >>> 0;
|
600
|
+
if (!noAssert) {
|
601
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
602
|
+
}
|
603
|
+
let val = this[offset + --byteLength];
|
604
|
+
let mul = 1;
|
605
|
+
while (byteLength > 0 && (mul *= 256)) {
|
606
|
+
val += this[offset + --byteLength] * mul;
|
607
|
+
}
|
608
|
+
return val;
|
609
|
+
}
|
610
|
+
/**
|
611
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a
|
612
|
+
* little-endian, two's complement signed value supporting up to 48 bits of accuracy.
|
613
|
+
*
|
614
|
+
* @param offset Number of bytes to skip before starting to read.
|
615
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
616
|
+
* @param noAssert
|
617
|
+
*/
|
618
|
+
readIntLE(offset, byteLength, noAssert) {
|
619
|
+
offset = offset >>> 0;
|
620
|
+
byteLength = byteLength >>> 0;
|
621
|
+
if (!noAssert) {
|
622
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
623
|
+
}
|
624
|
+
let val = this[offset];
|
625
|
+
let mul = 1;
|
626
|
+
let i = 0;
|
627
|
+
while (++i < byteLength && (mul *= 256)) {
|
628
|
+
val += this[offset + i] * mul;
|
629
|
+
}
|
630
|
+
mul *= 128;
|
631
|
+
if (val >= mul) {
|
632
|
+
val -= Math.pow(2, 8 * byteLength);
|
633
|
+
}
|
634
|
+
return val;
|
635
|
+
}
|
636
|
+
/**
|
637
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a
|
638
|
+
* big-endian, two's complement signed value supporting up to 48 bits of accuracy.
|
639
|
+
*
|
640
|
+
* @param offset Number of bytes to skip before starting to read.
|
641
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
642
|
+
* @param noAssert
|
643
|
+
*/
|
644
|
+
readIntBE(offset, byteLength, noAssert) {
|
645
|
+
offset = offset >>> 0;
|
646
|
+
byteLength = byteLength >>> 0;
|
647
|
+
if (!noAssert) {
|
648
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
649
|
+
}
|
650
|
+
let i = byteLength;
|
651
|
+
let mul = 1;
|
652
|
+
let val = this[offset + --i];
|
653
|
+
while (i > 0 && (mul *= 256)) {
|
654
|
+
val += this[offset + --i] * mul;
|
655
|
+
}
|
656
|
+
mul *= 128;
|
657
|
+
if (val >= mul) {
|
658
|
+
val -= Math.pow(2, 8 * byteLength);
|
659
|
+
}
|
660
|
+
return val;
|
661
|
+
}
|
662
|
+
/**
|
663
|
+
* Reads an unsigned 8-bit integer from `buf` at the specified `offset`.
|
664
|
+
*
|
665
|
+
* @param offset Number of bytes to skip before starting to read.
|
666
|
+
* @param noAssert
|
667
|
+
*/
|
668
|
+
readUInt8(offset, noAssert) {
|
669
|
+
offset = offset >>> 0;
|
670
|
+
if (!noAssert) {
|
671
|
+
Buffer._checkOffset(offset, 1, this.length);
|
672
|
+
}
|
673
|
+
return this[offset];
|
674
|
+
}
|
675
|
+
/**
|
676
|
+
* Reads an unsigned, little-endian 16-bit integer from `buf` at the specified `offset`.
|
677
|
+
*
|
678
|
+
* @param offset Number of bytes to skip before starting to read.
|
679
|
+
* @param noAssert
|
680
|
+
*/
|
681
|
+
readUInt16LE(offset, noAssert) {
|
682
|
+
offset = offset >>> 0;
|
683
|
+
if (!noAssert) {
|
684
|
+
Buffer._checkOffset(offset, 2, this.length);
|
685
|
+
}
|
686
|
+
return this[offset] | this[offset + 1] << 8;
|
687
|
+
}
|
688
|
+
/**
|
689
|
+
* Reads an unsigned, big-endian 16-bit integer from `buf` at the specified `offset`.
|
690
|
+
*
|
691
|
+
* @param offset Number of bytes to skip before starting to read.
|
692
|
+
* @param noAssert
|
693
|
+
*/
|
694
|
+
readUInt16BE(offset, noAssert) {
|
695
|
+
offset = offset >>> 0;
|
696
|
+
if (!noAssert) {
|
697
|
+
Buffer._checkOffset(offset, 2, this.length);
|
698
|
+
}
|
699
|
+
return this[offset] << 8 | this[offset + 1];
|
700
|
+
}
|
701
|
+
/**
|
702
|
+
* Reads an unsigned, little-endian 32-bit integer from `buf` at the specified `offset`.
|
703
|
+
*
|
704
|
+
* @param offset Number of bytes to skip before starting to read.
|
705
|
+
* @param noAssert
|
706
|
+
*/
|
707
|
+
readUInt32LE(offset, noAssert) {
|
708
|
+
offset = offset >>> 0;
|
709
|
+
if (!noAssert) {
|
710
|
+
Buffer._checkOffset(offset, 4, this.length);
|
711
|
+
}
|
712
|
+
return (this[offset] | this[offset + 1] << 8 | this[offset + 2] << 16) + this[offset + 3] * 16777216;
|
713
|
+
}
|
714
|
+
/**
|
715
|
+
* Reads an unsigned, big-endian 32-bit integer from `buf` at the specified `offset`.
|
716
|
+
*
|
717
|
+
* @param offset Number of bytes to skip before starting to read.
|
718
|
+
* @param noAssert
|
719
|
+
*/
|
720
|
+
readUInt32BE(offset, noAssert) {
|
721
|
+
offset = offset >>> 0;
|
722
|
+
if (!noAssert) {
|
723
|
+
Buffer._checkOffset(offset, 4, this.length);
|
724
|
+
}
|
725
|
+
return this[offset] * 16777216 + (this[offset + 1] << 16 | this[offset + 2] << 8 | this[offset + 3]);
|
726
|
+
}
|
727
|
+
/**
|
728
|
+
* Reads a signed 8-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer` are interpreted
|
729
|
+
* as two's complement signed values.
|
730
|
+
*
|
731
|
+
* @param offset Number of bytes to skip before starting to read.
|
732
|
+
* @param noAssert
|
733
|
+
*/
|
734
|
+
readInt8(offset, noAssert) {
|
735
|
+
offset = offset >>> 0;
|
736
|
+
if (!noAssert) {
|
737
|
+
Buffer._checkOffset(offset, 1, this.length);
|
738
|
+
}
|
739
|
+
if (!(this[offset] & 128)) {
|
740
|
+
return this[offset];
|
741
|
+
}
|
742
|
+
return (255 - this[offset] + 1) * -1;
|
743
|
+
}
|
744
|
+
/**
|
745
|
+
* Reads a signed, little-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
746
|
+
* are interpreted as two's complement signed values.
|
747
|
+
*
|
748
|
+
* @param offset Number of bytes to skip before starting to read.
|
749
|
+
* @param noAssert
|
750
|
+
*/
|
751
|
+
readInt16LE(offset, noAssert) {
|
752
|
+
offset = offset >>> 0;
|
753
|
+
if (!noAssert) {
|
754
|
+
Buffer._checkOffset(offset, 2, this.length);
|
755
|
+
}
|
756
|
+
const val = this[offset] | this[offset + 1] << 8;
|
757
|
+
return val & 32768 ? val | 4294901760 : val;
|
758
|
+
}
|
759
|
+
/**
|
760
|
+
* Reads a signed, big-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
761
|
+
* are interpreted as two's complement signed values.
|
762
|
+
*
|
763
|
+
* @param offset Number of bytes to skip before starting to read.
|
764
|
+
* @param noAssert
|
765
|
+
*/
|
766
|
+
readInt16BE(offset, noAssert) {
|
767
|
+
offset = offset >>> 0;
|
768
|
+
if (!noAssert) {
|
769
|
+
Buffer._checkOffset(offset, 2, this.length);
|
770
|
+
}
|
771
|
+
const val = this[offset + 1] | this[offset] << 8;
|
772
|
+
return val & 32768 ? val | 4294901760 : val;
|
773
|
+
}
|
774
|
+
/**
|
775
|
+
* Reads a signed, little-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
776
|
+
* are interpreted as two's complement signed values.
|
777
|
+
*
|
778
|
+
* @param offset Number of bytes to skip before starting to read.
|
779
|
+
* @param noAssert
|
780
|
+
*/
|
781
|
+
readInt32LE(offset, noAssert) {
|
782
|
+
offset = offset >>> 0;
|
783
|
+
if (!noAssert) {
|
784
|
+
Buffer._checkOffset(offset, 4, this.length);
|
785
|
+
}
|
786
|
+
return this[offset] | this[offset + 1] << 8 | this[offset + 2] << 16 | this[offset + 3] << 24;
|
787
|
+
}
|
788
|
+
/**
|
789
|
+
* Reads a signed, big-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
790
|
+
* are interpreted as two's complement signed values.
|
791
|
+
*
|
792
|
+
* @param offset Number of bytes to skip before starting to read.
|
793
|
+
* @param noAssert
|
794
|
+
*/
|
795
|
+
readInt32BE(offset, noAssert) {
|
796
|
+
offset = offset >>> 0;
|
797
|
+
if (!noAssert) {
|
798
|
+
Buffer._checkOffset(offset, 4, this.length);
|
799
|
+
}
|
800
|
+
return this[offset] << 24 | this[offset + 1] << 16 | this[offset + 2] << 8 | this[offset + 3];
|
801
|
+
}
|
802
|
+
/**
|
803
|
+
* Interprets `buf` as an array of unsigned 16-bit integers and swaps the byte order in-place.
|
804
|
+
* Throws a `RangeError` if `buf.length` is not a multiple of 2.
|
805
|
+
*/
|
806
|
+
swap16() {
|
807
|
+
const len = this.length;
|
808
|
+
if (len % 2 !== 0) {
|
809
|
+
throw new RangeError("Buffer size must be a multiple of 16-bits");
|
810
|
+
}
|
811
|
+
for (let i = 0; i < len; i += 2) {
|
812
|
+
this._swap(this, i, i + 1);
|
813
|
+
}
|
814
|
+
return this;
|
815
|
+
}
|
816
|
+
/**
|
817
|
+
* Interprets `buf` as an array of unsigned 32-bit integers and swaps the byte order in-place.
|
818
|
+
* Throws a `RangeError` if `buf.length` is not a multiple of 4.
|
819
|
+
*/
|
820
|
+
swap32() {
|
821
|
+
const len = this.length;
|
822
|
+
if (len % 4 !== 0) {
|
823
|
+
throw new RangeError("Buffer size must be a multiple of 32-bits");
|
824
|
+
}
|
825
|
+
for (let i = 0; i < len; i += 4) {
|
826
|
+
this._swap(this, i, i + 3);
|
827
|
+
this._swap(this, i + 1, i + 2);
|
828
|
+
}
|
829
|
+
return this;
|
830
|
+
}
|
831
|
+
/**
|
832
|
+
* Interprets `buf` as an array of unsigned 64-bit integers and swaps the byte order in-place.
|
833
|
+
* Throws a `RangeError` if `buf.length` is not a multiple of 8.
|
834
|
+
*/
|
835
|
+
swap64() {
|
836
|
+
const len = this.length;
|
837
|
+
if (len % 8 !== 0) {
|
838
|
+
throw new RangeError("Buffer size must be a multiple of 64-bits");
|
839
|
+
}
|
840
|
+
for (let i = 0; i < len; i += 8) {
|
841
|
+
this._swap(this, i, i + 7);
|
842
|
+
this._swap(this, i + 1, i + 6);
|
843
|
+
this._swap(this, i + 2, i + 5);
|
844
|
+
this._swap(this, i + 3, i + 4);
|
845
|
+
}
|
846
|
+
return this;
|
847
|
+
}
|
848
|
+
/**
|
849
|
+
* Swaps two octets.
|
850
|
+
*
|
851
|
+
* @param b
|
852
|
+
* @param n
|
853
|
+
* @param m
|
854
|
+
*/
|
855
|
+
_swap(b, n, m) {
|
856
|
+
const i = b[n];
|
857
|
+
b[n] = b[m];
|
858
|
+
b[m] = i;
|
859
|
+
}
|
860
|
+
/**
|
861
|
+
* Writes `value` to `buf` at the specified `offset`. The `value` must be a valid unsigned 8-bit integer.
|
862
|
+
* Behavior is undefined when `value` is anything other than an unsigned 8-bit integer.
|
863
|
+
*
|
864
|
+
* @param value Number to write.
|
865
|
+
* @param offset Number of bytes to skip before starting to write.
|
866
|
+
* @param noAssert
|
867
|
+
* @returns `offset` plus the number of bytes written.
|
868
|
+
*/
|
869
|
+
writeUInt8(value, offset, noAssert) {
|
870
|
+
value = +value;
|
871
|
+
offset = offset >>> 0;
|
872
|
+
if (!noAssert) {
|
873
|
+
Buffer._checkInt(this, value, offset, 1, 255, 0);
|
874
|
+
}
|
875
|
+
this[offset] = value & 255;
|
876
|
+
return offset + 1;
|
877
|
+
}
|
878
|
+
/**
|
879
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 16-bit
|
880
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 16-bit integer.
|
881
|
+
*
|
882
|
+
* @param value Number to write.
|
883
|
+
* @param offset Number of bytes to skip before starting to write.
|
884
|
+
* @param noAssert
|
885
|
+
* @returns `offset` plus the number of bytes written.
|
886
|
+
*/
|
887
|
+
writeUInt16LE(value, offset, noAssert) {
|
888
|
+
value = +value;
|
889
|
+
offset = offset >>> 0;
|
890
|
+
if (!noAssert) {
|
891
|
+
Buffer._checkInt(this, value, offset, 2, 65535, 0);
|
892
|
+
}
|
893
|
+
this[offset] = value & 255;
|
894
|
+
this[offset + 1] = value >>> 8;
|
895
|
+
return offset + 2;
|
896
|
+
}
|
897
|
+
/**
|
898
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 16-bit
|
899
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 16-bit integer.
|
900
|
+
*
|
901
|
+
* @param value Number to write.
|
902
|
+
* @param offset Number of bytes to skip before starting to write.
|
903
|
+
* @param noAssert
|
904
|
+
* @returns `offset` plus the number of bytes written.
|
905
|
+
*/
|
906
|
+
writeUInt16BE(value, offset, noAssert) {
|
907
|
+
value = +value;
|
908
|
+
offset = offset >>> 0;
|
909
|
+
if (!noAssert) {
|
910
|
+
Buffer._checkInt(this, value, offset, 2, 65535, 0);
|
911
|
+
}
|
912
|
+
this[offset] = value >>> 8;
|
913
|
+
this[offset + 1] = value & 255;
|
914
|
+
return offset + 2;
|
915
|
+
}
|
916
|
+
/**
|
917
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 32-bit
|
918
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 32-bit integer.
|
919
|
+
*
|
920
|
+
* @param value Number to write.
|
921
|
+
* @param offset Number of bytes to skip before starting to write.
|
922
|
+
* @param noAssert
|
923
|
+
* @returns `offset` plus the number of bytes written.
|
924
|
+
*/
|
925
|
+
writeUInt32LE(value, offset, noAssert) {
|
926
|
+
value = +value;
|
927
|
+
offset = offset >>> 0;
|
928
|
+
if (!noAssert) {
|
929
|
+
Buffer._checkInt(this, value, offset, 4, 4294967295, 0);
|
930
|
+
}
|
931
|
+
this[offset + 3] = value >>> 24;
|
932
|
+
this[offset + 2] = value >>> 16;
|
933
|
+
this[offset + 1] = value >>> 8;
|
934
|
+
this[offset] = value & 255;
|
935
|
+
return offset + 4;
|
936
|
+
}
|
937
|
+
/**
|
938
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 32-bit
|
939
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 32-bit integer.
|
940
|
+
*
|
941
|
+
* @param value Number to write.
|
942
|
+
* @param offset Number of bytes to skip before starting to write.
|
943
|
+
* @param noAssert
|
944
|
+
* @returns `offset` plus the number of bytes written.
|
945
|
+
*/
|
946
|
+
writeUInt32BE(value, offset, noAssert) {
|
947
|
+
value = +value;
|
948
|
+
offset = offset >>> 0;
|
949
|
+
if (!noAssert) {
|
950
|
+
Buffer._checkInt(this, value, offset, 4, 4294967295, 0);
|
951
|
+
}
|
952
|
+
this[offset] = value >>> 24;
|
953
|
+
this[offset + 1] = value >>> 16;
|
954
|
+
this[offset + 2] = value >>> 8;
|
955
|
+
this[offset + 3] = value & 255;
|
956
|
+
return offset + 4;
|
957
|
+
}
|
958
|
+
/**
|
959
|
+
* Writes `value` to `buf` at the specified `offset`. The `value` must be a valid signed 8-bit integer.
|
960
|
+
* Behavior is undefined when `value` is anything other than a signed 8-bit integer.
|
961
|
+
*
|
962
|
+
* @param value Number to write.
|
963
|
+
* @param offset Number of bytes to skip before starting to write.
|
964
|
+
* @param noAssert
|
965
|
+
* @returns `offset` plus the number of bytes written.
|
966
|
+
*/
|
967
|
+
writeInt8(value, offset, noAssert) {
|
968
|
+
value = +value;
|
969
|
+
offset = offset >>> 0;
|
970
|
+
if (!noAssert) {
|
971
|
+
Buffer._checkInt(this, value, offset, 1, 127, -128);
|
972
|
+
}
|
973
|
+
if (value < 0) {
|
974
|
+
value = 255 + value + 1;
|
975
|
+
}
|
976
|
+
this[offset] = value & 255;
|
977
|
+
return offset + 1;
|
978
|
+
}
|
979
|
+
/**
|
980
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 16-bit
|
981
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 16-bit integer.
|
982
|
+
*
|
983
|
+
* @param value Number to write.
|
984
|
+
* @param offset Number of bytes to skip before starting to write.
|
985
|
+
* @param noAssert
|
986
|
+
* @returns `offset` plus the number of bytes written.
|
987
|
+
*/
|
988
|
+
writeInt16LE(value, offset, noAssert) {
|
989
|
+
value = +value;
|
990
|
+
offset = offset >>> 0;
|
991
|
+
if (!noAssert) {
|
992
|
+
Buffer._checkInt(this, value, offset, 2, 32767, -32768);
|
993
|
+
}
|
994
|
+
this[offset] = value & 255;
|
995
|
+
this[offset + 1] = value >>> 8;
|
996
|
+
return offset + 2;
|
997
|
+
}
|
998
|
+
/**
|
999
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 16-bit
|
1000
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 16-bit integer.
|
1001
|
+
*
|
1002
|
+
* @param value Number to write.
|
1003
|
+
* @param offset Number of bytes to skip before starting to write.
|
1004
|
+
* @param noAssert
|
1005
|
+
* @returns `offset` plus the number of bytes written.
|
1006
|
+
*/
|
1007
|
+
writeInt16BE(value, offset, noAssert) {
|
1008
|
+
value = +value;
|
1009
|
+
offset = offset >>> 0;
|
1010
|
+
if (!noAssert) {
|
1011
|
+
Buffer._checkInt(this, value, offset, 2, 32767, -32768);
|
1012
|
+
}
|
1013
|
+
this[offset] = value >>> 8;
|
1014
|
+
this[offset + 1] = value & 255;
|
1015
|
+
return offset + 2;
|
1016
|
+
}
|
1017
|
+
/**
|
1018
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 32-bit
|
1019
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 32-bit integer.
|
1020
|
+
*
|
1021
|
+
* @param value Number to write.
|
1022
|
+
* @param offset Number of bytes to skip before starting to write.
|
1023
|
+
* @param noAssert
|
1024
|
+
* @returns `offset` plus the number of bytes written.
|
1025
|
+
*/
|
1026
|
+
writeInt32LE(value, offset, noAssert) {
|
1027
|
+
value = +value;
|
1028
|
+
offset = offset >>> 0;
|
1029
|
+
if (!noAssert) {
|
1030
|
+
Buffer._checkInt(this, value, offset, 4, 2147483647, -2147483648);
|
1031
|
+
}
|
1032
|
+
this[offset] = value & 255;
|
1033
|
+
this[offset + 1] = value >>> 8;
|
1034
|
+
this[offset + 2] = value >>> 16;
|
1035
|
+
this[offset + 3] = value >>> 24;
|
1036
|
+
return offset + 4;
|
1037
|
+
}
|
1038
|
+
/**
|
1039
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 32-bit
|
1040
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 32-bit integer.
|
1041
|
+
*
|
1042
|
+
* @param value Number to write.
|
1043
|
+
* @param offset Number of bytes to skip before starting to write.
|
1044
|
+
* @param noAssert
|
1045
|
+
* @returns `offset` plus the number of bytes written.
|
1046
|
+
*/
|
1047
|
+
writeInt32BE(value, offset, noAssert) {
|
1048
|
+
value = +value;
|
1049
|
+
offset = offset >>> 0;
|
1050
|
+
if (!noAssert) {
|
1051
|
+
Buffer._checkInt(this, value, offset, 4, 2147483647, -2147483648);
|
1052
|
+
}
|
1053
|
+
if (value < 0) {
|
1054
|
+
value = 4294967295 + value + 1;
|
1055
|
+
}
|
1056
|
+
this[offset] = value >>> 24;
|
1057
|
+
this[offset + 1] = value >>> 16;
|
1058
|
+
this[offset + 2] = value >>> 8;
|
1059
|
+
this[offset + 3] = value & 255;
|
1060
|
+
return offset + 4;
|
1061
|
+
}
|
1062
|
+
/**
 * Fills `buf` with the specified `value`. If the `offset` and `end` are not given, the entire `buf` will be
 * filled. The `value` is coerced to a `uint32` value if it is not a string, `Buffer`, or integer. If the resulting
 * integer is greater than `255` (decimal), then `buf` will be filled with `value & 255`.
 *
 * If the final write of a `fill()` operation falls on a multi-byte character, then only the bytes of that
 * character that fit into `buf` are written.
 *
 * If `value` contains invalid characters, it is truncated; if no valid fill data remains, an exception is thrown.
 *
 * @param value What to fill the buffer with (string, number, boolean, or Buffer).
 * @param offset Start of the fill range (defaults to 0); may also carry `encoding` in the string overloads.
 * @param end End of the fill range, exclusive (defaults to `buf.length`).
 * @param encoding Character encoding when `value` is a string.
 * @returns A reference to `buf`.
 */
fill(value, offset, end, encoding) {
  if (typeof value === "string") {
    // Support the overloads fill(str, encoding) and fill(str, offset, encoding)
    // by shifting the string argument into `encoding`.
    if (typeof offset === "string") {
      encoding = offset;
      offset = 0;
      end = this.length;
    } else if (typeof end === "string") {
      encoding = end;
      end = this.length;
    }
    if (encoding !== void 0 && typeof encoding !== "string") {
      throw new TypeError("encoding must be a string");
    }
    if (typeof encoding === "string" && !Buffer.isEncoding(encoding)) {
      throw new TypeError("Unknown encoding: " + encoding);
    }
    // Fast path: a single ASCII character in utf8 becomes a plain byte fill.
    if (value.length === 1) {
      const code = value.charCodeAt(0);
      if (encoding === "utf8" && code < 128) {
        value = code;
      }
    }
  } else if (typeof value === "number") {
    value = value & 255;
  } else if (typeof value === "boolean") {
    value = Number(value);
  }
  offset ?? (offset = 0);
  end ?? (end = this.length);
  if (offset < 0 || this.length < offset || this.length < end) {
    throw new RangeError("Out of range index");
  }
  if (end <= offset) {
    return this;
  }
  offset = offset >>> 0;
  // NOTE(review): `end` was defaulted above, so the void-0 arm here is
  // unreachable; the ternary always takes `end >>> 0`.
  end = end === void 0 ? this.length : end >>> 0;
  value || (value = 0);
  let i;
  if (typeof value === "number") {
    // Byte fill.
    for (i = offset; i < end; ++i) {
      this[i] = value;
    }
  } else {
    // Pattern fill: encode the string (or use the Buffer as-is) and repeat
    // its bytes cyclically over the range.
    const bytes = Buffer.isBuffer(value) ? value : Buffer.from(value, encoding);
    const len = bytes.length;
    if (len === 0) {
      throw new TypeError('The value "' + value + '" is invalid for argument "value"');
    }
    for (i = 0; i < end - offset; ++i) {
      this[i + offset] = bytes[i % len];
    }
  }
  return this;
}
|
1130
|
+
/**
|
1131
|
+
* Returns the index of the specified value.
|
1132
|
+
*
|
1133
|
+
* If `value` is:
|
1134
|
+
* - a string, `value` is interpreted according to the character encoding in `encoding`.
|
1135
|
+
* - a `Buffer` or `Uint8Array`, `value` will be used in its entirety. To compare a partial Buffer, use `slice()`.
|
1136
|
+
* - a number, `value` will be interpreted as an unsigned 8-bit integer value between `0` and `255`.
|
1137
|
+
*
|
1138
|
+
* Any other types will throw a `TypeError`.
|
1139
|
+
*
|
1140
|
+
* @param value What to search for.
|
1141
|
+
* @param byteOffset Where to begin searching in `buf`. If negative, then calculated from the end.
|
1142
|
+
* @param encoding If `value` is a string, this is the encoding used to search.
|
1143
|
+
* @returns The index of the first occurrence of `value` in `buf`, or `-1` if not found.
|
1144
|
+
*/
|
1145
|
+
indexOf(value, byteOffset, encoding) {
|
1146
|
+
return this._bidirectionalIndexOf(this, value, byteOffset, encoding, true);
|
1147
|
+
}
|
1148
|
+
/**
|
1149
|
+
* Gets the last index of the specified value.
|
1150
|
+
*
|
1151
|
+
* @see indexOf()
|
1152
|
+
* @param value
|
1153
|
+
* @param byteOffset
|
1154
|
+
* @param encoding
|
1155
|
+
*/
|
1156
|
+
lastIndexOf(value, byteOffset, encoding) {
|
1157
|
+
return this._bidirectionalIndexOf(this, value, byteOffset, encoding, false);
|
1158
|
+
}
|
1159
|
+
/**
 * Shared implementation behind indexOf()/lastIndexOf(): normalizes `byteOffset`
 * and dispatches on the type of `val`.
 *
 * @param buffer The buffer to search in.
 * @param val What to search for (string, number, or Buffer).
 * @param byteOffset Starting position; may also carry `encoding` (string overload).
 * @param encoding Encoding used when `val` is a string.
 * @param dir `true` for a forward search (indexOf), `false` for backward (lastIndexOf).
 * @returns Index of the match, or -1 if not found.
 * @throws TypeError when `val` is not a string, number, or Buffer.
 */
_bidirectionalIndexOf(buffer, val, byteOffset, encoding, dir) {
  if (buffer.length === 0) {
    return -1;
  }
  // Overload: _bidirectionalIndexOf(buf, val, encoding, ...).
  if (typeof byteOffset === "string") {
    encoding = byteOffset;
    byteOffset = 0;
  } else if (typeof byteOffset === "undefined") {
    byteOffset = 0;
  } else if (byteOffset > 2147483647) {
    // Clamp to the signed 32-bit range before coercion.
    byteOffset = 2147483647;
  } else if (byteOffset < -2147483648) {
    byteOffset = -2147483648;
  }
  byteOffset = +byteOffset;
  // Self-inequality is the NaN check: a non-numeric offset means
  // "search the whole buffer" in the chosen direction.
  if (byteOffset !== byteOffset) {
    byteOffset = dir ? 0 : buffer.length - 1;
  }
  // Negative offsets count from the end.
  if (byteOffset < 0) {
    byteOffset = buffer.length + byteOffset;
  }
  if (byteOffset >= buffer.length) {
    if (dir) {
      return -1;
    } else {
      byteOffset = buffer.length - 1;
    }
  } else if (byteOffset < 0) {
    // Still negative after end-relative adjustment.
    if (dir) {
      byteOffset = 0;
    } else {
      return -1;
    }
  }
  // Strings are searched by their encoded byte sequence.
  if (typeof val === "string") {
    val = Buffer.from(val, encoding);
  }
  if (Buffer.isBuffer(val)) {
    // An empty needle never matches (matches Node.js semantics here).
    if (val.length === 0) {
      return -1;
    }
    return Buffer._arrayIndexOf(buffer, val, byteOffset, encoding, dir);
  } else if (typeof val === "number") {
    val = val & 255;
    // Prefer the native typed-array search when available.
    if (typeof Uint8Array.prototype.indexOf === "function") {
      if (dir) {
        return Uint8Array.prototype.indexOf.call(buffer, val, byteOffset);
      } else {
        return Uint8Array.prototype.lastIndexOf.call(buffer, val, byteOffset);
      }
    }
    return Buffer._arrayIndexOf(buffer, Buffer.from([val]), byteOffset, encoding, dir);
  }
  throw new TypeError("val must be string, number or Buffer");
}
|
1214
|
+
/**
|
1215
|
+
* Equivalent to `buf.indexOf() !== -1`.
|
1216
|
+
*
|
1217
|
+
* @param value
|
1218
|
+
* @param byteOffset
|
1219
|
+
* @param encoding
|
1220
|
+
*/
|
1221
|
+
includes(value, byteOffset, encoding) {
|
1222
|
+
return this.indexOf(value, byteOffset, encoding) !== -1;
|
1223
|
+
}
|
1224
|
+
/**
|
1225
|
+
* Creates a new buffer from the given parameters.
|
1226
|
+
*
|
1227
|
+
* @param data
|
1228
|
+
* @param encoding
|
1229
|
+
*/
|
1230
|
+
static from(a, b, c) {
|
1231
|
+
return new Buffer(a, b, c);
|
1232
|
+
}
|
1233
|
+
/**
|
1234
|
+
* Returns true if `obj` is a Buffer.
|
1235
|
+
*
|
1236
|
+
* @param obj
|
1237
|
+
*/
|
1238
|
+
static isBuffer(obj) {
|
1239
|
+
return obj != null && obj !== Buffer.prototype && Buffer._isInstance(obj, Buffer);
|
1240
|
+
}
|
1241
|
+
/**
|
1242
|
+
* Returns true if `encoding` is a supported encoding.
|
1243
|
+
*
|
1244
|
+
* @param encoding
|
1245
|
+
*/
|
1246
|
+
static isEncoding(encoding) {
|
1247
|
+
switch (encoding.toLowerCase()) {
|
1248
|
+
case "hex":
|
1249
|
+
case "utf8":
|
1250
|
+
case "ascii":
|
1251
|
+
case "binary":
|
1252
|
+
case "latin1":
|
1253
|
+
case "ucs2":
|
1254
|
+
case "utf16le":
|
1255
|
+
case "base64":
|
1256
|
+
return true;
|
1257
|
+
default:
|
1258
|
+
return false;
|
1259
|
+
}
|
1260
|
+
}
|
1261
|
+
/**
 * Gives the actual byte length of a string for an encoding. This is not the same as `string.length` since that
 * returns the number of characters in the string.
 *
 * @param string The string to test (Buffers and ArrayBuffer views are also accepted).
 * @param encoding The encoding to use for calculation. Defaults is `utf8`.
 * @returns The byte length; `-1` when a hidden third "mustMatch" argument is true
 *          and the encoding is unknown.
 * @throws TypeError when `string` is not a string, Buffer, or ArrayBuffer(-view).
 */
static byteLength(string, encoding) {
  // Buffers and typed-array views already know their byte length.
  if (Buffer.isBuffer(string)) {
    return string.length;
  }
  if (typeof string !== "string" && (ArrayBuffer.isView(string) || Buffer._isInstance(string, ArrayBuffer))) {
    return string.byteLength;
  }
  if (typeof string !== "string") {
    throw new TypeError(
      'The "string" argument must be one of type string, Buffer, or ArrayBuffer. Received type ' + typeof string
    );
  }
  const len = string.length;
  // Undocumented third argument (read via `arguments`): when true, an
  // unknown encoding yields -1 instead of falling back to utf8.
  const mustMatch = arguments.length > 2 && arguments[2] === true;
  if (!mustMatch && len === 0) {
    return 0;
  }
  switch (encoding?.toLowerCase()) {
    case "ascii":
    case "latin1":
    case "binary":
      // One byte per character.
      return len;
    case "utf8":
      return Buffer._utf8ToBytes(string).length;
    case "hex":
      // Two hex digits per byte.
      return len >>> 1;
    case "ucs2":
    case "utf16le":
      return len * 2;
    case "base64":
      return Buffer._base64ToBytes(string).length;
    default:
      return mustMatch ? -1 : Buffer._utf8ToBytes(string).length;
  }
}
|
1303
|
+
/**
 * Returns a Buffer which is the result of concatenating all the buffers in the list together.
 *
 * - If the list has no items, or if the `totalLength` is 0, then it returns a zero-length buffer.
 * - If the list has exactly one item, then the first item is returned.
 * - If the list has more than one item, then a new buffer is created.
 *
 * It is faster to provide the `totalLength` if it is known. However, it will be calculated if not provided at
 * a small computational expense.
 *
 * @param list An array of Buffer objects to concatenate.
 * @param totalLength Total length of the buffers when concatenated.
 * @returns The concatenated Buffer.
 * @throws TypeError when `list` is not an array of Buffers/Uint8Arrays.
 */
static concat(list, totalLength) {
  if (!Array.isArray(list)) {
    throw new TypeError('"list" argument must be an Array of Buffers');
  }
  if (list.length === 0) {
    return Buffer.alloc(0);
  }
  let i;
  // Sum the lengths when the caller did not supply the total.
  if (totalLength === void 0) {
    totalLength = 0;
    for (i = 0; i < list.length; ++i) {
      totalLength += list[i].length;
    }
  }
  const buffer = Buffer.allocUnsafe(totalLength);
  let pos = 0;
  for (i = 0; i < list.length; ++i) {
    let buf = list[i];
    if (Buffer._isInstance(buf, Uint8Array)) {
      if (pos + buf.length > buffer.length) {
        // Source overruns the destination: use Buffer.copy(), which
        // truncates at the destination boundary.
        if (!Buffer.isBuffer(buf)) {
          buf = Buffer.from(buf);
        }
        buf.copy(buffer, pos);
      } else {
        // Fits entirely: the native typed-array set() is the fast path.
        Uint8Array.prototype.set.call(buffer, buf, pos);
      }
    } else if (!Buffer.isBuffer(buf)) {
      throw new TypeError('"list" argument must be an Array of Buffers');
    } else {
      buf.copy(buffer, pos);
    }
    pos += buf.length;
  }
  return buffer;
}
|
1352
|
+
/**
|
1353
|
+
* The same as `buf1.compare(buf2)`.
|
1354
|
+
*/
|
1355
|
+
static compare(buf1, buf2) {
|
1356
|
+
if (Buffer._isInstance(buf1, Uint8Array)) {
|
1357
|
+
buf1 = Buffer.from(buf1, buf1.byteOffset, buf1.byteLength);
|
1358
|
+
}
|
1359
|
+
if (Buffer._isInstance(buf2, Uint8Array)) {
|
1360
|
+
buf2 = Buffer.from(buf2, buf2.byteOffset, buf2.byteLength);
|
1361
|
+
}
|
1362
|
+
if (!Buffer.isBuffer(buf1) || !Buffer.isBuffer(buf2)) {
|
1363
|
+
throw new TypeError('The "buf1", "buf2" arguments must be one of type Buffer or Uint8Array');
|
1364
|
+
}
|
1365
|
+
if (buf1 === buf2) {
|
1366
|
+
return 0;
|
1367
|
+
}
|
1368
|
+
let x = buf1.length;
|
1369
|
+
let y = buf2.length;
|
1370
|
+
for (let i = 0, len = Math.min(x, y); i < len; ++i) {
|
1371
|
+
if (buf1[i] !== buf2[i]) {
|
1372
|
+
x = buf1[i];
|
1373
|
+
y = buf2[i];
|
1374
|
+
break;
|
1375
|
+
}
|
1376
|
+
}
|
1377
|
+
if (x < y) {
|
1378
|
+
return -1;
|
1379
|
+
}
|
1380
|
+
if (y < x) {
|
1381
|
+
return 1;
|
1382
|
+
}
|
1383
|
+
return 0;
|
1384
|
+
}
|
1385
|
+
/**
|
1386
|
+
* Allocates a new buffer of `size` octets.
|
1387
|
+
*
|
1388
|
+
* @param size The number of octets to allocate.
|
1389
|
+
* @param fill If specified, the buffer will be initialized by calling `buf.fill(fill)`, or with zeroes otherwise.
|
1390
|
+
* @param encoding The encoding used for the call to `buf.fill()` while initializing.
|
1391
|
+
*/
|
1392
|
+
static alloc(size, fill, encoding) {
|
1393
|
+
if (typeof size !== "number") {
|
1394
|
+
throw new TypeError('"size" argument must be of type number');
|
1395
|
+
} else if (size < 0) {
|
1396
|
+
throw new RangeError('The value "' + size + '" is invalid for option "size"');
|
1397
|
+
}
|
1398
|
+
if (size <= 0) {
|
1399
|
+
return new Buffer(size);
|
1400
|
+
}
|
1401
|
+
if (fill !== void 0) {
|
1402
|
+
return typeof encoding === "string" ? new Buffer(size).fill(fill, 0, size, encoding) : new Buffer(size).fill(fill);
|
1403
|
+
}
|
1404
|
+
return new Buffer(size);
|
1405
|
+
}
|
1406
|
+
/**
|
1407
|
+
* Allocates a new buffer of `size` octets without initializing memory. The contents of the buffer are unknown.
|
1408
|
+
*
|
1409
|
+
* @param size
|
1410
|
+
*/
|
1411
|
+
static allocUnsafe(size) {
|
1412
|
+
if (typeof size !== "number") {
|
1413
|
+
throw new TypeError('"size" argument must be of type number');
|
1414
|
+
} else if (size < 0) {
|
1415
|
+
throw new RangeError('The value "' + size + '" is invalid for option "size"');
|
1416
|
+
}
|
1417
|
+
return new Buffer(size < 0 ? 0 : Buffer._checked(size) | 0);
|
1418
|
+
}
|
1419
|
+
/**
|
1420
|
+
* Returns true if the given `obj` is an instance of `type`.
|
1421
|
+
*
|
1422
|
+
* @param obj
|
1423
|
+
* @param type
|
1424
|
+
*/
|
1425
|
+
static _isInstance(obj, type) {
|
1426
|
+
return obj instanceof type || obj != null && obj.constructor != null && obj.constructor.name != null && obj.constructor.name === type.name;
|
1427
|
+
}
|
1428
|
+
static _checked(length) {
|
1429
|
+
if (length >= K_MAX_LENGTH) {
|
1430
|
+
throw new RangeError(
|
1431
|
+
"Attempt to allocate Buffer larger than maximum size: 0x" + K_MAX_LENGTH.toString(16) + " bytes"
|
1432
|
+
);
|
1433
|
+
}
|
1434
|
+
return length | 0;
|
1435
|
+
}
|
1436
|
+
static _blitBuffer(src, dst, offset, length) {
|
1437
|
+
let i;
|
1438
|
+
for (i = 0; i < length; ++i) {
|
1439
|
+
if (i + offset >= dst.length || i >= src.length) {
|
1440
|
+
break;
|
1441
|
+
}
|
1442
|
+
dst[i + offset] = src[i];
|
1443
|
+
}
|
1444
|
+
return i;
|
1445
|
+
}
|
1446
|
+
static _utf8Write(buf, string, offset, length) {
|
1447
|
+
return Buffer._blitBuffer(Buffer._utf8ToBytes(string, buf.length - offset), buf, offset, length);
|
1448
|
+
}
|
1449
|
+
static _asciiWrite(buf, string, offset, length) {
|
1450
|
+
return Buffer._blitBuffer(Buffer._asciiToBytes(string), buf, offset, length);
|
1451
|
+
}
|
1452
|
+
static _base64Write(buf, string, offset, length) {
|
1453
|
+
return Buffer._blitBuffer(Buffer._base64ToBytes(string), buf, offset, length);
|
1454
|
+
}
|
1455
|
+
static _ucs2Write(buf, string, offset, length) {
|
1456
|
+
return Buffer._blitBuffer(Buffer._utf16leToBytes(string, buf.length - offset), buf, offset, length);
|
1457
|
+
}
|
1458
|
+
/**
 * Parses a hex `string` (two digits per byte) into `buf` starting at `offset`.
 *
 * @param buf Destination byte container.
 * @param string Hex-encoded input.
 * @param offset Destination start index.
 * @param length Maximum number of bytes to write; clamped to the space left in `buf`.
 * @returns The number of bytes written; parsing stops at the first invalid hex pair.
 */
static _hexWrite(buf, string, offset, length) {
  offset = Number(offset) || 0;
  const remaining = buf.length - offset;
  // Clamp the requested length to the remaining destination space.
  if (!length) {
    length = remaining;
  } else {
    length = Number(length);
    if (length > remaining) {
      length = remaining;
    }
  }
  // Also clamp to the number of full byte pairs available in the input.
  const strLen = string.length;
  if (length > strLen / 2) {
    length = strLen / 2;
  }
  let i;
  for (i = 0; i < length; ++i) {
    const parsed = parseInt(string.substr(i * 2, 2), 16);
    // Self-inequality is the NaN check: bail out on the first invalid pair.
    if (parsed !== parsed) {
      return i;
    }
    buf[offset + i] = parsed;
  }
  return i;
}
|
1483
|
+
/**
 * Encodes a JS string as UTF-8 bytes, emitting U+FFFD (EF BF BD) for lone or
 * malformed surrogates, and stopping once `units` output bytes would be exceeded.
 *
 * @param string Input string (UTF-16 code units).
 * @param units Maximum number of bytes to emit; defaults to unlimited.
 * @returns An array of byte values.
 * @throws Error on an impossible code point (> U+10FFFF).
 */
static _utf8ToBytes(string, units) {
  units = units || Infinity;
  const length = string.length;
  const bytes = [];
  let codePoint;
  // Holds a pending high surrogate until its low surrogate arrives.
  let leadSurrogate = null;
  for (let i = 0; i < length; ++i) {
    codePoint = string.charCodeAt(i);
    // Surrogate range (U+D800..U+DFFF) needs pairing logic.
    if (codePoint > 55295 && codePoint < 57344) {
      if (!leadSurrogate) {
        if (codePoint > 56319) {
          // Unpaired low surrogate: emit the replacement character.
          if ((units -= 3) > -1) {
            bytes.push(239, 191, 189);
          }
          continue;
        } else if (i + 1 === length) {
          // High surrogate at end of string: replacement character.
          if ((units -= 3) > -1) {
            bytes.push(239, 191, 189);
          }
          continue;
        }
        // Remember the high surrogate and wait for its pair.
        leadSurrogate = codePoint;
        continue;
      }
      if (codePoint < 56320) {
        // Two high surrogates in a row: replace the first, keep the second pending.
        if ((units -= 3) > -1) {
          bytes.push(239, 191, 189);
        }
        leadSurrogate = codePoint;
        continue;
      }
      // Valid pair: combine into a supplementary-plane code point.
      codePoint = (leadSurrogate - 55296 << 10 | codePoint - 56320) + 65536;
    } else if (leadSurrogate) {
      // Pending high surrogate followed by a non-surrogate: replacement character.
      if ((units -= 3) > -1) {
        bytes.push(239, 191, 189);
      }
    }
    leadSurrogate = null;
    // Emit 1-4 bytes depending on the code point's magnitude; each branch
    // first checks that the byte budget allows the full sequence.
    if (codePoint < 128) {
      if ((units -= 1) < 0) {
        break;
      }
      bytes.push(codePoint);
    } else if (codePoint < 2048) {
      if ((units -= 2) < 0) {
        break;
      }
      bytes.push(codePoint >> 6 | 192, codePoint & 63 | 128);
    } else if (codePoint < 65536) {
      if ((units -= 3) < 0) {
        break;
      }
      bytes.push(codePoint >> 12 | 224, codePoint >> 6 & 63 | 128, codePoint & 63 | 128);
    } else if (codePoint < 1114112) {
      if ((units -= 4) < 0) {
        break;
      }
      bytes.push(
        codePoint >> 18 | 240,
        codePoint >> 12 & 63 | 128,
        codePoint >> 6 & 63 | 128,
        codePoint & 63 | 128
      );
    } else {
      throw new Error("Invalid code point");
    }
  }
  return bytes;
}
|
1552
|
+
static _base64ToBytes(str) {
|
1553
|
+
return toByteArray(base64clean(str));
|
1554
|
+
}
|
1555
|
+
static _asciiToBytes(str) {
|
1556
|
+
const byteArray = [];
|
1557
|
+
for (let i = 0; i < str.length; ++i) {
|
1558
|
+
byteArray.push(str.charCodeAt(i) & 255);
|
1559
|
+
}
|
1560
|
+
return byteArray;
|
1561
|
+
}
|
1562
|
+
static _utf16leToBytes(str, units) {
|
1563
|
+
let c, hi, lo;
|
1564
|
+
const byteArray = [];
|
1565
|
+
for (let i = 0; i < str.length; ++i) {
|
1566
|
+
if ((units -= 2) < 0) break;
|
1567
|
+
c = str.charCodeAt(i);
|
1568
|
+
hi = c >> 8;
|
1569
|
+
lo = c % 256;
|
1570
|
+
byteArray.push(lo);
|
1571
|
+
byteArray.push(hi);
|
1572
|
+
}
|
1573
|
+
return byteArray;
|
1574
|
+
}
|
1575
|
+
static _hexSlice(buf, start, end) {
|
1576
|
+
const len = buf.length;
|
1577
|
+
if (!start || start < 0) {
|
1578
|
+
start = 0;
|
1579
|
+
}
|
1580
|
+
if (!end || end < 0 || end > len) {
|
1581
|
+
end = len;
|
1582
|
+
}
|
1583
|
+
let out = "";
|
1584
|
+
for (let i = start; i < end; ++i) {
|
1585
|
+
out += hexSliceLookupTable[buf[i]];
|
1586
|
+
}
|
1587
|
+
return out;
|
1588
|
+
}
|
1589
|
+
static _base64Slice(buf, start, end) {
|
1590
|
+
if (start === 0 && end === buf.length) {
|
1591
|
+
return fromByteArray(buf);
|
1592
|
+
} else {
|
1593
|
+
return fromByteArray(buf.slice(start, end));
|
1594
|
+
}
|
1595
|
+
}
|
1596
|
+
/**
 * Decodes `buf[start..end)` as UTF-8 into a JS string. Invalid sequences
 * decode to U+FFFD one byte at a time; supplementary-plane code points are
 * split into surrogate pairs.
 *
 * @param buf Source bytes.
 * @param start Start index.
 * @param end End index (exclusive); clamped to `buf.length`.
 * @returns The decoded string.
 */
static _utf8Slice(buf, start, end) {
  end = Math.min(buf.length, end);
  // Collected UTF-16 code units, materialized into a string at the end.
  const res = [];
  let i = start;
  while (i < end) {
    const firstByte = buf[i];
    let codePoint = null;
    // Sequence length from the lead byte: F0.. => 4, E0.. => 3, C0.. => 2, else 1.
    let bytesPerSequence = firstByte > 239 ? 4 : firstByte > 223 ? 3 : firstByte > 191 ? 2 : 1;
    // Only attempt a multi-byte decode when the whole sequence fits in range.
    if (i + bytesPerSequence <= end) {
      let secondByte, thirdByte, fourthByte, tempCodePoint;
      switch (bytesPerSequence) {
        case 1:
          if (firstByte < 128) {
            codePoint = firstByte;
          }
          break;
        case 2:
          secondByte = buf[i + 1];
          // Continuation bytes must match 10xxxxxx.
          if ((secondByte & 192) === 128) {
            tempCodePoint = (firstByte & 31) << 6 | secondByte & 63;
            // Reject overlong encodings (< 0x80).
            if (tempCodePoint > 127) {
              codePoint = tempCodePoint;
            }
          }
          break;
        case 3:
          secondByte = buf[i + 1];
          thirdByte = buf[i + 2];
          if ((secondByte & 192) === 128 && (thirdByte & 192) === 128) {
            tempCodePoint = (firstByte & 15) << 12 | (secondByte & 63) << 6 | thirdByte & 63;
            // Reject overlong encodings and the surrogate range U+D800..U+DFFF.
            if (tempCodePoint > 2047 && (tempCodePoint < 55296 || tempCodePoint > 57343)) {
              codePoint = tempCodePoint;
            }
          }
          break;
        case 4:
          secondByte = buf[i + 1];
          thirdByte = buf[i + 2];
          fourthByte = buf[i + 3];
          if ((secondByte & 192) === 128 && (thirdByte & 192) === 128 && (fourthByte & 192) === 128) {
            tempCodePoint = (firstByte & 15) << 18 | (secondByte & 63) << 12 | (thirdByte & 63) << 6 | fourthByte & 63;
            // Reject overlong encodings and values beyond U+10FFFF.
            if (tempCodePoint > 65535 && tempCodePoint < 1114112) {
              codePoint = tempCodePoint;
            }
          }
      }
    }
    if (codePoint === null) {
      // Invalid or truncated sequence: emit U+FFFD and resynchronize one byte later.
      codePoint = 65533;
      bytesPerSequence = 1;
    } else if (codePoint > 65535) {
      // Split supplementary-plane code points into a UTF-16 surrogate pair.
      codePoint -= 65536;
      res.push(codePoint >>> 10 & 1023 | 55296);
      codePoint = 56320 | codePoint & 1023;
    }
    res.push(codePoint);
    i += bytesPerSequence;
  }
  return Buffer._decodeCodePointsArray(res);
}
|
1656
|
+
static _decodeCodePointsArray(codePoints) {
|
1657
|
+
const len = codePoints.length;
|
1658
|
+
if (len <= MAX_ARGUMENTS_LENGTH) {
|
1659
|
+
return String.fromCharCode.apply(String, codePoints);
|
1660
|
+
}
|
1661
|
+
let res = "";
|
1662
|
+
let i = 0;
|
1663
|
+
while (i < len) {
|
1664
|
+
res += String.fromCharCode.apply(String, codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH));
|
1665
|
+
}
|
1666
|
+
return res;
|
1667
|
+
}
|
1668
|
+
static _asciiSlice(buf, start, end) {
|
1669
|
+
let ret = "";
|
1670
|
+
end = Math.min(buf.length, end);
|
1671
|
+
for (let i = start; i < end; ++i) {
|
1672
|
+
ret += String.fromCharCode(buf[i] & 127);
|
1673
|
+
}
|
1674
|
+
return ret;
|
1675
|
+
}
|
1676
|
+
static _latin1Slice(buf, start, end) {
|
1677
|
+
let ret = "";
|
1678
|
+
end = Math.min(buf.length, end);
|
1679
|
+
for (let i = start; i < end; ++i) {
|
1680
|
+
ret += String.fromCharCode(buf[i]);
|
1681
|
+
}
|
1682
|
+
return ret;
|
1683
|
+
}
|
1684
|
+
static _utf16leSlice(buf, start, end) {
|
1685
|
+
const bytes = buf.slice(start, end);
|
1686
|
+
let res = "";
|
1687
|
+
for (let i = 0; i < bytes.length - 1; i += 2) {
|
1688
|
+
res += String.fromCharCode(bytes[i] + bytes[i + 1] * 256);
|
1689
|
+
}
|
1690
|
+
return res;
|
1691
|
+
}
|
1692
|
+
static _arrayIndexOf(arr, val, byteOffset, encoding, dir) {
|
1693
|
+
let indexSize = 1;
|
1694
|
+
let arrLength = arr.length;
|
1695
|
+
let valLength = val.length;
|
1696
|
+
if (encoding !== void 0) {
|
1697
|
+
encoding = Buffer._getEncoding(encoding);
|
1698
|
+
if (encoding === "ucs2" || encoding === "utf16le") {
|
1699
|
+
if (arr.length < 2 || val.length < 2) {
|
1700
|
+
return -1;
|
1701
|
+
}
|
1702
|
+
indexSize = 2;
|
1703
|
+
arrLength /= 2;
|
1704
|
+
valLength /= 2;
|
1705
|
+
byteOffset /= 2;
|
1706
|
+
}
|
1707
|
+
}
|
1708
|
+
function read(buf, i2) {
|
1709
|
+
if (indexSize === 1) {
|
1710
|
+
return buf[i2];
|
1711
|
+
} else {
|
1712
|
+
return buf.readUInt16BE(i2 * indexSize);
|
1713
|
+
}
|
1714
|
+
}
|
1715
|
+
let i;
|
1716
|
+
if (dir) {
|
1717
|
+
let foundIndex = -1;
|
1718
|
+
for (i = byteOffset; i < arrLength; i++) {
|
1719
|
+
if (read(arr, i) === read(val, foundIndex === -1 ? 0 : i - foundIndex)) {
|
1720
|
+
if (foundIndex === -1) foundIndex = i;
|
1721
|
+
if (i - foundIndex + 1 === valLength) return foundIndex * indexSize;
|
1722
|
+
} else {
|
1723
|
+
if (foundIndex !== -1) i -= i - foundIndex;
|
1724
|
+
foundIndex = -1;
|
1725
|
+
}
|
1726
|
+
}
|
1727
|
+
} else {
|
1728
|
+
if (byteOffset + valLength > arrLength) {
|
1729
|
+
byteOffset = arrLength - valLength;
|
1730
|
+
}
|
1731
|
+
for (i = byteOffset; i >= 0; i--) {
|
1732
|
+
let found = true;
|
1733
|
+
for (let j = 0; j < valLength; j++) {
|
1734
|
+
if (read(arr, i + j) !== read(val, j)) {
|
1735
|
+
found = false;
|
1736
|
+
break;
|
1737
|
+
}
|
1738
|
+
}
|
1739
|
+
if (found) {
|
1740
|
+
return i;
|
1741
|
+
}
|
1742
|
+
}
|
1743
|
+
}
|
1744
|
+
return -1;
|
1745
|
+
}
|
1746
|
+
static _checkOffset(offset, ext, length) {
|
1747
|
+
if (offset % 1 !== 0 || offset < 0) throw new RangeError("offset is not uint");
|
1748
|
+
if (offset + ext > length) throw new RangeError("Trying to access beyond buffer length");
|
1749
|
+
}
|
1750
|
+
static _checkInt(buf, value, offset, ext, max, min) {
|
1751
|
+
if (!Buffer.isBuffer(buf)) throw new TypeError('"buffer" argument must be a Buffer instance');
|
1752
|
+
if (value > max || value < min) throw new RangeError('"value" argument is out of bounds');
|
1753
|
+
if (offset + ext > buf.length) throw new RangeError("Index out of range");
|
1754
|
+
}
|
1755
|
+
static _getEncoding(encoding) {
|
1756
|
+
let toLowerCase = false;
|
1757
|
+
let originalEncoding = "";
|
1758
|
+
for (; ; ) {
|
1759
|
+
switch (encoding) {
|
1760
|
+
case "hex":
|
1761
|
+
return "hex";
|
1762
|
+
case "utf8":
|
1763
|
+
return "utf8";
|
1764
|
+
case "ascii":
|
1765
|
+
return "ascii";
|
1766
|
+
case "binary":
|
1767
|
+
return "binary";
|
1768
|
+
case "latin1":
|
1769
|
+
return "latin1";
|
1770
|
+
case "ucs2":
|
1771
|
+
return "ucs2";
|
1772
|
+
case "utf16le":
|
1773
|
+
return "utf16le";
|
1774
|
+
case "base64":
|
1775
|
+
return "base64";
|
1776
|
+
default: {
|
1777
|
+
if (toLowerCase) {
|
1778
|
+
throw new TypeError("Unknown or unsupported encoding: " + originalEncoding);
|
1779
|
+
}
|
1780
|
+
toLowerCase = true;
|
1781
|
+
originalEncoding = encoding;
|
1782
|
+
encoding = encoding.toLowerCase();
|
1783
|
+
}
|
1784
|
+
}
|
1785
|
+
}
|
1786
|
+
}
|
1787
|
+
}
|
1788
|
+
const hexSliceLookupTable = function() {
|
1789
|
+
const alphabet = "0123456789abcdef";
|
1790
|
+
const table = new Array(256);
|
1791
|
+
for (let i = 0; i < 16; ++i) {
|
1792
|
+
const i16 = i * 16;
|
1793
|
+
for (let j = 0; j < 16; ++j) {
|
1794
|
+
table[i16 + j] = alphabet[i] + alphabet[j];
|
1795
|
+
}
|
1796
|
+
}
|
1797
|
+
return table;
|
1798
|
+
}();
|
1799
|
+
const INVALID_BASE64_RE = /[^+/0-9A-Za-z-_]/g;
|
1800
|
+
function base64clean(str) {
|
1801
|
+
str = str.split("=")[0];
|
1802
|
+
str = str.trim().replace(INVALID_BASE64_RE, "");
|
1803
|
+
if (str.length < 2) return "";
|
1804
|
+
while (str.length % 4 !== 0) {
|
1805
|
+
str = str + "=";
|
1806
|
+
}
|
1807
|
+
return str;
|
1808
|
+
}
|
1809
|
+
|
27
1810
|
function notEmpty(value) {
|
28
1811
|
return value !== null && value !== void 0;
|
29
1812
|
}
|
@@ -118,155 +1901,15 @@ function promiseMap(inputValues, mapper) {
|
|
118
1901
|
return inputValues.reduce(reducer, Promise.resolve([]));
|
119
1902
|
}
|
120
1903
|
|
121
|
-
|
122
|
-
|
123
|
-
if (isDefined(process) && isDefined(process.env)) {
|
124
|
-
return {
|
125
|
-
apiKey: process.env.XATA_API_KEY ?? getGlobalApiKey(),
|
126
|
-
databaseURL: process.env.XATA_DATABASE_URL ?? getGlobalDatabaseURL(),
|
127
|
-
branch: process.env.XATA_BRANCH ?? getGlobalBranch(),
|
128
|
-
deployPreview: process.env.XATA_PREVIEW,
|
129
|
-
deployPreviewBranch: process.env.XATA_PREVIEW_BRANCH,
|
130
|
-
vercelGitCommitRef: process.env.VERCEL_GIT_COMMIT_REF,
|
131
|
-
vercelGitRepoOwner: process.env.VERCEL_GIT_REPO_OWNER
|
132
|
-
};
|
133
|
-
}
|
134
|
-
} catch (err) {
|
135
|
-
}
|
136
|
-
try {
|
137
|
-
if (isObject(Deno) && isObject(Deno.env)) {
|
138
|
-
return {
|
139
|
-
apiKey: Deno.env.get("XATA_API_KEY") ?? getGlobalApiKey(),
|
140
|
-
databaseURL: Deno.env.get("XATA_DATABASE_URL") ?? getGlobalDatabaseURL(),
|
141
|
-
branch: Deno.env.get("XATA_BRANCH") ?? getGlobalBranch(),
|
142
|
-
deployPreview: Deno.env.get("XATA_PREVIEW"),
|
143
|
-
deployPreviewBranch: Deno.env.get("XATA_PREVIEW_BRANCH"),
|
144
|
-
vercelGitCommitRef: Deno.env.get("VERCEL_GIT_COMMIT_REF"),
|
145
|
-
vercelGitRepoOwner: Deno.env.get("VERCEL_GIT_REPO_OWNER")
|
146
|
-
};
|
147
|
-
}
|
148
|
-
} catch (err) {
|
149
|
-
}
|
150
|
-
return {
|
151
|
-
apiKey: getGlobalApiKey(),
|
152
|
-
databaseURL: getGlobalDatabaseURL(),
|
153
|
-
branch: getGlobalBranch(),
|
154
|
-
deployPreview: void 0,
|
155
|
-
deployPreviewBranch: void 0,
|
156
|
-
vercelGitCommitRef: void 0,
|
157
|
-
vercelGitRepoOwner: void 0
|
158
|
-
};
|
159
|
-
}
|
160
|
-
function getEnableBrowserVariable() {
|
161
|
-
try {
|
162
|
-
if (isObject(process) && isObject(process.env) && process.env.XATA_ENABLE_BROWSER !== void 0) {
|
163
|
-
return process.env.XATA_ENABLE_BROWSER === "true";
|
164
|
-
}
|
165
|
-
} catch (err) {
|
166
|
-
}
|
167
|
-
try {
|
168
|
-
if (isObject(Deno) && isObject(Deno.env) && Deno.env.get("XATA_ENABLE_BROWSER") !== void 0) {
|
169
|
-
return Deno.env.get("XATA_ENABLE_BROWSER") === "true";
|
170
|
-
}
|
171
|
-
} catch (err) {
|
172
|
-
}
|
173
|
-
try {
|
174
|
-
return XATA_ENABLE_BROWSER === true || XATA_ENABLE_BROWSER === "true";
|
175
|
-
} catch (err) {
|
176
|
-
return void 0;
|
177
|
-
}
|
178
|
-
}
|
179
|
-
function getGlobalApiKey() {
|
180
|
-
try {
|
181
|
-
return XATA_API_KEY;
|
182
|
-
} catch (err) {
|
183
|
-
return void 0;
|
184
|
-
}
|
185
|
-
}
|
186
|
-
function getGlobalDatabaseURL() {
|
187
|
-
try {
|
188
|
-
return XATA_DATABASE_URL;
|
189
|
-
} catch (err) {
|
190
|
-
return void 0;
|
191
|
-
}
|
192
|
-
}
|
193
|
-
function getGlobalBranch() {
|
194
|
-
try {
|
195
|
-
return XATA_BRANCH;
|
196
|
-
} catch (err) {
|
197
|
-
return void 0;
|
198
|
-
}
|
199
|
-
}
|
200
|
-
function getDatabaseURL() {
|
201
|
-
try {
|
202
|
-
const { databaseURL } = getEnvironment();
|
203
|
-
return databaseURL;
|
204
|
-
} catch (err) {
|
205
|
-
return void 0;
|
206
|
-
}
|
207
|
-
}
|
208
|
-
function getAPIKey() {
|
209
|
-
try {
|
210
|
-
const { apiKey } = getEnvironment();
|
211
|
-
return apiKey;
|
212
|
-
} catch (err) {
|
213
|
-
return void 0;
|
214
|
-
}
|
215
|
-
}
|
216
|
-
function getBranch() {
|
217
|
-
try {
|
218
|
-
const { branch } = getEnvironment();
|
219
|
-
return branch;
|
220
|
-
} catch (err) {
|
221
|
-
return void 0;
|
222
|
-
}
|
223
|
-
}
|
224
|
-
function buildPreviewBranchName({ org, branch }) {
|
225
|
-
return `preview-${org}-${branch}`;
|
226
|
-
}
|
227
|
-
function getPreviewBranch() {
|
228
|
-
try {
|
229
|
-
const { deployPreview, deployPreviewBranch, vercelGitCommitRef, vercelGitRepoOwner } = getEnvironment();
|
230
|
-
if (deployPreviewBranch)
|
231
|
-
return deployPreviewBranch;
|
232
|
-
switch (deployPreview) {
|
233
|
-
case "vercel": {
|
234
|
-
if (!vercelGitCommitRef || !vercelGitRepoOwner) {
|
235
|
-
console.warn("XATA_PREVIEW=vercel but VERCEL_GIT_COMMIT_REF or VERCEL_GIT_REPO_OWNER is not valid");
|
236
|
-
return void 0;
|
237
|
-
}
|
238
|
-
return buildPreviewBranchName({ org: vercelGitRepoOwner, branch: vercelGitCommitRef });
|
239
|
-
}
|
240
|
-
}
|
241
|
-
return void 0;
|
242
|
-
} catch (err) {
|
243
|
-
return void 0;
|
244
|
-
}
|
245
|
-
}
|
246
|
-
|
247
|
-
var __accessCheck$6 = (obj, member, msg) => {
|
248
|
-
if (!member.has(obj))
|
249
|
-
throw TypeError("Cannot " + msg);
|
250
|
-
};
|
251
|
-
var __privateGet$5 = (obj, member, getter) => {
|
252
|
-
__accessCheck$6(obj, member, "read from private field");
|
253
|
-
return getter ? getter.call(obj) : member.get(obj);
|
254
|
-
};
|
255
|
-
var __privateAdd$6 = (obj, member, value) => {
|
256
|
-
if (member.has(obj))
|
257
|
-
throw TypeError("Cannot add the same private member more than once");
|
258
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
1904
|
+
var __typeError$6 = (msg) => {
|
1905
|
+
throw TypeError(msg);
|
259
1906
|
};
|
260
|
-
var
|
261
|
-
|
262
|
-
|
263
|
-
|
264
|
-
|
265
|
-
var
|
266
|
-
__accessCheck$6(obj, member, "access private method");
|
267
|
-
return method;
|
268
|
-
};
|
269
|
-
var _fetch, _queue, _concurrency, _enqueue, enqueue_fn;
|
1907
|
+
var __accessCheck$6 = (obj, member, msg) => member.has(obj) || __typeError$6("Cannot " + msg);
|
1908
|
+
var __privateGet$5 = (obj, member, getter) => (__accessCheck$6(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
1909
|
+
var __privateAdd$6 = (obj, member, value) => member.has(obj) ? __typeError$6("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
1910
|
+
var __privateSet$4 = (obj, member, value, setter) => (__accessCheck$6(obj, member, "write to private field"), member.set(obj, value), value);
|
1911
|
+
var __privateMethod$4 = (obj, member, method) => (__accessCheck$6(obj, member, "access private method"), method);
|
1912
|
+
var _fetch, _queue, _concurrency, _ApiRequestPool_instances, enqueue_fn;
|
270
1913
|
const REQUEST_TIMEOUT = 5 * 60 * 1e3;
|
271
1914
|
function getFetchImplementation(userFetch) {
|
272
1915
|
const globalFetch = typeof fetch !== "undefined" ? fetch : void 0;
|
@@ -279,10 +1922,10 @@ function getFetchImplementation(userFetch) {
|
|
279
1922
|
}
|
280
1923
|
class ApiRequestPool {
|
281
1924
|
constructor(concurrency = 10) {
|
282
|
-
__privateAdd$6(this,
|
283
|
-
__privateAdd$6(this, _fetch
|
284
|
-
__privateAdd$6(this, _queue
|
285
|
-
__privateAdd$6(this, _concurrency
|
1925
|
+
__privateAdd$6(this, _ApiRequestPool_instances);
|
1926
|
+
__privateAdd$6(this, _fetch);
|
1927
|
+
__privateAdd$6(this, _queue);
|
1928
|
+
__privateAdd$6(this, _concurrency);
|
286
1929
|
__privateSet$4(this, _queue, []);
|
287
1930
|
__privateSet$4(this, _concurrency, concurrency);
|
288
1931
|
this.running = 0;
|
@@ -317,7 +1960,7 @@ class ApiRequestPool {
|
|
317
1960
|
}
|
318
1961
|
return response;
|
319
1962
|
};
|
320
|
-
return __privateMethod$4(this,
|
1963
|
+
return __privateMethod$4(this, _ApiRequestPool_instances, enqueue_fn).call(this, async () => {
|
321
1964
|
return await runRequest();
|
322
1965
|
});
|
323
1966
|
}
|
@@ -325,7 +1968,7 @@ class ApiRequestPool {
|
|
325
1968
|
_fetch = new WeakMap();
|
326
1969
|
_queue = new WeakMap();
|
327
1970
|
_concurrency = new WeakMap();
|
328
|
-
|
1971
|
+
_ApiRequestPool_instances = new WeakSet();
|
329
1972
|
enqueue_fn = function(task) {
|
330
1973
|
const promise = new Promise((resolve) => __privateGet$5(this, _queue).push(resolve)).finally(() => {
|
331
1974
|
this.started--;
|
@@ -528,7 +2171,7 @@ function defaultOnOpen(response) {
|
|
528
2171
|
}
|
529
2172
|
}
|
530
2173
|
|
531
|
-
const VERSION = "0.
|
2174
|
+
const VERSION = "0.30.0";
|
532
2175
|
|
533
2176
|
class ErrorWithCause extends Error {
|
534
2177
|
constructor(message, options) {
|
@@ -608,35 +2251,30 @@ function parseProviderString(provider = "production") {
|
|
608
2251
|
return provider;
|
609
2252
|
}
|
610
2253
|
const [main, workspaces] = provider.split(",");
|
611
|
-
if (!main || !workspaces)
|
612
|
-
return null;
|
2254
|
+
if (!main || !workspaces) return null;
|
613
2255
|
return { main, workspaces };
|
614
2256
|
}
|
615
2257
|
function buildProviderString(provider) {
|
616
|
-
if (isHostProviderAlias(provider))
|
617
|
-
return provider;
|
2258
|
+
if (isHostProviderAlias(provider)) return provider;
|
618
2259
|
return `${provider.main},${provider.workspaces}`;
|
619
2260
|
}
|
620
2261
|
function parseWorkspacesUrlParts(url) {
|
621
|
-
if (!isString(url))
|
622
|
-
return null;
|
2262
|
+
if (!isString(url)) return null;
|
623
2263
|
const matches = {
|
624
|
-
production: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.xata\.sh
|
625
|
-
staging: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.staging-xata\.dev
|
626
|
-
dev: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.dev-xata\.dev
|
627
|
-
local: url.match(/(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:(
|
2264
|
+
production: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.xata\.sh\/db\/([^:]+):?(.*)?/),
|
2265
|
+
staging: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.staging-xata\.dev\/db\/([^:]+):?(.*)?/),
|
2266
|
+
dev: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.dev-xata\.dev\/db\/([^:]+):?(.*)?/),
|
2267
|
+
local: url.match(/(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:(?:\d+)\/db\/([^:]+):?(.*)?/)
|
628
2268
|
};
|
629
2269
|
const [host, match] = Object.entries(matches).find(([, match2]) => match2 !== null) ?? [];
|
630
|
-
if (!isHostProviderAlias(host) || !match)
|
631
|
-
|
632
|
-
return { workspace: match[1], region: match[2], host };
|
2270
|
+
if (!isHostProviderAlias(host) || !match) return null;
|
2271
|
+
return { workspace: match[1], region: match[2], database: match[3], branch: match[4], host };
|
633
2272
|
}
|
634
2273
|
|
635
2274
|
const pool = new ApiRequestPool();
|
636
2275
|
const resolveUrl = (url, queryParams = {}, pathParams = {}) => {
|
637
2276
|
const cleanQueryParams = Object.entries(queryParams).reduce((acc, [key, value]) => {
|
638
|
-
if (value === void 0 || value === null)
|
639
|
-
return acc;
|
2277
|
+
if (value === void 0 || value === null) return acc;
|
640
2278
|
return { ...acc, [key]: value };
|
641
2279
|
}, {});
|
642
2280
|
const query = new URLSearchParams(cleanQueryParams).toString();
|
@@ -684,8 +2322,7 @@ function hostHeader(url) {
|
|
684
2322
|
return groups?.host ? { Host: groups.host } : {};
|
685
2323
|
}
|
686
2324
|
async function parseBody(body, headers) {
|
687
|
-
if (!isDefined(body))
|
688
|
-
return void 0;
|
2325
|
+
if (!isDefined(body)) return void 0;
|
689
2326
|
if (isBlob(body) || typeof body.text === "function") {
|
690
2327
|
return body;
|
691
2328
|
}
|
@@ -764,8 +2401,7 @@ async function fetch$1({
|
|
764
2401
|
[TraceAttributes.CLOUDFLARE_RAY_ID]: response.headers?.get("cf-ray") ?? void 0
|
765
2402
|
});
|
766
2403
|
const message = response.headers?.get("x-xata-message");
|
767
|
-
if (message)
|
768
|
-
console.warn(message);
|
2404
|
+
if (message) console.warn(message);
|
769
2405
|
if (response.status === 204) {
|
770
2406
|
return {};
|
771
2407
|
}
|
@@ -849,16 +2485,108 @@ function parseUrl(url) {
|
|
849
2485
|
|
850
2486
|
const dataPlaneFetch = async (options) => fetch$1({ ...options, endpoint: "dataPlane" });
|
851
2487
|
|
852
|
-
const
|
2488
|
+
const getTasks = (variables, signal) => dataPlaneFetch({
|
2489
|
+
url: "/tasks",
|
2490
|
+
method: "get",
|
2491
|
+
...variables,
|
2492
|
+
signal
|
2493
|
+
});
|
2494
|
+
const getTaskStatus = (variables, signal) => dataPlaneFetch({
|
2495
|
+
url: "/tasks/{taskId}",
|
2496
|
+
method: "get",
|
2497
|
+
...variables,
|
2498
|
+
signal
|
2499
|
+
});
|
2500
|
+
const listClusterBranches = (variables, signal) => dataPlaneFetch({
|
2501
|
+
url: "/cluster/{clusterId}/branches",
|
2502
|
+
method: "get",
|
2503
|
+
...variables,
|
2504
|
+
signal
|
2505
|
+
});
|
2506
|
+
const listClusterExtensions = (variables, signal) => dataPlaneFetch({
|
2507
|
+
url: "/cluster/{clusterId}/extensions",
|
2508
|
+
method: "get",
|
2509
|
+
...variables,
|
2510
|
+
signal
|
2511
|
+
});
|
2512
|
+
const installClusterExtension = (variables, signal) => dataPlaneFetch({
|
2513
|
+
url: "/cluster/{clusterId}/extensions",
|
2514
|
+
method: "post",
|
2515
|
+
...variables,
|
2516
|
+
signal
|
2517
|
+
});
|
2518
|
+
const dropClusterExtension = (variables, signal) => dataPlaneFetch({
|
2519
|
+
url: "/cluster/{clusterId}/extensions",
|
2520
|
+
method: "delete",
|
2521
|
+
...variables,
|
2522
|
+
signal
|
2523
|
+
});
|
2524
|
+
const getClusterMetrics = (variables, signal) => dataPlaneFetch({
|
2525
|
+
url: "/cluster/{clusterId}/metrics",
|
2526
|
+
method: "get",
|
2527
|
+
...variables,
|
2528
|
+
signal
|
2529
|
+
});
|
2530
|
+
const applyMigration = (variables, signal) => dataPlaneFetch({
|
2531
|
+
url: "/db/{dbBranchName}/migrations/apply",
|
2532
|
+
method: "post",
|
2533
|
+
...variables,
|
2534
|
+
signal
|
2535
|
+
});
|
2536
|
+
const startMigration = (variables, signal) => dataPlaneFetch({
|
2537
|
+
url: "/db/{dbBranchName}/migrations/start",
|
2538
|
+
method: "post",
|
2539
|
+
...variables,
|
2540
|
+
signal
|
2541
|
+
});
|
2542
|
+
const completeMigration = (variables, signal) => dataPlaneFetch({
|
2543
|
+
url: "/db/{dbBranchName}/migrations/complete",
|
2544
|
+
method: "post",
|
2545
|
+
...variables,
|
2546
|
+
signal
|
2547
|
+
});
|
2548
|
+
const rollbackMigration = (variables, signal) => dataPlaneFetch({
|
2549
|
+
url: "/db/{dbBranchName}/migrations/rollback",
|
2550
|
+
method: "post",
|
2551
|
+
...variables,
|
2552
|
+
signal
|
2553
|
+
});
|
853
2554
|
const adaptTable = (variables, signal) => dataPlaneFetch({
|
854
2555
|
url: "/db/{dbBranchName}/migrations/adapt/{tableName}",
|
855
2556
|
method: "post",
|
856
2557
|
...variables,
|
857
2558
|
signal
|
858
2559
|
});
|
859
|
-
const
|
860
|
-
|
861
|
-
|
2560
|
+
const adaptAllTables = (variables, signal) => dataPlaneFetch({
|
2561
|
+
url: "/db/{dbBranchName}/migrations/adapt",
|
2562
|
+
method: "post",
|
2563
|
+
...variables,
|
2564
|
+
signal
|
2565
|
+
});
|
2566
|
+
const getBranchMigrationJobStatus = (variables, signal) => dataPlaneFetch({
|
2567
|
+
url: "/db/{dbBranchName}/migrations/status",
|
2568
|
+
method: "get",
|
2569
|
+
...variables,
|
2570
|
+
signal
|
2571
|
+
});
|
2572
|
+
const getMigrationJobs = (variables, signal) => dataPlaneFetch({
|
2573
|
+
url: "/db/{dbBranchName}/migrations/jobs",
|
2574
|
+
method: "get",
|
2575
|
+
...variables,
|
2576
|
+
signal
|
2577
|
+
});
|
2578
|
+
const getMigrationJobStatus = (variables, signal) => dataPlaneFetch({
|
2579
|
+
url: "/db/{dbBranchName}/migrations/jobs/{jobId}",
|
2580
|
+
method: "get",
|
2581
|
+
...variables,
|
2582
|
+
signal
|
2583
|
+
});
|
2584
|
+
const getMigrationHistory = (variables, signal) => dataPlaneFetch({
|
2585
|
+
url: "/db/{dbBranchName}/migrations/history",
|
2586
|
+
method: "get",
|
2587
|
+
...variables,
|
2588
|
+
signal
|
2589
|
+
});
|
862
2590
|
const getBranchList = (variables, signal) => dataPlaneFetch({
|
863
2591
|
url: "/dbs/{dbName}",
|
864
2592
|
method: "get",
|
@@ -872,6 +2600,7 @@ const getDatabaseSettings = (variables, signal) => dataPlaneFetch({
|
|
872
2600
|
signal
|
873
2601
|
});
|
874
2602
|
const updateDatabaseSettings = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/settings", method: "patch", ...variables, signal });
|
2603
|
+
const createBranchAsync = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/async", method: "put", ...variables, signal });
|
875
2604
|
const getBranchDetails = (variables, signal) => dataPlaneFetch({
|
876
2605
|
url: "/db/{dbBranchName}",
|
877
2606
|
method: "get",
|
@@ -885,68 +2614,166 @@ const deleteBranch = (variables, signal) => dataPlaneFetch({
|
|
885
2614
|
...variables,
|
886
2615
|
signal
|
887
2616
|
});
|
888
|
-
const getSchema = (variables, signal) => dataPlaneFetch({
|
889
|
-
url: "/db/{dbBranchName}/schema",
|
890
|
-
method: "get",
|
2617
|
+
const getSchema = (variables, signal) => dataPlaneFetch({
|
2618
|
+
url: "/db/{dbBranchName}/schema",
|
2619
|
+
method: "get",
|
2620
|
+
...variables,
|
2621
|
+
signal
|
2622
|
+
});
|
2623
|
+
const getSchemas = (variables, signal) => dataPlaneFetch({
|
2624
|
+
url: "/db/{dbBranchName}/schemas",
|
2625
|
+
method: "get",
|
2626
|
+
...variables,
|
2627
|
+
signal
|
2628
|
+
});
|
2629
|
+
const copyBranch = (variables, signal) => dataPlaneFetch({
|
2630
|
+
url: "/db/{dbBranchName}/copy",
|
2631
|
+
method: "post",
|
2632
|
+
...variables,
|
2633
|
+
signal
|
2634
|
+
});
|
2635
|
+
const getBranchMoveStatus = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/move", method: "get", ...variables, signal });
|
2636
|
+
const moveBranch = (variables, signal) => dataPlaneFetch({
|
2637
|
+
url: "/db/{dbBranchName}/move",
|
2638
|
+
method: "put",
|
2639
|
+
...variables,
|
2640
|
+
signal
|
2641
|
+
});
|
2642
|
+
const updateBranchMetadata = (variables, signal) => dataPlaneFetch({
|
2643
|
+
url: "/db/{dbBranchName}/metadata",
|
2644
|
+
method: "put",
|
2645
|
+
...variables,
|
2646
|
+
signal
|
2647
|
+
});
|
2648
|
+
const getBranchMetadata = (variables, signal) => dataPlaneFetch({
|
2649
|
+
url: "/db/{dbBranchName}/metadata",
|
2650
|
+
method: "get",
|
2651
|
+
...variables,
|
2652
|
+
signal
|
2653
|
+
});
|
2654
|
+
const getBranchStats = (variables, signal) => dataPlaneFetch({
|
2655
|
+
url: "/db/{dbBranchName}/stats",
|
2656
|
+
method: "get",
|
2657
|
+
...variables,
|
2658
|
+
signal
|
2659
|
+
});
|
2660
|
+
const getGitBranchesMapping = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "get", ...variables, signal });
|
2661
|
+
const addGitBranchesEntry = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "post", ...variables, signal });
|
2662
|
+
const removeGitBranchesEntry = (variables, signal) => dataPlaneFetch({
|
2663
|
+
url: "/dbs/{dbName}/gitBranches",
|
2664
|
+
method: "delete",
|
2665
|
+
...variables,
|
2666
|
+
signal
|
2667
|
+
});
|
2668
|
+
const resolveBranch = (variables, signal) => dataPlaneFetch({
|
2669
|
+
url: "/dbs/{dbName}/resolveBranch",
|
2670
|
+
method: "get",
|
2671
|
+
...variables,
|
2672
|
+
signal
|
2673
|
+
});
|
2674
|
+
const getBranchMigrationHistory = (variables, signal) => dataPlaneFetch({
|
2675
|
+
url: "/db/{dbBranchName}/migrations",
|
2676
|
+
method: "get",
|
2677
|
+
...variables,
|
2678
|
+
signal
|
2679
|
+
});
|
2680
|
+
const getBranchMigrationPlan = (variables, signal) => dataPlaneFetch({
|
2681
|
+
url: "/db/{dbBranchName}/migrations/plan",
|
2682
|
+
method: "post",
|
2683
|
+
...variables,
|
2684
|
+
signal
|
2685
|
+
});
|
2686
|
+
const executeBranchMigrationPlan = (variables, signal) => dataPlaneFetch({
|
2687
|
+
url: "/db/{dbBranchName}/migrations/execute",
|
2688
|
+
method: "post",
|
2689
|
+
...variables,
|
2690
|
+
signal
|
2691
|
+
});
|
2692
|
+
const queryMigrationRequests = (variables, signal) => dataPlaneFetch({
|
2693
|
+
url: "/dbs/{dbName}/migrations/query",
|
2694
|
+
method: "post",
|
2695
|
+
...variables,
|
2696
|
+
signal
|
2697
|
+
});
|
2698
|
+
const createMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations", method: "post", ...variables, signal });
|
2699
|
+
const getMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2700
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}",
|
2701
|
+
method: "get",
|
2702
|
+
...variables,
|
2703
|
+
signal
|
2704
|
+
});
|
2705
|
+
const updateMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2706
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}",
|
2707
|
+
method: "patch",
|
2708
|
+
...variables,
|
2709
|
+
signal
|
2710
|
+
});
|
2711
|
+
const listMigrationRequestsCommits = (variables, signal) => dataPlaneFetch({
|
2712
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/commits",
|
2713
|
+
method: "post",
|
2714
|
+
...variables,
|
2715
|
+
signal
|
2716
|
+
});
|
2717
|
+
const compareMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2718
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/compare",
|
2719
|
+
method: "post",
|
2720
|
+
...variables,
|
2721
|
+
signal
|
2722
|
+
});
|
2723
|
+
const getMigrationRequestIsMerged = (variables, signal) => dataPlaneFetch({
|
2724
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
|
2725
|
+
method: "get",
|
2726
|
+
...variables,
|
2727
|
+
signal
|
2728
|
+
});
|
2729
|
+
const mergeMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2730
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
|
2731
|
+
method: "post",
|
2732
|
+
...variables,
|
2733
|
+
signal
|
2734
|
+
});
|
2735
|
+
const getBranchSchemaHistory = (variables, signal) => dataPlaneFetch({
|
2736
|
+
url: "/db/{dbBranchName}/schema/history",
|
2737
|
+
method: "post",
|
891
2738
|
...variables,
|
892
2739
|
signal
|
893
2740
|
});
|
894
|
-
const
|
895
|
-
url: "/db/{dbBranchName}/
|
2741
|
+
const compareBranchWithUserSchema = (variables, signal) => dataPlaneFetch({
|
2742
|
+
url: "/db/{dbBranchName}/schema/compare",
|
896
2743
|
method: "post",
|
897
2744
|
...variables,
|
898
2745
|
signal
|
899
2746
|
});
|
900
|
-
const
|
901
|
-
url: "/db/{dbBranchName}/
|
902
|
-
method: "
|
2747
|
+
const compareBranchSchemas = (variables, signal) => dataPlaneFetch({
|
2748
|
+
url: "/db/{dbBranchName}/schema/compare/{branchName}",
|
2749
|
+
method: "post",
|
903
2750
|
...variables,
|
904
2751
|
signal
|
905
2752
|
});
|
906
|
-
const
|
907
|
-
url: "/db/{dbBranchName}/
|
908
|
-
method: "
|
2753
|
+
const updateBranchSchema = (variables, signal) => dataPlaneFetch({
|
2754
|
+
url: "/db/{dbBranchName}/schema/update",
|
2755
|
+
method: "post",
|
909
2756
|
...variables,
|
910
2757
|
signal
|
911
2758
|
});
|
912
|
-
const
|
913
|
-
url: "/db/{dbBranchName}/
|
914
|
-
method: "
|
2759
|
+
const previewBranchSchemaEdit = (variables, signal) => dataPlaneFetch({
|
2760
|
+
url: "/db/{dbBranchName}/schema/preview",
|
2761
|
+
method: "post",
|
915
2762
|
...variables,
|
916
2763
|
signal
|
917
2764
|
});
|
918
|
-
const
|
919
|
-
|
920
|
-
|
921
|
-
const resolveBranch = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/resolveBranch", method: "get", ...variables, signal });
|
922
|
-
const getBranchMigrationHistory = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations", method: "get", ...variables, signal });
|
923
|
-
const getBranchMigrationPlan = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/plan", method: "post", ...variables, signal });
|
924
|
-
const executeBranchMigrationPlan = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/execute", method: "post", ...variables, signal });
|
925
|
-
const queryMigrationRequests = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/query", method: "post", ...variables, signal });
|
926
|
-
const createMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations", method: "post", ...variables, signal });
|
927
|
-
const getMigrationRequest = (variables, signal) => dataPlaneFetch({
|
928
|
-
url: "/dbs/{dbName}/migrations/{mrNumber}",
|
929
|
-
method: "get",
|
2765
|
+
const applyBranchSchemaEdit = (variables, signal) => dataPlaneFetch({
|
2766
|
+
url: "/db/{dbBranchName}/schema/apply",
|
2767
|
+
method: "post",
|
930
2768
|
...variables,
|
931
2769
|
signal
|
932
2770
|
});
|
933
|
-
const
|
934
|
-
|
935
|
-
const compareMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/{mrNumber}/compare", method: "post", ...variables, signal });
|
936
|
-
const getMigrationRequestIsMerged = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/{mrNumber}/merge", method: "get", ...variables, signal });
|
937
|
-
const mergeMigrationRequest = (variables, signal) => dataPlaneFetch({
|
938
|
-
url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
|
2771
|
+
const pushBranchMigrations = (variables, signal) => dataPlaneFetch({
|
2772
|
+
url: "/db/{dbBranchName}/schema/push",
|
939
2773
|
method: "post",
|
940
2774
|
...variables,
|
941
2775
|
signal
|
942
2776
|
});
|
943
|
-
const getBranchSchemaHistory = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/history", method: "post", ...variables, signal });
|
944
|
-
const compareBranchWithUserSchema = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/compare", method: "post", ...variables, signal });
|
945
|
-
const compareBranchSchemas = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/compare/{branchName}", method: "post", ...variables, signal });
|
946
|
-
const updateBranchSchema = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/update", method: "post", ...variables, signal });
|
947
|
-
const previewBranchSchemaEdit = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/preview", method: "post", ...variables, signal });
|
948
|
-
const applyBranchSchemaEdit = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/apply", method: "post", ...variables, signal });
|
949
|
-
const pushBranchMigrations = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/push", method: "post", ...variables, signal });
|
950
2777
|
const createTable = (variables, signal) => dataPlaneFetch({
|
951
2778
|
url: "/db/{dbBranchName}/tables/{tableName}",
|
952
2779
|
method: "put",
|
@@ -959,14 +2786,24 @@ const deleteTable = (variables, signal) => dataPlaneFetch({
|
|
959
2786
|
...variables,
|
960
2787
|
signal
|
961
2788
|
});
|
962
|
-
const updateTable = (variables, signal) => dataPlaneFetch({
|
2789
|
+
const updateTable = (variables, signal) => dataPlaneFetch({
|
2790
|
+
url: "/db/{dbBranchName}/tables/{tableName}",
|
2791
|
+
method: "patch",
|
2792
|
+
...variables,
|
2793
|
+
signal
|
2794
|
+
});
|
963
2795
|
const getTableSchema = (variables, signal) => dataPlaneFetch({
|
964
2796
|
url: "/db/{dbBranchName}/tables/{tableName}/schema",
|
965
2797
|
method: "get",
|
966
2798
|
...variables,
|
967
2799
|
signal
|
968
2800
|
});
|
969
|
-
const setTableSchema = (variables, signal) => dataPlaneFetch({
|
2801
|
+
const setTableSchema = (variables, signal) => dataPlaneFetch({
|
2802
|
+
url: "/db/{dbBranchName}/tables/{tableName}/schema",
|
2803
|
+
method: "put",
|
2804
|
+
...variables,
|
2805
|
+
signal
|
2806
|
+
});
|
970
2807
|
const getTableColumns = (variables, signal) => dataPlaneFetch({
|
971
2808
|
url: "/db/{dbBranchName}/tables/{tableName}/columns",
|
972
2809
|
method: "get",
|
@@ -974,7 +2811,12 @@ const getTableColumns = (variables, signal) => dataPlaneFetch({
|
|
974
2811
|
signal
|
975
2812
|
});
|
976
2813
|
const addTableColumn = (variables, signal) => dataPlaneFetch(
|
977
|
-
{
|
2814
|
+
{
|
2815
|
+
url: "/db/{dbBranchName}/tables/{tableName}/columns",
|
2816
|
+
method: "post",
|
2817
|
+
...variables,
|
2818
|
+
signal
|
2819
|
+
}
|
978
2820
|
);
|
979
2821
|
const getColumn = (variables, signal) => dataPlaneFetch({
|
980
2822
|
url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
|
@@ -982,15 +2824,30 @@ const getColumn = (variables, signal) => dataPlaneFetch({
|
|
982
2824
|
...variables,
|
983
2825
|
signal
|
984
2826
|
});
|
985
|
-
const updateColumn = (variables, signal) => dataPlaneFetch({
|
2827
|
+
const updateColumn = (variables, signal) => dataPlaneFetch({
|
2828
|
+
url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
|
2829
|
+
method: "patch",
|
2830
|
+
...variables,
|
2831
|
+
signal
|
2832
|
+
});
|
986
2833
|
const deleteColumn = (variables, signal) => dataPlaneFetch({
|
987
2834
|
url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
|
988
2835
|
method: "delete",
|
989
2836
|
...variables,
|
990
2837
|
signal
|
991
2838
|
});
|
992
|
-
const branchTransaction = (variables, signal) => dataPlaneFetch({
|
993
|
-
|
2839
|
+
const branchTransaction = (variables, signal) => dataPlaneFetch({
|
2840
|
+
url: "/db/{dbBranchName}/transaction",
|
2841
|
+
method: "post",
|
2842
|
+
...variables,
|
2843
|
+
signal
|
2844
|
+
});
|
2845
|
+
const insertRecord = (variables, signal) => dataPlaneFetch({
|
2846
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data",
|
2847
|
+
method: "post",
|
2848
|
+
...variables,
|
2849
|
+
signal
|
2850
|
+
});
|
994
2851
|
const getFileItem = (variables, signal) => dataPlaneFetch({
|
995
2852
|
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}/column/{columnName}/file/{fileId}",
|
996
2853
|
method: "get",
|
@@ -1033,11 +2890,36 @@ const getRecord = (variables, signal) => dataPlaneFetch({
|
|
1033
2890
|
...variables,
|
1034
2891
|
signal
|
1035
2892
|
});
|
1036
|
-
const insertRecordWithID = (variables, signal) => dataPlaneFetch({
|
1037
|
-
|
1038
|
-
|
1039
|
-
|
1040
|
-
|
2893
|
+
const insertRecordWithID = (variables, signal) => dataPlaneFetch({
|
2894
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2895
|
+
method: "put",
|
2896
|
+
...variables,
|
2897
|
+
signal
|
2898
|
+
});
|
2899
|
+
const updateRecordWithID = (variables, signal) => dataPlaneFetch({
|
2900
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2901
|
+
method: "patch",
|
2902
|
+
...variables,
|
2903
|
+
signal
|
2904
|
+
});
|
2905
|
+
const upsertRecordWithID = (variables, signal) => dataPlaneFetch({
|
2906
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2907
|
+
method: "post",
|
2908
|
+
...variables,
|
2909
|
+
signal
|
2910
|
+
});
|
2911
|
+
const deleteRecord = (variables, signal) => dataPlaneFetch({
|
2912
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2913
|
+
method: "delete",
|
2914
|
+
...variables,
|
2915
|
+
signal
|
2916
|
+
});
|
2917
|
+
const bulkInsertTableRecords = (variables, signal) => dataPlaneFetch({
|
2918
|
+
url: "/db/{dbBranchName}/tables/{tableName}/bulk",
|
2919
|
+
method: "post",
|
2920
|
+
...variables,
|
2921
|
+
signal
|
2922
|
+
});
|
1041
2923
|
const queryTable = (variables, signal) => dataPlaneFetch({
|
1042
2924
|
url: "/db/{dbBranchName}/tables/{tableName}/query",
|
1043
2925
|
method: "post",
|
@@ -1056,16 +2938,36 @@ const searchTable = (variables, signal) => dataPlaneFetch({
|
|
1056
2938
|
...variables,
|
1057
2939
|
signal
|
1058
2940
|
});
|
1059
|
-
const vectorSearchTable = (variables, signal) => dataPlaneFetch({
|
2941
|
+
const vectorSearchTable = (variables, signal) => dataPlaneFetch({
|
2942
|
+
url: "/db/{dbBranchName}/tables/{tableName}/vectorSearch",
|
2943
|
+
method: "post",
|
2944
|
+
...variables,
|
2945
|
+
signal
|
2946
|
+
});
|
1060
2947
|
const askTable = (variables, signal) => dataPlaneFetch({
|
1061
2948
|
url: "/db/{dbBranchName}/tables/{tableName}/ask",
|
1062
2949
|
method: "post",
|
1063
2950
|
...variables,
|
1064
2951
|
signal
|
1065
2952
|
});
|
1066
|
-
const askTableSession = (variables, signal) => dataPlaneFetch({
|
1067
|
-
|
1068
|
-
|
2953
|
+
const askTableSession = (variables, signal) => dataPlaneFetch({
|
2954
|
+
url: "/db/{dbBranchName}/tables/{tableName}/ask/{sessionId}",
|
2955
|
+
method: "post",
|
2956
|
+
...variables,
|
2957
|
+
signal
|
2958
|
+
});
|
2959
|
+
const summarizeTable = (variables, signal) => dataPlaneFetch({
|
2960
|
+
url: "/db/{dbBranchName}/tables/{tableName}/summarize",
|
2961
|
+
method: "post",
|
2962
|
+
...variables,
|
2963
|
+
signal
|
2964
|
+
});
|
2965
|
+
const aggregateTable = (variables, signal) => dataPlaneFetch({
|
2966
|
+
url: "/db/{dbBranchName}/tables/{tableName}/aggregate",
|
2967
|
+
method: "post",
|
2968
|
+
...variables,
|
2969
|
+
signal
|
2970
|
+
});
|
1069
2971
|
const fileAccess = (variables, signal) => dataPlaneFetch({
|
1070
2972
|
url: "/file/{fileId}",
|
1071
2973
|
method: "get",
|
@@ -1084,14 +2986,34 @@ const sqlQuery = (variables, signal) => dataPlaneFetch({
|
|
1084
2986
|
...variables,
|
1085
2987
|
signal
|
1086
2988
|
});
|
2989
|
+
const sqlBatchQuery = (variables, signal) => dataPlaneFetch({
|
2990
|
+
url: "/db/{dbBranchName}/sql/batch",
|
2991
|
+
method: "post",
|
2992
|
+
...variables,
|
2993
|
+
signal
|
2994
|
+
});
|
1087
2995
|
const operationsByTag$2 = {
|
2996
|
+
tasks: { getTasks, getTaskStatus },
|
2997
|
+
cluster: {
|
2998
|
+
listClusterBranches,
|
2999
|
+
listClusterExtensions,
|
3000
|
+
installClusterExtension,
|
3001
|
+
dropClusterExtension,
|
3002
|
+
getClusterMetrics
|
3003
|
+
},
|
1088
3004
|
migrations: {
|
1089
3005
|
applyMigration,
|
3006
|
+
startMigration,
|
3007
|
+
completeMigration,
|
3008
|
+
rollbackMigration,
|
1090
3009
|
adaptTable,
|
3010
|
+
adaptAllTables,
|
1091
3011
|
getBranchMigrationJobStatus,
|
3012
|
+
getMigrationJobs,
|
1092
3013
|
getMigrationJobStatus,
|
1093
3014
|
getMigrationHistory,
|
1094
3015
|
getSchema,
|
3016
|
+
getSchemas,
|
1095
3017
|
getBranchMigrationHistory,
|
1096
3018
|
getBranchMigrationPlan,
|
1097
3019
|
executeBranchMigrationPlan,
|
@@ -1105,10 +3027,13 @@ const operationsByTag$2 = {
|
|
1105
3027
|
},
|
1106
3028
|
branch: {
|
1107
3029
|
getBranchList,
|
3030
|
+
createBranchAsync,
|
1108
3031
|
getBranchDetails,
|
1109
3032
|
createBranch,
|
1110
3033
|
deleteBranch,
|
1111
3034
|
copyBranch,
|
3035
|
+
getBranchMoveStatus,
|
3036
|
+
moveBranch,
|
1112
3037
|
updateBranchMetadata,
|
1113
3038
|
getBranchMetadata,
|
1114
3039
|
getBranchStats,
|
@@ -1150,7 +3075,16 @@ const operationsByTag$2 = {
|
|
1150
3075
|
deleteRecord,
|
1151
3076
|
bulkInsertTableRecords
|
1152
3077
|
},
|
1153
|
-
files: {
|
3078
|
+
files: {
|
3079
|
+
getFileItem,
|
3080
|
+
putFileItem,
|
3081
|
+
deleteFileItem,
|
3082
|
+
getFile,
|
3083
|
+
putFile,
|
3084
|
+
deleteFile,
|
3085
|
+
fileAccess,
|
3086
|
+
fileUpload
|
3087
|
+
},
|
1154
3088
|
searchAndFilter: {
|
1155
3089
|
queryTable,
|
1156
3090
|
searchBranch,
|
@@ -1161,7 +3095,7 @@ const operationsByTag$2 = {
|
|
1161
3095
|
summarizeTable,
|
1162
3096
|
aggregateTable
|
1163
3097
|
},
|
1164
|
-
sql: { sqlQuery }
|
3098
|
+
sql: { sqlQuery, sqlBatchQuery }
|
1165
3099
|
};
|
1166
3100
|
|
1167
3101
|
const controlPlaneFetch = async (options) => fetch$1({ ...options, endpoint: "controlPlane" });
|
@@ -1228,7 +3162,12 @@ const deleteOAuthAccessToken = (variables, signal) => controlPlaneFetch({
|
|
1228
3162
|
...variables,
|
1229
3163
|
signal
|
1230
3164
|
});
|
1231
|
-
const updateOAuthAccessToken = (variables, signal) => controlPlaneFetch({
|
3165
|
+
const updateOAuthAccessToken = (variables, signal) => controlPlaneFetch({
|
3166
|
+
url: "/user/oauth/tokens/{token}",
|
3167
|
+
method: "patch",
|
3168
|
+
...variables,
|
3169
|
+
signal
|
3170
|
+
});
|
1232
3171
|
const getWorkspacesList = (variables, signal) => controlPlaneFetch({
|
1233
3172
|
url: "/workspaces",
|
1234
3173
|
method: "get",
|
@@ -1259,49 +3198,150 @@ const deleteWorkspace = (variables, signal) => controlPlaneFetch({
|
|
1259
3198
|
...variables,
|
1260
3199
|
signal
|
1261
3200
|
});
|
1262
|
-
const getWorkspaceSettings = (variables, signal) => controlPlaneFetch({
|
1263
|
-
|
1264
|
-
|
1265
|
-
|
3201
|
+
const getWorkspaceSettings = (variables, signal) => controlPlaneFetch({
|
3202
|
+
url: "/workspaces/{workspaceId}/settings",
|
3203
|
+
method: "get",
|
3204
|
+
...variables,
|
3205
|
+
signal
|
3206
|
+
});
|
3207
|
+
const updateWorkspaceSettings = (variables, signal) => controlPlaneFetch({
|
3208
|
+
url: "/workspaces/{workspaceId}/settings",
|
3209
|
+
method: "patch",
|
3210
|
+
...variables,
|
3211
|
+
signal
|
3212
|
+
});
|
3213
|
+
const getWorkspaceMembersList = (variables, signal) => controlPlaneFetch({
|
3214
|
+
url: "/workspaces/{workspaceId}/members",
|
3215
|
+
method: "get",
|
3216
|
+
...variables,
|
3217
|
+
signal
|
3218
|
+
});
|
3219
|
+
const updateWorkspaceMemberRole = (variables, signal) => controlPlaneFetch({
|
3220
|
+
url: "/workspaces/{workspaceId}/members/{userId}",
|
3221
|
+
method: "put",
|
3222
|
+
...variables,
|
3223
|
+
signal
|
3224
|
+
});
|
1266
3225
|
const removeWorkspaceMember = (variables, signal) => controlPlaneFetch({
|
1267
3226
|
url: "/workspaces/{workspaceId}/members/{userId}",
|
1268
3227
|
method: "delete",
|
1269
3228
|
...variables,
|
1270
3229
|
signal
|
1271
3230
|
});
|
1272
|
-
const inviteWorkspaceMember = (variables, signal) => controlPlaneFetch({
|
1273
|
-
|
1274
|
-
|
1275
|
-
|
1276
|
-
|
1277
|
-
|
1278
|
-
const
|
3231
|
+
const inviteWorkspaceMember = (variables, signal) => controlPlaneFetch({
|
3232
|
+
url: "/workspaces/{workspaceId}/invites",
|
3233
|
+
method: "post",
|
3234
|
+
...variables,
|
3235
|
+
signal
|
3236
|
+
});
|
3237
|
+
const updateWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3238
|
+
url: "/workspaces/{workspaceId}/invites/{inviteId}",
|
3239
|
+
method: "patch",
|
3240
|
+
...variables,
|
3241
|
+
signal
|
3242
|
+
});
|
3243
|
+
const cancelWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3244
|
+
url: "/workspaces/{workspaceId}/invites/{inviteId}",
|
3245
|
+
method: "delete",
|
3246
|
+
...variables,
|
3247
|
+
signal
|
3248
|
+
});
|
3249
|
+
const acceptWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3250
|
+
url: "/workspaces/{workspaceId}/invites/{inviteKey}/accept",
|
3251
|
+
method: "post",
|
3252
|
+
...variables,
|
3253
|
+
signal
|
3254
|
+
});
|
3255
|
+
const resendWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3256
|
+
url: "/workspaces/{workspaceId}/invites/{inviteId}/resend",
|
3257
|
+
method: "post",
|
3258
|
+
...variables,
|
3259
|
+
signal
|
3260
|
+
});
|
3261
|
+
const listClusters = (variables, signal) => controlPlaneFetch({
|
3262
|
+
url: "/workspaces/{workspaceId}/clusters",
|
3263
|
+
method: "get",
|
3264
|
+
...variables,
|
3265
|
+
signal
|
3266
|
+
});
|
3267
|
+
const createCluster = (variables, signal) => controlPlaneFetch({
|
3268
|
+
url: "/workspaces/{workspaceId}/clusters",
|
3269
|
+
method: "post",
|
3270
|
+
...variables,
|
3271
|
+
signal
|
3272
|
+
});
|
1279
3273
|
const getCluster = (variables, signal) => controlPlaneFetch({
|
1280
3274
|
url: "/workspaces/{workspaceId}/clusters/{clusterId}",
|
1281
3275
|
method: "get",
|
1282
3276
|
...variables,
|
1283
3277
|
signal
|
1284
3278
|
});
|
1285
|
-
const updateCluster = (variables, signal) => controlPlaneFetch({
|
3279
|
+
const updateCluster = (variables, signal) => controlPlaneFetch({
|
3280
|
+
url: "/workspaces/{workspaceId}/clusters/{clusterId}",
|
3281
|
+
method: "patch",
|
3282
|
+
...variables,
|
3283
|
+
signal
|
3284
|
+
});
|
3285
|
+
const deleteCluster = (variables, signal) => controlPlaneFetch({
|
3286
|
+
url: "/workspaces/{workspaceId}/clusters/{clusterId}",
|
3287
|
+
method: "delete",
|
3288
|
+
...variables,
|
3289
|
+
signal
|
3290
|
+
});
|
1286
3291
|
const getDatabaseList = (variables, signal) => controlPlaneFetch({
|
1287
3292
|
url: "/workspaces/{workspaceId}/dbs",
|
1288
3293
|
method: "get",
|
1289
3294
|
...variables,
|
1290
3295
|
signal
|
1291
3296
|
});
|
1292
|
-
const createDatabase = (variables, signal) => controlPlaneFetch({
|
3297
|
+
const createDatabase = (variables, signal) => controlPlaneFetch({
|
3298
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
3299
|
+
method: "put",
|
3300
|
+
...variables,
|
3301
|
+
signal
|
3302
|
+
});
|
1293
3303
|
const deleteDatabase = (variables, signal) => controlPlaneFetch({
|
1294
3304
|
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
1295
3305
|
method: "delete",
|
1296
3306
|
...variables,
|
1297
3307
|
signal
|
1298
3308
|
});
|
1299
|
-
const getDatabaseMetadata = (variables, signal) => controlPlaneFetch({
|
1300
|
-
|
1301
|
-
|
1302
|
-
|
1303
|
-
|
1304
|
-
|
3309
|
+
const getDatabaseMetadata = (variables, signal) => controlPlaneFetch({
|
3310
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
3311
|
+
method: "get",
|
3312
|
+
...variables,
|
3313
|
+
signal
|
3314
|
+
});
|
3315
|
+
const updateDatabaseMetadata = (variables, signal) => controlPlaneFetch({
|
3316
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
3317
|
+
method: "patch",
|
3318
|
+
...variables,
|
3319
|
+
signal
|
3320
|
+
});
|
3321
|
+
const renameDatabase = (variables, signal) => controlPlaneFetch({
|
3322
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/rename",
|
3323
|
+
method: "post",
|
3324
|
+
...variables,
|
3325
|
+
signal
|
3326
|
+
});
|
3327
|
+
const getDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
|
3328
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
|
3329
|
+
method: "get",
|
3330
|
+
...variables,
|
3331
|
+
signal
|
3332
|
+
});
|
3333
|
+
const updateDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
|
3334
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
|
3335
|
+
method: "put",
|
3336
|
+
...variables,
|
3337
|
+
signal
|
3338
|
+
});
|
3339
|
+
const deleteDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
|
3340
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
|
3341
|
+
method: "delete",
|
3342
|
+
...variables,
|
3343
|
+
signal
|
3344
|
+
});
|
1305
3345
|
const listRegions = (variables, signal) => controlPlaneFetch({
|
1306
3346
|
url: "/workspaces/{workspaceId}/regions",
|
1307
3347
|
method: "get",
|
@@ -1339,7 +3379,13 @@ const operationsByTag$1 = {
|
|
1339
3379
|
acceptWorkspaceMemberInvite,
|
1340
3380
|
resendWorkspaceMemberInvite
|
1341
3381
|
},
|
1342
|
-
xbcontrolOther: {
|
3382
|
+
xbcontrolOther: {
|
3383
|
+
listClusters,
|
3384
|
+
createCluster,
|
3385
|
+
getCluster,
|
3386
|
+
updateCluster,
|
3387
|
+
deleteCluster
|
3388
|
+
},
|
1343
3389
|
databases: {
|
1344
3390
|
getDatabaseList,
|
1345
3391
|
createDatabase,
|
@@ -1359,7 +3405,7 @@ const operationsByTag = deepMerge(operationsByTag$2, operationsByTag$1);
|
|
1359
3405
|
const buildApiClient = () => class {
|
1360
3406
|
constructor(options = {}) {
|
1361
3407
|
const provider = options.host ?? "production";
|
1362
|
-
const apiKey = options.apiKey
|
3408
|
+
const apiKey = options.apiKey;
|
1363
3409
|
const trace = options.trace ?? defaultTrace;
|
1364
3410
|
const clientID = generateUUID();
|
1365
3411
|
if (!apiKey) {
|
@@ -1426,8 +3472,7 @@ function buildTransformString(transformations) {
|
|
1426
3472
|
).join(",");
|
1427
3473
|
}
|
1428
3474
|
function transformImage(url, ...transformations) {
|
1429
|
-
if (!isDefined(url))
|
1430
|
-
return void 0;
|
3475
|
+
if (!isDefined(url)) return void 0;
|
1431
3476
|
const newTransformations = buildTransformString(transformations);
|
1432
3477
|
const { hostname, pathname, search } = new URL(url);
|
1433
3478
|
const pathParts = pathname.split("/");
|
@@ -1540,8 +3585,7 @@ class XataFile {
|
|
1540
3585
|
}
|
1541
3586
|
}
|
1542
3587
|
const parseInputFileEntry = async (entry) => {
|
1543
|
-
if (!isDefined(entry))
|
1544
|
-
return null;
|
3588
|
+
if (!isDefined(entry)) return null;
|
1545
3589
|
const { id, name, mediaType, base64Content, enablePublicUrl, signedUrlTimeout, uploadUrlTimeout } = await entry;
|
1546
3590
|
return compactObject({
|
1547
3591
|
id,
|
@@ -1556,24 +3600,19 @@ const parseInputFileEntry = async (entry) => {
|
|
1556
3600
|
};
|
1557
3601
|
|
1558
3602
|
function cleanFilter(filter) {
|
1559
|
-
if (!isDefined(filter))
|
1560
|
-
|
1561
|
-
if (!isObject(filter))
|
1562
|
-
return filter;
|
3603
|
+
if (!isDefined(filter)) return void 0;
|
3604
|
+
if (!isObject(filter)) return filter;
|
1563
3605
|
const values = Object.fromEntries(
|
1564
3606
|
Object.entries(filter).reduce((acc, [key, value]) => {
|
1565
|
-
if (!isDefined(value))
|
1566
|
-
return acc;
|
3607
|
+
if (!isDefined(value)) return acc;
|
1567
3608
|
if (Array.isArray(value)) {
|
1568
3609
|
const clean = value.map((item) => cleanFilter(item)).filter((item) => isDefined(item));
|
1569
|
-
if (clean.length === 0)
|
1570
|
-
return acc;
|
3610
|
+
if (clean.length === 0) return acc;
|
1571
3611
|
return [...acc, [key, clean]];
|
1572
3612
|
}
|
1573
3613
|
if (isObject(value)) {
|
1574
3614
|
const clean = cleanFilter(value);
|
1575
|
-
if (!isDefined(clean))
|
1576
|
-
return acc;
|
3615
|
+
if (!isDefined(clean)) return acc;
|
1577
3616
|
return [...acc, [key, clean]];
|
1578
3617
|
}
|
1579
3618
|
return [...acc, [key, value]];
|
@@ -1583,10 +3622,8 @@ function cleanFilter(filter) {
|
|
1583
3622
|
}
|
1584
3623
|
|
1585
3624
|
function stringifyJson(value) {
|
1586
|
-
if (!isDefined(value))
|
1587
|
-
|
1588
|
-
if (isString(value))
|
1589
|
-
return value;
|
3625
|
+
if (!isDefined(value)) return value;
|
3626
|
+
if (isString(value)) return value;
|
1590
3627
|
try {
|
1591
3628
|
return JSON.stringify(value);
|
1592
3629
|
} catch (e) {
|
@@ -1601,28 +3638,17 @@ function parseJson(value) {
|
|
1601
3638
|
}
|
1602
3639
|
}
|
1603
3640
|
|
1604
|
-
var
|
1605
|
-
|
1606
|
-
throw TypeError("Cannot " + msg);
|
1607
|
-
};
|
1608
|
-
var __privateGet$4 = (obj, member, getter) => {
|
1609
|
-
__accessCheck$5(obj, member, "read from private field");
|
1610
|
-
return getter ? getter.call(obj) : member.get(obj);
|
1611
|
-
};
|
1612
|
-
var __privateAdd$5 = (obj, member, value) => {
|
1613
|
-
if (member.has(obj))
|
1614
|
-
throw TypeError("Cannot add the same private member more than once");
|
1615
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
1616
|
-
};
|
1617
|
-
var __privateSet$3 = (obj, member, value, setter) => {
|
1618
|
-
__accessCheck$5(obj, member, "write to private field");
|
1619
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
1620
|
-
return value;
|
3641
|
+
var __typeError$5 = (msg) => {
|
3642
|
+
throw TypeError(msg);
|
1621
3643
|
};
|
3644
|
+
var __accessCheck$5 = (obj, member, msg) => member.has(obj) || __typeError$5("Cannot " + msg);
|
3645
|
+
var __privateGet$4 = (obj, member, getter) => (__accessCheck$5(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
3646
|
+
var __privateAdd$5 = (obj, member, value) => member.has(obj) ? __typeError$5("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
3647
|
+
var __privateSet$3 = (obj, member, value, setter) => (__accessCheck$5(obj, member, "write to private field"), member.set(obj, value), value);
|
1622
3648
|
var _query, _page;
|
1623
3649
|
class Page {
|
1624
3650
|
constructor(query, meta, records = []) {
|
1625
|
-
__privateAdd$5(this, _query
|
3651
|
+
__privateAdd$5(this, _query);
|
1626
3652
|
__privateSet$3(this, _query, query);
|
1627
3653
|
this.meta = meta;
|
1628
3654
|
this.records = new PageRecordArray(this, records);
|
@@ -1709,7 +3735,7 @@ class RecordArray extends Array {
|
|
1709
3735
|
const _PageRecordArray = class _PageRecordArray extends Array {
|
1710
3736
|
constructor(...args) {
|
1711
3737
|
super(..._PageRecordArray.parseConstructorParams(...args));
|
1712
|
-
__privateAdd$5(this, _page
|
3738
|
+
__privateAdd$5(this, _page);
|
1713
3739
|
__privateSet$3(this, _page, isObject(args[0]?.meta) ? args[0] : { meta: { page: { cursor: "", more: false } }, records: [] });
|
1714
3740
|
}
|
1715
3741
|
static parseConstructorParams(...args) {
|
@@ -1780,34 +3806,20 @@ const _PageRecordArray = class _PageRecordArray extends Array {
|
|
1780
3806
|
_page = new WeakMap();
|
1781
3807
|
let PageRecordArray = _PageRecordArray;
|
1782
3808
|
|
1783
|
-
var
|
1784
|
-
|
1785
|
-
throw TypeError("Cannot " + msg);
|
3809
|
+
var __typeError$4 = (msg) => {
|
3810
|
+
throw TypeError(msg);
|
1786
3811
|
};
|
1787
|
-
var
|
1788
|
-
|
1789
|
-
|
1790
|
-
|
1791
|
-
var
|
1792
|
-
|
1793
|
-
throw TypeError("Cannot add the same private member more than once");
|
1794
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
1795
|
-
};
|
1796
|
-
var __privateSet$2 = (obj, member, value, setter) => {
|
1797
|
-
__accessCheck$4(obj, member, "write to private field");
|
1798
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
1799
|
-
return value;
|
1800
|
-
};
|
1801
|
-
var __privateMethod$3 = (obj, member, method) => {
|
1802
|
-
__accessCheck$4(obj, member, "access private method");
|
1803
|
-
return method;
|
1804
|
-
};
|
1805
|
-
var _table$1, _repository, _data, _cleanFilterConstraint, cleanFilterConstraint_fn;
|
3812
|
+
var __accessCheck$4 = (obj, member, msg) => member.has(obj) || __typeError$4("Cannot " + msg);
|
3813
|
+
var __privateGet$3 = (obj, member, getter) => (__accessCheck$4(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
3814
|
+
var __privateAdd$4 = (obj, member, value) => member.has(obj) ? __typeError$4("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
3815
|
+
var __privateSet$2 = (obj, member, value, setter) => (__accessCheck$4(obj, member, "write to private field"), member.set(obj, value), value);
|
3816
|
+
var __privateMethod$3 = (obj, member, method) => (__accessCheck$4(obj, member, "access private method"), method);
|
3817
|
+
var _table$1, _repository, _data, _Query_instances, cleanFilterConstraint_fn;
|
1806
3818
|
const _Query = class _Query {
|
1807
3819
|
constructor(repository, table, data, rawParent) {
|
1808
|
-
__privateAdd$4(this,
|
1809
|
-
__privateAdd$4(this, _table$1
|
1810
|
-
__privateAdd$4(this, _repository
|
3820
|
+
__privateAdd$4(this, _Query_instances);
|
3821
|
+
__privateAdd$4(this, _table$1);
|
3822
|
+
__privateAdd$4(this, _repository);
|
1811
3823
|
__privateAdd$4(this, _data, { filter: {} });
|
1812
3824
|
// Implements pagination
|
1813
3825
|
this.meta = { page: { cursor: "start", more: true, size: PAGINATION_DEFAULT_SIZE } };
|
@@ -1885,12 +3897,12 @@ const _Query = class _Query {
|
|
1885
3897
|
filter(a, b) {
|
1886
3898
|
if (arguments.length === 1) {
|
1887
3899
|
const constraints = Object.entries(a ?? {}).map(([column, constraint]) => ({
|
1888
|
-
[column]: __privateMethod$3(this,
|
3900
|
+
[column]: __privateMethod$3(this, _Query_instances, cleanFilterConstraint_fn).call(this, column, constraint)
|
1889
3901
|
}));
|
1890
3902
|
const $all = compact([__privateGet$3(this, _data).filter?.$all].flat().concat(constraints));
|
1891
3903
|
return new _Query(__privateGet$3(this, _repository), __privateGet$3(this, _table$1), { filter: { $all } }, __privateGet$3(this, _data));
|
1892
3904
|
} else {
|
1893
|
-
const constraints = isDefined(a) && isDefined(b) ? [{ [a]: __privateMethod$3(this,
|
3905
|
+
const constraints = isDefined(a) && isDefined(b) ? [{ [a]: __privateMethod$3(this, _Query_instances, cleanFilterConstraint_fn).call(this, a, b) }] : void 0;
|
1894
3906
|
const $all = compact([__privateGet$3(this, _data).filter?.$all].flat().concat(constraints));
|
1895
3907
|
return new _Query(__privateGet$3(this, _repository), __privateGet$3(this, _table$1), { filter: { $all } }, __privateGet$3(this, _data));
|
1896
3908
|
}
|
@@ -1969,8 +3981,7 @@ const _Query = class _Query {
|
|
1969
3981
|
}
|
1970
3982
|
async getFirstOrThrow(options = {}) {
|
1971
3983
|
const records = await this.getMany({ ...options, pagination: { size: 1 } });
|
1972
|
-
if (records[0] === void 0)
|
1973
|
-
throw new Error("No results found.");
|
3984
|
+
if (records[0] === void 0) throw new Error("No results found.");
|
1974
3985
|
return records[0];
|
1975
3986
|
}
|
1976
3987
|
async summarize(params = {}) {
|
@@ -2025,7 +4036,7 @@ const _Query = class _Query {
|
|
2025
4036
|
_table$1 = new WeakMap();
|
2026
4037
|
_repository = new WeakMap();
|
2027
4038
|
_data = new WeakMap();
|
2028
|
-
|
4039
|
+
_Query_instances = new WeakSet();
|
2029
4040
|
cleanFilterConstraint_fn = function(column, value) {
|
2030
4041
|
const columnType = __privateGet$3(this, _table$1).schema?.columns.find(({ name }) => name === column)?.type;
|
2031
4042
|
if (columnType === "multiple" && (isString(value) || isStringArray(value))) {
|
@@ -2086,8 +4097,7 @@ function isSortFilterString(value) {
|
|
2086
4097
|
}
|
2087
4098
|
function isSortFilterBase(filter) {
|
2088
4099
|
return isObject(filter) && Object.entries(filter).every(([key, value]) => {
|
2089
|
-
if (key === "*")
|
2090
|
-
return value === "random";
|
4100
|
+
if (key === "*") return value === "random";
|
2091
4101
|
return value === "asc" || value === "desc";
|
2092
4102
|
});
|
2093
4103
|
}
|
@@ -2108,29 +4118,15 @@ function buildSortFilter(filter) {
|
|
2108
4118
|
}
|
2109
4119
|
}
|
2110
4120
|
|
2111
|
-
var
|
2112
|
-
|
2113
|
-
throw TypeError("Cannot " + msg);
|
2114
|
-
};
|
2115
|
-
var __privateGet$2 = (obj, member, getter) => {
|
2116
|
-
__accessCheck$3(obj, member, "read from private field");
|
2117
|
-
return getter ? getter.call(obj) : member.get(obj);
|
2118
|
-
};
|
2119
|
-
var __privateAdd$3 = (obj, member, value) => {
|
2120
|
-
if (member.has(obj))
|
2121
|
-
throw TypeError("Cannot add the same private member more than once");
|
2122
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
2123
|
-
};
|
2124
|
-
var __privateSet$1 = (obj, member, value, setter) => {
|
2125
|
-
__accessCheck$3(obj, member, "write to private field");
|
2126
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
2127
|
-
return value;
|
2128
|
-
};
|
2129
|
-
var __privateMethod$2 = (obj, member, method) => {
|
2130
|
-
__accessCheck$3(obj, member, "access private method");
|
2131
|
-
return method;
|
4121
|
+
var __typeError$3 = (msg) => {
|
4122
|
+
throw TypeError(msg);
|
2132
4123
|
};
|
2133
|
-
var
|
4124
|
+
var __accessCheck$3 = (obj, member, msg) => member.has(obj) || __typeError$3("Cannot " + msg);
|
4125
|
+
var __privateGet$2 = (obj, member, getter) => (__accessCheck$3(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
4126
|
+
var __privateAdd$3 = (obj, member, value) => member.has(obj) ? __typeError$3("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
4127
|
+
var __privateSet$1 = (obj, member, value, setter) => (__accessCheck$3(obj, member, "write to private field"), member.set(obj, value), value);
|
4128
|
+
var __privateMethod$2 = (obj, member, method) => (__accessCheck$3(obj, member, "access private method"), method);
|
4129
|
+
var _table, _getFetchProps, _db, _schemaTables, _trace, _RestRepository_instances, insertRecordWithoutId_fn, insertRecordWithId_fn, insertRecords_fn, updateRecordWithID_fn, updateRecords_fn, upsertRecordWithID_fn, deleteRecord_fn, deleteRecords_fn, getSchemaTables_fn, transformObjectToApi_fn;
|
2134
4130
|
const BULK_OPERATION_MAX_SIZE = 1e3;
|
2135
4131
|
class Repository extends Query {
|
2136
4132
|
}
|
@@ -2141,21 +4137,12 @@ class RestRepository extends Query {
|
|
2141
4137
|
{ name: options.table, schema: options.schemaTables?.find((table) => table.name === options.table) },
|
2142
4138
|
{}
|
2143
4139
|
);
|
2144
|
-
__privateAdd$3(this,
|
2145
|
-
__privateAdd$3(this,
|
2146
|
-
__privateAdd$3(this,
|
2147
|
-
__privateAdd$3(this,
|
2148
|
-
__privateAdd$3(this,
|
2149
|
-
__privateAdd$3(this,
|
2150
|
-
__privateAdd$3(this, _deleteRecord);
|
2151
|
-
__privateAdd$3(this, _deleteRecords);
|
2152
|
-
__privateAdd$3(this, _getSchemaTables);
|
2153
|
-
__privateAdd$3(this, _transformObjectToApi);
|
2154
|
-
__privateAdd$3(this, _table, void 0);
|
2155
|
-
__privateAdd$3(this, _getFetchProps, void 0);
|
2156
|
-
__privateAdd$3(this, _db, void 0);
|
2157
|
-
__privateAdd$3(this, _schemaTables, void 0);
|
2158
|
-
__privateAdd$3(this, _trace, void 0);
|
4140
|
+
__privateAdd$3(this, _RestRepository_instances);
|
4141
|
+
__privateAdd$3(this, _table);
|
4142
|
+
__privateAdd$3(this, _getFetchProps);
|
4143
|
+
__privateAdd$3(this, _db);
|
4144
|
+
__privateAdd$3(this, _schemaTables);
|
4145
|
+
__privateAdd$3(this, _trace);
|
2159
4146
|
__privateSet$1(this, _table, options.table);
|
2160
4147
|
__privateSet$1(this, _db, options.db);
|
2161
4148
|
__privateSet$1(this, _schemaTables, options.schemaTables);
|
@@ -2174,31 +4161,28 @@ class RestRepository extends Query {
|
|
2174
4161
|
return __privateGet$2(this, _trace).call(this, "create", async () => {
|
2175
4162
|
const ifVersion = parseIfVersion(b, c, d);
|
2176
4163
|
if (Array.isArray(a)) {
|
2177
|
-
if (a.length === 0)
|
2178
|
-
|
2179
|
-
const ids = await __privateMethod$2(this, _insertRecords, insertRecords_fn).call(this, a, { ifVersion, createOnly: true });
|
4164
|
+
if (a.length === 0) return [];
|
4165
|
+
const ids = await __privateMethod$2(this, _RestRepository_instances, insertRecords_fn).call(this, a, { ifVersion, createOnly: true });
|
2180
4166
|
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
2181
4167
|
const result = await this.read(ids, columns);
|
2182
4168
|
return result;
|
2183
4169
|
}
|
2184
4170
|
if (isString(a) && isObject(b)) {
|
2185
|
-
if (a === "")
|
2186
|
-
throw new Error("The id can't be empty");
|
4171
|
+
if (a === "") throw new Error("The id can't be empty");
|
2187
4172
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2188
|
-
return await __privateMethod$2(this,
|
4173
|
+
return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: true, ifVersion });
|
2189
4174
|
}
|
2190
4175
|
if (isObject(a) && isString(a.xata_id)) {
|
2191
|
-
if (a.xata_id === "")
|
2192
|
-
throw new Error("The id can't be empty");
|
4176
|
+
if (a.xata_id === "") throw new Error("The id can't be empty");
|
2193
4177
|
const columns = isValidSelectableColumns(b) ? b : void 0;
|
2194
|
-
return await __privateMethod$2(this,
|
4178
|
+
return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, {
|
2195
4179
|
createOnly: true,
|
2196
4180
|
ifVersion
|
2197
4181
|
});
|
2198
4182
|
}
|
2199
4183
|
if (isObject(a)) {
|
2200
4184
|
const columns = isValidSelectableColumns(b) ? b : void 0;
|
2201
|
-
return __privateMethod$2(this,
|
4185
|
+
return __privateMethod$2(this, _RestRepository_instances, insertRecordWithoutId_fn).call(this, a, columns);
|
2202
4186
|
}
|
2203
4187
|
throw new Error("Invalid arguments for create method");
|
2204
4188
|
});
|
@@ -2207,8 +4191,7 @@ class RestRepository extends Query {
|
|
2207
4191
|
return __privateGet$2(this, _trace).call(this, "read", async () => {
|
2208
4192
|
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
2209
4193
|
if (Array.isArray(a)) {
|
2210
|
-
if (a.length === 0)
|
2211
|
-
return [];
|
4194
|
+
if (a.length === 0) return [];
|
2212
4195
|
const ids = a.map((item) => extractId(item));
|
2213
4196
|
const finalObjects = await this.getAll({ filter: { xata_id: { $any: compact(ids) } }, columns });
|
2214
4197
|
const dictionary = finalObjects.reduce((acc, object) => {
|
@@ -2231,7 +4214,7 @@ class RestRepository extends Query {
|
|
2231
4214
|
queryParams: { columns },
|
2232
4215
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2233
4216
|
});
|
2234
|
-
const schemaTables = await __privateMethod$2(this,
|
4217
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2235
4218
|
return initObject(
|
2236
4219
|
__privateGet$2(this, _db),
|
2237
4220
|
schemaTables,
|
@@ -2272,11 +4255,10 @@ class RestRepository extends Query {
|
|
2272
4255
|
return __privateGet$2(this, _trace).call(this, "update", async () => {
|
2273
4256
|
const ifVersion = parseIfVersion(b, c, d);
|
2274
4257
|
if (Array.isArray(a)) {
|
2275
|
-
if (a.length === 0)
|
2276
|
-
return [];
|
4258
|
+
if (a.length === 0) return [];
|
2277
4259
|
const existing = await this.read(a, ["xata_id"]);
|
2278
4260
|
const updates = a.filter((_item, index) => existing[index] !== null);
|
2279
|
-
await __privateMethod$2(this,
|
4261
|
+
await __privateMethod$2(this, _RestRepository_instances, updateRecords_fn).call(this, updates, {
|
2280
4262
|
ifVersion,
|
2281
4263
|
upsert: false
|
2282
4264
|
});
|
@@ -2287,15 +4269,14 @@ class RestRepository extends Query {
|
|
2287
4269
|
try {
|
2288
4270
|
if (isString(a) && isObject(b)) {
|
2289
4271
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2290
|
-
return await __privateMethod$2(this,
|
4272
|
+
return await __privateMethod$2(this, _RestRepository_instances, updateRecordWithID_fn).call(this, a, b, columns, { ifVersion });
|
2291
4273
|
}
|
2292
4274
|
if (isObject(a) && isString(a.xata_id)) {
|
2293
4275
|
const columns = isValidSelectableColumns(b) ? b : void 0;
|
2294
|
-
return await __privateMethod$2(this,
|
4276
|
+
return await __privateMethod$2(this, _RestRepository_instances, updateRecordWithID_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, { ifVersion });
|
2295
4277
|
}
|
2296
4278
|
} catch (error) {
|
2297
|
-
if (error.status === 422)
|
2298
|
-
return null;
|
4279
|
+
if (error.status === 422) return null;
|
2299
4280
|
throw error;
|
2300
4281
|
}
|
2301
4282
|
throw new Error("Invalid arguments for update method");
|
@@ -2324,9 +4305,8 @@ class RestRepository extends Query {
|
|
2324
4305
|
return __privateGet$2(this, _trace).call(this, "createOrUpdate", async () => {
|
2325
4306
|
const ifVersion = parseIfVersion(b, c, d);
|
2326
4307
|
if (Array.isArray(a)) {
|
2327
|
-
if (a.length === 0)
|
2328
|
-
|
2329
|
-
await __privateMethod$2(this, _updateRecords, updateRecords_fn).call(this, a, {
|
4308
|
+
if (a.length === 0) return [];
|
4309
|
+
await __privateMethod$2(this, _RestRepository_instances, updateRecords_fn).call(this, a, {
|
2330
4310
|
ifVersion,
|
2331
4311
|
upsert: true
|
2332
4312
|
});
|
@@ -2335,16 +4315,14 @@ class RestRepository extends Query {
|
|
2335
4315
|
return result;
|
2336
4316
|
}
|
2337
4317
|
if (isString(a) && isObject(b)) {
|
2338
|
-
if (a === "")
|
2339
|
-
throw new Error("The id can't be empty");
|
4318
|
+
if (a === "") throw new Error("The id can't be empty");
|
2340
4319
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2341
|
-
return await __privateMethod$2(this,
|
4320
|
+
return await __privateMethod$2(this, _RestRepository_instances, upsertRecordWithID_fn).call(this, a, b, columns, { ifVersion });
|
2342
4321
|
}
|
2343
4322
|
if (isObject(a) && isString(a.xata_id)) {
|
2344
|
-
if (a.xata_id === "")
|
2345
|
-
throw new Error("The id can't be empty");
|
4323
|
+
if (a.xata_id === "") throw new Error("The id can't be empty");
|
2346
4324
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2347
|
-
return await __privateMethod$2(this,
|
4325
|
+
return await __privateMethod$2(this, _RestRepository_instances, upsertRecordWithID_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, { ifVersion });
|
2348
4326
|
}
|
2349
4327
|
if (!isDefined(a) && isObject(b)) {
|
2350
4328
|
return await this.create(b, c);
|
@@ -2359,24 +4337,21 @@ class RestRepository extends Query {
|
|
2359
4337
|
return __privateGet$2(this, _trace).call(this, "createOrReplace", async () => {
|
2360
4338
|
const ifVersion = parseIfVersion(b, c, d);
|
2361
4339
|
if (Array.isArray(a)) {
|
2362
|
-
if (a.length === 0)
|
2363
|
-
|
2364
|
-
const ids = await __privateMethod$2(this, _insertRecords, insertRecords_fn).call(this, a, { ifVersion, createOnly: false });
|
4340
|
+
if (a.length === 0) return [];
|
4341
|
+
const ids = await __privateMethod$2(this, _RestRepository_instances, insertRecords_fn).call(this, a, { ifVersion, createOnly: false });
|
2365
4342
|
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
2366
4343
|
const result = await this.read(ids, columns);
|
2367
4344
|
return result;
|
2368
4345
|
}
|
2369
4346
|
if (isString(a) && isObject(b)) {
|
2370
|
-
if (a === "")
|
2371
|
-
throw new Error("The id can't be empty");
|
4347
|
+
if (a === "") throw new Error("The id can't be empty");
|
2372
4348
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2373
|
-
return await __privateMethod$2(this,
|
4349
|
+
return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: false, ifVersion });
|
2374
4350
|
}
|
2375
4351
|
if (isObject(a) && isString(a.xata_id)) {
|
2376
|
-
if (a.xata_id === "")
|
2377
|
-
throw new Error("The id can't be empty");
|
4352
|
+
if (a.xata_id === "") throw new Error("The id can't be empty");
|
2378
4353
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2379
|
-
return await __privateMethod$2(this,
|
4354
|
+
return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, {
|
2380
4355
|
createOnly: false,
|
2381
4356
|
ifVersion
|
2382
4357
|
});
|
@@ -2393,25 +4368,22 @@ class RestRepository extends Query {
|
|
2393
4368
|
async delete(a, b) {
|
2394
4369
|
return __privateGet$2(this, _trace).call(this, "delete", async () => {
|
2395
4370
|
if (Array.isArray(a)) {
|
2396
|
-
if (a.length === 0)
|
2397
|
-
return [];
|
4371
|
+
if (a.length === 0) return [];
|
2398
4372
|
const ids = a.map((o) => {
|
2399
|
-
if (isString(o))
|
2400
|
-
|
2401
|
-
if (isString(o.xata_id))
|
2402
|
-
return o.xata_id;
|
4373
|
+
if (isString(o)) return o;
|
4374
|
+
if (isString(o.xata_id)) return o.xata_id;
|
2403
4375
|
throw new Error("Invalid arguments for delete method");
|
2404
4376
|
});
|
2405
4377
|
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
2406
4378
|
const result = await this.read(a, columns);
|
2407
|
-
await __privateMethod$2(this,
|
4379
|
+
await __privateMethod$2(this, _RestRepository_instances, deleteRecords_fn).call(this, ids);
|
2408
4380
|
return result;
|
2409
4381
|
}
|
2410
4382
|
if (isString(a)) {
|
2411
|
-
return __privateMethod$2(this,
|
4383
|
+
return __privateMethod$2(this, _RestRepository_instances, deleteRecord_fn).call(this, a, b);
|
2412
4384
|
}
|
2413
4385
|
if (isObject(a) && isString(a.xata_id)) {
|
2414
|
-
return __privateMethod$2(this,
|
4386
|
+
return __privateMethod$2(this, _RestRepository_instances, deleteRecord_fn).call(this, a.xata_id, b);
|
2415
4387
|
}
|
2416
4388
|
throw new Error("Invalid arguments for delete method");
|
2417
4389
|
});
|
@@ -2455,7 +4427,7 @@ class RestRepository extends Query {
|
|
2455
4427
|
},
|
2456
4428
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2457
4429
|
});
|
2458
|
-
const schemaTables = await __privateMethod$2(this,
|
4430
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2459
4431
|
return {
|
2460
4432
|
records: records.map((item) => initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), item, ["*"])),
|
2461
4433
|
totalCount
|
@@ -2480,7 +4452,7 @@ class RestRepository extends Query {
|
|
2480
4452
|
},
|
2481
4453
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2482
4454
|
});
|
2483
|
-
const schemaTables = await __privateMethod$2(this,
|
4455
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2484
4456
|
return {
|
2485
4457
|
records: records.map((item) => initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), item, ["*"])),
|
2486
4458
|
totalCount
|
@@ -2522,7 +4494,7 @@ class RestRepository extends Query {
|
|
2522
4494
|
fetchOptions: data.fetchOptions,
|
2523
4495
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2524
4496
|
});
|
2525
|
-
const schemaTables = await __privateMethod$2(this,
|
4497
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2526
4498
|
const records = objects.map(
|
2527
4499
|
(record) => initObject(
|
2528
4500
|
__privateGet$2(this, _db),
|
@@ -2556,7 +4528,7 @@ class RestRepository extends Query {
|
|
2556
4528
|
},
|
2557
4529
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2558
4530
|
});
|
2559
|
-
const schemaTables = await __privateMethod$2(this,
|
4531
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2560
4532
|
return {
|
2561
4533
|
...result,
|
2562
4534
|
summaries: result.summaries.map(
|
@@ -2604,9 +4576,9 @@ _getFetchProps = new WeakMap();
|
|
2604
4576
|
_db = new WeakMap();
|
2605
4577
|
_schemaTables = new WeakMap();
|
2606
4578
|
_trace = new WeakMap();
|
2607
|
-
|
4579
|
+
_RestRepository_instances = new WeakSet();
|
2608
4580
|
insertRecordWithoutId_fn = async function(object, columns = ["*"]) {
|
2609
|
-
const record = await __privateMethod$2(this,
|
4581
|
+
const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2610
4582
|
const response = await insertRecord({
|
2611
4583
|
pathParams: {
|
2612
4584
|
workspace: "{workspaceId}",
|
@@ -2618,14 +4590,12 @@ insertRecordWithoutId_fn = async function(object, columns = ["*"]) {
|
|
2618
4590
|
body: record,
|
2619
4591
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2620
4592
|
});
|
2621
|
-
const schemaTables = await __privateMethod$2(this,
|
4593
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2622
4594
|
return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
|
2623
4595
|
};
|
2624
|
-
_insertRecordWithId = new WeakSet();
|
2625
4596
|
insertRecordWithId_fn = async function(recordId, object, columns = ["*"], { createOnly, ifVersion }) {
|
2626
|
-
if (!recordId)
|
2627
|
-
|
2628
|
-
const record = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
|
4597
|
+
if (!recordId) return null;
|
4598
|
+
const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2629
4599
|
const response = await insertRecordWithID({
|
2630
4600
|
pathParams: {
|
2631
4601
|
workspace: "{workspaceId}",
|
@@ -2638,13 +4608,12 @@ insertRecordWithId_fn = async function(recordId, object, columns = ["*"], { crea
|
|
2638
4608
|
queryParams: { createOnly, columns, ifVersion },
|
2639
4609
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2640
4610
|
});
|
2641
|
-
const schemaTables = await __privateMethod$2(this,
|
4611
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2642
4612
|
return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
|
2643
4613
|
};
|
2644
|
-
_insertRecords = new WeakSet();
|
2645
4614
|
insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
|
2646
4615
|
const operations = await promiseMap(objects, async (object) => {
|
2647
|
-
const record = await __privateMethod$2(this,
|
4616
|
+
const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2648
4617
|
return { insert: { table: __privateGet$2(this, _table), record, createOnly, ifVersion } };
|
2649
4618
|
});
|
2650
4619
|
const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
|
@@ -2669,11 +4638,9 @@ insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
|
|
2669
4638
|
}
|
2670
4639
|
return ids;
|
2671
4640
|
};
|
2672
|
-
_updateRecordWithID = new WeakSet();
|
2673
4641
|
updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVersion }) {
|
2674
|
-
if (!recordId)
|
2675
|
-
|
2676
|
-
const { xata_id: _id, ...record } = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
|
4642
|
+
if (!recordId) return null;
|
4643
|
+
const { xata_id: _id, ...record } = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2677
4644
|
try {
|
2678
4645
|
const response = await updateRecordWithID({
|
2679
4646
|
pathParams: {
|
@@ -2687,7 +4654,7 @@ updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
|
|
2687
4654
|
body: record,
|
2688
4655
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2689
4656
|
});
|
2690
|
-
const schemaTables = await __privateMethod$2(this,
|
4657
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2691
4658
|
return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
|
2692
4659
|
} catch (e) {
|
2693
4660
|
if (isObject(e) && e.status === 404) {
|
@@ -2696,10 +4663,9 @@ updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
|
|
2696
4663
|
throw e;
|
2697
4664
|
}
|
2698
4665
|
};
|
2699
|
-
_updateRecords = new WeakSet();
|
2700
4666
|
updateRecords_fn = async function(objects, { ifVersion, upsert }) {
|
2701
4667
|
const operations = await promiseMap(objects, async ({ xata_id, ...object }) => {
|
2702
|
-
const fields = await __privateMethod$2(this,
|
4668
|
+
const fields = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2703
4669
|
return { update: { table: __privateGet$2(this, _table), id: xata_id, ifVersion, upsert, fields } };
|
2704
4670
|
});
|
2705
4671
|
const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
|
@@ -2724,10 +4690,8 @@ updateRecords_fn = async function(objects, { ifVersion, upsert }) {
|
|
2724
4690
|
}
|
2725
4691
|
return ids;
|
2726
4692
|
};
|
2727
|
-
_upsertRecordWithID = new WeakSet();
|
2728
4693
|
upsertRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVersion }) {
|
2729
|
-
if (!recordId)
|
2730
|
-
return null;
|
4694
|
+
if (!recordId) return null;
|
2731
4695
|
const response = await upsertRecordWithID({
|
2732
4696
|
pathParams: {
|
2733
4697
|
workspace: "{workspaceId}",
|
@@ -2740,13 +4704,11 @@ upsertRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
|
|
2740
4704
|
body: object,
|
2741
4705
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2742
4706
|
});
|
2743
|
-
const schemaTables = await __privateMethod$2(this,
|
4707
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2744
4708
|
return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
|
2745
4709
|
};
|
2746
|
-
_deleteRecord = new WeakSet();
|
2747
4710
|
deleteRecord_fn = async function(recordId, columns = ["*"]) {
|
2748
|
-
if (!recordId)
|
2749
|
-
return null;
|
4711
|
+
if (!recordId) return null;
|
2750
4712
|
try {
|
2751
4713
|
const response = await deleteRecord({
|
2752
4714
|
pathParams: {
|
@@ -2759,7 +4721,7 @@ deleteRecord_fn = async function(recordId, columns = ["*"]) {
|
|
2759
4721
|
queryParams: { columns },
|
2760
4722
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2761
4723
|
});
|
2762
|
-
const schemaTables = await __privateMethod$2(this,
|
4724
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2763
4725
|
return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
|
2764
4726
|
} catch (e) {
|
2765
4727
|
if (isObject(e) && e.status === 404) {
|
@@ -2768,7 +4730,6 @@ deleteRecord_fn = async function(recordId, columns = ["*"]) {
|
|
2768
4730
|
throw e;
|
2769
4731
|
}
|
2770
4732
|
};
|
2771
|
-
_deleteRecords = new WeakSet();
|
2772
4733
|
deleteRecords_fn = async function(recordIds) {
|
2773
4734
|
const chunkedOperations = chunk(
|
2774
4735
|
compact(recordIds).map((id) => ({ delete: { table: __privateGet$2(this, _table), id } })),
|
@@ -2786,10 +4747,8 @@ deleteRecords_fn = async function(recordIds) {
|
|
2786
4747
|
});
|
2787
4748
|
}
|
2788
4749
|
};
|
2789
|
-
_getSchemaTables = new WeakSet();
|
2790
4750
|
getSchemaTables_fn = async function() {
|
2791
|
-
if (__privateGet$2(this, _schemaTables))
|
2792
|
-
return __privateGet$2(this, _schemaTables);
|
4751
|
+
if (__privateGet$2(this, _schemaTables)) return __privateGet$2(this, _schemaTables);
|
2793
4752
|
const { schema } = await getBranchDetails({
|
2794
4753
|
pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
|
2795
4754
|
...__privateGet$2(this, _getFetchProps).call(this)
|
@@ -2797,16 +4756,13 @@ getSchemaTables_fn = async function() {
|
|
2797
4756
|
__privateSet$1(this, _schemaTables, schema.tables);
|
2798
4757
|
return schema.tables;
|
2799
4758
|
};
|
2800
|
-
_transformObjectToApi = new WeakSet();
|
2801
4759
|
transformObjectToApi_fn = async function(object) {
|
2802
|
-
const schemaTables = await __privateMethod$2(this,
|
4760
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2803
4761
|
const schema = schemaTables.find((table) => table.name === __privateGet$2(this, _table));
|
2804
|
-
if (!schema)
|
2805
|
-
throw new Error(`Table ${__privateGet$2(this, _table)} not found in schema`);
|
4762
|
+
if (!schema) throw new Error(`Table ${__privateGet$2(this, _table)} not found in schema`);
|
2806
4763
|
const result = {};
|
2807
4764
|
for (const [key, value] of Object.entries(object)) {
|
2808
|
-
if (["xata_version", "xata_createdat", "xata_updatedat"].includes(key))
|
2809
|
-
continue;
|
4765
|
+
if (["xata_version", "xata_createdat", "xata_updatedat"].includes(key)) continue;
|
2810
4766
|
const type = schema.columns.find((column) => column.name === key)?.type;
|
2811
4767
|
switch (type) {
|
2812
4768
|
case "link": {
|
@@ -2836,11 +4792,9 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
|
|
2836
4792
|
const data = {};
|
2837
4793
|
Object.assign(data, { ...object });
|
2838
4794
|
const { columns } = schemaTables.find(({ name }) => name === table) ?? {};
|
2839
|
-
if (!columns)
|
2840
|
-
console.error(`Table ${table} not found in schema`);
|
4795
|
+
if (!columns) console.error(`Table ${table} not found in schema`);
|
2841
4796
|
for (const column of columns ?? []) {
|
2842
|
-
if (!isValidColumn(selectedColumns, column))
|
2843
|
-
continue;
|
4797
|
+
if (!isValidColumn(selectedColumns, column)) continue;
|
2844
4798
|
const value = data[column.name];
|
2845
4799
|
switch (column.type) {
|
2846
4800
|
case "datetime": {
|
@@ -2875,7 +4829,7 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
|
|
2875
4829
|
selectedLinkColumns
|
2876
4830
|
);
|
2877
4831
|
} else {
|
2878
|
-
data[column.name] = null;
|
4832
|
+
data[column.name] = value ?? null;
|
2879
4833
|
}
|
2880
4834
|
break;
|
2881
4835
|
}
|
@@ -2926,15 +4880,12 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
|
|
2926
4880
|
return record;
|
2927
4881
|
};
|
2928
4882
|
function extractId(value) {
|
2929
|
-
if (isString(value))
|
2930
|
-
|
2931
|
-
if (isObject(value) && isString(value.xata_id))
|
2932
|
-
return value.xata_id;
|
4883
|
+
if (isString(value)) return value;
|
4884
|
+
if (isObject(value) && isString(value.xata_id)) return value.xata_id;
|
2933
4885
|
return void 0;
|
2934
4886
|
}
|
2935
4887
|
function isValidColumn(columns, column) {
|
2936
|
-
if (columns.includes("*"))
|
2937
|
-
return true;
|
4888
|
+
if (columns.includes("*")) return true;
|
2938
4889
|
return columns.filter((item) => isString(item) && item.startsWith(column.name)).length > 0;
|
2939
4890
|
}
|
2940
4891
|
function parseIfVersion(...args) {
|
@@ -2974,19 +4925,12 @@ const includesAll = (value) => ({ $includesAll: value });
|
|
2974
4925
|
const includesNone = (value) => ({ $includesNone: value });
|
2975
4926
|
const includesAny = (value) => ({ $includesAny: value });
|
2976
4927
|
|
2977
|
-
var
|
2978
|
-
|
2979
|
-
throw TypeError("Cannot " + msg);
|
2980
|
-
};
|
2981
|
-
var __privateGet$1 = (obj, member, getter) => {
|
2982
|
-
__accessCheck$2(obj, member, "read from private field");
|
2983
|
-
return getter ? getter.call(obj) : member.get(obj);
|
2984
|
-
};
|
2985
|
-
var __privateAdd$2 = (obj, member, value) => {
|
2986
|
-
if (member.has(obj))
|
2987
|
-
throw TypeError("Cannot add the same private member more than once");
|
2988
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
4928
|
+
var __typeError$2 = (msg) => {
|
4929
|
+
throw TypeError(msg);
|
2989
4930
|
};
|
4931
|
+
var __accessCheck$2 = (obj, member, msg) => member.has(obj) || __typeError$2("Cannot " + msg);
|
4932
|
+
var __privateGet$1 = (obj, member, getter) => (__accessCheck$2(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
4933
|
+
var __privateAdd$2 = (obj, member, value) => member.has(obj) ? __typeError$2("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
2990
4934
|
var _tables;
|
2991
4935
|
class SchemaPlugin extends XataPlugin {
|
2992
4936
|
constructor() {
|
@@ -2998,8 +4942,7 @@ class SchemaPlugin extends XataPlugin {
|
|
2998
4942
|
{},
|
2999
4943
|
{
|
3000
4944
|
get: (_target, table) => {
|
3001
|
-
if (!isString(table))
|
3002
|
-
throw new Error("Invalid table name");
|
4945
|
+
if (!isString(table)) throw new Error("Invalid table name");
|
3003
4946
|
if (__privateGet$1(this, _tables)[table] === void 0) {
|
3004
4947
|
__privateGet$1(this, _tables)[table] = new RestRepository({ db, pluginOptions, table, schemaTables: pluginOptions.tables });
|
3005
4948
|
}
|
@@ -3090,30 +5033,23 @@ function getContentType(file) {
|
|
3090
5033
|
return "application/octet-stream";
|
3091
5034
|
}
|
3092
5035
|
|
3093
|
-
var
|
3094
|
-
|
3095
|
-
throw TypeError("Cannot " + msg);
|
3096
|
-
};
|
3097
|
-
var __privateAdd$1 = (obj, member, value) => {
|
3098
|
-
if (member.has(obj))
|
3099
|
-
throw TypeError("Cannot add the same private member more than once");
|
3100
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
3101
|
-
};
|
3102
|
-
var __privateMethod$1 = (obj, member, method) => {
|
3103
|
-
__accessCheck$1(obj, member, "access private method");
|
3104
|
-
return method;
|
5036
|
+
var __typeError$1 = (msg) => {
|
5037
|
+
throw TypeError(msg);
|
3105
5038
|
};
|
3106
|
-
var
|
5039
|
+
var __accessCheck$1 = (obj, member, msg) => member.has(obj) || __typeError$1("Cannot " + msg);
|
5040
|
+
var __privateAdd$1 = (obj, member, value) => member.has(obj) ? __typeError$1("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
5041
|
+
var __privateMethod$1 = (obj, member, method) => (__accessCheck$1(obj, member, "access private method"), method);
|
5042
|
+
var _SearchPlugin_instances, search_fn;
|
3107
5043
|
class SearchPlugin extends XataPlugin {
|
3108
5044
|
constructor(db) {
|
3109
5045
|
super();
|
3110
5046
|
this.db = db;
|
3111
|
-
__privateAdd$1(this,
|
5047
|
+
__privateAdd$1(this, _SearchPlugin_instances);
|
3112
5048
|
}
|
3113
5049
|
build(pluginOptions) {
|
3114
5050
|
return {
|
3115
5051
|
all: async (query, options = {}) => {
|
3116
|
-
const { records, totalCount } = await __privateMethod$1(this,
|
5052
|
+
const { records, totalCount } = await __privateMethod$1(this, _SearchPlugin_instances, search_fn).call(this, query, options, pluginOptions);
|
3117
5053
|
return {
|
3118
5054
|
totalCount,
|
3119
5055
|
records: records.map((record) => {
|
@@ -3123,7 +5059,7 @@ class SearchPlugin extends XataPlugin {
|
|
3123
5059
|
};
|
3124
5060
|
},
|
3125
5061
|
byTable: async (query, options = {}) => {
|
3126
|
-
const { records: rawRecords, totalCount } = await __privateMethod$1(this,
|
5062
|
+
const { records: rawRecords, totalCount } = await __privateMethod$1(this, _SearchPlugin_instances, search_fn).call(this, query, options, pluginOptions);
|
3127
5063
|
const records = rawRecords.reduce((acc, record) => {
|
3128
5064
|
const table = record.xata_table;
|
3129
5065
|
const items = acc[table] ?? [];
|
@@ -3135,7 +5071,7 @@ class SearchPlugin extends XataPlugin {
|
|
3135
5071
|
};
|
3136
5072
|
}
|
3137
5073
|
}
|
3138
|
-
|
5074
|
+
_SearchPlugin_instances = new WeakSet();
|
3139
5075
|
search_fn = async function(query, options, pluginOptions) {
|
3140
5076
|
const { tables, fuzziness, highlight, prefix, page } = options ?? {};
|
3141
5077
|
const { records, totalCount } = await searchBranch({
|
@@ -3171,8 +5107,7 @@ function arrayString(val) {
|
|
3171
5107
|
return result;
|
3172
5108
|
}
|
3173
5109
|
function prepareValue(value) {
|
3174
|
-
if (!isDefined(value))
|
3175
|
-
return null;
|
5110
|
+
if (!isDefined(value)) return null;
|
3176
5111
|
if (value instanceof Date) {
|
3177
5112
|
return value.toISOString();
|
3178
5113
|
}
|
@@ -3199,31 +5134,42 @@ function prepareParams(param1, param2) {
|
|
3199
5134
|
return { statement, params: param2?.map((value) => prepareValue(value)) };
|
3200
5135
|
}
|
3201
5136
|
if (isObject(param1)) {
|
3202
|
-
const { statement, params, consistency } = param1;
|
3203
|
-
return { statement, params: params?.map((value) => prepareValue(value)), consistency };
|
5137
|
+
const { statement, params, consistency, responseType } = param1;
|
5138
|
+
return { statement, params: params?.map((value) => prepareValue(value)), consistency, responseType };
|
3204
5139
|
}
|
3205
5140
|
throw new Error("Invalid query");
|
3206
5141
|
}
|
3207
5142
|
|
3208
5143
|
class SQLPlugin extends XataPlugin {
|
3209
5144
|
build(pluginOptions) {
|
3210
|
-
|
5145
|
+
const sqlFunction = async (query, ...parameters) => {
|
3211
5146
|
if (!isParamsObject(query) && (!isTemplateStringsArray(query) || !Array.isArray(parameters))) {
|
3212
5147
|
throw new Error("Invalid usage of `xata.sql`. Please use it as a tagged template or with an object.");
|
3213
5148
|
}
|
3214
|
-
const { statement, params, consistency } = prepareParams(query, parameters);
|
3215
|
-
const {
|
3216
|
-
records,
|
3217
|
-
rows,
|
3218
|
-
warning,
|
3219
|
-
columns = []
|
3220
|
-
} = await sqlQuery({
|
5149
|
+
const { statement, params, consistency, responseType } = prepareParams(query, parameters);
|
5150
|
+
const { warning, columns, ...response } = await sqlQuery({
|
3221
5151
|
pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
|
3222
|
-
body: { statement, params, consistency },
|
5152
|
+
body: { statement, params, consistency, responseType },
|
3223
5153
|
...pluginOptions
|
3224
5154
|
});
|
5155
|
+
const records = "records" in response ? response.records : void 0;
|
5156
|
+
const rows = "rows" in response ? response.rows : void 0;
|
3225
5157
|
return { records, rows, warning, columns };
|
3226
5158
|
};
|
5159
|
+
sqlFunction.connectionString = buildConnectionString(pluginOptions);
|
5160
|
+
sqlFunction.batch = async (query) => {
|
5161
|
+
const { results } = await sqlBatchQuery({
|
5162
|
+
pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
|
5163
|
+
body: {
|
5164
|
+
statements: query.statements.map(({ statement, params }) => ({ statement, params })),
|
5165
|
+
consistency: query.consistency,
|
5166
|
+
responseType: query.responseType
|
5167
|
+
},
|
5168
|
+
...pluginOptions
|
5169
|
+
});
|
5170
|
+
return { results };
|
5171
|
+
};
|
5172
|
+
return sqlFunction;
|
3227
5173
|
}
|
3228
5174
|
}
|
3229
5175
|
function isTemplateStringsArray(strings) {
|
@@ -3232,6 +5178,32 @@ function isTemplateStringsArray(strings) {
|
|
3232
5178
|
function isParamsObject(params) {
|
3233
5179
|
return isObject(params) && "statement" in params;
|
3234
5180
|
}
|
5181
|
+
function buildDomain(host, region) {
|
5182
|
+
switch (host) {
|
5183
|
+
case "production":
|
5184
|
+
return `${region}.sql.xata.sh`;
|
5185
|
+
case "staging":
|
5186
|
+
return `${region}.sql.staging-xata.dev`;
|
5187
|
+
case "dev":
|
5188
|
+
return `${region}.sql.dev-xata.dev`;
|
5189
|
+
case "local":
|
5190
|
+
return "localhost:7654";
|
5191
|
+
default:
|
5192
|
+
throw new Error("Invalid host provider");
|
5193
|
+
}
|
5194
|
+
}
|
5195
|
+
function buildConnectionString({ apiKey, workspacesApiUrl, branch }) {
|
5196
|
+
const url = isString(workspacesApiUrl) ? workspacesApiUrl : workspacesApiUrl("", {});
|
5197
|
+
const parts = parseWorkspacesUrlParts(url);
|
5198
|
+
if (!parts) throw new Error("Invalid workspaces URL");
|
5199
|
+
const { workspace: workspaceSlug, region, database, host } = parts;
|
5200
|
+
const domain = buildDomain(host, region);
|
5201
|
+
const workspace = workspaceSlug.split("-").pop();
|
5202
|
+
if (!workspace || !region || !database || !apiKey || !branch) {
|
5203
|
+
throw new Error("Unable to build xata connection string");
|
5204
|
+
}
|
5205
|
+
return `postgresql://${workspace}:${apiKey}@${domain}/${database}:${branch}?sslmode=require`;
|
5206
|
+
}
|
3235
5207
|
|
3236
5208
|
class TransactionPlugin extends XataPlugin {
|
3237
5209
|
build(pluginOptions) {
|
@@ -3248,41 +5220,27 @@ class TransactionPlugin extends XataPlugin {
|
|
3248
5220
|
}
|
3249
5221
|
}
|
3250
5222
|
|
3251
|
-
var
|
3252
|
-
|
3253
|
-
throw TypeError("Cannot " + msg);
|
3254
|
-
};
|
3255
|
-
var __privateGet = (obj, member, getter) => {
|
3256
|
-
__accessCheck(obj, member, "read from private field");
|
3257
|
-
return getter ? getter.call(obj) : member.get(obj);
|
3258
|
-
};
|
3259
|
-
var __privateAdd = (obj, member, value) => {
|
3260
|
-
if (member.has(obj))
|
3261
|
-
throw TypeError("Cannot add the same private member more than once");
|
3262
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
3263
|
-
};
|
3264
|
-
var __privateSet = (obj, member, value, setter) => {
|
3265
|
-
__accessCheck(obj, member, "write to private field");
|
3266
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
3267
|
-
return value;
|
3268
|
-
};
|
3269
|
-
var __privateMethod = (obj, member, method) => {
|
3270
|
-
__accessCheck(obj, member, "access private method");
|
3271
|
-
return method;
|
5223
|
+
var __typeError = (msg) => {
|
5224
|
+
throw TypeError(msg);
|
3272
5225
|
};
|
5226
|
+
var __accessCheck = (obj, member, msg) => member.has(obj) || __typeError("Cannot " + msg);
|
5227
|
+
var __privateGet = (obj, member, getter) => (__accessCheck(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
5228
|
+
var __privateAdd = (obj, member, value) => member.has(obj) ? __typeError("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
5229
|
+
var __privateSet = (obj, member, value, setter) => (__accessCheck(obj, member, "write to private field"), member.set(obj, value), value);
|
5230
|
+
var __privateMethod = (obj, member, method) => (__accessCheck(obj, member, "access private method"), method);
|
3273
5231
|
const buildClient = (plugins) => {
|
3274
|
-
var _options,
|
5232
|
+
var _options, _instances, parseOptions_fn, getFetchProps_fn, _a;
|
3275
5233
|
return _a = class {
|
3276
5234
|
constructor(options = {}, tables) {
|
3277
|
-
__privateAdd(this,
|
3278
|
-
__privateAdd(this,
|
3279
|
-
|
3280
|
-
const safeOptions = __privateMethod(this, _parseOptions, parseOptions_fn).call(this, options);
|
5235
|
+
__privateAdd(this, _instances);
|
5236
|
+
__privateAdd(this, _options);
|
5237
|
+
const safeOptions = __privateMethod(this, _instances, parseOptions_fn).call(this, options);
|
3281
5238
|
__privateSet(this, _options, safeOptions);
|
3282
5239
|
const pluginOptions = {
|
3283
|
-
...__privateMethod(this,
|
5240
|
+
...__privateMethod(this, _instances, getFetchProps_fn).call(this, safeOptions),
|
3284
5241
|
host: safeOptions.host,
|
3285
|
-
tables
|
5242
|
+
tables,
|
5243
|
+
branch: safeOptions.branch
|
3286
5244
|
};
|
3287
5245
|
const db = new SchemaPlugin().build(pluginOptions);
|
3288
5246
|
const search = new SearchPlugin(db).build(pluginOptions);
|
@@ -3296,8 +5254,7 @@ const buildClient = (plugins) => {
|
|
3296
5254
|
this.sql = sql;
|
3297
5255
|
this.files = files;
|
3298
5256
|
for (const [key, namespace] of Object.entries(plugins ?? {})) {
|
3299
|
-
if (namespace === void 0)
|
3300
|
-
continue;
|
5257
|
+
if (namespace === void 0) continue;
|
3301
5258
|
this[key] = namespace.build(pluginOptions);
|
3302
5259
|
}
|
3303
5260
|
}
|
@@ -3306,8 +5263,8 @@ const buildClient = (plugins) => {
|
|
3306
5263
|
const branch = __privateGet(this, _options).branch;
|
3307
5264
|
return { databaseURL, branch };
|
3308
5265
|
}
|
3309
|
-
}, _options = new WeakMap(),
|
3310
|
-
const enableBrowser = options?.enableBrowser ??
|
5266
|
+
}, _options = new WeakMap(), _instances = new WeakSet(), parseOptions_fn = function(options) {
|
5267
|
+
const enableBrowser = options?.enableBrowser ?? false;
|
3311
5268
|
const isBrowser = typeof window !== "undefined" && typeof Deno === "undefined";
|
3312
5269
|
if (isBrowser && !enableBrowser) {
|
3313
5270
|
throw new Error(
|
@@ -3315,8 +5272,9 @@ const buildClient = (plugins) => {
|
|
3315
5272
|
);
|
3316
5273
|
}
|
3317
5274
|
const fetch = getFetchImplementation(options?.fetch);
|
3318
|
-
const databaseURL = options?.databaseURL
|
3319
|
-
const apiKey = options?.apiKey
|
5275
|
+
const databaseURL = options?.databaseURL;
|
5276
|
+
const apiKey = options?.apiKey;
|
5277
|
+
const branch = options?.branch;
|
3320
5278
|
const trace = options?.trace ?? defaultTrace;
|
3321
5279
|
const clientName = options?.clientName;
|
3322
5280
|
const host = options?.host ?? "production";
|
@@ -3327,25 +5285,8 @@ const buildClient = (plugins) => {
|
|
3327
5285
|
if (!databaseURL) {
|
3328
5286
|
throw new Error("Option databaseURL is required");
|
3329
5287
|
}
|
3330
|
-
|
3331
|
-
|
3332
|
-
const branch = options?.branch || previewBranch || envBranch || "main";
|
3333
|
-
if (!!previewBranch && branch !== previewBranch) {
|
3334
|
-
console.warn(
|
3335
|
-
`Ignoring preview branch ${previewBranch} because branch option was passed to the client constructor with value ${branch}`
|
3336
|
-
);
|
3337
|
-
} else if (!!envBranch && branch !== envBranch) {
|
3338
|
-
console.warn(
|
3339
|
-
`Ignoring branch ${envBranch} because branch option was passed to the client constructor with value ${branch}`
|
3340
|
-
);
|
3341
|
-
} else if (!!previewBranch && !!envBranch && previewBranch !== envBranch) {
|
3342
|
-
console.warn(
|
3343
|
-
`Ignoring preview branch ${previewBranch} and branch ${envBranch} because branch option was passed to the client constructor with value ${branch}`
|
3344
|
-
);
|
3345
|
-
} else if (!previewBranch && !envBranch && options?.branch === void 0) {
|
3346
|
-
console.warn(
|
3347
|
-
`No branch was passed to the client constructor. Using default branch ${branch}. You can set the branch with the environment variable XATA_BRANCH or by passing the branch option to the client constructor.`
|
3348
|
-
);
|
5288
|
+
if (!branch) {
|
5289
|
+
throw new Error("Option branch is required");
|
3349
5290
|
}
|
3350
5291
|
return {
|
3351
5292
|
fetch,
|
@@ -3359,7 +5300,7 @@ const buildClient = (plugins) => {
|
|
3359
5300
|
clientName,
|
3360
5301
|
xataAgentExtra
|
3361
5302
|
};
|
3362
|
-
},
|
5303
|
+
}, getFetchProps_fn = function({
|
3363
5304
|
fetch,
|
3364
5305
|
apiKey,
|
3365
5306
|
databaseURL,
|
@@ -3400,26 +5341,19 @@ class Serializer {
|
|
3400
5341
|
}
|
3401
5342
|
toJSON(data) {
|
3402
5343
|
function visit(obj) {
|
3403
|
-
if (Array.isArray(obj))
|
3404
|
-
return obj.map(visit);
|
5344
|
+
if (Array.isArray(obj)) return obj.map(visit);
|
3405
5345
|
const type = typeof obj;
|
3406
|
-
if (type === "undefined")
|
3407
|
-
|
3408
|
-
if (
|
3409
|
-
return { [META]: "bigint", [VALUE]: obj.toString() };
|
3410
|
-
if (obj === null || type !== "object")
|
3411
|
-
return obj;
|
5346
|
+
if (type === "undefined") return { [META]: "undefined" };
|
5347
|
+
if (type === "bigint") return { [META]: "bigint", [VALUE]: obj.toString() };
|
5348
|
+
if (obj === null || type !== "object") return obj;
|
3412
5349
|
const constructor = obj.constructor;
|
3413
5350
|
const o = { [META]: constructor.name };
|
3414
5351
|
for (const [key, value] of Object.entries(obj)) {
|
3415
5352
|
o[key] = visit(value);
|
3416
5353
|
}
|
3417
|
-
if (constructor === Date)
|
3418
|
-
|
3419
|
-
if (constructor ===
|
3420
|
-
o[VALUE] = Object.fromEntries(obj);
|
3421
|
-
if (constructor === Set)
|
3422
|
-
o[VALUE] = [...obj];
|
5354
|
+
if (constructor === Date) o[VALUE] = obj.toISOString();
|
5355
|
+
if (constructor === Map) o[VALUE] = Object.fromEntries(obj);
|
5356
|
+
if (constructor === Set) o[VALUE] = [...obj];
|
3423
5357
|
return o;
|
3424
5358
|
}
|
3425
5359
|
return JSON.stringify(visit(data));
|
@@ -3432,16 +5366,11 @@ class Serializer {
|
|
3432
5366
|
if (constructor) {
|
3433
5367
|
return Object.assign(Object.create(constructor.prototype), rest);
|
3434
5368
|
}
|
3435
|
-
if (clazz === "Date")
|
3436
|
-
|
3437
|
-
if (clazz === "
|
3438
|
-
|
3439
|
-
if (clazz === "
|
3440
|
-
return new Map(Object.entries(val));
|
3441
|
-
if (clazz === "bigint")
|
3442
|
-
return BigInt(val);
|
3443
|
-
if (clazz === "undefined")
|
3444
|
-
return void 0;
|
5369
|
+
if (clazz === "Date") return new Date(val);
|
5370
|
+
if (clazz === "Set") return new Set(val);
|
5371
|
+
if (clazz === "Map") return new Map(Object.entries(val));
|
5372
|
+
if (clazz === "bigint") return BigInt(val);
|
5373
|
+
if (clazz === "undefined") return void 0;
|
3445
5374
|
return rest;
|
3446
5375
|
}
|
3447
5376
|
return value;
|
@@ -3456,6 +5385,47 @@ const deserialize = (json) => {
|
|
3456
5385
|
return defaultSerializer.fromJSON(json);
|
3457
5386
|
};
|
3458
5387
|
|
5388
|
+
function parseEnvironment(environment) {
|
5389
|
+
try {
|
5390
|
+
if (typeof environment === "function") {
|
5391
|
+
return new Proxy(
|
5392
|
+
{},
|
5393
|
+
{
|
5394
|
+
get(target) {
|
5395
|
+
return environment(target);
|
5396
|
+
}
|
5397
|
+
}
|
5398
|
+
);
|
5399
|
+
}
|
5400
|
+
if (isObject(environment)) {
|
5401
|
+
return environment;
|
5402
|
+
}
|
5403
|
+
} catch (error) {
|
5404
|
+
}
|
5405
|
+
return {};
|
5406
|
+
}
|
5407
|
+
function buildPreviewBranchName({ org, branch }) {
|
5408
|
+
return `preview-${org}-${branch}`;
|
5409
|
+
}
|
5410
|
+
function getDeployPreviewBranch(environment) {
|
5411
|
+
try {
|
5412
|
+
const { deployPreview, deployPreviewBranch, vercelGitCommitRef, vercelGitRepoOwner } = parseEnvironment(environment);
|
5413
|
+
if (deployPreviewBranch) return deployPreviewBranch;
|
5414
|
+
switch (deployPreview) {
|
5415
|
+
case "vercel": {
|
5416
|
+
if (!vercelGitCommitRef || !vercelGitRepoOwner) {
|
5417
|
+
console.warn("XATA_PREVIEW=vercel but VERCEL_GIT_COMMIT_REF or VERCEL_GIT_REPO_OWNER is not valid");
|
5418
|
+
return void 0;
|
5419
|
+
}
|
5420
|
+
return buildPreviewBranchName({ org: vercelGitRepoOwner, branch: vercelGitCommitRef });
|
5421
|
+
}
|
5422
|
+
}
|
5423
|
+
return void 0;
|
5424
|
+
} catch (err) {
|
5425
|
+
return void 0;
|
5426
|
+
}
|
5427
|
+
}
|
5428
|
+
|
3459
5429
|
class XataError extends Error {
|
3460
5430
|
constructor(message, status) {
|
3461
5431
|
super(message);
|
@@ -3464,6 +5434,7 @@ class XataError extends Error {
|
|
3464
5434
|
}
|
3465
5435
|
|
3466
5436
|
exports.BaseClient = BaseClient;
|
5437
|
+
exports.Buffer = Buffer;
|
3467
5438
|
exports.FetcherError = FetcherError;
|
3468
5439
|
exports.FilesPlugin = FilesPlugin;
|
3469
5440
|
exports.Operations = operationsByTag;
|
@@ -3489,6 +5460,7 @@ exports.XataError = XataError;
|
|
3489
5460
|
exports.XataFile = XataFile;
|
3490
5461
|
exports.XataPlugin = XataPlugin;
|
3491
5462
|
exports.acceptWorkspaceMemberInvite = acceptWorkspaceMemberInvite;
|
5463
|
+
exports.adaptAllTables = adaptAllTables;
|
3492
5464
|
exports.adaptTable = adaptTable;
|
3493
5465
|
exports.addGitBranchesEntry = addGitBranchesEntry;
|
3494
5466
|
exports.addTableColumn = addTableColumn;
|
@@ -3506,9 +5478,11 @@ exports.cancelWorkspaceMemberInvite = cancelWorkspaceMemberInvite;
|
|
3506
5478
|
exports.compareBranchSchemas = compareBranchSchemas;
|
3507
5479
|
exports.compareBranchWithUserSchema = compareBranchWithUserSchema;
|
3508
5480
|
exports.compareMigrationRequest = compareMigrationRequest;
|
5481
|
+
exports.completeMigration = completeMigration;
|
3509
5482
|
exports.contains = contains;
|
3510
5483
|
exports.copyBranch = copyBranch;
|
3511
5484
|
exports.createBranch = createBranch;
|
5485
|
+
exports.createBranchAsync = createBranchAsync;
|
3512
5486
|
exports.createCluster = createCluster;
|
3513
5487
|
exports.createDatabase = createDatabase;
|
3514
5488
|
exports.createMigrationRequest = createMigrationRequest;
|
@@ -3516,6 +5490,7 @@ exports.createTable = createTable;
|
|
3516
5490
|
exports.createUserAPIKey = createUserAPIKey;
|
3517
5491
|
exports.createWorkspace = createWorkspace;
|
3518
5492
|
exports.deleteBranch = deleteBranch;
|
5493
|
+
exports.deleteCluster = deleteCluster;
|
3519
5494
|
exports.deleteColumn = deleteColumn;
|
3520
5495
|
exports.deleteDatabase = deleteDatabase;
|
3521
5496
|
exports.deleteDatabaseGithubSettings = deleteDatabaseGithubSettings;
|
@@ -3529,6 +5504,7 @@ exports.deleteUserAPIKey = deleteUserAPIKey;
|
|
3529
5504
|
exports.deleteUserOAuthClient = deleteUserOAuthClient;
|
3530
5505
|
exports.deleteWorkspace = deleteWorkspace;
|
3531
5506
|
exports.deserialize = deserialize;
|
5507
|
+
exports.dropClusterExtension = dropClusterExtension;
|
3532
5508
|
exports.endsWith = endsWith;
|
3533
5509
|
exports.equals = equals;
|
3534
5510
|
exports.executeBranchMigrationPlan = executeBranchMigrationPlan;
|
@@ -3536,37 +5512,40 @@ exports.exists = exists;
|
|
3536
5512
|
exports.fileAccess = fileAccess;
|
3537
5513
|
exports.fileUpload = fileUpload;
|
3538
5514
|
exports.ge = ge;
|
3539
|
-
exports.getAPIKey = getAPIKey;
|
3540
5515
|
exports.getAuthorizationCode = getAuthorizationCode;
|
3541
|
-
exports.getBranch = getBranch;
|
3542
5516
|
exports.getBranchDetails = getBranchDetails;
|
3543
5517
|
exports.getBranchList = getBranchList;
|
3544
5518
|
exports.getBranchMetadata = getBranchMetadata;
|
3545
5519
|
exports.getBranchMigrationHistory = getBranchMigrationHistory;
|
3546
5520
|
exports.getBranchMigrationJobStatus = getBranchMigrationJobStatus;
|
3547
5521
|
exports.getBranchMigrationPlan = getBranchMigrationPlan;
|
5522
|
+
exports.getBranchMoveStatus = getBranchMoveStatus;
|
3548
5523
|
exports.getBranchSchemaHistory = getBranchSchemaHistory;
|
3549
5524
|
exports.getBranchStats = getBranchStats;
|
3550
5525
|
exports.getCluster = getCluster;
|
5526
|
+
exports.getClusterMetrics = getClusterMetrics;
|
3551
5527
|
exports.getColumn = getColumn;
|
3552
5528
|
exports.getDatabaseGithubSettings = getDatabaseGithubSettings;
|
3553
5529
|
exports.getDatabaseList = getDatabaseList;
|
3554
5530
|
exports.getDatabaseMetadata = getDatabaseMetadata;
|
3555
5531
|
exports.getDatabaseSettings = getDatabaseSettings;
|
3556
|
-
exports.
|
5532
|
+
exports.getDeployPreviewBranch = getDeployPreviewBranch;
|
3557
5533
|
exports.getFile = getFile;
|
3558
5534
|
exports.getFileItem = getFileItem;
|
3559
5535
|
exports.getGitBranchesMapping = getGitBranchesMapping;
|
3560
5536
|
exports.getHostUrl = getHostUrl;
|
3561
5537
|
exports.getMigrationHistory = getMigrationHistory;
|
3562
5538
|
exports.getMigrationJobStatus = getMigrationJobStatus;
|
5539
|
+
exports.getMigrationJobs = getMigrationJobs;
|
3563
5540
|
exports.getMigrationRequest = getMigrationRequest;
|
3564
5541
|
exports.getMigrationRequestIsMerged = getMigrationRequestIsMerged;
|
3565
|
-
exports.getPreviewBranch = getPreviewBranch;
|
3566
5542
|
exports.getRecord = getRecord;
|
3567
5543
|
exports.getSchema = getSchema;
|
5544
|
+
exports.getSchemas = getSchemas;
|
3568
5545
|
exports.getTableColumns = getTableColumns;
|
3569
5546
|
exports.getTableSchema = getTableSchema;
|
5547
|
+
exports.getTaskStatus = getTaskStatus;
|
5548
|
+
exports.getTasks = getTasks;
|
3570
5549
|
exports.getUser = getUser;
|
3571
5550
|
exports.getUserAPIKeys = getUserAPIKeys;
|
3572
5551
|
exports.getUserOAuthAccessTokens = getUserOAuthAccessTokens;
|
@@ -3589,6 +5568,7 @@ exports.includesAny = includesAny;
|
|
3589
5568
|
exports.includesNone = includesNone;
|
3590
5569
|
exports.insertRecord = insertRecord;
|
3591
5570
|
exports.insertRecordWithID = insertRecordWithID;
|
5571
|
+
exports.installClusterExtension = installClusterExtension;
|
3592
5572
|
exports.inviteWorkspaceMember = inviteWorkspaceMember;
|
3593
5573
|
exports.is = is;
|
3594
5574
|
exports.isCursorPaginationOptions = isCursorPaginationOptions;
|
@@ -3602,12 +5582,15 @@ exports.le = le;
|
|
3602
5582
|
exports.lessEquals = lessEquals;
|
3603
5583
|
exports.lessThan = lessThan;
|
3604
5584
|
exports.lessThanEquals = lessThanEquals;
|
5585
|
+
exports.listClusterBranches = listClusterBranches;
|
5586
|
+
exports.listClusterExtensions = listClusterExtensions;
|
3605
5587
|
exports.listClusters = listClusters;
|
3606
5588
|
exports.listMigrationRequestsCommits = listMigrationRequestsCommits;
|
3607
5589
|
exports.listRegions = listRegions;
|
3608
5590
|
exports.lt = lt;
|
3609
5591
|
exports.lte = lte;
|
3610
5592
|
exports.mergeMigrationRequest = mergeMigrationRequest;
|
5593
|
+
exports.moveBranch = moveBranch;
|
3611
5594
|
exports.notExists = notExists;
|
3612
5595
|
exports.operationsByTag = operationsByTag;
|
3613
5596
|
exports.parseProviderString = parseProviderString;
|
@@ -3624,11 +5607,14 @@ exports.removeWorkspaceMember = removeWorkspaceMember;
|
|
3624
5607
|
exports.renameDatabase = renameDatabase;
|
3625
5608
|
exports.resendWorkspaceMemberInvite = resendWorkspaceMemberInvite;
|
3626
5609
|
exports.resolveBranch = resolveBranch;
|
5610
|
+
exports.rollbackMigration = rollbackMigration;
|
3627
5611
|
exports.searchBranch = searchBranch;
|
3628
5612
|
exports.searchTable = searchTable;
|
3629
5613
|
exports.serialize = serialize;
|
3630
5614
|
exports.setTableSchema = setTableSchema;
|
5615
|
+
exports.sqlBatchQuery = sqlBatchQuery;
|
3631
5616
|
exports.sqlQuery = sqlQuery;
|
5617
|
+
exports.startMigration = startMigration;
|
3632
5618
|
exports.startsWith = startsWith;
|
3633
5619
|
exports.summarizeTable = summarizeTable;
|
3634
5620
|
exports.transformImage = transformImage;
|