@xata.io/client 0.0.0-next.ve39b4ed80de99105b4e9db8fa85745ab1b3f541e → 0.0.0-next.vf8424dec977277df1a40e8feda5d574b7b0ee66c
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-add-version.log +1 -1
- package/.turbo/turbo-build.log +4 -4
- package/CHANGELOG.md +27 -3
- package/dist/index.cjs +2556 -582
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.ts +5225 -3671
- package/dist/index.mjs +2538 -579
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.cjs
CHANGED
@@ -24,6 +24,1789 @@ const TraceAttributes = {
|
|
24
24
|
CLOUDFLARE_RAY_ID: "cf.ray"
|
25
25
|
};
|
26
26
|
|
27
|
+
// Base64 alphabet tables shared by the encoder/decoder below.
// `lookup` maps a 6-bit value to its base64 character; `revLookup` maps a
// character code back to its 6-bit value (a sparse array keyed by charCode).
const lookup = [];
const revLookup = [];
const code = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
for (let idx = 0; idx < code.length; idx += 1) {
  lookup[idx] = code[idx];
  revLookup[code.charCodeAt(idx)] = idx;
}
// Also accept the URL-safe alphabet ("-" and "_") when decoding.
revLookup["-".charCodeAt(0)] = 62;
revLookup["_".charCodeAt(0)] = 63;
|
36
|
+
/**
 * Splits a base64 string into its payload length and padding length.
 *
 * @param b64 Base64-encoded string; its length must be a multiple of 4.
 * @returns `[validLen, placeHoldersLen]` where `validLen` is the number of
 *   characters before the first `=` (or the full length if unpadded) and
 *   `placeHoldersLen` is the number of padding slots (0, 1, or 2 in valid input).
 * @throws Error if the string length is not a multiple of 4.
 */
function getLens(b64) {
  if (b64.length % 4 > 0) {
    throw new Error("Invalid string. Length must be a multiple of 4");
  }
  const padStart = b64.indexOf("=");
  const validLen = padStart === -1 ? b64.length : padStart;
  const placeHoldersLen = validLen === b64.length ? 0 : 4 - validLen % 4;
  return [validLen, placeHoldersLen];
}
|
46
|
+
/**
 * Computes the number of decoded bytes for a base64 string.
 *
 * @param _b64 Unused; kept for signature compatibility with callers.
 * @param validLen Number of non-padding base64 characters.
 * @param placeHoldersLen Number of `=` padding characters.
 * @returns Decoded byte count (3 bytes per 4 chars, minus padding).
 */
function _byteLength(_b64, validLen, placeHoldersLen) {
  const totalChars = validLen + placeHoldersLen;
  return totalChars * 3 / 4 - placeHoldersLen;
}
|
49
|
+
/**
 * Decodes a base64 string into a `Uint8Array`.
 *
 * Full 4-character groups are decoded three bytes at a time; a final padded
 * group (if any) is handled separately for the 1- and 2-byte tail cases.
 *
 * @param b64 Base64-encoded string (standard or URL-safe alphabet).
 * @returns Decoded bytes.
 * @throws Error if the string length is not a multiple of 4 (via `getLens`).
 */
function toByteArray(b64) {
  const [validLen, placeHoldersLen] = getLens(b64);
  const bytes = new Uint8Array(_byteLength(b64, validLen, placeHoldersLen));
  // Stop the main loop before the last group when it contains padding.
  const fullGroupEnd = placeHoldersLen > 0 ? validLen - 4 : validLen;
  let out = 0;
  let pos = 0;
  while (pos < fullGroupEnd) {
    const chunk =
      revLookup[b64.charCodeAt(pos)] << 18 |
      revLookup[b64.charCodeAt(pos + 1)] << 12 |
      revLookup[b64.charCodeAt(pos + 2)] << 6 |
      revLookup[b64.charCodeAt(pos + 3)];
    bytes[out++] = chunk >> 16 & 255;
    bytes[out++] = chunk >> 8 & 255;
    bytes[out++] = chunk & 255;
    pos += 4;
  }
  if (placeHoldersLen === 2) {
    // Two pads: the final group encodes a single byte.
    const chunk = revLookup[b64.charCodeAt(pos)] << 2 | revLookup[b64.charCodeAt(pos + 1)] >> 4;
    bytes[out++] = chunk & 255;
  } else if (placeHoldersLen === 1) {
    // One pad: the final group encodes two bytes.
    const chunk =
      revLookup[b64.charCodeAt(pos)] << 10 |
      revLookup[b64.charCodeAt(pos + 1)] << 4 |
      revLookup[b64.charCodeAt(pos + 2)] >> 2;
    bytes[out++] = chunk >> 8 & 255;
    bytes[out++] = chunk & 255;
  }
  return bytes;
}
|
75
|
+
/**
 * Encodes a 24-bit integer (three packed bytes) as four base64 characters.
 *
 * @param num 24-bit value; bits beyond 24 are ignored by the masking.
 * @returns Four-character base64 string.
 */
function tripletToBase64(num) {
  return (
    lookup[num >> 18 & 63] +
    lookup[num >> 12 & 63] +
    lookup[num >> 6 & 63] +
    lookup[num & 63]
  );
}
|
78
|
+
/**
 * Encodes the byte range `[start, end)` of `uint8` as base64.
 * The range length is expected to be a multiple of 3 (no padding is emitted).
 *
 * @param uint8 Source byte array.
 * @param start Inclusive start index.
 * @param end Exclusive end index.
 * @returns Base64 string for the given range.
 */
function encodeChunk(uint8, start, end) {
  const pieces = [];
  for (let pos = start; pos < end; pos += 3) {
    // Pack three bytes into one 24-bit value, then emit four characters.
    const triplet =
      (uint8[pos] << 16 & 16711680) +
      (uint8[pos + 1] << 8 & 65280) +
      (uint8[pos + 2] & 255);
    pieces.push(tripletToBase64(triplet));
  }
  return pieces.join("");
}
|
87
|
+
/**
 * Encodes a byte array as a standard base64 string (with `=` padding).
 *
 * The bulk of the input is processed in chunks of 16383 bytes (a multiple of
 * 3) to keep intermediate strings bounded; the final 1- or 2-byte remainder
 * is encoded and padded separately.
 *
 * @param uint8 Bytes to encode.
 * @returns Base64-encoded string.
 */
function fromByteArray(uint8) {
  const len = uint8.length;
  const extraBytes = len % 3;
  const maxChunkLength = 16383;
  const mainLen = len - extraBytes;
  const parts = [];
  for (let offset = 0; offset < mainLen; offset += maxChunkLength) {
    parts.push(encodeChunk(uint8, offset, Math.min(offset + maxChunkLength, mainLen)));
  }
  if (extraBytes === 1) {
    const last = uint8[len - 1];
    parts.push(lookup[last >> 2] + lookup[last << 4 & 63] + "==");
  } else if (extraBytes === 2) {
    const last = (uint8[len - 2] << 8) + uint8[len - 1];
    parts.push(lookup[last >> 10] + lookup[last >> 4 & 63] + lookup[last << 2 & 63] + "=");
  }
  return parts.join("");
}
|
105
|
+
|
106
|
+
// Upper bound on buffer sizes: 2^31 - 1 (max signed 32-bit integer).
const K_MAX_LENGTH = 2147483647;
// NOTE(review): presumably caps how many arguments are passed per call when
// building strings from char codes (usage is outside this view) — confirm
// against the decode helpers further down the file.
const MAX_ARGUMENTS_LENGTH = 4096;
|
108
|
+
class Buffer extends Uint8Array {
|
109
|
+
/**
|
110
|
+
* Constructs a new `Buffer` instance.
|
111
|
+
*
|
112
|
+
* @param value
|
113
|
+
* @param encodingOrOffset
|
114
|
+
* @param length
|
115
|
+
*/
|
116
|
+
constructor(value, encodingOrOffset, length) {
|
117
|
+
if (typeof value === "number") {
|
118
|
+
if (typeof encodingOrOffset === "string") {
|
119
|
+
throw new TypeError("The first argument must be of type string, received type number");
|
120
|
+
}
|
121
|
+
if (value < 0) {
|
122
|
+
throw new RangeError("The buffer size cannot be negative");
|
123
|
+
}
|
124
|
+
super(value < 0 ? 0 : Buffer._checked(value) | 0);
|
125
|
+
} else if (typeof value === "string") {
|
126
|
+
if (typeof encodingOrOffset !== "string") {
|
127
|
+
encodingOrOffset = "utf8";
|
128
|
+
}
|
129
|
+
if (!Buffer.isEncoding(encodingOrOffset)) {
|
130
|
+
throw new TypeError("Unknown encoding: " + encodingOrOffset);
|
131
|
+
}
|
132
|
+
const length2 = Buffer.byteLength(value, encodingOrOffset) | 0;
|
133
|
+
super(length2);
|
134
|
+
const written = this.write(value, 0, this.length, encodingOrOffset);
|
135
|
+
if (written !== length2) {
|
136
|
+
throw new TypeError(
|
137
|
+
"Number of bytes written did not match expected length (wrote " + written + ", expected " + length2 + ")"
|
138
|
+
);
|
139
|
+
}
|
140
|
+
} else if (ArrayBuffer.isView(value)) {
|
141
|
+
if (Buffer._isInstance(value, Uint8Array)) {
|
142
|
+
const copy = new Uint8Array(value);
|
143
|
+
const array = copy.buffer;
|
144
|
+
const byteOffset = copy.byteOffset;
|
145
|
+
const length2 = copy.byteLength;
|
146
|
+
if (byteOffset < 0 || array.byteLength < byteOffset) {
|
147
|
+
throw new RangeError("offset is outside of buffer bounds");
|
148
|
+
}
|
149
|
+
if (array.byteLength < byteOffset + (length2 || 0)) {
|
150
|
+
throw new RangeError("length is outside of buffer bounds");
|
151
|
+
}
|
152
|
+
super(new Uint8Array(array, byteOffset, length2));
|
153
|
+
} else {
|
154
|
+
const array = value;
|
155
|
+
const length2 = array.length < 0 ? 0 : Buffer._checked(array.length) | 0;
|
156
|
+
super(new Uint8Array(length2));
|
157
|
+
for (let i = 0; i < length2; i++) {
|
158
|
+
this[i] = array[i] & 255;
|
159
|
+
}
|
160
|
+
}
|
161
|
+
} else if (value == null) {
|
162
|
+
throw new TypeError(
|
163
|
+
"The first argument must be one of type string, Buffer, ArrayBuffer, Array, or Array-like Object. Received type " + typeof value
|
164
|
+
);
|
165
|
+
} else if (Buffer._isInstance(value, ArrayBuffer) || value && Buffer._isInstance(value.buffer, ArrayBuffer)) {
|
166
|
+
const array = value;
|
167
|
+
const byteOffset = encodingOrOffset;
|
168
|
+
if (byteOffset < 0 || array.byteLength < byteOffset) {
|
169
|
+
throw new RangeError("offset is outside of buffer bounds");
|
170
|
+
}
|
171
|
+
if (array.byteLength < byteOffset + (length || 0)) {
|
172
|
+
throw new RangeError("length is outside of buffer bounds");
|
173
|
+
}
|
174
|
+
super(new Uint8Array(array, byteOffset, length));
|
175
|
+
} else if (Array.isArray(value)) {
|
176
|
+
const array = value;
|
177
|
+
const length2 = array.length < 0 ? 0 : Buffer._checked(array.length) | 0;
|
178
|
+
super(new Uint8Array(length2));
|
179
|
+
for (let i = 0; i < length2; i++) {
|
180
|
+
this[i] = array[i] & 255;
|
181
|
+
}
|
182
|
+
} else {
|
183
|
+
throw new TypeError("Unable to determine the correct way to allocate buffer for type " + typeof value);
|
184
|
+
}
|
185
|
+
}
|
186
|
+
/**
|
187
|
+
* Return JSON representation of the buffer.
|
188
|
+
*/
|
189
|
+
toJSON() {
|
190
|
+
return {
|
191
|
+
type: "Buffer",
|
192
|
+
data: Array.prototype.slice.call(this)
|
193
|
+
};
|
194
|
+
}
|
195
|
+
/**
|
196
|
+
* Writes `string` to the buffer at `offset` according to the character encoding in `encoding`. The `length`
|
197
|
+
* parameter is the number of bytes to write. If the buffer does not contain enough space to fit the entire string,
|
198
|
+
* only part of `string` will be written. However, partially encoded characters will not be written.
|
199
|
+
*
|
200
|
+
* @param string String to write to `buf`.
|
201
|
+
* @param offset Number of bytes to skip before starting to write `string`. Default: `0`.
|
202
|
+
* @param length Maximum number of bytes to write: Default: `buf.length - offset`.
|
203
|
+
* @param encoding The character encoding of `string`. Default: `utf8`.
|
204
|
+
*/
|
205
|
+
write(string, offset, length, encoding) {
|
206
|
+
if (typeof offset === "undefined") {
|
207
|
+
encoding = "utf8";
|
208
|
+
length = this.length;
|
209
|
+
offset = 0;
|
210
|
+
} else if (typeof length === "undefined" && typeof offset === "string") {
|
211
|
+
encoding = offset;
|
212
|
+
length = this.length;
|
213
|
+
offset = 0;
|
214
|
+
} else if (typeof offset === "number" && isFinite(offset)) {
|
215
|
+
offset = offset >>> 0;
|
216
|
+
if (typeof length === "number" && isFinite(length)) {
|
217
|
+
length = length >>> 0;
|
218
|
+
encoding ?? (encoding = "utf8");
|
219
|
+
} else if (typeof length === "string") {
|
220
|
+
encoding = length;
|
221
|
+
length = void 0;
|
222
|
+
}
|
223
|
+
} else {
|
224
|
+
throw new Error("Buffer.write(string, encoding, offset[, length]) is no longer supported");
|
225
|
+
}
|
226
|
+
const remaining = this.length - offset;
|
227
|
+
if (typeof length === "undefined" || length > remaining) {
|
228
|
+
length = remaining;
|
229
|
+
}
|
230
|
+
if (string.length > 0 && (length < 0 || offset < 0) || offset > this.length) {
|
231
|
+
throw new RangeError("Attempt to write outside buffer bounds");
|
232
|
+
}
|
233
|
+
encoding || (encoding = "utf8");
|
234
|
+
switch (Buffer._getEncoding(encoding)) {
|
235
|
+
case "hex":
|
236
|
+
return Buffer._hexWrite(this, string, offset, length);
|
237
|
+
case "utf8":
|
238
|
+
return Buffer._utf8Write(this, string, offset, length);
|
239
|
+
case "ascii":
|
240
|
+
case "latin1":
|
241
|
+
case "binary":
|
242
|
+
return Buffer._asciiWrite(this, string, offset, length);
|
243
|
+
case "ucs2":
|
244
|
+
case "utf16le":
|
245
|
+
return Buffer._ucs2Write(this, string, offset, length);
|
246
|
+
case "base64":
|
247
|
+
return Buffer._base64Write(this, string, offset, length);
|
248
|
+
}
|
249
|
+
}
|
250
|
+
/**
|
251
|
+
* Decodes the buffer to a string according to the specified character encoding.
|
252
|
+
* Passing `start` and `end` will decode only a subset of the buffer.
|
253
|
+
*
|
254
|
+
* Note that if the encoding is `utf8` and a byte sequence in the input is not valid UTF-8, then each invalid byte
|
255
|
+
* will be replaced with `U+FFFD`.
|
256
|
+
*
|
257
|
+
* @param encoding
|
258
|
+
* @param start
|
259
|
+
* @param end
|
260
|
+
*/
|
261
|
+
toString(encoding, start, end) {
|
262
|
+
const length = this.length;
|
263
|
+
if (length === 0) {
|
264
|
+
return "";
|
265
|
+
}
|
266
|
+
if (arguments.length === 0) {
|
267
|
+
return Buffer._utf8Slice(this, 0, length);
|
268
|
+
}
|
269
|
+
if (typeof start === "undefined" || start < 0) {
|
270
|
+
start = 0;
|
271
|
+
}
|
272
|
+
if (start > this.length) {
|
273
|
+
return "";
|
274
|
+
}
|
275
|
+
if (typeof end === "undefined" || end > this.length) {
|
276
|
+
end = this.length;
|
277
|
+
}
|
278
|
+
if (end <= 0) {
|
279
|
+
return "";
|
280
|
+
}
|
281
|
+
end >>>= 0;
|
282
|
+
start >>>= 0;
|
283
|
+
if (end <= start) {
|
284
|
+
return "";
|
285
|
+
}
|
286
|
+
if (!encoding) {
|
287
|
+
encoding = "utf8";
|
288
|
+
}
|
289
|
+
switch (Buffer._getEncoding(encoding)) {
|
290
|
+
case "hex":
|
291
|
+
return Buffer._hexSlice(this, start, end);
|
292
|
+
case "utf8":
|
293
|
+
return Buffer._utf8Slice(this, start, end);
|
294
|
+
case "ascii":
|
295
|
+
return Buffer._asciiSlice(this, start, end);
|
296
|
+
case "latin1":
|
297
|
+
case "binary":
|
298
|
+
return Buffer._latin1Slice(this, start, end);
|
299
|
+
case "ucs2":
|
300
|
+
case "utf16le":
|
301
|
+
return Buffer._utf16leSlice(this, start, end);
|
302
|
+
case "base64":
|
303
|
+
return Buffer._base64Slice(this, start, end);
|
304
|
+
}
|
305
|
+
}
|
306
|
+
/**
|
307
|
+
* Returns true if this buffer's is equal to the provided buffer, meaning they share the same exact data.
|
308
|
+
*
|
309
|
+
* @param otherBuffer
|
310
|
+
*/
|
311
|
+
equals(otherBuffer) {
|
312
|
+
if (!Buffer.isBuffer(otherBuffer)) {
|
313
|
+
throw new TypeError("Argument must be a Buffer");
|
314
|
+
}
|
315
|
+
if (this === otherBuffer) {
|
316
|
+
return true;
|
317
|
+
}
|
318
|
+
return Buffer.compare(this, otherBuffer) === 0;
|
319
|
+
}
|
320
|
+
/**
|
321
|
+
* Compares the buffer with `otherBuffer` and returns a number indicating whether the buffer comes before, after,
|
322
|
+
* or is the same as `otherBuffer` in sort order. Comparison is based on the actual sequence of bytes in each
|
323
|
+
* buffer.
|
324
|
+
*
|
325
|
+
* - `0` is returned if `otherBuffer` is the same as this buffer.
|
326
|
+
* - `1` is returned if `otherBuffer` should come before this buffer when sorted.
|
327
|
+
* - `-1` is returned if `otherBuffer` should come after this buffer when sorted.
|
328
|
+
*
|
329
|
+
* @param otherBuffer The buffer to compare to.
|
330
|
+
* @param targetStart The offset within `otherBuffer` at which to begin comparison.
|
331
|
+
* @param targetEnd The offset within `otherBuffer` at which to end comparison (exclusive).
|
332
|
+
* @param sourceStart The offset within this buffer at which to begin comparison.
|
333
|
+
* @param sourceEnd The offset within this buffer at which to end the comparison (exclusive).
|
334
|
+
*/
|
335
|
+
compare(otherBuffer, targetStart, targetEnd, sourceStart, sourceEnd) {
|
336
|
+
if (Buffer._isInstance(otherBuffer, Uint8Array)) {
|
337
|
+
otherBuffer = Buffer.from(otherBuffer, otherBuffer.byteOffset, otherBuffer.byteLength);
|
338
|
+
}
|
339
|
+
if (!Buffer.isBuffer(otherBuffer)) {
|
340
|
+
throw new TypeError("Argument must be a Buffer or Uint8Array");
|
341
|
+
}
|
342
|
+
targetStart ?? (targetStart = 0);
|
343
|
+
targetEnd ?? (targetEnd = otherBuffer ? otherBuffer.length : 0);
|
344
|
+
sourceStart ?? (sourceStart = 0);
|
345
|
+
sourceEnd ?? (sourceEnd = this.length);
|
346
|
+
if (targetStart < 0 || targetEnd > otherBuffer.length || sourceStart < 0 || sourceEnd > this.length) {
|
347
|
+
throw new RangeError("Out of range index");
|
348
|
+
}
|
349
|
+
if (sourceStart >= sourceEnd && targetStart >= targetEnd) {
|
350
|
+
return 0;
|
351
|
+
}
|
352
|
+
if (sourceStart >= sourceEnd) {
|
353
|
+
return -1;
|
354
|
+
}
|
355
|
+
if (targetStart >= targetEnd) {
|
356
|
+
return 1;
|
357
|
+
}
|
358
|
+
targetStart >>>= 0;
|
359
|
+
targetEnd >>>= 0;
|
360
|
+
sourceStart >>>= 0;
|
361
|
+
sourceEnd >>>= 0;
|
362
|
+
if (this === otherBuffer) {
|
363
|
+
return 0;
|
364
|
+
}
|
365
|
+
let x = sourceEnd - sourceStart;
|
366
|
+
let y = targetEnd - targetStart;
|
367
|
+
const len = Math.min(x, y);
|
368
|
+
const thisCopy = this.slice(sourceStart, sourceEnd);
|
369
|
+
const targetCopy = otherBuffer.slice(targetStart, targetEnd);
|
370
|
+
for (let i = 0; i < len; ++i) {
|
371
|
+
if (thisCopy[i] !== targetCopy[i]) {
|
372
|
+
x = thisCopy[i];
|
373
|
+
y = targetCopy[i];
|
374
|
+
break;
|
375
|
+
}
|
376
|
+
}
|
377
|
+
if (x < y) return -1;
|
378
|
+
if (y < x) return 1;
|
379
|
+
return 0;
|
380
|
+
}
|
381
|
+
/**
|
382
|
+
* Copies data from a region of this buffer to a region in `targetBuffer`, even if the `targetBuffer` memory
|
383
|
+
* region overlaps with this buffer.
|
384
|
+
*
|
385
|
+
* @param targetBuffer The target buffer to copy into.
|
386
|
+
* @param targetStart The offset within `targetBuffer` at which to begin writing.
|
387
|
+
* @param sourceStart The offset within this buffer at which to begin copying.
|
388
|
+
* @param sourceEnd The offset within this buffer at which to end copying (exclusive).
|
389
|
+
*/
|
390
|
+
copy(targetBuffer, targetStart, sourceStart, sourceEnd) {
|
391
|
+
if (!Buffer.isBuffer(targetBuffer)) throw new TypeError("argument should be a Buffer");
|
392
|
+
if (!sourceStart) sourceStart = 0;
|
393
|
+
if (!targetStart) targetStart = 0;
|
394
|
+
if (!sourceEnd && sourceEnd !== 0) sourceEnd = this.length;
|
395
|
+
if (targetStart >= targetBuffer.length) targetStart = targetBuffer.length;
|
396
|
+
if (!targetStart) targetStart = 0;
|
397
|
+
if (sourceEnd > 0 && sourceEnd < sourceStart) sourceEnd = sourceStart;
|
398
|
+
if (sourceEnd === sourceStart) return 0;
|
399
|
+
if (targetBuffer.length === 0 || this.length === 0) return 0;
|
400
|
+
if (targetStart < 0) {
|
401
|
+
throw new RangeError("targetStart out of bounds");
|
402
|
+
}
|
403
|
+
if (sourceStart < 0 || sourceStart >= this.length) throw new RangeError("Index out of range");
|
404
|
+
if (sourceEnd < 0) throw new RangeError("sourceEnd out of bounds");
|
405
|
+
if (sourceEnd > this.length) sourceEnd = this.length;
|
406
|
+
if (targetBuffer.length - targetStart < sourceEnd - sourceStart) {
|
407
|
+
sourceEnd = targetBuffer.length - targetStart + sourceStart;
|
408
|
+
}
|
409
|
+
const len = sourceEnd - sourceStart;
|
410
|
+
if (this === targetBuffer && typeof Uint8Array.prototype.copyWithin === "function") {
|
411
|
+
this.copyWithin(targetStart, sourceStart, sourceEnd);
|
412
|
+
} else {
|
413
|
+
Uint8Array.prototype.set.call(targetBuffer, this.subarray(sourceStart, sourceEnd), targetStart);
|
414
|
+
}
|
415
|
+
return len;
|
416
|
+
}
|
417
|
+
/**
|
418
|
+
* Returns a new `Buffer` that references the same memory as the original, but offset and cropped by the `start`
|
419
|
+
* and `end` indices. This is the same behavior as `buf.subarray()`.
|
420
|
+
*
|
421
|
+
* This method is not compatible with the `Uint8Array.prototype.slice()`, which is a superclass of Buffer. To copy
|
422
|
+
* the slice, use `Uint8Array.prototype.slice()`.
|
423
|
+
*
|
424
|
+
* @param start
|
425
|
+
* @param end
|
426
|
+
*/
|
427
|
+
slice(start, end) {
|
428
|
+
if (!start) {
|
429
|
+
start = 0;
|
430
|
+
}
|
431
|
+
const len = this.length;
|
432
|
+
start = ~~start;
|
433
|
+
end = end === void 0 ? len : ~~end;
|
434
|
+
if (start < 0) {
|
435
|
+
start += len;
|
436
|
+
if (start < 0) {
|
437
|
+
start = 0;
|
438
|
+
}
|
439
|
+
} else if (start > len) {
|
440
|
+
start = len;
|
441
|
+
}
|
442
|
+
if (end < 0) {
|
443
|
+
end += len;
|
444
|
+
if (end < 0) {
|
445
|
+
end = 0;
|
446
|
+
}
|
447
|
+
} else if (end > len) {
|
448
|
+
end = len;
|
449
|
+
}
|
450
|
+
if (end < start) {
|
451
|
+
end = start;
|
452
|
+
}
|
453
|
+
const newBuf = this.subarray(start, end);
|
454
|
+
Object.setPrototypeOf(newBuf, Buffer.prototype);
|
455
|
+
return newBuf;
|
456
|
+
}
|
457
|
+
/**
|
458
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits
|
459
|
+
* of accuracy. Behavior is undefined when value is anything other than an unsigned integer.
|
460
|
+
*
|
461
|
+
* @param value Number to write.
|
462
|
+
* @param offset Number of bytes to skip before starting to write.
|
463
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
464
|
+
* @param noAssert
|
465
|
+
* @returns `offset` plus the number of bytes written.
|
466
|
+
*/
|
467
|
+
writeUIntLE(value, offset, byteLength, noAssert) {
|
468
|
+
value = +value;
|
469
|
+
offset = offset >>> 0;
|
470
|
+
byteLength = byteLength >>> 0;
|
471
|
+
if (!noAssert) {
|
472
|
+
const maxBytes = Math.pow(2, 8 * byteLength) - 1;
|
473
|
+
Buffer._checkInt(this, value, offset, byteLength, maxBytes, 0);
|
474
|
+
}
|
475
|
+
let mul = 1;
|
476
|
+
let i = 0;
|
477
|
+
this[offset] = value & 255;
|
478
|
+
while (++i < byteLength && (mul *= 256)) {
|
479
|
+
this[offset + i] = value / mul & 255;
|
480
|
+
}
|
481
|
+
return offset + byteLength;
|
482
|
+
}
|
483
|
+
/**
|
484
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits of
|
485
|
+
* accuracy. Behavior is undefined when `value` is anything other than an unsigned integer.
|
486
|
+
*
|
487
|
+
* @param value Number to write.
|
488
|
+
* @param offset Number of bytes to skip before starting to write.
|
489
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
490
|
+
* @param noAssert
|
491
|
+
* @returns `offset` plus the number of bytes written.
|
492
|
+
*/
|
493
|
+
writeUIntBE(value, offset, byteLength, noAssert) {
|
494
|
+
value = +value;
|
495
|
+
offset = offset >>> 0;
|
496
|
+
byteLength = byteLength >>> 0;
|
497
|
+
if (!noAssert) {
|
498
|
+
const maxBytes = Math.pow(2, 8 * byteLength) - 1;
|
499
|
+
Buffer._checkInt(this, value, offset, byteLength, maxBytes, 0);
|
500
|
+
}
|
501
|
+
let i = byteLength - 1;
|
502
|
+
let mul = 1;
|
503
|
+
this[offset + i] = value & 255;
|
504
|
+
while (--i >= 0 && (mul *= 256)) {
|
505
|
+
this[offset + i] = value / mul & 255;
|
506
|
+
}
|
507
|
+
return offset + byteLength;
|
508
|
+
}
|
509
|
+
/**
|
510
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits
|
511
|
+
* of accuracy. Behavior is undefined when `value` is anything other than a signed integer.
|
512
|
+
*
|
513
|
+
* @param value Number to write.
|
514
|
+
* @param offset Number of bytes to skip before starting to write.
|
515
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
516
|
+
* @param noAssert
|
517
|
+
* @returns `offset` plus the number of bytes written.
|
518
|
+
*/
|
519
|
+
writeIntLE(value, offset, byteLength, noAssert) {
|
520
|
+
value = +value;
|
521
|
+
offset = offset >>> 0;
|
522
|
+
if (!noAssert) {
|
523
|
+
const limit = Math.pow(2, 8 * byteLength - 1);
|
524
|
+
Buffer._checkInt(this, value, offset, byteLength, limit - 1, -limit);
|
525
|
+
}
|
526
|
+
let i = 0;
|
527
|
+
let mul = 1;
|
528
|
+
let sub = 0;
|
529
|
+
this[offset] = value & 255;
|
530
|
+
while (++i < byteLength && (mul *= 256)) {
|
531
|
+
if (value < 0 && sub === 0 && this[offset + i - 1] !== 0) {
|
532
|
+
sub = 1;
|
533
|
+
}
|
534
|
+
this[offset + i] = (value / mul >> 0) - sub & 255;
|
535
|
+
}
|
536
|
+
return offset + byteLength;
|
537
|
+
}
|
538
|
+
/**
|
539
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits
|
540
|
+
* of accuracy. Behavior is undefined when `value` is anything other than a signed integer.
|
541
|
+
*
|
542
|
+
* @param value Number to write.
|
543
|
+
* @param offset Number of bytes to skip before starting to write.
|
544
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
545
|
+
* @param noAssert
|
546
|
+
* @returns `offset` plus the number of bytes written.
|
547
|
+
*/
|
548
|
+
writeIntBE(value, offset, byteLength, noAssert) {
|
549
|
+
value = +value;
|
550
|
+
offset = offset >>> 0;
|
551
|
+
if (!noAssert) {
|
552
|
+
const limit = Math.pow(2, 8 * byteLength - 1);
|
553
|
+
Buffer._checkInt(this, value, offset, byteLength, limit - 1, -limit);
|
554
|
+
}
|
555
|
+
let i = byteLength - 1;
|
556
|
+
let mul = 1;
|
557
|
+
let sub = 0;
|
558
|
+
this[offset + i] = value & 255;
|
559
|
+
while (--i >= 0 && (mul *= 256)) {
|
560
|
+
if (value < 0 && sub === 0 && this[offset + i + 1] !== 0) {
|
561
|
+
sub = 1;
|
562
|
+
}
|
563
|
+
this[offset + i] = (value / mul >> 0) - sub & 255;
|
564
|
+
}
|
565
|
+
return offset + byteLength;
|
566
|
+
}
|
567
|
+
/**
|
568
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an
|
569
|
+
* unsigned, little-endian integer supporting up to 48 bits of accuracy.
|
570
|
+
*
|
571
|
+
* @param offset Number of bytes to skip before starting to read.
|
572
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
573
|
+
* @param noAssert
|
574
|
+
*/
|
575
|
+
readUIntLE(offset, byteLength, noAssert) {
|
576
|
+
offset = offset >>> 0;
|
577
|
+
byteLength = byteLength >>> 0;
|
578
|
+
if (!noAssert) {
|
579
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
580
|
+
}
|
581
|
+
let val = this[offset];
|
582
|
+
let mul = 1;
|
583
|
+
let i = 0;
|
584
|
+
while (++i < byteLength && (mul *= 256)) {
|
585
|
+
val += this[offset + i] * mul;
|
586
|
+
}
|
587
|
+
return val;
|
588
|
+
}
|
589
|
+
/**
|
590
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an
|
591
|
+
* unsigned, big-endian integer supporting up to 48 bits of accuracy.
|
592
|
+
*
|
593
|
+
* @param offset Number of bytes to skip before starting to read.
|
594
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
595
|
+
* @param noAssert
|
596
|
+
*/
|
597
|
+
readUIntBE(offset, byteLength, noAssert) {
|
598
|
+
offset = offset >>> 0;
|
599
|
+
byteLength = byteLength >>> 0;
|
600
|
+
if (!noAssert) {
|
601
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
602
|
+
}
|
603
|
+
let val = this[offset + --byteLength];
|
604
|
+
let mul = 1;
|
605
|
+
while (byteLength > 0 && (mul *= 256)) {
|
606
|
+
val += this[offset + --byteLength] * mul;
|
607
|
+
}
|
608
|
+
return val;
|
609
|
+
}
|
610
|
+
/**
|
611
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a
|
612
|
+
* little-endian, two's complement signed value supporting up to 48 bits of accuracy.
|
613
|
+
*
|
614
|
+
* @param offset Number of bytes to skip before starting to read.
|
615
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
616
|
+
* @param noAssert
|
617
|
+
*/
|
618
|
+
readIntLE(offset, byteLength, noAssert) {
|
619
|
+
offset = offset >>> 0;
|
620
|
+
byteLength = byteLength >>> 0;
|
621
|
+
if (!noAssert) {
|
622
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
623
|
+
}
|
624
|
+
let val = this[offset];
|
625
|
+
let mul = 1;
|
626
|
+
let i = 0;
|
627
|
+
while (++i < byteLength && (mul *= 256)) {
|
628
|
+
val += this[offset + i] * mul;
|
629
|
+
}
|
630
|
+
mul *= 128;
|
631
|
+
if (val >= mul) {
|
632
|
+
val -= Math.pow(2, 8 * byteLength);
|
633
|
+
}
|
634
|
+
return val;
|
635
|
+
}
|
636
|
+
/**
|
637
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a
|
638
|
+
* big-endian, two's complement signed value supporting up to 48 bits of accuracy.
|
639
|
+
*
|
640
|
+
* @param offset Number of bytes to skip before starting to read.
|
641
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
642
|
+
* @param noAssert
|
643
|
+
*/
|
644
|
+
readIntBE(offset, byteLength, noAssert) {
|
645
|
+
offset = offset >>> 0;
|
646
|
+
byteLength = byteLength >>> 0;
|
647
|
+
if (!noAssert) {
|
648
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
649
|
+
}
|
650
|
+
let i = byteLength;
|
651
|
+
let mul = 1;
|
652
|
+
let val = this[offset + --i];
|
653
|
+
while (i > 0 && (mul *= 256)) {
|
654
|
+
val += this[offset + --i] * mul;
|
655
|
+
}
|
656
|
+
mul *= 128;
|
657
|
+
if (val >= mul) {
|
658
|
+
val -= Math.pow(2, 8 * byteLength);
|
659
|
+
}
|
660
|
+
return val;
|
661
|
+
}
|
662
|
+
/**
|
663
|
+
* Reads an unsigned 8-bit integer from `buf` at the specified `offset`.
|
664
|
+
*
|
665
|
+
* @param offset Number of bytes to skip before starting to read.
|
666
|
+
* @param noAssert
|
667
|
+
*/
|
668
|
+
readUInt8(offset, noAssert) {
|
669
|
+
offset = offset >>> 0;
|
670
|
+
if (!noAssert) {
|
671
|
+
Buffer._checkOffset(offset, 1, this.length);
|
672
|
+
}
|
673
|
+
return this[offset];
|
674
|
+
}
|
675
|
+
/**
|
676
|
+
* Reads an unsigned, little-endian 16-bit integer from `buf` at the specified `offset`.
|
677
|
+
*
|
678
|
+
* @param offset Number of bytes to skip before starting to read.
|
679
|
+
* @param noAssert
|
680
|
+
*/
|
681
|
+
readUInt16LE(offset, noAssert) {
|
682
|
+
offset = offset >>> 0;
|
683
|
+
if (!noAssert) {
|
684
|
+
Buffer._checkOffset(offset, 2, this.length);
|
685
|
+
}
|
686
|
+
return this[offset] | this[offset + 1] << 8;
|
687
|
+
}
|
688
|
+
/**
|
689
|
+
* Reads an unsigned, big-endian 16-bit integer from `buf` at the specified `offset`.
|
690
|
+
*
|
691
|
+
* @param offset Number of bytes to skip before starting to read.
|
692
|
+
* @param noAssert
|
693
|
+
*/
|
694
|
+
readUInt16BE(offset, noAssert) {
|
695
|
+
offset = offset >>> 0;
|
696
|
+
if (!noAssert) {
|
697
|
+
Buffer._checkOffset(offset, 2, this.length);
|
698
|
+
}
|
699
|
+
return this[offset] << 8 | this[offset + 1];
|
700
|
+
}
|
701
|
+
/**
|
702
|
+
* Reads an unsigned, little-endian 32-bit integer from `buf` at the specified `offset`.
|
703
|
+
*
|
704
|
+
* @param offset Number of bytes to skip before starting to read.
|
705
|
+
* @param noAssert
|
706
|
+
*/
|
707
|
+
readUInt32LE(offset, noAssert) {
|
708
|
+
offset = offset >>> 0;
|
709
|
+
if (!noAssert) {
|
710
|
+
Buffer._checkOffset(offset, 4, this.length);
|
711
|
+
}
|
712
|
+
return (this[offset] | this[offset + 1] << 8 | this[offset + 2] << 16) + this[offset + 3] * 16777216;
|
713
|
+
}
|
714
|
+
/**
|
715
|
+
* Reads an unsigned, big-endian 32-bit integer from `buf` at the specified `offset`.
|
716
|
+
*
|
717
|
+
* @param offset Number of bytes to skip before starting to read.
|
718
|
+
* @param noAssert
|
719
|
+
*/
|
720
|
+
readUInt32BE(offset, noAssert) {
|
721
|
+
offset = offset >>> 0;
|
722
|
+
if (!noAssert) {
|
723
|
+
Buffer._checkOffset(offset, 4, this.length);
|
724
|
+
}
|
725
|
+
return this[offset] * 16777216 + (this[offset + 1] << 16 | this[offset + 2] << 8 | this[offset + 3]);
|
726
|
+
}
|
727
|
+
/**
|
728
|
+
* Reads a signed 8-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer` are interpreted
|
729
|
+
* as two's complement signed values.
|
730
|
+
*
|
731
|
+
* @param offset Number of bytes to skip before starting to read.
|
732
|
+
* @param noAssert
|
733
|
+
*/
|
734
|
+
readInt8(offset, noAssert) {
|
735
|
+
offset = offset >>> 0;
|
736
|
+
if (!noAssert) {
|
737
|
+
Buffer._checkOffset(offset, 1, this.length);
|
738
|
+
}
|
739
|
+
if (!(this[offset] & 128)) {
|
740
|
+
return this[offset];
|
741
|
+
}
|
742
|
+
return (255 - this[offset] + 1) * -1;
|
743
|
+
}
|
744
|
+
/**
|
745
|
+
* Reads a signed, little-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
746
|
+
* are interpreted as two's complement signed values.
|
747
|
+
*
|
748
|
+
* @param offset Number of bytes to skip before starting to read.
|
749
|
+
* @param noAssert
|
750
|
+
*/
|
751
|
+
readInt16LE(offset, noAssert) {
|
752
|
+
offset = offset >>> 0;
|
753
|
+
if (!noAssert) {
|
754
|
+
Buffer._checkOffset(offset, 2, this.length);
|
755
|
+
}
|
756
|
+
const val = this[offset] | this[offset + 1] << 8;
|
757
|
+
return val & 32768 ? val | 4294901760 : val;
|
758
|
+
}
|
759
|
+
/**
|
760
|
+
* Reads a signed, big-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
761
|
+
* are interpreted as two's complement signed values.
|
762
|
+
*
|
763
|
+
* @param offset Number of bytes to skip before starting to read.
|
764
|
+
* @param noAssert
|
765
|
+
*/
|
766
|
+
readInt16BE(offset, noAssert) {
|
767
|
+
offset = offset >>> 0;
|
768
|
+
if (!noAssert) {
|
769
|
+
Buffer._checkOffset(offset, 2, this.length);
|
770
|
+
}
|
771
|
+
const val = this[offset + 1] | this[offset] << 8;
|
772
|
+
return val & 32768 ? val | 4294901760 : val;
|
773
|
+
}
|
774
|
+
/**
|
775
|
+
* Reads a signed, little-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
776
|
+
* are interpreted as two's complement signed values.
|
777
|
+
*
|
778
|
+
* @param offset Number of bytes to skip before starting to read.
|
779
|
+
* @param noAssert
|
780
|
+
*/
|
781
|
+
readInt32LE(offset, noAssert) {
|
782
|
+
offset = offset >>> 0;
|
783
|
+
if (!noAssert) {
|
784
|
+
Buffer._checkOffset(offset, 4, this.length);
|
785
|
+
}
|
786
|
+
return this[offset] | this[offset + 1] << 8 | this[offset + 2] << 16 | this[offset + 3] << 24;
|
787
|
+
}
|
788
|
+
/**
|
789
|
+
* Reads a signed, big-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
790
|
+
* are interpreted as two's complement signed values.
|
791
|
+
*
|
792
|
+
* @param offset Number of bytes to skip before starting to read.
|
793
|
+
* @param noAssert
|
794
|
+
*/
|
795
|
+
readInt32BE(offset, noAssert) {
|
796
|
+
offset = offset >>> 0;
|
797
|
+
if (!noAssert) {
|
798
|
+
Buffer._checkOffset(offset, 4, this.length);
|
799
|
+
}
|
800
|
+
return this[offset] << 24 | this[offset + 1] << 16 | this[offset + 2] << 8 | this[offset + 3];
|
801
|
+
}
|
802
|
+
/**
|
803
|
+
* Interprets `buf` as an array of unsigned 16-bit integers and swaps the byte order in-place.
|
804
|
+
* Throws a `RangeError` if `buf.length` is not a multiple of 2.
|
805
|
+
*/
|
806
|
+
swap16() {
|
807
|
+
const len = this.length;
|
808
|
+
if (len % 2 !== 0) {
|
809
|
+
throw new RangeError("Buffer size must be a multiple of 16-bits");
|
810
|
+
}
|
811
|
+
for (let i = 0; i < len; i += 2) {
|
812
|
+
this._swap(this, i, i + 1);
|
813
|
+
}
|
814
|
+
return this;
|
815
|
+
}
|
816
|
+
/**
|
817
|
+
* Interprets `buf` as an array of unsigned 32-bit integers and swaps the byte order in-place.
|
818
|
+
* Throws a `RangeError` if `buf.length` is not a multiple of 4.
|
819
|
+
*/
|
820
|
+
swap32() {
|
821
|
+
const len = this.length;
|
822
|
+
if (len % 4 !== 0) {
|
823
|
+
throw new RangeError("Buffer size must be a multiple of 32-bits");
|
824
|
+
}
|
825
|
+
for (let i = 0; i < len; i += 4) {
|
826
|
+
this._swap(this, i, i + 3);
|
827
|
+
this._swap(this, i + 1, i + 2);
|
828
|
+
}
|
829
|
+
return this;
|
830
|
+
}
|
831
|
+
/**
|
832
|
+
* Interprets `buf` as an array of unsigned 64-bit integers and swaps the byte order in-place.
|
833
|
+
* Throws a `RangeError` if `buf.length` is not a multiple of 8.
|
834
|
+
*/
|
835
|
+
swap64() {
|
836
|
+
const len = this.length;
|
837
|
+
if (len % 8 !== 0) {
|
838
|
+
throw new RangeError("Buffer size must be a multiple of 64-bits");
|
839
|
+
}
|
840
|
+
for (let i = 0; i < len; i += 8) {
|
841
|
+
this._swap(this, i, i + 7);
|
842
|
+
this._swap(this, i + 1, i + 6);
|
843
|
+
this._swap(this, i + 2, i + 5);
|
844
|
+
this._swap(this, i + 3, i + 4);
|
845
|
+
}
|
846
|
+
return this;
|
847
|
+
}
|
848
|
+
/**
|
849
|
+
* Swaps two octets.
|
850
|
+
*
|
851
|
+
* @param b
|
852
|
+
* @param n
|
853
|
+
* @param m
|
854
|
+
*/
|
855
|
+
_swap(b, n, m) {
|
856
|
+
const i = b[n];
|
857
|
+
b[n] = b[m];
|
858
|
+
b[m] = i;
|
859
|
+
}
|
860
|
+
/**
|
861
|
+
* Writes `value` to `buf` at the specified `offset`. The `value` must be a valid unsigned 8-bit integer.
|
862
|
+
* Behavior is undefined when `value` is anything other than an unsigned 8-bit integer.
|
863
|
+
*
|
864
|
+
* @param value Number to write.
|
865
|
+
* @param offset Number of bytes to skip before starting to write.
|
866
|
+
* @param noAssert
|
867
|
+
* @returns `offset` plus the number of bytes written.
|
868
|
+
*/
|
869
|
+
writeUInt8(value, offset, noAssert) {
|
870
|
+
value = +value;
|
871
|
+
offset = offset >>> 0;
|
872
|
+
if (!noAssert) {
|
873
|
+
Buffer._checkInt(this, value, offset, 1, 255, 0);
|
874
|
+
}
|
875
|
+
this[offset] = value & 255;
|
876
|
+
return offset + 1;
|
877
|
+
}
|
878
|
+
/**
|
879
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 16-bit
|
880
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 16-bit integer.
|
881
|
+
*
|
882
|
+
* @param value Number to write.
|
883
|
+
* @param offset Number of bytes to skip before starting to write.
|
884
|
+
* @param noAssert
|
885
|
+
* @returns `offset` plus the number of bytes written.
|
886
|
+
*/
|
887
|
+
writeUInt16LE(value, offset, noAssert) {
|
888
|
+
value = +value;
|
889
|
+
offset = offset >>> 0;
|
890
|
+
if (!noAssert) {
|
891
|
+
Buffer._checkInt(this, value, offset, 2, 65535, 0);
|
892
|
+
}
|
893
|
+
this[offset] = value & 255;
|
894
|
+
this[offset + 1] = value >>> 8;
|
895
|
+
return offset + 2;
|
896
|
+
}
|
897
|
+
/**
|
898
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 16-bit
|
899
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 16-bit integer.
|
900
|
+
*
|
901
|
+
* @param value Number to write.
|
902
|
+
* @param offset Number of bytes to skip before starting to write.
|
903
|
+
* @param noAssert
|
904
|
+
* @returns `offset` plus the number of bytes written.
|
905
|
+
*/
|
906
|
+
writeUInt16BE(value, offset, noAssert) {
|
907
|
+
value = +value;
|
908
|
+
offset = offset >>> 0;
|
909
|
+
if (!noAssert) {
|
910
|
+
Buffer._checkInt(this, value, offset, 2, 65535, 0);
|
911
|
+
}
|
912
|
+
this[offset] = value >>> 8;
|
913
|
+
this[offset + 1] = value & 255;
|
914
|
+
return offset + 2;
|
915
|
+
}
|
916
|
+
/**
|
917
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 32-bit
|
918
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 32-bit integer.
|
919
|
+
*
|
920
|
+
* @param value Number to write.
|
921
|
+
* @param offset Number of bytes to skip before starting to write.
|
922
|
+
* @param noAssert
|
923
|
+
* @returns `offset` plus the number of bytes written.
|
924
|
+
*/
|
925
|
+
writeUInt32LE(value, offset, noAssert) {
|
926
|
+
value = +value;
|
927
|
+
offset = offset >>> 0;
|
928
|
+
if (!noAssert) {
|
929
|
+
Buffer._checkInt(this, value, offset, 4, 4294967295, 0);
|
930
|
+
}
|
931
|
+
this[offset + 3] = value >>> 24;
|
932
|
+
this[offset + 2] = value >>> 16;
|
933
|
+
this[offset + 1] = value >>> 8;
|
934
|
+
this[offset] = value & 255;
|
935
|
+
return offset + 4;
|
936
|
+
}
|
937
|
+
/**
|
938
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 32-bit
|
939
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 32-bit integer.
|
940
|
+
*
|
941
|
+
* @param value Number to write.
|
942
|
+
* @param offset Number of bytes to skip before starting to write.
|
943
|
+
* @param noAssert
|
944
|
+
* @returns `offset` plus the number of bytes written.
|
945
|
+
*/
|
946
|
+
writeUInt32BE(value, offset, noAssert) {
|
947
|
+
value = +value;
|
948
|
+
offset = offset >>> 0;
|
949
|
+
if (!noAssert) {
|
950
|
+
Buffer._checkInt(this, value, offset, 4, 4294967295, 0);
|
951
|
+
}
|
952
|
+
this[offset] = value >>> 24;
|
953
|
+
this[offset + 1] = value >>> 16;
|
954
|
+
this[offset + 2] = value >>> 8;
|
955
|
+
this[offset + 3] = value & 255;
|
956
|
+
return offset + 4;
|
957
|
+
}
|
958
|
+
/**
|
959
|
+
* Writes `value` to `buf` at the specified `offset`. The `value` must be a valid signed 8-bit integer.
|
960
|
+
* Behavior is undefined when `value` is anything other than a signed 8-bit integer.
|
961
|
+
*
|
962
|
+
* @param value Number to write.
|
963
|
+
* @param offset Number of bytes to skip before starting to write.
|
964
|
+
* @param noAssert
|
965
|
+
* @returns `offset` plus the number of bytes written.
|
966
|
+
*/
|
967
|
+
writeInt8(value, offset, noAssert) {
|
968
|
+
value = +value;
|
969
|
+
offset = offset >>> 0;
|
970
|
+
if (!noAssert) {
|
971
|
+
Buffer._checkInt(this, value, offset, 1, 127, -128);
|
972
|
+
}
|
973
|
+
if (value < 0) {
|
974
|
+
value = 255 + value + 1;
|
975
|
+
}
|
976
|
+
this[offset] = value & 255;
|
977
|
+
return offset + 1;
|
978
|
+
}
|
979
|
+
/**
|
980
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 16-bit
|
981
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 16-bit integer.
|
982
|
+
*
|
983
|
+
* @param value Number to write.
|
984
|
+
* @param offset Number of bytes to skip before starting to write.
|
985
|
+
* @param noAssert
|
986
|
+
* @returns `offset` plus the number of bytes written.
|
987
|
+
*/
|
988
|
+
writeInt16LE(value, offset, noAssert) {
|
989
|
+
value = +value;
|
990
|
+
offset = offset >>> 0;
|
991
|
+
if (!noAssert) {
|
992
|
+
Buffer._checkInt(this, value, offset, 2, 32767, -32768);
|
993
|
+
}
|
994
|
+
this[offset] = value & 255;
|
995
|
+
this[offset + 1] = value >>> 8;
|
996
|
+
return offset + 2;
|
997
|
+
}
|
998
|
+
/**
|
999
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 16-bit
|
1000
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 16-bit integer.
|
1001
|
+
*
|
1002
|
+
* @param value Number to write.
|
1003
|
+
* @param offset Number of bytes to skip before starting to write.
|
1004
|
+
* @param noAssert
|
1005
|
+
* @returns `offset` plus the number of bytes written.
|
1006
|
+
*/
|
1007
|
+
writeInt16BE(value, offset, noAssert) {
|
1008
|
+
value = +value;
|
1009
|
+
offset = offset >>> 0;
|
1010
|
+
if (!noAssert) {
|
1011
|
+
Buffer._checkInt(this, value, offset, 2, 32767, -32768);
|
1012
|
+
}
|
1013
|
+
this[offset] = value >>> 8;
|
1014
|
+
this[offset + 1] = value & 255;
|
1015
|
+
return offset + 2;
|
1016
|
+
}
|
1017
|
+
/**
|
1018
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 32-bit
|
1019
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 32-bit integer.
|
1020
|
+
*
|
1021
|
+
* @param value Number to write.
|
1022
|
+
* @param offset Number of bytes to skip before starting to write.
|
1023
|
+
* @param noAssert
|
1024
|
+
* @returns `offset` plus the number of bytes written.
|
1025
|
+
*/
|
1026
|
+
writeInt32LE(value, offset, noAssert) {
|
1027
|
+
value = +value;
|
1028
|
+
offset = offset >>> 0;
|
1029
|
+
if (!noAssert) {
|
1030
|
+
Buffer._checkInt(this, value, offset, 4, 2147483647, -2147483648);
|
1031
|
+
}
|
1032
|
+
this[offset] = value & 255;
|
1033
|
+
this[offset + 1] = value >>> 8;
|
1034
|
+
this[offset + 2] = value >>> 16;
|
1035
|
+
this[offset + 3] = value >>> 24;
|
1036
|
+
return offset + 4;
|
1037
|
+
}
|
1038
|
+
/**
|
1039
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 32-bit
|
1040
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 32-bit integer.
|
1041
|
+
*
|
1042
|
+
* @param value Number to write.
|
1043
|
+
* @param offset Number of bytes to skip before starting to write.
|
1044
|
+
* @param noAssert
|
1045
|
+
* @returns `offset` plus the number of bytes written.
|
1046
|
+
*/
|
1047
|
+
writeInt32BE(value, offset, noAssert) {
|
1048
|
+
value = +value;
|
1049
|
+
offset = offset >>> 0;
|
1050
|
+
if (!noAssert) {
|
1051
|
+
Buffer._checkInt(this, value, offset, 4, 2147483647, -2147483648);
|
1052
|
+
}
|
1053
|
+
if (value < 0) {
|
1054
|
+
value = 4294967295 + value + 1;
|
1055
|
+
}
|
1056
|
+
this[offset] = value >>> 24;
|
1057
|
+
this[offset + 1] = value >>> 16;
|
1058
|
+
this[offset + 2] = value >>> 8;
|
1059
|
+
this[offset + 3] = value & 255;
|
1060
|
+
return offset + 4;
|
1061
|
+
}
|
1062
|
+
/**
|
1063
|
+
* Fills `buf` with the specified `value`. If the `offset` and `end` are not given, the entire `buf` will be
|
1064
|
+
* filled. The `value` is coerced to a `uint32` value if it is not a string, `Buffer`, or integer. If the resulting
|
1065
|
+
* integer is greater than `255` (decimal), then `buf` will be filled with `value & 255`.
|
1066
|
+
*
|
1067
|
+
* If the final write of a `fill()` operation falls on a multi-byte character, then only the bytes of that
|
1068
|
+
* character that fit into `buf` are written.
|
1069
|
+
*
|
1070
|
+
* If `value` contains invalid characters, it is truncated; if no valid fill data remains, an exception is thrown.
|
1071
|
+
*
|
1072
|
+
* @param value
|
1073
|
+
* @param encoding
|
1074
|
+
*/
|
1075
|
+
fill(value, offset, end, encoding) {
|
1076
|
+
if (typeof value === "string") {
|
1077
|
+
if (typeof offset === "string") {
|
1078
|
+
encoding = offset;
|
1079
|
+
offset = 0;
|
1080
|
+
end = this.length;
|
1081
|
+
} else if (typeof end === "string") {
|
1082
|
+
encoding = end;
|
1083
|
+
end = this.length;
|
1084
|
+
}
|
1085
|
+
if (encoding !== void 0 && typeof encoding !== "string") {
|
1086
|
+
throw new TypeError("encoding must be a string");
|
1087
|
+
}
|
1088
|
+
if (typeof encoding === "string" && !Buffer.isEncoding(encoding)) {
|
1089
|
+
throw new TypeError("Unknown encoding: " + encoding);
|
1090
|
+
}
|
1091
|
+
if (value.length === 1) {
|
1092
|
+
const code = value.charCodeAt(0);
|
1093
|
+
if (encoding === "utf8" && code < 128) {
|
1094
|
+
value = code;
|
1095
|
+
}
|
1096
|
+
}
|
1097
|
+
} else if (typeof value === "number") {
|
1098
|
+
value = value & 255;
|
1099
|
+
} else if (typeof value === "boolean") {
|
1100
|
+
value = Number(value);
|
1101
|
+
}
|
1102
|
+
offset ?? (offset = 0);
|
1103
|
+
end ?? (end = this.length);
|
1104
|
+
if (offset < 0 || this.length < offset || this.length < end) {
|
1105
|
+
throw new RangeError("Out of range index");
|
1106
|
+
}
|
1107
|
+
if (end <= offset) {
|
1108
|
+
return this;
|
1109
|
+
}
|
1110
|
+
offset = offset >>> 0;
|
1111
|
+
end = end === void 0 ? this.length : end >>> 0;
|
1112
|
+
value || (value = 0);
|
1113
|
+
let i;
|
1114
|
+
if (typeof value === "number") {
|
1115
|
+
for (i = offset; i < end; ++i) {
|
1116
|
+
this[i] = value;
|
1117
|
+
}
|
1118
|
+
} else {
|
1119
|
+
const bytes = Buffer.isBuffer(value) ? value : Buffer.from(value, encoding);
|
1120
|
+
const len = bytes.length;
|
1121
|
+
if (len === 0) {
|
1122
|
+
throw new TypeError('The value "' + value + '" is invalid for argument "value"');
|
1123
|
+
}
|
1124
|
+
for (i = 0; i < end - offset; ++i) {
|
1125
|
+
this[i + offset] = bytes[i % len];
|
1126
|
+
}
|
1127
|
+
}
|
1128
|
+
return this;
|
1129
|
+
}
|
1130
|
+
/**
|
1131
|
+
* Returns the index of the specified value.
|
1132
|
+
*
|
1133
|
+
* If `value` is:
|
1134
|
+
* - a string, `value` is interpreted according to the character encoding in `encoding`.
|
1135
|
+
* - a `Buffer` or `Uint8Array`, `value` will be used in its entirety. To compare a partial Buffer, use `slice()`.
|
1136
|
+
* - a number, `value` will be interpreted as an unsigned 8-bit integer value between `0` and `255`.
|
1137
|
+
*
|
1138
|
+
* Any other types will throw a `TypeError`.
|
1139
|
+
*
|
1140
|
+
* @param value What to search for.
|
1141
|
+
* @param byteOffset Where to begin searching in `buf`. If negative, then calculated from the end.
|
1142
|
+
* @param encoding If `value` is a string, this is the encoding used to search.
|
1143
|
+
* @returns The index of the first occurrence of `value` in `buf`, or `-1` if not found.
|
1144
|
+
*/
|
1145
|
+
indexOf(value, byteOffset, encoding) {
|
1146
|
+
return this._bidirectionalIndexOf(this, value, byteOffset, encoding, true);
|
1147
|
+
}
|
1148
|
+
/**
|
1149
|
+
* Gets the last index of the specified value.
|
1150
|
+
*
|
1151
|
+
* @see indexOf()
|
1152
|
+
* @param value
|
1153
|
+
* @param byteOffset
|
1154
|
+
* @param encoding
|
1155
|
+
*/
|
1156
|
+
lastIndexOf(value, byteOffset, encoding) {
|
1157
|
+
return this._bidirectionalIndexOf(this, value, byteOffset, encoding, false);
|
1158
|
+
}
|
1159
|
+
_bidirectionalIndexOf(buffer, val, byteOffset, encoding, dir) {
|
1160
|
+
if (buffer.length === 0) {
|
1161
|
+
return -1;
|
1162
|
+
}
|
1163
|
+
if (typeof byteOffset === "string") {
|
1164
|
+
encoding = byteOffset;
|
1165
|
+
byteOffset = 0;
|
1166
|
+
} else if (typeof byteOffset === "undefined") {
|
1167
|
+
byteOffset = 0;
|
1168
|
+
} else if (byteOffset > 2147483647) {
|
1169
|
+
byteOffset = 2147483647;
|
1170
|
+
} else if (byteOffset < -2147483648) {
|
1171
|
+
byteOffset = -2147483648;
|
1172
|
+
}
|
1173
|
+
byteOffset = +byteOffset;
|
1174
|
+
if (byteOffset !== byteOffset) {
|
1175
|
+
byteOffset = dir ? 0 : buffer.length - 1;
|
1176
|
+
}
|
1177
|
+
if (byteOffset < 0) {
|
1178
|
+
byteOffset = buffer.length + byteOffset;
|
1179
|
+
}
|
1180
|
+
if (byteOffset >= buffer.length) {
|
1181
|
+
if (dir) {
|
1182
|
+
return -1;
|
1183
|
+
} else {
|
1184
|
+
byteOffset = buffer.length - 1;
|
1185
|
+
}
|
1186
|
+
} else if (byteOffset < 0) {
|
1187
|
+
if (dir) {
|
1188
|
+
byteOffset = 0;
|
1189
|
+
} else {
|
1190
|
+
return -1;
|
1191
|
+
}
|
1192
|
+
}
|
1193
|
+
if (typeof val === "string") {
|
1194
|
+
val = Buffer.from(val, encoding);
|
1195
|
+
}
|
1196
|
+
if (Buffer.isBuffer(val)) {
|
1197
|
+
if (val.length === 0) {
|
1198
|
+
return -1;
|
1199
|
+
}
|
1200
|
+
return Buffer._arrayIndexOf(buffer, val, byteOffset, encoding, dir);
|
1201
|
+
} else if (typeof val === "number") {
|
1202
|
+
val = val & 255;
|
1203
|
+
if (typeof Uint8Array.prototype.indexOf === "function") {
|
1204
|
+
if (dir) {
|
1205
|
+
return Uint8Array.prototype.indexOf.call(buffer, val, byteOffset);
|
1206
|
+
} else {
|
1207
|
+
return Uint8Array.prototype.lastIndexOf.call(buffer, val, byteOffset);
|
1208
|
+
}
|
1209
|
+
}
|
1210
|
+
return Buffer._arrayIndexOf(buffer, Buffer.from([val]), byteOffset, encoding, dir);
|
1211
|
+
}
|
1212
|
+
throw new TypeError("val must be string, number or Buffer");
|
1213
|
+
}
|
1214
|
+
/**
|
1215
|
+
* Equivalent to `buf.indexOf() !== -1`.
|
1216
|
+
*
|
1217
|
+
* @param value
|
1218
|
+
* @param byteOffset
|
1219
|
+
* @param encoding
|
1220
|
+
*/
|
1221
|
+
includes(value, byteOffset, encoding) {
|
1222
|
+
return this.indexOf(value, byteOffset, encoding) !== -1;
|
1223
|
+
}
|
1224
|
+
/**
|
1225
|
+
* Creates a new buffer from the given parameters.
|
1226
|
+
*
|
1227
|
+
* @param data
|
1228
|
+
* @param encoding
|
1229
|
+
*/
|
1230
|
+
static from(a, b, c) {
|
1231
|
+
return new Buffer(a, b, c);
|
1232
|
+
}
|
1233
|
+
/**
|
1234
|
+
* Returns true if `obj` is a Buffer.
|
1235
|
+
*
|
1236
|
+
* @param obj
|
1237
|
+
*/
|
1238
|
+
static isBuffer(obj) {
|
1239
|
+
return obj != null && obj !== Buffer.prototype && Buffer._isInstance(obj, Buffer);
|
1240
|
+
}
|
1241
|
+
/**
|
1242
|
+
* Returns true if `encoding` is a supported encoding.
|
1243
|
+
*
|
1244
|
+
* @param encoding
|
1245
|
+
*/
|
1246
|
+
static isEncoding(encoding) {
|
1247
|
+
switch (encoding.toLowerCase()) {
|
1248
|
+
case "hex":
|
1249
|
+
case "utf8":
|
1250
|
+
case "ascii":
|
1251
|
+
case "binary":
|
1252
|
+
case "latin1":
|
1253
|
+
case "ucs2":
|
1254
|
+
case "utf16le":
|
1255
|
+
case "base64":
|
1256
|
+
return true;
|
1257
|
+
default:
|
1258
|
+
return false;
|
1259
|
+
}
|
1260
|
+
}
|
1261
|
+
/**
|
1262
|
+
* Gives the actual byte length of a string for an encoding. This is not the same as `string.length` since that
|
1263
|
+
* returns the number of characters in the string.
|
1264
|
+
*
|
1265
|
+
* @param string The string to test.
|
1266
|
+
* @param encoding The encoding to use for calculation. Defaults is `utf8`.
|
1267
|
+
*/
|
1268
|
+
static byteLength(string, encoding) {
|
1269
|
+
if (Buffer.isBuffer(string)) {
|
1270
|
+
return string.length;
|
1271
|
+
}
|
1272
|
+
if (typeof string !== "string" && (ArrayBuffer.isView(string) || Buffer._isInstance(string, ArrayBuffer))) {
|
1273
|
+
return string.byteLength;
|
1274
|
+
}
|
1275
|
+
if (typeof string !== "string") {
|
1276
|
+
throw new TypeError(
|
1277
|
+
'The "string" argument must be one of type string, Buffer, or ArrayBuffer. Received type ' + typeof string
|
1278
|
+
);
|
1279
|
+
}
|
1280
|
+
const len = string.length;
|
1281
|
+
const mustMatch = arguments.length > 2 && arguments[2] === true;
|
1282
|
+
if (!mustMatch && len === 0) {
|
1283
|
+
return 0;
|
1284
|
+
}
|
1285
|
+
switch (encoding?.toLowerCase()) {
|
1286
|
+
case "ascii":
|
1287
|
+
case "latin1":
|
1288
|
+
case "binary":
|
1289
|
+
return len;
|
1290
|
+
case "utf8":
|
1291
|
+
return Buffer._utf8ToBytes(string).length;
|
1292
|
+
case "hex":
|
1293
|
+
return len >>> 1;
|
1294
|
+
case "ucs2":
|
1295
|
+
case "utf16le":
|
1296
|
+
return len * 2;
|
1297
|
+
case "base64":
|
1298
|
+
return Buffer._base64ToBytes(string).length;
|
1299
|
+
default:
|
1300
|
+
return mustMatch ? -1 : Buffer._utf8ToBytes(string).length;
|
1301
|
+
}
|
1302
|
+
}
|
1303
|
+
/**
|
1304
|
+
* Returns a Buffer which is the result of concatenating all the buffers in the list together.
|
1305
|
+
*
|
1306
|
+
* - If the list has no items, or if the `totalLength` is 0, then it returns a zero-length buffer.
|
1307
|
+
* - If the list has exactly one item, then the first item is returned.
|
1308
|
+
* - If the list has more than one item, then a new buffer is created.
|
1309
|
+
*
|
1310
|
+
* It is faster to provide the `totalLength` if it is known. However, it will be calculated if not provided at
|
1311
|
+
* a small computational expense.
|
1312
|
+
*
|
1313
|
+
* @param list An array of Buffer objects to concatenate.
|
1314
|
+
* @param totalLength Total length of the buffers when concatenated.
|
1315
|
+
*/
|
1316
|
+
static concat(list, totalLength) {
|
1317
|
+
if (!Array.isArray(list)) {
|
1318
|
+
throw new TypeError('"list" argument must be an Array of Buffers');
|
1319
|
+
}
|
1320
|
+
if (list.length === 0) {
|
1321
|
+
return Buffer.alloc(0);
|
1322
|
+
}
|
1323
|
+
let i;
|
1324
|
+
if (totalLength === void 0) {
|
1325
|
+
totalLength = 0;
|
1326
|
+
for (i = 0; i < list.length; ++i) {
|
1327
|
+
totalLength += list[i].length;
|
1328
|
+
}
|
1329
|
+
}
|
1330
|
+
const buffer = Buffer.allocUnsafe(totalLength);
|
1331
|
+
let pos = 0;
|
1332
|
+
for (i = 0; i < list.length; ++i) {
|
1333
|
+
let buf = list[i];
|
1334
|
+
if (Buffer._isInstance(buf, Uint8Array)) {
|
1335
|
+
if (pos + buf.length > buffer.length) {
|
1336
|
+
if (!Buffer.isBuffer(buf)) {
|
1337
|
+
buf = Buffer.from(buf);
|
1338
|
+
}
|
1339
|
+
buf.copy(buffer, pos);
|
1340
|
+
} else {
|
1341
|
+
Uint8Array.prototype.set.call(buffer, buf, pos);
|
1342
|
+
}
|
1343
|
+
} else if (!Buffer.isBuffer(buf)) {
|
1344
|
+
throw new TypeError('"list" argument must be an Array of Buffers');
|
1345
|
+
} else {
|
1346
|
+
buf.copy(buffer, pos);
|
1347
|
+
}
|
1348
|
+
pos += buf.length;
|
1349
|
+
}
|
1350
|
+
return buffer;
|
1351
|
+
}
|
1352
|
+
/**
|
1353
|
+
* The same as `buf1.compare(buf2)`.
|
1354
|
+
*/
|
1355
|
+
static compare(buf1, buf2) {
|
1356
|
+
if (Buffer._isInstance(buf1, Uint8Array)) {
|
1357
|
+
buf1 = Buffer.from(buf1, buf1.byteOffset, buf1.byteLength);
|
1358
|
+
}
|
1359
|
+
if (Buffer._isInstance(buf2, Uint8Array)) {
|
1360
|
+
buf2 = Buffer.from(buf2, buf2.byteOffset, buf2.byteLength);
|
1361
|
+
}
|
1362
|
+
if (!Buffer.isBuffer(buf1) || !Buffer.isBuffer(buf2)) {
|
1363
|
+
throw new TypeError('The "buf1", "buf2" arguments must be one of type Buffer or Uint8Array');
|
1364
|
+
}
|
1365
|
+
if (buf1 === buf2) {
|
1366
|
+
return 0;
|
1367
|
+
}
|
1368
|
+
let x = buf1.length;
|
1369
|
+
let y = buf2.length;
|
1370
|
+
for (let i = 0, len = Math.min(x, y); i < len; ++i) {
|
1371
|
+
if (buf1[i] !== buf2[i]) {
|
1372
|
+
x = buf1[i];
|
1373
|
+
y = buf2[i];
|
1374
|
+
break;
|
1375
|
+
}
|
1376
|
+
}
|
1377
|
+
if (x < y) {
|
1378
|
+
return -1;
|
1379
|
+
}
|
1380
|
+
if (y < x) {
|
1381
|
+
return 1;
|
1382
|
+
}
|
1383
|
+
return 0;
|
1384
|
+
}
|
1385
|
+
/**
|
1386
|
+
* Allocates a new buffer of `size` octets.
|
1387
|
+
*
|
1388
|
+
* @param size The number of octets to allocate.
|
1389
|
+
* @param fill If specified, the buffer will be initialized by calling `buf.fill(fill)`, or with zeroes otherwise.
|
1390
|
+
* @param encoding The encoding used for the call to `buf.fill()` while initializing.
|
1391
|
+
*/
|
1392
|
+
static alloc(size, fill, encoding) {
|
1393
|
+
if (typeof size !== "number") {
|
1394
|
+
throw new TypeError('"size" argument must be of type number');
|
1395
|
+
} else if (size < 0) {
|
1396
|
+
throw new RangeError('The value "' + size + '" is invalid for option "size"');
|
1397
|
+
}
|
1398
|
+
if (size <= 0) {
|
1399
|
+
return new Buffer(size);
|
1400
|
+
}
|
1401
|
+
if (fill !== void 0) {
|
1402
|
+
return typeof encoding === "string" ? new Buffer(size).fill(fill, 0, size, encoding) : new Buffer(size).fill(fill);
|
1403
|
+
}
|
1404
|
+
return new Buffer(size);
|
1405
|
+
}
|
1406
|
+
/**
|
1407
|
+
* Allocates a new buffer of `size` octets without initializing memory. The contents of the buffer are unknown.
|
1408
|
+
*
|
1409
|
+
* @param size
|
1410
|
+
*/
|
1411
|
+
static allocUnsafe(size) {
|
1412
|
+
if (typeof size !== "number") {
|
1413
|
+
throw new TypeError('"size" argument must be of type number');
|
1414
|
+
} else if (size < 0) {
|
1415
|
+
throw new RangeError('The value "' + size + '" is invalid for option "size"');
|
1416
|
+
}
|
1417
|
+
return new Buffer(size < 0 ? 0 : Buffer._checked(size) | 0);
|
1418
|
+
}
|
1419
|
+
/**
|
1420
|
+
* Returns true if the given `obj` is an instance of `type`.
|
1421
|
+
*
|
1422
|
+
* @param obj
|
1423
|
+
* @param type
|
1424
|
+
*/
|
1425
|
+
static _isInstance(obj, type) {
|
1426
|
+
return obj instanceof type || obj != null && obj.constructor != null && obj.constructor.name != null && obj.constructor.name === type.name;
|
1427
|
+
}
|
1428
|
+
static _checked(length) {
|
1429
|
+
if (length >= K_MAX_LENGTH) {
|
1430
|
+
throw new RangeError(
|
1431
|
+
"Attempt to allocate Buffer larger than maximum size: 0x" + K_MAX_LENGTH.toString(16) + " bytes"
|
1432
|
+
);
|
1433
|
+
}
|
1434
|
+
return length | 0;
|
1435
|
+
}
|
1436
|
+
static _blitBuffer(src, dst, offset, length) {
|
1437
|
+
let i;
|
1438
|
+
for (i = 0; i < length; ++i) {
|
1439
|
+
if (i + offset >= dst.length || i >= src.length) {
|
1440
|
+
break;
|
1441
|
+
}
|
1442
|
+
dst[i + offset] = src[i];
|
1443
|
+
}
|
1444
|
+
return i;
|
1445
|
+
}
|
1446
|
+
static _utf8Write(buf, string, offset, length) {
|
1447
|
+
return Buffer._blitBuffer(Buffer._utf8ToBytes(string, buf.length - offset), buf, offset, length);
|
1448
|
+
}
|
1449
|
+
static _asciiWrite(buf, string, offset, length) {
|
1450
|
+
return Buffer._blitBuffer(Buffer._asciiToBytes(string), buf, offset, length);
|
1451
|
+
}
|
1452
|
+
static _base64Write(buf, string, offset, length) {
|
1453
|
+
return Buffer._blitBuffer(Buffer._base64ToBytes(string), buf, offset, length);
|
1454
|
+
}
|
1455
|
+
static _ucs2Write(buf, string, offset, length) {
|
1456
|
+
return Buffer._blitBuffer(Buffer._utf16leToBytes(string, buf.length - offset), buf, offset, length);
|
1457
|
+
}
|
1458
|
+
static _hexWrite(buf, string, offset, length) {
|
1459
|
+
offset = Number(offset) || 0;
|
1460
|
+
const remaining = buf.length - offset;
|
1461
|
+
if (!length) {
|
1462
|
+
length = remaining;
|
1463
|
+
} else {
|
1464
|
+
length = Number(length);
|
1465
|
+
if (length > remaining) {
|
1466
|
+
length = remaining;
|
1467
|
+
}
|
1468
|
+
}
|
1469
|
+
const strLen = string.length;
|
1470
|
+
if (length > strLen / 2) {
|
1471
|
+
length = strLen / 2;
|
1472
|
+
}
|
1473
|
+
let i;
|
1474
|
+
for (i = 0; i < length; ++i) {
|
1475
|
+
const parsed = parseInt(string.substr(i * 2, 2), 16);
|
1476
|
+
if (parsed !== parsed) {
|
1477
|
+
return i;
|
1478
|
+
}
|
1479
|
+
buf[offset + i] = parsed;
|
1480
|
+
}
|
1481
|
+
return i;
|
1482
|
+
}
|
1483
|
+
static _utf8ToBytes(string, units) {
|
1484
|
+
units = units || Infinity;
|
1485
|
+
const length = string.length;
|
1486
|
+
const bytes = [];
|
1487
|
+
let codePoint;
|
1488
|
+
let leadSurrogate = null;
|
1489
|
+
for (let i = 0; i < length; ++i) {
|
1490
|
+
codePoint = string.charCodeAt(i);
|
1491
|
+
if (codePoint > 55295 && codePoint < 57344) {
|
1492
|
+
if (!leadSurrogate) {
|
1493
|
+
if (codePoint > 56319) {
|
1494
|
+
if ((units -= 3) > -1) {
|
1495
|
+
bytes.push(239, 191, 189);
|
1496
|
+
}
|
1497
|
+
continue;
|
1498
|
+
} else if (i + 1 === length) {
|
1499
|
+
if ((units -= 3) > -1) {
|
1500
|
+
bytes.push(239, 191, 189);
|
1501
|
+
}
|
1502
|
+
continue;
|
1503
|
+
}
|
1504
|
+
leadSurrogate = codePoint;
|
1505
|
+
continue;
|
1506
|
+
}
|
1507
|
+
if (codePoint < 56320) {
|
1508
|
+
if ((units -= 3) > -1) {
|
1509
|
+
bytes.push(239, 191, 189);
|
1510
|
+
}
|
1511
|
+
leadSurrogate = codePoint;
|
1512
|
+
continue;
|
1513
|
+
}
|
1514
|
+
codePoint = (leadSurrogate - 55296 << 10 | codePoint - 56320) + 65536;
|
1515
|
+
} else if (leadSurrogate) {
|
1516
|
+
if ((units -= 3) > -1) {
|
1517
|
+
bytes.push(239, 191, 189);
|
1518
|
+
}
|
1519
|
+
}
|
1520
|
+
leadSurrogate = null;
|
1521
|
+
if (codePoint < 128) {
|
1522
|
+
if ((units -= 1) < 0) {
|
1523
|
+
break;
|
1524
|
+
}
|
1525
|
+
bytes.push(codePoint);
|
1526
|
+
} else if (codePoint < 2048) {
|
1527
|
+
if ((units -= 2) < 0) {
|
1528
|
+
break;
|
1529
|
+
}
|
1530
|
+
bytes.push(codePoint >> 6 | 192, codePoint & 63 | 128);
|
1531
|
+
} else if (codePoint < 65536) {
|
1532
|
+
if ((units -= 3) < 0) {
|
1533
|
+
break;
|
1534
|
+
}
|
1535
|
+
bytes.push(codePoint >> 12 | 224, codePoint >> 6 & 63 | 128, codePoint & 63 | 128);
|
1536
|
+
} else if (codePoint < 1114112) {
|
1537
|
+
if ((units -= 4) < 0) {
|
1538
|
+
break;
|
1539
|
+
}
|
1540
|
+
bytes.push(
|
1541
|
+
codePoint >> 18 | 240,
|
1542
|
+
codePoint >> 12 & 63 | 128,
|
1543
|
+
codePoint >> 6 & 63 | 128,
|
1544
|
+
codePoint & 63 | 128
|
1545
|
+
);
|
1546
|
+
} else {
|
1547
|
+
throw new Error("Invalid code point");
|
1548
|
+
}
|
1549
|
+
}
|
1550
|
+
return bytes;
|
1551
|
+
}
|
1552
|
+
static _base64ToBytes(str) {
|
1553
|
+
return toByteArray(base64clean(str));
|
1554
|
+
}
|
1555
|
+
static _asciiToBytes(str) {
|
1556
|
+
const byteArray = [];
|
1557
|
+
for (let i = 0; i < str.length; ++i) {
|
1558
|
+
byteArray.push(str.charCodeAt(i) & 255);
|
1559
|
+
}
|
1560
|
+
return byteArray;
|
1561
|
+
}
|
1562
|
+
static _utf16leToBytes(str, units) {
|
1563
|
+
let c, hi, lo;
|
1564
|
+
const byteArray = [];
|
1565
|
+
for (let i = 0; i < str.length; ++i) {
|
1566
|
+
if ((units -= 2) < 0) break;
|
1567
|
+
c = str.charCodeAt(i);
|
1568
|
+
hi = c >> 8;
|
1569
|
+
lo = c % 256;
|
1570
|
+
byteArray.push(lo);
|
1571
|
+
byteArray.push(hi);
|
1572
|
+
}
|
1573
|
+
return byteArray;
|
1574
|
+
}
|
1575
|
+
static _hexSlice(buf, start, end) {
|
1576
|
+
const len = buf.length;
|
1577
|
+
if (!start || start < 0) {
|
1578
|
+
start = 0;
|
1579
|
+
}
|
1580
|
+
if (!end || end < 0 || end > len) {
|
1581
|
+
end = len;
|
1582
|
+
}
|
1583
|
+
let out = "";
|
1584
|
+
for (let i = start; i < end; ++i) {
|
1585
|
+
out += hexSliceLookupTable[buf[i]];
|
1586
|
+
}
|
1587
|
+
return out;
|
1588
|
+
}
|
1589
|
+
static _base64Slice(buf, start, end) {
|
1590
|
+
if (start === 0 && end === buf.length) {
|
1591
|
+
return fromByteArray(buf);
|
1592
|
+
} else {
|
1593
|
+
return fromByteArray(buf.slice(start, end));
|
1594
|
+
}
|
1595
|
+
}
|
1596
|
+
static _utf8Slice(buf, start, end) {
|
1597
|
+
end = Math.min(buf.length, end);
|
1598
|
+
const res = [];
|
1599
|
+
let i = start;
|
1600
|
+
while (i < end) {
|
1601
|
+
const firstByte = buf[i];
|
1602
|
+
let codePoint = null;
|
1603
|
+
let bytesPerSequence = firstByte > 239 ? 4 : firstByte > 223 ? 3 : firstByte > 191 ? 2 : 1;
|
1604
|
+
if (i + bytesPerSequence <= end) {
|
1605
|
+
let secondByte, thirdByte, fourthByte, tempCodePoint;
|
1606
|
+
switch (bytesPerSequence) {
|
1607
|
+
case 1:
|
1608
|
+
if (firstByte < 128) {
|
1609
|
+
codePoint = firstByte;
|
1610
|
+
}
|
1611
|
+
break;
|
1612
|
+
case 2:
|
1613
|
+
secondByte = buf[i + 1];
|
1614
|
+
if ((secondByte & 192) === 128) {
|
1615
|
+
tempCodePoint = (firstByte & 31) << 6 | secondByte & 63;
|
1616
|
+
if (tempCodePoint > 127) {
|
1617
|
+
codePoint = tempCodePoint;
|
1618
|
+
}
|
1619
|
+
}
|
1620
|
+
break;
|
1621
|
+
case 3:
|
1622
|
+
secondByte = buf[i + 1];
|
1623
|
+
thirdByte = buf[i + 2];
|
1624
|
+
if ((secondByte & 192) === 128 && (thirdByte & 192) === 128) {
|
1625
|
+
tempCodePoint = (firstByte & 15) << 12 | (secondByte & 63) << 6 | thirdByte & 63;
|
1626
|
+
if (tempCodePoint > 2047 && (tempCodePoint < 55296 || tempCodePoint > 57343)) {
|
1627
|
+
codePoint = tempCodePoint;
|
1628
|
+
}
|
1629
|
+
}
|
1630
|
+
break;
|
1631
|
+
case 4:
|
1632
|
+
secondByte = buf[i + 1];
|
1633
|
+
thirdByte = buf[i + 2];
|
1634
|
+
fourthByte = buf[i + 3];
|
1635
|
+
if ((secondByte & 192) === 128 && (thirdByte & 192) === 128 && (fourthByte & 192) === 128) {
|
1636
|
+
tempCodePoint = (firstByte & 15) << 18 | (secondByte & 63) << 12 | (thirdByte & 63) << 6 | fourthByte & 63;
|
1637
|
+
if (tempCodePoint > 65535 && tempCodePoint < 1114112) {
|
1638
|
+
codePoint = tempCodePoint;
|
1639
|
+
}
|
1640
|
+
}
|
1641
|
+
}
|
1642
|
+
}
|
1643
|
+
if (codePoint === null) {
|
1644
|
+
codePoint = 65533;
|
1645
|
+
bytesPerSequence = 1;
|
1646
|
+
} else if (codePoint > 65535) {
|
1647
|
+
codePoint -= 65536;
|
1648
|
+
res.push(codePoint >>> 10 & 1023 | 55296);
|
1649
|
+
codePoint = 56320 | codePoint & 1023;
|
1650
|
+
}
|
1651
|
+
res.push(codePoint);
|
1652
|
+
i += bytesPerSequence;
|
1653
|
+
}
|
1654
|
+
return Buffer._decodeCodePointsArray(res);
|
1655
|
+
}
|
1656
|
+
static _decodeCodePointsArray(codePoints) {
|
1657
|
+
const len = codePoints.length;
|
1658
|
+
if (len <= MAX_ARGUMENTS_LENGTH) {
|
1659
|
+
return String.fromCharCode.apply(String, codePoints);
|
1660
|
+
}
|
1661
|
+
let res = "";
|
1662
|
+
let i = 0;
|
1663
|
+
while (i < len) {
|
1664
|
+
res += String.fromCharCode.apply(String, codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH));
|
1665
|
+
}
|
1666
|
+
return res;
|
1667
|
+
}
|
1668
|
+
static _asciiSlice(buf, start, end) {
|
1669
|
+
let ret = "";
|
1670
|
+
end = Math.min(buf.length, end);
|
1671
|
+
for (let i = start; i < end; ++i) {
|
1672
|
+
ret += String.fromCharCode(buf[i] & 127);
|
1673
|
+
}
|
1674
|
+
return ret;
|
1675
|
+
}
|
1676
|
+
static _latin1Slice(buf, start, end) {
|
1677
|
+
let ret = "";
|
1678
|
+
end = Math.min(buf.length, end);
|
1679
|
+
for (let i = start; i < end; ++i) {
|
1680
|
+
ret += String.fromCharCode(buf[i]);
|
1681
|
+
}
|
1682
|
+
return ret;
|
1683
|
+
}
|
1684
|
+
static _utf16leSlice(buf, start, end) {
|
1685
|
+
const bytes = buf.slice(start, end);
|
1686
|
+
let res = "";
|
1687
|
+
for (let i = 0; i < bytes.length - 1; i += 2) {
|
1688
|
+
res += String.fromCharCode(bytes[i] + bytes[i + 1] * 256);
|
1689
|
+
}
|
1690
|
+
return res;
|
1691
|
+
}
|
1692
|
+
static _arrayIndexOf(arr, val, byteOffset, encoding, dir) {
|
1693
|
+
let indexSize = 1;
|
1694
|
+
let arrLength = arr.length;
|
1695
|
+
let valLength = val.length;
|
1696
|
+
if (encoding !== void 0) {
|
1697
|
+
encoding = Buffer._getEncoding(encoding);
|
1698
|
+
if (encoding === "ucs2" || encoding === "utf16le") {
|
1699
|
+
if (arr.length < 2 || val.length < 2) {
|
1700
|
+
return -1;
|
1701
|
+
}
|
1702
|
+
indexSize = 2;
|
1703
|
+
arrLength /= 2;
|
1704
|
+
valLength /= 2;
|
1705
|
+
byteOffset /= 2;
|
1706
|
+
}
|
1707
|
+
}
|
1708
|
+
function read(buf, i2) {
|
1709
|
+
if (indexSize === 1) {
|
1710
|
+
return buf[i2];
|
1711
|
+
} else {
|
1712
|
+
return buf.readUInt16BE(i2 * indexSize);
|
1713
|
+
}
|
1714
|
+
}
|
1715
|
+
let i;
|
1716
|
+
if (dir) {
|
1717
|
+
let foundIndex = -1;
|
1718
|
+
for (i = byteOffset; i < arrLength; i++) {
|
1719
|
+
if (read(arr, i) === read(val, foundIndex === -1 ? 0 : i - foundIndex)) {
|
1720
|
+
if (foundIndex === -1) foundIndex = i;
|
1721
|
+
if (i - foundIndex + 1 === valLength) return foundIndex * indexSize;
|
1722
|
+
} else {
|
1723
|
+
if (foundIndex !== -1) i -= i - foundIndex;
|
1724
|
+
foundIndex = -1;
|
1725
|
+
}
|
1726
|
+
}
|
1727
|
+
} else {
|
1728
|
+
if (byteOffset + valLength > arrLength) {
|
1729
|
+
byteOffset = arrLength - valLength;
|
1730
|
+
}
|
1731
|
+
for (i = byteOffset; i >= 0; i--) {
|
1732
|
+
let found = true;
|
1733
|
+
for (let j = 0; j < valLength; j++) {
|
1734
|
+
if (read(arr, i + j) !== read(val, j)) {
|
1735
|
+
found = false;
|
1736
|
+
break;
|
1737
|
+
}
|
1738
|
+
}
|
1739
|
+
if (found) {
|
1740
|
+
return i;
|
1741
|
+
}
|
1742
|
+
}
|
1743
|
+
}
|
1744
|
+
return -1;
|
1745
|
+
}
|
1746
|
+
static _checkOffset(offset, ext, length) {
|
1747
|
+
if (offset % 1 !== 0 || offset < 0) throw new RangeError("offset is not uint");
|
1748
|
+
if (offset + ext > length) throw new RangeError("Trying to access beyond buffer length");
|
1749
|
+
}
|
1750
|
+
static _checkInt(buf, value, offset, ext, max, min) {
|
1751
|
+
if (!Buffer.isBuffer(buf)) throw new TypeError('"buffer" argument must be a Buffer instance');
|
1752
|
+
if (value > max || value < min) throw new RangeError('"value" argument is out of bounds');
|
1753
|
+
if (offset + ext > buf.length) throw new RangeError("Index out of range");
|
1754
|
+
}
|
1755
|
+
static _getEncoding(encoding) {
|
1756
|
+
let toLowerCase = false;
|
1757
|
+
let originalEncoding = "";
|
1758
|
+
for (; ; ) {
|
1759
|
+
switch (encoding) {
|
1760
|
+
case "hex":
|
1761
|
+
return "hex";
|
1762
|
+
case "utf8":
|
1763
|
+
return "utf8";
|
1764
|
+
case "ascii":
|
1765
|
+
return "ascii";
|
1766
|
+
case "binary":
|
1767
|
+
return "binary";
|
1768
|
+
case "latin1":
|
1769
|
+
return "latin1";
|
1770
|
+
case "ucs2":
|
1771
|
+
return "ucs2";
|
1772
|
+
case "utf16le":
|
1773
|
+
return "utf16le";
|
1774
|
+
case "base64":
|
1775
|
+
return "base64";
|
1776
|
+
default: {
|
1777
|
+
if (toLowerCase) {
|
1778
|
+
throw new TypeError("Unknown or unsupported encoding: " + originalEncoding);
|
1779
|
+
}
|
1780
|
+
toLowerCase = true;
|
1781
|
+
originalEncoding = encoding;
|
1782
|
+
encoding = encoding.toLowerCase();
|
1783
|
+
}
|
1784
|
+
}
|
1785
|
+
}
|
1786
|
+
}
|
1787
|
+
}
|
1788
|
+
const hexSliceLookupTable = function() {
|
1789
|
+
const alphabet = "0123456789abcdef";
|
1790
|
+
const table = new Array(256);
|
1791
|
+
for (let i = 0; i < 16; ++i) {
|
1792
|
+
const i16 = i * 16;
|
1793
|
+
for (let j = 0; j < 16; ++j) {
|
1794
|
+
table[i16 + j] = alphabet[i] + alphabet[j];
|
1795
|
+
}
|
1796
|
+
}
|
1797
|
+
return table;
|
1798
|
+
}();
|
1799
|
+
const INVALID_BASE64_RE = /[^+/0-9A-Za-z-_]/g;
|
1800
|
+
function base64clean(str) {
|
1801
|
+
str = str.split("=")[0];
|
1802
|
+
str = str.trim().replace(INVALID_BASE64_RE, "");
|
1803
|
+
if (str.length < 2) return "";
|
1804
|
+
while (str.length % 4 !== 0) {
|
1805
|
+
str = str + "=";
|
1806
|
+
}
|
1807
|
+
return str;
|
1808
|
+
}
|
1809
|
+
|
27
1810
|
function notEmpty(value) {
|
28
1811
|
return value !== null && value !== void 0;
|
29
1812
|
}
|
@@ -118,155 +1901,15 @@ function promiseMap(inputValues, mapper) {
|
|
118
1901
|
return inputValues.reduce(reducer, Promise.resolve([]));
|
119
1902
|
}
|
120
1903
|
|
121
|
-
|
122
|
-
|
123
|
-
if (isDefined(process) && isDefined(process.env)) {
|
124
|
-
return {
|
125
|
-
apiKey: process.env.XATA_API_KEY ?? getGlobalApiKey(),
|
126
|
-
databaseURL: process.env.XATA_DATABASE_URL ?? getGlobalDatabaseURL(),
|
127
|
-
branch: process.env.XATA_BRANCH ?? getGlobalBranch(),
|
128
|
-
deployPreview: process.env.XATA_PREVIEW,
|
129
|
-
deployPreviewBranch: process.env.XATA_PREVIEW_BRANCH,
|
130
|
-
vercelGitCommitRef: process.env.VERCEL_GIT_COMMIT_REF,
|
131
|
-
vercelGitRepoOwner: process.env.VERCEL_GIT_REPO_OWNER
|
132
|
-
};
|
133
|
-
}
|
134
|
-
} catch (err) {
|
135
|
-
}
|
136
|
-
try {
|
137
|
-
if (isObject(Deno) && isObject(Deno.env)) {
|
138
|
-
return {
|
139
|
-
apiKey: Deno.env.get("XATA_API_KEY") ?? getGlobalApiKey(),
|
140
|
-
databaseURL: Deno.env.get("XATA_DATABASE_URL") ?? getGlobalDatabaseURL(),
|
141
|
-
branch: Deno.env.get("XATA_BRANCH") ?? getGlobalBranch(),
|
142
|
-
deployPreview: Deno.env.get("XATA_PREVIEW"),
|
143
|
-
deployPreviewBranch: Deno.env.get("XATA_PREVIEW_BRANCH"),
|
144
|
-
vercelGitCommitRef: Deno.env.get("VERCEL_GIT_COMMIT_REF"),
|
145
|
-
vercelGitRepoOwner: Deno.env.get("VERCEL_GIT_REPO_OWNER")
|
146
|
-
};
|
147
|
-
}
|
148
|
-
} catch (err) {
|
149
|
-
}
|
150
|
-
return {
|
151
|
-
apiKey: getGlobalApiKey(),
|
152
|
-
databaseURL: getGlobalDatabaseURL(),
|
153
|
-
branch: getGlobalBranch(),
|
154
|
-
deployPreview: void 0,
|
155
|
-
deployPreviewBranch: void 0,
|
156
|
-
vercelGitCommitRef: void 0,
|
157
|
-
vercelGitRepoOwner: void 0
|
158
|
-
};
|
159
|
-
}
|
160
|
-
function getEnableBrowserVariable() {
|
161
|
-
try {
|
162
|
-
if (isObject(process) && isObject(process.env) && process.env.XATA_ENABLE_BROWSER !== void 0) {
|
163
|
-
return process.env.XATA_ENABLE_BROWSER === "true";
|
164
|
-
}
|
165
|
-
} catch (err) {
|
166
|
-
}
|
167
|
-
try {
|
168
|
-
if (isObject(Deno) && isObject(Deno.env) && Deno.env.get("XATA_ENABLE_BROWSER") !== void 0) {
|
169
|
-
return Deno.env.get("XATA_ENABLE_BROWSER") === "true";
|
170
|
-
}
|
171
|
-
} catch (err) {
|
172
|
-
}
|
173
|
-
try {
|
174
|
-
return XATA_ENABLE_BROWSER === true || XATA_ENABLE_BROWSER === "true";
|
175
|
-
} catch (err) {
|
176
|
-
return void 0;
|
177
|
-
}
|
178
|
-
}
|
179
|
-
function getGlobalApiKey() {
|
180
|
-
try {
|
181
|
-
return XATA_API_KEY;
|
182
|
-
} catch (err) {
|
183
|
-
return void 0;
|
184
|
-
}
|
185
|
-
}
|
186
|
-
function getGlobalDatabaseURL() {
|
187
|
-
try {
|
188
|
-
return XATA_DATABASE_URL;
|
189
|
-
} catch (err) {
|
190
|
-
return void 0;
|
191
|
-
}
|
192
|
-
}
|
193
|
-
function getGlobalBranch() {
|
194
|
-
try {
|
195
|
-
return XATA_BRANCH;
|
196
|
-
} catch (err) {
|
197
|
-
return void 0;
|
198
|
-
}
|
199
|
-
}
|
200
|
-
function getDatabaseURL() {
|
201
|
-
try {
|
202
|
-
const { databaseURL } = getEnvironment();
|
203
|
-
return databaseURL;
|
204
|
-
} catch (err) {
|
205
|
-
return void 0;
|
206
|
-
}
|
207
|
-
}
|
208
|
-
function getAPIKey() {
|
209
|
-
try {
|
210
|
-
const { apiKey } = getEnvironment();
|
211
|
-
return apiKey;
|
212
|
-
} catch (err) {
|
213
|
-
return void 0;
|
214
|
-
}
|
215
|
-
}
|
216
|
-
function getBranch() {
|
217
|
-
try {
|
218
|
-
const { branch } = getEnvironment();
|
219
|
-
return branch;
|
220
|
-
} catch (err) {
|
221
|
-
return void 0;
|
222
|
-
}
|
223
|
-
}
|
224
|
-
function buildPreviewBranchName({ org, branch }) {
|
225
|
-
return `preview-${org}-${branch}`;
|
226
|
-
}
|
227
|
-
function getPreviewBranch() {
|
228
|
-
try {
|
229
|
-
const { deployPreview, deployPreviewBranch, vercelGitCommitRef, vercelGitRepoOwner } = getEnvironment();
|
230
|
-
if (deployPreviewBranch)
|
231
|
-
return deployPreviewBranch;
|
232
|
-
switch (deployPreview) {
|
233
|
-
case "vercel": {
|
234
|
-
if (!vercelGitCommitRef || !vercelGitRepoOwner) {
|
235
|
-
console.warn("XATA_PREVIEW=vercel but VERCEL_GIT_COMMIT_REF or VERCEL_GIT_REPO_OWNER is not valid");
|
236
|
-
return void 0;
|
237
|
-
}
|
238
|
-
return buildPreviewBranchName({ org: vercelGitRepoOwner, branch: vercelGitCommitRef });
|
239
|
-
}
|
240
|
-
}
|
241
|
-
return void 0;
|
242
|
-
} catch (err) {
|
243
|
-
return void 0;
|
244
|
-
}
|
245
|
-
}
|
246
|
-
|
247
|
-
var __accessCheck$6 = (obj, member, msg) => {
|
248
|
-
if (!member.has(obj))
|
249
|
-
throw TypeError("Cannot " + msg);
|
250
|
-
};
|
251
|
-
var __privateGet$5 = (obj, member, getter) => {
|
252
|
-
__accessCheck$6(obj, member, "read from private field");
|
253
|
-
return getter ? getter.call(obj) : member.get(obj);
|
254
|
-
};
|
255
|
-
var __privateAdd$6 = (obj, member, value) => {
|
256
|
-
if (member.has(obj))
|
257
|
-
throw TypeError("Cannot add the same private member more than once");
|
258
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
1904
|
+
var __typeError$6 = (msg) => {
|
1905
|
+
throw TypeError(msg);
|
259
1906
|
};
|
260
|
-
var
|
261
|
-
|
262
|
-
|
263
|
-
|
264
|
-
|
265
|
-
var
|
266
|
-
__accessCheck$6(obj, member, "access private method");
|
267
|
-
return method;
|
268
|
-
};
|
269
|
-
var _fetch, _queue, _concurrency, _enqueue, enqueue_fn;
|
1907
|
+
var __accessCheck$6 = (obj, member, msg) => member.has(obj) || __typeError$6("Cannot " + msg);
|
1908
|
+
var __privateGet$5 = (obj, member, getter) => (__accessCheck$6(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
1909
|
+
var __privateAdd$6 = (obj, member, value) => member.has(obj) ? __typeError$6("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
1910
|
+
var __privateSet$4 = (obj, member, value, setter) => (__accessCheck$6(obj, member, "write to private field"), member.set(obj, value), value);
|
1911
|
+
var __privateMethod$4 = (obj, member, method) => (__accessCheck$6(obj, member, "access private method"), method);
|
1912
|
+
var _fetch, _queue, _concurrency, _ApiRequestPool_instances, enqueue_fn;
|
270
1913
|
const REQUEST_TIMEOUT = 5 * 60 * 1e3;
|
271
1914
|
function getFetchImplementation(userFetch) {
|
272
1915
|
const globalFetch = typeof fetch !== "undefined" ? fetch : void 0;
|
@@ -279,10 +1922,10 @@ function getFetchImplementation(userFetch) {
|
|
279
1922
|
}
|
280
1923
|
class ApiRequestPool {
|
281
1924
|
constructor(concurrency = 10) {
|
282
|
-
__privateAdd$6(this,
|
283
|
-
__privateAdd$6(this, _fetch
|
284
|
-
__privateAdd$6(this, _queue
|
285
|
-
__privateAdd$6(this, _concurrency
|
1925
|
+
__privateAdd$6(this, _ApiRequestPool_instances);
|
1926
|
+
__privateAdd$6(this, _fetch);
|
1927
|
+
__privateAdd$6(this, _queue);
|
1928
|
+
__privateAdd$6(this, _concurrency);
|
286
1929
|
__privateSet$4(this, _queue, []);
|
287
1930
|
__privateSet$4(this, _concurrency, concurrency);
|
288
1931
|
this.running = 0;
|
@@ -317,7 +1960,7 @@ class ApiRequestPool {
|
|
317
1960
|
}
|
318
1961
|
return response;
|
319
1962
|
};
|
320
|
-
return __privateMethod$4(this,
|
1963
|
+
return __privateMethod$4(this, _ApiRequestPool_instances, enqueue_fn).call(this, async () => {
|
321
1964
|
return await runRequest();
|
322
1965
|
});
|
323
1966
|
}
|
@@ -325,7 +1968,7 @@ class ApiRequestPool {
|
|
325
1968
|
_fetch = new WeakMap();
|
326
1969
|
_queue = new WeakMap();
|
327
1970
|
_concurrency = new WeakMap();
|
328
|
-
|
1971
|
+
_ApiRequestPool_instances = new WeakSet();
|
329
1972
|
enqueue_fn = function(task) {
|
330
1973
|
const promise = new Promise((resolve) => __privateGet$5(this, _queue).push(resolve)).finally(() => {
|
331
1974
|
this.started--;
|
@@ -528,7 +2171,7 @@ function defaultOnOpen(response) {
|
|
528
2171
|
}
|
529
2172
|
}
|
530
2173
|
|
531
|
-
const VERSION = "0.29.
|
2174
|
+
const VERSION = "0.29.5";
|
532
2175
|
|
533
2176
|
class ErrorWithCause extends Error {
|
534
2177
|
constructor(message, options) {
|
@@ -608,35 +2251,30 @@ function parseProviderString(provider = "production") {
|
|
608
2251
|
return provider;
|
609
2252
|
}
|
610
2253
|
const [main, workspaces] = provider.split(",");
|
611
|
-
if (!main || !workspaces)
|
612
|
-
return null;
|
2254
|
+
if (!main || !workspaces) return null;
|
613
2255
|
return { main, workspaces };
|
614
2256
|
}
|
615
2257
|
function buildProviderString(provider) {
|
616
|
-
if (isHostProviderAlias(provider))
|
617
|
-
return provider;
|
2258
|
+
if (isHostProviderAlias(provider)) return provider;
|
618
2259
|
return `${provider.main},${provider.workspaces}`;
|
619
2260
|
}
|
620
2261
|
function parseWorkspacesUrlParts(url) {
|
621
|
-
if (!isString(url))
|
622
|
-
return null;
|
2262
|
+
if (!isString(url)) return null;
|
623
2263
|
const matches = {
|
624
|
-
production: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.xata\.sh
|
625
|
-
staging: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.staging-xata\.dev
|
626
|
-
dev: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.dev-xata\.dev
|
627
|
-
local: url.match(/(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:(
|
2264
|
+
production: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.xata\.sh\/db\/([^:]+):?(.*)?/),
|
2265
|
+
staging: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.staging-xata\.dev\/db\/([^:]+):?(.*)?/),
|
2266
|
+
dev: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.dev-xata\.dev\/db\/([^:]+):?(.*)?/),
|
2267
|
+
local: url.match(/(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:(?:\d+)\/db\/([^:]+):?(.*)?/)
|
628
2268
|
};
|
629
2269
|
const [host, match] = Object.entries(matches).find(([, match2]) => match2 !== null) ?? [];
|
630
|
-
if (!isHostProviderAlias(host) || !match)
|
631
|
-
|
632
|
-
return { workspace: match[1], region: match[2], host };
|
2270
|
+
if (!isHostProviderAlias(host) || !match) return null;
|
2271
|
+
return { workspace: match[1], region: match[2], database: match[3], branch: match[4], host };
|
633
2272
|
}
|
634
2273
|
|
635
2274
|
const pool = new ApiRequestPool();
|
636
2275
|
const resolveUrl = (url, queryParams = {}, pathParams = {}) => {
|
637
2276
|
const cleanQueryParams = Object.entries(queryParams).reduce((acc, [key, value]) => {
|
638
|
-
if (value === void 0 || value === null)
|
639
|
-
return acc;
|
2277
|
+
if (value === void 0 || value === null) return acc;
|
640
2278
|
return { ...acc, [key]: value };
|
641
2279
|
}, {});
|
642
2280
|
const query = new URLSearchParams(cleanQueryParams).toString();
|
@@ -684,8 +2322,7 @@ function hostHeader(url) {
|
|
684
2322
|
return groups?.host ? { Host: groups.host } : {};
|
685
2323
|
}
|
686
2324
|
async function parseBody(body, headers) {
|
687
|
-
if (!isDefined(body))
|
688
|
-
return void 0;
|
2325
|
+
if (!isDefined(body)) return void 0;
|
689
2326
|
if (isBlob(body) || typeof body.text === "function") {
|
690
2327
|
return body;
|
691
2328
|
}
|
@@ -764,8 +2401,7 @@ async function fetch$1({
|
|
764
2401
|
[TraceAttributes.CLOUDFLARE_RAY_ID]: response.headers?.get("cf-ray") ?? void 0
|
765
2402
|
});
|
766
2403
|
const message = response.headers?.get("x-xata-message");
|
767
|
-
if (message)
|
768
|
-
console.warn(message);
|
2404
|
+
if (message) console.warn(message);
|
769
2405
|
if (response.status === 204) {
|
770
2406
|
return {};
|
771
2407
|
}
|
@@ -849,16 +2485,96 @@ function parseUrl(url) {
|
|
849
2485
|
|
850
2486
|
const dataPlaneFetch = async (options) => fetch$1({ ...options, endpoint: "dataPlane" });
|
851
2487
|
|
852
|
-
const
|
2488
|
+
const listClusterBranches = (variables, signal) => dataPlaneFetch({
|
2489
|
+
url: "/cluster/{clusterId}/branches",
|
2490
|
+
method: "get",
|
2491
|
+
...variables,
|
2492
|
+
signal
|
2493
|
+
});
|
2494
|
+
const listClusterExtensions = (variables, signal) => dataPlaneFetch({
|
2495
|
+
url: "/cluster/{clusterId}/extensions",
|
2496
|
+
method: "get",
|
2497
|
+
...variables,
|
2498
|
+
signal
|
2499
|
+
});
|
2500
|
+
const installClusterExtension = (variables, signal) => dataPlaneFetch({
|
2501
|
+
url: "/cluster/{clusterId}/extensions",
|
2502
|
+
method: "post",
|
2503
|
+
...variables,
|
2504
|
+
signal
|
2505
|
+
});
|
2506
|
+
const dropClusterExtension = (variables, signal) => dataPlaneFetch({
|
2507
|
+
url: "/cluster/{clusterId}/extensions",
|
2508
|
+
method: "delete",
|
2509
|
+
...variables,
|
2510
|
+
signal
|
2511
|
+
});
|
2512
|
+
const getClusterMetrics = (variables, signal) => dataPlaneFetch({
|
2513
|
+
url: "/cluster/{clusterId}/metrics",
|
2514
|
+
method: "get",
|
2515
|
+
...variables,
|
2516
|
+
signal
|
2517
|
+
});
|
2518
|
+
const applyMigration = (variables, signal) => dataPlaneFetch({
|
2519
|
+
url: "/db/{dbBranchName}/migrations/apply",
|
2520
|
+
method: "post",
|
2521
|
+
...variables,
|
2522
|
+
signal
|
2523
|
+
});
|
2524
|
+
const startMigration = (variables, signal) => dataPlaneFetch({
|
2525
|
+
url: "/db/{dbBranchName}/migrations/start",
|
2526
|
+
method: "post",
|
2527
|
+
...variables,
|
2528
|
+
signal
|
2529
|
+
});
|
2530
|
+
const completeMigration = (variables, signal) => dataPlaneFetch({
|
2531
|
+
url: "/db/{dbBranchName}/migrations/complete",
|
2532
|
+
method: "post",
|
2533
|
+
...variables,
|
2534
|
+
signal
|
2535
|
+
});
|
2536
|
+
const rollbackMigration = (variables, signal) => dataPlaneFetch({
|
2537
|
+
url: "/db/{dbBranchName}/migrations/rollback",
|
2538
|
+
method: "post",
|
2539
|
+
...variables,
|
2540
|
+
signal
|
2541
|
+
});
|
853
2542
|
const adaptTable = (variables, signal) => dataPlaneFetch({
|
854
2543
|
url: "/db/{dbBranchName}/migrations/adapt/{tableName}",
|
855
2544
|
method: "post",
|
856
2545
|
...variables,
|
857
2546
|
signal
|
858
2547
|
});
|
859
|
-
const
|
860
|
-
|
861
|
-
|
2548
|
+
const adaptAllTables = (variables, signal) => dataPlaneFetch({
|
2549
|
+
url: "/db/{dbBranchName}/migrations/adapt",
|
2550
|
+
method: "post",
|
2551
|
+
...variables,
|
2552
|
+
signal
|
2553
|
+
});
|
2554
|
+
const getBranchMigrationJobStatus = (variables, signal) => dataPlaneFetch({
|
2555
|
+
url: "/db/{dbBranchName}/migrations/status",
|
2556
|
+
method: "get",
|
2557
|
+
...variables,
|
2558
|
+
signal
|
2559
|
+
});
|
2560
|
+
const getMigrationJobs = (variables, signal) => dataPlaneFetch({
|
2561
|
+
url: "/db/{dbBranchName}/migrations/jobs",
|
2562
|
+
method: "get",
|
2563
|
+
...variables,
|
2564
|
+
signal
|
2565
|
+
});
|
2566
|
+
const getMigrationJobStatus = (variables, signal) => dataPlaneFetch({
|
2567
|
+
url: "/db/{dbBranchName}/migrations/jobs/{jobId}",
|
2568
|
+
method: "get",
|
2569
|
+
...variables,
|
2570
|
+
signal
|
2571
|
+
});
|
2572
|
+
const getMigrationHistory = (variables, signal) => dataPlaneFetch({
|
2573
|
+
url: "/db/{dbBranchName}/migrations/history",
|
2574
|
+
method: "get",
|
2575
|
+
...variables,
|
2576
|
+
signal
|
2577
|
+
});
|
862
2578
|
const getBranchList = (variables, signal) => dataPlaneFetch({
|
863
2579
|
url: "/dbs/{dbName}",
|
864
2580
|
method: "get",
|
@@ -885,68 +2601,166 @@ const deleteBranch = (variables, signal) => dataPlaneFetch({
|
|
885
2601
|
...variables,
|
886
2602
|
signal
|
887
2603
|
});
|
888
|
-
const getSchema = (variables, signal) => dataPlaneFetch({
|
889
|
-
url: "/db/{dbBranchName}/schema",
|
890
|
-
method: "get",
|
2604
|
+
const getSchema = (variables, signal) => dataPlaneFetch({
|
2605
|
+
url: "/db/{dbBranchName}/schema",
|
2606
|
+
method: "get",
|
2607
|
+
...variables,
|
2608
|
+
signal
|
2609
|
+
});
|
2610
|
+
const getSchemas = (variables, signal) => dataPlaneFetch({
|
2611
|
+
url: "/db/{dbBranchName}/schemas",
|
2612
|
+
method: "get",
|
2613
|
+
...variables,
|
2614
|
+
signal
|
2615
|
+
});
|
2616
|
+
const copyBranch = (variables, signal) => dataPlaneFetch({
|
2617
|
+
url: "/db/{dbBranchName}/copy",
|
2618
|
+
method: "post",
|
2619
|
+
...variables,
|
2620
|
+
signal
|
2621
|
+
});
|
2622
|
+
const getBranchMoveStatus = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/move", method: "get", ...variables, signal });
|
2623
|
+
const moveBranch = (variables, signal) => dataPlaneFetch({
|
2624
|
+
url: "/db/{dbBranchName}/move",
|
2625
|
+
method: "put",
|
2626
|
+
...variables,
|
2627
|
+
signal
|
2628
|
+
});
|
2629
|
+
const updateBranchMetadata = (variables, signal) => dataPlaneFetch({
|
2630
|
+
url: "/db/{dbBranchName}/metadata",
|
2631
|
+
method: "put",
|
2632
|
+
...variables,
|
2633
|
+
signal
|
2634
|
+
});
|
2635
|
+
const getBranchMetadata = (variables, signal) => dataPlaneFetch({
|
2636
|
+
url: "/db/{dbBranchName}/metadata",
|
2637
|
+
method: "get",
|
2638
|
+
...variables,
|
2639
|
+
signal
|
2640
|
+
});
|
2641
|
+
const getBranchStats = (variables, signal) => dataPlaneFetch({
|
2642
|
+
url: "/db/{dbBranchName}/stats",
|
2643
|
+
method: "get",
|
2644
|
+
...variables,
|
2645
|
+
signal
|
2646
|
+
});
|
2647
|
+
const getGitBranchesMapping = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "get", ...variables, signal });
|
2648
|
+
const addGitBranchesEntry = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "post", ...variables, signal });
|
2649
|
+
const removeGitBranchesEntry = (variables, signal) => dataPlaneFetch({
|
2650
|
+
url: "/dbs/{dbName}/gitBranches",
|
2651
|
+
method: "delete",
|
2652
|
+
...variables,
|
2653
|
+
signal
|
2654
|
+
});
|
2655
|
+
const resolveBranch = (variables, signal) => dataPlaneFetch({
|
2656
|
+
url: "/dbs/{dbName}/resolveBranch",
|
2657
|
+
method: "get",
|
2658
|
+
...variables,
|
2659
|
+
signal
|
2660
|
+
});
|
2661
|
+
const getBranchMigrationHistory = (variables, signal) => dataPlaneFetch({
|
2662
|
+
url: "/db/{dbBranchName}/migrations",
|
2663
|
+
method: "get",
|
2664
|
+
...variables,
|
2665
|
+
signal
|
2666
|
+
});
|
2667
|
+
const getBranchMigrationPlan = (variables, signal) => dataPlaneFetch({
|
2668
|
+
url: "/db/{dbBranchName}/migrations/plan",
|
2669
|
+
method: "post",
|
2670
|
+
...variables,
|
2671
|
+
signal
|
2672
|
+
});
|
2673
|
+
const executeBranchMigrationPlan = (variables, signal) => dataPlaneFetch({
|
2674
|
+
url: "/db/{dbBranchName}/migrations/execute",
|
2675
|
+
method: "post",
|
2676
|
+
...variables,
|
2677
|
+
signal
|
2678
|
+
});
|
2679
|
+
const queryMigrationRequests = (variables, signal) => dataPlaneFetch({
|
2680
|
+
url: "/dbs/{dbName}/migrations/query",
|
2681
|
+
method: "post",
|
2682
|
+
...variables,
|
2683
|
+
signal
|
2684
|
+
});
|
2685
|
+
const createMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations", method: "post", ...variables, signal });
|
2686
|
+
const getMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2687
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}",
|
2688
|
+
method: "get",
|
2689
|
+
...variables,
|
2690
|
+
signal
|
2691
|
+
});
|
2692
|
+
const updateMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2693
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}",
|
2694
|
+
method: "patch",
|
2695
|
+
...variables,
|
2696
|
+
signal
|
2697
|
+
});
|
2698
|
+
const listMigrationRequestsCommits = (variables, signal) => dataPlaneFetch({
|
2699
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/commits",
|
2700
|
+
method: "post",
|
2701
|
+
...variables,
|
2702
|
+
signal
|
2703
|
+
});
|
2704
|
+
const compareMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2705
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/compare",
|
2706
|
+
method: "post",
|
2707
|
+
...variables,
|
2708
|
+
signal
|
2709
|
+
});
|
2710
|
+
const getMigrationRequestIsMerged = (variables, signal) => dataPlaneFetch({
|
2711
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
|
2712
|
+
method: "get",
|
2713
|
+
...variables,
|
2714
|
+
signal
|
2715
|
+
});
|
2716
|
+
const mergeMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2717
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
|
2718
|
+
method: "post",
|
2719
|
+
...variables,
|
2720
|
+
signal
|
2721
|
+
});
|
2722
|
+
const getBranchSchemaHistory = (variables, signal) => dataPlaneFetch({
|
2723
|
+
url: "/db/{dbBranchName}/schema/history",
|
2724
|
+
method: "post",
|
891
2725
|
...variables,
|
892
2726
|
signal
|
893
2727
|
});
|
894
|
-
const
|
895
|
-
url: "/db/{dbBranchName}/
|
2728
|
+
const compareBranchWithUserSchema = (variables, signal) => dataPlaneFetch({
|
2729
|
+
url: "/db/{dbBranchName}/schema/compare",
|
896
2730
|
method: "post",
|
897
2731
|
...variables,
|
898
2732
|
signal
|
899
2733
|
});
|
900
|
-
const
|
901
|
-
url: "/db/{dbBranchName}/
|
902
|
-
method: "
|
2734
|
+
const compareBranchSchemas = (variables, signal) => dataPlaneFetch({
|
2735
|
+
url: "/db/{dbBranchName}/schema/compare/{branchName}",
|
2736
|
+
method: "post",
|
903
2737
|
...variables,
|
904
2738
|
signal
|
905
2739
|
});
|
906
|
-
const
|
907
|
-
url: "/db/{dbBranchName}/
|
908
|
-
method: "
|
2740
|
+
const updateBranchSchema = (variables, signal) => dataPlaneFetch({
|
2741
|
+
url: "/db/{dbBranchName}/schema/update",
|
2742
|
+
method: "post",
|
909
2743
|
...variables,
|
910
2744
|
signal
|
911
2745
|
});
|
912
|
-
const
|
913
|
-
url: "/db/{dbBranchName}/
|
914
|
-
method: "
|
2746
|
+
const previewBranchSchemaEdit = (variables, signal) => dataPlaneFetch({
|
2747
|
+
url: "/db/{dbBranchName}/schema/preview",
|
2748
|
+
method: "post",
|
915
2749
|
...variables,
|
916
2750
|
signal
|
917
2751
|
});
|
918
|
-
const
|
919
|
-
|
920
|
-
|
921
|
-
const resolveBranch = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/resolveBranch", method: "get", ...variables, signal });
|
922
|
-
const getBranchMigrationHistory = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations", method: "get", ...variables, signal });
|
923
|
-
const getBranchMigrationPlan = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/plan", method: "post", ...variables, signal });
|
924
|
-
const executeBranchMigrationPlan = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/execute", method: "post", ...variables, signal });
|
925
|
-
const queryMigrationRequests = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/query", method: "post", ...variables, signal });
|
926
|
-
const createMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations", method: "post", ...variables, signal });
|
927
|
-
const getMigrationRequest = (variables, signal) => dataPlaneFetch({
|
928
|
-
url: "/dbs/{dbName}/migrations/{mrNumber}",
|
929
|
-
method: "get",
|
2752
|
+
const applyBranchSchemaEdit = (variables, signal) => dataPlaneFetch({
|
2753
|
+
url: "/db/{dbBranchName}/schema/apply",
|
2754
|
+
method: "post",
|
930
2755
|
...variables,
|
931
2756
|
signal
|
932
2757
|
});
|
933
|
-
const
|
934
|
-
|
935
|
-
const compareMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/{mrNumber}/compare", method: "post", ...variables, signal });
|
936
|
-
const getMigrationRequestIsMerged = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/{mrNumber}/merge", method: "get", ...variables, signal });
|
937
|
-
const mergeMigrationRequest = (variables, signal) => dataPlaneFetch({
|
938
|
-
url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
|
2758
|
+
const pushBranchMigrations = (variables, signal) => dataPlaneFetch({
|
2759
|
+
url: "/db/{dbBranchName}/schema/push",
|
939
2760
|
method: "post",
|
940
2761
|
...variables,
|
941
2762
|
signal
|
942
2763
|
});
|
943
|
-
const getBranchSchemaHistory = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/history", method: "post", ...variables, signal });
|
944
|
-
const compareBranchWithUserSchema = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/compare", method: "post", ...variables, signal });
|
945
|
-
const compareBranchSchemas = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/compare/{branchName}", method: "post", ...variables, signal });
|
946
|
-
const updateBranchSchema = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/update", method: "post", ...variables, signal });
|
947
|
-
const previewBranchSchemaEdit = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/preview", method: "post", ...variables, signal });
|
948
|
-
const applyBranchSchemaEdit = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/apply", method: "post", ...variables, signal });
|
949
|
-
const pushBranchMigrations = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/push", method: "post", ...variables, signal });
|
950
2764
|
const createTable = (variables, signal) => dataPlaneFetch({
|
951
2765
|
url: "/db/{dbBranchName}/tables/{tableName}",
|
952
2766
|
method: "put",
|
@@ -959,14 +2773,24 @@ const deleteTable = (variables, signal) => dataPlaneFetch({
|
|
959
2773
|
...variables,
|
960
2774
|
signal
|
961
2775
|
});
|
962
|
-
const updateTable = (variables, signal) => dataPlaneFetch({
|
2776
|
+
const updateTable = (variables, signal) => dataPlaneFetch({
|
2777
|
+
url: "/db/{dbBranchName}/tables/{tableName}",
|
2778
|
+
method: "patch",
|
2779
|
+
...variables,
|
2780
|
+
signal
|
2781
|
+
});
|
963
2782
|
const getTableSchema = (variables, signal) => dataPlaneFetch({
|
964
2783
|
url: "/db/{dbBranchName}/tables/{tableName}/schema",
|
965
2784
|
method: "get",
|
966
2785
|
...variables,
|
967
2786
|
signal
|
968
2787
|
});
|
969
|
-
const setTableSchema = (variables, signal) => dataPlaneFetch({
|
2788
|
+
const setTableSchema = (variables, signal) => dataPlaneFetch({
|
2789
|
+
url: "/db/{dbBranchName}/tables/{tableName}/schema",
|
2790
|
+
method: "put",
|
2791
|
+
...variables,
|
2792
|
+
signal
|
2793
|
+
});
|
970
2794
|
const getTableColumns = (variables, signal) => dataPlaneFetch({
|
971
2795
|
url: "/db/{dbBranchName}/tables/{tableName}/columns",
|
972
2796
|
method: "get",
|
@@ -974,7 +2798,12 @@ const getTableColumns = (variables, signal) => dataPlaneFetch({
|
|
974
2798
|
signal
|
975
2799
|
});
|
976
2800
|
const addTableColumn = (variables, signal) => dataPlaneFetch(
|
977
|
-
{
|
2801
|
+
{
|
2802
|
+
url: "/db/{dbBranchName}/tables/{tableName}/columns",
|
2803
|
+
method: "post",
|
2804
|
+
...variables,
|
2805
|
+
signal
|
2806
|
+
}
|
978
2807
|
);
|
979
2808
|
const getColumn = (variables, signal) => dataPlaneFetch({
|
980
2809
|
url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
|
@@ -982,15 +2811,30 @@ const getColumn = (variables, signal) => dataPlaneFetch({
|
|
982
2811
|
...variables,
|
983
2812
|
signal
|
984
2813
|
});
|
985
|
-
const updateColumn = (variables, signal) => dataPlaneFetch({
|
2814
|
+
const updateColumn = (variables, signal) => dataPlaneFetch({
|
2815
|
+
url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
|
2816
|
+
method: "patch",
|
2817
|
+
...variables,
|
2818
|
+
signal
|
2819
|
+
});
|
986
2820
|
const deleteColumn = (variables, signal) => dataPlaneFetch({
|
987
2821
|
url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
|
988
2822
|
method: "delete",
|
989
2823
|
...variables,
|
990
2824
|
signal
|
991
2825
|
});
|
992
|
-
const branchTransaction = (variables, signal) => dataPlaneFetch({
|
993
|
-
|
2826
|
+
const branchTransaction = (variables, signal) => dataPlaneFetch({
|
2827
|
+
url: "/db/{dbBranchName}/transaction",
|
2828
|
+
method: "post",
|
2829
|
+
...variables,
|
2830
|
+
signal
|
2831
|
+
});
|
2832
|
+
const insertRecord = (variables, signal) => dataPlaneFetch({
|
2833
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data",
|
2834
|
+
method: "post",
|
2835
|
+
...variables,
|
2836
|
+
signal
|
2837
|
+
});
|
994
2838
|
const getFileItem = (variables, signal) => dataPlaneFetch({
|
995
2839
|
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}/column/{columnName}/file/{fileId}",
|
996
2840
|
method: "get",
|
@@ -1033,11 +2877,36 @@ const getRecord = (variables, signal) => dataPlaneFetch({
|
|
1033
2877
|
...variables,
|
1034
2878
|
signal
|
1035
2879
|
});
|
1036
|
-
const insertRecordWithID = (variables, signal) => dataPlaneFetch({
|
1037
|
-
|
1038
|
-
|
1039
|
-
|
1040
|
-
|
2880
|
+
const insertRecordWithID = (variables, signal) => dataPlaneFetch({
|
2881
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2882
|
+
method: "put",
|
2883
|
+
...variables,
|
2884
|
+
signal
|
2885
|
+
});
|
2886
|
+
const updateRecordWithID = (variables, signal) => dataPlaneFetch({
|
2887
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2888
|
+
method: "patch",
|
2889
|
+
...variables,
|
2890
|
+
signal
|
2891
|
+
});
|
2892
|
+
const upsertRecordWithID = (variables, signal) => dataPlaneFetch({
|
2893
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2894
|
+
method: "post",
|
2895
|
+
...variables,
|
2896
|
+
signal
|
2897
|
+
});
|
2898
|
+
const deleteRecord = (variables, signal) => dataPlaneFetch({
|
2899
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2900
|
+
method: "delete",
|
2901
|
+
...variables,
|
2902
|
+
signal
|
2903
|
+
});
|
2904
|
+
const bulkInsertTableRecords = (variables, signal) => dataPlaneFetch({
|
2905
|
+
url: "/db/{dbBranchName}/tables/{tableName}/bulk",
|
2906
|
+
method: "post",
|
2907
|
+
...variables,
|
2908
|
+
signal
|
2909
|
+
});
|
1041
2910
|
const queryTable = (variables, signal) => dataPlaneFetch({
|
1042
2911
|
url: "/db/{dbBranchName}/tables/{tableName}/query",
|
1043
2912
|
method: "post",
|
@@ -1056,16 +2925,36 @@ const searchTable = (variables, signal) => dataPlaneFetch({
|
|
1056
2925
|
...variables,
|
1057
2926
|
signal
|
1058
2927
|
});
|
1059
|
-
const vectorSearchTable = (variables, signal) => dataPlaneFetch({
|
2928
|
+
const vectorSearchTable = (variables, signal) => dataPlaneFetch({
|
2929
|
+
url: "/db/{dbBranchName}/tables/{tableName}/vectorSearch",
|
2930
|
+
method: "post",
|
2931
|
+
...variables,
|
2932
|
+
signal
|
2933
|
+
});
|
1060
2934
|
const askTable = (variables, signal) => dataPlaneFetch({
|
1061
2935
|
url: "/db/{dbBranchName}/tables/{tableName}/ask",
|
1062
2936
|
method: "post",
|
1063
2937
|
...variables,
|
1064
2938
|
signal
|
1065
2939
|
});
|
1066
|
-
const askTableSession = (variables, signal) => dataPlaneFetch({
|
1067
|
-
|
1068
|
-
|
2940
|
+
const askTableSession = (variables, signal) => dataPlaneFetch({
|
2941
|
+
url: "/db/{dbBranchName}/tables/{tableName}/ask/{sessionId}",
|
2942
|
+
method: "post",
|
2943
|
+
...variables,
|
2944
|
+
signal
|
2945
|
+
});
|
2946
|
+
const summarizeTable = (variables, signal) => dataPlaneFetch({
|
2947
|
+
url: "/db/{dbBranchName}/tables/{tableName}/summarize",
|
2948
|
+
method: "post",
|
2949
|
+
...variables,
|
2950
|
+
signal
|
2951
|
+
});
|
2952
|
+
const aggregateTable = (variables, signal) => dataPlaneFetch({
|
2953
|
+
url: "/db/{dbBranchName}/tables/{tableName}/aggregate",
|
2954
|
+
method: "post",
|
2955
|
+
...variables,
|
2956
|
+
signal
|
2957
|
+
});
|
1069
2958
|
const fileAccess = (variables, signal) => dataPlaneFetch({
|
1070
2959
|
url: "/file/{fileId}",
|
1071
2960
|
method: "get",
|
@@ -1084,14 +2973,33 @@ const sqlQuery = (variables, signal) => dataPlaneFetch({
|
|
1084
2973
|
...variables,
|
1085
2974
|
signal
|
1086
2975
|
});
|
2976
|
+
const sqlBatchQuery = (variables, signal) => dataPlaneFetch({
|
2977
|
+
url: "/db/{dbBranchName}/sql/batch",
|
2978
|
+
method: "post",
|
2979
|
+
...variables,
|
2980
|
+
signal
|
2981
|
+
});
|
1087
2982
|
const operationsByTag$2 = {
|
2983
|
+
cluster: {
|
2984
|
+
listClusterBranches,
|
2985
|
+
listClusterExtensions,
|
2986
|
+
installClusterExtension,
|
2987
|
+
dropClusterExtension,
|
2988
|
+
getClusterMetrics
|
2989
|
+
},
|
1088
2990
|
migrations: {
|
1089
2991
|
applyMigration,
|
2992
|
+
startMigration,
|
2993
|
+
completeMigration,
|
2994
|
+
rollbackMigration,
|
1090
2995
|
adaptTable,
|
2996
|
+
adaptAllTables,
|
1091
2997
|
getBranchMigrationJobStatus,
|
2998
|
+
getMigrationJobs,
|
1092
2999
|
getMigrationJobStatus,
|
1093
3000
|
getMigrationHistory,
|
1094
3001
|
getSchema,
|
3002
|
+
getSchemas,
|
1095
3003
|
getBranchMigrationHistory,
|
1096
3004
|
getBranchMigrationPlan,
|
1097
3005
|
executeBranchMigrationPlan,
|
@@ -1109,6 +3017,8 @@ const operationsByTag$2 = {
|
|
1109
3017
|
createBranch,
|
1110
3018
|
deleteBranch,
|
1111
3019
|
copyBranch,
|
3020
|
+
getBranchMoveStatus,
|
3021
|
+
moveBranch,
|
1112
3022
|
updateBranchMetadata,
|
1113
3023
|
getBranchMetadata,
|
1114
3024
|
getBranchStats,
|
@@ -1150,7 +3060,16 @@ const operationsByTag$2 = {
|
|
1150
3060
|
deleteRecord,
|
1151
3061
|
bulkInsertTableRecords
|
1152
3062
|
},
|
1153
|
-
files: {
|
3063
|
+
files: {
|
3064
|
+
getFileItem,
|
3065
|
+
putFileItem,
|
3066
|
+
deleteFileItem,
|
3067
|
+
getFile,
|
3068
|
+
putFile,
|
3069
|
+
deleteFile,
|
3070
|
+
fileAccess,
|
3071
|
+
fileUpload
|
3072
|
+
},
|
1154
3073
|
searchAndFilter: {
|
1155
3074
|
queryTable,
|
1156
3075
|
searchBranch,
|
@@ -1161,7 +3080,7 @@ const operationsByTag$2 = {
|
|
1161
3080
|
summarizeTable,
|
1162
3081
|
aggregateTable
|
1163
3082
|
},
|
1164
|
-
sql: { sqlQuery }
|
3083
|
+
sql: { sqlQuery, sqlBatchQuery }
|
1165
3084
|
};
|
1166
3085
|
|
1167
3086
|
const controlPlaneFetch = async (options) => fetch$1({ ...options, endpoint: "controlPlane" });
|
@@ -1228,7 +3147,12 @@ const deleteOAuthAccessToken = (variables, signal) => controlPlaneFetch({
|
|
1228
3147
|
...variables,
|
1229
3148
|
signal
|
1230
3149
|
});
|
1231
|
-
const updateOAuthAccessToken = (variables, signal) => controlPlaneFetch({
|
3150
|
+
const updateOAuthAccessToken = (variables, signal) => controlPlaneFetch({
|
3151
|
+
url: "/user/oauth/tokens/{token}",
|
3152
|
+
method: "patch",
|
3153
|
+
...variables,
|
3154
|
+
signal
|
3155
|
+
});
|
1232
3156
|
const getWorkspacesList = (variables, signal) => controlPlaneFetch({
|
1233
3157
|
url: "/workspaces",
|
1234
3158
|
method: "get",
|
@@ -1259,47 +3183,150 @@ const deleteWorkspace = (variables, signal) => controlPlaneFetch({
|
|
1259
3183
|
...variables,
|
1260
3184
|
signal
|
1261
3185
|
});
|
1262
|
-
const
|
1263
|
-
|
3186
|
+
const getWorkspaceSettings = (variables, signal) => controlPlaneFetch({
|
3187
|
+
url: "/workspaces/{workspaceId}/settings",
|
3188
|
+
method: "get",
|
3189
|
+
...variables,
|
3190
|
+
signal
|
3191
|
+
});
|
3192
|
+
const updateWorkspaceSettings = (variables, signal) => controlPlaneFetch({
|
3193
|
+
url: "/workspaces/{workspaceId}/settings",
|
3194
|
+
method: "patch",
|
3195
|
+
...variables,
|
3196
|
+
signal
|
3197
|
+
});
|
3198
|
+
const getWorkspaceMembersList = (variables, signal) => controlPlaneFetch({
|
3199
|
+
url: "/workspaces/{workspaceId}/members",
|
3200
|
+
method: "get",
|
3201
|
+
...variables,
|
3202
|
+
signal
|
3203
|
+
});
|
3204
|
+
const updateWorkspaceMemberRole = (variables, signal) => controlPlaneFetch({
|
3205
|
+
url: "/workspaces/{workspaceId}/members/{userId}",
|
3206
|
+
method: "put",
|
3207
|
+
...variables,
|
3208
|
+
signal
|
3209
|
+
});
|
1264
3210
|
const removeWorkspaceMember = (variables, signal) => controlPlaneFetch({
|
1265
3211
|
url: "/workspaces/{workspaceId}/members/{userId}",
|
1266
3212
|
method: "delete",
|
1267
3213
|
...variables,
|
1268
3214
|
signal
|
1269
3215
|
});
|
1270
|
-
const inviteWorkspaceMember = (variables, signal) => controlPlaneFetch({
|
1271
|
-
|
1272
|
-
|
1273
|
-
|
1274
|
-
|
1275
|
-
|
1276
|
-
const
|
3216
|
+
const inviteWorkspaceMember = (variables, signal) => controlPlaneFetch({
|
3217
|
+
url: "/workspaces/{workspaceId}/invites",
|
3218
|
+
method: "post",
|
3219
|
+
...variables,
|
3220
|
+
signal
|
3221
|
+
});
|
3222
|
+
const updateWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3223
|
+
url: "/workspaces/{workspaceId}/invites/{inviteId}",
|
3224
|
+
method: "patch",
|
3225
|
+
...variables,
|
3226
|
+
signal
|
3227
|
+
});
|
3228
|
+
const cancelWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3229
|
+
url: "/workspaces/{workspaceId}/invites/{inviteId}",
|
3230
|
+
method: "delete",
|
3231
|
+
...variables,
|
3232
|
+
signal
|
3233
|
+
});
|
3234
|
+
const acceptWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3235
|
+
url: "/workspaces/{workspaceId}/invites/{inviteKey}/accept",
|
3236
|
+
method: "post",
|
3237
|
+
...variables,
|
3238
|
+
signal
|
3239
|
+
});
|
3240
|
+
const resendWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3241
|
+
url: "/workspaces/{workspaceId}/invites/{inviteId}/resend",
|
3242
|
+
method: "post",
|
3243
|
+
...variables,
|
3244
|
+
signal
|
3245
|
+
});
|
3246
|
+
const listClusters = (variables, signal) => controlPlaneFetch({
|
3247
|
+
url: "/workspaces/{workspaceId}/clusters",
|
3248
|
+
method: "get",
|
3249
|
+
...variables,
|
3250
|
+
signal
|
3251
|
+
});
|
3252
|
+
const createCluster = (variables, signal) => controlPlaneFetch({
|
3253
|
+
url: "/workspaces/{workspaceId}/clusters",
|
3254
|
+
method: "post",
|
3255
|
+
...variables,
|
3256
|
+
signal
|
3257
|
+
});
|
1277
3258
|
const getCluster = (variables, signal) => controlPlaneFetch({
|
1278
3259
|
url: "/workspaces/{workspaceId}/clusters/{clusterId}",
|
1279
3260
|
method: "get",
|
1280
3261
|
...variables,
|
1281
3262
|
signal
|
1282
3263
|
});
|
1283
|
-
const updateCluster = (variables, signal) => controlPlaneFetch({
|
3264
|
+
const updateCluster = (variables, signal) => controlPlaneFetch({
|
3265
|
+
url: "/workspaces/{workspaceId}/clusters/{clusterId}",
|
3266
|
+
method: "patch",
|
3267
|
+
...variables,
|
3268
|
+
signal
|
3269
|
+
});
|
3270
|
+
const deleteCluster = (variables, signal) => controlPlaneFetch({
|
3271
|
+
url: "/workspaces/{workspaceId}/clusters/{clusterId}",
|
3272
|
+
method: "delete",
|
3273
|
+
...variables,
|
3274
|
+
signal
|
3275
|
+
});
|
1284
3276
|
const getDatabaseList = (variables, signal) => controlPlaneFetch({
|
1285
3277
|
url: "/workspaces/{workspaceId}/dbs",
|
1286
3278
|
method: "get",
|
1287
3279
|
...variables,
|
1288
3280
|
signal
|
1289
3281
|
});
|
1290
|
-
const createDatabase = (variables, signal) => controlPlaneFetch({
|
3282
|
+
const createDatabase = (variables, signal) => controlPlaneFetch({
|
3283
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
3284
|
+
method: "put",
|
3285
|
+
...variables,
|
3286
|
+
signal
|
3287
|
+
});
|
1291
3288
|
const deleteDatabase = (variables, signal) => controlPlaneFetch({
|
1292
3289
|
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
1293
3290
|
method: "delete",
|
1294
3291
|
...variables,
|
1295
3292
|
signal
|
1296
3293
|
});
|
1297
|
-
const getDatabaseMetadata = (variables, signal) => controlPlaneFetch({
|
1298
|
-
|
1299
|
-
|
1300
|
-
|
1301
|
-
|
1302
|
-
|
3294
|
+
const getDatabaseMetadata = (variables, signal) => controlPlaneFetch({
|
3295
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
3296
|
+
method: "get",
|
3297
|
+
...variables,
|
3298
|
+
signal
|
3299
|
+
});
|
3300
|
+
const updateDatabaseMetadata = (variables, signal) => controlPlaneFetch({
|
3301
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
3302
|
+
method: "patch",
|
3303
|
+
...variables,
|
3304
|
+
signal
|
3305
|
+
});
|
3306
|
+
const renameDatabase = (variables, signal) => controlPlaneFetch({
|
3307
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/rename",
|
3308
|
+
method: "post",
|
3309
|
+
...variables,
|
3310
|
+
signal
|
3311
|
+
});
|
3312
|
+
const getDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
|
3313
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
|
3314
|
+
method: "get",
|
3315
|
+
...variables,
|
3316
|
+
signal
|
3317
|
+
});
|
3318
|
+
const updateDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
|
3319
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
|
3320
|
+
method: "put",
|
3321
|
+
...variables,
|
3322
|
+
signal
|
3323
|
+
});
|
3324
|
+
const deleteDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
|
3325
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
|
3326
|
+
method: "delete",
|
3327
|
+
...variables,
|
3328
|
+
signal
|
3329
|
+
});
|
1303
3330
|
const listRegions = (variables, signal) => controlPlaneFetch({
|
1304
3331
|
url: "/workspaces/{workspaceId}/regions",
|
1305
3332
|
method: "get",
|
@@ -1324,6 +3351,8 @@ const operationsByTag$1 = {
|
|
1324
3351
|
getWorkspace,
|
1325
3352
|
updateWorkspace,
|
1326
3353
|
deleteWorkspace,
|
3354
|
+
getWorkspaceSettings,
|
3355
|
+
updateWorkspaceSettings,
|
1327
3356
|
getWorkspaceMembersList,
|
1328
3357
|
updateWorkspaceMemberRole,
|
1329
3358
|
removeWorkspaceMember
|
@@ -1335,7 +3364,13 @@ const operationsByTag$1 = {
|
|
1335
3364
|
acceptWorkspaceMemberInvite,
|
1336
3365
|
resendWorkspaceMemberInvite
|
1337
3366
|
},
|
1338
|
-
xbcontrolOther: {
|
3367
|
+
xbcontrolOther: {
|
3368
|
+
listClusters,
|
3369
|
+
createCluster,
|
3370
|
+
getCluster,
|
3371
|
+
updateCluster,
|
3372
|
+
deleteCluster
|
3373
|
+
},
|
1339
3374
|
databases: {
|
1340
3375
|
getDatabaseList,
|
1341
3376
|
createDatabase,
|
@@ -1355,7 +3390,7 @@ const operationsByTag = deepMerge(operationsByTag$2, operationsByTag$1);
|
|
1355
3390
|
const buildApiClient = () => class {
|
1356
3391
|
constructor(options = {}) {
|
1357
3392
|
const provider = options.host ?? "production";
|
1358
|
-
const apiKey = options.apiKey
|
3393
|
+
const apiKey = options.apiKey;
|
1359
3394
|
const trace = options.trace ?? defaultTrace;
|
1360
3395
|
const clientID = generateUUID();
|
1361
3396
|
if (!apiKey) {
|
@@ -1422,8 +3457,7 @@ function buildTransformString(transformations) {
|
|
1422
3457
|
).join(",");
|
1423
3458
|
}
|
1424
3459
|
function transformImage(url, ...transformations) {
|
1425
|
-
if (!isDefined(url))
|
1426
|
-
return void 0;
|
3460
|
+
if (!isDefined(url)) return void 0;
|
1427
3461
|
const newTransformations = buildTransformString(transformations);
|
1428
3462
|
const { hostname, pathname, search } = new URL(url);
|
1429
3463
|
const pathParts = pathname.split("/");
|
@@ -1536,8 +3570,7 @@ class XataFile {
|
|
1536
3570
|
}
|
1537
3571
|
}
|
1538
3572
|
const parseInputFileEntry = async (entry) => {
|
1539
|
-
if (!isDefined(entry))
|
1540
|
-
return null;
|
3573
|
+
if (!isDefined(entry)) return null;
|
1541
3574
|
const { id, name, mediaType, base64Content, enablePublicUrl, signedUrlTimeout, uploadUrlTimeout } = await entry;
|
1542
3575
|
return compactObject({
|
1543
3576
|
id,
|
@@ -1552,24 +3585,19 @@ const parseInputFileEntry = async (entry) => {
|
|
1552
3585
|
};
|
1553
3586
|
|
1554
3587
|
function cleanFilter(filter) {
|
1555
|
-
if (!isDefined(filter))
|
1556
|
-
|
1557
|
-
if (!isObject(filter))
|
1558
|
-
return filter;
|
3588
|
+
if (!isDefined(filter)) return void 0;
|
3589
|
+
if (!isObject(filter)) return filter;
|
1559
3590
|
const values = Object.fromEntries(
|
1560
3591
|
Object.entries(filter).reduce((acc, [key, value]) => {
|
1561
|
-
if (!isDefined(value))
|
1562
|
-
return acc;
|
3592
|
+
if (!isDefined(value)) return acc;
|
1563
3593
|
if (Array.isArray(value)) {
|
1564
3594
|
const clean = value.map((item) => cleanFilter(item)).filter((item) => isDefined(item));
|
1565
|
-
if (clean.length === 0)
|
1566
|
-
return acc;
|
3595
|
+
if (clean.length === 0) return acc;
|
1567
3596
|
return [...acc, [key, clean]];
|
1568
3597
|
}
|
1569
3598
|
if (isObject(value)) {
|
1570
3599
|
const clean = cleanFilter(value);
|
1571
|
-
if (!isDefined(clean))
|
1572
|
-
return acc;
|
3600
|
+
if (!isDefined(clean)) return acc;
|
1573
3601
|
return [...acc, [key, clean]];
|
1574
3602
|
}
|
1575
3603
|
return [...acc, [key, value]];
|
@@ -1579,10 +3607,8 @@ function cleanFilter(filter) {
|
|
1579
3607
|
}
|
1580
3608
|
|
1581
3609
|
function stringifyJson(value) {
|
1582
|
-
if (!isDefined(value))
|
1583
|
-
|
1584
|
-
if (isString(value))
|
1585
|
-
return value;
|
3610
|
+
if (!isDefined(value)) return value;
|
3611
|
+
if (isString(value)) return value;
|
1586
3612
|
try {
|
1587
3613
|
return JSON.stringify(value);
|
1588
3614
|
} catch (e) {
|
@@ -1597,28 +3623,17 @@ function parseJson(value) {
|
|
1597
3623
|
}
|
1598
3624
|
}
|
1599
3625
|
|
1600
|
-
var
|
1601
|
-
|
1602
|
-
throw TypeError("Cannot " + msg);
|
1603
|
-
};
|
1604
|
-
var __privateGet$4 = (obj, member, getter) => {
|
1605
|
-
__accessCheck$5(obj, member, "read from private field");
|
1606
|
-
return getter ? getter.call(obj) : member.get(obj);
|
1607
|
-
};
|
1608
|
-
var __privateAdd$5 = (obj, member, value) => {
|
1609
|
-
if (member.has(obj))
|
1610
|
-
throw TypeError("Cannot add the same private member more than once");
|
1611
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
1612
|
-
};
|
1613
|
-
var __privateSet$3 = (obj, member, value, setter) => {
|
1614
|
-
__accessCheck$5(obj, member, "write to private field");
|
1615
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
1616
|
-
return value;
|
3626
|
+
var __typeError$5 = (msg) => {
|
3627
|
+
throw TypeError(msg);
|
1617
3628
|
};
|
3629
|
+
var __accessCheck$5 = (obj, member, msg) => member.has(obj) || __typeError$5("Cannot " + msg);
|
3630
|
+
var __privateGet$4 = (obj, member, getter) => (__accessCheck$5(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
3631
|
+
var __privateAdd$5 = (obj, member, value) => member.has(obj) ? __typeError$5("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
3632
|
+
var __privateSet$3 = (obj, member, value, setter) => (__accessCheck$5(obj, member, "write to private field"), member.set(obj, value), value);
|
1618
3633
|
var _query, _page;
|
1619
3634
|
class Page {
|
1620
3635
|
constructor(query, meta, records = []) {
|
1621
|
-
__privateAdd$5(this, _query
|
3636
|
+
__privateAdd$5(this, _query);
|
1622
3637
|
__privateSet$3(this, _query, query);
|
1623
3638
|
this.meta = meta;
|
1624
3639
|
this.records = new PageRecordArray(this, records);
|
@@ -1705,7 +3720,7 @@ class RecordArray extends Array {
|
|
1705
3720
|
const _PageRecordArray = class _PageRecordArray extends Array {
|
1706
3721
|
constructor(...args) {
|
1707
3722
|
super(..._PageRecordArray.parseConstructorParams(...args));
|
1708
|
-
__privateAdd$5(this, _page
|
3723
|
+
__privateAdd$5(this, _page);
|
1709
3724
|
__privateSet$3(this, _page, isObject(args[0]?.meta) ? args[0] : { meta: { page: { cursor: "", more: false } }, records: [] });
|
1710
3725
|
}
|
1711
3726
|
static parseConstructorParams(...args) {
|
@@ -1776,34 +3791,20 @@ const _PageRecordArray = class _PageRecordArray extends Array {
|
|
1776
3791
|
_page = new WeakMap();
|
1777
3792
|
let PageRecordArray = _PageRecordArray;
|
1778
3793
|
|
1779
|
-
var
|
1780
|
-
|
1781
|
-
throw TypeError("Cannot " + msg);
|
3794
|
+
var __typeError$4 = (msg) => {
|
3795
|
+
throw TypeError(msg);
|
1782
3796
|
};
|
1783
|
-
var
|
1784
|
-
|
1785
|
-
|
1786
|
-
|
1787
|
-
var
|
1788
|
-
|
1789
|
-
throw TypeError("Cannot add the same private member more than once");
|
1790
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
1791
|
-
};
|
1792
|
-
var __privateSet$2 = (obj, member, value, setter) => {
|
1793
|
-
__accessCheck$4(obj, member, "write to private field");
|
1794
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
1795
|
-
return value;
|
1796
|
-
};
|
1797
|
-
var __privateMethod$3 = (obj, member, method) => {
|
1798
|
-
__accessCheck$4(obj, member, "access private method");
|
1799
|
-
return method;
|
1800
|
-
};
|
1801
|
-
var _table$1, _repository, _data, _cleanFilterConstraint, cleanFilterConstraint_fn;
|
3797
|
+
var __accessCheck$4 = (obj, member, msg) => member.has(obj) || __typeError$4("Cannot " + msg);
|
3798
|
+
var __privateGet$3 = (obj, member, getter) => (__accessCheck$4(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
3799
|
+
var __privateAdd$4 = (obj, member, value) => member.has(obj) ? __typeError$4("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
3800
|
+
var __privateSet$2 = (obj, member, value, setter) => (__accessCheck$4(obj, member, "write to private field"), member.set(obj, value), value);
|
3801
|
+
var __privateMethod$3 = (obj, member, method) => (__accessCheck$4(obj, member, "access private method"), method);
|
3802
|
+
var _table$1, _repository, _data, _Query_instances, cleanFilterConstraint_fn;
|
1802
3803
|
const _Query = class _Query {
|
1803
3804
|
constructor(repository, table, data, rawParent) {
|
1804
|
-
__privateAdd$4(this,
|
1805
|
-
__privateAdd$4(this, _table$1
|
1806
|
-
__privateAdd$4(this, _repository
|
3805
|
+
__privateAdd$4(this, _Query_instances);
|
3806
|
+
__privateAdd$4(this, _table$1);
|
3807
|
+
__privateAdd$4(this, _repository);
|
1807
3808
|
__privateAdd$4(this, _data, { filter: {} });
|
1808
3809
|
// Implements pagination
|
1809
3810
|
this.meta = { page: { cursor: "start", more: true, size: PAGINATION_DEFAULT_SIZE } };
|
@@ -1881,12 +3882,12 @@ const _Query = class _Query {
|
|
1881
3882
|
filter(a, b) {
|
1882
3883
|
if (arguments.length === 1) {
|
1883
3884
|
const constraints = Object.entries(a ?? {}).map(([column, constraint]) => ({
|
1884
|
-
[column]: __privateMethod$3(this,
|
3885
|
+
[column]: __privateMethod$3(this, _Query_instances, cleanFilterConstraint_fn).call(this, column, constraint)
|
1885
3886
|
}));
|
1886
3887
|
const $all = compact([__privateGet$3(this, _data).filter?.$all].flat().concat(constraints));
|
1887
3888
|
return new _Query(__privateGet$3(this, _repository), __privateGet$3(this, _table$1), { filter: { $all } }, __privateGet$3(this, _data));
|
1888
3889
|
} else {
|
1889
|
-
const constraints = isDefined(a) && isDefined(b) ? [{ [a]: __privateMethod$3(this,
|
3890
|
+
const constraints = isDefined(a) && isDefined(b) ? [{ [a]: __privateMethod$3(this, _Query_instances, cleanFilterConstraint_fn).call(this, a, b) }] : void 0;
|
1890
3891
|
const $all = compact([__privateGet$3(this, _data).filter?.$all].flat().concat(constraints));
|
1891
3892
|
return new _Query(__privateGet$3(this, _repository), __privateGet$3(this, _table$1), { filter: { $all } }, __privateGet$3(this, _data));
|
1892
3893
|
}
|
@@ -1965,8 +3966,7 @@ const _Query = class _Query {
|
|
1965
3966
|
}
|
1966
3967
|
async getFirstOrThrow(options = {}) {
|
1967
3968
|
const records = await this.getMany({ ...options, pagination: { size: 1 } });
|
1968
|
-
if (records[0] === void 0)
|
1969
|
-
throw new Error("No results found.");
|
3969
|
+
if (records[0] === void 0) throw new Error("No results found.");
|
1970
3970
|
return records[0];
|
1971
3971
|
}
|
1972
3972
|
async summarize(params = {}) {
|
@@ -2021,7 +4021,7 @@ const _Query = class _Query {
|
|
2021
4021
|
_table$1 = new WeakMap();
|
2022
4022
|
_repository = new WeakMap();
|
2023
4023
|
_data = new WeakMap();
|
2024
|
-
|
4024
|
+
_Query_instances = new WeakSet();
|
2025
4025
|
cleanFilterConstraint_fn = function(column, value) {
|
2026
4026
|
const columnType = __privateGet$3(this, _table$1).schema?.columns.find(({ name }) => name === column)?.type;
|
2027
4027
|
if (columnType === "multiple" && (isString(value) || isStringArray(value))) {
|
@@ -2082,8 +4082,7 @@ function isSortFilterString(value) {
|
|
2082
4082
|
}
|
2083
4083
|
function isSortFilterBase(filter) {
|
2084
4084
|
return isObject(filter) && Object.entries(filter).every(([key, value]) => {
|
2085
|
-
if (key === "*")
|
2086
|
-
return value === "random";
|
4085
|
+
if (key === "*") return value === "random";
|
2087
4086
|
return value === "asc" || value === "desc";
|
2088
4087
|
});
|
2089
4088
|
}
|
@@ -2104,29 +4103,15 @@ function buildSortFilter(filter) {
|
|
2104
4103
|
}
|
2105
4104
|
}
|
2106
4105
|
|
2107
|
-
var
|
2108
|
-
|
2109
|
-
throw TypeError("Cannot " + msg);
|
2110
|
-
};
|
2111
|
-
var __privateGet$2 = (obj, member, getter) => {
|
2112
|
-
__accessCheck$3(obj, member, "read from private field");
|
2113
|
-
return getter ? getter.call(obj) : member.get(obj);
|
2114
|
-
};
|
2115
|
-
var __privateAdd$3 = (obj, member, value) => {
|
2116
|
-
if (member.has(obj))
|
2117
|
-
throw TypeError("Cannot add the same private member more than once");
|
2118
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
2119
|
-
};
|
2120
|
-
var __privateSet$1 = (obj, member, value, setter) => {
|
2121
|
-
__accessCheck$3(obj, member, "write to private field");
|
2122
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
2123
|
-
return value;
|
2124
|
-
};
|
2125
|
-
var __privateMethod$2 = (obj, member, method) => {
|
2126
|
-
__accessCheck$3(obj, member, "access private method");
|
2127
|
-
return method;
|
4106
|
+
var __typeError$3 = (msg) => {
|
4107
|
+
throw TypeError(msg);
|
2128
4108
|
};
|
2129
|
-
var
|
4109
|
+
var __accessCheck$3 = (obj, member, msg) => member.has(obj) || __typeError$3("Cannot " + msg);
|
4110
|
+
var __privateGet$2 = (obj, member, getter) => (__accessCheck$3(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
4111
|
+
var __privateAdd$3 = (obj, member, value) => member.has(obj) ? __typeError$3("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
4112
|
+
var __privateSet$1 = (obj, member, value, setter) => (__accessCheck$3(obj, member, "write to private field"), member.set(obj, value), value);
|
4113
|
+
var __privateMethod$2 = (obj, member, method) => (__accessCheck$3(obj, member, "access private method"), method);
|
4114
|
+
var _table, _getFetchProps, _db, _schemaTables, _trace, _RestRepository_instances, insertRecordWithoutId_fn, insertRecordWithId_fn, insertRecords_fn, updateRecordWithID_fn, updateRecords_fn, upsertRecordWithID_fn, deleteRecord_fn, deleteRecords_fn, getSchemaTables_fn, transformObjectToApi_fn;
|
2130
4115
|
const BULK_OPERATION_MAX_SIZE = 1e3;
|
2131
4116
|
class Repository extends Query {
|
2132
4117
|
}
|
@@ -2137,21 +4122,12 @@ class RestRepository extends Query {
|
|
2137
4122
|
{ name: options.table, schema: options.schemaTables?.find((table) => table.name === options.table) },
|
2138
4123
|
{}
|
2139
4124
|
);
|
2140
|
-
__privateAdd$3(this,
|
2141
|
-
__privateAdd$3(this,
|
2142
|
-
__privateAdd$3(this,
|
2143
|
-
__privateAdd$3(this,
|
2144
|
-
__privateAdd$3(this,
|
2145
|
-
__privateAdd$3(this,
|
2146
|
-
__privateAdd$3(this, _deleteRecord);
|
2147
|
-
__privateAdd$3(this, _deleteRecords);
|
2148
|
-
__privateAdd$3(this, _getSchemaTables);
|
2149
|
-
__privateAdd$3(this, _transformObjectToApi);
|
2150
|
-
__privateAdd$3(this, _table, void 0);
|
2151
|
-
__privateAdd$3(this, _getFetchProps, void 0);
|
2152
|
-
__privateAdd$3(this, _db, void 0);
|
2153
|
-
__privateAdd$3(this, _schemaTables, void 0);
|
2154
|
-
__privateAdd$3(this, _trace, void 0);
|
4125
|
+
__privateAdd$3(this, _RestRepository_instances);
|
4126
|
+
__privateAdd$3(this, _table);
|
4127
|
+
__privateAdd$3(this, _getFetchProps);
|
4128
|
+
__privateAdd$3(this, _db);
|
4129
|
+
__privateAdd$3(this, _schemaTables);
|
4130
|
+
__privateAdd$3(this, _trace);
|
2155
4131
|
__privateSet$1(this, _table, options.table);
|
2156
4132
|
__privateSet$1(this, _db, options.db);
|
2157
4133
|
__privateSet$1(this, _schemaTables, options.schemaTables);
|
@@ -2170,31 +4146,28 @@ class RestRepository extends Query {
|
|
2170
4146
|
return __privateGet$2(this, _trace).call(this, "create", async () => {
|
2171
4147
|
const ifVersion = parseIfVersion(b, c, d);
|
2172
4148
|
if (Array.isArray(a)) {
|
2173
|
-
if (a.length === 0)
|
2174
|
-
|
2175
|
-
const ids = await __privateMethod$2(this, _insertRecords, insertRecords_fn).call(this, a, { ifVersion, createOnly: true });
|
4149
|
+
if (a.length === 0) return [];
|
4150
|
+
const ids = await __privateMethod$2(this, _RestRepository_instances, insertRecords_fn).call(this, a, { ifVersion, createOnly: true });
|
2176
4151
|
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
2177
4152
|
const result = await this.read(ids, columns);
|
2178
4153
|
return result;
|
2179
4154
|
}
|
2180
4155
|
if (isString(a) && isObject(b)) {
|
2181
|
-
if (a === "")
|
2182
|
-
throw new Error("The id can't be empty");
|
4156
|
+
if (a === "") throw new Error("The id can't be empty");
|
2183
4157
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2184
|
-
return await __privateMethod$2(this,
|
4158
|
+
return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: true, ifVersion });
|
2185
4159
|
}
|
2186
4160
|
if (isObject(a) && isString(a.xata_id)) {
|
2187
|
-
if (a.xata_id === "")
|
2188
|
-
throw new Error("The id can't be empty");
|
4161
|
+
if (a.xata_id === "") throw new Error("The id can't be empty");
|
2189
4162
|
const columns = isValidSelectableColumns(b) ? b : void 0;
|
2190
|
-
return await __privateMethod$2(this,
|
4163
|
+
return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, {
|
2191
4164
|
createOnly: true,
|
2192
4165
|
ifVersion
|
2193
4166
|
});
|
2194
4167
|
}
|
2195
4168
|
if (isObject(a)) {
|
2196
4169
|
const columns = isValidSelectableColumns(b) ? b : void 0;
|
2197
|
-
return __privateMethod$2(this,
|
4170
|
+
return __privateMethod$2(this, _RestRepository_instances, insertRecordWithoutId_fn).call(this, a, columns);
|
2198
4171
|
}
|
2199
4172
|
throw new Error("Invalid arguments for create method");
|
2200
4173
|
});
|
@@ -2203,8 +4176,7 @@ class RestRepository extends Query {
|
|
2203
4176
|
return __privateGet$2(this, _trace).call(this, "read", async () => {
|
2204
4177
|
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
2205
4178
|
if (Array.isArray(a)) {
|
2206
|
-
if (a.length === 0)
|
2207
|
-
return [];
|
4179
|
+
if (a.length === 0) return [];
|
2208
4180
|
const ids = a.map((item) => extractId(item));
|
2209
4181
|
const finalObjects = await this.getAll({ filter: { xata_id: { $any: compact(ids) } }, columns });
|
2210
4182
|
const dictionary = finalObjects.reduce((acc, object) => {
|
@@ -2227,7 +4199,7 @@ class RestRepository extends Query {
|
|
2227
4199
|
queryParams: { columns },
|
2228
4200
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2229
4201
|
});
|
2230
|
-
const schemaTables = await __privateMethod$2(this,
|
4202
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2231
4203
|
return initObject(
|
2232
4204
|
__privateGet$2(this, _db),
|
2233
4205
|
schemaTables,
|
@@ -2268,11 +4240,10 @@ class RestRepository extends Query {
|
|
2268
4240
|
return __privateGet$2(this, _trace).call(this, "update", async () => {
|
2269
4241
|
const ifVersion = parseIfVersion(b, c, d);
|
2270
4242
|
if (Array.isArray(a)) {
|
2271
|
-
if (a.length === 0)
|
2272
|
-
return [];
|
4243
|
+
if (a.length === 0) return [];
|
2273
4244
|
const existing = await this.read(a, ["xata_id"]);
|
2274
4245
|
const updates = a.filter((_item, index) => existing[index] !== null);
|
2275
|
-
await __privateMethod$2(this,
|
4246
|
+
await __privateMethod$2(this, _RestRepository_instances, updateRecords_fn).call(this, updates, {
|
2276
4247
|
ifVersion,
|
2277
4248
|
upsert: false
|
2278
4249
|
});
|
@@ -2283,15 +4254,14 @@ class RestRepository extends Query {
|
|
2283
4254
|
try {
|
2284
4255
|
if (isString(a) && isObject(b)) {
|
2285
4256
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2286
|
-
return await __privateMethod$2(this,
|
4257
|
+
return await __privateMethod$2(this, _RestRepository_instances, updateRecordWithID_fn).call(this, a, b, columns, { ifVersion });
|
2287
4258
|
}
|
2288
4259
|
if (isObject(a) && isString(a.xata_id)) {
|
2289
4260
|
const columns = isValidSelectableColumns(b) ? b : void 0;
|
2290
|
-
return await __privateMethod$2(this,
|
4261
|
+
return await __privateMethod$2(this, _RestRepository_instances, updateRecordWithID_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, { ifVersion });
|
2291
4262
|
}
|
2292
4263
|
} catch (error) {
|
2293
|
-
if (error.status === 422)
|
2294
|
-
return null;
|
4264
|
+
if (error.status === 422) return null;
|
2295
4265
|
throw error;
|
2296
4266
|
}
|
2297
4267
|
throw new Error("Invalid arguments for update method");
|
@@ -2320,9 +4290,8 @@ class RestRepository extends Query {
|
|
2320
4290
|
return __privateGet$2(this, _trace).call(this, "createOrUpdate", async () => {
|
2321
4291
|
const ifVersion = parseIfVersion(b, c, d);
|
2322
4292
|
if (Array.isArray(a)) {
|
2323
|
-
if (a.length === 0)
|
2324
|
-
|
2325
|
-
await __privateMethod$2(this, _updateRecords, updateRecords_fn).call(this, a, {
|
4293
|
+
if (a.length === 0) return [];
|
4294
|
+
await __privateMethod$2(this, _RestRepository_instances, updateRecords_fn).call(this, a, {
|
2326
4295
|
ifVersion,
|
2327
4296
|
upsert: true
|
2328
4297
|
});
|
@@ -2331,16 +4300,14 @@ class RestRepository extends Query {
|
|
2331
4300
|
return result;
|
2332
4301
|
}
|
2333
4302
|
if (isString(a) && isObject(b)) {
|
2334
|
-
if (a === "")
|
2335
|
-
throw new Error("The id can't be empty");
|
4303
|
+
if (a === "") throw new Error("The id can't be empty");
|
2336
4304
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2337
|
-
return await __privateMethod$2(this,
|
4305
|
+
return await __privateMethod$2(this, _RestRepository_instances, upsertRecordWithID_fn).call(this, a, b, columns, { ifVersion });
|
2338
4306
|
}
|
2339
4307
|
if (isObject(a) && isString(a.xata_id)) {
|
2340
|
-
if (a.xata_id === "")
|
2341
|
-
throw new Error("The id can't be empty");
|
4308
|
+
if (a.xata_id === "") throw new Error("The id can't be empty");
|
2342
4309
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2343
|
-
return await __privateMethod$2(this,
|
4310
|
+
return await __privateMethod$2(this, _RestRepository_instances, upsertRecordWithID_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, { ifVersion });
|
2344
4311
|
}
|
2345
4312
|
if (!isDefined(a) && isObject(b)) {
|
2346
4313
|
return await this.create(b, c);
|
@@ -2355,24 +4322,21 @@ class RestRepository extends Query {
|
|
2355
4322
|
return __privateGet$2(this, _trace).call(this, "createOrReplace", async () => {
|
2356
4323
|
const ifVersion = parseIfVersion(b, c, d);
|
2357
4324
|
if (Array.isArray(a)) {
|
2358
|
-
if (a.length === 0)
|
2359
|
-
|
2360
|
-
const ids = await __privateMethod$2(this, _insertRecords, insertRecords_fn).call(this, a, { ifVersion, createOnly: false });
|
4325
|
+
if (a.length === 0) return [];
|
4326
|
+
const ids = await __privateMethod$2(this, _RestRepository_instances, insertRecords_fn).call(this, a, { ifVersion, createOnly: false });
|
2361
4327
|
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
2362
4328
|
const result = await this.read(ids, columns);
|
2363
4329
|
return result;
|
2364
4330
|
}
|
2365
4331
|
if (isString(a) && isObject(b)) {
|
2366
|
-
if (a === "")
|
2367
|
-
throw new Error("The id can't be empty");
|
4332
|
+
if (a === "") throw new Error("The id can't be empty");
|
2368
4333
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2369
|
-
return await __privateMethod$2(this,
|
4334
|
+
return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: false, ifVersion });
|
2370
4335
|
}
|
2371
4336
|
if (isObject(a) && isString(a.xata_id)) {
|
2372
|
-
if (a.xata_id === "")
|
2373
|
-
throw new Error("The id can't be empty");
|
4337
|
+
if (a.xata_id === "") throw new Error("The id can't be empty");
|
2374
4338
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2375
|
-
return await __privateMethod$2(this,
|
4339
|
+
return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, {
|
2376
4340
|
createOnly: false,
|
2377
4341
|
ifVersion
|
2378
4342
|
});
|
@@ -2389,25 +4353,22 @@ class RestRepository extends Query {
|
|
2389
4353
|
async delete(a, b) {
|
2390
4354
|
return __privateGet$2(this, _trace).call(this, "delete", async () => {
|
2391
4355
|
if (Array.isArray(a)) {
|
2392
|
-
if (a.length === 0)
|
2393
|
-
return [];
|
4356
|
+
if (a.length === 0) return [];
|
2394
4357
|
const ids = a.map((o) => {
|
2395
|
-
if (isString(o))
|
2396
|
-
|
2397
|
-
if (isString(o.xata_id))
|
2398
|
-
return o.xata_id;
|
4358
|
+
if (isString(o)) return o;
|
4359
|
+
if (isString(o.xata_id)) return o.xata_id;
|
2399
4360
|
throw new Error("Invalid arguments for delete method");
|
2400
4361
|
});
|
2401
4362
|
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
2402
4363
|
const result = await this.read(a, columns);
|
2403
|
-
await __privateMethod$2(this,
|
4364
|
+
await __privateMethod$2(this, _RestRepository_instances, deleteRecords_fn).call(this, ids);
|
2404
4365
|
return result;
|
2405
4366
|
}
|
2406
4367
|
if (isString(a)) {
|
2407
|
-
return __privateMethod$2(this,
|
4368
|
+
return __privateMethod$2(this, _RestRepository_instances, deleteRecord_fn).call(this, a, b);
|
2408
4369
|
}
|
2409
4370
|
if (isObject(a) && isString(a.xata_id)) {
|
2410
|
-
return __privateMethod$2(this,
|
4371
|
+
return __privateMethod$2(this, _RestRepository_instances, deleteRecord_fn).call(this, a.xata_id, b);
|
2411
4372
|
}
|
2412
4373
|
throw new Error("Invalid arguments for delete method");
|
2413
4374
|
});
|
@@ -2451,7 +4412,7 @@ class RestRepository extends Query {
|
|
2451
4412
|
},
|
2452
4413
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2453
4414
|
});
|
2454
|
-
const schemaTables = await __privateMethod$2(this,
|
4415
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2455
4416
|
return {
|
2456
4417
|
records: records.map((item) => initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), item, ["*"])),
|
2457
4418
|
totalCount
|
@@ -2476,7 +4437,7 @@ class RestRepository extends Query {
|
|
2476
4437
|
},
|
2477
4438
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2478
4439
|
});
|
2479
|
-
const schemaTables = await __privateMethod$2(this,
|
4440
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2480
4441
|
return {
|
2481
4442
|
records: records.map((item) => initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), item, ["*"])),
|
2482
4443
|
totalCount
|
@@ -2518,7 +4479,7 @@ class RestRepository extends Query {
|
|
2518
4479
|
fetchOptions: data.fetchOptions,
|
2519
4480
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2520
4481
|
});
|
2521
|
-
const schemaTables = await __privateMethod$2(this,
|
4482
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2522
4483
|
const records = objects.map(
|
2523
4484
|
(record) => initObject(
|
2524
4485
|
__privateGet$2(this, _db),
|
@@ -2552,7 +4513,7 @@ class RestRepository extends Query {
|
|
2552
4513
|
},
|
2553
4514
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2554
4515
|
});
|
2555
|
-
const schemaTables = await __privateMethod$2(this,
|
4516
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2556
4517
|
return {
|
2557
4518
|
...result,
|
2558
4519
|
summaries: result.summaries.map(
|
@@ -2600,9 +4561,9 @@ _getFetchProps = new WeakMap();
|
|
2600
4561
|
_db = new WeakMap();
|
2601
4562
|
_schemaTables = new WeakMap();
|
2602
4563
|
_trace = new WeakMap();
|
2603
|
-
|
4564
|
+
_RestRepository_instances = new WeakSet();
|
2604
4565
|
insertRecordWithoutId_fn = async function(object, columns = ["*"]) {
|
2605
|
-
const record = await __privateMethod$2(this,
|
4566
|
+
const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2606
4567
|
const response = await insertRecord({
|
2607
4568
|
pathParams: {
|
2608
4569
|
workspace: "{workspaceId}",
|
@@ -2614,14 +4575,12 @@ insertRecordWithoutId_fn = async function(object, columns = ["*"]) {
|
|
2614
4575
|
body: record,
|
2615
4576
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2616
4577
|
});
|
2617
|
-
const schemaTables = await __privateMethod$2(this,
|
4578
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2618
4579
|
return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
|
2619
4580
|
};
|
2620
|
-
_insertRecordWithId = new WeakSet();
|
2621
4581
|
insertRecordWithId_fn = async function(recordId, object, columns = ["*"], { createOnly, ifVersion }) {
|
2622
|
-
if (!recordId)
|
2623
|
-
|
2624
|
-
const record = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
|
4582
|
+
if (!recordId) return null;
|
4583
|
+
const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2625
4584
|
const response = await insertRecordWithID({
|
2626
4585
|
pathParams: {
|
2627
4586
|
workspace: "{workspaceId}",
|
@@ -2634,13 +4593,12 @@ insertRecordWithId_fn = async function(recordId, object, columns = ["*"], { crea
|
|
2634
4593
|
queryParams: { createOnly, columns, ifVersion },
|
2635
4594
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2636
4595
|
});
|
2637
|
-
const schemaTables = await __privateMethod$2(this,
|
4596
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2638
4597
|
return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
|
2639
4598
|
};
|
2640
|
-
_insertRecords = new WeakSet();
|
2641
4599
|
insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
|
2642
4600
|
const operations = await promiseMap(objects, async (object) => {
|
2643
|
-
const record = await __privateMethod$2(this,
|
4601
|
+
const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2644
4602
|
return { insert: { table: __privateGet$2(this, _table), record, createOnly, ifVersion } };
|
2645
4603
|
});
|
2646
4604
|
const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
|
@@ -2665,11 +4623,9 @@ insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
|
|
2665
4623
|
}
|
2666
4624
|
return ids;
|
2667
4625
|
};
|
2668
|
-
_updateRecordWithID = new WeakSet();
|
2669
4626
|
updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVersion }) {
|
2670
|
-
if (!recordId)
|
2671
|
-
|
2672
|
-
const { xata_id: _id, ...record } = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
|
4627
|
+
if (!recordId) return null;
|
4628
|
+
const { xata_id: _id, ...record } = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2673
4629
|
try {
|
2674
4630
|
const response = await updateRecordWithID({
|
2675
4631
|
pathParams: {
|
@@ -2683,7 +4639,7 @@ updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
|
|
2683
4639
|
body: record,
|
2684
4640
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2685
4641
|
});
|
2686
|
-
const schemaTables = await __privateMethod$2(this,
|
4642
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2687
4643
|
return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
|
2688
4644
|
} catch (e) {
|
2689
4645
|
if (isObject(e) && e.status === 404) {
|
@@ -2692,10 +4648,9 @@ updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
|
|
2692
4648
|
throw e;
|
2693
4649
|
}
|
2694
4650
|
};
|
2695
|
-
_updateRecords = new WeakSet();
|
2696
4651
|
updateRecords_fn = async function(objects, { ifVersion, upsert }) {
|
2697
4652
|
const operations = await promiseMap(objects, async ({ xata_id, ...object }) => {
|
2698
|
-
const fields = await __privateMethod$2(this,
|
4653
|
+
const fields = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2699
4654
|
return { update: { table: __privateGet$2(this, _table), id: xata_id, ifVersion, upsert, fields } };
|
2700
4655
|
});
|
2701
4656
|
const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
|
@@ -2720,10 +4675,8 @@ updateRecords_fn = async function(objects, { ifVersion, upsert }) {
|
|
2720
4675
|
}
|
2721
4676
|
return ids;
|
2722
4677
|
};
|
2723
|
-
_upsertRecordWithID = new WeakSet();
|
2724
4678
|
upsertRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVersion }) {
|
2725
|
-
if (!recordId)
|
2726
|
-
return null;
|
4679
|
+
if (!recordId) return null;
|
2727
4680
|
const response = await upsertRecordWithID({
|
2728
4681
|
pathParams: {
|
2729
4682
|
workspace: "{workspaceId}",
|
@@ -2736,13 +4689,11 @@ upsertRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
|
|
2736
4689
|
body: object,
|
2737
4690
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2738
4691
|
});
|
2739
|
-
const schemaTables = await __privateMethod$2(this,
|
4692
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2740
4693
|
return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
|
2741
4694
|
};
|
2742
|
-
_deleteRecord = new WeakSet();
|
2743
4695
|
deleteRecord_fn = async function(recordId, columns = ["*"]) {
|
2744
|
-
if (!recordId)
|
2745
|
-
return null;
|
4696
|
+
if (!recordId) return null;
|
2746
4697
|
try {
|
2747
4698
|
const response = await deleteRecord({
|
2748
4699
|
pathParams: {
|
@@ -2755,7 +4706,7 @@ deleteRecord_fn = async function(recordId, columns = ["*"]) {
|
|
2755
4706
|
queryParams: { columns },
|
2756
4707
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2757
4708
|
});
|
2758
|
-
const schemaTables = await __privateMethod$2(this,
|
4709
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2759
4710
|
return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
|
2760
4711
|
} catch (e) {
|
2761
4712
|
if (isObject(e) && e.status === 404) {
|
@@ -2764,7 +4715,6 @@ deleteRecord_fn = async function(recordId, columns = ["*"]) {
|
|
2764
4715
|
throw e;
|
2765
4716
|
}
|
2766
4717
|
};
|
2767
|
-
_deleteRecords = new WeakSet();
|
2768
4718
|
deleteRecords_fn = async function(recordIds) {
|
2769
4719
|
const chunkedOperations = chunk(
|
2770
4720
|
compact(recordIds).map((id) => ({ delete: { table: __privateGet$2(this, _table), id } })),
|
@@ -2782,10 +4732,8 @@ deleteRecords_fn = async function(recordIds) {
|
|
2782
4732
|
});
|
2783
4733
|
}
|
2784
4734
|
};
|
2785
|
-
_getSchemaTables = new WeakSet();
|
2786
4735
|
getSchemaTables_fn = async function() {
|
2787
|
-
if (__privateGet$2(this, _schemaTables))
|
2788
|
-
return __privateGet$2(this, _schemaTables);
|
4736
|
+
if (__privateGet$2(this, _schemaTables)) return __privateGet$2(this, _schemaTables);
|
2789
4737
|
const { schema } = await getBranchDetails({
|
2790
4738
|
pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
|
2791
4739
|
...__privateGet$2(this, _getFetchProps).call(this)
|
@@ -2793,16 +4741,13 @@ getSchemaTables_fn = async function() {
|
|
2793
4741
|
__privateSet$1(this, _schemaTables, schema.tables);
|
2794
4742
|
return schema.tables;
|
2795
4743
|
};
|
2796
|
-
_transformObjectToApi = new WeakSet();
|
2797
4744
|
transformObjectToApi_fn = async function(object) {
|
2798
|
-
const schemaTables = await __privateMethod$2(this,
|
4745
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2799
4746
|
const schema = schemaTables.find((table) => table.name === __privateGet$2(this, _table));
|
2800
|
-
if (!schema)
|
2801
|
-
throw new Error(`Table ${__privateGet$2(this, _table)} not found in schema`);
|
4747
|
+
if (!schema) throw new Error(`Table ${__privateGet$2(this, _table)} not found in schema`);
|
2802
4748
|
const result = {};
|
2803
4749
|
for (const [key, value] of Object.entries(object)) {
|
2804
|
-
if (["xata_version", "xata_createdat", "xata_updatedat"].includes(key))
|
2805
|
-
continue;
|
4750
|
+
if (["xata_version", "xata_createdat", "xata_updatedat"].includes(key)) continue;
|
2806
4751
|
const type = schema.columns.find((column) => column.name === key)?.type;
|
2807
4752
|
switch (type) {
|
2808
4753
|
case "link": {
|
@@ -2832,11 +4777,9 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
|
|
2832
4777
|
const data = {};
|
2833
4778
|
Object.assign(data, { ...object });
|
2834
4779
|
const { columns } = schemaTables.find(({ name }) => name === table) ?? {};
|
2835
|
-
if (!columns)
|
2836
|
-
console.error(`Table ${table} not found in schema`);
|
4780
|
+
if (!columns) console.error(`Table ${table} not found in schema`);
|
2837
4781
|
for (const column of columns ?? []) {
|
2838
|
-
if (!isValidColumn(selectedColumns, column))
|
2839
|
-
continue;
|
4782
|
+
if (!isValidColumn(selectedColumns, column)) continue;
|
2840
4783
|
const value = data[column.name];
|
2841
4784
|
switch (column.type) {
|
2842
4785
|
case "datetime": {
|
@@ -2922,15 +4865,12 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
|
|
2922
4865
|
return record;
|
2923
4866
|
};
|
2924
4867
|
function extractId(value) {
|
2925
|
-
if (isString(value))
|
2926
|
-
|
2927
|
-
if (isObject(value) && isString(value.xata_id))
|
2928
|
-
return value.xata_id;
|
4868
|
+
if (isString(value)) return value;
|
4869
|
+
if (isObject(value) && isString(value.xata_id)) return value.xata_id;
|
2929
4870
|
return void 0;
|
2930
4871
|
}
|
2931
4872
|
function isValidColumn(columns, column) {
|
2932
|
-
if (columns.includes("*"))
|
2933
|
-
return true;
|
4873
|
+
if (columns.includes("*")) return true;
|
2934
4874
|
return columns.filter((item) => isString(item) && item.startsWith(column.name)).length > 0;
|
2935
4875
|
}
|
2936
4876
|
function parseIfVersion(...args) {
|
@@ -2970,19 +4910,12 @@ const includesAll = (value) => ({ $includesAll: value });
|
|
2970
4910
|
const includesNone = (value) => ({ $includesNone: value });
|
2971
4911
|
const includesAny = (value) => ({ $includesAny: value });
|
2972
4912
|
|
2973
|
-
var
|
2974
|
-
|
2975
|
-
throw TypeError("Cannot " + msg);
|
2976
|
-
};
|
2977
|
-
var __privateGet$1 = (obj, member, getter) => {
|
2978
|
-
__accessCheck$2(obj, member, "read from private field");
|
2979
|
-
return getter ? getter.call(obj) : member.get(obj);
|
2980
|
-
};
|
2981
|
-
var __privateAdd$2 = (obj, member, value) => {
|
2982
|
-
if (member.has(obj))
|
2983
|
-
throw TypeError("Cannot add the same private member more than once");
|
2984
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
4913
|
+
var __typeError$2 = (msg) => {
|
4914
|
+
throw TypeError(msg);
|
2985
4915
|
};
|
4916
|
+
var __accessCheck$2 = (obj, member, msg) => member.has(obj) || __typeError$2("Cannot " + msg);
|
4917
|
+
var __privateGet$1 = (obj, member, getter) => (__accessCheck$2(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
4918
|
+
var __privateAdd$2 = (obj, member, value) => member.has(obj) ? __typeError$2("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
2986
4919
|
var _tables;
|
2987
4920
|
class SchemaPlugin extends XataPlugin {
|
2988
4921
|
constructor() {
|
@@ -2994,8 +4927,7 @@ class SchemaPlugin extends XataPlugin {
|
|
2994
4927
|
{},
|
2995
4928
|
{
|
2996
4929
|
get: (_target, table) => {
|
2997
|
-
if (!isString(table))
|
2998
|
-
throw new Error("Invalid table name");
|
4930
|
+
if (!isString(table)) throw new Error("Invalid table name");
|
2999
4931
|
if (__privateGet$1(this, _tables)[table] === void 0) {
|
3000
4932
|
__privateGet$1(this, _tables)[table] = new RestRepository({ db, pluginOptions, table, schemaTables: pluginOptions.tables });
|
3001
4933
|
}
|
@@ -3086,30 +5018,23 @@ function getContentType(file) {
|
|
3086
5018
|
return "application/octet-stream";
|
3087
5019
|
}
|
3088
5020
|
|
3089
|
-
var
|
3090
|
-
|
3091
|
-
throw TypeError("Cannot " + msg);
|
3092
|
-
};
|
3093
|
-
var __privateAdd$1 = (obj, member, value) => {
|
3094
|
-
if (member.has(obj))
|
3095
|
-
throw TypeError("Cannot add the same private member more than once");
|
3096
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
3097
|
-
};
|
3098
|
-
var __privateMethod$1 = (obj, member, method) => {
|
3099
|
-
__accessCheck$1(obj, member, "access private method");
|
3100
|
-
return method;
|
5021
|
+
var __typeError$1 = (msg) => {
|
5022
|
+
throw TypeError(msg);
|
3101
5023
|
};
|
3102
|
-
var
|
5024
|
+
var __accessCheck$1 = (obj, member, msg) => member.has(obj) || __typeError$1("Cannot " + msg);
|
5025
|
+
var __privateAdd$1 = (obj, member, value) => member.has(obj) ? __typeError$1("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
5026
|
+
var __privateMethod$1 = (obj, member, method) => (__accessCheck$1(obj, member, "access private method"), method);
|
5027
|
+
var _SearchPlugin_instances, search_fn;
|
3103
5028
|
class SearchPlugin extends XataPlugin {
|
3104
5029
|
constructor(db) {
|
3105
5030
|
super();
|
3106
5031
|
this.db = db;
|
3107
|
-
__privateAdd$1(this,
|
5032
|
+
__privateAdd$1(this, _SearchPlugin_instances);
|
3108
5033
|
}
|
3109
5034
|
build(pluginOptions) {
|
3110
5035
|
return {
|
3111
5036
|
all: async (query, options = {}) => {
|
3112
|
-
const { records, totalCount } = await __privateMethod$1(this,
|
5037
|
+
const { records, totalCount } = await __privateMethod$1(this, _SearchPlugin_instances, search_fn).call(this, query, options, pluginOptions);
|
3113
5038
|
return {
|
3114
5039
|
totalCount,
|
3115
5040
|
records: records.map((record) => {
|
@@ -3119,7 +5044,7 @@ class SearchPlugin extends XataPlugin {
|
|
3119
5044
|
};
|
3120
5045
|
},
|
3121
5046
|
byTable: async (query, options = {}) => {
|
3122
|
-
const { records: rawRecords, totalCount } = await __privateMethod$1(this,
|
5047
|
+
const { records: rawRecords, totalCount } = await __privateMethod$1(this, _SearchPlugin_instances, search_fn).call(this, query, options, pluginOptions);
|
3123
5048
|
const records = rawRecords.reduce((acc, record) => {
|
3124
5049
|
const table = record.xata_table;
|
3125
5050
|
const items = acc[table] ?? [];
|
@@ -3131,7 +5056,7 @@ class SearchPlugin extends XataPlugin {
|
|
3131
5056
|
};
|
3132
5057
|
}
|
3133
5058
|
}
|
3134
|
-
|
5059
|
+
_SearchPlugin_instances = new WeakSet();
|
3135
5060
|
search_fn = async function(query, options, pluginOptions) {
|
3136
5061
|
const { tables, fuzziness, highlight, prefix, page } = options ?? {};
|
3137
5062
|
const { records, totalCount } = await searchBranch({
|
@@ -3167,8 +5092,7 @@ function arrayString(val) {
|
|
3167
5092
|
return result;
|
3168
5093
|
}
|
3169
5094
|
function prepareValue(value) {
|
3170
|
-
if (!isDefined(value))
|
3171
|
-
return null;
|
5095
|
+
if (!isDefined(value)) return null;
|
3172
5096
|
if (value instanceof Date) {
|
3173
5097
|
return value.toISOString();
|
3174
5098
|
}
|
@@ -3195,31 +5119,42 @@ function prepareParams(param1, param2) {
|
|
3195
5119
|
return { statement, params: param2?.map((value) => prepareValue(value)) };
|
3196
5120
|
}
|
3197
5121
|
if (isObject(param1)) {
|
3198
|
-
const { statement, params, consistency } = param1;
|
3199
|
-
return { statement, params: params?.map((value) => prepareValue(value)), consistency };
|
5122
|
+
const { statement, params, consistency, responseType } = param1;
|
5123
|
+
return { statement, params: params?.map((value) => prepareValue(value)), consistency, responseType };
|
3200
5124
|
}
|
3201
5125
|
throw new Error("Invalid query");
|
3202
5126
|
}
|
3203
5127
|
|
3204
5128
|
class SQLPlugin extends XataPlugin {
|
3205
5129
|
build(pluginOptions) {
|
3206
|
-
|
5130
|
+
const sqlFunction = async (query, ...parameters) => {
|
3207
5131
|
if (!isParamsObject(query) && (!isTemplateStringsArray(query) || !Array.isArray(parameters))) {
|
3208
5132
|
throw new Error("Invalid usage of `xata.sql`. Please use it as a tagged template or with an object.");
|
3209
5133
|
}
|
3210
|
-
const { statement, params, consistency } = prepareParams(query, parameters);
|
3211
|
-
const {
|
3212
|
-
records,
|
3213
|
-
rows,
|
3214
|
-
warning,
|
3215
|
-
columns = []
|
3216
|
-
} = await sqlQuery({
|
5134
|
+
const { statement, params, consistency, responseType } = prepareParams(query, parameters);
|
5135
|
+
const { warning, columns, ...response } = await sqlQuery({
|
3217
5136
|
pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
|
3218
|
-
body: { statement, params, consistency },
|
5137
|
+
body: { statement, params, consistency, responseType },
|
3219
5138
|
...pluginOptions
|
3220
5139
|
});
|
5140
|
+
const records = "records" in response ? response.records : void 0;
|
5141
|
+
const rows = "rows" in response ? response.rows : void 0;
|
3221
5142
|
return { records, rows, warning, columns };
|
3222
5143
|
};
|
5144
|
+
sqlFunction.connectionString = buildConnectionString(pluginOptions);
|
5145
|
+
sqlFunction.batch = async (query) => {
|
5146
|
+
const { results } = await sqlBatchQuery({
|
5147
|
+
pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
|
5148
|
+
body: {
|
5149
|
+
statements: query.statements.map(({ statement, params }) => ({ statement, params })),
|
5150
|
+
consistency: query.consistency,
|
5151
|
+
responseType: query.responseType
|
5152
|
+
},
|
5153
|
+
...pluginOptions
|
5154
|
+
});
|
5155
|
+
return { results };
|
5156
|
+
};
|
5157
|
+
return sqlFunction;
|
3223
5158
|
}
|
3224
5159
|
}
|
3225
5160
|
function isTemplateStringsArray(strings) {
|
@@ -3228,6 +5163,32 @@ function isTemplateStringsArray(strings) {
|
|
3228
5163
|
function isParamsObject(params) {
|
3229
5164
|
return isObject(params) && "statement" in params;
|
3230
5165
|
}
|
5166
|
+
function buildDomain(host, region) {
|
5167
|
+
switch (host) {
|
5168
|
+
case "production":
|
5169
|
+
return `${region}.sql.xata.sh`;
|
5170
|
+
case "staging":
|
5171
|
+
return `${region}.sql.staging-xata.dev`;
|
5172
|
+
case "dev":
|
5173
|
+
return `${region}.sql.dev-xata.dev`;
|
5174
|
+
case "local":
|
5175
|
+
return "localhost:7654";
|
5176
|
+
default:
|
5177
|
+
throw new Error("Invalid host provider");
|
5178
|
+
}
|
5179
|
+
}
|
5180
|
+
function buildConnectionString({ apiKey, workspacesApiUrl, branch }) {
|
5181
|
+
const url = isString(workspacesApiUrl) ? workspacesApiUrl : workspacesApiUrl("", {});
|
5182
|
+
const parts = parseWorkspacesUrlParts(url);
|
5183
|
+
if (!parts) throw new Error("Invalid workspaces URL");
|
5184
|
+
const { workspace: workspaceSlug, region, database, host } = parts;
|
5185
|
+
const domain = buildDomain(host, region);
|
5186
|
+
const workspace = workspaceSlug.split("-").pop();
|
5187
|
+
if (!workspace || !region || !database || !apiKey || !branch) {
|
5188
|
+
throw new Error("Unable to build xata connection string");
|
5189
|
+
}
|
5190
|
+
return `postgresql://${workspace}:${apiKey}@${domain}/${database}:${branch}?sslmode=require`;
|
5191
|
+
}
|
3231
5192
|
|
3232
5193
|
class TransactionPlugin extends XataPlugin {
|
3233
5194
|
build(pluginOptions) {
|
@@ -3244,41 +5205,27 @@ class TransactionPlugin extends XataPlugin {
|
|
3244
5205
|
}
|
3245
5206
|
}
|
3246
5207
|
|
3247
|
-
var
|
3248
|
-
|
3249
|
-
throw TypeError("Cannot " + msg);
|
3250
|
-
};
|
3251
|
-
var __privateGet = (obj, member, getter) => {
|
3252
|
-
__accessCheck(obj, member, "read from private field");
|
3253
|
-
return getter ? getter.call(obj) : member.get(obj);
|
3254
|
-
};
|
3255
|
-
var __privateAdd = (obj, member, value) => {
|
3256
|
-
if (member.has(obj))
|
3257
|
-
throw TypeError("Cannot add the same private member more than once");
|
3258
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
3259
|
-
};
|
3260
|
-
var __privateSet = (obj, member, value, setter) => {
|
3261
|
-
__accessCheck(obj, member, "write to private field");
|
3262
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
3263
|
-
return value;
|
3264
|
-
};
|
3265
|
-
var __privateMethod = (obj, member, method) => {
|
3266
|
-
__accessCheck(obj, member, "access private method");
|
3267
|
-
return method;
|
5208
|
+
// Bundler-generated private-member helpers (esbuild class lowering).
// `member` is a WeakMap (private fields/methods) or WeakSet (brand checks).
var __typeError = function (msg) {
  throw TypeError(msg);
};
// Brand check: truthy when `obj` carries the private member, throws otherwise.
var __accessCheck = function (obj, member, msg) {
  return member.has(obj) || __typeError("Cannot " + msg);
};
// Reads a private field; `getter` takes precedence over the raw stored value.
var __privateGet = function (obj, member, getter) {
  __accessCheck(obj, member, "read from private field");
  return getter ? getter.call(obj) : member.get(obj);
};
// Registers a private member on a new instance; double registration is an error.
var __privateAdd = function (obj, member, value) {
  if (member.has(obj)) __typeError("Cannot add the same private member more than once");
  return member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
};
// Writes a private field and returns the assigned value (assignment semantics).
// NOTE: the generated helper ignores `setter`; kept as-is to match the bundler output.
var __privateSet = function (obj, member, value, setter) {
  __accessCheck(obj, member, "write to private field");
  member.set(obj, value);
  return value;
};
// Resolves a private method after a brand check.
var __privateMethod = function (obj, member, method) {
  __accessCheck(obj, member, "access private method");
  return method;
};
|
3269
5216
|
const buildClient = (plugins) => {
|
3270
|
-
var _options,
|
5217
|
+
var _options, _instances, parseOptions_fn, getFetchProps_fn, _a;
|
3271
5218
|
return _a = class {
|
3272
5219
|
constructor(options = {}, tables) {
|
3273
|
-
__privateAdd(this,
|
3274
|
-
__privateAdd(this,
|
3275
|
-
|
3276
|
-
const safeOptions = __privateMethod(this, _parseOptions, parseOptions_fn).call(this, options);
|
5220
|
+
__privateAdd(this, _instances);
|
5221
|
+
__privateAdd(this, _options);
|
5222
|
+
const safeOptions = __privateMethod(this, _instances, parseOptions_fn).call(this, options);
|
3277
5223
|
__privateSet(this, _options, safeOptions);
|
3278
5224
|
const pluginOptions = {
|
3279
|
-
...__privateMethod(this,
|
5225
|
+
...__privateMethod(this, _instances, getFetchProps_fn).call(this, safeOptions),
|
3280
5226
|
host: safeOptions.host,
|
3281
|
-
tables
|
5227
|
+
tables,
|
5228
|
+
branch: safeOptions.branch
|
3282
5229
|
};
|
3283
5230
|
const db = new SchemaPlugin().build(pluginOptions);
|
3284
5231
|
const search = new SearchPlugin(db).build(pluginOptions);
|
@@ -3292,8 +5239,7 @@ const buildClient = (plugins) => {
|
|
3292
5239
|
this.sql = sql;
|
3293
5240
|
this.files = files;
|
3294
5241
|
for (const [key, namespace] of Object.entries(plugins ?? {})) {
|
3295
|
-
if (namespace === void 0)
|
3296
|
-
continue;
|
5242
|
+
if (namespace === void 0) continue;
|
3297
5243
|
this[key] = namespace.build(pluginOptions);
|
3298
5244
|
}
|
3299
5245
|
}
|
@@ -3302,8 +5248,8 @@ const buildClient = (plugins) => {
|
|
3302
5248
|
const branch = __privateGet(this, _options).branch;
|
3303
5249
|
return { databaseURL, branch };
|
3304
5250
|
}
|
3305
|
-
}, _options = new WeakMap(),
|
3306
|
-
const enableBrowser = options?.enableBrowser ??
|
5251
|
+
}, _options = new WeakMap(), _instances = new WeakSet(), parseOptions_fn = function(options) {
|
5252
|
+
const enableBrowser = options?.enableBrowser ?? false;
|
3307
5253
|
const isBrowser = typeof window !== "undefined" && typeof Deno === "undefined";
|
3308
5254
|
if (isBrowser && !enableBrowser) {
|
3309
5255
|
throw new Error(
|
@@ -3311,8 +5257,9 @@ const buildClient = (plugins) => {
|
|
3311
5257
|
);
|
3312
5258
|
}
|
3313
5259
|
const fetch = getFetchImplementation(options?.fetch);
|
3314
|
-
const databaseURL = options?.databaseURL
|
3315
|
-
const apiKey = options?.apiKey
|
5260
|
+
const databaseURL = options?.databaseURL;
|
5261
|
+
const apiKey = options?.apiKey;
|
5262
|
+
const branch = options?.branch;
|
3316
5263
|
const trace = options?.trace ?? defaultTrace;
|
3317
5264
|
const clientName = options?.clientName;
|
3318
5265
|
const host = options?.host ?? "production";
|
@@ -3323,25 +5270,8 @@ const buildClient = (plugins) => {
|
|
3323
5270
|
if (!databaseURL) {
|
3324
5271
|
throw new Error("Option databaseURL is required");
|
3325
5272
|
}
|
3326
|
-
|
3327
|
-
|
3328
|
-
const branch = options?.branch || previewBranch || envBranch || "main";
|
3329
|
-
if (!!previewBranch && branch !== previewBranch) {
|
3330
|
-
console.warn(
|
3331
|
-
`Ignoring preview branch ${previewBranch} because branch option was passed to the client constructor with value ${branch}`
|
3332
|
-
);
|
3333
|
-
} else if (!!envBranch && branch !== envBranch) {
|
3334
|
-
console.warn(
|
3335
|
-
`Ignoring branch ${envBranch} because branch option was passed to the client constructor with value ${branch}`
|
3336
|
-
);
|
3337
|
-
} else if (!!previewBranch && !!envBranch && previewBranch !== envBranch) {
|
3338
|
-
console.warn(
|
3339
|
-
`Ignoring preview branch ${previewBranch} and branch ${envBranch} because branch option was passed to the client constructor with value ${branch}`
|
3340
|
-
);
|
3341
|
-
} else if (!previewBranch && !envBranch && options?.branch === void 0) {
|
3342
|
-
console.warn(
|
3343
|
-
`No branch was passed to the client constructor. Using default branch ${branch}. You can set the branch with the environment variable XATA_BRANCH or by passing the branch option to the client constructor.`
|
3344
|
-
);
|
5273
|
+
if (!branch) {
|
5274
|
+
throw new Error("Option branch is required");
|
3345
5275
|
}
|
3346
5276
|
return {
|
3347
5277
|
fetch,
|
@@ -3355,7 +5285,7 @@ const buildClient = (plugins) => {
|
|
3355
5285
|
clientName,
|
3356
5286
|
xataAgentExtra
|
3357
5287
|
};
|
3358
|
-
},
|
5288
|
+
}, getFetchProps_fn = function({
|
3359
5289
|
fetch,
|
3360
5290
|
apiKey,
|
3361
5291
|
databaseURL,
|
@@ -3396,26 +5326,19 @@ class Serializer {
|
|
3396
5326
|
}
|
3397
5327
|
toJSON(data) {
|
3398
5328
|
function visit(obj) {
|
3399
|
-
if (Array.isArray(obj))
|
3400
|
-
return obj.map(visit);
|
5329
|
+
if (Array.isArray(obj)) return obj.map(visit);
|
3401
5330
|
const type = typeof obj;
|
3402
|
-
if (type === "undefined")
|
3403
|
-
|
3404
|
-
if (
|
3405
|
-
return { [META]: "bigint", [VALUE]: obj.toString() };
|
3406
|
-
if (obj === null || type !== "object")
|
3407
|
-
return obj;
|
5331
|
+
if (type === "undefined") return { [META]: "undefined" };
|
5332
|
+
if (type === "bigint") return { [META]: "bigint", [VALUE]: obj.toString() };
|
5333
|
+
if (obj === null || type !== "object") return obj;
|
3408
5334
|
const constructor = obj.constructor;
|
3409
5335
|
const o = { [META]: constructor.name };
|
3410
5336
|
for (const [key, value] of Object.entries(obj)) {
|
3411
5337
|
o[key] = visit(value);
|
3412
5338
|
}
|
3413
|
-
if (constructor === Date)
|
3414
|
-
|
3415
|
-
if (constructor ===
|
3416
|
-
o[VALUE] = Object.fromEntries(obj);
|
3417
|
-
if (constructor === Set)
|
3418
|
-
o[VALUE] = [...obj];
|
5339
|
+
if (constructor === Date) o[VALUE] = obj.toISOString();
|
5340
|
+
if (constructor === Map) o[VALUE] = Object.fromEntries(obj);
|
5341
|
+
if (constructor === Set) o[VALUE] = [...obj];
|
3419
5342
|
return o;
|
3420
5343
|
}
|
3421
5344
|
return JSON.stringify(visit(data));
|
@@ -3428,16 +5351,11 @@ class Serializer {
|
|
3428
5351
|
if (constructor) {
|
3429
5352
|
return Object.assign(Object.create(constructor.prototype), rest);
|
3430
5353
|
}
|
3431
|
-
if (clazz === "Date")
|
3432
|
-
|
3433
|
-
if (clazz === "
|
3434
|
-
|
3435
|
-
if (clazz === "
|
3436
|
-
return new Map(Object.entries(val));
|
3437
|
-
if (clazz === "bigint")
|
3438
|
-
return BigInt(val);
|
3439
|
-
if (clazz === "undefined")
|
3440
|
-
return void 0;
|
5354
|
+
if (clazz === "Date") return new Date(val);
|
5355
|
+
if (clazz === "Set") return new Set(val);
|
5356
|
+
if (clazz === "Map") return new Map(Object.entries(val));
|
5357
|
+
if (clazz === "bigint") return BigInt(val);
|
5358
|
+
if (clazz === "undefined") return void 0;
|
3441
5359
|
return rest;
|
3442
5360
|
}
|
3443
5361
|
return value;
|
@@ -3452,6 +5370,47 @@ const deserialize = (json) => {
|
|
3452
5370
|
return defaultSerializer.fromJSON(json);
|
3453
5371
|
};
|
3454
5372
|
|
5373
|
+
/**
 * Normalizes the `environment` option into a key/value lookup object.
 * - A function (e.g. `Deno.env.get`-style accessor) is wrapped in a Proxy so
 *   that reading `env.SOME_KEY` calls `environment("SOME_KEY")`.
 * - A plain object is returned unchanged.
 * - Anything else, or any thrown error, yields an empty object (best-effort).
 * @param {unknown} environment - Object, accessor function, or anything else.
 * @returns {object} A readable environment lookup (possibly empty).
 */
function parseEnvironment(environment) {
  try {
    if (typeof environment === "function") {
      // BUG FIX: the Proxy `get` trap receives (target, prop). The previous
      // code called `environment(target)`, passing the empty target object for
      // every property read, so all lookups returned the same wrong value.
      // Forward the accessed property key instead.
      return new Proxy(
        {},
        {
          get(_target, prop) {
            return environment(prop);
          }
        }
      );
    }
    if (isObject(environment)) {
      return environment;
    }
  } catch (error) {
    // Best-effort: fall through to an empty environment on any failure.
  }
  return {};
}
|
5392
|
+
/**
 * Composes the deterministic name of a deploy-preview branch.
 * @param {{ org: string, branch: string }} parts - Repo owner and git ref.
 * @returns {string} Branch name in the form `preview-<org>-<branch>`.
 */
function buildPreviewBranchName({ org, branch }) {
  const prefix = "preview";
  return `${prefix}-${org}-${branch}`;
}
|
5395
|
+
/**
 * Determines the preview branch name for the current deploy environment.
 * Precedence: an explicit `deployPreviewBranch` value wins; otherwise, for
 * `deployPreview === "vercel"`, the name is derived from the Vercel git
 * metadata. Any failure resolves to `undefined` (previews are best-effort).
 * @param {unknown} environment - Raw environment passed to `parseEnvironment`.
 * @returns {string | undefined} The preview branch name, or `undefined`.
 */
function getDeployPreviewBranch(environment) {
  try {
    const env = parseEnvironment(environment);
    const { deployPreview, deployPreviewBranch, vercelGitCommitRef, vercelGitRepoOwner } = env;
    // An explicit override always wins over provider-derived names.
    if (deployPreviewBranch) return deployPreviewBranch;
    if (deployPreview === "vercel") {
      if (!vercelGitCommitRef || !vercelGitRepoOwner) {
        console.warn("XATA_PREVIEW=vercel but VERCEL_GIT_COMMIT_REF or VERCEL_GIT_REPO_OWNER is not valid");
        return void 0;
      }
      return buildPreviewBranchName({ org: vercelGitRepoOwner, branch: vercelGitCommitRef });
    }
    return void 0;
  } catch (err) {
    return void 0;
  }
}
|
5413
|
+
|
3455
5414
|
class XataError extends Error {
|
3456
5415
|
constructor(message, status) {
|
3457
5416
|
super(message);
|
@@ -3460,6 +5419,7 @@ class XataError extends Error {
|
|
3460
5419
|
}
|
3461
5420
|
|
3462
5421
|
exports.BaseClient = BaseClient;
|
5422
|
+
exports.Buffer = Buffer;
|
3463
5423
|
exports.FetcherError = FetcherError;
|
3464
5424
|
exports.FilesPlugin = FilesPlugin;
|
3465
5425
|
exports.Operations = operationsByTag;
|
@@ -3485,6 +5445,7 @@ exports.XataError = XataError;
|
|
3485
5445
|
exports.XataFile = XataFile;
|
3486
5446
|
exports.XataPlugin = XataPlugin;
|
3487
5447
|
exports.acceptWorkspaceMemberInvite = acceptWorkspaceMemberInvite;
|
5448
|
+
exports.adaptAllTables = adaptAllTables;
|
3488
5449
|
exports.adaptTable = adaptTable;
|
3489
5450
|
exports.addGitBranchesEntry = addGitBranchesEntry;
|
3490
5451
|
exports.addTableColumn = addTableColumn;
|
@@ -3502,6 +5463,7 @@ exports.cancelWorkspaceMemberInvite = cancelWorkspaceMemberInvite;
|
|
3502
5463
|
exports.compareBranchSchemas = compareBranchSchemas;
|
3503
5464
|
exports.compareBranchWithUserSchema = compareBranchWithUserSchema;
|
3504
5465
|
exports.compareMigrationRequest = compareMigrationRequest;
|
5466
|
+
exports.completeMigration = completeMigration;
|
3505
5467
|
exports.contains = contains;
|
3506
5468
|
exports.copyBranch = copyBranch;
|
3507
5469
|
exports.createBranch = createBranch;
|
@@ -3512,6 +5474,7 @@ exports.createTable = createTable;
|
|
3512
5474
|
exports.createUserAPIKey = createUserAPIKey;
|
3513
5475
|
exports.createWorkspace = createWorkspace;
|
3514
5476
|
exports.deleteBranch = deleteBranch;
|
5477
|
+
exports.deleteCluster = deleteCluster;
|
3515
5478
|
exports.deleteColumn = deleteColumn;
|
3516
5479
|
exports.deleteDatabase = deleteDatabase;
|
3517
5480
|
exports.deleteDatabaseGithubSettings = deleteDatabaseGithubSettings;
|
@@ -3525,6 +5488,7 @@ exports.deleteUserAPIKey = deleteUserAPIKey;
|
|
3525
5488
|
exports.deleteUserOAuthClient = deleteUserOAuthClient;
|
3526
5489
|
exports.deleteWorkspace = deleteWorkspace;
|
3527
5490
|
exports.deserialize = deserialize;
|
5491
|
+
exports.dropClusterExtension = dropClusterExtension;
|
3528
5492
|
exports.endsWith = endsWith;
|
3529
5493
|
exports.equals = equals;
|
3530
5494
|
exports.executeBranchMigrationPlan = executeBranchMigrationPlan;
|
@@ -3532,35 +5496,36 @@ exports.exists = exists;
|
|
3532
5496
|
exports.fileAccess = fileAccess;
|
3533
5497
|
exports.fileUpload = fileUpload;
|
3534
5498
|
exports.ge = ge;
|
3535
|
-
exports.getAPIKey = getAPIKey;
|
3536
5499
|
exports.getAuthorizationCode = getAuthorizationCode;
|
3537
|
-
exports.getBranch = getBranch;
|
3538
5500
|
exports.getBranchDetails = getBranchDetails;
|
3539
5501
|
exports.getBranchList = getBranchList;
|
3540
5502
|
exports.getBranchMetadata = getBranchMetadata;
|
3541
5503
|
exports.getBranchMigrationHistory = getBranchMigrationHistory;
|
3542
5504
|
exports.getBranchMigrationJobStatus = getBranchMigrationJobStatus;
|
3543
5505
|
exports.getBranchMigrationPlan = getBranchMigrationPlan;
|
5506
|
+
exports.getBranchMoveStatus = getBranchMoveStatus;
|
3544
5507
|
exports.getBranchSchemaHistory = getBranchSchemaHistory;
|
3545
5508
|
exports.getBranchStats = getBranchStats;
|
3546
5509
|
exports.getCluster = getCluster;
|
5510
|
+
exports.getClusterMetrics = getClusterMetrics;
|
3547
5511
|
exports.getColumn = getColumn;
|
3548
5512
|
exports.getDatabaseGithubSettings = getDatabaseGithubSettings;
|
3549
5513
|
exports.getDatabaseList = getDatabaseList;
|
3550
5514
|
exports.getDatabaseMetadata = getDatabaseMetadata;
|
3551
5515
|
exports.getDatabaseSettings = getDatabaseSettings;
|
3552
|
-
exports.
|
5516
|
+
exports.getDeployPreviewBranch = getDeployPreviewBranch;
|
3553
5517
|
exports.getFile = getFile;
|
3554
5518
|
exports.getFileItem = getFileItem;
|
3555
5519
|
exports.getGitBranchesMapping = getGitBranchesMapping;
|
3556
5520
|
exports.getHostUrl = getHostUrl;
|
3557
5521
|
exports.getMigrationHistory = getMigrationHistory;
|
3558
5522
|
exports.getMigrationJobStatus = getMigrationJobStatus;
|
5523
|
+
exports.getMigrationJobs = getMigrationJobs;
|
3559
5524
|
exports.getMigrationRequest = getMigrationRequest;
|
3560
5525
|
exports.getMigrationRequestIsMerged = getMigrationRequestIsMerged;
|
3561
|
-
exports.getPreviewBranch = getPreviewBranch;
|
3562
5526
|
exports.getRecord = getRecord;
|
3563
5527
|
exports.getSchema = getSchema;
|
5528
|
+
exports.getSchemas = getSchemas;
|
3564
5529
|
exports.getTableColumns = getTableColumns;
|
3565
5530
|
exports.getTableSchema = getTableSchema;
|
3566
5531
|
exports.getUser = getUser;
|
@@ -3569,6 +5534,7 @@ exports.getUserOAuthAccessTokens = getUserOAuthAccessTokens;
|
|
3569
5534
|
exports.getUserOAuthClients = getUserOAuthClients;
|
3570
5535
|
exports.getWorkspace = getWorkspace;
|
3571
5536
|
exports.getWorkspaceMembersList = getWorkspaceMembersList;
|
5537
|
+
exports.getWorkspaceSettings = getWorkspaceSettings;
|
3572
5538
|
exports.getWorkspacesList = getWorkspacesList;
|
3573
5539
|
exports.grantAuthorizationCode = grantAuthorizationCode;
|
3574
5540
|
exports.greaterEquals = greaterEquals;
|
@@ -3584,6 +5550,7 @@ exports.includesAny = includesAny;
|
|
3584
5550
|
exports.includesNone = includesNone;
|
3585
5551
|
exports.insertRecord = insertRecord;
|
3586
5552
|
exports.insertRecordWithID = insertRecordWithID;
|
5553
|
+
exports.installClusterExtension = installClusterExtension;
|
3587
5554
|
exports.inviteWorkspaceMember = inviteWorkspaceMember;
|
3588
5555
|
exports.is = is;
|
3589
5556
|
exports.isCursorPaginationOptions = isCursorPaginationOptions;
|
@@ -3597,12 +5564,15 @@ exports.le = le;
|
|
3597
5564
|
exports.lessEquals = lessEquals;
|
3598
5565
|
exports.lessThan = lessThan;
|
3599
5566
|
exports.lessThanEquals = lessThanEquals;
|
5567
|
+
exports.listClusterBranches = listClusterBranches;
|
5568
|
+
exports.listClusterExtensions = listClusterExtensions;
|
3600
5569
|
exports.listClusters = listClusters;
|
3601
5570
|
exports.listMigrationRequestsCommits = listMigrationRequestsCommits;
|
3602
5571
|
exports.listRegions = listRegions;
|
3603
5572
|
exports.lt = lt;
|
3604
5573
|
exports.lte = lte;
|
3605
5574
|
exports.mergeMigrationRequest = mergeMigrationRequest;
|
5575
|
+
exports.moveBranch = moveBranch;
|
3606
5576
|
exports.notExists = notExists;
|
3607
5577
|
exports.operationsByTag = operationsByTag;
|
3608
5578
|
exports.parseProviderString = parseProviderString;
|
@@ -3619,11 +5589,14 @@ exports.removeWorkspaceMember = removeWorkspaceMember;
|
|
3619
5589
|
exports.renameDatabase = renameDatabase;
|
3620
5590
|
exports.resendWorkspaceMemberInvite = resendWorkspaceMemberInvite;
|
3621
5591
|
exports.resolveBranch = resolveBranch;
|
5592
|
+
exports.rollbackMigration = rollbackMigration;
|
3622
5593
|
exports.searchBranch = searchBranch;
|
3623
5594
|
exports.searchTable = searchTable;
|
3624
5595
|
exports.serialize = serialize;
|
3625
5596
|
exports.setTableSchema = setTableSchema;
|
5597
|
+
exports.sqlBatchQuery = sqlBatchQuery;
|
3626
5598
|
exports.sqlQuery = sqlQuery;
|
5599
|
+
exports.startMigration = startMigration;
|
3627
5600
|
exports.startsWith = startsWith;
|
3628
5601
|
exports.summarizeTable = summarizeTable;
|
3629
5602
|
exports.transformImage = transformImage;
|
@@ -3642,6 +5615,7 @@ exports.updateUser = updateUser;
|
|
3642
5615
|
exports.updateWorkspace = updateWorkspace;
|
3643
5616
|
exports.updateWorkspaceMemberInvite = updateWorkspaceMemberInvite;
|
3644
5617
|
exports.updateWorkspaceMemberRole = updateWorkspaceMemberRole;
|
5618
|
+
exports.updateWorkspaceSettings = updateWorkspaceSettings;
|
3645
5619
|
exports.upsertRecordWithID = upsertRecordWithID;
|
3646
5620
|
exports.vectorSearchTable = vectorSearchTable;
|
3647
5621
|
//# sourceMappingURL=index.cjs.map
|