@xata.io/client 0.0.0-next.va2d8ec2a91aa05ba703071b545a477e727db67d6 → 0.0.0-next.vaf38c4bbe8c1d35159e6de658514ac90b419f55a
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-add-version.log +1 -1
- package/.turbo/turbo-build.log +4 -4
- package/CHANGELOG.md +19 -3
- package/dist/index.cjs +2471 -582
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.ts +4778 -3836
- package/dist/index.mjs +2463 -579
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.mjs
CHANGED
@@ -22,6 +22,1789 @@ const TraceAttributes = {
|
|
22
22
|
CLOUDFLARE_RAY_ID: "cf.ray"
|
23
23
|
};
|
24
24
|
|
25
|
+
// Base64 alphabet tables: `lookup` maps a 6-bit value to its character,
// `revLookup` maps a character code back to its 6-bit value.
const lookup = [];
const revLookup = [];
const code = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
for (const [idx, ch] of [...code].entries()) {
  lookup[idx] = ch;
  revLookup[ch.charCodeAt(0)] = idx;
}
// Also accept the URL-safe alphabet (RFC 4648 section 5) when decoding.
revLookup["-".charCodeAt(0)] = 62;
revLookup["_".charCodeAt(0)] = 63;
|
34
|
+
/**
 * Computes the meaningful-character and padding-character counts of a
 * base64 string.
 *
 * @param {string} b64 - Base64 text; its length must be a multiple of 4.
 * @returns {[number, number]} `[validLen, placeHoldersLen]` where
 *   `validLen` is the index of the first "=" (or the full length when there
 *   is no padding) and `placeHoldersLen` is the implied number of "=" chars.
 * @throws {Error} If the string length is not a multiple of 4.
 */
function getLens(b64) {
  if (b64.length % 4 > 0) {
    throw new Error("Invalid string. Length must be a multiple of 4");
  }
  const firstPad = b64.indexOf("=");
  const validLen = firstPad === -1 ? b64.length : firstPad;
  const placeHoldersLen = validLen === b64.length ? 0 : 4 - validLen % 4;
  return [validLen, placeHoldersLen];
}
|
44
|
+
/**
 * Returns the number of decoded bytes for a base64 string described by the
 * lengths produced by `getLens`.
 *
 * @param {string} _b64 - Unused; kept for signature compatibility.
 * @param {number} validLen - Count of non-padding base64 characters.
 * @param {number} placeHoldersLen - Count of trailing "=" characters.
 * @returns {number} Decoded byte count.
 */
function _byteLength(_b64, validLen, placeHoldersLen) {
  const totalChars = validLen + placeHoldersLen;
  return totalChars * 3 / 4 - placeHoldersLen;
}
|
47
|
+
function toByteArray(b64) {
|
48
|
+
let tmp;
|
49
|
+
const lens = getLens(b64);
|
50
|
+
const validLen = lens[0];
|
51
|
+
const placeHoldersLen = lens[1];
|
52
|
+
const arr = new Uint8Array(_byteLength(b64, validLen, placeHoldersLen));
|
53
|
+
let curByte = 0;
|
54
|
+
const len = placeHoldersLen > 0 ? validLen - 4 : validLen;
|
55
|
+
let i;
|
56
|
+
for (i = 0; i < len; i += 4) {
|
57
|
+
tmp = revLookup[b64.charCodeAt(i)] << 18 | revLookup[b64.charCodeAt(i + 1)] << 12 | revLookup[b64.charCodeAt(i + 2)] << 6 | revLookup[b64.charCodeAt(i + 3)];
|
58
|
+
arr[curByte++] = tmp >> 16 & 255;
|
59
|
+
arr[curByte++] = tmp >> 8 & 255;
|
60
|
+
arr[curByte++] = tmp & 255;
|
61
|
+
}
|
62
|
+
if (placeHoldersLen === 2) {
|
63
|
+
tmp = revLookup[b64.charCodeAt(i)] << 2 | revLookup[b64.charCodeAt(i + 1)] >> 4;
|
64
|
+
arr[curByte++] = tmp & 255;
|
65
|
+
}
|
66
|
+
if (placeHoldersLen === 1) {
|
67
|
+
tmp = revLookup[b64.charCodeAt(i)] << 10 | revLookup[b64.charCodeAt(i + 1)] << 4 | revLookup[b64.charCodeAt(i + 2)] >> 2;
|
68
|
+
arr[curByte++] = tmp >> 8 & 255;
|
69
|
+
arr[curByte++] = tmp & 255;
|
70
|
+
}
|
71
|
+
return arr;
|
72
|
+
}
|
73
|
+
/**
 * Encodes a 24-bit integer (three packed bytes) as four base64 characters.
 *
 * @param {number} num - 24-bit value to encode.
 * @returns {string} Four base64 characters.
 */
function tripletToBase64(num) {
  const shifted = [num >> 18, num >> 12, num >> 6, num];
  return shifted.map((v) => lookup[v & 63]).join("");
}
|
76
|
+
/**
 * Encodes the byte range [start, end) of `uint8` as base64, three bytes at a
 * time. The range length is assumed to be a multiple of 3 (no padding is
 * emitted here).
 *
 * @param {Uint8Array} uint8 - Source bytes.
 * @param {number} start - Inclusive start index.
 * @param {number} end - Exclusive end index.
 * @returns {string} Base64 text for the range.
 */
function encodeChunk(uint8, start, end) {
  const pieces = [];
  let i = start;
  while (i < end) {
    const triplet = (uint8[i] << 16 & 16711680) + (uint8[i + 1] << 8 & 65280) + (uint8[i + 2] & 255);
    pieces.push(tripletToBase64(triplet));
    i += 3;
  }
  return pieces.join("");
}
|
85
|
+
/**
 * Encodes a Uint8Array as a standard (padded) base64 string.
 *
 * @param {Uint8Array} uint8 - Bytes to encode.
 * @returns {string} Base64 representation of `uint8`.
 */
function fromByteArray(uint8) {
  const len = uint8.length;
  const extraBytes = len % 3;
  const mainLen = len - extraBytes;
  // Encode in bounded chunks so intermediate strings stay small.
  const maxChunkLength = 16383;
  const parts = [];
  for (let start = 0; start < mainLen; start += maxChunkLength) {
    parts.push(encodeChunk(uint8, start, Math.min(start + maxChunkLength, mainLen)));
  }
  // Pad out a 1- or 2-byte tail per RFC 4648.
  if (extraBytes === 1) {
    const tail = uint8[len - 1];
    parts.push(lookup[tail >> 2] + lookup[tail << 4 & 63] + "==");
  } else if (extraBytes === 2) {
    const tail = (uint8[len - 2] << 8) + uint8[len - 1];
    parts.push(lookup[tail >> 10] + lookup[tail >> 4 & 63] + lookup[tail << 2 & 63] + "=");
  }
  return parts.join("");
}
|
103
|
+
|
104
|
+
// NOTE(review): neither constant's use site is visible in this chunk — the
// descriptions below follow the buffer-polyfill convention and should be
// confirmed against the code that references them.
// 2^31 - 1; presumably the maximum allowed Buffer/typed-array length.
const K_MAX_LENGTH = 2147483647;
// Presumably caps how many char codes are passed per String.fromCharCode
// call when decoding — verify where it is referenced.
const MAX_ARGUMENTS_LENGTH = 4096;
|
106
|
+
class Buffer extends Uint8Array {
|
107
|
+
/**
|
108
|
+
* Constructs a new `Buffer` instance.
|
109
|
+
*
|
110
|
+
* @param value
|
111
|
+
* @param encodingOrOffset
|
112
|
+
* @param length
|
113
|
+
*/
|
114
|
+
constructor(value, encodingOrOffset, length) {
|
115
|
+
if (typeof value === "number") {
|
116
|
+
if (typeof encodingOrOffset === "string") {
|
117
|
+
throw new TypeError("The first argument must be of type string, received type number");
|
118
|
+
}
|
119
|
+
if (value < 0) {
|
120
|
+
throw new RangeError("The buffer size cannot be negative");
|
121
|
+
}
|
122
|
+
super(value < 0 ? 0 : Buffer._checked(value) | 0);
|
123
|
+
} else if (typeof value === "string") {
|
124
|
+
if (typeof encodingOrOffset !== "string") {
|
125
|
+
encodingOrOffset = "utf8";
|
126
|
+
}
|
127
|
+
if (!Buffer.isEncoding(encodingOrOffset)) {
|
128
|
+
throw new TypeError("Unknown encoding: " + encodingOrOffset);
|
129
|
+
}
|
130
|
+
const length2 = Buffer.byteLength(value, encodingOrOffset) | 0;
|
131
|
+
super(length2);
|
132
|
+
const written = this.write(value, 0, this.length, encodingOrOffset);
|
133
|
+
if (written !== length2) {
|
134
|
+
throw new TypeError(
|
135
|
+
"Number of bytes written did not match expected length (wrote " + written + ", expected " + length2 + ")"
|
136
|
+
);
|
137
|
+
}
|
138
|
+
} else if (ArrayBuffer.isView(value)) {
|
139
|
+
if (Buffer._isInstance(value, Uint8Array)) {
|
140
|
+
const copy = new Uint8Array(value);
|
141
|
+
const array = copy.buffer;
|
142
|
+
const byteOffset = copy.byteOffset;
|
143
|
+
const length2 = copy.byteLength;
|
144
|
+
if (byteOffset < 0 || array.byteLength < byteOffset) {
|
145
|
+
throw new RangeError("offset is outside of buffer bounds");
|
146
|
+
}
|
147
|
+
if (array.byteLength < byteOffset + (length2 || 0)) {
|
148
|
+
throw new RangeError("length is outside of buffer bounds");
|
149
|
+
}
|
150
|
+
super(new Uint8Array(array, byteOffset, length2));
|
151
|
+
} else {
|
152
|
+
const array = value;
|
153
|
+
const length2 = array.length < 0 ? 0 : Buffer._checked(array.length) | 0;
|
154
|
+
super(new Uint8Array(length2));
|
155
|
+
for (let i = 0; i < length2; i++) {
|
156
|
+
this[i] = array[i] & 255;
|
157
|
+
}
|
158
|
+
}
|
159
|
+
} else if (value == null) {
|
160
|
+
throw new TypeError(
|
161
|
+
"The first argument must be one of type string, Buffer, ArrayBuffer, Array, or Array-like Object. Received type " + typeof value
|
162
|
+
);
|
163
|
+
} else if (Buffer._isInstance(value, ArrayBuffer) || value && Buffer._isInstance(value.buffer, ArrayBuffer)) {
|
164
|
+
const array = value;
|
165
|
+
const byteOffset = encodingOrOffset;
|
166
|
+
if (byteOffset < 0 || array.byteLength < byteOffset) {
|
167
|
+
throw new RangeError("offset is outside of buffer bounds");
|
168
|
+
}
|
169
|
+
if (array.byteLength < byteOffset + (length || 0)) {
|
170
|
+
throw new RangeError("length is outside of buffer bounds");
|
171
|
+
}
|
172
|
+
super(new Uint8Array(array, byteOffset, length));
|
173
|
+
} else if (Array.isArray(value)) {
|
174
|
+
const array = value;
|
175
|
+
const length2 = array.length < 0 ? 0 : Buffer._checked(array.length) | 0;
|
176
|
+
super(new Uint8Array(length2));
|
177
|
+
for (let i = 0; i < length2; i++) {
|
178
|
+
this[i] = array[i] & 255;
|
179
|
+
}
|
180
|
+
} else {
|
181
|
+
throw new TypeError("Unable to determine the correct way to allocate buffer for type " + typeof value);
|
182
|
+
}
|
183
|
+
}
|
184
|
+
/**
|
185
|
+
* Return JSON representation of the buffer.
|
186
|
+
*/
|
187
|
+
toJSON() {
|
188
|
+
return {
|
189
|
+
type: "Buffer",
|
190
|
+
data: Array.prototype.slice.call(this)
|
191
|
+
};
|
192
|
+
}
|
193
|
+
/**
|
194
|
+
* Writes `string` to the buffer at `offset` according to the character encoding in `encoding`. The `length`
|
195
|
+
* parameter is the number of bytes to write. If the buffer does not contain enough space to fit the entire string,
|
196
|
+
* only part of `string` will be written. However, partially encoded characters will not be written.
|
197
|
+
*
|
198
|
+
* @param string String to write to `buf`.
|
199
|
+
* @param offset Number of bytes to skip before starting to write `string`. Default: `0`.
|
200
|
+
* @param length Maximum number of bytes to write: Default: `buf.length - offset`.
|
201
|
+
* @param encoding The character encoding of `string`. Default: `utf8`.
|
202
|
+
*/
|
203
|
+
write(string, offset, length, encoding) {
|
204
|
+
if (typeof offset === "undefined") {
|
205
|
+
encoding = "utf8";
|
206
|
+
length = this.length;
|
207
|
+
offset = 0;
|
208
|
+
} else if (typeof length === "undefined" && typeof offset === "string") {
|
209
|
+
encoding = offset;
|
210
|
+
length = this.length;
|
211
|
+
offset = 0;
|
212
|
+
} else if (typeof offset === "number" && isFinite(offset)) {
|
213
|
+
offset = offset >>> 0;
|
214
|
+
if (typeof length === "number" && isFinite(length)) {
|
215
|
+
length = length >>> 0;
|
216
|
+
encoding ?? (encoding = "utf8");
|
217
|
+
} else if (typeof length === "string") {
|
218
|
+
encoding = length;
|
219
|
+
length = void 0;
|
220
|
+
}
|
221
|
+
} else {
|
222
|
+
throw new Error("Buffer.write(string, encoding, offset[, length]) is no longer supported");
|
223
|
+
}
|
224
|
+
const remaining = this.length - offset;
|
225
|
+
if (typeof length === "undefined" || length > remaining) {
|
226
|
+
length = remaining;
|
227
|
+
}
|
228
|
+
if (string.length > 0 && (length < 0 || offset < 0) || offset > this.length) {
|
229
|
+
throw new RangeError("Attempt to write outside buffer bounds");
|
230
|
+
}
|
231
|
+
encoding || (encoding = "utf8");
|
232
|
+
switch (Buffer._getEncoding(encoding)) {
|
233
|
+
case "hex":
|
234
|
+
return Buffer._hexWrite(this, string, offset, length);
|
235
|
+
case "utf8":
|
236
|
+
return Buffer._utf8Write(this, string, offset, length);
|
237
|
+
case "ascii":
|
238
|
+
case "latin1":
|
239
|
+
case "binary":
|
240
|
+
return Buffer._asciiWrite(this, string, offset, length);
|
241
|
+
case "ucs2":
|
242
|
+
case "utf16le":
|
243
|
+
return Buffer._ucs2Write(this, string, offset, length);
|
244
|
+
case "base64":
|
245
|
+
return Buffer._base64Write(this, string, offset, length);
|
246
|
+
}
|
247
|
+
}
|
248
|
+
/**
|
249
|
+
* Decodes the buffer to a string according to the specified character encoding.
|
250
|
+
* Passing `start` and `end` will decode only a subset of the buffer.
|
251
|
+
*
|
252
|
+
* Note that if the encoding is `utf8` and a byte sequence in the input is not valid UTF-8, then each invalid byte
|
253
|
+
* will be replaced with `U+FFFD`.
|
254
|
+
*
|
255
|
+
* @param encoding
|
256
|
+
* @param start
|
257
|
+
* @param end
|
258
|
+
*/
|
259
|
+
toString(encoding, start, end) {
|
260
|
+
const length = this.length;
|
261
|
+
if (length === 0) {
|
262
|
+
return "";
|
263
|
+
}
|
264
|
+
if (arguments.length === 0) {
|
265
|
+
return Buffer._utf8Slice(this, 0, length);
|
266
|
+
}
|
267
|
+
if (typeof start === "undefined" || start < 0) {
|
268
|
+
start = 0;
|
269
|
+
}
|
270
|
+
if (start > this.length) {
|
271
|
+
return "";
|
272
|
+
}
|
273
|
+
if (typeof end === "undefined" || end > this.length) {
|
274
|
+
end = this.length;
|
275
|
+
}
|
276
|
+
if (end <= 0) {
|
277
|
+
return "";
|
278
|
+
}
|
279
|
+
end >>>= 0;
|
280
|
+
start >>>= 0;
|
281
|
+
if (end <= start) {
|
282
|
+
return "";
|
283
|
+
}
|
284
|
+
if (!encoding) {
|
285
|
+
encoding = "utf8";
|
286
|
+
}
|
287
|
+
switch (Buffer._getEncoding(encoding)) {
|
288
|
+
case "hex":
|
289
|
+
return Buffer._hexSlice(this, start, end);
|
290
|
+
case "utf8":
|
291
|
+
return Buffer._utf8Slice(this, start, end);
|
292
|
+
case "ascii":
|
293
|
+
return Buffer._asciiSlice(this, start, end);
|
294
|
+
case "latin1":
|
295
|
+
case "binary":
|
296
|
+
return Buffer._latin1Slice(this, start, end);
|
297
|
+
case "ucs2":
|
298
|
+
case "utf16le":
|
299
|
+
return Buffer._utf16leSlice(this, start, end);
|
300
|
+
case "base64":
|
301
|
+
return Buffer._base64Slice(this, start, end);
|
302
|
+
}
|
303
|
+
}
|
304
|
+
/**
|
305
|
+
* Returns true if this buffer's is equal to the provided buffer, meaning they share the same exact data.
|
306
|
+
*
|
307
|
+
* @param otherBuffer
|
308
|
+
*/
|
309
|
+
equals(otherBuffer) {
|
310
|
+
if (!Buffer.isBuffer(otherBuffer)) {
|
311
|
+
throw new TypeError("Argument must be a Buffer");
|
312
|
+
}
|
313
|
+
if (this === otherBuffer) {
|
314
|
+
return true;
|
315
|
+
}
|
316
|
+
return Buffer.compare(this, otherBuffer) === 0;
|
317
|
+
}
|
318
|
+
/**
|
319
|
+
* Compares the buffer with `otherBuffer` and returns a number indicating whether the buffer comes before, after,
|
320
|
+
* or is the same as `otherBuffer` in sort order. Comparison is based on the actual sequence of bytes in each
|
321
|
+
* buffer.
|
322
|
+
*
|
323
|
+
* - `0` is returned if `otherBuffer` is the same as this buffer.
|
324
|
+
* - `1` is returned if `otherBuffer` should come before this buffer when sorted.
|
325
|
+
* - `-1` is returned if `otherBuffer` should come after this buffer when sorted.
|
326
|
+
*
|
327
|
+
* @param otherBuffer The buffer to compare to.
|
328
|
+
* @param targetStart The offset within `otherBuffer` at which to begin comparison.
|
329
|
+
* @param targetEnd The offset within `otherBuffer` at which to end comparison (exclusive).
|
330
|
+
* @param sourceStart The offset within this buffer at which to begin comparison.
|
331
|
+
* @param sourceEnd The offset within this buffer at which to end the comparison (exclusive).
|
332
|
+
*/
|
333
|
+
compare(otherBuffer, targetStart, targetEnd, sourceStart, sourceEnd) {
|
334
|
+
if (Buffer._isInstance(otherBuffer, Uint8Array)) {
|
335
|
+
otherBuffer = Buffer.from(otherBuffer, otherBuffer.byteOffset, otherBuffer.byteLength);
|
336
|
+
}
|
337
|
+
if (!Buffer.isBuffer(otherBuffer)) {
|
338
|
+
throw new TypeError("Argument must be a Buffer or Uint8Array");
|
339
|
+
}
|
340
|
+
targetStart ?? (targetStart = 0);
|
341
|
+
targetEnd ?? (targetEnd = otherBuffer ? otherBuffer.length : 0);
|
342
|
+
sourceStart ?? (sourceStart = 0);
|
343
|
+
sourceEnd ?? (sourceEnd = this.length);
|
344
|
+
if (targetStart < 0 || targetEnd > otherBuffer.length || sourceStart < 0 || sourceEnd > this.length) {
|
345
|
+
throw new RangeError("Out of range index");
|
346
|
+
}
|
347
|
+
if (sourceStart >= sourceEnd && targetStart >= targetEnd) {
|
348
|
+
return 0;
|
349
|
+
}
|
350
|
+
if (sourceStart >= sourceEnd) {
|
351
|
+
return -1;
|
352
|
+
}
|
353
|
+
if (targetStart >= targetEnd) {
|
354
|
+
return 1;
|
355
|
+
}
|
356
|
+
targetStart >>>= 0;
|
357
|
+
targetEnd >>>= 0;
|
358
|
+
sourceStart >>>= 0;
|
359
|
+
sourceEnd >>>= 0;
|
360
|
+
if (this === otherBuffer) {
|
361
|
+
return 0;
|
362
|
+
}
|
363
|
+
let x = sourceEnd - sourceStart;
|
364
|
+
let y = targetEnd - targetStart;
|
365
|
+
const len = Math.min(x, y);
|
366
|
+
const thisCopy = this.slice(sourceStart, sourceEnd);
|
367
|
+
const targetCopy = otherBuffer.slice(targetStart, targetEnd);
|
368
|
+
for (let i = 0; i < len; ++i) {
|
369
|
+
if (thisCopy[i] !== targetCopy[i]) {
|
370
|
+
x = thisCopy[i];
|
371
|
+
y = targetCopy[i];
|
372
|
+
break;
|
373
|
+
}
|
374
|
+
}
|
375
|
+
if (x < y) return -1;
|
376
|
+
if (y < x) return 1;
|
377
|
+
return 0;
|
378
|
+
}
|
379
|
+
/**
|
380
|
+
* Copies data from a region of this buffer to a region in `targetBuffer`, even if the `targetBuffer` memory
|
381
|
+
* region overlaps with this buffer.
|
382
|
+
*
|
383
|
+
* @param targetBuffer The target buffer to copy into.
|
384
|
+
* @param targetStart The offset within `targetBuffer` at which to begin writing.
|
385
|
+
* @param sourceStart The offset within this buffer at which to begin copying.
|
386
|
+
* @param sourceEnd The offset within this buffer at which to end copying (exclusive).
|
387
|
+
*/
|
388
|
+
copy(targetBuffer, targetStart, sourceStart, sourceEnd) {
|
389
|
+
if (!Buffer.isBuffer(targetBuffer)) throw new TypeError("argument should be a Buffer");
|
390
|
+
if (!sourceStart) sourceStart = 0;
|
391
|
+
if (!targetStart) targetStart = 0;
|
392
|
+
if (!sourceEnd && sourceEnd !== 0) sourceEnd = this.length;
|
393
|
+
if (targetStart >= targetBuffer.length) targetStart = targetBuffer.length;
|
394
|
+
if (!targetStart) targetStart = 0;
|
395
|
+
if (sourceEnd > 0 && sourceEnd < sourceStart) sourceEnd = sourceStart;
|
396
|
+
if (sourceEnd === sourceStart) return 0;
|
397
|
+
if (targetBuffer.length === 0 || this.length === 0) return 0;
|
398
|
+
if (targetStart < 0) {
|
399
|
+
throw new RangeError("targetStart out of bounds");
|
400
|
+
}
|
401
|
+
if (sourceStart < 0 || sourceStart >= this.length) throw new RangeError("Index out of range");
|
402
|
+
if (sourceEnd < 0) throw new RangeError("sourceEnd out of bounds");
|
403
|
+
if (sourceEnd > this.length) sourceEnd = this.length;
|
404
|
+
if (targetBuffer.length - targetStart < sourceEnd - sourceStart) {
|
405
|
+
sourceEnd = targetBuffer.length - targetStart + sourceStart;
|
406
|
+
}
|
407
|
+
const len = sourceEnd - sourceStart;
|
408
|
+
if (this === targetBuffer && typeof Uint8Array.prototype.copyWithin === "function") {
|
409
|
+
this.copyWithin(targetStart, sourceStart, sourceEnd);
|
410
|
+
} else {
|
411
|
+
Uint8Array.prototype.set.call(targetBuffer, this.subarray(sourceStart, sourceEnd), targetStart);
|
412
|
+
}
|
413
|
+
return len;
|
414
|
+
}
|
415
|
+
/**
|
416
|
+
* Returns a new `Buffer` that references the same memory as the original, but offset and cropped by the `start`
|
417
|
+
* and `end` indices. This is the same behavior as `buf.subarray()`.
|
418
|
+
*
|
419
|
+
* This method is not compatible with the `Uint8Array.prototype.slice()`, which is a superclass of Buffer. To copy
|
420
|
+
* the slice, use `Uint8Array.prototype.slice()`.
|
421
|
+
*
|
422
|
+
* @param start
|
423
|
+
* @param end
|
424
|
+
*/
|
425
|
+
slice(start, end) {
|
426
|
+
if (!start) {
|
427
|
+
start = 0;
|
428
|
+
}
|
429
|
+
const len = this.length;
|
430
|
+
start = ~~start;
|
431
|
+
end = end === void 0 ? len : ~~end;
|
432
|
+
if (start < 0) {
|
433
|
+
start += len;
|
434
|
+
if (start < 0) {
|
435
|
+
start = 0;
|
436
|
+
}
|
437
|
+
} else if (start > len) {
|
438
|
+
start = len;
|
439
|
+
}
|
440
|
+
if (end < 0) {
|
441
|
+
end += len;
|
442
|
+
if (end < 0) {
|
443
|
+
end = 0;
|
444
|
+
}
|
445
|
+
} else if (end > len) {
|
446
|
+
end = len;
|
447
|
+
}
|
448
|
+
if (end < start) {
|
449
|
+
end = start;
|
450
|
+
}
|
451
|
+
const newBuf = this.subarray(start, end);
|
452
|
+
Object.setPrototypeOf(newBuf, Buffer.prototype);
|
453
|
+
return newBuf;
|
454
|
+
}
|
455
|
+
/**
|
456
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits
|
457
|
+
* of accuracy. Behavior is undefined when value is anything other than an unsigned integer.
|
458
|
+
*
|
459
|
+
* @param value Number to write.
|
460
|
+
* @param offset Number of bytes to skip before starting to write.
|
461
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
462
|
+
* @param noAssert
|
463
|
+
* @returns `offset` plus the number of bytes written.
|
464
|
+
*/
|
465
|
+
writeUIntLE(value, offset, byteLength, noAssert) {
|
466
|
+
value = +value;
|
467
|
+
offset = offset >>> 0;
|
468
|
+
byteLength = byteLength >>> 0;
|
469
|
+
if (!noAssert) {
|
470
|
+
const maxBytes = Math.pow(2, 8 * byteLength) - 1;
|
471
|
+
Buffer._checkInt(this, value, offset, byteLength, maxBytes, 0);
|
472
|
+
}
|
473
|
+
let mul = 1;
|
474
|
+
let i = 0;
|
475
|
+
this[offset] = value & 255;
|
476
|
+
while (++i < byteLength && (mul *= 256)) {
|
477
|
+
this[offset + i] = value / mul & 255;
|
478
|
+
}
|
479
|
+
return offset + byteLength;
|
480
|
+
}
|
481
|
+
/**
|
482
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits of
|
483
|
+
* accuracy. Behavior is undefined when `value` is anything other than an unsigned integer.
|
484
|
+
*
|
485
|
+
* @param value Number to write.
|
486
|
+
* @param offset Number of bytes to skip before starting to write.
|
487
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
488
|
+
* @param noAssert
|
489
|
+
* @returns `offset` plus the number of bytes written.
|
490
|
+
*/
|
491
|
+
writeUIntBE(value, offset, byteLength, noAssert) {
|
492
|
+
value = +value;
|
493
|
+
offset = offset >>> 0;
|
494
|
+
byteLength = byteLength >>> 0;
|
495
|
+
if (!noAssert) {
|
496
|
+
const maxBytes = Math.pow(2, 8 * byteLength) - 1;
|
497
|
+
Buffer._checkInt(this, value, offset, byteLength, maxBytes, 0);
|
498
|
+
}
|
499
|
+
let i = byteLength - 1;
|
500
|
+
let mul = 1;
|
501
|
+
this[offset + i] = value & 255;
|
502
|
+
while (--i >= 0 && (mul *= 256)) {
|
503
|
+
this[offset + i] = value / mul & 255;
|
504
|
+
}
|
505
|
+
return offset + byteLength;
|
506
|
+
}
|
507
|
+
/**
|
508
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits
|
509
|
+
* of accuracy. Behavior is undefined when `value` is anything other than a signed integer.
|
510
|
+
*
|
511
|
+
* @param value Number to write.
|
512
|
+
* @param offset Number of bytes to skip before starting to write.
|
513
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
514
|
+
* @param noAssert
|
515
|
+
* @returns `offset` plus the number of bytes written.
|
516
|
+
*/
|
517
|
+
writeIntLE(value, offset, byteLength, noAssert) {
|
518
|
+
value = +value;
|
519
|
+
offset = offset >>> 0;
|
520
|
+
if (!noAssert) {
|
521
|
+
const limit = Math.pow(2, 8 * byteLength - 1);
|
522
|
+
Buffer._checkInt(this, value, offset, byteLength, limit - 1, -limit);
|
523
|
+
}
|
524
|
+
let i = 0;
|
525
|
+
let mul = 1;
|
526
|
+
let sub = 0;
|
527
|
+
this[offset] = value & 255;
|
528
|
+
while (++i < byteLength && (mul *= 256)) {
|
529
|
+
if (value < 0 && sub === 0 && this[offset + i - 1] !== 0) {
|
530
|
+
sub = 1;
|
531
|
+
}
|
532
|
+
this[offset + i] = (value / mul >> 0) - sub & 255;
|
533
|
+
}
|
534
|
+
return offset + byteLength;
|
535
|
+
}
|
536
|
+
/**
|
537
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits
|
538
|
+
* of accuracy. Behavior is undefined when `value` is anything other than a signed integer.
|
539
|
+
*
|
540
|
+
* @param value Number to write.
|
541
|
+
* @param offset Number of bytes to skip before starting to write.
|
542
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
543
|
+
* @param noAssert
|
544
|
+
* @returns `offset` plus the number of bytes written.
|
545
|
+
*/
|
546
|
+
writeIntBE(value, offset, byteLength, noAssert) {
|
547
|
+
value = +value;
|
548
|
+
offset = offset >>> 0;
|
549
|
+
if (!noAssert) {
|
550
|
+
const limit = Math.pow(2, 8 * byteLength - 1);
|
551
|
+
Buffer._checkInt(this, value, offset, byteLength, limit - 1, -limit);
|
552
|
+
}
|
553
|
+
let i = byteLength - 1;
|
554
|
+
let mul = 1;
|
555
|
+
let sub = 0;
|
556
|
+
this[offset + i] = value & 255;
|
557
|
+
while (--i >= 0 && (mul *= 256)) {
|
558
|
+
if (value < 0 && sub === 0 && this[offset + i + 1] !== 0) {
|
559
|
+
sub = 1;
|
560
|
+
}
|
561
|
+
this[offset + i] = (value / mul >> 0) - sub & 255;
|
562
|
+
}
|
563
|
+
return offset + byteLength;
|
564
|
+
}
|
565
|
+
/**
|
566
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an
|
567
|
+
* unsigned, little-endian integer supporting up to 48 bits of accuracy.
|
568
|
+
*
|
569
|
+
* @param offset Number of bytes to skip before starting to read.
|
570
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
571
|
+
* @param noAssert
|
572
|
+
*/
|
573
|
+
readUIntLE(offset, byteLength, noAssert) {
|
574
|
+
offset = offset >>> 0;
|
575
|
+
byteLength = byteLength >>> 0;
|
576
|
+
if (!noAssert) {
|
577
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
578
|
+
}
|
579
|
+
let val = this[offset];
|
580
|
+
let mul = 1;
|
581
|
+
let i = 0;
|
582
|
+
while (++i < byteLength && (mul *= 256)) {
|
583
|
+
val += this[offset + i] * mul;
|
584
|
+
}
|
585
|
+
return val;
|
586
|
+
}
|
587
|
+
/**
|
588
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an
|
589
|
+
* unsigned, big-endian integer supporting up to 48 bits of accuracy.
|
590
|
+
*
|
591
|
+
* @param offset Number of bytes to skip before starting to read.
|
592
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
593
|
+
* @param noAssert
|
594
|
+
*/
|
595
|
+
readUIntBE(offset, byteLength, noAssert) {
|
596
|
+
offset = offset >>> 0;
|
597
|
+
byteLength = byteLength >>> 0;
|
598
|
+
if (!noAssert) {
|
599
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
600
|
+
}
|
601
|
+
let val = this[offset + --byteLength];
|
602
|
+
let mul = 1;
|
603
|
+
while (byteLength > 0 && (mul *= 256)) {
|
604
|
+
val += this[offset + --byteLength] * mul;
|
605
|
+
}
|
606
|
+
return val;
|
607
|
+
}
|
608
|
+
/**
|
609
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a
|
610
|
+
* little-endian, two's complement signed value supporting up to 48 bits of accuracy.
|
611
|
+
*
|
612
|
+
* @param offset Number of bytes to skip before starting to read.
|
613
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
614
|
+
* @param noAssert
|
615
|
+
*/
|
616
|
+
readIntLE(offset, byteLength, noAssert) {
|
617
|
+
offset = offset >>> 0;
|
618
|
+
byteLength = byteLength >>> 0;
|
619
|
+
if (!noAssert) {
|
620
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
621
|
+
}
|
622
|
+
let val = this[offset];
|
623
|
+
let mul = 1;
|
624
|
+
let i = 0;
|
625
|
+
while (++i < byteLength && (mul *= 256)) {
|
626
|
+
val += this[offset + i] * mul;
|
627
|
+
}
|
628
|
+
mul *= 128;
|
629
|
+
if (val >= mul) {
|
630
|
+
val -= Math.pow(2, 8 * byteLength);
|
631
|
+
}
|
632
|
+
return val;
|
633
|
+
}
|
634
|
+
/**
|
635
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a
|
636
|
+
* big-endian, two's complement signed value supporting up to 48 bits of accuracy.
|
637
|
+
*
|
638
|
+
* @param offset Number of bytes to skip before starting to read.
|
639
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
640
|
+
* @param noAssert
|
641
|
+
*/
|
642
|
+
readIntBE(offset, byteLength, noAssert) {
|
643
|
+
offset = offset >>> 0;
|
644
|
+
byteLength = byteLength >>> 0;
|
645
|
+
if (!noAssert) {
|
646
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
647
|
+
}
|
648
|
+
let i = byteLength;
|
649
|
+
let mul = 1;
|
650
|
+
let val = this[offset + --i];
|
651
|
+
while (i > 0 && (mul *= 256)) {
|
652
|
+
val += this[offset + --i] * mul;
|
653
|
+
}
|
654
|
+
mul *= 128;
|
655
|
+
if (val >= mul) {
|
656
|
+
val -= Math.pow(2, 8 * byteLength);
|
657
|
+
}
|
658
|
+
return val;
|
659
|
+
}
|
660
|
+
/**
|
661
|
+
* Reads an unsigned 8-bit integer from `buf` at the specified `offset`.
|
662
|
+
*
|
663
|
+
* @param offset Number of bytes to skip before starting to read.
|
664
|
+
* @param noAssert
|
665
|
+
*/
|
666
|
+
readUInt8(offset, noAssert) {
|
667
|
+
offset = offset >>> 0;
|
668
|
+
if (!noAssert) {
|
669
|
+
Buffer._checkOffset(offset, 1, this.length);
|
670
|
+
}
|
671
|
+
return this[offset];
|
672
|
+
}
|
673
|
+
/**
|
674
|
+
* Reads an unsigned, little-endian 16-bit integer from `buf` at the specified `offset`.
|
675
|
+
*
|
676
|
+
* @param offset Number of bytes to skip before starting to read.
|
677
|
+
* @param noAssert
|
678
|
+
*/
|
679
|
+
readUInt16LE(offset, noAssert) {
|
680
|
+
offset = offset >>> 0;
|
681
|
+
if (!noAssert) {
|
682
|
+
Buffer._checkOffset(offset, 2, this.length);
|
683
|
+
}
|
684
|
+
return this[offset] | this[offset + 1] << 8;
|
685
|
+
}
|
686
|
+
/**
|
687
|
+
* Reads an unsigned, big-endian 16-bit integer from `buf` at the specified `offset`.
|
688
|
+
*
|
689
|
+
* @param offset Number of bytes to skip before starting to read.
|
690
|
+
* @param noAssert
|
691
|
+
*/
|
692
|
+
readUInt16BE(offset, noAssert) {
|
693
|
+
offset = offset >>> 0;
|
694
|
+
if (!noAssert) {
|
695
|
+
Buffer._checkOffset(offset, 2, this.length);
|
696
|
+
}
|
697
|
+
return this[offset] << 8 | this[offset + 1];
|
698
|
+
}
|
699
|
+
/**
|
700
|
+
* Reads an unsigned, little-endian 32-bit integer from `buf` at the specified `offset`.
|
701
|
+
*
|
702
|
+
* @param offset Number of bytes to skip before starting to read.
|
703
|
+
* @param noAssert
|
704
|
+
*/
|
705
|
+
readUInt32LE(offset, noAssert) {
|
706
|
+
offset = offset >>> 0;
|
707
|
+
if (!noAssert) {
|
708
|
+
Buffer._checkOffset(offset, 4, this.length);
|
709
|
+
}
|
710
|
+
return (this[offset] | this[offset + 1] << 8 | this[offset + 2] << 16) + this[offset + 3] * 16777216;
|
711
|
+
}
|
712
|
+
/**
|
713
|
+
* Reads an unsigned, big-endian 32-bit integer from `buf` at the specified `offset`.
|
714
|
+
*
|
715
|
+
* @param offset Number of bytes to skip before starting to read.
|
716
|
+
* @param noAssert
|
717
|
+
*/
|
718
|
+
readUInt32BE(offset, noAssert) {
|
719
|
+
offset = offset >>> 0;
|
720
|
+
if (!noAssert) {
|
721
|
+
Buffer._checkOffset(offset, 4, this.length);
|
722
|
+
}
|
723
|
+
return this[offset] * 16777216 + (this[offset + 1] << 16 | this[offset + 2] << 8 | this[offset + 3]);
|
724
|
+
}
|
725
|
+
/**
|
726
|
+
* Reads a signed 8-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer` are interpreted
|
727
|
+
* as two's complement signed values.
|
728
|
+
*
|
729
|
+
* @param offset Number of bytes to skip before starting to read.
|
730
|
+
* @param noAssert
|
731
|
+
*/
|
732
|
+
readInt8(offset, noAssert) {
|
733
|
+
offset = offset >>> 0;
|
734
|
+
if (!noAssert) {
|
735
|
+
Buffer._checkOffset(offset, 1, this.length);
|
736
|
+
}
|
737
|
+
if (!(this[offset] & 128)) {
|
738
|
+
return this[offset];
|
739
|
+
}
|
740
|
+
return (255 - this[offset] + 1) * -1;
|
741
|
+
}
|
742
|
+
/**
|
743
|
+
* Reads a signed, little-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
744
|
+
* are interpreted as two's complement signed values.
|
745
|
+
*
|
746
|
+
* @param offset Number of bytes to skip before starting to read.
|
747
|
+
* @param noAssert
|
748
|
+
*/
|
749
|
+
readInt16LE(offset, noAssert) {
|
750
|
+
offset = offset >>> 0;
|
751
|
+
if (!noAssert) {
|
752
|
+
Buffer._checkOffset(offset, 2, this.length);
|
753
|
+
}
|
754
|
+
const val = this[offset] | this[offset + 1] << 8;
|
755
|
+
return val & 32768 ? val | 4294901760 : val;
|
756
|
+
}
|
757
|
+
/**
|
758
|
+
* Reads a signed, big-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
759
|
+
* are interpreted as two's complement signed values.
|
760
|
+
*
|
761
|
+
* @param offset Number of bytes to skip before starting to read.
|
762
|
+
* @param noAssert
|
763
|
+
*/
|
764
|
+
readInt16BE(offset, noAssert) {
|
765
|
+
offset = offset >>> 0;
|
766
|
+
if (!noAssert) {
|
767
|
+
Buffer._checkOffset(offset, 2, this.length);
|
768
|
+
}
|
769
|
+
const val = this[offset + 1] | this[offset] << 8;
|
770
|
+
return val & 32768 ? val | 4294901760 : val;
|
771
|
+
}
|
772
|
+
/**
|
773
|
+
* Reads a signed, little-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
774
|
+
* are interpreted as two's complement signed values.
|
775
|
+
*
|
776
|
+
* @param offset Number of bytes to skip before starting to read.
|
777
|
+
* @param noAssert
|
778
|
+
*/
|
779
|
+
readInt32LE(offset, noAssert) {
|
780
|
+
offset = offset >>> 0;
|
781
|
+
if (!noAssert) {
|
782
|
+
Buffer._checkOffset(offset, 4, this.length);
|
783
|
+
}
|
784
|
+
return this[offset] | this[offset + 1] << 8 | this[offset + 2] << 16 | this[offset + 3] << 24;
|
785
|
+
}
|
786
|
+
/**
|
787
|
+
* Reads a signed, big-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
788
|
+
* are interpreted as two's complement signed values.
|
789
|
+
*
|
790
|
+
* @param offset Number of bytes to skip before starting to read.
|
791
|
+
* @param noAssert
|
792
|
+
*/
|
793
|
+
readInt32BE(offset, noAssert) {
|
794
|
+
offset = offset >>> 0;
|
795
|
+
if (!noAssert) {
|
796
|
+
Buffer._checkOffset(offset, 4, this.length);
|
797
|
+
}
|
798
|
+
return this[offset] << 24 | this[offset + 1] << 16 | this[offset + 2] << 8 | this[offset + 3];
|
799
|
+
}
|
800
|
+
/**
|
801
|
+
* Interprets `buf` as an array of unsigned 16-bit integers and swaps the byte order in-place.
|
802
|
+
* Throws a `RangeError` if `buf.length` is not a multiple of 2.
|
803
|
+
*/
|
804
|
+
swap16() {
|
805
|
+
const len = this.length;
|
806
|
+
if (len % 2 !== 0) {
|
807
|
+
throw new RangeError("Buffer size must be a multiple of 16-bits");
|
808
|
+
}
|
809
|
+
for (let i = 0; i < len; i += 2) {
|
810
|
+
this._swap(this, i, i + 1);
|
811
|
+
}
|
812
|
+
return this;
|
813
|
+
}
|
814
|
+
/**
|
815
|
+
* Interprets `buf` as an array of unsigned 32-bit integers and swaps the byte order in-place.
|
816
|
+
* Throws a `RangeError` if `buf.length` is not a multiple of 4.
|
817
|
+
*/
|
818
|
+
swap32() {
|
819
|
+
const len = this.length;
|
820
|
+
if (len % 4 !== 0) {
|
821
|
+
throw new RangeError("Buffer size must be a multiple of 32-bits");
|
822
|
+
}
|
823
|
+
for (let i = 0; i < len; i += 4) {
|
824
|
+
this._swap(this, i, i + 3);
|
825
|
+
this._swap(this, i + 1, i + 2);
|
826
|
+
}
|
827
|
+
return this;
|
828
|
+
}
|
829
|
+
/**
|
830
|
+
* Interprets `buf` as an array of unsigned 64-bit integers and swaps the byte order in-place.
|
831
|
+
* Throws a `RangeError` if `buf.length` is not a multiple of 8.
|
832
|
+
*/
|
833
|
+
swap64() {
|
834
|
+
const len = this.length;
|
835
|
+
if (len % 8 !== 0) {
|
836
|
+
throw new RangeError("Buffer size must be a multiple of 64-bits");
|
837
|
+
}
|
838
|
+
for (let i = 0; i < len; i += 8) {
|
839
|
+
this._swap(this, i, i + 7);
|
840
|
+
this._swap(this, i + 1, i + 6);
|
841
|
+
this._swap(this, i + 2, i + 5);
|
842
|
+
this._swap(this, i + 3, i + 4);
|
843
|
+
}
|
844
|
+
return this;
|
845
|
+
}
|
846
|
+
/**
|
847
|
+
* Swaps two octets.
|
848
|
+
*
|
849
|
+
* @param b
|
850
|
+
* @param n
|
851
|
+
* @param m
|
852
|
+
*/
|
853
|
+
_swap(b, n, m) {
|
854
|
+
const i = b[n];
|
855
|
+
b[n] = b[m];
|
856
|
+
b[m] = i;
|
857
|
+
}
|
858
|
+
/**
|
859
|
+
* Writes `value` to `buf` at the specified `offset`. The `value` must be a valid unsigned 8-bit integer.
|
860
|
+
* Behavior is undefined when `value` is anything other than an unsigned 8-bit integer.
|
861
|
+
*
|
862
|
+
* @param value Number to write.
|
863
|
+
* @param offset Number of bytes to skip before starting to write.
|
864
|
+
* @param noAssert
|
865
|
+
* @returns `offset` plus the number of bytes written.
|
866
|
+
*/
|
867
|
+
writeUInt8(value, offset, noAssert) {
|
868
|
+
value = +value;
|
869
|
+
offset = offset >>> 0;
|
870
|
+
if (!noAssert) {
|
871
|
+
Buffer._checkInt(this, value, offset, 1, 255, 0);
|
872
|
+
}
|
873
|
+
this[offset] = value & 255;
|
874
|
+
return offset + 1;
|
875
|
+
}
|
876
|
+
/**
|
877
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 16-bit
|
878
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 16-bit integer.
|
879
|
+
*
|
880
|
+
* @param value Number to write.
|
881
|
+
* @param offset Number of bytes to skip before starting to write.
|
882
|
+
* @param noAssert
|
883
|
+
* @returns `offset` plus the number of bytes written.
|
884
|
+
*/
|
885
|
+
writeUInt16LE(value, offset, noAssert) {
|
886
|
+
value = +value;
|
887
|
+
offset = offset >>> 0;
|
888
|
+
if (!noAssert) {
|
889
|
+
Buffer._checkInt(this, value, offset, 2, 65535, 0);
|
890
|
+
}
|
891
|
+
this[offset] = value & 255;
|
892
|
+
this[offset + 1] = value >>> 8;
|
893
|
+
return offset + 2;
|
894
|
+
}
|
895
|
+
/**
|
896
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 16-bit
|
897
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 16-bit integer.
|
898
|
+
*
|
899
|
+
* @param value Number to write.
|
900
|
+
* @param offset Number of bytes to skip before starting to write.
|
901
|
+
* @param noAssert
|
902
|
+
* @returns `offset` plus the number of bytes written.
|
903
|
+
*/
|
904
|
+
writeUInt16BE(value, offset, noAssert) {
|
905
|
+
value = +value;
|
906
|
+
offset = offset >>> 0;
|
907
|
+
if (!noAssert) {
|
908
|
+
Buffer._checkInt(this, value, offset, 2, 65535, 0);
|
909
|
+
}
|
910
|
+
this[offset] = value >>> 8;
|
911
|
+
this[offset + 1] = value & 255;
|
912
|
+
return offset + 2;
|
913
|
+
}
|
914
|
+
/**
|
915
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 32-bit
|
916
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 32-bit integer.
|
917
|
+
*
|
918
|
+
* @param value Number to write.
|
919
|
+
* @param offset Number of bytes to skip before starting to write.
|
920
|
+
* @param noAssert
|
921
|
+
* @returns `offset` plus the number of bytes written.
|
922
|
+
*/
|
923
|
+
writeUInt32LE(value, offset, noAssert) {
|
924
|
+
value = +value;
|
925
|
+
offset = offset >>> 0;
|
926
|
+
if (!noAssert) {
|
927
|
+
Buffer._checkInt(this, value, offset, 4, 4294967295, 0);
|
928
|
+
}
|
929
|
+
this[offset + 3] = value >>> 24;
|
930
|
+
this[offset + 2] = value >>> 16;
|
931
|
+
this[offset + 1] = value >>> 8;
|
932
|
+
this[offset] = value & 255;
|
933
|
+
return offset + 4;
|
934
|
+
}
|
935
|
+
/**
|
936
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 32-bit
|
937
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 32-bit integer.
|
938
|
+
*
|
939
|
+
* @param value Number to write.
|
940
|
+
* @param offset Number of bytes to skip before starting to write.
|
941
|
+
* @param noAssert
|
942
|
+
* @returns `offset` plus the number of bytes written.
|
943
|
+
*/
|
944
|
+
writeUInt32BE(value, offset, noAssert) {
|
945
|
+
value = +value;
|
946
|
+
offset = offset >>> 0;
|
947
|
+
if (!noAssert) {
|
948
|
+
Buffer._checkInt(this, value, offset, 4, 4294967295, 0);
|
949
|
+
}
|
950
|
+
this[offset] = value >>> 24;
|
951
|
+
this[offset + 1] = value >>> 16;
|
952
|
+
this[offset + 2] = value >>> 8;
|
953
|
+
this[offset + 3] = value & 255;
|
954
|
+
return offset + 4;
|
955
|
+
}
|
956
|
+
/**
|
957
|
+
* Writes `value` to `buf` at the specified `offset`. The `value` must be a valid signed 8-bit integer.
|
958
|
+
* Behavior is undefined when `value` is anything other than a signed 8-bit integer.
|
959
|
+
*
|
960
|
+
* @param value Number to write.
|
961
|
+
* @param offset Number of bytes to skip before starting to write.
|
962
|
+
* @param noAssert
|
963
|
+
* @returns `offset` plus the number of bytes written.
|
964
|
+
*/
|
965
|
+
writeInt8(value, offset, noAssert) {
|
966
|
+
value = +value;
|
967
|
+
offset = offset >>> 0;
|
968
|
+
if (!noAssert) {
|
969
|
+
Buffer._checkInt(this, value, offset, 1, 127, -128);
|
970
|
+
}
|
971
|
+
if (value < 0) {
|
972
|
+
value = 255 + value + 1;
|
973
|
+
}
|
974
|
+
this[offset] = value & 255;
|
975
|
+
return offset + 1;
|
976
|
+
}
|
977
|
+
/**
|
978
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 16-bit
|
979
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 16-bit integer.
|
980
|
+
*
|
981
|
+
* @param value Number to write.
|
982
|
+
* @param offset Number of bytes to skip before starting to write.
|
983
|
+
* @param noAssert
|
984
|
+
* @returns `offset` plus the number of bytes written.
|
985
|
+
*/
|
986
|
+
writeInt16LE(value, offset, noAssert) {
|
987
|
+
value = +value;
|
988
|
+
offset = offset >>> 0;
|
989
|
+
if (!noAssert) {
|
990
|
+
Buffer._checkInt(this, value, offset, 2, 32767, -32768);
|
991
|
+
}
|
992
|
+
this[offset] = value & 255;
|
993
|
+
this[offset + 1] = value >>> 8;
|
994
|
+
return offset + 2;
|
995
|
+
}
|
996
|
+
/**
|
997
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 16-bit
|
998
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 16-bit integer.
|
999
|
+
*
|
1000
|
+
* @param value Number to write.
|
1001
|
+
* @param offset Number of bytes to skip before starting to write.
|
1002
|
+
* @param noAssert
|
1003
|
+
* @returns `offset` plus the number of bytes written.
|
1004
|
+
*/
|
1005
|
+
writeInt16BE(value, offset, noAssert) {
|
1006
|
+
value = +value;
|
1007
|
+
offset = offset >>> 0;
|
1008
|
+
if (!noAssert) {
|
1009
|
+
Buffer._checkInt(this, value, offset, 2, 32767, -32768);
|
1010
|
+
}
|
1011
|
+
this[offset] = value >>> 8;
|
1012
|
+
this[offset + 1] = value & 255;
|
1013
|
+
return offset + 2;
|
1014
|
+
}
|
1015
|
+
/**
|
1016
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 32-bit
|
1017
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 32-bit integer.
|
1018
|
+
*
|
1019
|
+
* @param value Number to write.
|
1020
|
+
* @param offset Number of bytes to skip before starting to write.
|
1021
|
+
* @param noAssert
|
1022
|
+
* @returns `offset` plus the number of bytes written.
|
1023
|
+
*/
|
1024
|
+
writeInt32LE(value, offset, noAssert) {
|
1025
|
+
value = +value;
|
1026
|
+
offset = offset >>> 0;
|
1027
|
+
if (!noAssert) {
|
1028
|
+
Buffer._checkInt(this, value, offset, 4, 2147483647, -2147483648);
|
1029
|
+
}
|
1030
|
+
this[offset] = value & 255;
|
1031
|
+
this[offset + 1] = value >>> 8;
|
1032
|
+
this[offset + 2] = value >>> 16;
|
1033
|
+
this[offset + 3] = value >>> 24;
|
1034
|
+
return offset + 4;
|
1035
|
+
}
|
1036
|
+
/**
|
1037
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 32-bit
|
1038
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 32-bit integer.
|
1039
|
+
*
|
1040
|
+
* @param value Number to write.
|
1041
|
+
* @param offset Number of bytes to skip before starting to write.
|
1042
|
+
* @param noAssert
|
1043
|
+
* @returns `offset` plus the number of bytes written.
|
1044
|
+
*/
|
1045
|
+
writeInt32BE(value, offset, noAssert) {
|
1046
|
+
value = +value;
|
1047
|
+
offset = offset >>> 0;
|
1048
|
+
if (!noAssert) {
|
1049
|
+
Buffer._checkInt(this, value, offset, 4, 2147483647, -2147483648);
|
1050
|
+
}
|
1051
|
+
if (value < 0) {
|
1052
|
+
value = 4294967295 + value + 1;
|
1053
|
+
}
|
1054
|
+
this[offset] = value >>> 24;
|
1055
|
+
this[offset + 1] = value >>> 16;
|
1056
|
+
this[offset + 2] = value >>> 8;
|
1057
|
+
this[offset + 3] = value & 255;
|
1058
|
+
return offset + 4;
|
1059
|
+
}
|
1060
|
+
/**
 * Fills `buf` with the specified `value`. If the `offset` and `end` are not given, the entire `buf` will be
 * filled. The `value` is coerced to a `uint32` value if it is not a string, `Buffer`, or integer. If the resulting
 * integer is greater than `255` (decimal), then `buf` will be filled with `value & 255`.
 *
 * If the final write of a `fill()` operation falls on a multi-byte character, then only the bytes of that
 * character that fit into `buf` are written.
 *
 * If `value` contains invalid characters, it is truncated; if no valid fill data remains, an exception is thrown.
 *
 * Supported call shapes: fill(value), fill(value, encoding), fill(value, offset[, end][, encoding]).
 *
 * @param value Byte, string, Buffer, or boolean to fill with.
 * @param offset Start index (defaults to 0).
 * @param end End index, exclusive (defaults to buf.length).
 * @param encoding Encoding of `value` when it is a string.
 */
fill(value, offset, end, encoding) {
  if (typeof value === "string") {
    // Shift string arguments into place for the overloaded signatures
    // fill(value, encoding) and fill(value, offset, encoding).
    if (typeof offset === "string") {
      encoding = offset;
      offset = 0;
      end = this.length;
    } else if (typeof end === "string") {
      encoding = end;
      end = this.length;
    }
    if (encoding !== void 0 && typeof encoding !== "string") {
      throw new TypeError("encoding must be a string");
    }
    if (typeof encoding === "string" && !Buffer.isEncoding(encoding)) {
      throw new TypeError("Unknown encoding: " + encoding);
    }
    // Fast path: a single ASCII character in utf8 is just a byte value.
    if (value.length === 1) {
      const code = value.charCodeAt(0);
      if (encoding === "utf8" && code < 128) {
        value = code;
      }
    }
  } else if (typeof value === "number") {
    value = value & 255; // coerce to a single octet
  } else if (typeof value === "boolean") {
    value = Number(value); // true -> 1, false -> 0
  }
  // Default the range only after the overload shifting above.
  offset ?? (offset = 0);
  end ?? (end = this.length);
  if (offset < 0 || this.length < offset || this.length < end) {
    throw new RangeError("Out of range index");
  }
  if (end <= offset) {
    return this; // empty range: nothing to fill
  }
  offset = offset >>> 0;
  end = end === void 0 ? this.length : end >>> 0;
  value || (value = 0); // falsy fill values (e.g. '') fall back to zero bytes
  let i;
  if (typeof value === "number") {
    // Byte fill.
    for (i = offset; i < end; ++i) {
      this[i] = value;
    }
  } else {
    // String/Buffer fill: repeat the byte pattern across the range.
    const bytes = Buffer.isBuffer(value) ? value : Buffer.from(value, encoding);
    const len = bytes.length;
    if (len === 0) {
      throw new TypeError('The value "' + value + '" is invalid for argument "value"');
    }
    for (i = 0; i < end - offset; ++i) {
      this[i + offset] = bytes[i % len];
    }
  }
  return this;
}
|
1128
|
+
/**
|
1129
|
+
* Returns the index of the specified value.
|
1130
|
+
*
|
1131
|
+
* If `value` is:
|
1132
|
+
* - a string, `value` is interpreted according to the character encoding in `encoding`.
|
1133
|
+
* - a `Buffer` or `Uint8Array`, `value` will be used in its entirety. To compare a partial Buffer, use `slice()`.
|
1134
|
+
* - a number, `value` will be interpreted as an unsigned 8-bit integer value between `0` and `255`.
|
1135
|
+
*
|
1136
|
+
* Any other types will throw a `TypeError`.
|
1137
|
+
*
|
1138
|
+
* @param value What to search for.
|
1139
|
+
* @param byteOffset Where to begin searching in `buf`. If negative, then calculated from the end.
|
1140
|
+
* @param encoding If `value` is a string, this is the encoding used to search.
|
1141
|
+
* @returns The index of the first occurrence of `value` in `buf`, or `-1` if not found.
|
1142
|
+
*/
|
1143
|
+
indexOf(value, byteOffset, encoding) {
|
1144
|
+
return this._bidirectionalIndexOf(this, value, byteOffset, encoding, true);
|
1145
|
+
}
|
1146
|
+
/**
|
1147
|
+
* Gets the last index of the specified value.
|
1148
|
+
*
|
1149
|
+
* @see indexOf()
|
1150
|
+
* @param value
|
1151
|
+
* @param byteOffset
|
1152
|
+
* @param encoding
|
1153
|
+
*/
|
1154
|
+
lastIndexOf(value, byteOffset, encoding) {
|
1155
|
+
return this._bidirectionalIndexOf(this, value, byteOffset, encoding, false);
|
1156
|
+
}
|
1157
|
+
/**
 * Shared implementation behind indexOf()/lastIndexOf().
 *
 * @param buffer Buffer to scan.
 * @param val Needle: string, number (byte), or Buffer/Uint8Array.
 * @param byteOffset Starting position; may be a string (treated as encoding).
 * @param encoding Encoding used when `val` is a string.
 * @param dir true = forward search, false = backward search.
 * @returns Index of the match, or -1.
 */
_bidirectionalIndexOf(buffer, val, byteOffset, encoding, dir) {
  if (buffer.length === 0) {
    return -1;
  }
  // indexOf(value, encoding) overload: the offset slot holds the encoding.
  if (typeof byteOffset === "string") {
    encoding = byteOffset;
    byteOffset = 0;
  } else if (typeof byteOffset === "undefined") {
    byteOffset = 0;
  } else if (byteOffset > 2147483647) {
    // Clamp to the signed 32-bit range.
    byteOffset = 2147483647;
  } else if (byteOffset < -2147483648) {
    byteOffset = -2147483648;
  }
  byteOffset = +byteOffset;
  // Self-inequality is the NaN check: a NaN offset means "search the
  // whole buffer" from the appropriate end.
  if (byteOffset !== byteOffset) {
    byteOffset = dir ? 0 : buffer.length - 1;
  }
  // Negative offsets count from the end.
  if (byteOffset < 0) {
    byteOffset = buffer.length + byteOffset;
  }
  if (byteOffset >= buffer.length) {
    if (dir) {
      return -1; // forward search starting past the end can never match
    } else {
      byteOffset = buffer.length - 1; // backward search clamps to last index
    }
  } else if (byteOffset < 0) {
    if (dir) {
      byteOffset = 0; // forward search clamps to the start
    } else {
      return -1; // backward search starting before the start can never match
    }
  }
  // Normalize string needles to bytes so all searches are byte-wise.
  if (typeof val === "string") {
    val = Buffer.from(val, encoding);
  }
  if (Buffer.isBuffer(val)) {
    if (val.length === 0) {
      return -1; // empty needle never matches (Node semantics differ; this is the polyfill's choice)
    }
    return Buffer._arrayIndexOf(buffer, val, byteOffset, encoding, dir);
  } else if (typeof val === "number") {
    val = val & 255; // single-byte needle
    // Prefer the native typed-array scan when available.
    if (typeof Uint8Array.prototype.indexOf === "function") {
      if (dir) {
        return Uint8Array.prototype.indexOf.call(buffer, val, byteOffset);
      } else {
        return Uint8Array.prototype.lastIndexOf.call(buffer, val, byteOffset);
      }
    }
    return Buffer._arrayIndexOf(buffer, Buffer.from([val]), byteOffset, encoding, dir);
  }
  throw new TypeError("val must be string, number or Buffer");
}
|
1212
|
+
/**
|
1213
|
+
* Equivalent to `buf.indexOf() !== -1`.
|
1214
|
+
*
|
1215
|
+
* @param value
|
1216
|
+
* @param byteOffset
|
1217
|
+
* @param encoding
|
1218
|
+
*/
|
1219
|
+
includes(value, byteOffset, encoding) {
|
1220
|
+
return this.indexOf(value, byteOffset, encoding) !== -1;
|
1221
|
+
}
|
1222
|
+
/**
|
1223
|
+
* Creates a new buffer from the given parameters.
|
1224
|
+
*
|
1225
|
+
* @param data
|
1226
|
+
* @param encoding
|
1227
|
+
*/
|
1228
|
+
static from(a, b, c) {
|
1229
|
+
return new Buffer(a, b, c);
|
1230
|
+
}
|
1231
|
+
/**
|
1232
|
+
* Returns true if `obj` is a Buffer.
|
1233
|
+
*
|
1234
|
+
* @param obj
|
1235
|
+
*/
|
1236
|
+
static isBuffer(obj) {
|
1237
|
+
return obj != null && obj !== Buffer.prototype && Buffer._isInstance(obj, Buffer);
|
1238
|
+
}
|
1239
|
+
/**
|
1240
|
+
* Returns true if `encoding` is a supported encoding.
|
1241
|
+
*
|
1242
|
+
* @param encoding
|
1243
|
+
*/
|
1244
|
+
static isEncoding(encoding) {
|
1245
|
+
switch (encoding.toLowerCase()) {
|
1246
|
+
case "hex":
|
1247
|
+
case "utf8":
|
1248
|
+
case "ascii":
|
1249
|
+
case "binary":
|
1250
|
+
case "latin1":
|
1251
|
+
case "ucs2":
|
1252
|
+
case "utf16le":
|
1253
|
+
case "base64":
|
1254
|
+
return true;
|
1255
|
+
default:
|
1256
|
+
return false;
|
1257
|
+
}
|
1258
|
+
}
|
1259
|
+
/**
 * Gives the actual byte length of a string for an encoding. This is not the same as `string.length` since that
 * returns the number of characters in the string.
 *
 * @param string The string to test (also accepts Buffer, ArrayBuffer, or a view).
 * @param encoding The encoding to use for calculation. Default is `utf8`.
 */
static byteLength(string, encoding) {
  // Buffers already know their byte length.
  if (Buffer.isBuffer(string)) {
    return string.length;
  }
  // ArrayBuffers and typed-array views expose byteLength directly.
  if (typeof string !== "string" && (ArrayBuffer.isView(string) || Buffer._isInstance(string, ArrayBuffer))) {
    return string.byteLength;
  }
  if (typeof string !== "string") {
    throw new TypeError(
      'The "string" argument must be one of type string, Buffer, or ArrayBuffer. Received type ' + typeof string
    );
  }
  const len = string.length;
  // Hidden third argument (internal protocol): when `true`, an unknown
  // encoding yields -1 instead of falling back to utf8.
  const mustMatch = arguments.length > 2 && arguments[2] === true;
  if (!mustMatch && len === 0) {
    return 0;
  }
  switch (encoding?.toLowerCase()) {
    case "ascii":
    case "latin1":
    case "binary":
      return len; // one byte per character
    case "utf8":
      return Buffer._utf8ToBytes(string).length;
    case "hex":
      return len >>> 1; // two hex digits per byte
    case "ucs2":
    case "utf16le":
      return len * 2; // two bytes per UTF-16 code unit
    case "base64":
      return Buffer._base64ToBytes(string).length;
    default:
      // Unknown (or undefined) encoding: utf8 fallback unless an exact
      // match was demanded via the hidden argument.
      return mustMatch ? -1 : Buffer._utf8ToBytes(string).length;
  }
}
|
1301
|
+
/**
 * Returns a Buffer which is the result of concatenating all the buffers in the list together.
 *
 * - If the list has no items, or if the `totalLength` is 0, then it returns a zero-length buffer.
 * - If the list has exactly one item, then the first item is returned.
 * - If the list has more than one item, then a new buffer is created.
 *
 * It is faster to provide the `totalLength` if it is known. However, it will be calculated if not provided at
 * a small computational expense.
 *
 * @param list An array of Buffer objects to concatenate.
 * @param totalLength Total length of the buffers when concatenated.
 */
static concat(list, totalLength) {
  if (!Array.isArray(list)) {
    throw new TypeError('"list" argument must be an Array of Buffers');
  }
  if (list.length === 0) {
    return Buffer.alloc(0);
  }
  let i;
  // Sum the lengths when the caller did not supply totalLength.
  if (totalLength === void 0) {
    totalLength = 0;
    for (i = 0; i < list.length; ++i) {
      totalLength += list[i].length;
    }
  }
  const buffer = Buffer.allocUnsafe(totalLength);
  let pos = 0;
  for (i = 0; i < list.length; ++i) {
    let buf = list[i];
    if (Buffer._isInstance(buf, Uint8Array)) {
      if (pos + buf.length > buffer.length) {
        // Overlong chunk: Buffer#copy truncates safely, so promote plain
        // Uint8Arrays to Buffer and use copy().
        if (!Buffer.isBuffer(buf)) {
          buf = Buffer.from(buf);
        }
        buf.copy(buffer, pos);
      } else {
        // Fits entirely: the native typed-array set() is the fast path.
        Uint8Array.prototype.set.call(buffer, buf, pos);
      }
    } else if (!Buffer.isBuffer(buf)) {
      throw new TypeError('"list" argument must be an Array of Buffers');
    } else {
      buf.copy(buffer, pos);
    }
    pos += buf.length;
  }
  return buffer;
}
|
1350
|
+
/**
 * The same as `buf1.compare(buf2)`: lexicographic byte comparison.
 *
 * @param buf1 First buffer (Buffer or Uint8Array).
 * @param buf2 Second buffer (Buffer or Uint8Array).
 * @returns -1 if buf1 sorts before buf2, 1 if after, 0 if equal.
 */
static compare(buf1, buf2) {
  // Promote plain Uint8Arrays so both operands are Buffers.
  if (Buffer._isInstance(buf1, Uint8Array)) {
    buf1 = Buffer.from(buf1, buf1.byteOffset, buf1.byteLength);
  }
  if (Buffer._isInstance(buf2, Uint8Array)) {
    buf2 = Buffer.from(buf2, buf2.byteOffset, buf2.byteLength);
  }
  if (!Buffer.isBuffer(buf1) || !Buffer.isBuffer(buf2)) {
    throw new TypeError('The "buf1", "buf2" arguments must be one of type Buffer or Uint8Array');
  }
  if (buf1 === buf2) {
    return 0;
  }
  // x/y start as lengths; if a differing byte is found they are
  // reassigned to that byte pair, so the final comparison covers both
  // the "first differing byte" and the "shorter buffer wins" cases.
  let x = buf1.length;
  let y = buf2.length;
  for (let i = 0, len = Math.min(x, y); i < len; ++i) {
    if (buf1[i] !== buf2[i]) {
      x = buf1[i];
      y = buf2[i];
      break;
    }
  }
  if (x < y) {
    return -1;
  }
  if (y < x) {
    return 1;
  }
  return 0;
}
|
1383
|
+
/**
|
1384
|
+
* Allocates a new buffer of `size` octets.
|
1385
|
+
*
|
1386
|
+
* @param size The number of octets to allocate.
|
1387
|
+
* @param fill If specified, the buffer will be initialized by calling `buf.fill(fill)`, or with zeroes otherwise.
|
1388
|
+
* @param encoding The encoding used for the call to `buf.fill()` while initializing.
|
1389
|
+
*/
|
1390
|
+
static alloc(size, fill, encoding) {
|
1391
|
+
if (typeof size !== "number") {
|
1392
|
+
throw new TypeError('"size" argument must be of type number');
|
1393
|
+
} else if (size < 0) {
|
1394
|
+
throw new RangeError('The value "' + size + '" is invalid for option "size"');
|
1395
|
+
}
|
1396
|
+
if (size <= 0) {
|
1397
|
+
return new Buffer(size);
|
1398
|
+
}
|
1399
|
+
if (fill !== void 0) {
|
1400
|
+
return typeof encoding === "string" ? new Buffer(size).fill(fill, 0, size, encoding) : new Buffer(size).fill(fill);
|
1401
|
+
}
|
1402
|
+
return new Buffer(size);
|
1403
|
+
}
|
1404
|
+
/**
|
1405
|
+
* Allocates a new buffer of `size` octets without initializing memory. The contents of the buffer are unknown.
|
1406
|
+
*
|
1407
|
+
* @param size
|
1408
|
+
*/
|
1409
|
+
static allocUnsafe(size) {
|
1410
|
+
if (typeof size !== "number") {
|
1411
|
+
throw new TypeError('"size" argument must be of type number');
|
1412
|
+
} else if (size < 0) {
|
1413
|
+
throw new RangeError('The value "' + size + '" is invalid for option "size"');
|
1414
|
+
}
|
1415
|
+
return new Buffer(size < 0 ? 0 : Buffer._checked(size) | 0);
|
1416
|
+
}
|
1417
|
+
/**
|
1418
|
+
* Returns true if the given `obj` is an instance of `type`.
|
1419
|
+
*
|
1420
|
+
* @param obj
|
1421
|
+
* @param type
|
1422
|
+
*/
|
1423
|
+
static _isInstance(obj, type) {
|
1424
|
+
return obj instanceof type || obj != null && obj.constructor != null && obj.constructor.name != null && obj.constructor.name === type.name;
|
1425
|
+
}
|
1426
|
+
static _checked(length) {
|
1427
|
+
if (length >= K_MAX_LENGTH) {
|
1428
|
+
throw new RangeError(
|
1429
|
+
"Attempt to allocate Buffer larger than maximum size: 0x" + K_MAX_LENGTH.toString(16) + " bytes"
|
1430
|
+
);
|
1431
|
+
}
|
1432
|
+
return length | 0;
|
1433
|
+
}
|
1434
|
+
static _blitBuffer(src, dst, offset, length) {
|
1435
|
+
let i;
|
1436
|
+
for (i = 0; i < length; ++i) {
|
1437
|
+
if (i + offset >= dst.length || i >= src.length) {
|
1438
|
+
break;
|
1439
|
+
}
|
1440
|
+
dst[i + offset] = src[i];
|
1441
|
+
}
|
1442
|
+
return i;
|
1443
|
+
}
|
1444
|
+
static _utf8Write(buf, string, offset, length) {
|
1445
|
+
return Buffer._blitBuffer(Buffer._utf8ToBytes(string, buf.length - offset), buf, offset, length);
|
1446
|
+
}
|
1447
|
+
static _asciiWrite(buf, string, offset, length) {
|
1448
|
+
return Buffer._blitBuffer(Buffer._asciiToBytes(string), buf, offset, length);
|
1449
|
+
}
|
1450
|
+
static _base64Write(buf, string, offset, length) {
|
1451
|
+
return Buffer._blitBuffer(Buffer._base64ToBytes(string), buf, offset, length);
|
1452
|
+
}
|
1453
|
+
static _ucs2Write(buf, string, offset, length) {
|
1454
|
+
return Buffer._blitBuffer(Buffer._utf16leToBytes(string, buf.length - offset), buf, offset, length);
|
1455
|
+
}
|
1456
|
+
/**
 * Decodes a hex `string` two digits at a time and writes the bytes into
 * `buf` starting at `offset`.
 *
 * @param buf Destination buffer.
 * @param string Hex digits to decode.
 * @param offset Destination start index.
 * @param length Maximum number of bytes to write.
 * @returns The number of bytes written; decoding stops at the first
 *   non-hex digit pair.
 */
static _hexWrite(buf, string, offset, length) {
  offset = Number(offset) || 0;
  const remaining = buf.length - offset;
  // Clamp the requested length to the space left in the buffer...
  if (!length) {
    length = remaining;
  } else {
    length = Number(length);
    if (length > remaining) {
      length = remaining;
    }
  }
  // ...and to the number of byte pairs the string actually holds.
  const strLen = string.length;
  if (length > strLen / 2) {
    length = strLen / 2;
  }
  let i;
  for (i = 0; i < length; ++i) {
    const parsed = parseInt(string.substr(i * 2, 2), 16);
    // Self-inequality is the NaN check: stop at the first invalid pair.
    if (parsed !== parsed) {
      return i;
    }
    buf[offset + i] = parsed;
  }
  return i;
}
|
1481
|
+
/**
 * Encodes a JS string as an array of UTF-8 bytes, emitting at most `units`
 * bytes. Lone or malformed surrogates are replaced with U+FFFD
 * (bytes 0xEF 0xBF 0xBD).
 *
 * @param string Source string (iterated by UTF-16 code unit).
 * @param units Maximum number of bytes to emit (default Infinity).
 * @returns Array of byte values.
 */
static _utf8ToBytes(string, units) {
  units = units || Infinity;
  const length = string.length;
  const bytes = [];
  let codePoint;
  // Pending high surrogate awaiting its low half.
  let leadSurrogate = null;
  for (let i = 0; i < length; ++i) {
    codePoint = string.charCodeAt(i);
    // Surrogate range: 0xD800 (55296) .. 0xDFFF (57343).
    if (codePoint > 55295 && codePoint < 57344) {
      if (!leadSurrogate) {
        if (codePoint > 56319) {
          // Unpaired low surrogate: emit the replacement character.
          if ((units -= 3) > -1) {
            bytes.push(239, 191, 189);
          }
          continue;
        } else if (i + 1 === length) {
          // High surrogate at end of string: also unpaired.
          if ((units -= 3) > -1) {
            bytes.push(239, 191, 189);
          }
          continue;
        }
        // Remember the high surrogate and look at the next unit.
        leadSurrogate = codePoint;
        continue;
      }
      if (codePoint < 56320) {
        // Two high surrogates in a row: replace the first, keep the second.
        if ((units -= 3) > -1) {
          bytes.push(239, 191, 189);
        }
        leadSurrogate = codePoint;
        continue;
      }
      // Valid pair: combine into a supplementary code point.
      codePoint = (leadSurrogate - 55296 << 10 | codePoint - 56320) + 65536;
    } else if (leadSurrogate) {
      // High surrogate followed by a non-surrogate: replace it.
      if ((units -= 3) > -1) {
        bytes.push(239, 191, 189);
      }
    }
    leadSurrogate = null;
    // Emit 1-4 bytes depending on the code point's magnitude; each branch
    // first checks the remaining byte budget.
    if (codePoint < 128) {
      if ((units -= 1) < 0) {
        break;
      }
      bytes.push(codePoint);
    } else if (codePoint < 2048) {
      if ((units -= 2) < 0) {
        break;
      }
      bytes.push(codePoint >> 6 | 192, codePoint & 63 | 128);
    } else if (codePoint < 65536) {
      if ((units -= 3) < 0) {
        break;
      }
      bytes.push(codePoint >> 12 | 224, codePoint >> 6 & 63 | 128, codePoint & 63 | 128);
    } else if (codePoint < 1114112) {
      if ((units -= 4) < 0) {
        break;
      }
      bytes.push(
        codePoint >> 18 | 240,
        codePoint >> 12 & 63 | 128,
        codePoint >> 6 & 63 | 128,
        codePoint & 63 | 128
      );
    } else {
      throw new Error("Invalid code point");
    }
  }
  return bytes;
}
|
1550
|
+
static _base64ToBytes(str) {
|
1551
|
+
return toByteArray(base64clean(str));
|
1552
|
+
}
|
1553
|
+
static _asciiToBytes(str) {
|
1554
|
+
const byteArray = [];
|
1555
|
+
for (let i = 0; i < str.length; ++i) {
|
1556
|
+
byteArray.push(str.charCodeAt(i) & 255);
|
1557
|
+
}
|
1558
|
+
return byteArray;
|
1559
|
+
}
|
1560
|
+
static _utf16leToBytes(str, units) {
|
1561
|
+
let c, hi, lo;
|
1562
|
+
const byteArray = [];
|
1563
|
+
for (let i = 0; i < str.length; ++i) {
|
1564
|
+
if ((units -= 2) < 0) break;
|
1565
|
+
c = str.charCodeAt(i);
|
1566
|
+
hi = c >> 8;
|
1567
|
+
lo = c % 256;
|
1568
|
+
byteArray.push(lo);
|
1569
|
+
byteArray.push(hi);
|
1570
|
+
}
|
1571
|
+
return byteArray;
|
1572
|
+
}
|
1573
|
+
static _hexSlice(buf, start, end) {
|
1574
|
+
const len = buf.length;
|
1575
|
+
if (!start || start < 0) {
|
1576
|
+
start = 0;
|
1577
|
+
}
|
1578
|
+
if (!end || end < 0 || end > len) {
|
1579
|
+
end = len;
|
1580
|
+
}
|
1581
|
+
let out = "";
|
1582
|
+
for (let i = start; i < end; ++i) {
|
1583
|
+
out += hexSliceLookupTable[buf[i]];
|
1584
|
+
}
|
1585
|
+
return out;
|
1586
|
+
}
|
1587
|
+
static _base64Slice(buf, start, end) {
|
1588
|
+
if (start === 0 && end === buf.length) {
|
1589
|
+
return fromByteArray(buf);
|
1590
|
+
} else {
|
1591
|
+
return fromByteArray(buf.slice(start, end));
|
1592
|
+
}
|
1593
|
+
}
|
1594
|
+
/**
 * Decodes the UTF-8 bytes in `buf[start:end]` into a string. Invalid
 * sequences decode to U+FFFD (65533) and resynchronize one byte later.
 *
 * @param buf Source bytes.
 * @param start Start index.
 * @param end End index, exclusive (clamped to buf.length).
 */
static _utf8Slice(buf, start, end) {
  end = Math.min(buf.length, end);
  // Accumulates UTF-16 code units; joined at the end.
  const res = [];
  let i = start;
  while (i < end) {
    const firstByte = buf[i];
    let codePoint = null;
    // Sequence length from the lead byte: F0.. -> 4, E0.. -> 3, C0.. -> 2, else 1.
    let bytesPerSequence = firstByte > 239 ? 4 : firstByte > 223 ? 3 : firstByte > 191 ? 2 : 1;
    // Only attempt a multi-byte decode when the full sequence fits in range.
    if (i + bytesPerSequence <= end) {
      let secondByte, thirdByte, fourthByte, tempCodePoint;
      switch (bytesPerSequence) {
        case 1:
          if (firstByte < 128) {
            codePoint = firstByte; // plain ASCII
          }
          break;
        case 2:
          secondByte = buf[i + 1];
          // Continuation bytes must match 10xxxxxx (0x80..0xBF).
          if ((secondByte & 192) === 128) {
            tempCodePoint = (firstByte & 31) << 6 | secondByte & 63;
            // Reject overlong encodings (< 0x80).
            if (tempCodePoint > 127) {
              codePoint = tempCodePoint;
            }
          }
          break;
        case 3:
          secondByte = buf[i + 1];
          thirdByte = buf[i + 2];
          if ((secondByte & 192) === 128 && (thirdByte & 192) === 128) {
            tempCodePoint = (firstByte & 15) << 12 | (secondByte & 63) << 6 | thirdByte & 63;
            // Reject overlong encodings and surrogate code points.
            if (tempCodePoint > 2047 && (tempCodePoint < 55296 || tempCodePoint > 57343)) {
              codePoint = tempCodePoint;
            }
          }
          break;
        case 4:
          secondByte = buf[i + 1];
          thirdByte = buf[i + 2];
          fourthByte = buf[i + 3];
          if ((secondByte & 192) === 128 && (thirdByte & 192) === 128 && (fourthByte & 192) === 128) {
            tempCodePoint = (firstByte & 15) << 18 | (secondByte & 63) << 12 | (thirdByte & 63) << 6 | fourthByte & 63;
            // Reject overlong encodings and values beyond U+10FFFF.
            if (tempCodePoint > 65535 && tempCodePoint < 1114112) {
              codePoint = tempCodePoint;
            }
          }
          break;
      }
    }
    if (codePoint === null) {
      // Invalid/truncated sequence: emit the replacement character and
      // advance a single byte to resynchronize.
      codePoint = 65533;
      bytesPerSequence = 1;
    } else if (codePoint > 65535) {
      // Supplementary plane: split into a UTF-16 surrogate pair.
      codePoint -= 65536;
      res.push(codePoint >>> 10 & 1023 | 55296);
      codePoint = 56320 | codePoint & 1023;
    }
    res.push(codePoint);
    i += bytesPerSequence;
  }
  return Buffer._decodeCodePointsArray(res);
}
|
1654
|
+
static _decodeCodePointsArray(codePoints) {
|
1655
|
+
const len = codePoints.length;
|
1656
|
+
if (len <= MAX_ARGUMENTS_LENGTH) {
|
1657
|
+
return String.fromCharCode.apply(String, codePoints);
|
1658
|
+
}
|
1659
|
+
let res = "";
|
1660
|
+
let i = 0;
|
1661
|
+
while (i < len) {
|
1662
|
+
res += String.fromCharCode.apply(String, codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH));
|
1663
|
+
}
|
1664
|
+
return res;
|
1665
|
+
}
|
1666
|
+
static _asciiSlice(buf, start, end) {
|
1667
|
+
let ret = "";
|
1668
|
+
end = Math.min(buf.length, end);
|
1669
|
+
for (let i = start; i < end; ++i) {
|
1670
|
+
ret += String.fromCharCode(buf[i] & 127);
|
1671
|
+
}
|
1672
|
+
return ret;
|
1673
|
+
}
|
1674
|
+
static _latin1Slice(buf, start, end) {
|
1675
|
+
let ret = "";
|
1676
|
+
end = Math.min(buf.length, end);
|
1677
|
+
for (let i = start; i < end; ++i) {
|
1678
|
+
ret += String.fromCharCode(buf[i]);
|
1679
|
+
}
|
1680
|
+
return ret;
|
1681
|
+
}
|
1682
|
+
static _utf16leSlice(buf, start, end) {
|
1683
|
+
const bytes = buf.slice(start, end);
|
1684
|
+
let res = "";
|
1685
|
+
for (let i = 0; i < bytes.length - 1; i += 2) {
|
1686
|
+
res += String.fromCharCode(bytes[i] + bytes[i + 1] * 256);
|
1687
|
+
}
|
1688
|
+
return res;
|
1689
|
+
}
|
1690
|
+
static _arrayIndexOf(arr, val, byteOffset, encoding, dir) {
|
1691
|
+
let indexSize = 1;
|
1692
|
+
let arrLength = arr.length;
|
1693
|
+
let valLength = val.length;
|
1694
|
+
if (encoding !== void 0) {
|
1695
|
+
encoding = Buffer._getEncoding(encoding);
|
1696
|
+
if (encoding === "ucs2" || encoding === "utf16le") {
|
1697
|
+
if (arr.length < 2 || val.length < 2) {
|
1698
|
+
return -1;
|
1699
|
+
}
|
1700
|
+
indexSize = 2;
|
1701
|
+
arrLength /= 2;
|
1702
|
+
valLength /= 2;
|
1703
|
+
byteOffset /= 2;
|
1704
|
+
}
|
1705
|
+
}
|
1706
|
+
function read(buf, i2) {
|
1707
|
+
if (indexSize === 1) {
|
1708
|
+
return buf[i2];
|
1709
|
+
} else {
|
1710
|
+
return buf.readUInt16BE(i2 * indexSize);
|
1711
|
+
}
|
1712
|
+
}
|
1713
|
+
let i;
|
1714
|
+
if (dir) {
|
1715
|
+
let foundIndex = -1;
|
1716
|
+
for (i = byteOffset; i < arrLength; i++) {
|
1717
|
+
if (read(arr, i) === read(val, foundIndex === -1 ? 0 : i - foundIndex)) {
|
1718
|
+
if (foundIndex === -1) foundIndex = i;
|
1719
|
+
if (i - foundIndex + 1 === valLength) return foundIndex * indexSize;
|
1720
|
+
} else {
|
1721
|
+
if (foundIndex !== -1) i -= i - foundIndex;
|
1722
|
+
foundIndex = -1;
|
1723
|
+
}
|
1724
|
+
}
|
1725
|
+
} else {
|
1726
|
+
if (byteOffset + valLength > arrLength) {
|
1727
|
+
byteOffset = arrLength - valLength;
|
1728
|
+
}
|
1729
|
+
for (i = byteOffset; i >= 0; i--) {
|
1730
|
+
let found = true;
|
1731
|
+
for (let j = 0; j < valLength; j++) {
|
1732
|
+
if (read(arr, i + j) !== read(val, j)) {
|
1733
|
+
found = false;
|
1734
|
+
break;
|
1735
|
+
}
|
1736
|
+
}
|
1737
|
+
if (found) {
|
1738
|
+
return i;
|
1739
|
+
}
|
1740
|
+
}
|
1741
|
+
}
|
1742
|
+
return -1;
|
1743
|
+
}
|
1744
|
+
static _checkOffset(offset, ext, length) {
|
1745
|
+
if (offset % 1 !== 0 || offset < 0) throw new RangeError("offset is not uint");
|
1746
|
+
if (offset + ext > length) throw new RangeError("Trying to access beyond buffer length");
|
1747
|
+
}
|
1748
|
+
static _checkInt(buf, value, offset, ext, max, min) {
|
1749
|
+
if (!Buffer.isBuffer(buf)) throw new TypeError('"buffer" argument must be a Buffer instance');
|
1750
|
+
if (value > max || value < min) throw new RangeError('"value" argument is out of bounds');
|
1751
|
+
if (offset + ext > buf.length) throw new RangeError("Index out of range");
|
1752
|
+
}
|
1753
|
+
static _getEncoding(encoding) {
|
1754
|
+
let toLowerCase = false;
|
1755
|
+
let originalEncoding = "";
|
1756
|
+
for (; ; ) {
|
1757
|
+
switch (encoding) {
|
1758
|
+
case "hex":
|
1759
|
+
return "hex";
|
1760
|
+
case "utf8":
|
1761
|
+
return "utf8";
|
1762
|
+
case "ascii":
|
1763
|
+
return "ascii";
|
1764
|
+
case "binary":
|
1765
|
+
return "binary";
|
1766
|
+
case "latin1":
|
1767
|
+
return "latin1";
|
1768
|
+
case "ucs2":
|
1769
|
+
return "ucs2";
|
1770
|
+
case "utf16le":
|
1771
|
+
return "utf16le";
|
1772
|
+
case "base64":
|
1773
|
+
return "base64";
|
1774
|
+
default: {
|
1775
|
+
if (toLowerCase) {
|
1776
|
+
throw new TypeError("Unknown or unsupported encoding: " + originalEncoding);
|
1777
|
+
}
|
1778
|
+
toLowerCase = true;
|
1779
|
+
originalEncoding = encoding;
|
1780
|
+
encoding = encoding.toLowerCase();
|
1781
|
+
}
|
1782
|
+
}
|
1783
|
+
}
|
1784
|
+
}
|
1785
|
+
}
|
1786
|
+
const hexSliceLookupTable = function() {
|
1787
|
+
const alphabet = "0123456789abcdef";
|
1788
|
+
const table = new Array(256);
|
1789
|
+
for (let i = 0; i < 16; ++i) {
|
1790
|
+
const i16 = i * 16;
|
1791
|
+
for (let j = 0; j < 16; ++j) {
|
1792
|
+
table[i16 + j] = alphabet[i] + alphabet[j];
|
1793
|
+
}
|
1794
|
+
}
|
1795
|
+
return table;
|
1796
|
+
}();
|
1797
|
+
const INVALID_BASE64_RE = /[^+/0-9A-Za-z-_]/g;
|
1798
|
+
function base64clean(str) {
|
1799
|
+
str = str.split("=")[0];
|
1800
|
+
str = str.trim().replace(INVALID_BASE64_RE, "");
|
1801
|
+
if (str.length < 2) return "";
|
1802
|
+
while (str.length % 4 !== 0) {
|
1803
|
+
str = str + "=";
|
1804
|
+
}
|
1805
|
+
return str;
|
1806
|
+
}
|
1807
|
+
|
25
1808
|
function notEmpty(value) {
|
26
1809
|
return value !== null && value !== void 0;
|
27
1810
|
}
|
@@ -116,155 +1899,15 @@ function promiseMap(inputValues, mapper) {
|
|
116
1899
|
return inputValues.reduce(reducer, Promise.resolve([]));
|
117
1900
|
}
|
118
1901
|
|
119
|
-
|
120
|
-
|
121
|
-
if (isDefined(process) && isDefined(process.env)) {
|
122
|
-
return {
|
123
|
-
apiKey: process.env.XATA_API_KEY ?? getGlobalApiKey(),
|
124
|
-
databaseURL: process.env.XATA_DATABASE_URL ?? getGlobalDatabaseURL(),
|
125
|
-
branch: process.env.XATA_BRANCH ?? getGlobalBranch(),
|
126
|
-
deployPreview: process.env.XATA_PREVIEW,
|
127
|
-
deployPreviewBranch: process.env.XATA_PREVIEW_BRANCH,
|
128
|
-
vercelGitCommitRef: process.env.VERCEL_GIT_COMMIT_REF,
|
129
|
-
vercelGitRepoOwner: process.env.VERCEL_GIT_REPO_OWNER
|
130
|
-
};
|
131
|
-
}
|
132
|
-
} catch (err) {
|
133
|
-
}
|
134
|
-
try {
|
135
|
-
if (isObject(Deno) && isObject(Deno.env)) {
|
136
|
-
return {
|
137
|
-
apiKey: Deno.env.get("XATA_API_KEY") ?? getGlobalApiKey(),
|
138
|
-
databaseURL: Deno.env.get("XATA_DATABASE_URL") ?? getGlobalDatabaseURL(),
|
139
|
-
branch: Deno.env.get("XATA_BRANCH") ?? getGlobalBranch(),
|
140
|
-
deployPreview: Deno.env.get("XATA_PREVIEW"),
|
141
|
-
deployPreviewBranch: Deno.env.get("XATA_PREVIEW_BRANCH"),
|
142
|
-
vercelGitCommitRef: Deno.env.get("VERCEL_GIT_COMMIT_REF"),
|
143
|
-
vercelGitRepoOwner: Deno.env.get("VERCEL_GIT_REPO_OWNER")
|
144
|
-
};
|
145
|
-
}
|
146
|
-
} catch (err) {
|
147
|
-
}
|
148
|
-
return {
|
149
|
-
apiKey: getGlobalApiKey(),
|
150
|
-
databaseURL: getGlobalDatabaseURL(),
|
151
|
-
branch: getGlobalBranch(),
|
152
|
-
deployPreview: void 0,
|
153
|
-
deployPreviewBranch: void 0,
|
154
|
-
vercelGitCommitRef: void 0,
|
155
|
-
vercelGitRepoOwner: void 0
|
156
|
-
};
|
157
|
-
}
|
158
|
-
function getEnableBrowserVariable() {
|
159
|
-
try {
|
160
|
-
if (isObject(process) && isObject(process.env) && process.env.XATA_ENABLE_BROWSER !== void 0) {
|
161
|
-
return process.env.XATA_ENABLE_BROWSER === "true";
|
162
|
-
}
|
163
|
-
} catch (err) {
|
164
|
-
}
|
165
|
-
try {
|
166
|
-
if (isObject(Deno) && isObject(Deno.env) && Deno.env.get("XATA_ENABLE_BROWSER") !== void 0) {
|
167
|
-
return Deno.env.get("XATA_ENABLE_BROWSER") === "true";
|
168
|
-
}
|
169
|
-
} catch (err) {
|
170
|
-
}
|
171
|
-
try {
|
172
|
-
return XATA_ENABLE_BROWSER === true || XATA_ENABLE_BROWSER === "true";
|
173
|
-
} catch (err) {
|
174
|
-
return void 0;
|
175
|
-
}
|
176
|
-
}
|
177
|
-
function getGlobalApiKey() {
|
178
|
-
try {
|
179
|
-
return XATA_API_KEY;
|
180
|
-
} catch (err) {
|
181
|
-
return void 0;
|
182
|
-
}
|
183
|
-
}
|
184
|
-
function getGlobalDatabaseURL() {
|
185
|
-
try {
|
186
|
-
return XATA_DATABASE_URL;
|
187
|
-
} catch (err) {
|
188
|
-
return void 0;
|
189
|
-
}
|
190
|
-
}
|
191
|
-
function getGlobalBranch() {
|
192
|
-
try {
|
193
|
-
return XATA_BRANCH;
|
194
|
-
} catch (err) {
|
195
|
-
return void 0;
|
196
|
-
}
|
197
|
-
}
|
198
|
-
function getDatabaseURL() {
|
199
|
-
try {
|
200
|
-
const { databaseURL } = getEnvironment();
|
201
|
-
return databaseURL;
|
202
|
-
} catch (err) {
|
203
|
-
return void 0;
|
204
|
-
}
|
205
|
-
}
|
206
|
-
function getAPIKey() {
|
207
|
-
try {
|
208
|
-
const { apiKey } = getEnvironment();
|
209
|
-
return apiKey;
|
210
|
-
} catch (err) {
|
211
|
-
return void 0;
|
212
|
-
}
|
213
|
-
}
|
214
|
-
function getBranch() {
|
215
|
-
try {
|
216
|
-
const { branch } = getEnvironment();
|
217
|
-
return branch;
|
218
|
-
} catch (err) {
|
219
|
-
return void 0;
|
220
|
-
}
|
221
|
-
}
|
222
|
-
function buildPreviewBranchName({ org, branch }) {
|
223
|
-
return `preview-${org}-${branch}`;
|
224
|
-
}
|
225
|
-
function getPreviewBranch() {
|
226
|
-
try {
|
227
|
-
const { deployPreview, deployPreviewBranch, vercelGitCommitRef, vercelGitRepoOwner } = getEnvironment();
|
228
|
-
if (deployPreviewBranch)
|
229
|
-
return deployPreviewBranch;
|
230
|
-
switch (deployPreview) {
|
231
|
-
case "vercel": {
|
232
|
-
if (!vercelGitCommitRef || !vercelGitRepoOwner) {
|
233
|
-
console.warn("XATA_PREVIEW=vercel but VERCEL_GIT_COMMIT_REF or VERCEL_GIT_REPO_OWNER is not valid");
|
234
|
-
return void 0;
|
235
|
-
}
|
236
|
-
return buildPreviewBranchName({ org: vercelGitRepoOwner, branch: vercelGitCommitRef });
|
237
|
-
}
|
238
|
-
}
|
239
|
-
return void 0;
|
240
|
-
} catch (err) {
|
241
|
-
return void 0;
|
242
|
-
}
|
243
|
-
}
|
244
|
-
|
245
|
-
var __accessCheck$6 = (obj, member, msg) => {
|
246
|
-
if (!member.has(obj))
|
247
|
-
throw TypeError("Cannot " + msg);
|
248
|
-
};
|
249
|
-
var __privateGet$5 = (obj, member, getter) => {
|
250
|
-
__accessCheck$6(obj, member, "read from private field");
|
251
|
-
return getter ? getter.call(obj) : member.get(obj);
|
1902
|
+
var __typeError$6 = (msg) => {
|
1903
|
+
throw TypeError(msg);
|
252
1904
|
};
|
253
|
-
var
|
254
|
-
|
255
|
-
|
256
|
-
|
257
|
-
|
258
|
-
var
|
259
|
-
__accessCheck$6(obj, member, "write to private field");
|
260
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
261
|
-
return value;
|
262
|
-
};
|
263
|
-
var __privateMethod$4 = (obj, member, method) => {
|
264
|
-
__accessCheck$6(obj, member, "access private method");
|
265
|
-
return method;
|
266
|
-
};
|
267
|
-
var _fetch, _queue, _concurrency, _enqueue, enqueue_fn;
|
1905
|
+
var __accessCheck$6 = (obj, member, msg) => member.has(obj) || __typeError$6("Cannot " + msg);
|
1906
|
+
var __privateGet$5 = (obj, member, getter) => (__accessCheck$6(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
1907
|
+
var __privateAdd$6 = (obj, member, value) => member.has(obj) ? __typeError$6("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
1908
|
+
var __privateSet$4 = (obj, member, value, setter) => (__accessCheck$6(obj, member, "write to private field"), member.set(obj, value), value);
|
1909
|
+
var __privateMethod$4 = (obj, member, method) => (__accessCheck$6(obj, member, "access private method"), method);
|
1910
|
+
var _fetch, _queue, _concurrency, _ApiRequestPool_instances, enqueue_fn;
|
268
1911
|
const REQUEST_TIMEOUT = 5 * 60 * 1e3;
|
269
1912
|
function getFetchImplementation(userFetch) {
|
270
1913
|
const globalFetch = typeof fetch !== "undefined" ? fetch : void 0;
|
@@ -277,10 +1920,10 @@ function getFetchImplementation(userFetch) {
|
|
277
1920
|
}
|
278
1921
|
class ApiRequestPool {
|
279
1922
|
constructor(concurrency = 10) {
|
280
|
-
__privateAdd$6(this,
|
281
|
-
__privateAdd$6(this, _fetch
|
282
|
-
__privateAdd$6(this, _queue
|
283
|
-
__privateAdd$6(this, _concurrency
|
1923
|
+
__privateAdd$6(this, _ApiRequestPool_instances);
|
1924
|
+
__privateAdd$6(this, _fetch);
|
1925
|
+
__privateAdd$6(this, _queue);
|
1926
|
+
__privateAdd$6(this, _concurrency);
|
284
1927
|
__privateSet$4(this, _queue, []);
|
285
1928
|
__privateSet$4(this, _concurrency, concurrency);
|
286
1929
|
this.running = 0;
|
@@ -315,7 +1958,7 @@ class ApiRequestPool {
|
|
315
1958
|
}
|
316
1959
|
return response;
|
317
1960
|
};
|
318
|
-
return __privateMethod$4(this,
|
1961
|
+
return __privateMethod$4(this, _ApiRequestPool_instances, enqueue_fn).call(this, async () => {
|
319
1962
|
return await runRequest();
|
320
1963
|
});
|
321
1964
|
}
|
@@ -323,7 +1966,7 @@ class ApiRequestPool {
|
|
323
1966
|
_fetch = new WeakMap();
|
324
1967
|
_queue = new WeakMap();
|
325
1968
|
_concurrency = new WeakMap();
|
326
|
-
|
1969
|
+
_ApiRequestPool_instances = new WeakSet();
|
327
1970
|
enqueue_fn = function(task) {
|
328
1971
|
const promise = new Promise((resolve) => __privateGet$5(this, _queue).push(resolve)).finally(() => {
|
329
1972
|
this.started--;
|
@@ -526,7 +2169,7 @@ function defaultOnOpen(response) {
|
|
526
2169
|
}
|
527
2170
|
}
|
528
2171
|
|
529
|
-
const VERSION = "0.29.
|
2172
|
+
const VERSION = "0.29.4";
|
530
2173
|
|
531
2174
|
class ErrorWithCause extends Error {
|
532
2175
|
constructor(message, options) {
|
@@ -606,35 +2249,30 @@ function parseProviderString(provider = "production") {
|
|
606
2249
|
return provider;
|
607
2250
|
}
|
608
2251
|
const [main, workspaces] = provider.split(",");
|
609
|
-
if (!main || !workspaces)
|
610
|
-
return null;
|
2252
|
+
if (!main || !workspaces) return null;
|
611
2253
|
return { main, workspaces };
|
612
2254
|
}
|
613
2255
|
function buildProviderString(provider) {
|
614
|
-
if (isHostProviderAlias(provider))
|
615
|
-
return provider;
|
2256
|
+
if (isHostProviderAlias(provider)) return provider;
|
616
2257
|
return `${provider.main},${provider.workspaces}`;
|
617
2258
|
}
|
618
2259
|
function parseWorkspacesUrlParts(url) {
|
619
|
-
if (!isString(url))
|
620
|
-
return null;
|
2260
|
+
if (!isString(url)) return null;
|
621
2261
|
const matches = {
|
622
|
-
production: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.xata\.sh
|
623
|
-
staging: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.staging-xata\.dev
|
624
|
-
dev: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.dev-xata\.dev
|
625
|
-
local: url.match(/(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:(
|
2262
|
+
production: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.xata\.sh\/db\/([^:]+):?(.*)?/),
|
2263
|
+
staging: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.staging-xata\.dev\/db\/([^:]+):?(.*)?/),
|
2264
|
+
dev: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.dev-xata\.dev\/db\/([^:]+):?(.*)?/),
|
2265
|
+
local: url.match(/(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:(?:\d+)\/db\/([^:]+):?(.*)?/)
|
626
2266
|
};
|
627
2267
|
const [host, match] = Object.entries(matches).find(([, match2]) => match2 !== null) ?? [];
|
628
|
-
if (!isHostProviderAlias(host) || !match)
|
629
|
-
|
630
|
-
return { workspace: match[1], region: match[2], host };
|
2268
|
+
if (!isHostProviderAlias(host) || !match) return null;
|
2269
|
+
return { workspace: match[1], region: match[2], database: match[3], branch: match[4], host };
|
631
2270
|
}
|
632
2271
|
|
633
2272
|
const pool = new ApiRequestPool();
|
634
2273
|
const resolveUrl = (url, queryParams = {}, pathParams = {}) => {
|
635
2274
|
const cleanQueryParams = Object.entries(queryParams).reduce((acc, [key, value]) => {
|
636
|
-
if (value === void 0 || value === null)
|
637
|
-
return acc;
|
2275
|
+
if (value === void 0 || value === null) return acc;
|
638
2276
|
return { ...acc, [key]: value };
|
639
2277
|
}, {});
|
640
2278
|
const query = new URLSearchParams(cleanQueryParams).toString();
|
@@ -682,8 +2320,7 @@ function hostHeader(url) {
|
|
682
2320
|
return groups?.host ? { Host: groups.host } : {};
|
683
2321
|
}
|
684
2322
|
async function parseBody(body, headers) {
|
685
|
-
if (!isDefined(body))
|
686
|
-
return void 0;
|
2323
|
+
if (!isDefined(body)) return void 0;
|
687
2324
|
if (isBlob(body) || typeof body.text === "function") {
|
688
2325
|
return body;
|
689
2326
|
}
|
@@ -762,8 +2399,7 @@ async function fetch$1({
|
|
762
2399
|
[TraceAttributes.CLOUDFLARE_RAY_ID]: response.headers?.get("cf-ray") ?? void 0
|
763
2400
|
});
|
764
2401
|
const message = response.headers?.get("x-xata-message");
|
765
|
-
if (message)
|
766
|
-
console.warn(message);
|
2402
|
+
if (message) console.warn(message);
|
767
2403
|
if (response.status === 204) {
|
768
2404
|
return {};
|
769
2405
|
}
|
@@ -847,16 +2483,60 @@ function parseUrl(url) {
|
|
847
2483
|
|
848
2484
|
const dataPlaneFetch = async (options) => fetch$1({ ...options, endpoint: "dataPlane" });
|
849
2485
|
|
850
|
-
const applyMigration = (variables, signal) => dataPlaneFetch({
|
2486
|
+
const applyMigration = (variables, signal) => dataPlaneFetch({
|
2487
|
+
url: "/db/{dbBranchName}/migrations/apply",
|
2488
|
+
method: "post",
|
2489
|
+
...variables,
|
2490
|
+
signal
|
2491
|
+
});
|
2492
|
+
const startMigration = (variables, signal) => dataPlaneFetch({
|
2493
|
+
url: "/db/{dbBranchName}/migrations/start",
|
2494
|
+
method: "post",
|
2495
|
+
...variables,
|
2496
|
+
signal
|
2497
|
+
});
|
2498
|
+
const completeMigration = (variables, signal) => dataPlaneFetch({
|
2499
|
+
url: "/db/{dbBranchName}/migrations/complete",
|
2500
|
+
method: "post",
|
2501
|
+
...variables,
|
2502
|
+
signal
|
2503
|
+
});
|
2504
|
+
const rollbackMigration = (variables, signal) => dataPlaneFetch({
|
2505
|
+
url: "/db/{dbBranchName}/migrations/rollback",
|
2506
|
+
method: "post",
|
2507
|
+
...variables,
|
2508
|
+
signal
|
2509
|
+
});
|
851
2510
|
const adaptTable = (variables, signal) => dataPlaneFetch({
|
852
2511
|
url: "/db/{dbBranchName}/migrations/adapt/{tableName}",
|
853
2512
|
method: "post",
|
854
2513
|
...variables,
|
855
2514
|
signal
|
856
2515
|
});
|
857
|
-
const
|
858
|
-
|
859
|
-
|
2516
|
+
const adaptAllTables = (variables, signal) => dataPlaneFetch({
|
2517
|
+
url: "/db/{dbBranchName}/migrations/adapt",
|
2518
|
+
method: "post",
|
2519
|
+
...variables,
|
2520
|
+
signal
|
2521
|
+
});
|
2522
|
+
const getBranchMigrationJobStatus = (variables, signal) => dataPlaneFetch({
|
2523
|
+
url: "/db/{dbBranchName}/migrations/status",
|
2524
|
+
method: "get",
|
2525
|
+
...variables,
|
2526
|
+
signal
|
2527
|
+
});
|
2528
|
+
const getMigrationJobStatus = (variables, signal) => dataPlaneFetch({
|
2529
|
+
url: "/db/{dbBranchName}/migrations/jobs/{jobId}",
|
2530
|
+
method: "get",
|
2531
|
+
...variables,
|
2532
|
+
signal
|
2533
|
+
});
|
2534
|
+
const getMigrationHistory = (variables, signal) => dataPlaneFetch({
|
2535
|
+
url: "/db/{dbBranchName}/migrations/history",
|
2536
|
+
method: "get",
|
2537
|
+
...variables,
|
2538
|
+
signal
|
2539
|
+
});
|
860
2540
|
const getBranchList = (variables, signal) => dataPlaneFetch({
|
861
2541
|
url: "/dbs/{dbName}",
|
862
2542
|
method: "get",
|
@@ -869,82 +2549,167 @@ const getDatabaseSettings = (variables, signal) => dataPlaneFetch({
|
|
869
2549
|
...variables,
|
870
2550
|
signal
|
871
2551
|
});
|
872
|
-
const updateDatabaseSettings = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/settings", method: "patch", ...variables, signal });
|
873
|
-
const getBranchDetails = (variables, signal) => dataPlaneFetch({
|
874
|
-
url: "/db/{dbBranchName}",
|
2552
|
+
const updateDatabaseSettings = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/settings", method: "patch", ...variables, signal });
|
2553
|
+
const getBranchDetails = (variables, signal) => dataPlaneFetch({
|
2554
|
+
url: "/db/{dbBranchName}",
|
2555
|
+
method: "get",
|
2556
|
+
...variables,
|
2557
|
+
signal
|
2558
|
+
});
|
2559
|
+
const createBranch = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}", method: "put", ...variables, signal });
|
2560
|
+
const deleteBranch = (variables, signal) => dataPlaneFetch({
|
2561
|
+
url: "/db/{dbBranchName}",
|
2562
|
+
method: "delete",
|
2563
|
+
...variables,
|
2564
|
+
signal
|
2565
|
+
});
|
2566
|
+
const getSchema = (variables, signal) => dataPlaneFetch({
|
2567
|
+
url: "/db/{dbBranchName}/schema",
|
2568
|
+
method: "get",
|
2569
|
+
...variables,
|
2570
|
+
signal
|
2571
|
+
});
|
2572
|
+
const copyBranch = (variables, signal) => dataPlaneFetch({
|
2573
|
+
url: "/db/{dbBranchName}/copy",
|
2574
|
+
method: "post",
|
2575
|
+
...variables,
|
2576
|
+
signal
|
2577
|
+
});
|
2578
|
+
const updateBranchMetadata = (variables, signal) => dataPlaneFetch({
|
2579
|
+
url: "/db/{dbBranchName}/metadata",
|
2580
|
+
method: "put",
|
2581
|
+
...variables,
|
2582
|
+
signal
|
2583
|
+
});
|
2584
|
+
const getBranchMetadata = (variables, signal) => dataPlaneFetch({
|
2585
|
+
url: "/db/{dbBranchName}/metadata",
|
2586
|
+
method: "get",
|
2587
|
+
...variables,
|
2588
|
+
signal
|
2589
|
+
});
|
2590
|
+
const getBranchStats = (variables, signal) => dataPlaneFetch({
|
2591
|
+
url: "/db/{dbBranchName}/stats",
|
2592
|
+
method: "get",
|
2593
|
+
...variables,
|
2594
|
+
signal
|
2595
|
+
});
|
2596
|
+
const getGitBranchesMapping = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "get", ...variables, signal });
|
2597
|
+
const addGitBranchesEntry = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "post", ...variables, signal });
|
2598
|
+
const removeGitBranchesEntry = (variables, signal) => dataPlaneFetch({
|
2599
|
+
url: "/dbs/{dbName}/gitBranches",
|
2600
|
+
method: "delete",
|
2601
|
+
...variables,
|
2602
|
+
signal
|
2603
|
+
});
|
2604
|
+
const resolveBranch = (variables, signal) => dataPlaneFetch({
|
2605
|
+
url: "/dbs/{dbName}/resolveBranch",
|
2606
|
+
method: "get",
|
2607
|
+
...variables,
|
2608
|
+
signal
|
2609
|
+
});
|
2610
|
+
const getBranchMigrationHistory = (variables, signal) => dataPlaneFetch({
|
2611
|
+
url: "/db/{dbBranchName}/migrations",
|
2612
|
+
method: "get",
|
2613
|
+
...variables,
|
2614
|
+
signal
|
2615
|
+
});
|
2616
|
+
const getBranchMigrationPlan = (variables, signal) => dataPlaneFetch({
|
2617
|
+
url: "/db/{dbBranchName}/migrations/plan",
|
2618
|
+
method: "post",
|
2619
|
+
...variables,
|
2620
|
+
signal
|
2621
|
+
});
|
2622
|
+
const executeBranchMigrationPlan = (variables, signal) => dataPlaneFetch({
|
2623
|
+
url: "/db/{dbBranchName}/migrations/execute",
|
2624
|
+
method: "post",
|
2625
|
+
...variables,
|
2626
|
+
signal
|
2627
|
+
});
|
2628
|
+
const queryMigrationRequests = (variables, signal) => dataPlaneFetch({
|
2629
|
+
url: "/dbs/{dbName}/migrations/query",
|
2630
|
+
method: "post",
|
2631
|
+
...variables,
|
2632
|
+
signal
|
2633
|
+
});
|
2634
|
+
const createMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations", method: "post", ...variables, signal });
|
2635
|
+
const getMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2636
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}",
|
2637
|
+
method: "get",
|
2638
|
+
...variables,
|
2639
|
+
signal
|
2640
|
+
});
|
2641
|
+
const updateMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2642
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}",
|
2643
|
+
method: "patch",
|
2644
|
+
...variables,
|
2645
|
+
signal
|
2646
|
+
});
|
2647
|
+
const listMigrationRequestsCommits = (variables, signal) => dataPlaneFetch({
|
2648
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/commits",
|
2649
|
+
method: "post",
|
2650
|
+
...variables,
|
2651
|
+
signal
|
2652
|
+
});
|
2653
|
+
const compareMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2654
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/compare",
|
2655
|
+
method: "post",
|
2656
|
+
...variables,
|
2657
|
+
signal
|
2658
|
+
});
|
2659
|
+
const getMigrationRequestIsMerged = (variables, signal) => dataPlaneFetch({
|
2660
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
|
875
2661
|
method: "get",
|
876
2662
|
...variables,
|
877
2663
|
signal
|
878
2664
|
});
|
879
|
-
const
|
880
|
-
|
881
|
-
|
882
|
-
method: "delete",
|
2665
|
+
const mergeMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2666
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
|
2667
|
+
method: "post",
|
883
2668
|
...variables,
|
884
2669
|
signal
|
885
2670
|
});
|
886
|
-
const
|
887
|
-
url: "/db/{dbBranchName}/schema",
|
888
|
-
method: "
|
2671
|
+
const getBranchSchemaHistory = (variables, signal) => dataPlaneFetch({
|
2672
|
+
url: "/db/{dbBranchName}/schema/history",
|
2673
|
+
method: "post",
|
889
2674
|
...variables,
|
890
2675
|
signal
|
891
2676
|
});
|
892
|
-
const
|
893
|
-
url: "/db/{dbBranchName}/
|
2677
|
+
const compareBranchWithUserSchema = (variables, signal) => dataPlaneFetch({
|
2678
|
+
url: "/db/{dbBranchName}/schema/compare",
|
894
2679
|
method: "post",
|
895
2680
|
...variables,
|
896
2681
|
signal
|
897
2682
|
});
|
898
|
-
const
|
899
|
-
url: "/db/{dbBranchName}/
|
900
|
-
method: "
|
2683
|
+
const compareBranchSchemas = (variables, signal) => dataPlaneFetch({
|
2684
|
+
url: "/db/{dbBranchName}/schema/compare/{branchName}",
|
2685
|
+
method: "post",
|
901
2686
|
...variables,
|
902
2687
|
signal
|
903
2688
|
});
|
904
|
-
const
|
905
|
-
url: "/db/{dbBranchName}/
|
906
|
-
method: "
|
2689
|
+
const updateBranchSchema = (variables, signal) => dataPlaneFetch({
|
2690
|
+
url: "/db/{dbBranchName}/schema/update",
|
2691
|
+
method: "post",
|
907
2692
|
...variables,
|
908
2693
|
signal
|
909
2694
|
});
|
910
|
-
const
|
911
|
-
url: "/db/{dbBranchName}/
|
912
|
-
method: "
|
2695
|
+
const previewBranchSchemaEdit = (variables, signal) => dataPlaneFetch({
|
2696
|
+
url: "/db/{dbBranchName}/schema/preview",
|
2697
|
+
method: "post",
|
913
2698
|
...variables,
|
914
2699
|
signal
|
915
2700
|
});
|
916
|
-
const
|
917
|
-
|
918
|
-
|
919
|
-
const resolveBranch = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/resolveBranch", method: "get", ...variables, signal });
|
920
|
-
const getBranchMigrationHistory = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations", method: "get", ...variables, signal });
|
921
|
-
const getBranchMigrationPlan = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/plan", method: "post", ...variables, signal });
|
922
|
-
const executeBranchMigrationPlan = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/execute", method: "post", ...variables, signal });
|
923
|
-
const queryMigrationRequests = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/query", method: "post", ...variables, signal });
|
924
|
-
const createMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations", method: "post", ...variables, signal });
|
925
|
-
const getMigrationRequest = (variables, signal) => dataPlaneFetch({
|
926
|
-
url: "/dbs/{dbName}/migrations/{mrNumber}",
|
927
|
-
method: "get",
|
2701
|
+
const applyBranchSchemaEdit = (variables, signal) => dataPlaneFetch({
|
2702
|
+
url: "/db/{dbBranchName}/schema/apply",
|
2703
|
+
method: "post",
|
928
2704
|
...variables,
|
929
2705
|
signal
|
930
2706
|
});
|
931
|
-
const
|
932
|
-
|
933
|
-
const compareMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/{mrNumber}/compare", method: "post", ...variables, signal });
|
934
|
-
const getMigrationRequestIsMerged = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/{mrNumber}/merge", method: "get", ...variables, signal });
|
935
|
-
const mergeMigrationRequest = (variables, signal) => dataPlaneFetch({
|
936
|
-
url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
|
2707
|
+
const pushBranchMigrations = (variables, signal) => dataPlaneFetch({
|
2708
|
+
url: "/db/{dbBranchName}/schema/push",
|
937
2709
|
method: "post",
|
938
2710
|
...variables,
|
939
2711
|
signal
|
940
2712
|
});
|
941
|
-
const getBranchSchemaHistory = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/history", method: "post", ...variables, signal });
|
942
|
-
const compareBranchWithUserSchema = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/compare", method: "post", ...variables, signal });
|
943
|
-
const compareBranchSchemas = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/compare/{branchName}", method: "post", ...variables, signal });
|
944
|
-
const updateBranchSchema = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/update", method: "post", ...variables, signal });
|
945
|
-
const previewBranchSchemaEdit = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/preview", method: "post", ...variables, signal });
|
946
|
-
const applyBranchSchemaEdit = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/apply", method: "post", ...variables, signal });
|
947
|
-
const pushBranchMigrations = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/push", method: "post", ...variables, signal });
|
948
2713
|
const createTable = (variables, signal) => dataPlaneFetch({
|
949
2714
|
url: "/db/{dbBranchName}/tables/{tableName}",
|
950
2715
|
method: "put",
|
@@ -957,14 +2722,24 @@ const deleteTable = (variables, signal) => dataPlaneFetch({
|
|
957
2722
|
...variables,
|
958
2723
|
signal
|
959
2724
|
});
|
960
|
-
const updateTable = (variables, signal) => dataPlaneFetch({
|
2725
|
+
const updateTable = (variables, signal) => dataPlaneFetch({
|
2726
|
+
url: "/db/{dbBranchName}/tables/{tableName}",
|
2727
|
+
method: "patch",
|
2728
|
+
...variables,
|
2729
|
+
signal
|
2730
|
+
});
|
961
2731
|
const getTableSchema = (variables, signal) => dataPlaneFetch({
|
962
2732
|
url: "/db/{dbBranchName}/tables/{tableName}/schema",
|
963
2733
|
method: "get",
|
964
2734
|
...variables,
|
965
2735
|
signal
|
966
2736
|
});
|
967
|
-
const setTableSchema = (variables, signal) => dataPlaneFetch({
|
2737
|
+
const setTableSchema = (variables, signal) => dataPlaneFetch({
|
2738
|
+
url: "/db/{dbBranchName}/tables/{tableName}/schema",
|
2739
|
+
method: "put",
|
2740
|
+
...variables,
|
2741
|
+
signal
|
2742
|
+
});
|
968
2743
|
const getTableColumns = (variables, signal) => dataPlaneFetch({
|
969
2744
|
url: "/db/{dbBranchName}/tables/{tableName}/columns",
|
970
2745
|
method: "get",
|
@@ -972,7 +2747,12 @@ const getTableColumns = (variables, signal) => dataPlaneFetch({
|
|
972
2747
|
signal
|
973
2748
|
});
|
974
2749
|
const addTableColumn = (variables, signal) => dataPlaneFetch(
|
975
|
-
{
|
2750
|
+
{
|
2751
|
+
url: "/db/{dbBranchName}/tables/{tableName}/columns",
|
2752
|
+
method: "post",
|
2753
|
+
...variables,
|
2754
|
+
signal
|
2755
|
+
}
|
976
2756
|
);
|
977
2757
|
const getColumn = (variables, signal) => dataPlaneFetch({
|
978
2758
|
url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
|
@@ -980,15 +2760,30 @@ const getColumn = (variables, signal) => dataPlaneFetch({
|
|
980
2760
|
...variables,
|
981
2761
|
signal
|
982
2762
|
});
|
983
|
-
const updateColumn = (variables, signal) => dataPlaneFetch({
|
2763
|
+
const updateColumn = (variables, signal) => dataPlaneFetch({
|
2764
|
+
url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
|
2765
|
+
method: "patch",
|
2766
|
+
...variables,
|
2767
|
+
signal
|
2768
|
+
});
|
984
2769
|
const deleteColumn = (variables, signal) => dataPlaneFetch({
|
985
2770
|
url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
|
986
2771
|
method: "delete",
|
987
2772
|
...variables,
|
988
2773
|
signal
|
989
2774
|
});
|
990
|
-
const branchTransaction = (variables, signal) => dataPlaneFetch({
|
991
|
-
|
2775
|
+
const branchTransaction = (variables, signal) => dataPlaneFetch({
|
2776
|
+
url: "/db/{dbBranchName}/transaction",
|
2777
|
+
method: "post",
|
2778
|
+
...variables,
|
2779
|
+
signal
|
2780
|
+
});
|
2781
|
+
const insertRecord = (variables, signal) => dataPlaneFetch({
|
2782
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data",
|
2783
|
+
method: "post",
|
2784
|
+
...variables,
|
2785
|
+
signal
|
2786
|
+
});
|
992
2787
|
const getFileItem = (variables, signal) => dataPlaneFetch({
|
993
2788
|
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}/column/{columnName}/file/{fileId}",
|
994
2789
|
method: "get",
|
@@ -1031,11 +2826,36 @@ const getRecord = (variables, signal) => dataPlaneFetch({
|
|
1031
2826
|
...variables,
|
1032
2827
|
signal
|
1033
2828
|
});
|
1034
|
-
const insertRecordWithID = (variables, signal) => dataPlaneFetch({
|
1035
|
-
|
1036
|
-
|
1037
|
-
|
1038
|
-
|
2829
|
+
const insertRecordWithID = (variables, signal) => dataPlaneFetch({
|
2830
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2831
|
+
method: "put",
|
2832
|
+
...variables,
|
2833
|
+
signal
|
2834
|
+
});
|
2835
|
+
const updateRecordWithID = (variables, signal) => dataPlaneFetch({
|
2836
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2837
|
+
method: "patch",
|
2838
|
+
...variables,
|
2839
|
+
signal
|
2840
|
+
});
|
2841
|
+
const upsertRecordWithID = (variables, signal) => dataPlaneFetch({
|
2842
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2843
|
+
method: "post",
|
2844
|
+
...variables,
|
2845
|
+
signal
|
2846
|
+
});
|
2847
|
+
const deleteRecord = (variables, signal) => dataPlaneFetch({
|
2848
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2849
|
+
method: "delete",
|
2850
|
+
...variables,
|
2851
|
+
signal
|
2852
|
+
});
|
2853
|
+
const bulkInsertTableRecords = (variables, signal) => dataPlaneFetch({
|
2854
|
+
url: "/db/{dbBranchName}/tables/{tableName}/bulk",
|
2855
|
+
method: "post",
|
2856
|
+
...variables,
|
2857
|
+
signal
|
2858
|
+
});
|
1039
2859
|
const queryTable = (variables, signal) => dataPlaneFetch({
|
1040
2860
|
url: "/db/{dbBranchName}/tables/{tableName}/query",
|
1041
2861
|
method: "post",
|
@@ -1054,16 +2874,36 @@ const searchTable = (variables, signal) => dataPlaneFetch({
|
|
1054
2874
|
...variables,
|
1055
2875
|
signal
|
1056
2876
|
});
|
1057
|
-
const vectorSearchTable = (variables, signal) => dataPlaneFetch({
|
2877
|
+
const vectorSearchTable = (variables, signal) => dataPlaneFetch({
|
2878
|
+
url: "/db/{dbBranchName}/tables/{tableName}/vectorSearch",
|
2879
|
+
method: "post",
|
2880
|
+
...variables,
|
2881
|
+
signal
|
2882
|
+
});
|
1058
2883
|
const askTable = (variables, signal) => dataPlaneFetch({
|
1059
2884
|
url: "/db/{dbBranchName}/tables/{tableName}/ask",
|
1060
2885
|
method: "post",
|
1061
2886
|
...variables,
|
1062
2887
|
signal
|
1063
2888
|
});
|
1064
|
-
const askTableSession = (variables, signal) => dataPlaneFetch({
|
1065
|
-
|
1066
|
-
|
2889
|
+
const askTableSession = (variables, signal) => dataPlaneFetch({
|
2890
|
+
url: "/db/{dbBranchName}/tables/{tableName}/ask/{sessionId}",
|
2891
|
+
method: "post",
|
2892
|
+
...variables,
|
2893
|
+
signal
|
2894
|
+
});
|
2895
|
+
const summarizeTable = (variables, signal) => dataPlaneFetch({
|
2896
|
+
url: "/db/{dbBranchName}/tables/{tableName}/summarize",
|
2897
|
+
method: "post",
|
2898
|
+
...variables,
|
2899
|
+
signal
|
2900
|
+
});
|
2901
|
+
const aggregateTable = (variables, signal) => dataPlaneFetch({
|
2902
|
+
url: "/db/{dbBranchName}/tables/{tableName}/aggregate",
|
2903
|
+
method: "post",
|
2904
|
+
...variables,
|
2905
|
+
signal
|
2906
|
+
});
|
1067
2907
|
const fileAccess = (variables, signal) => dataPlaneFetch({
|
1068
2908
|
url: "/file/{fileId}",
|
1069
2909
|
method: "get",
|
@@ -1085,7 +2925,11 @@ const sqlQuery = (variables, signal) => dataPlaneFetch({
|
|
1085
2925
|
const operationsByTag$2 = {
|
1086
2926
|
migrations: {
|
1087
2927
|
applyMigration,
|
2928
|
+
startMigration,
|
2929
|
+
completeMigration,
|
2930
|
+
rollbackMigration,
|
1088
2931
|
adaptTable,
|
2932
|
+
adaptAllTables,
|
1089
2933
|
getBranchMigrationJobStatus,
|
1090
2934
|
getMigrationJobStatus,
|
1091
2935
|
getMigrationHistory,
|
@@ -1148,7 +2992,16 @@ const operationsByTag$2 = {
|
|
1148
2992
|
deleteRecord,
|
1149
2993
|
bulkInsertTableRecords
|
1150
2994
|
},
|
1151
|
-
files: {
|
2995
|
+
files: {
|
2996
|
+
getFileItem,
|
2997
|
+
putFileItem,
|
2998
|
+
deleteFileItem,
|
2999
|
+
getFile,
|
3000
|
+
putFile,
|
3001
|
+
deleteFile,
|
3002
|
+
fileAccess,
|
3003
|
+
fileUpload
|
3004
|
+
},
|
1152
3005
|
searchAndFilter: {
|
1153
3006
|
queryTable,
|
1154
3007
|
searchBranch,
|
@@ -1226,7 +3079,12 @@ const deleteOAuthAccessToken = (variables, signal) => controlPlaneFetch({
|
|
1226
3079
|
...variables,
|
1227
3080
|
signal
|
1228
3081
|
});
|
1229
|
-
const updateOAuthAccessToken = (variables, signal) => controlPlaneFetch({
|
3082
|
+
const updateOAuthAccessToken = (variables, signal) => controlPlaneFetch({
|
3083
|
+
url: "/user/oauth/tokens/{token}",
|
3084
|
+
method: "patch",
|
3085
|
+
...variables,
|
3086
|
+
signal
|
3087
|
+
});
|
1230
3088
|
const getWorkspacesList = (variables, signal) => controlPlaneFetch({
|
1231
3089
|
url: "/workspaces",
|
1232
3090
|
method: "get",
|
@@ -1257,47 +3115,150 @@ const deleteWorkspace = (variables, signal) => controlPlaneFetch({
|
|
1257
3115
|
...variables,
|
1258
3116
|
signal
|
1259
3117
|
});
|
1260
|
-
const
|
1261
|
-
|
3118
|
+
const getWorkspaceSettings = (variables, signal) => controlPlaneFetch({
|
3119
|
+
url: "/workspaces/{workspaceId}/settings",
|
3120
|
+
method: "get",
|
3121
|
+
...variables,
|
3122
|
+
signal
|
3123
|
+
});
|
3124
|
+
const updateWorkspaceSettings = (variables, signal) => controlPlaneFetch({
|
3125
|
+
url: "/workspaces/{workspaceId}/settings",
|
3126
|
+
method: "patch",
|
3127
|
+
...variables,
|
3128
|
+
signal
|
3129
|
+
});
|
3130
|
+
const getWorkspaceMembersList = (variables, signal) => controlPlaneFetch({
|
3131
|
+
url: "/workspaces/{workspaceId}/members",
|
3132
|
+
method: "get",
|
3133
|
+
...variables,
|
3134
|
+
signal
|
3135
|
+
});
|
3136
|
+
const updateWorkspaceMemberRole = (variables, signal) => controlPlaneFetch({
|
3137
|
+
url: "/workspaces/{workspaceId}/members/{userId}",
|
3138
|
+
method: "put",
|
3139
|
+
...variables,
|
3140
|
+
signal
|
3141
|
+
});
|
1262
3142
|
const removeWorkspaceMember = (variables, signal) => controlPlaneFetch({
|
1263
3143
|
url: "/workspaces/{workspaceId}/members/{userId}",
|
1264
3144
|
method: "delete",
|
1265
3145
|
...variables,
|
1266
3146
|
signal
|
1267
3147
|
});
|
1268
|
-
const inviteWorkspaceMember = (variables, signal) => controlPlaneFetch({
|
1269
|
-
|
1270
|
-
|
1271
|
-
|
1272
|
-
|
1273
|
-
|
1274
|
-
const
|
3148
|
+
const inviteWorkspaceMember = (variables, signal) => controlPlaneFetch({
|
3149
|
+
url: "/workspaces/{workspaceId}/invites",
|
3150
|
+
method: "post",
|
3151
|
+
...variables,
|
3152
|
+
signal
|
3153
|
+
});
|
3154
|
+
const updateWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3155
|
+
url: "/workspaces/{workspaceId}/invites/{inviteId}",
|
3156
|
+
method: "patch",
|
3157
|
+
...variables,
|
3158
|
+
signal
|
3159
|
+
});
|
3160
|
+
const cancelWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3161
|
+
url: "/workspaces/{workspaceId}/invites/{inviteId}",
|
3162
|
+
method: "delete",
|
3163
|
+
...variables,
|
3164
|
+
signal
|
3165
|
+
});
|
3166
|
+
const acceptWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3167
|
+
url: "/workspaces/{workspaceId}/invites/{inviteKey}/accept",
|
3168
|
+
method: "post",
|
3169
|
+
...variables,
|
3170
|
+
signal
|
3171
|
+
});
|
3172
|
+
const resendWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3173
|
+
url: "/workspaces/{workspaceId}/invites/{inviteId}/resend",
|
3174
|
+
method: "post",
|
3175
|
+
...variables,
|
3176
|
+
signal
|
3177
|
+
});
|
3178
|
+
const listClusters = (variables, signal) => controlPlaneFetch({
|
3179
|
+
url: "/workspaces/{workspaceId}/clusters",
|
3180
|
+
method: "get",
|
3181
|
+
...variables,
|
3182
|
+
signal
|
3183
|
+
});
|
3184
|
+
const createCluster = (variables, signal) => controlPlaneFetch({
|
3185
|
+
url: "/workspaces/{workspaceId}/clusters",
|
3186
|
+
method: "post",
|
3187
|
+
...variables,
|
3188
|
+
signal
|
3189
|
+
});
|
1275
3190
|
const getCluster = (variables, signal) => controlPlaneFetch({
|
1276
3191
|
url: "/workspaces/{workspaceId}/clusters/{clusterId}",
|
1277
3192
|
method: "get",
|
1278
3193
|
...variables,
|
1279
3194
|
signal
|
1280
3195
|
});
|
1281
|
-
const updateCluster = (variables, signal) => controlPlaneFetch({
|
3196
|
+
const updateCluster = (variables, signal) => controlPlaneFetch({
|
3197
|
+
url: "/workspaces/{workspaceId}/clusters/{clusterId}",
|
3198
|
+
method: "patch",
|
3199
|
+
...variables,
|
3200
|
+
signal
|
3201
|
+
});
|
3202
|
+
const deleteCluster = (variables, signal) => controlPlaneFetch({
|
3203
|
+
url: "/workspaces/{workspaceId}/clusters/{clusterId}",
|
3204
|
+
method: "delete",
|
3205
|
+
...variables,
|
3206
|
+
signal
|
3207
|
+
});
|
1282
3208
|
const getDatabaseList = (variables, signal) => controlPlaneFetch({
|
1283
3209
|
url: "/workspaces/{workspaceId}/dbs",
|
1284
3210
|
method: "get",
|
1285
3211
|
...variables,
|
1286
3212
|
signal
|
1287
3213
|
});
|
1288
|
-
const createDatabase = (variables, signal) => controlPlaneFetch({
|
3214
|
+
const createDatabase = (variables, signal) => controlPlaneFetch({
|
3215
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
3216
|
+
method: "put",
|
3217
|
+
...variables,
|
3218
|
+
signal
|
3219
|
+
});
|
1289
3220
|
const deleteDatabase = (variables, signal) => controlPlaneFetch({
|
1290
3221
|
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
1291
3222
|
method: "delete",
|
1292
3223
|
...variables,
|
1293
3224
|
signal
|
1294
3225
|
});
|
1295
|
-
const getDatabaseMetadata = (variables, signal) => controlPlaneFetch({
|
1296
|
-
|
1297
|
-
|
1298
|
-
|
1299
|
-
|
1300
|
-
|
3226
|
+
const getDatabaseMetadata = (variables, signal) => controlPlaneFetch({
|
3227
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
3228
|
+
method: "get",
|
3229
|
+
...variables,
|
3230
|
+
signal
|
3231
|
+
});
|
3232
|
+
const updateDatabaseMetadata = (variables, signal) => controlPlaneFetch({
|
3233
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
3234
|
+
method: "patch",
|
3235
|
+
...variables,
|
3236
|
+
signal
|
3237
|
+
});
|
3238
|
+
const renameDatabase = (variables, signal) => controlPlaneFetch({
|
3239
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/rename",
|
3240
|
+
method: "post",
|
3241
|
+
...variables,
|
3242
|
+
signal
|
3243
|
+
});
|
3244
|
+
const getDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
|
3245
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
|
3246
|
+
method: "get",
|
3247
|
+
...variables,
|
3248
|
+
signal
|
3249
|
+
});
|
3250
|
+
const updateDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
|
3251
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
|
3252
|
+
method: "put",
|
3253
|
+
...variables,
|
3254
|
+
signal
|
3255
|
+
});
|
3256
|
+
const deleteDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
|
3257
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
|
3258
|
+
method: "delete",
|
3259
|
+
...variables,
|
3260
|
+
signal
|
3261
|
+
});
|
1301
3262
|
const listRegions = (variables, signal) => controlPlaneFetch({
|
1302
3263
|
url: "/workspaces/{workspaceId}/regions",
|
1303
3264
|
method: "get",
|
@@ -1322,6 +3283,8 @@ const operationsByTag$1 = {
|
|
1322
3283
|
getWorkspace,
|
1323
3284
|
updateWorkspace,
|
1324
3285
|
deleteWorkspace,
|
3286
|
+
getWorkspaceSettings,
|
3287
|
+
updateWorkspaceSettings,
|
1325
3288
|
getWorkspaceMembersList,
|
1326
3289
|
updateWorkspaceMemberRole,
|
1327
3290
|
removeWorkspaceMember
|
@@ -1333,7 +3296,13 @@ const operationsByTag$1 = {
|
|
1333
3296
|
acceptWorkspaceMemberInvite,
|
1334
3297
|
resendWorkspaceMemberInvite
|
1335
3298
|
},
|
1336
|
-
xbcontrolOther: {
|
3299
|
+
xbcontrolOther: {
|
3300
|
+
listClusters,
|
3301
|
+
createCluster,
|
3302
|
+
getCluster,
|
3303
|
+
updateCluster,
|
3304
|
+
deleteCluster
|
3305
|
+
},
|
1337
3306
|
databases: {
|
1338
3307
|
getDatabaseList,
|
1339
3308
|
createDatabase,
|
@@ -1353,7 +3322,7 @@ const operationsByTag = deepMerge(operationsByTag$2, operationsByTag$1);
|
|
1353
3322
|
const buildApiClient = () => class {
|
1354
3323
|
constructor(options = {}) {
|
1355
3324
|
const provider = options.host ?? "production";
|
1356
|
-
const apiKey = options.apiKey
|
3325
|
+
const apiKey = options.apiKey;
|
1357
3326
|
const trace = options.trace ?? defaultTrace;
|
1358
3327
|
const clientID = generateUUID();
|
1359
3328
|
if (!apiKey) {
|
@@ -1420,8 +3389,7 @@ function buildTransformString(transformations) {
|
|
1420
3389
|
).join(",");
|
1421
3390
|
}
|
1422
3391
|
function transformImage(url, ...transformations) {
|
1423
|
-
if (!isDefined(url))
|
1424
|
-
return void 0;
|
3392
|
+
if (!isDefined(url)) return void 0;
|
1425
3393
|
const newTransformations = buildTransformString(transformations);
|
1426
3394
|
const { hostname, pathname, search } = new URL(url);
|
1427
3395
|
const pathParts = pathname.split("/");
|
@@ -1534,8 +3502,7 @@ class XataFile {
|
|
1534
3502
|
}
|
1535
3503
|
}
|
1536
3504
|
const parseInputFileEntry = async (entry) => {
|
1537
|
-
if (!isDefined(entry))
|
1538
|
-
return null;
|
3505
|
+
if (!isDefined(entry)) return null;
|
1539
3506
|
const { id, name, mediaType, base64Content, enablePublicUrl, signedUrlTimeout, uploadUrlTimeout } = await entry;
|
1540
3507
|
return compactObject({
|
1541
3508
|
id,
|
@@ -1550,24 +3517,19 @@ const parseInputFileEntry = async (entry) => {
|
|
1550
3517
|
};
|
1551
3518
|
|
1552
3519
|
function cleanFilter(filter) {
|
1553
|
-
if (!isDefined(filter))
|
1554
|
-
|
1555
|
-
if (!isObject(filter))
|
1556
|
-
return filter;
|
3520
|
+
if (!isDefined(filter)) return void 0;
|
3521
|
+
if (!isObject(filter)) return filter;
|
1557
3522
|
const values = Object.fromEntries(
|
1558
3523
|
Object.entries(filter).reduce((acc, [key, value]) => {
|
1559
|
-
if (!isDefined(value))
|
1560
|
-
return acc;
|
3524
|
+
if (!isDefined(value)) return acc;
|
1561
3525
|
if (Array.isArray(value)) {
|
1562
3526
|
const clean = value.map((item) => cleanFilter(item)).filter((item) => isDefined(item));
|
1563
|
-
if (clean.length === 0)
|
1564
|
-
return acc;
|
3527
|
+
if (clean.length === 0) return acc;
|
1565
3528
|
return [...acc, [key, clean]];
|
1566
3529
|
}
|
1567
3530
|
if (isObject(value)) {
|
1568
3531
|
const clean = cleanFilter(value);
|
1569
|
-
if (!isDefined(clean))
|
1570
|
-
return acc;
|
3532
|
+
if (!isDefined(clean)) return acc;
|
1571
3533
|
return [...acc, [key, clean]];
|
1572
3534
|
}
|
1573
3535
|
return [...acc, [key, value]];
|
@@ -1577,10 +3539,8 @@ function cleanFilter(filter) {
|
|
1577
3539
|
}
|
1578
3540
|
|
1579
3541
|
function stringifyJson(value) {
|
1580
|
-
if (!isDefined(value))
|
1581
|
-
|
1582
|
-
if (isString(value))
|
1583
|
-
return value;
|
3542
|
+
if (!isDefined(value)) return value;
|
3543
|
+
if (isString(value)) return value;
|
1584
3544
|
try {
|
1585
3545
|
return JSON.stringify(value);
|
1586
3546
|
} catch (e) {
|
@@ -1595,28 +3555,17 @@ function parseJson(value) {
|
|
1595
3555
|
}
|
1596
3556
|
}
|
1597
3557
|
|
1598
|
-
var
|
1599
|
-
|
1600
|
-
throw TypeError("Cannot " + msg);
|
1601
|
-
};
|
1602
|
-
var __privateGet$4 = (obj, member, getter) => {
|
1603
|
-
__accessCheck$5(obj, member, "read from private field");
|
1604
|
-
return getter ? getter.call(obj) : member.get(obj);
|
1605
|
-
};
|
1606
|
-
var __privateAdd$5 = (obj, member, value) => {
|
1607
|
-
if (member.has(obj))
|
1608
|
-
throw TypeError("Cannot add the same private member more than once");
|
1609
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
1610
|
-
};
|
1611
|
-
var __privateSet$3 = (obj, member, value, setter) => {
|
1612
|
-
__accessCheck$5(obj, member, "write to private field");
|
1613
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
1614
|
-
return value;
|
3558
|
+
var __typeError$5 = (msg) => {
|
3559
|
+
throw TypeError(msg);
|
1615
3560
|
};
|
3561
|
+
var __accessCheck$5 = (obj, member, msg) => member.has(obj) || __typeError$5("Cannot " + msg);
|
3562
|
+
var __privateGet$4 = (obj, member, getter) => (__accessCheck$5(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
3563
|
+
var __privateAdd$5 = (obj, member, value) => member.has(obj) ? __typeError$5("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
3564
|
+
var __privateSet$3 = (obj, member, value, setter) => (__accessCheck$5(obj, member, "write to private field"), member.set(obj, value), value);
|
1616
3565
|
var _query, _page;
|
1617
3566
|
class Page {
|
1618
3567
|
constructor(query, meta, records = []) {
|
1619
|
-
__privateAdd$5(this, _query
|
3568
|
+
__privateAdd$5(this, _query);
|
1620
3569
|
__privateSet$3(this, _query, query);
|
1621
3570
|
this.meta = meta;
|
1622
3571
|
this.records = new PageRecordArray(this, records);
|
@@ -1703,7 +3652,7 @@ class RecordArray extends Array {
|
|
1703
3652
|
const _PageRecordArray = class _PageRecordArray extends Array {
|
1704
3653
|
constructor(...args) {
|
1705
3654
|
super(..._PageRecordArray.parseConstructorParams(...args));
|
1706
|
-
__privateAdd$5(this, _page
|
3655
|
+
__privateAdd$5(this, _page);
|
1707
3656
|
__privateSet$3(this, _page, isObject(args[0]?.meta) ? args[0] : { meta: { page: { cursor: "", more: false } }, records: [] });
|
1708
3657
|
}
|
1709
3658
|
static parseConstructorParams(...args) {
|
@@ -1774,34 +3723,20 @@ const _PageRecordArray = class _PageRecordArray extends Array {
|
|
1774
3723
|
_page = new WeakMap();
|
1775
3724
|
let PageRecordArray = _PageRecordArray;
|
1776
3725
|
|
1777
|
-
var
|
1778
|
-
|
1779
|
-
throw TypeError("Cannot " + msg);
|
1780
|
-
};
|
1781
|
-
var __privateGet$3 = (obj, member, getter) => {
|
1782
|
-
__accessCheck$4(obj, member, "read from private field");
|
1783
|
-
return getter ? getter.call(obj) : member.get(obj);
|
1784
|
-
};
|
1785
|
-
var __privateAdd$4 = (obj, member, value) => {
|
1786
|
-
if (member.has(obj))
|
1787
|
-
throw TypeError("Cannot add the same private member more than once");
|
1788
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
3726
|
+
var __typeError$4 = (msg) => {
|
3727
|
+
throw TypeError(msg);
|
1789
3728
|
};
|
1790
|
-
var
|
1791
|
-
|
1792
|
-
|
1793
|
-
|
1794
|
-
|
1795
|
-
var
|
1796
|
-
__accessCheck$4(obj, member, "access private method");
|
1797
|
-
return method;
|
1798
|
-
};
|
1799
|
-
var _table$1, _repository, _data, _cleanFilterConstraint, cleanFilterConstraint_fn;
|
3729
|
+
var __accessCheck$4 = (obj, member, msg) => member.has(obj) || __typeError$4("Cannot " + msg);
|
3730
|
+
var __privateGet$3 = (obj, member, getter) => (__accessCheck$4(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
3731
|
+
var __privateAdd$4 = (obj, member, value) => member.has(obj) ? __typeError$4("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
3732
|
+
var __privateSet$2 = (obj, member, value, setter) => (__accessCheck$4(obj, member, "write to private field"), member.set(obj, value), value);
|
3733
|
+
var __privateMethod$3 = (obj, member, method) => (__accessCheck$4(obj, member, "access private method"), method);
|
3734
|
+
var _table$1, _repository, _data, _Query_instances, cleanFilterConstraint_fn;
|
1800
3735
|
const _Query = class _Query {
|
1801
3736
|
constructor(repository, table, data, rawParent) {
|
1802
|
-
__privateAdd$4(this,
|
1803
|
-
__privateAdd$4(this, _table$1
|
1804
|
-
__privateAdd$4(this, _repository
|
3737
|
+
__privateAdd$4(this, _Query_instances);
|
3738
|
+
__privateAdd$4(this, _table$1);
|
3739
|
+
__privateAdd$4(this, _repository);
|
1805
3740
|
__privateAdd$4(this, _data, { filter: {} });
|
1806
3741
|
// Implements pagination
|
1807
3742
|
this.meta = { page: { cursor: "start", more: true, size: PAGINATION_DEFAULT_SIZE } };
|
@@ -1879,12 +3814,12 @@ const _Query = class _Query {
|
|
1879
3814
|
filter(a, b) {
|
1880
3815
|
if (arguments.length === 1) {
|
1881
3816
|
const constraints = Object.entries(a ?? {}).map(([column, constraint]) => ({
|
1882
|
-
[column]: __privateMethod$3(this,
|
3817
|
+
[column]: __privateMethod$3(this, _Query_instances, cleanFilterConstraint_fn).call(this, column, constraint)
|
1883
3818
|
}));
|
1884
3819
|
const $all = compact([__privateGet$3(this, _data).filter?.$all].flat().concat(constraints));
|
1885
3820
|
return new _Query(__privateGet$3(this, _repository), __privateGet$3(this, _table$1), { filter: { $all } }, __privateGet$3(this, _data));
|
1886
3821
|
} else {
|
1887
|
-
const constraints = isDefined(a) && isDefined(b) ? [{ [a]: __privateMethod$3(this,
|
3822
|
+
const constraints = isDefined(a) && isDefined(b) ? [{ [a]: __privateMethod$3(this, _Query_instances, cleanFilterConstraint_fn).call(this, a, b) }] : void 0;
|
1888
3823
|
const $all = compact([__privateGet$3(this, _data).filter?.$all].flat().concat(constraints));
|
1889
3824
|
return new _Query(__privateGet$3(this, _repository), __privateGet$3(this, _table$1), { filter: { $all } }, __privateGet$3(this, _data));
|
1890
3825
|
}
|
@@ -1963,8 +3898,7 @@ const _Query = class _Query {
|
|
1963
3898
|
}
|
1964
3899
|
async getFirstOrThrow(options = {}) {
|
1965
3900
|
const records = await this.getMany({ ...options, pagination: { size: 1 } });
|
1966
|
-
if (records[0] === void 0)
|
1967
|
-
throw new Error("No results found.");
|
3901
|
+
if (records[0] === void 0) throw new Error("No results found.");
|
1968
3902
|
return records[0];
|
1969
3903
|
}
|
1970
3904
|
async summarize(params = {}) {
|
@@ -2019,7 +3953,7 @@ const _Query = class _Query {
|
|
2019
3953
|
_table$1 = new WeakMap();
|
2020
3954
|
_repository = new WeakMap();
|
2021
3955
|
_data = new WeakMap();
|
2022
|
-
|
3956
|
+
_Query_instances = new WeakSet();
|
2023
3957
|
cleanFilterConstraint_fn = function(column, value) {
|
2024
3958
|
const columnType = __privateGet$3(this, _table$1).schema?.columns.find(({ name }) => name === column)?.type;
|
2025
3959
|
if (columnType === "multiple" && (isString(value) || isStringArray(value))) {
|
@@ -2080,8 +4014,7 @@ function isSortFilterString(value) {
|
|
2080
4014
|
}
|
2081
4015
|
function isSortFilterBase(filter) {
|
2082
4016
|
return isObject(filter) && Object.entries(filter).every(([key, value]) => {
|
2083
|
-
if (key === "*")
|
2084
|
-
return value === "random";
|
4017
|
+
if (key === "*") return value === "random";
|
2085
4018
|
return value === "asc" || value === "desc";
|
2086
4019
|
});
|
2087
4020
|
}
|
@@ -2102,29 +4035,15 @@ function buildSortFilter(filter) {
|
|
2102
4035
|
}
|
2103
4036
|
}
|
2104
4037
|
|
2105
|
-
var
|
2106
|
-
|
2107
|
-
throw TypeError("Cannot " + msg);
|
2108
|
-
};
|
2109
|
-
var __privateGet$2 = (obj, member, getter) => {
|
2110
|
-
__accessCheck$3(obj, member, "read from private field");
|
2111
|
-
return getter ? getter.call(obj) : member.get(obj);
|
2112
|
-
};
|
2113
|
-
var __privateAdd$3 = (obj, member, value) => {
|
2114
|
-
if (member.has(obj))
|
2115
|
-
throw TypeError("Cannot add the same private member more than once");
|
2116
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
2117
|
-
};
|
2118
|
-
var __privateSet$1 = (obj, member, value, setter) => {
|
2119
|
-
__accessCheck$3(obj, member, "write to private field");
|
2120
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
2121
|
-
return value;
|
2122
|
-
};
|
2123
|
-
var __privateMethod$2 = (obj, member, method) => {
|
2124
|
-
__accessCheck$3(obj, member, "access private method");
|
2125
|
-
return method;
|
4038
|
+
var __typeError$3 = (msg) => {
|
4039
|
+
throw TypeError(msg);
|
2126
4040
|
};
|
2127
|
-
var
|
4041
|
+
var __accessCheck$3 = (obj, member, msg) => member.has(obj) || __typeError$3("Cannot " + msg);
|
4042
|
+
var __privateGet$2 = (obj, member, getter) => (__accessCheck$3(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
4043
|
+
var __privateAdd$3 = (obj, member, value) => member.has(obj) ? __typeError$3("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
4044
|
+
var __privateSet$1 = (obj, member, value, setter) => (__accessCheck$3(obj, member, "write to private field"), member.set(obj, value), value);
|
4045
|
+
var __privateMethod$2 = (obj, member, method) => (__accessCheck$3(obj, member, "access private method"), method);
|
4046
|
+
var _table, _getFetchProps, _db, _schemaTables, _trace, _RestRepository_instances, insertRecordWithoutId_fn, insertRecordWithId_fn, insertRecords_fn, updateRecordWithID_fn, updateRecords_fn, upsertRecordWithID_fn, deleteRecord_fn, deleteRecords_fn, getSchemaTables_fn, transformObjectToApi_fn;
|
2128
4047
|
const BULK_OPERATION_MAX_SIZE = 1e3;
|
2129
4048
|
class Repository extends Query {
|
2130
4049
|
}
|
@@ -2135,21 +4054,12 @@ class RestRepository extends Query {
|
|
2135
4054
|
{ name: options.table, schema: options.schemaTables?.find((table) => table.name === options.table) },
|
2136
4055
|
{}
|
2137
4056
|
);
|
2138
|
-
__privateAdd$3(this,
|
2139
|
-
__privateAdd$3(this,
|
2140
|
-
__privateAdd$3(this,
|
2141
|
-
__privateAdd$3(this,
|
2142
|
-
__privateAdd$3(this,
|
2143
|
-
__privateAdd$3(this,
|
2144
|
-
__privateAdd$3(this, _deleteRecord);
|
2145
|
-
__privateAdd$3(this, _deleteRecords);
|
2146
|
-
__privateAdd$3(this, _getSchemaTables);
|
2147
|
-
__privateAdd$3(this, _transformObjectToApi);
|
2148
|
-
__privateAdd$3(this, _table, void 0);
|
2149
|
-
__privateAdd$3(this, _getFetchProps, void 0);
|
2150
|
-
__privateAdd$3(this, _db, void 0);
|
2151
|
-
__privateAdd$3(this, _schemaTables, void 0);
|
2152
|
-
__privateAdd$3(this, _trace, void 0);
|
4057
|
+
__privateAdd$3(this, _RestRepository_instances);
|
4058
|
+
__privateAdd$3(this, _table);
|
4059
|
+
__privateAdd$3(this, _getFetchProps);
|
4060
|
+
__privateAdd$3(this, _db);
|
4061
|
+
__privateAdd$3(this, _schemaTables);
|
4062
|
+
__privateAdd$3(this, _trace);
|
2153
4063
|
__privateSet$1(this, _table, options.table);
|
2154
4064
|
__privateSet$1(this, _db, options.db);
|
2155
4065
|
__privateSet$1(this, _schemaTables, options.schemaTables);
|
@@ -2168,31 +4078,28 @@ class RestRepository extends Query {
|
|
2168
4078
|
return __privateGet$2(this, _trace).call(this, "create", async () => {
|
2169
4079
|
const ifVersion = parseIfVersion(b, c, d);
|
2170
4080
|
if (Array.isArray(a)) {
|
2171
|
-
if (a.length === 0)
|
2172
|
-
|
2173
|
-
const ids = await __privateMethod$2(this, _insertRecords, insertRecords_fn).call(this, a, { ifVersion, createOnly: true });
|
4081
|
+
if (a.length === 0) return [];
|
4082
|
+
const ids = await __privateMethod$2(this, _RestRepository_instances, insertRecords_fn).call(this, a, { ifVersion, createOnly: true });
|
2174
4083
|
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
2175
4084
|
const result = await this.read(ids, columns);
|
2176
4085
|
return result;
|
2177
4086
|
}
|
2178
4087
|
if (isString(a) && isObject(b)) {
|
2179
|
-
if (a === "")
|
2180
|
-
throw new Error("The id can't be empty");
|
4088
|
+
if (a === "") throw new Error("The id can't be empty");
|
2181
4089
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2182
|
-
return await __privateMethod$2(this,
|
4090
|
+
return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: true, ifVersion });
|
2183
4091
|
}
|
2184
4092
|
if (isObject(a) && isString(a.xata_id)) {
|
2185
|
-
if (a.xata_id === "")
|
2186
|
-
throw new Error("The id can't be empty");
|
4093
|
+
if (a.xata_id === "") throw new Error("The id can't be empty");
|
2187
4094
|
const columns = isValidSelectableColumns(b) ? b : void 0;
|
2188
|
-
return await __privateMethod$2(this,
|
4095
|
+
return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, {
|
2189
4096
|
createOnly: true,
|
2190
4097
|
ifVersion
|
2191
4098
|
});
|
2192
4099
|
}
|
2193
4100
|
if (isObject(a)) {
|
2194
4101
|
const columns = isValidSelectableColumns(b) ? b : void 0;
|
2195
|
-
return __privateMethod$2(this,
|
4102
|
+
return __privateMethod$2(this, _RestRepository_instances, insertRecordWithoutId_fn).call(this, a, columns);
|
2196
4103
|
}
|
2197
4104
|
throw new Error("Invalid arguments for create method");
|
2198
4105
|
});
|
@@ -2201,8 +4108,7 @@ class RestRepository extends Query {
|
|
2201
4108
|
return __privateGet$2(this, _trace).call(this, "read", async () => {
|
2202
4109
|
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
2203
4110
|
if (Array.isArray(a)) {
|
2204
|
-
if (a.length === 0)
|
2205
|
-
return [];
|
4111
|
+
if (a.length === 0) return [];
|
2206
4112
|
const ids = a.map((item) => extractId(item));
|
2207
4113
|
const finalObjects = await this.getAll({ filter: { xata_id: { $any: compact(ids) } }, columns });
|
2208
4114
|
const dictionary = finalObjects.reduce((acc, object) => {
|
@@ -2225,7 +4131,7 @@ class RestRepository extends Query {
|
|
2225
4131
|
queryParams: { columns },
|
2226
4132
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2227
4133
|
});
|
2228
|
-
const schemaTables = await __privateMethod$2(this,
|
4134
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2229
4135
|
return initObject(
|
2230
4136
|
__privateGet$2(this, _db),
|
2231
4137
|
schemaTables,
|
@@ -2266,11 +4172,10 @@ class RestRepository extends Query {
|
|
2266
4172
|
return __privateGet$2(this, _trace).call(this, "update", async () => {
|
2267
4173
|
const ifVersion = parseIfVersion(b, c, d);
|
2268
4174
|
if (Array.isArray(a)) {
|
2269
|
-
if (a.length === 0)
|
2270
|
-
return [];
|
4175
|
+
if (a.length === 0) return [];
|
2271
4176
|
const existing = await this.read(a, ["xata_id"]);
|
2272
4177
|
const updates = a.filter((_item, index) => existing[index] !== null);
|
2273
|
-
await __privateMethod$2(this,
|
4178
|
+
await __privateMethod$2(this, _RestRepository_instances, updateRecords_fn).call(this, updates, {
|
2274
4179
|
ifVersion,
|
2275
4180
|
upsert: false
|
2276
4181
|
});
|
@@ -2281,15 +4186,14 @@ class RestRepository extends Query {
|
|
2281
4186
|
try {
|
2282
4187
|
if (isString(a) && isObject(b)) {
|
2283
4188
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2284
|
-
return await __privateMethod$2(this,
|
4189
|
+
return await __privateMethod$2(this, _RestRepository_instances, updateRecordWithID_fn).call(this, a, b, columns, { ifVersion });
|
2285
4190
|
}
|
2286
4191
|
if (isObject(a) && isString(a.xata_id)) {
|
2287
4192
|
const columns = isValidSelectableColumns(b) ? b : void 0;
|
2288
|
-
return await __privateMethod$2(this,
|
4193
|
+
return await __privateMethod$2(this, _RestRepository_instances, updateRecordWithID_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, { ifVersion });
|
2289
4194
|
}
|
2290
4195
|
} catch (error) {
|
2291
|
-
if (error.status === 422)
|
2292
|
-
return null;
|
4196
|
+
if (error.status === 422) return null;
|
2293
4197
|
throw error;
|
2294
4198
|
}
|
2295
4199
|
throw new Error("Invalid arguments for update method");
|
@@ -2318,9 +4222,8 @@ class RestRepository extends Query {
|
|
2318
4222
|
return __privateGet$2(this, _trace).call(this, "createOrUpdate", async () => {
|
2319
4223
|
const ifVersion = parseIfVersion(b, c, d);
|
2320
4224
|
if (Array.isArray(a)) {
|
2321
|
-
if (a.length === 0)
|
2322
|
-
|
2323
|
-
await __privateMethod$2(this, _updateRecords, updateRecords_fn).call(this, a, {
|
4225
|
+
if (a.length === 0) return [];
|
4226
|
+
await __privateMethod$2(this, _RestRepository_instances, updateRecords_fn).call(this, a, {
|
2324
4227
|
ifVersion,
|
2325
4228
|
upsert: true
|
2326
4229
|
});
|
@@ -2329,16 +4232,14 @@ class RestRepository extends Query {
|
|
2329
4232
|
return result;
|
2330
4233
|
}
|
2331
4234
|
if (isString(a) && isObject(b)) {
|
2332
|
-
if (a === "")
|
2333
|
-
throw new Error("The id can't be empty");
|
4235
|
+
if (a === "") throw new Error("The id can't be empty");
|
2334
4236
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2335
|
-
return await __privateMethod$2(this,
|
4237
|
+
return await __privateMethod$2(this, _RestRepository_instances, upsertRecordWithID_fn).call(this, a, b, columns, { ifVersion });
|
2336
4238
|
}
|
2337
4239
|
if (isObject(a) && isString(a.xata_id)) {
|
2338
|
-
if (a.xata_id === "")
|
2339
|
-
throw new Error("The id can't be empty");
|
4240
|
+
if (a.xata_id === "") throw new Error("The id can't be empty");
|
2340
4241
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2341
|
-
return await __privateMethod$2(this,
|
4242
|
+
return await __privateMethod$2(this, _RestRepository_instances, upsertRecordWithID_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, { ifVersion });
|
2342
4243
|
}
|
2343
4244
|
if (!isDefined(a) && isObject(b)) {
|
2344
4245
|
return await this.create(b, c);
|
@@ -2353,24 +4254,21 @@ class RestRepository extends Query {
|
|
2353
4254
|
return __privateGet$2(this, _trace).call(this, "createOrReplace", async () => {
|
2354
4255
|
const ifVersion = parseIfVersion(b, c, d);
|
2355
4256
|
if (Array.isArray(a)) {
|
2356
|
-
if (a.length === 0)
|
2357
|
-
|
2358
|
-
const ids = await __privateMethod$2(this, _insertRecords, insertRecords_fn).call(this, a, { ifVersion, createOnly: false });
|
4257
|
+
if (a.length === 0) return [];
|
4258
|
+
const ids = await __privateMethod$2(this, _RestRepository_instances, insertRecords_fn).call(this, a, { ifVersion, createOnly: false });
|
2359
4259
|
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
2360
4260
|
const result = await this.read(ids, columns);
|
2361
4261
|
return result;
|
2362
4262
|
}
|
2363
4263
|
if (isString(a) && isObject(b)) {
|
2364
|
-
if (a === "")
|
2365
|
-
throw new Error("The id can't be empty");
|
4264
|
+
if (a === "") throw new Error("The id can't be empty");
|
2366
4265
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2367
|
-
return await __privateMethod$2(this,
|
4266
|
+
return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: false, ifVersion });
|
2368
4267
|
}
|
2369
4268
|
if (isObject(a) && isString(a.xata_id)) {
|
2370
|
-
if (a.xata_id === "")
|
2371
|
-
throw new Error("The id can't be empty");
|
4269
|
+
if (a.xata_id === "") throw new Error("The id can't be empty");
|
2372
4270
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2373
|
-
return await __privateMethod$2(this,
|
4271
|
+
return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, {
|
2374
4272
|
createOnly: false,
|
2375
4273
|
ifVersion
|
2376
4274
|
});
|
@@ -2387,25 +4285,22 @@ class RestRepository extends Query {
|
|
2387
4285
|
async delete(a, b) {
|
2388
4286
|
return __privateGet$2(this, _trace).call(this, "delete", async () => {
|
2389
4287
|
if (Array.isArray(a)) {
|
2390
|
-
if (a.length === 0)
|
2391
|
-
return [];
|
4288
|
+
if (a.length === 0) return [];
|
2392
4289
|
const ids = a.map((o) => {
|
2393
|
-
if (isString(o))
|
2394
|
-
|
2395
|
-
if (isString(o.xata_id))
|
2396
|
-
return o.xata_id;
|
4290
|
+
if (isString(o)) return o;
|
4291
|
+
if (isString(o.xata_id)) return o.xata_id;
|
2397
4292
|
throw new Error("Invalid arguments for delete method");
|
2398
4293
|
});
|
2399
4294
|
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
2400
4295
|
const result = await this.read(a, columns);
|
2401
|
-
await __privateMethod$2(this,
|
4296
|
+
await __privateMethod$2(this, _RestRepository_instances, deleteRecords_fn).call(this, ids);
|
2402
4297
|
return result;
|
2403
4298
|
}
|
2404
4299
|
if (isString(a)) {
|
2405
|
-
return __privateMethod$2(this,
|
4300
|
+
return __privateMethod$2(this, _RestRepository_instances, deleteRecord_fn).call(this, a, b);
|
2406
4301
|
}
|
2407
4302
|
if (isObject(a) && isString(a.xata_id)) {
|
2408
|
-
return __privateMethod$2(this,
|
4303
|
+
return __privateMethod$2(this, _RestRepository_instances, deleteRecord_fn).call(this, a.xata_id, b);
|
2409
4304
|
}
|
2410
4305
|
throw new Error("Invalid arguments for delete method");
|
2411
4306
|
});
|
@@ -2449,7 +4344,7 @@ class RestRepository extends Query {
|
|
2449
4344
|
},
|
2450
4345
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2451
4346
|
});
|
2452
|
-
const schemaTables = await __privateMethod$2(this,
|
4347
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2453
4348
|
return {
|
2454
4349
|
records: records.map((item) => initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), item, ["*"])),
|
2455
4350
|
totalCount
|
@@ -2474,7 +4369,7 @@ class RestRepository extends Query {
|
|
2474
4369
|
},
|
2475
4370
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2476
4371
|
});
|
2477
|
-
const schemaTables = await __privateMethod$2(this,
|
4372
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2478
4373
|
return {
|
2479
4374
|
records: records.map((item) => initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), item, ["*"])),
|
2480
4375
|
totalCount
|
@@ -2516,7 +4411,7 @@ class RestRepository extends Query {
|
|
2516
4411
|
fetchOptions: data.fetchOptions,
|
2517
4412
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2518
4413
|
});
|
2519
|
-
const schemaTables = await __privateMethod$2(this,
|
4414
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2520
4415
|
const records = objects.map(
|
2521
4416
|
(record) => initObject(
|
2522
4417
|
__privateGet$2(this, _db),
|
@@ -2550,7 +4445,7 @@ class RestRepository extends Query {
|
|
2550
4445
|
},
|
2551
4446
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2552
4447
|
});
|
2553
|
-
const schemaTables = await __privateMethod$2(this,
|
4448
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2554
4449
|
return {
|
2555
4450
|
...result,
|
2556
4451
|
summaries: result.summaries.map(
|
@@ -2598,9 +4493,9 @@ _getFetchProps = new WeakMap();
|
|
2598
4493
|
_db = new WeakMap();
|
2599
4494
|
_schemaTables = new WeakMap();
|
2600
4495
|
_trace = new WeakMap();
|
2601
|
-
|
4496
|
+
_RestRepository_instances = new WeakSet();
|
2602
4497
|
insertRecordWithoutId_fn = async function(object, columns = ["*"]) {
|
2603
|
-
const record = await __privateMethod$2(this,
|
4498
|
+
const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2604
4499
|
const response = await insertRecord({
|
2605
4500
|
pathParams: {
|
2606
4501
|
workspace: "{workspaceId}",
|
@@ -2612,14 +4507,12 @@ insertRecordWithoutId_fn = async function(object, columns = ["*"]) {
|
|
2612
4507
|
body: record,
|
2613
4508
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2614
4509
|
});
|
2615
|
-
const schemaTables = await __privateMethod$2(this,
|
4510
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2616
4511
|
return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
|
2617
4512
|
};
|
2618
|
-
_insertRecordWithId = new WeakSet();
|
2619
4513
|
insertRecordWithId_fn = async function(recordId, object, columns = ["*"], { createOnly, ifVersion }) {
|
2620
|
-
if (!recordId)
|
2621
|
-
|
2622
|
-
const record = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
|
4514
|
+
if (!recordId) return null;
|
4515
|
+
const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2623
4516
|
const response = await insertRecordWithID({
|
2624
4517
|
pathParams: {
|
2625
4518
|
workspace: "{workspaceId}",
|
@@ -2632,13 +4525,12 @@ insertRecordWithId_fn = async function(recordId, object, columns = ["*"], { crea
|
|
2632
4525
|
queryParams: { createOnly, columns, ifVersion },
|
2633
4526
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2634
4527
|
});
|
2635
|
-
const schemaTables = await __privateMethod$2(this,
|
4528
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2636
4529
|
return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
|
2637
4530
|
};
|
2638
|
-
_insertRecords = new WeakSet();
|
2639
4531
|
insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
|
2640
4532
|
const operations = await promiseMap(objects, async (object) => {
|
2641
|
-
const record = await __privateMethod$2(this,
|
4533
|
+
const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2642
4534
|
return { insert: { table: __privateGet$2(this, _table), record, createOnly, ifVersion } };
|
2643
4535
|
});
|
2644
4536
|
const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
|
@@ -2663,11 +4555,9 @@ insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
|
|
2663
4555
|
}
|
2664
4556
|
return ids;
|
2665
4557
|
};
|
2666
|
-
_updateRecordWithID = new WeakSet();
|
2667
4558
|
updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVersion }) {
|
2668
|
-
if (!recordId)
|
2669
|
-
|
2670
|
-
const { xata_id: _id, ...record } = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
|
4559
|
+
if (!recordId) return null;
|
4560
|
+
const { xata_id: _id, ...record } = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2671
4561
|
try {
|
2672
4562
|
const response = await updateRecordWithID({
|
2673
4563
|
pathParams: {
|
@@ -2681,7 +4571,7 @@ updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
|
|
2681
4571
|
body: record,
|
2682
4572
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2683
4573
|
});
|
2684
|
-
const schemaTables = await __privateMethod$2(this,
|
4574
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2685
4575
|
return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
|
2686
4576
|
} catch (e) {
|
2687
4577
|
if (isObject(e) && e.status === 404) {
|
@@ -2690,10 +4580,9 @@ updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
|
|
2690
4580
|
throw e;
|
2691
4581
|
}
|
2692
4582
|
};
|
2693
|
-
_updateRecords = new WeakSet();
|
2694
4583
|
updateRecords_fn = async function(objects, { ifVersion, upsert }) {
|
2695
4584
|
const operations = await promiseMap(objects, async ({ xata_id, ...object }) => {
|
2696
|
-
const fields = await __privateMethod$2(this,
|
4585
|
+
const fields = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2697
4586
|
return { update: { table: __privateGet$2(this, _table), id: xata_id, ifVersion, upsert, fields } };
|
2698
4587
|
});
|
2699
4588
|
const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
|
@@ -2718,10 +4607,8 @@ updateRecords_fn = async function(objects, { ifVersion, upsert }) {
|
|
2718
4607
|
}
|
2719
4608
|
return ids;
|
2720
4609
|
};
|
2721
|
-
_upsertRecordWithID = new WeakSet();
|
2722
4610
|
upsertRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVersion }) {
|
2723
|
-
if (!recordId)
|
2724
|
-
return null;
|
4611
|
+
if (!recordId) return null;
|
2725
4612
|
const response = await upsertRecordWithID({
|
2726
4613
|
pathParams: {
|
2727
4614
|
workspace: "{workspaceId}",
|
@@ -2734,13 +4621,11 @@ upsertRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
|
|
2734
4621
|
body: object,
|
2735
4622
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2736
4623
|
});
|
2737
|
-
const schemaTables = await __privateMethod$2(this,
|
4624
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2738
4625
|
return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
|
2739
4626
|
};
|
2740
|
-
_deleteRecord = new WeakSet();
|
2741
4627
|
deleteRecord_fn = async function(recordId, columns = ["*"]) {
|
2742
|
-
if (!recordId)
|
2743
|
-
return null;
|
4628
|
+
if (!recordId) return null;
|
2744
4629
|
try {
|
2745
4630
|
const response = await deleteRecord({
|
2746
4631
|
pathParams: {
|
@@ -2753,7 +4638,7 @@ deleteRecord_fn = async function(recordId, columns = ["*"]) {
|
|
2753
4638
|
queryParams: { columns },
|
2754
4639
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2755
4640
|
});
|
2756
|
-
const schemaTables = await __privateMethod$2(this,
|
4641
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2757
4642
|
return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
|
2758
4643
|
} catch (e) {
|
2759
4644
|
if (isObject(e) && e.status === 404) {
|
@@ -2762,7 +4647,6 @@ deleteRecord_fn = async function(recordId, columns = ["*"]) {
|
|
2762
4647
|
throw e;
|
2763
4648
|
}
|
2764
4649
|
};
|
2765
|
-
_deleteRecords = new WeakSet();
|
2766
4650
|
deleteRecords_fn = async function(recordIds) {
|
2767
4651
|
const chunkedOperations = chunk(
|
2768
4652
|
compact(recordIds).map((id) => ({ delete: { table: __privateGet$2(this, _table), id } })),
|
@@ -2780,10 +4664,8 @@ deleteRecords_fn = async function(recordIds) {
|
|
2780
4664
|
});
|
2781
4665
|
}
|
2782
4666
|
};
|
2783
|
-
_getSchemaTables = new WeakSet();
|
2784
4667
|
getSchemaTables_fn = async function() {
|
2785
|
-
if (__privateGet$2(this, _schemaTables))
|
2786
|
-
return __privateGet$2(this, _schemaTables);
|
4668
|
+
if (__privateGet$2(this, _schemaTables)) return __privateGet$2(this, _schemaTables);
|
2787
4669
|
const { schema } = await getBranchDetails({
|
2788
4670
|
pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
|
2789
4671
|
...__privateGet$2(this, _getFetchProps).call(this)
|
@@ -2791,16 +4673,13 @@ getSchemaTables_fn = async function() {
|
|
2791
4673
|
__privateSet$1(this, _schemaTables, schema.tables);
|
2792
4674
|
return schema.tables;
|
2793
4675
|
};
|
2794
|
-
_transformObjectToApi = new WeakSet();
|
2795
4676
|
transformObjectToApi_fn = async function(object) {
|
2796
|
-
const schemaTables = await __privateMethod$2(this,
|
4677
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2797
4678
|
const schema = schemaTables.find((table) => table.name === __privateGet$2(this, _table));
|
2798
|
-
if (!schema)
|
2799
|
-
throw new Error(`Table ${__privateGet$2(this, _table)} not found in schema`);
|
4679
|
+
if (!schema) throw new Error(`Table ${__privateGet$2(this, _table)} not found in schema`);
|
2800
4680
|
const result = {};
|
2801
4681
|
for (const [key, value] of Object.entries(object)) {
|
2802
|
-
if (["xata_version", "xata_createdat", "xata_updatedat"].includes(key))
|
2803
|
-
continue;
|
4682
|
+
if (["xata_version", "xata_createdat", "xata_updatedat"].includes(key)) continue;
|
2804
4683
|
const type = schema.columns.find((column) => column.name === key)?.type;
|
2805
4684
|
switch (type) {
|
2806
4685
|
case "link": {
|
@@ -2830,11 +4709,9 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
|
|
2830
4709
|
const data = {};
|
2831
4710
|
Object.assign(data, { ...object });
|
2832
4711
|
const { columns } = schemaTables.find(({ name }) => name === table) ?? {};
|
2833
|
-
if (!columns)
|
2834
|
-
console.error(`Table ${table} not found in schema`);
|
4712
|
+
if (!columns) console.error(`Table ${table} not found in schema`);
|
2835
4713
|
for (const column of columns ?? []) {
|
2836
|
-
if (!isValidColumn(selectedColumns, column))
|
2837
|
-
continue;
|
4714
|
+
if (!isValidColumn(selectedColumns, column)) continue;
|
2838
4715
|
const value = data[column.name];
|
2839
4716
|
switch (column.type) {
|
2840
4717
|
case "datetime": {
|
@@ -2920,15 +4797,12 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
|
|
2920
4797
|
return record;
|
2921
4798
|
};
|
2922
4799
|
function extractId(value) {
|
2923
|
-
if (isString(value))
|
2924
|
-
|
2925
|
-
if (isObject(value) && isString(value.xata_id))
|
2926
|
-
return value.xata_id;
|
4800
|
+
if (isString(value)) return value;
|
4801
|
+
if (isObject(value) && isString(value.xata_id)) return value.xata_id;
|
2927
4802
|
return void 0;
|
2928
4803
|
}
|
2929
4804
|
function isValidColumn(columns, column) {
|
2930
|
-
if (columns.includes("*"))
|
2931
|
-
return true;
|
4805
|
+
if (columns.includes("*")) return true;
|
2932
4806
|
return columns.filter((item) => isString(item) && item.startsWith(column.name)).length > 0;
|
2933
4807
|
}
|
2934
4808
|
function parseIfVersion(...args) {
|
@@ -2968,19 +4842,12 @@ const includesAll = (value) => ({ $includesAll: value });
|
|
2968
4842
|
const includesNone = (value) => ({ $includesNone: value });
|
2969
4843
|
const includesAny = (value) => ({ $includesAny: value });
|
2970
4844
|
|
2971
|
-
var
|
2972
|
-
|
2973
|
-
throw TypeError("Cannot " + msg);
|
2974
|
-
};
|
2975
|
-
var __privateGet$1 = (obj, member, getter) => {
|
2976
|
-
__accessCheck$2(obj, member, "read from private field");
|
2977
|
-
return getter ? getter.call(obj) : member.get(obj);
|
2978
|
-
};
|
2979
|
-
var __privateAdd$2 = (obj, member, value) => {
|
2980
|
-
if (member.has(obj))
|
2981
|
-
throw TypeError("Cannot add the same private member more than once");
|
2982
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
4845
|
+
var __typeError$2 = (msg) => {
|
4846
|
+
throw TypeError(msg);
|
2983
4847
|
};
|
4848
|
+
var __accessCheck$2 = (obj, member, msg) => member.has(obj) || __typeError$2("Cannot " + msg);
|
4849
|
+
var __privateGet$1 = (obj, member, getter) => (__accessCheck$2(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
4850
|
+
var __privateAdd$2 = (obj, member, value) => member.has(obj) ? __typeError$2("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
2984
4851
|
var _tables;
|
2985
4852
|
class SchemaPlugin extends XataPlugin {
|
2986
4853
|
constructor() {
|
@@ -2992,8 +4859,7 @@ class SchemaPlugin extends XataPlugin {
|
|
2992
4859
|
{},
|
2993
4860
|
{
|
2994
4861
|
get: (_target, table) => {
|
2995
|
-
if (!isString(table))
|
2996
|
-
throw new Error("Invalid table name");
|
4862
|
+
if (!isString(table)) throw new Error("Invalid table name");
|
2997
4863
|
if (__privateGet$1(this, _tables)[table] === void 0) {
|
2998
4864
|
__privateGet$1(this, _tables)[table] = new RestRepository({ db, pluginOptions, table, schemaTables: pluginOptions.tables });
|
2999
4865
|
}
|
@@ -3084,30 +4950,23 @@ function getContentType(file) {
|
|
3084
4950
|
return "application/octet-stream";
|
3085
4951
|
}
|
3086
4952
|
|
3087
|
-
var
|
3088
|
-
|
3089
|
-
throw TypeError("Cannot " + msg);
|
4953
|
+
var __typeError$1 = (msg) => {
|
4954
|
+
throw TypeError(msg);
|
3090
4955
|
};
|
3091
|
-
var
|
3092
|
-
|
3093
|
-
|
3094
|
-
|
3095
|
-
};
|
3096
|
-
var __privateMethod$1 = (obj, member, method) => {
|
3097
|
-
__accessCheck$1(obj, member, "access private method");
|
3098
|
-
return method;
|
3099
|
-
};
|
3100
|
-
var _search, search_fn;
|
4956
|
+
var __accessCheck$1 = (obj, member, msg) => member.has(obj) || __typeError$1("Cannot " + msg);
|
4957
|
+
var __privateAdd$1 = (obj, member, value) => member.has(obj) ? __typeError$1("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
4958
|
+
var __privateMethod$1 = (obj, member, method) => (__accessCheck$1(obj, member, "access private method"), method);
|
4959
|
+
var _SearchPlugin_instances, search_fn;
|
3101
4960
|
class SearchPlugin extends XataPlugin {
|
3102
4961
|
constructor(db) {
|
3103
4962
|
super();
|
3104
4963
|
this.db = db;
|
3105
|
-
__privateAdd$1(this,
|
4964
|
+
__privateAdd$1(this, _SearchPlugin_instances);
|
3106
4965
|
}
|
3107
4966
|
build(pluginOptions) {
|
3108
4967
|
return {
|
3109
4968
|
all: async (query, options = {}) => {
|
3110
|
-
const { records, totalCount } = await __privateMethod$1(this,
|
4969
|
+
const { records, totalCount } = await __privateMethod$1(this, _SearchPlugin_instances, search_fn).call(this, query, options, pluginOptions);
|
3111
4970
|
return {
|
3112
4971
|
totalCount,
|
3113
4972
|
records: records.map((record) => {
|
@@ -3117,7 +4976,7 @@ class SearchPlugin extends XataPlugin {
|
|
3117
4976
|
};
|
3118
4977
|
},
|
3119
4978
|
byTable: async (query, options = {}) => {
|
3120
|
-
const { records: rawRecords, totalCount } = await __privateMethod$1(this,
|
4979
|
+
const { records: rawRecords, totalCount } = await __privateMethod$1(this, _SearchPlugin_instances, search_fn).call(this, query, options, pluginOptions);
|
3121
4980
|
const records = rawRecords.reduce((acc, record) => {
|
3122
4981
|
const table = record.xata_table;
|
3123
4982
|
const items = acc[table] ?? [];
|
@@ -3129,7 +4988,7 @@ class SearchPlugin extends XataPlugin {
|
|
3129
4988
|
};
|
3130
4989
|
}
|
3131
4990
|
}
|
3132
|
-
|
4991
|
+
_SearchPlugin_instances = new WeakSet();
|
3133
4992
|
search_fn = async function(query, options, pluginOptions) {
|
3134
4993
|
const { tables, fuzziness, highlight, prefix, page } = options ?? {};
|
3135
4994
|
const { records, totalCount } = await searchBranch({
|
@@ -3165,8 +5024,7 @@ function arrayString(val) {
|
|
3165
5024
|
return result;
|
3166
5025
|
}
|
3167
5026
|
function prepareValue(value) {
|
3168
|
-
if (!isDefined(value))
|
3169
|
-
return null;
|
5027
|
+
if (!isDefined(value)) return null;
|
3170
5028
|
if (value instanceof Date) {
|
3171
5029
|
return value.toISOString();
|
3172
5030
|
}
|
@@ -3193,19 +5051,19 @@ function prepareParams(param1, param2) {
|
|
3193
5051
|
return { statement, params: param2?.map((value) => prepareValue(value)) };
|
3194
5052
|
}
|
3195
5053
|
if (isObject(param1)) {
|
3196
|
-
const { statement, params, consistency } = param1;
|
3197
|
-
return { statement, params: params?.map((value) => prepareValue(value)), consistency };
|
5054
|
+
const { statement, params, consistency, responseType } = param1;
|
5055
|
+
return { statement, params: params?.map((value) => prepareValue(value)), consistency, responseType };
|
3198
5056
|
}
|
3199
5057
|
throw new Error("Invalid query");
|
3200
5058
|
}
|
3201
5059
|
|
3202
5060
|
class SQLPlugin extends XataPlugin {
|
3203
5061
|
build(pluginOptions) {
|
3204
|
-
|
5062
|
+
const sqlFunction = async (query, ...parameters) => {
|
3205
5063
|
if (!isParamsObject(query) && (!isTemplateStringsArray(query) || !Array.isArray(parameters))) {
|
3206
5064
|
throw new Error("Invalid usage of `xata.sql`. Please use it as a tagged template or with an object.");
|
3207
5065
|
}
|
3208
|
-
const { statement, params, consistency } = prepareParams(query, parameters);
|
5066
|
+
const { statement, params, consistency, responseType } = prepareParams(query, parameters);
|
3209
5067
|
const {
|
3210
5068
|
records,
|
3211
5069
|
rows,
|
@@ -3213,11 +5071,13 @@ class SQLPlugin extends XataPlugin {
|
|
3213
5071
|
columns = []
|
3214
5072
|
} = await sqlQuery({
|
3215
5073
|
pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
|
3216
|
-
body: { statement, params, consistency },
|
5074
|
+
body: { statement, params, consistency, responseType },
|
3217
5075
|
...pluginOptions
|
3218
5076
|
});
|
3219
5077
|
return { records, rows, warning, columns };
|
3220
5078
|
};
|
5079
|
+
sqlFunction.connectionString = buildConnectionString(pluginOptions);
|
5080
|
+
return sqlFunction;
|
3221
5081
|
}
|
3222
5082
|
}
|
3223
5083
|
function isTemplateStringsArray(strings) {
|
@@ -3226,6 +5086,32 @@ function isTemplateStringsArray(strings) {
|
|
3226
5086
|
function isParamsObject(params) {
|
3227
5087
|
return isObject(params) && "statement" in params;
|
3228
5088
|
}
|
5089
|
+
function buildDomain(host, region) {
|
5090
|
+
switch (host) {
|
5091
|
+
case "production":
|
5092
|
+
return `${region}.sql.xata.sh`;
|
5093
|
+
case "staging":
|
5094
|
+
return `${region}.sql.staging-xata.dev`;
|
5095
|
+
case "dev":
|
5096
|
+
return `${region}.sql.dev-xata.dev`;
|
5097
|
+
case "local":
|
5098
|
+
return "localhost:7654";
|
5099
|
+
default:
|
5100
|
+
throw new Error("Invalid host provider");
|
5101
|
+
}
|
5102
|
+
}
|
5103
|
+
function buildConnectionString({ apiKey, workspacesApiUrl, branch }) {
|
5104
|
+
const url = isString(workspacesApiUrl) ? workspacesApiUrl : workspacesApiUrl("", {});
|
5105
|
+
const parts = parseWorkspacesUrlParts(url);
|
5106
|
+
if (!parts) throw new Error("Invalid workspaces URL");
|
5107
|
+
const { workspace: workspaceSlug, region, database, host } = parts;
|
5108
|
+
const domain = buildDomain(host, region);
|
5109
|
+
const workspace = workspaceSlug.split("-").pop();
|
5110
|
+
if (!workspace || !region || !database || !apiKey || !branch) {
|
5111
|
+
throw new Error("Unable to build xata connection string");
|
5112
|
+
}
|
5113
|
+
return `postgresql://${workspace}:${apiKey}@${domain}/${database}:${branch}?sslmode=require`;
|
5114
|
+
}
|
3229
5115
|
|
3230
5116
|
class TransactionPlugin extends XataPlugin {
|
3231
5117
|
build(pluginOptions) {
|
@@ -3242,41 +5128,27 @@ class TransactionPlugin extends XataPlugin {
|
|
3242
5128
|
}
|
3243
5129
|
}
|
3244
5130
|
|
3245
|
-
var
|
3246
|
-
|
3247
|
-
throw TypeError("Cannot " + msg);
|
3248
|
-
};
|
3249
|
-
var __privateGet = (obj, member, getter) => {
|
3250
|
-
__accessCheck(obj, member, "read from private field");
|
3251
|
-
return getter ? getter.call(obj) : member.get(obj);
|
3252
|
-
};
|
3253
|
-
var __privateAdd = (obj, member, value) => {
|
3254
|
-
if (member.has(obj))
|
3255
|
-
throw TypeError("Cannot add the same private member more than once");
|
3256
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
3257
|
-
};
|
3258
|
-
var __privateSet = (obj, member, value, setter) => {
|
3259
|
-
__accessCheck(obj, member, "write to private field");
|
3260
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
3261
|
-
return value;
|
3262
|
-
};
|
3263
|
-
var __privateMethod = (obj, member, method) => {
|
3264
|
-
__accessCheck(obj, member, "access private method");
|
3265
|
-
return method;
|
5131
|
+
var __typeError = (msg) => {
|
5132
|
+
throw TypeError(msg);
|
3266
5133
|
};
|
5134
|
+
var __accessCheck = (obj, member, msg) => member.has(obj) || __typeError("Cannot " + msg);
|
5135
|
+
var __privateGet = (obj, member, getter) => (__accessCheck(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
5136
|
+
var __privateAdd = (obj, member, value) => member.has(obj) ? __typeError("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
5137
|
+
var __privateSet = (obj, member, value, setter) => (__accessCheck(obj, member, "write to private field"), member.set(obj, value), value);
|
5138
|
+
var __privateMethod = (obj, member, method) => (__accessCheck(obj, member, "access private method"), method);
|
3267
5139
|
const buildClient = (plugins) => {
|
3268
|
-
var _options,
|
5140
|
+
var _options, _instances, parseOptions_fn, getFetchProps_fn, _a;
|
3269
5141
|
return _a = class {
|
3270
5142
|
constructor(options = {}, tables) {
|
3271
|
-
__privateAdd(this,
|
3272
|
-
__privateAdd(this,
|
3273
|
-
|
3274
|
-
const safeOptions = __privateMethod(this, _parseOptions, parseOptions_fn).call(this, options);
|
5143
|
+
__privateAdd(this, _instances);
|
5144
|
+
__privateAdd(this, _options);
|
5145
|
+
const safeOptions = __privateMethod(this, _instances, parseOptions_fn).call(this, options);
|
3275
5146
|
__privateSet(this, _options, safeOptions);
|
3276
5147
|
const pluginOptions = {
|
3277
|
-
...__privateMethod(this,
|
5148
|
+
...__privateMethod(this, _instances, getFetchProps_fn).call(this, safeOptions),
|
3278
5149
|
host: safeOptions.host,
|
3279
|
-
tables
|
5150
|
+
tables,
|
5151
|
+
branch: safeOptions.branch
|
3280
5152
|
};
|
3281
5153
|
const db = new SchemaPlugin().build(pluginOptions);
|
3282
5154
|
const search = new SearchPlugin(db).build(pluginOptions);
|
@@ -3290,8 +5162,7 @@ const buildClient = (plugins) => {
|
|
3290
5162
|
this.sql = sql;
|
3291
5163
|
this.files = files;
|
3292
5164
|
for (const [key, namespace] of Object.entries(plugins ?? {})) {
|
3293
|
-
if (namespace === void 0)
|
3294
|
-
continue;
|
5165
|
+
if (namespace === void 0) continue;
|
3295
5166
|
this[key] = namespace.build(pluginOptions);
|
3296
5167
|
}
|
3297
5168
|
}
|
@@ -3300,8 +5171,8 @@ const buildClient = (plugins) => {
|
|
3300
5171
|
const branch = __privateGet(this, _options).branch;
|
3301
5172
|
return { databaseURL, branch };
|
3302
5173
|
}
|
3303
|
-
}, _options = new WeakMap(),
|
3304
|
-
const enableBrowser = options?.enableBrowser ??
|
5174
|
+
}, _options = new WeakMap(), _instances = new WeakSet(), parseOptions_fn = function(options) {
|
5175
|
+
const enableBrowser = options?.enableBrowser ?? false;
|
3305
5176
|
const isBrowser = typeof window !== "undefined" && typeof Deno === "undefined";
|
3306
5177
|
if (isBrowser && !enableBrowser) {
|
3307
5178
|
throw new Error(
|
@@ -3309,8 +5180,9 @@ const buildClient = (plugins) => {
|
|
3309
5180
|
);
|
3310
5181
|
}
|
3311
5182
|
const fetch = getFetchImplementation(options?.fetch);
|
3312
|
-
const databaseURL = options?.databaseURL
|
3313
|
-
const apiKey = options?.apiKey
|
5183
|
+
const databaseURL = options?.databaseURL;
|
5184
|
+
const apiKey = options?.apiKey;
|
5185
|
+
const branch = options?.branch;
|
3314
5186
|
const trace = options?.trace ?? defaultTrace;
|
3315
5187
|
const clientName = options?.clientName;
|
3316
5188
|
const host = options?.host ?? "production";
|
@@ -3321,25 +5193,8 @@ const buildClient = (plugins) => {
|
|
3321
5193
|
if (!databaseURL) {
|
3322
5194
|
throw new Error("Option databaseURL is required");
|
3323
5195
|
}
|
3324
|
-
|
3325
|
-
|
3326
|
-
const branch = options?.branch || previewBranch || envBranch || "main";
|
3327
|
-
if (!!previewBranch && branch !== previewBranch) {
|
3328
|
-
console.warn(
|
3329
|
-
`Ignoring preview branch ${previewBranch} because branch option was passed to the client constructor with value ${branch}`
|
3330
|
-
);
|
3331
|
-
} else if (!!envBranch && branch !== envBranch) {
|
3332
|
-
console.warn(
|
3333
|
-
`Ignoring branch ${envBranch} because branch option was passed to the client constructor with value ${branch}`
|
3334
|
-
);
|
3335
|
-
} else if (!!previewBranch && !!envBranch && previewBranch !== envBranch) {
|
3336
|
-
console.warn(
|
3337
|
-
`Ignoring preview branch ${previewBranch} and branch ${envBranch} because branch option was passed to the client constructor with value ${branch}`
|
3338
|
-
);
|
3339
|
-
} else if (!previewBranch && !envBranch && options?.branch === void 0) {
|
3340
|
-
console.warn(
|
3341
|
-
`No branch was passed to the client constructor. Using default branch ${branch}. You can set the branch with the environment variable XATA_BRANCH or by passing the branch option to the client constructor.`
|
3342
|
-
);
|
5196
|
+
if (!branch) {
|
5197
|
+
throw new Error("Option branch is required");
|
3343
5198
|
}
|
3344
5199
|
return {
|
3345
5200
|
fetch,
|
@@ -3353,7 +5208,7 @@ const buildClient = (plugins) => {
|
|
3353
5208
|
clientName,
|
3354
5209
|
xataAgentExtra
|
3355
5210
|
};
|
3356
|
-
},
|
5211
|
+
}, getFetchProps_fn = function({
|
3357
5212
|
fetch,
|
3358
5213
|
apiKey,
|
3359
5214
|
databaseURL,
|
@@ -3394,26 +5249,19 @@ class Serializer {
|
|
3394
5249
|
}
|
3395
5250
|
toJSON(data) {
|
3396
5251
|
function visit(obj) {
|
3397
|
-
if (Array.isArray(obj))
|
3398
|
-
return obj.map(visit);
|
5252
|
+
if (Array.isArray(obj)) return obj.map(visit);
|
3399
5253
|
const type = typeof obj;
|
3400
|
-
if (type === "undefined")
|
3401
|
-
|
3402
|
-
if (
|
3403
|
-
return { [META]: "bigint", [VALUE]: obj.toString() };
|
3404
|
-
if (obj === null || type !== "object")
|
3405
|
-
return obj;
|
5254
|
+
if (type === "undefined") return { [META]: "undefined" };
|
5255
|
+
if (type === "bigint") return { [META]: "bigint", [VALUE]: obj.toString() };
|
5256
|
+
if (obj === null || type !== "object") return obj;
|
3406
5257
|
const constructor = obj.constructor;
|
3407
5258
|
const o = { [META]: constructor.name };
|
3408
5259
|
for (const [key, value] of Object.entries(obj)) {
|
3409
5260
|
o[key] = visit(value);
|
3410
5261
|
}
|
3411
|
-
if (constructor === Date)
|
3412
|
-
|
3413
|
-
if (constructor ===
|
3414
|
-
o[VALUE] = Object.fromEntries(obj);
|
3415
|
-
if (constructor === Set)
|
3416
|
-
o[VALUE] = [...obj];
|
5262
|
+
if (constructor === Date) o[VALUE] = obj.toISOString();
|
5263
|
+
if (constructor === Map) o[VALUE] = Object.fromEntries(obj);
|
5264
|
+
if (constructor === Set) o[VALUE] = [...obj];
|
3417
5265
|
return o;
|
3418
5266
|
}
|
3419
5267
|
return JSON.stringify(visit(data));
|
@@ -3426,16 +5274,11 @@ class Serializer {
|
|
3426
5274
|
if (constructor) {
|
3427
5275
|
return Object.assign(Object.create(constructor.prototype), rest);
|
3428
5276
|
}
|
3429
|
-
if (clazz === "Date")
|
3430
|
-
|
3431
|
-
if (clazz === "
|
3432
|
-
|
3433
|
-
if (clazz === "
|
3434
|
-
return new Map(Object.entries(val));
|
3435
|
-
if (clazz === "bigint")
|
3436
|
-
return BigInt(val);
|
3437
|
-
if (clazz === "undefined")
|
3438
|
-
return void 0;
|
5277
|
+
if (clazz === "Date") return new Date(val);
|
5278
|
+
if (clazz === "Set") return new Set(val);
|
5279
|
+
if (clazz === "Map") return new Map(Object.entries(val));
|
5280
|
+
if (clazz === "bigint") return BigInt(val);
|
5281
|
+
if (clazz === "undefined") return void 0;
|
3439
5282
|
return rest;
|
3440
5283
|
}
|
3441
5284
|
return value;
|
@@ -3450,6 +5293,47 @@ const deserialize = (json) => {
|
|
3450
5293
|
return defaultSerializer.fromJSON(json);
|
3451
5294
|
};
|
3452
5295
|
|
5296
|
+
function parseEnvironment(environment) {
|
5297
|
+
try {
|
5298
|
+
if (typeof environment === "function") {
|
5299
|
+
return new Proxy(
|
5300
|
+
{},
|
5301
|
+
{
|
5302
|
+
get(target) {
|
5303
|
+
return environment(target);
|
5304
|
+
}
|
5305
|
+
}
|
5306
|
+
);
|
5307
|
+
}
|
5308
|
+
if (isObject(environment)) {
|
5309
|
+
return environment;
|
5310
|
+
}
|
5311
|
+
} catch (error) {
|
5312
|
+
}
|
5313
|
+
return {};
|
5314
|
+
}
|
5315
|
+
function buildPreviewBranchName({ org, branch }) {
|
5316
|
+
return `preview-${org}-${branch}`;
|
5317
|
+
}
|
5318
|
+
function getDeployPreviewBranch(environment) {
|
5319
|
+
try {
|
5320
|
+
const { deployPreview, deployPreviewBranch, vercelGitCommitRef, vercelGitRepoOwner } = parseEnvironment(environment);
|
5321
|
+
if (deployPreviewBranch) return deployPreviewBranch;
|
5322
|
+
switch (deployPreview) {
|
5323
|
+
case "vercel": {
|
5324
|
+
if (!vercelGitCommitRef || !vercelGitRepoOwner) {
|
5325
|
+
console.warn("XATA_PREVIEW=vercel but VERCEL_GIT_COMMIT_REF or VERCEL_GIT_REPO_OWNER is not valid");
|
5326
|
+
return void 0;
|
5327
|
+
}
|
5328
|
+
return buildPreviewBranchName({ org: vercelGitRepoOwner, branch: vercelGitCommitRef });
|
5329
|
+
}
|
5330
|
+
}
|
5331
|
+
return void 0;
|
5332
|
+
} catch (err) {
|
5333
|
+
return void 0;
|
5334
|
+
}
|
5335
|
+
}
|
5336
|
+
|
3453
5337
|
class XataError extends Error {
|
3454
5338
|
constructor(message, status) {
|
3455
5339
|
super(message);
|
@@ -3457,5 +5341,5 @@ class XataError extends Error {
|
|
3457
5341
|
}
|
3458
5342
|
}
|
3459
5343
|
|
3460
|
-
export { BaseClient, FetcherError, FilesPlugin, operationsByTag as Operations, PAGINATION_DEFAULT_OFFSET, PAGINATION_DEFAULT_SIZE, PAGINATION_MAX_OFFSET, PAGINATION_MAX_SIZE, Page, PageRecordArray, Query, RecordArray, RecordColumnTypes, Repository, RestRepository, SQLPlugin, SchemaPlugin, SearchPlugin, Serializer, TransactionPlugin, XataApiClient, XataApiPlugin, XataError, XataFile, XataPlugin, acceptWorkspaceMemberInvite, adaptTable, addGitBranchesEntry, addTableColumn, aggregateTable, applyBranchSchemaEdit, applyMigration, askTable, askTableSession, branchTransaction, buildClient, buildPreviewBranchName, buildProviderString, bulkInsertTableRecords, cancelWorkspaceMemberInvite, compareBranchSchemas, compareBranchWithUserSchema, compareMigrationRequest, contains, copyBranch, createBranch, createCluster, createDatabase, createMigrationRequest, createTable, createUserAPIKey, createWorkspace, deleteBranch, deleteColumn, deleteDatabase, deleteDatabaseGithubSettings, deleteFile, deleteFileItem, deleteOAuthAccessToken, deleteRecord, deleteTable, deleteUser, deleteUserAPIKey, deleteUserOAuthClient, deleteWorkspace, deserialize, endsWith, equals, executeBranchMigrationPlan, exists, fileAccess, fileUpload, ge,
|
5344
|
+
export { BaseClient, Buffer, FetcherError, FilesPlugin, operationsByTag as Operations, PAGINATION_DEFAULT_OFFSET, PAGINATION_DEFAULT_SIZE, PAGINATION_MAX_OFFSET, PAGINATION_MAX_SIZE, Page, PageRecordArray, Query, RecordArray, RecordColumnTypes, Repository, RestRepository, SQLPlugin, SchemaPlugin, SearchPlugin, Serializer, TransactionPlugin, XataApiClient, XataApiPlugin, XataError, XataFile, XataPlugin, acceptWorkspaceMemberInvite, adaptAllTables, adaptTable, addGitBranchesEntry, addTableColumn, aggregateTable, applyBranchSchemaEdit, applyMigration, askTable, askTableSession, branchTransaction, buildClient, buildPreviewBranchName, buildProviderString, bulkInsertTableRecords, cancelWorkspaceMemberInvite, compareBranchSchemas, compareBranchWithUserSchema, compareMigrationRequest, completeMigration, contains, copyBranch, createBranch, createCluster, createDatabase, createMigrationRequest, createTable, createUserAPIKey, createWorkspace, deleteBranch, deleteCluster, deleteColumn, deleteDatabase, deleteDatabaseGithubSettings, deleteFile, deleteFileItem, deleteOAuthAccessToken, deleteRecord, deleteTable, deleteUser, deleteUserAPIKey, deleteUserOAuthClient, deleteWorkspace, deserialize, endsWith, equals, executeBranchMigrationPlan, exists, fileAccess, fileUpload, ge, getAuthorizationCode, getBranchDetails, getBranchList, getBranchMetadata, getBranchMigrationHistory, getBranchMigrationJobStatus, getBranchMigrationPlan, getBranchSchemaHistory, getBranchStats, getCluster, getColumn, getDatabaseGithubSettings, getDatabaseList, getDatabaseMetadata, getDatabaseSettings, getDeployPreviewBranch, getFile, getFileItem, getGitBranchesMapping, getHostUrl, getMigrationHistory, getMigrationJobStatus, getMigrationRequest, getMigrationRequestIsMerged, getRecord, getSchema, getTableColumns, getTableSchema, getUser, getUserAPIKeys, getUserOAuthAccessTokens, getUserOAuthClients, getWorkspace, getWorkspaceMembersList, getWorkspaceSettings, getWorkspacesList, grantAuthorizationCode, 
greaterEquals, greaterThan, greaterThanEquals, gt, gte, iContains, iPattern, includes, includesAll, includesAny, includesNone, insertRecord, insertRecordWithID, inviteWorkspaceMember, is, isCursorPaginationOptions, isHostProviderAlias, isHostProviderBuilder, isIdentifiable, isNot, isValidExpandedColumn, isValidSelectableColumns, le, lessEquals, lessThan, lessThanEquals, listClusters, listMigrationRequestsCommits, listRegions, lt, lte, mergeMigrationRequest, notExists, operationsByTag, parseProviderString, parseWorkspacesUrlParts, pattern, previewBranchSchemaEdit, pushBranchMigrations, putFile, putFileItem, queryMigrationRequests, queryTable, removeGitBranchesEntry, removeWorkspaceMember, renameDatabase, resendWorkspaceMemberInvite, resolveBranch, rollbackMigration, searchBranch, searchTable, serialize, setTableSchema, sqlQuery, startMigration, startsWith, summarizeTable, transformImage, updateBranchMetadata, updateBranchSchema, updateCluster, updateColumn, updateDatabaseGithubSettings, updateDatabaseMetadata, updateDatabaseSettings, updateMigrationRequest, updateOAuthAccessToken, updateRecordWithID, updateTable, updateUser, updateWorkspace, updateWorkspaceMemberInvite, updateWorkspaceMemberRole, updateWorkspaceSettings, upsertRecordWithID, vectorSearchTable };
|
3461
5345
|
//# sourceMappingURL=index.mjs.map
|