@xata.io/client 0.0.0-next.vdcc7247ab6c54a023e791b576c247bc20533009e → 0.0.0-next.vde33c6adc2eca5775c886dfa99cae37e464ae8ad
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-add-version.log +1 -1
- package/.turbo/turbo-build.log +4 -4
- package/CHANGELOG.md +23 -3
- package/dist/index.cjs +2484 -576
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.ts +4814 -3746
- package/dist/index.mjs +2475 -573
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.cjs
CHANGED
@@ -24,6 +24,1789 @@ const TraceAttributes = {
|
|
24
24
|
CLOUDFLARE_RAY_ID: "cf.ray"
|
25
25
|
};
|
26
26
|
|
27
|
+
// ---------------------------------------------------------------------------
// Inlined base64 codec (vendored from the `base64-js` package), used by the
// bundled Buffer polyfill below. Converts between base64 strings and
// Uint8Array byte sequences; decoding also accepts the URL-safe alphabet.
// ---------------------------------------------------------------------------

// Forward table: 6-bit value -> base64 character.
const lookup = [];
// Reverse table: character code -> 6-bit value.
const revLookup = [];
const code = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
for (let i = 0, len = code.length; i < len; ++i) {
  lookup[i] = code[i];
  revLookup[code.charCodeAt(i)] = i;
}
// URL-safe alphabet variants decode to the same values as '+' and '/'.
revLookup["-".charCodeAt(0)] = 62;
revLookup["_".charCodeAt(0)] = 63;

/**
 * Validates a base64 string and returns `[validLen, placeHoldersLen]`:
 * `validLen` is the number of data characters before any '=' padding and
 * `placeHoldersLen` is the number of '=' padding characters (0, 1 or 2).
 * Throws if the string length is not a multiple of 4.
 */
function getLens(b64) {
  const len = b64.length;
  if (len % 4 > 0) {
    throw new Error("Invalid string. Length must be a multiple of 4");
  }
  const firstPad = b64.indexOf("=");
  const validLen = firstPad === -1 ? len : firstPad;
  const placeHoldersLen = validLen === len ? 0 : 4 - (validLen % 4);
  return [validLen, placeHoldersLen];
}

/** Number of decoded bytes produced for the given valid/padding lengths. */
function _byteLength(_b64, validLen, placeHoldersLen) {
  return ((validLen + placeHoldersLen) * 3) / 4 - placeHoldersLen;
}

/** Decodes a base64 string into a freshly allocated Uint8Array. */
function toByteArray(b64) {
  const [validLen, placeHoldersLen] = getLens(b64);
  const arr = new Uint8Array(_byteLength(b64, validLen, placeHoldersLen));
  // Full 4-character groups are decoded in the loop; a padded trailing group
  // (if any) is handled separately below.
  const groupEnd = placeHoldersLen > 0 ? validLen - 4 : validLen;
  let curByte = 0;
  let i = 0;
  for (; i < groupEnd; i += 4) {
    const tmp =
      (revLookup[b64.charCodeAt(i)] << 18) |
      (revLookup[b64.charCodeAt(i + 1)] << 12) |
      (revLookup[b64.charCodeAt(i + 2)] << 6) |
      revLookup[b64.charCodeAt(i + 3)];
    arr[curByte++] = (tmp >> 16) & 255;
    arr[curByte++] = (tmp >> 8) & 255;
    arr[curByte++] = tmp & 255;
  }
  if (placeHoldersLen === 2) {
    // Two '=' pads: the final group carries a single byte.
    const tmp = (revLookup[b64.charCodeAt(i)] << 2) | (revLookup[b64.charCodeAt(i + 1)] >> 4);
    arr[curByte++] = tmp & 255;
  }
  if (placeHoldersLen === 1) {
    // One '=' pad: the final group carries two bytes.
    const tmp =
      (revLookup[b64.charCodeAt(i)] << 10) |
      (revLookup[b64.charCodeAt(i + 1)] << 4) |
      (revLookup[b64.charCodeAt(i + 2)] >> 2);
    arr[curByte++] = (tmp >> 8) & 255;
    arr[curByte++] = tmp & 255;
  }
  return arr;
}

/** Encodes one 24-bit triplet as four base64 characters. */
function tripletToBase64(num) {
  return (
    lookup[(num >> 18) & 63] +
    lookup[(num >> 12) & 63] +
    lookup[(num >> 6) & 63] +
    lookup[num & 63]
  );
}

/** Encodes bytes `uint8[start..end)` (a length that is a multiple of 3). */
function encodeChunk(uint8, start, end) {
  const output = [];
  for (let i = start; i < end; i += 3) {
    const tmp =
      ((uint8[i] << 16) & 16711680) + ((uint8[i + 1] << 8) & 65280) + (uint8[i + 2] & 255);
    output.push(tripletToBase64(tmp));
  }
  return output.join("");
}

/** Encodes a Uint8Array as a standard (padded) base64 string. */
function fromByteArray(uint8) {
  const len = uint8.length;
  const extraBytes = len % 3;
  const parts = [];
  // Encode in bounded chunks so intermediate joined strings stay small.
  const maxChunkLength = 16383;
  for (let i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) {
    parts.push(encodeChunk(uint8, i, i + maxChunkLength > len2 ? len2 : i + maxChunkLength));
  }
  if (extraBytes === 1) {
    // One leftover byte -> two data characters + '=='.
    const tmp = uint8[len - 1];
    parts.push(lookup[tmp >> 2] + lookup[(tmp << 4) & 63] + "==");
  } else if (extraBytes === 2) {
    // Two leftover bytes -> three data characters + '='.
    const tmp = (uint8[len - 2] << 8) + uint8[len - 1];
    parts.push(lookup[tmp >> 10] + lookup[(tmp >> 4) & 63] + lookup[(tmp << 2) & 63] + "=");
  }
  return parts.join("");
}
|
105
|
+
|
106
|
+
// 0x7fffffff (2^31 - 1): the largest byte length this Buffer polyfill allows.
const K_MAX_LENGTH = 0x7fffffff;
// NOTE(review): presumably caps how many arguments are passed per call when
// batching String.fromCharCode-style decoding — confirm at the use site,
// which is not visible in this chunk.
const MAX_ARGUMENTS_LENGTH = 4096;
|
108
|
+
class Buffer extends Uint8Array {
|
109
|
+
/**
|
110
|
+
* Constructs a new `Buffer` instance.
|
111
|
+
*
|
112
|
+
* @param value
|
113
|
+
* @param encodingOrOffset
|
114
|
+
* @param length
|
115
|
+
*/
|
116
|
+
constructor(value, encodingOrOffset, length) {
|
117
|
+
if (typeof value === "number") {
|
118
|
+
if (typeof encodingOrOffset === "string") {
|
119
|
+
throw new TypeError("The first argument must be of type string, received type number");
|
120
|
+
}
|
121
|
+
if (value < 0) {
|
122
|
+
throw new RangeError("The buffer size cannot be negative");
|
123
|
+
}
|
124
|
+
super(value < 0 ? 0 : Buffer._checked(value) | 0);
|
125
|
+
} else if (typeof value === "string") {
|
126
|
+
if (typeof encodingOrOffset !== "string") {
|
127
|
+
encodingOrOffset = "utf8";
|
128
|
+
}
|
129
|
+
if (!Buffer.isEncoding(encodingOrOffset)) {
|
130
|
+
throw new TypeError("Unknown encoding: " + encodingOrOffset);
|
131
|
+
}
|
132
|
+
const length2 = Buffer.byteLength(value, encodingOrOffset) | 0;
|
133
|
+
super(length2);
|
134
|
+
const written = this.write(value, 0, this.length, encodingOrOffset);
|
135
|
+
if (written !== length2) {
|
136
|
+
throw new TypeError(
|
137
|
+
"Number of bytes written did not match expected length (wrote " + written + ", expected " + length2 + ")"
|
138
|
+
);
|
139
|
+
}
|
140
|
+
} else if (ArrayBuffer.isView(value)) {
|
141
|
+
if (Buffer._isInstance(value, Uint8Array)) {
|
142
|
+
const copy = new Uint8Array(value);
|
143
|
+
const array = copy.buffer;
|
144
|
+
const byteOffset = copy.byteOffset;
|
145
|
+
const length2 = copy.byteLength;
|
146
|
+
if (byteOffset < 0 || array.byteLength < byteOffset) {
|
147
|
+
throw new RangeError("offset is outside of buffer bounds");
|
148
|
+
}
|
149
|
+
if (array.byteLength < byteOffset + (length2 || 0)) {
|
150
|
+
throw new RangeError("length is outside of buffer bounds");
|
151
|
+
}
|
152
|
+
super(new Uint8Array(array, byteOffset, length2));
|
153
|
+
} else {
|
154
|
+
const array = value;
|
155
|
+
const length2 = array.length < 0 ? 0 : Buffer._checked(array.length) | 0;
|
156
|
+
super(new Uint8Array(length2));
|
157
|
+
for (let i = 0; i < length2; i++) {
|
158
|
+
this[i] = array[i] & 255;
|
159
|
+
}
|
160
|
+
}
|
161
|
+
} else if (value == null) {
|
162
|
+
throw new TypeError(
|
163
|
+
"The first argument must be one of type string, Buffer, ArrayBuffer, Array, or Array-like Object. Received type " + typeof value
|
164
|
+
);
|
165
|
+
} else if (Buffer._isInstance(value, ArrayBuffer) || value && Buffer._isInstance(value.buffer, ArrayBuffer)) {
|
166
|
+
const array = value;
|
167
|
+
const byteOffset = encodingOrOffset;
|
168
|
+
if (byteOffset < 0 || array.byteLength < byteOffset) {
|
169
|
+
throw new RangeError("offset is outside of buffer bounds");
|
170
|
+
}
|
171
|
+
if (array.byteLength < byteOffset + (length || 0)) {
|
172
|
+
throw new RangeError("length is outside of buffer bounds");
|
173
|
+
}
|
174
|
+
super(new Uint8Array(array, byteOffset, length));
|
175
|
+
} else if (Array.isArray(value)) {
|
176
|
+
const array = value;
|
177
|
+
const length2 = array.length < 0 ? 0 : Buffer._checked(array.length) | 0;
|
178
|
+
super(new Uint8Array(length2));
|
179
|
+
for (let i = 0; i < length2; i++) {
|
180
|
+
this[i] = array[i] & 255;
|
181
|
+
}
|
182
|
+
} else {
|
183
|
+
throw new TypeError("Unable to determine the correct way to allocate buffer for type " + typeof value);
|
184
|
+
}
|
185
|
+
}
|
186
|
+
/**
|
187
|
+
* Return JSON representation of the buffer.
|
188
|
+
*/
|
189
|
+
toJSON() {
|
190
|
+
return {
|
191
|
+
type: "Buffer",
|
192
|
+
data: Array.prototype.slice.call(this)
|
193
|
+
};
|
194
|
+
}
|
195
|
+
/**
|
196
|
+
* Writes `string` to the buffer at `offset` according to the character encoding in `encoding`. The `length`
|
197
|
+
* parameter is the number of bytes to write. If the buffer does not contain enough space to fit the entire string,
|
198
|
+
* only part of `string` will be written. However, partially encoded characters will not be written.
|
199
|
+
*
|
200
|
+
* @param string String to write to `buf`.
|
201
|
+
* @param offset Number of bytes to skip before starting to write `string`. Default: `0`.
|
202
|
+
* @param length Maximum number of bytes to write: Default: `buf.length - offset`.
|
203
|
+
* @param encoding The character encoding of `string`. Default: `utf8`.
|
204
|
+
*/
|
205
|
+
write(string, offset, length, encoding) {
|
206
|
+
if (typeof offset === "undefined") {
|
207
|
+
encoding = "utf8";
|
208
|
+
length = this.length;
|
209
|
+
offset = 0;
|
210
|
+
} else if (typeof length === "undefined" && typeof offset === "string") {
|
211
|
+
encoding = offset;
|
212
|
+
length = this.length;
|
213
|
+
offset = 0;
|
214
|
+
} else if (typeof offset === "number" && isFinite(offset)) {
|
215
|
+
offset = offset >>> 0;
|
216
|
+
if (typeof length === "number" && isFinite(length)) {
|
217
|
+
length = length >>> 0;
|
218
|
+
encoding ?? (encoding = "utf8");
|
219
|
+
} else if (typeof length === "string") {
|
220
|
+
encoding = length;
|
221
|
+
length = void 0;
|
222
|
+
}
|
223
|
+
} else {
|
224
|
+
throw new Error("Buffer.write(string, encoding, offset[, length]) is no longer supported");
|
225
|
+
}
|
226
|
+
const remaining = this.length - offset;
|
227
|
+
if (typeof length === "undefined" || length > remaining) {
|
228
|
+
length = remaining;
|
229
|
+
}
|
230
|
+
if (string.length > 0 && (length < 0 || offset < 0) || offset > this.length) {
|
231
|
+
throw new RangeError("Attempt to write outside buffer bounds");
|
232
|
+
}
|
233
|
+
encoding || (encoding = "utf8");
|
234
|
+
switch (Buffer._getEncoding(encoding)) {
|
235
|
+
case "hex":
|
236
|
+
return Buffer._hexWrite(this, string, offset, length);
|
237
|
+
case "utf8":
|
238
|
+
return Buffer._utf8Write(this, string, offset, length);
|
239
|
+
case "ascii":
|
240
|
+
case "latin1":
|
241
|
+
case "binary":
|
242
|
+
return Buffer._asciiWrite(this, string, offset, length);
|
243
|
+
case "ucs2":
|
244
|
+
case "utf16le":
|
245
|
+
return Buffer._ucs2Write(this, string, offset, length);
|
246
|
+
case "base64":
|
247
|
+
return Buffer._base64Write(this, string, offset, length);
|
248
|
+
}
|
249
|
+
}
|
250
|
+
/**
|
251
|
+
* Decodes the buffer to a string according to the specified character encoding.
|
252
|
+
* Passing `start` and `end` will decode only a subset of the buffer.
|
253
|
+
*
|
254
|
+
* Note that if the encoding is `utf8` and a byte sequence in the input is not valid UTF-8, then each invalid byte
|
255
|
+
* will be replaced with `U+FFFD`.
|
256
|
+
*
|
257
|
+
* @param encoding
|
258
|
+
* @param start
|
259
|
+
* @param end
|
260
|
+
*/
|
261
|
+
toString(encoding, start, end) {
|
262
|
+
const length = this.length;
|
263
|
+
if (length === 0) {
|
264
|
+
return "";
|
265
|
+
}
|
266
|
+
if (arguments.length === 0) {
|
267
|
+
return Buffer._utf8Slice(this, 0, length);
|
268
|
+
}
|
269
|
+
if (typeof start === "undefined" || start < 0) {
|
270
|
+
start = 0;
|
271
|
+
}
|
272
|
+
if (start > this.length) {
|
273
|
+
return "";
|
274
|
+
}
|
275
|
+
if (typeof end === "undefined" || end > this.length) {
|
276
|
+
end = this.length;
|
277
|
+
}
|
278
|
+
if (end <= 0) {
|
279
|
+
return "";
|
280
|
+
}
|
281
|
+
end >>>= 0;
|
282
|
+
start >>>= 0;
|
283
|
+
if (end <= start) {
|
284
|
+
return "";
|
285
|
+
}
|
286
|
+
if (!encoding) {
|
287
|
+
encoding = "utf8";
|
288
|
+
}
|
289
|
+
switch (Buffer._getEncoding(encoding)) {
|
290
|
+
case "hex":
|
291
|
+
return Buffer._hexSlice(this, start, end);
|
292
|
+
case "utf8":
|
293
|
+
return Buffer._utf8Slice(this, start, end);
|
294
|
+
case "ascii":
|
295
|
+
return Buffer._asciiSlice(this, start, end);
|
296
|
+
case "latin1":
|
297
|
+
case "binary":
|
298
|
+
return Buffer._latin1Slice(this, start, end);
|
299
|
+
case "ucs2":
|
300
|
+
case "utf16le":
|
301
|
+
return Buffer._utf16leSlice(this, start, end);
|
302
|
+
case "base64":
|
303
|
+
return Buffer._base64Slice(this, start, end);
|
304
|
+
}
|
305
|
+
}
|
306
|
+
/**
|
307
|
+
* Returns true if this buffer's is equal to the provided buffer, meaning they share the same exact data.
|
308
|
+
*
|
309
|
+
* @param otherBuffer
|
310
|
+
*/
|
311
|
+
equals(otherBuffer) {
|
312
|
+
if (!Buffer.isBuffer(otherBuffer)) {
|
313
|
+
throw new TypeError("Argument must be a Buffer");
|
314
|
+
}
|
315
|
+
if (this === otherBuffer) {
|
316
|
+
return true;
|
317
|
+
}
|
318
|
+
return Buffer.compare(this, otherBuffer) === 0;
|
319
|
+
}
|
320
|
+
/**
|
321
|
+
* Compares the buffer with `otherBuffer` and returns a number indicating whether the buffer comes before, after,
|
322
|
+
* or is the same as `otherBuffer` in sort order. Comparison is based on the actual sequence of bytes in each
|
323
|
+
* buffer.
|
324
|
+
*
|
325
|
+
* - `0` is returned if `otherBuffer` is the same as this buffer.
|
326
|
+
* - `1` is returned if `otherBuffer` should come before this buffer when sorted.
|
327
|
+
* - `-1` is returned if `otherBuffer` should come after this buffer when sorted.
|
328
|
+
*
|
329
|
+
* @param otherBuffer The buffer to compare to.
|
330
|
+
* @param targetStart The offset within `otherBuffer` at which to begin comparison.
|
331
|
+
* @param targetEnd The offset within `otherBuffer` at which to end comparison (exclusive).
|
332
|
+
* @param sourceStart The offset within this buffer at which to begin comparison.
|
333
|
+
* @param sourceEnd The offset within this buffer at which to end the comparison (exclusive).
|
334
|
+
*/
|
335
|
+
compare(otherBuffer, targetStart, targetEnd, sourceStart, sourceEnd) {
|
336
|
+
if (Buffer._isInstance(otherBuffer, Uint8Array)) {
|
337
|
+
otherBuffer = Buffer.from(otherBuffer, otherBuffer.byteOffset, otherBuffer.byteLength);
|
338
|
+
}
|
339
|
+
if (!Buffer.isBuffer(otherBuffer)) {
|
340
|
+
throw new TypeError("Argument must be a Buffer or Uint8Array");
|
341
|
+
}
|
342
|
+
targetStart ?? (targetStart = 0);
|
343
|
+
targetEnd ?? (targetEnd = otherBuffer ? otherBuffer.length : 0);
|
344
|
+
sourceStart ?? (sourceStart = 0);
|
345
|
+
sourceEnd ?? (sourceEnd = this.length);
|
346
|
+
if (targetStart < 0 || targetEnd > otherBuffer.length || sourceStart < 0 || sourceEnd > this.length) {
|
347
|
+
throw new RangeError("Out of range index");
|
348
|
+
}
|
349
|
+
if (sourceStart >= sourceEnd && targetStart >= targetEnd) {
|
350
|
+
return 0;
|
351
|
+
}
|
352
|
+
if (sourceStart >= sourceEnd) {
|
353
|
+
return -1;
|
354
|
+
}
|
355
|
+
if (targetStart >= targetEnd) {
|
356
|
+
return 1;
|
357
|
+
}
|
358
|
+
targetStart >>>= 0;
|
359
|
+
targetEnd >>>= 0;
|
360
|
+
sourceStart >>>= 0;
|
361
|
+
sourceEnd >>>= 0;
|
362
|
+
if (this === otherBuffer) {
|
363
|
+
return 0;
|
364
|
+
}
|
365
|
+
let x = sourceEnd - sourceStart;
|
366
|
+
let y = targetEnd - targetStart;
|
367
|
+
const len = Math.min(x, y);
|
368
|
+
const thisCopy = this.slice(sourceStart, sourceEnd);
|
369
|
+
const targetCopy = otherBuffer.slice(targetStart, targetEnd);
|
370
|
+
for (let i = 0; i < len; ++i) {
|
371
|
+
if (thisCopy[i] !== targetCopy[i]) {
|
372
|
+
x = thisCopy[i];
|
373
|
+
y = targetCopy[i];
|
374
|
+
break;
|
375
|
+
}
|
376
|
+
}
|
377
|
+
if (x < y) return -1;
|
378
|
+
if (y < x) return 1;
|
379
|
+
return 0;
|
380
|
+
}
|
381
|
+
/**
|
382
|
+
* Copies data from a region of this buffer to a region in `targetBuffer`, even if the `targetBuffer` memory
|
383
|
+
* region overlaps with this buffer.
|
384
|
+
*
|
385
|
+
* @param targetBuffer The target buffer to copy into.
|
386
|
+
* @param targetStart The offset within `targetBuffer` at which to begin writing.
|
387
|
+
* @param sourceStart The offset within this buffer at which to begin copying.
|
388
|
+
* @param sourceEnd The offset within this buffer at which to end copying (exclusive).
|
389
|
+
*/
|
390
|
+
copy(targetBuffer, targetStart, sourceStart, sourceEnd) {
|
391
|
+
if (!Buffer.isBuffer(targetBuffer)) throw new TypeError("argument should be a Buffer");
|
392
|
+
if (!sourceStart) sourceStart = 0;
|
393
|
+
if (!targetStart) targetStart = 0;
|
394
|
+
if (!sourceEnd && sourceEnd !== 0) sourceEnd = this.length;
|
395
|
+
if (targetStart >= targetBuffer.length) targetStart = targetBuffer.length;
|
396
|
+
if (!targetStart) targetStart = 0;
|
397
|
+
if (sourceEnd > 0 && sourceEnd < sourceStart) sourceEnd = sourceStart;
|
398
|
+
if (sourceEnd === sourceStart) return 0;
|
399
|
+
if (targetBuffer.length === 0 || this.length === 0) return 0;
|
400
|
+
if (targetStart < 0) {
|
401
|
+
throw new RangeError("targetStart out of bounds");
|
402
|
+
}
|
403
|
+
if (sourceStart < 0 || sourceStart >= this.length) throw new RangeError("Index out of range");
|
404
|
+
if (sourceEnd < 0) throw new RangeError("sourceEnd out of bounds");
|
405
|
+
if (sourceEnd > this.length) sourceEnd = this.length;
|
406
|
+
if (targetBuffer.length - targetStart < sourceEnd - sourceStart) {
|
407
|
+
sourceEnd = targetBuffer.length - targetStart + sourceStart;
|
408
|
+
}
|
409
|
+
const len = sourceEnd - sourceStart;
|
410
|
+
if (this === targetBuffer && typeof Uint8Array.prototype.copyWithin === "function") {
|
411
|
+
this.copyWithin(targetStart, sourceStart, sourceEnd);
|
412
|
+
} else {
|
413
|
+
Uint8Array.prototype.set.call(targetBuffer, this.subarray(sourceStart, sourceEnd), targetStart);
|
414
|
+
}
|
415
|
+
return len;
|
416
|
+
}
|
417
|
+
/**
|
418
|
+
* Returns a new `Buffer` that references the same memory as the original, but offset and cropped by the `start`
|
419
|
+
* and `end` indices. This is the same behavior as `buf.subarray()`.
|
420
|
+
*
|
421
|
+
* This method is not compatible with the `Uint8Array.prototype.slice()`, which is a superclass of Buffer. To copy
|
422
|
+
* the slice, use `Uint8Array.prototype.slice()`.
|
423
|
+
*
|
424
|
+
* @param start
|
425
|
+
* @param end
|
426
|
+
*/
|
427
|
+
slice(start, end) {
|
428
|
+
if (!start) {
|
429
|
+
start = 0;
|
430
|
+
}
|
431
|
+
const len = this.length;
|
432
|
+
start = ~~start;
|
433
|
+
end = end === void 0 ? len : ~~end;
|
434
|
+
if (start < 0) {
|
435
|
+
start += len;
|
436
|
+
if (start < 0) {
|
437
|
+
start = 0;
|
438
|
+
}
|
439
|
+
} else if (start > len) {
|
440
|
+
start = len;
|
441
|
+
}
|
442
|
+
if (end < 0) {
|
443
|
+
end += len;
|
444
|
+
if (end < 0) {
|
445
|
+
end = 0;
|
446
|
+
}
|
447
|
+
} else if (end > len) {
|
448
|
+
end = len;
|
449
|
+
}
|
450
|
+
if (end < start) {
|
451
|
+
end = start;
|
452
|
+
}
|
453
|
+
const newBuf = this.subarray(start, end);
|
454
|
+
Object.setPrototypeOf(newBuf, Buffer.prototype);
|
455
|
+
return newBuf;
|
456
|
+
}
|
457
|
+
/**
|
458
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits
|
459
|
+
* of accuracy. Behavior is undefined when value is anything other than an unsigned integer.
|
460
|
+
*
|
461
|
+
* @param value Number to write.
|
462
|
+
* @param offset Number of bytes to skip before starting to write.
|
463
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
464
|
+
* @param noAssert
|
465
|
+
* @returns `offset` plus the number of bytes written.
|
466
|
+
*/
|
467
|
+
writeUIntLE(value, offset, byteLength, noAssert) {
|
468
|
+
value = +value;
|
469
|
+
offset = offset >>> 0;
|
470
|
+
byteLength = byteLength >>> 0;
|
471
|
+
if (!noAssert) {
|
472
|
+
const maxBytes = Math.pow(2, 8 * byteLength) - 1;
|
473
|
+
Buffer._checkInt(this, value, offset, byteLength, maxBytes, 0);
|
474
|
+
}
|
475
|
+
let mul = 1;
|
476
|
+
let i = 0;
|
477
|
+
this[offset] = value & 255;
|
478
|
+
while (++i < byteLength && (mul *= 256)) {
|
479
|
+
this[offset + i] = value / mul & 255;
|
480
|
+
}
|
481
|
+
return offset + byteLength;
|
482
|
+
}
|
483
|
+
/**
|
484
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits of
|
485
|
+
* accuracy. Behavior is undefined when `value` is anything other than an unsigned integer.
|
486
|
+
*
|
487
|
+
* @param value Number to write.
|
488
|
+
* @param offset Number of bytes to skip before starting to write.
|
489
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
490
|
+
* @param noAssert
|
491
|
+
* @returns `offset` plus the number of bytes written.
|
492
|
+
*/
|
493
|
+
writeUIntBE(value, offset, byteLength, noAssert) {
|
494
|
+
value = +value;
|
495
|
+
offset = offset >>> 0;
|
496
|
+
byteLength = byteLength >>> 0;
|
497
|
+
if (!noAssert) {
|
498
|
+
const maxBytes = Math.pow(2, 8 * byteLength) - 1;
|
499
|
+
Buffer._checkInt(this, value, offset, byteLength, maxBytes, 0);
|
500
|
+
}
|
501
|
+
let i = byteLength - 1;
|
502
|
+
let mul = 1;
|
503
|
+
this[offset + i] = value & 255;
|
504
|
+
while (--i >= 0 && (mul *= 256)) {
|
505
|
+
this[offset + i] = value / mul & 255;
|
506
|
+
}
|
507
|
+
return offset + byteLength;
|
508
|
+
}
|
509
|
+
/**
|
510
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits
|
511
|
+
* of accuracy. Behavior is undefined when `value` is anything other than a signed integer.
|
512
|
+
*
|
513
|
+
* @param value Number to write.
|
514
|
+
* @param offset Number of bytes to skip before starting to write.
|
515
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
516
|
+
* @param noAssert
|
517
|
+
* @returns `offset` plus the number of bytes written.
|
518
|
+
*/
|
519
|
+
writeIntLE(value, offset, byteLength, noAssert) {
|
520
|
+
value = +value;
|
521
|
+
offset = offset >>> 0;
|
522
|
+
if (!noAssert) {
|
523
|
+
const limit = Math.pow(2, 8 * byteLength - 1);
|
524
|
+
Buffer._checkInt(this, value, offset, byteLength, limit - 1, -limit);
|
525
|
+
}
|
526
|
+
let i = 0;
|
527
|
+
let mul = 1;
|
528
|
+
let sub = 0;
|
529
|
+
this[offset] = value & 255;
|
530
|
+
while (++i < byteLength && (mul *= 256)) {
|
531
|
+
if (value < 0 && sub === 0 && this[offset + i - 1] !== 0) {
|
532
|
+
sub = 1;
|
533
|
+
}
|
534
|
+
this[offset + i] = (value / mul >> 0) - sub & 255;
|
535
|
+
}
|
536
|
+
return offset + byteLength;
|
537
|
+
}
|
538
|
+
/**
|
539
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits
|
540
|
+
* of accuracy. Behavior is undefined when `value` is anything other than a signed integer.
|
541
|
+
*
|
542
|
+
* @param value Number to write.
|
543
|
+
* @param offset Number of bytes to skip before starting to write.
|
544
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
545
|
+
* @param noAssert
|
546
|
+
* @returns `offset` plus the number of bytes written.
|
547
|
+
*/
|
548
|
+
writeIntBE(value, offset, byteLength, noAssert) {
|
549
|
+
value = +value;
|
550
|
+
offset = offset >>> 0;
|
551
|
+
if (!noAssert) {
|
552
|
+
const limit = Math.pow(2, 8 * byteLength - 1);
|
553
|
+
Buffer._checkInt(this, value, offset, byteLength, limit - 1, -limit);
|
554
|
+
}
|
555
|
+
let i = byteLength - 1;
|
556
|
+
let mul = 1;
|
557
|
+
let sub = 0;
|
558
|
+
this[offset + i] = value & 255;
|
559
|
+
while (--i >= 0 && (mul *= 256)) {
|
560
|
+
if (value < 0 && sub === 0 && this[offset + i + 1] !== 0) {
|
561
|
+
sub = 1;
|
562
|
+
}
|
563
|
+
this[offset + i] = (value / mul >> 0) - sub & 255;
|
564
|
+
}
|
565
|
+
return offset + byteLength;
|
566
|
+
}
|
567
|
+
/**
|
568
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an
|
569
|
+
* unsigned, little-endian integer supporting up to 48 bits of accuracy.
|
570
|
+
*
|
571
|
+
* @param offset Number of bytes to skip before starting to read.
|
572
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
573
|
+
* @param noAssert
|
574
|
+
*/
|
575
|
+
readUIntLE(offset, byteLength, noAssert) {
|
576
|
+
offset = offset >>> 0;
|
577
|
+
byteLength = byteLength >>> 0;
|
578
|
+
if (!noAssert) {
|
579
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
580
|
+
}
|
581
|
+
let val = this[offset];
|
582
|
+
let mul = 1;
|
583
|
+
let i = 0;
|
584
|
+
while (++i < byteLength && (mul *= 256)) {
|
585
|
+
val += this[offset + i] * mul;
|
586
|
+
}
|
587
|
+
return val;
|
588
|
+
}
|
589
|
+
/**
|
590
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an
|
591
|
+
* unsigned, big-endian integer supporting up to 48 bits of accuracy.
|
592
|
+
*
|
593
|
+
* @param offset Number of bytes to skip before starting to read.
|
594
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
595
|
+
* @param noAssert
|
596
|
+
*/
|
597
|
+
readUIntBE(offset, byteLength, noAssert) {
|
598
|
+
offset = offset >>> 0;
|
599
|
+
byteLength = byteLength >>> 0;
|
600
|
+
if (!noAssert) {
|
601
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
602
|
+
}
|
603
|
+
let val = this[offset + --byteLength];
|
604
|
+
let mul = 1;
|
605
|
+
while (byteLength > 0 && (mul *= 256)) {
|
606
|
+
val += this[offset + --byteLength] * mul;
|
607
|
+
}
|
608
|
+
return val;
|
609
|
+
}
|
610
|
+
/**
|
611
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a
|
612
|
+
* little-endian, two's complement signed value supporting up to 48 bits of accuracy.
|
613
|
+
*
|
614
|
+
* @param offset Number of bytes to skip before starting to read.
|
615
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
616
|
+
* @param noAssert
|
617
|
+
*/
|
618
|
+
readIntLE(offset, byteLength, noAssert) {
|
619
|
+
offset = offset >>> 0;
|
620
|
+
byteLength = byteLength >>> 0;
|
621
|
+
if (!noAssert) {
|
622
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
623
|
+
}
|
624
|
+
let val = this[offset];
|
625
|
+
let mul = 1;
|
626
|
+
let i = 0;
|
627
|
+
while (++i < byteLength && (mul *= 256)) {
|
628
|
+
val += this[offset + i] * mul;
|
629
|
+
}
|
630
|
+
mul *= 128;
|
631
|
+
if (val >= mul) {
|
632
|
+
val -= Math.pow(2, 8 * byteLength);
|
633
|
+
}
|
634
|
+
return val;
|
635
|
+
}
|
636
|
+
/**
|
637
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a
|
638
|
+
* big-endian, two's complement signed value supporting up to 48 bits of accuracy.
|
639
|
+
*
|
640
|
+
* @param offset Number of bytes to skip before starting to read.
|
641
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
642
|
+
* @param noAssert
|
643
|
+
*/
|
644
|
+
readIntBE(offset, byteLength, noAssert) {
|
645
|
+
offset = offset >>> 0;
|
646
|
+
byteLength = byteLength >>> 0;
|
647
|
+
if (!noAssert) {
|
648
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
649
|
+
}
|
650
|
+
let i = byteLength;
|
651
|
+
let mul = 1;
|
652
|
+
let val = this[offset + --i];
|
653
|
+
while (i > 0 && (mul *= 256)) {
|
654
|
+
val += this[offset + --i] * mul;
|
655
|
+
}
|
656
|
+
mul *= 128;
|
657
|
+
if (val >= mul) {
|
658
|
+
val -= Math.pow(2, 8 * byteLength);
|
659
|
+
}
|
660
|
+
return val;
|
661
|
+
}
|
662
|
+
/**
|
663
|
+
* Reads an unsigned 8-bit integer from `buf` at the specified `offset`.
|
664
|
+
*
|
665
|
+
* @param offset Number of bytes to skip before starting to read.
|
666
|
+
* @param noAssert
|
667
|
+
*/
|
668
|
+
readUInt8(offset, noAssert) {
|
669
|
+
offset = offset >>> 0;
|
670
|
+
if (!noAssert) {
|
671
|
+
Buffer._checkOffset(offset, 1, this.length);
|
672
|
+
}
|
673
|
+
return this[offset];
|
674
|
+
}
|
675
|
+
/**
|
676
|
+
* Reads an unsigned, little-endian 16-bit integer from `buf` at the specified `offset`.
|
677
|
+
*
|
678
|
+
* @param offset Number of bytes to skip before starting to read.
|
679
|
+
* @param noAssert
|
680
|
+
*/
|
681
|
+
readUInt16LE(offset, noAssert) {
|
682
|
+
offset = offset >>> 0;
|
683
|
+
if (!noAssert) {
|
684
|
+
Buffer._checkOffset(offset, 2, this.length);
|
685
|
+
}
|
686
|
+
return this[offset] | this[offset + 1] << 8;
|
687
|
+
}
|
688
|
+
/**
|
689
|
+
* Reads an unsigned, big-endian 16-bit integer from `buf` at the specified `offset`.
|
690
|
+
*
|
691
|
+
* @param offset Number of bytes to skip before starting to read.
|
692
|
+
* @param noAssert
|
693
|
+
*/
|
694
|
+
readUInt16BE(offset, noAssert) {
|
695
|
+
offset = offset >>> 0;
|
696
|
+
if (!noAssert) {
|
697
|
+
Buffer._checkOffset(offset, 2, this.length);
|
698
|
+
}
|
699
|
+
return this[offset] << 8 | this[offset + 1];
|
700
|
+
}
|
701
|
+
/**
|
702
|
+
* Reads an unsigned, little-endian 32-bit integer from `buf` at the specified `offset`.
|
703
|
+
*
|
704
|
+
* @param offset Number of bytes to skip before starting to read.
|
705
|
+
* @param noAssert
|
706
|
+
*/
|
707
|
+
readUInt32LE(offset, noAssert) {
|
708
|
+
offset = offset >>> 0;
|
709
|
+
if (!noAssert) {
|
710
|
+
Buffer._checkOffset(offset, 4, this.length);
|
711
|
+
}
|
712
|
+
return (this[offset] | this[offset + 1] << 8 | this[offset + 2] << 16) + this[offset + 3] * 16777216;
|
713
|
+
}
|
714
|
+
/**
|
715
|
+
* Reads an unsigned, big-endian 32-bit integer from `buf` at the specified `offset`.
|
716
|
+
*
|
717
|
+
* @param offset Number of bytes to skip before starting to read.
|
718
|
+
* @param noAssert
|
719
|
+
*/
|
720
|
+
readUInt32BE(offset, noAssert) {
|
721
|
+
offset = offset >>> 0;
|
722
|
+
if (!noAssert) {
|
723
|
+
Buffer._checkOffset(offset, 4, this.length);
|
724
|
+
}
|
725
|
+
return this[offset] * 16777216 + (this[offset + 1] << 16 | this[offset + 2] << 8 | this[offset + 3]);
|
726
|
+
}
|
727
|
+
/**
|
728
|
+
* Reads a signed 8-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer` are interpreted
|
729
|
+
* as two's complement signed values.
|
730
|
+
*
|
731
|
+
* @param offset Number of bytes to skip before starting to read.
|
732
|
+
* @param noAssert
|
733
|
+
*/
|
734
|
+
readInt8(offset, noAssert) {
|
735
|
+
offset = offset >>> 0;
|
736
|
+
if (!noAssert) {
|
737
|
+
Buffer._checkOffset(offset, 1, this.length);
|
738
|
+
}
|
739
|
+
if (!(this[offset] & 128)) {
|
740
|
+
return this[offset];
|
741
|
+
}
|
742
|
+
return (255 - this[offset] + 1) * -1;
|
743
|
+
}
|
744
|
+
/**
|
745
|
+
* Reads a signed, little-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
746
|
+
* are interpreted as two's complement signed values.
|
747
|
+
*
|
748
|
+
* @param offset Number of bytes to skip before starting to read.
|
749
|
+
* @param noAssert
|
750
|
+
*/
|
751
|
+
readInt16LE(offset, noAssert) {
|
752
|
+
offset = offset >>> 0;
|
753
|
+
if (!noAssert) {
|
754
|
+
Buffer._checkOffset(offset, 2, this.length);
|
755
|
+
}
|
756
|
+
const val = this[offset] | this[offset + 1] << 8;
|
757
|
+
return val & 32768 ? val | 4294901760 : val;
|
758
|
+
}
|
759
|
+
/**
|
760
|
+
* Reads a signed, big-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
761
|
+
* are interpreted as two's complement signed values.
|
762
|
+
*
|
763
|
+
* @param offset Number of bytes to skip before starting to read.
|
764
|
+
* @param noAssert
|
765
|
+
*/
|
766
|
+
readInt16BE(offset, noAssert) {
|
767
|
+
offset = offset >>> 0;
|
768
|
+
if (!noAssert) {
|
769
|
+
Buffer._checkOffset(offset, 2, this.length);
|
770
|
+
}
|
771
|
+
const val = this[offset + 1] | this[offset] << 8;
|
772
|
+
return val & 32768 ? val | 4294901760 : val;
|
773
|
+
}
|
774
|
+
/**
|
775
|
+
* Reads a signed, little-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
776
|
+
* are interpreted as two's complement signed values.
|
777
|
+
*
|
778
|
+
* @param offset Number of bytes to skip before starting to read.
|
779
|
+
* @param noAssert
|
780
|
+
*/
|
781
|
+
readInt32LE(offset, noAssert) {
|
782
|
+
offset = offset >>> 0;
|
783
|
+
if (!noAssert) {
|
784
|
+
Buffer._checkOffset(offset, 4, this.length);
|
785
|
+
}
|
786
|
+
return this[offset] | this[offset + 1] << 8 | this[offset + 2] << 16 | this[offset + 3] << 24;
|
787
|
+
}
|
788
|
+
/**
|
789
|
+
* Reads a signed, big-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
790
|
+
* are interpreted as two's complement signed values.
|
791
|
+
*
|
792
|
+
* @param offset Number of bytes to skip before starting to read.
|
793
|
+
* @param noAssert
|
794
|
+
*/
|
795
|
+
readInt32BE(offset, noAssert) {
|
796
|
+
offset = offset >>> 0;
|
797
|
+
if (!noAssert) {
|
798
|
+
Buffer._checkOffset(offset, 4, this.length);
|
799
|
+
}
|
800
|
+
return this[offset] << 24 | this[offset + 1] << 16 | this[offset + 2] << 8 | this[offset + 3];
|
801
|
+
}
|
802
|
+
/**
|
803
|
+
* Interprets `buf` as an array of unsigned 16-bit integers and swaps the byte order in-place.
|
804
|
+
* Throws a `RangeError` if `buf.length` is not a multiple of 2.
|
805
|
+
*/
|
806
|
+
swap16() {
|
807
|
+
const len = this.length;
|
808
|
+
if (len % 2 !== 0) {
|
809
|
+
throw new RangeError("Buffer size must be a multiple of 16-bits");
|
810
|
+
}
|
811
|
+
for (let i = 0; i < len; i += 2) {
|
812
|
+
this._swap(this, i, i + 1);
|
813
|
+
}
|
814
|
+
return this;
|
815
|
+
}
|
816
|
+
/**
|
817
|
+
* Interprets `buf` as an array of unsigned 32-bit integers and swaps the byte order in-place.
|
818
|
+
* Throws a `RangeError` if `buf.length` is not a multiple of 4.
|
819
|
+
*/
|
820
|
+
swap32() {
|
821
|
+
const len = this.length;
|
822
|
+
if (len % 4 !== 0) {
|
823
|
+
throw new RangeError("Buffer size must be a multiple of 32-bits");
|
824
|
+
}
|
825
|
+
for (let i = 0; i < len; i += 4) {
|
826
|
+
this._swap(this, i, i + 3);
|
827
|
+
this._swap(this, i + 1, i + 2);
|
828
|
+
}
|
829
|
+
return this;
|
830
|
+
}
|
831
|
+
/**
|
832
|
+
* Interprets `buf` as an array of unsigned 64-bit integers and swaps the byte order in-place.
|
833
|
+
* Throws a `RangeError` if `buf.length` is not a multiple of 8.
|
834
|
+
*/
|
835
|
+
swap64() {
|
836
|
+
const len = this.length;
|
837
|
+
if (len % 8 !== 0) {
|
838
|
+
throw new RangeError("Buffer size must be a multiple of 64-bits");
|
839
|
+
}
|
840
|
+
for (let i = 0; i < len; i += 8) {
|
841
|
+
this._swap(this, i, i + 7);
|
842
|
+
this._swap(this, i + 1, i + 6);
|
843
|
+
this._swap(this, i + 2, i + 5);
|
844
|
+
this._swap(this, i + 3, i + 4);
|
845
|
+
}
|
846
|
+
return this;
|
847
|
+
}
|
848
|
+
/**
|
849
|
+
* Swaps two octets.
|
850
|
+
*
|
851
|
+
* @param b
|
852
|
+
* @param n
|
853
|
+
* @param m
|
854
|
+
*/
|
855
|
+
_swap(b, n, m) {
|
856
|
+
const i = b[n];
|
857
|
+
b[n] = b[m];
|
858
|
+
b[m] = i;
|
859
|
+
}
|
860
|
+
/**
|
861
|
+
* Writes `value` to `buf` at the specified `offset`. The `value` must be a valid unsigned 8-bit integer.
|
862
|
+
* Behavior is undefined when `value` is anything other than an unsigned 8-bit integer.
|
863
|
+
*
|
864
|
+
* @param value Number to write.
|
865
|
+
* @param offset Number of bytes to skip before starting to write.
|
866
|
+
* @param noAssert
|
867
|
+
* @returns `offset` plus the number of bytes written.
|
868
|
+
*/
|
869
|
+
writeUInt8(value, offset, noAssert) {
|
870
|
+
value = +value;
|
871
|
+
offset = offset >>> 0;
|
872
|
+
if (!noAssert) {
|
873
|
+
Buffer._checkInt(this, value, offset, 1, 255, 0);
|
874
|
+
}
|
875
|
+
this[offset] = value & 255;
|
876
|
+
return offset + 1;
|
877
|
+
}
|
878
|
+
/**
|
879
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 16-bit
|
880
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 16-bit integer.
|
881
|
+
*
|
882
|
+
* @param value Number to write.
|
883
|
+
* @param offset Number of bytes to skip before starting to write.
|
884
|
+
* @param noAssert
|
885
|
+
* @returns `offset` plus the number of bytes written.
|
886
|
+
*/
|
887
|
+
writeUInt16LE(value, offset, noAssert) {
|
888
|
+
value = +value;
|
889
|
+
offset = offset >>> 0;
|
890
|
+
if (!noAssert) {
|
891
|
+
Buffer._checkInt(this, value, offset, 2, 65535, 0);
|
892
|
+
}
|
893
|
+
this[offset] = value & 255;
|
894
|
+
this[offset + 1] = value >>> 8;
|
895
|
+
return offset + 2;
|
896
|
+
}
|
897
|
+
/**
|
898
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 16-bit
|
899
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 16-bit integer.
|
900
|
+
*
|
901
|
+
* @param value Number to write.
|
902
|
+
* @param offset Number of bytes to skip before starting to write.
|
903
|
+
* @param noAssert
|
904
|
+
* @returns `offset` plus the number of bytes written.
|
905
|
+
*/
|
906
|
+
writeUInt16BE(value, offset, noAssert) {
|
907
|
+
value = +value;
|
908
|
+
offset = offset >>> 0;
|
909
|
+
if (!noAssert) {
|
910
|
+
Buffer._checkInt(this, value, offset, 2, 65535, 0);
|
911
|
+
}
|
912
|
+
this[offset] = value >>> 8;
|
913
|
+
this[offset + 1] = value & 255;
|
914
|
+
return offset + 2;
|
915
|
+
}
|
916
|
+
/**
|
917
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 32-bit
|
918
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 32-bit integer.
|
919
|
+
*
|
920
|
+
* @param value Number to write.
|
921
|
+
* @param offset Number of bytes to skip before starting to write.
|
922
|
+
* @param noAssert
|
923
|
+
* @returns `offset` plus the number of bytes written.
|
924
|
+
*/
|
925
|
+
writeUInt32LE(value, offset, noAssert) {
|
926
|
+
value = +value;
|
927
|
+
offset = offset >>> 0;
|
928
|
+
if (!noAssert) {
|
929
|
+
Buffer._checkInt(this, value, offset, 4, 4294967295, 0);
|
930
|
+
}
|
931
|
+
this[offset + 3] = value >>> 24;
|
932
|
+
this[offset + 2] = value >>> 16;
|
933
|
+
this[offset + 1] = value >>> 8;
|
934
|
+
this[offset] = value & 255;
|
935
|
+
return offset + 4;
|
936
|
+
}
|
937
|
+
/**
|
938
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 32-bit
|
939
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 32-bit integer.
|
940
|
+
*
|
941
|
+
* @param value Number to write.
|
942
|
+
* @param offset Number of bytes to skip before starting to write.
|
943
|
+
* @param noAssert
|
944
|
+
* @returns `offset` plus the number of bytes written.
|
945
|
+
*/
|
946
|
+
writeUInt32BE(value, offset, noAssert) {
|
947
|
+
value = +value;
|
948
|
+
offset = offset >>> 0;
|
949
|
+
if (!noAssert) {
|
950
|
+
Buffer._checkInt(this, value, offset, 4, 4294967295, 0);
|
951
|
+
}
|
952
|
+
this[offset] = value >>> 24;
|
953
|
+
this[offset + 1] = value >>> 16;
|
954
|
+
this[offset + 2] = value >>> 8;
|
955
|
+
this[offset + 3] = value & 255;
|
956
|
+
return offset + 4;
|
957
|
+
}
|
958
|
+
/**
|
959
|
+
* Writes `value` to `buf` at the specified `offset`. The `value` must be a valid signed 8-bit integer.
|
960
|
+
* Behavior is undefined when `value` is anything other than a signed 8-bit integer.
|
961
|
+
*
|
962
|
+
* @param value Number to write.
|
963
|
+
* @param offset Number of bytes to skip before starting to write.
|
964
|
+
* @param noAssert
|
965
|
+
* @returns `offset` plus the number of bytes written.
|
966
|
+
*/
|
967
|
+
writeInt8(value, offset, noAssert) {
|
968
|
+
value = +value;
|
969
|
+
offset = offset >>> 0;
|
970
|
+
if (!noAssert) {
|
971
|
+
Buffer._checkInt(this, value, offset, 1, 127, -128);
|
972
|
+
}
|
973
|
+
if (value < 0) {
|
974
|
+
value = 255 + value + 1;
|
975
|
+
}
|
976
|
+
this[offset] = value & 255;
|
977
|
+
return offset + 1;
|
978
|
+
}
|
979
|
+
/**
|
980
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 16-bit
|
981
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 16-bit integer.
|
982
|
+
*
|
983
|
+
* @param value Number to write.
|
984
|
+
* @param offset Number of bytes to skip before starting to write.
|
985
|
+
* @param noAssert
|
986
|
+
* @returns `offset` plus the number of bytes written.
|
987
|
+
*/
|
988
|
+
writeInt16LE(value, offset, noAssert) {
|
989
|
+
value = +value;
|
990
|
+
offset = offset >>> 0;
|
991
|
+
if (!noAssert) {
|
992
|
+
Buffer._checkInt(this, value, offset, 2, 32767, -32768);
|
993
|
+
}
|
994
|
+
this[offset] = value & 255;
|
995
|
+
this[offset + 1] = value >>> 8;
|
996
|
+
return offset + 2;
|
997
|
+
}
|
998
|
+
/**
|
999
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 16-bit
|
1000
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 16-bit integer.
|
1001
|
+
*
|
1002
|
+
* @param value Number to write.
|
1003
|
+
* @param offset Number of bytes to skip before starting to write.
|
1004
|
+
* @param noAssert
|
1005
|
+
* @returns `offset` plus the number of bytes written.
|
1006
|
+
*/
|
1007
|
+
writeInt16BE(value, offset, noAssert) {
|
1008
|
+
value = +value;
|
1009
|
+
offset = offset >>> 0;
|
1010
|
+
if (!noAssert) {
|
1011
|
+
Buffer._checkInt(this, value, offset, 2, 32767, -32768);
|
1012
|
+
}
|
1013
|
+
this[offset] = value >>> 8;
|
1014
|
+
this[offset + 1] = value & 255;
|
1015
|
+
return offset + 2;
|
1016
|
+
}
|
1017
|
+
/**
|
1018
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 32-bit
|
1019
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 32-bit integer.
|
1020
|
+
*
|
1021
|
+
* @param value Number to write.
|
1022
|
+
* @param offset Number of bytes to skip before starting to write.
|
1023
|
+
* @param noAssert
|
1024
|
+
* @returns `offset` plus the number of bytes written.
|
1025
|
+
*/
|
1026
|
+
writeInt32LE(value, offset, noAssert) {
|
1027
|
+
value = +value;
|
1028
|
+
offset = offset >>> 0;
|
1029
|
+
if (!noAssert) {
|
1030
|
+
Buffer._checkInt(this, value, offset, 4, 2147483647, -2147483648);
|
1031
|
+
}
|
1032
|
+
this[offset] = value & 255;
|
1033
|
+
this[offset + 1] = value >>> 8;
|
1034
|
+
this[offset + 2] = value >>> 16;
|
1035
|
+
this[offset + 3] = value >>> 24;
|
1036
|
+
return offset + 4;
|
1037
|
+
}
|
1038
|
+
/**
|
1039
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 32-bit
|
1040
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 32-bit integer.
|
1041
|
+
*
|
1042
|
+
* @param value Number to write.
|
1043
|
+
* @param offset Number of bytes to skip before starting to write.
|
1044
|
+
* @param noAssert
|
1045
|
+
* @returns `offset` plus the number of bytes written.
|
1046
|
+
*/
|
1047
|
+
writeInt32BE(value, offset, noAssert) {
|
1048
|
+
value = +value;
|
1049
|
+
offset = offset >>> 0;
|
1050
|
+
if (!noAssert) {
|
1051
|
+
Buffer._checkInt(this, value, offset, 4, 2147483647, -2147483648);
|
1052
|
+
}
|
1053
|
+
if (value < 0) {
|
1054
|
+
value = 4294967295 + value + 1;
|
1055
|
+
}
|
1056
|
+
this[offset] = value >>> 24;
|
1057
|
+
this[offset + 1] = value >>> 16;
|
1058
|
+
this[offset + 2] = value >>> 8;
|
1059
|
+
this[offset + 3] = value & 255;
|
1060
|
+
return offset + 4;
|
1061
|
+
}
|
1062
|
+
/**
 * Fills `buf` with the specified `value`. If the `offset` and `end` are not given, the entire `buf` will be
 * filled. The `value` is coerced to a `uint32` value if it is not a string, `Buffer`, or integer. If the resulting
 * integer is greater than `255` (decimal), then `buf` will be filled with `value & 255`.
 *
 * If the final write of a `fill()` operation falls on a multi-byte character, then only the bytes of that
 * character that fit into `buf` are written.
 *
 * If `value` contains invalid characters, it is truncated; if no valid fill data remains, an exception is thrown.
 *
 * @param value
 * @param offset Start index, or the encoding when `value` is a string and this argument is a string.
 * @param end End index (exclusive), or the encoding when it is a string.
 * @param encoding
 */
fill(value, offset, end, encoding) {
  if (typeof value === "string") {
    // Support the overloads fill(str, encoding) and fill(str, offset, encoding)
    // by shifting string arguments into `encoding`.
    if (typeof offset === "string") {
      encoding = offset;
      offset = 0;
      end = this.length;
    } else if (typeof end === "string") {
      encoding = end;
      end = this.length;
    }
    if (encoding !== void 0 && typeof encoding !== "string") {
      throw new TypeError("encoding must be a string");
    }
    if (typeof encoding === "string" && !Buffer.isEncoding(encoding)) {
      throw new TypeError("Unknown encoding: " + encoding);
    }
    // Fast path: a single ASCII character fills as a plain byte value.
    if (value.length === 1) {
      const code = value.charCodeAt(0);
      if (encoding === "utf8" && code < 128) {
        value = code;
      }
    }
  } else if (typeof value === "number") {
    value = value & 255;
  } else if (typeof value === "boolean") {
    value = Number(value);
  }
  // Default the range before validating it.
  offset ?? (offset = 0);
  end ?? (end = this.length);
  if (offset < 0 || this.length < offset || this.length < end) {
    throw new RangeError("Out of range index");
  }
  // Empty or inverted range is a no-op.
  if (end <= offset) {
    return this;
  }
  offset = offset >>> 0;
  end = end === void 0 ? this.length : end >>> 0;
  value || (value = 0);
  let i;
  if (typeof value === "number") {
    for (i = offset; i < end; ++i) {
      this[i] = value;
    }
  } else {
    // String/Buffer fill: repeat the encoded bytes across the range.
    const bytes = Buffer.isBuffer(value) ? value : Buffer.from(value, encoding);
    const len = bytes.length;
    if (len === 0) {
      throw new TypeError('The value "' + value + '" is invalid for argument "value"');
    }
    for (i = 0; i < end - offset; ++i) {
      this[i + offset] = bytes[i % len];
    }
  }
  return this;
}
|
1130
|
+
/**
|
1131
|
+
* Returns the index of the specified value.
|
1132
|
+
*
|
1133
|
+
* If `value` is:
|
1134
|
+
* - a string, `value` is interpreted according to the character encoding in `encoding`.
|
1135
|
+
* - a `Buffer` or `Uint8Array`, `value` will be used in its entirety. To compare a partial Buffer, use `slice()`.
|
1136
|
+
* - a number, `value` will be interpreted as an unsigned 8-bit integer value between `0` and `255`.
|
1137
|
+
*
|
1138
|
+
* Any other types will throw a `TypeError`.
|
1139
|
+
*
|
1140
|
+
* @param value What to search for.
|
1141
|
+
* @param byteOffset Where to begin searching in `buf`. If negative, then calculated from the end.
|
1142
|
+
* @param encoding If `value` is a string, this is the encoding used to search.
|
1143
|
+
* @returns The index of the first occurrence of `value` in `buf`, or `-1` if not found.
|
1144
|
+
*/
|
1145
|
+
indexOf(value, byteOffset, encoding) {
|
1146
|
+
return this._bidirectionalIndexOf(this, value, byteOffset, encoding, true);
|
1147
|
+
}
|
1148
|
+
/**
|
1149
|
+
* Gets the last index of the specified value.
|
1150
|
+
*
|
1151
|
+
* @see indexOf()
|
1152
|
+
* @param value
|
1153
|
+
* @param byteOffset
|
1154
|
+
* @param encoding
|
1155
|
+
*/
|
1156
|
+
lastIndexOf(value, byteOffset, encoding) {
|
1157
|
+
return this._bidirectionalIndexOf(this, value, byteOffset, encoding, false);
|
1158
|
+
}
|
1159
|
+
/**
 * Shared implementation of `indexOf`/`lastIndexOf`.
 *
 * @param buffer Buffer to search in.
 * @param val What to search for (string, number, or Buffer).
 * @param byteOffset Starting position; may also carry the encoding (overload).
 * @param encoding Encoding used when `val` is a string.
 * @param dir `true` to search forward (indexOf), `false` backward (lastIndexOf).
 * @returns Index of the match, or -1.
 */
_bidirectionalIndexOf(buffer, val, byteOffset, encoding, dir) {
  if (buffer.length === 0) {
    return -1;
  }
  // Overload: _bidirectionalIndexOf(buf, val, encoding).
  if (typeof byteOffset === "string") {
    encoding = byteOffset;
    byteOffset = 0;
  } else if (typeof byteOffset === "undefined") {
    byteOffset = 0;
  } else if (byteOffset > 2147483647) {
    // Clamp to the signed 32-bit range.
    byteOffset = 2147483647;
  } else if (byteOffset < -2147483648) {
    byteOffset = -2147483648;
  }
  byteOffset = +byteOffset;
  // Self-inequality is a NaN check: a non-numeric offset falls back to the
  // direction's natural starting point.
  if (byteOffset !== byteOffset) {
    byteOffset = dir ? 0 : buffer.length - 1;
  }
  // Negative offsets count from the end.
  if (byteOffset < 0) {
    byteOffset = buffer.length + byteOffset;
  }
  if (byteOffset >= buffer.length) {
    if (dir) {
      return -1;
    } else {
      byteOffset = buffer.length - 1;
    }
  } else if (byteOffset < 0) {
    if (dir) {
      byteOffset = 0;
    } else {
      return -1;
    }
  }
  // Normalize the needle to a Buffer for the array search.
  if (typeof val === "string") {
    val = Buffer.from(val, encoding);
  }
  if (Buffer.isBuffer(val)) {
    if (val.length === 0) {
      return -1;
    }
    return Buffer._arrayIndexOf(buffer, val, byteOffset, encoding, dir);
  } else if (typeof val === "number") {
    val = val & 255;
    // Prefer the native typed-array search when available.
    if (typeof Uint8Array.prototype.indexOf === "function") {
      if (dir) {
        return Uint8Array.prototype.indexOf.call(buffer, val, byteOffset);
      } else {
        return Uint8Array.prototype.lastIndexOf.call(buffer, val, byteOffset);
      }
    }
    return Buffer._arrayIndexOf(buffer, Buffer.from([val]), byteOffset, encoding, dir);
  }
  throw new TypeError("val must be string, number or Buffer");
}
|
1214
|
+
/**
|
1215
|
+
* Equivalent to `buf.indexOf() !== -1`.
|
1216
|
+
*
|
1217
|
+
* @param value
|
1218
|
+
* @param byteOffset
|
1219
|
+
* @param encoding
|
1220
|
+
*/
|
1221
|
+
includes(value, byteOffset, encoding) {
|
1222
|
+
return this.indexOf(value, byteOffset, encoding) !== -1;
|
1223
|
+
}
|
1224
|
+
/**
|
1225
|
+
* Creates a new buffer from the given parameters.
|
1226
|
+
*
|
1227
|
+
* @param data
|
1228
|
+
* @param encoding
|
1229
|
+
*/
|
1230
|
+
static from(a, b, c) {
|
1231
|
+
return new Buffer(a, b, c);
|
1232
|
+
}
|
1233
|
+
/**
|
1234
|
+
* Returns true if `obj` is a Buffer.
|
1235
|
+
*
|
1236
|
+
* @param obj
|
1237
|
+
*/
|
1238
|
+
static isBuffer(obj) {
|
1239
|
+
return obj != null && obj !== Buffer.prototype && Buffer._isInstance(obj, Buffer);
|
1240
|
+
}
|
1241
|
+
/**
|
1242
|
+
* Returns true if `encoding` is a supported encoding.
|
1243
|
+
*
|
1244
|
+
* @param encoding
|
1245
|
+
*/
|
1246
|
+
static isEncoding(encoding) {
|
1247
|
+
switch (encoding.toLowerCase()) {
|
1248
|
+
case "hex":
|
1249
|
+
case "utf8":
|
1250
|
+
case "ascii":
|
1251
|
+
case "binary":
|
1252
|
+
case "latin1":
|
1253
|
+
case "ucs2":
|
1254
|
+
case "utf16le":
|
1255
|
+
case "base64":
|
1256
|
+
return true;
|
1257
|
+
default:
|
1258
|
+
return false;
|
1259
|
+
}
|
1260
|
+
}
|
1261
|
+
/**
 * Gives the actual byte length of a string for an encoding. This is not the same as `string.length` since that
 * returns the number of characters in the string.
 *
 * @param string The string to test.
 * @param encoding The encoding to use for calculation. Defaults is `utf8`.
 */
static byteLength(string, encoding) {
  // Buffers already know their byte length.
  if (Buffer.isBuffer(string)) {
    return string.length;
  }
  // So do ArrayBuffers and typed-array views.
  if (typeof string !== "string" && (ArrayBuffer.isView(string) || Buffer._isInstance(string, ArrayBuffer))) {
    return string.byteLength;
  }
  if (typeof string !== "string") {
    throw new TypeError(
      'The "string" argument must be one of type string, Buffer, or ArrayBuffer. Received type ' + typeof string
    );
  }
  const len = string.length;
  // Hidden third argument: when true, an unknown encoding yields -1 instead of
  // falling back to utf8.
  const mustMatch = arguments.length > 2 && arguments[2] === true;
  if (!mustMatch && len === 0) {
    return 0;
  }
  switch (encoding?.toLowerCase()) {
    case "ascii":
    case "latin1":
    case "binary":
      // One byte per UTF-16 code unit.
      return len;
    case "utf8":
      return Buffer._utf8ToBytes(string).length;
    case "hex":
      // Two hex digits per byte.
      return len >>> 1;
    case "ucs2":
    case "utf16le":
      return len * 2;
    case "base64":
      return Buffer._base64ToBytes(string).length;
    default:
      return mustMatch ? -1 : Buffer._utf8ToBytes(string).length;
  }
}
|
1303
|
+
/**
 * Returns a Buffer which is the result of concatenating all the buffers in the list together.
 *
 * - If the list has no items, or if the `totalLength` is 0, then it returns a zero-length buffer.
 * - If the list has exactly one item, then the first item is returned.
 * - If the list has more than one item, then a new buffer is created.
 *
 * It is faster to provide the `totalLength` if it is known. However, it will be calculated if not provided at
 * a small computational expense.
 *
 * @param list An array of Buffer objects to concatenate.
 * @param totalLength Total length of the buffers when concatenated.
 */
static concat(list, totalLength) {
  if (!Array.isArray(list)) {
    throw new TypeError('"list" argument must be an Array of Buffers');
  }
  if (list.length === 0) {
    return Buffer.alloc(0);
  }
  let i;
  // Sum the lengths when the caller did not supply the total.
  if (totalLength === void 0) {
    totalLength = 0;
    for (i = 0; i < list.length; ++i) {
      totalLength += list[i].length;
    }
  }
  const buffer = Buffer.allocUnsafe(totalLength);
  let pos = 0;
  for (i = 0; i < list.length; ++i) {
    let buf = list[i];
    if (Buffer._isInstance(buf, Uint8Array)) {
      if (pos + buf.length > buffer.length) {
        // Source overruns the destination: use Buffer.copy, which clamps,
        // instead of Uint8Array.set, which would throw.
        if (!Buffer.isBuffer(buf)) {
          buf = Buffer.from(buf);
        }
        buf.copy(buffer, pos);
      } else {
        Uint8Array.prototype.set.call(buffer, buf, pos);
      }
    } else if (!Buffer.isBuffer(buf)) {
      throw new TypeError('"list" argument must be an Array of Buffers');
    } else {
      buf.copy(buffer, pos);
    }
    pos += buf.length;
  }
  return buffer;
}
|
1352
|
+
/**
 * The same as `buf1.compare(buf2)`.
 *
 * @param buf1 First buffer (or Uint8Array) to compare.
 * @param buf2 Second buffer (or Uint8Array) to compare.
 * @returns -1 if `buf1` sorts before `buf2`, 1 if after, 0 if equal.
 */
static compare(buf1, buf2) {
  // Wrap raw Uint8Arrays so both operands expose the Buffer interface.
  if (Buffer._isInstance(buf1, Uint8Array)) {
    buf1 = Buffer.from(buf1, buf1.byteOffset, buf1.byteLength);
  }
  if (Buffer._isInstance(buf2, Uint8Array)) {
    buf2 = Buffer.from(buf2, buf2.byteOffset, buf2.byteLength);
  }
  if (!Buffer.isBuffer(buf1) || !Buffer.isBuffer(buf2)) {
    throw new TypeError('The "buf1", "buf2" arguments must be one of type Buffer or Uint8Array');
  }
  if (buf1 === buf2) {
    return 0;
  }
  // Start with the lengths as tie-breakers; replace them with the first
  // differing bytes if one is found.
  let x = buf1.length;
  let y = buf2.length;
  for (let i = 0, len = Math.min(x, y); i < len; ++i) {
    if (buf1[i] !== buf2[i]) {
      x = buf1[i];
      y = buf2[i];
      break;
    }
  }
  if (x < y) {
    return -1;
  }
  if (y < x) {
    return 1;
  }
  return 0;
}
|
1385
|
+
/**
|
1386
|
+
* Allocates a new buffer of `size` octets.
|
1387
|
+
*
|
1388
|
+
* @param size The number of octets to allocate.
|
1389
|
+
* @param fill If specified, the buffer will be initialized by calling `buf.fill(fill)`, or with zeroes otherwise.
|
1390
|
+
* @param encoding The encoding used for the call to `buf.fill()` while initializing.
|
1391
|
+
*/
|
1392
|
+
static alloc(size, fill, encoding) {
|
1393
|
+
if (typeof size !== "number") {
|
1394
|
+
throw new TypeError('"size" argument must be of type number');
|
1395
|
+
} else if (size < 0) {
|
1396
|
+
throw new RangeError('The value "' + size + '" is invalid for option "size"');
|
1397
|
+
}
|
1398
|
+
if (size <= 0) {
|
1399
|
+
return new Buffer(size);
|
1400
|
+
}
|
1401
|
+
if (fill !== void 0) {
|
1402
|
+
return typeof encoding === "string" ? new Buffer(size).fill(fill, 0, size, encoding) : new Buffer(size).fill(fill);
|
1403
|
+
}
|
1404
|
+
return new Buffer(size);
|
1405
|
+
}
|
1406
|
+
/**
|
1407
|
+
* Allocates a new buffer of `size` octets without initializing memory. The contents of the buffer are unknown.
|
1408
|
+
*
|
1409
|
+
* @param size
|
1410
|
+
*/
|
1411
|
+
static allocUnsafe(size) {
|
1412
|
+
if (typeof size !== "number") {
|
1413
|
+
throw new TypeError('"size" argument must be of type number');
|
1414
|
+
} else if (size < 0) {
|
1415
|
+
throw new RangeError('The value "' + size + '" is invalid for option "size"');
|
1416
|
+
}
|
1417
|
+
return new Buffer(size < 0 ? 0 : Buffer._checked(size) | 0);
|
1418
|
+
}
|
1419
|
+
/**
|
1420
|
+
* Returns true if the given `obj` is an instance of `type`.
|
1421
|
+
*
|
1422
|
+
* @param obj
|
1423
|
+
* @param type
|
1424
|
+
*/
|
1425
|
+
static _isInstance(obj, type) {
|
1426
|
+
return obj instanceof type || obj != null && obj.constructor != null && obj.constructor.name != null && obj.constructor.name === type.name;
|
1427
|
+
}
|
1428
|
+
/**
 * Validates an allocation length against the engine maximum and truncates it
 * to a 32-bit integer.
 *
 * @param length Requested byte count.
 * @returns `length | 0`.
 * @throws {RangeError} When `length` is at or beyond `K_MAX_LENGTH`.
 */
static _checked(length) {
  if (length >= K_MAX_LENGTH) {
    throw new RangeError(
      "Attempt to allocate Buffer larger than maximum size: 0x" + K_MAX_LENGTH.toString(16) + " bytes"
    );
  }
  return length | 0;
}
|
1436
|
+
static _blitBuffer(src, dst, offset, length) {
|
1437
|
+
let i;
|
1438
|
+
for (i = 0; i < length; ++i) {
|
1439
|
+
if (i + offset >= dst.length || i >= src.length) {
|
1440
|
+
break;
|
1441
|
+
}
|
1442
|
+
dst[i + offset] = src[i];
|
1443
|
+
}
|
1444
|
+
return i;
|
1445
|
+
}
|
1446
|
+
static _utf8Write(buf, string, offset, length) {
|
1447
|
+
return Buffer._blitBuffer(Buffer._utf8ToBytes(string, buf.length - offset), buf, offset, length);
|
1448
|
+
}
|
1449
|
+
static _asciiWrite(buf, string, offset, length) {
|
1450
|
+
return Buffer._blitBuffer(Buffer._asciiToBytes(string), buf, offset, length);
|
1451
|
+
}
|
1452
|
+
static _base64Write(buf, string, offset, length) {
|
1453
|
+
return Buffer._blitBuffer(Buffer._base64ToBytes(string), buf, offset, length);
|
1454
|
+
}
|
1455
|
+
static _ucs2Write(buf, string, offset, length) {
|
1456
|
+
return Buffer._blitBuffer(Buffer._utf16leToBytes(string, buf.length - offset), buf, offset, length);
|
1457
|
+
}
|
1458
|
+
static _hexWrite(buf, string, offset, length) {
|
1459
|
+
offset = Number(offset) || 0;
|
1460
|
+
const remaining = buf.length - offset;
|
1461
|
+
if (!length) {
|
1462
|
+
length = remaining;
|
1463
|
+
} else {
|
1464
|
+
length = Number(length);
|
1465
|
+
if (length > remaining) {
|
1466
|
+
length = remaining;
|
1467
|
+
}
|
1468
|
+
}
|
1469
|
+
const strLen = string.length;
|
1470
|
+
if (length > strLen / 2) {
|
1471
|
+
length = strLen / 2;
|
1472
|
+
}
|
1473
|
+
let i;
|
1474
|
+
for (i = 0; i < length; ++i) {
|
1475
|
+
const parsed = parseInt(string.substr(i * 2, 2), 16);
|
1476
|
+
if (parsed !== parsed) {
|
1477
|
+
return i;
|
1478
|
+
}
|
1479
|
+
buf[offset + i] = parsed;
|
1480
|
+
}
|
1481
|
+
return i;
|
1482
|
+
}
|
1483
|
+
/**
 * Encodes a JS string as a UTF-8 byte array, emitting at most `units` bytes.
 * Lone/invalid surrogates are replaced with U+FFFD (bytes 239, 191, 189).
 *
 * @param string String to encode.
 * @param units Maximum number of bytes to produce (defaults to Infinity).
 * @returns Array of UTF-8 byte values.
 */
static _utf8ToBytes(string, units) {
  units = units || Infinity;
  const length = string.length;
  const bytes = [];
  let codePoint;
  // Pending high surrogate waiting for its low half, or null.
  let leadSurrogate = null;
  for (let i = 0; i < length; ++i) {
    codePoint = string.charCodeAt(i);
    // Code unit is in the surrogate range D800..DFFF.
    if (codePoint > 55295 && codePoint < 57344) {
      if (!leadSurrogate) {
        if (codePoint > 56319) {
          // Unpaired low surrogate: emit the replacement character.
          if ((units -= 3) > -1) {
            bytes.push(239, 191, 189);
          }
          continue;
        } else if (i + 1 === length) {
          // High surrogate at end of string: also invalid.
          if ((units -= 3) > -1) {
            bytes.push(239, 191, 189);
          }
          continue;
        }
        // Remember the high surrogate and look at the next unit.
        leadSurrogate = codePoint;
        continue;
      }
      if (codePoint < 56320) {
        // Two high surrogates in a row: replace the first, keep the second.
        if ((units -= 3) > -1) {
          bytes.push(239, 191, 189);
        }
        leadSurrogate = codePoint;
        continue;
      }
      // Valid pair: combine into a supplementary-plane code point.
      codePoint = (leadSurrogate - 55296 << 10 | codePoint - 56320) + 65536;
    } else if (leadSurrogate) {
      // A non-surrogate followed a pending high surrogate: replace it.
      if ((units -= 3) > -1) {
        bytes.push(239, 191, 189);
      }
    }
    leadSurrogate = null;
    // Emit 1-4 UTF-8 bytes depending on the code point's magnitude.
    if (codePoint < 128) {
      if ((units -= 1) < 0) {
        break;
      }
      bytes.push(codePoint);
    } else if (codePoint < 2048) {
      if ((units -= 2) < 0) {
        break;
      }
      bytes.push(codePoint >> 6 | 192, codePoint & 63 | 128);
    } else if (codePoint < 65536) {
      if ((units -= 3) < 0) {
        break;
      }
      bytes.push(codePoint >> 12 | 224, codePoint >> 6 & 63 | 128, codePoint & 63 | 128);
    } else if (codePoint < 1114112) {
      if ((units -= 4) < 0) {
        break;
      }
      bytes.push(
        codePoint >> 18 | 240,
        codePoint >> 12 & 63 | 128,
        codePoint >> 6 & 63 | 128,
        codePoint & 63 | 128
      );
    } else {
      throw new Error("Invalid code point");
    }
  }
  return bytes;
}
|
1552
|
+
static _base64ToBytes(str) {
|
1553
|
+
return toByteArray(base64clean(str));
|
1554
|
+
}
|
1555
|
+
static _asciiToBytes(str) {
|
1556
|
+
const byteArray = [];
|
1557
|
+
for (let i = 0; i < str.length; ++i) {
|
1558
|
+
byteArray.push(str.charCodeAt(i) & 255);
|
1559
|
+
}
|
1560
|
+
return byteArray;
|
1561
|
+
}
|
1562
|
+
static _utf16leToBytes(str, units) {
|
1563
|
+
let c, hi, lo;
|
1564
|
+
const byteArray = [];
|
1565
|
+
for (let i = 0; i < str.length; ++i) {
|
1566
|
+
if ((units -= 2) < 0) break;
|
1567
|
+
c = str.charCodeAt(i);
|
1568
|
+
hi = c >> 8;
|
1569
|
+
lo = c % 256;
|
1570
|
+
byteArray.push(lo);
|
1571
|
+
byteArray.push(hi);
|
1572
|
+
}
|
1573
|
+
return byteArray;
|
1574
|
+
}
|
1575
|
+
static _hexSlice(buf, start, end) {
|
1576
|
+
const len = buf.length;
|
1577
|
+
if (!start || start < 0) {
|
1578
|
+
start = 0;
|
1579
|
+
}
|
1580
|
+
if (!end || end < 0 || end > len) {
|
1581
|
+
end = len;
|
1582
|
+
}
|
1583
|
+
let out = "";
|
1584
|
+
for (let i = start; i < end; ++i) {
|
1585
|
+
out += hexSliceLookupTable[buf[i]];
|
1586
|
+
}
|
1587
|
+
return out;
|
1588
|
+
}
|
1589
|
+
static _base64Slice(buf, start, end) {
|
1590
|
+
if (start === 0 && end === buf.length) {
|
1591
|
+
return fromByteArray(buf);
|
1592
|
+
} else {
|
1593
|
+
return fromByteArray(buf.slice(start, end));
|
1594
|
+
}
|
1595
|
+
}
|
1596
|
+
/**
 * Decodes `buf[start:end]` as UTF-8 into a JS string. Invalid sequences decode
 * to U+FFFD one byte at a time; supplementary code points become surrogate pairs.
 *
 * @param buf Byte container to decode.
 * @param start Start index.
 * @param end End index (clamped to `buf.length`).
 */
static _utf8Slice(buf, start, end) {
  end = Math.min(buf.length, end);
  const res = [];
  let i = start;
  while (i < end) {
    const firstByte = buf[i];
    let codePoint = null;
    // Sequence length from the lead byte: F0.. => 4, E0.. => 3, C0.. => 2, else 1.
    let bytesPerSequence = firstByte > 239 ? 4 : firstByte > 223 ? 3 : firstByte > 191 ? 2 : 1;
    if (i + bytesPerSequence <= end) {
      let secondByte, thirdByte, fourthByte, tempCodePoint;
      switch (bytesPerSequence) {
        case 1:
          if (firstByte < 128) {
            codePoint = firstByte;
          }
          break;
        case 2:
          secondByte = buf[i + 1];
          // Continuation bytes must match 10xxxxxx.
          if ((secondByte & 192) === 128) {
            tempCodePoint = (firstByte & 31) << 6 | secondByte & 63;
            // Reject overlong encodings (< 0x80).
            if (tempCodePoint > 127) {
              codePoint = tempCodePoint;
            }
          }
          break;
        case 3:
          secondByte = buf[i + 1];
          thirdByte = buf[i + 2];
          if ((secondByte & 192) === 128 && (thirdByte & 192) === 128) {
            tempCodePoint = (firstByte & 15) << 12 | (secondByte & 63) << 6 | thirdByte & 63;
            // Reject overlong encodings and the surrogate range D800..DFFF.
            if (tempCodePoint > 2047 && (tempCodePoint < 55296 || tempCodePoint > 57343)) {
              codePoint = tempCodePoint;
            }
          }
          break;
        case 4:
          secondByte = buf[i + 1];
          thirdByte = buf[i + 2];
          fourthByte = buf[i + 3];
          if ((secondByte & 192) === 128 && (thirdByte & 192) === 128 && (fourthByte & 192) === 128) {
            tempCodePoint = (firstByte & 15) << 18 | (secondByte & 63) << 12 | (thirdByte & 63) << 6 | fourthByte & 63;
            // Reject overlong encodings and values beyond U+10FFFF.
            if (tempCodePoint > 65535 && tempCodePoint < 1114112) {
              codePoint = tempCodePoint;
            }
          }
      }
    }
    if (codePoint === null) {
      // Invalid byte: emit U+FFFD and resynchronize one byte later.
      codePoint = 65533;
      bytesPerSequence = 1;
    } else if (codePoint > 65535) {
      // Split supplementary code points into a UTF-16 surrogate pair.
      codePoint -= 65536;
      res.push(codePoint >>> 10 & 1023 | 55296);
      codePoint = 56320 | codePoint & 1023;
    }
    res.push(codePoint);
    i += bytesPerSequence;
  }
  return Buffer._decodeCodePointsArray(res);
}
|
1656
|
+
static _decodeCodePointsArray(codePoints) {
|
1657
|
+
const len = codePoints.length;
|
1658
|
+
if (len <= MAX_ARGUMENTS_LENGTH) {
|
1659
|
+
return String.fromCharCode.apply(String, codePoints);
|
1660
|
+
}
|
1661
|
+
let res = "";
|
1662
|
+
let i = 0;
|
1663
|
+
while (i < len) {
|
1664
|
+
res += String.fromCharCode.apply(String, codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH));
|
1665
|
+
}
|
1666
|
+
return res;
|
1667
|
+
}
|
1668
|
+
static _asciiSlice(buf, start, end) {
|
1669
|
+
let ret = "";
|
1670
|
+
end = Math.min(buf.length, end);
|
1671
|
+
for (let i = start; i < end; ++i) {
|
1672
|
+
ret += String.fromCharCode(buf[i] & 127);
|
1673
|
+
}
|
1674
|
+
return ret;
|
1675
|
+
}
|
1676
|
+
static _latin1Slice(buf, start, end) {
|
1677
|
+
let ret = "";
|
1678
|
+
end = Math.min(buf.length, end);
|
1679
|
+
for (let i = start; i < end; ++i) {
|
1680
|
+
ret += String.fromCharCode(buf[i]);
|
1681
|
+
}
|
1682
|
+
return ret;
|
1683
|
+
}
|
1684
|
+
static _utf16leSlice(buf, start, end) {
|
1685
|
+
const bytes = buf.slice(start, end);
|
1686
|
+
let res = "";
|
1687
|
+
for (let i = 0; i < bytes.length - 1; i += 2) {
|
1688
|
+
res += String.fromCharCode(bytes[i] + bytes[i + 1] * 256);
|
1689
|
+
}
|
1690
|
+
return res;
|
1691
|
+
}
|
1692
|
+
static _arrayIndexOf(arr, val, byteOffset, encoding, dir) {
|
1693
|
+
let indexSize = 1;
|
1694
|
+
let arrLength = arr.length;
|
1695
|
+
let valLength = val.length;
|
1696
|
+
if (encoding !== void 0) {
|
1697
|
+
encoding = Buffer._getEncoding(encoding);
|
1698
|
+
if (encoding === "ucs2" || encoding === "utf16le") {
|
1699
|
+
if (arr.length < 2 || val.length < 2) {
|
1700
|
+
return -1;
|
1701
|
+
}
|
1702
|
+
indexSize = 2;
|
1703
|
+
arrLength /= 2;
|
1704
|
+
valLength /= 2;
|
1705
|
+
byteOffset /= 2;
|
1706
|
+
}
|
1707
|
+
}
|
1708
|
+
function read(buf, i2) {
|
1709
|
+
if (indexSize === 1) {
|
1710
|
+
return buf[i2];
|
1711
|
+
} else {
|
1712
|
+
return buf.readUInt16BE(i2 * indexSize);
|
1713
|
+
}
|
1714
|
+
}
|
1715
|
+
let i;
|
1716
|
+
if (dir) {
|
1717
|
+
let foundIndex = -1;
|
1718
|
+
for (i = byteOffset; i < arrLength; i++) {
|
1719
|
+
if (read(arr, i) === read(val, foundIndex === -1 ? 0 : i - foundIndex)) {
|
1720
|
+
if (foundIndex === -1) foundIndex = i;
|
1721
|
+
if (i - foundIndex + 1 === valLength) return foundIndex * indexSize;
|
1722
|
+
} else {
|
1723
|
+
if (foundIndex !== -1) i -= i - foundIndex;
|
1724
|
+
foundIndex = -1;
|
1725
|
+
}
|
1726
|
+
}
|
1727
|
+
} else {
|
1728
|
+
if (byteOffset + valLength > arrLength) {
|
1729
|
+
byteOffset = arrLength - valLength;
|
1730
|
+
}
|
1731
|
+
for (i = byteOffset; i >= 0; i--) {
|
1732
|
+
let found = true;
|
1733
|
+
for (let j = 0; j < valLength; j++) {
|
1734
|
+
if (read(arr, i + j) !== read(val, j)) {
|
1735
|
+
found = false;
|
1736
|
+
break;
|
1737
|
+
}
|
1738
|
+
}
|
1739
|
+
if (found) {
|
1740
|
+
return i;
|
1741
|
+
}
|
1742
|
+
}
|
1743
|
+
}
|
1744
|
+
return -1;
|
1745
|
+
}
|
1746
|
+
static _checkOffset(offset, ext, length) {
|
1747
|
+
if (offset % 1 !== 0 || offset < 0) throw new RangeError("offset is not uint");
|
1748
|
+
if (offset + ext > length) throw new RangeError("Trying to access beyond buffer length");
|
1749
|
+
}
|
1750
|
+
static _checkInt(buf, value, offset, ext, max, min) {
|
1751
|
+
if (!Buffer.isBuffer(buf)) throw new TypeError('"buffer" argument must be a Buffer instance');
|
1752
|
+
if (value > max || value < min) throw new RangeError('"value" argument is out of bounds');
|
1753
|
+
if (offset + ext > buf.length) throw new RangeError("Index out of range");
|
1754
|
+
}
|
1755
|
+
static _getEncoding(encoding) {
|
1756
|
+
let toLowerCase = false;
|
1757
|
+
let originalEncoding = "";
|
1758
|
+
for (; ; ) {
|
1759
|
+
switch (encoding) {
|
1760
|
+
case "hex":
|
1761
|
+
return "hex";
|
1762
|
+
case "utf8":
|
1763
|
+
return "utf8";
|
1764
|
+
case "ascii":
|
1765
|
+
return "ascii";
|
1766
|
+
case "binary":
|
1767
|
+
return "binary";
|
1768
|
+
case "latin1":
|
1769
|
+
return "latin1";
|
1770
|
+
case "ucs2":
|
1771
|
+
return "ucs2";
|
1772
|
+
case "utf16le":
|
1773
|
+
return "utf16le";
|
1774
|
+
case "base64":
|
1775
|
+
return "base64";
|
1776
|
+
default: {
|
1777
|
+
if (toLowerCase) {
|
1778
|
+
throw new TypeError("Unknown or unsupported encoding: " + originalEncoding);
|
1779
|
+
}
|
1780
|
+
toLowerCase = true;
|
1781
|
+
originalEncoding = encoding;
|
1782
|
+
encoding = encoding.toLowerCase();
|
1783
|
+
}
|
1784
|
+
}
|
1785
|
+
}
|
1786
|
+
}
|
1787
|
+
}
|
1788
|
+
const hexSliceLookupTable = function() {
|
1789
|
+
const alphabet = "0123456789abcdef";
|
1790
|
+
const table = new Array(256);
|
1791
|
+
for (let i = 0; i < 16; ++i) {
|
1792
|
+
const i16 = i * 16;
|
1793
|
+
for (let j = 0; j < 16; ++j) {
|
1794
|
+
table[i16 + j] = alphabet[i] + alphabet[j];
|
1795
|
+
}
|
1796
|
+
}
|
1797
|
+
return table;
|
1798
|
+
}();
|
1799
|
+
const INVALID_BASE64_RE = /[^+/0-9A-Za-z-_]/g;
|
1800
|
+
function base64clean(str) {
|
1801
|
+
str = str.split("=")[0];
|
1802
|
+
str = str.trim().replace(INVALID_BASE64_RE, "");
|
1803
|
+
if (str.length < 2) return "";
|
1804
|
+
while (str.length % 4 !== 0) {
|
1805
|
+
str = str + "=";
|
1806
|
+
}
|
1807
|
+
return str;
|
1808
|
+
}
|
1809
|
+
|
27
1810
|
function notEmpty(value) {
|
28
1811
|
return value !== null && value !== void 0;
|
29
1812
|
}
|
@@ -118,155 +1901,15 @@ function promiseMap(inputValues, mapper) {
|
|
118
1901
|
return inputValues.reduce(reducer, Promise.resolve([]));
|
119
1902
|
}
|
120
1903
|
|
121
|
-
|
122
|
-
|
123
|
-
if (isDefined(process) && isDefined(process.env)) {
|
124
|
-
return {
|
125
|
-
apiKey: process.env.XATA_API_KEY ?? getGlobalApiKey(),
|
126
|
-
databaseURL: process.env.XATA_DATABASE_URL ?? getGlobalDatabaseURL(),
|
127
|
-
branch: process.env.XATA_BRANCH ?? getGlobalBranch(),
|
128
|
-
deployPreview: process.env.XATA_PREVIEW,
|
129
|
-
deployPreviewBranch: process.env.XATA_PREVIEW_BRANCH,
|
130
|
-
vercelGitCommitRef: process.env.VERCEL_GIT_COMMIT_REF,
|
131
|
-
vercelGitRepoOwner: process.env.VERCEL_GIT_REPO_OWNER
|
132
|
-
};
|
133
|
-
}
|
134
|
-
} catch (err) {
|
135
|
-
}
|
136
|
-
try {
|
137
|
-
if (isObject(Deno) && isObject(Deno.env)) {
|
138
|
-
return {
|
139
|
-
apiKey: Deno.env.get("XATA_API_KEY") ?? getGlobalApiKey(),
|
140
|
-
databaseURL: Deno.env.get("XATA_DATABASE_URL") ?? getGlobalDatabaseURL(),
|
141
|
-
branch: Deno.env.get("XATA_BRANCH") ?? getGlobalBranch(),
|
142
|
-
deployPreview: Deno.env.get("XATA_PREVIEW"),
|
143
|
-
deployPreviewBranch: Deno.env.get("XATA_PREVIEW_BRANCH"),
|
144
|
-
vercelGitCommitRef: Deno.env.get("VERCEL_GIT_COMMIT_REF"),
|
145
|
-
vercelGitRepoOwner: Deno.env.get("VERCEL_GIT_REPO_OWNER")
|
146
|
-
};
|
147
|
-
}
|
148
|
-
} catch (err) {
|
149
|
-
}
|
150
|
-
return {
|
151
|
-
apiKey: getGlobalApiKey(),
|
152
|
-
databaseURL: getGlobalDatabaseURL(),
|
153
|
-
branch: getGlobalBranch(),
|
154
|
-
deployPreview: void 0,
|
155
|
-
deployPreviewBranch: void 0,
|
156
|
-
vercelGitCommitRef: void 0,
|
157
|
-
vercelGitRepoOwner: void 0
|
158
|
-
};
|
159
|
-
}
|
160
|
-
function getEnableBrowserVariable() {
|
161
|
-
try {
|
162
|
-
if (isObject(process) && isObject(process.env) && process.env.XATA_ENABLE_BROWSER !== void 0) {
|
163
|
-
return process.env.XATA_ENABLE_BROWSER === "true";
|
164
|
-
}
|
165
|
-
} catch (err) {
|
166
|
-
}
|
167
|
-
try {
|
168
|
-
if (isObject(Deno) && isObject(Deno.env) && Deno.env.get("XATA_ENABLE_BROWSER") !== void 0) {
|
169
|
-
return Deno.env.get("XATA_ENABLE_BROWSER") === "true";
|
170
|
-
}
|
171
|
-
} catch (err) {
|
172
|
-
}
|
173
|
-
try {
|
174
|
-
return XATA_ENABLE_BROWSER === true || XATA_ENABLE_BROWSER === "true";
|
175
|
-
} catch (err) {
|
176
|
-
return void 0;
|
177
|
-
}
|
178
|
-
}
|
179
|
-
function getGlobalApiKey() {
|
180
|
-
try {
|
181
|
-
return XATA_API_KEY;
|
182
|
-
} catch (err) {
|
183
|
-
return void 0;
|
184
|
-
}
|
185
|
-
}
|
186
|
-
function getGlobalDatabaseURL() {
|
187
|
-
try {
|
188
|
-
return XATA_DATABASE_URL;
|
189
|
-
} catch (err) {
|
190
|
-
return void 0;
|
191
|
-
}
|
192
|
-
}
|
193
|
-
function getGlobalBranch() {
|
194
|
-
try {
|
195
|
-
return XATA_BRANCH;
|
196
|
-
} catch (err) {
|
197
|
-
return void 0;
|
198
|
-
}
|
199
|
-
}
|
200
|
-
function getDatabaseURL() {
|
201
|
-
try {
|
202
|
-
const { databaseURL } = getEnvironment();
|
203
|
-
return databaseURL;
|
204
|
-
} catch (err) {
|
205
|
-
return void 0;
|
206
|
-
}
|
207
|
-
}
|
208
|
-
function getAPIKey() {
|
209
|
-
try {
|
210
|
-
const { apiKey } = getEnvironment();
|
211
|
-
return apiKey;
|
212
|
-
} catch (err) {
|
213
|
-
return void 0;
|
214
|
-
}
|
215
|
-
}
|
216
|
-
function getBranch() {
|
217
|
-
try {
|
218
|
-
const { branch } = getEnvironment();
|
219
|
-
return branch;
|
220
|
-
} catch (err) {
|
221
|
-
return void 0;
|
222
|
-
}
|
223
|
-
}
|
224
|
-
function buildPreviewBranchName({ org, branch }) {
|
225
|
-
return `preview-${org}-${branch}`;
|
226
|
-
}
|
227
|
-
function getPreviewBranch() {
|
228
|
-
try {
|
229
|
-
const { deployPreview, deployPreviewBranch, vercelGitCommitRef, vercelGitRepoOwner } = getEnvironment();
|
230
|
-
if (deployPreviewBranch)
|
231
|
-
return deployPreviewBranch;
|
232
|
-
switch (deployPreview) {
|
233
|
-
case "vercel": {
|
234
|
-
if (!vercelGitCommitRef || !vercelGitRepoOwner) {
|
235
|
-
console.warn("XATA_PREVIEW=vercel but VERCEL_GIT_COMMIT_REF or VERCEL_GIT_REPO_OWNER is not valid");
|
236
|
-
return void 0;
|
237
|
-
}
|
238
|
-
return buildPreviewBranchName({ org: vercelGitRepoOwner, branch: vercelGitCommitRef });
|
239
|
-
}
|
240
|
-
}
|
241
|
-
return void 0;
|
242
|
-
} catch (err) {
|
243
|
-
return void 0;
|
244
|
-
}
|
245
|
-
}
|
246
|
-
|
247
|
-
var __accessCheck$6 = (obj, member, msg) => {
|
248
|
-
if (!member.has(obj))
|
249
|
-
throw TypeError("Cannot " + msg);
|
250
|
-
};
|
251
|
-
var __privateGet$5 = (obj, member, getter) => {
|
252
|
-
__accessCheck$6(obj, member, "read from private field");
|
253
|
-
return getter ? getter.call(obj) : member.get(obj);
|
254
|
-
};
|
255
|
-
var __privateAdd$6 = (obj, member, value) => {
|
256
|
-
if (member.has(obj))
|
257
|
-
throw TypeError("Cannot add the same private member more than once");
|
258
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
259
|
-
};
|
260
|
-
var __privateSet$4 = (obj, member, value, setter) => {
|
261
|
-
__accessCheck$6(obj, member, "write to private field");
|
262
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
263
|
-
return value;
|
264
|
-
};
|
265
|
-
var __privateMethod$4 = (obj, member, method) => {
|
266
|
-
__accessCheck$6(obj, member, "access private method");
|
267
|
-
return method;
|
1904
|
+
var __typeError$6 = (msg) => {
|
1905
|
+
throw TypeError(msg);
|
268
1906
|
};
|
269
|
-
var
|
1907
|
+
var __accessCheck$6 = (obj, member, msg) => member.has(obj) || __typeError$6("Cannot " + msg);
|
1908
|
+
var __privateGet$5 = (obj, member, getter) => (__accessCheck$6(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
1909
|
+
var __privateAdd$6 = (obj, member, value) => member.has(obj) ? __typeError$6("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
1910
|
+
var __privateSet$4 = (obj, member, value, setter) => (__accessCheck$6(obj, member, "write to private field"), member.set(obj, value), value);
|
1911
|
+
var __privateMethod$4 = (obj, member, method) => (__accessCheck$6(obj, member, "access private method"), method);
|
1912
|
+
var _fetch, _queue, _concurrency, _ApiRequestPool_instances, enqueue_fn;
|
270
1913
|
const REQUEST_TIMEOUT = 5 * 60 * 1e3;
|
271
1914
|
function getFetchImplementation(userFetch) {
|
272
1915
|
const globalFetch = typeof fetch !== "undefined" ? fetch : void 0;
|
@@ -279,10 +1922,10 @@ function getFetchImplementation(userFetch) {
|
|
279
1922
|
}
|
280
1923
|
class ApiRequestPool {
|
281
1924
|
constructor(concurrency = 10) {
|
282
|
-
__privateAdd$6(this,
|
283
|
-
__privateAdd$6(this, _fetch
|
284
|
-
__privateAdd$6(this, _queue
|
285
|
-
__privateAdd$6(this, _concurrency
|
1925
|
+
__privateAdd$6(this, _ApiRequestPool_instances);
|
1926
|
+
__privateAdd$6(this, _fetch);
|
1927
|
+
__privateAdd$6(this, _queue);
|
1928
|
+
__privateAdd$6(this, _concurrency);
|
286
1929
|
__privateSet$4(this, _queue, []);
|
287
1930
|
__privateSet$4(this, _concurrency, concurrency);
|
288
1931
|
this.running = 0;
|
@@ -317,7 +1960,7 @@ class ApiRequestPool {
|
|
317
1960
|
}
|
318
1961
|
return response;
|
319
1962
|
};
|
320
|
-
return __privateMethod$4(this,
|
1963
|
+
return __privateMethod$4(this, _ApiRequestPool_instances, enqueue_fn).call(this, async () => {
|
321
1964
|
return await runRequest();
|
322
1965
|
});
|
323
1966
|
}
|
@@ -325,7 +1968,7 @@ class ApiRequestPool {
|
|
325
1968
|
_fetch = new WeakMap();
|
326
1969
|
_queue = new WeakMap();
|
327
1970
|
_concurrency = new WeakMap();
|
328
|
-
|
1971
|
+
_ApiRequestPool_instances = new WeakSet();
|
329
1972
|
enqueue_fn = function(task) {
|
330
1973
|
const promise = new Promise((resolve) => __privateGet$5(this, _queue).push(resolve)).finally(() => {
|
331
1974
|
this.started--;
|
@@ -528,7 +2171,7 @@ function defaultOnOpen(response) {
|
|
528
2171
|
}
|
529
2172
|
}
|
530
2173
|
|
531
|
-
const VERSION = "0.29.
|
2174
|
+
const VERSION = "0.29.4";
|
532
2175
|
|
533
2176
|
class ErrorWithCause extends Error {
|
534
2177
|
constructor(message, options) {
|
@@ -608,35 +2251,30 @@ function parseProviderString(provider = "production") {
|
|
608
2251
|
return provider;
|
609
2252
|
}
|
610
2253
|
const [main, workspaces] = provider.split(",");
|
611
|
-
if (!main || !workspaces)
|
612
|
-
return null;
|
2254
|
+
if (!main || !workspaces) return null;
|
613
2255
|
return { main, workspaces };
|
614
2256
|
}
|
615
2257
|
function buildProviderString(provider) {
|
616
|
-
if (isHostProviderAlias(provider))
|
617
|
-
return provider;
|
2258
|
+
if (isHostProviderAlias(provider)) return provider;
|
618
2259
|
return `${provider.main},${provider.workspaces}`;
|
619
2260
|
}
|
620
2261
|
function parseWorkspacesUrlParts(url) {
|
621
|
-
if (!isString(url))
|
622
|
-
return null;
|
2262
|
+
if (!isString(url)) return null;
|
623
2263
|
const matches = {
|
624
|
-
production: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.xata\.sh
|
625
|
-
staging: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.staging-xata\.dev
|
626
|
-
dev: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.dev-xata\.dev
|
627
|
-
local: url.match(/(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:(
|
2264
|
+
production: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.xata\.sh\/db\/([^:]+):?(.*)?/),
|
2265
|
+
staging: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.staging-xata\.dev\/db\/([^:]+):?(.*)?/),
|
2266
|
+
dev: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.dev-xata\.dev\/db\/([^:]+):?(.*)?/),
|
2267
|
+
local: url.match(/(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:(?:\d+)\/db\/([^:]+):?(.*)?/)
|
628
2268
|
};
|
629
2269
|
const [host, match] = Object.entries(matches).find(([, match2]) => match2 !== null) ?? [];
|
630
|
-
if (!isHostProviderAlias(host) || !match)
|
631
|
-
|
632
|
-
return { workspace: match[1], region: match[2], host };
|
2270
|
+
if (!isHostProviderAlias(host) || !match) return null;
|
2271
|
+
return { workspace: match[1], region: match[2], database: match[3], branch: match[4], host };
|
633
2272
|
}
|
634
2273
|
|
635
2274
|
const pool = new ApiRequestPool();
|
636
2275
|
const resolveUrl = (url, queryParams = {}, pathParams = {}) => {
|
637
2276
|
const cleanQueryParams = Object.entries(queryParams).reduce((acc, [key, value]) => {
|
638
|
-
if (value === void 0 || value === null)
|
639
|
-
return acc;
|
2277
|
+
if (value === void 0 || value === null) return acc;
|
640
2278
|
return { ...acc, [key]: value };
|
641
2279
|
}, {});
|
642
2280
|
const query = new URLSearchParams(cleanQueryParams).toString();
|
@@ -684,8 +2322,7 @@ function hostHeader(url) {
|
|
684
2322
|
return groups?.host ? { Host: groups.host } : {};
|
685
2323
|
}
|
686
2324
|
async function parseBody(body, headers) {
|
687
|
-
if (!isDefined(body))
|
688
|
-
return void 0;
|
2325
|
+
if (!isDefined(body)) return void 0;
|
689
2326
|
if (isBlob(body) || typeof body.text === "function") {
|
690
2327
|
return body;
|
691
2328
|
}
|
@@ -764,8 +2401,7 @@ async function fetch$1({
|
|
764
2401
|
[TraceAttributes.CLOUDFLARE_RAY_ID]: response.headers?.get("cf-ray") ?? void 0
|
765
2402
|
});
|
766
2403
|
const message = response.headers?.get("x-xata-message");
|
767
|
-
if (message)
|
768
|
-
console.warn(message);
|
2404
|
+
if (message) console.warn(message);
|
769
2405
|
if (response.status === 204) {
|
770
2406
|
return {};
|
771
2407
|
}
|
@@ -849,16 +2485,60 @@ function parseUrl(url) {
|
|
849
2485
|
|
850
2486
|
const dataPlaneFetch = async (options) => fetch$1({ ...options, endpoint: "dataPlane" });
|
851
2487
|
|
852
|
-
const applyMigration = (variables, signal) => dataPlaneFetch({
|
2488
|
+
const applyMigration = (variables, signal) => dataPlaneFetch({
|
2489
|
+
url: "/db/{dbBranchName}/migrations/apply",
|
2490
|
+
method: "post",
|
2491
|
+
...variables,
|
2492
|
+
signal
|
2493
|
+
});
|
2494
|
+
const startMigration = (variables, signal) => dataPlaneFetch({
|
2495
|
+
url: "/db/{dbBranchName}/migrations/start",
|
2496
|
+
method: "post",
|
2497
|
+
...variables,
|
2498
|
+
signal
|
2499
|
+
});
|
2500
|
+
const completeMigration = (variables, signal) => dataPlaneFetch({
|
2501
|
+
url: "/db/{dbBranchName}/migrations/complete",
|
2502
|
+
method: "post",
|
2503
|
+
...variables,
|
2504
|
+
signal
|
2505
|
+
});
|
2506
|
+
const rollbackMigration = (variables, signal) => dataPlaneFetch({
|
2507
|
+
url: "/db/{dbBranchName}/migrations/rollback",
|
2508
|
+
method: "post",
|
2509
|
+
...variables,
|
2510
|
+
signal
|
2511
|
+
});
|
853
2512
|
const adaptTable = (variables, signal) => dataPlaneFetch({
|
854
2513
|
url: "/db/{dbBranchName}/migrations/adapt/{tableName}",
|
855
2514
|
method: "post",
|
856
2515
|
...variables,
|
857
2516
|
signal
|
858
2517
|
});
|
859
|
-
const
|
860
|
-
|
861
|
-
|
2518
|
+
const adaptAllTables = (variables, signal) => dataPlaneFetch({
|
2519
|
+
url: "/db/{dbBranchName}/migrations/adapt",
|
2520
|
+
method: "post",
|
2521
|
+
...variables,
|
2522
|
+
signal
|
2523
|
+
});
|
2524
|
+
const getBranchMigrationJobStatus = (variables, signal) => dataPlaneFetch({
|
2525
|
+
url: "/db/{dbBranchName}/migrations/status",
|
2526
|
+
method: "get",
|
2527
|
+
...variables,
|
2528
|
+
signal
|
2529
|
+
});
|
2530
|
+
const getMigrationJobStatus = (variables, signal) => dataPlaneFetch({
|
2531
|
+
url: "/db/{dbBranchName}/migrations/jobs/{jobId}",
|
2532
|
+
method: "get",
|
2533
|
+
...variables,
|
2534
|
+
signal
|
2535
|
+
});
|
2536
|
+
const getMigrationHistory = (variables, signal) => dataPlaneFetch({
|
2537
|
+
url: "/db/{dbBranchName}/migrations/history",
|
2538
|
+
method: "get",
|
2539
|
+
...variables,
|
2540
|
+
signal
|
2541
|
+
});
|
862
2542
|
const getBranchList = (variables, signal) => dataPlaneFetch({
|
863
2543
|
url: "/dbs/{dbName}",
|
864
2544
|
method: "get",
|
@@ -885,68 +2565,153 @@ const deleteBranch = (variables, signal) => dataPlaneFetch({
|
|
885
2565
|
...variables,
|
886
2566
|
signal
|
887
2567
|
});
|
888
|
-
const getSchema = (variables, signal) => dataPlaneFetch({
|
889
|
-
url: "/db/{dbBranchName}/schema",
|
890
|
-
method: "get",
|
2568
|
+
const getSchema = (variables, signal) => dataPlaneFetch({
|
2569
|
+
url: "/db/{dbBranchName}/schema",
|
2570
|
+
method: "get",
|
2571
|
+
...variables,
|
2572
|
+
signal
|
2573
|
+
});
|
2574
|
+
const copyBranch = (variables, signal) => dataPlaneFetch({
|
2575
|
+
url: "/db/{dbBranchName}/copy",
|
2576
|
+
method: "post",
|
2577
|
+
...variables,
|
2578
|
+
signal
|
2579
|
+
});
|
2580
|
+
const updateBranchMetadata = (variables, signal) => dataPlaneFetch({
|
2581
|
+
url: "/db/{dbBranchName}/metadata",
|
2582
|
+
method: "put",
|
2583
|
+
...variables,
|
2584
|
+
signal
|
2585
|
+
});
|
2586
|
+
const getBranchMetadata = (variables, signal) => dataPlaneFetch({
|
2587
|
+
url: "/db/{dbBranchName}/metadata",
|
2588
|
+
method: "get",
|
2589
|
+
...variables,
|
2590
|
+
signal
|
2591
|
+
});
|
2592
|
+
const getBranchStats = (variables, signal) => dataPlaneFetch({
|
2593
|
+
url: "/db/{dbBranchName}/stats",
|
2594
|
+
method: "get",
|
2595
|
+
...variables,
|
2596
|
+
signal
|
2597
|
+
});
|
2598
|
+
const getGitBranchesMapping = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "get", ...variables, signal });
|
2599
|
+
const addGitBranchesEntry = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "post", ...variables, signal });
|
2600
|
+
const removeGitBranchesEntry = (variables, signal) => dataPlaneFetch({
|
2601
|
+
url: "/dbs/{dbName}/gitBranches",
|
2602
|
+
method: "delete",
|
2603
|
+
...variables,
|
2604
|
+
signal
|
2605
|
+
});
|
2606
|
+
const resolveBranch = (variables, signal) => dataPlaneFetch({
|
2607
|
+
url: "/dbs/{dbName}/resolveBranch",
|
2608
|
+
method: "get",
|
2609
|
+
...variables,
|
2610
|
+
signal
|
2611
|
+
});
|
2612
|
+
const getBranchMigrationHistory = (variables, signal) => dataPlaneFetch({
|
2613
|
+
url: "/db/{dbBranchName}/migrations",
|
2614
|
+
method: "get",
|
2615
|
+
...variables,
|
2616
|
+
signal
|
2617
|
+
});
|
2618
|
+
const getBranchMigrationPlan = (variables, signal) => dataPlaneFetch({
|
2619
|
+
url: "/db/{dbBranchName}/migrations/plan",
|
2620
|
+
method: "post",
|
2621
|
+
...variables,
|
2622
|
+
signal
|
2623
|
+
});
|
2624
|
+
const executeBranchMigrationPlan = (variables, signal) => dataPlaneFetch({
|
2625
|
+
url: "/db/{dbBranchName}/migrations/execute",
|
2626
|
+
method: "post",
|
2627
|
+
...variables,
|
2628
|
+
signal
|
2629
|
+
});
|
2630
|
+
const queryMigrationRequests = (variables, signal) => dataPlaneFetch({
|
2631
|
+
url: "/dbs/{dbName}/migrations/query",
|
2632
|
+
method: "post",
|
2633
|
+
...variables,
|
2634
|
+
signal
|
2635
|
+
});
|
2636
|
+
const createMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations", method: "post", ...variables, signal });
|
2637
|
+
const getMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2638
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}",
|
2639
|
+
method: "get",
|
2640
|
+
...variables,
|
2641
|
+
signal
|
2642
|
+
});
|
2643
|
+
const updateMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2644
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}",
|
2645
|
+
method: "patch",
|
2646
|
+
...variables,
|
2647
|
+
signal
|
2648
|
+
});
|
2649
|
+
const listMigrationRequestsCommits = (variables, signal) => dataPlaneFetch({
|
2650
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/commits",
|
2651
|
+
method: "post",
|
2652
|
+
...variables,
|
2653
|
+
signal
|
2654
|
+
});
|
2655
|
+
const compareMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2656
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/compare",
|
2657
|
+
method: "post",
|
2658
|
+
...variables,
|
2659
|
+
signal
|
2660
|
+
});
|
2661
|
+
const getMigrationRequestIsMerged = (variables, signal) => dataPlaneFetch({
|
2662
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
|
2663
|
+
method: "get",
|
2664
|
+
...variables,
|
2665
|
+
signal
|
2666
|
+
});
|
2667
|
+
const mergeMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2668
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
|
2669
|
+
method: "post",
|
2670
|
+
...variables,
|
2671
|
+
signal
|
2672
|
+
});
|
2673
|
+
const getBranchSchemaHistory = (variables, signal) => dataPlaneFetch({
|
2674
|
+
url: "/db/{dbBranchName}/schema/history",
|
2675
|
+
method: "post",
|
891
2676
|
...variables,
|
892
2677
|
signal
|
893
2678
|
});
|
894
|
-
const
|
895
|
-
url: "/db/{dbBranchName}/
|
2679
|
+
const compareBranchWithUserSchema = (variables, signal) => dataPlaneFetch({
|
2680
|
+
url: "/db/{dbBranchName}/schema/compare",
|
896
2681
|
method: "post",
|
897
2682
|
...variables,
|
898
2683
|
signal
|
899
2684
|
});
|
900
|
-
const
|
901
|
-
url: "/db/{dbBranchName}/
|
902
|
-
method: "
|
2685
|
+
const compareBranchSchemas = (variables, signal) => dataPlaneFetch({
|
2686
|
+
url: "/db/{dbBranchName}/schema/compare/{branchName}",
|
2687
|
+
method: "post",
|
903
2688
|
...variables,
|
904
2689
|
signal
|
905
2690
|
});
|
906
|
-
const
|
907
|
-
url: "/db/{dbBranchName}/
|
908
|
-
method: "
|
2691
|
+
const updateBranchSchema = (variables, signal) => dataPlaneFetch({
|
2692
|
+
url: "/db/{dbBranchName}/schema/update",
|
2693
|
+
method: "post",
|
909
2694
|
...variables,
|
910
2695
|
signal
|
911
2696
|
});
|
912
|
-
const
|
913
|
-
url: "/db/{dbBranchName}/
|
914
|
-
method: "
|
2697
|
+
const previewBranchSchemaEdit = (variables, signal) => dataPlaneFetch({
|
2698
|
+
url: "/db/{dbBranchName}/schema/preview",
|
2699
|
+
method: "post",
|
915
2700
|
...variables,
|
916
2701
|
signal
|
917
2702
|
});
|
918
|
-
const
|
919
|
-
|
920
|
-
|
921
|
-
const resolveBranch = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/resolveBranch", method: "get", ...variables, signal });
|
922
|
-
const getBranchMigrationHistory = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations", method: "get", ...variables, signal });
|
923
|
-
const getBranchMigrationPlan = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/plan", method: "post", ...variables, signal });
|
924
|
-
const executeBranchMigrationPlan = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/execute", method: "post", ...variables, signal });
|
925
|
-
const queryMigrationRequests = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/query", method: "post", ...variables, signal });
|
926
|
-
const createMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations", method: "post", ...variables, signal });
|
927
|
-
const getMigrationRequest = (variables, signal) => dataPlaneFetch({
|
928
|
-
url: "/dbs/{dbName}/migrations/{mrNumber}",
|
929
|
-
method: "get",
|
2703
|
+
const applyBranchSchemaEdit = (variables, signal) => dataPlaneFetch({
|
2704
|
+
url: "/db/{dbBranchName}/schema/apply",
|
2705
|
+
method: "post",
|
930
2706
|
...variables,
|
931
2707
|
signal
|
932
2708
|
});
|
933
|
-
const
|
934
|
-
|
935
|
-
const compareMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/{mrNumber}/compare", method: "post", ...variables, signal });
|
936
|
-
const getMigrationRequestIsMerged = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations/{mrNumber}/merge", method: "get", ...variables, signal });
|
937
|
-
const mergeMigrationRequest = (variables, signal) => dataPlaneFetch({
|
938
|
-
url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
|
2709
|
+
const pushBranchMigrations = (variables, signal) => dataPlaneFetch({
|
2710
|
+
url: "/db/{dbBranchName}/schema/push",
|
939
2711
|
method: "post",
|
940
2712
|
...variables,
|
941
2713
|
signal
|
942
2714
|
});
|
943
|
-
const getBranchSchemaHistory = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/history", method: "post", ...variables, signal });
|
944
|
-
const compareBranchWithUserSchema = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/compare", method: "post", ...variables, signal });
|
945
|
-
const compareBranchSchemas = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/compare/{branchName}", method: "post", ...variables, signal });
|
946
|
-
const updateBranchSchema = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/update", method: "post", ...variables, signal });
|
947
|
-
const previewBranchSchemaEdit = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/preview", method: "post", ...variables, signal });
|
948
|
-
const applyBranchSchemaEdit = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/apply", method: "post", ...variables, signal });
|
949
|
-
const pushBranchMigrations = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/push", method: "post", ...variables, signal });
|
950
2715
|
const createTable = (variables, signal) => dataPlaneFetch({
|
951
2716
|
url: "/db/{dbBranchName}/tables/{tableName}",
|
952
2717
|
method: "put",
|
@@ -959,14 +2724,24 @@ const deleteTable = (variables, signal) => dataPlaneFetch({
|
|
959
2724
|
...variables,
|
960
2725
|
signal
|
961
2726
|
});
|
962
|
-
const updateTable = (variables, signal) => dataPlaneFetch({
|
2727
|
+
const updateTable = (variables, signal) => dataPlaneFetch({
|
2728
|
+
url: "/db/{dbBranchName}/tables/{tableName}",
|
2729
|
+
method: "patch",
|
2730
|
+
...variables,
|
2731
|
+
signal
|
2732
|
+
});
|
963
2733
|
const getTableSchema = (variables, signal) => dataPlaneFetch({
|
964
2734
|
url: "/db/{dbBranchName}/tables/{tableName}/schema",
|
965
2735
|
method: "get",
|
966
2736
|
...variables,
|
967
2737
|
signal
|
968
2738
|
});
|
969
|
-
const setTableSchema = (variables, signal) => dataPlaneFetch({
|
2739
|
+
const setTableSchema = (variables, signal) => dataPlaneFetch({
|
2740
|
+
url: "/db/{dbBranchName}/tables/{tableName}/schema",
|
2741
|
+
method: "put",
|
2742
|
+
...variables,
|
2743
|
+
signal
|
2744
|
+
});
|
970
2745
|
const getTableColumns = (variables, signal) => dataPlaneFetch({
|
971
2746
|
url: "/db/{dbBranchName}/tables/{tableName}/columns",
|
972
2747
|
method: "get",
|
@@ -974,7 +2749,12 @@ const getTableColumns = (variables, signal) => dataPlaneFetch({
|
|
974
2749
|
signal
|
975
2750
|
});
|
976
2751
|
const addTableColumn = (variables, signal) => dataPlaneFetch(
|
977
|
-
{
|
2752
|
+
{
|
2753
|
+
url: "/db/{dbBranchName}/tables/{tableName}/columns",
|
2754
|
+
method: "post",
|
2755
|
+
...variables,
|
2756
|
+
signal
|
2757
|
+
}
|
978
2758
|
);
|
979
2759
|
const getColumn = (variables, signal) => dataPlaneFetch({
|
980
2760
|
url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
|
@@ -982,15 +2762,30 @@ const getColumn = (variables, signal) => dataPlaneFetch({
|
|
982
2762
|
...variables,
|
983
2763
|
signal
|
984
2764
|
});
|
985
|
-
const updateColumn = (variables, signal) => dataPlaneFetch({
|
2765
|
+
const updateColumn = (variables, signal) => dataPlaneFetch({
|
2766
|
+
url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
|
2767
|
+
method: "patch",
|
2768
|
+
...variables,
|
2769
|
+
signal
|
2770
|
+
});
|
986
2771
|
const deleteColumn = (variables, signal) => dataPlaneFetch({
|
987
2772
|
url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
|
988
2773
|
method: "delete",
|
989
2774
|
...variables,
|
990
2775
|
signal
|
991
2776
|
});
|
992
|
-
const branchTransaction = (variables, signal) => dataPlaneFetch({
|
993
|
-
|
2777
|
+
const branchTransaction = (variables, signal) => dataPlaneFetch({
|
2778
|
+
url: "/db/{dbBranchName}/transaction",
|
2779
|
+
method: "post",
|
2780
|
+
...variables,
|
2781
|
+
signal
|
2782
|
+
});
|
2783
|
+
const insertRecord = (variables, signal) => dataPlaneFetch({
|
2784
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data",
|
2785
|
+
method: "post",
|
2786
|
+
...variables,
|
2787
|
+
signal
|
2788
|
+
});
|
994
2789
|
const getFileItem = (variables, signal) => dataPlaneFetch({
|
995
2790
|
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}/column/{columnName}/file/{fileId}",
|
996
2791
|
method: "get",
|
@@ -1033,11 +2828,36 @@ const getRecord = (variables, signal) => dataPlaneFetch({
|
|
1033
2828
|
...variables,
|
1034
2829
|
signal
|
1035
2830
|
});
|
1036
|
-
const insertRecordWithID = (variables, signal) => dataPlaneFetch({
|
1037
|
-
|
1038
|
-
|
1039
|
-
|
1040
|
-
|
2831
|
+
const insertRecordWithID = (variables, signal) => dataPlaneFetch({
|
2832
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2833
|
+
method: "put",
|
2834
|
+
...variables,
|
2835
|
+
signal
|
2836
|
+
});
|
2837
|
+
const updateRecordWithID = (variables, signal) => dataPlaneFetch({
|
2838
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2839
|
+
method: "patch",
|
2840
|
+
...variables,
|
2841
|
+
signal
|
2842
|
+
});
|
2843
|
+
const upsertRecordWithID = (variables, signal) => dataPlaneFetch({
|
2844
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2845
|
+
method: "post",
|
2846
|
+
...variables,
|
2847
|
+
signal
|
2848
|
+
});
|
2849
|
+
const deleteRecord = (variables, signal) => dataPlaneFetch({
|
2850
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2851
|
+
method: "delete",
|
2852
|
+
...variables,
|
2853
|
+
signal
|
2854
|
+
});
|
2855
|
+
const bulkInsertTableRecords = (variables, signal) => dataPlaneFetch({
|
2856
|
+
url: "/db/{dbBranchName}/tables/{tableName}/bulk",
|
2857
|
+
method: "post",
|
2858
|
+
...variables,
|
2859
|
+
signal
|
2860
|
+
});
|
1041
2861
|
const queryTable = (variables, signal) => dataPlaneFetch({
|
1042
2862
|
url: "/db/{dbBranchName}/tables/{tableName}/query",
|
1043
2863
|
method: "post",
|
@@ -1056,16 +2876,36 @@ const searchTable = (variables, signal) => dataPlaneFetch({
|
|
1056
2876
|
...variables,
|
1057
2877
|
signal
|
1058
2878
|
});
|
1059
|
-
const vectorSearchTable = (variables, signal) => dataPlaneFetch({
|
2879
|
+
const vectorSearchTable = (variables, signal) => dataPlaneFetch({
|
2880
|
+
url: "/db/{dbBranchName}/tables/{tableName}/vectorSearch",
|
2881
|
+
method: "post",
|
2882
|
+
...variables,
|
2883
|
+
signal
|
2884
|
+
});
|
1060
2885
|
const askTable = (variables, signal) => dataPlaneFetch({
|
1061
2886
|
url: "/db/{dbBranchName}/tables/{tableName}/ask",
|
1062
2887
|
method: "post",
|
1063
2888
|
...variables,
|
1064
2889
|
signal
|
1065
2890
|
});
|
1066
|
-
const askTableSession = (variables, signal) => dataPlaneFetch({
|
1067
|
-
|
1068
|
-
|
2891
|
+
const askTableSession = (variables, signal) => dataPlaneFetch({
|
2892
|
+
url: "/db/{dbBranchName}/tables/{tableName}/ask/{sessionId}",
|
2893
|
+
method: "post",
|
2894
|
+
...variables,
|
2895
|
+
signal
|
2896
|
+
});
|
2897
|
+
const summarizeTable = (variables, signal) => dataPlaneFetch({
|
2898
|
+
url: "/db/{dbBranchName}/tables/{tableName}/summarize",
|
2899
|
+
method: "post",
|
2900
|
+
...variables,
|
2901
|
+
signal
|
2902
|
+
});
|
2903
|
+
const aggregateTable = (variables, signal) => dataPlaneFetch({
|
2904
|
+
url: "/db/{dbBranchName}/tables/{tableName}/aggregate",
|
2905
|
+
method: "post",
|
2906
|
+
...variables,
|
2907
|
+
signal
|
2908
|
+
});
|
1069
2909
|
const fileAccess = (variables, signal) => dataPlaneFetch({
|
1070
2910
|
url: "/file/{fileId}",
|
1071
2911
|
method: "get",
|
@@ -1084,10 +2924,20 @@ const sqlQuery = (variables, signal) => dataPlaneFetch({
|
|
1084
2924
|
...variables,
|
1085
2925
|
signal
|
1086
2926
|
});
|
2927
|
+
const sqlBatchQuery = (variables, signal) => dataPlaneFetch({
|
2928
|
+
url: "/db/{dbBranchName}/sql/batch",
|
2929
|
+
method: "post",
|
2930
|
+
...variables,
|
2931
|
+
signal
|
2932
|
+
});
|
1087
2933
|
const operationsByTag$2 = {
|
1088
2934
|
migrations: {
|
1089
2935
|
applyMigration,
|
2936
|
+
startMigration,
|
2937
|
+
completeMigration,
|
2938
|
+
rollbackMigration,
|
1090
2939
|
adaptTable,
|
2940
|
+
adaptAllTables,
|
1091
2941
|
getBranchMigrationJobStatus,
|
1092
2942
|
getMigrationJobStatus,
|
1093
2943
|
getMigrationHistory,
|
@@ -1150,7 +3000,16 @@ const operationsByTag$2 = {
|
|
1150
3000
|
deleteRecord,
|
1151
3001
|
bulkInsertTableRecords
|
1152
3002
|
},
|
1153
|
-
files: {
|
3003
|
+
files: {
|
3004
|
+
getFileItem,
|
3005
|
+
putFileItem,
|
3006
|
+
deleteFileItem,
|
3007
|
+
getFile,
|
3008
|
+
putFile,
|
3009
|
+
deleteFile,
|
3010
|
+
fileAccess,
|
3011
|
+
fileUpload
|
3012
|
+
},
|
1154
3013
|
searchAndFilter: {
|
1155
3014
|
queryTable,
|
1156
3015
|
searchBranch,
|
@@ -1161,7 +3020,7 @@ const operationsByTag$2 = {
|
|
1161
3020
|
summarizeTable,
|
1162
3021
|
aggregateTable
|
1163
3022
|
},
|
1164
|
-
sql: { sqlQuery }
|
3023
|
+
sql: { sqlQuery, sqlBatchQuery }
|
1165
3024
|
};
|
1166
3025
|
|
1167
3026
|
const controlPlaneFetch = async (options) => fetch$1({ ...options, endpoint: "controlPlane" });
|
@@ -1228,7 +3087,12 @@ const deleteOAuthAccessToken = (variables, signal) => controlPlaneFetch({
|
|
1228
3087
|
...variables,
|
1229
3088
|
signal
|
1230
3089
|
});
|
1231
|
-
const updateOAuthAccessToken = (variables, signal) => controlPlaneFetch({
|
3090
|
+
const updateOAuthAccessToken = (variables, signal) => controlPlaneFetch({
|
3091
|
+
url: "/user/oauth/tokens/{token}",
|
3092
|
+
method: "patch",
|
3093
|
+
...variables,
|
3094
|
+
signal
|
3095
|
+
});
|
1232
3096
|
const getWorkspacesList = (variables, signal) => controlPlaneFetch({
|
1233
3097
|
url: "/workspaces",
|
1234
3098
|
method: "get",
|
@@ -1259,47 +3123,150 @@ const deleteWorkspace = (variables, signal) => controlPlaneFetch({
|
|
1259
3123
|
...variables,
|
1260
3124
|
signal
|
1261
3125
|
});
|
1262
|
-
const
|
1263
|
-
|
3126
|
+
const getWorkspaceSettings = (variables, signal) => controlPlaneFetch({
|
3127
|
+
url: "/workspaces/{workspaceId}/settings",
|
3128
|
+
method: "get",
|
3129
|
+
...variables,
|
3130
|
+
signal
|
3131
|
+
});
|
3132
|
+
const updateWorkspaceSettings = (variables, signal) => controlPlaneFetch({
|
3133
|
+
url: "/workspaces/{workspaceId}/settings",
|
3134
|
+
method: "patch",
|
3135
|
+
...variables,
|
3136
|
+
signal
|
3137
|
+
});
|
3138
|
+
const getWorkspaceMembersList = (variables, signal) => controlPlaneFetch({
|
3139
|
+
url: "/workspaces/{workspaceId}/members",
|
3140
|
+
method: "get",
|
3141
|
+
...variables,
|
3142
|
+
signal
|
3143
|
+
});
|
3144
|
+
const updateWorkspaceMemberRole = (variables, signal) => controlPlaneFetch({
|
3145
|
+
url: "/workspaces/{workspaceId}/members/{userId}",
|
3146
|
+
method: "put",
|
3147
|
+
...variables,
|
3148
|
+
signal
|
3149
|
+
});
|
1264
3150
|
const removeWorkspaceMember = (variables, signal) => controlPlaneFetch({
|
1265
3151
|
url: "/workspaces/{workspaceId}/members/{userId}",
|
1266
3152
|
method: "delete",
|
1267
3153
|
...variables,
|
1268
3154
|
signal
|
1269
3155
|
});
|
1270
|
-
const inviteWorkspaceMember = (variables, signal) => controlPlaneFetch({
|
1271
|
-
|
1272
|
-
|
1273
|
-
|
1274
|
-
|
1275
|
-
|
1276
|
-
const
|
3156
|
+
const inviteWorkspaceMember = (variables, signal) => controlPlaneFetch({
|
3157
|
+
url: "/workspaces/{workspaceId}/invites",
|
3158
|
+
method: "post",
|
3159
|
+
...variables,
|
3160
|
+
signal
|
3161
|
+
});
|
3162
|
+
const updateWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3163
|
+
url: "/workspaces/{workspaceId}/invites/{inviteId}",
|
3164
|
+
method: "patch",
|
3165
|
+
...variables,
|
3166
|
+
signal
|
3167
|
+
});
|
3168
|
+
const cancelWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3169
|
+
url: "/workspaces/{workspaceId}/invites/{inviteId}",
|
3170
|
+
method: "delete",
|
3171
|
+
...variables,
|
3172
|
+
signal
|
3173
|
+
});
|
3174
|
+
const acceptWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3175
|
+
url: "/workspaces/{workspaceId}/invites/{inviteKey}/accept",
|
3176
|
+
method: "post",
|
3177
|
+
...variables,
|
3178
|
+
signal
|
3179
|
+
});
|
3180
|
+
const resendWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3181
|
+
url: "/workspaces/{workspaceId}/invites/{inviteId}/resend",
|
3182
|
+
method: "post",
|
3183
|
+
...variables,
|
3184
|
+
signal
|
3185
|
+
});
|
3186
|
+
const listClusters = (variables, signal) => controlPlaneFetch({
|
3187
|
+
url: "/workspaces/{workspaceId}/clusters",
|
3188
|
+
method: "get",
|
3189
|
+
...variables,
|
3190
|
+
signal
|
3191
|
+
});
|
3192
|
+
const createCluster = (variables, signal) => controlPlaneFetch({
|
3193
|
+
url: "/workspaces/{workspaceId}/clusters",
|
3194
|
+
method: "post",
|
3195
|
+
...variables,
|
3196
|
+
signal
|
3197
|
+
});
|
1277
3198
|
const getCluster = (variables, signal) => controlPlaneFetch({
|
1278
3199
|
url: "/workspaces/{workspaceId}/clusters/{clusterId}",
|
1279
3200
|
method: "get",
|
1280
3201
|
...variables,
|
1281
3202
|
signal
|
1282
3203
|
});
|
1283
|
-
const updateCluster = (variables, signal) => controlPlaneFetch({
|
3204
|
+
const updateCluster = (variables, signal) => controlPlaneFetch({
|
3205
|
+
url: "/workspaces/{workspaceId}/clusters/{clusterId}",
|
3206
|
+
method: "patch",
|
3207
|
+
...variables,
|
3208
|
+
signal
|
3209
|
+
});
|
3210
|
+
const deleteCluster = (variables, signal) => controlPlaneFetch({
|
3211
|
+
url: "/workspaces/{workspaceId}/clusters/{clusterId}",
|
3212
|
+
method: "delete",
|
3213
|
+
...variables,
|
3214
|
+
signal
|
3215
|
+
});
|
1284
3216
|
const getDatabaseList = (variables, signal) => controlPlaneFetch({
|
1285
3217
|
url: "/workspaces/{workspaceId}/dbs",
|
1286
3218
|
method: "get",
|
1287
3219
|
...variables,
|
1288
3220
|
signal
|
1289
3221
|
});
|
1290
|
-
const createDatabase = (variables, signal) => controlPlaneFetch({
|
3222
|
+
const createDatabase = (variables, signal) => controlPlaneFetch({
|
3223
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
3224
|
+
method: "put",
|
3225
|
+
...variables,
|
3226
|
+
signal
|
3227
|
+
});
|
1291
3228
|
const deleteDatabase = (variables, signal) => controlPlaneFetch({
|
1292
3229
|
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
1293
3230
|
method: "delete",
|
1294
3231
|
...variables,
|
1295
3232
|
signal
|
1296
3233
|
});
|
1297
|
-
const getDatabaseMetadata = (variables, signal) => controlPlaneFetch({
|
1298
|
-
|
1299
|
-
|
1300
|
-
|
1301
|
-
|
1302
|
-
|
3234
|
+
const getDatabaseMetadata = (variables, signal) => controlPlaneFetch({
|
3235
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
3236
|
+
method: "get",
|
3237
|
+
...variables,
|
3238
|
+
signal
|
3239
|
+
});
|
3240
|
+
const updateDatabaseMetadata = (variables, signal) => controlPlaneFetch({
|
3241
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
3242
|
+
method: "patch",
|
3243
|
+
...variables,
|
3244
|
+
signal
|
3245
|
+
});
|
3246
|
+
const renameDatabase = (variables, signal) => controlPlaneFetch({
|
3247
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/rename",
|
3248
|
+
method: "post",
|
3249
|
+
...variables,
|
3250
|
+
signal
|
3251
|
+
});
|
3252
|
+
const getDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
|
3253
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
|
3254
|
+
method: "get",
|
3255
|
+
...variables,
|
3256
|
+
signal
|
3257
|
+
});
|
3258
|
+
const updateDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
|
3259
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
|
3260
|
+
method: "put",
|
3261
|
+
...variables,
|
3262
|
+
signal
|
3263
|
+
});
|
3264
|
+
const deleteDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
|
3265
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
|
3266
|
+
method: "delete",
|
3267
|
+
...variables,
|
3268
|
+
signal
|
3269
|
+
});
|
1303
3270
|
const listRegions = (variables, signal) => controlPlaneFetch({
|
1304
3271
|
url: "/workspaces/{workspaceId}/regions",
|
1305
3272
|
method: "get",
|
@@ -1324,6 +3291,8 @@ const operationsByTag$1 = {
|
|
1324
3291
|
getWorkspace,
|
1325
3292
|
updateWorkspace,
|
1326
3293
|
deleteWorkspace,
|
3294
|
+
getWorkspaceSettings,
|
3295
|
+
updateWorkspaceSettings,
|
1327
3296
|
getWorkspaceMembersList,
|
1328
3297
|
updateWorkspaceMemberRole,
|
1329
3298
|
removeWorkspaceMember
|
@@ -1335,7 +3304,13 @@ const operationsByTag$1 = {
|
|
1335
3304
|
acceptWorkspaceMemberInvite,
|
1336
3305
|
resendWorkspaceMemberInvite
|
1337
3306
|
},
|
1338
|
-
xbcontrolOther: {
|
3307
|
+
xbcontrolOther: {
|
3308
|
+
listClusters,
|
3309
|
+
createCluster,
|
3310
|
+
getCluster,
|
3311
|
+
updateCluster,
|
3312
|
+
deleteCluster
|
3313
|
+
},
|
1339
3314
|
databases: {
|
1340
3315
|
getDatabaseList,
|
1341
3316
|
createDatabase,
|
@@ -1355,7 +3330,7 @@ const operationsByTag = deepMerge(operationsByTag$2, operationsByTag$1);
|
|
1355
3330
|
const buildApiClient = () => class {
|
1356
3331
|
constructor(options = {}) {
|
1357
3332
|
const provider = options.host ?? "production";
|
1358
|
-
const apiKey = options.apiKey
|
3333
|
+
const apiKey = options.apiKey;
|
1359
3334
|
const trace = options.trace ?? defaultTrace;
|
1360
3335
|
const clientID = generateUUID();
|
1361
3336
|
if (!apiKey) {
|
@@ -1422,8 +3397,7 @@ function buildTransformString(transformations) {
|
|
1422
3397
|
).join(",");
|
1423
3398
|
}
|
1424
3399
|
function transformImage(url, ...transformations) {
|
1425
|
-
if (!isDefined(url))
|
1426
|
-
return void 0;
|
3400
|
+
if (!isDefined(url)) return void 0;
|
1427
3401
|
const newTransformations = buildTransformString(transformations);
|
1428
3402
|
const { hostname, pathname, search } = new URL(url);
|
1429
3403
|
const pathParts = pathname.split("/");
|
@@ -1536,8 +3510,7 @@ class XataFile {
|
|
1536
3510
|
}
|
1537
3511
|
}
|
1538
3512
|
const parseInputFileEntry = async (entry) => {
|
1539
|
-
if (!isDefined(entry))
|
1540
|
-
return null;
|
3513
|
+
if (!isDefined(entry)) return null;
|
1541
3514
|
const { id, name, mediaType, base64Content, enablePublicUrl, signedUrlTimeout, uploadUrlTimeout } = await entry;
|
1542
3515
|
return compactObject({
|
1543
3516
|
id,
|
@@ -1552,24 +3525,19 @@ const parseInputFileEntry = async (entry) => {
|
|
1552
3525
|
};
|
1553
3526
|
|
1554
3527
|
function cleanFilter(filter) {
|
1555
|
-
if (!isDefined(filter))
|
1556
|
-
|
1557
|
-
if (!isObject(filter))
|
1558
|
-
return filter;
|
3528
|
+
if (!isDefined(filter)) return void 0;
|
3529
|
+
if (!isObject(filter)) return filter;
|
1559
3530
|
const values = Object.fromEntries(
|
1560
3531
|
Object.entries(filter).reduce((acc, [key, value]) => {
|
1561
|
-
if (!isDefined(value))
|
1562
|
-
return acc;
|
3532
|
+
if (!isDefined(value)) return acc;
|
1563
3533
|
if (Array.isArray(value)) {
|
1564
3534
|
const clean = value.map((item) => cleanFilter(item)).filter((item) => isDefined(item));
|
1565
|
-
if (clean.length === 0)
|
1566
|
-
return acc;
|
3535
|
+
if (clean.length === 0) return acc;
|
1567
3536
|
return [...acc, [key, clean]];
|
1568
3537
|
}
|
1569
3538
|
if (isObject(value)) {
|
1570
3539
|
const clean = cleanFilter(value);
|
1571
|
-
if (!isDefined(clean))
|
1572
|
-
return acc;
|
3540
|
+
if (!isDefined(clean)) return acc;
|
1573
3541
|
return [...acc, [key, clean]];
|
1574
3542
|
}
|
1575
3543
|
return [...acc, [key, value]];
|
@@ -1579,10 +3547,8 @@ function cleanFilter(filter) {
|
|
1579
3547
|
}
|
1580
3548
|
|
1581
3549
|
function stringifyJson(value) {
|
1582
|
-
if (!isDefined(value))
|
1583
|
-
|
1584
|
-
if (isString(value))
|
1585
|
-
return value;
|
3550
|
+
if (!isDefined(value)) return value;
|
3551
|
+
if (isString(value)) return value;
|
1586
3552
|
try {
|
1587
3553
|
return JSON.stringify(value);
|
1588
3554
|
} catch (e) {
|
@@ -1597,28 +3563,17 @@ function parseJson(value) {
|
|
1597
3563
|
}
|
1598
3564
|
}
|
1599
3565
|
|
1600
|
-
var
|
1601
|
-
|
1602
|
-
throw TypeError("Cannot " + msg);
|
1603
|
-
};
|
1604
|
-
var __privateGet$4 = (obj, member, getter) => {
|
1605
|
-
__accessCheck$5(obj, member, "read from private field");
|
1606
|
-
return getter ? getter.call(obj) : member.get(obj);
|
1607
|
-
};
|
1608
|
-
var __privateAdd$5 = (obj, member, value) => {
|
1609
|
-
if (member.has(obj))
|
1610
|
-
throw TypeError("Cannot add the same private member more than once");
|
1611
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
1612
|
-
};
|
1613
|
-
var __privateSet$3 = (obj, member, value, setter) => {
|
1614
|
-
__accessCheck$5(obj, member, "write to private field");
|
1615
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
1616
|
-
return value;
|
3566
|
+
var __typeError$5 = (msg) => {
|
3567
|
+
throw TypeError(msg);
|
1617
3568
|
};
|
3569
|
+
var __accessCheck$5 = (obj, member, msg) => member.has(obj) || __typeError$5("Cannot " + msg);
|
3570
|
+
var __privateGet$4 = (obj, member, getter) => (__accessCheck$5(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
3571
|
+
var __privateAdd$5 = (obj, member, value) => member.has(obj) ? __typeError$5("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
3572
|
+
var __privateSet$3 = (obj, member, value, setter) => (__accessCheck$5(obj, member, "write to private field"), member.set(obj, value), value);
|
1618
3573
|
var _query, _page;
|
1619
3574
|
class Page {
|
1620
3575
|
constructor(query, meta, records = []) {
|
1621
|
-
__privateAdd$5(this, _query
|
3576
|
+
__privateAdd$5(this, _query);
|
1622
3577
|
__privateSet$3(this, _query, query);
|
1623
3578
|
this.meta = meta;
|
1624
3579
|
this.records = new PageRecordArray(this, records);
|
@@ -1705,7 +3660,7 @@ class RecordArray extends Array {
|
|
1705
3660
|
const _PageRecordArray = class _PageRecordArray extends Array {
|
1706
3661
|
constructor(...args) {
|
1707
3662
|
super(..._PageRecordArray.parseConstructorParams(...args));
|
1708
|
-
__privateAdd$5(this, _page
|
3663
|
+
__privateAdd$5(this, _page);
|
1709
3664
|
__privateSet$3(this, _page, isObject(args[0]?.meta) ? args[0] : { meta: { page: { cursor: "", more: false } }, records: [] });
|
1710
3665
|
}
|
1711
3666
|
static parseConstructorParams(...args) {
|
@@ -1776,34 +3731,20 @@ const _PageRecordArray = class _PageRecordArray extends Array {
|
|
1776
3731
|
_page = new WeakMap();
|
1777
3732
|
let PageRecordArray = _PageRecordArray;
|
1778
3733
|
|
1779
|
-
var
|
1780
|
-
|
1781
|
-
throw TypeError("Cannot " + msg);
|
3734
|
+
var __typeError$4 = (msg) => {
|
3735
|
+
throw TypeError(msg);
|
1782
3736
|
};
|
1783
|
-
var
|
1784
|
-
|
1785
|
-
|
1786
|
-
|
1787
|
-
var
|
1788
|
-
|
1789
|
-
throw TypeError("Cannot add the same private member more than once");
|
1790
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
1791
|
-
};
|
1792
|
-
var __privateSet$2 = (obj, member, value, setter) => {
|
1793
|
-
__accessCheck$4(obj, member, "write to private field");
|
1794
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
1795
|
-
return value;
|
1796
|
-
};
|
1797
|
-
var __privateMethod$3 = (obj, member, method) => {
|
1798
|
-
__accessCheck$4(obj, member, "access private method");
|
1799
|
-
return method;
|
1800
|
-
};
|
1801
|
-
var _table$1, _repository, _data, _cleanFilterConstraint, cleanFilterConstraint_fn;
|
3737
|
+
var __accessCheck$4 = (obj, member, msg) => member.has(obj) || __typeError$4("Cannot " + msg);
|
3738
|
+
var __privateGet$3 = (obj, member, getter) => (__accessCheck$4(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
3739
|
+
var __privateAdd$4 = (obj, member, value) => member.has(obj) ? __typeError$4("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
3740
|
+
var __privateSet$2 = (obj, member, value, setter) => (__accessCheck$4(obj, member, "write to private field"), member.set(obj, value), value);
|
3741
|
+
var __privateMethod$3 = (obj, member, method) => (__accessCheck$4(obj, member, "access private method"), method);
|
3742
|
+
var _table$1, _repository, _data, _Query_instances, cleanFilterConstraint_fn;
|
1802
3743
|
const _Query = class _Query {
|
1803
3744
|
constructor(repository, table, data, rawParent) {
|
1804
|
-
__privateAdd$4(this,
|
1805
|
-
__privateAdd$4(this, _table$1
|
1806
|
-
__privateAdd$4(this, _repository
|
3745
|
+
__privateAdd$4(this, _Query_instances);
|
3746
|
+
__privateAdd$4(this, _table$1);
|
3747
|
+
__privateAdd$4(this, _repository);
|
1807
3748
|
__privateAdd$4(this, _data, { filter: {} });
|
1808
3749
|
// Implements pagination
|
1809
3750
|
this.meta = { page: { cursor: "start", more: true, size: PAGINATION_DEFAULT_SIZE } };
|
@@ -1881,12 +3822,12 @@ const _Query = class _Query {
|
|
1881
3822
|
filter(a, b) {
|
1882
3823
|
if (arguments.length === 1) {
|
1883
3824
|
const constraints = Object.entries(a ?? {}).map(([column, constraint]) => ({
|
1884
|
-
[column]: __privateMethod$3(this,
|
3825
|
+
[column]: __privateMethod$3(this, _Query_instances, cleanFilterConstraint_fn).call(this, column, constraint)
|
1885
3826
|
}));
|
1886
3827
|
const $all = compact([__privateGet$3(this, _data).filter?.$all].flat().concat(constraints));
|
1887
3828
|
return new _Query(__privateGet$3(this, _repository), __privateGet$3(this, _table$1), { filter: { $all } }, __privateGet$3(this, _data));
|
1888
3829
|
} else {
|
1889
|
-
const constraints = isDefined(a) && isDefined(b) ? [{ [a]: __privateMethod$3(this,
|
3830
|
+
const constraints = isDefined(a) && isDefined(b) ? [{ [a]: __privateMethod$3(this, _Query_instances, cleanFilterConstraint_fn).call(this, a, b) }] : void 0;
|
1890
3831
|
const $all = compact([__privateGet$3(this, _data).filter?.$all].flat().concat(constraints));
|
1891
3832
|
return new _Query(__privateGet$3(this, _repository), __privateGet$3(this, _table$1), { filter: { $all } }, __privateGet$3(this, _data));
|
1892
3833
|
}
|
@@ -1965,8 +3906,7 @@ const _Query = class _Query {
|
|
1965
3906
|
}
|
1966
3907
|
async getFirstOrThrow(options = {}) {
|
1967
3908
|
const records = await this.getMany({ ...options, pagination: { size: 1 } });
|
1968
|
-
if (records[0] === void 0)
|
1969
|
-
throw new Error("No results found.");
|
3909
|
+
if (records[0] === void 0) throw new Error("No results found.");
|
1970
3910
|
return records[0];
|
1971
3911
|
}
|
1972
3912
|
async summarize(params = {}) {
|
@@ -2021,7 +3961,7 @@ const _Query = class _Query {
|
|
2021
3961
|
_table$1 = new WeakMap();
|
2022
3962
|
_repository = new WeakMap();
|
2023
3963
|
_data = new WeakMap();
|
2024
|
-
|
3964
|
+
_Query_instances = new WeakSet();
|
2025
3965
|
cleanFilterConstraint_fn = function(column, value) {
|
2026
3966
|
const columnType = __privateGet$3(this, _table$1).schema?.columns.find(({ name }) => name === column)?.type;
|
2027
3967
|
if (columnType === "multiple" && (isString(value) || isStringArray(value))) {
|
@@ -2082,8 +4022,7 @@ function isSortFilterString(value) {
|
|
2082
4022
|
}
|
2083
4023
|
function isSortFilterBase(filter) {
|
2084
4024
|
return isObject(filter) && Object.entries(filter).every(([key, value]) => {
|
2085
|
-
if (key === "*")
|
2086
|
-
return value === "random";
|
4025
|
+
if (key === "*") return value === "random";
|
2087
4026
|
return value === "asc" || value === "desc";
|
2088
4027
|
});
|
2089
4028
|
}
|
@@ -2104,29 +4043,15 @@ function buildSortFilter(filter) {
|
|
2104
4043
|
}
|
2105
4044
|
}
|
2106
4045
|
|
2107
|
-
var
|
2108
|
-
|
2109
|
-
throw TypeError("Cannot " + msg);
|
2110
|
-
};
|
2111
|
-
var __privateGet$2 = (obj, member, getter) => {
|
2112
|
-
__accessCheck$3(obj, member, "read from private field");
|
2113
|
-
return getter ? getter.call(obj) : member.get(obj);
|
2114
|
-
};
|
2115
|
-
var __privateAdd$3 = (obj, member, value) => {
|
2116
|
-
if (member.has(obj))
|
2117
|
-
throw TypeError("Cannot add the same private member more than once");
|
2118
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
2119
|
-
};
|
2120
|
-
var __privateSet$1 = (obj, member, value, setter) => {
|
2121
|
-
__accessCheck$3(obj, member, "write to private field");
|
2122
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
2123
|
-
return value;
|
2124
|
-
};
|
2125
|
-
var __privateMethod$2 = (obj, member, method) => {
|
2126
|
-
__accessCheck$3(obj, member, "access private method");
|
2127
|
-
return method;
|
4046
|
+
var __typeError$3 = (msg) => {
|
4047
|
+
throw TypeError(msg);
|
2128
4048
|
};
|
2129
|
-
var
|
4049
|
+
var __accessCheck$3 = (obj, member, msg) => member.has(obj) || __typeError$3("Cannot " + msg);
|
4050
|
+
var __privateGet$2 = (obj, member, getter) => (__accessCheck$3(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
4051
|
+
var __privateAdd$3 = (obj, member, value) => member.has(obj) ? __typeError$3("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
4052
|
+
var __privateSet$1 = (obj, member, value, setter) => (__accessCheck$3(obj, member, "write to private field"), member.set(obj, value), value);
|
4053
|
+
var __privateMethod$2 = (obj, member, method) => (__accessCheck$3(obj, member, "access private method"), method);
|
4054
|
+
var _table, _getFetchProps, _db, _schemaTables, _trace, _RestRepository_instances, insertRecordWithoutId_fn, insertRecordWithId_fn, insertRecords_fn, updateRecordWithID_fn, updateRecords_fn, upsertRecordWithID_fn, deleteRecord_fn, deleteRecords_fn, getSchemaTables_fn, transformObjectToApi_fn;
|
2130
4055
|
const BULK_OPERATION_MAX_SIZE = 1e3;
|
2131
4056
|
class Repository extends Query {
|
2132
4057
|
}
|
@@ -2137,21 +4062,12 @@ class RestRepository extends Query {
|
|
2137
4062
|
{ name: options.table, schema: options.schemaTables?.find((table) => table.name === options.table) },
|
2138
4063
|
{}
|
2139
4064
|
);
|
2140
|
-
__privateAdd$3(this,
|
2141
|
-
__privateAdd$3(this,
|
2142
|
-
__privateAdd$3(this,
|
2143
|
-
__privateAdd$3(this,
|
2144
|
-
__privateAdd$3(this,
|
2145
|
-
__privateAdd$3(this,
|
2146
|
-
__privateAdd$3(this, _deleteRecord);
|
2147
|
-
__privateAdd$3(this, _deleteRecords);
|
2148
|
-
__privateAdd$3(this, _getSchemaTables);
|
2149
|
-
__privateAdd$3(this, _transformObjectToApi);
|
2150
|
-
__privateAdd$3(this, _table, void 0);
|
2151
|
-
__privateAdd$3(this, _getFetchProps, void 0);
|
2152
|
-
__privateAdd$3(this, _db, void 0);
|
2153
|
-
__privateAdd$3(this, _schemaTables, void 0);
|
2154
|
-
__privateAdd$3(this, _trace, void 0);
|
4065
|
+
__privateAdd$3(this, _RestRepository_instances);
|
4066
|
+
__privateAdd$3(this, _table);
|
4067
|
+
__privateAdd$3(this, _getFetchProps);
|
4068
|
+
__privateAdd$3(this, _db);
|
4069
|
+
__privateAdd$3(this, _schemaTables);
|
4070
|
+
__privateAdd$3(this, _trace);
|
2155
4071
|
__privateSet$1(this, _table, options.table);
|
2156
4072
|
__privateSet$1(this, _db, options.db);
|
2157
4073
|
__privateSet$1(this, _schemaTables, options.schemaTables);
|
@@ -2170,31 +4086,28 @@ class RestRepository extends Query {
|
|
2170
4086
|
return __privateGet$2(this, _trace).call(this, "create", async () => {
|
2171
4087
|
const ifVersion = parseIfVersion(b, c, d);
|
2172
4088
|
if (Array.isArray(a)) {
|
2173
|
-
if (a.length === 0)
|
2174
|
-
|
2175
|
-
const ids = await __privateMethod$2(this, _insertRecords, insertRecords_fn).call(this, a, { ifVersion, createOnly: true });
|
4089
|
+
if (a.length === 0) return [];
|
4090
|
+
const ids = await __privateMethod$2(this, _RestRepository_instances, insertRecords_fn).call(this, a, { ifVersion, createOnly: true });
|
2176
4091
|
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
2177
4092
|
const result = await this.read(ids, columns);
|
2178
4093
|
return result;
|
2179
4094
|
}
|
2180
4095
|
if (isString(a) && isObject(b)) {
|
2181
|
-
if (a === "")
|
2182
|
-
throw new Error("The id can't be empty");
|
4096
|
+
if (a === "") throw new Error("The id can't be empty");
|
2183
4097
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2184
|
-
return await __privateMethod$2(this,
|
4098
|
+
return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: true, ifVersion });
|
2185
4099
|
}
|
2186
4100
|
if (isObject(a) && isString(a.xata_id)) {
|
2187
|
-
if (a.xata_id === "")
|
2188
|
-
throw new Error("The id can't be empty");
|
4101
|
+
if (a.xata_id === "") throw new Error("The id can't be empty");
|
2189
4102
|
const columns = isValidSelectableColumns(b) ? b : void 0;
|
2190
|
-
return await __privateMethod$2(this,
|
4103
|
+
return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, {
|
2191
4104
|
createOnly: true,
|
2192
4105
|
ifVersion
|
2193
4106
|
});
|
2194
4107
|
}
|
2195
4108
|
if (isObject(a)) {
|
2196
4109
|
const columns = isValidSelectableColumns(b) ? b : void 0;
|
2197
|
-
return __privateMethod$2(this,
|
4110
|
+
return __privateMethod$2(this, _RestRepository_instances, insertRecordWithoutId_fn).call(this, a, columns);
|
2198
4111
|
}
|
2199
4112
|
throw new Error("Invalid arguments for create method");
|
2200
4113
|
});
|
@@ -2203,8 +4116,7 @@ class RestRepository extends Query {
|
|
2203
4116
|
return __privateGet$2(this, _trace).call(this, "read", async () => {
|
2204
4117
|
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
2205
4118
|
if (Array.isArray(a)) {
|
2206
|
-
if (a.length === 0)
|
2207
|
-
return [];
|
4119
|
+
if (a.length === 0) return [];
|
2208
4120
|
const ids = a.map((item) => extractId(item));
|
2209
4121
|
const finalObjects = await this.getAll({ filter: { xata_id: { $any: compact(ids) } }, columns });
|
2210
4122
|
const dictionary = finalObjects.reduce((acc, object) => {
|
@@ -2227,7 +4139,7 @@ class RestRepository extends Query {
|
|
2227
4139
|
queryParams: { columns },
|
2228
4140
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2229
4141
|
});
|
2230
|
-
const schemaTables = await __privateMethod$2(this,
|
4142
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2231
4143
|
return initObject(
|
2232
4144
|
__privateGet$2(this, _db),
|
2233
4145
|
schemaTables,
|
@@ -2268,11 +4180,10 @@ class RestRepository extends Query {
|
|
2268
4180
|
return __privateGet$2(this, _trace).call(this, "update", async () => {
|
2269
4181
|
const ifVersion = parseIfVersion(b, c, d);
|
2270
4182
|
if (Array.isArray(a)) {
|
2271
|
-
if (a.length === 0)
|
2272
|
-
return [];
|
4183
|
+
if (a.length === 0) return [];
|
2273
4184
|
const existing = await this.read(a, ["xata_id"]);
|
2274
4185
|
const updates = a.filter((_item, index) => existing[index] !== null);
|
2275
|
-
await __privateMethod$2(this,
|
4186
|
+
await __privateMethod$2(this, _RestRepository_instances, updateRecords_fn).call(this, updates, {
|
2276
4187
|
ifVersion,
|
2277
4188
|
upsert: false
|
2278
4189
|
});
|
@@ -2283,15 +4194,14 @@ class RestRepository extends Query {
|
|
2283
4194
|
try {
|
2284
4195
|
if (isString(a) && isObject(b)) {
|
2285
4196
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2286
|
-
return await __privateMethod$2(this,
|
4197
|
+
return await __privateMethod$2(this, _RestRepository_instances, updateRecordWithID_fn).call(this, a, b, columns, { ifVersion });
|
2287
4198
|
}
|
2288
4199
|
if (isObject(a) && isString(a.xata_id)) {
|
2289
4200
|
const columns = isValidSelectableColumns(b) ? b : void 0;
|
2290
|
-
return await __privateMethod$2(this,
|
4201
|
+
return await __privateMethod$2(this, _RestRepository_instances, updateRecordWithID_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, { ifVersion });
|
2291
4202
|
}
|
2292
4203
|
} catch (error) {
|
2293
|
-
if (error.status === 422)
|
2294
|
-
return null;
|
4204
|
+
if (error.status === 422) return null;
|
2295
4205
|
throw error;
|
2296
4206
|
}
|
2297
4207
|
throw new Error("Invalid arguments for update method");
|
@@ -2320,9 +4230,8 @@ class RestRepository extends Query {
|
|
2320
4230
|
return __privateGet$2(this, _trace).call(this, "createOrUpdate", async () => {
|
2321
4231
|
const ifVersion = parseIfVersion(b, c, d);
|
2322
4232
|
if (Array.isArray(a)) {
|
2323
|
-
if (a.length === 0)
|
2324
|
-
|
2325
|
-
await __privateMethod$2(this, _updateRecords, updateRecords_fn).call(this, a, {
|
4233
|
+
if (a.length === 0) return [];
|
4234
|
+
await __privateMethod$2(this, _RestRepository_instances, updateRecords_fn).call(this, a, {
|
2326
4235
|
ifVersion,
|
2327
4236
|
upsert: true
|
2328
4237
|
});
|
@@ -2331,16 +4240,14 @@ class RestRepository extends Query {
|
|
2331
4240
|
return result;
|
2332
4241
|
}
|
2333
4242
|
if (isString(a) && isObject(b)) {
|
2334
|
-
if (a === "")
|
2335
|
-
throw new Error("The id can't be empty");
|
4243
|
+
if (a === "") throw new Error("The id can't be empty");
|
2336
4244
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2337
|
-
return await __privateMethod$2(this,
|
4245
|
+
return await __privateMethod$2(this, _RestRepository_instances, upsertRecordWithID_fn).call(this, a, b, columns, { ifVersion });
|
2338
4246
|
}
|
2339
4247
|
if (isObject(a) && isString(a.xata_id)) {
|
2340
|
-
if (a.xata_id === "")
|
2341
|
-
throw new Error("The id can't be empty");
|
4248
|
+
if (a.xata_id === "") throw new Error("The id can't be empty");
|
2342
4249
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2343
|
-
return await __privateMethod$2(this,
|
4250
|
+
return await __privateMethod$2(this, _RestRepository_instances, upsertRecordWithID_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, { ifVersion });
|
2344
4251
|
}
|
2345
4252
|
if (!isDefined(a) && isObject(b)) {
|
2346
4253
|
return await this.create(b, c);
|
@@ -2355,24 +4262,21 @@ class RestRepository extends Query {
|
|
2355
4262
|
return __privateGet$2(this, _trace).call(this, "createOrReplace", async () => {
|
2356
4263
|
const ifVersion = parseIfVersion(b, c, d);
|
2357
4264
|
if (Array.isArray(a)) {
|
2358
|
-
if (a.length === 0)
|
2359
|
-
|
2360
|
-
const ids = await __privateMethod$2(this, _insertRecords, insertRecords_fn).call(this, a, { ifVersion, createOnly: false });
|
4265
|
+
if (a.length === 0) return [];
|
4266
|
+
const ids = await __privateMethod$2(this, _RestRepository_instances, insertRecords_fn).call(this, a, { ifVersion, createOnly: false });
|
2361
4267
|
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
2362
4268
|
const result = await this.read(ids, columns);
|
2363
4269
|
return result;
|
2364
4270
|
}
|
2365
4271
|
if (isString(a) && isObject(b)) {
|
2366
|
-
if (a === "")
|
2367
|
-
throw new Error("The id can't be empty");
|
4272
|
+
if (a === "") throw new Error("The id can't be empty");
|
2368
4273
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2369
|
-
return await __privateMethod$2(this,
|
4274
|
+
return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: false, ifVersion });
|
2370
4275
|
}
|
2371
4276
|
if (isObject(a) && isString(a.xata_id)) {
|
2372
|
-
if (a.xata_id === "")
|
2373
|
-
throw new Error("The id can't be empty");
|
4277
|
+
if (a.xata_id === "") throw new Error("The id can't be empty");
|
2374
4278
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2375
|
-
return await __privateMethod$2(this,
|
4279
|
+
return await __privateMethod$2(this, _RestRepository_instances, insertRecordWithId_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, {
|
2376
4280
|
createOnly: false,
|
2377
4281
|
ifVersion
|
2378
4282
|
});
|
@@ -2389,25 +4293,22 @@ class RestRepository extends Query {
|
|
2389
4293
|
async delete(a, b) {
|
2390
4294
|
return __privateGet$2(this, _trace).call(this, "delete", async () => {
|
2391
4295
|
if (Array.isArray(a)) {
|
2392
|
-
if (a.length === 0)
|
2393
|
-
return [];
|
4296
|
+
if (a.length === 0) return [];
|
2394
4297
|
const ids = a.map((o) => {
|
2395
|
-
if (isString(o))
|
2396
|
-
|
2397
|
-
if (isString(o.xata_id))
|
2398
|
-
return o.xata_id;
|
4298
|
+
if (isString(o)) return o;
|
4299
|
+
if (isString(o.xata_id)) return o.xata_id;
|
2399
4300
|
throw new Error("Invalid arguments for delete method");
|
2400
4301
|
});
|
2401
4302
|
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
2402
4303
|
const result = await this.read(a, columns);
|
2403
|
-
await __privateMethod$2(this,
|
4304
|
+
await __privateMethod$2(this, _RestRepository_instances, deleteRecords_fn).call(this, ids);
|
2404
4305
|
return result;
|
2405
4306
|
}
|
2406
4307
|
if (isString(a)) {
|
2407
|
-
return __privateMethod$2(this,
|
4308
|
+
return __privateMethod$2(this, _RestRepository_instances, deleteRecord_fn).call(this, a, b);
|
2408
4309
|
}
|
2409
4310
|
if (isObject(a) && isString(a.xata_id)) {
|
2410
|
-
return __privateMethod$2(this,
|
4311
|
+
return __privateMethod$2(this, _RestRepository_instances, deleteRecord_fn).call(this, a.xata_id, b);
|
2411
4312
|
}
|
2412
4313
|
throw new Error("Invalid arguments for delete method");
|
2413
4314
|
});
|
@@ -2451,7 +4352,7 @@ class RestRepository extends Query {
|
|
2451
4352
|
},
|
2452
4353
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2453
4354
|
});
|
2454
|
-
const schemaTables = await __privateMethod$2(this,
|
4355
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2455
4356
|
return {
|
2456
4357
|
records: records.map((item) => initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), item, ["*"])),
|
2457
4358
|
totalCount
|
@@ -2476,7 +4377,7 @@ class RestRepository extends Query {
|
|
2476
4377
|
},
|
2477
4378
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2478
4379
|
});
|
2479
|
-
const schemaTables = await __privateMethod$2(this,
|
4380
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2480
4381
|
return {
|
2481
4382
|
records: records.map((item) => initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), item, ["*"])),
|
2482
4383
|
totalCount
|
@@ -2518,7 +4419,7 @@ class RestRepository extends Query {
|
|
2518
4419
|
fetchOptions: data.fetchOptions,
|
2519
4420
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2520
4421
|
});
|
2521
|
-
const schemaTables = await __privateMethod$2(this,
|
4422
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2522
4423
|
const records = objects.map(
|
2523
4424
|
(record) => initObject(
|
2524
4425
|
__privateGet$2(this, _db),
|
@@ -2552,7 +4453,7 @@ class RestRepository extends Query {
|
|
2552
4453
|
},
|
2553
4454
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2554
4455
|
});
|
2555
|
-
const schemaTables = await __privateMethod$2(this,
|
4456
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2556
4457
|
return {
|
2557
4458
|
...result,
|
2558
4459
|
summaries: result.summaries.map(
|
@@ -2600,9 +4501,9 @@ _getFetchProps = new WeakMap();
|
|
2600
4501
|
_db = new WeakMap();
|
2601
4502
|
_schemaTables = new WeakMap();
|
2602
4503
|
_trace = new WeakMap();
|
2603
|
-
|
4504
|
+
_RestRepository_instances = new WeakSet();
|
2604
4505
|
insertRecordWithoutId_fn = async function(object, columns = ["*"]) {
|
2605
|
-
const record = await __privateMethod$2(this,
|
4506
|
+
const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2606
4507
|
const response = await insertRecord({
|
2607
4508
|
pathParams: {
|
2608
4509
|
workspace: "{workspaceId}",
|
@@ -2614,14 +4515,12 @@ insertRecordWithoutId_fn = async function(object, columns = ["*"]) {
|
|
2614
4515
|
body: record,
|
2615
4516
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2616
4517
|
});
|
2617
|
-
const schemaTables = await __privateMethod$2(this,
|
4518
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2618
4519
|
return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
|
2619
4520
|
};
|
2620
|
-
_insertRecordWithId = new WeakSet();
|
2621
4521
|
insertRecordWithId_fn = async function(recordId, object, columns = ["*"], { createOnly, ifVersion }) {
|
2622
|
-
if (!recordId)
|
2623
|
-
|
2624
|
-
const record = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
|
4522
|
+
if (!recordId) return null;
|
4523
|
+
const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2625
4524
|
const response = await insertRecordWithID({
|
2626
4525
|
pathParams: {
|
2627
4526
|
workspace: "{workspaceId}",
|
@@ -2634,13 +4533,12 @@ insertRecordWithId_fn = async function(recordId, object, columns = ["*"], { crea
|
|
2634
4533
|
queryParams: { createOnly, columns, ifVersion },
|
2635
4534
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2636
4535
|
});
|
2637
|
-
const schemaTables = await __privateMethod$2(this,
|
4536
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2638
4537
|
return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
|
2639
4538
|
};
|
2640
|
-
_insertRecords = new WeakSet();
|
2641
4539
|
insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
|
2642
4540
|
const operations = await promiseMap(objects, async (object) => {
|
2643
|
-
const record = await __privateMethod$2(this,
|
4541
|
+
const record = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2644
4542
|
return { insert: { table: __privateGet$2(this, _table), record, createOnly, ifVersion } };
|
2645
4543
|
});
|
2646
4544
|
const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
|
@@ -2665,11 +4563,9 @@ insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
|
|
2665
4563
|
}
|
2666
4564
|
return ids;
|
2667
4565
|
};
|
2668
|
-
_updateRecordWithID = new WeakSet();
|
2669
4566
|
updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVersion }) {
|
2670
|
-
if (!recordId)
|
2671
|
-
|
2672
|
-
const { xata_id: _id, ...record } = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
|
4567
|
+
if (!recordId) return null;
|
4568
|
+
const { xata_id: _id, ...record } = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2673
4569
|
try {
|
2674
4570
|
const response = await updateRecordWithID({
|
2675
4571
|
pathParams: {
|
@@ -2683,7 +4579,7 @@ updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
|
|
2683
4579
|
body: record,
|
2684
4580
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2685
4581
|
});
|
2686
|
-
const schemaTables = await __privateMethod$2(this,
|
4582
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2687
4583
|
return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
|
2688
4584
|
} catch (e) {
|
2689
4585
|
if (isObject(e) && e.status === 404) {
|
@@ -2692,10 +4588,9 @@ updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
|
|
2692
4588
|
throw e;
|
2693
4589
|
}
|
2694
4590
|
};
|
2695
|
-
_updateRecords = new WeakSet();
|
2696
4591
|
updateRecords_fn = async function(objects, { ifVersion, upsert }) {
|
2697
4592
|
const operations = await promiseMap(objects, async ({ xata_id, ...object }) => {
|
2698
|
-
const fields = await __privateMethod$2(this,
|
4593
|
+
const fields = await __privateMethod$2(this, _RestRepository_instances, transformObjectToApi_fn).call(this, object);
|
2699
4594
|
return { update: { table: __privateGet$2(this, _table), id: xata_id, ifVersion, upsert, fields } };
|
2700
4595
|
});
|
2701
4596
|
const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
|
@@ -2720,10 +4615,8 @@ updateRecords_fn = async function(objects, { ifVersion, upsert }) {
|
|
2720
4615
|
}
|
2721
4616
|
return ids;
|
2722
4617
|
};
|
2723
|
-
_upsertRecordWithID = new WeakSet();
|
2724
4618
|
upsertRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVersion }) {
|
2725
|
-
if (!recordId)
|
2726
|
-
return null;
|
4619
|
+
if (!recordId) return null;
|
2727
4620
|
const response = await upsertRecordWithID({
|
2728
4621
|
pathParams: {
|
2729
4622
|
workspace: "{workspaceId}",
|
@@ -2736,13 +4629,11 @@ upsertRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
|
|
2736
4629
|
body: object,
|
2737
4630
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2738
4631
|
});
|
2739
|
-
const schemaTables = await __privateMethod$2(this,
|
4632
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2740
4633
|
return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
|
2741
4634
|
};
|
2742
|
-
_deleteRecord = new WeakSet();
|
2743
4635
|
deleteRecord_fn = async function(recordId, columns = ["*"]) {
|
2744
|
-
if (!recordId)
|
2745
|
-
return null;
|
4636
|
+
if (!recordId) return null;
|
2746
4637
|
try {
|
2747
4638
|
const response = await deleteRecord({
|
2748
4639
|
pathParams: {
|
@@ -2755,7 +4646,7 @@ deleteRecord_fn = async function(recordId, columns = ["*"]) {
|
|
2755
4646
|
queryParams: { columns },
|
2756
4647
|
...__privateGet$2(this, _getFetchProps).call(this)
|
2757
4648
|
});
|
2758
|
-
const schemaTables = await __privateMethod$2(this,
|
4649
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2759
4650
|
return initObject(__privateGet$2(this, _db), schemaTables, __privateGet$2(this, _table), response, columns);
|
2760
4651
|
} catch (e) {
|
2761
4652
|
if (isObject(e) && e.status === 404) {
|
@@ -2764,7 +4655,6 @@ deleteRecord_fn = async function(recordId, columns = ["*"]) {
|
|
2764
4655
|
throw e;
|
2765
4656
|
}
|
2766
4657
|
};
|
2767
|
-
_deleteRecords = new WeakSet();
|
2768
4658
|
deleteRecords_fn = async function(recordIds) {
|
2769
4659
|
const chunkedOperations = chunk(
|
2770
4660
|
compact(recordIds).map((id) => ({ delete: { table: __privateGet$2(this, _table), id } })),
|
@@ -2782,10 +4672,8 @@ deleteRecords_fn = async function(recordIds) {
|
|
2782
4672
|
});
|
2783
4673
|
}
|
2784
4674
|
};
|
2785
|
-
_getSchemaTables = new WeakSet();
|
2786
4675
|
getSchemaTables_fn = async function() {
|
2787
|
-
if (__privateGet$2(this, _schemaTables))
|
2788
|
-
return __privateGet$2(this, _schemaTables);
|
4676
|
+
if (__privateGet$2(this, _schemaTables)) return __privateGet$2(this, _schemaTables);
|
2789
4677
|
const { schema } = await getBranchDetails({
|
2790
4678
|
pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
|
2791
4679
|
...__privateGet$2(this, _getFetchProps).call(this)
|
@@ -2793,16 +4681,13 @@ getSchemaTables_fn = async function() {
|
|
2793
4681
|
__privateSet$1(this, _schemaTables, schema.tables);
|
2794
4682
|
return schema.tables;
|
2795
4683
|
};
|
2796
|
-
_transformObjectToApi = new WeakSet();
|
2797
4684
|
transformObjectToApi_fn = async function(object) {
|
2798
|
-
const schemaTables = await __privateMethod$2(this,
|
4685
|
+
const schemaTables = await __privateMethod$2(this, _RestRepository_instances, getSchemaTables_fn).call(this);
|
2799
4686
|
const schema = schemaTables.find((table) => table.name === __privateGet$2(this, _table));
|
2800
|
-
if (!schema)
|
2801
|
-
throw new Error(`Table ${__privateGet$2(this, _table)} not found in schema`);
|
4687
|
+
if (!schema) throw new Error(`Table ${__privateGet$2(this, _table)} not found in schema`);
|
2802
4688
|
const result = {};
|
2803
4689
|
for (const [key, value] of Object.entries(object)) {
|
2804
|
-
if (["xata_version", "xata_createdat", "xata_updatedat"].includes(key))
|
2805
|
-
continue;
|
4690
|
+
if (["xata_version", "xata_createdat", "xata_updatedat"].includes(key)) continue;
|
2806
4691
|
const type = schema.columns.find((column) => column.name === key)?.type;
|
2807
4692
|
switch (type) {
|
2808
4693
|
case "link": {
|
@@ -2832,11 +4717,9 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
|
|
2832
4717
|
const data = {};
|
2833
4718
|
Object.assign(data, { ...object });
|
2834
4719
|
const { columns } = schemaTables.find(({ name }) => name === table) ?? {};
|
2835
|
-
if (!columns)
|
2836
|
-
console.error(`Table ${table} not found in schema`);
|
4720
|
+
if (!columns) console.error(`Table ${table} not found in schema`);
|
2837
4721
|
for (const column of columns ?? []) {
|
2838
|
-
if (!isValidColumn(selectedColumns, column))
|
2839
|
-
continue;
|
4722
|
+
if (!isValidColumn(selectedColumns, column)) continue;
|
2840
4723
|
const value = data[column.name];
|
2841
4724
|
switch (column.type) {
|
2842
4725
|
case "datetime": {
|
@@ -2922,15 +4805,12 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
|
|
2922
4805
|
return record;
|
2923
4806
|
};
|
2924
4807
|
function extractId(value) {
|
2925
|
-
if (isString(value))
|
2926
|
-
|
2927
|
-
if (isObject(value) && isString(value.xata_id))
|
2928
|
-
return value.xata_id;
|
4808
|
+
if (isString(value)) return value;
|
4809
|
+
if (isObject(value) && isString(value.xata_id)) return value.xata_id;
|
2929
4810
|
return void 0;
|
2930
4811
|
}
|
2931
4812
|
function isValidColumn(columns, column) {
|
2932
|
-
if (columns.includes("*"))
|
2933
|
-
return true;
|
4813
|
+
if (columns.includes("*")) return true;
|
2934
4814
|
return columns.filter((item) => isString(item) && item.startsWith(column.name)).length > 0;
|
2935
4815
|
}
|
2936
4816
|
function parseIfVersion(...args) {
|
@@ -2970,19 +4850,12 @@ const includesAll = (value) => ({ $includesAll: value });
|
|
2970
4850
|
const includesNone = (value) => ({ $includesNone: value });
|
2971
4851
|
const includesAny = (value) => ({ $includesAny: value });
|
2972
4852
|
|
2973
|
-
var
|
2974
|
-
|
2975
|
-
throw TypeError("Cannot " + msg);
|
2976
|
-
};
|
2977
|
-
var __privateGet$1 = (obj, member, getter) => {
|
2978
|
-
__accessCheck$2(obj, member, "read from private field");
|
2979
|
-
return getter ? getter.call(obj) : member.get(obj);
|
2980
|
-
};
|
2981
|
-
var __privateAdd$2 = (obj, member, value) => {
|
2982
|
-
if (member.has(obj))
|
2983
|
-
throw TypeError("Cannot add the same private member more than once");
|
2984
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
4853
|
+
var __typeError$2 = (msg) => {
|
4854
|
+
throw TypeError(msg);
|
2985
4855
|
};
|
4856
|
+
var __accessCheck$2 = (obj, member, msg) => member.has(obj) || __typeError$2("Cannot " + msg);
|
4857
|
+
var __privateGet$1 = (obj, member, getter) => (__accessCheck$2(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
4858
|
+
var __privateAdd$2 = (obj, member, value) => member.has(obj) ? __typeError$2("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
2986
4859
|
var _tables;
|
2987
4860
|
class SchemaPlugin extends XataPlugin {
|
2988
4861
|
constructor() {
|
@@ -2994,8 +4867,7 @@ class SchemaPlugin extends XataPlugin {
|
|
2994
4867
|
{},
|
2995
4868
|
{
|
2996
4869
|
get: (_target, table) => {
|
2997
|
-
if (!isString(table))
|
2998
|
-
throw new Error("Invalid table name");
|
4870
|
+
if (!isString(table)) throw new Error("Invalid table name");
|
2999
4871
|
if (__privateGet$1(this, _tables)[table] === void 0) {
|
3000
4872
|
__privateGet$1(this, _tables)[table] = new RestRepository({ db, pluginOptions, table, schemaTables: pluginOptions.tables });
|
3001
4873
|
}
|
@@ -3086,30 +4958,23 @@ function getContentType(file) {
|
|
3086
4958
|
return "application/octet-stream";
|
3087
4959
|
}
|
3088
4960
|
|
3089
|
-
var
|
3090
|
-
|
3091
|
-
throw TypeError("Cannot " + msg);
|
3092
|
-
};
|
3093
|
-
var __privateAdd$1 = (obj, member, value) => {
|
3094
|
-
if (member.has(obj))
|
3095
|
-
throw TypeError("Cannot add the same private member more than once");
|
3096
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
3097
|
-
};
|
3098
|
-
var __privateMethod$1 = (obj, member, method) => {
|
3099
|
-
__accessCheck$1(obj, member, "access private method");
|
3100
|
-
return method;
|
4961
|
+
var __typeError$1 = (msg) => {
|
4962
|
+
throw TypeError(msg);
|
3101
4963
|
};
|
3102
|
-
var
|
4964
|
+
var __accessCheck$1 = (obj, member, msg) => member.has(obj) || __typeError$1("Cannot " + msg);
|
4965
|
+
var __privateAdd$1 = (obj, member, value) => member.has(obj) ? __typeError$1("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
4966
|
+
var __privateMethod$1 = (obj, member, method) => (__accessCheck$1(obj, member, "access private method"), method);
|
4967
|
+
var _SearchPlugin_instances, search_fn;
|
3103
4968
|
class SearchPlugin extends XataPlugin {
|
3104
4969
|
constructor(db) {
|
3105
4970
|
super();
|
3106
4971
|
this.db = db;
|
3107
|
-
__privateAdd$1(this,
|
4972
|
+
__privateAdd$1(this, _SearchPlugin_instances);
|
3108
4973
|
}
|
3109
4974
|
build(pluginOptions) {
|
3110
4975
|
return {
|
3111
4976
|
all: async (query, options = {}) => {
|
3112
|
-
const { records, totalCount } = await __privateMethod$1(this,
|
4977
|
+
const { records, totalCount } = await __privateMethod$1(this, _SearchPlugin_instances, search_fn).call(this, query, options, pluginOptions);
|
3113
4978
|
return {
|
3114
4979
|
totalCount,
|
3115
4980
|
records: records.map((record) => {
|
@@ -3119,7 +4984,7 @@ class SearchPlugin extends XataPlugin {
|
|
3119
4984
|
};
|
3120
4985
|
},
|
3121
4986
|
byTable: async (query, options = {}) => {
|
3122
|
-
const { records: rawRecords, totalCount } = await __privateMethod$1(this,
|
4987
|
+
const { records: rawRecords, totalCount } = await __privateMethod$1(this, _SearchPlugin_instances, search_fn).call(this, query, options, pluginOptions);
|
3123
4988
|
const records = rawRecords.reduce((acc, record) => {
|
3124
4989
|
const table = record.xata_table;
|
3125
4990
|
const items = acc[table] ?? [];
|
@@ -3131,7 +4996,7 @@ class SearchPlugin extends XataPlugin {
|
|
3131
4996
|
};
|
3132
4997
|
}
|
3133
4998
|
}
|
3134
|
-
|
4999
|
+
_SearchPlugin_instances = new WeakSet();
|
3135
5000
|
search_fn = async function(query, options, pluginOptions) {
|
3136
5001
|
const { tables, fuzziness, highlight, prefix, page } = options ?? {};
|
3137
5002
|
const { records, totalCount } = await searchBranch({
|
@@ -3167,8 +5032,7 @@ function arrayString(val) {
|
|
3167
5032
|
return result;
|
3168
5033
|
}
|
3169
5034
|
function prepareValue(value) {
|
3170
|
-
if (!isDefined(value))
|
3171
|
-
return null;
|
5035
|
+
if (!isDefined(value)) return null;
|
3172
5036
|
if (value instanceof Date) {
|
3173
5037
|
return value.toISOString();
|
3174
5038
|
}
|
@@ -3195,19 +5059,19 @@ function prepareParams(param1, param2) {
|
|
3195
5059
|
return { statement, params: param2?.map((value) => prepareValue(value)) };
|
3196
5060
|
}
|
3197
5061
|
if (isObject(param1)) {
|
3198
|
-
const { statement, params, consistency } = param1;
|
3199
|
-
return { statement, params: params?.map((value) => prepareValue(value)), consistency };
|
5062
|
+
const { statement, params, consistency, responseType } = param1;
|
5063
|
+
return { statement, params: params?.map((value) => prepareValue(value)), consistency, responseType };
|
3200
5064
|
}
|
3201
5065
|
throw new Error("Invalid query");
|
3202
5066
|
}
|
3203
5067
|
|
3204
5068
|
class SQLPlugin extends XataPlugin {
|
3205
5069
|
build(pluginOptions) {
|
3206
|
-
|
5070
|
+
const sqlFunction = async (query, ...parameters) => {
|
3207
5071
|
if (!isParamsObject(query) && (!isTemplateStringsArray(query) || !Array.isArray(parameters))) {
|
3208
5072
|
throw new Error("Invalid usage of `xata.sql`. Please use it as a tagged template or with an object.");
|
3209
5073
|
}
|
3210
|
-
const { statement, params, consistency } = prepareParams(query, parameters);
|
5074
|
+
const { statement, params, consistency, responseType } = prepareParams(query, parameters);
|
3211
5075
|
const {
|
3212
5076
|
records,
|
3213
5077
|
rows,
|
@@ -3215,11 +5079,25 @@ class SQLPlugin extends XataPlugin {
|
|
3215
5079
|
columns = []
|
3216
5080
|
} = await sqlQuery({
|
3217
5081
|
pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
|
3218
|
-
body: { statement, params, consistency },
|
5082
|
+
body: { statement, params, consistency, responseType },
|
3219
5083
|
...pluginOptions
|
3220
5084
|
});
|
3221
5085
|
return { records, rows, warning, columns };
|
3222
5086
|
};
|
5087
|
+
sqlFunction.connectionString = buildConnectionString(pluginOptions);
|
5088
|
+
sqlFunction.batch = async (query) => {
|
5089
|
+
const { results } = await sqlBatchQuery({
|
5090
|
+
pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
|
5091
|
+
body: {
|
5092
|
+
statements: query.statements.map(({ statement, params }) => ({ statement, params })),
|
5093
|
+
consistency: query.consistency,
|
5094
|
+
responseType: query.responseType
|
5095
|
+
},
|
5096
|
+
...pluginOptions
|
5097
|
+
});
|
5098
|
+
return { results };
|
5099
|
+
};
|
5100
|
+
return sqlFunction;
|
3223
5101
|
}
|
3224
5102
|
}
|
3225
5103
|
function isTemplateStringsArray(strings) {
|
@@ -3228,6 +5106,32 @@ function isTemplateStringsArray(strings) {
|
|
3228
5106
|
function isParamsObject(params) {
|
3229
5107
|
return isObject(params) && "statement" in params;
|
3230
5108
|
}
|
5109
|
+
function buildDomain(host, region) {
|
5110
|
+
switch (host) {
|
5111
|
+
case "production":
|
5112
|
+
return `${region}.sql.xata.sh`;
|
5113
|
+
case "staging":
|
5114
|
+
return `${region}.sql.staging-xata.dev`;
|
5115
|
+
case "dev":
|
5116
|
+
return `${region}.sql.dev-xata.dev`;
|
5117
|
+
case "local":
|
5118
|
+
return "localhost:7654";
|
5119
|
+
default:
|
5120
|
+
throw new Error("Invalid host provider");
|
5121
|
+
}
|
5122
|
+
}
|
5123
|
+
function buildConnectionString({ apiKey, workspacesApiUrl, branch }) {
|
5124
|
+
const url = isString(workspacesApiUrl) ? workspacesApiUrl : workspacesApiUrl("", {});
|
5125
|
+
const parts = parseWorkspacesUrlParts(url);
|
5126
|
+
if (!parts) throw new Error("Invalid workspaces URL");
|
5127
|
+
const { workspace: workspaceSlug, region, database, host } = parts;
|
5128
|
+
const domain = buildDomain(host, region);
|
5129
|
+
const workspace = workspaceSlug.split("-").pop();
|
5130
|
+
if (!workspace || !region || !database || !apiKey || !branch) {
|
5131
|
+
throw new Error("Unable to build xata connection string");
|
5132
|
+
}
|
5133
|
+
return `postgresql://${workspace}:${apiKey}@${domain}/${database}:${branch}?sslmode=require`;
|
5134
|
+
}
|
3231
5135
|
|
3232
5136
|
class TransactionPlugin extends XataPlugin {
|
3233
5137
|
build(pluginOptions) {
|
@@ -3244,41 +5148,27 @@ class TransactionPlugin extends XataPlugin {
|
|
3244
5148
|
}
|
3245
5149
|
}
|
3246
5150
|
|
3247
|
-
var
|
3248
|
-
|
3249
|
-
throw TypeError("Cannot " + msg);
|
3250
|
-
};
|
3251
|
-
var __privateGet = (obj, member, getter) => {
|
3252
|
-
__accessCheck(obj, member, "read from private field");
|
3253
|
-
return getter ? getter.call(obj) : member.get(obj);
|
3254
|
-
};
|
3255
|
-
var __privateAdd = (obj, member, value) => {
|
3256
|
-
if (member.has(obj))
|
3257
|
-
throw TypeError("Cannot add the same private member more than once");
|
3258
|
-
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
3259
|
-
};
|
3260
|
-
var __privateSet = (obj, member, value, setter) => {
|
3261
|
-
__accessCheck(obj, member, "write to private field");
|
3262
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
3263
|
-
return value;
|
3264
|
-
};
|
3265
|
-
var __privateMethod = (obj, member, method) => {
|
3266
|
-
__accessCheck(obj, member, "access private method");
|
3267
|
-
return method;
|
5151
|
+
var __typeError = (msg) => {
|
5152
|
+
throw TypeError(msg);
|
3268
5153
|
};
|
5154
|
+
var __accessCheck = (obj, member, msg) => member.has(obj) || __typeError("Cannot " + msg);
|
5155
|
+
var __privateGet = (obj, member, getter) => (__accessCheck(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
|
5156
|
+
var __privateAdd = (obj, member, value) => member.has(obj) ? __typeError("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
5157
|
+
var __privateSet = (obj, member, value, setter) => (__accessCheck(obj, member, "write to private field"), member.set(obj, value), value);
|
5158
|
+
var __privateMethod = (obj, member, method) => (__accessCheck(obj, member, "access private method"), method);
|
3269
5159
|
const buildClient = (plugins) => {
|
3270
|
-
var _options,
|
5160
|
+
var _options, _instances, parseOptions_fn, getFetchProps_fn, _a;
|
3271
5161
|
return _a = class {
|
3272
5162
|
constructor(options = {}, tables) {
|
3273
|
-
__privateAdd(this,
|
3274
|
-
__privateAdd(this,
|
3275
|
-
|
3276
|
-
const safeOptions = __privateMethod(this, _parseOptions, parseOptions_fn).call(this, options);
|
5163
|
+
__privateAdd(this, _instances);
|
5164
|
+
__privateAdd(this, _options);
|
5165
|
+
const safeOptions = __privateMethod(this, _instances, parseOptions_fn).call(this, options);
|
3277
5166
|
__privateSet(this, _options, safeOptions);
|
3278
5167
|
const pluginOptions = {
|
3279
|
-
...__privateMethod(this,
|
5168
|
+
...__privateMethod(this, _instances, getFetchProps_fn).call(this, safeOptions),
|
3280
5169
|
host: safeOptions.host,
|
3281
|
-
tables
|
5170
|
+
tables,
|
5171
|
+
branch: safeOptions.branch
|
3282
5172
|
};
|
3283
5173
|
const db = new SchemaPlugin().build(pluginOptions);
|
3284
5174
|
const search = new SearchPlugin(db).build(pluginOptions);
|
@@ -3292,8 +5182,7 @@ const buildClient = (plugins) => {
|
|
3292
5182
|
this.sql = sql;
|
3293
5183
|
this.files = files;
|
3294
5184
|
for (const [key, namespace] of Object.entries(plugins ?? {})) {
|
3295
|
-
if (namespace === void 0)
|
3296
|
-
continue;
|
5185
|
+
if (namespace === void 0) continue;
|
3297
5186
|
this[key] = namespace.build(pluginOptions);
|
3298
5187
|
}
|
3299
5188
|
}
|
@@ -3302,8 +5191,8 @@ const buildClient = (plugins) => {
|
|
3302
5191
|
const branch = __privateGet(this, _options).branch;
|
3303
5192
|
return { databaseURL, branch };
|
3304
5193
|
}
|
3305
|
-
}, _options = new WeakMap(),
|
3306
|
-
const enableBrowser = options?.enableBrowser ??
|
5194
|
+
}, _options = new WeakMap(), _instances = new WeakSet(), parseOptions_fn = function(options) {
|
5195
|
+
const enableBrowser = options?.enableBrowser ?? false;
|
3307
5196
|
const isBrowser = typeof window !== "undefined" && typeof Deno === "undefined";
|
3308
5197
|
if (isBrowser && !enableBrowser) {
|
3309
5198
|
throw new Error(
|
@@ -3311,8 +5200,9 @@ const buildClient = (plugins) => {
|
|
3311
5200
|
);
|
3312
5201
|
}
|
3313
5202
|
const fetch = getFetchImplementation(options?.fetch);
|
3314
|
-
const databaseURL = options?.databaseURL
|
3315
|
-
const apiKey = options?.apiKey
|
5203
|
+
const databaseURL = options?.databaseURL;
|
5204
|
+
const apiKey = options?.apiKey;
|
5205
|
+
const branch = options?.branch;
|
3316
5206
|
const trace = options?.trace ?? defaultTrace;
|
3317
5207
|
const clientName = options?.clientName;
|
3318
5208
|
const host = options?.host ?? "production";
|
@@ -3323,25 +5213,8 @@ const buildClient = (plugins) => {
|
|
3323
5213
|
if (!databaseURL) {
|
3324
5214
|
throw new Error("Option databaseURL is required");
|
3325
5215
|
}
|
3326
|
-
|
3327
|
-
|
3328
|
-
const branch = options?.branch || previewBranch || envBranch || "main";
|
3329
|
-
if (!!previewBranch && branch !== previewBranch) {
|
3330
|
-
console.warn(
|
3331
|
-
`Ignoring preview branch ${previewBranch} because branch option was passed to the client constructor with value ${branch}`
|
3332
|
-
);
|
3333
|
-
} else if (!!envBranch && branch !== envBranch) {
|
3334
|
-
console.warn(
|
3335
|
-
`Ignoring branch ${envBranch} because branch option was passed to the client constructor with value ${branch}`
|
3336
|
-
);
|
3337
|
-
} else if (!!previewBranch && !!envBranch && previewBranch !== envBranch) {
|
3338
|
-
console.warn(
|
3339
|
-
`Ignoring preview branch ${previewBranch} and branch ${envBranch} because branch option was passed to the client constructor with value ${branch}`
|
3340
|
-
);
|
3341
|
-
} else if (!previewBranch && !envBranch && options?.branch === void 0) {
|
3342
|
-
console.warn(
|
3343
|
-
`No branch was passed to the client constructor. Using default branch ${branch}. You can set the branch with the environment variable XATA_BRANCH or by passing the branch option to the client constructor.`
|
3344
|
-
);
|
5216
|
+
if (!branch) {
|
5217
|
+
throw new Error("Option branch is required");
|
3345
5218
|
}
|
3346
5219
|
return {
|
3347
5220
|
fetch,
|
@@ -3355,7 +5228,7 @@ const buildClient = (plugins) => {
|
|
3355
5228
|
clientName,
|
3356
5229
|
xataAgentExtra
|
3357
5230
|
};
|
3358
|
-
},
|
5231
|
+
}, getFetchProps_fn = function({
|
3359
5232
|
fetch,
|
3360
5233
|
apiKey,
|
3361
5234
|
databaseURL,
|
@@ -3396,26 +5269,19 @@ class Serializer {
|
|
3396
5269
|
}
|
3397
5270
|
toJSON(data) {
|
3398
5271
|
function visit(obj) {
|
3399
|
-
if (Array.isArray(obj))
|
3400
|
-
return obj.map(visit);
|
5272
|
+
if (Array.isArray(obj)) return obj.map(visit);
|
3401
5273
|
const type = typeof obj;
|
3402
|
-
if (type === "undefined")
|
3403
|
-
|
3404
|
-
if (
|
3405
|
-
return { [META]: "bigint", [VALUE]: obj.toString() };
|
3406
|
-
if (obj === null || type !== "object")
|
3407
|
-
return obj;
|
5274
|
+
if (type === "undefined") return { [META]: "undefined" };
|
5275
|
+
if (type === "bigint") return { [META]: "bigint", [VALUE]: obj.toString() };
|
5276
|
+
if (obj === null || type !== "object") return obj;
|
3408
5277
|
const constructor = obj.constructor;
|
3409
5278
|
const o = { [META]: constructor.name };
|
3410
5279
|
for (const [key, value] of Object.entries(obj)) {
|
3411
5280
|
o[key] = visit(value);
|
3412
5281
|
}
|
3413
|
-
if (constructor === Date)
|
3414
|
-
|
3415
|
-
if (constructor ===
|
3416
|
-
o[VALUE] = Object.fromEntries(obj);
|
3417
|
-
if (constructor === Set)
|
3418
|
-
o[VALUE] = [...obj];
|
5282
|
+
if (constructor === Date) o[VALUE] = obj.toISOString();
|
5283
|
+
if (constructor === Map) o[VALUE] = Object.fromEntries(obj);
|
5284
|
+
if (constructor === Set) o[VALUE] = [...obj];
|
3419
5285
|
return o;
|
3420
5286
|
}
|
3421
5287
|
return JSON.stringify(visit(data));
|
@@ -3428,16 +5294,11 @@ class Serializer {
|
|
3428
5294
|
if (constructor) {
|
3429
5295
|
return Object.assign(Object.create(constructor.prototype), rest);
|
3430
5296
|
}
|
3431
|
-
if (clazz === "Date")
|
3432
|
-
|
3433
|
-
if (clazz === "
|
3434
|
-
|
3435
|
-
if (clazz === "
|
3436
|
-
return new Map(Object.entries(val));
|
3437
|
-
if (clazz === "bigint")
|
3438
|
-
return BigInt(val);
|
3439
|
-
if (clazz === "undefined")
|
3440
|
-
return void 0;
|
5297
|
+
if (clazz === "Date") return new Date(val);
|
5298
|
+
if (clazz === "Set") return new Set(val);
|
5299
|
+
if (clazz === "Map") return new Map(Object.entries(val));
|
5300
|
+
if (clazz === "bigint") return BigInt(val);
|
5301
|
+
if (clazz === "undefined") return void 0;
|
3441
5302
|
return rest;
|
3442
5303
|
}
|
3443
5304
|
return value;
|
@@ -3452,6 +5313,47 @@ const deserialize = (json) => {
|
|
3452
5313
|
return defaultSerializer.fromJSON(json);
|
3453
5314
|
};
|
3454
5315
|
|
5316
|
+
function parseEnvironment(environment) {
|
5317
|
+
try {
|
5318
|
+
if (typeof environment === "function") {
|
5319
|
+
return new Proxy(
|
5320
|
+
{},
|
5321
|
+
{
|
5322
|
+
get(target) {
|
5323
|
+
return environment(target);
|
5324
|
+
}
|
5325
|
+
}
|
5326
|
+
);
|
5327
|
+
}
|
5328
|
+
if (isObject(environment)) {
|
5329
|
+
return environment;
|
5330
|
+
}
|
5331
|
+
} catch (error) {
|
5332
|
+
}
|
5333
|
+
return {};
|
5334
|
+
}
|
5335
|
+
function buildPreviewBranchName({ org, branch }) {
|
5336
|
+
return `preview-${org}-${branch}`;
|
5337
|
+
}
|
5338
|
+
function getDeployPreviewBranch(environment) {
|
5339
|
+
try {
|
5340
|
+
const { deployPreview, deployPreviewBranch, vercelGitCommitRef, vercelGitRepoOwner } = parseEnvironment(environment);
|
5341
|
+
if (deployPreviewBranch) return deployPreviewBranch;
|
5342
|
+
switch (deployPreview) {
|
5343
|
+
case "vercel": {
|
5344
|
+
if (!vercelGitCommitRef || !vercelGitRepoOwner) {
|
5345
|
+
console.warn("XATA_PREVIEW=vercel but VERCEL_GIT_COMMIT_REF or VERCEL_GIT_REPO_OWNER is not valid");
|
5346
|
+
return void 0;
|
5347
|
+
}
|
5348
|
+
return buildPreviewBranchName({ org: vercelGitRepoOwner, branch: vercelGitCommitRef });
|
5349
|
+
}
|
5350
|
+
}
|
5351
|
+
return void 0;
|
5352
|
+
} catch (err) {
|
5353
|
+
return void 0;
|
5354
|
+
}
|
5355
|
+
}
|
5356
|
+
|
3455
5357
|
class XataError extends Error {
|
3456
5358
|
constructor(message, status) {
|
3457
5359
|
super(message);
|
@@ -3460,6 +5362,7 @@ class XataError extends Error {
|
|
3460
5362
|
}
|
3461
5363
|
|
3462
5364
|
exports.BaseClient = BaseClient;
|
5365
|
+
exports.Buffer = Buffer;
|
3463
5366
|
exports.FetcherError = FetcherError;
|
3464
5367
|
exports.FilesPlugin = FilesPlugin;
|
3465
5368
|
exports.Operations = operationsByTag;
|
@@ -3485,6 +5388,7 @@ exports.XataError = XataError;
|
|
3485
5388
|
exports.XataFile = XataFile;
|
3486
5389
|
exports.XataPlugin = XataPlugin;
|
3487
5390
|
exports.acceptWorkspaceMemberInvite = acceptWorkspaceMemberInvite;
|
5391
|
+
exports.adaptAllTables = adaptAllTables;
|
3488
5392
|
exports.adaptTable = adaptTable;
|
3489
5393
|
exports.addGitBranchesEntry = addGitBranchesEntry;
|
3490
5394
|
exports.addTableColumn = addTableColumn;
|
@@ -3502,6 +5406,7 @@ exports.cancelWorkspaceMemberInvite = cancelWorkspaceMemberInvite;
|
|
3502
5406
|
exports.compareBranchSchemas = compareBranchSchemas;
|
3503
5407
|
exports.compareBranchWithUserSchema = compareBranchWithUserSchema;
|
3504
5408
|
exports.compareMigrationRequest = compareMigrationRequest;
|
5409
|
+
exports.completeMigration = completeMigration;
|
3505
5410
|
exports.contains = contains;
|
3506
5411
|
exports.copyBranch = copyBranch;
|
3507
5412
|
exports.createBranch = createBranch;
|
@@ -3512,6 +5417,7 @@ exports.createTable = createTable;
|
|
3512
5417
|
exports.createUserAPIKey = createUserAPIKey;
|
3513
5418
|
exports.createWorkspace = createWorkspace;
|
3514
5419
|
exports.deleteBranch = deleteBranch;
|
5420
|
+
exports.deleteCluster = deleteCluster;
|
3515
5421
|
exports.deleteColumn = deleteColumn;
|
3516
5422
|
exports.deleteDatabase = deleteDatabase;
|
3517
5423
|
exports.deleteDatabaseGithubSettings = deleteDatabaseGithubSettings;
|
@@ -3532,9 +5438,7 @@ exports.exists = exists;
|
|
3532
5438
|
exports.fileAccess = fileAccess;
|
3533
5439
|
exports.fileUpload = fileUpload;
|
3534
5440
|
exports.ge = ge;
|
3535
|
-
exports.getAPIKey = getAPIKey;
|
3536
5441
|
exports.getAuthorizationCode = getAuthorizationCode;
|
3537
|
-
exports.getBranch = getBranch;
|
3538
5442
|
exports.getBranchDetails = getBranchDetails;
|
3539
5443
|
exports.getBranchList = getBranchList;
|
3540
5444
|
exports.getBranchMetadata = getBranchMetadata;
|
@@ -3549,7 +5453,7 @@ exports.getDatabaseGithubSettings = getDatabaseGithubSettings;
|
|
3549
5453
|
exports.getDatabaseList = getDatabaseList;
|
3550
5454
|
exports.getDatabaseMetadata = getDatabaseMetadata;
|
3551
5455
|
exports.getDatabaseSettings = getDatabaseSettings;
|
3552
|
-
exports.
|
5456
|
+
exports.getDeployPreviewBranch = getDeployPreviewBranch;
|
3553
5457
|
exports.getFile = getFile;
|
3554
5458
|
exports.getFileItem = getFileItem;
|
3555
5459
|
exports.getGitBranchesMapping = getGitBranchesMapping;
|
@@ -3558,7 +5462,6 @@ exports.getMigrationHistory = getMigrationHistory;
|
|
3558
5462
|
exports.getMigrationJobStatus = getMigrationJobStatus;
|
3559
5463
|
exports.getMigrationRequest = getMigrationRequest;
|
3560
5464
|
exports.getMigrationRequestIsMerged = getMigrationRequestIsMerged;
|
3561
|
-
exports.getPreviewBranch = getPreviewBranch;
|
3562
5465
|
exports.getRecord = getRecord;
|
3563
5466
|
exports.getSchema = getSchema;
|
3564
5467
|
exports.getTableColumns = getTableColumns;
|
@@ -3569,6 +5472,7 @@ exports.getUserOAuthAccessTokens = getUserOAuthAccessTokens;
|
|
3569
5472
|
exports.getUserOAuthClients = getUserOAuthClients;
|
3570
5473
|
exports.getWorkspace = getWorkspace;
|
3571
5474
|
exports.getWorkspaceMembersList = getWorkspaceMembersList;
|
5475
|
+
exports.getWorkspaceSettings = getWorkspaceSettings;
|
3572
5476
|
exports.getWorkspacesList = getWorkspacesList;
|
3573
5477
|
exports.grantAuthorizationCode = grantAuthorizationCode;
|
3574
5478
|
exports.greaterEquals = greaterEquals;
|
@@ -3619,11 +5523,14 @@ exports.removeWorkspaceMember = removeWorkspaceMember;
|
|
3619
5523
|
exports.renameDatabase = renameDatabase;
|
3620
5524
|
exports.resendWorkspaceMemberInvite = resendWorkspaceMemberInvite;
|
3621
5525
|
exports.resolveBranch = resolveBranch;
|
5526
|
+
exports.rollbackMigration = rollbackMigration;
|
3622
5527
|
exports.searchBranch = searchBranch;
|
3623
5528
|
exports.searchTable = searchTable;
|
3624
5529
|
exports.serialize = serialize;
|
3625
5530
|
exports.setTableSchema = setTableSchema;
|
5531
|
+
exports.sqlBatchQuery = sqlBatchQuery;
|
3626
5532
|
exports.sqlQuery = sqlQuery;
|
5533
|
+
exports.startMigration = startMigration;
|
3627
5534
|
exports.startsWith = startsWith;
|
3628
5535
|
exports.summarizeTable = summarizeTable;
|
3629
5536
|
exports.transformImage = transformImage;
|
@@ -3642,6 +5549,7 @@ exports.updateUser = updateUser;
|
|
3642
5549
|
exports.updateWorkspace = updateWorkspace;
|
3643
5550
|
exports.updateWorkspaceMemberInvite = updateWorkspaceMemberInvite;
|
3644
5551
|
exports.updateWorkspaceMemberRole = updateWorkspaceMemberRole;
|
5552
|
+
exports.updateWorkspaceSettings = updateWorkspaceSettings;
|
3645
5553
|
exports.upsertRecordWithID = upsertRecordWithID;
|
3646
5554
|
exports.vectorSearchTable = vectorSearchTable;
|
3647
5555
|
//# sourceMappingURL=index.cjs.map
|