@xata.io/client 0.0.0-alpha.vf7a5219a6da9afdecee2e8995fcc249036ff88a1 → 0.0.0-alpha.vf7b3447057053443041e94106d7efe270aaea321
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-add-version.log +1 -1
- package/.turbo/turbo-build.log +4 -4
- package/CHANGELOG.md +50 -4
- package/dist/index.cjs +2214 -308
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.ts +1144 -97
- package/dist/index.mjs +2204 -306
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.mjs
CHANGED
@@ -22,6 +22,1814 @@ const TraceAttributes = {
|
|
22
22
|
CLOUDFLARE_RAY_ID: "cf.ray"
|
23
23
|
};
|
24
24
|
|
25
|
+
const lookup = [];
|
26
|
+
const revLookup = [];
|
27
|
+
const code = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
|
28
|
+
for (let i = 0, len = code.length; i < len; ++i) {
|
29
|
+
lookup[i] = code[i];
|
30
|
+
revLookup[code.charCodeAt(i)] = i;
|
31
|
+
}
|
32
|
+
revLookup["-".charCodeAt(0)] = 62;
|
33
|
+
revLookup["_".charCodeAt(0)] = 63;
|
34
|
+
function getLens(b64) {
|
35
|
+
const len = b64.length;
|
36
|
+
if (len % 4 > 0) {
|
37
|
+
throw new Error("Invalid string. Length must be a multiple of 4");
|
38
|
+
}
|
39
|
+
let validLen = b64.indexOf("=");
|
40
|
+
if (validLen === -1)
|
41
|
+
validLen = len;
|
42
|
+
const placeHoldersLen = validLen === len ? 0 : 4 - validLen % 4;
|
43
|
+
return [validLen, placeHoldersLen];
|
44
|
+
}
|
45
|
+
function _byteLength(_b64, validLen, placeHoldersLen) {
|
46
|
+
return (validLen + placeHoldersLen) * 3 / 4 - placeHoldersLen;
|
47
|
+
}
|
48
|
+
function toByteArray(b64) {
|
49
|
+
let tmp;
|
50
|
+
const lens = getLens(b64);
|
51
|
+
const validLen = lens[0];
|
52
|
+
const placeHoldersLen = lens[1];
|
53
|
+
const arr = new Uint8Array(_byteLength(b64, validLen, placeHoldersLen));
|
54
|
+
let curByte = 0;
|
55
|
+
const len = placeHoldersLen > 0 ? validLen - 4 : validLen;
|
56
|
+
let i;
|
57
|
+
for (i = 0; i < len; i += 4) {
|
58
|
+
tmp = revLookup[b64.charCodeAt(i)] << 18 | revLookup[b64.charCodeAt(i + 1)] << 12 | revLookup[b64.charCodeAt(i + 2)] << 6 | revLookup[b64.charCodeAt(i + 3)];
|
59
|
+
arr[curByte++] = tmp >> 16 & 255;
|
60
|
+
arr[curByte++] = tmp >> 8 & 255;
|
61
|
+
arr[curByte++] = tmp & 255;
|
62
|
+
}
|
63
|
+
if (placeHoldersLen === 2) {
|
64
|
+
tmp = revLookup[b64.charCodeAt(i)] << 2 | revLookup[b64.charCodeAt(i + 1)] >> 4;
|
65
|
+
arr[curByte++] = tmp & 255;
|
66
|
+
}
|
67
|
+
if (placeHoldersLen === 1) {
|
68
|
+
tmp = revLookup[b64.charCodeAt(i)] << 10 | revLookup[b64.charCodeAt(i + 1)] << 4 | revLookup[b64.charCodeAt(i + 2)] >> 2;
|
69
|
+
arr[curByte++] = tmp >> 8 & 255;
|
70
|
+
arr[curByte++] = tmp & 255;
|
71
|
+
}
|
72
|
+
return arr;
|
73
|
+
}
|
74
|
+
function tripletToBase64(num) {
|
75
|
+
return lookup[num >> 18 & 63] + lookup[num >> 12 & 63] + lookup[num >> 6 & 63] + lookup[num & 63];
|
76
|
+
}
|
77
|
+
function encodeChunk(uint8, start, end) {
|
78
|
+
let tmp;
|
79
|
+
const output = [];
|
80
|
+
for (let i = start; i < end; i += 3) {
|
81
|
+
tmp = (uint8[i] << 16 & 16711680) + (uint8[i + 1] << 8 & 65280) + (uint8[i + 2] & 255);
|
82
|
+
output.push(tripletToBase64(tmp));
|
83
|
+
}
|
84
|
+
return output.join("");
|
85
|
+
}
|
86
|
+
function fromByteArray(uint8) {
|
87
|
+
let tmp;
|
88
|
+
const len = uint8.length;
|
89
|
+
const extraBytes = len % 3;
|
90
|
+
const parts = [];
|
91
|
+
const maxChunkLength = 16383;
|
92
|
+
for (let i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) {
|
93
|
+
parts.push(encodeChunk(uint8, i, i + maxChunkLength > len2 ? len2 : i + maxChunkLength));
|
94
|
+
}
|
95
|
+
if (extraBytes === 1) {
|
96
|
+
tmp = uint8[len - 1];
|
97
|
+
parts.push(lookup[tmp >> 2] + lookup[tmp << 4 & 63] + "==");
|
98
|
+
} else if (extraBytes === 2) {
|
99
|
+
tmp = (uint8[len - 2] << 8) + uint8[len - 1];
|
100
|
+
parts.push(lookup[tmp >> 10] + lookup[tmp >> 4 & 63] + lookup[tmp << 2 & 63] + "=");
|
101
|
+
}
|
102
|
+
return parts.join("");
|
103
|
+
}
|
104
|
+
|
105
|
+
const K_MAX_LENGTH = 2147483647;
|
106
|
+
const MAX_ARGUMENTS_LENGTH = 4096;
|
107
|
+
class Buffer extends Uint8Array {
|
108
|
+
/**
|
109
|
+
* Constructs a new `Buffer` instance.
|
110
|
+
*
|
111
|
+
* @param value
|
112
|
+
* @param encodingOrOffset
|
113
|
+
* @param length
|
114
|
+
*/
|
115
|
+
constructor(value, encodingOrOffset, length) {
|
116
|
+
if (typeof value === "number") {
|
117
|
+
if (typeof encodingOrOffset === "string") {
|
118
|
+
throw new TypeError("The first argument must be of type string, received type number");
|
119
|
+
}
|
120
|
+
if (value < 0) {
|
121
|
+
throw new RangeError("The buffer size cannot be negative");
|
122
|
+
}
|
123
|
+
super(value < 0 ? 0 : Buffer._checked(value) | 0);
|
124
|
+
} else if (typeof value === "string") {
|
125
|
+
if (typeof encodingOrOffset !== "string") {
|
126
|
+
encodingOrOffset = "utf8";
|
127
|
+
}
|
128
|
+
if (!Buffer.isEncoding(encodingOrOffset)) {
|
129
|
+
throw new TypeError("Unknown encoding: " + encodingOrOffset);
|
130
|
+
}
|
131
|
+
const length2 = Buffer.byteLength(value, encodingOrOffset) | 0;
|
132
|
+
super(length2);
|
133
|
+
const written = this.write(value, 0, this.length, encodingOrOffset);
|
134
|
+
if (written !== length2) {
|
135
|
+
throw new TypeError(
|
136
|
+
"Number of bytes written did not match expected length (wrote " + written + ", expected " + length2 + ")"
|
137
|
+
);
|
138
|
+
}
|
139
|
+
} else if (ArrayBuffer.isView(value)) {
|
140
|
+
if (Buffer._isInstance(value, Uint8Array)) {
|
141
|
+
const copy = new Uint8Array(value);
|
142
|
+
const array = copy.buffer;
|
143
|
+
const byteOffset = copy.byteOffset;
|
144
|
+
const length2 = copy.byteLength;
|
145
|
+
if (byteOffset < 0 || array.byteLength < byteOffset) {
|
146
|
+
throw new RangeError("offset is outside of buffer bounds");
|
147
|
+
}
|
148
|
+
if (array.byteLength < byteOffset + (length2 || 0)) {
|
149
|
+
throw new RangeError("length is outside of buffer bounds");
|
150
|
+
}
|
151
|
+
super(new Uint8Array(array, byteOffset, length2));
|
152
|
+
} else {
|
153
|
+
const array = value;
|
154
|
+
const length2 = array.length < 0 ? 0 : Buffer._checked(array.length) | 0;
|
155
|
+
super(new Uint8Array(length2));
|
156
|
+
for (let i = 0; i < length2; i++) {
|
157
|
+
this[i] = array[i] & 255;
|
158
|
+
}
|
159
|
+
}
|
160
|
+
} else if (value == null) {
|
161
|
+
throw new TypeError(
|
162
|
+
"The first argument must be one of type string, Buffer, ArrayBuffer, Array, or Array-like Object. Received type " + typeof value
|
163
|
+
);
|
164
|
+
} else if (Buffer._isInstance(value, ArrayBuffer) || value && Buffer._isInstance(value.buffer, ArrayBuffer)) {
|
165
|
+
const array = value;
|
166
|
+
const byteOffset = encodingOrOffset;
|
167
|
+
if (byteOffset < 0 || array.byteLength < byteOffset) {
|
168
|
+
throw new RangeError("offset is outside of buffer bounds");
|
169
|
+
}
|
170
|
+
if (array.byteLength < byteOffset + (length || 0)) {
|
171
|
+
throw new RangeError("length is outside of buffer bounds");
|
172
|
+
}
|
173
|
+
super(new Uint8Array(array, byteOffset, length));
|
174
|
+
} else if (Array.isArray(value)) {
|
175
|
+
const array = value;
|
176
|
+
const length2 = array.length < 0 ? 0 : Buffer._checked(array.length) | 0;
|
177
|
+
super(new Uint8Array(length2));
|
178
|
+
for (let i = 0; i < length2; i++) {
|
179
|
+
this[i] = array[i] & 255;
|
180
|
+
}
|
181
|
+
} else {
|
182
|
+
throw new TypeError("Unable to determine the correct way to allocate buffer for type " + typeof value);
|
183
|
+
}
|
184
|
+
}
|
185
|
+
/**
|
186
|
+
* Return JSON representation of the buffer.
|
187
|
+
*/
|
188
|
+
toJSON() {
|
189
|
+
return {
|
190
|
+
type: "Buffer",
|
191
|
+
data: Array.prototype.slice.call(this)
|
192
|
+
};
|
193
|
+
}
|
194
|
+
/**
|
195
|
+
* Writes `string` to the buffer at `offset` according to the character encoding in `encoding`. The `length`
|
196
|
+
* parameter is the number of bytes to write. If the buffer does not contain enough space to fit the entire string,
|
197
|
+
* only part of `string` will be written. However, partially encoded characters will not be written.
|
198
|
+
*
|
199
|
+
* @param string String to write to `buf`.
|
200
|
+
* @param offset Number of bytes to skip before starting to write `string`. Default: `0`.
|
201
|
+
* @param length Maximum number of bytes to write: Default: `buf.length - offset`.
|
202
|
+
* @param encoding The character encoding of `string`. Default: `utf8`.
|
203
|
+
*/
|
204
|
+
write(string, offset, length, encoding) {
|
205
|
+
if (typeof offset === "undefined") {
|
206
|
+
encoding = "utf8";
|
207
|
+
length = this.length;
|
208
|
+
offset = 0;
|
209
|
+
} else if (typeof length === "undefined" && typeof offset === "string") {
|
210
|
+
encoding = offset;
|
211
|
+
length = this.length;
|
212
|
+
offset = 0;
|
213
|
+
} else if (typeof offset === "number" && isFinite(offset)) {
|
214
|
+
offset = offset >>> 0;
|
215
|
+
if (typeof length === "number" && isFinite(length)) {
|
216
|
+
length = length >>> 0;
|
217
|
+
encoding ?? (encoding = "utf8");
|
218
|
+
} else if (typeof length === "string") {
|
219
|
+
encoding = length;
|
220
|
+
length = void 0;
|
221
|
+
}
|
222
|
+
} else {
|
223
|
+
throw new Error("Buffer.write(string, encoding, offset[, length]) is no longer supported");
|
224
|
+
}
|
225
|
+
const remaining = this.length - offset;
|
226
|
+
if (typeof length === "undefined" || length > remaining) {
|
227
|
+
length = remaining;
|
228
|
+
}
|
229
|
+
if (string.length > 0 && (length < 0 || offset < 0) || offset > this.length) {
|
230
|
+
throw new RangeError("Attempt to write outside buffer bounds");
|
231
|
+
}
|
232
|
+
encoding || (encoding = "utf8");
|
233
|
+
switch (Buffer._getEncoding(encoding)) {
|
234
|
+
case "hex":
|
235
|
+
return Buffer._hexWrite(this, string, offset, length);
|
236
|
+
case "utf8":
|
237
|
+
return Buffer._utf8Write(this, string, offset, length);
|
238
|
+
case "ascii":
|
239
|
+
case "latin1":
|
240
|
+
case "binary":
|
241
|
+
return Buffer._asciiWrite(this, string, offset, length);
|
242
|
+
case "ucs2":
|
243
|
+
case "utf16le":
|
244
|
+
return Buffer._ucs2Write(this, string, offset, length);
|
245
|
+
case "base64":
|
246
|
+
return Buffer._base64Write(this, string, offset, length);
|
247
|
+
}
|
248
|
+
}
|
249
|
+
/**
|
250
|
+
* Decodes the buffer to a string according to the specified character encoding.
|
251
|
+
* Passing `start` and `end` will decode only a subset of the buffer.
|
252
|
+
*
|
253
|
+
* Note that if the encoding is `utf8` and a byte sequence in the input is not valid UTF-8, then each invalid byte
|
254
|
+
* will be replaced with `U+FFFD`.
|
255
|
+
*
|
256
|
+
* @param encoding
|
257
|
+
* @param start
|
258
|
+
* @param end
|
259
|
+
*/
|
260
|
+
toString(encoding, start, end) {
|
261
|
+
const length = this.length;
|
262
|
+
if (length === 0) {
|
263
|
+
return "";
|
264
|
+
}
|
265
|
+
if (arguments.length === 0) {
|
266
|
+
return Buffer._utf8Slice(this, 0, length);
|
267
|
+
}
|
268
|
+
if (typeof start === "undefined" || start < 0) {
|
269
|
+
start = 0;
|
270
|
+
}
|
271
|
+
if (start > this.length) {
|
272
|
+
return "";
|
273
|
+
}
|
274
|
+
if (typeof end === "undefined" || end > this.length) {
|
275
|
+
end = this.length;
|
276
|
+
}
|
277
|
+
if (end <= 0) {
|
278
|
+
return "";
|
279
|
+
}
|
280
|
+
end >>>= 0;
|
281
|
+
start >>>= 0;
|
282
|
+
if (end <= start) {
|
283
|
+
return "";
|
284
|
+
}
|
285
|
+
if (!encoding) {
|
286
|
+
encoding = "utf8";
|
287
|
+
}
|
288
|
+
switch (Buffer._getEncoding(encoding)) {
|
289
|
+
case "hex":
|
290
|
+
return Buffer._hexSlice(this, start, end);
|
291
|
+
case "utf8":
|
292
|
+
return Buffer._utf8Slice(this, start, end);
|
293
|
+
case "ascii":
|
294
|
+
return Buffer._asciiSlice(this, start, end);
|
295
|
+
case "latin1":
|
296
|
+
case "binary":
|
297
|
+
return Buffer._latin1Slice(this, start, end);
|
298
|
+
case "ucs2":
|
299
|
+
case "utf16le":
|
300
|
+
return Buffer._utf16leSlice(this, start, end);
|
301
|
+
case "base64":
|
302
|
+
return Buffer._base64Slice(this, start, end);
|
303
|
+
}
|
304
|
+
}
|
305
|
+
/**
|
306
|
+
* Returns true if this buffer's is equal to the provided buffer, meaning they share the same exact data.
|
307
|
+
*
|
308
|
+
* @param otherBuffer
|
309
|
+
*/
|
310
|
+
equals(otherBuffer) {
|
311
|
+
if (!Buffer.isBuffer(otherBuffer)) {
|
312
|
+
throw new TypeError("Argument must be a Buffer");
|
313
|
+
}
|
314
|
+
if (this === otherBuffer) {
|
315
|
+
return true;
|
316
|
+
}
|
317
|
+
return Buffer.compare(this, otherBuffer) === 0;
|
318
|
+
}
|
319
|
+
/**
|
320
|
+
* Compares the buffer with `otherBuffer` and returns a number indicating whether the buffer comes before, after,
|
321
|
+
* or is the same as `otherBuffer` in sort order. Comparison is based on the actual sequence of bytes in each
|
322
|
+
* buffer.
|
323
|
+
*
|
324
|
+
* - `0` is returned if `otherBuffer` is the same as this buffer.
|
325
|
+
* - `1` is returned if `otherBuffer` should come before this buffer when sorted.
|
326
|
+
* - `-1` is returned if `otherBuffer` should come after this buffer when sorted.
|
327
|
+
*
|
328
|
+
* @param otherBuffer The buffer to compare to.
|
329
|
+
* @param targetStart The offset within `otherBuffer` at which to begin comparison.
|
330
|
+
* @param targetEnd The offset within `otherBuffer` at which to end comparison (exclusive).
|
331
|
+
* @param sourceStart The offset within this buffer at which to begin comparison.
|
332
|
+
* @param sourceEnd The offset within this buffer at which to end the comparison (exclusive).
|
333
|
+
*/
|
334
|
+
compare(otherBuffer, targetStart, targetEnd, sourceStart, sourceEnd) {
|
335
|
+
if (Buffer._isInstance(otherBuffer, Uint8Array)) {
|
336
|
+
otherBuffer = Buffer.from(otherBuffer, otherBuffer.byteOffset, otherBuffer.byteLength);
|
337
|
+
}
|
338
|
+
if (!Buffer.isBuffer(otherBuffer)) {
|
339
|
+
throw new TypeError("Argument must be a Buffer or Uint8Array");
|
340
|
+
}
|
341
|
+
targetStart ?? (targetStart = 0);
|
342
|
+
targetEnd ?? (targetEnd = otherBuffer ? otherBuffer.length : 0);
|
343
|
+
sourceStart ?? (sourceStart = 0);
|
344
|
+
sourceEnd ?? (sourceEnd = this.length);
|
345
|
+
if (targetStart < 0 || targetEnd > otherBuffer.length || sourceStart < 0 || sourceEnd > this.length) {
|
346
|
+
throw new RangeError("Out of range index");
|
347
|
+
}
|
348
|
+
if (sourceStart >= sourceEnd && targetStart >= targetEnd) {
|
349
|
+
return 0;
|
350
|
+
}
|
351
|
+
if (sourceStart >= sourceEnd) {
|
352
|
+
return -1;
|
353
|
+
}
|
354
|
+
if (targetStart >= targetEnd) {
|
355
|
+
return 1;
|
356
|
+
}
|
357
|
+
targetStart >>>= 0;
|
358
|
+
targetEnd >>>= 0;
|
359
|
+
sourceStart >>>= 0;
|
360
|
+
sourceEnd >>>= 0;
|
361
|
+
if (this === otherBuffer) {
|
362
|
+
return 0;
|
363
|
+
}
|
364
|
+
let x = sourceEnd - sourceStart;
|
365
|
+
let y = targetEnd - targetStart;
|
366
|
+
const len = Math.min(x, y);
|
367
|
+
const thisCopy = this.slice(sourceStart, sourceEnd);
|
368
|
+
const targetCopy = otherBuffer.slice(targetStart, targetEnd);
|
369
|
+
for (let i = 0; i < len; ++i) {
|
370
|
+
if (thisCopy[i] !== targetCopy[i]) {
|
371
|
+
x = thisCopy[i];
|
372
|
+
y = targetCopy[i];
|
373
|
+
break;
|
374
|
+
}
|
375
|
+
}
|
376
|
+
if (x < y)
|
377
|
+
return -1;
|
378
|
+
if (y < x)
|
379
|
+
return 1;
|
380
|
+
return 0;
|
381
|
+
}
|
382
|
+
/**
|
383
|
+
* Copies data from a region of this buffer to a region in `targetBuffer`, even if the `targetBuffer` memory
|
384
|
+
* region overlaps with this buffer.
|
385
|
+
*
|
386
|
+
* @param targetBuffer The target buffer to copy into.
|
387
|
+
* @param targetStart The offset within `targetBuffer` at which to begin writing.
|
388
|
+
* @param sourceStart The offset within this buffer at which to begin copying.
|
389
|
+
* @param sourceEnd The offset within this buffer at which to end copying (exclusive).
|
390
|
+
*/
|
391
|
+
copy(targetBuffer, targetStart, sourceStart, sourceEnd) {
|
392
|
+
if (!Buffer.isBuffer(targetBuffer))
|
393
|
+
throw new TypeError("argument should be a Buffer");
|
394
|
+
if (!sourceStart)
|
395
|
+
sourceStart = 0;
|
396
|
+
if (!targetStart)
|
397
|
+
targetStart = 0;
|
398
|
+
if (!sourceEnd && sourceEnd !== 0)
|
399
|
+
sourceEnd = this.length;
|
400
|
+
if (targetStart >= targetBuffer.length)
|
401
|
+
targetStart = targetBuffer.length;
|
402
|
+
if (!targetStart)
|
403
|
+
targetStart = 0;
|
404
|
+
if (sourceEnd > 0 && sourceEnd < sourceStart)
|
405
|
+
sourceEnd = sourceStart;
|
406
|
+
if (sourceEnd === sourceStart)
|
407
|
+
return 0;
|
408
|
+
if (targetBuffer.length === 0 || this.length === 0)
|
409
|
+
return 0;
|
410
|
+
if (targetStart < 0) {
|
411
|
+
throw new RangeError("targetStart out of bounds");
|
412
|
+
}
|
413
|
+
if (sourceStart < 0 || sourceStart >= this.length)
|
414
|
+
throw new RangeError("Index out of range");
|
415
|
+
if (sourceEnd < 0)
|
416
|
+
throw new RangeError("sourceEnd out of bounds");
|
417
|
+
if (sourceEnd > this.length)
|
418
|
+
sourceEnd = this.length;
|
419
|
+
if (targetBuffer.length - targetStart < sourceEnd - sourceStart) {
|
420
|
+
sourceEnd = targetBuffer.length - targetStart + sourceStart;
|
421
|
+
}
|
422
|
+
const len = sourceEnd - sourceStart;
|
423
|
+
if (this === targetBuffer && typeof Uint8Array.prototype.copyWithin === "function") {
|
424
|
+
this.copyWithin(targetStart, sourceStart, sourceEnd);
|
425
|
+
} else {
|
426
|
+
Uint8Array.prototype.set.call(targetBuffer, this.subarray(sourceStart, sourceEnd), targetStart);
|
427
|
+
}
|
428
|
+
return len;
|
429
|
+
}
|
430
|
+
/**
|
431
|
+
* Returns a new `Buffer` that references the same memory as the original, but offset and cropped by the `start`
|
432
|
+
* and `end` indices. This is the same behavior as `buf.subarray()`.
|
433
|
+
*
|
434
|
+
* This method is not compatible with the `Uint8Array.prototype.slice()`, which is a superclass of Buffer. To copy
|
435
|
+
* the slice, use `Uint8Array.prototype.slice()`.
|
436
|
+
*
|
437
|
+
* @param start
|
438
|
+
* @param end
|
439
|
+
*/
|
440
|
+
slice(start, end) {
|
441
|
+
if (!start) {
|
442
|
+
start = 0;
|
443
|
+
}
|
444
|
+
const len = this.length;
|
445
|
+
start = ~~start;
|
446
|
+
end = end === void 0 ? len : ~~end;
|
447
|
+
if (start < 0) {
|
448
|
+
start += len;
|
449
|
+
if (start < 0) {
|
450
|
+
start = 0;
|
451
|
+
}
|
452
|
+
} else if (start > len) {
|
453
|
+
start = len;
|
454
|
+
}
|
455
|
+
if (end < 0) {
|
456
|
+
end += len;
|
457
|
+
if (end < 0) {
|
458
|
+
end = 0;
|
459
|
+
}
|
460
|
+
} else if (end > len) {
|
461
|
+
end = len;
|
462
|
+
}
|
463
|
+
if (end < start) {
|
464
|
+
end = start;
|
465
|
+
}
|
466
|
+
const newBuf = this.subarray(start, end);
|
467
|
+
Object.setPrototypeOf(newBuf, Buffer.prototype);
|
468
|
+
return newBuf;
|
469
|
+
}
|
470
|
+
/**
|
471
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits
|
472
|
+
* of accuracy. Behavior is undefined when value is anything other than an unsigned integer.
|
473
|
+
*
|
474
|
+
* @param value Number to write.
|
475
|
+
* @param offset Number of bytes to skip before starting to write.
|
476
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
477
|
+
* @param noAssert
|
478
|
+
* @returns `offset` plus the number of bytes written.
|
479
|
+
*/
|
480
|
+
writeUIntLE(value, offset, byteLength, noAssert) {
|
481
|
+
value = +value;
|
482
|
+
offset = offset >>> 0;
|
483
|
+
byteLength = byteLength >>> 0;
|
484
|
+
if (!noAssert) {
|
485
|
+
const maxBytes = Math.pow(2, 8 * byteLength) - 1;
|
486
|
+
Buffer._checkInt(this, value, offset, byteLength, maxBytes, 0);
|
487
|
+
}
|
488
|
+
let mul = 1;
|
489
|
+
let i = 0;
|
490
|
+
this[offset] = value & 255;
|
491
|
+
while (++i < byteLength && (mul *= 256)) {
|
492
|
+
this[offset + i] = value / mul & 255;
|
493
|
+
}
|
494
|
+
return offset + byteLength;
|
495
|
+
}
|
496
|
+
/**
|
497
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits of
|
498
|
+
* accuracy. Behavior is undefined when `value` is anything other than an unsigned integer.
|
499
|
+
*
|
500
|
+
* @param value Number to write.
|
501
|
+
* @param offset Number of bytes to skip before starting to write.
|
502
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
503
|
+
* @param noAssert
|
504
|
+
* @returns `offset` plus the number of bytes written.
|
505
|
+
*/
|
506
|
+
writeUIntBE(value, offset, byteLength, noAssert) {
|
507
|
+
value = +value;
|
508
|
+
offset = offset >>> 0;
|
509
|
+
byteLength = byteLength >>> 0;
|
510
|
+
if (!noAssert) {
|
511
|
+
const maxBytes = Math.pow(2, 8 * byteLength) - 1;
|
512
|
+
Buffer._checkInt(this, value, offset, byteLength, maxBytes, 0);
|
513
|
+
}
|
514
|
+
let i = byteLength - 1;
|
515
|
+
let mul = 1;
|
516
|
+
this[offset + i] = value & 255;
|
517
|
+
while (--i >= 0 && (mul *= 256)) {
|
518
|
+
this[offset + i] = value / mul & 255;
|
519
|
+
}
|
520
|
+
return offset + byteLength;
|
521
|
+
}
|
522
|
+
/**
|
523
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits
|
524
|
+
* of accuracy. Behavior is undefined when `value` is anything other than a signed integer.
|
525
|
+
*
|
526
|
+
* @param value Number to write.
|
527
|
+
* @param offset Number of bytes to skip before starting to write.
|
528
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
529
|
+
* @param noAssert
|
530
|
+
* @returns `offset` plus the number of bytes written.
|
531
|
+
*/
|
532
|
+
writeIntLE(value, offset, byteLength, noAssert) {
|
533
|
+
value = +value;
|
534
|
+
offset = offset >>> 0;
|
535
|
+
if (!noAssert) {
|
536
|
+
const limit = Math.pow(2, 8 * byteLength - 1);
|
537
|
+
Buffer._checkInt(this, value, offset, byteLength, limit - 1, -limit);
|
538
|
+
}
|
539
|
+
let i = 0;
|
540
|
+
let mul = 1;
|
541
|
+
let sub = 0;
|
542
|
+
this[offset] = value & 255;
|
543
|
+
while (++i < byteLength && (mul *= 256)) {
|
544
|
+
if (value < 0 && sub === 0 && this[offset + i - 1] !== 0) {
|
545
|
+
sub = 1;
|
546
|
+
}
|
547
|
+
this[offset + i] = (value / mul >> 0) - sub & 255;
|
548
|
+
}
|
549
|
+
return offset + byteLength;
|
550
|
+
}
|
551
|
+
/**
|
552
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits
|
553
|
+
* of accuracy. Behavior is undefined when `value` is anything other than a signed integer.
|
554
|
+
*
|
555
|
+
* @param value Number to write.
|
556
|
+
* @param offset Number of bytes to skip before starting to write.
|
557
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
558
|
+
* @param noAssert
|
559
|
+
* @returns `offset` plus the number of bytes written.
|
560
|
+
*/
|
561
|
+
writeIntBE(value, offset, byteLength, noAssert) {
|
562
|
+
value = +value;
|
563
|
+
offset = offset >>> 0;
|
564
|
+
if (!noAssert) {
|
565
|
+
const limit = Math.pow(2, 8 * byteLength - 1);
|
566
|
+
Buffer._checkInt(this, value, offset, byteLength, limit - 1, -limit);
|
567
|
+
}
|
568
|
+
let i = byteLength - 1;
|
569
|
+
let mul = 1;
|
570
|
+
let sub = 0;
|
571
|
+
this[offset + i] = value & 255;
|
572
|
+
while (--i >= 0 && (mul *= 256)) {
|
573
|
+
if (value < 0 && sub === 0 && this[offset + i + 1] !== 0) {
|
574
|
+
sub = 1;
|
575
|
+
}
|
576
|
+
this[offset + i] = (value / mul >> 0) - sub & 255;
|
577
|
+
}
|
578
|
+
return offset + byteLength;
|
579
|
+
}
|
580
|
+
/**
|
581
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an
|
582
|
+
* unsigned, little-endian integer supporting up to 48 bits of accuracy.
|
583
|
+
*
|
584
|
+
* @param offset Number of bytes to skip before starting to read.
|
585
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
586
|
+
* @param noAssert
|
587
|
+
*/
|
588
|
+
readUIntLE(offset, byteLength, noAssert) {
|
589
|
+
offset = offset >>> 0;
|
590
|
+
byteLength = byteLength >>> 0;
|
591
|
+
if (!noAssert) {
|
592
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
593
|
+
}
|
594
|
+
let val = this[offset];
|
595
|
+
let mul = 1;
|
596
|
+
let i = 0;
|
597
|
+
while (++i < byteLength && (mul *= 256)) {
|
598
|
+
val += this[offset + i] * mul;
|
599
|
+
}
|
600
|
+
return val;
|
601
|
+
}
|
602
|
+
/**
|
603
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an
|
604
|
+
* unsigned, big-endian integer supporting up to 48 bits of accuracy.
|
605
|
+
*
|
606
|
+
* @param offset Number of bytes to skip before starting to read.
|
607
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
608
|
+
* @param noAssert
|
609
|
+
*/
|
610
|
+
readUIntBE(offset, byteLength, noAssert) {
|
611
|
+
offset = offset >>> 0;
|
612
|
+
byteLength = byteLength >>> 0;
|
613
|
+
if (!noAssert) {
|
614
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
615
|
+
}
|
616
|
+
let val = this[offset + --byteLength];
|
617
|
+
let mul = 1;
|
618
|
+
while (byteLength > 0 && (mul *= 256)) {
|
619
|
+
val += this[offset + --byteLength] * mul;
|
620
|
+
}
|
621
|
+
return val;
|
622
|
+
}
|
623
|
+
/**
|
624
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a
|
625
|
+
* little-endian, two's complement signed value supporting up to 48 bits of accuracy.
|
626
|
+
*
|
627
|
+
* @param offset Number of bytes to skip before starting to read.
|
628
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
629
|
+
* @param noAssert
|
630
|
+
*/
|
631
|
+
readIntLE(offset, byteLength, noAssert) {
|
632
|
+
offset = offset >>> 0;
|
633
|
+
byteLength = byteLength >>> 0;
|
634
|
+
if (!noAssert) {
|
635
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
636
|
+
}
|
637
|
+
let val = this[offset];
|
638
|
+
let mul = 1;
|
639
|
+
let i = 0;
|
640
|
+
while (++i < byteLength && (mul *= 256)) {
|
641
|
+
val += this[offset + i] * mul;
|
642
|
+
}
|
643
|
+
mul *= 128;
|
644
|
+
if (val >= mul) {
|
645
|
+
val -= Math.pow(2, 8 * byteLength);
|
646
|
+
}
|
647
|
+
return val;
|
648
|
+
}
|
649
|
+
/**
|
650
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a
|
651
|
+
* big-endian, two's complement signed value supporting up to 48 bits of accuracy.
|
652
|
+
*
|
653
|
+
* @param offset Number of bytes to skip before starting to read.
|
654
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
655
|
+
* @param noAssert
|
656
|
+
*/
|
657
|
+
readIntBE(offset, byteLength, noAssert) {
|
658
|
+
offset = offset >>> 0;
|
659
|
+
byteLength = byteLength >>> 0;
|
660
|
+
if (!noAssert) {
|
661
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
662
|
+
}
|
663
|
+
let i = byteLength;
|
664
|
+
let mul = 1;
|
665
|
+
let val = this[offset + --i];
|
666
|
+
while (i > 0 && (mul *= 256)) {
|
667
|
+
val += this[offset + --i] * mul;
|
668
|
+
}
|
669
|
+
mul *= 128;
|
670
|
+
if (val >= mul) {
|
671
|
+
val -= Math.pow(2, 8 * byteLength);
|
672
|
+
}
|
673
|
+
return val;
|
674
|
+
}
|
675
|
+
/**
|
676
|
+
* Reads an unsigned 8-bit integer from `buf` at the specified `offset`.
|
677
|
+
*
|
678
|
+
* @param offset Number of bytes to skip before starting to read.
|
679
|
+
* @param noAssert
|
680
|
+
*/
|
681
|
+
readUInt8(offset, noAssert) {
|
682
|
+
offset = offset >>> 0;
|
683
|
+
if (!noAssert) {
|
684
|
+
Buffer._checkOffset(offset, 1, this.length);
|
685
|
+
}
|
686
|
+
return this[offset];
|
687
|
+
}
|
688
|
+
/**
|
689
|
+
* Reads an unsigned, little-endian 16-bit integer from `buf` at the specified `offset`.
|
690
|
+
*
|
691
|
+
* @param offset Number of bytes to skip before starting to read.
|
692
|
+
* @param noAssert
|
693
|
+
*/
|
694
|
+
readUInt16LE(offset, noAssert) {
|
695
|
+
offset = offset >>> 0;
|
696
|
+
if (!noAssert) {
|
697
|
+
Buffer._checkOffset(offset, 2, this.length);
|
698
|
+
}
|
699
|
+
return this[offset] | this[offset + 1] << 8;
|
700
|
+
}
|
701
|
+
/**
|
702
|
+
* Reads an unsigned, big-endian 16-bit integer from `buf` at the specified `offset`.
|
703
|
+
*
|
704
|
+
* @param offset Number of bytes to skip before starting to read.
|
705
|
+
* @param noAssert
|
706
|
+
*/
|
707
|
+
readUInt16BE(offset, noAssert) {
|
708
|
+
offset = offset >>> 0;
|
709
|
+
if (!noAssert) {
|
710
|
+
Buffer._checkOffset(offset, 2, this.length);
|
711
|
+
}
|
712
|
+
return this[offset] << 8 | this[offset + 1];
|
713
|
+
}
|
714
|
+
/**
|
715
|
+
* Reads an unsigned, little-endian 32-bit integer from `buf` at the specified `offset`.
|
716
|
+
*
|
717
|
+
* @param offset Number of bytes to skip before starting to read.
|
718
|
+
* @param noAssert
|
719
|
+
*/
|
720
|
+
readUInt32LE(offset, noAssert) {
|
721
|
+
offset = offset >>> 0;
|
722
|
+
if (!noAssert) {
|
723
|
+
Buffer._checkOffset(offset, 4, this.length);
|
724
|
+
}
|
725
|
+
return (this[offset] | this[offset + 1] << 8 | this[offset + 2] << 16) + this[offset + 3] * 16777216;
|
726
|
+
}
|
727
|
+
/**
|
728
|
+
* Reads an unsigned, big-endian 32-bit integer from `buf` at the specified `offset`.
|
729
|
+
*
|
730
|
+
* @param offset Number of bytes to skip before starting to read.
|
731
|
+
* @param noAssert
|
732
|
+
*/
|
733
|
+
readUInt32BE(offset, noAssert) {
|
734
|
+
offset = offset >>> 0;
|
735
|
+
if (!noAssert) {
|
736
|
+
Buffer._checkOffset(offset, 4, this.length);
|
737
|
+
}
|
738
|
+
return this[offset] * 16777216 + (this[offset + 1] << 16 | this[offset + 2] << 8 | this[offset + 3]);
|
739
|
+
}
|
740
|
+
/**
|
741
|
+
* Reads a signed 8-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer` are interpreted
|
742
|
+
* as two's complement signed values.
|
743
|
+
*
|
744
|
+
* @param offset Number of bytes to skip before starting to read.
|
745
|
+
* @param noAssert
|
746
|
+
*/
|
747
|
+
readInt8(offset, noAssert) {
|
748
|
+
offset = offset >>> 0;
|
749
|
+
if (!noAssert) {
|
750
|
+
Buffer._checkOffset(offset, 1, this.length);
|
751
|
+
}
|
752
|
+
if (!(this[offset] & 128)) {
|
753
|
+
return this[offset];
|
754
|
+
}
|
755
|
+
return (255 - this[offset] + 1) * -1;
|
756
|
+
}
|
757
|
+
/**
|
758
|
+
* Reads a signed, little-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
759
|
+
* are interpreted as two's complement signed values.
|
760
|
+
*
|
761
|
+
* @param offset Number of bytes to skip before starting to read.
|
762
|
+
* @param noAssert
|
763
|
+
*/
|
764
|
+
readInt16LE(offset, noAssert) {
|
765
|
+
offset = offset >>> 0;
|
766
|
+
if (!noAssert) {
|
767
|
+
Buffer._checkOffset(offset, 2, this.length);
|
768
|
+
}
|
769
|
+
const val = this[offset] | this[offset + 1] << 8;
|
770
|
+
return val & 32768 ? val | 4294901760 : val;
|
771
|
+
}
|
772
|
+
/**
|
773
|
+
* Reads a signed, big-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
774
|
+
* are interpreted as two's complement signed values.
|
775
|
+
*
|
776
|
+
* @param offset Number of bytes to skip before starting to read.
|
777
|
+
* @param noAssert
|
778
|
+
*/
|
779
|
+
readInt16BE(offset, noAssert) {
|
780
|
+
offset = offset >>> 0;
|
781
|
+
if (!noAssert) {
|
782
|
+
Buffer._checkOffset(offset, 2, this.length);
|
783
|
+
}
|
784
|
+
const val = this[offset + 1] | this[offset] << 8;
|
785
|
+
return val & 32768 ? val | 4294901760 : val;
|
786
|
+
}
|
787
|
+
/**
|
788
|
+
* Reads a signed, little-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
789
|
+
* are interpreted as two's complement signed values.
|
790
|
+
*
|
791
|
+
* @param offset Number of bytes to skip before starting to read.
|
792
|
+
* @param noAssert
|
793
|
+
*/
|
794
|
+
readInt32LE(offset, noAssert) {
|
795
|
+
offset = offset >>> 0;
|
796
|
+
if (!noAssert) {
|
797
|
+
Buffer._checkOffset(offset, 4, this.length);
|
798
|
+
}
|
799
|
+
return this[offset] | this[offset + 1] << 8 | this[offset + 2] << 16 | this[offset + 3] << 24;
|
800
|
+
}
|
801
|
+
/**
|
802
|
+
* Reads a signed, big-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
803
|
+
* are interpreted as two's complement signed values.
|
804
|
+
*
|
805
|
+
* @param offset Number of bytes to skip before starting to read.
|
806
|
+
* @param noAssert
|
807
|
+
*/
|
808
|
+
readInt32BE(offset, noAssert) {
|
809
|
+
offset = offset >>> 0;
|
810
|
+
if (!noAssert) {
|
811
|
+
Buffer._checkOffset(offset, 4, this.length);
|
812
|
+
}
|
813
|
+
return this[offset] << 24 | this[offset + 1] << 16 | this[offset + 2] << 8 | this[offset + 3];
|
814
|
+
}
|
815
|
+
/**
|
816
|
+
* Interprets `buf` as an array of unsigned 16-bit integers and swaps the byte order in-place.
|
817
|
+
* Throws a `RangeError` if `buf.length` is not a multiple of 2.
|
818
|
+
*/
|
819
|
+
swap16() {
|
820
|
+
const len = this.length;
|
821
|
+
if (len % 2 !== 0) {
|
822
|
+
throw new RangeError("Buffer size must be a multiple of 16-bits");
|
823
|
+
}
|
824
|
+
for (let i = 0; i < len; i += 2) {
|
825
|
+
this._swap(this, i, i + 1);
|
826
|
+
}
|
827
|
+
return this;
|
828
|
+
}
|
829
|
+
/**
|
830
|
+
* Interprets `buf` as an array of unsigned 32-bit integers and swaps the byte order in-place.
|
831
|
+
* Throws a `RangeError` if `buf.length` is not a multiple of 4.
|
832
|
+
*/
|
833
|
+
swap32() {
|
834
|
+
const len = this.length;
|
835
|
+
if (len % 4 !== 0) {
|
836
|
+
throw new RangeError("Buffer size must be a multiple of 32-bits");
|
837
|
+
}
|
838
|
+
for (let i = 0; i < len; i += 4) {
|
839
|
+
this._swap(this, i, i + 3);
|
840
|
+
this._swap(this, i + 1, i + 2);
|
841
|
+
}
|
842
|
+
return this;
|
843
|
+
}
|
844
|
+
/**
|
845
|
+
* Interprets `buf` as an array of unsigned 64-bit integers and swaps the byte order in-place.
|
846
|
+
* Throws a `RangeError` if `buf.length` is not a multiple of 8.
|
847
|
+
*/
|
848
|
+
swap64() {
|
849
|
+
const len = this.length;
|
850
|
+
if (len % 8 !== 0) {
|
851
|
+
throw new RangeError("Buffer size must be a multiple of 64-bits");
|
852
|
+
}
|
853
|
+
for (let i = 0; i < len; i += 8) {
|
854
|
+
this._swap(this, i, i + 7);
|
855
|
+
this._swap(this, i + 1, i + 6);
|
856
|
+
this._swap(this, i + 2, i + 5);
|
857
|
+
this._swap(this, i + 3, i + 4);
|
858
|
+
}
|
859
|
+
return this;
|
860
|
+
}
|
861
|
+
/**
|
862
|
+
* Swaps two octets.
|
863
|
+
*
|
864
|
+
* @param b
|
865
|
+
* @param n
|
866
|
+
* @param m
|
867
|
+
*/
|
868
|
+
_swap(b, n, m) {
|
869
|
+
const i = b[n];
|
870
|
+
b[n] = b[m];
|
871
|
+
b[m] = i;
|
872
|
+
}
|
873
|
+
/**
|
874
|
+
* Writes `value` to `buf` at the specified `offset`. The `value` must be a valid unsigned 8-bit integer.
|
875
|
+
* Behavior is undefined when `value` is anything other than an unsigned 8-bit integer.
|
876
|
+
*
|
877
|
+
* @param value Number to write.
|
878
|
+
* @param offset Number of bytes to skip before starting to write.
|
879
|
+
* @param noAssert
|
880
|
+
* @returns `offset` plus the number of bytes written.
|
881
|
+
*/
|
882
|
+
writeUInt8(value, offset, noAssert) {
|
883
|
+
value = +value;
|
884
|
+
offset = offset >>> 0;
|
885
|
+
if (!noAssert) {
|
886
|
+
Buffer._checkInt(this, value, offset, 1, 255, 0);
|
887
|
+
}
|
888
|
+
this[offset] = value & 255;
|
889
|
+
return offset + 1;
|
890
|
+
}
|
891
|
+
/**
|
892
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 16-bit
|
893
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 16-bit integer.
|
894
|
+
*
|
895
|
+
* @param value Number to write.
|
896
|
+
* @param offset Number of bytes to skip before starting to write.
|
897
|
+
* @param noAssert
|
898
|
+
* @returns `offset` plus the number of bytes written.
|
899
|
+
*/
|
900
|
+
writeUInt16LE(value, offset, noAssert) {
|
901
|
+
value = +value;
|
902
|
+
offset = offset >>> 0;
|
903
|
+
if (!noAssert) {
|
904
|
+
Buffer._checkInt(this, value, offset, 2, 65535, 0);
|
905
|
+
}
|
906
|
+
this[offset] = value & 255;
|
907
|
+
this[offset + 1] = value >>> 8;
|
908
|
+
return offset + 2;
|
909
|
+
}
|
910
|
+
/**
|
911
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 16-bit
|
912
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 16-bit integer.
|
913
|
+
*
|
914
|
+
* @param value Number to write.
|
915
|
+
* @param offset Number of bytes to skip before starting to write.
|
916
|
+
* @param noAssert
|
917
|
+
* @returns `offset` plus the number of bytes written.
|
918
|
+
*/
|
919
|
+
writeUInt16BE(value, offset, noAssert) {
|
920
|
+
value = +value;
|
921
|
+
offset = offset >>> 0;
|
922
|
+
if (!noAssert) {
|
923
|
+
Buffer._checkInt(this, value, offset, 2, 65535, 0);
|
924
|
+
}
|
925
|
+
this[offset] = value >>> 8;
|
926
|
+
this[offset + 1] = value & 255;
|
927
|
+
return offset + 2;
|
928
|
+
}
|
929
|
+
/**
|
930
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 32-bit
|
931
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 32-bit integer.
|
932
|
+
*
|
933
|
+
* @param value Number to write.
|
934
|
+
* @param offset Number of bytes to skip before starting to write.
|
935
|
+
* @param noAssert
|
936
|
+
* @returns `offset` plus the number of bytes written.
|
937
|
+
*/
|
938
|
+
writeUInt32LE(value, offset, noAssert) {
|
939
|
+
value = +value;
|
940
|
+
offset = offset >>> 0;
|
941
|
+
if (!noAssert) {
|
942
|
+
Buffer._checkInt(this, value, offset, 4, 4294967295, 0);
|
943
|
+
}
|
944
|
+
this[offset + 3] = value >>> 24;
|
945
|
+
this[offset + 2] = value >>> 16;
|
946
|
+
this[offset + 1] = value >>> 8;
|
947
|
+
this[offset] = value & 255;
|
948
|
+
return offset + 4;
|
949
|
+
}
|
950
|
+
/**
|
951
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 32-bit
|
952
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 32-bit integer.
|
953
|
+
*
|
954
|
+
* @param value Number to write.
|
955
|
+
* @param offset Number of bytes to skip before starting to write.
|
956
|
+
* @param noAssert
|
957
|
+
* @returns `offset` plus the number of bytes written.
|
958
|
+
*/
|
959
|
+
writeUInt32BE(value, offset, noAssert) {
|
960
|
+
value = +value;
|
961
|
+
offset = offset >>> 0;
|
962
|
+
if (!noAssert) {
|
963
|
+
Buffer._checkInt(this, value, offset, 4, 4294967295, 0);
|
964
|
+
}
|
965
|
+
this[offset] = value >>> 24;
|
966
|
+
this[offset + 1] = value >>> 16;
|
967
|
+
this[offset + 2] = value >>> 8;
|
968
|
+
this[offset + 3] = value & 255;
|
969
|
+
return offset + 4;
|
970
|
+
}
|
971
|
+
/**
|
972
|
+
* Writes `value` to `buf` at the specified `offset`. The `value` must be a valid signed 8-bit integer.
|
973
|
+
* Behavior is undefined when `value` is anything other than a signed 8-bit integer.
|
974
|
+
*
|
975
|
+
* @param value Number to write.
|
976
|
+
* @param offset Number of bytes to skip before starting to write.
|
977
|
+
* @param noAssert
|
978
|
+
* @returns `offset` plus the number of bytes written.
|
979
|
+
*/
|
980
|
+
writeInt8(value, offset, noAssert) {
|
981
|
+
value = +value;
|
982
|
+
offset = offset >>> 0;
|
983
|
+
if (!noAssert) {
|
984
|
+
Buffer._checkInt(this, value, offset, 1, 127, -128);
|
985
|
+
}
|
986
|
+
if (value < 0) {
|
987
|
+
value = 255 + value + 1;
|
988
|
+
}
|
989
|
+
this[offset] = value & 255;
|
990
|
+
return offset + 1;
|
991
|
+
}
|
992
|
+
/**
|
993
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 16-bit
|
994
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 16-bit integer.
|
995
|
+
*
|
996
|
+
* @param value Number to write.
|
997
|
+
* @param offset Number of bytes to skip before starting to write.
|
998
|
+
* @param noAssert
|
999
|
+
* @returns `offset` plus the number of bytes written.
|
1000
|
+
*/
|
1001
|
+
writeInt16LE(value, offset, noAssert) {
|
1002
|
+
value = +value;
|
1003
|
+
offset = offset >>> 0;
|
1004
|
+
if (!noAssert) {
|
1005
|
+
Buffer._checkInt(this, value, offset, 2, 32767, -32768);
|
1006
|
+
}
|
1007
|
+
this[offset] = value & 255;
|
1008
|
+
this[offset + 1] = value >>> 8;
|
1009
|
+
return offset + 2;
|
1010
|
+
}
|
1011
|
+
/**
|
1012
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 16-bit
|
1013
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 16-bit integer.
|
1014
|
+
*
|
1015
|
+
* @param value Number to write.
|
1016
|
+
* @param offset Number of bytes to skip before starting to write.
|
1017
|
+
* @param noAssert
|
1018
|
+
* @returns `offset` plus the number of bytes written.
|
1019
|
+
*/
|
1020
|
+
writeInt16BE(value, offset, noAssert) {
|
1021
|
+
value = +value;
|
1022
|
+
offset = offset >>> 0;
|
1023
|
+
if (!noAssert) {
|
1024
|
+
Buffer._checkInt(this, value, offset, 2, 32767, -32768);
|
1025
|
+
}
|
1026
|
+
this[offset] = value >>> 8;
|
1027
|
+
this[offset + 1] = value & 255;
|
1028
|
+
return offset + 2;
|
1029
|
+
}
|
1030
|
+
/**
|
1031
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 32-bit
|
1032
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 32-bit integer.
|
1033
|
+
*
|
1034
|
+
* @param value Number to write.
|
1035
|
+
* @param offset Number of bytes to skip before starting to write.
|
1036
|
+
* @param noAssert
|
1037
|
+
* @returns `offset` plus the number of bytes written.
|
1038
|
+
*/
|
1039
|
+
writeInt32LE(value, offset, noAssert) {
|
1040
|
+
value = +value;
|
1041
|
+
offset = offset >>> 0;
|
1042
|
+
if (!noAssert) {
|
1043
|
+
Buffer._checkInt(this, value, offset, 4, 2147483647, -2147483648);
|
1044
|
+
}
|
1045
|
+
this[offset] = value & 255;
|
1046
|
+
this[offset + 1] = value >>> 8;
|
1047
|
+
this[offset + 2] = value >>> 16;
|
1048
|
+
this[offset + 3] = value >>> 24;
|
1049
|
+
return offset + 4;
|
1050
|
+
}
|
1051
|
+
/**
|
1052
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 32-bit
|
1053
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 32-bit integer.
|
1054
|
+
*
|
1055
|
+
* @param value Number to write.
|
1056
|
+
* @param offset Number of bytes to skip before starting to write.
|
1057
|
+
* @param noAssert
|
1058
|
+
* @returns `offset` plus the number of bytes written.
|
1059
|
+
*/
|
1060
|
+
writeInt32BE(value, offset, noAssert) {
|
1061
|
+
value = +value;
|
1062
|
+
offset = offset >>> 0;
|
1063
|
+
if (!noAssert) {
|
1064
|
+
Buffer._checkInt(this, value, offset, 4, 2147483647, -2147483648);
|
1065
|
+
}
|
1066
|
+
if (value < 0) {
|
1067
|
+
value = 4294967295 + value + 1;
|
1068
|
+
}
|
1069
|
+
this[offset] = value >>> 24;
|
1070
|
+
this[offset + 1] = value >>> 16;
|
1071
|
+
this[offset + 2] = value >>> 8;
|
1072
|
+
this[offset + 3] = value & 255;
|
1073
|
+
return offset + 4;
|
1074
|
+
}
|
1075
|
+
/**
|
1076
|
+
* Fills `buf` with the specified `value`. If the `offset` and `end` are not given, the entire `buf` will be
|
1077
|
+
* filled. The `value` is coerced to a `uint32` value if it is not a string, `Buffer`, or integer. If the resulting
|
1078
|
+
* integer is greater than `255` (decimal), then `buf` will be filled with `value & 255`.
|
1079
|
+
*
|
1080
|
+
* If the final write of a `fill()` operation falls on a multi-byte character, then only the bytes of that
|
1081
|
+
* character that fit into `buf` are written.
|
1082
|
+
*
|
1083
|
+
* If `value` contains invalid characters, it is truncated; if no valid fill data remains, an exception is thrown.
|
1084
|
+
*
|
1085
|
+
* @param value
|
1086
|
+
* @param encoding
|
1087
|
+
*/
|
1088
|
+
fill(value, offset, end, encoding) {
|
1089
|
+
if (typeof value === "string") {
|
1090
|
+
if (typeof offset === "string") {
|
1091
|
+
encoding = offset;
|
1092
|
+
offset = 0;
|
1093
|
+
end = this.length;
|
1094
|
+
} else if (typeof end === "string") {
|
1095
|
+
encoding = end;
|
1096
|
+
end = this.length;
|
1097
|
+
}
|
1098
|
+
if (encoding !== void 0 && typeof encoding !== "string") {
|
1099
|
+
throw new TypeError("encoding must be a string");
|
1100
|
+
}
|
1101
|
+
if (typeof encoding === "string" && !Buffer.isEncoding(encoding)) {
|
1102
|
+
throw new TypeError("Unknown encoding: " + encoding);
|
1103
|
+
}
|
1104
|
+
if (value.length === 1) {
|
1105
|
+
const code = value.charCodeAt(0);
|
1106
|
+
if (encoding === "utf8" && code < 128) {
|
1107
|
+
value = code;
|
1108
|
+
}
|
1109
|
+
}
|
1110
|
+
} else if (typeof value === "number") {
|
1111
|
+
value = value & 255;
|
1112
|
+
} else if (typeof value === "boolean") {
|
1113
|
+
value = Number(value);
|
1114
|
+
}
|
1115
|
+
offset ?? (offset = 0);
|
1116
|
+
end ?? (end = this.length);
|
1117
|
+
if (offset < 0 || this.length < offset || this.length < end) {
|
1118
|
+
throw new RangeError("Out of range index");
|
1119
|
+
}
|
1120
|
+
if (end <= offset) {
|
1121
|
+
return this;
|
1122
|
+
}
|
1123
|
+
offset = offset >>> 0;
|
1124
|
+
end = end === void 0 ? this.length : end >>> 0;
|
1125
|
+
value || (value = 0);
|
1126
|
+
let i;
|
1127
|
+
if (typeof value === "number") {
|
1128
|
+
for (i = offset; i < end; ++i) {
|
1129
|
+
this[i] = value;
|
1130
|
+
}
|
1131
|
+
} else {
|
1132
|
+
const bytes = Buffer.isBuffer(value) ? value : Buffer.from(value, encoding);
|
1133
|
+
const len = bytes.length;
|
1134
|
+
if (len === 0) {
|
1135
|
+
throw new TypeError('The value "' + value + '" is invalid for argument "value"');
|
1136
|
+
}
|
1137
|
+
for (i = 0; i < end - offset; ++i) {
|
1138
|
+
this[i + offset] = bytes[i % len];
|
1139
|
+
}
|
1140
|
+
}
|
1141
|
+
return this;
|
1142
|
+
}
|
1143
|
+
/**
|
1144
|
+
* Returns the index of the specified value.
|
1145
|
+
*
|
1146
|
+
* If `value` is:
|
1147
|
+
* - a string, `value` is interpreted according to the character encoding in `encoding`.
|
1148
|
+
* - a `Buffer` or `Uint8Array`, `value` will be used in its entirety. To compare a partial Buffer, use `slice()`.
|
1149
|
+
* - a number, `value` will be interpreted as an unsigned 8-bit integer value between `0` and `255`.
|
1150
|
+
*
|
1151
|
+
* Any other types will throw a `TypeError`.
|
1152
|
+
*
|
1153
|
+
* @param value What to search for.
|
1154
|
+
* @param byteOffset Where to begin searching in `buf`. If negative, then calculated from the end.
|
1155
|
+
* @param encoding If `value` is a string, this is the encoding used to search.
|
1156
|
+
* @returns The index of the first occurrence of `value` in `buf`, or `-1` if not found.
|
1157
|
+
*/
|
1158
|
+
indexOf(value, byteOffset, encoding) {
|
1159
|
+
return this._bidirectionalIndexOf(this, value, byteOffset, encoding, true);
|
1160
|
+
}
|
1161
|
+
/**
|
1162
|
+
* Gets the last index of the specified value.
|
1163
|
+
*
|
1164
|
+
* @see indexOf()
|
1165
|
+
* @param value
|
1166
|
+
* @param byteOffset
|
1167
|
+
* @param encoding
|
1168
|
+
*/
|
1169
|
+
lastIndexOf(value, byteOffset, encoding) {
|
1170
|
+
return this._bidirectionalIndexOf(this, value, byteOffset, encoding, false);
|
1171
|
+
}
|
1172
|
+
_bidirectionalIndexOf(buffer, val, byteOffset, encoding, dir) {
|
1173
|
+
if (buffer.length === 0) {
|
1174
|
+
return -1;
|
1175
|
+
}
|
1176
|
+
if (typeof byteOffset === "string") {
|
1177
|
+
encoding = byteOffset;
|
1178
|
+
byteOffset = 0;
|
1179
|
+
} else if (typeof byteOffset === "undefined") {
|
1180
|
+
byteOffset = 0;
|
1181
|
+
} else if (byteOffset > 2147483647) {
|
1182
|
+
byteOffset = 2147483647;
|
1183
|
+
} else if (byteOffset < -2147483648) {
|
1184
|
+
byteOffset = -2147483648;
|
1185
|
+
}
|
1186
|
+
byteOffset = +byteOffset;
|
1187
|
+
if (byteOffset !== byteOffset) {
|
1188
|
+
byteOffset = dir ? 0 : buffer.length - 1;
|
1189
|
+
}
|
1190
|
+
if (byteOffset < 0) {
|
1191
|
+
byteOffset = buffer.length + byteOffset;
|
1192
|
+
}
|
1193
|
+
if (byteOffset >= buffer.length) {
|
1194
|
+
if (dir) {
|
1195
|
+
return -1;
|
1196
|
+
} else {
|
1197
|
+
byteOffset = buffer.length - 1;
|
1198
|
+
}
|
1199
|
+
} else if (byteOffset < 0) {
|
1200
|
+
if (dir) {
|
1201
|
+
byteOffset = 0;
|
1202
|
+
} else {
|
1203
|
+
return -1;
|
1204
|
+
}
|
1205
|
+
}
|
1206
|
+
if (typeof val === "string") {
|
1207
|
+
val = Buffer.from(val, encoding);
|
1208
|
+
}
|
1209
|
+
if (Buffer.isBuffer(val)) {
|
1210
|
+
if (val.length === 0) {
|
1211
|
+
return -1;
|
1212
|
+
}
|
1213
|
+
return Buffer._arrayIndexOf(buffer, val, byteOffset, encoding, dir);
|
1214
|
+
} else if (typeof val === "number") {
|
1215
|
+
val = val & 255;
|
1216
|
+
if (typeof Uint8Array.prototype.indexOf === "function") {
|
1217
|
+
if (dir) {
|
1218
|
+
return Uint8Array.prototype.indexOf.call(buffer, val, byteOffset);
|
1219
|
+
} else {
|
1220
|
+
return Uint8Array.prototype.lastIndexOf.call(buffer, val, byteOffset);
|
1221
|
+
}
|
1222
|
+
}
|
1223
|
+
return Buffer._arrayIndexOf(buffer, Buffer.from([val]), byteOffset, encoding, dir);
|
1224
|
+
}
|
1225
|
+
throw new TypeError("val must be string, number or Buffer");
|
1226
|
+
}
|
1227
|
+
/**
|
1228
|
+
* Equivalent to `buf.indexOf() !== -1`.
|
1229
|
+
*
|
1230
|
+
* @param value
|
1231
|
+
* @param byteOffset
|
1232
|
+
* @param encoding
|
1233
|
+
*/
|
1234
|
+
includes(value, byteOffset, encoding) {
|
1235
|
+
return this.indexOf(value, byteOffset, encoding) !== -1;
|
1236
|
+
}
|
1237
|
+
/**
|
1238
|
+
* Creates a new buffer from the given parameters.
|
1239
|
+
*
|
1240
|
+
* @param data
|
1241
|
+
* @param encoding
|
1242
|
+
*/
|
1243
|
+
static from(a, b, c) {
|
1244
|
+
return new Buffer(a, b, c);
|
1245
|
+
}
|
1246
|
+
/**
|
1247
|
+
* Returns true if `obj` is a Buffer.
|
1248
|
+
*
|
1249
|
+
* @param obj
|
1250
|
+
*/
|
1251
|
+
static isBuffer(obj) {
|
1252
|
+
return obj != null && obj !== Buffer.prototype && Buffer._isInstance(obj, Buffer);
|
1253
|
+
}
|
1254
|
+
/**
|
1255
|
+
* Returns true if `encoding` is a supported encoding.
|
1256
|
+
*
|
1257
|
+
* @param encoding
|
1258
|
+
*/
|
1259
|
+
static isEncoding(encoding) {
|
1260
|
+
switch (encoding.toLowerCase()) {
|
1261
|
+
case "hex":
|
1262
|
+
case "utf8":
|
1263
|
+
case "ascii":
|
1264
|
+
case "binary":
|
1265
|
+
case "latin1":
|
1266
|
+
case "ucs2":
|
1267
|
+
case "utf16le":
|
1268
|
+
case "base64":
|
1269
|
+
return true;
|
1270
|
+
default:
|
1271
|
+
return false;
|
1272
|
+
}
|
1273
|
+
}
|
1274
|
+
/**
|
1275
|
+
* Gives the actual byte length of a string for an encoding. This is not the same as `string.length` since that
|
1276
|
+
* returns the number of characters in the string.
|
1277
|
+
*
|
1278
|
+
* @param string The string to test.
|
1279
|
+
* @param encoding The encoding to use for calculation. Defaults is `utf8`.
|
1280
|
+
*/
|
1281
|
+
static byteLength(string, encoding) {
|
1282
|
+
if (Buffer.isBuffer(string)) {
|
1283
|
+
return string.length;
|
1284
|
+
}
|
1285
|
+
if (typeof string !== "string" && (ArrayBuffer.isView(string) || Buffer._isInstance(string, ArrayBuffer))) {
|
1286
|
+
return string.byteLength;
|
1287
|
+
}
|
1288
|
+
if (typeof string !== "string") {
|
1289
|
+
throw new TypeError(
|
1290
|
+
'The "string" argument must be one of type string, Buffer, or ArrayBuffer. Received type ' + typeof string
|
1291
|
+
);
|
1292
|
+
}
|
1293
|
+
const len = string.length;
|
1294
|
+
const mustMatch = arguments.length > 2 && arguments[2] === true;
|
1295
|
+
if (!mustMatch && len === 0) {
|
1296
|
+
return 0;
|
1297
|
+
}
|
1298
|
+
switch (encoding?.toLowerCase()) {
|
1299
|
+
case "ascii":
|
1300
|
+
case "latin1":
|
1301
|
+
case "binary":
|
1302
|
+
return len;
|
1303
|
+
case "utf8":
|
1304
|
+
return Buffer._utf8ToBytes(string).length;
|
1305
|
+
case "hex":
|
1306
|
+
return len >>> 1;
|
1307
|
+
case "ucs2":
|
1308
|
+
case "utf16le":
|
1309
|
+
return len * 2;
|
1310
|
+
case "base64":
|
1311
|
+
return Buffer._base64ToBytes(string).length;
|
1312
|
+
default:
|
1313
|
+
return mustMatch ? -1 : Buffer._utf8ToBytes(string).length;
|
1314
|
+
}
|
1315
|
+
}
|
1316
|
+
/**
|
1317
|
+
* Returns a Buffer which is the result of concatenating all the buffers in the list together.
|
1318
|
+
*
|
1319
|
+
* - If the list has no items, or if the `totalLength` is 0, then it returns a zero-length buffer.
|
1320
|
+
* - If the list has exactly one item, then the first item is returned.
|
1321
|
+
* - If the list has more than one item, then a new buffer is created.
|
1322
|
+
*
|
1323
|
+
* It is faster to provide the `totalLength` if it is known. However, it will be calculated if not provided at
|
1324
|
+
* a small computational expense.
|
1325
|
+
*
|
1326
|
+
* @param list An array of Buffer objects to concatenate.
|
1327
|
+
* @param totalLength Total length of the buffers when concatenated.
|
1328
|
+
*/
|
1329
|
+
static concat(list, totalLength) {
|
1330
|
+
if (!Array.isArray(list)) {
|
1331
|
+
throw new TypeError('"list" argument must be an Array of Buffers');
|
1332
|
+
}
|
1333
|
+
if (list.length === 0) {
|
1334
|
+
return Buffer.alloc(0);
|
1335
|
+
}
|
1336
|
+
let i;
|
1337
|
+
if (totalLength === void 0) {
|
1338
|
+
totalLength = 0;
|
1339
|
+
for (i = 0; i < list.length; ++i) {
|
1340
|
+
totalLength += list[i].length;
|
1341
|
+
}
|
1342
|
+
}
|
1343
|
+
const buffer = Buffer.allocUnsafe(totalLength);
|
1344
|
+
let pos = 0;
|
1345
|
+
for (i = 0; i < list.length; ++i) {
|
1346
|
+
let buf = list[i];
|
1347
|
+
if (Buffer._isInstance(buf, Uint8Array)) {
|
1348
|
+
if (pos + buf.length > buffer.length) {
|
1349
|
+
if (!Buffer.isBuffer(buf)) {
|
1350
|
+
buf = Buffer.from(buf);
|
1351
|
+
}
|
1352
|
+
buf.copy(buffer, pos);
|
1353
|
+
} else {
|
1354
|
+
Uint8Array.prototype.set.call(buffer, buf, pos);
|
1355
|
+
}
|
1356
|
+
} else if (!Buffer.isBuffer(buf)) {
|
1357
|
+
throw new TypeError('"list" argument must be an Array of Buffers');
|
1358
|
+
} else {
|
1359
|
+
buf.copy(buffer, pos);
|
1360
|
+
}
|
1361
|
+
pos += buf.length;
|
1362
|
+
}
|
1363
|
+
return buffer;
|
1364
|
+
}
|
1365
|
+
/**
|
1366
|
+
* The same as `buf1.compare(buf2)`.
|
1367
|
+
*/
|
1368
|
+
static compare(buf1, buf2) {
|
1369
|
+
if (Buffer._isInstance(buf1, Uint8Array)) {
|
1370
|
+
buf1 = Buffer.from(buf1, buf1.byteOffset, buf1.byteLength);
|
1371
|
+
}
|
1372
|
+
if (Buffer._isInstance(buf2, Uint8Array)) {
|
1373
|
+
buf2 = Buffer.from(buf2, buf2.byteOffset, buf2.byteLength);
|
1374
|
+
}
|
1375
|
+
if (!Buffer.isBuffer(buf1) || !Buffer.isBuffer(buf2)) {
|
1376
|
+
throw new TypeError('The "buf1", "buf2" arguments must be one of type Buffer or Uint8Array');
|
1377
|
+
}
|
1378
|
+
if (buf1 === buf2) {
|
1379
|
+
return 0;
|
1380
|
+
}
|
1381
|
+
let x = buf1.length;
|
1382
|
+
let y = buf2.length;
|
1383
|
+
for (let i = 0, len = Math.min(x, y); i < len; ++i) {
|
1384
|
+
if (buf1[i] !== buf2[i]) {
|
1385
|
+
x = buf1[i];
|
1386
|
+
y = buf2[i];
|
1387
|
+
break;
|
1388
|
+
}
|
1389
|
+
}
|
1390
|
+
if (x < y) {
|
1391
|
+
return -1;
|
1392
|
+
}
|
1393
|
+
if (y < x) {
|
1394
|
+
return 1;
|
1395
|
+
}
|
1396
|
+
return 0;
|
1397
|
+
}
|
1398
|
+
/**
|
1399
|
+
* Allocates a new buffer of `size` octets.
|
1400
|
+
*
|
1401
|
+
* @param size The number of octets to allocate.
|
1402
|
+
* @param fill If specified, the buffer will be initialized by calling `buf.fill(fill)`, or with zeroes otherwise.
|
1403
|
+
* @param encoding The encoding used for the call to `buf.fill()` while initializing.
|
1404
|
+
*/
|
1405
|
+
static alloc(size, fill, encoding) {
|
1406
|
+
if (typeof size !== "number") {
|
1407
|
+
throw new TypeError('"size" argument must be of type number');
|
1408
|
+
} else if (size < 0) {
|
1409
|
+
throw new RangeError('The value "' + size + '" is invalid for option "size"');
|
1410
|
+
}
|
1411
|
+
if (size <= 0) {
|
1412
|
+
return new Buffer(size);
|
1413
|
+
}
|
1414
|
+
if (fill !== void 0) {
|
1415
|
+
return typeof encoding === "string" ? new Buffer(size).fill(fill, 0, size, encoding) : new Buffer(size).fill(fill);
|
1416
|
+
}
|
1417
|
+
return new Buffer(size);
|
1418
|
+
}
|
1419
|
+
/**
|
1420
|
+
* Allocates a new buffer of `size` octets without initializing memory. The contents of the buffer are unknown.
|
1421
|
+
*
|
1422
|
+
* @param size
|
1423
|
+
*/
|
1424
|
+
static allocUnsafe(size) {
|
1425
|
+
if (typeof size !== "number") {
|
1426
|
+
throw new TypeError('"size" argument must be of type number');
|
1427
|
+
} else if (size < 0) {
|
1428
|
+
throw new RangeError('The value "' + size + '" is invalid for option "size"');
|
1429
|
+
}
|
1430
|
+
return new Buffer(size < 0 ? 0 : Buffer._checked(size) | 0);
|
1431
|
+
}
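A minimal sketch contrasting the two allocators above; the zero-initialization comment assumes the Buffer constructor (not shown in this hunk) is backed by a typed array.
Buffer.alloc(3);              // no fill: falls through to new Buffer(3), zero-initialized by the typed-array backing
Buffer.alloc(3, 0x61);        // filled via buf.fill(0x61): bytes 61 61 61
Buffer.alloc(3, "b", "utf8"); // string fill with an explicit encoding
Buffer.allocUnsafe(3);        // skips the fill step entirely; contents are whatever the backing storage holds
Buffer.alloc(-1);             // throws RangeError('The value "-1" is invalid for option "size"')
Buffer.alloc("3");            // throws TypeError('"size" argument must be of type number')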
|
1432
|
+
/**
|
1433
|
+
* Returns true if the given `obj` is an instance of `type`.
|
1434
|
+
*
|
1435
|
+
* @param obj
|
1436
|
+
* @param type
|
1437
|
+
*/
|
1438
|
+
static _isInstance(obj, type) {
|
1439
|
+
return obj instanceof type || obj != null && obj.constructor != null && obj.constructor.name != null && obj.constructor.name === type.name;
|
1440
|
+
}
|
1441
|
+
static _checked(length) {
|
1442
|
+
if (length >= K_MAX_LENGTH) {
|
1443
|
+
throw new RangeError(
|
1444
|
+
"Attempt to allocate Buffer larger than maximum size: 0x" + K_MAX_LENGTH.toString(16) + " bytes"
|
1445
|
+
);
|
1446
|
+
}
|
1447
|
+
return length | 0;
|
1448
|
+
}
|
1449
|
+
static _blitBuffer(src, dst, offset, length) {
|
1450
|
+
let i;
|
1451
|
+
for (i = 0; i < length; ++i) {
|
1452
|
+
if (i + offset >= dst.length || i >= src.length) {
|
1453
|
+
break;
|
1454
|
+
}
|
1455
|
+
dst[i + offset] = src[i];
|
1456
|
+
}
|
1457
|
+
return i;
|
1458
|
+
}
|
1459
|
+
static _utf8Write(buf, string, offset, length) {
|
1460
|
+
return Buffer._blitBuffer(Buffer._utf8ToBytes(string, buf.length - offset), buf, offset, length);
|
1461
|
+
}
|
1462
|
+
static _asciiWrite(buf, string, offset, length) {
|
1463
|
+
return Buffer._blitBuffer(Buffer._asciiToBytes(string), buf, offset, length);
|
1464
|
+
}
|
1465
|
+
static _base64Write(buf, string, offset, length) {
|
1466
|
+
return Buffer._blitBuffer(Buffer._base64ToBytes(string), buf, offset, length);
|
1467
|
+
}
|
1468
|
+
static _ucs2Write(buf, string, offset, length) {
|
1469
|
+
return Buffer._blitBuffer(Buffer._utf16leToBytes(string, buf.length - offset), buf, offset, length);
|
1470
|
+
}
|
1471
|
+
static _hexWrite(buf, string, offset, length) {
|
1472
|
+
offset = Number(offset) || 0;
|
1473
|
+
const remaining = buf.length - offset;
|
1474
|
+
if (!length) {
|
1475
|
+
length = remaining;
|
1476
|
+
} else {
|
1477
|
+
length = Number(length);
|
1478
|
+
if (length > remaining) {
|
1479
|
+
length = remaining;
|
1480
|
+
}
|
1481
|
+
}
|
1482
|
+
const strLen = string.length;
|
1483
|
+
if (length > strLen / 2) {
|
1484
|
+
length = strLen / 2;
|
1485
|
+
}
|
1486
|
+
let i;
|
1487
|
+
for (i = 0; i < length; ++i) {
|
1488
|
+
const parsed = parseInt(string.substr(i * 2, 2), 16);
|
1489
|
+
if (parsed !== parsed) {
|
1490
|
+
return i;
|
1491
|
+
}
|
1492
|
+
buf[offset + i] = parsed;
|
1493
|
+
}
|
1494
|
+
return i;
|
1495
|
+
}
|
1496
|
+
static _utf8ToBytes(string, units) {
|
1497
|
+
units = units || Infinity;
|
1498
|
+
const length = string.length;
|
1499
|
+
const bytes = [];
|
1500
|
+
let codePoint;
|
1501
|
+
let leadSurrogate = null;
|
1502
|
+
for (let i = 0; i < length; ++i) {
|
1503
|
+
codePoint = string.charCodeAt(i);
|
1504
|
+
if (codePoint > 55295 && codePoint < 57344) {
|
1505
|
+
if (!leadSurrogate) {
|
1506
|
+
if (codePoint > 56319) {
|
1507
|
+
if ((units -= 3) > -1) {
|
1508
|
+
bytes.push(239, 191, 189);
|
1509
|
+
}
|
1510
|
+
continue;
|
1511
|
+
} else if (i + 1 === length) {
|
1512
|
+
if ((units -= 3) > -1) {
|
1513
|
+
bytes.push(239, 191, 189);
|
1514
|
+
}
|
1515
|
+
continue;
|
1516
|
+
}
|
1517
|
+
leadSurrogate = codePoint;
|
1518
|
+
continue;
|
1519
|
+
}
|
1520
|
+
if (codePoint < 56320) {
|
1521
|
+
if ((units -= 3) > -1) {
|
1522
|
+
bytes.push(239, 191, 189);
|
1523
|
+
}
|
1524
|
+
leadSurrogate = codePoint;
|
1525
|
+
continue;
|
1526
|
+
}
|
1527
|
+
codePoint = (leadSurrogate - 55296 << 10 | codePoint - 56320) + 65536;
|
1528
|
+
} else if (leadSurrogate) {
|
1529
|
+
if ((units -= 3) > -1) {
|
1530
|
+
bytes.push(239, 191, 189);
|
1531
|
+
}
|
1532
|
+
}
|
1533
|
+
leadSurrogate = null;
|
1534
|
+
if (codePoint < 128) {
|
1535
|
+
if ((units -= 1) < 0) {
|
1536
|
+
break;
|
1537
|
+
}
|
1538
|
+
bytes.push(codePoint);
|
1539
|
+
} else if (codePoint < 2048) {
|
1540
|
+
if ((units -= 2) < 0) {
|
1541
|
+
break;
|
1542
|
+
}
|
1543
|
+
bytes.push(codePoint >> 6 | 192, codePoint & 63 | 128);
|
1544
|
+
} else if (codePoint < 65536) {
|
1545
|
+
if ((units -= 3) < 0) {
|
1546
|
+
break;
|
1547
|
+
}
|
1548
|
+
bytes.push(codePoint >> 12 | 224, codePoint >> 6 & 63 | 128, codePoint & 63 | 128);
|
1549
|
+
} else if (codePoint < 1114112) {
|
1550
|
+
if ((units -= 4) < 0) {
|
1551
|
+
break;
|
1552
|
+
}
|
1553
|
+
bytes.push(
|
1554
|
+
codePoint >> 18 | 240,
|
1555
|
+
codePoint >> 12 & 63 | 128,
|
1556
|
+
codePoint >> 6 & 63 | 128,
|
1557
|
+
codePoint & 63 | 128
|
1558
|
+
);
|
1559
|
+
} else {
|
1560
|
+
throw new Error("Invalid code point");
|
1561
|
+
}
|
1562
|
+
}
|
1563
|
+
return bytes;
|
1564
|
+
}
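A minimal sketch of the UTF-8 encoder above; _utf8ToBytes is an internal static, called here only to illustrate the byte sequences it emits.
Buffer._utf8ToBytes("A");       // [0x41]                    1-byte sequence
Buffer._utf8ToBytes("é");       // [0xc3, 0xa9]              2-byte sequence
Buffer._utf8ToBytes("€");       // [0xe2, 0x82, 0xac]        3-byte sequence
Buffer._utf8ToBytes("😀");      // [0xf0, 0x9f, 0x98, 0x80]  surrogate pair folded into one 4-byte code point
Buffer._utf8ToBytes("\ud800");  // [0xef, 0xbf, 0xbd]        lone surrogate is replaced with U+FFFD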
|
1565
|
+
static _base64ToBytes(str) {
|
1566
|
+
return toByteArray(base64clean(str));
|
1567
|
+
}
|
1568
|
+
static _asciiToBytes(str) {
|
1569
|
+
const byteArray = [];
|
1570
|
+
for (let i = 0; i < str.length; ++i) {
|
1571
|
+
byteArray.push(str.charCodeAt(i) & 255);
|
1572
|
+
}
|
1573
|
+
return byteArray;
|
1574
|
+
}
|
1575
|
+
static _utf16leToBytes(str, units) {
|
1576
|
+
let c, hi, lo;
|
1577
|
+
const byteArray = [];
|
1578
|
+
for (let i = 0; i < str.length; ++i) {
|
1579
|
+
if ((units -= 2) < 0)
|
1580
|
+
break;
|
1581
|
+
c = str.charCodeAt(i);
|
1582
|
+
hi = c >> 8;
|
1583
|
+
lo = c % 256;
|
1584
|
+
byteArray.push(lo);
|
1585
|
+
byteArray.push(hi);
|
1586
|
+
}
|
1587
|
+
return byteArray;
|
1588
|
+
}
|
1589
|
+
static _hexSlice(buf, start, end) {
|
1590
|
+
const len = buf.length;
|
1591
|
+
if (!start || start < 0) {
|
1592
|
+
start = 0;
|
1593
|
+
}
|
1594
|
+
if (!end || end < 0 || end > len) {
|
1595
|
+
end = len;
|
1596
|
+
}
|
1597
|
+
let out = "";
|
1598
|
+
for (let i = start; i < end; ++i) {
|
1599
|
+
out += hexSliceLookupTable[buf[i]];
|
1600
|
+
}
|
1601
|
+
return out;
|
1602
|
+
}
|
1603
|
+
static _base64Slice(buf, start, end) {
|
1604
|
+
if (start === 0 && end === buf.length) {
|
1605
|
+
return fromByteArray(buf);
|
1606
|
+
} else {
|
1607
|
+
return fromByteArray(buf.slice(start, end));
|
1608
|
+
}
|
1609
|
+
}
|
1610
|
+
static _utf8Slice(buf, start, end) {
|
1611
|
+
end = Math.min(buf.length, end);
|
1612
|
+
const res = [];
|
1613
|
+
let i = start;
|
1614
|
+
while (i < end) {
|
1615
|
+
const firstByte = buf[i];
|
1616
|
+
let codePoint = null;
|
1617
|
+
let bytesPerSequence = firstByte > 239 ? 4 : firstByte > 223 ? 3 : firstByte > 191 ? 2 : 1;
|
1618
|
+
if (i + bytesPerSequence <= end) {
|
1619
|
+
let secondByte, thirdByte, fourthByte, tempCodePoint;
|
1620
|
+
switch (bytesPerSequence) {
|
1621
|
+
case 1:
|
1622
|
+
if (firstByte < 128) {
|
1623
|
+
codePoint = firstByte;
|
1624
|
+
}
|
1625
|
+
break;
|
1626
|
+
case 2:
|
1627
|
+
secondByte = buf[i + 1];
|
1628
|
+
if ((secondByte & 192) === 128) {
|
1629
|
+
tempCodePoint = (firstByte & 31) << 6 | secondByte & 63;
|
1630
|
+
if (tempCodePoint > 127) {
|
1631
|
+
codePoint = tempCodePoint;
|
1632
|
+
}
|
1633
|
+
}
|
1634
|
+
break;
|
1635
|
+
case 3:
|
1636
|
+
secondByte = buf[i + 1];
|
1637
|
+
thirdByte = buf[i + 2];
|
1638
|
+
if ((secondByte & 192) === 128 && (thirdByte & 192) === 128) {
|
1639
|
+
tempCodePoint = (firstByte & 15) << 12 | (secondByte & 63) << 6 | thirdByte & 63;
|
1640
|
+
if (tempCodePoint > 2047 && (tempCodePoint < 55296 || tempCodePoint > 57343)) {
|
1641
|
+
codePoint = tempCodePoint;
|
1642
|
+
}
|
1643
|
+
}
|
1644
|
+
break;
|
1645
|
+
case 4:
|
1646
|
+
secondByte = buf[i + 1];
|
1647
|
+
thirdByte = buf[i + 2];
|
1648
|
+
fourthByte = buf[i + 3];
|
1649
|
+
if ((secondByte & 192) === 128 && (thirdByte & 192) === 128 && (fourthByte & 192) === 128) {
|
1650
|
+
tempCodePoint = (firstByte & 15) << 18 | (secondByte & 63) << 12 | (thirdByte & 63) << 6 | fourthByte & 63;
|
1651
|
+
if (tempCodePoint > 65535 && tempCodePoint < 1114112) {
|
1652
|
+
codePoint = tempCodePoint;
|
1653
|
+
}
|
1654
|
+
}
|
1655
|
+
}
|
1656
|
+
}
|
1657
|
+
if (codePoint === null) {
|
1658
|
+
codePoint = 65533;
|
1659
|
+
bytesPerSequence = 1;
|
1660
|
+
} else if (codePoint > 65535) {
|
1661
|
+
codePoint -= 65536;
|
1662
|
+
res.push(codePoint >>> 10 & 1023 | 55296);
|
1663
|
+
codePoint = 56320 | codePoint & 1023;
|
1664
|
+
}
|
1665
|
+
res.push(codePoint);
|
1666
|
+
i += bytesPerSequence;
|
1667
|
+
}
|
1668
|
+
return Buffer._decodeCodePointsArray(res);
|
1669
|
+
}
|
1670
|
+
static _decodeCodePointsArray(codePoints) {
|
1671
|
+
const len = codePoints.length;
|
1672
|
+
if (len <= MAX_ARGUMENTS_LENGTH) {
|
1673
|
+
return String.fromCharCode.apply(String, codePoints);
|
1674
|
+
}
|
1675
|
+
let res = "";
|
1676
|
+
let i = 0;
|
1677
|
+
while (i < len) {
|
1678
|
+
res += String.fromCharCode.apply(String, codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH));
|
1679
|
+
}
|
1680
|
+
return res;
|
1681
|
+
}
|
1682
|
+
static _asciiSlice(buf, start, end) {
|
1683
|
+
let ret = "";
|
1684
|
+
end = Math.min(buf.length, end);
|
1685
|
+
for (let i = start; i < end; ++i) {
|
1686
|
+
ret += String.fromCharCode(buf[i] & 127);
|
1687
|
+
}
|
1688
|
+
return ret;
|
1689
|
+
}
|
1690
|
+
static _latin1Slice(buf, start, end) {
|
1691
|
+
let ret = "";
|
1692
|
+
end = Math.min(buf.length, end);
|
1693
|
+
for (let i = start; i < end; ++i) {
|
1694
|
+
ret += String.fromCharCode(buf[i]);
|
1695
|
+
}
|
1696
|
+
return ret;
|
1697
|
+
}
|
1698
|
+
static _utf16leSlice(buf, start, end) {
|
1699
|
+
const bytes = buf.slice(start, end);
|
1700
|
+
let res = "";
|
1701
|
+
for (let i = 0; i < bytes.length - 1; i += 2) {
|
1702
|
+
res += String.fromCharCode(bytes[i] + bytes[i + 1] * 256);
|
1703
|
+
}
|
1704
|
+
return res;
|
1705
|
+
}
|
1706
|
+
static _arrayIndexOf(arr, val, byteOffset, encoding, dir) {
|
1707
|
+
let indexSize = 1;
|
1708
|
+
let arrLength = arr.length;
|
1709
|
+
let valLength = val.length;
|
1710
|
+
if (encoding !== void 0) {
|
1711
|
+
encoding = Buffer._getEncoding(encoding);
|
1712
|
+
if (encoding === "ucs2" || encoding === "utf16le") {
|
1713
|
+
if (arr.length < 2 || val.length < 2) {
|
1714
|
+
return -1;
|
1715
|
+
}
|
1716
|
+
indexSize = 2;
|
1717
|
+
arrLength /= 2;
|
1718
|
+
valLength /= 2;
|
1719
|
+
byteOffset /= 2;
|
1720
|
+
}
|
1721
|
+
}
|
1722
|
+
function read(buf, i2) {
|
1723
|
+
if (indexSize === 1) {
|
1724
|
+
return buf[i2];
|
1725
|
+
} else {
|
1726
|
+
return buf.readUInt16BE(i2 * indexSize);
|
1727
|
+
}
|
1728
|
+
}
|
1729
|
+
let i;
|
1730
|
+
if (dir) {
|
1731
|
+
let foundIndex = -1;
|
1732
|
+
for (i = byteOffset; i < arrLength; i++) {
|
1733
|
+
if (read(arr, i) === read(val, foundIndex === -1 ? 0 : i - foundIndex)) {
|
1734
|
+
if (foundIndex === -1)
|
1735
|
+
foundIndex = i;
|
1736
|
+
if (i - foundIndex + 1 === valLength)
|
1737
|
+
return foundIndex * indexSize;
|
1738
|
+
} else {
|
1739
|
+
if (foundIndex !== -1)
|
1740
|
+
i -= i - foundIndex;
|
1741
|
+
foundIndex = -1;
|
1742
|
+
}
|
1743
|
+
}
|
1744
|
+
} else {
|
1745
|
+
if (byteOffset + valLength > arrLength) {
|
1746
|
+
byteOffset = arrLength - valLength;
|
1747
|
+
}
|
1748
|
+
for (i = byteOffset; i >= 0; i--) {
|
1749
|
+
let found = true;
|
1750
|
+
for (let j = 0; j < valLength; j++) {
|
1751
|
+
if (read(arr, i + j) !== read(val, j)) {
|
1752
|
+
found = false;
|
1753
|
+
break;
|
1754
|
+
}
|
1755
|
+
}
|
1756
|
+
if (found) {
|
1757
|
+
return i;
|
1758
|
+
}
|
1759
|
+
}
|
1760
|
+
}
|
1761
|
+
return -1;
|
1762
|
+
}
|
1763
|
+
static _checkOffset(offset, ext, length) {
|
1764
|
+
if (offset % 1 !== 0 || offset < 0)
|
1765
|
+
throw new RangeError("offset is not uint");
|
1766
|
+
if (offset + ext > length)
|
1767
|
+
throw new RangeError("Trying to access beyond buffer length");
|
1768
|
+
}
|
1769
|
+
static _checkInt(buf, value, offset, ext, max, min) {
|
1770
|
+
if (!Buffer.isBuffer(buf))
|
1771
|
+
throw new TypeError('"buffer" argument must be a Buffer instance');
|
1772
|
+
if (value > max || value < min)
|
1773
|
+
throw new RangeError('"value" argument is out of bounds');
|
1774
|
+
if (offset + ext > buf.length)
|
1775
|
+
throw new RangeError("Index out of range");
|
1776
|
+
}
|
1777
|
+
static _getEncoding(encoding) {
|
1778
|
+
let toLowerCase = false;
|
1779
|
+
let originalEncoding = "";
|
1780
|
+
for (; ; ) {
|
1781
|
+
switch (encoding) {
|
1782
|
+
case "hex":
|
1783
|
+
return "hex";
|
1784
|
+
case "utf8":
|
1785
|
+
return "utf8";
|
1786
|
+
case "ascii":
|
1787
|
+
return "ascii";
|
1788
|
+
case "binary":
|
1789
|
+
return "binary";
|
1790
|
+
case "latin1":
|
1791
|
+
return "latin1";
|
1792
|
+
case "ucs2":
|
1793
|
+
return "ucs2";
|
1794
|
+
case "utf16le":
|
1795
|
+
return "utf16le";
|
1796
|
+
case "base64":
|
1797
|
+
return "base64";
|
1798
|
+
default: {
|
1799
|
+
if (toLowerCase) {
|
1800
|
+
throw new TypeError("Unknown or unsupported encoding: " + originalEncoding);
|
1801
|
+
}
|
1802
|
+
toLowerCase = true;
|
1803
|
+
originalEncoding = encoding;
|
1804
|
+
encoding = encoding.toLowerCase();
|
1805
|
+
}
|
1806
|
+
}
|
1807
|
+
}
|
1808
|
+
}
|
1809
|
+
}
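A minimal sketch of the encoding normalization above: exact names are accepted as-is, anything else gets a single lower-casing retry before failing.
Buffer._getEncoding("utf8");     // "utf8"
Buffer._getEncoding("UTF16LE");  // "utf16le" (matched after the one toLowerCase retry)
Buffer._getEncoding("utf-8");    // throws TypeError("Unknown or unsupported encoding: utf-8"); the hyphenated alias is not in the switch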
|
1810
|
+
const hexSliceLookupTable = function() {
|
1811
|
+
const alphabet = "0123456789abcdef";
|
1812
|
+
const table = new Array(256);
|
1813
|
+
for (let i = 0; i < 16; ++i) {
|
1814
|
+
const i16 = i * 16;
|
1815
|
+
for (let j = 0; j < 16; ++j) {
|
1816
|
+
table[i16 + j] = alphabet[i] + alphabet[j];
|
1817
|
+
}
|
1818
|
+
}
|
1819
|
+
return table;
|
1820
|
+
}();
|
1821
|
+
const INVALID_BASE64_RE = /[^+/0-9A-Za-z-_]/g;
|
1822
|
+
function base64clean(str) {
|
1823
|
+
str = str.split("=")[0];
|
1824
|
+
str = str.trim().replace(INVALID_BASE64_RE, "");
|
1825
|
+
if (str.length < 2)
|
1826
|
+
return "";
|
1827
|
+
while (str.length % 4 !== 0) {
|
1828
|
+
str = str + "=";
|
1829
|
+
}
|
1830
|
+
return str;
|
1831
|
+
}
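A minimal sketch of the module-local base64clean helper above, which normalizes input before toByteArray runs.
base64clean("aGVsbG8=");    // "aGVsbG8=" (everything after the first "=" is dropped, then padding is restored)
base64clean("  aGVsbG8 ");  // "aGVsbG8=" (whitespace and invalid characters stripped, re-padded to a multiple of 4)
base64clean("a");           // ""         (inputs shorter than 2 characters are treated as empty)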
|
1832
|
+
|
25
1833
|
function notEmpty(value) {
|
26
1834
|
return value !== null && value !== void 0;
|
27
1835
|
}
|
@@ -246,7 +2054,7 @@ var __accessCheck$8 = (obj, member, msg) => {
|
|
246
2054
|
if (!member.has(obj))
|
247
2055
|
throw TypeError("Cannot " + msg);
|
248
2056
|
};
|
249
|
-
var __privateGet$
|
2057
|
+
var __privateGet$7 = (obj, member, getter) => {
|
250
2058
|
__accessCheck$8(obj, member, "read from private field");
|
251
2059
|
return getter ? getter.call(obj) : member.get(obj);
|
252
2060
|
};
|
@@ -255,7 +2063,7 @@ var __privateAdd$8 = (obj, member, value) => {
|
|
255
2063
|
throw TypeError("Cannot add the same private member more than once");
|
256
2064
|
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
257
2065
|
};
|
258
|
-
var __privateSet$
|
2066
|
+
var __privateSet$6 = (obj, member, value, setter) => {
|
259
2067
|
__accessCheck$8(obj, member, "write to private field");
|
260
2068
|
setter ? setter.call(obj, value) : member.set(obj, value);
|
261
2069
|
return value;
|
@@ -281,19 +2089,19 @@ class ApiRequestPool {
|
|
281
2089
|
__privateAdd$8(this, _fetch, void 0);
|
282
2090
|
__privateAdd$8(this, _queue, void 0);
|
283
2091
|
__privateAdd$8(this, _concurrency, void 0);
|
284
|
-
__privateSet$
|
285
|
-
__privateSet$
|
2092
|
+
__privateSet$6(this, _queue, []);
|
2093
|
+
__privateSet$6(this, _concurrency, concurrency);
|
286
2094
|
this.running = 0;
|
287
2095
|
this.started = 0;
|
288
2096
|
}
|
289
2097
|
setFetch(fetch2) {
|
290
|
-
__privateSet$
|
2098
|
+
__privateSet$6(this, _fetch, fetch2);
|
291
2099
|
}
|
292
2100
|
getFetch() {
|
293
|
-
if (!__privateGet$
|
2101
|
+
if (!__privateGet$7(this, _fetch)) {
|
294
2102
|
throw new Error("Fetch not set");
|
295
2103
|
}
|
296
|
-
return __privateGet$
|
2104
|
+
return __privateGet$7(this, _fetch);
|
297
2105
|
}
|
298
2106
|
request(url, options) {
|
299
2107
|
const start = /* @__PURE__ */ new Date();
|
@@ -325,19 +2133,19 @@ _queue = new WeakMap();
|
|
325
2133
|
_concurrency = new WeakMap();
|
326
2134
|
_enqueue = new WeakSet();
|
327
2135
|
enqueue_fn = function(task) {
|
328
|
-
const promise = new Promise((resolve) => __privateGet$
|
2136
|
+
const promise = new Promise((resolve) => __privateGet$7(this, _queue).push(resolve)).finally(() => {
|
329
2137
|
this.started--;
|
330
2138
|
this.running++;
|
331
2139
|
}).then(() => task()).finally(() => {
|
332
2140
|
this.running--;
|
333
|
-
const next = __privateGet$
|
2141
|
+
const next = __privateGet$7(this, _queue).shift();
|
334
2142
|
if (next !== void 0) {
|
335
2143
|
this.started++;
|
336
2144
|
next();
|
337
2145
|
}
|
338
2146
|
});
|
339
|
-
if (this.running + this.started < __privateGet$
|
340
|
-
const next = __privateGet$
|
2147
|
+
if (this.running + this.started < __privateGet$7(this, _concurrency)) {
|
2148
|
+
const next = __privateGet$7(this, _queue).shift();
|
341
2149
|
if (next !== void 0) {
|
342
2150
|
this.started++;
|
343
2151
|
next();
|
@@ -526,7 +2334,7 @@ function defaultOnOpen(response) {
|
|
526
2334
|
}
|
527
2335
|
}
|
528
2336
|
|
529
|
-
const VERSION = "0.
|
2337
|
+
const VERSION = "0.29.4";
|
530
2338
|
|
531
2339
|
class ErrorWithCause extends Error {
|
532
2340
|
constructor(message, options) {
|
@@ -619,15 +2427,15 @@ function parseWorkspacesUrlParts(url) {
|
|
619
2427
|
if (!isString(url))
|
620
2428
|
return null;
|
621
2429
|
const matches = {
|
622
|
-
production: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.xata\.sh
|
623
|
-
staging: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.staging-xata\.dev
|
624
|
-
dev: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.dev-xata\.dev
|
625
|
-
local: url.match(/(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:(
|
2430
|
+
production: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.xata\.sh\/db\/([^:]+):?(.*)?/),
|
2431
|
+
staging: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.staging-xata\.dev\/db\/([^:]+):?(.*)?/),
|
2432
|
+
dev: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.dev-xata\.dev\/db\/([^:]+):?(.*)?/),
|
2433
|
+
local: url.match(/(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:(?:\d+)\/db\/([^:]+):?(.*)?/)
|
626
2434
|
};
|
627
2435
|
const [host, match] = Object.entries(matches).find(([, match2]) => match2 !== null) ?? [];
|
628
2436
|
if (!isHostProviderAlias(host) || !match)
|
629
2437
|
return null;
|
630
|
-
return { workspace: match[1], region: match[2], host };
|
2438
|
+
return { workspace: match[1], region: match[2], database: match[3], branch: match[4], host };
|
631
2439
|
}
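A minimal sketch of the updated URL parsing above: the new capture groups also surface the database and branch segments. The workspace, region, and database values below are made up.
parseWorkspacesUrlParts("https://my-workspace-abc123.eu-west-1.xata.sh/db/blog:main");
// -> { workspace: "my-workspace-abc123", region: "eu-west-1", database: "blog", branch: "main", host: "production" }
parseWorkspacesUrlParts("https://my-workspace-abc123.eu-west-1.xata.sh/db/blog");
// -> same shape, but branch comes back undefined when the ":branch" suffix is omitted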
|
632
2440
|
|
633
2441
|
const pool = new ApiRequestPool();
|
@@ -845,26 +2653,35 @@ function parseUrl(url) {
|
|
845
2653
|
|
846
2654
|
const dataPlaneFetch = async (options) => fetch$1({ ...options, endpoint: "dataPlane" });
|
847
2655
|
|
848
|
-
const applyMigration = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/
|
849
|
-
const
|
850
|
-
url: "/db/{dbBranchName}/
|
851
|
-
method: "
|
2656
|
+
const applyMigration = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/apply", method: "post", ...variables, signal });
|
2657
|
+
const adaptTable = (variables, signal) => dataPlaneFetch({
|
2658
|
+
url: "/db/{dbBranchName}/migrations/adapt/{tableName}",
|
2659
|
+
method: "post",
|
852
2660
|
...variables,
|
853
2661
|
signal
|
854
2662
|
});
|
855
|
-
const
|
856
|
-
url: "/db/{dbBranchName}/
|
857
|
-
method: "
|
2663
|
+
const adaptAllTables = (variables, signal) => dataPlaneFetch({
|
2664
|
+
url: "/db/{dbBranchName}/migrations/adapt",
|
2665
|
+
method: "post",
|
858
2666
|
...variables,
|
859
2667
|
signal
|
860
2668
|
});
|
861
|
-
const
|
2669
|
+
const getBranchMigrationJobStatus = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/status", method: "get", ...variables, signal });
|
2670
|
+
const getMigrationJobStatus = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/jobs/{jobId}", method: "get", ...variables, signal });
|
2671
|
+
const getMigrationHistory = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/history", method: "get", ...variables, signal });
|
862
2672
|
const getBranchList = (variables, signal) => dataPlaneFetch({
|
863
2673
|
url: "/dbs/{dbName}",
|
864
2674
|
method: "get",
|
865
2675
|
...variables,
|
866
2676
|
signal
|
867
2677
|
});
|
2678
|
+
const getDatabaseSettings = (variables, signal) => dataPlaneFetch({
|
2679
|
+
url: "/dbs/{dbName}/settings",
|
2680
|
+
method: "get",
|
2681
|
+
...variables,
|
2682
|
+
signal
|
2683
|
+
});
|
2684
|
+
const updateDatabaseSettings = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/settings", method: "patch", ...variables, signal });
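A minimal sketch of the new data-plane settings operations, as wired into operationsByTag$2.database further down. Only the URL and HTTP method come from the diff; the pathParams and body shapes below are assumptions for illustration, and extraProps stands in for the usual fetch props.
const settings = await getDatabaseSettings({
  pathParams: { workspace: "{workspaceId}", region: "{region}", dbName: "blog" },  // assumed shape
  ...extraProps                                                                    // hypothetical apiKey/fetch/host config
});
await updateDatabaseSettings({
  pathParams: { workspace: "{workspaceId}", region: "{region}", dbName: "blog" },
  body: { /* settings payload; schema not shown in this diff */ },
  ...extraProps
});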
|
868
2685
|
const getBranchDetails = (variables, signal) => dataPlaneFetch({
|
869
2686
|
url: "/db/{dbBranchName}",
|
870
2687
|
method: "get",
|
@@ -1078,11 +2895,26 @@ const sqlQuery = (variables, signal) => dataPlaneFetch({
|
|
1078
2895
|
signal
|
1079
2896
|
});
|
1080
2897
|
const operationsByTag$2 = {
|
1081
|
-
|
2898
|
+
migrations: {
|
1082
2899
|
applyMigration,
|
1083
|
-
|
1084
|
-
|
1085
|
-
|
2900
|
+
adaptTable,
|
2901
|
+
adaptAllTables,
|
2902
|
+
getBranchMigrationJobStatus,
|
2903
|
+
getMigrationJobStatus,
|
2904
|
+
getMigrationHistory,
|
2905
|
+
getSchema,
|
2906
|
+
getBranchMigrationHistory,
|
2907
|
+
getBranchMigrationPlan,
|
2908
|
+
executeBranchMigrationPlan,
|
2909
|
+
getBranchSchemaHistory,
|
2910
|
+
compareBranchWithUserSchema,
|
2911
|
+
compareBranchSchemas,
|
2912
|
+
updateBranchSchema,
|
2913
|
+
previewBranchSchemaEdit,
|
2914
|
+
applyBranchSchemaEdit,
|
2915
|
+
pushBranchMigrations
|
2916
|
+
},
|
2917
|
+
branch: {
|
1086
2918
|
getBranchList,
|
1087
2919
|
getBranchDetails,
|
1088
2920
|
createBranch,
|
@@ -1096,19 +2928,7 @@ const operationsByTag$2 = {
|
|
1096
2928
|
removeGitBranchesEntry,
|
1097
2929
|
resolveBranch
|
1098
2930
|
},
|
1099
|
-
|
1100
|
-
getSchema,
|
1101
|
-
getBranchMigrationHistory,
|
1102
|
-
getBranchMigrationPlan,
|
1103
|
-
executeBranchMigrationPlan,
|
1104
|
-
getBranchSchemaHistory,
|
1105
|
-
compareBranchWithUserSchema,
|
1106
|
-
compareBranchSchemas,
|
1107
|
-
updateBranchSchema,
|
1108
|
-
previewBranchSchemaEdit,
|
1109
|
-
applyBranchSchemaEdit,
|
1110
|
-
pushBranchMigrations
|
1111
|
-
},
|
2931
|
+
database: { getDatabaseSettings, updateDatabaseSettings },
|
1112
2932
|
migrationRequests: {
|
1113
2933
|
queryMigrationRequests,
|
1114
2934
|
createMigrationRequest,
|
@@ -1250,6 +3070,8 @@ const deleteWorkspace = (variables, signal) => controlPlaneFetch({
|
|
1250
3070
|
...variables,
|
1251
3071
|
signal
|
1252
3072
|
});
|
3073
|
+
const getWorkspaceSettings = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/settings", method: "get", ...variables, signal });
|
3074
|
+
const updateWorkspaceSettings = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/settings", method: "patch", ...variables, signal });
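A minimal sketch of the new control-plane settings operations (GET/PATCH /workspaces/{workspaceId}/settings), added to the workspaces operation group below. Parameter shapes are assumed by analogy with the neighbouring workspace operations.
const settings = await getWorkspaceSettings({
  pathParams: { workspaceId: "my-workspace-abc123" },  // assumed shape
  ...extraProps                                        // hypothetical apiKey/fetch/host config
});
await updateWorkspaceSettings({
  pathParams: { workspaceId: "my-workspace-abc123" },
  body: { /* settings payload; schema not shown in this diff */ },
  ...extraProps
});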
|
1253
3075
|
const getWorkspaceMembersList = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/members", method: "get", ...variables, signal });
|
1254
3076
|
const updateWorkspaceMemberRole = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/members/{userId}", method: "put", ...variables, signal });
|
1255
3077
|
const removeWorkspaceMember = (variables, signal) => controlPlaneFetch({
|
@@ -1315,6 +3137,8 @@ const operationsByTag$1 = {
|
|
1315
3137
|
getWorkspace,
|
1316
3138
|
updateWorkspace,
|
1317
3139
|
deleteWorkspace,
|
3140
|
+
getWorkspaceSettings,
|
3141
|
+
updateWorkspaceSettings,
|
1318
3142
|
getWorkspaceMembersList,
|
1319
3143
|
updateWorkspaceMemberRole,
|
1320
3144
|
removeWorkspaceMember
|
@@ -1347,7 +3171,7 @@ var __accessCheck$7 = (obj, member, msg) => {
|
|
1347
3171
|
if (!member.has(obj))
|
1348
3172
|
throw TypeError("Cannot " + msg);
|
1349
3173
|
};
|
1350
|
-
var __privateGet$
|
3174
|
+
var __privateGet$6 = (obj, member, getter) => {
|
1351
3175
|
__accessCheck$7(obj, member, "read from private field");
|
1352
3176
|
return getter ? getter.call(obj) : member.get(obj);
|
1353
3177
|
};
|
@@ -1356,7 +3180,7 @@ var __privateAdd$7 = (obj, member, value) => {
|
|
1356
3180
|
throw TypeError("Cannot add the same private member more than once");
|
1357
3181
|
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
1358
3182
|
};
|
1359
|
-
var __privateSet$
|
3183
|
+
var __privateSet$5 = (obj, member, value, setter) => {
|
1360
3184
|
__accessCheck$7(obj, member, "write to private field");
|
1361
3185
|
setter ? setter.call(obj, value) : member.set(obj, value);
|
1362
3186
|
return value;
|
@@ -1373,7 +3197,7 @@ class XataApiClient {
|
|
1373
3197
|
if (!apiKey) {
|
1374
3198
|
throw new Error("Could not resolve a valid apiKey");
|
1375
3199
|
}
|
1376
|
-
__privateSet$
|
3200
|
+
__privateSet$5(this, _extraProps, {
|
1377
3201
|
apiUrl: getHostUrl(provider, "main"),
|
1378
3202
|
workspacesApiUrl: getHostUrl(provider, "workspaces"),
|
1379
3203
|
fetch: getFetchImplementation(options.fetch),
|
@@ -1385,64 +3209,64 @@ class XataApiClient {
|
|
1385
3209
|
});
|
1386
3210
|
}
|
1387
3211
|
get user() {
|
1388
|
-
if (!__privateGet$
|
1389
|
-
__privateGet$
|
1390
|
-
return __privateGet$
|
3212
|
+
if (!__privateGet$6(this, _namespaces).user)
|
3213
|
+
__privateGet$6(this, _namespaces).user = new UserApi(__privateGet$6(this, _extraProps));
|
3214
|
+
return __privateGet$6(this, _namespaces).user;
|
1391
3215
|
}
|
1392
3216
|
get authentication() {
|
1393
|
-
if (!__privateGet$
|
1394
|
-
__privateGet$
|
1395
|
-
return __privateGet$
|
3217
|
+
if (!__privateGet$6(this, _namespaces).authentication)
|
3218
|
+
__privateGet$6(this, _namespaces).authentication = new AuthenticationApi(__privateGet$6(this, _extraProps));
|
3219
|
+
return __privateGet$6(this, _namespaces).authentication;
|
1396
3220
|
}
|
1397
3221
|
get workspaces() {
|
1398
|
-
if (!__privateGet$
|
1399
|
-
__privateGet$
|
1400
|
-
return __privateGet$
|
3222
|
+
if (!__privateGet$6(this, _namespaces).workspaces)
|
3223
|
+
__privateGet$6(this, _namespaces).workspaces = new WorkspaceApi(__privateGet$6(this, _extraProps));
|
3224
|
+
return __privateGet$6(this, _namespaces).workspaces;
|
1401
3225
|
}
|
1402
3226
|
get invites() {
|
1403
|
-
if (!__privateGet$
|
1404
|
-
__privateGet$
|
1405
|
-
return __privateGet$
|
3227
|
+
if (!__privateGet$6(this, _namespaces).invites)
|
3228
|
+
__privateGet$6(this, _namespaces).invites = new InvitesApi(__privateGet$6(this, _extraProps));
|
3229
|
+
return __privateGet$6(this, _namespaces).invites;
|
1406
3230
|
}
|
1407
3231
|
get database() {
|
1408
|
-
if (!__privateGet$
|
1409
|
-
__privateGet$
|
1410
|
-
return __privateGet$
|
3232
|
+
if (!__privateGet$6(this, _namespaces).database)
|
3233
|
+
__privateGet$6(this, _namespaces).database = new DatabaseApi(__privateGet$6(this, _extraProps));
|
3234
|
+
return __privateGet$6(this, _namespaces).database;
|
1411
3235
|
}
|
1412
3236
|
get branches() {
|
1413
|
-
if (!__privateGet$
|
1414
|
-
__privateGet$
|
1415
|
-
return __privateGet$
|
3237
|
+
if (!__privateGet$6(this, _namespaces).branches)
|
3238
|
+
__privateGet$6(this, _namespaces).branches = new BranchApi(__privateGet$6(this, _extraProps));
|
3239
|
+
return __privateGet$6(this, _namespaces).branches;
|
1416
3240
|
}
|
1417
3241
|
get migrations() {
|
1418
|
-
if (!__privateGet$
|
1419
|
-
__privateGet$
|
1420
|
-
return __privateGet$
|
3242
|
+
if (!__privateGet$6(this, _namespaces).migrations)
|
3243
|
+
__privateGet$6(this, _namespaces).migrations = new MigrationsApi(__privateGet$6(this, _extraProps));
|
3244
|
+
return __privateGet$6(this, _namespaces).migrations;
|
1421
3245
|
}
|
1422
3246
|
get migrationRequests() {
|
1423
|
-
if (!__privateGet$
|
1424
|
-
__privateGet$
|
1425
|
-
return __privateGet$
|
3247
|
+
if (!__privateGet$6(this, _namespaces).migrationRequests)
|
3248
|
+
__privateGet$6(this, _namespaces).migrationRequests = new MigrationRequestsApi(__privateGet$6(this, _extraProps));
|
3249
|
+
return __privateGet$6(this, _namespaces).migrationRequests;
|
1426
3250
|
}
|
1427
3251
|
get tables() {
|
1428
|
-
if (!__privateGet$
|
1429
|
-
__privateGet$
|
1430
|
-
return __privateGet$
|
3252
|
+
if (!__privateGet$6(this, _namespaces).tables)
|
3253
|
+
__privateGet$6(this, _namespaces).tables = new TableApi(__privateGet$6(this, _extraProps));
|
3254
|
+
return __privateGet$6(this, _namespaces).tables;
|
1431
3255
|
}
|
1432
3256
|
get records() {
|
1433
|
-
if (!__privateGet$
|
1434
|
-
__privateGet$
|
1435
|
-
return __privateGet$
|
3257
|
+
if (!__privateGet$6(this, _namespaces).records)
|
3258
|
+
__privateGet$6(this, _namespaces).records = new RecordsApi(__privateGet$6(this, _extraProps));
|
3259
|
+
return __privateGet$6(this, _namespaces).records;
|
1436
3260
|
}
|
1437
3261
|
get files() {
|
1438
|
-
if (!__privateGet$
|
1439
|
-
__privateGet$
|
1440
|
-
return __privateGet$
|
3262
|
+
if (!__privateGet$6(this, _namespaces).files)
|
3263
|
+
__privateGet$6(this, _namespaces).files = new FilesApi(__privateGet$6(this, _extraProps));
|
3264
|
+
return __privateGet$6(this, _namespaces).files;
|
1441
3265
|
}
|
1442
3266
|
get searchAndFilter() {
|
1443
|
-
if (!__privateGet$
|
1444
|
-
__privateGet$
|
1445
|
-
return __privateGet$
|
3267
|
+
if (!__privateGet$6(this, _namespaces).searchAndFilter)
|
3268
|
+
__privateGet$6(this, _namespaces).searchAndFilter = new SearchAndFilterApi(__privateGet$6(this, _extraProps));
|
3269
|
+
return __privateGet$6(this, _namespaces).searchAndFilter;
|
1446
3270
|
}
|
1447
3271
|
}
|
1448
3272
|
_extraProps = new WeakMap();
|
@@ -1744,6 +3568,30 @@ class BranchApi {
|
|
1744
3568
|
...this.extraProps
|
1745
3569
|
});
|
1746
3570
|
}
|
3571
|
+
pgRollMigrationHistory({
|
3572
|
+
workspace,
|
3573
|
+
region,
|
3574
|
+
database,
|
3575
|
+
branch
|
3576
|
+
}) {
|
3577
|
+
return operationsByTag.migrations.getMigrationHistory({
|
3578
|
+
pathParams: { workspace, region, dbBranchName: `${database}:${branch}` },
|
3579
|
+
...this.extraProps
|
3580
|
+
});
|
3581
|
+
}
|
3582
|
+
applyMigration({
|
3583
|
+
workspace,
|
3584
|
+
region,
|
3585
|
+
database,
|
3586
|
+
branch,
|
3587
|
+
migration
|
3588
|
+
}) {
|
3589
|
+
return operationsByTag.migrations.applyMigration({
|
3590
|
+
pathParams: { workspace, region, dbBranchName: `${database}:${branch}` },
|
3591
|
+
body: migration,
|
3592
|
+
...this.extraProps
|
3593
|
+
});
|
3594
|
+
}
|
1747
3595
|
}
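A minimal sketch of the two methods added to BranchApi above. The argument names mirror the destructuring in the added code; the workspace, database, and migration payload values are placeholders.
import { XataApiClient } from "@xata.io/client";

const api = new XataApiClient({ apiKey: process.env.XATA_API_KEY });
const history = await api.branches.pgRollMigrationHistory({
  workspace: "my-workspace-abc123",
  region: "eu-west-1",
  database: "blog",
  branch: "main"
});
await api.branches.applyMigration({
  workspace: "my-workspace-abc123",
  region: "eu-west-1",
  database: "blog",
  branch: "main",
  migration: { operations: [] }  // pgroll-style payload; exact schema is not shown in this diff
});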
|
1748
3596
|
class TableApi {
|
1749
3597
|
constructor(extraProps) {
|
@@ -2557,6 +4405,17 @@ class MigrationsApi {
|
|
2557
4405
|
...this.extraProps
|
2558
4406
|
});
|
2559
4407
|
}
|
4408
|
+
getSchema({
|
4409
|
+
workspace,
|
4410
|
+
region,
|
4411
|
+
database,
|
4412
|
+
branch
|
4413
|
+
}) {
|
4414
|
+
return operationsByTag.migrations.getSchema({
|
4415
|
+
pathParams: { workspace, region, dbBranchName: `${database}:${branch}` },
|
4416
|
+
...this.extraProps
|
4417
|
+
});
|
4418
|
+
}
|
2560
4419
|
}
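A minimal sketch of the getSchema helper added to MigrationsApi above; the return shape is not visible in this diff, so it is left untyped here, and the identifiers are placeholders.
const schema = await api.migrations.getSchema({
  workspace: "my-workspace-abc123",
  region: "eu-west-1",
  database: "blog",
  branch: "main"
});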
|
2561
4420
|
class DatabaseApi {
|
2562
4421
|
constructor(extraProps) {
|
@@ -2862,7 +4721,7 @@ var __accessCheck$6 = (obj, member, msg) => {
|
|
2862
4721
|
if (!member.has(obj))
|
2863
4722
|
throw TypeError("Cannot " + msg);
|
2864
4723
|
};
|
2865
|
-
var __privateGet$
|
4724
|
+
var __privateGet$5 = (obj, member, getter) => {
|
2866
4725
|
__accessCheck$6(obj, member, "read from private field");
|
2867
4726
|
return getter ? getter.call(obj) : member.get(obj);
|
2868
4727
|
};
|
@@ -2871,7 +4730,7 @@ var __privateAdd$6 = (obj, member, value) => {
|
|
2871
4730
|
throw TypeError("Cannot add the same private member more than once");
|
2872
4731
|
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
2873
4732
|
};
|
2874
|
-
var __privateSet$
|
4733
|
+
var __privateSet$4 = (obj, member, value, setter) => {
|
2875
4734
|
__accessCheck$6(obj, member, "write to private field");
|
2876
4735
|
setter ? setter.call(obj, value) : member.set(obj, value);
|
2877
4736
|
return value;
|
@@ -2880,9 +4739,9 @@ var _query, _page;
|
|
2880
4739
|
class Page {
|
2881
4740
|
constructor(query, meta, records = []) {
|
2882
4741
|
__privateAdd$6(this, _query, void 0);
|
2883
|
-
__privateSet$
|
4742
|
+
__privateSet$4(this, _query, query);
|
2884
4743
|
this.meta = meta;
|
2885
|
-
this.records = new
|
4744
|
+
this.records = new PageRecordArray(this, records);
|
2886
4745
|
}
|
2887
4746
|
/**
|
2888
4747
|
* Retrieves the next page of results.
|
@@ -2891,7 +4750,7 @@ class Page {
|
|
2891
4750
|
* @returns The next page or results.
|
2892
4751
|
*/
|
2893
4752
|
async nextPage(size, offset) {
|
2894
|
-
return __privateGet$
|
4753
|
+
return __privateGet$5(this, _query).getPaginated({ pagination: { size, offset, after: this.meta.page.cursor } });
|
2895
4754
|
}
|
2896
4755
|
/**
|
2897
4756
|
* Retrieves the previous page of results.
|
@@ -2900,7 +4759,7 @@ class Page {
|
|
2900
4759
|
* @returns The previous page or results.
|
2901
4760
|
*/
|
2902
4761
|
async previousPage(size, offset) {
|
2903
|
-
return __privateGet$
|
4762
|
+
return __privateGet$5(this, _query).getPaginated({ pagination: { size, offset, before: this.meta.page.cursor } });
|
2904
4763
|
}
|
2905
4764
|
/**
|
2906
4765
|
* Retrieves the start page of results.
|
@@ -2909,7 +4768,7 @@ class Page {
|
|
2909
4768
|
* @returns The start page or results.
|
2910
4769
|
*/
|
2911
4770
|
async startPage(size, offset) {
|
2912
|
-
return __privateGet$
|
4771
|
+
return __privateGet$5(this, _query).getPaginated({ pagination: { size, offset, start: this.meta.page.cursor } });
|
2913
4772
|
}
|
2914
4773
|
/**
|
2915
4774
|
* Retrieves the end page of results.
|
@@ -2918,7 +4777,7 @@ class Page {
|
|
2918
4777
|
* @returns The end page or results.
|
2919
4778
|
*/
|
2920
4779
|
async endPage(size, offset) {
|
2921
|
-
return __privateGet$
|
4780
|
+
return __privateGet$5(this, _query).getPaginated({ pagination: { size, offset, end: this.meta.page.cursor } });
|
2922
4781
|
}
|
2923
4782
|
/**
|
2924
4783
|
* Shortcut method to check if there will be additional results if the next page of results is retrieved.
|
@@ -2936,11 +4795,38 @@ const PAGINATION_DEFAULT_OFFSET = 0;
|
|
2936
4795
|
function isCursorPaginationOptions(options) {
|
2937
4796
|
return isDefined(options) && (isDefined(options.start) || isDefined(options.end) || isDefined(options.after) || isDefined(options.before));
|
2938
4797
|
}
|
2939
|
-
|
4798
|
+
class RecordArray extends Array {
|
4799
|
+
constructor(...args) {
|
4800
|
+
super(...RecordArray.parseConstructorParams(...args));
|
4801
|
+
}
|
4802
|
+
static parseConstructorParams(...args) {
|
4803
|
+
if (args.length === 1 && typeof args[0] === "number") {
|
4804
|
+
return new Array(args[0]);
|
4805
|
+
}
|
4806
|
+
if (args.length <= 1 && Array.isArray(args[0] ?? [])) {
|
4807
|
+
const result = args[0] ?? [];
|
4808
|
+
return new Array(...result);
|
4809
|
+
}
|
4810
|
+
return new Array(...args);
|
4811
|
+
}
|
4812
|
+
toArray() {
|
4813
|
+
return new Array(...this);
|
4814
|
+
}
|
4815
|
+
toSerializable() {
|
4816
|
+
return JSON.parse(this.toString());
|
4817
|
+
}
|
4818
|
+
toString() {
|
4819
|
+
return JSON.stringify(this.toArray());
|
4820
|
+
}
|
4821
|
+
map(callbackfn, thisArg) {
|
4822
|
+
return this.toArray().map(callbackfn, thisArg);
|
4823
|
+
}
|
4824
|
+
}
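A minimal sketch of the new RecordArray wrapper above; getAll() results are wrapped in it further down in this diff. The generated client and table name (xata.db.posts) are placeholders.
const posts = await xata.db.posts.getAll();  // now a RecordArray rather than a plain Array
posts instanceof Array;                      // true: RecordArray extends Array
const plain = posts.toArray();               // shallow copy as a plain Array
const json = posts.toSerializable();         // JSON-safe clone via JSON.parse(JSON.stringify(...))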
|
4825
|
+
const _PageRecordArray = class _PageRecordArray extends Array {
|
2940
4826
|
constructor(...args) {
|
2941
|
-
super(...
|
4827
|
+
super(..._PageRecordArray.parseConstructorParams(...args));
|
2942
4828
|
__privateAdd$6(this, _page, void 0);
|
2943
|
-
__privateSet$
|
4829
|
+
__privateSet$4(this, _page, isObject(args[0]?.meta) ? args[0] : { meta: { page: { cursor: "", more: false } }, records: [] });
|
2944
4830
|
}
|
2945
4831
|
static parseConstructorParams(...args) {
|
2946
4832
|
if (args.length === 1 && typeof args[0] === "number") {
|
@@ -2970,8 +4856,8 @@ const _RecordArray = class _RecordArray extends Array {
|
|
2970
4856
|
* @returns A new array of objects
|
2971
4857
|
*/
|
2972
4858
|
async nextPage(size, offset) {
|
2973
|
-
const newPage = await __privateGet$
|
2974
|
-
return new
|
4859
|
+
const newPage = await __privateGet$5(this, _page).nextPage(size, offset);
|
4860
|
+
return new _PageRecordArray(newPage);
|
2975
4861
|
}
|
2976
4862
|
/**
|
2977
4863
|
* Retrieve previous page of records
|
@@ -2979,8 +4865,8 @@ const _RecordArray = class _RecordArray extends Array {
|
|
2979
4865
|
* @returns A new array of objects
|
2980
4866
|
*/
|
2981
4867
|
async previousPage(size, offset) {
|
2982
|
-
const newPage = await __privateGet$
|
2983
|
-
return new
|
4868
|
+
const newPage = await __privateGet$5(this, _page).previousPage(size, offset);
|
4869
|
+
return new _PageRecordArray(newPage);
|
2984
4870
|
}
|
2985
4871
|
/**
|
2986
4872
|
* Retrieve start page of records
|
@@ -2988,8 +4874,8 @@ const _RecordArray = class _RecordArray extends Array {
|
|
2988
4874
|
* @returns A new array of objects
|
2989
4875
|
*/
|
2990
4876
|
async startPage(size, offset) {
|
2991
|
-
const newPage = await __privateGet$
|
2992
|
-
return new
|
4877
|
+
const newPage = await __privateGet$5(this, _page).startPage(size, offset);
|
4878
|
+
return new _PageRecordArray(newPage);
|
2993
4879
|
}
|
2994
4880
|
/**
|
2995
4881
|
* Retrieve end page of records
|
@@ -2997,24 +4883,24 @@ const _RecordArray = class _RecordArray extends Array {
|
|
2997
4883
|
* @returns A new array of objects
|
2998
4884
|
*/
|
2999
4885
|
async endPage(size, offset) {
|
3000
|
-
const newPage = await __privateGet$
|
3001
|
-
return new
|
4886
|
+
const newPage = await __privateGet$5(this, _page).endPage(size, offset);
|
4887
|
+
return new _PageRecordArray(newPage);
|
3002
4888
|
}
|
3003
4889
|
/**
|
3004
4890
|
* @returns Boolean indicating if there is a next page
|
3005
4891
|
*/
|
3006
4892
|
hasNextPage() {
|
3007
|
-
return __privateGet$
|
4893
|
+
return __privateGet$5(this, _page).meta.page.more;
|
3008
4894
|
}
|
3009
4895
|
};
|
3010
4896
|
_page = new WeakMap();
|
3011
|
-
let
|
4897
|
+
let PageRecordArray = _PageRecordArray;
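A minimal sketch of cursor pagination with the renamed PageRecordArray above; the generated client, table name, and page size are placeholders.
const page = await xata.db.posts.getPaginated({ pagination: { size: 50 } });
page.records;                                  // a PageRecordArray bound to its Page
if (page.records.hasNextPage()) {
  const next = await page.records.nextPage();  // fetches the next cursor page and wraps it again
}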
|
3012
4898
|
|
3013
4899
|
var __accessCheck$5 = (obj, member, msg) => {
|
3014
4900
|
if (!member.has(obj))
|
3015
4901
|
throw TypeError("Cannot " + msg);
|
3016
4902
|
};
|
3017
|
-
var __privateGet$
|
4903
|
+
var __privateGet$4 = (obj, member, getter) => {
|
3018
4904
|
__accessCheck$5(obj, member, "read from private field");
|
3019
4905
|
return getter ? getter.call(obj) : member.get(obj);
|
3020
4906
|
};
|
@@ -3023,7 +4909,7 @@ var __privateAdd$5 = (obj, member, value) => {
|
|
3023
4909
|
throw TypeError("Cannot add the same private member more than once");
|
3024
4910
|
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
3025
4911
|
};
|
3026
|
-
var __privateSet$
|
4912
|
+
var __privateSet$3 = (obj, member, value, setter) => {
|
3027
4913
|
__accessCheck$5(obj, member, "write to private field");
|
3028
4914
|
setter ? setter.call(obj, value) : member.set(obj, value);
|
3029
4915
|
return value;
|
@@ -3041,25 +4927,25 @@ const _Query = class _Query {
|
|
3041
4927
|
__privateAdd$5(this, _data, { filter: {} });
|
3042
4928
|
// Implements pagination
|
3043
4929
|
this.meta = { page: { cursor: "start", more: true, size: PAGINATION_DEFAULT_SIZE } };
|
3044
|
-
this.records = new
|
3045
|
-
__privateSet$
|
4930
|
+
this.records = new PageRecordArray(this, []);
|
4931
|
+
__privateSet$3(this, _table$1, table);
|
3046
4932
|
if (repository) {
|
3047
|
-
__privateSet$
|
4933
|
+
__privateSet$3(this, _repository, repository);
|
3048
4934
|
} else {
|
3049
|
-
__privateSet$
|
4935
|
+
__privateSet$3(this, _repository, this);
|
3050
4936
|
}
|
3051
4937
|
const parent = cleanParent(data, rawParent);
|
3052
|
-
__privateGet$
|
3053
|
-
__privateGet$
|
3054
|
-
__privateGet$
|
3055
|
-
__privateGet$
|
3056
|
-
__privateGet$
|
3057
|
-
__privateGet$
|
3058
|
-
__privateGet$
|
3059
|
-
__privateGet$
|
3060
|
-
__privateGet$
|
3061
|
-
__privateGet$
|
3062
|
-
__privateGet$
|
4938
|
+
__privateGet$4(this, _data).filter = data.filter ?? parent?.filter ?? {};
|
4939
|
+
__privateGet$4(this, _data).filter.$any = data.filter?.$any ?? parent?.filter?.$any;
|
4940
|
+
__privateGet$4(this, _data).filter.$all = data.filter?.$all ?? parent?.filter?.$all;
|
4941
|
+
__privateGet$4(this, _data).filter.$not = data.filter?.$not ?? parent?.filter?.$not;
|
4942
|
+
__privateGet$4(this, _data).filter.$none = data.filter?.$none ?? parent?.filter?.$none;
|
4943
|
+
__privateGet$4(this, _data).sort = data.sort ?? parent?.sort;
|
4944
|
+
__privateGet$4(this, _data).columns = data.columns ?? parent?.columns;
|
4945
|
+
__privateGet$4(this, _data).consistency = data.consistency ?? parent?.consistency;
|
4946
|
+
__privateGet$4(this, _data).pagination = data.pagination ?? parent?.pagination;
|
4947
|
+
__privateGet$4(this, _data).cache = data.cache ?? parent?.cache;
|
4948
|
+
__privateGet$4(this, _data).fetchOptions = data.fetchOptions ?? parent?.fetchOptions;
|
3063
4949
|
this.any = this.any.bind(this);
|
3064
4950
|
this.all = this.all.bind(this);
|
3065
4951
|
this.not = this.not.bind(this);
|
@@ -3070,10 +4956,10 @@ const _Query = class _Query {
|
|
3070
4956
|
Object.defineProperty(this, "repository", { enumerable: false });
|
3071
4957
|
}
|
3072
4958
|
getQueryOptions() {
|
3073
|
-
return __privateGet$
|
4959
|
+
return __privateGet$4(this, _data);
|
3074
4960
|
}
|
3075
4961
|
key() {
|
3076
|
-
const { columns = [], filter = {}, sort = [], pagination = {} } = __privateGet$
|
4962
|
+
const { columns = [], filter = {}, sort = [], pagination = {} } = __privateGet$4(this, _data);
|
3077
4963
|
const key = JSON.stringify({ columns, filter, sort, pagination });
|
3078
4964
|
return toBase64(key);
|
3079
4965
|
}
|
@@ -3084,7 +4970,7 @@ const _Query = class _Query {
|
|
3084
4970
|
*/
|
3085
4971
|
any(...queries) {
|
3086
4972
|
const $any = queries.map((query) => query.getQueryOptions().filter ?? {});
|
3087
|
-
return new _Query(__privateGet$
|
4973
|
+
return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { filter: { $any } }, __privateGet$4(this, _data));
|
3088
4974
|
}
|
3089
4975
|
/**
|
3090
4976
|
* Builds a new query object representing a logical AND between the given subqueries.
|
@@ -3093,7 +4979,7 @@ const _Query = class _Query {
|
|
3093
4979
|
*/
|
3094
4980
|
all(...queries) {
|
3095
4981
|
const $all = queries.map((query) => query.getQueryOptions().filter ?? {});
|
3096
|
-
return new _Query(__privateGet$
|
4982
|
+
return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { filter: { $all } }, __privateGet$4(this, _data));
|
3097
4983
|
}
|
3098
4984
|
/**
|
3099
4985
|
* Builds a new query object representing a logical OR negating each subquery. In pseudo-code: !q1 OR !q2
|
@@ -3102,7 +4988,7 @@ const _Query = class _Query {
|
|
3102
4988
|
*/
|
3103
4989
|
not(...queries) {
|
3104
4990
|
const $not = queries.map((query) => query.getQueryOptions().filter ?? {});
|
3105
|
-
return new _Query(__privateGet$
|
4991
|
+
return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { filter: { $not } }, __privateGet$4(this, _data));
|
3106
4992
|
}
|
3107
4993
|
/**
|
3108
4994
|
* Builds a new query object representing a logical AND negating each subquery. In pseudo-code: !q1 AND !q2
|
@@ -3111,25 +4997,25 @@ const _Query = class _Query {
|
|
3111
4997
|
*/
|
3112
4998
|
none(...queries) {
|
3113
4999
|
const $none = queries.map((query) => query.getQueryOptions().filter ?? {});
|
3114
|
-
return new _Query(__privateGet$
|
5000
|
+
return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { filter: { $none } }, __privateGet$4(this, _data));
|
3115
5001
|
}
|
3116
5002
|
filter(a, b) {
|
3117
5003
|
if (arguments.length === 1) {
|
3118
5004
|
const constraints = Object.entries(a ?? {}).map(([column, constraint]) => ({
|
3119
5005
|
[column]: __privateMethod$3(this, _cleanFilterConstraint, cleanFilterConstraint_fn).call(this, column, constraint)
|
3120
5006
|
}));
|
3121
|
-
const $all = compact([__privateGet$
|
3122
|
-
return new _Query(__privateGet$
|
5007
|
+
const $all = compact([__privateGet$4(this, _data).filter?.$all].flat().concat(constraints));
|
5008
|
+
return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { filter: { $all } }, __privateGet$4(this, _data));
|
3123
5009
|
} else {
|
3124
5010
|
const constraints = isDefined(a) && isDefined(b) ? [{ [a]: __privateMethod$3(this, _cleanFilterConstraint, cleanFilterConstraint_fn).call(this, a, b) }] : void 0;
|
3125
|
-
const $all = compact([__privateGet$
|
3126
|
-
return new _Query(__privateGet$
|
5011
|
+
const $all = compact([__privateGet$4(this, _data).filter?.$all].flat().concat(constraints));
|
5012
|
+
return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { filter: { $all } }, __privateGet$4(this, _data));
|
3127
5013
|
}
|
3128
5014
|
}
|
3129
5015
|
sort(column, direction = "asc") {
|
3130
|
-
const originalSort = [__privateGet$
|
5016
|
+
const originalSort = [__privateGet$4(this, _data).sort ?? []].flat();
|
3131
5017
|
const sort = [...originalSort, { column, direction }];
|
3132
|
-
return new _Query(__privateGet$
|
5018
|
+
return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { sort }, __privateGet$4(this, _data));
|
3133
5019
|
}
|
3134
5020
|
/**
|
3135
5021
|
* Builds a new query specifying the set of columns to be returned in the query response.
|
@@ -3138,15 +5024,15 @@ const _Query = class _Query {
|
|
3138
5024
|
*/
|
3139
5025
|
select(columns) {
|
3140
5026
|
return new _Query(
|
3141
|
-
__privateGet$
|
3142
|
-
__privateGet$
|
5027
|
+
__privateGet$4(this, _repository),
|
5028
|
+
__privateGet$4(this, _table$1),
|
3143
5029
|
{ columns },
|
3144
|
-
__privateGet$
|
5030
|
+
__privateGet$4(this, _data)
|
3145
5031
|
);
|
3146
5032
|
}
|
3147
5033
|
getPaginated(options = {}) {
|
3148
|
-
const query = new _Query(__privateGet$
|
3149
|
-
return __privateGet$
|
5034
|
+
const query = new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), options, __privateGet$4(this, _data));
|
5035
|
+
return __privateGet$4(this, _repository).query(query);
|
3150
5036
|
}
|
3151
5037
|
/**
|
3152
5038
|
* Get results in an iterator
|
@@ -3183,7 +5069,7 @@ const _Query = class _Query {
|
|
3183
5069
|
if (page.hasNextPage() && options.pagination?.size === void 0) {
|
3184
5070
|
console.trace("Calling getMany does not return all results. Paginate to get all results or call getAll.");
|
3185
5071
|
}
|
3186
|
-
const array = new
|
5072
|
+
const array = new PageRecordArray(page, results.slice(0, size));
|
3187
5073
|
return array;
|
3188
5074
|
}
|
3189
5075
|
async getAll(options = {}) {
|
@@ -3192,7 +5078,7 @@ const _Query = class _Query {
|
|
3192
5078
|
for await (const page of this.getIterator({ ...rest, batchSize })) {
|
3193
5079
|
results.push(...page);
|
3194
5080
|
}
|
3195
|
-
return results;
|
5081
|
+
return new RecordArray(results);
|
3196
5082
|
}
|
3197
5083
|
async getFirst(options = {}) {
|
3198
5084
|
const records = await this.getMany({ ...options, pagination: { size: 1 } });
|
@@ -3207,12 +5093,12 @@ const _Query = class _Query {
|
|
3207
5093
|
async summarize(params = {}) {
|
3208
5094
|
const { summaries, summariesFilter, ...options } = params;
|
3209
5095
|
const query = new _Query(
|
3210
|
-
__privateGet$
|
3211
|
-
__privateGet$
|
5096
|
+
__privateGet$4(this, _repository),
|
5097
|
+
__privateGet$4(this, _table$1),
|
3212
5098
|
options,
|
3213
|
-
__privateGet$
|
5099
|
+
__privateGet$4(this, _data)
|
3214
5100
|
);
|
3215
|
-
return __privateGet$
|
5101
|
+
return __privateGet$4(this, _repository).summarizeTable(query, summaries, summariesFilter);
|
3216
5102
|
}
|
3217
5103
|
/**
|
3218
5104
|
* Builds a new query object adding a cache TTL in milliseconds.
|
@@ -3220,7 +5106,7 @@ const _Query = class _Query {
|
|
3220
5106
|
* @returns A new Query object.
|
3221
5107
|
*/
|
3222
5108
|
cache(ttl) {
|
3223
|
-
return new _Query(__privateGet$
|
5109
|
+
return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { cache: ttl }, __privateGet$4(this, _data));
|
3224
5110
|
}
|
3225
5111
|
/**
|
3226
5112
|
* Retrieve next page of records
|
@@ -3266,7 +5152,7 @@ _repository = new WeakMap();
|
|
3266
5152
|
_data = new WeakMap();
|
3267
5153
|
_cleanFilterConstraint = new WeakSet();
|
3268
5154
|
cleanFilterConstraint_fn = function(column, value) {
|
3269
|
-
const columnType = __privateGet$
|
5155
|
+
const columnType = __privateGet$4(this, _table$1).schema?.columns.find(({ name }) => name === column)?.type;
|
3270
5156
|
if (columnType === "multiple" && (isString(value) || isStringArray(value))) {
|
3271
5157
|
return { $includes: value };
|
3272
5158
|
}
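A minimal sketch of the constraint cleanup above: for a column whose schema type is "multiple", a plain string (or string-array) filter value is rewritten to an $includes constraint. Table and column names are placeholders.
xata.db.posts.filter("labels", "featured");
// behaves like:
xata.db.posts.filter("labels", { $includes: "featured" });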
|
@@ -3356,7 +5242,7 @@ var __accessCheck$4 = (obj, member, msg) => {
|
|
3356
5242
|
if (!member.has(obj))
|
3357
5243
|
throw TypeError("Cannot " + msg);
|
3358
5244
|
};
|
3359
|
-
var __privateGet$
|
5245
|
+
var __privateGet$3 = (obj, member, getter) => {
|
3360
5246
|
__accessCheck$4(obj, member, "read from private field");
|
3361
5247
|
return getter ? getter.call(obj) : member.get(obj);
|
3362
5248
|
};
|
@@ -3365,7 +5251,7 @@ var __privateAdd$4 = (obj, member, value) => {
|
|
3365
5251
|
throw TypeError("Cannot add the same private member more than once");
|
3366
5252
|
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
3367
5253
|
};
|
3368
|
-
var __privateSet$
|
5254
|
+
var __privateSet$2 = (obj, member, value, setter) => {
|
3369
5255
|
__accessCheck$4(obj, member, "write to private field");
|
3370
5256
|
setter ? setter.call(obj, value) : member.set(obj, value);
|
3371
5257
|
return value;
|
@@ -3374,7 +5260,7 @@ var __privateMethod$2 = (obj, member, method) => {
|
|
3374
5260
|
__accessCheck$4(obj, member, "access private method");
|
3375
5261
|
return method;
|
3376
5262
|
};
|
3377
|
-
var _table, _getFetchProps, _db, _cache, _schemaTables
|
5263
|
+
var _table, _getFetchProps, _db, _cache, _schemaTables, _trace, _insertRecordWithoutId, insertRecordWithoutId_fn, _insertRecordWithId, insertRecordWithId_fn, _insertRecords, insertRecords_fn, _updateRecordWithID, updateRecordWithID_fn, _updateRecords, updateRecords_fn, _upsertRecordWithID, upsertRecordWithID_fn, _deleteRecord, deleteRecord_fn, _deleteRecords, deleteRecords_fn, _setCacheQuery, setCacheQuery_fn, _getCacheQuery, getCacheQuery_fn, _getSchemaTables, getSchemaTables_fn, _transformObjectToApi, transformObjectToApi_fn;
|
3378
5264
|
const BULK_OPERATION_MAX_SIZE = 1e3;
|
3379
5265
|
class Repository extends Query {
|
3380
5266
|
}
|
@@ -3395,31 +5281,31 @@ class RestRepository extends Query {
|
|
3395
5281
|
__privateAdd$4(this, _deleteRecords);
|
3396
5282
|
__privateAdd$4(this, _setCacheQuery);
|
3397
5283
|
__privateAdd$4(this, _getCacheQuery);
|
3398
|
-
__privateAdd$4(this, _getSchemaTables
|
5284
|
+
__privateAdd$4(this, _getSchemaTables);
|
3399
5285
|
__privateAdd$4(this, _transformObjectToApi);
|
3400
5286
|
__privateAdd$4(this, _table, void 0);
|
3401
5287
|
__privateAdd$4(this, _getFetchProps, void 0);
|
3402
5288
|
__privateAdd$4(this, _db, void 0);
|
3403
5289
|
__privateAdd$4(this, _cache, void 0);
|
3404
|
-
__privateAdd$4(this, _schemaTables
|
5290
|
+
__privateAdd$4(this, _schemaTables, void 0);
|
3405
5291
|
__privateAdd$4(this, _trace, void 0);
|
3406
|
-
__privateSet$
|
3407
|
-
__privateSet$
|
3408
|
-
__privateSet$
|
3409
|
-
__privateSet$
|
3410
|
-
__privateSet$
|
5292
|
+
__privateSet$2(this, _table, options.table);
|
5293
|
+
__privateSet$2(this, _db, options.db);
|
5294
|
+
__privateSet$2(this, _cache, options.pluginOptions.cache);
|
5295
|
+
__privateSet$2(this, _schemaTables, options.schemaTables);
|
5296
|
+
__privateSet$2(this, _getFetchProps, () => ({ ...options.pluginOptions, sessionID: generateUUID() }));
|
3411
5297
|
const trace = options.pluginOptions.trace ?? defaultTrace;
|
3412
|
-
__privateSet$
|
5298
|
+
__privateSet$2(this, _trace, async (name, fn, options2 = {}) => {
|
3413
5299
|
return trace(name, fn, {
|
3414
5300
|
...options2,
|
3415
|
-
[TraceAttributes.TABLE]: __privateGet$
|
5301
|
+
[TraceAttributes.TABLE]: __privateGet$3(this, _table),
|
3416
5302
|
[TraceAttributes.KIND]: "sdk-operation",
|
3417
5303
|
[TraceAttributes.VERSION]: VERSION
|
3418
5304
|
});
|
3419
5305
|
});
|
3420
5306
|
}
|
3421
5307
|
async create(a, b, c, d) {
|
3422
|
-
return __privateGet$
|
5308
|
+
return __privateGet$3(this, _trace).call(this, "create", async () => {
|
3423
5309
|
const ifVersion = parseIfVersion(b, c, d);
|
3424
5310
|
if (Array.isArray(a)) {
|
3425
5311
|
if (a.length === 0)
|
@@ -3449,7 +5335,7 @@ class RestRepository extends Query {
|
|
3449
5335
|
});
|
3450
5336
|
}
|
3451
5337
|
async read(a, b) {
|
3452
|
-
return __privateGet$
|
5338
|
+
return __privateGet$3(this, _trace).call(this, "read", async () => {
|
3453
5339
|
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
3454
5340
|
if (Array.isArray(a)) {
|
3455
5341
|
if (a.length === 0)
|
@@ -3470,17 +5356,17 @@ class RestRepository extends Query {
|
|
3470
5356
|
workspace: "{workspaceId}",
|
3471
5357
|
dbBranchName: "{dbBranch}",
|
3472
5358
|
region: "{region}",
|
3473
|
-
tableName: __privateGet$
|
5359
|
+
tableName: __privateGet$3(this, _table),
|
3474
5360
|
recordId: id
|
3475
5361
|
},
|
3476
5362
|
queryParams: { columns },
|
3477
|
-
...__privateGet$
|
5363
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
3478
5364
|
});
|
3479
|
-
const schemaTables = await __privateMethod$2(this, _getSchemaTables
|
5365
|
+
const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
|
3480
5366
|
return initObject(
|
3481
|
-
__privateGet$
|
5367
|
+
__privateGet$3(this, _db),
|
3482
5368
|
schemaTables,
|
3483
|
-
__privateGet$
|
5369
|
+
__privateGet$3(this, _table),
|
3484
5370
|
response,
|
3485
5371
|
columns
|
3486
5372
|
);
|
@@ -3495,7 +5381,7 @@ class RestRepository extends Query {
|
|
3495
5381
|
});
|
3496
5382
|
}
|
3497
5383
|
async readOrThrow(a, b) {
|
3498
|
-
return __privateGet$
|
5384
|
+
return __privateGet$3(this, _trace).call(this, "readOrThrow", async () => {
|
3499
5385
|
const result = await this.read(a, b);
|
3500
5386
|
if (Array.isArray(result)) {
|
3501
5387
|
const missingIds = compact(
|
@@ -3514,7 +5400,7 @@ class RestRepository extends Query {
|
|
3514
5400
|
});
|
3515
5401
|
}
|
3516
5402
|
async update(a, b, c, d) {
|
3517
|
-
return __privateGet$
|
5403
|
+
return __privateGet$3(this, _trace).call(this, "update", async () => {
|
3518
5404
|
const ifVersion = parseIfVersion(b, c, d);
|
3519
5405
|
if (Array.isArray(a)) {
|
3520
5406
|
if (a.length === 0)
|
@@ -3547,7 +5433,7 @@ class RestRepository extends Query {
|
|
3547
5433
|
});
|
3548
5434
|
}
|
3549
5435
|
async updateOrThrow(a, b, c, d) {
|
3550
|
-
return __privateGet$
|
5436
|
+
return __privateGet$3(this, _trace).call(this, "updateOrThrow", async () => {
|
3551
5437
|
const result = await this.update(a, b, c, d);
|
3552
5438
|
if (Array.isArray(result)) {
|
3553
5439
|
const missingIds = compact(
|
@@ -3566,7 +5452,7 @@ class RestRepository extends Query {
|
|
3566
5452
|
});
|
3567
5453
|
}
|
3568
5454
|
async createOrUpdate(a, b, c, d) {
|
3569
|
-
return __privateGet$
|
5455
|
+
return __privateGet$3(this, _trace).call(this, "createOrUpdate", async () => {
|
3570
5456
|
const ifVersion = parseIfVersion(b, c, d);
|
3571
5457
|
if (Array.isArray(a)) {
|
3572
5458
|
if (a.length === 0)
|
@@ -3601,7 +5487,7 @@ class RestRepository extends Query {
|
|
3601
5487
|
});
|
3602
5488
|
}
|
3603
5489
|
async createOrReplace(a, b, c, d) {
|
3604
|
-
return __privateGet$
|
5490
|
+
return __privateGet$3(this, _trace).call(this, "createOrReplace", async () => {
|
3605
5491
|
const ifVersion = parseIfVersion(b, c, d);
|
3606
5492
|
if (Array.isArray(a)) {
|
3607
5493
|
if (a.length === 0)
|
@@ -3633,7 +5519,7 @@ class RestRepository extends Query {
|
|
3633
5519
|
});
|
3634
5520
|
}
|
3635
5521
|
async delete(a, b) {
|
3636
|
-
return __privateGet$
|
5522
|
+
return __privateGet$3(this, _trace).call(this, "delete", async () => {
|
3637
5523
|
if (Array.isArray(a)) {
|
3638
5524
|
if (a.length === 0)
|
3639
5525
|
return [];
|
@@ -3659,7 +5545,7 @@ class RestRepository extends Query {
|
|
3659
5545
|
});
|
3660
5546
|
}
|
3661
5547
|
async deleteOrThrow(a, b) {
|
3662
|
-
return __privateGet$
|
5548
|
+
return __privateGet$3(this, _trace).call(this, "deleteOrThrow", async () => {
|
3663
5549
|
const result = await this.delete(a, b);
|
3664
5550
|
if (Array.isArray(result)) {
|
3665
5551
|
const missingIds = compact(
|
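For context, every repository method in the hunks above (`read`, `readOrThrow`, `update`, `updateOrThrow`, `createOrUpdate`, `createOrReplace`, `delete`, `deleteOrThrow`) is wrapped in the same `_trace` callback, and the write paths run their trailing arguments through `parseIfVersion` to build an optimistic-concurrency guard. A minimal consumer-side sketch of those calls, assuming hypothetical credentials, a `users` table and record ids (none of these names come from the diff):

```ts
import { BaseClient } from "@xata.io/client";

// Hypothetical options; a generated client would normally supply these.
const xata = new BaseClient({
  databaseURL: "https://my-workspace-1234.us-east-1.xata.sh/db/mydb",
  apiKey: "xau_placeholder",
  branch: "main"
});

async function crudSketch() {
  const users = xata.db.users; // RestRepository for the hypothetical `users` table

  // read() resolves to the record or null; readOrThrow() rejects on a miss.
  const user = await users.read("rec_abc123");

  // The trailing object is what parseIfVersion picks up: the update only
  // applies if the stored record version still matches (ifVersion guard).
  if (user) {
    await users.update(user.id, { name: "Ada" }, { ifVersion: 0 }); // version value illustrative
  }

  // createOrUpdate() merges fields into an existing record (or creates it);
  // createOrReplace() goes through the createOnly/replace path instead.
  await users.createOrUpdate("rec_abc123", { name: "Ada Lovelace" });

  // delete() accepts a single id or an array of ids (see the Array.isArray branch above).
  await users.delete(["rec_abc123"]);
}
```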
@@ -3677,13 +5563,13 @@ class RestRepository extends Query {
|
|
3677
5563
|
});
|
3678
5564
|
}
|
3679
5565
|
async search(query, options = {}) {
|
3680
|
-
return __privateGet$
|
5566
|
+
return __privateGet$3(this, _trace).call(this, "search", async () => {
|
3681
5567
|
const { records, totalCount } = await searchTable({
|
3682
5568
|
pathParams: {
|
3683
5569
|
workspace: "{workspaceId}",
|
3684
5570
|
dbBranchName: "{dbBranch}",
|
3685
5571
|
region: "{region}",
|
3686
|
-
tableName: __privateGet$
|
5572
|
+
tableName: __privateGet$3(this, _table)
|
3687
5573
|
},
|
3688
5574
|
body: {
|
3689
5575
|
query,
|
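The `search` method shown above forwards `fuzziness`, `prefix`, `highlight`, `page` and `target` to `searchTable` and hydrates each hit through `initObject`, returning the records together with a `totalCount`. A short sketch (table, columns and option values are illustrative):

```ts
// `xata` is the client from the earlier sketch; typed loosely to keep the snippet standalone.
declare const xata: any;

async function searchSketch() {
  const { records, totalCount } = await xata.db.users.search("ada lovelace", {
    fuzziness: 1,              // tolerate small typos
    page: { size: 10 },        // pagination is passed through in the request body
    target: ["name", "email"]  // restrict matching to hypothetical columns
  });

  console.log(`matched ${totalCount} record(s), showing ${records.length}`);
}
```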
@@ -3695,23 +5581,23 @@ class RestRepository extends Query {
|
|
3695
5581
|
page: options.page,
|
3696
5582
|
target: options.target
|
3697
5583
|
},
|
3698
|
-
...__privateGet$
|
5584
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
3699
5585
|
});
|
3700
|
-
const schemaTables = await __privateMethod$2(this, _getSchemaTables
|
5586
|
+
const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
|
3701
5587
|
return {
|
3702
|
-
records: records.map((item) => initObject(__privateGet$
|
5588
|
+
records: records.map((item) => initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), item, ["*"])),
|
3703
5589
|
totalCount
|
3704
5590
|
};
|
3705
5591
|
});
|
3706
5592
|
}
|
3707
5593
|
async vectorSearch(column, query, options) {
|
3708
|
-
return __privateGet$
|
5594
|
+
return __privateGet$3(this, _trace).call(this, "vectorSearch", async () => {
|
3709
5595
|
const { records, totalCount } = await vectorSearchTable({
|
3710
5596
|
pathParams: {
|
3711
5597
|
workspace: "{workspaceId}",
|
3712
5598
|
dbBranchName: "{dbBranch}",
|
3713
5599
|
region: "{region}",
|
3714
|
-
tableName: __privateGet$
|
5600
|
+
tableName: __privateGet$3(this, _table)
|
3715
5601
|
},
|
3716
5602
|
body: {
|
3717
5603
|
column,
|
@@ -3720,32 +5606,32 @@ class RestRepository extends Query {
|
|
3720
5606
|
size: options?.size,
|
3721
5607
|
filter: options?.filter
|
3722
5608
|
},
|
3723
|
-
...__privateGet$
|
5609
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
3724
5610
|
});
|
3725
|
-
const schemaTables = await __privateMethod$2(this, _getSchemaTables
|
5611
|
+
const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
|
3726
5612
|
return {
|
3727
|
-
records: records.map((item) => initObject(__privateGet$
|
5613
|
+
records: records.map((item) => initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), item, ["*"])),
|
3728
5614
|
totalCount
|
3729
5615
|
};
|
3730
5616
|
});
|
3731
5617
|
}
|
3732
5618
|
async aggregate(aggs, filter) {
|
3733
|
-
return __privateGet$
|
5619
|
+
return __privateGet$3(this, _trace).call(this, "aggregate", async () => {
|
3734
5620
|
const result = await aggregateTable({
|
3735
5621
|
pathParams: {
|
3736
5622
|
workspace: "{workspaceId}",
|
3737
5623
|
dbBranchName: "{dbBranch}",
|
3738
5624
|
region: "{region}",
|
3739
|
-
tableName: __privateGet$
|
5625
|
+
tableName: __privateGet$3(this, _table)
|
3740
5626
|
},
|
3741
5627
|
body: { aggs, filter },
|
3742
|
-
...__privateGet$
|
5628
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
3743
5629
|
});
|
3744
5630
|
return result;
|
3745
5631
|
});
|
3746
5632
|
}
|
3747
5633
|
async query(query) {
|
3748
|
-
return __privateGet$
|
5634
|
+
return __privateGet$3(this, _trace).call(this, "query", async () => {
|
3749
5635
|
const cacheQuery = await __privateMethod$2(this, _getCacheQuery, getCacheQuery_fn).call(this, query);
|
3750
5636
|
if (cacheQuery)
|
3751
5637
|
return new Page(query, cacheQuery.meta, cacheQuery.records);
|
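In the same hunk, `vectorSearch(column, query, options)` posts the column name, the query vector and the optional `size`/`filter` to `vectorSearchTable`, while `aggregate(aggs, filter)` hands its arguments straight to `aggregateTable`. A hedged sketch, assuming a hypothetical `docs` table with a vector column named `embedding` (the aggregation shape below is illustrative, not taken from the diff):

```ts
declare const xata: any; // client from the earlier sketch

async function vectorAndAggregateSketch() {
  // Nearest-neighbour lookup over a hypothetical `embedding` vector column.
  const { records, totalCount } = await xata.db.docs.vectorSearch(
    "embedding",
    [0.12, -0.05, 0.33], // query vector; dimensionality must match the column
    { size: 5 }
  );
  console.log(totalCount, records.length);

  // Aggregations are forwarded verbatim as the `aggs` body field.
  const result = await xata.db.docs.aggregate({
    total: { count: "*" } // illustrative aggregation DSL
  });
  console.log(result);
}
```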
@@ -3755,7 +5641,7 @@ class RestRepository extends Query {
|
|
3755
5641
|
workspace: "{workspaceId}",
|
3756
5642
|
dbBranchName: "{dbBranch}",
|
3757
5643
|
region: "{region}",
|
3758
|
-
tableName: __privateGet$
|
5644
|
+
tableName: __privateGet$3(this, _table)
|
3759
5645
|
},
|
3760
5646
|
body: {
|
3761
5647
|
filter: cleanFilter(data.filter),
|
@@ -3765,14 +5651,14 @@ class RestRepository extends Query {
|
|
3765
5651
|
consistency: data.consistency
|
3766
5652
|
},
|
3767
5653
|
fetchOptions: data.fetchOptions,
|
3768
|
-
...__privateGet$
|
5654
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
3769
5655
|
});
|
3770
|
-
const schemaTables = await __privateMethod$2(this, _getSchemaTables
|
5656
|
+
const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
|
3771
5657
|
const records = objects.map(
|
3772
5658
|
(record) => initObject(
|
3773
|
-
__privateGet$
|
5659
|
+
__privateGet$3(this, _db),
|
3774
5660
|
schemaTables,
|
3775
|
-
__privateGet$
|
5661
|
+
__privateGet$3(this, _table),
|
3776
5662
|
record,
|
3777
5663
|
data.columns ?? ["*"]
|
3778
5664
|
)
|
@@ -3782,14 +5668,14 @@ class RestRepository extends Query {
|
|
3782
5668
|
});
|
3783
5669
|
}
|
3784
5670
|
async summarizeTable(query, summaries, summariesFilter) {
|
3785
|
-
return __privateGet$
|
5671
|
+
return __privateGet$3(this, _trace).call(this, "summarize", async () => {
|
3786
5672
|
const data = query.getQueryOptions();
|
3787
5673
|
const result = await summarizeTable({
|
3788
5674
|
pathParams: {
|
3789
5675
|
workspace: "{workspaceId}",
|
3790
5676
|
dbBranchName: "{dbBranch}",
|
3791
5677
|
region: "{region}",
|
3792
|
-
tableName: __privateGet$
|
5678
|
+
tableName: __privateGet$3(this, _table)
|
3793
5679
|
},
|
3794
5680
|
body: {
|
3795
5681
|
filter: cleanFilter(data.filter),
|
@@ -3800,13 +5686,13 @@ class RestRepository extends Query {
|
|
3800
5686
|
summaries,
|
3801
5687
|
summariesFilter
|
3802
5688
|
},
|
3803
|
-
...__privateGet$
|
5689
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
3804
5690
|
});
|
3805
|
-
const schemaTables = await __privateMethod$2(this, _getSchemaTables
|
5691
|
+
const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
|
3806
5692
|
return {
|
3807
5693
|
...result,
|
3808
5694
|
summaries: result.summaries.map(
|
3809
|
-
(summary) => initObject(__privateGet$
|
5695
|
+
(summary) => initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), summary, data.columns ?? [])
|
3810
5696
|
)
|
3811
5697
|
};
|
3812
5698
|
});
|
@@ -3818,7 +5704,7 @@ class RestRepository extends Query {
|
|
3818
5704
|
workspace: "{workspaceId}",
|
3819
5705
|
dbBranchName: "{dbBranch}",
|
3820
5706
|
region: "{region}",
|
3821
|
-
tableName: __privateGet$
|
5707
|
+
tableName: __privateGet$3(this, _table),
|
3822
5708
|
sessionId: options?.sessionId
|
3823
5709
|
},
|
3824
5710
|
body: {
|
@@ -3828,7 +5714,7 @@ class RestRepository extends Query {
|
|
3828
5714
|
search: options?.searchType === "keyword" ? options?.search : void 0,
|
3829
5715
|
vectorSearch: options?.searchType === "vector" ? options?.vectorSearch : void 0
|
3830
5716
|
},
|
3831
|
-
...__privateGet$
|
5717
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
3832
5718
|
};
|
3833
5719
|
if (options?.onMessage) {
|
3834
5720
|
fetchSSERequest({
|
@@ -3849,7 +5735,7 @@ _table = new WeakMap();
|
|
3849
5735
|
_getFetchProps = new WeakMap();
|
3850
5736
|
_db = new WeakMap();
|
3851
5737
|
_cache = new WeakMap();
|
3852
|
-
_schemaTables
|
5738
|
+
_schemaTables = new WeakMap();
|
3853
5739
|
_trace = new WeakMap();
|
3854
5740
|
_insertRecordWithoutId = new WeakSet();
|
3855
5741
|
insertRecordWithoutId_fn = async function(object, columns = ["*"]) {
|
@@ -3859,14 +5745,14 @@ insertRecordWithoutId_fn = async function(object, columns = ["*"]) {
|
|
3859
5745
|
workspace: "{workspaceId}",
|
3860
5746
|
dbBranchName: "{dbBranch}",
|
3861
5747
|
region: "{region}",
|
3862
|
-
tableName: __privateGet$
|
5748
|
+
tableName: __privateGet$3(this, _table)
|
3863
5749
|
},
|
3864
5750
|
queryParams: { columns },
|
3865
5751
|
body: record,
|
3866
|
-
...__privateGet$
|
5752
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
3867
5753
|
});
|
3868
|
-
const schemaTables = await __privateMethod$2(this, _getSchemaTables
|
3869
|
-
return initObject(__privateGet$
|
5754
|
+
const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
|
5755
|
+
return initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), response, columns);
|
3870
5756
|
};
|
3871
5757
|
_insertRecordWithId = new WeakSet();
|
3872
5758
|
insertRecordWithId_fn = async function(recordId, object, columns = ["*"], { createOnly, ifVersion }) {
|
@@ -3878,21 +5764,21 @@ insertRecordWithId_fn = async function(recordId, object, columns = ["*"], { crea
|
|
3878
5764
|
workspace: "{workspaceId}",
|
3879
5765
|
dbBranchName: "{dbBranch}",
|
3880
5766
|
region: "{region}",
|
3881
|
-
tableName: __privateGet$
|
5767
|
+
tableName: __privateGet$3(this, _table),
|
3882
5768
|
recordId
|
3883
5769
|
},
|
3884
5770
|
body: record,
|
3885
5771
|
queryParams: { createOnly, columns, ifVersion },
|
3886
|
-
...__privateGet$
|
5772
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
3887
5773
|
});
|
3888
|
-
const schemaTables = await __privateMethod$2(this, _getSchemaTables
|
3889
|
-
return initObject(__privateGet$
|
5774
|
+
const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
|
5775
|
+
return initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), response, columns);
|
3890
5776
|
};
|
3891
5777
|
_insertRecords = new WeakSet();
|
3892
5778
|
insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
|
3893
5779
|
const operations = await promiseMap(objects, async (object) => {
|
3894
5780
|
const record = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
|
3895
|
-
return { insert: { table: __privateGet$
|
5781
|
+
return { insert: { table: __privateGet$3(this, _table), record, createOnly, ifVersion } };
|
3896
5782
|
});
|
3897
5783
|
const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
|
3898
5784
|
const ids = [];
|
@@ -3904,7 +5790,7 @@ insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
|
|
3904
5790
|
region: "{region}"
|
3905
5791
|
},
|
3906
5792
|
body: { operations: operations2 },
|
3907
|
-
...__privateGet$
|
5793
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
3908
5794
|
});
|
3909
5795
|
for (const result of results) {
|
3910
5796
|
if (result.operation === "insert") {
|
@@ -3927,15 +5813,15 @@ updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
|
|
3927
5813
|
workspace: "{workspaceId}",
|
3928
5814
|
dbBranchName: "{dbBranch}",
|
3929
5815
|
region: "{region}",
|
3930
|
-
tableName: __privateGet$
|
5816
|
+
tableName: __privateGet$3(this, _table),
|
3931
5817
|
recordId
|
3932
5818
|
},
|
3933
5819
|
queryParams: { columns, ifVersion },
|
3934
5820
|
body: record,
|
3935
|
-
...__privateGet$
|
5821
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
3936
5822
|
});
|
3937
|
-
const schemaTables = await __privateMethod$2(this, _getSchemaTables
|
3938
|
-
return initObject(__privateGet$
|
5823
|
+
const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
|
5824
|
+
return initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), response, columns);
|
3939
5825
|
} catch (e) {
|
3940
5826
|
if (isObject(e) && e.status === 404) {
|
3941
5827
|
return null;
|
@@ -3947,7 +5833,7 @@ _updateRecords = new WeakSet();
|
|
3947
5833
|
updateRecords_fn = async function(objects, { ifVersion, upsert }) {
|
3948
5834
|
const operations = await promiseMap(objects, async ({ id, ...object }) => {
|
3949
5835
|
const fields = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
|
3950
|
-
return { update: { table: __privateGet$
|
5836
|
+
return { update: { table: __privateGet$3(this, _table), id, ifVersion, upsert, fields } };
|
3951
5837
|
});
|
3952
5838
|
const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
|
3953
5839
|
const ids = [];
|
@@ -3959,7 +5845,7 @@ updateRecords_fn = async function(objects, { ifVersion, upsert }) {
|
|
3959
5845
|
region: "{region}"
|
3960
5846
|
},
|
3961
5847
|
body: { operations: operations2 },
|
3962
|
-
...__privateGet$
|
5848
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
3963
5849
|
});
|
3964
5850
|
for (const result of results) {
|
3965
5851
|
if (result.operation === "update") {
|
@@ -3980,15 +5866,15 @@ upsertRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
|
|
3980
5866
|
workspace: "{workspaceId}",
|
3981
5867
|
dbBranchName: "{dbBranch}",
|
3982
5868
|
region: "{region}",
|
3983
|
-
tableName: __privateGet$
|
5869
|
+
tableName: __privateGet$3(this, _table),
|
3984
5870
|
recordId
|
3985
5871
|
},
|
3986
5872
|
queryParams: { columns, ifVersion },
|
3987
5873
|
body: object,
|
3988
|
-
...__privateGet$
|
5874
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
3989
5875
|
});
|
3990
|
-
const schemaTables = await __privateMethod$2(this, _getSchemaTables
|
3991
|
-
return initObject(__privateGet$
|
5876
|
+
const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
|
5877
|
+
return initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), response, columns);
|
3992
5878
|
};
|
3993
5879
|
_deleteRecord = new WeakSet();
|
3994
5880
|
deleteRecord_fn = async function(recordId, columns = ["*"]) {
|
@@ -4000,14 +5886,14 @@ deleteRecord_fn = async function(recordId, columns = ["*"]) {
|
|
4000
5886
|
workspace: "{workspaceId}",
|
4001
5887
|
dbBranchName: "{dbBranch}",
|
4002
5888
|
region: "{region}",
|
4003
|
-
tableName: __privateGet$
|
5889
|
+
tableName: __privateGet$3(this, _table),
|
4004
5890
|
recordId
|
4005
5891
|
},
|
4006
5892
|
queryParams: { columns },
|
4007
|
-
...__privateGet$
|
5893
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
4008
5894
|
});
|
4009
|
-
const schemaTables = await __privateMethod$2(this, _getSchemaTables
|
4010
|
-
return initObject(__privateGet$
|
5895
|
+
const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
|
5896
|
+
return initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), response, columns);
|
4011
5897
|
} catch (e) {
|
4012
5898
|
if (isObject(e) && e.status === 404) {
|
4013
5899
|
return null;
|
@@ -4018,7 +5904,7 @@ deleteRecord_fn = async function(recordId, columns = ["*"]) {
|
|
4018
5904
|
_deleteRecords = new WeakSet();
|
4019
5905
|
deleteRecords_fn = async function(recordIds) {
|
4020
5906
|
const chunkedOperations = chunk(
|
4021
|
-
compact(recordIds).map((id) => ({ delete: { table: __privateGet$
|
5907
|
+
compact(recordIds).map((id) => ({ delete: { table: __privateGet$3(this, _table), id } })),
|
4022
5908
|
BULK_OPERATION_MAX_SIZE
|
4023
5909
|
);
|
4024
5910
|
for (const operations of chunkedOperations) {
|
@@ -4029,44 +5915,44 @@ deleteRecords_fn = async function(recordIds) {
|
|
4029
5915
|
region: "{region}"
|
4030
5916
|
},
|
4031
5917
|
body: { operations },
|
4032
|
-
...__privateGet$
|
5918
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
4033
5919
|
});
|
4034
5920
|
}
|
4035
5921
|
};
|
4036
5922
|
_setCacheQuery = new WeakSet();
|
4037
5923
|
setCacheQuery_fn = async function(query, meta, records) {
|
4038
|
-
await __privateGet$
|
5924
|
+
await __privateGet$3(this, _cache)?.set(`query_${__privateGet$3(this, _table)}:${query.key()}`, { date: /* @__PURE__ */ new Date(), meta, records });
|
4039
5925
|
};
|
4040
5926
|
_getCacheQuery = new WeakSet();
|
4041
5927
|
getCacheQuery_fn = async function(query) {
|
4042
|
-
const key = `query_${__privateGet$
|
4043
|
-
const result = await __privateGet$
|
5928
|
+
const key = `query_${__privateGet$3(this, _table)}:${query.key()}`;
|
5929
|
+
const result = await __privateGet$3(this, _cache)?.get(key);
|
4044
5930
|
if (!result)
|
4045
5931
|
return null;
|
4046
|
-
const defaultTTL = __privateGet$
|
5932
|
+
const defaultTTL = __privateGet$3(this, _cache)?.defaultQueryTTL ?? -1;
|
4047
5933
|
const { cache: ttl = defaultTTL } = query.getQueryOptions();
|
4048
5934
|
if (ttl < 0)
|
4049
5935
|
return null;
|
4050
5936
|
const hasExpired = result.date.getTime() + ttl < Date.now();
|
4051
5937
|
return hasExpired ? null : result;
|
4052
5938
|
};
|
4053
|
-
_getSchemaTables
|
4054
|
-
getSchemaTables_fn
|
4055
|
-
if (__privateGet$
|
4056
|
-
return __privateGet$
|
5939
|
+
_getSchemaTables = new WeakSet();
|
5940
|
+
getSchemaTables_fn = async function() {
|
5941
|
+
if (__privateGet$3(this, _schemaTables))
|
5942
|
+
return __privateGet$3(this, _schemaTables);
|
4057
5943
|
const { schema } = await getBranchDetails({
|
4058
5944
|
pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
|
4059
|
-
...__privateGet$
|
5945
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
4060
5946
|
});
|
4061
|
-
__privateSet$
|
5947
|
+
__privateSet$2(this, _schemaTables, schema.tables);
|
4062
5948
|
return schema.tables;
|
4063
5949
|
};
|
4064
5950
|
_transformObjectToApi = new WeakSet();
|
4065
5951
|
transformObjectToApi_fn = async function(object) {
|
4066
|
-
const schemaTables = await __privateMethod$2(this, _getSchemaTables
|
4067
|
-
const schema = schemaTables.find((table) => table.name === __privateGet$
|
5952
|
+
const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
|
5953
|
+
const schema = schemaTables.find((table) => table.name === __privateGet$3(this, _table));
|
4068
5954
|
if (!schema)
|
4069
|
-
throw new Error(`Table ${__privateGet$
|
5955
|
+
throw new Error(`Table ${__privateGet$3(this, _table)} not found in schema`);
|
4070
5956
|
const result = {};
|
4071
5957
|
for (const [key, value] of Object.entries(object)) {
|
4072
5958
|
if (key === "xata")
|
@@ -4222,7 +6108,7 @@ var __accessCheck$3 = (obj, member, msg) => {
|
|
4222
6108
|
if (!member.has(obj))
|
4223
6109
|
throw TypeError("Cannot " + msg);
|
4224
6110
|
};
|
4225
|
-
var __privateGet$
|
6111
|
+
var __privateGet$2 = (obj, member, getter) => {
|
4226
6112
|
__accessCheck$3(obj, member, "read from private field");
|
4227
6113
|
return getter ? getter.call(obj) : member.get(obj);
|
4228
6114
|
};
|
@@ -4231,7 +6117,7 @@ var __privateAdd$3 = (obj, member, value) => {
|
|
4231
6117
|
throw TypeError("Cannot add the same private member more than once");
|
4232
6118
|
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
4233
6119
|
};
|
4234
|
-
var __privateSet$
|
6120
|
+
var __privateSet$1 = (obj, member, value, setter) => {
|
4235
6121
|
__accessCheck$3(obj, member, "write to private field");
|
4236
6122
|
setter ? setter.call(obj, value) : member.set(obj, value);
|
4237
6123
|
return value;
|
@@ -4240,29 +6126,29 @@ var _map;
|
|
4240
6126
|
class SimpleCache {
|
4241
6127
|
constructor(options = {}) {
|
4242
6128
|
__privateAdd$3(this, _map, void 0);
|
4243
|
-
__privateSet$
|
6129
|
+
__privateSet$1(this, _map, /* @__PURE__ */ new Map());
|
4244
6130
|
this.capacity = options.max ?? 500;
|
4245
6131
|
this.defaultQueryTTL = options.defaultQueryTTL ?? 60 * 1e3;
|
4246
6132
|
}
|
4247
6133
|
async getAll() {
|
4248
|
-
return Object.fromEntries(__privateGet$
|
6134
|
+
return Object.fromEntries(__privateGet$2(this, _map));
|
4249
6135
|
}
|
4250
6136
|
async get(key) {
|
4251
|
-
return __privateGet$
|
6137
|
+
return __privateGet$2(this, _map).get(key) ?? null;
|
4252
6138
|
}
|
4253
6139
|
async set(key, value) {
|
4254
6140
|
await this.delete(key);
|
4255
|
-
__privateGet$
|
4256
|
-
if (__privateGet$
|
4257
|
-
const leastRecentlyUsed = __privateGet$
|
6141
|
+
__privateGet$2(this, _map).set(key, value);
|
6142
|
+
if (__privateGet$2(this, _map).size > this.capacity) {
|
6143
|
+
const leastRecentlyUsed = __privateGet$2(this, _map).keys().next().value;
|
4258
6144
|
await this.delete(leastRecentlyUsed);
|
4259
6145
|
}
|
4260
6146
|
}
|
4261
6147
|
async delete(key) {
|
4262
|
-
__privateGet$
|
6148
|
+
__privateGet$2(this, _map).delete(key);
|
4263
6149
|
}
|
4264
6150
|
async clear() {
|
4265
|
-
return __privateGet$
|
6151
|
+
return __privateGet$2(this, _map).clear();
|
4266
6152
|
}
|
4267
6153
|
}
|
4268
6154
|
_map = new WeakMap();
|
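The `SimpleCache` above is a thin wrapper over a `Map`: `set` deletes the key first (so re-insertion moves it to the end of the Map's insertion order) and then evicts the Map's first key once `capacity` (default 500 entries) is exceeded, while the `defaultQueryTTL` of 60 seconds is what `getCacheQuery_fn` earlier in the diff falls back to when a query doesn't set its own `cache` TTL (a negative TTL disables cache reads). A standalone sketch of that behaviour:

```ts
import { SimpleCache } from "@xata.io/client";

async function cacheSketch() {
  // Tiny capacity to make the eviction visible; defaultQueryTTL is in milliseconds.
  const cache = new SimpleCache({ max: 2, defaultQueryTTL: 60_000 });

  await cache.set("a", { value: 1 });
  await cache.set("b", { value: 2 });
  await cache.set("c", { value: 3 }); // size exceeds capacity: first key ("a") is dropped

  console.log(await cache.get("a"));              // null – evicted
  console.log(await cache.get("b"));              // { value: 2 }
  console.log(Object.keys(await cache.getAll())); // ["b", "c"]
}
```

Note that `get` does not reorder entries, so eviction follows insertion order rather than true read recency.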
@@ -4299,7 +6185,7 @@ var __accessCheck$2 = (obj, member, msg) => {
|
|
4299
6185
|
if (!member.has(obj))
|
4300
6186
|
throw TypeError("Cannot " + msg);
|
4301
6187
|
};
|
4302
|
-
var __privateGet$
|
6188
|
+
var __privateGet$1 = (obj, member, getter) => {
|
4303
6189
|
__accessCheck$2(obj, member, "read from private field");
|
4304
6190
|
return getter ? getter.call(obj) : member.get(obj);
|
4305
6191
|
};
|
@@ -4308,18 +6194,11 @@ var __privateAdd$2 = (obj, member, value) => {
|
|
4308
6194
|
throw TypeError("Cannot add the same private member more than once");
|
4309
6195
|
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
4310
6196
|
};
|
4311
|
-
var
|
4312
|
-
__accessCheck$2(obj, member, "write to private field");
|
4313
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
4314
|
-
return value;
|
4315
|
-
};
|
4316
|
-
var _tables, _schemaTables$1;
|
6197
|
+
var _tables;
|
4317
6198
|
class SchemaPlugin extends XataPlugin {
|
4318
|
-
constructor(
|
6199
|
+
constructor() {
|
4319
6200
|
super();
|
4320
6201
|
__privateAdd$2(this, _tables, {});
|
4321
|
-
__privateAdd$2(this, _schemaTables$1, void 0);
|
4322
|
-
__privateSet$2(this, _schemaTables$1, schemaTables);
|
4323
6202
|
}
|
4324
6203
|
build(pluginOptions) {
|
4325
6204
|
const db = new Proxy(
|
@@ -4328,22 +6207,21 @@ class SchemaPlugin extends XataPlugin {
|
|
4328
6207
|
get: (_target, table) => {
|
4329
6208
|
if (!isString(table))
|
4330
6209
|
throw new Error("Invalid table name");
|
4331
|
-
if (__privateGet$
|
4332
|
-
__privateGet$
|
6210
|
+
if (__privateGet$1(this, _tables)[table] === void 0) {
|
6211
|
+
__privateGet$1(this, _tables)[table] = new RestRepository({ db, pluginOptions, table, schemaTables: pluginOptions.tables });
|
4333
6212
|
}
|
4334
|
-
return __privateGet$
|
6213
|
+
return __privateGet$1(this, _tables)[table];
|
4335
6214
|
}
|
4336
6215
|
}
|
4337
6216
|
);
|
4338
|
-
const tableNames =
|
6217
|
+
const tableNames = pluginOptions.tables?.map(({ name }) => name) ?? [];
|
4339
6218
|
for (const table of tableNames) {
|
4340
|
-
db[table] = new RestRepository({ db, pluginOptions, table, schemaTables:
|
6219
|
+
db[table] = new RestRepository({ db, pluginOptions, table, schemaTables: pluginOptions.tables });
|
4341
6220
|
}
|
4342
6221
|
return db;
|
4343
6222
|
}
|
4344
6223
|
}
|
4345
6224
|
_tables = new WeakMap();
|
4346
|
-
_schemaTables$1 = new WeakMap();
|
4347
6225
|
|
4348
6226
|
class FilesPlugin extends XataPlugin {
|
4349
6227
|
build(pluginOptions) {
|
@@ -4423,54 +6301,40 @@ var __accessCheck$1 = (obj, member, msg) => {
|
|
4423
6301
|
if (!member.has(obj))
|
4424
6302
|
throw TypeError("Cannot " + msg);
|
4425
6303
|
};
|
4426
|
-
var __privateGet$1 = (obj, member, getter) => {
|
4427
|
-
__accessCheck$1(obj, member, "read from private field");
|
4428
|
-
return getter ? getter.call(obj) : member.get(obj);
|
4429
|
-
};
|
4430
6304
|
var __privateAdd$1 = (obj, member, value) => {
|
4431
6305
|
if (member.has(obj))
|
4432
6306
|
throw TypeError("Cannot add the same private member more than once");
|
4433
6307
|
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
4434
6308
|
};
|
4435
|
-
var __privateSet$1 = (obj, member, value, setter) => {
|
4436
|
-
__accessCheck$1(obj, member, "write to private field");
|
4437
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
4438
|
-
return value;
|
4439
|
-
};
|
4440
6309
|
var __privateMethod$1 = (obj, member, method) => {
|
4441
6310
|
__accessCheck$1(obj, member, "access private method");
|
4442
6311
|
return method;
|
4443
6312
|
};
|
4444
|
-
var
|
6313
|
+
var _search, search_fn;
|
4445
6314
|
class SearchPlugin extends XataPlugin {
|
4446
|
-
constructor(db
|
6315
|
+
constructor(db) {
|
4447
6316
|
super();
|
4448
6317
|
this.db = db;
|
4449
6318
|
__privateAdd$1(this, _search);
|
4450
|
-
__privateAdd$1(this, _getSchemaTables);
|
4451
|
-
__privateAdd$1(this, _schemaTables, void 0);
|
4452
|
-
__privateSet$1(this, _schemaTables, schemaTables);
|
4453
6319
|
}
|
4454
6320
|
build(pluginOptions) {
|
4455
6321
|
return {
|
4456
6322
|
all: async (query, options = {}) => {
|
4457
6323
|
const { records, totalCount } = await __privateMethod$1(this, _search, search_fn).call(this, query, options, pluginOptions);
|
4458
|
-
const schemaTables = await __privateMethod$1(this, _getSchemaTables, getSchemaTables_fn).call(this, pluginOptions);
|
4459
6324
|
return {
|
4460
6325
|
totalCount,
|
4461
6326
|
records: records.map((record) => {
|
4462
6327
|
const { table = "orphan" } = record.xata;
|
4463
|
-
return { table, record: initObject(this.db,
|
6328
|
+
return { table, record: initObject(this.db, pluginOptions.tables, table, record, ["*"]) };
|
4464
6329
|
})
|
4465
6330
|
};
|
4466
6331
|
},
|
4467
6332
|
byTable: async (query, options = {}) => {
|
4468
6333
|
const { records: rawRecords, totalCount } = await __privateMethod$1(this, _search, search_fn).call(this, query, options, pluginOptions);
|
4469
|
-
const schemaTables = await __privateMethod$1(this, _getSchemaTables, getSchemaTables_fn).call(this, pluginOptions);
|
4470
6334
|
const records = rawRecords.reduce((acc, record) => {
|
4471
6335
|
const { table = "orphan" } = record.xata;
|
4472
6336
|
const items = acc[table] ?? [];
|
4473
|
-
const item = initObject(this.db,
|
6337
|
+
const item = initObject(this.db, pluginOptions.tables, table, record, ["*"]);
|
4474
6338
|
return { ...acc, [table]: [...items, item] };
|
4475
6339
|
}, {});
|
4476
6340
|
return { totalCount, records };
|
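At the plugin level, `SearchPlugin.build` above now hydrates results directly from `pluginOptions.tables` instead of lazily fetching the branch schema, and keeps the two entry points `all` (which yields `{ table, record }` pairs) and `byTable` (which groups hydrated records per table). A usage sketch with hypothetical table names:

```ts
declare const xata: any; // client from the earlier sketch

async function crossTableSearchSketch() {
  const { totalCount, records } = await xata.search.all("ada", { fuzziness: 1 });
  for (const { table, record } of records) {
    console.log(table, record.id); // e.g. "users rec_abc123"
  }

  const grouped = await xata.search.byTable("ada", { tables: ["users", "posts"] });
  console.log(grouped.totalCount, grouped.records.users?.length ?? 0);
}
```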
@@ -4478,29 +6342,17 @@ class SearchPlugin extends XataPlugin {
|
|
4478
6342
|
};
|
4479
6343
|
}
|
4480
6344
|
}
|
4481
|
-
_schemaTables = new WeakMap();
|
4482
6345
|
_search = new WeakSet();
|
4483
6346
|
search_fn = async function(query, options, pluginOptions) {
|
4484
6347
|
const { tables, fuzziness, highlight, prefix, page } = options ?? {};
|
4485
6348
|
const { records, totalCount } = await searchBranch({
|
4486
6349
|
pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
|
4487
|
-
// @ts-
|
6350
|
+
// @ts-expect-error Filter properties do not match inferred type
|
4488
6351
|
body: { tables, query, fuzziness, prefix, highlight, page },
|
4489
6352
|
...pluginOptions
|
4490
6353
|
});
|
4491
6354
|
return { records, totalCount };
|
4492
6355
|
};
|
4493
|
-
_getSchemaTables = new WeakSet();
|
4494
|
-
getSchemaTables_fn = async function(pluginOptions) {
|
4495
|
-
if (__privateGet$1(this, _schemaTables))
|
4496
|
-
return __privateGet$1(this, _schemaTables);
|
4497
|
-
const { schema } = await getBranchDetails({
|
4498
|
-
pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
|
4499
|
-
...pluginOptions
|
4500
|
-
});
|
4501
|
-
__privateSet$1(this, _schemaTables, schema.tables);
|
4502
|
-
return schema.tables;
|
4503
|
-
};
|
4504
6356
|
|
4505
6357
|
function escapeElement(elementRepresentation) {
|
4506
6358
|
const escaped = elementRepresentation.replace(/\\/g, "\\\\").replace(/"/g, '\\"');
|
@@ -4554,25 +6406,68 @@ function prepareParams(param1, param2) {
|
|
4554
6406
|
return { statement, params: param2?.map((value) => prepareValue(value)) };
|
4555
6407
|
}
|
4556
6408
|
if (isObject(param1)) {
|
4557
|
-
const { statement, params, consistency } = param1;
|
4558
|
-
return { statement, params: params?.map((value) => prepareValue(value)), consistency };
|
6409
|
+
const { statement, params, consistency, responseType } = param1;
|
6410
|
+
return { statement, params: params?.map((value) => prepareValue(value)), consistency, responseType };
|
4559
6411
|
}
|
4560
6412
|
throw new Error("Invalid query");
|
4561
6413
|
}
|
4562
6414
|
|
4563
6415
|
class SQLPlugin extends XataPlugin {
|
4564
6416
|
build(pluginOptions) {
|
4565
|
-
|
4566
|
-
|
4567
|
-
|
6417
|
+
const sqlFunction = async (query, ...parameters) => {
|
6418
|
+
if (!isParamsObject(query) && (!isTemplateStringsArray(query) || !Array.isArray(parameters))) {
|
6419
|
+
throw new Error("Invalid usage of `xata.sql`. Please use it as a tagged template or with an object.");
|
6420
|
+
}
|
6421
|
+
const { statement, params, consistency, responseType } = prepareParams(query, parameters);
|
6422
|
+
const {
|
6423
|
+
records,
|
6424
|
+
rows,
|
6425
|
+
warning,
|
6426
|
+
columns = []
|
6427
|
+
} = await sqlQuery({
|
4568
6428
|
pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
|
4569
|
-
body: { statement, params, consistency },
|
6429
|
+
body: { statement, params, consistency, responseType },
|
4570
6430
|
...pluginOptions
|
4571
6431
|
});
|
4572
|
-
return { records, warning };
|
6432
|
+
return { records, rows, warning, columns };
|
4573
6433
|
};
|
6434
|
+
sqlFunction.connectionString = buildConnectionString(pluginOptions);
|
6435
|
+
return sqlFunction;
|
6436
|
+
}
|
6437
|
+
}
|
6438
|
+
function isTemplateStringsArray(strings) {
|
6439
|
+
return Array.isArray(strings) && "raw" in strings && Array.isArray(strings.raw);
|
6440
|
+
}
|
6441
|
+
function isParamsObject(params) {
|
6442
|
+
return isObject(params) && "statement" in params;
|
6443
|
+
}
|
6444
|
+
function buildDomain(host, region) {
|
6445
|
+
switch (host) {
|
6446
|
+
case "production":
|
6447
|
+
return `${region}.sql.xata.sh`;
|
6448
|
+
case "staging":
|
6449
|
+
return `${region}.sql.staging-xata.dev`;
|
6450
|
+
case "dev":
|
6451
|
+
return `${region}.sql.dev-xata.dev`;
|
6452
|
+
case "local":
|
6453
|
+
return "localhost:7654";
|
6454
|
+
default:
|
6455
|
+
throw new Error("Invalid host provider");
|
4574
6456
|
}
|
4575
6457
|
}
|
6458
|
+
function buildConnectionString({ apiKey, workspacesApiUrl, branch }) {
|
6459
|
+
const url = isString(workspacesApiUrl) ? workspacesApiUrl : workspacesApiUrl("", {});
|
6460
|
+
const parts = parseWorkspacesUrlParts(url);
|
6461
|
+
if (!parts)
|
6462
|
+
throw new Error("Invalid workspaces URL");
|
6463
|
+
const { workspace: workspaceSlug, region, database, host } = parts;
|
6464
|
+
const domain = buildDomain(host, region);
|
6465
|
+
const workspace = workspaceSlug.split("-").pop();
|
6466
|
+
if (!workspace || !region || !database || !apiKey || !branch) {
|
6467
|
+
throw new Error("Unable to build xata connection string");
|
6468
|
+
}
|
6469
|
+
return `postgresql://${workspace}:${apiKey}@${domain}/${database}:${branch}?sslmode=require`;
|
6470
|
+
}
|
4576
6471
|
|
4577
6472
|
class TransactionPlugin extends XataPlugin {
|
4578
6473
|
build(pluginOptions) {
|
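The rewritten `SQLPlugin` above accepts either a tagged template literal or a `{ statement, params, consistency, responseType }` object, forwards `responseType` to `sqlQuery`, returns `rows` and `columns` alongside the existing `records` and `warning`, and attaches a `connectionString` of the form `postgresql://<workspace>:<apiKey>@<region>.sql.xata.sh/<database>:<branch>?sslmode=require` built by `buildConnectionString`. A hedged usage sketch (the `responseType` value and table name are illustrative):

```ts
declare const xata: any; // client from the earlier sketch

async function sqlSketch(name: string) {
  // Tagged-template form: interpolations become bind parameters, not string concatenation.
  const { records, warning } = await xata.sql`SELECT * FROM "users" WHERE name = ${name}`;
  if (warning) console.warn(warning);
  console.log(records.length);

  // Object form: responseType is forwarded verbatim to the sqlQuery endpoint.
  const { rows, columns } = await xata.sql({
    statement: 'SELECT id, name FROM "users" LIMIT 10',
    responseType: "array"
  });
  console.log(columns.length, rows.length);

  // The same function object carries a Postgres wire connection string.
  console.log(xata.sql.connectionString);
}
```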
@@ -4614,7 +6509,7 @@ var __privateMethod = (obj, member, method) => {
|
|
4614
6509
|
const buildClient = (plugins) => {
|
4615
6510
|
var _options, _parseOptions, parseOptions_fn, _getFetchProps, getFetchProps_fn, _a;
|
4616
6511
|
return _a = class {
|
4617
|
-
constructor(options = {},
|
6512
|
+
constructor(options = {}, tables) {
|
4618
6513
|
__privateAdd(this, _parseOptions);
|
4619
6514
|
__privateAdd(this, _getFetchProps);
|
4620
6515
|
__privateAdd(this, _options, void 0);
|
@@ -4623,13 +6518,16 @@ const buildClient = (plugins) => {
|
|
4623
6518
|
const pluginOptions = {
|
4624
6519
|
...__privateMethod(this, _getFetchProps, getFetchProps_fn).call(this, safeOptions),
|
4625
6520
|
cache: safeOptions.cache,
|
4626
|
-
host: safeOptions.host
|
6521
|
+
host: safeOptions.host,
|
6522
|
+
tables,
|
6523
|
+
branch: safeOptions.branch
|
4627
6524
|
};
|
4628
|
-
const db = new SchemaPlugin(
|
4629
|
-
const search = new SearchPlugin(db
|
6525
|
+
const db = new SchemaPlugin().build(pluginOptions);
|
6526
|
+
const search = new SearchPlugin(db).build(pluginOptions);
|
4630
6527
|
const transactions = new TransactionPlugin().build(pluginOptions);
|
4631
6528
|
const sql = new SQLPlugin().build(pluginOptions);
|
4632
6529
|
const files = new FilesPlugin().build(pluginOptions);
|
6530
|
+
this.schema = { tables };
|
4633
6531
|
this.db = db;
|
4634
6532
|
this.search = search;
|
4635
6533
|
this.transactions = transactions;
|
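Finally, the `buildClient` constructor diffed above now takes the schema `tables` as a second argument, threads them (together with `branch`) into the `pluginOptions` shared by the schema, search, SQL, transactions and files plugins, and exposes them as `this.schema.tables`. A sketch of the generated-client boilerplate under that shape (all names and columns hypothetical):

```ts
import { buildClient } from "@xata.io/client";

// Hypothetical table definitions; code generation normally emits these.
const tables = [
  {
    name: "users",
    columns: [
      { name: "name", type: "string" },
      { name: "email", type: "email" }
    ]
  }
];

const DatabaseClient = buildClient();

const xata = new DatabaseClient(
  {
    databaseURL: "https://my-workspace-1234.us-east-1.xata.sh/db/mydb", // hypothetical
    apiKey: "xau_placeholder",                                          // hypothetical
    branch: "main"
  },
  tables // second constructor argument introduced in this version
);

console.log(xata.schema.tables.map((t: { name: string }) => t.name)); // ["users"]
```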
@@ -4651,7 +6549,7 @@ const buildClient = (plugins) => {
|
|
4651
6549
|
const isBrowser = typeof window !== "undefined" && typeof Deno === "undefined";
|
4652
6550
|
if (isBrowser && !enableBrowser) {
|
4653
6551
|
throw new Error(
|
4654
|
-
"You are trying to use Xata from the browser, which is potentially a non-secure environment.
|
6552
|
+
"You are trying to use Xata from the browser, which is potentially a non-secure environment. How to fix: https://xata.io/docs/messages/api-key-browser-error"
|
4655
6553
|
);
|
4656
6554
|
}
|
4657
6555
|
const fetch = getFetchImplementation(options?.fetch);
|
@@ -4805,5 +6703,5 @@ class XataError extends Error {
|
|
4805
6703
|
}
|
4806
6704
|
}
|
4807
6705
|
|
4808
|
-
export { BaseClient, FetcherError, FilesPlugin, operationsByTag as Operations, PAGINATION_DEFAULT_OFFSET, PAGINATION_DEFAULT_SIZE, PAGINATION_MAX_OFFSET, PAGINATION_MAX_SIZE, Page, Query, RecordArray, RecordColumnTypes, Repository, RestRepository, SQLPlugin, SchemaPlugin, SearchPlugin, Serializer, SimpleCache, TransactionPlugin, XataApiClient, XataApiPlugin, XataError, XataFile, XataPlugin, acceptWorkspaceMemberInvite, addGitBranchesEntry, addTableColumn, aggregateTable, applyBranchSchemaEdit, applyMigration, askTable, askTableSession, branchTransaction, buildClient, buildPreviewBranchName, buildProviderString, bulkInsertTableRecords, cancelWorkspaceMemberInvite, compareBranchSchemas, compareBranchWithUserSchema, compareMigrationRequest, contains, copyBranch, createBranch, createCluster, createDatabase, createMigrationRequest, createTable, createUserAPIKey, createWorkspace, deleteBranch, deleteColumn, deleteDatabase, deleteDatabaseGithubSettings, deleteFile, deleteFileItem, deleteOAuthAccessToken, deleteRecord, deleteTable, deleteUser, deleteUserAPIKey, deleteUserOAuthClient, deleteWorkspace, deserialize, endsWith, equals, executeBranchMigrationPlan, exists, fileAccess, fileUpload, ge, getAPIKey, getAuthorizationCode, getBranch, getBranchDetails, getBranchList, getBranchMetadata, getBranchMigrationHistory, getBranchMigrationPlan, getBranchSchemaHistory, getBranchStats, getCluster, getColumn, getDatabaseGithubSettings, getDatabaseList, getDatabaseMetadata, getDatabaseURL, getFile, getFileItem, getGitBranchesMapping, getHostUrl, getMigrationRequest, getMigrationRequestIsMerged, getPreviewBranch, getRecord, getSchema, getTableColumns, getTableSchema, getUser, getUserAPIKeys, getUserOAuthAccessTokens, getUserOAuthClients, getWorkspace, getWorkspaceMembersList, getWorkspacesList, grantAuthorizationCode, greaterEquals, greaterThan, greaterThanEquals, gt, gte, iContains, iPattern, includes, includesAll, includesAny, includesNone, insertRecord, insertRecordWithID, inviteWorkspaceMember, is, isCursorPaginationOptions, isHostProviderAlias, isHostProviderBuilder, isIdentifiable, isNot, isValidExpandedColumn, isValidSelectableColumns, isXataRecord, le, lessEquals, lessThan, lessThanEquals, listClusters, listMigrationRequestsCommits, listRegions, lt, lte, mergeMigrationRequest, notExists, operationsByTag, parseProviderString, parseWorkspacesUrlParts, pattern,
|
6706
|
+
export { BaseClient, Buffer, FetcherError, FilesPlugin, operationsByTag as Operations, PAGINATION_DEFAULT_OFFSET, PAGINATION_DEFAULT_SIZE, PAGINATION_MAX_OFFSET, PAGINATION_MAX_SIZE, Page, PageRecordArray, Query, RecordArray, RecordColumnTypes, Repository, RestRepository, SQLPlugin, SchemaPlugin, SearchPlugin, Serializer, SimpleCache, TransactionPlugin, XataApiClient, XataApiPlugin, XataError, XataFile, XataPlugin, acceptWorkspaceMemberInvite, adaptAllTables, adaptTable, addGitBranchesEntry, addTableColumn, aggregateTable, applyBranchSchemaEdit, applyMigration, askTable, askTableSession, branchTransaction, buildClient, buildPreviewBranchName, buildProviderString, bulkInsertTableRecords, cancelWorkspaceMemberInvite, compareBranchSchemas, compareBranchWithUserSchema, compareMigrationRequest, contains, copyBranch, createBranch, createCluster, createDatabase, createMigrationRequest, createTable, createUserAPIKey, createWorkspace, deleteBranch, deleteColumn, deleteDatabase, deleteDatabaseGithubSettings, deleteFile, deleteFileItem, deleteOAuthAccessToken, deleteRecord, deleteTable, deleteUser, deleteUserAPIKey, deleteUserOAuthClient, deleteWorkspace, deserialize, endsWith, equals, executeBranchMigrationPlan, exists, fileAccess, fileUpload, ge, getAPIKey, getAuthorizationCode, getBranch, getBranchDetails, getBranchList, getBranchMetadata, getBranchMigrationHistory, getBranchMigrationJobStatus, getBranchMigrationPlan, getBranchSchemaHistory, getBranchStats, getCluster, getColumn, getDatabaseGithubSettings, getDatabaseList, getDatabaseMetadata, getDatabaseSettings, getDatabaseURL, getFile, getFileItem, getGitBranchesMapping, getHostUrl, getMigrationHistory, getMigrationJobStatus, getMigrationRequest, getMigrationRequestIsMerged, getPreviewBranch, getRecord, getSchema, getTableColumns, getTableSchema, getUser, getUserAPIKeys, getUserOAuthAccessTokens, getUserOAuthClients, getWorkspace, getWorkspaceMembersList, getWorkspaceSettings, getWorkspacesList, grantAuthorizationCode, greaterEquals, greaterThan, greaterThanEquals, gt, gte, iContains, iPattern, includes, includesAll, includesAny, includesNone, insertRecord, insertRecordWithID, inviteWorkspaceMember, is, isCursorPaginationOptions, isHostProviderAlias, isHostProviderBuilder, isIdentifiable, isNot, isValidExpandedColumn, isValidSelectableColumns, isXataRecord, le, lessEquals, lessThan, lessThanEquals, listClusters, listMigrationRequestsCommits, listRegions, lt, lte, mergeMigrationRequest, notExists, operationsByTag, parseProviderString, parseWorkspacesUrlParts, pattern, previewBranchSchemaEdit, pushBranchMigrations, putFile, putFileItem, queryMigrationRequests, queryTable, removeGitBranchesEntry, removeWorkspaceMember, renameDatabase, resendWorkspaceMemberInvite, resolveBranch, searchBranch, searchTable, serialize, setTableSchema, sqlQuery, startsWith, summarizeTable, transformImage, updateBranchMetadata, updateBranchSchema, updateCluster, updateColumn, updateDatabaseGithubSettings, updateDatabaseMetadata, updateDatabaseSettings, updateMigrationRequest, updateOAuthAccessToken, updateRecordWithID, updateTable, updateUser, updateWorkspace, updateWorkspaceMemberInvite, updateWorkspaceMemberRole, updateWorkspaceSettings, upsertRecordWithID, vectorSearchTable };
|
4809
6707
|
//# sourceMappingURL=index.mjs.map
|