@xata.io/client 0.0.0-alpha.vf7a5219a6da9afdecee2e8995fcc249036ff88a1 → 0.0.0-alpha.vf7b3447057053443041e94106d7efe270aaea321
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-add-version.log +1 -1
- package/.turbo/turbo-build.log +4 -4
- package/CHANGELOG.md +50 -4
- package/dist/index.cjs +2214 -308
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.ts +1144 -97
- package/dist/index.mjs +2204 -306
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.cjs
CHANGED
@@ -24,6 +24,1814 @@ const TraceAttributes = {
|
|
24
24
|
CLOUDFLARE_RAY_ID: "cf.ray"
|
25
25
|
};
|
26
26
|
|
27
|
+
const lookup = [];
|
28
|
+
const revLookup = [];
|
29
|
+
const code = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
|
30
|
+
for (let i = 0, len = code.length; i < len; ++i) {
|
31
|
+
lookup[i] = code[i];
|
32
|
+
revLookup[code.charCodeAt(i)] = i;
|
33
|
+
}
|
34
|
+
revLookup["-".charCodeAt(0)] = 62;
|
35
|
+
revLookup["_".charCodeAt(0)] = 63;
|
36
|
+
function getLens(b64) {
|
37
|
+
const len = b64.length;
|
38
|
+
if (len % 4 > 0) {
|
39
|
+
throw new Error("Invalid string. Length must be a multiple of 4");
|
40
|
+
}
|
41
|
+
let validLen = b64.indexOf("=");
|
42
|
+
if (validLen === -1)
|
43
|
+
validLen = len;
|
44
|
+
const placeHoldersLen = validLen === len ? 0 : 4 - validLen % 4;
|
45
|
+
return [validLen, placeHoldersLen];
|
46
|
+
}
|
47
|
+
function _byteLength(_b64, validLen, placeHoldersLen) {
|
48
|
+
return (validLen + placeHoldersLen) * 3 / 4 - placeHoldersLen;
|
49
|
+
}
|
50
|
+
function toByteArray(b64) {
|
51
|
+
let tmp;
|
52
|
+
const lens = getLens(b64);
|
53
|
+
const validLen = lens[0];
|
54
|
+
const placeHoldersLen = lens[1];
|
55
|
+
const arr = new Uint8Array(_byteLength(b64, validLen, placeHoldersLen));
|
56
|
+
let curByte = 0;
|
57
|
+
const len = placeHoldersLen > 0 ? validLen - 4 : validLen;
|
58
|
+
let i;
|
59
|
+
for (i = 0; i < len; i += 4) {
|
60
|
+
tmp = revLookup[b64.charCodeAt(i)] << 18 | revLookup[b64.charCodeAt(i + 1)] << 12 | revLookup[b64.charCodeAt(i + 2)] << 6 | revLookup[b64.charCodeAt(i + 3)];
|
61
|
+
arr[curByte++] = tmp >> 16 & 255;
|
62
|
+
arr[curByte++] = tmp >> 8 & 255;
|
63
|
+
arr[curByte++] = tmp & 255;
|
64
|
+
}
|
65
|
+
if (placeHoldersLen === 2) {
|
66
|
+
tmp = revLookup[b64.charCodeAt(i)] << 2 | revLookup[b64.charCodeAt(i + 1)] >> 4;
|
67
|
+
arr[curByte++] = tmp & 255;
|
68
|
+
}
|
69
|
+
if (placeHoldersLen === 1) {
|
70
|
+
tmp = revLookup[b64.charCodeAt(i)] << 10 | revLookup[b64.charCodeAt(i + 1)] << 4 | revLookup[b64.charCodeAt(i + 2)] >> 2;
|
71
|
+
arr[curByte++] = tmp >> 8 & 255;
|
72
|
+
arr[curByte++] = tmp & 255;
|
73
|
+
}
|
74
|
+
return arr;
|
75
|
+
}
|
76
|
+
function tripletToBase64(num) {
|
77
|
+
return lookup[num >> 18 & 63] + lookup[num >> 12 & 63] + lookup[num >> 6 & 63] + lookup[num & 63];
|
78
|
+
}
|
79
|
+
function encodeChunk(uint8, start, end) {
|
80
|
+
let tmp;
|
81
|
+
const output = [];
|
82
|
+
for (let i = start; i < end; i += 3) {
|
83
|
+
tmp = (uint8[i] << 16 & 16711680) + (uint8[i + 1] << 8 & 65280) + (uint8[i + 2] & 255);
|
84
|
+
output.push(tripletToBase64(tmp));
|
85
|
+
}
|
86
|
+
return output.join("");
|
87
|
+
}
|
88
|
+
function fromByteArray(uint8) {
|
89
|
+
let tmp;
|
90
|
+
const len = uint8.length;
|
91
|
+
const extraBytes = len % 3;
|
92
|
+
const parts = [];
|
93
|
+
const maxChunkLength = 16383;
|
94
|
+
for (let i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) {
|
95
|
+
parts.push(encodeChunk(uint8, i, i + maxChunkLength > len2 ? len2 : i + maxChunkLength));
|
96
|
+
}
|
97
|
+
if (extraBytes === 1) {
|
98
|
+
tmp = uint8[len - 1];
|
99
|
+
parts.push(lookup[tmp >> 2] + lookup[tmp << 4 & 63] + "==");
|
100
|
+
} else if (extraBytes === 2) {
|
101
|
+
tmp = (uint8[len - 2] << 8) + uint8[len - 1];
|
102
|
+
parts.push(lookup[tmp >> 10] + lookup[tmp >> 4 & 63] + lookup[tmp << 2 & 63] + "=");
|
103
|
+
}
|
104
|
+
return parts.join("");
|
105
|
+
}
|
106
|
+
|
107
|
+
const K_MAX_LENGTH = 2147483647;
|
108
|
+
const MAX_ARGUMENTS_LENGTH = 4096;
|
109
|
+
class Buffer extends Uint8Array {
|
110
|
+
/**
|
111
|
+
* Constructs a new `Buffer` instance.
|
112
|
+
*
|
113
|
+
* @param value
|
114
|
+
* @param encodingOrOffset
|
115
|
+
* @param length
|
116
|
+
*/
|
117
|
+
constructor(value, encodingOrOffset, length) {
|
118
|
+
if (typeof value === "number") {
|
119
|
+
if (typeof encodingOrOffset === "string") {
|
120
|
+
throw new TypeError("The first argument must be of type string, received type number");
|
121
|
+
}
|
122
|
+
if (value < 0) {
|
123
|
+
throw new RangeError("The buffer size cannot be negative");
|
124
|
+
}
|
125
|
+
super(value < 0 ? 0 : Buffer._checked(value) | 0);
|
126
|
+
} else if (typeof value === "string") {
|
127
|
+
if (typeof encodingOrOffset !== "string") {
|
128
|
+
encodingOrOffset = "utf8";
|
129
|
+
}
|
130
|
+
if (!Buffer.isEncoding(encodingOrOffset)) {
|
131
|
+
throw new TypeError("Unknown encoding: " + encodingOrOffset);
|
132
|
+
}
|
133
|
+
const length2 = Buffer.byteLength(value, encodingOrOffset) | 0;
|
134
|
+
super(length2);
|
135
|
+
const written = this.write(value, 0, this.length, encodingOrOffset);
|
136
|
+
if (written !== length2) {
|
137
|
+
throw new TypeError(
|
138
|
+
"Number of bytes written did not match expected length (wrote " + written + ", expected " + length2 + ")"
|
139
|
+
);
|
140
|
+
}
|
141
|
+
} else if (ArrayBuffer.isView(value)) {
|
142
|
+
if (Buffer._isInstance(value, Uint8Array)) {
|
143
|
+
const copy = new Uint8Array(value);
|
144
|
+
const array = copy.buffer;
|
145
|
+
const byteOffset = copy.byteOffset;
|
146
|
+
const length2 = copy.byteLength;
|
147
|
+
if (byteOffset < 0 || array.byteLength < byteOffset) {
|
148
|
+
throw new RangeError("offset is outside of buffer bounds");
|
149
|
+
}
|
150
|
+
if (array.byteLength < byteOffset + (length2 || 0)) {
|
151
|
+
throw new RangeError("length is outside of buffer bounds");
|
152
|
+
}
|
153
|
+
super(new Uint8Array(array, byteOffset, length2));
|
154
|
+
} else {
|
155
|
+
const array = value;
|
156
|
+
const length2 = array.length < 0 ? 0 : Buffer._checked(array.length) | 0;
|
157
|
+
super(new Uint8Array(length2));
|
158
|
+
for (let i = 0; i < length2; i++) {
|
159
|
+
this[i] = array[i] & 255;
|
160
|
+
}
|
161
|
+
}
|
162
|
+
} else if (value == null) {
|
163
|
+
throw new TypeError(
|
164
|
+
"The first argument must be one of type string, Buffer, ArrayBuffer, Array, or Array-like Object. Received type " + typeof value
|
165
|
+
);
|
166
|
+
} else if (Buffer._isInstance(value, ArrayBuffer) || value && Buffer._isInstance(value.buffer, ArrayBuffer)) {
|
167
|
+
const array = value;
|
168
|
+
const byteOffset = encodingOrOffset;
|
169
|
+
if (byteOffset < 0 || array.byteLength < byteOffset) {
|
170
|
+
throw new RangeError("offset is outside of buffer bounds");
|
171
|
+
}
|
172
|
+
if (array.byteLength < byteOffset + (length || 0)) {
|
173
|
+
throw new RangeError("length is outside of buffer bounds");
|
174
|
+
}
|
175
|
+
super(new Uint8Array(array, byteOffset, length));
|
176
|
+
} else if (Array.isArray(value)) {
|
177
|
+
const array = value;
|
178
|
+
const length2 = array.length < 0 ? 0 : Buffer._checked(array.length) | 0;
|
179
|
+
super(new Uint8Array(length2));
|
180
|
+
for (let i = 0; i < length2; i++) {
|
181
|
+
this[i] = array[i] & 255;
|
182
|
+
}
|
183
|
+
} else {
|
184
|
+
throw new TypeError("Unable to determine the correct way to allocate buffer for type " + typeof value);
|
185
|
+
}
|
186
|
+
}
|
187
|
+
/**
|
188
|
+
* Return JSON representation of the buffer.
|
189
|
+
*/
|
190
|
+
toJSON() {
|
191
|
+
return {
|
192
|
+
type: "Buffer",
|
193
|
+
data: Array.prototype.slice.call(this)
|
194
|
+
};
|
195
|
+
}
|
196
|
+
/**
|
197
|
+
* Writes `string` to the buffer at `offset` according to the character encoding in `encoding`. The `length`
|
198
|
+
* parameter is the number of bytes to write. If the buffer does not contain enough space to fit the entire string,
|
199
|
+
* only part of `string` will be written. However, partially encoded characters will not be written.
|
200
|
+
*
|
201
|
+
* @param string String to write to `buf`.
|
202
|
+
* @param offset Number of bytes to skip before starting to write `string`. Default: `0`.
|
203
|
+
* @param length Maximum number of bytes to write: Default: `buf.length - offset`.
|
204
|
+
* @param encoding The character encoding of `string`. Default: `utf8`.
|
205
|
+
*/
|
206
|
+
write(string, offset, length, encoding) {
|
207
|
+
if (typeof offset === "undefined") {
|
208
|
+
encoding = "utf8";
|
209
|
+
length = this.length;
|
210
|
+
offset = 0;
|
211
|
+
} else if (typeof length === "undefined" && typeof offset === "string") {
|
212
|
+
encoding = offset;
|
213
|
+
length = this.length;
|
214
|
+
offset = 0;
|
215
|
+
} else if (typeof offset === "number" && isFinite(offset)) {
|
216
|
+
offset = offset >>> 0;
|
217
|
+
if (typeof length === "number" && isFinite(length)) {
|
218
|
+
length = length >>> 0;
|
219
|
+
encoding ?? (encoding = "utf8");
|
220
|
+
} else if (typeof length === "string") {
|
221
|
+
encoding = length;
|
222
|
+
length = void 0;
|
223
|
+
}
|
224
|
+
} else {
|
225
|
+
throw new Error("Buffer.write(string, encoding, offset[, length]) is no longer supported");
|
226
|
+
}
|
227
|
+
const remaining = this.length - offset;
|
228
|
+
if (typeof length === "undefined" || length > remaining) {
|
229
|
+
length = remaining;
|
230
|
+
}
|
231
|
+
if (string.length > 0 && (length < 0 || offset < 0) || offset > this.length) {
|
232
|
+
throw new RangeError("Attempt to write outside buffer bounds");
|
233
|
+
}
|
234
|
+
encoding || (encoding = "utf8");
|
235
|
+
switch (Buffer._getEncoding(encoding)) {
|
236
|
+
case "hex":
|
237
|
+
return Buffer._hexWrite(this, string, offset, length);
|
238
|
+
case "utf8":
|
239
|
+
return Buffer._utf8Write(this, string, offset, length);
|
240
|
+
case "ascii":
|
241
|
+
case "latin1":
|
242
|
+
case "binary":
|
243
|
+
return Buffer._asciiWrite(this, string, offset, length);
|
244
|
+
case "ucs2":
|
245
|
+
case "utf16le":
|
246
|
+
return Buffer._ucs2Write(this, string, offset, length);
|
247
|
+
case "base64":
|
248
|
+
return Buffer._base64Write(this, string, offset, length);
|
249
|
+
}
|
250
|
+
}
|
251
|
+
/**
|
252
|
+
* Decodes the buffer to a string according to the specified character encoding.
|
253
|
+
* Passing `start` and `end` will decode only a subset of the buffer.
|
254
|
+
*
|
255
|
+
* Note that if the encoding is `utf8` and a byte sequence in the input is not valid UTF-8, then each invalid byte
|
256
|
+
* will be replaced with `U+FFFD`.
|
257
|
+
*
|
258
|
+
* @param encoding
|
259
|
+
* @param start
|
260
|
+
* @param end
|
261
|
+
*/
|
262
|
+
toString(encoding, start, end) {
|
263
|
+
const length = this.length;
|
264
|
+
if (length === 0) {
|
265
|
+
return "";
|
266
|
+
}
|
267
|
+
if (arguments.length === 0) {
|
268
|
+
return Buffer._utf8Slice(this, 0, length);
|
269
|
+
}
|
270
|
+
if (typeof start === "undefined" || start < 0) {
|
271
|
+
start = 0;
|
272
|
+
}
|
273
|
+
if (start > this.length) {
|
274
|
+
return "";
|
275
|
+
}
|
276
|
+
if (typeof end === "undefined" || end > this.length) {
|
277
|
+
end = this.length;
|
278
|
+
}
|
279
|
+
if (end <= 0) {
|
280
|
+
return "";
|
281
|
+
}
|
282
|
+
end >>>= 0;
|
283
|
+
start >>>= 0;
|
284
|
+
if (end <= start) {
|
285
|
+
return "";
|
286
|
+
}
|
287
|
+
if (!encoding) {
|
288
|
+
encoding = "utf8";
|
289
|
+
}
|
290
|
+
switch (Buffer._getEncoding(encoding)) {
|
291
|
+
case "hex":
|
292
|
+
return Buffer._hexSlice(this, start, end);
|
293
|
+
case "utf8":
|
294
|
+
return Buffer._utf8Slice(this, start, end);
|
295
|
+
case "ascii":
|
296
|
+
return Buffer._asciiSlice(this, start, end);
|
297
|
+
case "latin1":
|
298
|
+
case "binary":
|
299
|
+
return Buffer._latin1Slice(this, start, end);
|
300
|
+
case "ucs2":
|
301
|
+
case "utf16le":
|
302
|
+
return Buffer._utf16leSlice(this, start, end);
|
303
|
+
case "base64":
|
304
|
+
return Buffer._base64Slice(this, start, end);
|
305
|
+
}
|
306
|
+
}
|
307
|
+
/**
|
308
|
+
* Returns true if this buffer's is equal to the provided buffer, meaning they share the same exact data.
|
309
|
+
*
|
310
|
+
* @param otherBuffer
|
311
|
+
*/
|
312
|
+
equals(otherBuffer) {
|
313
|
+
if (!Buffer.isBuffer(otherBuffer)) {
|
314
|
+
throw new TypeError("Argument must be a Buffer");
|
315
|
+
}
|
316
|
+
if (this === otherBuffer) {
|
317
|
+
return true;
|
318
|
+
}
|
319
|
+
return Buffer.compare(this, otherBuffer) === 0;
|
320
|
+
}
|
321
|
+
/**
|
322
|
+
* Compares the buffer with `otherBuffer` and returns a number indicating whether the buffer comes before, after,
|
323
|
+
* or is the same as `otherBuffer` in sort order. Comparison is based on the actual sequence of bytes in each
|
324
|
+
* buffer.
|
325
|
+
*
|
326
|
+
* - `0` is returned if `otherBuffer` is the same as this buffer.
|
327
|
+
* - `1` is returned if `otherBuffer` should come before this buffer when sorted.
|
328
|
+
* - `-1` is returned if `otherBuffer` should come after this buffer when sorted.
|
329
|
+
*
|
330
|
+
* @param otherBuffer The buffer to compare to.
|
331
|
+
* @param targetStart The offset within `otherBuffer` at which to begin comparison.
|
332
|
+
* @param targetEnd The offset within `otherBuffer` at which to end comparison (exclusive).
|
333
|
+
* @param sourceStart The offset within this buffer at which to begin comparison.
|
334
|
+
* @param sourceEnd The offset within this buffer at which to end the comparison (exclusive).
|
335
|
+
*/
|
336
|
+
compare(otherBuffer, targetStart, targetEnd, sourceStart, sourceEnd) {
|
337
|
+
if (Buffer._isInstance(otherBuffer, Uint8Array)) {
|
338
|
+
otherBuffer = Buffer.from(otherBuffer, otherBuffer.byteOffset, otherBuffer.byteLength);
|
339
|
+
}
|
340
|
+
if (!Buffer.isBuffer(otherBuffer)) {
|
341
|
+
throw new TypeError("Argument must be a Buffer or Uint8Array");
|
342
|
+
}
|
343
|
+
targetStart ?? (targetStart = 0);
|
344
|
+
targetEnd ?? (targetEnd = otherBuffer ? otherBuffer.length : 0);
|
345
|
+
sourceStart ?? (sourceStart = 0);
|
346
|
+
sourceEnd ?? (sourceEnd = this.length);
|
347
|
+
if (targetStart < 0 || targetEnd > otherBuffer.length || sourceStart < 0 || sourceEnd > this.length) {
|
348
|
+
throw new RangeError("Out of range index");
|
349
|
+
}
|
350
|
+
if (sourceStart >= sourceEnd && targetStart >= targetEnd) {
|
351
|
+
return 0;
|
352
|
+
}
|
353
|
+
if (sourceStart >= sourceEnd) {
|
354
|
+
return -1;
|
355
|
+
}
|
356
|
+
if (targetStart >= targetEnd) {
|
357
|
+
return 1;
|
358
|
+
}
|
359
|
+
targetStart >>>= 0;
|
360
|
+
targetEnd >>>= 0;
|
361
|
+
sourceStart >>>= 0;
|
362
|
+
sourceEnd >>>= 0;
|
363
|
+
if (this === otherBuffer) {
|
364
|
+
return 0;
|
365
|
+
}
|
366
|
+
let x = sourceEnd - sourceStart;
|
367
|
+
let y = targetEnd - targetStart;
|
368
|
+
const len = Math.min(x, y);
|
369
|
+
const thisCopy = this.slice(sourceStart, sourceEnd);
|
370
|
+
const targetCopy = otherBuffer.slice(targetStart, targetEnd);
|
371
|
+
for (let i = 0; i < len; ++i) {
|
372
|
+
if (thisCopy[i] !== targetCopy[i]) {
|
373
|
+
x = thisCopy[i];
|
374
|
+
y = targetCopy[i];
|
375
|
+
break;
|
376
|
+
}
|
377
|
+
}
|
378
|
+
if (x < y)
|
379
|
+
return -1;
|
380
|
+
if (y < x)
|
381
|
+
return 1;
|
382
|
+
return 0;
|
383
|
+
}
|
384
|
+
/**
|
385
|
+
* Copies data from a region of this buffer to a region in `targetBuffer`, even if the `targetBuffer` memory
|
386
|
+
* region overlaps with this buffer.
|
387
|
+
*
|
388
|
+
* @param targetBuffer The target buffer to copy into.
|
389
|
+
* @param targetStart The offset within `targetBuffer` at which to begin writing.
|
390
|
+
* @param sourceStart The offset within this buffer at which to begin copying.
|
391
|
+
* @param sourceEnd The offset within this buffer at which to end copying (exclusive).
|
392
|
+
*/
|
393
|
+
copy(targetBuffer, targetStart, sourceStart, sourceEnd) {
|
394
|
+
if (!Buffer.isBuffer(targetBuffer))
|
395
|
+
throw new TypeError("argument should be a Buffer");
|
396
|
+
if (!sourceStart)
|
397
|
+
sourceStart = 0;
|
398
|
+
if (!targetStart)
|
399
|
+
targetStart = 0;
|
400
|
+
if (!sourceEnd && sourceEnd !== 0)
|
401
|
+
sourceEnd = this.length;
|
402
|
+
if (targetStart >= targetBuffer.length)
|
403
|
+
targetStart = targetBuffer.length;
|
404
|
+
if (!targetStart)
|
405
|
+
targetStart = 0;
|
406
|
+
if (sourceEnd > 0 && sourceEnd < sourceStart)
|
407
|
+
sourceEnd = sourceStart;
|
408
|
+
if (sourceEnd === sourceStart)
|
409
|
+
return 0;
|
410
|
+
if (targetBuffer.length === 0 || this.length === 0)
|
411
|
+
return 0;
|
412
|
+
if (targetStart < 0) {
|
413
|
+
throw new RangeError("targetStart out of bounds");
|
414
|
+
}
|
415
|
+
if (sourceStart < 0 || sourceStart >= this.length)
|
416
|
+
throw new RangeError("Index out of range");
|
417
|
+
if (sourceEnd < 0)
|
418
|
+
throw new RangeError("sourceEnd out of bounds");
|
419
|
+
if (sourceEnd > this.length)
|
420
|
+
sourceEnd = this.length;
|
421
|
+
if (targetBuffer.length - targetStart < sourceEnd - sourceStart) {
|
422
|
+
sourceEnd = targetBuffer.length - targetStart + sourceStart;
|
423
|
+
}
|
424
|
+
const len = sourceEnd - sourceStart;
|
425
|
+
if (this === targetBuffer && typeof Uint8Array.prototype.copyWithin === "function") {
|
426
|
+
this.copyWithin(targetStart, sourceStart, sourceEnd);
|
427
|
+
} else {
|
428
|
+
Uint8Array.prototype.set.call(targetBuffer, this.subarray(sourceStart, sourceEnd), targetStart);
|
429
|
+
}
|
430
|
+
return len;
|
431
|
+
}
|
432
|
+
/**
|
433
|
+
* Returns a new `Buffer` that references the same memory as the original, but offset and cropped by the `start`
|
434
|
+
* and `end` indices. This is the same behavior as `buf.subarray()`.
|
435
|
+
*
|
436
|
+
* This method is not compatible with the `Uint8Array.prototype.slice()`, which is a superclass of Buffer. To copy
|
437
|
+
* the slice, use `Uint8Array.prototype.slice()`.
|
438
|
+
*
|
439
|
+
* @param start
|
440
|
+
* @param end
|
441
|
+
*/
|
442
|
+
slice(start, end) {
|
443
|
+
if (!start) {
|
444
|
+
start = 0;
|
445
|
+
}
|
446
|
+
const len = this.length;
|
447
|
+
start = ~~start;
|
448
|
+
end = end === void 0 ? len : ~~end;
|
449
|
+
if (start < 0) {
|
450
|
+
start += len;
|
451
|
+
if (start < 0) {
|
452
|
+
start = 0;
|
453
|
+
}
|
454
|
+
} else if (start > len) {
|
455
|
+
start = len;
|
456
|
+
}
|
457
|
+
if (end < 0) {
|
458
|
+
end += len;
|
459
|
+
if (end < 0) {
|
460
|
+
end = 0;
|
461
|
+
}
|
462
|
+
} else if (end > len) {
|
463
|
+
end = len;
|
464
|
+
}
|
465
|
+
if (end < start) {
|
466
|
+
end = start;
|
467
|
+
}
|
468
|
+
const newBuf = this.subarray(start, end);
|
469
|
+
Object.setPrototypeOf(newBuf, Buffer.prototype);
|
470
|
+
return newBuf;
|
471
|
+
}
|
472
|
+
/**
|
473
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits
|
474
|
+
* of accuracy. Behavior is undefined when value is anything other than an unsigned integer.
|
475
|
+
*
|
476
|
+
* @param value Number to write.
|
477
|
+
* @param offset Number of bytes to skip before starting to write.
|
478
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
479
|
+
* @param noAssert
|
480
|
+
* @returns `offset` plus the number of bytes written.
|
481
|
+
*/
|
482
|
+
writeUIntLE(value, offset, byteLength, noAssert) {
|
483
|
+
value = +value;
|
484
|
+
offset = offset >>> 0;
|
485
|
+
byteLength = byteLength >>> 0;
|
486
|
+
if (!noAssert) {
|
487
|
+
const maxBytes = Math.pow(2, 8 * byteLength) - 1;
|
488
|
+
Buffer._checkInt(this, value, offset, byteLength, maxBytes, 0);
|
489
|
+
}
|
490
|
+
let mul = 1;
|
491
|
+
let i = 0;
|
492
|
+
this[offset] = value & 255;
|
493
|
+
while (++i < byteLength && (mul *= 256)) {
|
494
|
+
this[offset + i] = value / mul & 255;
|
495
|
+
}
|
496
|
+
return offset + byteLength;
|
497
|
+
}
|
498
|
+
/**
|
499
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits of
|
500
|
+
* accuracy. Behavior is undefined when `value` is anything other than an unsigned integer.
|
501
|
+
*
|
502
|
+
* @param value Number to write.
|
503
|
+
* @param offset Number of bytes to skip before starting to write.
|
504
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
505
|
+
* @param noAssert
|
506
|
+
* @returns `offset` plus the number of bytes written.
|
507
|
+
*/
|
508
|
+
writeUIntBE(value, offset, byteLength, noAssert) {
|
509
|
+
value = +value;
|
510
|
+
offset = offset >>> 0;
|
511
|
+
byteLength = byteLength >>> 0;
|
512
|
+
if (!noAssert) {
|
513
|
+
const maxBytes = Math.pow(2, 8 * byteLength) - 1;
|
514
|
+
Buffer._checkInt(this, value, offset, byteLength, maxBytes, 0);
|
515
|
+
}
|
516
|
+
let i = byteLength - 1;
|
517
|
+
let mul = 1;
|
518
|
+
this[offset + i] = value & 255;
|
519
|
+
while (--i >= 0 && (mul *= 256)) {
|
520
|
+
this[offset + i] = value / mul & 255;
|
521
|
+
}
|
522
|
+
return offset + byteLength;
|
523
|
+
}
|
524
|
+
/**
|
525
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits
|
526
|
+
* of accuracy. Behavior is undefined when `value` is anything other than a signed integer.
|
527
|
+
*
|
528
|
+
* @param value Number to write.
|
529
|
+
* @param offset Number of bytes to skip before starting to write.
|
530
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
531
|
+
* @param noAssert
|
532
|
+
* @returns `offset` plus the number of bytes written.
|
533
|
+
*/
|
534
|
+
writeIntLE(value, offset, byteLength, noAssert) {
|
535
|
+
value = +value;
|
536
|
+
offset = offset >>> 0;
|
537
|
+
if (!noAssert) {
|
538
|
+
const limit = Math.pow(2, 8 * byteLength - 1);
|
539
|
+
Buffer._checkInt(this, value, offset, byteLength, limit - 1, -limit);
|
540
|
+
}
|
541
|
+
let i = 0;
|
542
|
+
let mul = 1;
|
543
|
+
let sub = 0;
|
544
|
+
this[offset] = value & 255;
|
545
|
+
while (++i < byteLength && (mul *= 256)) {
|
546
|
+
if (value < 0 && sub === 0 && this[offset + i - 1] !== 0) {
|
547
|
+
sub = 1;
|
548
|
+
}
|
549
|
+
this[offset + i] = (value / mul >> 0) - sub & 255;
|
550
|
+
}
|
551
|
+
return offset + byteLength;
|
552
|
+
}
|
553
|
+
/**
|
554
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits
|
555
|
+
* of accuracy. Behavior is undefined when `value` is anything other than a signed integer.
|
556
|
+
*
|
557
|
+
* @param value Number to write.
|
558
|
+
* @param offset Number of bytes to skip before starting to write.
|
559
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
560
|
+
* @param noAssert
|
561
|
+
* @returns `offset` plus the number of bytes written.
|
562
|
+
*/
|
563
|
+
writeIntBE(value, offset, byteLength, noAssert) {
|
564
|
+
value = +value;
|
565
|
+
offset = offset >>> 0;
|
566
|
+
if (!noAssert) {
|
567
|
+
const limit = Math.pow(2, 8 * byteLength - 1);
|
568
|
+
Buffer._checkInt(this, value, offset, byteLength, limit - 1, -limit);
|
569
|
+
}
|
570
|
+
let i = byteLength - 1;
|
571
|
+
let mul = 1;
|
572
|
+
let sub = 0;
|
573
|
+
this[offset + i] = value & 255;
|
574
|
+
while (--i >= 0 && (mul *= 256)) {
|
575
|
+
if (value < 0 && sub === 0 && this[offset + i + 1] !== 0) {
|
576
|
+
sub = 1;
|
577
|
+
}
|
578
|
+
this[offset + i] = (value / mul >> 0) - sub & 255;
|
579
|
+
}
|
580
|
+
return offset + byteLength;
|
581
|
+
}
|
582
|
+
/**
|
583
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an
|
584
|
+
* unsigned, little-endian integer supporting up to 48 bits of accuracy.
|
585
|
+
*
|
586
|
+
* @param offset Number of bytes to skip before starting to read.
|
587
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
588
|
+
* @param noAssert
|
589
|
+
*/
|
590
|
+
readUIntLE(offset, byteLength, noAssert) {
|
591
|
+
offset = offset >>> 0;
|
592
|
+
byteLength = byteLength >>> 0;
|
593
|
+
if (!noAssert) {
|
594
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
595
|
+
}
|
596
|
+
let val = this[offset];
|
597
|
+
let mul = 1;
|
598
|
+
let i = 0;
|
599
|
+
while (++i < byteLength && (mul *= 256)) {
|
600
|
+
val += this[offset + i] * mul;
|
601
|
+
}
|
602
|
+
return val;
|
603
|
+
}
|
604
|
+
/**
|
605
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an
|
606
|
+
* unsigned, big-endian integer supporting up to 48 bits of accuracy.
|
607
|
+
*
|
608
|
+
* @param offset Number of bytes to skip before starting to read.
|
609
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
610
|
+
* @param noAssert
|
611
|
+
*/
|
612
|
+
readUIntBE(offset, byteLength, noAssert) {
|
613
|
+
offset = offset >>> 0;
|
614
|
+
byteLength = byteLength >>> 0;
|
615
|
+
if (!noAssert) {
|
616
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
617
|
+
}
|
618
|
+
let val = this[offset + --byteLength];
|
619
|
+
let mul = 1;
|
620
|
+
while (byteLength > 0 && (mul *= 256)) {
|
621
|
+
val += this[offset + --byteLength] * mul;
|
622
|
+
}
|
623
|
+
return val;
|
624
|
+
}
|
625
|
+
/**
|
626
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a
|
627
|
+
* little-endian, two's complement signed value supporting up to 48 bits of accuracy.
|
628
|
+
*
|
629
|
+
* @param offset Number of bytes to skip before starting to read.
|
630
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
631
|
+
* @param noAssert
|
632
|
+
*/
|
633
|
+
readIntLE(offset, byteLength, noAssert) {
|
634
|
+
offset = offset >>> 0;
|
635
|
+
byteLength = byteLength >>> 0;
|
636
|
+
if (!noAssert) {
|
637
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
638
|
+
}
|
639
|
+
let val = this[offset];
|
640
|
+
let mul = 1;
|
641
|
+
let i = 0;
|
642
|
+
while (++i < byteLength && (mul *= 256)) {
|
643
|
+
val += this[offset + i] * mul;
|
644
|
+
}
|
645
|
+
mul *= 128;
|
646
|
+
if (val >= mul) {
|
647
|
+
val -= Math.pow(2, 8 * byteLength);
|
648
|
+
}
|
649
|
+
return val;
|
650
|
+
}
|
651
|
+
/**
|
652
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a
|
653
|
+
* big-endian, two's complement signed value supporting up to 48 bits of accuracy.
|
654
|
+
*
|
655
|
+
* @param offset Number of bytes to skip before starting to read.
|
656
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
657
|
+
* @param noAssert
|
658
|
+
*/
|
659
|
+
readIntBE(offset, byteLength, noAssert) {
|
660
|
+
offset = offset >>> 0;
|
661
|
+
byteLength = byteLength >>> 0;
|
662
|
+
if (!noAssert) {
|
663
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
664
|
+
}
|
665
|
+
let i = byteLength;
|
666
|
+
let mul = 1;
|
667
|
+
let val = this[offset + --i];
|
668
|
+
while (i > 0 && (mul *= 256)) {
|
669
|
+
val += this[offset + --i] * mul;
|
670
|
+
}
|
671
|
+
mul *= 128;
|
672
|
+
if (val >= mul) {
|
673
|
+
val -= Math.pow(2, 8 * byteLength);
|
674
|
+
}
|
675
|
+
return val;
|
676
|
+
}
|
677
|
+
/**
|
678
|
+
* Reads an unsigned 8-bit integer from `buf` at the specified `offset`.
|
679
|
+
*
|
680
|
+
* @param offset Number of bytes to skip before starting to read.
|
681
|
+
* @param noAssert
|
682
|
+
*/
|
683
|
+
readUInt8(offset, noAssert) {
|
684
|
+
offset = offset >>> 0;
|
685
|
+
if (!noAssert) {
|
686
|
+
Buffer._checkOffset(offset, 1, this.length);
|
687
|
+
}
|
688
|
+
return this[offset];
|
689
|
+
}
|
690
|
+
/**
|
691
|
+
* Reads an unsigned, little-endian 16-bit integer from `buf` at the specified `offset`.
|
692
|
+
*
|
693
|
+
* @param offset Number of bytes to skip before starting to read.
|
694
|
+
* @param noAssert
|
695
|
+
*/
|
696
|
+
readUInt16LE(offset, noAssert) {
|
697
|
+
offset = offset >>> 0;
|
698
|
+
if (!noAssert) {
|
699
|
+
Buffer._checkOffset(offset, 2, this.length);
|
700
|
+
}
|
701
|
+
return this[offset] | this[offset + 1] << 8;
|
702
|
+
}
|
703
|
+
/**
|
704
|
+
* Reads an unsigned, big-endian 16-bit integer from `buf` at the specified `offset`.
|
705
|
+
*
|
706
|
+
* @param offset Number of bytes to skip before starting to read.
|
707
|
+
* @param noAssert
|
708
|
+
*/
|
709
|
+
readUInt16BE(offset, noAssert) {
|
710
|
+
offset = offset >>> 0;
|
711
|
+
if (!noAssert) {
|
712
|
+
Buffer._checkOffset(offset, 2, this.length);
|
713
|
+
}
|
714
|
+
return this[offset] << 8 | this[offset + 1];
|
715
|
+
}
|
716
|
+
/**
|
717
|
+
* Reads an unsigned, little-endian 32-bit integer from `buf` at the specified `offset`.
|
718
|
+
*
|
719
|
+
* @param offset Number of bytes to skip before starting to read.
|
720
|
+
* @param noAssert
|
721
|
+
*/
|
722
|
+
readUInt32LE(offset, noAssert) {
|
723
|
+
offset = offset >>> 0;
|
724
|
+
if (!noAssert) {
|
725
|
+
Buffer._checkOffset(offset, 4, this.length);
|
726
|
+
}
|
727
|
+
return (this[offset] | this[offset + 1] << 8 | this[offset + 2] << 16) + this[offset + 3] * 16777216;
|
728
|
+
}
|
729
|
+
/**
|
730
|
+
* Reads an unsigned, big-endian 32-bit integer from `buf` at the specified `offset`.
|
731
|
+
*
|
732
|
+
* @param offset Number of bytes to skip before starting to read.
|
733
|
+
* @param noAssert
|
734
|
+
*/
|
735
|
+
readUInt32BE(offset, noAssert) {
|
736
|
+
offset = offset >>> 0;
|
737
|
+
if (!noAssert) {
|
738
|
+
Buffer._checkOffset(offset, 4, this.length);
|
739
|
+
}
|
740
|
+
return this[offset] * 16777216 + (this[offset + 1] << 16 | this[offset + 2] << 8 | this[offset + 3]);
|
741
|
+
}
|
742
|
+
/**
|
743
|
+
* Reads a signed 8-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer` are interpreted
|
744
|
+
* as two's complement signed values.
|
745
|
+
*
|
746
|
+
* @param offset Number of bytes to skip before starting to read.
|
747
|
+
* @param noAssert
|
748
|
+
*/
|
749
|
+
readInt8(offset, noAssert) {
|
750
|
+
offset = offset >>> 0;
|
751
|
+
if (!noAssert) {
|
752
|
+
Buffer._checkOffset(offset, 1, this.length);
|
753
|
+
}
|
754
|
+
if (!(this[offset] & 128)) {
|
755
|
+
return this[offset];
|
756
|
+
}
|
757
|
+
return (255 - this[offset] + 1) * -1;
|
758
|
+
}
|
759
|
+
/**
|
760
|
+
* Reads a signed, little-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
761
|
+
* are interpreted as two's complement signed values.
|
762
|
+
*
|
763
|
+
* @param offset Number of bytes to skip before starting to read.
|
764
|
+
* @param noAssert
|
765
|
+
*/
|
766
|
+
readInt16LE(offset, noAssert) {
|
767
|
+
offset = offset >>> 0;
|
768
|
+
if (!noAssert) {
|
769
|
+
Buffer._checkOffset(offset, 2, this.length);
|
770
|
+
}
|
771
|
+
const val = this[offset] | this[offset + 1] << 8;
|
772
|
+
return val & 32768 ? val | 4294901760 : val;
|
773
|
+
}
|
774
|
+
/**
|
775
|
+
* Reads a signed, big-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
776
|
+
* are interpreted as two's complement signed values.
|
777
|
+
*
|
778
|
+
* @param offset Number of bytes to skip before starting to read.
|
779
|
+
* @param noAssert
|
780
|
+
*/
|
781
|
+
readInt16BE(offset, noAssert) {
|
782
|
+
offset = offset >>> 0;
|
783
|
+
if (!noAssert) {
|
784
|
+
Buffer._checkOffset(offset, 2, this.length);
|
785
|
+
}
|
786
|
+
const val = this[offset + 1] | this[offset] << 8;
|
787
|
+
return val & 32768 ? val | 4294901760 : val;
|
788
|
+
}
|
789
|
+
/**
|
790
|
+
* Reads a signed, little-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
791
|
+
* are interpreted as two's complement signed values.
|
792
|
+
*
|
793
|
+
* @param offset Number of bytes to skip before starting to read.
|
794
|
+
* @param noAssert
|
795
|
+
*/
|
796
|
+
readInt32LE(offset, noAssert) {
|
797
|
+
offset = offset >>> 0;
|
798
|
+
if (!noAssert) {
|
799
|
+
Buffer._checkOffset(offset, 4, this.length);
|
800
|
+
}
|
801
|
+
return this[offset] | this[offset + 1] << 8 | this[offset + 2] << 16 | this[offset + 3] << 24;
|
802
|
+
}
|
803
|
+
/**
|
804
|
+
* Reads a signed, big-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
805
|
+
* are interpreted as two's complement signed values.
|
806
|
+
*
|
807
|
+
* @param offset Number of bytes to skip before starting to read.
|
808
|
+
* @param noAssert
|
809
|
+
*/
|
810
|
+
readInt32BE(offset, noAssert) {
|
811
|
+
offset = offset >>> 0;
|
812
|
+
if (!noAssert) {
|
813
|
+
Buffer._checkOffset(offset, 4, this.length);
|
814
|
+
}
|
815
|
+
return this[offset] << 24 | this[offset + 1] << 16 | this[offset + 2] << 8 | this[offset + 3];
|
816
|
+
}
|
817
|
+
/**
|
818
|
+
* Interprets `buf` as an array of unsigned 16-bit integers and swaps the byte order in-place.
|
819
|
+
* Throws a `RangeError` if `buf.length` is not a multiple of 2.
|
820
|
+
*/
|
821
|
+
swap16() {
|
822
|
+
const len = this.length;
|
823
|
+
if (len % 2 !== 0) {
|
824
|
+
throw new RangeError("Buffer size must be a multiple of 16-bits");
|
825
|
+
}
|
826
|
+
for (let i = 0; i < len; i += 2) {
|
827
|
+
this._swap(this, i, i + 1);
|
828
|
+
}
|
829
|
+
return this;
|
830
|
+
}
|
831
|
+
/**
|
832
|
+
* Interprets `buf` as an array of unsigned 32-bit integers and swaps the byte order in-place.
|
833
|
+
* Throws a `RangeError` if `buf.length` is not a multiple of 4.
|
834
|
+
*/
|
835
|
+
swap32() {
|
836
|
+
const len = this.length;
|
837
|
+
if (len % 4 !== 0) {
|
838
|
+
throw new RangeError("Buffer size must be a multiple of 32-bits");
|
839
|
+
}
|
840
|
+
for (let i = 0; i < len; i += 4) {
|
841
|
+
this._swap(this, i, i + 3);
|
842
|
+
this._swap(this, i + 1, i + 2);
|
843
|
+
}
|
844
|
+
return this;
|
845
|
+
}
|
846
|
+
/**
|
847
|
+
* Interprets `buf` as an array of unsigned 64-bit integers and swaps the byte order in-place.
|
848
|
+
* Throws a `RangeError` if `buf.length` is not a multiple of 8.
|
849
|
+
*/
|
850
|
+
swap64() {
|
851
|
+
const len = this.length;
|
852
|
+
if (len % 8 !== 0) {
|
853
|
+
throw new RangeError("Buffer size must be a multiple of 64-bits");
|
854
|
+
}
|
855
|
+
for (let i = 0; i < len; i += 8) {
|
856
|
+
this._swap(this, i, i + 7);
|
857
|
+
this._swap(this, i + 1, i + 6);
|
858
|
+
this._swap(this, i + 2, i + 5);
|
859
|
+
this._swap(this, i + 3, i + 4);
|
860
|
+
}
|
861
|
+
return this;
|
862
|
+
}
|
863
|
+
/**
|
864
|
+
* Swaps two octets.
|
865
|
+
*
|
866
|
+
* @param b
|
867
|
+
* @param n
|
868
|
+
* @param m
|
869
|
+
*/
|
870
|
+
_swap(b, n, m) {
|
871
|
+
const i = b[n];
|
872
|
+
b[n] = b[m];
|
873
|
+
b[m] = i;
|
874
|
+
}
|
875
|
+
/**
|
876
|
+
* Writes `value` to `buf` at the specified `offset`. The `value` must be a valid unsigned 8-bit integer.
|
877
|
+
* Behavior is undefined when `value` is anything other than an unsigned 8-bit integer.
|
878
|
+
*
|
879
|
+
* @param value Number to write.
|
880
|
+
* @param offset Number of bytes to skip before starting to write.
|
881
|
+
* @param noAssert
|
882
|
+
* @returns `offset` plus the number of bytes written.
|
883
|
+
*/
|
884
|
+
writeUInt8(value, offset, noAssert) {
|
885
|
+
value = +value;
|
886
|
+
offset = offset >>> 0;
|
887
|
+
if (!noAssert) {
|
888
|
+
Buffer._checkInt(this, value, offset, 1, 255, 0);
|
889
|
+
}
|
890
|
+
this[offset] = value & 255;
|
891
|
+
return offset + 1;
|
892
|
+
}
|
893
|
+
/**
|
894
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 16-bit
|
895
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 16-bit integer.
|
896
|
+
*
|
897
|
+
* @param value Number to write.
|
898
|
+
* @param offset Number of bytes to skip before starting to write.
|
899
|
+
* @param noAssert
|
900
|
+
* @returns `offset` plus the number of bytes written.
|
901
|
+
*/
|
902
|
+
writeUInt16LE(value, offset, noAssert) {
|
903
|
+
value = +value;
|
904
|
+
offset = offset >>> 0;
|
905
|
+
if (!noAssert) {
|
906
|
+
Buffer._checkInt(this, value, offset, 2, 65535, 0);
|
907
|
+
}
|
908
|
+
this[offset] = value & 255;
|
909
|
+
this[offset + 1] = value >>> 8;
|
910
|
+
return offset + 2;
|
911
|
+
}
|
912
|
+
/**
|
913
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 16-bit
|
914
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 16-bit integer.
|
915
|
+
*
|
916
|
+
* @param value Number to write.
|
917
|
+
* @param offset Number of bytes to skip before starting to write.
|
918
|
+
* @param noAssert
|
919
|
+
* @returns `offset` plus the number of bytes written.
|
920
|
+
*/
|
921
|
+
writeUInt16BE(value, offset, noAssert) {
|
922
|
+
value = +value;
|
923
|
+
offset = offset >>> 0;
|
924
|
+
if (!noAssert) {
|
925
|
+
Buffer._checkInt(this, value, offset, 2, 65535, 0);
|
926
|
+
}
|
927
|
+
this[offset] = value >>> 8;
|
928
|
+
this[offset + 1] = value & 255;
|
929
|
+
return offset + 2;
|
930
|
+
}
|
931
|
+
/**
|
932
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 32-bit
|
933
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 32-bit integer.
|
934
|
+
*
|
935
|
+
* @param value Number to write.
|
936
|
+
* @param offset Number of bytes to skip before starting to write.
|
937
|
+
* @param noAssert
|
938
|
+
* @returns `offset` plus the number of bytes written.
|
939
|
+
*/
|
940
|
+
writeUInt32LE(value, offset, noAssert) {
|
941
|
+
value = +value;
|
942
|
+
offset = offset >>> 0;
|
943
|
+
if (!noAssert) {
|
944
|
+
Buffer._checkInt(this, value, offset, 4, 4294967295, 0);
|
945
|
+
}
|
946
|
+
this[offset + 3] = value >>> 24;
|
947
|
+
this[offset + 2] = value >>> 16;
|
948
|
+
this[offset + 1] = value >>> 8;
|
949
|
+
this[offset] = value & 255;
|
950
|
+
return offset + 4;
|
951
|
+
}
|
952
|
+
/**
|
953
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 32-bit
|
954
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 32-bit integer.
|
955
|
+
*
|
956
|
+
* @param value Number to write.
|
957
|
+
* @param offset Number of bytes to skip before starting to write.
|
958
|
+
* @param noAssert
|
959
|
+
* @returns `offset` plus the number of bytes written.
|
960
|
+
*/
|
961
|
+
writeUInt32BE(value, offset, noAssert) {
|
962
|
+
value = +value;
|
963
|
+
offset = offset >>> 0;
|
964
|
+
if (!noAssert) {
|
965
|
+
Buffer._checkInt(this, value, offset, 4, 4294967295, 0);
|
966
|
+
}
|
967
|
+
this[offset] = value >>> 24;
|
968
|
+
this[offset + 1] = value >>> 16;
|
969
|
+
this[offset + 2] = value >>> 8;
|
970
|
+
this[offset + 3] = value & 255;
|
971
|
+
return offset + 4;
|
972
|
+
}
|
973
|
+
/**
|
974
|
+
* Writes `value` to `buf` at the specified `offset`. The `value` must be a valid signed 8-bit integer.
|
975
|
+
* Behavior is undefined when `value` is anything other than a signed 8-bit integer.
|
976
|
+
*
|
977
|
+
* @param value Number to write.
|
978
|
+
* @param offset Number of bytes to skip before starting to write.
|
979
|
+
* @param noAssert
|
980
|
+
* @returns `offset` plus the number of bytes written.
|
981
|
+
*/
|
982
|
+
writeInt8(value, offset, noAssert) {
|
983
|
+
value = +value;
|
984
|
+
offset = offset >>> 0;
|
985
|
+
if (!noAssert) {
|
986
|
+
Buffer._checkInt(this, value, offset, 1, 127, -128);
|
987
|
+
}
|
988
|
+
if (value < 0) {
|
989
|
+
value = 255 + value + 1;
|
990
|
+
}
|
991
|
+
this[offset] = value & 255;
|
992
|
+
return offset + 1;
|
993
|
+
}
|
994
|
+
/**
|
995
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 16-bit
|
996
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 16-bit integer.
|
997
|
+
*
|
998
|
+
* @param value Number to write.
|
999
|
+
* @param offset Number of bytes to skip before starting to write.
|
1000
|
+
* @param noAssert
|
1001
|
+
* @returns `offset` plus the number of bytes written.
|
1002
|
+
*/
|
1003
|
+
writeInt16LE(value, offset, noAssert) {
|
1004
|
+
value = +value;
|
1005
|
+
offset = offset >>> 0;
|
1006
|
+
if (!noAssert) {
|
1007
|
+
Buffer._checkInt(this, value, offset, 2, 32767, -32768);
|
1008
|
+
}
|
1009
|
+
this[offset] = value & 255;
|
1010
|
+
this[offset + 1] = value >>> 8;
|
1011
|
+
return offset + 2;
|
1012
|
+
}
|
1013
|
+
/**
|
1014
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 16-bit
|
1015
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 16-bit integer.
|
1016
|
+
*
|
1017
|
+
* @param value Number to write.
|
1018
|
+
* @param offset Number of bytes to skip before starting to write.
|
1019
|
+
* @param noAssert
|
1020
|
+
* @returns `offset` plus the number of bytes written.
|
1021
|
+
*/
|
1022
|
+
writeInt16BE(value, offset, noAssert) {
|
1023
|
+
value = +value;
|
1024
|
+
offset = offset >>> 0;
|
1025
|
+
if (!noAssert) {
|
1026
|
+
Buffer._checkInt(this, value, offset, 2, 32767, -32768);
|
1027
|
+
}
|
1028
|
+
this[offset] = value >>> 8;
|
1029
|
+
this[offset + 1] = value & 255;
|
1030
|
+
return offset + 2;
|
1031
|
+
}
|
1032
|
+
/**
|
1033
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 32-bit
|
1034
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 32-bit integer.
|
1035
|
+
*
|
1036
|
+
* @param value Number to write.
|
1037
|
+
* @param offset Number of bytes to skip before starting to write.
|
1038
|
+
* @param noAssert
|
1039
|
+
* @returns `offset` plus the number of bytes written.
|
1040
|
+
*/
|
1041
|
+
writeInt32LE(value, offset, noAssert) {
|
1042
|
+
value = +value;
|
1043
|
+
offset = offset >>> 0;
|
1044
|
+
if (!noAssert) {
|
1045
|
+
Buffer._checkInt(this, value, offset, 4, 2147483647, -2147483648);
|
1046
|
+
}
|
1047
|
+
this[offset] = value & 255;
|
1048
|
+
this[offset + 1] = value >>> 8;
|
1049
|
+
this[offset + 2] = value >>> 16;
|
1050
|
+
this[offset + 3] = value >>> 24;
|
1051
|
+
return offset + 4;
|
1052
|
+
}
|
1053
|
+
/**
|
1054
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 32-bit
|
1055
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 32-bit integer.
|
1056
|
+
*
|
1057
|
+
* @param value Number to write.
|
1058
|
+
* @param offset Number of bytes to skip before starting to write.
|
1059
|
+
* @param noAssert
|
1060
|
+
* @returns `offset` plus the number of bytes written.
|
1061
|
+
*/
|
1062
|
+
writeInt32BE(value, offset, noAssert) {
|
1063
|
+
value = +value;
|
1064
|
+
offset = offset >>> 0;
|
1065
|
+
if (!noAssert) {
|
1066
|
+
Buffer._checkInt(this, value, offset, 4, 2147483647, -2147483648);
|
1067
|
+
}
|
1068
|
+
if (value < 0) {
|
1069
|
+
value = 4294967295 + value + 1;
|
1070
|
+
}
|
1071
|
+
this[offset] = value >>> 24;
|
1072
|
+
this[offset + 1] = value >>> 16;
|
1073
|
+
this[offset + 2] = value >>> 8;
|
1074
|
+
this[offset + 3] = value & 255;
|
1075
|
+
return offset + 4;
|
1076
|
+
}
|
1077
|
+
/**
 * Fills `buf` with the specified `value`. If the `offset` and `end` are not given, the entire `buf` will be
 * filled. The `value` is coerced to a `uint32` value if it is not a string, `Buffer`, or integer. If the resulting
 * integer is greater than `255` (decimal), then `buf` will be filled with `value & 255`.
 *
 * If the final write of a `fill()` operation falls on a multi-byte character, then only the bytes of that
 * character that fit into `buf` are written.
 *
 * If `value` contains invalid characters, it is truncated; if no valid fill data remains, an exception is thrown.
 *
 * @param value Fill source: string, number, boolean, or byte container.
 * @param offset Start index (defaults to 0). May also carry the encoding when `value` is a string.
 * @param end End index, exclusive (defaults to `buf.length`). May also carry the encoding.
 * @param encoding String encoding used when `value` is a string.
 * @returns This buffer, for chaining.
 * @throws {TypeError} For a non-string encoding, an unknown encoding, or an empty fill pattern.
 * @throws {RangeError} When `offset`/`end` fall outside the buffer.
 */
fill(value, offset, end, encoding) {
  if (typeof value === "string") {
    // Support the overloads fill(str, encoding) and fill(str, offset, encoding)
    // by shifting the string argument into `encoding`.
    if (typeof offset === "string") {
      encoding = offset;
      offset = 0;
      end = this.length;
    } else if (typeof end === "string") {
      encoding = end;
      end = this.length;
    }
    if (encoding !== void 0 && typeof encoding !== "string") {
      throw new TypeError("encoding must be a string");
    }
    if (typeof encoding === "string" && !Buffer.isEncoding(encoding)) {
      throw new TypeError("Unknown encoding: " + encoding);
    }
    // Fast path: a single ASCII character can be filled as a plain byte.
    if (value.length === 1) {
      const code = value.charCodeAt(0);
      if (encoding === "utf8" && code < 128) {
        value = code;
      }
    }
  } else if (typeof value === "number") {
    value = value & 255;
  } else if (typeof value === "boolean") {
    value = Number(value);
  }
  // Default the range only after the overload shuffling above.
  offset ?? (offset = 0);
  end ?? (end = this.length);
  if (offset < 0 || this.length < offset || this.length < end) {
    throw new RangeError("Out of range index");
  }
  // Empty range is a no-op.
  if (end <= offset) {
    return this;
  }
  offset = offset >>> 0;
  end = end === void 0 ? this.length : end >>> 0;
  value || (value = 0);
  let i;
  if (typeof value === "number") {
    // Byte fill.
    for (i = offset; i < end; ++i) {
      this[i] = value;
    }
  } else {
    // Pattern fill: repeat the encoded bytes across the range.
    const bytes = Buffer.isBuffer(value) ? value : Buffer.from(value, encoding);
    const len = bytes.length;
    if (len === 0) {
      throw new TypeError('The value "' + value + '" is invalid for argument "value"');
    }
    for (i = 0; i < end - offset; ++i) {
      this[i + offset] = bytes[i % len];
    }
  }
  return this;
}
|
1145
|
+
/**
|
1146
|
+
* Returns the index of the specified value.
|
1147
|
+
*
|
1148
|
+
* If `value` is:
|
1149
|
+
* - a string, `value` is interpreted according to the character encoding in `encoding`.
|
1150
|
+
* - a `Buffer` or `Uint8Array`, `value` will be used in its entirety. To compare a partial Buffer, use `slice()`.
|
1151
|
+
* - a number, `value` will be interpreted as an unsigned 8-bit integer value between `0` and `255`.
|
1152
|
+
*
|
1153
|
+
* Any other types will throw a `TypeError`.
|
1154
|
+
*
|
1155
|
+
* @param value What to search for.
|
1156
|
+
* @param byteOffset Where to begin searching in `buf`. If negative, then calculated from the end.
|
1157
|
+
* @param encoding If `value` is a string, this is the encoding used to search.
|
1158
|
+
* @returns The index of the first occurrence of `value` in `buf`, or `-1` if not found.
|
1159
|
+
*/
|
1160
|
+
indexOf(value, byteOffset, encoding) {
|
1161
|
+
return this._bidirectionalIndexOf(this, value, byteOffset, encoding, true);
|
1162
|
+
}
|
1163
|
+
/**
|
1164
|
+
* Gets the last index of the specified value.
|
1165
|
+
*
|
1166
|
+
* @see indexOf()
|
1167
|
+
* @param value
|
1168
|
+
* @param byteOffset
|
1169
|
+
* @param encoding
|
1170
|
+
*/
|
1171
|
+
lastIndexOf(value, byteOffset, encoding) {
|
1172
|
+
return this._bidirectionalIndexOf(this, value, byteOffset, encoding, false);
|
1173
|
+
}
|
1174
|
+
/**
 * Shared implementation behind `indexOf` (dir = true, forward) and `lastIndexOf` (dir = false, backward).
 *
 * @param buffer The buffer being searched.
 * @param val Search target: string, number, or Buffer/Uint8Array.
 * @param byteOffset Start position; may be a string carrying the encoding instead.
 * @param encoding Encoding used when `val` is a string.
 * @param dir `true` for forward search, `false` for backward search.
 * @returns Index of the match, or -1 when not found.
 * @throws {TypeError} When `val` is not a string, number, or Buffer.
 */
_bidirectionalIndexOf(buffer, val, byteOffset, encoding, dir) {
  if (buffer.length === 0) {
    return -1;
  }
  // Overload: indexOf(val, encoding) — the offset slot holds the encoding.
  if (typeof byteOffset === "string") {
    encoding = byteOffset;
    byteOffset = 0;
  } else if (typeof byteOffset === "undefined") {
    byteOffset = 0;
  } else if (byteOffset > 2147483647) {
    // Clamp to the int32 range before coercion.
    byteOffset = 2147483647;
  } else if (byteOffset < -2147483648) {
    byteOffset = -2147483648;
  }
  byteOffset = +byteOffset;
  // NaN check (NaN !== NaN): fall back to the direction's natural start.
  if (byteOffset !== byteOffset) {
    byteOffset = dir ? 0 : buffer.length - 1;
  }
  // Negative offsets count from the end of the buffer.
  if (byteOffset < 0) {
    byteOffset = buffer.length + byteOffset;
  }
  if (byteOffset >= buffer.length) {
    if (dir) {
      return -1;
    } else {
      byteOffset = buffer.length - 1;
    }
  } else if (byteOffset < 0) {
    if (dir) {
      byteOffset = 0;
    } else {
      return -1;
    }
  }
  // Normalize a string needle into bytes under the requested encoding.
  if (typeof val === "string") {
    val = Buffer.from(val, encoding);
  }
  if (Buffer.isBuffer(val)) {
    // Searching for an empty buffer never matches (Node-compatible).
    if (val.length === 0) {
      return -1;
    }
    return Buffer._arrayIndexOf(buffer, val, byteOffset, encoding, dir);
  } else if (typeof val === "number") {
    val = val & 255;
    // Prefer the native typed-array scan when available.
    if (typeof Uint8Array.prototype.indexOf === "function") {
      if (dir) {
        return Uint8Array.prototype.indexOf.call(buffer, val, byteOffset);
      } else {
        return Uint8Array.prototype.lastIndexOf.call(buffer, val, byteOffset);
      }
    }
    return Buffer._arrayIndexOf(buffer, Buffer.from([val]), byteOffset, encoding, dir);
  }
  throw new TypeError("val must be string, number or Buffer");
}
|
1229
|
+
/**
|
1230
|
+
* Equivalent to `buf.indexOf() !== -1`.
|
1231
|
+
*
|
1232
|
+
* @param value
|
1233
|
+
* @param byteOffset
|
1234
|
+
* @param encoding
|
1235
|
+
*/
|
1236
|
+
includes(value, byteOffset, encoding) {
|
1237
|
+
return this.indexOf(value, byteOffset, encoding) !== -1;
|
1238
|
+
}
|
1239
|
+
/**
|
1240
|
+
* Creates a new buffer from the given parameters.
|
1241
|
+
*
|
1242
|
+
* @param data
|
1243
|
+
* @param encoding
|
1244
|
+
*/
|
1245
|
+
static from(a, b, c) {
|
1246
|
+
return new Buffer(a, b, c);
|
1247
|
+
}
|
1248
|
+
/**
|
1249
|
+
* Returns true if `obj` is a Buffer.
|
1250
|
+
*
|
1251
|
+
* @param obj
|
1252
|
+
*/
|
1253
|
+
static isBuffer(obj) {
|
1254
|
+
return obj != null && obj !== Buffer.prototype && Buffer._isInstance(obj, Buffer);
|
1255
|
+
}
|
1256
|
+
/**
|
1257
|
+
* Returns true if `encoding` is a supported encoding.
|
1258
|
+
*
|
1259
|
+
* @param encoding
|
1260
|
+
*/
|
1261
|
+
static isEncoding(encoding) {
|
1262
|
+
switch (encoding.toLowerCase()) {
|
1263
|
+
case "hex":
|
1264
|
+
case "utf8":
|
1265
|
+
case "ascii":
|
1266
|
+
case "binary":
|
1267
|
+
case "latin1":
|
1268
|
+
case "ucs2":
|
1269
|
+
case "utf16le":
|
1270
|
+
case "base64":
|
1271
|
+
return true;
|
1272
|
+
default:
|
1273
|
+
return false;
|
1274
|
+
}
|
1275
|
+
}
|
1276
|
+
/**
 * Gives the actual byte length of a string for an encoding. This is not the same as `string.length` since that
 * returns the number of characters in the string.
 *
 * @param string The string to test. Buffers and ArrayBuffer(View)s are also accepted.
 * @param encoding The encoding to use for calculation. Defaults is `utf8`.
 * @throws {TypeError} When `string` is not a string, Buffer, or ArrayBuffer(View).
 */
static byteLength(string, encoding) {
  if (Buffer.isBuffer(string)) {
    return string.length;
  }
  // Typed arrays and ArrayBuffers already know their byte length.
  if (typeof string !== "string" && (ArrayBuffer.isView(string) || Buffer._isInstance(string, ArrayBuffer))) {
    return string.byteLength;
  }
  if (typeof string !== "string") {
    throw new TypeError(
      'The "string" argument must be one of type string, Buffer, or ArrayBuffer. Received type ' + typeof string
    );
  }
  const len = string.length;
  // Hidden third argument: when true, an unknown encoding yields -1
  // instead of falling back to utf8.
  const mustMatch = arguments.length > 2 && arguments[2] === true;
  if (!mustMatch && len === 0) {
    return 0;
  }
  switch (encoding?.toLowerCase()) {
    case "ascii":
    case "latin1":
    case "binary":
      // One byte per UTF-16 unit.
      return len;
    case "utf8":
      return Buffer._utf8ToBytes(string).length;
    case "hex":
      // Two hex digits per byte.
      return len >>> 1;
    case "ucs2":
    case "utf16le":
      return len * 2;
    case "base64":
      return Buffer._base64ToBytes(string).length;
    default:
      return mustMatch ? -1 : Buffer._utf8ToBytes(string).length;
  }
}
|
1318
|
+
/**
 * Returns a Buffer which is the result of concatenating all the buffers in the list together.
 *
 * - If the list has no items, or if the `totalLength` is 0, then it returns a zero-length buffer.
 * - If the list has exactly one item, then the first item is returned.
 * - If the list has more than one item, then a new buffer is created.
 *
 * It is faster to provide the `totalLength` if it is known. However, it will be calculated if not provided at
 * a small computational expense.
 *
 * @param list An array of Buffer objects to concatenate.
 * @param totalLength Total length of the buffers when concatenated.
 * @throws {TypeError} When `list` is not an array of Buffers/Uint8Arrays.
 */
static concat(list, totalLength) {
  if (!Array.isArray(list)) {
    throw new TypeError('"list" argument must be an Array of Buffers');
  }
  if (list.length === 0) {
    return Buffer.alloc(0);
  }
  let i;
  // Sum the lengths when the caller did not supply them.
  if (totalLength === void 0) {
    totalLength = 0;
    for (i = 0; i < list.length; ++i) {
      totalLength += list[i].length;
    }
  }
  const buffer = Buffer.allocUnsafe(totalLength);
  let pos = 0;
  for (i = 0; i < list.length; ++i) {
    let buf = list[i];
    if (Buffer._isInstance(buf, Uint8Array)) {
      if (pos + buf.length > buffer.length) {
        // Source would overrun the target; Buffer.copy truncates safely,
        // so ensure we go through it rather than Uint8Array.set (which throws).
        if (!Buffer.isBuffer(buf)) {
          buf = Buffer.from(buf);
        }
        buf.copy(buffer, pos);
      } else {
        Uint8Array.prototype.set.call(buffer, buf, pos);
      }
    } else if (!Buffer.isBuffer(buf)) {
      throw new TypeError('"list" argument must be an Array of Buffers');
    } else {
      buf.copy(buffer, pos);
    }
    pos += buf.length;
  }
  return buffer;
}
|
1367
|
+
/**
 * The same as `buf1.compare(buf2)`.
 *
 * @param buf1 First buffer (or Uint8Array) to compare.
 * @param buf2 Second buffer (or Uint8Array) to compare.
 * @returns -1 if `buf1` sorts before `buf2`, 1 if after, 0 if equal.
 * @throws {TypeError} When either argument is not a Buffer or Uint8Array.
 */
static compare(buf1, buf2) {
  // Promote plain Uint8Arrays to Buffers so both sides share the same API.
  if (Buffer._isInstance(buf1, Uint8Array)) {
    buf1 = Buffer.from(buf1, buf1.byteOffset, buf1.byteLength);
  }
  if (Buffer._isInstance(buf2, Uint8Array)) {
    buf2 = Buffer.from(buf2, buf2.byteOffset, buf2.byteLength);
  }
  if (!Buffer.isBuffer(buf1) || !Buffer.isBuffer(buf2)) {
    throw new TypeError('The "buf1", "buf2" arguments must be one of type Buffer or Uint8Array');
  }
  if (buf1 === buf2) {
    return 0;
  }
  // x/y start as the lengths; if a differing byte is found they are
  // re-purposed to hold that byte pair, so the final comparison covers
  // both "first differing byte" and "shorter buffer wins" cases.
  let x = buf1.length;
  let y = buf2.length;
  for (let i = 0, len = Math.min(x, y); i < len; ++i) {
    if (buf1[i] !== buf2[i]) {
      x = buf1[i];
      y = buf2[i];
      break;
    }
  }
  if (x < y) {
    return -1;
  }
  if (y < x) {
    return 1;
  }
  return 0;
}
|
1400
|
+
/**
|
1401
|
+
* Allocates a new buffer of `size` octets.
|
1402
|
+
*
|
1403
|
+
* @param size The number of octets to allocate.
|
1404
|
+
* @param fill If specified, the buffer will be initialized by calling `buf.fill(fill)`, or with zeroes otherwise.
|
1405
|
+
* @param encoding The encoding used for the call to `buf.fill()` while initializing.
|
1406
|
+
*/
|
1407
|
+
static alloc(size, fill, encoding) {
|
1408
|
+
if (typeof size !== "number") {
|
1409
|
+
throw new TypeError('"size" argument must be of type number');
|
1410
|
+
} else if (size < 0) {
|
1411
|
+
throw new RangeError('The value "' + size + '" is invalid for option "size"');
|
1412
|
+
}
|
1413
|
+
if (size <= 0) {
|
1414
|
+
return new Buffer(size);
|
1415
|
+
}
|
1416
|
+
if (fill !== void 0) {
|
1417
|
+
return typeof encoding === "string" ? new Buffer(size).fill(fill, 0, size, encoding) : new Buffer(size).fill(fill);
|
1418
|
+
}
|
1419
|
+
return new Buffer(size);
|
1420
|
+
}
|
1421
|
+
/**
|
1422
|
+
* Allocates a new buffer of `size` octets without initializing memory. The contents of the buffer are unknown.
|
1423
|
+
*
|
1424
|
+
* @param size
|
1425
|
+
*/
|
1426
|
+
static allocUnsafe(size) {
|
1427
|
+
if (typeof size !== "number") {
|
1428
|
+
throw new TypeError('"size" argument must be of type number');
|
1429
|
+
} else if (size < 0) {
|
1430
|
+
throw new RangeError('The value "' + size + '" is invalid for option "size"');
|
1431
|
+
}
|
1432
|
+
return new Buffer(size < 0 ? 0 : Buffer._checked(size) | 0);
|
1433
|
+
}
|
1434
|
+
/**
|
1435
|
+
* Returns true if the given `obj` is an instance of `type`.
|
1436
|
+
*
|
1437
|
+
* @param obj
|
1438
|
+
* @param type
|
1439
|
+
*/
|
1440
|
+
static _isInstance(obj, type) {
|
1441
|
+
return obj instanceof type || obj != null && obj.constructor != null && obj.constructor.name != null && obj.constructor.name === type.name;
|
1442
|
+
}
|
1443
|
+
static _checked(length) {
|
1444
|
+
if (length >= K_MAX_LENGTH) {
|
1445
|
+
throw new RangeError(
|
1446
|
+
"Attempt to allocate Buffer larger than maximum size: 0x" + K_MAX_LENGTH.toString(16) + " bytes"
|
1447
|
+
);
|
1448
|
+
}
|
1449
|
+
return length | 0;
|
1450
|
+
}
|
1451
|
+
static _blitBuffer(src, dst, offset, length) {
|
1452
|
+
let i;
|
1453
|
+
for (i = 0; i < length; ++i) {
|
1454
|
+
if (i + offset >= dst.length || i >= src.length) {
|
1455
|
+
break;
|
1456
|
+
}
|
1457
|
+
dst[i + offset] = src[i];
|
1458
|
+
}
|
1459
|
+
return i;
|
1460
|
+
}
|
1461
|
+
static _utf8Write(buf, string, offset, length) {
|
1462
|
+
return Buffer._blitBuffer(Buffer._utf8ToBytes(string, buf.length - offset), buf, offset, length);
|
1463
|
+
}
|
1464
|
+
static _asciiWrite(buf, string, offset, length) {
|
1465
|
+
return Buffer._blitBuffer(Buffer._asciiToBytes(string), buf, offset, length);
|
1466
|
+
}
|
1467
|
+
static _base64Write(buf, string, offset, length) {
|
1468
|
+
return Buffer._blitBuffer(Buffer._base64ToBytes(string), buf, offset, length);
|
1469
|
+
}
|
1470
|
+
static _ucs2Write(buf, string, offset, length) {
|
1471
|
+
return Buffer._blitBuffer(Buffer._utf16leToBytes(string, buf.length - offset), buf, offset, length);
|
1472
|
+
}
|
1473
|
+
/**
 * Parses a hex `string` into bytes written into `buf` starting at `offset`.
 * Stops at the first non-hex byte pair.
 *
 * @param buf Destination buffer.
 * @param string Hex-encoded input (two digits per byte).
 * @param offset Destination start index.
 * @param length Maximum number of bytes to write; clamped to the available room.
 * @returns The number of bytes written.
 */
static _hexWrite(buf, string, offset, length) {
  offset = Number(offset) || 0;
  const remaining = buf.length - offset;
  // Clamp length to the room left in the buffer.
  if (!length) {
    length = remaining;
  } else {
    length = Number(length);
    if (length > remaining) {
      length = remaining;
    }
  }
  // A hex string of N characters encodes at most N/2 bytes.
  const strLen = string.length;
  if (length > strLen / 2) {
    length = strLen / 2;
  }
  let i;
  for (i = 0; i < length; ++i) {
    const parsed = parseInt(string.substr(i * 2, 2), 16);
    // NaN check (NaN !== NaN): invalid hex pair stops the write.
    if (parsed !== parsed) {
      return i;
    }
    buf[offset + i] = parsed;
  }
  return i;
}
|
1498
|
+
/**
 * Encodes a JS string into an array of UTF-8 bytes, emitting at most `units`
 * bytes. Lone/invalid surrogates are replaced by U+FFFD (bytes 239, 191, 189).
 *
 * @param string Input string (sequence of UTF-16 code units).
 * @param units Maximum number of bytes to emit; defaults to unlimited.
 * @returns Array of UTF-8 byte values.
 * @throws {Error} On a code point >= 0x110000 (cannot occur from charCodeAt, defensive).
 */
static _utf8ToBytes(string, units) {
  units = units || Infinity;
  const length = string.length;
  const bytes = [];
  let codePoint;
  // Pending high surrogate waiting for its low surrogate partner.
  let leadSurrogate = null;
  for (let i = 0; i < length; ++i) {
    codePoint = string.charCodeAt(i);
    // Surrogate range U+D800..U+DFFF needs pairing logic.
    if (codePoint > 55295 && codePoint < 57344) {
      if (!leadSurrogate) {
        if (codePoint > 56319) {
          // Low surrogate with no preceding high surrogate: emit U+FFFD.
          if ((units -= 3) > -1) {
            bytes.push(239, 191, 189);
          }
          continue;
        } else if (i + 1 === length) {
          // High surrogate at end of string: emit U+FFFD.
          if ((units -= 3) > -1) {
            bytes.push(239, 191, 189);
          }
          continue;
        }
        // Remember the high surrogate and look at the next unit.
        leadSurrogate = codePoint;
        continue;
      }
      if (codePoint < 56320) {
        // Two high surrogates in a row: replace the first, keep the second pending.
        if ((units -= 3) > -1) {
          bytes.push(239, 191, 189);
        }
        leadSurrogate = codePoint;
        continue;
      }
      // Valid pair: combine into a supplementary-plane code point.
      codePoint = (leadSurrogate - 55296 << 10 | codePoint - 56320) + 65536;
    } else if (leadSurrogate) {
      // Pending high surrogate followed by a non-surrogate: emit U+FFFD first.
      if ((units -= 3) > -1) {
        bytes.push(239, 191, 189);
      }
    }
    leadSurrogate = null;
    // Emit 1-4 bytes depending on the code point's magnitude.
    if (codePoint < 128) {
      if ((units -= 1) < 0) {
        break;
      }
      bytes.push(codePoint);
    } else if (codePoint < 2048) {
      if ((units -= 2) < 0) {
        break;
      }
      bytes.push(codePoint >> 6 | 192, codePoint & 63 | 128);
    } else if (codePoint < 65536) {
      if ((units -= 3) < 0) {
        break;
      }
      bytes.push(codePoint >> 12 | 224, codePoint >> 6 & 63 | 128, codePoint & 63 | 128);
    } else if (codePoint < 1114112) {
      if ((units -= 4) < 0) {
        break;
      }
      bytes.push(
        codePoint >> 18 | 240,
        codePoint >> 12 & 63 | 128,
        codePoint >> 6 & 63 | 128,
        codePoint & 63 | 128
      );
    } else {
      throw new Error("Invalid code point");
    }
  }
  return bytes;
}
|
1567
|
+
static _base64ToBytes(str) {
|
1568
|
+
return toByteArray(base64clean(str));
|
1569
|
+
}
|
1570
|
+
static _asciiToBytes(str) {
|
1571
|
+
const byteArray = [];
|
1572
|
+
for (let i = 0; i < str.length; ++i) {
|
1573
|
+
byteArray.push(str.charCodeAt(i) & 255);
|
1574
|
+
}
|
1575
|
+
return byteArray;
|
1576
|
+
}
|
1577
|
+
static _utf16leToBytes(str, units) {
|
1578
|
+
let c, hi, lo;
|
1579
|
+
const byteArray = [];
|
1580
|
+
for (let i = 0; i < str.length; ++i) {
|
1581
|
+
if ((units -= 2) < 0)
|
1582
|
+
break;
|
1583
|
+
c = str.charCodeAt(i);
|
1584
|
+
hi = c >> 8;
|
1585
|
+
lo = c % 256;
|
1586
|
+
byteArray.push(lo);
|
1587
|
+
byteArray.push(hi);
|
1588
|
+
}
|
1589
|
+
return byteArray;
|
1590
|
+
}
|
1591
|
+
static _hexSlice(buf, start, end) {
|
1592
|
+
const len = buf.length;
|
1593
|
+
if (!start || start < 0) {
|
1594
|
+
start = 0;
|
1595
|
+
}
|
1596
|
+
if (!end || end < 0 || end > len) {
|
1597
|
+
end = len;
|
1598
|
+
}
|
1599
|
+
let out = "";
|
1600
|
+
for (let i = start; i < end; ++i) {
|
1601
|
+
out += hexSliceLookupTable[buf[i]];
|
1602
|
+
}
|
1603
|
+
return out;
|
1604
|
+
}
|
1605
|
+
static _base64Slice(buf, start, end) {
|
1606
|
+
if (start === 0 && end === buf.length) {
|
1607
|
+
return fromByteArray(buf);
|
1608
|
+
} else {
|
1609
|
+
return fromByteArray(buf.slice(start, end));
|
1610
|
+
}
|
1611
|
+
}
|
1612
|
+
/**
 * Decodes a byte range of `buf` as UTF-8 into UTF-16 code points.
 * Malformed sequences decode to U+FFFD (65533), advancing one byte.
 *
 * @param buf Source bytes.
 * @param start Start index.
 * @param end End index (exclusive); clamped to `buf.length`.
 * @returns The decoded string (via _decodeCodePointsArray).
 */
static _utf8Slice(buf, start, end) {
  end = Math.min(buf.length, end);
  const res = [];
  let i = start;
  while (i < end) {
    const firstByte = buf[i];
    let codePoint = null;
    // Expected sequence length from the lead byte: F0.. => 4, E0.. => 3, C0.. => 2, else 1.
    let bytesPerSequence = firstByte > 239 ? 4 : firstByte > 223 ? 3 : firstByte > 191 ? 2 : 1;
    // Only attempt a multi-byte decode when the whole sequence fits in range.
    if (i + bytesPerSequence <= end) {
      let secondByte, thirdByte, fourthByte, tempCodePoint;
      switch (bytesPerSequence) {
        case 1:
          if (firstByte < 128) {
            codePoint = firstByte;
          }
          break;
        case 2:
          secondByte = buf[i + 1];
          // Continuation bytes must match 10xxxxxx.
          if ((secondByte & 192) === 128) {
            tempCodePoint = (firstByte & 31) << 6 | secondByte & 63;
            // Reject overlong encodings (< 0x80).
            if (tempCodePoint > 127) {
              codePoint = tempCodePoint;
            }
          }
          break;
        case 3:
          secondByte = buf[i + 1];
          thirdByte = buf[i + 2];
          if ((secondByte & 192) === 128 && (thirdByte & 192) === 128) {
            tempCodePoint = (firstByte & 15) << 12 | (secondByte & 63) << 6 | thirdByte & 63;
            // Reject overlong encodings and the surrogate range U+D800..U+DFFF.
            if (tempCodePoint > 2047 && (tempCodePoint < 55296 || tempCodePoint > 57343)) {
              codePoint = tempCodePoint;
            }
          }
          break;
        case 4:
          secondByte = buf[i + 1];
          thirdByte = buf[i + 2];
          fourthByte = buf[i + 3];
          if ((secondByte & 192) === 128 && (thirdByte & 192) === 128 && (fourthByte & 192) === 128) {
            tempCodePoint = (firstByte & 15) << 18 | (secondByte & 63) << 12 | (thirdByte & 63) << 6 | fourthByte & 63;
            // Reject overlong encodings and values beyond U+10FFFF.
            if (tempCodePoint > 65535 && tempCodePoint < 1114112) {
              codePoint = tempCodePoint;
            }
          }
      }
    }
    if (codePoint === null) {
      // Invalid/truncated sequence: emit U+FFFD and resync on the next byte.
      codePoint = 65533;
      bytesPerSequence = 1;
    } else if (codePoint > 65535) {
      // Supplementary plane: split into a UTF-16 surrogate pair.
      codePoint -= 65536;
      res.push(codePoint >>> 10 & 1023 | 55296);
      codePoint = 56320 | codePoint & 1023;
    }
    res.push(codePoint);
    i += bytesPerSequence;
  }
  return Buffer._decodeCodePointsArray(res);
}
|
1672
|
+
static _decodeCodePointsArray(codePoints) {
|
1673
|
+
const len = codePoints.length;
|
1674
|
+
if (len <= MAX_ARGUMENTS_LENGTH) {
|
1675
|
+
return String.fromCharCode.apply(String, codePoints);
|
1676
|
+
}
|
1677
|
+
let res = "";
|
1678
|
+
let i = 0;
|
1679
|
+
while (i < len) {
|
1680
|
+
res += String.fromCharCode.apply(String, codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH));
|
1681
|
+
}
|
1682
|
+
return res;
|
1683
|
+
}
|
1684
|
+
static _asciiSlice(buf, start, end) {
|
1685
|
+
let ret = "";
|
1686
|
+
end = Math.min(buf.length, end);
|
1687
|
+
for (let i = start; i < end; ++i) {
|
1688
|
+
ret += String.fromCharCode(buf[i] & 127);
|
1689
|
+
}
|
1690
|
+
return ret;
|
1691
|
+
}
|
1692
|
+
static _latin1Slice(buf, start, end) {
|
1693
|
+
let ret = "";
|
1694
|
+
end = Math.min(buf.length, end);
|
1695
|
+
for (let i = start; i < end; ++i) {
|
1696
|
+
ret += String.fromCharCode(buf[i]);
|
1697
|
+
}
|
1698
|
+
return ret;
|
1699
|
+
}
|
1700
|
+
static _utf16leSlice(buf, start, end) {
|
1701
|
+
const bytes = buf.slice(start, end);
|
1702
|
+
let res = "";
|
1703
|
+
for (let i = 0; i < bytes.length - 1; i += 2) {
|
1704
|
+
res += String.fromCharCode(bytes[i] + bytes[i + 1] * 256);
|
1705
|
+
}
|
1706
|
+
return res;
|
1707
|
+
}
|
1708
|
+
static _arrayIndexOf(arr, val, byteOffset, encoding, dir) {
|
1709
|
+
let indexSize = 1;
|
1710
|
+
let arrLength = arr.length;
|
1711
|
+
let valLength = val.length;
|
1712
|
+
if (encoding !== void 0) {
|
1713
|
+
encoding = Buffer._getEncoding(encoding);
|
1714
|
+
if (encoding === "ucs2" || encoding === "utf16le") {
|
1715
|
+
if (arr.length < 2 || val.length < 2) {
|
1716
|
+
return -1;
|
1717
|
+
}
|
1718
|
+
indexSize = 2;
|
1719
|
+
arrLength /= 2;
|
1720
|
+
valLength /= 2;
|
1721
|
+
byteOffset /= 2;
|
1722
|
+
}
|
1723
|
+
}
|
1724
|
+
function read(buf, i2) {
|
1725
|
+
if (indexSize === 1) {
|
1726
|
+
return buf[i2];
|
1727
|
+
} else {
|
1728
|
+
return buf.readUInt16BE(i2 * indexSize);
|
1729
|
+
}
|
1730
|
+
}
|
1731
|
+
let i;
|
1732
|
+
if (dir) {
|
1733
|
+
let foundIndex = -1;
|
1734
|
+
for (i = byteOffset; i < arrLength; i++) {
|
1735
|
+
if (read(arr, i) === read(val, foundIndex === -1 ? 0 : i - foundIndex)) {
|
1736
|
+
if (foundIndex === -1)
|
1737
|
+
foundIndex = i;
|
1738
|
+
if (i - foundIndex + 1 === valLength)
|
1739
|
+
return foundIndex * indexSize;
|
1740
|
+
} else {
|
1741
|
+
if (foundIndex !== -1)
|
1742
|
+
i -= i - foundIndex;
|
1743
|
+
foundIndex = -1;
|
1744
|
+
}
|
1745
|
+
}
|
1746
|
+
} else {
|
1747
|
+
if (byteOffset + valLength > arrLength) {
|
1748
|
+
byteOffset = arrLength - valLength;
|
1749
|
+
}
|
1750
|
+
for (i = byteOffset; i >= 0; i--) {
|
1751
|
+
let found = true;
|
1752
|
+
for (let j = 0; j < valLength; j++) {
|
1753
|
+
if (read(arr, i + j) !== read(val, j)) {
|
1754
|
+
found = false;
|
1755
|
+
break;
|
1756
|
+
}
|
1757
|
+
}
|
1758
|
+
if (found) {
|
1759
|
+
return i;
|
1760
|
+
}
|
1761
|
+
}
|
1762
|
+
}
|
1763
|
+
return -1;
|
1764
|
+
}
|
1765
|
+
static _checkOffset(offset, ext, length) {
|
1766
|
+
if (offset % 1 !== 0 || offset < 0)
|
1767
|
+
throw new RangeError("offset is not uint");
|
1768
|
+
if (offset + ext > length)
|
1769
|
+
throw new RangeError("Trying to access beyond buffer length");
|
1770
|
+
}
|
1771
|
+
static _checkInt(buf, value, offset, ext, max, min) {
|
1772
|
+
if (!Buffer.isBuffer(buf))
|
1773
|
+
throw new TypeError('"buffer" argument must be a Buffer instance');
|
1774
|
+
if (value > max || value < min)
|
1775
|
+
throw new RangeError('"value" argument is out of bounds');
|
1776
|
+
if (offset + ext > buf.length)
|
1777
|
+
throw new RangeError("Index out of range");
|
1778
|
+
}
|
1779
|
+
static _getEncoding(encoding) {
|
1780
|
+
let toLowerCase = false;
|
1781
|
+
let originalEncoding = "";
|
1782
|
+
for (; ; ) {
|
1783
|
+
switch (encoding) {
|
1784
|
+
case "hex":
|
1785
|
+
return "hex";
|
1786
|
+
case "utf8":
|
1787
|
+
return "utf8";
|
1788
|
+
case "ascii":
|
1789
|
+
return "ascii";
|
1790
|
+
case "binary":
|
1791
|
+
return "binary";
|
1792
|
+
case "latin1":
|
1793
|
+
return "latin1";
|
1794
|
+
case "ucs2":
|
1795
|
+
return "ucs2";
|
1796
|
+
case "utf16le":
|
1797
|
+
return "utf16le";
|
1798
|
+
case "base64":
|
1799
|
+
return "base64";
|
1800
|
+
default: {
|
1801
|
+
if (toLowerCase) {
|
1802
|
+
throw new TypeError("Unknown or unsupported encoding: " + originalEncoding);
|
1803
|
+
}
|
1804
|
+
toLowerCase = true;
|
1805
|
+
originalEncoding = encoding;
|
1806
|
+
encoding = encoding.toLowerCase();
|
1807
|
+
}
|
1808
|
+
}
|
1809
|
+
}
|
1810
|
+
}
|
1811
|
+
}
|
1812
|
+
const hexSliceLookupTable = function() {
|
1813
|
+
const alphabet = "0123456789abcdef";
|
1814
|
+
const table = new Array(256);
|
1815
|
+
for (let i = 0; i < 16; ++i) {
|
1816
|
+
const i16 = i * 16;
|
1817
|
+
for (let j = 0; j < 16; ++j) {
|
1818
|
+
table[i16 + j] = alphabet[i] + alphabet[j];
|
1819
|
+
}
|
1820
|
+
}
|
1821
|
+
return table;
|
1822
|
+
}();
|
1823
|
+
const INVALID_BASE64_RE = /[^+/0-9A-Za-z-_]/g;
|
1824
|
+
function base64clean(str) {
|
1825
|
+
str = str.split("=")[0];
|
1826
|
+
str = str.trim().replace(INVALID_BASE64_RE, "");
|
1827
|
+
if (str.length < 2)
|
1828
|
+
return "";
|
1829
|
+
while (str.length % 4 !== 0) {
|
1830
|
+
str = str + "=";
|
1831
|
+
}
|
1832
|
+
return str;
|
1833
|
+
}
|
1834
|
+
|
27
1835
|
function notEmpty(value) {
|
28
1836
|
return value !== null && value !== void 0;
|
29
1837
|
}
|
@@ -248,7 +2056,7 @@ var __accessCheck$8 = (obj, member, msg) => {
|
|
248
2056
|
if (!member.has(obj))
|
249
2057
|
throw TypeError("Cannot " + msg);
|
250
2058
|
};
|
251
|
-
var __privateGet$
|
2059
|
+
var __privateGet$7 = (obj, member, getter) => {
|
252
2060
|
__accessCheck$8(obj, member, "read from private field");
|
253
2061
|
return getter ? getter.call(obj) : member.get(obj);
|
254
2062
|
};
|
@@ -257,7 +2065,7 @@ var __privateAdd$8 = (obj, member, value) => {
|
|
257
2065
|
throw TypeError("Cannot add the same private member more than once");
|
258
2066
|
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
259
2067
|
};
|
260
|
-
var __privateSet$
|
2068
|
+
var __privateSet$6 = (obj, member, value, setter) => {
|
261
2069
|
__accessCheck$8(obj, member, "write to private field");
|
262
2070
|
setter ? setter.call(obj, value) : member.set(obj, value);
|
263
2071
|
return value;
|
@@ -283,19 +2091,19 @@ class ApiRequestPool {
|
|
283
2091
|
__privateAdd$8(this, _fetch, void 0);
|
284
2092
|
__privateAdd$8(this, _queue, void 0);
|
285
2093
|
__privateAdd$8(this, _concurrency, void 0);
|
286
|
-
__privateSet$
|
287
|
-
__privateSet$
|
2094
|
+
__privateSet$6(this, _queue, []);
|
2095
|
+
__privateSet$6(this, _concurrency, concurrency);
|
288
2096
|
this.running = 0;
|
289
2097
|
this.started = 0;
|
290
2098
|
}
|
291
2099
|
setFetch(fetch2) {
|
292
|
-
__privateSet$
|
2100
|
+
__privateSet$6(this, _fetch, fetch2);
|
293
2101
|
}
|
294
2102
|
getFetch() {
|
295
|
-
if (!__privateGet$
|
2103
|
+
if (!__privateGet$7(this, _fetch)) {
|
296
2104
|
throw new Error("Fetch not set");
|
297
2105
|
}
|
298
|
-
return __privateGet$
|
2106
|
+
return __privateGet$7(this, _fetch);
|
299
2107
|
}
|
300
2108
|
request(url, options) {
|
301
2109
|
const start = /* @__PURE__ */ new Date();
|
@@ -327,19 +2135,19 @@ _queue = new WeakMap();
|
|
327
2135
|
_concurrency = new WeakMap();
|
328
2136
|
_enqueue = new WeakSet();
|
329
2137
|
enqueue_fn = function(task) {
|
330
|
-
const promise = new Promise((resolve) => __privateGet$
|
2138
|
+
const promise = new Promise((resolve) => __privateGet$7(this, _queue).push(resolve)).finally(() => {
|
331
2139
|
this.started--;
|
332
2140
|
this.running++;
|
333
2141
|
}).then(() => task()).finally(() => {
|
334
2142
|
this.running--;
|
335
|
-
const next = __privateGet$
|
2143
|
+
const next = __privateGet$7(this, _queue).shift();
|
336
2144
|
if (next !== void 0) {
|
337
2145
|
this.started++;
|
338
2146
|
next();
|
339
2147
|
}
|
340
2148
|
});
|
341
|
-
if (this.running + this.started < __privateGet$
|
342
|
-
const next = __privateGet$
|
2149
|
+
if (this.running + this.started < __privateGet$7(this, _concurrency)) {
|
2150
|
+
const next = __privateGet$7(this, _queue).shift();
|
343
2151
|
if (next !== void 0) {
|
344
2152
|
this.started++;
|
345
2153
|
next();
|
@@ -528,7 +2336,7 @@ function defaultOnOpen(response) {
|
|
528
2336
|
}
|
529
2337
|
}
|
530
2338
|
|
531
|
-
const VERSION = "0.
|
2339
|
+
const VERSION = "0.29.4";
|
532
2340
|
|
533
2341
|
class ErrorWithCause extends Error {
|
534
2342
|
constructor(message, options) {
|
@@ -621,15 +2429,15 @@ function parseWorkspacesUrlParts(url) {
|
|
621
2429
|
if (!isString(url))
|
622
2430
|
return null;
|
623
2431
|
const matches = {
|
624
|
-
production: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.xata\.sh
|
625
|
-
staging: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.staging-xata\.dev
|
626
|
-
dev: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.dev-xata\.dev
|
627
|
-
local: url.match(/(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:(
|
2432
|
+
production: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.xata\.sh\/db\/([^:]+):?(.*)?/),
|
2433
|
+
staging: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.staging-xata\.dev\/db\/([^:]+):?(.*)?/),
|
2434
|
+
dev: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.dev-xata\.dev\/db\/([^:]+):?(.*)?/),
|
2435
|
+
local: url.match(/(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:(?:\d+)\/db\/([^:]+):?(.*)?/)
|
628
2436
|
};
|
629
2437
|
const [host, match] = Object.entries(matches).find(([, match2]) => match2 !== null) ?? [];
|
630
2438
|
if (!isHostProviderAlias(host) || !match)
|
631
2439
|
return null;
|
632
|
-
return { workspace: match[1], region: match[2], host };
|
2440
|
+
return { workspace: match[1], region: match[2], database: match[3], branch: match[4], host };
|
633
2441
|
}
|
634
2442
|
|
635
2443
|
const pool = new ApiRequestPool();
|
@@ -847,26 +2655,35 @@ function parseUrl(url) {
|
|
847
2655
|
|
848
2656
|
const dataPlaneFetch = async (options) => fetch$1({ ...options, endpoint: "dataPlane" });
|
849
2657
|
|
850
|
-
const applyMigration = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/
|
851
|
-
const
|
852
|
-
url: "/db/{dbBranchName}/
|
853
|
-
method: "
|
2658
|
+
const applyMigration = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/apply", method: "post", ...variables, signal });
|
2659
|
+
const adaptTable = (variables, signal) => dataPlaneFetch({
|
2660
|
+
url: "/db/{dbBranchName}/migrations/adapt/{tableName}",
|
2661
|
+
method: "post",
|
854
2662
|
...variables,
|
855
2663
|
signal
|
856
2664
|
});
|
857
|
-
const
|
858
|
-
url: "/db/{dbBranchName}/
|
859
|
-
method: "
|
2665
|
+
const adaptAllTables = (variables, signal) => dataPlaneFetch({
|
2666
|
+
url: "/db/{dbBranchName}/migrations/adapt",
|
2667
|
+
method: "post",
|
860
2668
|
...variables,
|
861
2669
|
signal
|
862
2670
|
});
|
863
|
-
const
|
2671
|
+
const getBranchMigrationJobStatus = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/status", method: "get", ...variables, signal });
|
2672
|
+
const getMigrationJobStatus = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/jobs/{jobId}", method: "get", ...variables, signal });
|
2673
|
+
const getMigrationHistory = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/history", method: "get", ...variables, signal });
|
864
2674
|
const getBranchList = (variables, signal) => dataPlaneFetch({
|
865
2675
|
url: "/dbs/{dbName}",
|
866
2676
|
method: "get",
|
867
2677
|
...variables,
|
868
2678
|
signal
|
869
2679
|
});
|
2680
|
+
const getDatabaseSettings = (variables, signal) => dataPlaneFetch({
|
2681
|
+
url: "/dbs/{dbName}/settings",
|
2682
|
+
method: "get",
|
2683
|
+
...variables,
|
2684
|
+
signal
|
2685
|
+
});
|
2686
|
+
const updateDatabaseSettings = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/settings", method: "patch", ...variables, signal });
|
870
2687
|
const getBranchDetails = (variables, signal) => dataPlaneFetch({
|
871
2688
|
url: "/db/{dbBranchName}",
|
872
2689
|
method: "get",
|
@@ -1080,11 +2897,26 @@ const sqlQuery = (variables, signal) => dataPlaneFetch({
|
|
1080
2897
|
signal
|
1081
2898
|
});
|
1082
2899
|
const operationsByTag$2 = {
|
1083
|
-
|
2900
|
+
migrations: {
|
1084
2901
|
applyMigration,
|
1085
|
-
|
1086
|
-
|
1087
|
-
|
2902
|
+
adaptTable,
|
2903
|
+
adaptAllTables,
|
2904
|
+
getBranchMigrationJobStatus,
|
2905
|
+
getMigrationJobStatus,
|
2906
|
+
getMigrationHistory,
|
2907
|
+
getSchema,
|
2908
|
+
getBranchMigrationHistory,
|
2909
|
+
getBranchMigrationPlan,
|
2910
|
+
executeBranchMigrationPlan,
|
2911
|
+
getBranchSchemaHistory,
|
2912
|
+
compareBranchWithUserSchema,
|
2913
|
+
compareBranchSchemas,
|
2914
|
+
updateBranchSchema,
|
2915
|
+
previewBranchSchemaEdit,
|
2916
|
+
applyBranchSchemaEdit,
|
2917
|
+
pushBranchMigrations
|
2918
|
+
},
|
2919
|
+
branch: {
|
1088
2920
|
getBranchList,
|
1089
2921
|
getBranchDetails,
|
1090
2922
|
createBranch,
|
@@ -1098,19 +2930,7 @@ const operationsByTag$2 = {
|
|
1098
2930
|
removeGitBranchesEntry,
|
1099
2931
|
resolveBranch
|
1100
2932
|
},
|
1101
|
-
|
1102
|
-
getSchema,
|
1103
|
-
getBranchMigrationHistory,
|
1104
|
-
getBranchMigrationPlan,
|
1105
|
-
executeBranchMigrationPlan,
|
1106
|
-
getBranchSchemaHistory,
|
1107
|
-
compareBranchWithUserSchema,
|
1108
|
-
compareBranchSchemas,
|
1109
|
-
updateBranchSchema,
|
1110
|
-
previewBranchSchemaEdit,
|
1111
|
-
applyBranchSchemaEdit,
|
1112
|
-
pushBranchMigrations
|
1113
|
-
},
|
2933
|
+
database: { getDatabaseSettings, updateDatabaseSettings },
|
1114
2934
|
migrationRequests: {
|
1115
2935
|
queryMigrationRequests,
|
1116
2936
|
createMigrationRequest,
|
@@ -1252,6 +3072,8 @@ const deleteWorkspace = (variables, signal) => controlPlaneFetch({
|
|
1252
3072
|
...variables,
|
1253
3073
|
signal
|
1254
3074
|
});
|
3075
|
+
const getWorkspaceSettings = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/settings", method: "get", ...variables, signal });
|
3076
|
+
const updateWorkspaceSettings = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/settings", method: "patch", ...variables, signal });
|
1255
3077
|
const getWorkspaceMembersList = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/members", method: "get", ...variables, signal });
|
1256
3078
|
const updateWorkspaceMemberRole = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/members/{userId}", method: "put", ...variables, signal });
|
1257
3079
|
const removeWorkspaceMember = (variables, signal) => controlPlaneFetch({
|
@@ -1317,6 +3139,8 @@ const operationsByTag$1 = {
|
|
1317
3139
|
getWorkspace,
|
1318
3140
|
updateWorkspace,
|
1319
3141
|
deleteWorkspace,
|
3142
|
+
getWorkspaceSettings,
|
3143
|
+
updateWorkspaceSettings,
|
1320
3144
|
getWorkspaceMembersList,
|
1321
3145
|
updateWorkspaceMemberRole,
|
1322
3146
|
removeWorkspaceMember
|
@@ -1349,7 +3173,7 @@ var __accessCheck$7 = (obj, member, msg) => {
|
|
1349
3173
|
if (!member.has(obj))
|
1350
3174
|
throw TypeError("Cannot " + msg);
|
1351
3175
|
};
|
1352
|
-
var __privateGet$
|
3176
|
+
var __privateGet$6 = (obj, member, getter) => {
|
1353
3177
|
__accessCheck$7(obj, member, "read from private field");
|
1354
3178
|
return getter ? getter.call(obj) : member.get(obj);
|
1355
3179
|
};
|
@@ -1358,7 +3182,7 @@ var __privateAdd$7 = (obj, member, value) => {
|
|
1358
3182
|
throw TypeError("Cannot add the same private member more than once");
|
1359
3183
|
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
1360
3184
|
};
|
1361
|
-
var __privateSet$
|
3185
|
+
var __privateSet$5 = (obj, member, value, setter) => {
|
1362
3186
|
__accessCheck$7(obj, member, "write to private field");
|
1363
3187
|
setter ? setter.call(obj, value) : member.set(obj, value);
|
1364
3188
|
return value;
|
@@ -1375,7 +3199,7 @@ class XataApiClient {
|
|
1375
3199
|
if (!apiKey) {
|
1376
3200
|
throw new Error("Could not resolve a valid apiKey");
|
1377
3201
|
}
|
1378
|
-
__privateSet$
|
3202
|
+
__privateSet$5(this, _extraProps, {
|
1379
3203
|
apiUrl: getHostUrl(provider, "main"),
|
1380
3204
|
workspacesApiUrl: getHostUrl(provider, "workspaces"),
|
1381
3205
|
fetch: getFetchImplementation(options.fetch),
|
@@ -1387,64 +3211,64 @@ class XataApiClient {
|
|
1387
3211
|
});
|
1388
3212
|
}
|
1389
3213
|
get user() {
|
1390
|
-
if (!__privateGet$
|
1391
|
-
__privateGet$
|
1392
|
-
return __privateGet$
|
3214
|
+
if (!__privateGet$6(this, _namespaces).user)
|
3215
|
+
__privateGet$6(this, _namespaces).user = new UserApi(__privateGet$6(this, _extraProps));
|
3216
|
+
return __privateGet$6(this, _namespaces).user;
|
1393
3217
|
}
|
1394
3218
|
get authentication() {
|
1395
|
-
if (!__privateGet$
|
1396
|
-
__privateGet$
|
1397
|
-
return __privateGet$
|
3219
|
+
if (!__privateGet$6(this, _namespaces).authentication)
|
3220
|
+
__privateGet$6(this, _namespaces).authentication = new AuthenticationApi(__privateGet$6(this, _extraProps));
|
3221
|
+
return __privateGet$6(this, _namespaces).authentication;
|
1398
3222
|
}
|
1399
3223
|
get workspaces() {
|
1400
|
-
if (!__privateGet$
|
1401
|
-
__privateGet$
|
1402
|
-
return __privateGet$
|
3224
|
+
if (!__privateGet$6(this, _namespaces).workspaces)
|
3225
|
+
__privateGet$6(this, _namespaces).workspaces = new WorkspaceApi(__privateGet$6(this, _extraProps));
|
3226
|
+
return __privateGet$6(this, _namespaces).workspaces;
|
1403
3227
|
}
|
1404
3228
|
get invites() {
|
1405
|
-
if (!__privateGet$
|
1406
|
-
__privateGet$
|
1407
|
-
return __privateGet$
|
3229
|
+
if (!__privateGet$6(this, _namespaces).invites)
|
3230
|
+
__privateGet$6(this, _namespaces).invites = new InvitesApi(__privateGet$6(this, _extraProps));
|
3231
|
+
return __privateGet$6(this, _namespaces).invites;
|
1408
3232
|
}
|
1409
3233
|
get database() {
|
1410
|
-
if (!__privateGet$
|
1411
|
-
__privateGet$
|
1412
|
-
return __privateGet$
|
3234
|
+
if (!__privateGet$6(this, _namespaces).database)
|
3235
|
+
__privateGet$6(this, _namespaces).database = new DatabaseApi(__privateGet$6(this, _extraProps));
|
3236
|
+
return __privateGet$6(this, _namespaces).database;
|
1413
3237
|
}
|
1414
3238
|
get branches() {
|
1415
|
-
if (!__privateGet$
|
1416
|
-
__privateGet$
|
1417
|
-
return __privateGet$
|
3239
|
+
if (!__privateGet$6(this, _namespaces).branches)
|
3240
|
+
__privateGet$6(this, _namespaces).branches = new BranchApi(__privateGet$6(this, _extraProps));
|
3241
|
+
return __privateGet$6(this, _namespaces).branches;
|
1418
3242
|
}
|
1419
3243
|
get migrations() {
|
1420
|
-
if (!__privateGet$
|
1421
|
-
__privateGet$
|
1422
|
-
return __privateGet$
|
3244
|
+
if (!__privateGet$6(this, _namespaces).migrations)
|
3245
|
+
__privateGet$6(this, _namespaces).migrations = new MigrationsApi(__privateGet$6(this, _extraProps));
|
3246
|
+
return __privateGet$6(this, _namespaces).migrations;
|
1423
3247
|
}
|
1424
3248
|
get migrationRequests() {
|
1425
|
-
if (!__privateGet$
|
1426
|
-
__privateGet$
|
1427
|
-
return __privateGet$
|
3249
|
+
if (!__privateGet$6(this, _namespaces).migrationRequests)
|
3250
|
+
__privateGet$6(this, _namespaces).migrationRequests = new MigrationRequestsApi(__privateGet$6(this, _extraProps));
|
3251
|
+
return __privateGet$6(this, _namespaces).migrationRequests;
|
1428
3252
|
}
|
1429
3253
|
get tables() {
|
1430
|
-
if (!__privateGet$
|
1431
|
-
__privateGet$
|
1432
|
-
return __privateGet$
|
3254
|
+
if (!__privateGet$6(this, _namespaces).tables)
|
3255
|
+
__privateGet$6(this, _namespaces).tables = new TableApi(__privateGet$6(this, _extraProps));
|
3256
|
+
return __privateGet$6(this, _namespaces).tables;
|
1433
3257
|
}
|
1434
3258
|
get records() {
|
1435
|
-
if (!__privateGet$
|
1436
|
-
__privateGet$
|
1437
|
-
return __privateGet$
|
3259
|
+
if (!__privateGet$6(this, _namespaces).records)
|
3260
|
+
__privateGet$6(this, _namespaces).records = new RecordsApi(__privateGet$6(this, _extraProps));
|
3261
|
+
return __privateGet$6(this, _namespaces).records;
|
1438
3262
|
}
|
1439
3263
|
get files() {
|
1440
|
-
if (!__privateGet$
|
1441
|
-
__privateGet$
|
1442
|
-
return __privateGet$
|
3264
|
+
if (!__privateGet$6(this, _namespaces).files)
|
3265
|
+
__privateGet$6(this, _namespaces).files = new FilesApi(__privateGet$6(this, _extraProps));
|
3266
|
+
return __privateGet$6(this, _namespaces).files;
|
1443
3267
|
}
|
1444
3268
|
get searchAndFilter() {
|
1445
|
-
if (!__privateGet$
|
1446
|
-
__privateGet$
|
1447
|
-
return __privateGet$
|
3269
|
+
if (!__privateGet$6(this, _namespaces).searchAndFilter)
|
3270
|
+
__privateGet$6(this, _namespaces).searchAndFilter = new SearchAndFilterApi(__privateGet$6(this, _extraProps));
|
3271
|
+
return __privateGet$6(this, _namespaces).searchAndFilter;
|
1448
3272
|
}
|
1449
3273
|
}
|
1450
3274
|
_extraProps = new WeakMap();
|
@@ -1746,6 +3570,30 @@ class BranchApi {
|
|
1746
3570
|
...this.extraProps
|
1747
3571
|
});
|
1748
3572
|
}
|
3573
|
+
pgRollMigrationHistory({
|
3574
|
+
workspace,
|
3575
|
+
region,
|
3576
|
+
database,
|
3577
|
+
branch
|
3578
|
+
}) {
|
3579
|
+
return operationsByTag.migrations.getMigrationHistory({
|
3580
|
+
pathParams: { workspace, region, dbBranchName: `${database}:${branch}` },
|
3581
|
+
...this.extraProps
|
3582
|
+
});
|
3583
|
+
}
|
3584
|
+
applyMigration({
|
3585
|
+
workspace,
|
3586
|
+
region,
|
3587
|
+
database,
|
3588
|
+
branch,
|
3589
|
+
migration
|
3590
|
+
}) {
|
3591
|
+
return operationsByTag.migrations.applyMigration({
|
3592
|
+
pathParams: { workspace, region, dbBranchName: `${database}:${branch}` },
|
3593
|
+
body: migration,
|
3594
|
+
...this.extraProps
|
3595
|
+
});
|
3596
|
+
}
|
1749
3597
|
}
|
1750
3598
|
class TableApi {
|
1751
3599
|
constructor(extraProps) {
|
@@ -2559,6 +4407,17 @@ class MigrationsApi {
|
|
2559
4407
|
...this.extraProps
|
2560
4408
|
});
|
2561
4409
|
}
|
4410
|
+
getSchema({
|
4411
|
+
workspace,
|
4412
|
+
region,
|
4413
|
+
database,
|
4414
|
+
branch
|
4415
|
+
}) {
|
4416
|
+
return operationsByTag.migrations.getSchema({
|
4417
|
+
pathParams: { workspace, region, dbBranchName: `${database}:${branch}` },
|
4418
|
+
...this.extraProps
|
4419
|
+
});
|
4420
|
+
}
|
2562
4421
|
}
|
2563
4422
|
class DatabaseApi {
|
2564
4423
|
constructor(extraProps) {
|
@@ -2864,7 +4723,7 @@ var __accessCheck$6 = (obj, member, msg) => {
|
|
2864
4723
|
if (!member.has(obj))
|
2865
4724
|
throw TypeError("Cannot " + msg);
|
2866
4725
|
};
|
2867
|
-
var __privateGet$
|
4726
|
+
var __privateGet$5 = (obj, member, getter) => {
|
2868
4727
|
__accessCheck$6(obj, member, "read from private field");
|
2869
4728
|
return getter ? getter.call(obj) : member.get(obj);
|
2870
4729
|
};
|
@@ -2873,7 +4732,7 @@ var __privateAdd$6 = (obj, member, value) => {
|
|
2873
4732
|
throw TypeError("Cannot add the same private member more than once");
|
2874
4733
|
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
2875
4734
|
};
|
2876
|
-
var __privateSet$
|
4735
|
+
var __privateSet$4 = (obj, member, value, setter) => {
|
2877
4736
|
__accessCheck$6(obj, member, "write to private field");
|
2878
4737
|
setter ? setter.call(obj, value) : member.set(obj, value);
|
2879
4738
|
return value;
|
@@ -2882,9 +4741,9 @@ var _query, _page;
|
|
2882
4741
|
class Page {
|
2883
4742
|
constructor(query, meta, records = []) {
|
2884
4743
|
__privateAdd$6(this, _query, void 0);
|
2885
|
-
__privateSet$
|
4744
|
+
__privateSet$4(this, _query, query);
|
2886
4745
|
this.meta = meta;
|
2887
|
-
this.records = new
|
4746
|
+
this.records = new PageRecordArray(this, records);
|
2888
4747
|
}
|
2889
4748
|
/**
|
2890
4749
|
* Retrieves the next page of results.
|
@@ -2893,7 +4752,7 @@ class Page {
|
|
2893
4752
|
* @returns The next page or results.
|
2894
4753
|
*/
|
2895
4754
|
async nextPage(size, offset) {
|
2896
|
-
return __privateGet$
|
4755
|
+
return __privateGet$5(this, _query).getPaginated({ pagination: { size, offset, after: this.meta.page.cursor } });
|
2897
4756
|
}
|
2898
4757
|
/**
|
2899
4758
|
* Retrieves the previous page of results.
|
@@ -2902,7 +4761,7 @@ class Page {
|
|
2902
4761
|
* @returns The previous page or results.
|
2903
4762
|
*/
|
2904
4763
|
async previousPage(size, offset) {
|
2905
|
-
return __privateGet$
|
4764
|
+
return __privateGet$5(this, _query).getPaginated({ pagination: { size, offset, before: this.meta.page.cursor } });
|
2906
4765
|
}
|
2907
4766
|
/**
|
2908
4767
|
* Retrieves the start page of results.
|
@@ -2911,7 +4770,7 @@ class Page {
|
|
2911
4770
|
* @returns The start page or results.
|
2912
4771
|
*/
|
2913
4772
|
async startPage(size, offset) {
|
2914
|
-
return __privateGet$
|
4773
|
+
return __privateGet$5(this, _query).getPaginated({ pagination: { size, offset, start: this.meta.page.cursor } });
|
2915
4774
|
}
|
2916
4775
|
/**
|
2917
4776
|
* Retrieves the end page of results.
|
@@ -2920,7 +4779,7 @@ class Page {
|
|
2920
4779
|
* @returns The end page or results.
|
2921
4780
|
*/
|
2922
4781
|
async endPage(size, offset) {
|
2923
|
-
return __privateGet$
|
4782
|
+
return __privateGet$5(this, _query).getPaginated({ pagination: { size, offset, end: this.meta.page.cursor } });
|
2924
4783
|
}
|
2925
4784
|
/**
|
2926
4785
|
* Shortcut method to check if there will be additional results if the next page of results is retrieved.
|
@@ -2938,11 +4797,38 @@ const PAGINATION_DEFAULT_OFFSET = 0;
|
|
2938
4797
|
function isCursorPaginationOptions(options) {
|
2939
4798
|
return isDefined(options) && (isDefined(options.start) || isDefined(options.end) || isDefined(options.after) || isDefined(options.before));
|
2940
4799
|
}
|
2941
|
-
|
4800
|
+
class RecordArray extends Array {
|
4801
|
+
constructor(...args) {
|
4802
|
+
super(...RecordArray.parseConstructorParams(...args));
|
4803
|
+
}
|
4804
|
+
static parseConstructorParams(...args) {
|
4805
|
+
if (args.length === 1 && typeof args[0] === "number") {
|
4806
|
+
return new Array(args[0]);
|
4807
|
+
}
|
4808
|
+
if (args.length <= 1 && Array.isArray(args[0] ?? [])) {
|
4809
|
+
const result = args[0] ?? [];
|
4810
|
+
return new Array(...result);
|
4811
|
+
}
|
4812
|
+
return new Array(...args);
|
4813
|
+
}
|
4814
|
+
toArray() {
|
4815
|
+
return new Array(...this);
|
4816
|
+
}
|
4817
|
+
toSerializable() {
|
4818
|
+
return JSON.parse(this.toString());
|
4819
|
+
}
|
4820
|
+
toString() {
|
4821
|
+
return JSON.stringify(this.toArray());
|
4822
|
+
}
|
4823
|
+
map(callbackfn, thisArg) {
|
4824
|
+
return this.toArray().map(callbackfn, thisArg);
|
4825
|
+
}
|
4826
|
+
}
|
4827
|
+
const _PageRecordArray = class _PageRecordArray extends Array {
|
2942
4828
|
constructor(...args) {
|
2943
|
-
super(...
|
4829
|
+
super(..._PageRecordArray.parseConstructorParams(...args));
|
2944
4830
|
__privateAdd$6(this, _page, void 0);
|
2945
|
-
__privateSet$
|
4831
|
+
__privateSet$4(this, _page, isObject(args[0]?.meta) ? args[0] : { meta: { page: { cursor: "", more: false } }, records: [] });
|
2946
4832
|
}
|
2947
4833
|
static parseConstructorParams(...args) {
|
2948
4834
|
if (args.length === 1 && typeof args[0] === "number") {
|
@@ -2972,8 +4858,8 @@ const _RecordArray = class _RecordArray extends Array {
|
|
2972
4858
|
* @returns A new array of objects
|
2973
4859
|
*/
|
2974
4860
|
async nextPage(size, offset) {
|
2975
|
-
const newPage = await __privateGet$
|
2976
|
-
return new
|
4861
|
+
const newPage = await __privateGet$5(this, _page).nextPage(size, offset);
|
4862
|
+
return new _PageRecordArray(newPage);
|
2977
4863
|
}
|
2978
4864
|
/**
|
2979
4865
|
* Retrieve previous page of records
|
@@ -2981,8 +4867,8 @@ const _RecordArray = class _RecordArray extends Array {
|
|
2981
4867
|
* @returns A new array of objects
|
2982
4868
|
*/
|
2983
4869
|
async previousPage(size, offset) {
|
2984
|
-
const newPage = await __privateGet$
|
2985
|
-
return new
|
4870
|
+
const newPage = await __privateGet$5(this, _page).previousPage(size, offset);
|
4871
|
+
return new _PageRecordArray(newPage);
|
2986
4872
|
}
|
2987
4873
|
/**
|
2988
4874
|
* Retrieve start page of records
|
@@ -2990,8 +4876,8 @@ const _RecordArray = class _RecordArray extends Array {
|
|
2990
4876
|
* @returns A new array of objects
|
2991
4877
|
*/
|
2992
4878
|
async startPage(size, offset) {
|
2993
|
-
const newPage = await __privateGet$
|
2994
|
-
return new
|
4879
|
+
const newPage = await __privateGet$5(this, _page).startPage(size, offset);
|
4880
|
+
return new _PageRecordArray(newPage);
|
2995
4881
|
}
|
2996
4882
|
/**
|
2997
4883
|
* Retrieve end page of records
|
@@ -2999,24 +4885,24 @@ const _RecordArray = class _RecordArray extends Array {
|
|
2999
4885
|
* @returns A new array of objects
|
3000
4886
|
*/
|
3001
4887
|
async endPage(size, offset) {
|
3002
|
-
const newPage = await __privateGet$
|
3003
|
-
return new
|
4888
|
+
const newPage = await __privateGet$5(this, _page).endPage(size, offset);
|
4889
|
+
return new _PageRecordArray(newPage);
|
3004
4890
|
}
|
3005
4891
|
/**
|
3006
4892
|
* @returns Boolean indicating if there is a next page
|
3007
4893
|
*/
|
3008
4894
|
hasNextPage() {
|
3009
|
-
return __privateGet$
|
4895
|
+
return __privateGet$5(this, _page).meta.page.more;
|
3010
4896
|
}
|
3011
4897
|
};
|
3012
4898
|
_page = new WeakMap();
|
3013
|
-
let
|
4899
|
+
let PageRecordArray = _PageRecordArray;
|
3014
4900
|
|
3015
4901
|
var __accessCheck$5 = (obj, member, msg) => {
|
3016
4902
|
if (!member.has(obj))
|
3017
4903
|
throw TypeError("Cannot " + msg);
|
3018
4904
|
};
|
3019
|
-
var __privateGet$
|
4905
|
+
var __privateGet$4 = (obj, member, getter) => {
|
3020
4906
|
__accessCheck$5(obj, member, "read from private field");
|
3021
4907
|
return getter ? getter.call(obj) : member.get(obj);
|
3022
4908
|
};
|
@@ -3025,7 +4911,7 @@ var __privateAdd$5 = (obj, member, value) => {
|
|
3025
4911
|
throw TypeError("Cannot add the same private member more than once");
|
3026
4912
|
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
3027
4913
|
};
|
3028
|
-
var __privateSet$
|
4914
|
+
var __privateSet$3 = (obj, member, value, setter) => {
|
3029
4915
|
__accessCheck$5(obj, member, "write to private field");
|
3030
4916
|
setter ? setter.call(obj, value) : member.set(obj, value);
|
3031
4917
|
return value;
|
@@ -3043,25 +4929,25 @@ const _Query = class _Query {
|
|
3043
4929
|
__privateAdd$5(this, _data, { filter: {} });
|
3044
4930
|
// Implements pagination
|
3045
4931
|
this.meta = { page: { cursor: "start", more: true, size: PAGINATION_DEFAULT_SIZE } };
|
3046
|
-
this.records = new
|
3047
|
-
__privateSet$
|
4932
|
+
this.records = new PageRecordArray(this, []);
|
4933
|
+
__privateSet$3(this, _table$1, table);
|
3048
4934
|
if (repository) {
|
3049
|
-
__privateSet$
|
4935
|
+
__privateSet$3(this, _repository, repository);
|
3050
4936
|
} else {
|
3051
|
-
__privateSet$
|
4937
|
+
__privateSet$3(this, _repository, this);
|
3052
4938
|
}
|
3053
4939
|
const parent = cleanParent(data, rawParent);
|
3054
|
-
__privateGet$
|
3055
|
-
__privateGet$
|
3056
|
-
__privateGet$
|
3057
|
-
__privateGet$
|
3058
|
-
__privateGet$
|
3059
|
-
__privateGet$
|
3060
|
-
__privateGet$
|
3061
|
-
__privateGet$
|
3062
|
-
__privateGet$
|
3063
|
-
__privateGet$
|
3064
|
-
__privateGet$
|
4940
|
+
__privateGet$4(this, _data).filter = data.filter ?? parent?.filter ?? {};
|
4941
|
+
__privateGet$4(this, _data).filter.$any = data.filter?.$any ?? parent?.filter?.$any;
|
4942
|
+
__privateGet$4(this, _data).filter.$all = data.filter?.$all ?? parent?.filter?.$all;
|
4943
|
+
__privateGet$4(this, _data).filter.$not = data.filter?.$not ?? parent?.filter?.$not;
|
4944
|
+
__privateGet$4(this, _data).filter.$none = data.filter?.$none ?? parent?.filter?.$none;
|
4945
|
+
__privateGet$4(this, _data).sort = data.sort ?? parent?.sort;
|
4946
|
+
__privateGet$4(this, _data).columns = data.columns ?? parent?.columns;
|
4947
|
+
__privateGet$4(this, _data).consistency = data.consistency ?? parent?.consistency;
|
4948
|
+
__privateGet$4(this, _data).pagination = data.pagination ?? parent?.pagination;
|
4949
|
+
__privateGet$4(this, _data).cache = data.cache ?? parent?.cache;
|
4950
|
+
__privateGet$4(this, _data).fetchOptions = data.fetchOptions ?? parent?.fetchOptions;
|
3065
4951
|
this.any = this.any.bind(this);
|
3066
4952
|
this.all = this.all.bind(this);
|
3067
4953
|
this.not = this.not.bind(this);
|
@@ -3072,10 +4958,10 @@ const _Query = class _Query {
|
|
3072
4958
|
Object.defineProperty(this, "repository", { enumerable: false });
|
3073
4959
|
}
|
3074
4960
|
getQueryOptions() {
|
3075
|
-
return __privateGet$
|
4961
|
+
return __privateGet$4(this, _data);
|
3076
4962
|
}
|
3077
4963
|
key() {
|
3078
|
-
const { columns = [], filter = {}, sort = [], pagination = {} } = __privateGet$
|
4964
|
+
const { columns = [], filter = {}, sort = [], pagination = {} } = __privateGet$4(this, _data);
|
3079
4965
|
const key = JSON.stringify({ columns, filter, sort, pagination });
|
3080
4966
|
return toBase64(key);
|
3081
4967
|
}
|
@@ -3086,7 +4972,7 @@ const _Query = class _Query {
|
|
3086
4972
|
*/
|
3087
4973
|
any(...queries) {
|
3088
4974
|
const $any = queries.map((query) => query.getQueryOptions().filter ?? {});
|
3089
|
-
return new _Query(__privateGet$
|
4975
|
+
return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { filter: { $any } }, __privateGet$4(this, _data));
|
3090
4976
|
}
|
3091
4977
|
/**
|
3092
4978
|
* Builds a new query object representing a logical AND between the given subqueries.
|
@@ -3095,7 +4981,7 @@ const _Query = class _Query {
|
|
3095
4981
|
*/
|
3096
4982
|
all(...queries) {
|
3097
4983
|
const $all = queries.map((query) => query.getQueryOptions().filter ?? {});
|
3098
|
-
return new _Query(__privateGet$
|
4984
|
+
return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { filter: { $all } }, __privateGet$4(this, _data));
|
3099
4985
|
}
|
3100
4986
|
/**
|
3101
4987
|
* Builds a new query object representing a logical OR negating each subquery. In pseudo-code: !q1 OR !q2
|
@@ -3104,7 +4990,7 @@ const _Query = class _Query {
|
|
3104
4990
|
*/
|
3105
4991
|
not(...queries) {
|
3106
4992
|
const $not = queries.map((query) => query.getQueryOptions().filter ?? {});
|
3107
|
-
return new _Query(__privateGet$
|
4993
|
+
return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { filter: { $not } }, __privateGet$4(this, _data));
|
3108
4994
|
}
|
3109
4995
|
/**
|
3110
4996
|
* Builds a new query object representing a logical AND negating each subquery. In pseudo-code: !q1 AND !q2
|
@@ -3113,25 +4999,25 @@ const _Query = class _Query {
|
|
3113
4999
|
*/
|
3114
5000
|
none(...queries) {
|
3115
5001
|
const $none = queries.map((query) => query.getQueryOptions().filter ?? {});
|
3116
|
-
return new _Query(__privateGet$
|
5002
|
+
return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { filter: { $none } }, __privateGet$4(this, _data));
|
3117
5003
|
}
|
3118
5004
|
filter(a, b) {
|
3119
5005
|
if (arguments.length === 1) {
|
3120
5006
|
const constraints = Object.entries(a ?? {}).map(([column, constraint]) => ({
|
3121
5007
|
[column]: __privateMethod$3(this, _cleanFilterConstraint, cleanFilterConstraint_fn).call(this, column, constraint)
|
3122
5008
|
}));
|
3123
|
-
const $all = compact([__privateGet$
|
3124
|
-
return new _Query(__privateGet$
|
5009
|
+
const $all = compact([__privateGet$4(this, _data).filter?.$all].flat().concat(constraints));
|
5010
|
+
return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { filter: { $all } }, __privateGet$4(this, _data));
|
3125
5011
|
} else {
|
3126
5012
|
const constraints = isDefined(a) && isDefined(b) ? [{ [a]: __privateMethod$3(this, _cleanFilterConstraint, cleanFilterConstraint_fn).call(this, a, b) }] : void 0;
|
3127
|
-
const $all = compact([__privateGet$
|
3128
|
-
return new _Query(__privateGet$
|
5013
|
+
const $all = compact([__privateGet$4(this, _data).filter?.$all].flat().concat(constraints));
|
5014
|
+
return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { filter: { $all } }, __privateGet$4(this, _data));
|
3129
5015
|
}
|
3130
5016
|
}
|
3131
5017
|
sort(column, direction = "asc") {
|
3132
|
-
const originalSort = [__privateGet$
|
5018
|
+
const originalSort = [__privateGet$4(this, _data).sort ?? []].flat();
|
3133
5019
|
const sort = [...originalSort, { column, direction }];
|
3134
|
-
return new _Query(__privateGet$
|
5020
|
+
return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { sort }, __privateGet$4(this, _data));
|
3135
5021
|
}
|
3136
5022
|
/**
|
3137
5023
|
* Builds a new query specifying the set of columns to be returned in the query response.
|
@@ -3140,15 +5026,15 @@ const _Query = class _Query {
|
|
3140
5026
|
*/
|
3141
5027
|
select(columns) {
|
3142
5028
|
return new _Query(
|
3143
|
-
__privateGet$
|
3144
|
-
__privateGet$
|
5029
|
+
__privateGet$4(this, _repository),
|
5030
|
+
__privateGet$4(this, _table$1),
|
3145
5031
|
{ columns },
|
3146
|
-
__privateGet$
|
5032
|
+
__privateGet$4(this, _data)
|
3147
5033
|
);
|
3148
5034
|
}
|
3149
5035
|
getPaginated(options = {}) {
|
3150
|
-
const query = new _Query(__privateGet$
|
3151
|
-
return __privateGet$
|
5036
|
+
const query = new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), options, __privateGet$4(this, _data));
|
5037
|
+
return __privateGet$4(this, _repository).query(query);
|
3152
5038
|
}
|
3153
5039
|
/**
|
3154
5040
|
* Get results in an iterator
|
@@ -3185,7 +5071,7 @@ const _Query = class _Query {
|
|
3185
5071
|
if (page.hasNextPage() && options.pagination?.size === void 0) {
|
3186
5072
|
console.trace("Calling getMany does not return all results. Paginate to get all results or call getAll.");
|
3187
5073
|
}
|
3188
|
-
const array = new
|
5074
|
+
const array = new PageRecordArray(page, results.slice(0, size));
|
3189
5075
|
return array;
|
3190
5076
|
}
|
3191
5077
|
async getAll(options = {}) {
|
@@ -3194,7 +5080,7 @@ const _Query = class _Query {
|
|
3194
5080
|
for await (const page of this.getIterator({ ...rest, batchSize })) {
|
3195
5081
|
results.push(...page);
|
3196
5082
|
}
|
3197
|
-
return results;
|
5083
|
+
return new RecordArray(results);
|
3198
5084
|
}
|
3199
5085
|
async getFirst(options = {}) {
|
3200
5086
|
const records = await this.getMany({ ...options, pagination: { size: 1 } });
|
@@ -3209,12 +5095,12 @@ const _Query = class _Query {
|
|
3209
5095
|
async summarize(params = {}) {
|
3210
5096
|
const { summaries, summariesFilter, ...options } = params;
|
3211
5097
|
const query = new _Query(
|
3212
|
-
__privateGet$
|
3213
|
-
__privateGet$
|
5098
|
+
__privateGet$4(this, _repository),
|
5099
|
+
__privateGet$4(this, _table$1),
|
3214
5100
|
options,
|
3215
|
-
__privateGet$
|
5101
|
+
__privateGet$4(this, _data)
|
3216
5102
|
);
|
3217
|
-
return __privateGet$
|
5103
|
+
return __privateGet$4(this, _repository).summarizeTable(query, summaries, summariesFilter);
|
3218
5104
|
}
|
3219
5105
|
/**
|
3220
5106
|
* Builds a new query object adding a cache TTL in milliseconds.
|
@@ -3222,7 +5108,7 @@ const _Query = class _Query {
|
|
3222
5108
|
* @returns A new Query object.
|
3223
5109
|
*/
|
3224
5110
|
cache(ttl) {
|
3225
|
-
return new _Query(__privateGet$
|
5111
|
+
return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { cache: ttl }, __privateGet$4(this, _data));
|
3226
5112
|
}
|
3227
5113
|
/**
|
3228
5114
|
* Retrieve next page of records
|
@@ -3268,7 +5154,7 @@ _repository = new WeakMap();
|
|
3268
5154
|
_data = new WeakMap();
|
3269
5155
|
_cleanFilterConstraint = new WeakSet();
|
3270
5156
|
cleanFilterConstraint_fn = function(column, value) {
|
3271
|
-
const columnType = __privateGet$
|
5157
|
+
const columnType = __privateGet$4(this, _table$1).schema?.columns.find(({ name }) => name === column)?.type;
|
3272
5158
|
if (columnType === "multiple" && (isString(value) || isStringArray(value))) {
|
3273
5159
|
return { $includes: value };
|
3274
5160
|
}
|
@@ -3358,7 +5244,7 @@ var __accessCheck$4 = (obj, member, msg) => {
|
|
3358
5244
|
if (!member.has(obj))
|
3359
5245
|
throw TypeError("Cannot " + msg);
|
3360
5246
|
};
|
3361
|
-
var __privateGet$
|
5247
|
+
var __privateGet$3 = (obj, member, getter) => {
|
3362
5248
|
__accessCheck$4(obj, member, "read from private field");
|
3363
5249
|
return getter ? getter.call(obj) : member.get(obj);
|
3364
5250
|
};
|
@@ -3367,7 +5253,7 @@ var __privateAdd$4 = (obj, member, value) => {
|
|
3367
5253
|
throw TypeError("Cannot add the same private member more than once");
|
3368
5254
|
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
3369
5255
|
};
|
3370
|
-
var __privateSet$
|
5256
|
+
var __privateSet$2 = (obj, member, value, setter) => {
|
3371
5257
|
__accessCheck$4(obj, member, "write to private field");
|
3372
5258
|
setter ? setter.call(obj, value) : member.set(obj, value);
|
3373
5259
|
return value;
|
@@ -3376,7 +5262,7 @@ var __privateMethod$2 = (obj, member, method) => {
|
|
3376
5262
|
__accessCheck$4(obj, member, "access private method");
|
3377
5263
|
return method;
|
3378
5264
|
};
|
3379
|
-
var _table, _getFetchProps, _db, _cache, _schemaTables
|
5265
|
+
var _table, _getFetchProps, _db, _cache, _schemaTables, _trace, _insertRecordWithoutId, insertRecordWithoutId_fn, _insertRecordWithId, insertRecordWithId_fn, _insertRecords, insertRecords_fn, _updateRecordWithID, updateRecordWithID_fn, _updateRecords, updateRecords_fn, _upsertRecordWithID, upsertRecordWithID_fn, _deleteRecord, deleteRecord_fn, _deleteRecords, deleteRecords_fn, _setCacheQuery, setCacheQuery_fn, _getCacheQuery, getCacheQuery_fn, _getSchemaTables, getSchemaTables_fn, _transformObjectToApi, transformObjectToApi_fn;
|
3380
5266
|
const BULK_OPERATION_MAX_SIZE = 1e3;
|
3381
5267
|
class Repository extends Query {
|
3382
5268
|
}
|
@@ -3397,31 +5283,31 @@ class RestRepository extends Query {
|
|
3397
5283
|
__privateAdd$4(this, _deleteRecords);
|
3398
5284
|
__privateAdd$4(this, _setCacheQuery);
|
3399
5285
|
__privateAdd$4(this, _getCacheQuery);
|
3400
|
-
__privateAdd$4(this, _getSchemaTables
|
5286
|
+
__privateAdd$4(this, _getSchemaTables);
|
3401
5287
|
__privateAdd$4(this, _transformObjectToApi);
|
3402
5288
|
__privateAdd$4(this, _table, void 0);
|
3403
5289
|
__privateAdd$4(this, _getFetchProps, void 0);
|
3404
5290
|
__privateAdd$4(this, _db, void 0);
|
3405
5291
|
__privateAdd$4(this, _cache, void 0);
|
3406
|
-
__privateAdd$4(this, _schemaTables
|
5292
|
+
__privateAdd$4(this, _schemaTables, void 0);
|
3407
5293
|
__privateAdd$4(this, _trace, void 0);
|
3408
|
-
__privateSet$
|
3409
|
-
__privateSet$
|
3410
|
-
__privateSet$
|
3411
|
-
__privateSet$
|
3412
|
-
__privateSet$
|
5294
|
+
__privateSet$2(this, _table, options.table);
|
5295
|
+
__privateSet$2(this, _db, options.db);
|
5296
|
+
__privateSet$2(this, _cache, options.pluginOptions.cache);
|
5297
|
+
__privateSet$2(this, _schemaTables, options.schemaTables);
|
5298
|
+
__privateSet$2(this, _getFetchProps, () => ({ ...options.pluginOptions, sessionID: generateUUID() }));
|
3413
5299
|
const trace = options.pluginOptions.trace ?? defaultTrace;
|
3414
|
-
__privateSet$
|
5300
|
+
__privateSet$2(this, _trace, async (name, fn, options2 = {}) => {
|
3415
5301
|
return trace(name, fn, {
|
3416
5302
|
...options2,
|
3417
|
-
[TraceAttributes.TABLE]: __privateGet$
|
5303
|
+
[TraceAttributes.TABLE]: __privateGet$3(this, _table),
|
3418
5304
|
[TraceAttributes.KIND]: "sdk-operation",
|
3419
5305
|
[TraceAttributes.VERSION]: VERSION
|
3420
5306
|
});
|
3421
5307
|
});
|
3422
5308
|
}
|
3423
5309
|
async create(a, b, c, d) {
|
3424
|
-
return __privateGet$
|
5310
|
+
return __privateGet$3(this, _trace).call(this, "create", async () => {
|
3425
5311
|
const ifVersion = parseIfVersion(b, c, d);
|
3426
5312
|
if (Array.isArray(a)) {
|
3427
5313
|
if (a.length === 0)
|
@@ -3451,7 +5337,7 @@ class RestRepository extends Query {
|
|
3451
5337
|
});
|
3452
5338
|
}
|
3453
5339
|
async read(a, b) {
|
3454
|
-
return __privateGet$
|
5340
|
+
return __privateGet$3(this, _trace).call(this, "read", async () => {
|
3455
5341
|
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
3456
5342
|
if (Array.isArray(a)) {
|
3457
5343
|
if (a.length === 0)
|
@@ -3472,17 +5358,17 @@ class RestRepository extends Query {
|
|
3472
5358
|
workspace: "{workspaceId}",
|
3473
5359
|
dbBranchName: "{dbBranch}",
|
3474
5360
|
region: "{region}",
|
3475
|
-
tableName: __privateGet$
|
5361
|
+
tableName: __privateGet$3(this, _table),
|
3476
5362
|
recordId: id
|
3477
5363
|
},
|
3478
5364
|
queryParams: { columns },
|
3479
|
-
...__privateGet$
|
5365
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
3480
5366
|
});
|
3481
|
-
const schemaTables = await __privateMethod$2(this, _getSchemaTables
|
5367
|
+
const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
|
3482
5368
|
return initObject(
|
3483
|
-
__privateGet$
|
5369
|
+
__privateGet$3(this, _db),
|
3484
5370
|
schemaTables,
|
3485
|
-
__privateGet$
|
5371
|
+
__privateGet$3(this, _table),
|
3486
5372
|
response,
|
3487
5373
|
columns
|
3488
5374
|
);
|
@@ -3497,7 +5383,7 @@ class RestRepository extends Query {
|
|
3497
5383
|
});
|
3498
5384
|
}
|
3499
5385
|
async readOrThrow(a, b) {
|
3500
|
-
return __privateGet$
|
5386
|
+
return __privateGet$3(this, _trace).call(this, "readOrThrow", async () => {
|
3501
5387
|
const result = await this.read(a, b);
|
3502
5388
|
if (Array.isArray(result)) {
|
3503
5389
|
const missingIds = compact(
|
@@ -3516,7 +5402,7 @@ class RestRepository extends Query {
|
|
3516
5402
|
});
|
3517
5403
|
}
|
3518
5404
|
async update(a, b, c, d) {
|
3519
|
-
return __privateGet$
|
5405
|
+
return __privateGet$3(this, _trace).call(this, "update", async () => {
|
3520
5406
|
const ifVersion = parseIfVersion(b, c, d);
|
3521
5407
|
if (Array.isArray(a)) {
|
3522
5408
|
if (a.length === 0)
|
@@ -3549,7 +5435,7 @@ class RestRepository extends Query {
|
|
3549
5435
|
});
|
3550
5436
|
}
|
3551
5437
|
async updateOrThrow(a, b, c, d) {
|
3552
|
-
return __privateGet$
|
5438
|
+
return __privateGet$3(this, _trace).call(this, "updateOrThrow", async () => {
|
3553
5439
|
const result = await this.update(a, b, c, d);
|
3554
5440
|
if (Array.isArray(result)) {
|
3555
5441
|
const missingIds = compact(
|
@@ -3568,7 +5454,7 @@ class RestRepository extends Query {
|
|
3568
5454
|
});
|
3569
5455
|
}
|
3570
5456
|
async createOrUpdate(a, b, c, d) {
|
3571
|
-
return __privateGet$
|
5457
|
+
return __privateGet$3(this, _trace).call(this, "createOrUpdate", async () => {
|
3572
5458
|
const ifVersion = parseIfVersion(b, c, d);
|
3573
5459
|
if (Array.isArray(a)) {
|
3574
5460
|
if (a.length === 0)
|
@@ -3603,7 +5489,7 @@ class RestRepository extends Query {
|
|
3603
5489
|
});
|
3604
5490
|
}
|
3605
5491
|
async createOrReplace(a, b, c, d) {
|
3606
|
-
return __privateGet$
|
5492
|
+
return __privateGet$3(this, _trace).call(this, "createOrReplace", async () => {
|
3607
5493
|
const ifVersion = parseIfVersion(b, c, d);
|
3608
5494
|
if (Array.isArray(a)) {
|
3609
5495
|
if (a.length === 0)
|
@@ -3635,7 +5521,7 @@ class RestRepository extends Query {
|
|
3635
5521
|
});
|
3636
5522
|
}
|
3637
5523
|
async delete(a, b) {
|
3638
|
-
return __privateGet$
|
5524
|
+
return __privateGet$3(this, _trace).call(this, "delete", async () => {
|
3639
5525
|
if (Array.isArray(a)) {
|
3640
5526
|
if (a.length === 0)
|
3641
5527
|
return [];
|
@@ -3661,7 +5547,7 @@ class RestRepository extends Query {
|
|
3661
5547
|
});
|
3662
5548
|
}
|
3663
5549
|
async deleteOrThrow(a, b) {
|
3664
|
-
return __privateGet$
|
5550
|
+
return __privateGet$3(this, _trace).call(this, "deleteOrThrow", async () => {
|
3665
5551
|
const result = await this.delete(a, b);
|
3666
5552
|
if (Array.isArray(result)) {
|
3667
5553
|
const missingIds = compact(
|
@@ -3679,13 +5565,13 @@ class RestRepository extends Query {
|
|
3679
5565
|
});
|
3680
5566
|
}
|
3681
5567
|
async search(query, options = {}) {
|
3682
|
-
return __privateGet$
|
5568
|
+
return __privateGet$3(this, _trace).call(this, "search", async () => {
|
3683
5569
|
const { records, totalCount } = await searchTable({
|
3684
5570
|
pathParams: {
|
3685
5571
|
workspace: "{workspaceId}",
|
3686
5572
|
dbBranchName: "{dbBranch}",
|
3687
5573
|
region: "{region}",
|
3688
|
-
tableName: __privateGet$
|
5574
|
+
tableName: __privateGet$3(this, _table)
|
3689
5575
|
},
|
3690
5576
|
body: {
|
3691
5577
|
query,
|
@@ -3697,23 +5583,23 @@ class RestRepository extends Query {
|
|
3697
5583
|
page: options.page,
|
3698
5584
|
target: options.target
|
3699
5585
|
},
|
3700
|
-
...__privateGet$
|
5586
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
3701
5587
|
});
|
3702
|
-
const schemaTables = await __privateMethod$2(this, _getSchemaTables
|
5588
|
+
const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
|
3703
5589
|
return {
|
3704
|
-
records: records.map((item) => initObject(__privateGet$
|
5590
|
+
records: records.map((item) => initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), item, ["*"])),
|
3705
5591
|
totalCount
|
3706
5592
|
};
|
3707
5593
|
});
|
3708
5594
|
}
|
3709
5595
|
async vectorSearch(column, query, options) {
|
3710
|
-
return __privateGet$
|
5596
|
+
return __privateGet$3(this, _trace).call(this, "vectorSearch", async () => {
|
3711
5597
|
const { records, totalCount } = await vectorSearchTable({
|
3712
5598
|
pathParams: {
|
3713
5599
|
workspace: "{workspaceId}",
|
3714
5600
|
dbBranchName: "{dbBranch}",
|
3715
5601
|
region: "{region}",
|
3716
|
-
tableName: __privateGet$
|
5602
|
+
tableName: __privateGet$3(this, _table)
|
3717
5603
|
},
|
3718
5604
|
body: {
|
3719
5605
|
column,
|
@@ -3722,32 +5608,32 @@ class RestRepository extends Query {
|
|
3722
5608
|
size: options?.size,
|
3723
5609
|
filter: options?.filter
|
3724
5610
|
},
|
3725
|
-
...__privateGet$
|
5611
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
3726
5612
|
});
|
3727
|
-
const schemaTables = await __privateMethod$2(this, _getSchemaTables
|
5613
|
+
const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
|
3728
5614
|
return {
|
3729
|
-
records: records.map((item) => initObject(__privateGet$
|
5615
|
+
records: records.map((item) => initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), item, ["*"])),
|
3730
5616
|
totalCount
|
3731
5617
|
};
|
3732
5618
|
});
|
3733
5619
|
}
|
3734
5620
|
async aggregate(aggs, filter) {
|
3735
|
-
return __privateGet$
|
5621
|
+
return __privateGet$3(this, _trace).call(this, "aggregate", async () => {
|
3736
5622
|
const result = await aggregateTable({
|
3737
5623
|
pathParams: {
|
3738
5624
|
workspace: "{workspaceId}",
|
3739
5625
|
dbBranchName: "{dbBranch}",
|
3740
5626
|
region: "{region}",
|
3741
|
-
tableName: __privateGet$
|
5627
|
+
tableName: __privateGet$3(this, _table)
|
3742
5628
|
},
|
3743
5629
|
body: { aggs, filter },
|
3744
|
-
...__privateGet$
|
5630
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
3745
5631
|
});
|
3746
5632
|
return result;
|
3747
5633
|
});
|
3748
5634
|
}
|
3749
5635
|
async query(query) {
|
3750
|
-
return __privateGet$
|
5636
|
+
return __privateGet$3(this, _trace).call(this, "query", async () => {
|
3751
5637
|
const cacheQuery = await __privateMethod$2(this, _getCacheQuery, getCacheQuery_fn).call(this, query);
|
3752
5638
|
if (cacheQuery)
|
3753
5639
|
return new Page(query, cacheQuery.meta, cacheQuery.records);
|
@@ -3757,7 +5643,7 @@ class RestRepository extends Query {
|
|
3757
5643
|
workspace: "{workspaceId}",
|
3758
5644
|
dbBranchName: "{dbBranch}",
|
3759
5645
|
region: "{region}",
|
3760
|
-
tableName: __privateGet$
|
5646
|
+
tableName: __privateGet$3(this, _table)
|
3761
5647
|
},
|
3762
5648
|
body: {
|
3763
5649
|
filter: cleanFilter(data.filter),
|
@@ -3767,14 +5653,14 @@ class RestRepository extends Query {
|
|
3767
5653
|
consistency: data.consistency
|
3768
5654
|
},
|
3769
5655
|
fetchOptions: data.fetchOptions,
|
3770
|
-
...__privateGet$
|
5656
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
3771
5657
|
});
|
3772
|
-
const schemaTables = await __privateMethod$2(this, _getSchemaTables
|
5658
|
+
const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
|
3773
5659
|
const records = objects.map(
|
3774
5660
|
(record) => initObject(
|
3775
|
-
__privateGet$
|
5661
|
+
__privateGet$3(this, _db),
|
3776
5662
|
schemaTables,
|
3777
|
-
__privateGet$
|
5663
|
+
__privateGet$3(this, _table),
|
3778
5664
|
record,
|
3779
5665
|
data.columns ?? ["*"]
|
3780
5666
|
)
|
@@ -3784,14 +5670,14 @@ class RestRepository extends Query {
|
|
3784
5670
|
});
|
3785
5671
|
}
|
3786
5672
|
async summarizeTable(query, summaries, summariesFilter) {
|
3787
|
-
return __privateGet$
|
5673
|
+
return __privateGet$3(this, _trace).call(this, "summarize", async () => {
|
3788
5674
|
const data = query.getQueryOptions();
|
3789
5675
|
const result = await summarizeTable({
|
3790
5676
|
pathParams: {
|
3791
5677
|
workspace: "{workspaceId}",
|
3792
5678
|
dbBranchName: "{dbBranch}",
|
3793
5679
|
region: "{region}",
|
3794
|
-
tableName: __privateGet$
|
5680
|
+
tableName: __privateGet$3(this, _table)
|
3795
5681
|
},
|
3796
5682
|
body: {
|
3797
5683
|
filter: cleanFilter(data.filter),
|
@@ -3802,13 +5688,13 @@ class RestRepository extends Query {
|
|
3802
5688
|
summaries,
|
3803
5689
|
summariesFilter
|
3804
5690
|
},
|
3805
|
-
...__privateGet$
|
5691
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
3806
5692
|
});
|
3807
|
-
const schemaTables = await __privateMethod$2(this, _getSchemaTables
|
5693
|
+
const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
|
3808
5694
|
return {
|
3809
5695
|
...result,
|
3810
5696
|
summaries: result.summaries.map(
|
3811
|
-
(summary) => initObject(__privateGet$
|
5697
|
+
(summary) => initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), summary, data.columns ?? [])
|
3812
5698
|
)
|
3813
5699
|
};
|
3814
5700
|
});
|
@@ -3820,7 +5706,7 @@ class RestRepository extends Query {
|
|
3820
5706
|
workspace: "{workspaceId}",
|
3821
5707
|
dbBranchName: "{dbBranch}",
|
3822
5708
|
region: "{region}",
|
3823
|
-
tableName: __privateGet$
|
5709
|
+
tableName: __privateGet$3(this, _table),
|
3824
5710
|
sessionId: options?.sessionId
|
3825
5711
|
},
|
3826
5712
|
body: {
|
@@ -3830,7 +5716,7 @@ class RestRepository extends Query {
|
|
3830
5716
|
search: options?.searchType === "keyword" ? options?.search : void 0,
|
3831
5717
|
vectorSearch: options?.searchType === "vector" ? options?.vectorSearch : void 0
|
3832
5718
|
},
|
3833
|
-
...__privateGet$
|
5719
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
3834
5720
|
};
|
3835
5721
|
if (options?.onMessage) {
|
3836
5722
|
fetchSSERequest({
|
@@ -3851,7 +5737,7 @@ _table = new WeakMap();
|
|
3851
5737
|
_getFetchProps = new WeakMap();
|
3852
5738
|
_db = new WeakMap();
|
3853
5739
|
_cache = new WeakMap();
|
3854
|
-
_schemaTables
|
5740
|
+
_schemaTables = new WeakMap();
|
3855
5741
|
_trace = new WeakMap();
|
3856
5742
|
_insertRecordWithoutId = new WeakSet();
|
3857
5743
|
insertRecordWithoutId_fn = async function(object, columns = ["*"]) {
|
@@ -3861,14 +5747,14 @@ insertRecordWithoutId_fn = async function(object, columns = ["*"]) {
|
|
3861
5747
|
workspace: "{workspaceId}",
|
3862
5748
|
dbBranchName: "{dbBranch}",
|
3863
5749
|
region: "{region}",
|
3864
|
-
tableName: __privateGet$
|
5750
|
+
tableName: __privateGet$3(this, _table)
|
3865
5751
|
},
|
3866
5752
|
queryParams: { columns },
|
3867
5753
|
body: record,
|
3868
|
-
...__privateGet$
|
5754
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
3869
5755
|
});
|
3870
|
-
const schemaTables = await __privateMethod$2(this, _getSchemaTables
|
3871
|
-
return initObject(__privateGet$
|
5756
|
+
const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
|
5757
|
+
return initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), response, columns);
|
3872
5758
|
};
|
3873
5759
|
_insertRecordWithId = new WeakSet();
|
3874
5760
|
insertRecordWithId_fn = async function(recordId, object, columns = ["*"], { createOnly, ifVersion }) {
|
@@ -3880,21 +5766,21 @@ insertRecordWithId_fn = async function(recordId, object, columns = ["*"], { crea
|
|
3880
5766
|
workspace: "{workspaceId}",
|
3881
5767
|
dbBranchName: "{dbBranch}",
|
3882
5768
|
region: "{region}",
|
3883
|
-
tableName: __privateGet$
|
5769
|
+
tableName: __privateGet$3(this, _table),
|
3884
5770
|
recordId
|
3885
5771
|
},
|
3886
5772
|
body: record,
|
3887
5773
|
queryParams: { createOnly, columns, ifVersion },
|
3888
|
-
...__privateGet$
|
5774
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
3889
5775
|
});
|
3890
|
-
const schemaTables = await __privateMethod$2(this, _getSchemaTables
|
3891
|
-
return initObject(__privateGet$
|
5776
|
+
const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
|
5777
|
+
return initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), response, columns);
|
3892
5778
|
};
|
3893
5779
|
_insertRecords = new WeakSet();
|
3894
5780
|
insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
|
3895
5781
|
const operations = await promiseMap(objects, async (object) => {
|
3896
5782
|
const record = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
|
3897
|
-
return { insert: { table: __privateGet$
|
5783
|
+
return { insert: { table: __privateGet$3(this, _table), record, createOnly, ifVersion } };
|
3898
5784
|
});
|
3899
5785
|
const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
|
3900
5786
|
const ids = [];
|
@@ -3906,7 +5792,7 @@ insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
|
|
3906
5792
|
region: "{region}"
|
3907
5793
|
},
|
3908
5794
|
body: { operations: operations2 },
|
3909
|
-
...__privateGet$
|
5795
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
3910
5796
|
});
|
3911
5797
|
for (const result of results) {
|
3912
5798
|
if (result.operation === "insert") {
|
@@ -3929,15 +5815,15 @@ updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
|
|
3929
5815
|
workspace: "{workspaceId}",
|
3930
5816
|
dbBranchName: "{dbBranch}",
|
3931
5817
|
region: "{region}",
|
3932
|
-
tableName: __privateGet$
|
5818
|
+
tableName: __privateGet$3(this, _table),
|
3933
5819
|
recordId
|
3934
5820
|
},
|
3935
5821
|
queryParams: { columns, ifVersion },
|
3936
5822
|
body: record,
|
3937
|
-
...__privateGet$
|
5823
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
3938
5824
|
});
|
3939
|
-
const schemaTables = await __privateMethod$2(this, _getSchemaTables
|
3940
|
-
return initObject(__privateGet$
|
5825
|
+
const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
|
5826
|
+
return initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), response, columns);
|
3941
5827
|
} catch (e) {
|
3942
5828
|
if (isObject(e) && e.status === 404) {
|
3943
5829
|
return null;
|
@@ -3949,7 +5835,7 @@ _updateRecords = new WeakSet();
|
|
3949
5835
|
updateRecords_fn = async function(objects, { ifVersion, upsert }) {
|
3950
5836
|
const operations = await promiseMap(objects, async ({ id, ...object }) => {
|
3951
5837
|
const fields = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
|
3952
|
-
return { update: { table: __privateGet$
|
5838
|
+
return { update: { table: __privateGet$3(this, _table), id, ifVersion, upsert, fields } };
|
3953
5839
|
});
|
3954
5840
|
const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
|
3955
5841
|
const ids = [];
|
@@ -3961,7 +5847,7 @@ updateRecords_fn = async function(objects, { ifVersion, upsert }) {
|
|
3961
5847
|
region: "{region}"
|
3962
5848
|
},
|
3963
5849
|
body: { operations: operations2 },
|
3964
|
-
...__privateGet$
|
5850
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
3965
5851
|
});
|
3966
5852
|
for (const result of results) {
|
3967
5853
|
if (result.operation === "update") {
|
@@ -3982,15 +5868,15 @@ upsertRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
|
|
3982
5868
|
workspace: "{workspaceId}",
|
3983
5869
|
dbBranchName: "{dbBranch}",
|
3984
5870
|
region: "{region}",
|
3985
|
-
tableName: __privateGet$
|
5871
|
+
tableName: __privateGet$3(this, _table),
|
3986
5872
|
recordId
|
3987
5873
|
},
|
3988
5874
|
queryParams: { columns, ifVersion },
|
3989
5875
|
body: object,
|
3990
|
-
...__privateGet$
|
5876
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
3991
5877
|
});
|
3992
|
-
const schemaTables = await __privateMethod$2(this, _getSchemaTables
|
3993
|
-
return initObject(__privateGet$
|
5878
|
+
const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
|
5879
|
+
return initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), response, columns);
|
3994
5880
|
};
|
3995
5881
|
_deleteRecord = new WeakSet();
|
3996
5882
|
deleteRecord_fn = async function(recordId, columns = ["*"]) {
|
@@ -4002,14 +5888,14 @@ deleteRecord_fn = async function(recordId, columns = ["*"]) {
|
|
4002
5888
|
workspace: "{workspaceId}",
|
4003
5889
|
dbBranchName: "{dbBranch}",
|
4004
5890
|
region: "{region}",
|
4005
|
-
tableName: __privateGet$
|
5891
|
+
tableName: __privateGet$3(this, _table),
|
4006
5892
|
recordId
|
4007
5893
|
},
|
4008
5894
|
queryParams: { columns },
|
4009
|
-
...__privateGet$
|
5895
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
4010
5896
|
});
|
4011
|
-
const schemaTables = await __privateMethod$2(this, _getSchemaTables
|
4012
|
-
return initObject(__privateGet$
|
5897
|
+
const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
|
5898
|
+
return initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), response, columns);
|
4013
5899
|
} catch (e) {
|
4014
5900
|
if (isObject(e) && e.status === 404) {
|
4015
5901
|
return null;
|
@@ -4020,7 +5906,7 @@ deleteRecord_fn = async function(recordId, columns = ["*"]) {
|
|
4020
5906
|
_deleteRecords = new WeakSet();
|
4021
5907
|
deleteRecords_fn = async function(recordIds) {
|
4022
5908
|
const chunkedOperations = chunk(
|
4023
|
-
compact(recordIds).map((id) => ({ delete: { table: __privateGet$
|
5909
|
+
compact(recordIds).map((id) => ({ delete: { table: __privateGet$3(this, _table), id } })),
|
4024
5910
|
BULK_OPERATION_MAX_SIZE
|
4025
5911
|
);
|
4026
5912
|
for (const operations of chunkedOperations) {
|
@@ -4031,44 +5917,44 @@ deleteRecords_fn = async function(recordIds) {
|
|
4031
5917
|
region: "{region}"
|
4032
5918
|
},
|
4033
5919
|
body: { operations },
|
4034
|
-
...__privateGet$
|
5920
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
4035
5921
|
});
|
4036
5922
|
}
|
4037
5923
|
};
|
4038
5924
|
_setCacheQuery = new WeakSet();
|
4039
5925
|
setCacheQuery_fn = async function(query, meta, records) {
|
4040
|
-
await __privateGet$
|
5926
|
+
await __privateGet$3(this, _cache)?.set(`query_${__privateGet$3(this, _table)}:${query.key()}`, { date: /* @__PURE__ */ new Date(), meta, records });
|
4041
5927
|
};
|
4042
5928
|
_getCacheQuery = new WeakSet();
|
4043
5929
|
getCacheQuery_fn = async function(query) {
|
4044
|
-
const key = `query_${__privateGet$
|
4045
|
-
const result = await __privateGet$
|
5930
|
+
const key = `query_${__privateGet$3(this, _table)}:${query.key()}`;
|
5931
|
+
const result = await __privateGet$3(this, _cache)?.get(key);
|
4046
5932
|
if (!result)
|
4047
5933
|
return null;
|
4048
|
-
const defaultTTL = __privateGet$
|
5934
|
+
const defaultTTL = __privateGet$3(this, _cache)?.defaultQueryTTL ?? -1;
|
4049
5935
|
const { cache: ttl = defaultTTL } = query.getQueryOptions();
|
4050
5936
|
if (ttl < 0)
|
4051
5937
|
return null;
|
4052
5938
|
const hasExpired = result.date.getTime() + ttl < Date.now();
|
4053
5939
|
return hasExpired ? null : result;
|
4054
5940
|
};
|
4055
|
-
_getSchemaTables
|
4056
|
-
getSchemaTables_fn
|
4057
|
-
if (__privateGet$
|
4058
|
-
return __privateGet$
|
5941
|
+
_getSchemaTables = new WeakSet();
|
5942
|
+
getSchemaTables_fn = async function() {
|
5943
|
+
if (__privateGet$3(this, _schemaTables))
|
5944
|
+
return __privateGet$3(this, _schemaTables);
|
4059
5945
|
const { schema } = await getBranchDetails({
|
4060
5946
|
pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
|
4061
|
-
...__privateGet$
|
5947
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
4062
5948
|
});
|
4063
|
-
__privateSet$
|
5949
|
+
__privateSet$2(this, _schemaTables, schema.tables);
|
4064
5950
|
return schema.tables;
|
4065
5951
|
};
|
4066
5952
|
_transformObjectToApi = new WeakSet();
|
4067
5953
|
transformObjectToApi_fn = async function(object) {
|
4068
|
-
const schemaTables = await __privateMethod$2(this, _getSchemaTables
|
4069
|
-
const schema = schemaTables.find((table) => table.name === __privateGet$
|
5954
|
+
const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
|
5955
|
+
const schema = schemaTables.find((table) => table.name === __privateGet$3(this, _table));
|
4070
5956
|
if (!schema)
|
4071
|
-
throw new Error(`Table ${__privateGet$
|
5957
|
+
throw new Error(`Table ${__privateGet$3(this, _table)} not found in schema`);
|
4072
5958
|
const result = {};
|
4073
5959
|
for (const [key, value] of Object.entries(object)) {
|
4074
5960
|
if (key === "xata")
|
@@ -4224,7 +6110,7 @@ var __accessCheck$3 = (obj, member, msg) => {
|
|
4224
6110
|
if (!member.has(obj))
|
4225
6111
|
throw TypeError("Cannot " + msg);
|
4226
6112
|
};
|
4227
|
-
var __privateGet$
|
6113
|
+
var __privateGet$2 = (obj, member, getter) => {
|
4228
6114
|
__accessCheck$3(obj, member, "read from private field");
|
4229
6115
|
return getter ? getter.call(obj) : member.get(obj);
|
4230
6116
|
};
|
@@ -4233,7 +6119,7 @@ var __privateAdd$3 = (obj, member, value) => {
|
|
4233
6119
|
throw TypeError("Cannot add the same private member more than once");
|
4234
6120
|
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
4235
6121
|
};
|
4236
|
-
var __privateSet$
|
6122
|
+
var __privateSet$1 = (obj, member, value, setter) => {
|
4237
6123
|
__accessCheck$3(obj, member, "write to private field");
|
4238
6124
|
setter ? setter.call(obj, value) : member.set(obj, value);
|
4239
6125
|
return value;
|
@@ -4242,29 +6128,29 @@ var _map;
|
|
4242
6128
|
class SimpleCache {
|
4243
6129
|
constructor(options = {}) {
|
4244
6130
|
__privateAdd$3(this, _map, void 0);
|
4245
|
-
__privateSet$
|
6131
|
+
__privateSet$1(this, _map, /* @__PURE__ */ new Map());
|
4246
6132
|
this.capacity = options.max ?? 500;
|
4247
6133
|
this.defaultQueryTTL = options.defaultQueryTTL ?? 60 * 1e3;
|
4248
6134
|
}
|
4249
6135
|
async getAll() {
|
4250
|
-
return Object.fromEntries(__privateGet$
|
6136
|
+
return Object.fromEntries(__privateGet$2(this, _map));
|
4251
6137
|
}
|
4252
6138
|
async get(key) {
|
4253
|
-
return __privateGet$
|
6139
|
+
return __privateGet$2(this, _map).get(key) ?? null;
|
4254
6140
|
}
|
4255
6141
|
async set(key, value) {
|
4256
6142
|
await this.delete(key);
|
4257
|
-
__privateGet$
|
4258
|
-
if (__privateGet$
|
4259
|
-
const leastRecentlyUsed = __privateGet$
|
6143
|
+
__privateGet$2(this, _map).set(key, value);
|
6144
|
+
if (__privateGet$2(this, _map).size > this.capacity) {
|
6145
|
+
const leastRecentlyUsed = __privateGet$2(this, _map).keys().next().value;
|
4260
6146
|
await this.delete(leastRecentlyUsed);
|
4261
6147
|
}
|
4262
6148
|
}
|
4263
6149
|
async delete(key) {
|
4264
|
-
__privateGet$
|
6150
|
+
__privateGet$2(this, _map).delete(key);
|
4265
6151
|
}
|
4266
6152
|
async clear() {
|
4267
|
-
return __privateGet$
|
6153
|
+
return __privateGet$2(this, _map).clear();
|
4268
6154
|
}
|
4269
6155
|
}
|
4270
6156
|
_map = new WeakMap();
|
@@ -4301,7 +6187,7 @@ var __accessCheck$2 = (obj, member, msg) => {
|
|
4301
6187
|
if (!member.has(obj))
|
4302
6188
|
throw TypeError("Cannot " + msg);
|
4303
6189
|
};
|
4304
|
-
var __privateGet$
|
6190
|
+
var __privateGet$1 = (obj, member, getter) => {
|
4305
6191
|
__accessCheck$2(obj, member, "read from private field");
|
4306
6192
|
return getter ? getter.call(obj) : member.get(obj);
|
4307
6193
|
};
|
@@ -4310,18 +6196,11 @@ var __privateAdd$2 = (obj, member, value) => {
|
|
4310
6196
|
throw TypeError("Cannot add the same private member more than once");
|
4311
6197
|
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
4312
6198
|
};
|
4313
|
-
var
|
4314
|
-
__accessCheck$2(obj, member, "write to private field");
|
4315
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
4316
|
-
return value;
|
4317
|
-
};
|
4318
|
-
var _tables, _schemaTables$1;
|
6199
|
+
var _tables;
|
4319
6200
|
class SchemaPlugin extends XataPlugin {
|
4320
|
-
constructor(
|
6201
|
+
constructor() {
|
4321
6202
|
super();
|
4322
6203
|
__privateAdd$2(this, _tables, {});
|
4323
|
-
__privateAdd$2(this, _schemaTables$1, void 0);
|
4324
|
-
__privateSet$2(this, _schemaTables$1, schemaTables);
|
4325
6204
|
}
|
4326
6205
|
build(pluginOptions) {
|
4327
6206
|
const db = new Proxy(
|
@@ -4330,22 +6209,21 @@ class SchemaPlugin extends XataPlugin {
|
|
4330
6209
|
get: (_target, table) => {
|
4331
6210
|
if (!isString(table))
|
4332
6211
|
throw new Error("Invalid table name");
|
4333
|
-
if (__privateGet$
|
4334
|
-
__privateGet$
|
6212
|
+
if (__privateGet$1(this, _tables)[table] === void 0) {
|
6213
|
+
__privateGet$1(this, _tables)[table] = new RestRepository({ db, pluginOptions, table, schemaTables: pluginOptions.tables });
|
4335
6214
|
}
|
4336
|
-
return __privateGet$
|
6215
|
+
return __privateGet$1(this, _tables)[table];
|
4337
6216
|
}
|
4338
6217
|
}
|
4339
6218
|
);
|
4340
|
-
const tableNames =
|
6219
|
+
const tableNames = pluginOptions.tables?.map(({ name }) => name) ?? [];
|
4341
6220
|
for (const table of tableNames) {
|
4342
|
-
db[table] = new RestRepository({ db, pluginOptions, table, schemaTables:
|
6221
|
+
db[table] = new RestRepository({ db, pluginOptions, table, schemaTables: pluginOptions.tables });
|
4343
6222
|
}
|
4344
6223
|
return db;
|
4345
6224
|
}
|
4346
6225
|
}
|
4347
6226
|
_tables = new WeakMap();
|
4348
|
-
_schemaTables$1 = new WeakMap();
|
4349
6227
|
|
4350
6228
|
class FilesPlugin extends XataPlugin {
|
4351
6229
|
build(pluginOptions) {
|
@@ -4425,54 +6303,40 @@ var __accessCheck$1 = (obj, member, msg) => {
|
|
4425
6303
|
if (!member.has(obj))
|
4426
6304
|
throw TypeError("Cannot " + msg);
|
4427
6305
|
};
|
4428
|
-
var __privateGet$1 = (obj, member, getter) => {
|
4429
|
-
__accessCheck$1(obj, member, "read from private field");
|
4430
|
-
return getter ? getter.call(obj) : member.get(obj);
|
4431
|
-
};
|
4432
6306
|
var __privateAdd$1 = (obj, member, value) => {
|
4433
6307
|
if (member.has(obj))
|
4434
6308
|
throw TypeError("Cannot add the same private member more than once");
|
4435
6309
|
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
4436
6310
|
};
|
4437
|
-
var __privateSet$1 = (obj, member, value, setter) => {
|
4438
|
-
__accessCheck$1(obj, member, "write to private field");
|
4439
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
4440
|
-
return value;
|
4441
|
-
};
|
4442
6311
|
var __privateMethod$1 = (obj, member, method) => {
|
4443
6312
|
__accessCheck$1(obj, member, "access private method");
|
4444
6313
|
return method;
|
4445
6314
|
};
|
4446
|
-
var
|
6315
|
+
var _search, search_fn;
|
4447
6316
|
class SearchPlugin extends XataPlugin {
|
4448
|
-
constructor(db
|
6317
|
+
constructor(db) {
|
4449
6318
|
super();
|
4450
6319
|
this.db = db;
|
4451
6320
|
__privateAdd$1(this, _search);
|
4452
|
-
__privateAdd$1(this, _getSchemaTables);
|
4453
|
-
__privateAdd$1(this, _schemaTables, void 0);
|
4454
|
-
__privateSet$1(this, _schemaTables, schemaTables);
|
4455
6321
|
}
|
4456
6322
|
build(pluginOptions) {
|
4457
6323
|
return {
|
4458
6324
|
all: async (query, options = {}) => {
|
4459
6325
|
const { records, totalCount } = await __privateMethod$1(this, _search, search_fn).call(this, query, options, pluginOptions);
|
4460
|
-
const schemaTables = await __privateMethod$1(this, _getSchemaTables, getSchemaTables_fn).call(this, pluginOptions);
|
4461
6326
|
return {
|
4462
6327
|
totalCount,
|
4463
6328
|
records: records.map((record) => {
|
4464
6329
|
const { table = "orphan" } = record.xata;
|
4465
|
-
return { table, record: initObject(this.db,
|
6330
|
+
return { table, record: initObject(this.db, pluginOptions.tables, table, record, ["*"]) };
|
4466
6331
|
})
|
4467
6332
|
};
|
4468
6333
|
},
|
4469
6334
|
byTable: async (query, options = {}) => {
|
4470
6335
|
const { records: rawRecords, totalCount } = await __privateMethod$1(this, _search, search_fn).call(this, query, options, pluginOptions);
|
4471
|
-
const schemaTables = await __privateMethod$1(this, _getSchemaTables, getSchemaTables_fn).call(this, pluginOptions);
|
4472
6336
|
const records = rawRecords.reduce((acc, record) => {
|
4473
6337
|
const { table = "orphan" } = record.xata;
|
4474
6338
|
const items = acc[table] ?? [];
|
4475
|
-
const item = initObject(this.db,
|
6339
|
+
const item = initObject(this.db, pluginOptions.tables, table, record, ["*"]);
|
4476
6340
|
return { ...acc, [table]: [...items, item] };
|
4477
6341
|
}, {});
|
4478
6342
|
return { totalCount, records };
|
@@ -4480,29 +6344,17 @@ class SearchPlugin extends XataPlugin {
|
|
4480
6344
|
};
|
4481
6345
|
}
|
4482
6346
|
}
|
4483
|
-
_schemaTables = new WeakMap();
|
4484
6347
|
_search = new WeakSet();
|
4485
6348
|
search_fn = async function(query, options, pluginOptions) {
|
4486
6349
|
const { tables, fuzziness, highlight, prefix, page } = options ?? {};
|
4487
6350
|
const { records, totalCount } = await searchBranch({
|
4488
6351
|
pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
|
4489
|
-
// @ts-
|
6352
|
+
// @ts-expect-error Filter properties do not match inferred type
|
4490
6353
|
body: { tables, query, fuzziness, prefix, highlight, page },
|
4491
6354
|
...pluginOptions
|
4492
6355
|
});
|
4493
6356
|
return { records, totalCount };
|
4494
6357
|
};
|
4495
|
-
_getSchemaTables = new WeakSet();
|
4496
|
-
getSchemaTables_fn = async function(pluginOptions) {
|
4497
|
-
if (__privateGet$1(this, _schemaTables))
|
4498
|
-
return __privateGet$1(this, _schemaTables);
|
4499
|
-
const { schema } = await getBranchDetails({
|
4500
|
-
pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
|
4501
|
-
...pluginOptions
|
4502
|
-
});
|
4503
|
-
__privateSet$1(this, _schemaTables, schema.tables);
|
4504
|
-
return schema.tables;
|
4505
|
-
};
|
4506
6358
|
|
4507
6359
|
function escapeElement(elementRepresentation) {
|
4508
6360
|
const escaped = elementRepresentation.replace(/\\/g, "\\\\").replace(/"/g, '\\"');
|
@@ -4556,25 +6408,68 @@ function prepareParams(param1, param2) {
|
|
4556
6408
|
return { statement, params: param2?.map((value) => prepareValue(value)) };
|
4557
6409
|
}
|
4558
6410
|
if (isObject(param1)) {
|
4559
|
-
const { statement, params, consistency } = param1;
|
4560
|
-
return { statement, params: params?.map((value) => prepareValue(value)), consistency };
|
6411
|
+
const { statement, params, consistency, responseType } = param1;
|
6412
|
+
return { statement, params: params?.map((value) => prepareValue(value)), consistency, responseType };
|
4561
6413
|
}
|
4562
6414
|
throw new Error("Invalid query");
|
4563
6415
|
}
|
4564
6416
|
|
4565
6417
|
class SQLPlugin extends XataPlugin {
|
4566
6418
|
build(pluginOptions) {
|
4567
|
-
|
4568
|
-
|
4569
|
-
|
6419
|
+
const sqlFunction = async (query, ...parameters) => {
|
6420
|
+
if (!isParamsObject(query) && (!isTemplateStringsArray(query) || !Array.isArray(parameters))) {
|
6421
|
+
throw new Error("Invalid usage of `xata.sql`. Please use it as a tagged template or with an object.");
|
6422
|
+
}
|
6423
|
+
const { statement, params, consistency, responseType } = prepareParams(query, parameters);
|
6424
|
+
const {
|
6425
|
+
records,
|
6426
|
+
rows,
|
6427
|
+
warning,
|
6428
|
+
columns = []
|
6429
|
+
} = await sqlQuery({
|
4570
6430
|
pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
|
4571
|
-
body: { statement, params, consistency },
|
6431
|
+
body: { statement, params, consistency, responseType },
|
4572
6432
|
...pluginOptions
|
4573
6433
|
});
|
4574
|
-
return { records, warning };
|
6434
|
+
return { records, rows, warning, columns };
|
4575
6435
|
};
|
6436
|
+
sqlFunction.connectionString = buildConnectionString(pluginOptions);
|
6437
|
+
return sqlFunction;
|
6438
|
+
}
|
6439
|
+
}
|
6440
|
+
function isTemplateStringsArray(strings) {
|
6441
|
+
return Array.isArray(strings) && "raw" in strings && Array.isArray(strings.raw);
|
6442
|
+
}
|
6443
|
+
function isParamsObject(params) {
|
6444
|
+
return isObject(params) && "statement" in params;
|
6445
|
+
}
|
6446
|
+
function buildDomain(host, region) {
|
6447
|
+
switch (host) {
|
6448
|
+
case "production":
|
6449
|
+
return `${region}.sql.xata.sh`;
|
6450
|
+
case "staging":
|
6451
|
+
return `${region}.sql.staging-xata.dev`;
|
6452
|
+
case "dev":
|
6453
|
+
return `${region}.sql.dev-xata.dev`;
|
6454
|
+
case "local":
|
6455
|
+
return "localhost:7654";
|
6456
|
+
default:
|
6457
|
+
throw new Error("Invalid host provider");
|
4576
6458
|
}
|
4577
6459
|
}
|
6460
|
+
function buildConnectionString({ apiKey, workspacesApiUrl, branch }) {
|
6461
|
+
const url = isString(workspacesApiUrl) ? workspacesApiUrl : workspacesApiUrl("", {});
|
6462
|
+
const parts = parseWorkspacesUrlParts(url);
|
6463
|
+
if (!parts)
|
6464
|
+
throw new Error("Invalid workspaces URL");
|
6465
|
+
const { workspace: workspaceSlug, region, database, host } = parts;
|
6466
|
+
const domain = buildDomain(host, region);
|
6467
|
+
const workspace = workspaceSlug.split("-").pop();
|
6468
|
+
if (!workspace || !region || !database || !apiKey || !branch) {
|
6469
|
+
throw new Error("Unable to build xata connection string");
|
6470
|
+
}
|
6471
|
+
return `postgresql://${workspace}:${apiKey}@${domain}/${database}:${branch}?sslmode=require`;
|
6472
|
+
}
|
4578
6473
|
|
4579
6474
|
class TransactionPlugin extends XataPlugin {
|
4580
6475
|
build(pluginOptions) {
|
@@ -4616,7 +6511,7 @@ var __privateMethod = (obj, member, method) => {
|
|
4616
6511
|
const buildClient = (plugins) => {
|
4617
6512
|
var _options, _parseOptions, parseOptions_fn, _getFetchProps, getFetchProps_fn, _a;
|
4618
6513
|
return _a = class {
|
4619
|
-
constructor(options = {},
|
6514
|
+
constructor(options = {}, tables) {
|
4620
6515
|
__privateAdd(this, _parseOptions);
|
4621
6516
|
__privateAdd(this, _getFetchProps);
|
4622
6517
|
__privateAdd(this, _options, void 0);
|
@@ -4625,13 +6520,16 @@ const buildClient = (plugins) => {
|
|
4625
6520
|
const pluginOptions = {
|
4626
6521
|
...__privateMethod(this, _getFetchProps, getFetchProps_fn).call(this, safeOptions),
|
4627
6522
|
cache: safeOptions.cache,
|
4628
|
-
host: safeOptions.host
|
6523
|
+
host: safeOptions.host,
|
6524
|
+
tables,
|
6525
|
+
branch: safeOptions.branch
|
4629
6526
|
};
|
4630
|
-
const db = new SchemaPlugin(
|
4631
|
-
const search = new SearchPlugin(db
|
6527
|
+
const db = new SchemaPlugin().build(pluginOptions);
|
6528
|
+
const search = new SearchPlugin(db).build(pluginOptions);
|
4632
6529
|
const transactions = new TransactionPlugin().build(pluginOptions);
|
4633
6530
|
const sql = new SQLPlugin().build(pluginOptions);
|
4634
6531
|
const files = new FilesPlugin().build(pluginOptions);
|
6532
|
+
this.schema = { tables };
|
4635
6533
|
this.db = db;
|
4636
6534
|
this.search = search;
|
4637
6535
|
this.transactions = transactions;
|
@@ -4653,7 +6551,7 @@ const buildClient = (plugins) => {
|
|
4653
6551
|
const isBrowser = typeof window !== "undefined" && typeof Deno === "undefined";
|
4654
6552
|
if (isBrowser && !enableBrowser) {
|
4655
6553
|
throw new Error(
|
4656
|
-
"You are trying to use Xata from the browser, which is potentially a non-secure environment.
|
6554
|
+
"You are trying to use Xata from the browser, which is potentially a non-secure environment. How to fix: https://xata.io/docs/messages/api-key-browser-error"
|
4657
6555
|
);
|
4658
6556
|
}
|
4659
6557
|
const fetch = getFetchImplementation(options?.fetch);
|
@@ -4808,6 +6706,7 @@ class XataError extends Error {
|
|
4808
6706
|
}
|
4809
6707
|
|
4810
6708
|
exports.BaseClient = BaseClient;
|
6709
|
+
exports.Buffer = Buffer;
|
4811
6710
|
exports.FetcherError = FetcherError;
|
4812
6711
|
exports.FilesPlugin = FilesPlugin;
|
4813
6712
|
exports.Operations = operationsByTag;
|
@@ -4816,6 +6715,7 @@ exports.PAGINATION_DEFAULT_SIZE = PAGINATION_DEFAULT_SIZE;
|
|
4816
6715
|
exports.PAGINATION_MAX_OFFSET = PAGINATION_MAX_OFFSET;
|
4817
6716
|
exports.PAGINATION_MAX_SIZE = PAGINATION_MAX_SIZE;
|
4818
6717
|
exports.Page = Page;
|
6718
|
+
exports.PageRecordArray = PageRecordArray;
|
4819
6719
|
exports.Query = Query;
|
4820
6720
|
exports.RecordArray = RecordArray;
|
4821
6721
|
exports.RecordColumnTypes = RecordColumnTypes;
|
@@ -4833,6 +6733,8 @@ exports.XataError = XataError;
|
|
4833
6733
|
exports.XataFile = XataFile;
|
4834
6734
|
exports.XataPlugin = XataPlugin;
|
4835
6735
|
exports.acceptWorkspaceMemberInvite = acceptWorkspaceMemberInvite;
|
6736
|
+
exports.adaptAllTables = adaptAllTables;
|
6737
|
+
exports.adaptTable = adaptTable;
|
4836
6738
|
exports.addGitBranchesEntry = addGitBranchesEntry;
|
4837
6739
|
exports.addTableColumn = addTableColumn;
|
4838
6740
|
exports.aggregateTable = aggregateTable;
|
@@ -4886,6 +6788,7 @@ exports.getBranchDetails = getBranchDetails;
|
|
4886
6788
|
exports.getBranchList = getBranchList;
|
4887
6789
|
exports.getBranchMetadata = getBranchMetadata;
|
4888
6790
|
exports.getBranchMigrationHistory = getBranchMigrationHistory;
|
6791
|
+
exports.getBranchMigrationJobStatus = getBranchMigrationJobStatus;
|
4889
6792
|
exports.getBranchMigrationPlan = getBranchMigrationPlan;
|
4890
6793
|
exports.getBranchSchemaHistory = getBranchSchemaHistory;
|
4891
6794
|
exports.getBranchStats = getBranchStats;
|
@@ -4894,11 +6797,14 @@ exports.getColumn = getColumn;
|
|
4894
6797
|
exports.getDatabaseGithubSettings = getDatabaseGithubSettings;
|
4895
6798
|
exports.getDatabaseList = getDatabaseList;
|
4896
6799
|
exports.getDatabaseMetadata = getDatabaseMetadata;
|
6800
|
+
exports.getDatabaseSettings = getDatabaseSettings;
|
4897
6801
|
exports.getDatabaseURL = getDatabaseURL;
|
4898
6802
|
exports.getFile = getFile;
|
4899
6803
|
exports.getFileItem = getFileItem;
|
4900
6804
|
exports.getGitBranchesMapping = getGitBranchesMapping;
|
4901
6805
|
exports.getHostUrl = getHostUrl;
|
6806
|
+
exports.getMigrationHistory = getMigrationHistory;
|
6807
|
+
exports.getMigrationJobStatus = getMigrationJobStatus;
|
4902
6808
|
exports.getMigrationRequest = getMigrationRequest;
|
4903
6809
|
exports.getMigrationRequestIsMerged = getMigrationRequestIsMerged;
|
4904
6810
|
exports.getPreviewBranch = getPreviewBranch;
|
@@ -4912,6 +6818,7 @@ exports.getUserOAuthAccessTokens = getUserOAuthAccessTokens;
|
|
4912
6818
|
exports.getUserOAuthClients = getUserOAuthClients;
|
4913
6819
|
exports.getWorkspace = getWorkspace;
|
4914
6820
|
exports.getWorkspaceMembersList = getWorkspaceMembersList;
|
6821
|
+
exports.getWorkspaceSettings = getWorkspaceSettings;
|
4915
6822
|
exports.getWorkspacesList = getWorkspacesList;
|
4916
6823
|
exports.grantAuthorizationCode = grantAuthorizationCode;
|
4917
6824
|
exports.greaterEquals = greaterEquals;
|
@@ -4952,9 +6859,6 @@ exports.operationsByTag = operationsByTag;
|
|
4952
6859
|
exports.parseProviderString = parseProviderString;
|
4953
6860
|
exports.parseWorkspacesUrlParts = parseWorkspacesUrlParts;
|
4954
6861
|
exports.pattern = pattern;
|
4955
|
-
exports.pgRollJobStatus = pgRollJobStatus;
|
4956
|
-
exports.pgRollMigrationHistory = pgRollMigrationHistory;
|
4957
|
-
exports.pgRollStatus = pgRollStatus;
|
4958
6862
|
exports.previewBranchSchemaEdit = previewBranchSchemaEdit;
|
4959
6863
|
exports.pushBranchMigrations = pushBranchMigrations;
|
4960
6864
|
exports.putFile = putFile;
|
@@ -4980,6 +6884,7 @@ exports.updateCluster = updateCluster;
|
|
4980
6884
|
exports.updateColumn = updateColumn;
|
4981
6885
|
exports.updateDatabaseGithubSettings = updateDatabaseGithubSettings;
|
4982
6886
|
exports.updateDatabaseMetadata = updateDatabaseMetadata;
|
6887
|
+
exports.updateDatabaseSettings = updateDatabaseSettings;
|
4983
6888
|
exports.updateMigrationRequest = updateMigrationRequest;
|
4984
6889
|
exports.updateOAuthAccessToken = updateOAuthAccessToken;
|
4985
6890
|
exports.updateRecordWithID = updateRecordWithID;
|
@@ -4988,6 +6893,7 @@ exports.updateUser = updateUser;
|
|
4988
6893
|
exports.updateWorkspace = updateWorkspace;
|
4989
6894
|
exports.updateWorkspaceMemberInvite = updateWorkspaceMemberInvite;
|
4990
6895
|
exports.updateWorkspaceMemberRole = updateWorkspaceMemberRole;
|
6896
|
+
exports.updateWorkspaceSettings = updateWorkspaceSettings;
|
4991
6897
|
exports.upsertRecordWithID = upsertRecordWithID;
|
4992
6898
|
exports.vectorSearchTable = vectorSearchTable;
|
4993
6899
|
//# sourceMappingURL=index.cjs.map
|