@xata.io/client 0.0.0-alpha.vf7ac0d1 → 0.0.0-alpha.vf7b3447057053443041e94106d7efe270aaea321
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-add-version.log +1 -1
- package/.turbo/turbo-build.log +4 -9
- package/CHANGELOG.md +173 -1
- package/dist/index.cjs +2718 -637
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.ts +5801 -4165
- package/dist/index.mjs +2692 -637
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.cjs
CHANGED
@@ -20,9 +20,1818 @@ const TraceAttributes = {
|
|
20
20
|
HTTP_METHOD: "http.method",
|
21
21
|
HTTP_URL: "http.url",
|
22
22
|
HTTP_ROUTE: "http.route",
|
23
|
-
HTTP_TARGET: "http.target"
|
23
|
+
HTTP_TARGET: "http.target",
|
24
|
+
CLOUDFLARE_RAY_ID: "cf.ray"
|
24
25
|
};
|
25
26
|
|
27
|
+
const lookup = [];
|
28
|
+
const revLookup = [];
|
29
|
+
const code = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
|
30
|
+
for (let i = 0, len = code.length; i < len; ++i) {
|
31
|
+
lookup[i] = code[i];
|
32
|
+
revLookup[code.charCodeAt(i)] = i;
|
33
|
+
}
|
34
|
+
revLookup["-".charCodeAt(0)] = 62;
|
35
|
+
revLookup["_".charCodeAt(0)] = 63;
|
36
|
+
function getLens(b64) {
|
37
|
+
const len = b64.length;
|
38
|
+
if (len % 4 > 0) {
|
39
|
+
throw new Error("Invalid string. Length must be a multiple of 4");
|
40
|
+
}
|
41
|
+
let validLen = b64.indexOf("=");
|
42
|
+
if (validLen === -1)
|
43
|
+
validLen = len;
|
44
|
+
const placeHoldersLen = validLen === len ? 0 : 4 - validLen % 4;
|
45
|
+
return [validLen, placeHoldersLen];
|
46
|
+
}
|
47
|
+
function _byteLength(_b64, validLen, placeHoldersLen) {
|
48
|
+
return (validLen + placeHoldersLen) * 3 / 4 - placeHoldersLen;
|
49
|
+
}
|
50
|
+
function toByteArray(b64) {
|
51
|
+
let tmp;
|
52
|
+
const lens = getLens(b64);
|
53
|
+
const validLen = lens[0];
|
54
|
+
const placeHoldersLen = lens[1];
|
55
|
+
const arr = new Uint8Array(_byteLength(b64, validLen, placeHoldersLen));
|
56
|
+
let curByte = 0;
|
57
|
+
const len = placeHoldersLen > 0 ? validLen - 4 : validLen;
|
58
|
+
let i;
|
59
|
+
for (i = 0; i < len; i += 4) {
|
60
|
+
tmp = revLookup[b64.charCodeAt(i)] << 18 | revLookup[b64.charCodeAt(i + 1)] << 12 | revLookup[b64.charCodeAt(i + 2)] << 6 | revLookup[b64.charCodeAt(i + 3)];
|
61
|
+
arr[curByte++] = tmp >> 16 & 255;
|
62
|
+
arr[curByte++] = tmp >> 8 & 255;
|
63
|
+
arr[curByte++] = tmp & 255;
|
64
|
+
}
|
65
|
+
if (placeHoldersLen === 2) {
|
66
|
+
tmp = revLookup[b64.charCodeAt(i)] << 2 | revLookup[b64.charCodeAt(i + 1)] >> 4;
|
67
|
+
arr[curByte++] = tmp & 255;
|
68
|
+
}
|
69
|
+
if (placeHoldersLen === 1) {
|
70
|
+
tmp = revLookup[b64.charCodeAt(i)] << 10 | revLookup[b64.charCodeAt(i + 1)] << 4 | revLookup[b64.charCodeAt(i + 2)] >> 2;
|
71
|
+
arr[curByte++] = tmp >> 8 & 255;
|
72
|
+
arr[curByte++] = tmp & 255;
|
73
|
+
}
|
74
|
+
return arr;
|
75
|
+
}
|
76
|
+
function tripletToBase64(num) {
|
77
|
+
return lookup[num >> 18 & 63] + lookup[num >> 12 & 63] + lookup[num >> 6 & 63] + lookup[num & 63];
|
78
|
+
}
|
79
|
+
function encodeChunk(uint8, start, end) {
|
80
|
+
let tmp;
|
81
|
+
const output = [];
|
82
|
+
for (let i = start; i < end; i += 3) {
|
83
|
+
tmp = (uint8[i] << 16 & 16711680) + (uint8[i + 1] << 8 & 65280) + (uint8[i + 2] & 255);
|
84
|
+
output.push(tripletToBase64(tmp));
|
85
|
+
}
|
86
|
+
return output.join("");
|
87
|
+
}
|
88
|
+
function fromByteArray(uint8) {
|
89
|
+
let tmp;
|
90
|
+
const len = uint8.length;
|
91
|
+
const extraBytes = len % 3;
|
92
|
+
const parts = [];
|
93
|
+
const maxChunkLength = 16383;
|
94
|
+
for (let i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) {
|
95
|
+
parts.push(encodeChunk(uint8, i, i + maxChunkLength > len2 ? len2 : i + maxChunkLength));
|
96
|
+
}
|
97
|
+
if (extraBytes === 1) {
|
98
|
+
tmp = uint8[len - 1];
|
99
|
+
parts.push(lookup[tmp >> 2] + lookup[tmp << 4 & 63] + "==");
|
100
|
+
} else if (extraBytes === 2) {
|
101
|
+
tmp = (uint8[len - 2] << 8) + uint8[len - 1];
|
102
|
+
parts.push(lookup[tmp >> 10] + lookup[tmp >> 4 & 63] + lookup[tmp << 2 & 63] + "=");
|
103
|
+
}
|
104
|
+
return parts.join("");
|
105
|
+
}
|
106
|
+
|
107
|
+
const K_MAX_LENGTH = 2147483647;
|
108
|
+
const MAX_ARGUMENTS_LENGTH = 4096;
|
109
|
+
class Buffer extends Uint8Array {
|
110
|
+
/**
|
111
|
+
* Constructs a new `Buffer` instance.
|
112
|
+
*
|
113
|
+
* @param value
|
114
|
+
* @param encodingOrOffset
|
115
|
+
* @param length
|
116
|
+
*/
|
117
|
+
constructor(value, encodingOrOffset, length) {
|
118
|
+
if (typeof value === "number") {
|
119
|
+
if (typeof encodingOrOffset === "string") {
|
120
|
+
throw new TypeError("The first argument must be of type string, received type number");
|
121
|
+
}
|
122
|
+
if (value < 0) {
|
123
|
+
throw new RangeError("The buffer size cannot be negative");
|
124
|
+
}
|
125
|
+
super(value < 0 ? 0 : Buffer._checked(value) | 0);
|
126
|
+
} else if (typeof value === "string") {
|
127
|
+
if (typeof encodingOrOffset !== "string") {
|
128
|
+
encodingOrOffset = "utf8";
|
129
|
+
}
|
130
|
+
if (!Buffer.isEncoding(encodingOrOffset)) {
|
131
|
+
throw new TypeError("Unknown encoding: " + encodingOrOffset);
|
132
|
+
}
|
133
|
+
const length2 = Buffer.byteLength(value, encodingOrOffset) | 0;
|
134
|
+
super(length2);
|
135
|
+
const written = this.write(value, 0, this.length, encodingOrOffset);
|
136
|
+
if (written !== length2) {
|
137
|
+
throw new TypeError(
|
138
|
+
"Number of bytes written did not match expected length (wrote " + written + ", expected " + length2 + ")"
|
139
|
+
);
|
140
|
+
}
|
141
|
+
} else if (ArrayBuffer.isView(value)) {
|
142
|
+
if (Buffer._isInstance(value, Uint8Array)) {
|
143
|
+
const copy = new Uint8Array(value);
|
144
|
+
const array = copy.buffer;
|
145
|
+
const byteOffset = copy.byteOffset;
|
146
|
+
const length2 = copy.byteLength;
|
147
|
+
if (byteOffset < 0 || array.byteLength < byteOffset) {
|
148
|
+
throw new RangeError("offset is outside of buffer bounds");
|
149
|
+
}
|
150
|
+
if (array.byteLength < byteOffset + (length2 || 0)) {
|
151
|
+
throw new RangeError("length is outside of buffer bounds");
|
152
|
+
}
|
153
|
+
super(new Uint8Array(array, byteOffset, length2));
|
154
|
+
} else {
|
155
|
+
const array = value;
|
156
|
+
const length2 = array.length < 0 ? 0 : Buffer._checked(array.length) | 0;
|
157
|
+
super(new Uint8Array(length2));
|
158
|
+
for (let i = 0; i < length2; i++) {
|
159
|
+
this[i] = array[i] & 255;
|
160
|
+
}
|
161
|
+
}
|
162
|
+
} else if (value == null) {
|
163
|
+
throw new TypeError(
|
164
|
+
"The first argument must be one of type string, Buffer, ArrayBuffer, Array, or Array-like Object. Received type " + typeof value
|
165
|
+
);
|
166
|
+
} else if (Buffer._isInstance(value, ArrayBuffer) || value && Buffer._isInstance(value.buffer, ArrayBuffer)) {
|
167
|
+
const array = value;
|
168
|
+
const byteOffset = encodingOrOffset;
|
169
|
+
if (byteOffset < 0 || array.byteLength < byteOffset) {
|
170
|
+
throw new RangeError("offset is outside of buffer bounds");
|
171
|
+
}
|
172
|
+
if (array.byteLength < byteOffset + (length || 0)) {
|
173
|
+
throw new RangeError("length is outside of buffer bounds");
|
174
|
+
}
|
175
|
+
super(new Uint8Array(array, byteOffset, length));
|
176
|
+
} else if (Array.isArray(value)) {
|
177
|
+
const array = value;
|
178
|
+
const length2 = array.length < 0 ? 0 : Buffer._checked(array.length) | 0;
|
179
|
+
super(new Uint8Array(length2));
|
180
|
+
for (let i = 0; i < length2; i++) {
|
181
|
+
this[i] = array[i] & 255;
|
182
|
+
}
|
183
|
+
} else {
|
184
|
+
throw new TypeError("Unable to determine the correct way to allocate buffer for type " + typeof value);
|
185
|
+
}
|
186
|
+
}
|
187
|
+
/**
|
188
|
+
* Return JSON representation of the buffer.
|
189
|
+
*/
|
190
|
+
toJSON() {
|
191
|
+
return {
|
192
|
+
type: "Buffer",
|
193
|
+
data: Array.prototype.slice.call(this)
|
194
|
+
};
|
195
|
+
}
|
196
|
+
/**
|
197
|
+
* Writes `string` to the buffer at `offset` according to the character encoding in `encoding`. The `length`
|
198
|
+
* parameter is the number of bytes to write. If the buffer does not contain enough space to fit the entire string,
|
199
|
+
* only part of `string` will be written. However, partially encoded characters will not be written.
|
200
|
+
*
|
201
|
+
* @param string String to write to `buf`.
|
202
|
+
* @param offset Number of bytes to skip before starting to write `string`. Default: `0`.
|
203
|
+
* @param length Maximum number of bytes to write: Default: `buf.length - offset`.
|
204
|
+
* @param encoding The character encoding of `string`. Default: `utf8`.
|
205
|
+
*/
|
206
|
+
write(string, offset, length, encoding) {
|
207
|
+
if (typeof offset === "undefined") {
|
208
|
+
encoding = "utf8";
|
209
|
+
length = this.length;
|
210
|
+
offset = 0;
|
211
|
+
} else if (typeof length === "undefined" && typeof offset === "string") {
|
212
|
+
encoding = offset;
|
213
|
+
length = this.length;
|
214
|
+
offset = 0;
|
215
|
+
} else if (typeof offset === "number" && isFinite(offset)) {
|
216
|
+
offset = offset >>> 0;
|
217
|
+
if (typeof length === "number" && isFinite(length)) {
|
218
|
+
length = length >>> 0;
|
219
|
+
encoding ?? (encoding = "utf8");
|
220
|
+
} else if (typeof length === "string") {
|
221
|
+
encoding = length;
|
222
|
+
length = void 0;
|
223
|
+
}
|
224
|
+
} else {
|
225
|
+
throw new Error("Buffer.write(string, encoding, offset[, length]) is no longer supported");
|
226
|
+
}
|
227
|
+
const remaining = this.length - offset;
|
228
|
+
if (typeof length === "undefined" || length > remaining) {
|
229
|
+
length = remaining;
|
230
|
+
}
|
231
|
+
if (string.length > 0 && (length < 0 || offset < 0) || offset > this.length) {
|
232
|
+
throw new RangeError("Attempt to write outside buffer bounds");
|
233
|
+
}
|
234
|
+
encoding || (encoding = "utf8");
|
235
|
+
switch (Buffer._getEncoding(encoding)) {
|
236
|
+
case "hex":
|
237
|
+
return Buffer._hexWrite(this, string, offset, length);
|
238
|
+
case "utf8":
|
239
|
+
return Buffer._utf8Write(this, string, offset, length);
|
240
|
+
case "ascii":
|
241
|
+
case "latin1":
|
242
|
+
case "binary":
|
243
|
+
return Buffer._asciiWrite(this, string, offset, length);
|
244
|
+
case "ucs2":
|
245
|
+
case "utf16le":
|
246
|
+
return Buffer._ucs2Write(this, string, offset, length);
|
247
|
+
case "base64":
|
248
|
+
return Buffer._base64Write(this, string, offset, length);
|
249
|
+
}
|
250
|
+
}
|
251
|
+
/**
|
252
|
+
* Decodes the buffer to a string according to the specified character encoding.
|
253
|
+
* Passing `start` and `end` will decode only a subset of the buffer.
|
254
|
+
*
|
255
|
+
* Note that if the encoding is `utf8` and a byte sequence in the input is not valid UTF-8, then each invalid byte
|
256
|
+
* will be replaced with `U+FFFD`.
|
257
|
+
*
|
258
|
+
* @param encoding
|
259
|
+
* @param start
|
260
|
+
* @param end
|
261
|
+
*/
|
262
|
+
toString(encoding, start, end) {
|
263
|
+
const length = this.length;
|
264
|
+
if (length === 0) {
|
265
|
+
return "";
|
266
|
+
}
|
267
|
+
if (arguments.length === 0) {
|
268
|
+
return Buffer._utf8Slice(this, 0, length);
|
269
|
+
}
|
270
|
+
if (typeof start === "undefined" || start < 0) {
|
271
|
+
start = 0;
|
272
|
+
}
|
273
|
+
if (start > this.length) {
|
274
|
+
return "";
|
275
|
+
}
|
276
|
+
if (typeof end === "undefined" || end > this.length) {
|
277
|
+
end = this.length;
|
278
|
+
}
|
279
|
+
if (end <= 0) {
|
280
|
+
return "";
|
281
|
+
}
|
282
|
+
end >>>= 0;
|
283
|
+
start >>>= 0;
|
284
|
+
if (end <= start) {
|
285
|
+
return "";
|
286
|
+
}
|
287
|
+
if (!encoding) {
|
288
|
+
encoding = "utf8";
|
289
|
+
}
|
290
|
+
switch (Buffer._getEncoding(encoding)) {
|
291
|
+
case "hex":
|
292
|
+
return Buffer._hexSlice(this, start, end);
|
293
|
+
case "utf8":
|
294
|
+
return Buffer._utf8Slice(this, start, end);
|
295
|
+
case "ascii":
|
296
|
+
return Buffer._asciiSlice(this, start, end);
|
297
|
+
case "latin1":
|
298
|
+
case "binary":
|
299
|
+
return Buffer._latin1Slice(this, start, end);
|
300
|
+
case "ucs2":
|
301
|
+
case "utf16le":
|
302
|
+
return Buffer._utf16leSlice(this, start, end);
|
303
|
+
case "base64":
|
304
|
+
return Buffer._base64Slice(this, start, end);
|
305
|
+
}
|
306
|
+
}
|
307
|
+
/**
|
308
|
+
* Returns true if this buffer's is equal to the provided buffer, meaning they share the same exact data.
|
309
|
+
*
|
310
|
+
* @param otherBuffer
|
311
|
+
*/
|
312
|
+
equals(otherBuffer) {
|
313
|
+
if (!Buffer.isBuffer(otherBuffer)) {
|
314
|
+
throw new TypeError("Argument must be a Buffer");
|
315
|
+
}
|
316
|
+
if (this === otherBuffer) {
|
317
|
+
return true;
|
318
|
+
}
|
319
|
+
return Buffer.compare(this, otherBuffer) === 0;
|
320
|
+
}
|
321
|
+
/**
|
322
|
+
* Compares the buffer with `otherBuffer` and returns a number indicating whether the buffer comes before, after,
|
323
|
+
* or is the same as `otherBuffer` in sort order. Comparison is based on the actual sequence of bytes in each
|
324
|
+
* buffer.
|
325
|
+
*
|
326
|
+
* - `0` is returned if `otherBuffer` is the same as this buffer.
|
327
|
+
* - `1` is returned if `otherBuffer` should come before this buffer when sorted.
|
328
|
+
* - `-1` is returned if `otherBuffer` should come after this buffer when sorted.
|
329
|
+
*
|
330
|
+
* @param otherBuffer The buffer to compare to.
|
331
|
+
* @param targetStart The offset within `otherBuffer` at which to begin comparison.
|
332
|
+
* @param targetEnd The offset within `otherBuffer` at which to end comparison (exclusive).
|
333
|
+
* @param sourceStart The offset within this buffer at which to begin comparison.
|
334
|
+
* @param sourceEnd The offset within this buffer at which to end the comparison (exclusive).
|
335
|
+
*/
|
336
|
+
compare(otherBuffer, targetStart, targetEnd, sourceStart, sourceEnd) {
|
337
|
+
if (Buffer._isInstance(otherBuffer, Uint8Array)) {
|
338
|
+
otherBuffer = Buffer.from(otherBuffer, otherBuffer.byteOffset, otherBuffer.byteLength);
|
339
|
+
}
|
340
|
+
if (!Buffer.isBuffer(otherBuffer)) {
|
341
|
+
throw new TypeError("Argument must be a Buffer or Uint8Array");
|
342
|
+
}
|
343
|
+
targetStart ?? (targetStart = 0);
|
344
|
+
targetEnd ?? (targetEnd = otherBuffer ? otherBuffer.length : 0);
|
345
|
+
sourceStart ?? (sourceStart = 0);
|
346
|
+
sourceEnd ?? (sourceEnd = this.length);
|
347
|
+
if (targetStart < 0 || targetEnd > otherBuffer.length || sourceStart < 0 || sourceEnd > this.length) {
|
348
|
+
throw new RangeError("Out of range index");
|
349
|
+
}
|
350
|
+
if (sourceStart >= sourceEnd && targetStart >= targetEnd) {
|
351
|
+
return 0;
|
352
|
+
}
|
353
|
+
if (sourceStart >= sourceEnd) {
|
354
|
+
return -1;
|
355
|
+
}
|
356
|
+
if (targetStart >= targetEnd) {
|
357
|
+
return 1;
|
358
|
+
}
|
359
|
+
targetStart >>>= 0;
|
360
|
+
targetEnd >>>= 0;
|
361
|
+
sourceStart >>>= 0;
|
362
|
+
sourceEnd >>>= 0;
|
363
|
+
if (this === otherBuffer) {
|
364
|
+
return 0;
|
365
|
+
}
|
366
|
+
let x = sourceEnd - sourceStart;
|
367
|
+
let y = targetEnd - targetStart;
|
368
|
+
const len = Math.min(x, y);
|
369
|
+
const thisCopy = this.slice(sourceStart, sourceEnd);
|
370
|
+
const targetCopy = otherBuffer.slice(targetStart, targetEnd);
|
371
|
+
for (let i = 0; i < len; ++i) {
|
372
|
+
if (thisCopy[i] !== targetCopy[i]) {
|
373
|
+
x = thisCopy[i];
|
374
|
+
y = targetCopy[i];
|
375
|
+
break;
|
376
|
+
}
|
377
|
+
}
|
378
|
+
if (x < y)
|
379
|
+
return -1;
|
380
|
+
if (y < x)
|
381
|
+
return 1;
|
382
|
+
return 0;
|
383
|
+
}
|
384
|
+
/**
|
385
|
+
* Copies data from a region of this buffer to a region in `targetBuffer`, even if the `targetBuffer` memory
|
386
|
+
* region overlaps with this buffer.
|
387
|
+
*
|
388
|
+
* @param targetBuffer The target buffer to copy into.
|
389
|
+
* @param targetStart The offset within `targetBuffer` at which to begin writing.
|
390
|
+
* @param sourceStart The offset within this buffer at which to begin copying.
|
391
|
+
* @param sourceEnd The offset within this buffer at which to end copying (exclusive).
|
392
|
+
*/
|
393
|
+
copy(targetBuffer, targetStart, sourceStart, sourceEnd) {
|
394
|
+
if (!Buffer.isBuffer(targetBuffer))
|
395
|
+
throw new TypeError("argument should be a Buffer");
|
396
|
+
if (!sourceStart)
|
397
|
+
sourceStart = 0;
|
398
|
+
if (!targetStart)
|
399
|
+
targetStart = 0;
|
400
|
+
if (!sourceEnd && sourceEnd !== 0)
|
401
|
+
sourceEnd = this.length;
|
402
|
+
if (targetStart >= targetBuffer.length)
|
403
|
+
targetStart = targetBuffer.length;
|
404
|
+
if (!targetStart)
|
405
|
+
targetStart = 0;
|
406
|
+
if (sourceEnd > 0 && sourceEnd < sourceStart)
|
407
|
+
sourceEnd = sourceStart;
|
408
|
+
if (sourceEnd === sourceStart)
|
409
|
+
return 0;
|
410
|
+
if (targetBuffer.length === 0 || this.length === 0)
|
411
|
+
return 0;
|
412
|
+
if (targetStart < 0) {
|
413
|
+
throw new RangeError("targetStart out of bounds");
|
414
|
+
}
|
415
|
+
if (sourceStart < 0 || sourceStart >= this.length)
|
416
|
+
throw new RangeError("Index out of range");
|
417
|
+
if (sourceEnd < 0)
|
418
|
+
throw new RangeError("sourceEnd out of bounds");
|
419
|
+
if (sourceEnd > this.length)
|
420
|
+
sourceEnd = this.length;
|
421
|
+
if (targetBuffer.length - targetStart < sourceEnd - sourceStart) {
|
422
|
+
sourceEnd = targetBuffer.length - targetStart + sourceStart;
|
423
|
+
}
|
424
|
+
const len = sourceEnd - sourceStart;
|
425
|
+
if (this === targetBuffer && typeof Uint8Array.prototype.copyWithin === "function") {
|
426
|
+
this.copyWithin(targetStart, sourceStart, sourceEnd);
|
427
|
+
} else {
|
428
|
+
Uint8Array.prototype.set.call(targetBuffer, this.subarray(sourceStart, sourceEnd), targetStart);
|
429
|
+
}
|
430
|
+
return len;
|
431
|
+
}
|
432
|
+
/**
|
433
|
+
* Returns a new `Buffer` that references the same memory as the original, but offset and cropped by the `start`
|
434
|
+
* and `end` indices. This is the same behavior as `buf.subarray()`.
|
435
|
+
*
|
436
|
+
* This method is not compatible with the `Uint8Array.prototype.slice()`, which is a superclass of Buffer. To copy
|
437
|
+
* the slice, use `Uint8Array.prototype.slice()`.
|
438
|
+
*
|
439
|
+
* @param start
|
440
|
+
* @param end
|
441
|
+
*/
|
442
|
+
slice(start, end) {
|
443
|
+
if (!start) {
|
444
|
+
start = 0;
|
445
|
+
}
|
446
|
+
const len = this.length;
|
447
|
+
start = ~~start;
|
448
|
+
end = end === void 0 ? len : ~~end;
|
449
|
+
if (start < 0) {
|
450
|
+
start += len;
|
451
|
+
if (start < 0) {
|
452
|
+
start = 0;
|
453
|
+
}
|
454
|
+
} else if (start > len) {
|
455
|
+
start = len;
|
456
|
+
}
|
457
|
+
if (end < 0) {
|
458
|
+
end += len;
|
459
|
+
if (end < 0) {
|
460
|
+
end = 0;
|
461
|
+
}
|
462
|
+
} else if (end > len) {
|
463
|
+
end = len;
|
464
|
+
}
|
465
|
+
if (end < start) {
|
466
|
+
end = start;
|
467
|
+
}
|
468
|
+
const newBuf = this.subarray(start, end);
|
469
|
+
Object.setPrototypeOf(newBuf, Buffer.prototype);
|
470
|
+
return newBuf;
|
471
|
+
}
|
472
|
+
/**
|
473
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits
|
474
|
+
* of accuracy. Behavior is undefined when value is anything other than an unsigned integer.
|
475
|
+
*
|
476
|
+
* @param value Number to write.
|
477
|
+
* @param offset Number of bytes to skip before starting to write.
|
478
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
479
|
+
* @param noAssert
|
480
|
+
* @returns `offset` plus the number of bytes written.
|
481
|
+
*/
|
482
|
+
writeUIntLE(value, offset, byteLength, noAssert) {
|
483
|
+
value = +value;
|
484
|
+
offset = offset >>> 0;
|
485
|
+
byteLength = byteLength >>> 0;
|
486
|
+
if (!noAssert) {
|
487
|
+
const maxBytes = Math.pow(2, 8 * byteLength) - 1;
|
488
|
+
Buffer._checkInt(this, value, offset, byteLength, maxBytes, 0);
|
489
|
+
}
|
490
|
+
let mul = 1;
|
491
|
+
let i = 0;
|
492
|
+
this[offset] = value & 255;
|
493
|
+
while (++i < byteLength && (mul *= 256)) {
|
494
|
+
this[offset + i] = value / mul & 255;
|
495
|
+
}
|
496
|
+
return offset + byteLength;
|
497
|
+
}
|
498
|
+
/**
|
499
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits of
|
500
|
+
* accuracy. Behavior is undefined when `value` is anything other than an unsigned integer.
|
501
|
+
*
|
502
|
+
* @param value Number to write.
|
503
|
+
* @param offset Number of bytes to skip before starting to write.
|
504
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
505
|
+
* @param noAssert
|
506
|
+
* @returns `offset` plus the number of bytes written.
|
507
|
+
*/
|
508
|
+
writeUIntBE(value, offset, byteLength, noAssert) {
|
509
|
+
value = +value;
|
510
|
+
offset = offset >>> 0;
|
511
|
+
byteLength = byteLength >>> 0;
|
512
|
+
if (!noAssert) {
|
513
|
+
const maxBytes = Math.pow(2, 8 * byteLength) - 1;
|
514
|
+
Buffer._checkInt(this, value, offset, byteLength, maxBytes, 0);
|
515
|
+
}
|
516
|
+
let i = byteLength - 1;
|
517
|
+
let mul = 1;
|
518
|
+
this[offset + i] = value & 255;
|
519
|
+
while (--i >= 0 && (mul *= 256)) {
|
520
|
+
this[offset + i] = value / mul & 255;
|
521
|
+
}
|
522
|
+
return offset + byteLength;
|
523
|
+
}
|
524
|
+
/**
|
525
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits
|
526
|
+
* of accuracy. Behavior is undefined when `value` is anything other than a signed integer.
|
527
|
+
*
|
528
|
+
* @param value Number to write.
|
529
|
+
* @param offset Number of bytes to skip before starting to write.
|
530
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
531
|
+
* @param noAssert
|
532
|
+
* @returns `offset` plus the number of bytes written.
|
533
|
+
*/
|
534
|
+
writeIntLE(value, offset, byteLength, noAssert) {
|
535
|
+
value = +value;
|
536
|
+
offset = offset >>> 0;
|
537
|
+
if (!noAssert) {
|
538
|
+
const limit = Math.pow(2, 8 * byteLength - 1);
|
539
|
+
Buffer._checkInt(this, value, offset, byteLength, limit - 1, -limit);
|
540
|
+
}
|
541
|
+
let i = 0;
|
542
|
+
let mul = 1;
|
543
|
+
let sub = 0;
|
544
|
+
this[offset] = value & 255;
|
545
|
+
while (++i < byteLength && (mul *= 256)) {
|
546
|
+
if (value < 0 && sub === 0 && this[offset + i - 1] !== 0) {
|
547
|
+
sub = 1;
|
548
|
+
}
|
549
|
+
this[offset + i] = (value / mul >> 0) - sub & 255;
|
550
|
+
}
|
551
|
+
return offset + byteLength;
|
552
|
+
}
|
553
|
+
/**
|
554
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits
|
555
|
+
* of accuracy. Behavior is undefined when `value` is anything other than a signed integer.
|
556
|
+
*
|
557
|
+
* @param value Number to write.
|
558
|
+
* @param offset Number of bytes to skip before starting to write.
|
559
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
560
|
+
* @param noAssert
|
561
|
+
* @returns `offset` plus the number of bytes written.
|
562
|
+
*/
|
563
|
+
writeIntBE(value, offset, byteLength, noAssert) {
|
564
|
+
value = +value;
|
565
|
+
offset = offset >>> 0;
|
566
|
+
if (!noAssert) {
|
567
|
+
const limit = Math.pow(2, 8 * byteLength - 1);
|
568
|
+
Buffer._checkInt(this, value, offset, byteLength, limit - 1, -limit);
|
569
|
+
}
|
570
|
+
let i = byteLength - 1;
|
571
|
+
let mul = 1;
|
572
|
+
let sub = 0;
|
573
|
+
this[offset + i] = value & 255;
|
574
|
+
while (--i >= 0 && (mul *= 256)) {
|
575
|
+
if (value < 0 && sub === 0 && this[offset + i + 1] !== 0) {
|
576
|
+
sub = 1;
|
577
|
+
}
|
578
|
+
this[offset + i] = (value / mul >> 0) - sub & 255;
|
579
|
+
}
|
580
|
+
return offset + byteLength;
|
581
|
+
}
|
582
|
+
/**
|
583
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an
|
584
|
+
* unsigned, little-endian integer supporting up to 48 bits of accuracy.
|
585
|
+
*
|
586
|
+
* @param offset Number of bytes to skip before starting to read.
|
587
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
588
|
+
* @param noAssert
|
589
|
+
*/
|
590
|
+
readUIntLE(offset, byteLength, noAssert) {
|
591
|
+
offset = offset >>> 0;
|
592
|
+
byteLength = byteLength >>> 0;
|
593
|
+
if (!noAssert) {
|
594
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
595
|
+
}
|
596
|
+
let val = this[offset];
|
597
|
+
let mul = 1;
|
598
|
+
let i = 0;
|
599
|
+
while (++i < byteLength && (mul *= 256)) {
|
600
|
+
val += this[offset + i] * mul;
|
601
|
+
}
|
602
|
+
return val;
|
603
|
+
}
|
604
|
+
/**
|
605
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an
|
606
|
+
* unsigned, big-endian integer supporting up to 48 bits of accuracy.
|
607
|
+
*
|
608
|
+
* @param offset Number of bytes to skip before starting to read.
|
609
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
610
|
+
* @param noAssert
|
611
|
+
*/
|
612
|
+
readUIntBE(offset, byteLength, noAssert) {
|
613
|
+
offset = offset >>> 0;
|
614
|
+
byteLength = byteLength >>> 0;
|
615
|
+
if (!noAssert) {
|
616
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
617
|
+
}
|
618
|
+
let val = this[offset + --byteLength];
|
619
|
+
let mul = 1;
|
620
|
+
while (byteLength > 0 && (mul *= 256)) {
|
621
|
+
val += this[offset + --byteLength] * mul;
|
622
|
+
}
|
623
|
+
return val;
|
624
|
+
}
|
625
|
+
/**
|
626
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a
|
627
|
+
* little-endian, two's complement signed value supporting up to 48 bits of accuracy.
|
628
|
+
*
|
629
|
+
* @param offset Number of bytes to skip before starting to read.
|
630
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
631
|
+
* @param noAssert
|
632
|
+
*/
|
633
|
+
readIntLE(offset, byteLength, noAssert) {
|
634
|
+
offset = offset >>> 0;
|
635
|
+
byteLength = byteLength >>> 0;
|
636
|
+
if (!noAssert) {
|
637
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
638
|
+
}
|
639
|
+
let val = this[offset];
|
640
|
+
let mul = 1;
|
641
|
+
let i = 0;
|
642
|
+
while (++i < byteLength && (mul *= 256)) {
|
643
|
+
val += this[offset + i] * mul;
|
644
|
+
}
|
645
|
+
mul *= 128;
|
646
|
+
if (val >= mul) {
|
647
|
+
val -= Math.pow(2, 8 * byteLength);
|
648
|
+
}
|
649
|
+
return val;
|
650
|
+
}
|
651
|
+
/**
|
652
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a
|
653
|
+
* big-endian, two's complement signed value supporting up to 48 bits of accuracy.
|
654
|
+
*
|
655
|
+
* @param offset Number of bytes to skip before starting to read.
|
656
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
657
|
+
* @param noAssert
|
658
|
+
*/
|
659
|
+
readIntBE(offset, byteLength, noAssert) {
|
660
|
+
offset = offset >>> 0;
|
661
|
+
byteLength = byteLength >>> 0;
|
662
|
+
if (!noAssert) {
|
663
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
664
|
+
}
|
665
|
+
let i = byteLength;
|
666
|
+
let mul = 1;
|
667
|
+
let val = this[offset + --i];
|
668
|
+
while (i > 0 && (mul *= 256)) {
|
669
|
+
val += this[offset + --i] * mul;
|
670
|
+
}
|
671
|
+
mul *= 128;
|
672
|
+
if (val >= mul) {
|
673
|
+
val -= Math.pow(2, 8 * byteLength);
|
674
|
+
}
|
675
|
+
return val;
|
676
|
+
}
|
677
|
+
/**
|
678
|
+
* Reads an unsigned 8-bit integer from `buf` at the specified `offset`.
|
679
|
+
*
|
680
|
+
* @param offset Number of bytes to skip before starting to read.
|
681
|
+
* @param noAssert
|
682
|
+
*/
|
683
|
+
readUInt8(offset, noAssert) {
|
684
|
+
offset = offset >>> 0;
|
685
|
+
if (!noAssert) {
|
686
|
+
Buffer._checkOffset(offset, 1, this.length);
|
687
|
+
}
|
688
|
+
return this[offset];
|
689
|
+
}
|
690
|
+
/**
|
691
|
+
* Reads an unsigned, little-endian 16-bit integer from `buf` at the specified `offset`.
|
692
|
+
*
|
693
|
+
* @param offset Number of bytes to skip before starting to read.
|
694
|
+
* @param noAssert
|
695
|
+
*/
|
696
|
+
readUInt16LE(offset, noAssert) {
|
697
|
+
offset = offset >>> 0;
|
698
|
+
if (!noAssert) {
|
699
|
+
Buffer._checkOffset(offset, 2, this.length);
|
700
|
+
}
|
701
|
+
return this[offset] | this[offset + 1] << 8;
|
702
|
+
}
|
703
|
+
/**
|
704
|
+
* Reads an unsigned, big-endian 16-bit integer from `buf` at the specified `offset`.
|
705
|
+
*
|
706
|
+
* @param offset Number of bytes to skip before starting to read.
|
707
|
+
* @param noAssert
|
708
|
+
*/
|
709
|
+
readUInt16BE(offset, noAssert) {
|
710
|
+
offset = offset >>> 0;
|
711
|
+
if (!noAssert) {
|
712
|
+
Buffer._checkOffset(offset, 2, this.length);
|
713
|
+
}
|
714
|
+
return this[offset] << 8 | this[offset + 1];
|
715
|
+
}
|
716
|
+
/**
|
717
|
+
* Reads an unsigned, little-endian 32-bit integer from `buf` at the specified `offset`.
|
718
|
+
*
|
719
|
+
* @param offset Number of bytes to skip before starting to read.
|
720
|
+
* @param noAssert
|
721
|
+
*/
|
722
|
+
readUInt32LE(offset, noAssert) {
|
723
|
+
offset = offset >>> 0;
|
724
|
+
if (!noAssert) {
|
725
|
+
Buffer._checkOffset(offset, 4, this.length);
|
726
|
+
}
|
727
|
+
return (this[offset] | this[offset + 1] << 8 | this[offset + 2] << 16) + this[offset + 3] * 16777216;
|
728
|
+
}
|
729
|
+
/**
|
730
|
+
* Reads an unsigned, big-endian 32-bit integer from `buf` at the specified `offset`.
|
731
|
+
*
|
732
|
+
* @param offset Number of bytes to skip before starting to read.
|
733
|
+
* @param noAssert
|
734
|
+
*/
|
735
|
+
readUInt32BE(offset, noAssert) {
|
736
|
+
offset = offset >>> 0;
|
737
|
+
if (!noAssert) {
|
738
|
+
Buffer._checkOffset(offset, 4, this.length);
|
739
|
+
}
|
740
|
+
return this[offset] * 16777216 + (this[offset + 1] << 16 | this[offset + 2] << 8 | this[offset + 3]);
|
741
|
+
}
|
742
|
+
/**
|
743
|
+
* Reads a signed 8-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer` are interpreted
|
744
|
+
* as two's complement signed values.
|
745
|
+
*
|
746
|
+
* @param offset Number of bytes to skip before starting to read.
|
747
|
+
* @param noAssert
|
748
|
+
*/
|
749
|
+
readInt8(offset, noAssert) {
|
750
|
+
offset = offset >>> 0;
|
751
|
+
if (!noAssert) {
|
752
|
+
Buffer._checkOffset(offset, 1, this.length);
|
753
|
+
}
|
754
|
+
if (!(this[offset] & 128)) {
|
755
|
+
return this[offset];
|
756
|
+
}
|
757
|
+
return (255 - this[offset] + 1) * -1;
|
758
|
+
}
|
759
|
+
/**
|
760
|
+
* Reads a signed, little-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
761
|
+
* are interpreted as two's complement signed values.
|
762
|
+
*
|
763
|
+
* @param offset Number of bytes to skip before starting to read.
|
764
|
+
* @param noAssert
|
765
|
+
*/
|
766
|
+
readInt16LE(offset, noAssert) {
|
767
|
+
offset = offset >>> 0;
|
768
|
+
if (!noAssert) {
|
769
|
+
Buffer._checkOffset(offset, 2, this.length);
|
770
|
+
}
|
771
|
+
const val = this[offset] | this[offset + 1] << 8;
|
772
|
+
return val & 32768 ? val | 4294901760 : val;
|
773
|
+
}
|
774
|
+
/**
|
775
|
+
* Reads a signed, big-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
776
|
+
* are interpreted as two's complement signed values.
|
777
|
+
*
|
778
|
+
* @param offset Number of bytes to skip before starting to read.
|
779
|
+
* @param noAssert
|
780
|
+
*/
|
781
|
+
readInt16BE(offset, noAssert) {
|
782
|
+
offset = offset >>> 0;
|
783
|
+
if (!noAssert) {
|
784
|
+
Buffer._checkOffset(offset, 2, this.length);
|
785
|
+
}
|
786
|
+
const val = this[offset + 1] | this[offset] << 8;
|
787
|
+
return val & 32768 ? val | 4294901760 : val;
|
788
|
+
}
|
789
|
+
/**
|
790
|
+
* Reads a signed, little-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
791
|
+
* are interpreted as two's complement signed values.
|
792
|
+
*
|
793
|
+
* @param offset Number of bytes to skip before starting to read.
|
794
|
+
* @param noAssert
|
795
|
+
*/
|
796
|
+
readInt32LE(offset, noAssert) {
|
797
|
+
offset = offset >>> 0;
|
798
|
+
if (!noAssert) {
|
799
|
+
Buffer._checkOffset(offset, 4, this.length);
|
800
|
+
}
|
801
|
+
return this[offset] | this[offset + 1] << 8 | this[offset + 2] << 16 | this[offset + 3] << 24;
|
802
|
+
}
|
803
|
+
/**
|
804
|
+
* Reads a signed, big-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
805
|
+
* are interpreted as two's complement signed values.
|
806
|
+
*
|
807
|
+
* @param offset Number of bytes to skip before starting to read.
|
808
|
+
* @param noAssert
|
809
|
+
*/
|
810
|
+
readInt32BE(offset, noAssert) {
|
811
|
+
offset = offset >>> 0;
|
812
|
+
if (!noAssert) {
|
813
|
+
Buffer._checkOffset(offset, 4, this.length);
|
814
|
+
}
|
815
|
+
return this[offset] << 24 | this[offset + 1] << 16 | this[offset + 2] << 8 | this[offset + 3];
|
816
|
+
}
|
817
|
+
/**
|
818
|
+
* Interprets `buf` as an array of unsigned 16-bit integers and swaps the byte order in-place.
|
819
|
+
* Throws a `RangeError` if `buf.length` is not a multiple of 2.
|
820
|
+
*/
|
821
|
+
swap16() {
|
822
|
+
const len = this.length;
|
823
|
+
if (len % 2 !== 0) {
|
824
|
+
throw new RangeError("Buffer size must be a multiple of 16-bits");
|
825
|
+
}
|
826
|
+
for (let i = 0; i < len; i += 2) {
|
827
|
+
this._swap(this, i, i + 1);
|
828
|
+
}
|
829
|
+
return this;
|
830
|
+
}
|
831
|
+
/**
|
832
|
+
* Interprets `buf` as an array of unsigned 32-bit integers and swaps the byte order in-place.
|
833
|
+
* Throws a `RangeError` if `buf.length` is not a multiple of 4.
|
834
|
+
*/
|
835
|
+
swap32() {
|
836
|
+
const len = this.length;
|
837
|
+
if (len % 4 !== 0) {
|
838
|
+
throw new RangeError("Buffer size must be a multiple of 32-bits");
|
839
|
+
}
|
840
|
+
for (let i = 0; i < len; i += 4) {
|
841
|
+
this._swap(this, i, i + 3);
|
842
|
+
this._swap(this, i + 1, i + 2);
|
843
|
+
}
|
844
|
+
return this;
|
845
|
+
}
|
846
|
+
/**
|
847
|
+
* Interprets `buf` as an array of unsigned 64-bit integers and swaps the byte order in-place.
|
848
|
+
* Throws a `RangeError` if `buf.length` is not a multiple of 8.
|
849
|
+
*/
|
850
|
+
swap64() {
|
851
|
+
const len = this.length;
|
852
|
+
if (len % 8 !== 0) {
|
853
|
+
throw new RangeError("Buffer size must be a multiple of 64-bits");
|
854
|
+
}
|
855
|
+
for (let i = 0; i < len; i += 8) {
|
856
|
+
this._swap(this, i, i + 7);
|
857
|
+
this._swap(this, i + 1, i + 6);
|
858
|
+
this._swap(this, i + 2, i + 5);
|
859
|
+
this._swap(this, i + 3, i + 4);
|
860
|
+
}
|
861
|
+
return this;
|
862
|
+
}
|
863
|
+
/**
|
864
|
+
* Swaps two octets.
|
865
|
+
*
|
866
|
+
* @param b
|
867
|
+
* @param n
|
868
|
+
* @param m
|
869
|
+
*/
|
870
|
+
_swap(b, n, m) {
|
871
|
+
const i = b[n];
|
872
|
+
b[n] = b[m];
|
873
|
+
b[m] = i;
|
874
|
+
}
|
875
|
+
/**
|
876
|
+
* Writes `value` to `buf` at the specified `offset`. The `value` must be a valid unsigned 8-bit integer.
|
877
|
+
* Behavior is undefined when `value` is anything other than an unsigned 8-bit integer.
|
878
|
+
*
|
879
|
+
* @param value Number to write.
|
880
|
+
* @param offset Number of bytes to skip before starting to write.
|
881
|
+
* @param noAssert
|
882
|
+
* @returns `offset` plus the number of bytes written.
|
883
|
+
*/
|
884
|
+
writeUInt8(value, offset, noAssert) {
|
885
|
+
value = +value;
|
886
|
+
offset = offset >>> 0;
|
887
|
+
if (!noAssert) {
|
888
|
+
Buffer._checkInt(this, value, offset, 1, 255, 0);
|
889
|
+
}
|
890
|
+
this[offset] = value & 255;
|
891
|
+
return offset + 1;
|
892
|
+
}
|
893
|
+
/**
|
894
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 16-bit
|
895
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 16-bit integer.
|
896
|
+
*
|
897
|
+
* @param value Number to write.
|
898
|
+
* @param offset Number of bytes to skip before starting to write.
|
899
|
+
* @param noAssert
|
900
|
+
* @returns `offset` plus the number of bytes written.
|
901
|
+
*/
|
902
|
+
writeUInt16LE(value, offset, noAssert) {
|
903
|
+
value = +value;
|
904
|
+
offset = offset >>> 0;
|
905
|
+
if (!noAssert) {
|
906
|
+
Buffer._checkInt(this, value, offset, 2, 65535, 0);
|
907
|
+
}
|
908
|
+
this[offset] = value & 255;
|
909
|
+
this[offset + 1] = value >>> 8;
|
910
|
+
return offset + 2;
|
911
|
+
}
|
912
|
+
/**
|
913
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 16-bit
|
914
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 16-bit integer.
|
915
|
+
*
|
916
|
+
* @param value Number to write.
|
917
|
+
* @param offset Number of bytes to skip before starting to write.
|
918
|
+
* @param noAssert
|
919
|
+
* @returns `offset` plus the number of bytes written.
|
920
|
+
*/
|
921
|
+
writeUInt16BE(value, offset, noAssert) {
|
922
|
+
value = +value;
|
923
|
+
offset = offset >>> 0;
|
924
|
+
if (!noAssert) {
|
925
|
+
Buffer._checkInt(this, value, offset, 2, 65535, 0);
|
926
|
+
}
|
927
|
+
this[offset] = value >>> 8;
|
928
|
+
this[offset + 1] = value & 255;
|
929
|
+
return offset + 2;
|
930
|
+
}
|
931
|
+
/**
|
932
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 32-bit
|
933
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 32-bit integer.
|
934
|
+
*
|
935
|
+
* @param value Number to write.
|
936
|
+
* @param offset Number of bytes to skip before starting to write.
|
937
|
+
* @param noAssert
|
938
|
+
* @returns `offset` plus the number of bytes written.
|
939
|
+
*/
|
940
|
+
writeUInt32LE(value, offset, noAssert) {
|
941
|
+
value = +value;
|
942
|
+
offset = offset >>> 0;
|
943
|
+
if (!noAssert) {
|
944
|
+
Buffer._checkInt(this, value, offset, 4, 4294967295, 0);
|
945
|
+
}
|
946
|
+
this[offset + 3] = value >>> 24;
|
947
|
+
this[offset + 2] = value >>> 16;
|
948
|
+
this[offset + 1] = value >>> 8;
|
949
|
+
this[offset] = value & 255;
|
950
|
+
return offset + 4;
|
951
|
+
}
|
952
|
+
/**
|
953
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 32-bit
|
954
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 32-bit integer.
|
955
|
+
*
|
956
|
+
* @param value Number to write.
|
957
|
+
* @param offset Number of bytes to skip before starting to write.
|
958
|
+
* @param noAssert
|
959
|
+
* @returns `offset` plus the number of bytes written.
|
960
|
+
*/
|
961
|
+
writeUInt32BE(value, offset, noAssert) {
|
962
|
+
value = +value;
|
963
|
+
offset = offset >>> 0;
|
964
|
+
if (!noAssert) {
|
965
|
+
Buffer._checkInt(this, value, offset, 4, 4294967295, 0);
|
966
|
+
}
|
967
|
+
this[offset] = value >>> 24;
|
968
|
+
this[offset + 1] = value >>> 16;
|
969
|
+
this[offset + 2] = value >>> 8;
|
970
|
+
this[offset + 3] = value & 255;
|
971
|
+
return offset + 4;
|
972
|
+
}
|
973
|
+
/**
|
974
|
+
* Writes `value` to `buf` at the specified `offset`. The `value` must be a valid signed 8-bit integer.
|
975
|
+
* Behavior is undefined when `value` is anything other than a signed 8-bit integer.
|
976
|
+
*
|
977
|
+
* @param value Number to write.
|
978
|
+
* @param offset Number of bytes to skip before starting to write.
|
979
|
+
* @param noAssert
|
980
|
+
* @returns `offset` plus the number of bytes written.
|
981
|
+
*/
|
982
|
+
writeInt8(value, offset, noAssert) {
|
983
|
+
value = +value;
|
984
|
+
offset = offset >>> 0;
|
985
|
+
if (!noAssert) {
|
986
|
+
Buffer._checkInt(this, value, offset, 1, 127, -128);
|
987
|
+
}
|
988
|
+
if (value < 0) {
|
989
|
+
value = 255 + value + 1;
|
990
|
+
}
|
991
|
+
this[offset] = value & 255;
|
992
|
+
return offset + 1;
|
993
|
+
}
|
994
|
+
/**
|
995
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 16-bit
|
996
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 16-bit integer.
|
997
|
+
*
|
998
|
+
* @param value Number to write.
|
999
|
+
* @param offset Number of bytes to skip before starting to write.
|
1000
|
+
* @param noAssert
|
1001
|
+
* @returns `offset` plus the number of bytes written.
|
1002
|
+
*/
|
1003
|
+
writeInt16LE(value, offset, noAssert) {
|
1004
|
+
value = +value;
|
1005
|
+
offset = offset >>> 0;
|
1006
|
+
if (!noAssert) {
|
1007
|
+
Buffer._checkInt(this, value, offset, 2, 32767, -32768);
|
1008
|
+
}
|
1009
|
+
this[offset] = value & 255;
|
1010
|
+
this[offset + 1] = value >>> 8;
|
1011
|
+
return offset + 2;
|
1012
|
+
}
|
1013
|
+
/**
|
1014
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 16-bit
|
1015
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 16-bit integer.
|
1016
|
+
*
|
1017
|
+
* @param value Number to write.
|
1018
|
+
* @param offset Number of bytes to skip before starting to write.
|
1019
|
+
* @param noAssert
|
1020
|
+
* @returns `offset` plus the number of bytes written.
|
1021
|
+
*/
|
1022
|
+
writeInt16BE(value, offset, noAssert) {
|
1023
|
+
value = +value;
|
1024
|
+
offset = offset >>> 0;
|
1025
|
+
if (!noAssert) {
|
1026
|
+
Buffer._checkInt(this, value, offset, 2, 32767, -32768);
|
1027
|
+
}
|
1028
|
+
this[offset] = value >>> 8;
|
1029
|
+
this[offset + 1] = value & 255;
|
1030
|
+
return offset + 2;
|
1031
|
+
}
|
1032
|
+
/**
|
1033
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 32-bit
|
1034
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 32-bit integer.
|
1035
|
+
*
|
1036
|
+
* @param value Number to write.
|
1037
|
+
* @param offset Number of bytes to skip before starting to write.
|
1038
|
+
* @param noAssert
|
1039
|
+
* @returns `offset` plus the number of bytes written.
|
1040
|
+
*/
|
1041
|
+
writeInt32LE(value, offset, noAssert) {
|
1042
|
+
value = +value;
|
1043
|
+
offset = offset >>> 0;
|
1044
|
+
if (!noAssert) {
|
1045
|
+
Buffer._checkInt(this, value, offset, 4, 2147483647, -2147483648);
|
1046
|
+
}
|
1047
|
+
this[offset] = value & 255;
|
1048
|
+
this[offset + 1] = value >>> 8;
|
1049
|
+
this[offset + 2] = value >>> 16;
|
1050
|
+
this[offset + 3] = value >>> 24;
|
1051
|
+
return offset + 4;
|
1052
|
+
}
|
1053
|
+
/**
|
1054
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 32-bit
|
1055
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 32-bit integer.
|
1056
|
+
*
|
1057
|
+
* @param value Number to write.
|
1058
|
+
* @param offset Number of bytes to skip before starting to write.
|
1059
|
+
* @param noAssert
|
1060
|
+
* @returns `offset` plus the number of bytes written.
|
1061
|
+
*/
|
1062
|
+
writeInt32BE(value, offset, noAssert) {
|
1063
|
+
value = +value;
|
1064
|
+
offset = offset >>> 0;
|
1065
|
+
if (!noAssert) {
|
1066
|
+
Buffer._checkInt(this, value, offset, 4, 2147483647, -2147483648);
|
1067
|
+
}
|
1068
|
+
if (value < 0) {
|
1069
|
+
value = 4294967295 + value + 1;
|
1070
|
+
}
|
1071
|
+
this[offset] = value >>> 24;
|
1072
|
+
this[offset + 1] = value >>> 16;
|
1073
|
+
this[offset + 2] = value >>> 8;
|
1074
|
+
this[offset + 3] = value & 255;
|
1075
|
+
return offset + 4;
|
1076
|
+
}
|
1077
|
+
/**
|
1078
|
+
* Fills `buf` with the specified `value`. If the `offset` and `end` are not given, the entire `buf` will be
|
1079
|
+
* filled. The `value` is coerced to a `uint32` value if it is not a string, `Buffer`, or integer. If the resulting
|
1080
|
+
* integer is greater than `255` (decimal), then `buf` will be filled with `value & 255`.
|
1081
|
+
*
|
1082
|
+
* If the final write of a `fill()` operation falls on a multi-byte character, then only the bytes of that
|
1083
|
+
* character that fit into `buf` are written.
|
1084
|
+
*
|
1085
|
+
* If `value` contains invalid characters, it is truncated; if no valid fill data remains, an exception is thrown.
|
1086
|
+
*
|
1087
|
+
* @param value
|
1088
|
+
* @param encoding
|
1089
|
+
*/
|
1090
|
+
fill(value, offset, end, encoding) {
|
1091
|
+
if (typeof value === "string") {
|
1092
|
+
if (typeof offset === "string") {
|
1093
|
+
encoding = offset;
|
1094
|
+
offset = 0;
|
1095
|
+
end = this.length;
|
1096
|
+
} else if (typeof end === "string") {
|
1097
|
+
encoding = end;
|
1098
|
+
end = this.length;
|
1099
|
+
}
|
1100
|
+
if (encoding !== void 0 && typeof encoding !== "string") {
|
1101
|
+
throw new TypeError("encoding must be a string");
|
1102
|
+
}
|
1103
|
+
if (typeof encoding === "string" && !Buffer.isEncoding(encoding)) {
|
1104
|
+
throw new TypeError("Unknown encoding: " + encoding);
|
1105
|
+
}
|
1106
|
+
if (value.length === 1) {
|
1107
|
+
const code = value.charCodeAt(0);
|
1108
|
+
if (encoding === "utf8" && code < 128) {
|
1109
|
+
value = code;
|
1110
|
+
}
|
1111
|
+
}
|
1112
|
+
} else if (typeof value === "number") {
|
1113
|
+
value = value & 255;
|
1114
|
+
} else if (typeof value === "boolean") {
|
1115
|
+
value = Number(value);
|
1116
|
+
}
|
1117
|
+
offset ?? (offset = 0);
|
1118
|
+
end ?? (end = this.length);
|
1119
|
+
if (offset < 0 || this.length < offset || this.length < end) {
|
1120
|
+
throw new RangeError("Out of range index");
|
1121
|
+
}
|
1122
|
+
if (end <= offset) {
|
1123
|
+
return this;
|
1124
|
+
}
|
1125
|
+
offset = offset >>> 0;
|
1126
|
+
end = end === void 0 ? this.length : end >>> 0;
|
1127
|
+
value || (value = 0);
|
1128
|
+
let i;
|
1129
|
+
if (typeof value === "number") {
|
1130
|
+
for (i = offset; i < end; ++i) {
|
1131
|
+
this[i] = value;
|
1132
|
+
}
|
1133
|
+
} else {
|
1134
|
+
const bytes = Buffer.isBuffer(value) ? value : Buffer.from(value, encoding);
|
1135
|
+
const len = bytes.length;
|
1136
|
+
if (len === 0) {
|
1137
|
+
throw new TypeError('The value "' + value + '" is invalid for argument "value"');
|
1138
|
+
}
|
1139
|
+
for (i = 0; i < end - offset; ++i) {
|
1140
|
+
this[i + offset] = bytes[i % len];
|
1141
|
+
}
|
1142
|
+
}
|
1143
|
+
return this;
|
1144
|
+
}
|
1145
|
+
/**
|
1146
|
+
* Returns the index of the specified value.
|
1147
|
+
*
|
1148
|
+
* If `value` is:
|
1149
|
+
* - a string, `value` is interpreted according to the character encoding in `encoding`.
|
1150
|
+
* - a `Buffer` or `Uint8Array`, `value` will be used in its entirety. To compare a partial Buffer, use `slice()`.
|
1151
|
+
* - a number, `value` will be interpreted as an unsigned 8-bit integer value between `0` and `255`.
|
1152
|
+
*
|
1153
|
+
* Any other types will throw a `TypeError`.
|
1154
|
+
*
|
1155
|
+
* @param value What to search for.
|
1156
|
+
* @param byteOffset Where to begin searching in `buf`. If negative, then calculated from the end.
|
1157
|
+
* @param encoding If `value` is a string, this is the encoding used to search.
|
1158
|
+
* @returns The index of the first occurrence of `value` in `buf`, or `-1` if not found.
|
1159
|
+
*/
|
1160
|
+
indexOf(value, byteOffset, encoding) {
|
1161
|
+
return this._bidirectionalIndexOf(this, value, byteOffset, encoding, true);
|
1162
|
+
}
|
1163
|
+
/**
|
1164
|
+
* Gets the last index of the specified value.
|
1165
|
+
*
|
1166
|
+
* @see indexOf()
|
1167
|
+
* @param value
|
1168
|
+
* @param byteOffset
|
1169
|
+
* @param encoding
|
1170
|
+
*/
|
1171
|
+
lastIndexOf(value, byteOffset, encoding) {
|
1172
|
+
return this._bidirectionalIndexOf(this, value, byteOffset, encoding, false);
|
1173
|
+
}
|
1174
|
+
_bidirectionalIndexOf(buffer, val, byteOffset, encoding, dir) {
|
1175
|
+
if (buffer.length === 0) {
|
1176
|
+
return -1;
|
1177
|
+
}
|
1178
|
+
if (typeof byteOffset === "string") {
|
1179
|
+
encoding = byteOffset;
|
1180
|
+
byteOffset = 0;
|
1181
|
+
} else if (typeof byteOffset === "undefined") {
|
1182
|
+
byteOffset = 0;
|
1183
|
+
} else if (byteOffset > 2147483647) {
|
1184
|
+
byteOffset = 2147483647;
|
1185
|
+
} else if (byteOffset < -2147483648) {
|
1186
|
+
byteOffset = -2147483648;
|
1187
|
+
}
|
1188
|
+
byteOffset = +byteOffset;
|
1189
|
+
if (byteOffset !== byteOffset) {
|
1190
|
+
byteOffset = dir ? 0 : buffer.length - 1;
|
1191
|
+
}
|
1192
|
+
if (byteOffset < 0) {
|
1193
|
+
byteOffset = buffer.length + byteOffset;
|
1194
|
+
}
|
1195
|
+
if (byteOffset >= buffer.length) {
|
1196
|
+
if (dir) {
|
1197
|
+
return -1;
|
1198
|
+
} else {
|
1199
|
+
byteOffset = buffer.length - 1;
|
1200
|
+
}
|
1201
|
+
} else if (byteOffset < 0) {
|
1202
|
+
if (dir) {
|
1203
|
+
byteOffset = 0;
|
1204
|
+
} else {
|
1205
|
+
return -1;
|
1206
|
+
}
|
1207
|
+
}
|
1208
|
+
if (typeof val === "string") {
|
1209
|
+
val = Buffer.from(val, encoding);
|
1210
|
+
}
|
1211
|
+
if (Buffer.isBuffer(val)) {
|
1212
|
+
if (val.length === 0) {
|
1213
|
+
return -1;
|
1214
|
+
}
|
1215
|
+
return Buffer._arrayIndexOf(buffer, val, byteOffset, encoding, dir);
|
1216
|
+
} else if (typeof val === "number") {
|
1217
|
+
val = val & 255;
|
1218
|
+
if (typeof Uint8Array.prototype.indexOf === "function") {
|
1219
|
+
if (dir) {
|
1220
|
+
return Uint8Array.prototype.indexOf.call(buffer, val, byteOffset);
|
1221
|
+
} else {
|
1222
|
+
return Uint8Array.prototype.lastIndexOf.call(buffer, val, byteOffset);
|
1223
|
+
}
|
1224
|
+
}
|
1225
|
+
return Buffer._arrayIndexOf(buffer, Buffer.from([val]), byteOffset, encoding, dir);
|
1226
|
+
}
|
1227
|
+
throw new TypeError("val must be string, number or Buffer");
|
1228
|
+
}
|
1229
|
+
/**
|
1230
|
+
* Equivalent to `buf.indexOf() !== -1`.
|
1231
|
+
*
|
1232
|
+
* @param value
|
1233
|
+
* @param byteOffset
|
1234
|
+
* @param encoding
|
1235
|
+
*/
|
1236
|
+
includes(value, byteOffset, encoding) {
|
1237
|
+
return this.indexOf(value, byteOffset, encoding) !== -1;
|
1238
|
+
}
|
1239
|
+
/**
|
1240
|
+
* Creates a new buffer from the given parameters.
|
1241
|
+
*
|
1242
|
+
* @param data
|
1243
|
+
* @param encoding
|
1244
|
+
*/
|
1245
|
+
static from(a, b, c) {
|
1246
|
+
return new Buffer(a, b, c);
|
1247
|
+
}
|
1248
|
+
/**
|
1249
|
+
* Returns true if `obj` is a Buffer.
|
1250
|
+
*
|
1251
|
+
* @param obj
|
1252
|
+
*/
|
1253
|
+
static isBuffer(obj) {
|
1254
|
+
return obj != null && obj !== Buffer.prototype && Buffer._isInstance(obj, Buffer);
|
1255
|
+
}
|
1256
|
+
/**
|
1257
|
+
* Returns true if `encoding` is a supported encoding.
|
1258
|
+
*
|
1259
|
+
* @param encoding
|
1260
|
+
*/
|
1261
|
+
static isEncoding(encoding) {
|
1262
|
+
switch (encoding.toLowerCase()) {
|
1263
|
+
case "hex":
|
1264
|
+
case "utf8":
|
1265
|
+
case "ascii":
|
1266
|
+
case "binary":
|
1267
|
+
case "latin1":
|
1268
|
+
case "ucs2":
|
1269
|
+
case "utf16le":
|
1270
|
+
case "base64":
|
1271
|
+
return true;
|
1272
|
+
default:
|
1273
|
+
return false;
|
1274
|
+
}
|
1275
|
+
}
|
1276
|
+
/**
|
1277
|
+
* Gives the actual byte length of a string for an encoding. This is not the same as `string.length` since that
|
1278
|
+
* returns the number of characters in the string.
|
1279
|
+
*
|
1280
|
+
* @param string The string to test.
|
1281
|
+
* @param encoding The encoding to use for calculation. Defaults is `utf8`.
|
1282
|
+
*/
|
1283
|
+
static byteLength(string, encoding) {
|
1284
|
+
if (Buffer.isBuffer(string)) {
|
1285
|
+
return string.length;
|
1286
|
+
}
|
1287
|
+
if (typeof string !== "string" && (ArrayBuffer.isView(string) || Buffer._isInstance(string, ArrayBuffer))) {
|
1288
|
+
return string.byteLength;
|
1289
|
+
}
|
1290
|
+
if (typeof string !== "string") {
|
1291
|
+
throw new TypeError(
|
1292
|
+
'The "string" argument must be one of type string, Buffer, or ArrayBuffer. Received type ' + typeof string
|
1293
|
+
);
|
1294
|
+
}
|
1295
|
+
const len = string.length;
|
1296
|
+
const mustMatch = arguments.length > 2 && arguments[2] === true;
|
1297
|
+
if (!mustMatch && len === 0) {
|
1298
|
+
return 0;
|
1299
|
+
}
|
1300
|
+
switch (encoding?.toLowerCase()) {
|
1301
|
+
case "ascii":
|
1302
|
+
case "latin1":
|
1303
|
+
case "binary":
|
1304
|
+
return len;
|
1305
|
+
case "utf8":
|
1306
|
+
return Buffer._utf8ToBytes(string).length;
|
1307
|
+
case "hex":
|
1308
|
+
return len >>> 1;
|
1309
|
+
case "ucs2":
|
1310
|
+
case "utf16le":
|
1311
|
+
return len * 2;
|
1312
|
+
case "base64":
|
1313
|
+
return Buffer._base64ToBytes(string).length;
|
1314
|
+
default:
|
1315
|
+
return mustMatch ? -1 : Buffer._utf8ToBytes(string).length;
|
1316
|
+
}
|
1317
|
+
}
|
1318
|
+
/**
|
1319
|
+
* Returns a Buffer which is the result of concatenating all the buffers in the list together.
|
1320
|
+
*
|
1321
|
+
* - If the list has no items, or if the `totalLength` is 0, then it returns a zero-length buffer.
|
1322
|
+
* - If the list has exactly one item, then the first item is returned.
|
1323
|
+
* - If the list has more than one item, then a new buffer is created.
|
1324
|
+
*
|
1325
|
+
* It is faster to provide the `totalLength` if it is known. However, it will be calculated if not provided at
|
1326
|
+
* a small computational expense.
|
1327
|
+
*
|
1328
|
+
* @param list An array of Buffer objects to concatenate.
|
1329
|
+
* @param totalLength Total length of the buffers when concatenated.
|
1330
|
+
*/
|
1331
|
+
static concat(list, totalLength) {
|
1332
|
+
if (!Array.isArray(list)) {
|
1333
|
+
throw new TypeError('"list" argument must be an Array of Buffers');
|
1334
|
+
}
|
1335
|
+
if (list.length === 0) {
|
1336
|
+
return Buffer.alloc(0);
|
1337
|
+
}
|
1338
|
+
let i;
|
1339
|
+
if (totalLength === void 0) {
|
1340
|
+
totalLength = 0;
|
1341
|
+
for (i = 0; i < list.length; ++i) {
|
1342
|
+
totalLength += list[i].length;
|
1343
|
+
}
|
1344
|
+
}
|
1345
|
+
const buffer = Buffer.allocUnsafe(totalLength);
|
1346
|
+
let pos = 0;
|
1347
|
+
for (i = 0; i < list.length; ++i) {
|
1348
|
+
let buf = list[i];
|
1349
|
+
if (Buffer._isInstance(buf, Uint8Array)) {
|
1350
|
+
if (pos + buf.length > buffer.length) {
|
1351
|
+
if (!Buffer.isBuffer(buf)) {
|
1352
|
+
buf = Buffer.from(buf);
|
1353
|
+
}
|
1354
|
+
buf.copy(buffer, pos);
|
1355
|
+
} else {
|
1356
|
+
Uint8Array.prototype.set.call(buffer, buf, pos);
|
1357
|
+
}
|
1358
|
+
} else if (!Buffer.isBuffer(buf)) {
|
1359
|
+
throw new TypeError('"list" argument must be an Array of Buffers');
|
1360
|
+
} else {
|
1361
|
+
buf.copy(buffer, pos);
|
1362
|
+
}
|
1363
|
+
pos += buf.length;
|
1364
|
+
}
|
1365
|
+
return buffer;
|
1366
|
+
}
|
1367
|
+
/**
|
1368
|
+
* The same as `buf1.compare(buf2)`.
|
1369
|
+
*/
|
1370
|
+
static compare(buf1, buf2) {
|
1371
|
+
if (Buffer._isInstance(buf1, Uint8Array)) {
|
1372
|
+
buf1 = Buffer.from(buf1, buf1.byteOffset, buf1.byteLength);
|
1373
|
+
}
|
1374
|
+
if (Buffer._isInstance(buf2, Uint8Array)) {
|
1375
|
+
buf2 = Buffer.from(buf2, buf2.byteOffset, buf2.byteLength);
|
1376
|
+
}
|
1377
|
+
if (!Buffer.isBuffer(buf1) || !Buffer.isBuffer(buf2)) {
|
1378
|
+
throw new TypeError('The "buf1", "buf2" arguments must be one of type Buffer or Uint8Array');
|
1379
|
+
}
|
1380
|
+
if (buf1 === buf2) {
|
1381
|
+
return 0;
|
1382
|
+
}
|
1383
|
+
let x = buf1.length;
|
1384
|
+
let y = buf2.length;
|
1385
|
+
for (let i = 0, len = Math.min(x, y); i < len; ++i) {
|
1386
|
+
if (buf1[i] !== buf2[i]) {
|
1387
|
+
x = buf1[i];
|
1388
|
+
y = buf2[i];
|
1389
|
+
break;
|
1390
|
+
}
|
1391
|
+
}
|
1392
|
+
if (x < y) {
|
1393
|
+
return -1;
|
1394
|
+
}
|
1395
|
+
if (y < x) {
|
1396
|
+
return 1;
|
1397
|
+
}
|
1398
|
+
return 0;
|
1399
|
+
}
|
1400
|
+
/**
|
1401
|
+
* Allocates a new buffer of `size` octets.
|
1402
|
+
*
|
1403
|
+
* @param size The number of octets to allocate.
|
1404
|
+
* @param fill If specified, the buffer will be initialized by calling `buf.fill(fill)`, or with zeroes otherwise.
|
1405
|
+
* @param encoding The encoding used for the call to `buf.fill()` while initializing.
|
1406
|
+
*/
|
1407
|
+
static alloc(size, fill, encoding) {
|
1408
|
+
if (typeof size !== "number") {
|
1409
|
+
throw new TypeError('"size" argument must be of type number');
|
1410
|
+
} else if (size < 0) {
|
1411
|
+
throw new RangeError('The value "' + size + '" is invalid for option "size"');
|
1412
|
+
}
|
1413
|
+
if (size <= 0) {
|
1414
|
+
return new Buffer(size);
|
1415
|
+
}
|
1416
|
+
if (fill !== void 0) {
|
1417
|
+
return typeof encoding === "string" ? new Buffer(size).fill(fill, 0, size, encoding) : new Buffer(size).fill(fill);
|
1418
|
+
}
|
1419
|
+
return new Buffer(size);
|
1420
|
+
}
|
1421
|
+
/**
|
1422
|
+
* Allocates a new buffer of `size` octets without initializing memory. The contents of the buffer are unknown.
|
1423
|
+
*
|
1424
|
+
* @param size
|
1425
|
+
*/
|
1426
|
+
static allocUnsafe(size) {
|
1427
|
+
if (typeof size !== "number") {
|
1428
|
+
throw new TypeError('"size" argument must be of type number');
|
1429
|
+
} else if (size < 0) {
|
1430
|
+
throw new RangeError('The value "' + size + '" is invalid for option "size"');
|
1431
|
+
}
|
1432
|
+
return new Buffer(size < 0 ? 0 : Buffer._checked(size) | 0);
|
1433
|
+
}
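`alloc` initializes the returned memory (zeroes, or the given `fill`), while `allocUnsafe` only validates the size against `K_MAX_LENGTH`, so its contents are indeterminate until written. A short sketch of the practical difference, again using the standard `Buffer` API for illustration:

```js
const zeroed = Buffer.alloc(4);        // <Buffer 00 00 00 00>
const filled = Buffer.alloc(4, 0xab);  // <Buffer ab ab ab ab>

const fast = Buffer.allocUnsafe(4);    // contents are whatever happened to be in memory
fast.fill(0);                          // initialize before reading to avoid leaking old data
console.log(zeroed, filled, fast);
```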
|
1434
|
+
/**
|
1435
|
+
* Returns true if the given `obj` is an instance of `type`.
|
1436
|
+
*
|
1437
|
+
* @param obj
|
1438
|
+
* @param type
|
1439
|
+
*/
|
1440
|
+
static _isInstance(obj, type) {
|
1441
|
+
return obj instanceof type || obj != null && obj.constructor != null && obj.constructor.name != null && obj.constructor.name === type.name;
|
1442
|
+
}
|
1443
|
+
static _checked(length) {
|
1444
|
+
if (length >= K_MAX_LENGTH) {
|
1445
|
+
throw new RangeError(
|
1446
|
+
"Attempt to allocate Buffer larger than maximum size: 0x" + K_MAX_LENGTH.toString(16) + " bytes"
|
1447
|
+
);
|
1448
|
+
}
|
1449
|
+
return length | 0;
|
1450
|
+
}
|
1451
|
+
static _blitBuffer(src, dst, offset, length) {
|
1452
|
+
let i;
|
1453
|
+
for (i = 0; i < length; ++i) {
|
1454
|
+
if (i + offset >= dst.length || i >= src.length) {
|
1455
|
+
break;
|
1456
|
+
}
|
1457
|
+
dst[i + offset] = src[i];
|
1458
|
+
}
|
1459
|
+
return i;
|
1460
|
+
}
|
1461
|
+
static _utf8Write(buf, string, offset, length) {
|
1462
|
+
return Buffer._blitBuffer(Buffer._utf8ToBytes(string, buf.length - offset), buf, offset, length);
|
1463
|
+
}
|
1464
|
+
static _asciiWrite(buf, string, offset, length) {
|
1465
|
+
return Buffer._blitBuffer(Buffer._asciiToBytes(string), buf, offset, length);
|
1466
|
+
}
|
1467
|
+
static _base64Write(buf, string, offset, length) {
|
1468
|
+
return Buffer._blitBuffer(Buffer._base64ToBytes(string), buf, offset, length);
|
1469
|
+
}
|
1470
|
+
static _ucs2Write(buf, string, offset, length) {
|
1471
|
+
return Buffer._blitBuffer(Buffer._utf16leToBytes(string, buf.length - offset), buf, offset, length);
|
1472
|
+
}
|
1473
|
+
static _hexWrite(buf, string, offset, length) {
|
1474
|
+
offset = Number(offset) || 0;
|
1475
|
+
const remaining = buf.length - offset;
|
1476
|
+
if (!length) {
|
1477
|
+
length = remaining;
|
1478
|
+
} else {
|
1479
|
+
length = Number(length);
|
1480
|
+
if (length > remaining) {
|
1481
|
+
length = remaining;
|
1482
|
+
}
|
1483
|
+
}
|
1484
|
+
const strLen = string.length;
|
1485
|
+
if (length > strLen / 2) {
|
1486
|
+
length = strLen / 2;
|
1487
|
+
}
|
1488
|
+
let i;
|
1489
|
+
for (i = 0; i < length; ++i) {
|
1490
|
+
const parsed = parseInt(string.substr(i * 2, 2), 16);
|
1491
|
+
if (parsed !== parsed) {
|
1492
|
+
return i;
|
1493
|
+
}
|
1494
|
+
buf[offset + i] = parsed;
|
1495
|
+
}
|
1496
|
+
return i;
|
1497
|
+
}
|
1498
|
+
static _utf8ToBytes(string, units) {
|
1499
|
+
units = units || Infinity;
|
1500
|
+
const length = string.length;
|
1501
|
+
const bytes = [];
|
1502
|
+
let codePoint;
|
1503
|
+
let leadSurrogate = null;
|
1504
|
+
for (let i = 0; i < length; ++i) {
|
1505
|
+
codePoint = string.charCodeAt(i);
|
1506
|
+
if (codePoint > 55295 && codePoint < 57344) {
|
1507
|
+
if (!leadSurrogate) {
|
1508
|
+
if (codePoint > 56319) {
|
1509
|
+
if ((units -= 3) > -1) {
|
1510
|
+
bytes.push(239, 191, 189);
|
1511
|
+
}
|
1512
|
+
continue;
|
1513
|
+
} else if (i + 1 === length) {
|
1514
|
+
if ((units -= 3) > -1) {
|
1515
|
+
bytes.push(239, 191, 189);
|
1516
|
+
}
|
1517
|
+
continue;
|
1518
|
+
}
|
1519
|
+
leadSurrogate = codePoint;
|
1520
|
+
continue;
|
1521
|
+
}
|
1522
|
+
if (codePoint < 56320) {
|
1523
|
+
if ((units -= 3) > -1) {
|
1524
|
+
bytes.push(239, 191, 189);
|
1525
|
+
}
|
1526
|
+
leadSurrogate = codePoint;
|
1527
|
+
continue;
|
1528
|
+
}
|
1529
|
+
codePoint = (leadSurrogate - 55296 << 10 | codePoint - 56320) + 65536;
|
1530
|
+
} else if (leadSurrogate) {
|
1531
|
+
if ((units -= 3) > -1) {
|
1532
|
+
bytes.push(239, 191, 189);
|
1533
|
+
}
|
1534
|
+
}
|
1535
|
+
leadSurrogate = null;
|
1536
|
+
if (codePoint < 128) {
|
1537
|
+
if ((units -= 1) < 0) {
|
1538
|
+
break;
|
1539
|
+
}
|
1540
|
+
bytes.push(codePoint);
|
1541
|
+
} else if (codePoint < 2048) {
|
1542
|
+
if ((units -= 2) < 0) {
|
1543
|
+
break;
|
1544
|
+
}
|
1545
|
+
bytes.push(codePoint >> 6 | 192, codePoint & 63 | 128);
|
1546
|
+
} else if (codePoint < 65536) {
|
1547
|
+
if ((units -= 3) < 0) {
|
1548
|
+
break;
|
1549
|
+
}
|
1550
|
+
bytes.push(codePoint >> 12 | 224, codePoint >> 6 & 63 | 128, codePoint & 63 | 128);
|
1551
|
+
} else if (codePoint < 1114112) {
|
1552
|
+
if ((units -= 4) < 0) {
|
1553
|
+
break;
|
1554
|
+
}
|
1555
|
+
bytes.push(
|
1556
|
+
codePoint >> 18 | 240,
|
1557
|
+
codePoint >> 12 & 63 | 128,
|
1558
|
+
codePoint >> 6 & 63 | 128,
|
1559
|
+
codePoint & 63 | 128
|
1560
|
+
);
|
1561
|
+
} else {
|
1562
|
+
throw new Error("Invalid code point");
|
1563
|
+
}
|
1564
|
+
}
|
1565
|
+
return bytes;
|
1566
|
+
}
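`_utf8ToBytes` is a hand-rolled UTF-8 encoder: it pairs surrogate halves into one code point, emits the replacement character U+FFFD (the `239, 191, 189` pushes) for lone surrogates, and stops once the optional `units` byte budget runs out. On the happy path its output matches the platform `TextEncoder`; a small comparison sketch, assuming a runtime that provides `TextEncoder`:

```js
// "€" is U+20AC (3 UTF-8 bytes); "😀" is U+1F600, a surrogate pair (4 bytes).
console.log(Array.from(new TextEncoder().encode("A€😀")));
// [65, 226, 130, 172, 240, 159, 152, 128]

// A lone surrogate cannot be encoded and becomes EF BF BD (U+FFFD),
// mirroring the bytes.push(239, 191, 189) branches above.
console.log(Array.from(new TextEncoder().encode("\uD800")));
// [239, 191, 189]
```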
|
1567
|
+
static _base64ToBytes(str) {
|
1568
|
+
return toByteArray(base64clean(str));
|
1569
|
+
}
|
1570
|
+
static _asciiToBytes(str) {
|
1571
|
+
const byteArray = [];
|
1572
|
+
for (let i = 0; i < str.length; ++i) {
|
1573
|
+
byteArray.push(str.charCodeAt(i) & 255);
|
1574
|
+
}
|
1575
|
+
return byteArray;
|
1576
|
+
}
|
1577
|
+
static _utf16leToBytes(str, units) {
|
1578
|
+
let c, hi, lo;
|
1579
|
+
const byteArray = [];
|
1580
|
+
for (let i = 0; i < str.length; ++i) {
|
1581
|
+
if ((units -= 2) < 0)
|
1582
|
+
break;
|
1583
|
+
c = str.charCodeAt(i);
|
1584
|
+
hi = c >> 8;
|
1585
|
+
lo = c % 256;
|
1586
|
+
byteArray.push(lo);
|
1587
|
+
byteArray.push(hi);
|
1588
|
+
}
|
1589
|
+
return byteArray;
|
1590
|
+
}
|
1591
|
+
static _hexSlice(buf, start, end) {
|
1592
|
+
const len = buf.length;
|
1593
|
+
if (!start || start < 0) {
|
1594
|
+
start = 0;
|
1595
|
+
}
|
1596
|
+
if (!end || end < 0 || end > len) {
|
1597
|
+
end = len;
|
1598
|
+
}
|
1599
|
+
let out = "";
|
1600
|
+
for (let i = start; i < end; ++i) {
|
1601
|
+
out += hexSliceLookupTable[buf[i]];
|
1602
|
+
}
|
1603
|
+
return out;
|
1604
|
+
}
|
1605
|
+
static _base64Slice(buf, start, end) {
|
1606
|
+
if (start === 0 && end === buf.length) {
|
1607
|
+
return fromByteArray(buf);
|
1608
|
+
} else {
|
1609
|
+
return fromByteArray(buf.slice(start, end));
|
1610
|
+
}
|
1611
|
+
}
|
1612
|
+
static _utf8Slice(buf, start, end) {
|
1613
|
+
end = Math.min(buf.length, end);
|
1614
|
+
const res = [];
|
1615
|
+
let i = start;
|
1616
|
+
while (i < end) {
|
1617
|
+
const firstByte = buf[i];
|
1618
|
+
let codePoint = null;
|
1619
|
+
let bytesPerSequence = firstByte > 239 ? 4 : firstByte > 223 ? 3 : firstByte > 191 ? 2 : 1;
|
1620
|
+
if (i + bytesPerSequence <= end) {
|
1621
|
+
let secondByte, thirdByte, fourthByte, tempCodePoint;
|
1622
|
+
switch (bytesPerSequence) {
|
1623
|
+
case 1:
|
1624
|
+
if (firstByte < 128) {
|
1625
|
+
codePoint = firstByte;
|
1626
|
+
}
|
1627
|
+
break;
|
1628
|
+
case 2:
|
1629
|
+
secondByte = buf[i + 1];
|
1630
|
+
if ((secondByte & 192) === 128) {
|
1631
|
+
tempCodePoint = (firstByte & 31) << 6 | secondByte & 63;
|
1632
|
+
if (tempCodePoint > 127) {
|
1633
|
+
codePoint = tempCodePoint;
|
1634
|
+
}
|
1635
|
+
}
|
1636
|
+
break;
|
1637
|
+
case 3:
|
1638
|
+
secondByte = buf[i + 1];
|
1639
|
+
thirdByte = buf[i + 2];
|
1640
|
+
if ((secondByte & 192) === 128 && (thirdByte & 192) === 128) {
|
1641
|
+
tempCodePoint = (firstByte & 15) << 12 | (secondByte & 63) << 6 | thirdByte & 63;
|
1642
|
+
if (tempCodePoint > 2047 && (tempCodePoint < 55296 || tempCodePoint > 57343)) {
|
1643
|
+
codePoint = tempCodePoint;
|
1644
|
+
}
|
1645
|
+
}
|
1646
|
+
break;
|
1647
|
+
case 4:
|
1648
|
+
secondByte = buf[i + 1];
|
1649
|
+
thirdByte = buf[i + 2];
|
1650
|
+
fourthByte = buf[i + 3];
|
1651
|
+
if ((secondByte & 192) === 128 && (thirdByte & 192) === 128 && (fourthByte & 192) === 128) {
|
1652
|
+
tempCodePoint = (firstByte & 15) << 18 | (secondByte & 63) << 12 | (thirdByte & 63) << 6 | fourthByte & 63;
|
1653
|
+
if (tempCodePoint > 65535 && tempCodePoint < 1114112) {
|
1654
|
+
codePoint = tempCodePoint;
|
1655
|
+
}
|
1656
|
+
}
|
1657
|
+
}
|
1658
|
+
}
|
1659
|
+
if (codePoint === null) {
|
1660
|
+
codePoint = 65533;
|
1661
|
+
bytesPerSequence = 1;
|
1662
|
+
} else if (codePoint > 65535) {
|
1663
|
+
codePoint -= 65536;
|
1664
|
+
res.push(codePoint >>> 10 & 1023 | 55296);
|
1665
|
+
codePoint = 56320 | codePoint & 1023;
|
1666
|
+
}
|
1667
|
+
res.push(codePoint);
|
1668
|
+
i += bytesPerSequence;
|
1669
|
+
}
|
1670
|
+
return Buffer._decodeCodePointsArray(res);
|
1671
|
+
}
|
1672
|
+
static _decodeCodePointsArray(codePoints) {
|
1673
|
+
const len = codePoints.length;
|
1674
|
+
if (len <= MAX_ARGUMENTS_LENGTH) {
|
1675
|
+
return String.fromCharCode.apply(String, codePoints);
|
1676
|
+
}
|
1677
|
+
let res = "";
|
1678
|
+
let i = 0;
|
1679
|
+
while (i < len) {
|
1680
|
+
res += String.fromCharCode.apply(String, codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH));
|
1681
|
+
}
|
1682
|
+
return res;
|
1683
|
+
}
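`_decodeCodePointsArray` guards against engines' argument-count limits by only spreading `MAX_ARGUMENTS_LENGTH` code units into `String.fromCharCode` per call. A standalone sketch of the same chunking idea; the 0x1000 limit mirrors the constant used by the upstream buffer polyfill and is an assumption here:

```js
const MAX_ARGS = 0x1000; // assumed chunk size, matching the polyfill constant

function codePointsToString(codePoints) {
  if (codePoints.length <= MAX_ARGS) {
    return String.fromCharCode(...codePoints);
  }
  // Build the string in bounded chunks so the spread never exceeds the
  // engine's maximum argument count.
  let result = "";
  for (let i = 0; i < codePoints.length; i += MAX_ARGS) {
    result += String.fromCharCode(...codePoints.slice(i, i + MAX_ARGS));
  }
  return result;
}

console.log(codePointsToString([72, 105])); // "Hi"
```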
|
1684
|
+
static _asciiSlice(buf, start, end) {
|
1685
|
+
let ret = "";
|
1686
|
+
end = Math.min(buf.length, end);
|
1687
|
+
for (let i = start; i < end; ++i) {
|
1688
|
+
ret += String.fromCharCode(buf[i] & 127);
|
1689
|
+
}
|
1690
|
+
return ret;
|
1691
|
+
}
|
1692
|
+
static _latin1Slice(buf, start, end) {
|
1693
|
+
let ret = "";
|
1694
|
+
end = Math.min(buf.length, end);
|
1695
|
+
for (let i = start; i < end; ++i) {
|
1696
|
+
ret += String.fromCharCode(buf[i]);
|
1697
|
+
}
|
1698
|
+
return ret;
|
1699
|
+
}
|
1700
|
+
static _utf16leSlice(buf, start, end) {
|
1701
|
+
const bytes = buf.slice(start, end);
|
1702
|
+
let res = "";
|
1703
|
+
for (let i = 0; i < bytes.length - 1; i += 2) {
|
1704
|
+
res += String.fromCharCode(bytes[i] + bytes[i + 1] * 256);
|
1705
|
+
}
|
1706
|
+
return res;
|
1707
|
+
}
|
1708
|
+
static _arrayIndexOf(arr, val, byteOffset, encoding, dir) {
|
1709
|
+
let indexSize = 1;
|
1710
|
+
let arrLength = arr.length;
|
1711
|
+
let valLength = val.length;
|
1712
|
+
if (encoding !== void 0) {
|
1713
|
+
encoding = Buffer._getEncoding(encoding);
|
1714
|
+
if (encoding === "ucs2" || encoding === "utf16le") {
|
1715
|
+
if (arr.length < 2 || val.length < 2) {
|
1716
|
+
return -1;
|
1717
|
+
}
|
1718
|
+
indexSize = 2;
|
1719
|
+
arrLength /= 2;
|
1720
|
+
valLength /= 2;
|
1721
|
+
byteOffset /= 2;
|
1722
|
+
}
|
1723
|
+
}
|
1724
|
+
function read(buf, i2) {
|
1725
|
+
if (indexSize === 1) {
|
1726
|
+
return buf[i2];
|
1727
|
+
} else {
|
1728
|
+
return buf.readUInt16BE(i2 * indexSize);
|
1729
|
+
}
|
1730
|
+
}
|
1731
|
+
let i;
|
1732
|
+
if (dir) {
|
1733
|
+
let foundIndex = -1;
|
1734
|
+
for (i = byteOffset; i < arrLength; i++) {
|
1735
|
+
if (read(arr, i) === read(val, foundIndex === -1 ? 0 : i - foundIndex)) {
|
1736
|
+
if (foundIndex === -1)
|
1737
|
+
foundIndex = i;
|
1738
|
+
if (i - foundIndex + 1 === valLength)
|
1739
|
+
return foundIndex * indexSize;
|
1740
|
+
} else {
|
1741
|
+
if (foundIndex !== -1)
|
1742
|
+
i -= i - foundIndex;
|
1743
|
+
foundIndex = -1;
|
1744
|
+
}
|
1745
|
+
}
|
1746
|
+
} else {
|
1747
|
+
if (byteOffset + valLength > arrLength) {
|
1748
|
+
byteOffset = arrLength - valLength;
|
1749
|
+
}
|
1750
|
+
for (i = byteOffset; i >= 0; i--) {
|
1751
|
+
let found = true;
|
1752
|
+
for (let j = 0; j < valLength; j++) {
|
1753
|
+
if (read(arr, i + j) !== read(val, j)) {
|
1754
|
+
found = false;
|
1755
|
+
break;
|
1756
|
+
}
|
1757
|
+
}
|
1758
|
+
if (found) {
|
1759
|
+
return i;
|
1760
|
+
}
|
1761
|
+
}
|
1762
|
+
}
|
1763
|
+
return -1;
|
1764
|
+
}
|
1765
|
+
static _checkOffset(offset, ext, length) {
|
1766
|
+
if (offset % 1 !== 0 || offset < 0)
|
1767
|
+
throw new RangeError("offset is not uint");
|
1768
|
+
if (offset + ext > length)
|
1769
|
+
throw new RangeError("Trying to access beyond buffer length");
|
1770
|
+
}
|
1771
|
+
static _checkInt(buf, value, offset, ext, max, min) {
|
1772
|
+
if (!Buffer.isBuffer(buf))
|
1773
|
+
throw new TypeError('"buffer" argument must be a Buffer instance');
|
1774
|
+
if (value > max || value < min)
|
1775
|
+
throw new RangeError('"value" argument is out of bounds');
|
1776
|
+
if (offset + ext > buf.length)
|
1777
|
+
throw new RangeError("Index out of range");
|
1778
|
+
}
|
1779
|
+
static _getEncoding(encoding) {
|
1780
|
+
let toLowerCase = false;
|
1781
|
+
let originalEncoding = "";
|
1782
|
+
for (; ; ) {
|
1783
|
+
switch (encoding) {
|
1784
|
+
case "hex":
|
1785
|
+
return "hex";
|
1786
|
+
case "utf8":
|
1787
|
+
return "utf8";
|
1788
|
+
case "ascii":
|
1789
|
+
return "ascii";
|
1790
|
+
case "binary":
|
1791
|
+
return "binary";
|
1792
|
+
case "latin1":
|
1793
|
+
return "latin1";
|
1794
|
+
case "ucs2":
|
1795
|
+
return "ucs2";
|
1796
|
+
case "utf16le":
|
1797
|
+
return "utf16le";
|
1798
|
+
case "base64":
|
1799
|
+
return "base64";
|
1800
|
+
default: {
|
1801
|
+
if (toLowerCase) {
|
1802
|
+
throw new TypeError("Unknown or unsupported encoding: " + originalEncoding);
|
1803
|
+
}
|
1804
|
+
toLowerCase = true;
|
1805
|
+
originalEncoding = encoding;
|
1806
|
+
encoding = encoding.toLowerCase();
|
1807
|
+
}
|
1808
|
+
}
|
1809
|
+
}
|
1810
|
+
}
|
1811
|
+
}
|
1812
|
+
const hexSliceLookupTable = function() {
|
1813
|
+
const alphabet = "0123456789abcdef";
|
1814
|
+
const table = new Array(256);
|
1815
|
+
for (let i = 0; i < 16; ++i) {
|
1816
|
+
const i16 = i * 16;
|
1817
|
+
for (let j = 0; j < 16; ++j) {
|
1818
|
+
table[i16 + j] = alphabet[i] + alphabet[j];
|
1819
|
+
}
|
1820
|
+
}
|
1821
|
+
return table;
|
1822
|
+
}();
|
1823
|
+
const INVALID_BASE64_RE = /[^+/0-9A-Za-z-_]/g;
|
1824
|
+
function base64clean(str) {
|
1825
|
+
str = str.split("=")[0];
|
1826
|
+
str = str.trim().replace(INVALID_BASE64_RE, "");
|
1827
|
+
if (str.length < 2)
|
1828
|
+
return "";
|
1829
|
+
while (str.length % 4 !== 0) {
|
1830
|
+
str = str + "=";
|
1831
|
+
}
|
1832
|
+
return str;
|
1833
|
+
}
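`base64clean` normalizes input before decoding: padding is cut off, characters outside the base64/base64url alphabets are stripped, strings shorter than two characters collapse to the empty string, and `=` is re-appended until the length is a multiple of four (the reverse lookup table above already maps `-` and `_`, so base64url input decodes as well). A small illustration of the re-padding rule:

```js
// Re-pad a trimmed base64 string to a multiple of 4, as base64clean does.
function pad(str) {
  const trimmed = str.split("=")[0].trim();
  if (trimmed.length < 2) return "";
  return trimmed + "=".repeat((4 - (trimmed.length % 4)) % 4);
}

console.log(pad("aGVsbG8")); // "aGVsbG8=" -> decodes to "hello"
console.log(pad("aGk"));     // "aGk="     -> decodes to "hi"
console.log(pad("a"));       // ""         -> too short to carry a byte
```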
|
1834
|
+
|
26
1835
|
function notEmpty(value) {
|
27
1836
|
return value !== null && value !== void 0;
|
28
1837
|
}
|
@@ -32,8 +1841,15 @@ function compact(arr) {
|
|
32
1841
|
function compactObject(obj) {
|
33
1842
|
return Object.fromEntries(Object.entries(obj).filter(([, value]) => notEmpty(value)));
|
34
1843
|
}
|
1844
|
+
function isBlob(value) {
|
1845
|
+
try {
|
1846
|
+
return value instanceof Blob;
|
1847
|
+
} catch (error) {
|
1848
|
+
return false;
|
1849
|
+
}
|
1850
|
+
}
|
35
1851
|
function isObject(value) {
|
36
|
-
return Boolean(value) && typeof value === "object" && !Array.isArray(value) && !(value instanceof Date);
|
1852
|
+
return Boolean(value) && typeof value === "object" && !Array.isArray(value) && !(value instanceof Date) && !isBlob(value);
|
37
1853
|
}
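`isObject` now also rejects `Blob` instances, and the new `isBlob` helper wraps the `instanceof` check in `try/catch` because `Blob` may not exist as a global in older runtimes. Without this, a binary upload body would be treated as a plain object downstream and JSON-stringified. A minimal sketch of why the extra check is needed, assuming a runtime where `Blob` is defined:

```js
const blob = new Blob(["raw bytes"], { type: "application/octet-stream" });

console.log(typeof blob);          // "object"  - passes the old isObject checks
console.log(Array.isArray(blob));  // false
console.log(blob instanceof Blob); // true      - what isBlob now detects
// With the change above, isObject(blob) is false, so the body is sent
// as-is instead of going through JSON.stringify.
```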
|
38
1854
|
function isDefined(value) {
|
39
1855
|
return value !== null && value !== void 0;
|
@@ -236,17 +2052,11 @@ function getPreviewBranch() {
|
|
236
2052
|
}
|
237
2053
|
}
|
238
2054
|
|
239
|
-
var __defProp$8 = Object.defineProperty;
|
240
|
-
var __defNormalProp$8 = (obj, key, value) => key in obj ? __defProp$8(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
|
241
|
-
var __publicField$8 = (obj, key, value) => {
|
242
|
-
__defNormalProp$8(obj, typeof key !== "symbol" ? key + "" : key, value);
|
243
|
-
return value;
|
244
|
-
};
|
245
2055
|
var __accessCheck$8 = (obj, member, msg) => {
|
246
2056
|
if (!member.has(obj))
|
247
2057
|
throw TypeError("Cannot " + msg);
|
248
2058
|
};
|
249
|
-
var __privateGet$8 = (obj, member, getter) => {
|
2059
|
+
var __privateGet$7 = (obj, member, getter) => {
|
250
2060
|
__accessCheck$8(obj, member, "read from private field");
|
251
2061
|
return getter ? getter.call(obj) : member.get(obj);
|
252
2062
|
};
|
@@ -255,7 +2065,7 @@ var __privateAdd$8 = (obj, member, value) => {
|
|
255
2065
|
throw TypeError("Cannot add the same private member more than once");
|
256
2066
|
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
257
2067
|
};
|
258
|
-
var __privateSet$8 = (obj, member, value, setter) => {
|
2068
|
+
var __privateSet$6 = (obj, member, value, setter) => {
|
259
2069
|
__accessCheck$8(obj, member, "write to private field");
|
260
2070
|
setter ? setter.call(obj, value) : member.set(obj, value);
|
261
2071
|
return value;
|
@@ -265,14 +2075,13 @@ var __privateMethod$4 = (obj, member, method) => {
|
|
265
2075
|
return method;
|
266
2076
|
};
|
267
2077
|
var _fetch, _queue, _concurrency, _enqueue, enqueue_fn;
|
268
|
-
const REQUEST_TIMEOUT =
|
2078
|
+
const REQUEST_TIMEOUT = 5 * 60 * 1e3;
|
269
2079
|
function getFetchImplementation(userFetch) {
|
270
2080
|
const globalFetch = typeof fetch !== "undefined" ? fetch : void 0;
|
271
|
-
const
|
2081
|
+
const globalThisFetch = typeof globalThis !== "undefined" ? globalThis.fetch : void 0;
|
2082
|
+
const fetchImpl = userFetch ?? globalFetch ?? globalThisFetch;
|
272
2083
|
if (!fetchImpl) {
|
273
|
-
throw new Error(
|
274
|
-
`Couldn't find \`fetch\`. Install a fetch implementation such as \`node-fetch\` and pass it explicitly.`
|
275
|
-
);
|
2084
|
+
throw new Error(`Couldn't find a global \`fetch\`. Pass a fetch implementation explicitly.`);
|
276
2085
|
}
|
277
2086
|
return fetchImpl;
|
278
2087
|
}
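Fetch resolution now falls back from a user-supplied implementation to the bare `fetch` global and then to `globalThis.fetch`, and the error message no longer points at `node-fetch` specifically. A hedged sketch of passing a custom implementation; the wrapper name is illustrative and not part of the package:

```js
// Log every request, then delegate to the platform fetch.
const loggingFetch = async (input, init) => {
  console.log("->", (init && init.method) || "GET", String(input));
  return fetch(input, init);
};

// Anything with the WHATWG fetch signature can be supplied where the client
// accepts a `fetch` option; getFetchImplementation(loggingFetch) returns it
// unchanged and only falls back to the globals when nothing is provided.
```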
|
@@ -282,21 +2091,19 @@ class ApiRequestPool {
|
|
282
2091
|
__privateAdd$8(this, _fetch, void 0);
|
283
2092
|
__privateAdd$8(this, _queue, void 0);
|
284
2093
|
__privateAdd$8(this, _concurrency, void 0);
|
285
|
-
|
286
|
-
|
287
|
-
__privateSet$8(this, _queue, []);
|
288
|
-
__privateSet$8(this, _concurrency, concurrency);
|
2094
|
+
__privateSet$6(this, _queue, []);
|
2095
|
+
__privateSet$6(this, _concurrency, concurrency);
|
289
2096
|
this.running = 0;
|
290
2097
|
this.started = 0;
|
291
2098
|
}
|
292
2099
|
setFetch(fetch2) {
|
293
|
-
__privateSet$8(this, _fetch, fetch2);
|
2100
|
+
__privateSet$6(this, _fetch, fetch2);
|
294
2101
|
}
|
295
2102
|
getFetch() {
|
296
|
-
if (!__privateGet$8(this, _fetch)) {
|
2103
|
+
if (!__privateGet$7(this, _fetch)) {
|
297
2104
|
throw new Error("Fetch not set");
|
298
2105
|
}
|
299
|
-
return __privateGet$8(this, _fetch);
|
2106
|
+
return __privateGet$7(this, _fetch);
|
300
2107
|
}
|
301
2108
|
request(url, options) {
|
302
2109
|
const start = /* @__PURE__ */ new Date();
|
@@ -328,19 +2135,19 @@ _queue = new WeakMap();
|
|
328
2135
|
_concurrency = new WeakMap();
|
329
2136
|
_enqueue = new WeakSet();
|
330
2137
|
enqueue_fn = function(task) {
|
331
|
-
const promise = new Promise((resolve) => __privateGet$8(this, _queue).push(resolve)).finally(() => {
|
2138
|
+
const promise = new Promise((resolve) => __privateGet$7(this, _queue).push(resolve)).finally(() => {
|
332
2139
|
this.started--;
|
333
2140
|
this.running++;
|
334
2141
|
}).then(() => task()).finally(() => {
|
335
2142
|
this.running--;
|
336
|
-
const next = __privateGet$8(this, _queue).shift();
|
2143
|
+
const next = __privateGet$7(this, _queue).shift();
|
337
2144
|
if (next !== void 0) {
|
338
2145
|
this.started++;
|
339
2146
|
next();
|
340
2147
|
}
|
341
2148
|
});
|
342
|
-
if (this.running + this.started < __privateGet$8(this, _concurrency)) {
|
343
|
-
const next = __privateGet$8(this, _queue).shift();
|
2149
|
+
if (this.running + this.started < __privateGet$7(this, _concurrency)) {
|
2150
|
+
const next = __privateGet$7(this, _queue).shift();
|
344
2151
|
if (next !== void 0) {
|
345
2152
|
this.started++;
|
346
2153
|
next();
|
@@ -529,26 +2336,16 @@ function defaultOnOpen(response) {
|
|
529
2336
|
}
|
530
2337
|
}
|
531
2338
|
|
532
|
-
const VERSION = "0.
|
2339
|
+
const VERSION = "0.29.4";
|
533
2340
|
|
534
|
-
var __defProp$7 = Object.defineProperty;
|
535
|
-
var __defNormalProp$7 = (obj, key, value) => key in obj ? __defProp$7(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
|
536
|
-
var __publicField$7 = (obj, key, value) => {
|
537
|
-
__defNormalProp$7(obj, typeof key !== "symbol" ? key + "" : key, value);
|
538
|
-
return value;
|
539
|
-
};
|
540
2341
|
class ErrorWithCause extends Error {
|
541
2342
|
constructor(message, options) {
|
542
2343
|
super(message, options);
|
543
|
-
__publicField$7(this, "cause");
|
544
2344
|
}
|
545
2345
|
}
|
546
2346
|
class FetcherError extends ErrorWithCause {
|
547
2347
|
constructor(status, data, requestId) {
|
548
2348
|
super(getMessage(data));
|
549
|
-
__publicField$7(this, "status");
|
550
|
-
__publicField$7(this, "requestId");
|
551
|
-
__publicField$7(this, "errors");
|
552
2349
|
this.status = status;
|
553
2350
|
this.errors = isBulkError(data) ? data.errors : [{ message: getMessage(data), status }];
|
554
2351
|
this.requestId = requestId;
|
@@ -582,6 +2379,67 @@ function getMessage(data) {
|
|
582
2379
|
}
|
583
2380
|
}
|
584
2381
|
|
2382
|
+
function getHostUrl(provider, type) {
|
2383
|
+
if (isHostProviderAlias(provider)) {
|
2384
|
+
return providers[provider][type];
|
2385
|
+
} else if (isHostProviderBuilder(provider)) {
|
2386
|
+
return provider[type];
|
2387
|
+
}
|
2388
|
+
throw new Error("Invalid API provider");
|
2389
|
+
}
|
2390
|
+
const providers = {
|
2391
|
+
production: {
|
2392
|
+
main: "https://api.xata.io",
|
2393
|
+
workspaces: "https://{workspaceId}.{region}.xata.sh"
|
2394
|
+
},
|
2395
|
+
staging: {
|
2396
|
+
main: "https://api.staging-xata.dev",
|
2397
|
+
workspaces: "https://{workspaceId}.{region}.staging-xata.dev"
|
2398
|
+
},
|
2399
|
+
dev: {
|
2400
|
+
main: "https://api.dev-xata.dev",
|
2401
|
+
workspaces: "https://{workspaceId}.{region}.dev-xata.dev"
|
2402
|
+
},
|
2403
|
+
local: {
|
2404
|
+
main: "http://localhost:6001",
|
2405
|
+
workspaces: "http://{workspaceId}.{region}.localhost:6001"
|
2406
|
+
}
|
2407
|
+
};
|
2408
|
+
function isHostProviderAlias(alias) {
|
2409
|
+
return isString(alias) && Object.keys(providers).includes(alias);
|
2410
|
+
}
|
2411
|
+
function isHostProviderBuilder(builder) {
|
2412
|
+
return isObject(builder) && isString(builder.main) && isString(builder.workspaces);
|
2413
|
+
}
|
2414
|
+
function parseProviderString(provider = "production") {
|
2415
|
+
if (isHostProviderAlias(provider)) {
|
2416
|
+
return provider;
|
2417
|
+
}
|
2418
|
+
const [main, workspaces] = provider.split(",");
|
2419
|
+
if (!main || !workspaces)
|
2420
|
+
return null;
|
2421
|
+
return { main, workspaces };
|
2422
|
+
}
|
2423
|
+
function buildProviderString(provider) {
|
2424
|
+
if (isHostProviderAlias(provider))
|
2425
|
+
return provider;
|
2426
|
+
return `${provider.main},${provider.workspaces}`;
|
2427
|
+
}
|
2428
|
+
function parseWorkspacesUrlParts(url) {
|
2429
|
+
if (!isString(url))
|
2430
|
+
return null;
|
2431
|
+
const matches = {
|
2432
|
+
production: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.xata\.sh\/db\/([^:]+):?(.*)?/),
|
2433
|
+
staging: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.staging-xata\.dev\/db\/([^:]+):?(.*)?/),
|
2434
|
+
dev: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.dev-xata\.dev\/db\/([^:]+):?(.*)?/),
|
2435
|
+
local: url.match(/(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:(?:\d+)\/db\/([^:]+):?(.*)?/)
|
2436
|
+
};
|
2437
|
+
const [host, match] = Object.entries(matches).find(([, match2]) => match2 !== null) ?? [];
|
2438
|
+
if (!isHostProviderAlias(host) || !match)
|
2439
|
+
return null;
|
2440
|
+
return { workspace: match[1], region: match[2], database: match[3], branch: match[4], host };
|
2441
|
+
}
|
2442
|
+
|
585
2443
|
const pool = new ApiRequestPool();
|
586
2444
|
const resolveUrl = (url, queryParams = {}, pathParams = {}) => {
|
587
2445
|
const cleanQueryParams = Object.entries(queryParams).reduce((acc, [key, value]) => {
|
@@ -597,6 +2455,7 @@ const resolveUrl = (url, queryParams = {}, pathParams = {}) => {
|
|
597
2455
|
return url.replace(/\{\w*\}/g, (key) => cleanPathParams[key.slice(1, -1)]) + queryString;
|
598
2456
|
};
|
599
2457
|
function buildBaseUrl({
|
2458
|
+
method,
|
600
2459
|
endpoint,
|
601
2460
|
path,
|
602
2461
|
workspacesApiUrl,
|
@@ -604,7 +2463,24 @@ function buildBaseUrl({
|
|
604
2463
|
pathParams = {}
|
605
2464
|
}) {
|
606
2465
|
if (endpoint === "dataPlane") {
|
607
|
-
|
2466
|
+
let url = isString(workspacesApiUrl) ? `${workspacesApiUrl}${path}` : workspacesApiUrl(path, pathParams);
|
2467
|
+
if (method.toUpperCase() === "PUT" && [
|
2468
|
+
"/db/{dbBranchName}/tables/{tableName}/data/{recordId}/column/{columnName}/file",
|
2469
|
+
"/db/{dbBranchName}/tables/{tableName}/data/{recordId}/column/{columnName}/file/{fileId}"
|
2470
|
+
].includes(path)) {
|
2471
|
+
const { host } = parseWorkspacesUrlParts(url) ?? {};
|
2472
|
+
switch (host) {
|
2473
|
+
case "production":
|
2474
|
+
url = url.replace("xata.sh", "upload.xata.sh");
|
2475
|
+
break;
|
2476
|
+
case "staging":
|
2477
|
+
url = url.replace("staging-xata.dev", "upload.staging-xata.dev");
|
2478
|
+
break;
|
2479
|
+
case "dev":
|
2480
|
+
url = url.replace("dev-xata.dev", "upload.dev-xata.dev");
|
2481
|
+
break;
|
2482
|
+
}
|
2483
|
+
}
|
608
2484
|
const urlWithWorkspace = isString(pathParams.workspace) ? url.replace("{workspaceId}", String(pathParams.workspace)) : url;
|
609
2485
|
return isString(pathParams.region) ? urlWithWorkspace.replace("{region}", String(pathParams.region)) : urlWithWorkspace;
|
610
2486
|
}
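`buildBaseUrl` now also receives the HTTP `method`, because `PUT` requests against the two file-column paths are redirected to a dedicated upload host (`xata.sh` becomes `upload.xata.sh`, with matching rewrites for staging and dev). A condensed sketch of just that rewrite rule; the helper name is illustrative and the host strings are copied from the diff:

```js
// Map a workspace API URL to its file-upload host, as done for binary PUTs.
function toUploadHost(url, host) {
  switch (host) {
    case "production":
      return url.replace("xata.sh", "upload.xata.sh");
    case "staging":
      return url.replace("staging-xata.dev", "upload.staging-xata.dev");
    case "dev":
      return url.replace("dev-xata.dev", "upload.dev-xata.dev");
    default:
      return url; // local URLs are left untouched
  }
}

console.log(toUploadHost("https://ws-123.eu-west-1.xata.sh/db/app:main", "production"));
// "https://ws-123.eu-west-1.upload.xata.sh/db/app:main"
```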
|
@@ -615,11 +2491,14 @@ function hostHeader(url) {
|
|
615
2491
|
const { groups } = pattern.exec(url) ?? {};
|
616
2492
|
return groups?.host ? { Host: groups.host } : {};
|
617
2493
|
}
|
618
|
-
function parseBody(body, headers) {
|
2494
|
+
async function parseBody(body, headers) {
|
619
2495
|
if (!isDefined(body))
|
620
2496
|
return void 0;
|
2497
|
+
if (isBlob(body) || typeof body.text === "function") {
|
2498
|
+
return body;
|
2499
|
+
}
|
621
2500
|
const { "Content-Type": contentType } = headers ?? {};
|
622
|
-
if (String(contentType).toLowerCase() === "application/json") {
|
2501
|
+
if (String(contentType).toLowerCase() === "application/json" && isObject(body)) {
|
623
2502
|
return JSON.stringify(body);
|
624
2503
|
}
|
625
2504
|
return body;
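`parseBody` is now async and passes through anything that is a `Blob` or exposes a `text()` method, and it only JSON-stringifies when the content type is JSON and the value is a plain object, so binary uploads reach `fetch` untouched. A small standalone restatement of that decision order (the function name is illustrative; the real helper relies on `isObject`, which additionally excludes arrays and dates):

```js
async function prepareBody(body, contentType) {
  if (body === null || body === undefined) return undefined;
  // Binary payloads (Blob, File, or anything Blob-like with .text()) pass through.
  if ((typeof Blob !== "undefined" && body instanceof Blob) || typeof body.text === "function") {
    return body;
  }
  // JSON content type plus an object value means serialize.
  if (String(contentType).toLowerCase() === "application/json" && typeof body === "object") {
    return JSON.stringify(body);
  }
  return body;
}

prepareBody({ name: "file.txt" }, "application/json").then(console.log); // '{"name":"file.txt"}'
```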
|
@@ -650,9 +2529,9 @@ async function fetch$1({
|
|
650
2529
|
return await trace(
|
651
2530
|
`${method.toUpperCase()} ${path}`,
|
652
2531
|
async ({ setAttributes }) => {
|
653
|
-
const baseUrl = buildBaseUrl({ endpoint, path, workspacesApiUrl, pathParams, apiUrl });
|
2532
|
+
const baseUrl = buildBaseUrl({ method, endpoint, path, workspacesApiUrl, pathParams, apiUrl });
|
654
2533
|
const fullUrl = resolveUrl(baseUrl, queryParams, pathParams);
|
655
|
-
const url = fullUrl.includes("localhost") ? fullUrl.replace(/^[^.]+\./, "http://") : fullUrl;
|
2534
|
+
const url = fullUrl.includes("localhost") ? fullUrl.replace(/^[^.]+\.[^.]+\./, "http://") : fullUrl;
|
656
2535
|
setAttributes({
|
657
2536
|
[TraceAttributes.HTTP_URL]: url,
|
658
2537
|
[TraceAttributes.HTTP_TARGET]: resolveUrl(path, queryParams, pathParams)
|
@@ -676,7 +2555,7 @@ async function fetch$1({
|
|
676
2555
|
const response = await pool.request(url, {
|
677
2556
|
...fetchOptions,
|
678
2557
|
method: method.toUpperCase(),
|
679
|
-
body: parseBody(body, headers),
|
2558
|
+
body: await parseBody(body, headers),
|
680
2559
|
headers,
|
681
2560
|
signal
|
682
2561
|
});
|
@@ -687,7 +2566,8 @@ async function fetch$1({
|
|
687
2566
|
[TraceAttributes.HTTP_REQUEST_ID]: requestId,
|
688
2567
|
[TraceAttributes.HTTP_STATUS_CODE]: response.status,
|
689
2568
|
[TraceAttributes.HTTP_HOST]: host,
|
690
|
-
[TraceAttributes.HTTP_SCHEME]: protocol?.replace(":", "")
|
2569
|
+
[TraceAttributes.HTTP_SCHEME]: protocol?.replace(":", ""),
|
2570
|
+
[TraceAttributes.CLOUDFLARE_RAY_ID]: response.headers?.get("cf-ray") ?? void 0
|
691
2571
|
});
|
692
2572
|
const message = response.headers?.get("x-xata-message");
|
693
2573
|
if (message)
|
@@ -732,7 +2612,7 @@ function fetchSSERequest({
|
|
732
2612
|
clientName,
|
733
2613
|
xataAgentExtra
|
734
2614
|
}) {
|
735
|
-
const baseUrl = buildBaseUrl({ endpoint, path, workspacesApiUrl, pathParams, apiUrl });
|
2615
|
+
const baseUrl = buildBaseUrl({ method, endpoint, path, workspacesApiUrl, pathParams, apiUrl });
|
736
2616
|
const fullUrl = resolveUrl(baseUrl, queryParams, pathParams);
|
737
2617
|
const url = fullUrl.includes("localhost") ? fullUrl.replace(/^[^.]+\./, "http://") : fullUrl;
|
738
2618
|
void fetchEventSource(url, {
|
@@ -775,12 +2655,35 @@ function parseUrl(url) {
|
|
775
2655
|
|
776
2656
|
const dataPlaneFetch = async (options) => fetch$1({ ...options, endpoint: "dataPlane" });
|
777
2657
|
|
2658
|
+
const applyMigration = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/apply", method: "post", ...variables, signal });
|
2659
|
+
const adaptTable = (variables, signal) => dataPlaneFetch({
|
2660
|
+
url: "/db/{dbBranchName}/migrations/adapt/{tableName}",
|
2661
|
+
method: "post",
|
2662
|
+
...variables,
|
2663
|
+
signal
|
2664
|
+
});
|
2665
|
+
const adaptAllTables = (variables, signal) => dataPlaneFetch({
|
2666
|
+
url: "/db/{dbBranchName}/migrations/adapt",
|
2667
|
+
method: "post",
|
2668
|
+
...variables,
|
2669
|
+
signal
|
2670
|
+
});
|
2671
|
+
const getBranchMigrationJobStatus = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/status", method: "get", ...variables, signal });
|
2672
|
+
const getMigrationJobStatus = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/jobs/{jobId}", method: "get", ...variables, signal });
|
2673
|
+
const getMigrationHistory = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/history", method: "get", ...variables, signal });
|
778
2674
|
const getBranchList = (variables, signal) => dataPlaneFetch({
|
779
2675
|
url: "/dbs/{dbName}",
|
780
2676
|
method: "get",
|
781
2677
|
...variables,
|
782
2678
|
signal
|
783
2679
|
});
|
2680
|
+
const getDatabaseSettings = (variables, signal) => dataPlaneFetch({
|
2681
|
+
url: "/dbs/{dbName}/settings",
|
2682
|
+
method: "get",
|
2683
|
+
...variables,
|
2684
|
+
signal
|
2685
|
+
});
|
2686
|
+
const updateDatabaseSettings = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/settings", method: "patch", ...variables, signal });
|
784
2687
|
const getBranchDetails = (variables, signal) => dataPlaneFetch({
|
785
2688
|
url: "/db/{dbBranchName}",
|
786
2689
|
method: "get",
|
@@ -794,6 +2697,12 @@ const deleteBranch = (variables, signal) => dataPlaneFetch({
|
|
794
2697
|
...variables,
|
795
2698
|
signal
|
796
2699
|
});
|
2700
|
+
const getSchema = (variables, signal) => dataPlaneFetch({
|
2701
|
+
url: "/db/{dbBranchName}/schema",
|
2702
|
+
method: "get",
|
2703
|
+
...variables,
|
2704
|
+
signal
|
2705
|
+
});
|
797
2706
|
const copyBranch = (variables, signal) => dataPlaneFetch({
|
798
2707
|
url: "/db/{dbBranchName}/copy",
|
799
2708
|
method: "post",
|
@@ -959,12 +2868,6 @@ const searchTable = (variables, signal) => dataPlaneFetch({
|
|
959
2868
|
...variables,
|
960
2869
|
signal
|
961
2870
|
});
|
962
|
-
const sqlQuery = (variables, signal) => dataPlaneFetch({
|
963
|
-
url: "/db/{dbBranchName}/sql",
|
964
|
-
method: "post",
|
965
|
-
...variables,
|
966
|
-
signal
|
967
|
-
});
|
968
2871
|
const vectorSearchTable = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/vectorSearch", method: "post", ...variables, signal });
|
969
2872
|
const askTable = (variables, signal) => dataPlaneFetch({
|
970
2873
|
url: "/db/{dbBranchName}/tables/{tableName}/ask",
|
@@ -981,7 +2884,38 @@ const fileAccess = (variables, signal) => dataPlaneFetch({
|
|
981
2884
|
...variables,
|
982
2885
|
signal
|
983
2886
|
});
|
2887
|
+
const fileUpload = (variables, signal) => dataPlaneFetch({
|
2888
|
+
url: "/file/{fileId}",
|
2889
|
+
method: "put",
|
2890
|
+
...variables,
|
2891
|
+
signal
|
2892
|
+
});
|
2893
|
+
const sqlQuery = (variables, signal) => dataPlaneFetch({
|
2894
|
+
url: "/db/{dbBranchName}/sql",
|
2895
|
+
method: "post",
|
2896
|
+
...variables,
|
2897
|
+
signal
|
2898
|
+
});
|
984
2899
|
const operationsByTag$2 = {
|
2900
|
+
migrations: {
|
2901
|
+
applyMigration,
|
2902
|
+
adaptTable,
|
2903
|
+
adaptAllTables,
|
2904
|
+
getBranchMigrationJobStatus,
|
2905
|
+
getMigrationJobStatus,
|
2906
|
+
getMigrationHistory,
|
2907
|
+
getSchema,
|
2908
|
+
getBranchMigrationHistory,
|
2909
|
+
getBranchMigrationPlan,
|
2910
|
+
executeBranchMigrationPlan,
|
2911
|
+
getBranchSchemaHistory,
|
2912
|
+
compareBranchWithUserSchema,
|
2913
|
+
compareBranchSchemas,
|
2914
|
+
updateBranchSchema,
|
2915
|
+
previewBranchSchemaEdit,
|
2916
|
+
applyBranchSchemaEdit,
|
2917
|
+
pushBranchMigrations
|
2918
|
+
},
|
985
2919
|
branch: {
|
986
2920
|
getBranchList,
|
987
2921
|
getBranchDetails,
|
@@ -996,18 +2930,7 @@ const operationsByTag$2 = {
|
|
996
2930
|
removeGitBranchesEntry,
|
997
2931
|
resolveBranch
|
998
2932
|
},
|
999
|
-
|
1000
|
-
getBranchMigrationHistory,
|
1001
|
-
getBranchMigrationPlan,
|
1002
|
-
executeBranchMigrationPlan,
|
1003
|
-
getBranchSchemaHistory,
|
1004
|
-
compareBranchWithUserSchema,
|
1005
|
-
compareBranchSchemas,
|
1006
|
-
updateBranchSchema,
|
1007
|
-
previewBranchSchemaEdit,
|
1008
|
-
applyBranchSchemaEdit,
|
1009
|
-
pushBranchMigrations
|
1010
|
-
},
|
2933
|
+
database: { getDatabaseSettings, updateDatabaseSettings },
|
1011
2934
|
migrationRequests: {
|
1012
2935
|
queryMigrationRequests,
|
1013
2936
|
createMigrationRequest,
|
@@ -1040,18 +2963,18 @@ const operationsByTag$2 = {
|
|
1040
2963
|
deleteRecord,
|
1041
2964
|
bulkInsertTableRecords
|
1042
2965
|
},
|
1043
|
-
files: { getFileItem, putFileItem, deleteFileItem, getFile, putFile, deleteFile, fileAccess },
|
2966
|
+
files: { getFileItem, putFileItem, deleteFileItem, getFile, putFile, deleteFile, fileAccess, fileUpload },
|
1044
2967
|
searchAndFilter: {
|
1045
2968
|
queryTable,
|
1046
2969
|
searchBranch,
|
1047
2970
|
searchTable,
|
1048
|
-
sqlQuery,
|
1049
2971
|
vectorSearchTable,
|
1050
2972
|
askTable,
|
1051
2973
|
askTableSession,
|
1052
2974
|
summarizeTable,
|
1053
2975
|
aggregateTable
|
1054
|
-
}
|
2976
|
+
},
|
2977
|
+
sql: { sqlQuery }
|
1055
2978
|
};
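`sqlQuery` moved out of `searchAndFilter` into a new top-level `sql` tag and `fileUpload` joined the `files` tag, so callers that go through `operationsByTag` reach raw SQL as sketched below. The path parameters reuse the `{workspaceId}`/`{region}`/`{dbBranch}` placeholders seen elsewhere in this file; the SQL statement and the surrounding `extraProps` are illustrative assumptions:

```js
// Hedged sketch: extraProps is assumed to carry apiUrl, workspacesApiUrl,
// fetch and apiKey, as in the plugin code elsewhere in this bundle.
async function runSql(extraProps) {
  return operationsByTag.sql.sqlQuery({
    pathParams: { workspace: "{workspaceId}", region: "{region}", dbBranchName: "{dbBranch}" },
    body: { statement: "SELECT 1" },
    ...extraProps
  });
}
```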
|
1056
2979
|
|
1057
2980
|
const controlPlaneFetch = async (options) => fetch$1({ ...options, endpoint: "controlPlane" });
|
@@ -1100,12 +3023,25 @@ const getUserOAuthClients = (variables, signal) => controlPlaneFetch({
|
|
1100
3023
|
...variables,
|
1101
3024
|
signal
|
1102
3025
|
});
|
3026
|
+
const deleteUserOAuthClient = (variables, signal) => controlPlaneFetch({
|
3027
|
+
url: "/user/oauth/clients/{clientId}",
|
3028
|
+
method: "delete",
|
3029
|
+
...variables,
|
3030
|
+
signal
|
3031
|
+
});
|
1103
3032
|
const getUserOAuthAccessTokens = (variables, signal) => controlPlaneFetch({
|
1104
3033
|
url: "/user/oauth/tokens",
|
1105
3034
|
method: "get",
|
1106
3035
|
...variables,
|
1107
3036
|
signal
|
1108
3037
|
});
|
3038
|
+
const deleteOAuthAccessToken = (variables, signal) => controlPlaneFetch({
|
3039
|
+
url: "/user/oauth/tokens/{token}",
|
3040
|
+
method: "delete",
|
3041
|
+
...variables,
|
3042
|
+
signal
|
3043
|
+
});
|
3044
|
+
const updateOAuthAccessToken = (variables, signal) => controlPlaneFetch({ url: "/user/oauth/tokens/{token}", method: "patch", ...variables, signal });
|
1109
3045
|
const getWorkspacesList = (variables, signal) => controlPlaneFetch({
|
1110
3046
|
url: "/workspaces",
|
1111
3047
|
method: "get",
|
@@ -1136,6 +3072,8 @@ const deleteWorkspace = (variables, signal) => controlPlaneFetch({
|
|
1136
3072
|
...variables,
|
1137
3073
|
signal
|
1138
3074
|
});
|
3075
|
+
const getWorkspaceSettings = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/settings", method: "get", ...variables, signal });
|
3076
|
+
const updateWorkspaceSettings = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/settings", method: "patch", ...variables, signal });
|
1139
3077
|
const getWorkspaceMembersList = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/members", method: "get", ...variables, signal });
|
1140
3078
|
const updateWorkspaceMemberRole = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/members/{userId}", method: "put", ...variables, signal });
|
1141
3079
|
const removeWorkspaceMember = (variables, signal) => controlPlaneFetch({
|
@@ -1149,6 +3087,15 @@ const updateWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({ u
|
|
1149
3087
|
const cancelWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/invites/{inviteId}", method: "delete", ...variables, signal });
|
1150
3088
|
const acceptWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/invites/{inviteKey}/accept", method: "post", ...variables, signal });
|
1151
3089
|
const resendWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/invites/{inviteId}/resend", method: "post", ...variables, signal });
|
3090
|
+
const listClusters = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/clusters", method: "get", ...variables, signal });
|
3091
|
+
const createCluster = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/clusters", method: "post", ...variables, signal });
|
3092
|
+
const getCluster = (variables, signal) => controlPlaneFetch({
|
3093
|
+
url: "/workspaces/{workspaceId}/clusters/{clusterId}",
|
3094
|
+
method: "get",
|
3095
|
+
...variables,
|
3096
|
+
signal
|
3097
|
+
});
|
3098
|
+
const updateCluster = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/clusters/{clusterId}", method: "patch", ...variables, signal });
|
1152
3099
|
const getDatabaseList = (variables, signal) => controlPlaneFetch({
|
1153
3100
|
url: "/workspaces/{workspaceId}/dbs",
|
1154
3101
|
method: "get",
|
@@ -1175,15 +3122,25 @@ const listRegions = (variables, signal) => controlPlaneFetch({
|
|
1175
3122
|
signal
|
1176
3123
|
});
|
1177
3124
|
const operationsByTag$1 = {
|
1178
|
-
|
3125
|
+
oAuth: {
|
3126
|
+
getAuthorizationCode,
|
3127
|
+
grantAuthorizationCode,
|
3128
|
+
getUserOAuthClients,
|
3129
|
+
deleteUserOAuthClient,
|
3130
|
+
getUserOAuthAccessTokens,
|
3131
|
+
deleteOAuthAccessToken,
|
3132
|
+
updateOAuthAccessToken
|
3133
|
+
},
|
1179
3134
|
users: { getUser, updateUser, deleteUser },
|
1180
|
-
authentication: { getUserAPIKeys, createUserAPIKey, deleteUserAPIKey
|
3135
|
+
authentication: { getUserAPIKeys, createUserAPIKey, deleteUserAPIKey },
|
1181
3136
|
workspaces: {
|
1182
3137
|
getWorkspacesList,
|
1183
3138
|
createWorkspace,
|
1184
3139
|
getWorkspace,
|
1185
3140
|
updateWorkspace,
|
1186
3141
|
deleteWorkspace,
|
3142
|
+
getWorkspaceSettings,
|
3143
|
+
updateWorkspaceSettings,
|
1187
3144
|
getWorkspaceMembersList,
|
1188
3145
|
updateWorkspaceMemberRole,
|
1189
3146
|
removeWorkspaceMember
|
@@ -1195,6 +3152,7 @@ const operationsByTag$1 = {
|
|
1195
3152
|
acceptWorkspaceMemberInvite,
|
1196
3153
|
resendWorkspaceMemberInvite
|
1197
3154
|
},
|
3155
|
+
xbcontrolOther: { listClusters, createCluster, getCluster, updateCluster },
|
1198
3156
|
databases: {
|
1199
3157
|
getDatabaseList,
|
1200
3158
|
createDatabase,
|
@@ -1211,66 +3169,11 @@ const operationsByTag$1 = {
|
|
1211
3169
|
|
1212
3170
|
const operationsByTag = deepMerge(operationsByTag$2, operationsByTag$1);
|
1213
3171
|
|
1214
|
-
function getHostUrl(provider, type) {
|
1215
|
-
if (isHostProviderAlias(provider)) {
|
1216
|
-
return providers[provider][type];
|
1217
|
-
} else if (isHostProviderBuilder(provider)) {
|
1218
|
-
return provider[type];
|
1219
|
-
}
|
1220
|
-
throw new Error("Invalid API provider");
|
1221
|
-
}
|
1222
|
-
const providers = {
|
1223
|
-
production: {
|
1224
|
-
main: "https://api.xata.io",
|
1225
|
-
workspaces: "https://{workspaceId}.{region}.xata.sh"
|
1226
|
-
},
|
1227
|
-
staging: {
|
1228
|
-
main: "https://api.staging-xata.dev",
|
1229
|
-
workspaces: "https://{workspaceId}.{region}.staging-xata.dev"
|
1230
|
-
},
|
1231
|
-
dev: {
|
1232
|
-
main: "https://api.dev-xata.dev",
|
1233
|
-
workspaces: "https://{workspaceId}.{region}.dev-xata.dev"
|
1234
|
-
}
|
1235
|
-
};
|
1236
|
-
function isHostProviderAlias(alias) {
|
1237
|
-
return isString(alias) && Object.keys(providers).includes(alias);
|
1238
|
-
}
|
1239
|
-
function isHostProviderBuilder(builder) {
|
1240
|
-
return isObject(builder) && isString(builder.main) && isString(builder.workspaces);
|
1241
|
-
}
|
1242
|
-
function parseProviderString(provider = "production") {
|
1243
|
-
if (isHostProviderAlias(provider)) {
|
1244
|
-
return provider;
|
1245
|
-
}
|
1246
|
-
const [main, workspaces] = provider.split(",");
|
1247
|
-
if (!main || !workspaces)
|
1248
|
-
return null;
|
1249
|
-
return { main, workspaces };
|
1250
|
-
}
|
1251
|
-
function buildProviderString(provider) {
|
1252
|
-
if (isHostProviderAlias(provider))
|
1253
|
-
return provider;
|
1254
|
-
return `${provider.main},${provider.workspaces}`;
|
1255
|
-
}
|
1256
|
-
function parseWorkspacesUrlParts(url) {
|
1257
|
-
if (!isString(url))
|
1258
|
-
return null;
|
1259
|
-
const regex = /(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.xata\.sh.*/;
|
1260
|
-
const regexDev = /(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.dev-xata\.dev.*/;
|
1261
|
-
const regexStaging = /(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.staging-xata\.dev.*/;
|
1262
|
-
const regexProdTesting = /(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.xata\.tech.*/;
|
1263
|
-
const match = url.match(regex) || url.match(regexDev) || url.match(regexStaging) || url.match(regexProdTesting);
|
1264
|
-
if (!match)
|
1265
|
-
return null;
|
1266
|
-
return { workspace: match[1], region: match[2] };
|
1267
|
-
}
|
1268
|
-
|
1269
3172
|
var __accessCheck$7 = (obj, member, msg) => {
|
1270
3173
|
if (!member.has(obj))
|
1271
3174
|
throw TypeError("Cannot " + msg);
|
1272
3175
|
};
|
1273
|
-
var __privateGet$
|
3176
|
+
var __privateGet$6 = (obj, member, getter) => {
|
1274
3177
|
__accessCheck$7(obj, member, "read from private field");
|
1275
3178
|
return getter ? getter.call(obj) : member.get(obj);
|
1276
3179
|
};
|
@@ -1279,7 +3182,7 @@ var __privateAdd$7 = (obj, member, value) => {
|
|
1279
3182
|
throw TypeError("Cannot add the same private member more than once");
|
1280
3183
|
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
1281
3184
|
};
|
1282
|
-
var __privateSet$
|
3185
|
+
var __privateSet$5 = (obj, member, value, setter) => {
|
1283
3186
|
__accessCheck$7(obj, member, "write to private field");
|
1284
3187
|
setter ? setter.call(obj, value) : member.set(obj, value);
|
1285
3188
|
return value;
|
@@ -1296,7 +3199,7 @@ class XataApiClient {
|
|
1296
3199
|
if (!apiKey) {
|
1297
3200
|
throw new Error("Could not resolve a valid apiKey");
|
1298
3201
|
}
|
1299
|
-
__privateSet$
|
3202
|
+
__privateSet$5(this, _extraProps, {
|
1300
3203
|
apiUrl: getHostUrl(provider, "main"),
|
1301
3204
|
workspacesApiUrl: getHostUrl(provider, "workspaces"),
|
1302
3205
|
fetch: getFetchImplementation(options.fetch),
|
@@ -1308,64 +3211,64 @@ class XataApiClient {
|
|
1308
3211
|
});
|
1309
3212
|
}
|
1310
3213
|
get user() {
|
1311
|
-
if (!__privateGet$
|
1312
|
-
__privateGet$
|
1313
|
-
return __privateGet$
|
3214
|
+
if (!__privateGet$6(this, _namespaces).user)
|
3215
|
+
__privateGet$6(this, _namespaces).user = new UserApi(__privateGet$6(this, _extraProps));
|
3216
|
+
return __privateGet$6(this, _namespaces).user;
|
1314
3217
|
}
|
1315
3218
|
get authentication() {
|
1316
|
-
if (!__privateGet$
|
1317
|
-
__privateGet$
|
1318
|
-
return __privateGet$
|
3219
|
+
if (!__privateGet$6(this, _namespaces).authentication)
|
3220
|
+
__privateGet$6(this, _namespaces).authentication = new AuthenticationApi(__privateGet$6(this, _extraProps));
|
3221
|
+
return __privateGet$6(this, _namespaces).authentication;
|
1319
3222
|
}
|
1320
3223
|
get workspaces() {
|
1321
|
-
if (!__privateGet$
|
1322
|
-
__privateGet$
|
1323
|
-
return __privateGet$
|
3224
|
+
if (!__privateGet$6(this, _namespaces).workspaces)
|
3225
|
+
__privateGet$6(this, _namespaces).workspaces = new WorkspaceApi(__privateGet$6(this, _extraProps));
|
3226
|
+
return __privateGet$6(this, _namespaces).workspaces;
|
1324
3227
|
}
|
1325
3228
|
get invites() {
|
1326
|
-
if (!__privateGet$
|
1327
|
-
__privateGet$
|
1328
|
-
return __privateGet$
|
3229
|
+
if (!__privateGet$6(this, _namespaces).invites)
|
3230
|
+
__privateGet$6(this, _namespaces).invites = new InvitesApi(__privateGet$6(this, _extraProps));
|
3231
|
+
return __privateGet$6(this, _namespaces).invites;
|
1329
3232
|
}
|
1330
3233
|
get database() {
|
1331
|
-
if (!__privateGet$
|
1332
|
-
__privateGet$
|
1333
|
-
return __privateGet$
|
3234
|
+
if (!__privateGet$6(this, _namespaces).database)
|
3235
|
+
__privateGet$6(this, _namespaces).database = new DatabaseApi(__privateGet$6(this, _extraProps));
|
3236
|
+
return __privateGet$6(this, _namespaces).database;
|
1334
3237
|
}
|
1335
3238
|
get branches() {
|
1336
|
-
if (!__privateGet$
|
1337
|
-
__privateGet$
|
1338
|
-
return __privateGet$
|
3239
|
+
if (!__privateGet$6(this, _namespaces).branches)
|
3240
|
+
__privateGet$6(this, _namespaces).branches = new BranchApi(__privateGet$6(this, _extraProps));
|
3241
|
+
return __privateGet$6(this, _namespaces).branches;
|
1339
3242
|
}
|
1340
3243
|
get migrations() {
|
1341
|
-
if (!__privateGet$
|
1342
|
-
__privateGet$
|
1343
|
-
return __privateGet$
|
3244
|
+
if (!__privateGet$6(this, _namespaces).migrations)
|
3245
|
+
__privateGet$6(this, _namespaces).migrations = new MigrationsApi(__privateGet$6(this, _extraProps));
|
3246
|
+
return __privateGet$6(this, _namespaces).migrations;
|
1344
3247
|
}
|
1345
3248
|
get migrationRequests() {
|
1346
|
-
if (!__privateGet$
|
1347
|
-
__privateGet$
|
1348
|
-
return __privateGet$
|
3249
|
+
if (!__privateGet$6(this, _namespaces).migrationRequests)
|
3250
|
+
__privateGet$6(this, _namespaces).migrationRequests = new MigrationRequestsApi(__privateGet$6(this, _extraProps));
|
3251
|
+
return __privateGet$6(this, _namespaces).migrationRequests;
|
1349
3252
|
}
|
1350
3253
|
get tables() {
|
1351
|
-
if (!__privateGet$
|
1352
|
-
__privateGet$
|
1353
|
-
return __privateGet$
|
3254
|
+
if (!__privateGet$6(this, _namespaces).tables)
|
3255
|
+
__privateGet$6(this, _namespaces).tables = new TableApi(__privateGet$6(this, _extraProps));
|
3256
|
+
return __privateGet$6(this, _namespaces).tables;
|
1354
3257
|
}
|
1355
3258
|
get records() {
|
1356
|
-
if (!__privateGet$
|
1357
|
-
__privateGet$
|
1358
|
-
return __privateGet$
|
3259
|
+
if (!__privateGet$6(this, _namespaces).records)
|
3260
|
+
__privateGet$6(this, _namespaces).records = new RecordsApi(__privateGet$6(this, _extraProps));
|
3261
|
+
return __privateGet$6(this, _namespaces).records;
|
1359
3262
|
}
|
1360
3263
|
get files() {
|
1361
|
-
if (!__privateGet$
|
1362
|
-
__privateGet$
|
1363
|
-
return __privateGet$
|
3264
|
+
if (!__privateGet$6(this, _namespaces).files)
|
3265
|
+
__privateGet$6(this, _namespaces).files = new FilesApi(__privateGet$6(this, _extraProps));
|
3266
|
+
return __privateGet$6(this, _namespaces).files;
|
1364
3267
|
}
|
1365
3268
|
get searchAndFilter() {
|
1366
|
-
if (!__privateGet$
|
1367
|
-
__privateGet$
|
1368
|
-
return __privateGet$
|
3269
|
+
if (!__privateGet$6(this, _namespaces).searchAndFilter)
|
3270
|
+
__privateGet$6(this, _namespaces).searchAndFilter = new SearchAndFilterApi(__privateGet$6(this, _extraProps));
|
3271
|
+
return __privateGet$6(this, _namespaces).searchAndFilter;
|
1369
3272
|
}
|
1370
3273
|
}
|
1371
3274
|
_extraProps = new WeakMap();
|
@@ -1667,6 +3570,30 @@ class BranchApi {
|
|
1667
3570
|
...this.extraProps
|
1668
3571
|
});
|
1669
3572
|
}
|
3573
|
+
pgRollMigrationHistory({
|
3574
|
+
workspace,
|
3575
|
+
region,
|
3576
|
+
database,
|
3577
|
+
branch
|
3578
|
+
}) {
|
3579
|
+
return operationsByTag.migrations.getMigrationHistory({
|
3580
|
+
pathParams: { workspace, region, dbBranchName: `${database}:${branch}` },
|
3581
|
+
...this.extraProps
|
3582
|
+
});
|
3583
|
+
}
|
3584
|
+
applyMigration({
|
3585
|
+
workspace,
|
3586
|
+
region,
|
3587
|
+
database,
|
3588
|
+
branch,
|
3589
|
+
migration
|
3590
|
+
}) {
|
3591
|
+
return operationsByTag.migrations.applyMigration({
|
3592
|
+
pathParams: { workspace, region, dbBranchName: `${database}:${branch}` },
|
3593
|
+
body: migration,
|
3594
|
+
...this.extraProps
|
3595
|
+
});
|
3596
|
+
}
|
1670
3597
|
}
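`BranchApi` gains `pgRollMigrationHistory` and `applyMigration`, thin wrappers over the new `migrations/history` and `migrations/apply` endpoints (see the methods added above). A hedged usage sketch against `XataApiClient`; every identifier value below is a placeholder, and the migration payload shape is only assumed:

```js
async function example() {
  const api = new XataApiClient({ apiKey: "xau_placeholder" });

  // List the migration history of a branch.
  const history = await api.branches.pgRollMigrationHistory({
    workspace: "my-workspace",
    region: "us-east-1",
    database: "my-db",
    branch: "main"
  });

  // Apply a migration to the same branch; the object is forwarded as the
  // request body exactly as the wrapper above shows.
  await api.branches.applyMigration({
    workspace: "my-workspace",
    region: "us-east-1",
    database: "my-db",
    branch: "main",
    migration: { operations: [] } // placeholder payload
  });

  return history;
}
```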
|
1671
3598
|
class TableApi {
|
1672
3599
|
constructor(extraProps) {
|
@@ -2480,6 +4407,17 @@ class MigrationsApi {
|
|
2480
4407
|
...this.extraProps
|
2481
4408
|
});
|
2482
4409
|
}
|
4410
|
+
getSchema({
|
4411
|
+
workspace,
|
4412
|
+
region,
|
4413
|
+
database,
|
4414
|
+
branch
|
4415
|
+
}) {
|
4416
|
+
return operationsByTag.migrations.getSchema({
|
4417
|
+
pathParams: { workspace, region, dbBranchName: `${database}:${branch}` },
|
4418
|
+
...this.extraProps
|
4419
|
+
});
|
4420
|
+
}
|
2483
4421
|
}
|
2484
4422
|
class DatabaseApi {
|
2485
4423
|
constructor(extraProps) {
|
@@ -2550,100 +4488,46 @@ class DatabaseApi {
|
|
2550
4488
|
}) {
|
2551
4489
|
return operationsByTag.databases.getDatabaseGithubSettings({
|
2552
4490
|
pathParams: { workspaceId: workspace, dbName: database },
|
2553
|
-
...this.extraProps
|
2554
|
-
});
|
2555
|
-
}
|
2556
|
-
updateDatabaseGithubSettings({
|
2557
|
-
workspace,
|
2558
|
-
database,
|
2559
|
-
settings
|
2560
|
-
}) {
|
2561
|
-
return operationsByTag.databases.updateDatabaseGithubSettings({
|
2562
|
-
pathParams: { workspaceId: workspace, dbName: database },
|
2563
|
-
body: settings,
|
2564
|
-
...this.extraProps
|
2565
|
-
});
|
2566
|
-
}
|
2567
|
-
deleteDatabaseGithubSettings({
|
2568
|
-
workspace,
|
2569
|
-
database
|
2570
|
-
}) {
|
2571
|
-
return operationsByTag.databases.deleteDatabaseGithubSettings({
|
2572
|
-
pathParams: { workspaceId: workspace, dbName: database },
|
2573
|
-
...this.extraProps
|
2574
|
-
});
|
2575
|
-
}
|
2576
|
-
listRegions({ workspace }) {
|
2577
|
-
return operationsByTag.databases.listRegions({
|
2578
|
-
pathParams: { workspaceId: workspace },
|
2579
|
-
...this.extraProps
|
2580
|
-
});
|
2581
|
-
}
|
2582
|
-
}
|
2583
|
-
|
2584
|
-
class XataApiPlugin {
|
2585
|
-
build(options) {
|
2586
|
-
return new XataApiClient(options);
|
2587
|
-
}
|
2588
|
-
}
|
2589
|
-
|
2590
|
-
class XataPlugin {
|
2591
|
-
}
|
2592
|
-
|
2593
|
-
class FilesPlugin extends XataPlugin {
|
2594
|
-
build(pluginOptions) {
|
2595
|
-
return {
|
2596
|
-
download: async (location) => {
|
2597
|
-
const { table, record, column, fileId = "" } = location ?? {};
|
2598
|
-
return await getFileItem({
|
2599
|
-
pathParams: {
|
2600
|
-
workspace: "{workspaceId}",
|
2601
|
-
dbBranchName: "{dbBranch}",
|
2602
|
-
region: "{region}",
|
2603
|
-
tableName: table ?? "",
|
2604
|
-
recordId: record ?? "",
|
2605
|
-
columnName: column ?? "",
|
2606
|
-
fileId
|
2607
|
-
},
|
2608
|
-
...pluginOptions,
|
2609
|
-
rawResponse: true
|
2610
|
-
});
|
2611
|
-
},
|
2612
|
-
upload: async (location, file) => {
|
2613
|
-
const { table, record, column, fileId = "" } = location ?? {};
|
2614
|
-
return await putFileItem({
|
2615
|
-
pathParams: {
|
2616
|
-
workspace: "{workspaceId}",
|
2617
|
-
dbBranchName: "{dbBranch}",
|
2618
|
-
region: "{region}",
|
2619
|
-
tableName: table ?? "",
|
2620
|
-
recordId: record ?? "",
|
2621
|
-
columnName: column ?? "",
|
2622
|
-
fileId
|
2623
|
-
},
|
2624
|
-
body: file,
|
2625
|
-
...pluginOptions
|
2626
|
-
});
|
2627
|
-
},
|
2628
|
-
delete: async (location) => {
|
2629
|
-
const { table, record, column, fileId = "" } = location ?? {};
|
2630
|
-
return await deleteFileItem({
|
2631
|
-
pathParams: {
|
2632
|
-
workspace: "{workspaceId}",
|
2633
|
-
dbBranchName: "{dbBranch}",
|
2634
|
-
region: "{region}",
|
2635
|
-
tableName: table ?? "",
|
2636
|
-
recordId: record ?? "",
|
2637
|
-
columnName: column ?? "",
|
2638
|
-
fileId
|
2639
|
-
},
|
2640
|
-
...pluginOptions
|
2641
|
-
});
|
2642
|
-
}
|
2643
|
-
};
|
4491
|
+
...this.extraProps
|
4492
|
+
});
|
4493
|
+
}
|
4494
|
+
updateDatabaseGithubSettings({
|
4495
|
+
workspace,
|
4496
|
+
database,
|
4497
|
+
settings
|
4498
|
+
}) {
|
4499
|
+
return operationsByTag.databases.updateDatabaseGithubSettings({
|
4500
|
+
pathParams: { workspaceId: workspace, dbName: database },
|
4501
|
+
body: settings,
|
4502
|
+
...this.extraProps
|
4503
|
+
});
|
4504
|
+
}
|
4505
|
+
deleteDatabaseGithubSettings({
|
4506
|
+
workspace,
|
4507
|
+
database
|
4508
|
+
}) {
|
4509
|
+
return operationsByTag.databases.deleteDatabaseGithubSettings({
|
4510
|
+
pathParams: { workspaceId: workspace, dbName: database },
|
4511
|
+
...this.extraProps
|
4512
|
+
});
|
4513
|
+
}
|
4514
|
+
listRegions({ workspace }) {
|
4515
|
+
return operationsByTag.databases.listRegions({
|
4516
|
+
pathParams: { workspaceId: workspace },
|
4517
|
+
...this.extraProps
|
4518
|
+
});
|
4519
|
+
}
|
4520
|
+
}
|
4521
|
+
|
4522
|
+
class XataApiPlugin {
|
4523
|
+
build(options) {
|
4524
|
+
return new XataApiClient(options);
|
2644
4525
|
}
|
2645
4526
|
}
|
2646
4527
|
|
4528
|
+
class XataPlugin {
|
4529
|
+
}
|
4530
|
+
|
2647
4531
|
function buildTransformString(transformations) {
|
2648
4532
|
return transformations.flatMap(
|
2649
4533
|
(t) => Object.entries(t).map(([key, value]) => {
|
@@ -2659,71 +4543,33 @@ function buildTransformString(transformations) {
|
|
2659
4543
|
})
|
2660
4544
|
).join(",");
|
2661
4545
|
}
|
2662
|
-
function transformImage(url, transformations) {
|
4546
|
+
function transformImage(url, ...transformations) {
|
2663
4547
|
if (!isDefined(url))
|
2664
4548
|
return void 0;
|
2665
|
-
const
|
4549
|
+
const newTransformations = buildTransformString(transformations);
|
2666
4550
|
const { hostname, pathname, search } = new URL(url);
|
2667
|
-
|
4551
|
+
const pathParts = pathname.split("/");
|
4552
|
+
const transformIndex = pathParts.findIndex((part) => part === "transform");
|
4553
|
+
const removedItems = transformIndex >= 0 ? pathParts.splice(transformIndex, 2) : [];
|
4554
|
+
const transform = `/transform/${[removedItems[1], newTransformations].filter(isDefined).join(",")}`;
|
4555
|
+
const path = pathParts.join("/");
|
4556
|
+
return `https://${hostname}${transform}${path}${search}`;
|
2668
4557
|
}
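`transformImage` is now variadic and splices any existing `/transform/<options>` segment out of the path before re-inserting a merged one, so repeated calls compose their options instead of nesting paths; `XataFile.transform` forwards its options the same way and, further down, derives `metadataUrl`/`metadataSignedUrl` by appending `format: "json"`. An illustrative restatement of just the path handling; the function name and input URL are made up:

```js
// Merge new transform options into a path that may already carry some.
function mergeTransform(pathname, newOptions) {
  const parts = pathname.split("/");
  const idx = parts.findIndex((part) => part === "transform");
  const removed = idx >= 0 ? parts.splice(idx, 2) : [];
  const merged = [removed[1], newOptions].filter(Boolean).join(",");
  return `/transform/${merged}` + parts.join("/");
}

console.log(mergeTransform("/transform/width=100/abc123", "height=50"));
// "/transform/width=100,height=50/abc123"
```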
|
2669
4558
|
|
2670
|
-
var __defProp$6 = Object.defineProperty;
|
2671
|
-
var __defNormalProp$6 = (obj, key, value) => key in obj ? __defProp$6(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
|
2672
|
-
var __publicField$6 = (obj, key, value) => {
|
2673
|
-
__defNormalProp$6(obj, typeof key !== "symbol" ? key + "" : key, value);
|
2674
|
-
return value;
|
2675
|
-
};
|
2676
4559
|
class XataFile {
|
2677
4560
|
constructor(file) {
|
2678
|
-
|
2679
|
-
* Name of this file.
|
2680
|
-
*/
|
2681
|
-
__publicField$6(this, "name");
|
2682
|
-
/**
|
2683
|
-
* Media type of this file.
|
2684
|
-
*/
|
2685
|
-
__publicField$6(this, "mediaType");
|
2686
|
-
/**
|
2687
|
-
* Base64 encoded content of this file.
|
2688
|
-
*/
|
2689
|
-
__publicField$6(this, "base64Content");
|
2690
|
-
/**
|
2691
|
-
* Whether to enable public url for this file.
|
2692
|
-
*/
|
2693
|
-
__publicField$6(this, "enablePublicUrl");
|
2694
|
-
/**
|
2695
|
-
* Timeout for the signed url.
|
2696
|
-
*/
|
2697
|
-
__publicField$6(this, "signedUrlTimeout");
|
2698
|
-
/**
|
2699
|
-
* Size of this file.
|
2700
|
-
*/
|
2701
|
-
__publicField$6(this, "size");
|
2702
|
-
/**
|
2703
|
-
* Version of this file.
|
2704
|
-
*/
|
2705
|
-
__publicField$6(this, "version");
|
2706
|
-
/**
|
2707
|
-
* Url of this file.
|
2708
|
-
*/
|
2709
|
-
__publicField$6(this, "url");
|
2710
|
-
/**
|
2711
|
-
* Signed url of this file.
|
2712
|
-
*/
|
2713
|
-
__publicField$6(this, "signedUrl");
|
2714
|
-
/**
|
2715
|
-
* Attributes of this file.
|
2716
|
-
*/
|
2717
|
-
__publicField$6(this, "attributes");
|
4561
|
+
this.id = file.id;
|
2718
4562
|
this.name = file.name;
|
2719
|
-
this.mediaType = file.mediaType
|
4563
|
+
this.mediaType = file.mediaType;
|
2720
4564
|
this.base64Content = file.base64Content;
|
2721
4565
|
this.enablePublicUrl = file.enablePublicUrl;
|
2722
4566
|
this.signedUrlTimeout = file.signedUrlTimeout;
|
4567
|
+
this.uploadUrlTimeout = file.uploadUrlTimeout;
|
2723
4568
|
this.size = file.size;
|
2724
4569
|
this.version = file.version;
|
2725
4570
|
this.url = file.url;
|
2726
4571
|
this.signedUrl = file.signedUrl;
|
4572
|
+
this.uploadUrl = file.uploadUrl;
|
2727
4573
|
this.attributes = file.attributes;
|
2728
4574
|
}
|
2729
4575
|
static fromBuffer(buffer, options = {}) {
|
@@ -2776,8 +4622,12 @@ class XataFile {
 if (!this.base64Content) {
 throw new Error(`File content is not available, please select property "base64Content" when querying the file`);
 }
-const
-
+const binary = atob(this.base64Content);
+const uint8Array = new Uint8Array(binary.length);
+for (let i = 0; i < binary.length; i++) {
+uint8Array[i] = binary.charCodeAt(i);
+}
+return new Blob([uint8Array], { type: this.mediaType });
 }
 static fromString(string, options = {}) {
 const base64Content = btoa(string);
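Note: toBlob now decodes base64Content with atob into a Uint8Array and wraps it in a Blob typed with mediaType. A hypothetical round trip, assuming the public XataFile class matches this build:

  const file = XataFile.fromString("hello world");   // base64Content = btoa("hello world")
  const text = await file.toBlob().text();           // "hello world"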
@@ -2800,16 +4650,27 @@ class XataFile {
 }
 transform(...options) {
 return {
-url: transformImage(this.url, options),
-signedUrl: transformImage(this.signedUrl, options)
+url: transformImage(this.url, ...options),
+signedUrl: transformImage(this.signedUrl, ...options),
+metadataUrl: transformImage(this.url, ...options, { format: "json" }),
+metadataSignedUrl: transformImage(this.signedUrl, ...options, { format: "json" })
 };
 }
 }
 const parseInputFileEntry = async (entry) => {
 if (!isDefined(entry))
 return null;
-const { id, name, mediaType, base64Content, enablePublicUrl, signedUrlTimeout } = await entry;
-return compactObject({
+const { id, name, mediaType, base64Content, enablePublicUrl, signedUrlTimeout, uploadUrlTimeout } = await entry;
+return compactObject({
+id,
+// Name cannot be an empty string in our API
+name: name ? name : void 0,
+mediaType,
+base64Content,
+enablePublicUrl,
+signedUrlTimeout,
+uploadUrlTimeout
+});
 };
 
 function cleanFilter(filter) {
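Note: XataFile.transform() now spreads its options into transformImage and additionally returns metadataUrl/metadataSignedUrl, which append { format: "json" } to the same transformation chain; parseInputFileEntry also forwards id and uploadUrlTimeout. A hypothetical usage (the photo column name is invented), assuming a record queried with its url column:

  const { url, metadataUrl } = record.photo.transform({ height: 100 });
  // url         -> transformed image URL
  // metadataUrl -> same transformation, returned as JSON metadata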
@@ -2839,17 +4700,30 @@ function cleanFilter(filter) {
 return Object.keys(values).length > 0 ? values : void 0;
 }
 
-
-
-
-
-
-
+function stringifyJson(value) {
+if (!isDefined(value))
+return value;
+if (isString(value))
+return value;
+try {
+return JSON.stringify(value);
+} catch (e) {
+return value;
+}
+}
+function parseJson(value) {
+try {
+return JSON.parse(value);
+} catch (e) {
+return value;
+}
+}
+
 var __accessCheck$6 = (obj, member, msg) => {
 if (!member.has(obj))
 throw TypeError("Cannot " + msg);
 };
-var __privateGet$
+var __privateGet$5 = (obj, member, getter) => {
 __accessCheck$6(obj, member, "read from private field");
 return getter ? getter.call(obj) : member.get(obj);
 };
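Note: stringifyJson and parseJson are deliberately lenient helpers backing the new "json" column type used further down (stringify on write, parse on read); both fall back to the input value when serialization or parsing fails. Behaviour implied by the code above:

  stringifyJson({ a: 1 });  // '{"a":1}'
  stringifyJson("raw");     // "raw" (strings pass through unchanged)
  parseJson('{"a":1}');     // { a: 1 }
  parseJson("not json");    // "not json" (parse failure returns the input)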
@@ -2858,7 +4732,7 @@ var __privateAdd$6 = (obj, member, value) => {
 throw TypeError("Cannot add the same private member more than once");
 member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
 };
-var __privateSet$
+var __privateSet$4 = (obj, member, value, setter) => {
 __accessCheck$6(obj, member, "write to private field");
 setter ? setter.call(obj, value) : member.set(obj, value);
 return value;
@@ -2867,17 +4741,9 @@ var _query, _page;
 class Page {
 constructor(query, meta, records = []) {
 __privateAdd$6(this, _query, void 0);
-
-* Page metadata, required to retrieve additional records.
-*/
-__publicField$5(this, "meta");
-/**
-* The set of results for this page.
-*/
-__publicField$5(this, "records");
-__privateSet$6(this, _query, query);
+__privateSet$4(this, _query, query);
 this.meta = meta;
-this.records = new
+this.records = new PageRecordArray(this, records);
 }
 /**
 * Retrieves the next page of results.
@@ -2886,7 +4752,7 @@ class Page {
 * @returns The next page or results.
 */
 async nextPage(size, offset) {
-return __privateGet$
+return __privateGet$5(this, _query).getPaginated({ pagination: { size, offset, after: this.meta.page.cursor } });
 }
 /**
 * Retrieves the previous page of results.
@@ -2895,7 +4761,7 @@ class Page {
 * @returns The previous page or results.
 */
 async previousPage(size, offset) {
-return __privateGet$
+return __privateGet$5(this, _query).getPaginated({ pagination: { size, offset, before: this.meta.page.cursor } });
 }
 /**
 * Retrieves the start page of results.
@@ -2904,7 +4770,7 @@ class Page {
 * @returns The start page or results.
 */
 async startPage(size, offset) {
-return __privateGet$
+return __privateGet$5(this, _query).getPaginated({ pagination: { size, offset, start: this.meta.page.cursor } });
 }
 /**
 * Retrieves the end page of results.
@@ -2913,7 +4779,7 @@ class Page {
 * @returns The end page or results.
 */
 async endPage(size, offset) {
-return __privateGet$
+return __privateGet$5(this, _query).getPaginated({ pagination: { size, offset, end: this.meta.page.cursor } });
 }
 /**
 * Shortcut method to check if there will be additional results if the next page of results is retrieved.
@@ -2924,18 +4790,45 @@ class Page {
 }
 }
 _query = new WeakMap();
-const PAGINATION_MAX_SIZE =
+const PAGINATION_MAX_SIZE = 1e3;
 const PAGINATION_DEFAULT_SIZE = 20;
-const PAGINATION_MAX_OFFSET =
+const PAGINATION_MAX_OFFSET = 49e3;
 const PAGINATION_DEFAULT_OFFSET = 0;
 function isCursorPaginationOptions(options) {
 return isDefined(options) && (isDefined(options.start) || isDefined(options.end) || isDefined(options.after) || isDefined(options.before));
 }
-
+class RecordArray extends Array {
+constructor(...args) {
+super(...RecordArray.parseConstructorParams(...args));
+}
+static parseConstructorParams(...args) {
+if (args.length === 1 && typeof args[0] === "number") {
+return new Array(args[0]);
+}
+if (args.length <= 1 && Array.isArray(args[0] ?? [])) {
+const result = args[0] ?? [];
+return new Array(...result);
+}
+return new Array(...args);
+}
+toArray() {
+return new Array(...this);
+}
+toSerializable() {
+return JSON.parse(this.toString());
+}
+toString() {
+return JSON.stringify(this.toArray());
+}
+map(callbackfn, thisArg) {
+return this.toArray().map(callbackfn, thisArg);
+}
+}
+const _PageRecordArray = class _PageRecordArray extends Array {
 constructor(...args) {
-super(...
+super(..._PageRecordArray.parseConstructorParams(...args));
 __privateAdd$6(this, _page, void 0);
-__privateSet$
+__privateSet$4(this, _page, isObject(args[0]?.meta) ? args[0] : { meta: { page: { cursor: "", more: false } }, records: [] });
 }
 static parseConstructorParams(...args) {
 if (args.length === 1 && typeof args[0] === "number") {
@@ -2965,8 +4858,8 @@ const _RecordArray = class _RecordArray extends Array {
 * @returns A new array of objects
 */
 async nextPage(size, offset) {
-const newPage = await __privateGet$
-return new
+const newPage = await __privateGet$5(this, _page).nextPage(size, offset);
+return new _PageRecordArray(newPage);
 }
 /**
 * Retrieve previous page of records
@@ -2974,8 +4867,8 @@ const _RecordArray = class _RecordArray extends Array {
 * @returns A new array of objects
 */
 async previousPage(size, offset) {
-const newPage = await __privateGet$
-return new
+const newPage = await __privateGet$5(this, _page).previousPage(size, offset);
+return new _PageRecordArray(newPage);
 }
 /**
 * Retrieve start page of records
@@ -2983,8 +4876,8 @@ const _RecordArray = class _RecordArray extends Array {
 * @returns A new array of objects
 */
 async startPage(size, offset) {
-const newPage = await __privateGet$
-return new
+const newPage = await __privateGet$5(this, _page).startPage(size, offset);
+return new _PageRecordArray(newPage);
 }
 /**
 * Retrieve end page of records
@@ -2992,30 +4885,24 @@ const _RecordArray = class _RecordArray extends Array {
 * @returns A new array of objects
 */
 async endPage(size, offset) {
-const newPage = await __privateGet$
-return new
+const newPage = await __privateGet$5(this, _page).endPage(size, offset);
+return new _PageRecordArray(newPage);
 }
 /**
 * @returns Boolean indicating if there is a next page
 */
 hasNextPage() {
-return __privateGet$
+return __privateGet$5(this, _page).meta.page.more;
 }
 };
 _page = new WeakMap();
-let
+let PageRecordArray = _PageRecordArray;
 
-var __defProp$4 = Object.defineProperty;
-var __defNormalProp$4 = (obj, key, value) => key in obj ? __defProp$4(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
-var __publicField$4 = (obj, key, value) => {
-__defNormalProp$4(obj, typeof key !== "symbol" ? key + "" : key, value);
-return value;
-};
 var __accessCheck$5 = (obj, member, msg) => {
 if (!member.has(obj))
 throw TypeError("Cannot " + msg);
 };
-var __privateGet$
+var __privateGet$4 = (obj, member, getter) => {
 __accessCheck$5(obj, member, "read from private field");
 return getter ? getter.call(obj) : member.get(obj);
 };
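Note: the result array is now split in two classes: PageRecordArray keeps the cursor helpers (nextPage, previousPage, hasNextPage, ...) and backs Page.records and getMany, while the new plain RecordArray only adds toArray/toSerializable/map and is what getAll returns below. A hypothetical consumer, assuming a generated client with a posts table:

  const page = await xata.db.posts.getPaginated();
  page.records.hasNextPage();               // PageRecordArray helper
  const all = await xata.db.posts.getAll();
  const plain = all.toSerializable();       // RecordArray: JSON-safe copy, no pagination helpers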
@@ -3024,7 +4911,7 @@ var __privateAdd$5 = (obj, member, value) => {
 throw TypeError("Cannot add the same private member more than once");
 member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
 };
-var __privateSet$
+var __privateSet$3 = (obj, member, value, setter) => {
 __accessCheck$5(obj, member, "write to private field");
 setter ? setter.call(obj, value) : member.set(obj, value);
 return value;
@@ -3041,26 +4928,26 @@ const _Query = class _Query {
 __privateAdd$5(this, _repository, void 0);
 __privateAdd$5(this, _data, { filter: {} });
 // Implements pagination
-
-
-__privateSet$
+this.meta = { page: { cursor: "start", more: true, size: PAGINATION_DEFAULT_SIZE } };
+this.records = new PageRecordArray(this, []);
+__privateSet$3(this, _table$1, table);
 if (repository) {
-__privateSet$
+__privateSet$3(this, _repository, repository);
 } else {
-__privateSet$
+__privateSet$3(this, _repository, this);
 }
 const parent = cleanParent(data, rawParent);
-__privateGet$
-__privateGet$
-__privateGet$
-__privateGet$
-__privateGet$
-__privateGet$
-__privateGet$
-__privateGet$
-__privateGet$
-__privateGet$
-__privateGet$
+__privateGet$4(this, _data).filter = data.filter ?? parent?.filter ?? {};
+__privateGet$4(this, _data).filter.$any = data.filter?.$any ?? parent?.filter?.$any;
+__privateGet$4(this, _data).filter.$all = data.filter?.$all ?? parent?.filter?.$all;
+__privateGet$4(this, _data).filter.$not = data.filter?.$not ?? parent?.filter?.$not;
+__privateGet$4(this, _data).filter.$none = data.filter?.$none ?? parent?.filter?.$none;
+__privateGet$4(this, _data).sort = data.sort ?? parent?.sort;
+__privateGet$4(this, _data).columns = data.columns ?? parent?.columns;
+__privateGet$4(this, _data).consistency = data.consistency ?? parent?.consistency;
+__privateGet$4(this, _data).pagination = data.pagination ?? parent?.pagination;
+__privateGet$4(this, _data).cache = data.cache ?? parent?.cache;
+__privateGet$4(this, _data).fetchOptions = data.fetchOptions ?? parent?.fetchOptions;
 this.any = this.any.bind(this);
 this.all = this.all.bind(this);
 this.not = this.not.bind(this);
@@ -3071,10 +4958,10 @@ const _Query = class _Query {
 Object.defineProperty(this, "repository", { enumerable: false });
 }
 getQueryOptions() {
-return __privateGet$
+return __privateGet$4(this, _data);
 }
 key() {
-const { columns = [], filter = {}, sort = [], pagination = {} } = __privateGet$
+const { columns = [], filter = {}, sort = [], pagination = {} } = __privateGet$4(this, _data);
 const key = JSON.stringify({ columns, filter, sort, pagination });
 return toBase64(key);
 }
@@ -3085,7 +4972,7 @@ const _Query = class _Query {
 */
 any(...queries) {
 const $any = queries.map((query) => query.getQueryOptions().filter ?? {});
-return new _Query(__privateGet$
+return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { filter: { $any } }, __privateGet$4(this, _data));
 }
 /**
 * Builds a new query object representing a logical AND between the given subqueries.
@@ -3094,7 +4981,7 @@ const _Query = class _Query {
 */
 all(...queries) {
 const $all = queries.map((query) => query.getQueryOptions().filter ?? {});
-return new _Query(__privateGet$
+return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { filter: { $all } }, __privateGet$4(this, _data));
 }
 /**
 * Builds a new query object representing a logical OR negating each subquery. In pseudo-code: !q1 OR !q2
@@ -3103,7 +4990,7 @@ const _Query = class _Query {
 */
 not(...queries) {
 const $not = queries.map((query) => query.getQueryOptions().filter ?? {});
-return new _Query(__privateGet$
+return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { filter: { $not } }, __privateGet$4(this, _data));
 }
 /**
 * Builds a new query object representing a logical AND negating each subquery. In pseudo-code: !q1 AND !q2
@@ -3112,25 +4999,25 @@ const _Query = class _Query {
 */
 none(...queries) {
 const $none = queries.map((query) => query.getQueryOptions().filter ?? {});
-return new _Query(__privateGet$
+return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { filter: { $none } }, __privateGet$4(this, _data));
 }
 filter(a, b) {
 if (arguments.length === 1) {
 const constraints = Object.entries(a ?? {}).map(([column, constraint]) => ({
 [column]: __privateMethod$3(this, _cleanFilterConstraint, cleanFilterConstraint_fn).call(this, column, constraint)
 }));
-const $all = compact([__privateGet$
-return new _Query(__privateGet$
+const $all = compact([__privateGet$4(this, _data).filter?.$all].flat().concat(constraints));
+return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { filter: { $all } }, __privateGet$4(this, _data));
 } else {
 const constraints = isDefined(a) && isDefined(b) ? [{ [a]: __privateMethod$3(this, _cleanFilterConstraint, cleanFilterConstraint_fn).call(this, a, b) }] : void 0;
-const $all = compact([__privateGet$
-return new _Query(__privateGet$
+const $all = compact([__privateGet$4(this, _data).filter?.$all].flat().concat(constraints));
+return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { filter: { $all } }, __privateGet$4(this, _data));
 }
 }
 sort(column, direction = "asc") {
-const originalSort = [__privateGet$
+const originalSort = [__privateGet$4(this, _data).sort ?? []].flat();
 const sort = [...originalSort, { column, direction }];
-return new _Query(__privateGet$
+return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { sort }, __privateGet$4(this, _data));
 }
 /**
 * Builds a new query specifying the set of columns to be returned in the query response.
@@ -3139,15 +5026,15 @@ const _Query = class _Query {
 */
 select(columns) {
 return new _Query(
-__privateGet$
-__privateGet$
+__privateGet$4(this, _repository),
+__privateGet$4(this, _table$1),
 { columns },
-__privateGet$
+__privateGet$4(this, _data)
 );
 }
 getPaginated(options = {}) {
-const query = new _Query(__privateGet$
-return __privateGet$
+const query = new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), options, __privateGet$4(this, _data));
+return __privateGet$4(this, _repository).query(query);
 }
 /**
 * Get results in an iterator
@@ -3184,7 +5071,7 @@ const _Query = class _Query {
 if (page.hasNextPage() && options.pagination?.size === void 0) {
 console.trace("Calling getMany does not return all results. Paginate to get all results or call getAll.");
 }
-const array = new
+const array = new PageRecordArray(page, results.slice(0, size));
 return array;
 }
 async getAll(options = {}) {
@@ -3193,7 +5080,7 @@ const _Query = class _Query {
 for await (const page of this.getIterator({ ...rest, batchSize })) {
 results.push(...page);
 }
-return results;
+return new RecordArray(results);
 }
 async getFirst(options = {}) {
 const records = await this.getMany({ ...options, pagination: { size: 1 } });
@@ -3208,12 +5095,12 @@ const _Query = class _Query {
 async summarize(params = {}) {
 const { summaries, summariesFilter, ...options } = params;
 const query = new _Query(
-__privateGet$
-__privateGet$
+__privateGet$4(this, _repository),
+__privateGet$4(this, _table$1),
 options,
-__privateGet$
+__privateGet$4(this, _data)
 );
-return __privateGet$
+return __privateGet$4(this, _repository).summarizeTable(query, summaries, summariesFilter);
 }
 /**
 * Builds a new query object adding a cache TTL in milliseconds.
@@ -3221,7 +5108,7 @@ const _Query = class _Query {
 * @returns A new Query object.
 */
 cache(ttl) {
-return new _Query(__privateGet$
+return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { cache: ttl }, __privateGet$4(this, _data));
 }
 /**
 * Retrieve next page of records
@@ -3267,7 +5154,7 @@ _repository = new WeakMap();
 _data = new WeakMap();
 _cleanFilterConstraint = new WeakSet();
 cleanFilterConstraint_fn = function(column, value) {
-const columnType = __privateGet$
+const columnType = __privateGet$4(this, _table$1).schema?.columns.find(({ name }) => name === column)?.type;
 if (columnType === "multiple" && (isString(value) || isStringArray(value))) {
 return { $includes: value };
 }
@@ -3293,11 +5180,11 @@ const RecordColumnTypes = [
 "email",
 "multiple",
 "link",
-"object",
 "datetime",
 "vector",
 "file[]",
-"file"
+"file",
+"json"
 ];
 function isIdentifiable(x) {
 return isObject(x) && isString(x?.id);
@@ -3308,6 +5195,24 @@ function isXataRecord(x) {
 return isIdentifiable(x) && isObject(metadata) && typeof metadata.version === "number";
 }
 
+function isValidExpandedColumn(column) {
+return isObject(column) && isString(column.name);
+}
+function isValidSelectableColumns(columns) {
+if (!Array.isArray(columns)) {
+return false;
+}
+return columns.every((column) => {
+if (typeof column === "string") {
+return true;
+}
+if (typeof column === "object") {
+return isValidExpandedColumn(column);
+}
+return false;
+});
+}
+
 function isSortFilterString(value) {
 return isString(value);
 }
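Note: column arguments are now validated with isValidSelectableColumns, which accepts plain column names as well as expanded column objects carrying a name property, replacing the string-only checks in the repository methods below. Behaviour implied by the code above:

  isValidSelectableColumns(["name", "owner.email"]);            // true
  isValidSelectableColumns(["name", { name: "attachments" }]);  // true
  isValidSelectableColumns("name");                             // false (must be an array)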
@@ -3339,7 +5244,7 @@ var __accessCheck$4 = (obj, member, msg) => {
 if (!member.has(obj))
 throw TypeError("Cannot " + msg);
 };
-var __privateGet$
+var __privateGet$3 = (obj, member, getter) => {
 __accessCheck$4(obj, member, "read from private field");
 return getter ? getter.call(obj) : member.get(obj);
 };
@@ -3348,7 +5253,7 @@ var __privateAdd$4 = (obj, member, value) => {
 throw TypeError("Cannot add the same private member more than once");
 member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
 };
-var __privateSet$
+var __privateSet$2 = (obj, member, value, setter) => {
 __accessCheck$4(obj, member, "write to private field");
 setter ? setter.call(obj, value) : member.set(obj, value);
 return value;
@@ -3357,7 +5262,7 @@ var __privateMethod$2 = (obj, member, method) => {
 __accessCheck$4(obj, member, "access private method");
 return method;
 };
-var _table, _getFetchProps, _db, _cache, _schemaTables
+var _table, _getFetchProps, _db, _cache, _schemaTables, _trace, _insertRecordWithoutId, insertRecordWithoutId_fn, _insertRecordWithId, insertRecordWithId_fn, _insertRecords, insertRecords_fn, _updateRecordWithID, updateRecordWithID_fn, _updateRecords, updateRecords_fn, _upsertRecordWithID, upsertRecordWithID_fn, _deleteRecord, deleteRecord_fn, _deleteRecords, deleteRecords_fn, _setCacheQuery, setCacheQuery_fn, _getCacheQuery, getCacheQuery_fn, _getSchemaTables, getSchemaTables_fn, _transformObjectToApi, transformObjectToApi_fn;
 const BULK_OPERATION_MAX_SIZE = 1e3;
 class Repository extends Query {
 }
@@ -3378,62 +5283,62 @@ class RestRepository extends Query {
 __privateAdd$4(this, _deleteRecords);
 __privateAdd$4(this, _setCacheQuery);
 __privateAdd$4(this, _getCacheQuery);
-__privateAdd$4(this, _getSchemaTables
+__privateAdd$4(this, _getSchemaTables);
 __privateAdd$4(this, _transformObjectToApi);
 __privateAdd$4(this, _table, void 0);
 __privateAdd$4(this, _getFetchProps, void 0);
 __privateAdd$4(this, _db, void 0);
 __privateAdd$4(this, _cache, void 0);
-__privateAdd$4(this, _schemaTables
+__privateAdd$4(this, _schemaTables, void 0);
 __privateAdd$4(this, _trace, void 0);
-__privateSet$
-__privateSet$
-__privateSet$
-__privateSet$
-__privateSet$
+__privateSet$2(this, _table, options.table);
+__privateSet$2(this, _db, options.db);
+__privateSet$2(this, _cache, options.pluginOptions.cache);
+__privateSet$2(this, _schemaTables, options.schemaTables);
+__privateSet$2(this, _getFetchProps, () => ({ ...options.pluginOptions, sessionID: generateUUID() }));
 const trace = options.pluginOptions.trace ?? defaultTrace;
-__privateSet$
+__privateSet$2(this, _trace, async (name, fn, options2 = {}) => {
 return trace(name, fn, {
 ...options2,
-[TraceAttributes.TABLE]: __privateGet$
+[TraceAttributes.TABLE]: __privateGet$3(this, _table),
 [TraceAttributes.KIND]: "sdk-operation",
 [TraceAttributes.VERSION]: VERSION
 });
 });
 }
 async create(a, b, c, d) {
-return __privateGet$
+return __privateGet$3(this, _trace).call(this, "create", async () => {
 const ifVersion = parseIfVersion(b, c, d);
 if (Array.isArray(a)) {
 if (a.length === 0)
 return [];
 const ids = await __privateMethod$2(this, _insertRecords, insertRecords_fn).call(this, a, { ifVersion, createOnly: true });
-const columns =
+const columns = isValidSelectableColumns(b) ? b : ["*"];
 const result = await this.read(ids, columns);
 return result;
 }
 if (isString(a) && isObject(b)) {
 if (a === "")
 throw new Error("The id can't be empty");
-const columns =
+const columns = isValidSelectableColumns(c) ? c : void 0;
 return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: true, ifVersion });
 }
 if (isObject(a) && isString(a.id)) {
 if (a.id === "")
 throw new Error("The id can't be empty");
-const columns =
+const columns = isValidSelectableColumns(b) ? b : void 0;
 return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a.id, { ...a, id: void 0 }, columns, { createOnly: true, ifVersion });
 }
 if (isObject(a)) {
-const columns =
+const columns = isValidSelectableColumns(b) ? b : void 0;
 return __privateMethod$2(this, _insertRecordWithoutId, insertRecordWithoutId_fn).call(this, a, columns);
 }
 throw new Error("Invalid arguments for create method");
 });
 }
 async read(a, b) {
-return __privateGet$
-const columns =
+return __privateGet$3(this, _trace).call(this, "read", async () => {
+const columns = isValidSelectableColumns(b) ? b : ["*"];
 if (Array.isArray(a)) {
 if (a.length === 0)
 return [];
@@ -3453,14 +5358,20 @@ class RestRepository extends Query {
 workspace: "{workspaceId}",
 dbBranchName: "{dbBranch}",
 region: "{region}",
-tableName: __privateGet$
+tableName: __privateGet$3(this, _table),
 recordId: id
 },
 queryParams: { columns },
-...__privateGet$
+...__privateGet$3(this, _getFetchProps).call(this)
 });
-const schemaTables = await __privateMethod$2(this, _getSchemaTables
-return initObject(
+const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
+return initObject(
+__privateGet$3(this, _db),
+schemaTables,
+__privateGet$3(this, _table),
+response,
+columns
+);
 } catch (e) {
 if (isObject(e) && e.status === 404) {
 return null;
@@ -3472,7 +5383,7 @@ class RestRepository extends Query {
 });
 }
 async readOrThrow(a, b) {
-return __privateGet$
+return __privateGet$3(this, _trace).call(this, "readOrThrow", async () => {
 const result = await this.read(a, b);
 if (Array.isArray(result)) {
 const missingIds = compact(
@@ -3491,7 +5402,7 @@ class RestRepository extends Query {
 });
 }
 async update(a, b, c, d) {
-return __privateGet$
+return __privateGet$3(this, _trace).call(this, "update", async () => {
 const ifVersion = parseIfVersion(b, c, d);
 if (Array.isArray(a)) {
 if (a.length === 0)
@@ -3502,17 +5413,17 @@ class RestRepository extends Query {
 ifVersion,
 upsert: false
 });
-const columns =
+const columns = isValidSelectableColumns(b) ? b : ["*"];
 const result = await this.read(a, columns);
 return result;
 }
 try {
 if (isString(a) && isObject(b)) {
-const columns =
+const columns = isValidSelectableColumns(c) ? c : void 0;
 return await __privateMethod$2(this, _updateRecordWithID, updateRecordWithID_fn).call(this, a, b, columns, { ifVersion });
 }
 if (isObject(a) && isString(a.id)) {
-const columns =
+const columns = isValidSelectableColumns(b) ? b : void 0;
 return await __privateMethod$2(this, _updateRecordWithID, updateRecordWithID_fn).call(this, a.id, { ...a, id: void 0 }, columns, { ifVersion });
 }
 } catch (error) {
@@ -3524,7 +5435,7 @@ class RestRepository extends Query {
 });
 }
 async updateOrThrow(a, b, c, d) {
-return __privateGet$
+return __privateGet$3(this, _trace).call(this, "updateOrThrow", async () => {
 const result = await this.update(a, b, c, d);
 if (Array.isArray(result)) {
 const missingIds = compact(
@@ -3543,7 +5454,7 @@ class RestRepository extends Query {
 });
 }
 async createOrUpdate(a, b, c, d) {
-return __privateGet$
+return __privateGet$3(this, _trace).call(this, "createOrUpdate", async () => {
 const ifVersion = parseIfVersion(b, c, d);
 if (Array.isArray(a)) {
 if (a.length === 0)
@@ -3552,20 +5463,20 @@ class RestRepository extends Query {
 ifVersion,
 upsert: true
 });
-const columns =
+const columns = isValidSelectableColumns(b) ? b : ["*"];
 const result = await this.read(a, columns);
 return result;
 }
 if (isString(a) && isObject(b)) {
 if (a === "")
 throw new Error("The id can't be empty");
-const columns =
+const columns = isValidSelectableColumns(c) ? c : void 0;
 return await __privateMethod$2(this, _upsertRecordWithID, upsertRecordWithID_fn).call(this, a, b, columns, { ifVersion });
 }
 if (isObject(a) && isString(a.id)) {
 if (a.id === "")
 throw new Error("The id can't be empty");
-const columns =
+const columns = isValidSelectableColumns(c) ? c : void 0;
 return await __privateMethod$2(this, _upsertRecordWithID, upsertRecordWithID_fn).call(this, a.id, { ...a, id: void 0 }, columns, { ifVersion });
 }
 if (!isDefined(a) && isObject(b)) {
@@ -3578,26 +5489,26 @@ class RestRepository extends Query {
 });
 }
 async createOrReplace(a, b, c, d) {
-return __privateGet$
+return __privateGet$3(this, _trace).call(this, "createOrReplace", async () => {
 const ifVersion = parseIfVersion(b, c, d);
 if (Array.isArray(a)) {
 if (a.length === 0)
 return [];
 const ids = await __privateMethod$2(this, _insertRecords, insertRecords_fn).call(this, a, { ifVersion, createOnly: false });
-const columns =
+const columns = isValidSelectableColumns(b) ? b : ["*"];
 const result = await this.read(ids, columns);
 return result;
 }
 if (isString(a) && isObject(b)) {
 if (a === "")
 throw new Error("The id can't be empty");
-const columns =
+const columns = isValidSelectableColumns(c) ? c : void 0;
 return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: false, ifVersion });
 }
 if (isObject(a) && isString(a.id)) {
 if (a.id === "")
 throw new Error("The id can't be empty");
-const columns =
+const columns = isValidSelectableColumns(c) ? c : void 0;
 return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a.id, { ...a, id: void 0 }, columns, { createOnly: false, ifVersion });
 }
 if (!isDefined(a) && isObject(b)) {
@@ -3610,7 +5521,7 @@ class RestRepository extends Query {
 });
 }
 async delete(a, b) {
-return __privateGet$
+return __privateGet$3(this, _trace).call(this, "delete", async () => {
 if (Array.isArray(a)) {
 if (a.length === 0)
 return [];
@@ -3621,7 +5532,7 @@ class RestRepository extends Query {
 return o.id;
 throw new Error("Invalid arguments for delete method");
 });
-const columns =
+const columns = isValidSelectableColumns(b) ? b : ["*"];
 const result = await this.read(a, columns);
 await __privateMethod$2(this, _deleteRecords, deleteRecords_fn).call(this, ids);
 return result;
@@ -3636,7 +5547,7 @@ class RestRepository extends Query {
 });
 }
 async deleteOrThrow(a, b) {
-return __privateGet$
+return __privateGet$3(this, _trace).call(this, "deleteOrThrow", async () => {
 const result = await this.delete(a, b);
 if (Array.isArray(result)) {
 const missingIds = compact(
@@ -3654,13 +5565,13 @@ class RestRepository extends Query {
 });
 }
 async search(query, options = {}) {
-return __privateGet$
-const { records } = await searchTable({
+return __privateGet$3(this, _trace).call(this, "search", async () => {
+const { records, totalCount } = await searchTable({
 pathParams: {
 workspace: "{workspaceId}",
 dbBranchName: "{dbBranch}",
 region: "{region}",
-tableName: __privateGet$
+tableName: __privateGet$3(this, _table)
 },
 body: {
 query,
@@ -3672,20 +5583,23 @@ class RestRepository extends Query {
 page: options.page,
 target: options.target
 },
-...__privateGet$
+...__privateGet$3(this, _getFetchProps).call(this)
 });
-const schemaTables = await __privateMethod$2(this, _getSchemaTables
-return
+const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
+return {
+records: records.map((item) => initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), item, ["*"])),
+totalCount
+};
 });
 }
 async vectorSearch(column, query, options) {
-return __privateGet$
-const { records } = await vectorSearchTable({
+return __privateGet$3(this, _trace).call(this, "vectorSearch", async () => {
+const { records, totalCount } = await vectorSearchTable({
 pathParams: {
 workspace: "{workspaceId}",
 dbBranchName: "{dbBranch}",
 region: "{region}",
-tableName: __privateGet$
+tableName: __privateGet$3(this, _table)
 },
 body: {
 column,
@@ -3694,29 +5608,32 @@ class RestRepository extends Query {
 size: options?.size,
 filter: options?.filter
 },
-...__privateGet$
+...__privateGet$3(this, _getFetchProps).call(this)
 });
-const schemaTables = await __privateMethod$2(this, _getSchemaTables
-return
+const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
+return {
+records: records.map((item) => initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), item, ["*"])),
+totalCount
+};
 });
 }
 async aggregate(aggs, filter) {
-return __privateGet$
+return __privateGet$3(this, _trace).call(this, "aggregate", async () => {
 const result = await aggregateTable({
 pathParams: {
 workspace: "{workspaceId}",
 dbBranchName: "{dbBranch}",
 region: "{region}",
-tableName: __privateGet$
+tableName: __privateGet$3(this, _table)
 },
 body: { aggs, filter },
-...__privateGet$
+...__privateGet$3(this, _getFetchProps).call(this)
 });
 return result;
 });
 }
 async query(query) {
-return __privateGet$
+return __privateGet$3(this, _trace).call(this, "query", async () => {
 const cacheQuery = await __privateMethod$2(this, _getCacheQuery, getCacheQuery_fn).call(this, query);
 if (cacheQuery)
 return new Page(query, cacheQuery.meta, cacheQuery.records);
@@ -3726,7 +5643,7 @@ class RestRepository extends Query {
 workspace: "{workspaceId}",
 dbBranchName: "{dbBranch}",
 region: "{region}",
-tableName: __privateGet$
+tableName: __privateGet$3(this, _table)
 },
 body: {
 filter: cleanFilter(data.filter),
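Note: search and vectorSearch now resolve to an object instead of a bare array, exposing the server-reported totalCount alongside the hydrated records. A hypothetical call, assuming a generated client with a posts table:

  const { records, totalCount } = await xata.db.posts.search("hello");
  console.log(`showing ${records.length} of ${totalCount}`);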
@@ -3736,25 +5653,31 @@ class RestRepository extends Query {
 consistency: data.consistency
 },
 fetchOptions: data.fetchOptions,
-...__privateGet$
+...__privateGet$3(this, _getFetchProps).call(this)
 });
-const schemaTables = await __privateMethod$2(this, _getSchemaTables
+const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
 const records = objects.map(
-(record) => initObject(
+(record) => initObject(
+__privateGet$3(this, _db),
+schemaTables,
+__privateGet$3(this, _table),
+record,
+data.columns ?? ["*"]
+)
 );
 await __privateMethod$2(this, _setCacheQuery, setCacheQuery_fn).call(this, query, meta, records);
 return new Page(query, meta, records);
 });
 }
 async summarizeTable(query, summaries, summariesFilter) {
-return __privateGet$
+return __privateGet$3(this, _trace).call(this, "summarize", async () => {
 const data = query.getQueryOptions();
 const result = await summarizeTable({
 pathParams: {
 workspace: "{workspaceId}",
 dbBranchName: "{dbBranch}",
 region: "{region}",
-tableName: __privateGet$
+tableName: __privateGet$3(this, _table)
 },
 body: {
 filter: cleanFilter(data.filter),
@@ -3765,9 +5688,15 @@ class RestRepository extends Query {
 summaries,
 summariesFilter
 },
-...__privateGet$
+...__privateGet$3(this, _getFetchProps).call(this)
 });
-
+const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
+return {
+...result,
+summaries: result.summaries.map(
+(summary) => initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), summary, data.columns ?? [])
+)
+};
 });
 }
 ask(question, options) {
@@ -3777,7 +5706,7 @@ class RestRepository extends Query {
 workspace: "{workspaceId}",
 dbBranchName: "{dbBranch}",
 region: "{region}",
-tableName: __privateGet$
+tableName: __privateGet$3(this, _table),
 sessionId: options?.sessionId
 },
 body: {
@@ -3787,7 +5716,7 @@ class RestRepository extends Query {
 search: options?.searchType === "keyword" ? options?.search : void 0,
 vectorSearch: options?.searchType === "vector" ? options?.vectorSearch : void 0
 },
-...__privateGet$
+...__privateGet$3(this, _getFetchProps).call(this)
 };
 if (options?.onMessage) {
 fetchSSERequest({
@@ -3808,7 +5737,7 @@ _table = new WeakMap();
 _getFetchProps = new WeakMap();
 _db = new WeakMap();
 _cache = new WeakMap();
-_schemaTables
+_schemaTables = new WeakMap();
 _trace = new WeakMap();
 _insertRecordWithoutId = new WeakSet();
 insertRecordWithoutId_fn = async function(object, columns = ["*"]) {
@@ -3818,14 +5747,14 @@ insertRecordWithoutId_fn = async function(object, columns = ["*"]) {
 workspace: "{workspaceId}",
 dbBranchName: "{dbBranch}",
 region: "{region}",
-tableName: __privateGet$
+tableName: __privateGet$3(this, _table)
 },
 queryParams: { columns },
 body: record,
-...__privateGet$
+...__privateGet$3(this, _getFetchProps).call(this)
 });
-const schemaTables = await __privateMethod$2(this, _getSchemaTables
-return initObject(__privateGet$
+const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
+return initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), response, columns);
 };
 _insertRecordWithId = new WeakSet();
 insertRecordWithId_fn = async function(recordId, object, columns = ["*"], { createOnly, ifVersion }) {
@@ -3837,21 +5766,21 @@ insertRecordWithId_fn = async function(recordId, object, columns = ["*"], { crea
 workspace: "{workspaceId}",
 dbBranchName: "{dbBranch}",
 region: "{region}",
-tableName: __privateGet$
+tableName: __privateGet$3(this, _table),
 recordId
 },
 body: record,
 queryParams: { createOnly, columns, ifVersion },
-...__privateGet$
+...__privateGet$3(this, _getFetchProps).call(this)
 });
-const schemaTables = await __privateMethod$2(this, _getSchemaTables
-return initObject(__privateGet$
+const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
+return initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), response, columns);
 };
 _insertRecords = new WeakSet();
 insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
 const operations = await promiseMap(objects, async (object) => {
 const record = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
-return { insert: { table: __privateGet$
+return { insert: { table: __privateGet$3(this, _table), record, createOnly, ifVersion } };
 });
 const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
 const ids = [];
@@ -3863,7 +5792,7 @@ insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
 region: "{region}"
 },
 body: { operations: operations2 },
-...__privateGet$
+...__privateGet$3(this, _getFetchProps).call(this)
 });
 for (const result of results) {
 if (result.operation === "insert") {
@@ -3886,15 +5815,15 @@ updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
 workspace: "{workspaceId}",
 dbBranchName: "{dbBranch}",
 region: "{region}",
-tableName: __privateGet$
+tableName: __privateGet$3(this, _table),
 recordId
 },
 queryParams: { columns, ifVersion },
 body: record,
-...__privateGet$
+...__privateGet$3(this, _getFetchProps).call(this)
 });
-const schemaTables = await __privateMethod$2(this, _getSchemaTables
-return initObject(__privateGet$
+const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
+return initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), response, columns);
 } catch (e) {
 if (isObject(e) && e.status === 404) {
 return null;
@@ -3906,7 +5835,7 @@ _updateRecords = new WeakSet();
 updateRecords_fn = async function(objects, { ifVersion, upsert }) {
 const operations = await promiseMap(objects, async ({ id, ...object }) => {
 const fields = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
-return { update: { table: __privateGet$
+return { update: { table: __privateGet$3(this, _table), id, ifVersion, upsert, fields } };
 });
 const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
 const ids = [];
@@ -3918,7 +5847,7 @@ updateRecords_fn = async function(objects, { ifVersion, upsert }) {
 region: "{region}"
 },
 body: { operations: operations2 },
-...__privateGet$
+...__privateGet$3(this, _getFetchProps).call(this)
 });
 for (const result of results) {
 if (result.operation === "update") {
@@ -3939,15 +5868,15 @@ upsertRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
 workspace: "{workspaceId}",
 dbBranchName: "{dbBranch}",
 region: "{region}",
-tableName: __privateGet$
+tableName: __privateGet$3(this, _table),
 recordId
 },
 queryParams: { columns, ifVersion },
 body: object,
-...__privateGet$
+...__privateGet$3(this, _getFetchProps).call(this)
 });
-const schemaTables = await __privateMethod$2(this, _getSchemaTables
-return initObject(__privateGet$
+const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
+return initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), response, columns);
 };
 _deleteRecord = new WeakSet();
 deleteRecord_fn = async function(recordId, columns = ["*"]) {
@@ -3959,14 +5888,14 @@ deleteRecord_fn = async function(recordId, columns = ["*"]) {
 workspace: "{workspaceId}",
 dbBranchName: "{dbBranch}",
 region: "{region}",
-tableName: __privateGet$
+tableName: __privateGet$3(this, _table),
 recordId
 },
 queryParams: { columns },
-...__privateGet$
+...__privateGet$3(this, _getFetchProps).call(this)
 });
-const schemaTables = await __privateMethod$2(this, _getSchemaTables
-return initObject(__privateGet$
+const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
+return initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), response, columns);
 } catch (e) {
 if (isObject(e) && e.status === 404) {
 return null;
@@ -3977,7 +5906,7 @@ deleteRecord_fn = async function(recordId, columns = ["*"]) {
 _deleteRecords = new WeakSet();
 deleteRecords_fn = async function(recordIds) {
 const chunkedOperations = chunk(
-compact(recordIds).map((id) => ({ delete: { table: __privateGet$
+compact(recordIds).map((id) => ({ delete: { table: __privateGet$3(this, _table), id } })),
 BULK_OPERATION_MAX_SIZE
 );
 for (const operations of chunkedOperations) {
@@ -3988,44 +5917,44 @@ deleteRecords_fn = async function(recordIds) {
 region: "{region}"
 },
 body: { operations },
-...__privateGet$
+...__privateGet$3(this, _getFetchProps).call(this)
 });
 }
 };
 _setCacheQuery = new WeakSet();
 setCacheQuery_fn = async function(query, meta, records) {
-await __privateGet$
+await __privateGet$3(this, _cache)?.set(`query_${__privateGet$3(this, _table)}:${query.key()}`, { date: /* @__PURE__ */ new Date(), meta, records });
 };
 _getCacheQuery = new WeakSet();
 getCacheQuery_fn = async function(query) {
-const key = `query_${__privateGet$
-const result = await __privateGet$
+const key = `query_${__privateGet$3(this, _table)}:${query.key()}`;
+const result = await __privateGet$3(this, _cache)?.get(key);
 if (!result)
 return null;
-const defaultTTL = __privateGet$
+const defaultTTL = __privateGet$3(this, _cache)?.defaultQueryTTL ?? -1;
 const { cache: ttl = defaultTTL } = query.getQueryOptions();
 if (ttl < 0)
 return null;
 const hasExpired = result.date.getTime() + ttl < Date.now();
 return hasExpired ? null : result;
 };
-_getSchemaTables
-getSchemaTables_fn
-if (__privateGet$
-return __privateGet$
+_getSchemaTables = new WeakSet();
+getSchemaTables_fn = async function() {
+if (__privateGet$3(this, _schemaTables))
+return __privateGet$3(this, _schemaTables);
 const { schema } = await getBranchDetails({
 pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
-...__privateGet$
+...__privateGet$3(this, _getFetchProps).call(this)
 });
-__privateSet$
+__privateSet$2(this, _schemaTables, schema.tables);
 return schema.tables;
 };
 _transformObjectToApi = new WeakSet();
 transformObjectToApi_fn = async function(object) {
-const schemaTables = await __privateMethod$2(this, _getSchemaTables
-const schema = schemaTables.find((table) => table.name === __privateGet$
+const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
+const schema = schemaTables.find((table) => table.name === __privateGet$3(this, _table));
 if (!schema)
-throw new Error(`Table ${__privateGet$
+throw new Error(`Table ${__privateGet$3(this, _table)} not found in schema`);
 const result = {};
 for (const [key, value] of Object.entries(object)) {
 if (key === "xata")
@@ -4046,19 +5975,15 @@ transformObjectToApi_fn = async function(object) {
|
|
4046
5975
|
case "file[]":
|
4047
5976
|
result[key] = await promiseMap(value, (item) => parseInputFileEntry(item));
|
4048
5977
|
break;
|
5978
|
+
case "json":
|
5979
|
+
result[key] = stringifyJson(value);
|
5980
|
+
break;
|
4049
5981
|
default:
|
4050
5982
|
result[key] = value;
|
4051
5983
|
}
|
4052
5984
|
}
|
4053
5985
|
return result;
|
4054
5986
|
};
|
4055
|
-
const removeLinksFromObject = (object) => {
|
4056
|
-
return Object.entries(object).reduce((acc, [key, value]) => {
|
4057
|
-
if (key === "xata")
|
4058
|
-
return acc;
|
4059
|
-
return { ...acc, [key]: isIdentifiable(value) ? value.id : value };
|
4060
|
-
}, {});
|
4061
|
-
};
|
4062
5987
|
const initObject = (db, schemaTables, table, object, selectedColumns) => {
|
4063
5988
|
const data = {};
|
4064
5989
|
const { xata, ...rest } = object ?? {};
|
@@ -4089,13 +6014,19 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
|
|
4089
6014
|
if (item === column.name) {
|
4090
6015
|
return [...acc, "*"];
|
4091
6016
|
}
|
4092
|
-
if (item.startsWith(`${column.name}.`)) {
|
6017
|
+
if (isString(item) && item.startsWith(`${column.name}.`)) {
|
4093
6018
|
const [, ...path] = item.split(".");
|
4094
6019
|
return [...acc, path.join(".")];
|
4095
6020
|
}
|
4096
6021
|
return acc;
|
4097
6022
|
}, []);
|
4098
|
-
data[column.name] = initObject(
|
6023
|
+
data[column.name] = initObject(
|
6024
|
+
db,
|
6025
|
+
schemaTables,
|
6026
|
+
linkTable,
|
6027
|
+
value,
|
6028
|
+
selectedLinkColumns
|
6029
|
+
);
|
4099
6030
|
} else {
|
4100
6031
|
data[column.name] = null;
|
4101
6032
|
}
|
@@ -4107,6 +6038,9 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
|
|
4107
6038
|
case "file[]":
|
4108
6039
|
data[column.name] = value?.map((item) => new XataFile(item)) ?? null;
|
4109
6040
|
break;
|
6041
|
+
case "json":
|
6042
|
+
data[column.name] = parseJson(value);
|
6043
|
+
break;
|
4110
6044
|
default:
|
4111
6045
|
data[column.name] = value ?? null;
|
4112
6046
|
if (column.notNull === true && value === null) {
|
@@ -4116,33 +6050,34 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
|
|
4116
6050
|
}
|
4117
6051
|
}
|
4118
6052
|
const record = { ...data };
|
4119
|
-
const serializable = { xata, ...removeLinksFromObject(data) };
|
4120
6053
|
const metadata = xata !== void 0 ? { ...xata, createdAt: new Date(xata.createdAt), updatedAt: new Date(xata.updatedAt) } : void 0;
|
4121
6054
|
record.read = function(columns2) {
|
4122
6055
|
return db[table].read(record["id"], columns2);
|
4123
6056
|
};
|
4124
6057
|
record.update = function(data2, b, c) {
|
4125
|
-
const columns2 =
|
6058
|
+
const columns2 = isValidSelectableColumns(b) ? b : ["*"];
|
4126
6059
|
const ifVersion = parseIfVersion(b, c);
|
4127
6060
|
return db[table].update(record["id"], data2, columns2, { ifVersion });
|
4128
6061
|
};
|
4129
6062
|
record.replace = function(data2, b, c) {
|
4130
|
-
const columns2 =
|
6063
|
+
const columns2 = isValidSelectableColumns(b) ? b : ["*"];
|
4131
6064
|
const ifVersion = parseIfVersion(b, c);
|
4132
6065
|
return db[table].createOrReplace(record["id"], data2, columns2, { ifVersion });
|
4133
6066
|
};
|
4134
6067
|
record.delete = function() {
|
4135
6068
|
return db[table].delete(record["id"]);
|
4136
6069
|
};
|
4137
|
-
|
6070
|
+
if (metadata !== void 0) {
|
6071
|
+
record.xata = Object.freeze(metadata);
|
6072
|
+
}
|
4138
6073
|
record.getMetadata = function() {
|
4139
6074
|
return record.xata;
|
4140
6075
|
};
|
4141
6076
|
record.toSerializable = function() {
|
4142
|
-
return JSON.parse(JSON.stringify(
|
6077
|
+
return JSON.parse(JSON.stringify(record));
|
4143
6078
|
};
|
4144
6079
|
record.toString = function() {
|
4145
|
-
return JSON.stringify(
|
6080
|
+
return JSON.stringify(record);
|
4146
6081
|
};
|
4147
6082
|
for (const prop of ["read", "update", "replace", "delete", "getMetadata", "toSerializable", "toString"]) {
|
4148
6083
|
Object.defineProperty(record, prop, { enumerable: false });
|
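
With this hunk the record helpers attached by `initObject` use `isValidSelectableColumns` to decide whether their second argument is a column selection (falling back to `["*"]`), the `xata` metadata is frozen onto the record only when present, and `toSerializable`/`toString` now serialise the record itself rather than a link-stripped copy. A short sketch of the two call shapes, with the `posts` table and record id purely illustrative:

  import { BaseClient } from "@xata.io/client";

  const xata = new BaseClient({ apiKey: process.env.XATA_API_KEY, databaseURL: process.env.XATA_DATABASE_URL });

  const post = await xata.db.posts.read("rec_123");

  // Second argument recognised as a column selection by isValidSelectableColumns.
  await post?.update({ title: "Updated title" }, ["id", "title"]);

  // Second argument treated as options instead; parseIfVersion picks up ifVersion.
  await post?.replace({ title: "Replaced title" }, { ifVersion: 3 });

  // Serialisation helpers now operate on the record as-is.
  console.log(post?.toSerializable());
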
@@ -4160,7 +6095,7 @@ function extractId(value) {
|
|
4160
6095
|
function isValidColumn(columns, column) {
|
4161
6096
|
if (columns.includes("*"))
|
4162
6097
|
return true;
|
4163
|
-
return columns.filter((item) => item.startsWith(column.name)).length > 0;
|
6098
|
+
return columns.filter((item) => isString(item) && item.startsWith(column.name)).length > 0;
|
4164
6099
|
}
|
4165
6100
|
function parseIfVersion(...args) {
|
4166
6101
|
for (const arg of args) {
|
@@ -4171,17 +6106,11 @@ function parseIfVersion(...args) {
|
|
4171
6106
|
return void 0;
|
4172
6107
|
}
|
4173
6108
|
|
4174
|
-
var __defProp$3 = Object.defineProperty;
|
4175
|
-
var __defNormalProp$3 = (obj, key, value) => key in obj ? __defProp$3(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
|
4176
|
-
var __publicField$3 = (obj, key, value) => {
|
4177
|
-
__defNormalProp$3(obj, typeof key !== "symbol" ? key + "" : key, value);
|
4178
|
-
return value;
|
4179
|
-
};
|
4180
6109
|
var __accessCheck$3 = (obj, member, msg) => {
|
4181
6110
|
if (!member.has(obj))
|
4182
6111
|
throw TypeError("Cannot " + msg);
|
4183
6112
|
};
|
4184
|
-
var __privateGet$
|
6113
|
+
var __privateGet$2 = (obj, member, getter) => {
|
4185
6114
|
__accessCheck$3(obj, member, "read from private field");
|
4186
6115
|
return getter ? getter.call(obj) : member.get(obj);
|
4187
6116
|
};
|
@@ -4190,7 +6119,7 @@ var __privateAdd$3 = (obj, member, value) => {
|
|
4190
6119
|
throw TypeError("Cannot add the same private member more than once");
|
4191
6120
|
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
4192
6121
|
};
|
4193
|
-
var __privateSet$
|
6122
|
+
var __privateSet$1 = (obj, member, value, setter) => {
|
4194
6123
|
__accessCheck$3(obj, member, "write to private field");
|
4195
6124
|
setter ? setter.call(obj, value) : member.set(obj, value);
|
4196
6125
|
return value;
|
@@ -4199,31 +6128,29 @@ var _map;
|
|
4199
6128
|
class SimpleCache {
|
4200
6129
|
constructor(options = {}) {
|
4201
6130
|
__privateAdd$3(this, _map, void 0);
|
4202
|
-
|
4203
|
-
__publicField$3(this, "defaultQueryTTL");
|
4204
|
-
__privateSet$3(this, _map, /* @__PURE__ */ new Map());
|
6131
|
+
__privateSet$1(this, _map, /* @__PURE__ */ new Map());
|
4205
6132
|
this.capacity = options.max ?? 500;
|
4206
6133
|
this.defaultQueryTTL = options.defaultQueryTTL ?? 60 * 1e3;
|
4207
6134
|
}
|
4208
6135
|
async getAll() {
|
4209
|
-
return Object.fromEntries(__privateGet$
|
6136
|
+
return Object.fromEntries(__privateGet$2(this, _map));
|
4210
6137
|
}
|
4211
6138
|
async get(key) {
|
4212
|
-
return __privateGet$
|
6139
|
+
return __privateGet$2(this, _map).get(key) ?? null;
|
4213
6140
|
}
|
4214
6141
|
async set(key, value) {
|
4215
6142
|
await this.delete(key);
|
4216
|
-
__privateGet$
|
4217
|
-
if (__privateGet$
|
4218
|
-
const leastRecentlyUsed = __privateGet$
|
6143
|
+
__privateGet$2(this, _map).set(key, value);
|
6144
|
+
if (__privateGet$2(this, _map).size > this.capacity) {
|
6145
|
+
const leastRecentlyUsed = __privateGet$2(this, _map).keys().next().value;
|
4219
6146
|
await this.delete(leastRecentlyUsed);
|
4220
6147
|
}
|
4221
6148
|
}
|
4222
6149
|
async delete(key) {
|
4223
|
-
__privateGet$
|
6150
|
+
__privateGet$2(this, _map).delete(key);
|
4224
6151
|
}
|
4225
6152
|
async clear() {
|
4226
|
-
return __privateGet$
|
6153
|
+
return __privateGet$2(this, _map).clear();
|
4227
6154
|
}
|
4228
6155
|
}
|
4229
6156
|
_map = new WeakMap();
|
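
`SimpleCache` now keeps its entries in a single private Map and evicts the oldest key once the size exceeds `capacity` (`options.max`, default 500). A minimal sketch of that eviction behaviour using only the exported class:

  import { SimpleCache } from "@xata.io/client";

  const cache = new SimpleCache({ max: 2, defaultQueryTTL: 60_000 });

  await cache.set("a", 1);
  await cache.set("b", 2);
  await cache.set("c", 3); // size exceeds max: the oldest key ("a") is evicted

  console.log(await cache.get("a"));  // null
  console.log(await cache.getAll());  // { b: 2, c: 3 }
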
@@ -4245,10 +6172,12 @@ const notExists = (column) => ({ $notExists: column });
|
|
4245
6172
|
const startsWith = (value) => ({ $startsWith: value });
|
4246
6173
|
const endsWith = (value) => ({ $endsWith: value });
|
4247
6174
|
const pattern = (value) => ({ $pattern: value });
|
6175
|
+
const iPattern = (value) => ({ $iPattern: value });
|
4248
6176
|
const is = (value) => ({ $is: value });
|
4249
6177
|
const equals = is;
|
4250
6178
|
const isNot = (value) => ({ $isNot: value });
|
4251
6179
|
const contains = (value) => ({ $contains: value });
|
6180
|
+
const iContains = (value) => ({ $iContains: value });
|
4252
6181
|
const includes = (value) => ({ $includes: value });
|
4253
6182
|
const includesAll = (value) => ({ $includesAll: value });
|
4254
6183
|
const includesNone = (value) => ({ $includesNone: value });
|
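
`iPattern` and `iContains` are new case-insensitive counterparts of the existing `pattern` and `contains` helpers, producing `$iPattern` / `$iContains` filter objects. A minimal sketch of using them in a query, with the `posts` table and `title` column purely illustrative:

  import { BaseClient, iContains, iPattern } from "@xata.io/client";

  const xata = new BaseClient({ apiKey: process.env.XATA_API_KEY, databaseURL: process.env.XATA_DATABASE_URL });

  // Case-insensitive substring match ($iContains).
  const drafts = await xata.db.posts.filter("title", iContains("draft")).getMany();

  // Case-insensitive wildcard pattern ($iPattern), mirroring pattern().
  const weekly = await xata.db.posts.filter("title", iPattern("weekly *")).getMany();

  console.log(drafts.length, weekly.length);
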
@@ -4258,7 +6187,7 @@ var __accessCheck$2 = (obj, member, msg) => {
|
|
4258
6187
|
if (!member.has(obj))
|
4259
6188
|
throw TypeError("Cannot " + msg);
|
4260
6189
|
};
|
4261
|
-
var __privateGet$
|
6190
|
+
var __privateGet$1 = (obj, member, getter) => {
|
4262
6191
|
__accessCheck$2(obj, member, "read from private field");
|
4263
6192
|
return getter ? getter.call(obj) : member.get(obj);
|
4264
6193
|
};
|
@@ -4267,18 +6196,11 @@ var __privateAdd$2 = (obj, member, value) => {
|
|
4267
6196
|
throw TypeError("Cannot add the same private member more than once");
|
4268
6197
|
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
4269
6198
|
};
|
4270
|
-
var
|
4271
|
-
__accessCheck$2(obj, member, "write to private field");
|
4272
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
4273
|
-
return value;
|
4274
|
-
};
|
4275
|
-
var _tables, _schemaTables$1;
|
6199
|
+
var _tables;
|
4276
6200
|
class SchemaPlugin extends XataPlugin {
|
4277
|
-
constructor(
|
6201
|
+
constructor() {
|
4278
6202
|
super();
|
4279
6203
|
__privateAdd$2(this, _tables, {});
|
4280
|
-
__privateAdd$2(this, _schemaTables$1, void 0);
|
4281
|
-
__privateSet$2(this, _schemaTables$1, schemaTables);
|
4282
6204
|
}
|
4283
6205
|
build(pluginOptions) {
|
4284
6206
|
const db = new Proxy(
|
@@ -4287,102 +6209,268 @@ class SchemaPlugin extends XataPlugin {
|
|
4287
6209
|
get: (_target, table) => {
|
4288
6210
|
if (!isString(table))
|
4289
6211
|
throw new Error("Invalid table name");
|
4290
|
-
if (__privateGet$
|
4291
|
-
__privateGet$
|
6212
|
+
if (__privateGet$1(this, _tables)[table] === void 0) {
|
6213
|
+
__privateGet$1(this, _tables)[table] = new RestRepository({ db, pluginOptions, table, schemaTables: pluginOptions.tables });
|
4292
6214
|
}
|
4293
|
-
return __privateGet$
|
6215
|
+
return __privateGet$1(this, _tables)[table];
|
4294
6216
|
}
|
4295
6217
|
}
|
4296
6218
|
);
|
4297
|
-
const tableNames =
|
6219
|
+
const tableNames = pluginOptions.tables?.map(({ name }) => name) ?? [];
|
4298
6220
|
for (const table of tableNames) {
|
4299
|
-
db[table] = new RestRepository({ db, pluginOptions, table, schemaTables:
|
6221
|
+
db[table] = new RestRepository({ db, pluginOptions, table, schemaTables: pluginOptions.tables });
|
4300
6222
|
}
|
4301
6223
|
return db;
|
4302
6224
|
}
|
4303
6225
|
}
|
4304
6226
|
_tables = new WeakMap();
|
4305
|
-
|
6227
|
+
|
6228
|
+
class FilesPlugin extends XataPlugin {
|
6229
|
+
build(pluginOptions) {
|
6230
|
+
return {
|
6231
|
+
download: async (location) => {
|
6232
|
+
const { table, record, column, fileId = "" } = location ?? {};
|
6233
|
+
return await getFileItem({
|
6234
|
+
pathParams: {
|
6235
|
+
workspace: "{workspaceId}",
|
6236
|
+
dbBranchName: "{dbBranch}",
|
6237
|
+
region: "{region}",
|
6238
|
+
tableName: table ?? "",
|
6239
|
+
recordId: record ?? "",
|
6240
|
+
columnName: column ?? "",
|
6241
|
+
fileId
|
6242
|
+
},
|
6243
|
+
...pluginOptions,
|
6244
|
+
rawResponse: true
|
6245
|
+
});
|
6246
|
+
},
|
6247
|
+
upload: async (location, file, options) => {
|
6248
|
+
const { table, record, column, fileId = "" } = location ?? {};
|
6249
|
+
const resolvedFile = await file;
|
6250
|
+
const contentType = options?.mediaType || getContentType(resolvedFile);
|
6251
|
+
const body = resolvedFile instanceof XataFile ? resolvedFile.toBlob() : resolvedFile;
|
6252
|
+
return await putFileItem({
|
6253
|
+
...pluginOptions,
|
6254
|
+
pathParams: {
|
6255
|
+
workspace: "{workspaceId}",
|
6256
|
+
dbBranchName: "{dbBranch}",
|
6257
|
+
region: "{region}",
|
6258
|
+
tableName: table ?? "",
|
6259
|
+
recordId: record ?? "",
|
6260
|
+
columnName: column ?? "",
|
6261
|
+
fileId
|
6262
|
+
},
|
6263
|
+
body,
|
6264
|
+
headers: { "Content-Type": contentType }
|
6265
|
+
});
|
6266
|
+
},
|
6267
|
+
delete: async (location) => {
|
6268
|
+
const { table, record, column, fileId = "" } = location ?? {};
|
6269
|
+
return await deleteFileItem({
|
6270
|
+
pathParams: {
|
6271
|
+
workspace: "{workspaceId}",
|
6272
|
+
dbBranchName: "{dbBranch}",
|
6273
|
+
region: "{region}",
|
6274
|
+
tableName: table ?? "",
|
6275
|
+
recordId: record ?? "",
|
6276
|
+
columnName: column ?? "",
|
6277
|
+
fileId
|
6278
|
+
},
|
6279
|
+
...pluginOptions
|
6280
|
+
});
|
6281
|
+
}
|
6282
|
+
};
|
6283
|
+
}
|
6284
|
+
}
|
6285
|
+
function getContentType(file) {
|
6286
|
+
if (typeof file === "string") {
|
6287
|
+
return "text/plain";
|
6288
|
+
}
|
6289
|
+
if ("mediaType" in file && file.mediaType !== void 0) {
|
6290
|
+
return file.mediaType;
|
6291
|
+
}
|
6292
|
+
if (isBlob(file)) {
|
6293
|
+
return file.type;
|
6294
|
+
}
|
6295
|
+
try {
|
6296
|
+
return file.type;
|
6297
|
+
} catch (e) {
|
6298
|
+
}
|
6299
|
+
return "application/octet-stream";
|
6300
|
+
}
|
4306
6301
|
|
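
The new `FilesPlugin` builds `download`, `upload` and `delete` helpers on top of the file-item endpoints; `getContentType` infers a media type from the input (plain strings, `XataFile` instances or Blobs) and falls back to `application/octet-stream`, unless `options.mediaType` overrides it on upload. A minimal usage sketch, where the table, column and record id are illustrative only:

  import { BaseClient } from "@xata.io/client";

  const xata = new BaseClient({ apiKey: process.env.XATA_API_KEY, databaseURL: process.env.XATA_DATABASE_URL });

  const location = { table: "users", record: "rec_123", column: "avatar" };

  // Upload a Blob; its type feeds getContentType unless options.mediaType is passed.
  await xata.files.upload(location, new Blob(["binary image data"], { type: "image/png" }));

  // Download the stored file for the same location (the request sets rawResponse: true).
  const file = await xata.files.download(location);

  // Remove the file item from the column.
  await xata.files.delete(location);
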
4307
6302
|
var __accessCheck$1 = (obj, member, msg) => {
|
4308
6303
|
if (!member.has(obj))
|
4309
6304
|
throw TypeError("Cannot " + msg);
|
4310
6305
|
};
|
4311
|
-
var __privateGet$1 = (obj, member, getter) => {
|
4312
|
-
__accessCheck$1(obj, member, "read from private field");
|
4313
|
-
return getter ? getter.call(obj) : member.get(obj);
|
4314
|
-
};
|
4315
6306
|
var __privateAdd$1 = (obj, member, value) => {
|
4316
6307
|
if (member.has(obj))
|
4317
6308
|
throw TypeError("Cannot add the same private member more than once");
|
4318
6309
|
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
4319
6310
|
};
|
4320
|
-
var __privateSet$1 = (obj, member, value, setter) => {
|
4321
|
-
__accessCheck$1(obj, member, "write to private field");
|
4322
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
4323
|
-
return value;
|
4324
|
-
};
|
4325
6311
|
var __privateMethod$1 = (obj, member, method) => {
|
4326
6312
|
__accessCheck$1(obj, member, "access private method");
|
4327
6313
|
return method;
|
4328
6314
|
};
|
4329
|
-
var
|
6315
|
+
var _search, search_fn;
|
4330
6316
|
class SearchPlugin extends XataPlugin {
|
4331
|
-
constructor(db
|
6317
|
+
constructor(db) {
|
4332
6318
|
super();
|
4333
6319
|
this.db = db;
|
4334
6320
|
__privateAdd$1(this, _search);
|
4335
|
-
__privateAdd$1(this, _getSchemaTables);
|
4336
|
-
__privateAdd$1(this, _schemaTables, void 0);
|
4337
|
-
__privateSet$1(this, _schemaTables, schemaTables);
|
4338
6321
|
}
|
4339
6322
|
build(pluginOptions) {
|
4340
6323
|
return {
|
4341
6324
|
all: async (query, options = {}) => {
|
4342
|
-
const records = await __privateMethod$1(this, _search, search_fn).call(this, query, options, pluginOptions);
|
4343
|
-
|
4344
|
-
|
4345
|
-
|
4346
|
-
|
4347
|
-
|
6325
|
+
const { records, totalCount } = await __privateMethod$1(this, _search, search_fn).call(this, query, options, pluginOptions);
|
6326
|
+
return {
|
6327
|
+
totalCount,
|
6328
|
+
records: records.map((record) => {
|
6329
|
+
const { table = "orphan" } = record.xata;
|
6330
|
+
return { table, record: initObject(this.db, pluginOptions.tables, table, record, ["*"]) };
|
6331
|
+
})
|
6332
|
+
};
|
4348
6333
|
},
|
4349
6334
|
byTable: async (query, options = {}) => {
|
4350
|
-
const records = await __privateMethod$1(this, _search, search_fn).call(this, query, options, pluginOptions);
|
4351
|
-
const
|
4352
|
-
return records.reduce((acc, record) => {
|
6335
|
+
const { records: rawRecords, totalCount } = await __privateMethod$1(this, _search, search_fn).call(this, query, options, pluginOptions);
|
6336
|
+
const records = rawRecords.reduce((acc, record) => {
|
4353
6337
|
const { table = "orphan" } = record.xata;
|
4354
6338
|
const items = acc[table] ?? [];
|
4355
|
-
const item = initObject(this.db,
|
6339
|
+
const item = initObject(this.db, pluginOptions.tables, table, record, ["*"]);
|
4356
6340
|
return { ...acc, [table]: [...items, item] };
|
4357
6341
|
}, {});
|
6342
|
+
return { totalCount, records };
|
4358
6343
|
}
|
4359
6344
|
};
|
4360
6345
|
}
|
4361
6346
|
}
|
4362
|
-
_schemaTables = new WeakMap();
|
4363
6347
|
_search = new WeakSet();
|
4364
6348
|
search_fn = async function(query, options, pluginOptions) {
|
4365
6349
|
const { tables, fuzziness, highlight, prefix, page } = options ?? {};
|
4366
|
-
const { records } = await searchBranch({
|
6350
|
+
const { records, totalCount } = await searchBranch({
|
4367
6351
|
pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
|
4368
|
-
// @ts-
|
6352
|
+
// @ts-expect-error Filter properties do not match inferred type
|
4369
6353
|
body: { tables, query, fuzziness, prefix, highlight, page },
|
4370
6354
|
...pluginOptions
|
4371
6355
|
});
|
4372
|
-
return records;
|
4373
|
-
};
|
4374
|
-
_getSchemaTables = new WeakSet();
|
4375
|
-
getSchemaTables_fn = async function(pluginOptions) {
|
4376
|
-
if (__privateGet$1(this, _schemaTables))
|
4377
|
-
return __privateGet$1(this, _schemaTables);
|
4378
|
-
const { schema } = await getBranchDetails({
|
4379
|
-
pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
|
4380
|
-
...pluginOptions
|
4381
|
-
});
|
4382
|
-
__privateSet$1(this, _schemaTables, schema.tables);
|
4383
|
-
return schema.tables;
|
6356
|
+
return { records, totalCount };
|
4384
6357
|
};
|
4385
6358
|
|
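
`search.all` and `search.byTable` now return the `totalCount` reported by `searchBranch` next to the hydrated records, instead of a bare record list. A minimal sketch of consuming the new shape, with the table names illustrative:

  import { BaseClient } from "@xata.io/client";

  const xata = new BaseClient({ apiKey: process.env.XATA_API_KEY, databaseURL: process.env.XATA_DATABASE_URL });

  // all(): each hit is tagged with the table it came from.
  const { totalCount, records } = await xata.search.all("marketing", { tables: ["posts", "users"], fuzziness: 1 });
  console.log(totalCount, records[0]?.table, records[0]?.record.id);

  // byTable(): hits grouped per table, plus the same totalCount.
  const grouped = await xata.search.byTable("marketing", { tables: ["posts"] });
  console.log(grouped.totalCount, grouped.records.posts?.length);
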
6359
|
+
function escapeElement(elementRepresentation) {
|
6360
|
+
const escaped = elementRepresentation.replace(/\\/g, "\\\\").replace(/"/g, '\\"');
|
6361
|
+
return '"' + escaped + '"';
|
6362
|
+
}
|
6363
|
+
function arrayString(val) {
|
6364
|
+
let result = "{";
|
6365
|
+
for (let i = 0; i < val.length; i++) {
|
6366
|
+
if (i > 0) {
|
6367
|
+
result = result + ",";
|
6368
|
+
}
|
6369
|
+
if (val[i] === null || typeof val[i] === "undefined") {
|
6370
|
+
result = result + "NULL";
|
6371
|
+
} else if (Array.isArray(val[i])) {
|
6372
|
+
result = result + arrayString(val[i]);
|
6373
|
+
} else if (val[i] instanceof Buffer) {
|
6374
|
+
result += "\\\\x" + val[i].toString("hex");
|
6375
|
+
} else {
|
6376
|
+
result += escapeElement(prepareValue(val[i]));
|
6377
|
+
}
|
6378
|
+
}
|
6379
|
+
result = result + "}";
|
6380
|
+
return result;
|
6381
|
+
}
|
6382
|
+
function prepareValue(value) {
|
6383
|
+
if (!isDefined(value))
|
6384
|
+
return null;
|
6385
|
+
if (value instanceof Date) {
|
6386
|
+
return value.toISOString();
|
6387
|
+
}
|
6388
|
+
if (Array.isArray(value)) {
|
6389
|
+
return arrayString(value);
|
6390
|
+
}
|
6391
|
+
if (isObject(value)) {
|
6392
|
+
return JSON.stringify(value);
|
6393
|
+
}
|
6394
|
+
try {
|
6395
|
+
return value.toString();
|
6396
|
+
} catch (e) {
|
6397
|
+
return value;
|
6398
|
+
}
|
6399
|
+
}
|
6400
|
+
function prepareParams(param1, param2) {
|
6401
|
+
if (isString(param1)) {
|
6402
|
+
return { statement: param1, params: param2?.map((value) => prepareValue(value)) };
|
6403
|
+
}
|
6404
|
+
if (isStringArray(param1)) {
|
6405
|
+
const statement = param1.reduce((acc, curr, index) => {
|
6406
|
+
return acc + curr + (index < (param2?.length ?? 0) ? "$" + (index + 1) : "");
|
6407
|
+
}, "");
|
6408
|
+
return { statement, params: param2?.map((value) => prepareValue(value)) };
|
6409
|
+
}
|
6410
|
+
if (isObject(param1)) {
|
6411
|
+
const { statement, params, consistency, responseType } = param1;
|
6412
|
+
return { statement, params: params?.map((value) => prepareValue(value)), consistency, responseType };
|
6413
|
+
}
|
6414
|
+
throw new Error("Invalid query");
|
6415
|
+
}
|
6416
|
+
|
6417
|
+
class SQLPlugin extends XataPlugin {
|
6418
|
+
build(pluginOptions) {
|
6419
|
+
const sqlFunction = async (query, ...parameters) => {
|
6420
|
+
if (!isParamsObject(query) && (!isTemplateStringsArray(query) || !Array.isArray(parameters))) {
|
6421
|
+
throw new Error("Invalid usage of `xata.sql`. Please use it as a tagged template or with an object.");
|
6422
|
+
}
|
6423
|
+
const { statement, params, consistency, responseType } = prepareParams(query, parameters);
|
6424
|
+
const {
|
6425
|
+
records,
|
6426
|
+
rows,
|
6427
|
+
warning,
|
6428
|
+
columns = []
|
6429
|
+
} = await sqlQuery({
|
6430
|
+
pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
|
6431
|
+
body: { statement, params, consistency, responseType },
|
6432
|
+
...pluginOptions
|
6433
|
+
});
|
6434
|
+
return { records, rows, warning, columns };
|
6435
|
+
};
|
6436
|
+
sqlFunction.connectionString = buildConnectionString(pluginOptions);
|
6437
|
+
return sqlFunction;
|
6438
|
+
}
|
6439
|
+
}
|
6440
|
+
function isTemplateStringsArray(strings) {
|
6441
|
+
return Array.isArray(strings) && "raw" in strings && Array.isArray(strings.raw);
|
6442
|
+
}
|
6443
|
+
function isParamsObject(params) {
|
6444
|
+
return isObject(params) && "statement" in params;
|
6445
|
+
}
|
6446
|
+
function buildDomain(host, region) {
|
6447
|
+
switch (host) {
|
6448
|
+
case "production":
|
6449
|
+
return `${region}.sql.xata.sh`;
|
6450
|
+
case "staging":
|
6451
|
+
return `${region}.sql.staging-xata.dev`;
|
6452
|
+
case "dev":
|
6453
|
+
return `${region}.sql.dev-xata.dev`;
|
6454
|
+
case "local":
|
6455
|
+
return "localhost:7654";
|
6456
|
+
default:
|
6457
|
+
throw new Error("Invalid host provider");
|
6458
|
+
}
|
6459
|
+
}
|
6460
|
+
function buildConnectionString({ apiKey, workspacesApiUrl, branch }) {
|
6461
|
+
const url = isString(workspacesApiUrl) ? workspacesApiUrl : workspacesApiUrl("", {});
|
6462
|
+
const parts = parseWorkspacesUrlParts(url);
|
6463
|
+
if (!parts)
|
6464
|
+
throw new Error("Invalid workspaces URL");
|
6465
|
+
const { workspace: workspaceSlug, region, database, host } = parts;
|
6466
|
+
const domain = buildDomain(host, region);
|
6467
|
+
const workspace = workspaceSlug.split("-").pop();
|
6468
|
+
if (!workspace || !region || !database || !apiKey || !branch) {
|
6469
|
+
throw new Error("Unable to build xata connection string");
|
6470
|
+
}
|
6471
|
+
return `postgresql://${workspace}:${apiKey}@${domain}/${database}:${branch}?sslmode=require`;
|
6472
|
+
}
|
6473
|
+
|
4386
6474
|
class TransactionPlugin extends XataPlugin {
|
4387
6475
|
build(pluginOptions) {
|
4388
6476
|
return {
|
@@ -4398,12 +6486,6 @@ class TransactionPlugin extends XataPlugin {
|
|
4398
6486
|
}
|
4399
6487
|
}
|
4400
6488
|
|
4401
|
-
var __defProp$2 = Object.defineProperty;
|
4402
|
-
var __defNormalProp$2 = (obj, key, value) => key in obj ? __defProp$2(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
|
4403
|
-
var __publicField$2 = (obj, key, value) => {
|
4404
|
-
__defNormalProp$2(obj, typeof key !== "symbol" ? key + "" : key, value);
|
4405
|
-
return value;
|
4406
|
-
};
|
4407
6489
|
var __accessCheck = (obj, member, msg) => {
|
4408
6490
|
if (!member.has(obj))
|
4409
6491
|
throw TypeError("Cannot " + msg);
|
@@ -4429,28 +6511,29 @@ var __privateMethod = (obj, member, method) => {
|
|
4429
6511
|
const buildClient = (plugins) => {
|
4430
6512
|
var _options, _parseOptions, parseOptions_fn, _getFetchProps, getFetchProps_fn, _a;
|
4431
6513
|
return _a = class {
|
4432
|
-
constructor(options = {},
|
6514
|
+
constructor(options = {}, tables) {
|
4433
6515
|
__privateAdd(this, _parseOptions);
|
4434
6516
|
__privateAdd(this, _getFetchProps);
|
4435
6517
|
__privateAdd(this, _options, void 0);
|
4436
|
-
__publicField$2(this, "db");
|
4437
|
-
__publicField$2(this, "search");
|
4438
|
-
__publicField$2(this, "transactions");
|
4439
|
-
__publicField$2(this, "files");
|
4440
6518
|
const safeOptions = __privateMethod(this, _parseOptions, parseOptions_fn).call(this, options);
|
4441
6519
|
__privateSet(this, _options, safeOptions);
|
4442
6520
|
const pluginOptions = {
|
4443
6521
|
...__privateMethod(this, _getFetchProps, getFetchProps_fn).call(this, safeOptions),
|
4444
6522
|
cache: safeOptions.cache,
|
4445
|
-
host: safeOptions.host
|
6523
|
+
host: safeOptions.host,
|
6524
|
+
tables,
|
6525
|
+
branch: safeOptions.branch
|
4446
6526
|
};
|
4447
|
-
const db = new SchemaPlugin(
|
4448
|
-
const search = new SearchPlugin(db
|
6527
|
+
const db = new SchemaPlugin().build(pluginOptions);
|
6528
|
+
const search = new SearchPlugin(db).build(pluginOptions);
|
4449
6529
|
const transactions = new TransactionPlugin().build(pluginOptions);
|
6530
|
+
const sql = new SQLPlugin().build(pluginOptions);
|
4450
6531
|
const files = new FilesPlugin().build(pluginOptions);
|
6532
|
+
this.schema = { tables };
|
4451
6533
|
this.db = db;
|
4452
6534
|
this.search = search;
|
4453
6535
|
this.transactions = transactions;
|
6536
|
+
this.sql = sql;
|
4454
6537
|
this.files = files;
|
4455
6538
|
for (const [key, namespace] of Object.entries(plugins ?? {})) {
|
4456
6539
|
if (namespace === void 0)
|
@@ -4468,7 +6551,7 @@ const buildClient = (plugins) => {
|
|
4468
6551
|
const isBrowser = typeof window !== "undefined" && typeof Deno === "undefined";
|
4469
6552
|
if (isBrowser && !enableBrowser) {
|
4470
6553
|
throw new Error(
|
4471
|
-
"You are trying to use Xata from the browser, which is potentially a non-secure environment.
|
6554
|
+
"You are trying to use Xata from the browser, which is potentially a non-secure environment. How to fix: https://xata.io/docs/messages/api-key-browser-error"
|
4472
6555
|
);
|
4473
6556
|
}
|
4474
6557
|
const fetch = getFetchImplementation(options?.fetch);
|
@@ -4548,17 +6631,11 @@ const buildClient = (plugins) => {
|
|
4548
6631
|
class BaseClient extends buildClient() {
|
4549
6632
|
}
|
4550
6633
|
|
4551
|
-
var __defProp$1 = Object.defineProperty;
|
4552
|
-
var __defNormalProp$1 = (obj, key, value) => key in obj ? __defProp$1(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
|
4553
|
-
var __publicField$1 = (obj, key, value) => {
|
4554
|
-
__defNormalProp$1(obj, typeof key !== "symbol" ? key + "" : key, value);
|
4555
|
-
return value;
|
4556
|
-
};
|
4557
6634
|
const META = "__";
|
4558
6635
|
const VALUE = "___";
|
4559
6636
|
class Serializer {
|
4560
6637
|
constructor() {
|
4561
|
-
|
6638
|
+
this.classes = {};
|
4562
6639
|
}
|
4563
6640
|
add(clazz) {
|
4564
6641
|
this.classes[clazz.name] = clazz;
|
@@ -4621,36 +6698,15 @@ const deserialize = (json) => {
|
|
4621
6698
|
return defaultSerializer.fromJSON(json);
|
4622
6699
|
};
|
4623
6700
|
|
4624
|
-
function buildWorkerRunner(config) {
|
4625
|
-
return function xataWorker(name, worker) {
|
4626
|
-
return async (...args) => {
|
4627
|
-
const url = process.env.NODE_ENV === "development" ? `http://localhost:64749/${name}` : `https://dispatcher.xata.workers.dev/${config.workspace}/${config.worker}/${name}`;
|
4628
|
-
const result = await fetch(url, {
|
4629
|
-
method: "POST",
|
4630
|
-
headers: { "Content-Type": "application/json" },
|
4631
|
-
body: serialize({ args })
|
4632
|
-
});
|
4633
|
-
const text = await result.text();
|
4634
|
-
return deserialize(text);
|
4635
|
-
};
|
4636
|
-
};
|
4637
|
-
}
|
4638
|
-
|
4639
|
-
var __defProp = Object.defineProperty;
|
4640
|
-
var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
|
4641
|
-
var __publicField = (obj, key, value) => {
|
4642
|
-
__defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);
|
4643
|
-
return value;
|
4644
|
-
};
|
4645
6701
|
class XataError extends Error {
|
4646
6702
|
constructor(message, status) {
|
4647
6703
|
super(message);
|
4648
|
-
__publicField(this, "status");
|
4649
6704
|
this.status = status;
|
4650
6705
|
}
|
4651
6706
|
}
|
4652
6707
|
|
4653
6708
|
exports.BaseClient = BaseClient;
|
6709
|
+
exports.Buffer = Buffer;
|
4654
6710
|
exports.FetcherError = FetcherError;
|
4655
6711
|
exports.FilesPlugin = FilesPlugin;
|
4656
6712
|
exports.Operations = operationsByTag;
|
@@ -4659,32 +6715,37 @@ exports.PAGINATION_DEFAULT_SIZE = PAGINATION_DEFAULT_SIZE;
|
|
4659
6715
|
exports.PAGINATION_MAX_OFFSET = PAGINATION_MAX_OFFSET;
|
4660
6716
|
exports.PAGINATION_MAX_SIZE = PAGINATION_MAX_SIZE;
|
4661
6717
|
exports.Page = Page;
|
6718
|
+
exports.PageRecordArray = PageRecordArray;
|
4662
6719
|
exports.Query = Query;
|
4663
6720
|
exports.RecordArray = RecordArray;
|
4664
6721
|
exports.RecordColumnTypes = RecordColumnTypes;
|
4665
6722
|
exports.Repository = Repository;
|
4666
6723
|
exports.RestRepository = RestRepository;
|
6724
|
+
exports.SQLPlugin = SQLPlugin;
|
4667
6725
|
exports.SchemaPlugin = SchemaPlugin;
|
4668
6726
|
exports.SearchPlugin = SearchPlugin;
|
4669
6727
|
exports.Serializer = Serializer;
|
4670
6728
|
exports.SimpleCache = SimpleCache;
|
6729
|
+
exports.TransactionPlugin = TransactionPlugin;
|
4671
6730
|
exports.XataApiClient = XataApiClient;
|
4672
6731
|
exports.XataApiPlugin = XataApiPlugin;
|
4673
6732
|
exports.XataError = XataError;
|
4674
6733
|
exports.XataFile = XataFile;
|
4675
6734
|
exports.XataPlugin = XataPlugin;
|
4676
6735
|
exports.acceptWorkspaceMemberInvite = acceptWorkspaceMemberInvite;
|
6736
|
+
exports.adaptAllTables = adaptAllTables;
|
6737
|
+
exports.adaptTable = adaptTable;
|
4677
6738
|
exports.addGitBranchesEntry = addGitBranchesEntry;
|
4678
6739
|
exports.addTableColumn = addTableColumn;
|
4679
6740
|
exports.aggregateTable = aggregateTable;
|
4680
6741
|
exports.applyBranchSchemaEdit = applyBranchSchemaEdit;
|
6742
|
+
exports.applyMigration = applyMigration;
|
4681
6743
|
exports.askTable = askTable;
|
4682
6744
|
exports.askTableSession = askTableSession;
|
4683
6745
|
exports.branchTransaction = branchTransaction;
|
4684
6746
|
exports.buildClient = buildClient;
|
4685
6747
|
exports.buildPreviewBranchName = buildPreviewBranchName;
|
4686
6748
|
exports.buildProviderString = buildProviderString;
|
4687
|
-
exports.buildWorkerRunner = buildWorkerRunner;
|
4688
6749
|
exports.bulkInsertTableRecords = bulkInsertTableRecords;
|
4689
6750
|
exports.cancelWorkspaceMemberInvite = cancelWorkspaceMemberInvite;
|
4690
6751
|
exports.compareBranchSchemas = compareBranchSchemas;
|
@@ -4693,6 +6754,7 @@ exports.compareMigrationRequest = compareMigrationRequest;
|
|
4693
6754
|
exports.contains = contains;
|
4694
6755
|
exports.copyBranch = copyBranch;
|
4695
6756
|
exports.createBranch = createBranch;
|
6757
|
+
exports.createCluster = createCluster;
|
4696
6758
|
exports.createDatabase = createDatabase;
|
4697
6759
|
exports.createMigrationRequest = createMigrationRequest;
|
4698
6760
|
exports.createTable = createTable;
|
@@ -4704,10 +6766,12 @@ exports.deleteDatabase = deleteDatabase;
|
|
4704
6766
|
exports.deleteDatabaseGithubSettings = deleteDatabaseGithubSettings;
|
4705
6767
|
exports.deleteFile = deleteFile;
|
4706
6768
|
exports.deleteFileItem = deleteFileItem;
|
6769
|
+
exports.deleteOAuthAccessToken = deleteOAuthAccessToken;
|
4707
6770
|
exports.deleteRecord = deleteRecord;
|
4708
6771
|
exports.deleteTable = deleteTable;
|
4709
6772
|
exports.deleteUser = deleteUser;
|
4710
6773
|
exports.deleteUserAPIKey = deleteUserAPIKey;
|
6774
|
+
exports.deleteUserOAuthClient = deleteUserOAuthClient;
|
4711
6775
|
exports.deleteWorkspace = deleteWorkspace;
|
4712
6776
|
exports.deserialize = deserialize;
|
4713
6777
|
exports.endsWith = endsWith;
|
@@ -4715,6 +6779,7 @@ exports.equals = equals;
|
|
4715
6779
|
exports.executeBranchMigrationPlan = executeBranchMigrationPlan;
|
4716
6780
|
exports.exists = exists;
|
4717
6781
|
exports.fileAccess = fileAccess;
|
6782
|
+
exports.fileUpload = fileUpload;
|
4718
6783
|
exports.ge = ge;
|
4719
6784
|
exports.getAPIKey = getAPIKey;
|
4720
6785
|
exports.getAuthorizationCode = getAuthorizationCode;
|
@@ -4723,22 +6788,28 @@ exports.getBranchDetails = getBranchDetails;
|
|
4723
6788
|
exports.getBranchList = getBranchList;
|
4724
6789
|
exports.getBranchMetadata = getBranchMetadata;
|
4725
6790
|
exports.getBranchMigrationHistory = getBranchMigrationHistory;
|
6791
|
+
exports.getBranchMigrationJobStatus = getBranchMigrationJobStatus;
|
4726
6792
|
exports.getBranchMigrationPlan = getBranchMigrationPlan;
|
4727
6793
|
exports.getBranchSchemaHistory = getBranchSchemaHistory;
|
4728
6794
|
exports.getBranchStats = getBranchStats;
|
6795
|
+
exports.getCluster = getCluster;
|
4729
6796
|
exports.getColumn = getColumn;
|
4730
6797
|
exports.getDatabaseGithubSettings = getDatabaseGithubSettings;
|
4731
6798
|
exports.getDatabaseList = getDatabaseList;
|
4732
6799
|
exports.getDatabaseMetadata = getDatabaseMetadata;
|
6800
|
+
exports.getDatabaseSettings = getDatabaseSettings;
|
4733
6801
|
exports.getDatabaseURL = getDatabaseURL;
|
4734
6802
|
exports.getFile = getFile;
|
4735
6803
|
exports.getFileItem = getFileItem;
|
4736
6804
|
exports.getGitBranchesMapping = getGitBranchesMapping;
|
4737
6805
|
exports.getHostUrl = getHostUrl;
|
6806
|
+
exports.getMigrationHistory = getMigrationHistory;
|
6807
|
+
exports.getMigrationJobStatus = getMigrationJobStatus;
|
4738
6808
|
exports.getMigrationRequest = getMigrationRequest;
|
4739
6809
|
exports.getMigrationRequestIsMerged = getMigrationRequestIsMerged;
|
4740
6810
|
exports.getPreviewBranch = getPreviewBranch;
|
4741
6811
|
exports.getRecord = getRecord;
|
6812
|
+
exports.getSchema = getSchema;
|
4742
6813
|
exports.getTableColumns = getTableColumns;
|
4743
6814
|
exports.getTableSchema = getTableSchema;
|
4744
6815
|
exports.getUser = getUser;
|
@@ -4747,6 +6818,7 @@ exports.getUserOAuthAccessTokens = getUserOAuthAccessTokens;
|
|
4747
6818
|
exports.getUserOAuthClients = getUserOAuthClients;
|
4748
6819
|
exports.getWorkspace = getWorkspace;
|
4749
6820
|
exports.getWorkspaceMembersList = getWorkspaceMembersList;
|
6821
|
+
exports.getWorkspaceSettings = getWorkspaceSettings;
|
4750
6822
|
exports.getWorkspacesList = getWorkspacesList;
|
4751
6823
|
exports.grantAuthorizationCode = grantAuthorizationCode;
|
4752
6824
|
exports.greaterEquals = greaterEquals;
|
@@ -4754,6 +6826,8 @@ exports.greaterThan = greaterThan;
|
|
4754
6826
|
exports.greaterThanEquals = greaterThanEquals;
|
4755
6827
|
exports.gt = gt;
|
4756
6828
|
exports.gte = gte;
|
6829
|
+
exports.iContains = iContains;
|
6830
|
+
exports.iPattern = iPattern;
|
4757
6831
|
exports.includes = includes;
|
4758
6832
|
exports.includesAll = includesAll;
|
4759
6833
|
exports.includesAny = includesAny;
|
@@ -4767,11 +6841,14 @@ exports.isHostProviderAlias = isHostProviderAlias;
|
|
4767
6841
|
exports.isHostProviderBuilder = isHostProviderBuilder;
|
4768
6842
|
exports.isIdentifiable = isIdentifiable;
|
4769
6843
|
exports.isNot = isNot;
|
6844
|
+
exports.isValidExpandedColumn = isValidExpandedColumn;
|
6845
|
+
exports.isValidSelectableColumns = isValidSelectableColumns;
|
4770
6846
|
exports.isXataRecord = isXataRecord;
|
4771
6847
|
exports.le = le;
|
4772
6848
|
exports.lessEquals = lessEquals;
|
4773
6849
|
exports.lessThan = lessThan;
|
4774
6850
|
exports.lessThanEquals = lessThanEquals;
|
6851
|
+
exports.listClusters = listClusters;
|
4775
6852
|
exports.listMigrationRequestsCommits = listMigrationRequestsCommits;
|
4776
6853
|
exports.listRegions = listRegions;
|
4777
6854
|
exports.lt = lt;
|
@@ -4803,16 +6880,20 @@ exports.summarizeTable = summarizeTable;
|
|
4803
6880
|
exports.transformImage = transformImage;
|
4804
6881
|
exports.updateBranchMetadata = updateBranchMetadata;
|
4805
6882
|
exports.updateBranchSchema = updateBranchSchema;
|
6883
|
+
exports.updateCluster = updateCluster;
|
4806
6884
|
exports.updateColumn = updateColumn;
|
4807
6885
|
exports.updateDatabaseGithubSettings = updateDatabaseGithubSettings;
|
4808
6886
|
exports.updateDatabaseMetadata = updateDatabaseMetadata;
|
6887
|
+
exports.updateDatabaseSettings = updateDatabaseSettings;
|
4809
6888
|
exports.updateMigrationRequest = updateMigrationRequest;
|
6889
|
+
exports.updateOAuthAccessToken = updateOAuthAccessToken;
|
4810
6890
|
exports.updateRecordWithID = updateRecordWithID;
|
4811
6891
|
exports.updateTable = updateTable;
|
4812
6892
|
exports.updateUser = updateUser;
|
4813
6893
|
exports.updateWorkspace = updateWorkspace;
|
4814
6894
|
exports.updateWorkspaceMemberInvite = updateWorkspaceMemberInvite;
|
4815
6895
|
exports.updateWorkspaceMemberRole = updateWorkspaceMemberRole;
|
6896
|
+
exports.updateWorkspaceSettings = updateWorkspaceSettings;
|
4816
6897
|
exports.upsertRecordWithID = upsertRecordWithID;
|
4817
6898
|
exports.vectorSearchTable = vectorSearchTable;
|
4818
6899
|
//# sourceMappingURL=index.cjs.map
|