@xata.io/client 0.0.0-alpha.vf7ac0d1 → 0.0.0-alpha.vf7b3447057053443041e94106d7efe270aaea321
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-add-version.log +1 -1
- package/.turbo/turbo-build.log +4 -9
- package/CHANGELOG.md +173 -1
- package/dist/index.cjs +2718 -637
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.ts +5801 -4165
- package/dist/index.mjs +2692 -637
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.mjs
CHANGED
@@ -18,9 +18,1818 @@ const TraceAttributes = {
|
|
18
18
|
HTTP_METHOD: "http.method",
|
19
19
|
HTTP_URL: "http.url",
|
20
20
|
HTTP_ROUTE: "http.route",
|
21
|
-
HTTP_TARGET: "http.target"
|
21
|
+
HTTP_TARGET: "http.target",
|
22
|
+
CLOUDFLARE_RAY_ID: "cf.ray"
|
22
23
|
};
|
23
24
|
|
25
|
+
const lookup = [];
|
26
|
+
const revLookup = [];
|
27
|
+
const code = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
|
28
|
+
for (let i = 0, len = code.length; i < len; ++i) {
|
29
|
+
lookup[i] = code[i];
|
30
|
+
revLookup[code.charCodeAt(i)] = i;
|
31
|
+
}
|
32
|
+
revLookup["-".charCodeAt(0)] = 62;
|
33
|
+
revLookup["_".charCodeAt(0)] = 63;
|
34
|
+
function getLens(b64) {
|
35
|
+
const len = b64.length;
|
36
|
+
if (len % 4 > 0) {
|
37
|
+
throw new Error("Invalid string. Length must be a multiple of 4");
|
38
|
+
}
|
39
|
+
let validLen = b64.indexOf("=");
|
40
|
+
if (validLen === -1)
|
41
|
+
validLen = len;
|
42
|
+
const placeHoldersLen = validLen === len ? 0 : 4 - validLen % 4;
|
43
|
+
return [validLen, placeHoldersLen];
|
44
|
+
}
|
45
|
+
function _byteLength(_b64, validLen, placeHoldersLen) {
|
46
|
+
return (validLen + placeHoldersLen) * 3 / 4 - placeHoldersLen;
|
47
|
+
}
|
48
|
+
function toByteArray(b64) {
|
49
|
+
let tmp;
|
50
|
+
const lens = getLens(b64);
|
51
|
+
const validLen = lens[0];
|
52
|
+
const placeHoldersLen = lens[1];
|
53
|
+
const arr = new Uint8Array(_byteLength(b64, validLen, placeHoldersLen));
|
54
|
+
let curByte = 0;
|
55
|
+
const len = placeHoldersLen > 0 ? validLen - 4 : validLen;
|
56
|
+
let i;
|
57
|
+
for (i = 0; i < len; i += 4) {
|
58
|
+
tmp = revLookup[b64.charCodeAt(i)] << 18 | revLookup[b64.charCodeAt(i + 1)] << 12 | revLookup[b64.charCodeAt(i + 2)] << 6 | revLookup[b64.charCodeAt(i + 3)];
|
59
|
+
arr[curByte++] = tmp >> 16 & 255;
|
60
|
+
arr[curByte++] = tmp >> 8 & 255;
|
61
|
+
arr[curByte++] = tmp & 255;
|
62
|
+
}
|
63
|
+
if (placeHoldersLen === 2) {
|
64
|
+
tmp = revLookup[b64.charCodeAt(i)] << 2 | revLookup[b64.charCodeAt(i + 1)] >> 4;
|
65
|
+
arr[curByte++] = tmp & 255;
|
66
|
+
}
|
67
|
+
if (placeHoldersLen === 1) {
|
68
|
+
tmp = revLookup[b64.charCodeAt(i)] << 10 | revLookup[b64.charCodeAt(i + 1)] << 4 | revLookup[b64.charCodeAt(i + 2)] >> 2;
|
69
|
+
arr[curByte++] = tmp >> 8 & 255;
|
70
|
+
arr[curByte++] = tmp & 255;
|
71
|
+
}
|
72
|
+
return arr;
|
73
|
+
}
|
74
|
+
function tripletToBase64(num) {
|
75
|
+
return lookup[num >> 18 & 63] + lookup[num >> 12 & 63] + lookup[num >> 6 & 63] + lookup[num & 63];
|
76
|
+
}
|
77
|
+
function encodeChunk(uint8, start, end) {
|
78
|
+
let tmp;
|
79
|
+
const output = [];
|
80
|
+
for (let i = start; i < end; i += 3) {
|
81
|
+
tmp = (uint8[i] << 16 & 16711680) + (uint8[i + 1] << 8 & 65280) + (uint8[i + 2] & 255);
|
82
|
+
output.push(tripletToBase64(tmp));
|
83
|
+
}
|
84
|
+
return output.join("");
|
85
|
+
}
|
86
|
+
function fromByteArray(uint8) {
|
87
|
+
let tmp;
|
88
|
+
const len = uint8.length;
|
89
|
+
const extraBytes = len % 3;
|
90
|
+
const parts = [];
|
91
|
+
const maxChunkLength = 16383;
|
92
|
+
for (let i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) {
|
93
|
+
parts.push(encodeChunk(uint8, i, i + maxChunkLength > len2 ? len2 : i + maxChunkLength));
|
94
|
+
}
|
95
|
+
if (extraBytes === 1) {
|
96
|
+
tmp = uint8[len - 1];
|
97
|
+
parts.push(lookup[tmp >> 2] + lookup[tmp << 4 & 63] + "==");
|
98
|
+
} else if (extraBytes === 2) {
|
99
|
+
tmp = (uint8[len - 2] << 8) + uint8[len - 1];
|
100
|
+
parts.push(lookup[tmp >> 10] + lookup[tmp >> 4 & 63] + lookup[tmp << 2 & 63] + "=");
|
101
|
+
}
|
102
|
+
return parts.join("");
|
103
|
+
}
|
104
|
+
|
105
|
+
const K_MAX_LENGTH = 2147483647;
|
106
|
+
const MAX_ARGUMENTS_LENGTH = 4096;
|
107
|
+
class Buffer extends Uint8Array {
|
108
|
+
/**
|
109
|
+
* Constructs a new `Buffer` instance.
|
110
|
+
*
|
111
|
+
* @param value
|
112
|
+
* @param encodingOrOffset
|
113
|
+
* @param length
|
114
|
+
*/
|
115
|
+
constructor(value, encodingOrOffset, length) {
|
116
|
+
if (typeof value === "number") {
|
117
|
+
if (typeof encodingOrOffset === "string") {
|
118
|
+
throw new TypeError("The first argument must be of type string, received type number");
|
119
|
+
}
|
120
|
+
if (value < 0) {
|
121
|
+
throw new RangeError("The buffer size cannot be negative");
|
122
|
+
}
|
123
|
+
super(value < 0 ? 0 : Buffer._checked(value) | 0);
|
124
|
+
} else if (typeof value === "string") {
|
125
|
+
if (typeof encodingOrOffset !== "string") {
|
126
|
+
encodingOrOffset = "utf8";
|
127
|
+
}
|
128
|
+
if (!Buffer.isEncoding(encodingOrOffset)) {
|
129
|
+
throw new TypeError("Unknown encoding: " + encodingOrOffset);
|
130
|
+
}
|
131
|
+
const length2 = Buffer.byteLength(value, encodingOrOffset) | 0;
|
132
|
+
super(length2);
|
133
|
+
const written = this.write(value, 0, this.length, encodingOrOffset);
|
134
|
+
if (written !== length2) {
|
135
|
+
throw new TypeError(
|
136
|
+
"Number of bytes written did not match expected length (wrote " + written + ", expected " + length2 + ")"
|
137
|
+
);
|
138
|
+
}
|
139
|
+
} else if (ArrayBuffer.isView(value)) {
|
140
|
+
if (Buffer._isInstance(value, Uint8Array)) {
|
141
|
+
const copy = new Uint8Array(value);
|
142
|
+
const array = copy.buffer;
|
143
|
+
const byteOffset = copy.byteOffset;
|
144
|
+
const length2 = copy.byteLength;
|
145
|
+
if (byteOffset < 0 || array.byteLength < byteOffset) {
|
146
|
+
throw new RangeError("offset is outside of buffer bounds");
|
147
|
+
}
|
148
|
+
if (array.byteLength < byteOffset + (length2 || 0)) {
|
149
|
+
throw new RangeError("length is outside of buffer bounds");
|
150
|
+
}
|
151
|
+
super(new Uint8Array(array, byteOffset, length2));
|
152
|
+
} else {
|
153
|
+
const array = value;
|
154
|
+
const length2 = array.length < 0 ? 0 : Buffer._checked(array.length) | 0;
|
155
|
+
super(new Uint8Array(length2));
|
156
|
+
for (let i = 0; i < length2; i++) {
|
157
|
+
this[i] = array[i] & 255;
|
158
|
+
}
|
159
|
+
}
|
160
|
+
} else if (value == null) {
|
161
|
+
throw new TypeError(
|
162
|
+
"The first argument must be one of type string, Buffer, ArrayBuffer, Array, or Array-like Object. Received type " + typeof value
|
163
|
+
);
|
164
|
+
} else if (Buffer._isInstance(value, ArrayBuffer) || value && Buffer._isInstance(value.buffer, ArrayBuffer)) {
|
165
|
+
const array = value;
|
166
|
+
const byteOffset = encodingOrOffset;
|
167
|
+
if (byteOffset < 0 || array.byteLength < byteOffset) {
|
168
|
+
throw new RangeError("offset is outside of buffer bounds");
|
169
|
+
}
|
170
|
+
if (array.byteLength < byteOffset + (length || 0)) {
|
171
|
+
throw new RangeError("length is outside of buffer bounds");
|
172
|
+
}
|
173
|
+
super(new Uint8Array(array, byteOffset, length));
|
174
|
+
} else if (Array.isArray(value)) {
|
175
|
+
const array = value;
|
176
|
+
const length2 = array.length < 0 ? 0 : Buffer._checked(array.length) | 0;
|
177
|
+
super(new Uint8Array(length2));
|
178
|
+
for (let i = 0; i < length2; i++) {
|
179
|
+
this[i] = array[i] & 255;
|
180
|
+
}
|
181
|
+
} else {
|
182
|
+
throw new TypeError("Unable to determine the correct way to allocate buffer for type " + typeof value);
|
183
|
+
}
|
184
|
+
}
|
185
|
+
/**
|
186
|
+
* Return JSON representation of the buffer.
|
187
|
+
*/
|
188
|
+
toJSON() {
|
189
|
+
return {
|
190
|
+
type: "Buffer",
|
191
|
+
data: Array.prototype.slice.call(this)
|
192
|
+
};
|
193
|
+
}
|
194
|
+
/**
|
195
|
+
* Writes `string` to the buffer at `offset` according to the character encoding in `encoding`. The `length`
|
196
|
+
* parameter is the number of bytes to write. If the buffer does not contain enough space to fit the entire string,
|
197
|
+
* only part of `string` will be written. However, partially encoded characters will not be written.
|
198
|
+
*
|
199
|
+
* @param string String to write to `buf`.
|
200
|
+
* @param offset Number of bytes to skip before starting to write `string`. Default: `0`.
|
201
|
+
* @param length Maximum number of bytes to write: Default: `buf.length - offset`.
|
202
|
+
* @param encoding The character encoding of `string`. Default: `utf8`.
|
203
|
+
*/
|
204
|
+
write(string, offset, length, encoding) {
|
205
|
+
if (typeof offset === "undefined") {
|
206
|
+
encoding = "utf8";
|
207
|
+
length = this.length;
|
208
|
+
offset = 0;
|
209
|
+
} else if (typeof length === "undefined" && typeof offset === "string") {
|
210
|
+
encoding = offset;
|
211
|
+
length = this.length;
|
212
|
+
offset = 0;
|
213
|
+
} else if (typeof offset === "number" && isFinite(offset)) {
|
214
|
+
offset = offset >>> 0;
|
215
|
+
if (typeof length === "number" && isFinite(length)) {
|
216
|
+
length = length >>> 0;
|
217
|
+
encoding ?? (encoding = "utf8");
|
218
|
+
} else if (typeof length === "string") {
|
219
|
+
encoding = length;
|
220
|
+
length = void 0;
|
221
|
+
}
|
222
|
+
} else {
|
223
|
+
throw new Error("Buffer.write(string, encoding, offset[, length]) is no longer supported");
|
224
|
+
}
|
225
|
+
const remaining = this.length - offset;
|
226
|
+
if (typeof length === "undefined" || length > remaining) {
|
227
|
+
length = remaining;
|
228
|
+
}
|
229
|
+
if (string.length > 0 && (length < 0 || offset < 0) || offset > this.length) {
|
230
|
+
throw new RangeError("Attempt to write outside buffer bounds");
|
231
|
+
}
|
232
|
+
encoding || (encoding = "utf8");
|
233
|
+
switch (Buffer._getEncoding(encoding)) {
|
234
|
+
case "hex":
|
235
|
+
return Buffer._hexWrite(this, string, offset, length);
|
236
|
+
case "utf8":
|
237
|
+
return Buffer._utf8Write(this, string, offset, length);
|
238
|
+
case "ascii":
|
239
|
+
case "latin1":
|
240
|
+
case "binary":
|
241
|
+
return Buffer._asciiWrite(this, string, offset, length);
|
242
|
+
case "ucs2":
|
243
|
+
case "utf16le":
|
244
|
+
return Buffer._ucs2Write(this, string, offset, length);
|
245
|
+
case "base64":
|
246
|
+
return Buffer._base64Write(this, string, offset, length);
|
247
|
+
}
|
248
|
+
}
|
249
|
+
/**
|
250
|
+
* Decodes the buffer to a string according to the specified character encoding.
|
251
|
+
* Passing `start` and `end` will decode only a subset of the buffer.
|
252
|
+
*
|
253
|
+
* Note that if the encoding is `utf8` and a byte sequence in the input is not valid UTF-8, then each invalid byte
|
254
|
+
* will be replaced with `U+FFFD`.
|
255
|
+
*
|
256
|
+
* @param encoding
|
257
|
+
* @param start
|
258
|
+
* @param end
|
259
|
+
*/
|
260
|
+
toString(encoding, start, end) {
|
261
|
+
const length = this.length;
|
262
|
+
if (length === 0) {
|
263
|
+
return "";
|
264
|
+
}
|
265
|
+
if (arguments.length === 0) {
|
266
|
+
return Buffer._utf8Slice(this, 0, length);
|
267
|
+
}
|
268
|
+
if (typeof start === "undefined" || start < 0) {
|
269
|
+
start = 0;
|
270
|
+
}
|
271
|
+
if (start > this.length) {
|
272
|
+
return "";
|
273
|
+
}
|
274
|
+
if (typeof end === "undefined" || end > this.length) {
|
275
|
+
end = this.length;
|
276
|
+
}
|
277
|
+
if (end <= 0) {
|
278
|
+
return "";
|
279
|
+
}
|
280
|
+
end >>>= 0;
|
281
|
+
start >>>= 0;
|
282
|
+
if (end <= start) {
|
283
|
+
return "";
|
284
|
+
}
|
285
|
+
if (!encoding) {
|
286
|
+
encoding = "utf8";
|
287
|
+
}
|
288
|
+
switch (Buffer._getEncoding(encoding)) {
|
289
|
+
case "hex":
|
290
|
+
return Buffer._hexSlice(this, start, end);
|
291
|
+
case "utf8":
|
292
|
+
return Buffer._utf8Slice(this, start, end);
|
293
|
+
case "ascii":
|
294
|
+
return Buffer._asciiSlice(this, start, end);
|
295
|
+
case "latin1":
|
296
|
+
case "binary":
|
297
|
+
return Buffer._latin1Slice(this, start, end);
|
298
|
+
case "ucs2":
|
299
|
+
case "utf16le":
|
300
|
+
return Buffer._utf16leSlice(this, start, end);
|
301
|
+
case "base64":
|
302
|
+
return Buffer._base64Slice(this, start, end);
|
303
|
+
}
|
304
|
+
}
|
305
|
+
/**
|
306
|
+
* Returns true if this buffer's is equal to the provided buffer, meaning they share the same exact data.
|
307
|
+
*
|
308
|
+
* @param otherBuffer
|
309
|
+
*/
|
310
|
+
equals(otherBuffer) {
|
311
|
+
if (!Buffer.isBuffer(otherBuffer)) {
|
312
|
+
throw new TypeError("Argument must be a Buffer");
|
313
|
+
}
|
314
|
+
if (this === otherBuffer) {
|
315
|
+
return true;
|
316
|
+
}
|
317
|
+
return Buffer.compare(this, otherBuffer) === 0;
|
318
|
+
}
|
319
|
+
/**
|
320
|
+
* Compares the buffer with `otherBuffer` and returns a number indicating whether the buffer comes before, after,
|
321
|
+
* or is the same as `otherBuffer` in sort order. Comparison is based on the actual sequence of bytes in each
|
322
|
+
* buffer.
|
323
|
+
*
|
324
|
+
* - `0` is returned if `otherBuffer` is the same as this buffer.
|
325
|
+
* - `1` is returned if `otherBuffer` should come before this buffer when sorted.
|
326
|
+
* - `-1` is returned if `otherBuffer` should come after this buffer when sorted.
|
327
|
+
*
|
328
|
+
* @param otherBuffer The buffer to compare to.
|
329
|
+
* @param targetStart The offset within `otherBuffer` at which to begin comparison.
|
330
|
+
* @param targetEnd The offset within `otherBuffer` at which to end comparison (exclusive).
|
331
|
+
* @param sourceStart The offset within this buffer at which to begin comparison.
|
332
|
+
* @param sourceEnd The offset within this buffer at which to end the comparison (exclusive).
|
333
|
+
*/
|
334
|
+
compare(otherBuffer, targetStart, targetEnd, sourceStart, sourceEnd) {
|
335
|
+
if (Buffer._isInstance(otherBuffer, Uint8Array)) {
|
336
|
+
otherBuffer = Buffer.from(otherBuffer, otherBuffer.byteOffset, otherBuffer.byteLength);
|
337
|
+
}
|
338
|
+
if (!Buffer.isBuffer(otherBuffer)) {
|
339
|
+
throw new TypeError("Argument must be a Buffer or Uint8Array");
|
340
|
+
}
|
341
|
+
targetStart ?? (targetStart = 0);
|
342
|
+
targetEnd ?? (targetEnd = otherBuffer ? otherBuffer.length : 0);
|
343
|
+
sourceStart ?? (sourceStart = 0);
|
344
|
+
sourceEnd ?? (sourceEnd = this.length);
|
345
|
+
if (targetStart < 0 || targetEnd > otherBuffer.length || sourceStart < 0 || sourceEnd > this.length) {
|
346
|
+
throw new RangeError("Out of range index");
|
347
|
+
}
|
348
|
+
if (sourceStart >= sourceEnd && targetStart >= targetEnd) {
|
349
|
+
return 0;
|
350
|
+
}
|
351
|
+
if (sourceStart >= sourceEnd) {
|
352
|
+
return -1;
|
353
|
+
}
|
354
|
+
if (targetStart >= targetEnd) {
|
355
|
+
return 1;
|
356
|
+
}
|
357
|
+
targetStart >>>= 0;
|
358
|
+
targetEnd >>>= 0;
|
359
|
+
sourceStart >>>= 0;
|
360
|
+
sourceEnd >>>= 0;
|
361
|
+
if (this === otherBuffer) {
|
362
|
+
return 0;
|
363
|
+
}
|
364
|
+
let x = sourceEnd - sourceStart;
|
365
|
+
let y = targetEnd - targetStart;
|
366
|
+
const len = Math.min(x, y);
|
367
|
+
const thisCopy = this.slice(sourceStart, sourceEnd);
|
368
|
+
const targetCopy = otherBuffer.slice(targetStart, targetEnd);
|
369
|
+
for (let i = 0; i < len; ++i) {
|
370
|
+
if (thisCopy[i] !== targetCopy[i]) {
|
371
|
+
x = thisCopy[i];
|
372
|
+
y = targetCopy[i];
|
373
|
+
break;
|
374
|
+
}
|
375
|
+
}
|
376
|
+
if (x < y)
|
377
|
+
return -1;
|
378
|
+
if (y < x)
|
379
|
+
return 1;
|
380
|
+
return 0;
|
381
|
+
}
|
382
|
+
/**
|
383
|
+
* Copies data from a region of this buffer to a region in `targetBuffer`, even if the `targetBuffer` memory
|
384
|
+
* region overlaps with this buffer.
|
385
|
+
*
|
386
|
+
* @param targetBuffer The target buffer to copy into.
|
387
|
+
* @param targetStart The offset within `targetBuffer` at which to begin writing.
|
388
|
+
* @param sourceStart The offset within this buffer at which to begin copying.
|
389
|
+
* @param sourceEnd The offset within this buffer at which to end copying (exclusive).
|
390
|
+
*/
|
391
|
+
copy(targetBuffer, targetStart, sourceStart, sourceEnd) {
|
392
|
+
if (!Buffer.isBuffer(targetBuffer))
|
393
|
+
throw new TypeError("argument should be a Buffer");
|
394
|
+
if (!sourceStart)
|
395
|
+
sourceStart = 0;
|
396
|
+
if (!targetStart)
|
397
|
+
targetStart = 0;
|
398
|
+
if (!sourceEnd && sourceEnd !== 0)
|
399
|
+
sourceEnd = this.length;
|
400
|
+
if (targetStart >= targetBuffer.length)
|
401
|
+
targetStart = targetBuffer.length;
|
402
|
+
if (!targetStart)
|
403
|
+
targetStart = 0;
|
404
|
+
if (sourceEnd > 0 && sourceEnd < sourceStart)
|
405
|
+
sourceEnd = sourceStart;
|
406
|
+
if (sourceEnd === sourceStart)
|
407
|
+
return 0;
|
408
|
+
if (targetBuffer.length === 0 || this.length === 0)
|
409
|
+
return 0;
|
410
|
+
if (targetStart < 0) {
|
411
|
+
throw new RangeError("targetStart out of bounds");
|
412
|
+
}
|
413
|
+
if (sourceStart < 0 || sourceStart >= this.length)
|
414
|
+
throw new RangeError("Index out of range");
|
415
|
+
if (sourceEnd < 0)
|
416
|
+
throw new RangeError("sourceEnd out of bounds");
|
417
|
+
if (sourceEnd > this.length)
|
418
|
+
sourceEnd = this.length;
|
419
|
+
if (targetBuffer.length - targetStart < sourceEnd - sourceStart) {
|
420
|
+
sourceEnd = targetBuffer.length - targetStart + sourceStart;
|
421
|
+
}
|
422
|
+
const len = sourceEnd - sourceStart;
|
423
|
+
if (this === targetBuffer && typeof Uint8Array.prototype.copyWithin === "function") {
|
424
|
+
this.copyWithin(targetStart, sourceStart, sourceEnd);
|
425
|
+
} else {
|
426
|
+
Uint8Array.prototype.set.call(targetBuffer, this.subarray(sourceStart, sourceEnd), targetStart);
|
427
|
+
}
|
428
|
+
return len;
|
429
|
+
}
|
430
|
+
/**
|
431
|
+
* Returns a new `Buffer` that references the same memory as the original, but offset and cropped by the `start`
|
432
|
+
* and `end` indices. This is the same behavior as `buf.subarray()`.
|
433
|
+
*
|
434
|
+
* This method is not compatible with the `Uint8Array.prototype.slice()`, which is a superclass of Buffer. To copy
|
435
|
+
* the slice, use `Uint8Array.prototype.slice()`.
|
436
|
+
*
|
437
|
+
* @param start
|
438
|
+
* @param end
|
439
|
+
*/
|
440
|
+
slice(start, end) {
|
441
|
+
if (!start) {
|
442
|
+
start = 0;
|
443
|
+
}
|
444
|
+
const len = this.length;
|
445
|
+
start = ~~start;
|
446
|
+
end = end === void 0 ? len : ~~end;
|
447
|
+
if (start < 0) {
|
448
|
+
start += len;
|
449
|
+
if (start < 0) {
|
450
|
+
start = 0;
|
451
|
+
}
|
452
|
+
} else if (start > len) {
|
453
|
+
start = len;
|
454
|
+
}
|
455
|
+
if (end < 0) {
|
456
|
+
end += len;
|
457
|
+
if (end < 0) {
|
458
|
+
end = 0;
|
459
|
+
}
|
460
|
+
} else if (end > len) {
|
461
|
+
end = len;
|
462
|
+
}
|
463
|
+
if (end < start) {
|
464
|
+
end = start;
|
465
|
+
}
|
466
|
+
const newBuf = this.subarray(start, end);
|
467
|
+
Object.setPrototypeOf(newBuf, Buffer.prototype);
|
468
|
+
return newBuf;
|
469
|
+
}
|
470
|
+
/**
|
471
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits
|
472
|
+
* of accuracy. Behavior is undefined when value is anything other than an unsigned integer.
|
473
|
+
*
|
474
|
+
* @param value Number to write.
|
475
|
+
* @param offset Number of bytes to skip before starting to write.
|
476
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
477
|
+
* @param noAssert
|
478
|
+
* @returns `offset` plus the number of bytes written.
|
479
|
+
*/
|
480
|
+
writeUIntLE(value, offset, byteLength, noAssert) {
|
481
|
+
value = +value;
|
482
|
+
offset = offset >>> 0;
|
483
|
+
byteLength = byteLength >>> 0;
|
484
|
+
if (!noAssert) {
|
485
|
+
const maxBytes = Math.pow(2, 8 * byteLength) - 1;
|
486
|
+
Buffer._checkInt(this, value, offset, byteLength, maxBytes, 0);
|
487
|
+
}
|
488
|
+
let mul = 1;
|
489
|
+
let i = 0;
|
490
|
+
this[offset] = value & 255;
|
491
|
+
while (++i < byteLength && (mul *= 256)) {
|
492
|
+
this[offset + i] = value / mul & 255;
|
493
|
+
}
|
494
|
+
return offset + byteLength;
|
495
|
+
}
|
496
|
+
/**
|
497
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits of
|
498
|
+
* accuracy. Behavior is undefined when `value` is anything other than an unsigned integer.
|
499
|
+
*
|
500
|
+
* @param value Number to write.
|
501
|
+
* @param offset Number of bytes to skip before starting to write.
|
502
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
503
|
+
* @param noAssert
|
504
|
+
* @returns `offset` plus the number of bytes written.
|
505
|
+
*/
|
506
|
+
writeUIntBE(value, offset, byteLength, noAssert) {
|
507
|
+
value = +value;
|
508
|
+
offset = offset >>> 0;
|
509
|
+
byteLength = byteLength >>> 0;
|
510
|
+
if (!noAssert) {
|
511
|
+
const maxBytes = Math.pow(2, 8 * byteLength) - 1;
|
512
|
+
Buffer._checkInt(this, value, offset, byteLength, maxBytes, 0);
|
513
|
+
}
|
514
|
+
let i = byteLength - 1;
|
515
|
+
let mul = 1;
|
516
|
+
this[offset + i] = value & 255;
|
517
|
+
while (--i >= 0 && (mul *= 256)) {
|
518
|
+
this[offset + i] = value / mul & 255;
|
519
|
+
}
|
520
|
+
return offset + byteLength;
|
521
|
+
}
|
522
|
+
/**
|
523
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits
|
524
|
+
* of accuracy. Behavior is undefined when `value` is anything other than a signed integer.
|
525
|
+
*
|
526
|
+
* @param value Number to write.
|
527
|
+
* @param offset Number of bytes to skip before starting to write.
|
528
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
529
|
+
* @param noAssert
|
530
|
+
* @returns `offset` plus the number of bytes written.
|
531
|
+
*/
|
532
|
+
writeIntLE(value, offset, byteLength, noAssert) {
|
533
|
+
value = +value;
|
534
|
+
offset = offset >>> 0;
|
535
|
+
if (!noAssert) {
|
536
|
+
const limit = Math.pow(2, 8 * byteLength - 1);
|
537
|
+
Buffer._checkInt(this, value, offset, byteLength, limit - 1, -limit);
|
538
|
+
}
|
539
|
+
let i = 0;
|
540
|
+
let mul = 1;
|
541
|
+
let sub = 0;
|
542
|
+
this[offset] = value & 255;
|
543
|
+
while (++i < byteLength && (mul *= 256)) {
|
544
|
+
if (value < 0 && sub === 0 && this[offset + i - 1] !== 0) {
|
545
|
+
sub = 1;
|
546
|
+
}
|
547
|
+
this[offset + i] = (value / mul >> 0) - sub & 255;
|
548
|
+
}
|
549
|
+
return offset + byteLength;
|
550
|
+
}
|
551
|
+
/**
|
552
|
+
* Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits
|
553
|
+
* of accuracy. Behavior is undefined when `value` is anything other than a signed integer.
|
554
|
+
*
|
555
|
+
* @param value Number to write.
|
556
|
+
* @param offset Number of bytes to skip before starting to write.
|
557
|
+
* @param byteLength Number of bytes to write, between 0 and 6.
|
558
|
+
* @param noAssert
|
559
|
+
* @returns `offset` plus the number of bytes written.
|
560
|
+
*/
|
561
|
+
writeIntBE(value, offset, byteLength, noAssert) {
|
562
|
+
value = +value;
|
563
|
+
offset = offset >>> 0;
|
564
|
+
if (!noAssert) {
|
565
|
+
const limit = Math.pow(2, 8 * byteLength - 1);
|
566
|
+
Buffer._checkInt(this, value, offset, byteLength, limit - 1, -limit);
|
567
|
+
}
|
568
|
+
let i = byteLength - 1;
|
569
|
+
let mul = 1;
|
570
|
+
let sub = 0;
|
571
|
+
this[offset + i] = value & 255;
|
572
|
+
while (--i >= 0 && (mul *= 256)) {
|
573
|
+
if (value < 0 && sub === 0 && this[offset + i + 1] !== 0) {
|
574
|
+
sub = 1;
|
575
|
+
}
|
576
|
+
this[offset + i] = (value / mul >> 0) - sub & 255;
|
577
|
+
}
|
578
|
+
return offset + byteLength;
|
579
|
+
}
|
580
|
+
/**
|
581
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an
|
582
|
+
* unsigned, little-endian integer supporting up to 48 bits of accuracy.
|
583
|
+
*
|
584
|
+
* @param offset Number of bytes to skip before starting to read.
|
585
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
586
|
+
* @param noAssert
|
587
|
+
*/
|
588
|
+
readUIntLE(offset, byteLength, noAssert) {
|
589
|
+
offset = offset >>> 0;
|
590
|
+
byteLength = byteLength >>> 0;
|
591
|
+
if (!noAssert) {
|
592
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
593
|
+
}
|
594
|
+
let val = this[offset];
|
595
|
+
let mul = 1;
|
596
|
+
let i = 0;
|
597
|
+
while (++i < byteLength && (mul *= 256)) {
|
598
|
+
val += this[offset + i] * mul;
|
599
|
+
}
|
600
|
+
return val;
|
601
|
+
}
|
602
|
+
/**
|
603
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an
|
604
|
+
* unsigned, big-endian integer supporting up to 48 bits of accuracy.
|
605
|
+
*
|
606
|
+
* @param offset Number of bytes to skip before starting to read.
|
607
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
608
|
+
* @param noAssert
|
609
|
+
*/
|
610
|
+
readUIntBE(offset, byteLength, noAssert) {
|
611
|
+
offset = offset >>> 0;
|
612
|
+
byteLength = byteLength >>> 0;
|
613
|
+
if (!noAssert) {
|
614
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
615
|
+
}
|
616
|
+
let val = this[offset + --byteLength];
|
617
|
+
let mul = 1;
|
618
|
+
while (byteLength > 0 && (mul *= 256)) {
|
619
|
+
val += this[offset + --byteLength] * mul;
|
620
|
+
}
|
621
|
+
return val;
|
622
|
+
}
|
623
|
+
/**
|
624
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a
|
625
|
+
* little-endian, two's complement signed value supporting up to 48 bits of accuracy.
|
626
|
+
*
|
627
|
+
* @param offset Number of bytes to skip before starting to read.
|
628
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
629
|
+
* @param noAssert
|
630
|
+
*/
|
631
|
+
readIntLE(offset, byteLength, noAssert) {
|
632
|
+
offset = offset >>> 0;
|
633
|
+
byteLength = byteLength >>> 0;
|
634
|
+
if (!noAssert) {
|
635
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
636
|
+
}
|
637
|
+
let val = this[offset];
|
638
|
+
let mul = 1;
|
639
|
+
let i = 0;
|
640
|
+
while (++i < byteLength && (mul *= 256)) {
|
641
|
+
val += this[offset + i] * mul;
|
642
|
+
}
|
643
|
+
mul *= 128;
|
644
|
+
if (val >= mul) {
|
645
|
+
val -= Math.pow(2, 8 * byteLength);
|
646
|
+
}
|
647
|
+
return val;
|
648
|
+
}
|
649
|
+
/**
|
650
|
+
* Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a
|
651
|
+
* big-endian, two's complement signed value supporting up to 48 bits of accuracy.
|
652
|
+
*
|
653
|
+
* @param offset Number of bytes to skip before starting to read.
|
654
|
+
* @param byteLength Number of bytes to read, between 0 and 6.
|
655
|
+
* @param noAssert
|
656
|
+
*/
|
657
|
+
readIntBE(offset, byteLength, noAssert) {
|
658
|
+
offset = offset >>> 0;
|
659
|
+
byteLength = byteLength >>> 0;
|
660
|
+
if (!noAssert) {
|
661
|
+
Buffer._checkOffset(offset, byteLength, this.length);
|
662
|
+
}
|
663
|
+
let i = byteLength;
|
664
|
+
let mul = 1;
|
665
|
+
let val = this[offset + --i];
|
666
|
+
while (i > 0 && (mul *= 256)) {
|
667
|
+
val += this[offset + --i] * mul;
|
668
|
+
}
|
669
|
+
mul *= 128;
|
670
|
+
if (val >= mul) {
|
671
|
+
val -= Math.pow(2, 8 * byteLength);
|
672
|
+
}
|
673
|
+
return val;
|
674
|
+
}
|
675
|
+
/**
|
676
|
+
* Reads an unsigned 8-bit integer from `buf` at the specified `offset`.
|
677
|
+
*
|
678
|
+
* @param offset Number of bytes to skip before starting to read.
|
679
|
+
* @param noAssert
|
680
|
+
*/
|
681
|
+
readUInt8(offset, noAssert) {
|
682
|
+
offset = offset >>> 0;
|
683
|
+
if (!noAssert) {
|
684
|
+
Buffer._checkOffset(offset, 1, this.length);
|
685
|
+
}
|
686
|
+
return this[offset];
|
687
|
+
}
|
688
|
+
/**
|
689
|
+
* Reads an unsigned, little-endian 16-bit integer from `buf` at the specified `offset`.
|
690
|
+
*
|
691
|
+
* @param offset Number of bytes to skip before starting to read.
|
692
|
+
* @param noAssert
|
693
|
+
*/
|
694
|
+
readUInt16LE(offset, noAssert) {
|
695
|
+
offset = offset >>> 0;
|
696
|
+
if (!noAssert) {
|
697
|
+
Buffer._checkOffset(offset, 2, this.length);
|
698
|
+
}
|
699
|
+
return this[offset] | this[offset + 1] << 8;
|
700
|
+
}
|
701
|
+
/**
|
702
|
+
* Reads an unsigned, big-endian 16-bit integer from `buf` at the specified `offset`.
|
703
|
+
*
|
704
|
+
* @param offset Number of bytes to skip before starting to read.
|
705
|
+
* @param noAssert
|
706
|
+
*/
|
707
|
+
readUInt16BE(offset, noAssert) {
|
708
|
+
offset = offset >>> 0;
|
709
|
+
if (!noAssert) {
|
710
|
+
Buffer._checkOffset(offset, 2, this.length);
|
711
|
+
}
|
712
|
+
return this[offset] << 8 | this[offset + 1];
|
713
|
+
}
|
714
|
+
/**
|
715
|
+
* Reads an unsigned, little-endian 32-bit integer from `buf` at the specified `offset`.
|
716
|
+
*
|
717
|
+
* @param offset Number of bytes to skip before starting to read.
|
718
|
+
* @param noAssert
|
719
|
+
*/
|
720
|
+
readUInt32LE(offset, noAssert) {
|
721
|
+
offset = offset >>> 0;
|
722
|
+
if (!noAssert) {
|
723
|
+
Buffer._checkOffset(offset, 4, this.length);
|
724
|
+
}
|
725
|
+
return (this[offset] | this[offset + 1] << 8 | this[offset + 2] << 16) + this[offset + 3] * 16777216;
|
726
|
+
}
|
727
|
+
/**
|
728
|
+
* Reads an unsigned, big-endian 32-bit integer from `buf` at the specified `offset`.
|
729
|
+
*
|
730
|
+
* @param offset Number of bytes to skip before starting to read.
|
731
|
+
* @param noAssert
|
732
|
+
*/
|
733
|
+
readUInt32BE(offset, noAssert) {
|
734
|
+
offset = offset >>> 0;
|
735
|
+
if (!noAssert) {
|
736
|
+
Buffer._checkOffset(offset, 4, this.length);
|
737
|
+
}
|
738
|
+
return this[offset] * 16777216 + (this[offset + 1] << 16 | this[offset + 2] << 8 | this[offset + 3]);
|
739
|
+
}
|
740
|
+
/**
|
741
|
+
* Reads a signed 8-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer` are interpreted
|
742
|
+
* as two's complement signed values.
|
743
|
+
*
|
744
|
+
* @param offset Number of bytes to skip before starting to read.
|
745
|
+
* @param noAssert
|
746
|
+
*/
|
747
|
+
readInt8(offset, noAssert) {
|
748
|
+
offset = offset >>> 0;
|
749
|
+
if (!noAssert) {
|
750
|
+
Buffer._checkOffset(offset, 1, this.length);
|
751
|
+
}
|
752
|
+
if (!(this[offset] & 128)) {
|
753
|
+
return this[offset];
|
754
|
+
}
|
755
|
+
return (255 - this[offset] + 1) * -1;
|
756
|
+
}
|
757
|
+
/**
|
758
|
+
* Reads a signed, little-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
759
|
+
* are interpreted as two's complement signed values.
|
760
|
+
*
|
761
|
+
* @param offset Number of bytes to skip before starting to read.
|
762
|
+
* @param noAssert
|
763
|
+
*/
|
764
|
+
readInt16LE(offset, noAssert) {
|
765
|
+
offset = offset >>> 0;
|
766
|
+
if (!noAssert) {
|
767
|
+
Buffer._checkOffset(offset, 2, this.length);
|
768
|
+
}
|
769
|
+
const val = this[offset] | this[offset + 1] << 8;
|
770
|
+
return val & 32768 ? val | 4294901760 : val;
|
771
|
+
}
|
772
|
+
/**
|
773
|
+
* Reads a signed, big-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
774
|
+
* are interpreted as two's complement signed values.
|
775
|
+
*
|
776
|
+
* @param offset Number of bytes to skip before starting to read.
|
777
|
+
* @param noAssert
|
778
|
+
*/
|
779
|
+
readInt16BE(offset, noAssert) {
|
780
|
+
offset = offset >>> 0;
|
781
|
+
if (!noAssert) {
|
782
|
+
Buffer._checkOffset(offset, 2, this.length);
|
783
|
+
}
|
784
|
+
const val = this[offset + 1] | this[offset] << 8;
|
785
|
+
return val & 32768 ? val | 4294901760 : val;
|
786
|
+
}
|
787
|
+
/**
|
788
|
+
* Reads a signed, little-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
789
|
+
* are interpreted as two's complement signed values.
|
790
|
+
*
|
791
|
+
* @param offset Number of bytes to skip before starting to read.
|
792
|
+
* @param noAssert
|
793
|
+
*/
|
794
|
+
readInt32LE(offset, noAssert) {
|
795
|
+
offset = offset >>> 0;
|
796
|
+
if (!noAssert) {
|
797
|
+
Buffer._checkOffset(offset, 4, this.length);
|
798
|
+
}
|
799
|
+
return this[offset] | this[offset + 1] << 8 | this[offset + 2] << 16 | this[offset + 3] << 24;
|
800
|
+
}
|
801
|
+
/**
|
802
|
+
* Reads a signed, big-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
|
803
|
+
* are interpreted as two's complement signed values.
|
804
|
+
*
|
805
|
+
* @param offset Number of bytes to skip before starting to read.
|
806
|
+
* @param noAssert
|
807
|
+
*/
|
808
|
+
readInt32BE(offset, noAssert) {
|
809
|
+
offset = offset >>> 0;
|
810
|
+
if (!noAssert) {
|
811
|
+
Buffer._checkOffset(offset, 4, this.length);
|
812
|
+
}
|
813
|
+
return this[offset] << 24 | this[offset + 1] << 16 | this[offset + 2] << 8 | this[offset + 3];
|
814
|
+
}
|
815
|
+
/**
|
816
|
+
* Interprets `buf` as an array of unsigned 16-bit integers and swaps the byte order in-place.
|
817
|
+
* Throws a `RangeError` if `buf.length` is not a multiple of 2.
|
818
|
+
*/
|
819
|
+
swap16() {
|
820
|
+
const len = this.length;
|
821
|
+
if (len % 2 !== 0) {
|
822
|
+
throw new RangeError("Buffer size must be a multiple of 16-bits");
|
823
|
+
}
|
824
|
+
for (let i = 0; i < len; i += 2) {
|
825
|
+
this._swap(this, i, i + 1);
|
826
|
+
}
|
827
|
+
return this;
|
828
|
+
}
|
829
|
+
/**
|
830
|
+
* Interprets `buf` as an array of unsigned 32-bit integers and swaps the byte order in-place.
|
831
|
+
* Throws a `RangeError` if `buf.length` is not a multiple of 4.
|
832
|
+
*/
|
833
|
+
swap32() {
|
834
|
+
const len = this.length;
|
835
|
+
if (len % 4 !== 0) {
|
836
|
+
throw new RangeError("Buffer size must be a multiple of 32-bits");
|
837
|
+
}
|
838
|
+
for (let i = 0; i < len; i += 4) {
|
839
|
+
this._swap(this, i, i + 3);
|
840
|
+
this._swap(this, i + 1, i + 2);
|
841
|
+
}
|
842
|
+
return this;
|
843
|
+
}
|
844
|
+
/**
|
845
|
+
* Interprets `buf` as an array of unsigned 64-bit integers and swaps the byte order in-place.
|
846
|
+
* Throws a `RangeError` if `buf.length` is not a multiple of 8.
|
847
|
+
*/
|
848
|
+
swap64() {
|
849
|
+
const len = this.length;
|
850
|
+
if (len % 8 !== 0) {
|
851
|
+
throw new RangeError("Buffer size must be a multiple of 64-bits");
|
852
|
+
}
|
853
|
+
for (let i = 0; i < len; i += 8) {
|
854
|
+
this._swap(this, i, i + 7);
|
855
|
+
this._swap(this, i + 1, i + 6);
|
856
|
+
this._swap(this, i + 2, i + 5);
|
857
|
+
this._swap(this, i + 3, i + 4);
|
858
|
+
}
|
859
|
+
return this;
|
860
|
+
}
|
861
|
+
/**
|
862
|
+
* Swaps two octets.
|
863
|
+
*
|
864
|
+
* @param b
|
865
|
+
* @param n
|
866
|
+
* @param m
|
867
|
+
*/
|
868
|
+
_swap(b, n, m) {
|
869
|
+
const i = b[n];
|
870
|
+
b[n] = b[m];
|
871
|
+
b[m] = i;
|
872
|
+
}
|
873
|
+
/**
|
874
|
+
* Writes `value` to `buf` at the specified `offset`. The `value` must be a valid unsigned 8-bit integer.
|
875
|
+
* Behavior is undefined when `value` is anything other than an unsigned 8-bit integer.
|
876
|
+
*
|
877
|
+
* @param value Number to write.
|
878
|
+
* @param offset Number of bytes to skip before starting to write.
|
879
|
+
* @param noAssert
|
880
|
+
* @returns `offset` plus the number of bytes written.
|
881
|
+
*/
|
882
|
+
writeUInt8(value, offset, noAssert) {
|
883
|
+
value = +value;
|
884
|
+
offset = offset >>> 0;
|
885
|
+
if (!noAssert) {
|
886
|
+
Buffer._checkInt(this, value, offset, 1, 255, 0);
|
887
|
+
}
|
888
|
+
this[offset] = value & 255;
|
889
|
+
return offset + 1;
|
890
|
+
}
|
891
|
+
/**
|
892
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 16-bit
|
893
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 16-bit integer.
|
894
|
+
*
|
895
|
+
* @param value Number to write.
|
896
|
+
* @param offset Number of bytes to skip before starting to write.
|
897
|
+
* @param noAssert
|
898
|
+
* @returns `offset` plus the number of bytes written.
|
899
|
+
*/
|
900
|
+
writeUInt16LE(value, offset, noAssert) {
|
901
|
+
value = +value;
|
902
|
+
offset = offset >>> 0;
|
903
|
+
if (!noAssert) {
|
904
|
+
Buffer._checkInt(this, value, offset, 2, 65535, 0);
|
905
|
+
}
|
906
|
+
this[offset] = value & 255;
|
907
|
+
this[offset + 1] = value >>> 8;
|
908
|
+
return offset + 2;
|
909
|
+
}
|
910
|
+
/**
|
911
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 16-bit
|
912
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 16-bit integer.
|
913
|
+
*
|
914
|
+
* @param value Number to write.
|
915
|
+
* @param offset Number of bytes to skip before starting to write.
|
916
|
+
* @param noAssert
|
917
|
+
* @returns `offset` plus the number of bytes written.
|
918
|
+
*/
|
919
|
+
writeUInt16BE(value, offset, noAssert) {
|
920
|
+
value = +value;
|
921
|
+
offset = offset >>> 0;
|
922
|
+
if (!noAssert) {
|
923
|
+
Buffer._checkInt(this, value, offset, 2, 65535, 0);
|
924
|
+
}
|
925
|
+
this[offset] = value >>> 8;
|
926
|
+
this[offset + 1] = value & 255;
|
927
|
+
return offset + 2;
|
928
|
+
}
|
929
|
+
/**
|
930
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 32-bit
|
931
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 32-bit integer.
|
932
|
+
*
|
933
|
+
* @param value Number to write.
|
934
|
+
* @param offset Number of bytes to skip before starting to write.
|
935
|
+
* @param noAssert
|
936
|
+
* @returns `offset` plus the number of bytes written.
|
937
|
+
*/
|
938
|
+
writeUInt32LE(value, offset, noAssert) {
|
939
|
+
value = +value;
|
940
|
+
offset = offset >>> 0;
|
941
|
+
if (!noAssert) {
|
942
|
+
Buffer._checkInt(this, value, offset, 4, 4294967295, 0);
|
943
|
+
}
|
944
|
+
this[offset + 3] = value >>> 24;
|
945
|
+
this[offset + 2] = value >>> 16;
|
946
|
+
this[offset + 1] = value >>> 8;
|
947
|
+
this[offset] = value & 255;
|
948
|
+
return offset + 4;
|
949
|
+
}
|
950
|
+
/**
|
951
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 32-bit
|
952
|
+
* integer. Behavior is undefined when `value` is anything other than an unsigned 32-bit integer.
|
953
|
+
*
|
954
|
+
* @param value Number to write.
|
955
|
+
* @param offset Number of bytes to skip before starting to write.
|
956
|
+
* @param noAssert
|
957
|
+
* @returns `offset` plus the number of bytes written.
|
958
|
+
*/
|
959
|
+
writeUInt32BE(value, offset, noAssert) {
|
960
|
+
value = +value;
|
961
|
+
offset = offset >>> 0;
|
962
|
+
if (!noAssert) {
|
963
|
+
Buffer._checkInt(this, value, offset, 4, 4294967295, 0);
|
964
|
+
}
|
965
|
+
this[offset] = value >>> 24;
|
966
|
+
this[offset + 1] = value >>> 16;
|
967
|
+
this[offset + 2] = value >>> 8;
|
968
|
+
this[offset + 3] = value & 255;
|
969
|
+
return offset + 4;
|
970
|
+
}
|
971
|
+
/**
|
972
|
+
* Writes `value` to `buf` at the specified `offset`. The `value` must be a valid signed 8-bit integer.
|
973
|
+
* Behavior is undefined when `value` is anything other than a signed 8-bit integer.
|
974
|
+
*
|
975
|
+
* @param value Number to write.
|
976
|
+
* @param offset Number of bytes to skip before starting to write.
|
977
|
+
* @param noAssert
|
978
|
+
* @returns `offset` plus the number of bytes written.
|
979
|
+
*/
|
980
|
+
writeInt8(value, offset, noAssert) {
|
981
|
+
value = +value;
|
982
|
+
offset = offset >>> 0;
|
983
|
+
if (!noAssert) {
|
984
|
+
Buffer._checkInt(this, value, offset, 1, 127, -128);
|
985
|
+
}
|
986
|
+
if (value < 0) {
|
987
|
+
value = 255 + value + 1;
|
988
|
+
}
|
989
|
+
this[offset] = value & 255;
|
990
|
+
return offset + 1;
|
991
|
+
}
|
992
|
+
/**
|
993
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 16-bit
|
994
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 16-bit integer.
|
995
|
+
*
|
996
|
+
* @param value Number to write.
|
997
|
+
* @param offset Number of bytes to skip before starting to write.
|
998
|
+
* @param noAssert
|
999
|
+
* @returns `offset` plus the number of bytes written.
|
1000
|
+
*/
|
1001
|
+
writeInt16LE(value, offset, noAssert) {
|
1002
|
+
value = +value;
|
1003
|
+
offset = offset >>> 0;
|
1004
|
+
if (!noAssert) {
|
1005
|
+
Buffer._checkInt(this, value, offset, 2, 32767, -32768);
|
1006
|
+
}
|
1007
|
+
this[offset] = value & 255;
|
1008
|
+
this[offset + 1] = value >>> 8;
|
1009
|
+
return offset + 2;
|
1010
|
+
}
|
1011
|
+
/**
|
1012
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 16-bit
|
1013
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 16-bit integer.
|
1014
|
+
*
|
1015
|
+
* @param value Number to write.
|
1016
|
+
* @param offset Number of bytes to skip before starting to write.
|
1017
|
+
* @param noAssert
|
1018
|
+
* @returns `offset` plus the number of bytes written.
|
1019
|
+
*/
|
1020
|
+
writeInt16BE(value, offset, noAssert) {
|
1021
|
+
value = +value;
|
1022
|
+
offset = offset >>> 0;
|
1023
|
+
if (!noAssert) {
|
1024
|
+
Buffer._checkInt(this, value, offset, 2, 32767, -32768);
|
1025
|
+
}
|
1026
|
+
this[offset] = value >>> 8;
|
1027
|
+
this[offset + 1] = value & 255;
|
1028
|
+
return offset + 2;
|
1029
|
+
}
|
1030
|
+
/**
|
1031
|
+
* Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 32-bit
|
1032
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 32-bit integer.
|
1033
|
+
*
|
1034
|
+
* @param value Number to write.
|
1035
|
+
* @param offset Number of bytes to skip before starting to write.
|
1036
|
+
* @param noAssert
|
1037
|
+
* @returns `offset` plus the number of bytes written.
|
1038
|
+
*/
|
1039
|
+
writeInt32LE(value, offset, noAssert) {
|
1040
|
+
value = +value;
|
1041
|
+
offset = offset >>> 0;
|
1042
|
+
if (!noAssert) {
|
1043
|
+
Buffer._checkInt(this, value, offset, 4, 2147483647, -2147483648);
|
1044
|
+
}
|
1045
|
+
this[offset] = value & 255;
|
1046
|
+
this[offset + 1] = value >>> 8;
|
1047
|
+
this[offset + 2] = value >>> 16;
|
1048
|
+
this[offset + 3] = value >>> 24;
|
1049
|
+
return offset + 4;
|
1050
|
+
}
|
1051
|
+
/**
|
1052
|
+
* Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 32-bit
|
1053
|
+
* integer. Behavior is undefined when `value` is anything other than a signed 32-bit integer.
|
1054
|
+
*
|
1055
|
+
* @param value Number to write.
|
1056
|
+
* @param offset Number of bytes to skip before starting to write.
|
1057
|
+
* @param noAssert
|
1058
|
+
* @returns `offset` plus the number of bytes written.
|
1059
|
+
*/
|
1060
|
+
writeInt32BE(value, offset, noAssert) {
|
1061
|
+
value = +value;
|
1062
|
+
offset = offset >>> 0;
|
1063
|
+
if (!noAssert) {
|
1064
|
+
Buffer._checkInt(this, value, offset, 4, 2147483647, -2147483648);
|
1065
|
+
}
|
1066
|
+
if (value < 0) {
|
1067
|
+
value = 4294967295 + value + 1;
|
1068
|
+
}
|
1069
|
+
this[offset] = value >>> 24;
|
1070
|
+
this[offset + 1] = value >>> 16;
|
1071
|
+
this[offset + 2] = value >>> 8;
|
1072
|
+
this[offset + 3] = value & 255;
|
1073
|
+
return offset + 4;
|
1074
|
+
}
|
1075
|
+
/**
|
1076
|
+
* Fills `buf` with the specified `value`. If the `offset` and `end` are not given, the entire `buf` will be
|
1077
|
+
* filled. The `value` is coerced to a `uint32` value if it is not a string, `Buffer`, or integer. If the resulting
|
1078
|
+
* integer is greater than `255` (decimal), then `buf` will be filled with `value & 255`.
|
1079
|
+
*
|
1080
|
+
* If the final write of a `fill()` operation falls on a multi-byte character, then only the bytes of that
|
1081
|
+
* character that fit into `buf` are written.
|
1082
|
+
*
|
1083
|
+
* If `value` contains invalid characters, it is truncated; if no valid fill data remains, an exception is thrown.
|
1084
|
+
*
|
1085
|
+
* @param value
|
1086
|
+
* @param encoding
|
1087
|
+
*/
|
1088
|
+
fill(value, offset, end, encoding) {
|
1089
|
+
if (typeof value === "string") {
|
1090
|
+
if (typeof offset === "string") {
|
1091
|
+
encoding = offset;
|
1092
|
+
offset = 0;
|
1093
|
+
end = this.length;
|
1094
|
+
} else if (typeof end === "string") {
|
1095
|
+
encoding = end;
|
1096
|
+
end = this.length;
|
1097
|
+
}
|
1098
|
+
if (encoding !== void 0 && typeof encoding !== "string") {
|
1099
|
+
throw new TypeError("encoding must be a string");
|
1100
|
+
}
|
1101
|
+
if (typeof encoding === "string" && !Buffer.isEncoding(encoding)) {
|
1102
|
+
throw new TypeError("Unknown encoding: " + encoding);
|
1103
|
+
}
|
1104
|
+
if (value.length === 1) {
|
1105
|
+
const code = value.charCodeAt(0);
|
1106
|
+
if (encoding === "utf8" && code < 128) {
|
1107
|
+
value = code;
|
1108
|
+
}
|
1109
|
+
}
|
1110
|
+
} else if (typeof value === "number") {
|
1111
|
+
value = value & 255;
|
1112
|
+
} else if (typeof value === "boolean") {
|
1113
|
+
value = Number(value);
|
1114
|
+
}
|
1115
|
+
offset ?? (offset = 0);
|
1116
|
+
end ?? (end = this.length);
|
1117
|
+
if (offset < 0 || this.length < offset || this.length < end) {
|
1118
|
+
throw new RangeError("Out of range index");
|
1119
|
+
}
|
1120
|
+
if (end <= offset) {
|
1121
|
+
return this;
|
1122
|
+
}
|
1123
|
+
offset = offset >>> 0;
|
1124
|
+
end = end === void 0 ? this.length : end >>> 0;
|
1125
|
+
value || (value = 0);
|
1126
|
+
let i;
|
1127
|
+
if (typeof value === "number") {
|
1128
|
+
for (i = offset; i < end; ++i) {
|
1129
|
+
this[i] = value;
|
1130
|
+
}
|
1131
|
+
} else {
|
1132
|
+
const bytes = Buffer.isBuffer(value) ? value : Buffer.from(value, encoding);
|
1133
|
+
const len = bytes.length;
|
1134
|
+
if (len === 0) {
|
1135
|
+
throw new TypeError('The value "' + value + '" is invalid for argument "value"');
|
1136
|
+
}
|
1137
|
+
for (i = 0; i < end - offset; ++i) {
|
1138
|
+
this[i + offset] = bytes[i % len];
|
1139
|
+
}
|
1140
|
+
}
|
1141
|
+
return this;
|
1142
|
+
}
|
1143
|
+
/**
|
1144
|
+
* Returns the index of the specified value.
|
1145
|
+
*
|
1146
|
+
* If `value` is:
|
1147
|
+
* - a string, `value` is interpreted according to the character encoding in `encoding`.
|
1148
|
+
* - a `Buffer` or `Uint8Array`, `value` will be used in its entirety. To compare a partial Buffer, use `slice()`.
|
1149
|
+
* - a number, `value` will be interpreted as an unsigned 8-bit integer value between `0` and `255`.
|
1150
|
+
*
|
1151
|
+
* Any other types will throw a `TypeError`.
|
1152
|
+
*
|
1153
|
+
* @param value What to search for.
|
1154
|
+
* @param byteOffset Where to begin searching in `buf`. If negative, then calculated from the end.
|
1155
|
+
* @param encoding If `value` is a string, this is the encoding used to search.
|
1156
|
+
* @returns The index of the first occurrence of `value` in `buf`, or `-1` if not found.
|
1157
|
+
*/
|
1158
|
+
indexOf(value, byteOffset, encoding) {
|
1159
|
+
return this._bidirectionalIndexOf(this, value, byteOffset, encoding, true);
|
1160
|
+
}
|
1161
|
+
/**
|
1162
|
+
* Gets the last index of the specified value.
|
1163
|
+
*
|
1164
|
+
* @see indexOf()
|
1165
|
+
* @param value
|
1166
|
+
* @param byteOffset
|
1167
|
+
* @param encoding
|
1168
|
+
*/
|
1169
|
+
lastIndexOf(value, byteOffset, encoding) {
|
1170
|
+
return this._bidirectionalIndexOf(this, value, byteOffset, encoding, false);
|
1171
|
+
}
|
1172
|
+
_bidirectionalIndexOf(buffer, val, byteOffset, encoding, dir) {
|
1173
|
+
if (buffer.length === 0) {
|
1174
|
+
return -1;
|
1175
|
+
}
|
1176
|
+
if (typeof byteOffset === "string") {
|
1177
|
+
encoding = byteOffset;
|
1178
|
+
byteOffset = 0;
|
1179
|
+
} else if (typeof byteOffset === "undefined") {
|
1180
|
+
byteOffset = 0;
|
1181
|
+
} else if (byteOffset > 2147483647) {
|
1182
|
+
byteOffset = 2147483647;
|
1183
|
+
} else if (byteOffset < -2147483648) {
|
1184
|
+
byteOffset = -2147483648;
|
1185
|
+
}
|
1186
|
+
byteOffset = +byteOffset;
|
1187
|
+
if (byteOffset !== byteOffset) {
|
1188
|
+
byteOffset = dir ? 0 : buffer.length - 1;
|
1189
|
+
}
|
1190
|
+
if (byteOffset < 0) {
|
1191
|
+
byteOffset = buffer.length + byteOffset;
|
1192
|
+
}
|
1193
|
+
if (byteOffset >= buffer.length) {
|
1194
|
+
if (dir) {
|
1195
|
+
return -1;
|
1196
|
+
} else {
|
1197
|
+
byteOffset = buffer.length - 1;
|
1198
|
+
}
|
1199
|
+
} else if (byteOffset < 0) {
|
1200
|
+
if (dir) {
|
1201
|
+
byteOffset = 0;
|
1202
|
+
} else {
|
1203
|
+
return -1;
|
1204
|
+
}
|
1205
|
+
}
|
1206
|
+
if (typeof val === "string") {
|
1207
|
+
val = Buffer.from(val, encoding);
|
1208
|
+
}
|
1209
|
+
if (Buffer.isBuffer(val)) {
|
1210
|
+
if (val.length === 0) {
|
1211
|
+
return -1;
|
1212
|
+
}
|
1213
|
+
return Buffer._arrayIndexOf(buffer, val, byteOffset, encoding, dir);
|
1214
|
+
} else if (typeof val === "number") {
|
1215
|
+
val = val & 255;
|
1216
|
+
if (typeof Uint8Array.prototype.indexOf === "function") {
|
1217
|
+
if (dir) {
|
1218
|
+
return Uint8Array.prototype.indexOf.call(buffer, val, byteOffset);
|
1219
|
+
} else {
|
1220
|
+
return Uint8Array.prototype.lastIndexOf.call(buffer, val, byteOffset);
|
1221
|
+
}
|
1222
|
+
}
|
1223
|
+
return Buffer._arrayIndexOf(buffer, Buffer.from([val]), byteOffset, encoding, dir);
|
1224
|
+
}
|
1225
|
+
throw new TypeError("val must be string, number or Buffer");
|
1226
|
+
}
|
1227
|
+
/**
|
1228
|
+
* Equivalent to `buf.indexOf() !== -1`.
|
1229
|
+
*
|
1230
|
+
* @param value
|
1231
|
+
* @param byteOffset
|
1232
|
+
* @param encoding
|
1233
|
+
*/
|
1234
|
+
includes(value, byteOffset, encoding) {
|
1235
|
+
return this.indexOf(value, byteOffset, encoding) !== -1;
|
1236
|
+
}
|
1237
|
+
/**
|
1238
|
+
* Creates a new buffer from the given parameters.
|
1239
|
+
*
|
1240
|
+
* @param data
|
1241
|
+
* @param encoding
|
1242
|
+
*/
|
1243
|
+
static from(a, b, c) {
|
1244
|
+
return new Buffer(a, b, c);
|
1245
|
+
}
|
1246
|
+
/**
|
1247
|
+
* Returns true if `obj` is a Buffer.
|
1248
|
+
*
|
1249
|
+
* @param obj
|
1250
|
+
*/
|
1251
|
+
static isBuffer(obj) {
|
1252
|
+
return obj != null && obj !== Buffer.prototype && Buffer._isInstance(obj, Buffer);
|
1253
|
+
}
|
1254
|
+
/**
|
1255
|
+
* Returns true if `encoding` is a supported encoding.
|
1256
|
+
*
|
1257
|
+
* @param encoding
|
1258
|
+
*/
|
1259
|
+
static isEncoding(encoding) {
|
1260
|
+
switch (encoding.toLowerCase()) {
|
1261
|
+
case "hex":
|
1262
|
+
case "utf8":
|
1263
|
+
case "ascii":
|
1264
|
+
case "binary":
|
1265
|
+
case "latin1":
|
1266
|
+
case "ucs2":
|
1267
|
+
case "utf16le":
|
1268
|
+
case "base64":
|
1269
|
+
return true;
|
1270
|
+
default:
|
1271
|
+
return false;
|
1272
|
+
}
|
1273
|
+
}
|
1274
|
+
/**
|
1275
|
+
* Gives the actual byte length of a string for an encoding. This is not the same as `string.length` since that
|
1276
|
+
* returns the number of characters in the string.
|
1277
|
+
*
|
1278
|
+
* @param string The string to test.
|
1279
|
+
* @param encoding The encoding to use for calculation. Defaults is `utf8`.
|
1280
|
+
*/
|
1281
|
+
static byteLength(string, encoding) {
|
1282
|
+
if (Buffer.isBuffer(string)) {
|
1283
|
+
return string.length;
|
1284
|
+
}
|
1285
|
+
if (typeof string !== "string" && (ArrayBuffer.isView(string) || Buffer._isInstance(string, ArrayBuffer))) {
|
1286
|
+
return string.byteLength;
|
1287
|
+
}
|
1288
|
+
if (typeof string !== "string") {
|
1289
|
+
throw new TypeError(
|
1290
|
+
'The "string" argument must be one of type string, Buffer, or ArrayBuffer. Received type ' + typeof string
|
1291
|
+
);
|
1292
|
+
}
|
1293
|
+
const len = string.length;
|
1294
|
+
const mustMatch = arguments.length > 2 && arguments[2] === true;
|
1295
|
+
if (!mustMatch && len === 0) {
|
1296
|
+
return 0;
|
1297
|
+
}
|
1298
|
+
switch (encoding?.toLowerCase()) {
|
1299
|
+
case "ascii":
|
1300
|
+
case "latin1":
|
1301
|
+
case "binary":
|
1302
|
+
return len;
|
1303
|
+
case "utf8":
|
1304
|
+
return Buffer._utf8ToBytes(string).length;
|
1305
|
+
case "hex":
|
1306
|
+
return len >>> 1;
|
1307
|
+
case "ucs2":
|
1308
|
+
case "utf16le":
|
1309
|
+
return len * 2;
|
1310
|
+
case "base64":
|
1311
|
+
return Buffer._base64ToBytes(string).length;
|
1312
|
+
default:
|
1313
|
+
return mustMatch ? -1 : Buffer._utf8ToBytes(string).length;
|
1314
|
+
}
|
1315
|
+
}
|
1316
|
+
/**
|
1317
|
+
* Returns a Buffer which is the result of concatenating all the buffers in the list together.
|
1318
|
+
*
|
1319
|
+
* - If the list has no items, or if the `totalLength` is 0, then it returns a zero-length buffer.
|
1320
|
+
* - If the list has exactly one item, then the first item is returned.
|
1321
|
+
* - If the list has more than one item, then a new buffer is created.
|
1322
|
+
*
|
1323
|
+
* It is faster to provide the `totalLength` if it is known. However, it will be calculated if not provided at
|
1324
|
+
* a small computational expense.
|
1325
|
+
*
|
1326
|
+
* @param list An array of Buffer objects to concatenate.
|
1327
|
+
* @param totalLength Total length of the buffers when concatenated.
|
1328
|
+
*/
|
1329
|
+
static concat(list, totalLength) {
|
1330
|
+
if (!Array.isArray(list)) {
|
1331
|
+
throw new TypeError('"list" argument must be an Array of Buffers');
|
1332
|
+
}
|
1333
|
+
if (list.length === 0) {
|
1334
|
+
return Buffer.alloc(0);
|
1335
|
+
}
|
1336
|
+
let i;
|
1337
|
+
if (totalLength === void 0) {
|
1338
|
+
totalLength = 0;
|
1339
|
+
for (i = 0; i < list.length; ++i) {
|
1340
|
+
totalLength += list[i].length;
|
1341
|
+
}
|
1342
|
+
}
|
1343
|
+
const buffer = Buffer.allocUnsafe(totalLength);
|
1344
|
+
let pos = 0;
|
1345
|
+
for (i = 0; i < list.length; ++i) {
|
1346
|
+
let buf = list[i];
|
1347
|
+
if (Buffer._isInstance(buf, Uint8Array)) {
|
1348
|
+
if (pos + buf.length > buffer.length) {
|
1349
|
+
if (!Buffer.isBuffer(buf)) {
|
1350
|
+
buf = Buffer.from(buf);
|
1351
|
+
}
|
1352
|
+
buf.copy(buffer, pos);
|
1353
|
+
} else {
|
1354
|
+
Uint8Array.prototype.set.call(buffer, buf, pos);
|
1355
|
+
}
|
1356
|
+
} else if (!Buffer.isBuffer(buf)) {
|
1357
|
+
throw new TypeError('"list" argument must be an Array of Buffers');
|
1358
|
+
} else {
|
1359
|
+
buf.copy(buffer, pos);
|
1360
|
+
}
|
1361
|
+
pos += buf.length;
|
1362
|
+
}
|
1363
|
+
return buffer;
|
1364
|
+
}
|
1365
|
+
/**
|
1366
|
+
* The same as `buf1.compare(buf2)`.
|
1367
|
+
*/
|
1368
|
+
static compare(buf1, buf2) {
|
1369
|
+
if (Buffer._isInstance(buf1, Uint8Array)) {
|
1370
|
+
buf1 = Buffer.from(buf1, buf1.byteOffset, buf1.byteLength);
|
1371
|
+
}
|
1372
|
+
if (Buffer._isInstance(buf2, Uint8Array)) {
|
1373
|
+
buf2 = Buffer.from(buf2, buf2.byteOffset, buf2.byteLength);
|
1374
|
+
}
|
1375
|
+
if (!Buffer.isBuffer(buf1) || !Buffer.isBuffer(buf2)) {
|
1376
|
+
throw new TypeError('The "buf1", "buf2" arguments must be one of type Buffer or Uint8Array');
|
1377
|
+
}
|
1378
|
+
if (buf1 === buf2) {
|
1379
|
+
return 0;
|
1380
|
+
}
|
1381
|
+
let x = buf1.length;
|
1382
|
+
let y = buf2.length;
|
1383
|
+
for (let i = 0, len = Math.min(x, y); i < len; ++i) {
|
1384
|
+
if (buf1[i] !== buf2[i]) {
|
1385
|
+
x = buf1[i];
|
1386
|
+
y = buf2[i];
|
1387
|
+
break;
|
1388
|
+
}
|
1389
|
+
}
|
1390
|
+
if (x < y) {
|
1391
|
+
return -1;
|
1392
|
+
}
|
1393
|
+
if (y < x) {
|
1394
|
+
return 1;
|
1395
|
+
}
|
1396
|
+
return 0;
|
1397
|
+
}
|
1398
|
+
/**
|
1399
|
+
* Allocates a new buffer of `size` octets.
|
1400
|
+
*
|
1401
|
+
* @param size The number of octets to allocate.
|
1402
|
+
* @param fill If specified, the buffer will be initialized by calling `buf.fill(fill)`, or with zeroes otherwise.
|
1403
|
+
* @param encoding The encoding used for the call to `buf.fill()` while initializing.
|
1404
|
+
*/
|
1405
|
+
static alloc(size, fill, encoding) {
|
1406
|
+
if (typeof size !== "number") {
|
1407
|
+
throw new TypeError('"size" argument must be of type number');
|
1408
|
+
} else if (size < 0) {
|
1409
|
+
throw new RangeError('The value "' + size + '" is invalid for option "size"');
|
1410
|
+
}
|
1411
|
+
if (size <= 0) {
|
1412
|
+
return new Buffer(size);
|
1413
|
+
}
|
1414
|
+
if (fill !== void 0) {
|
1415
|
+
return typeof encoding === "string" ? new Buffer(size).fill(fill, 0, size, encoding) : new Buffer(size).fill(fill);
|
1416
|
+
}
|
1417
|
+
return new Buffer(size);
|
1418
|
+
}
|
1419
|
+
/**
|
1420
|
+
* Allocates a new buffer of `size` octets without initializing memory. The contents of the buffer are unknown.
|
1421
|
+
*
|
1422
|
+
* @param size
|
1423
|
+
*/
|
1424
|
+
static allocUnsafe(size) {
|
1425
|
+
if (typeof size !== "number") {
|
1426
|
+
throw new TypeError('"size" argument must be of type number');
|
1427
|
+
} else if (size < 0) {
|
1428
|
+
throw new RangeError('The value "' + size + '" is invalid for option "size"');
|
1429
|
+
}
|
1430
|
+
return new Buffer(size < 0 ? 0 : Buffer._checked(size) | 0);
|
1431
|
+
}
|
1432
|
+
/**
|
1433
|
+
* Returns true if the given `obj` is an instance of `type`.
|
1434
|
+
*
|
1435
|
+
* @param obj
|
1436
|
+
* @param type
|
1437
|
+
*/
|
1438
|
+
static _isInstance(obj, type) {
|
1439
|
+
return obj instanceof type || obj != null && obj.constructor != null && obj.constructor.name != null && obj.constructor.name === type.name;
|
1440
|
+
}
|
1441
|
+
static _checked(length) {
|
1442
|
+
if (length >= K_MAX_LENGTH) {
|
1443
|
+
throw new RangeError(
|
1444
|
+
"Attempt to allocate Buffer larger than maximum size: 0x" + K_MAX_LENGTH.toString(16) + " bytes"
|
1445
|
+
);
|
1446
|
+
}
|
1447
|
+
return length | 0;
|
1448
|
+
}
|
1449
|
+
static _blitBuffer(src, dst, offset, length) {
|
1450
|
+
let i;
|
1451
|
+
for (i = 0; i < length; ++i) {
|
1452
|
+
if (i + offset >= dst.length || i >= src.length) {
|
1453
|
+
break;
|
1454
|
+
}
|
1455
|
+
dst[i + offset] = src[i];
|
1456
|
+
}
|
1457
|
+
return i;
|
1458
|
+
}
|
1459
|
+
static _utf8Write(buf, string, offset, length) {
|
1460
|
+
return Buffer._blitBuffer(Buffer._utf8ToBytes(string, buf.length - offset), buf, offset, length);
|
1461
|
+
}
|
1462
|
+
static _asciiWrite(buf, string, offset, length) {
|
1463
|
+
return Buffer._blitBuffer(Buffer._asciiToBytes(string), buf, offset, length);
|
1464
|
+
}
|
1465
|
+
static _base64Write(buf, string, offset, length) {
|
1466
|
+
return Buffer._blitBuffer(Buffer._base64ToBytes(string), buf, offset, length);
|
1467
|
+
}
|
1468
|
+
static _ucs2Write(buf, string, offset, length) {
|
1469
|
+
return Buffer._blitBuffer(Buffer._utf16leToBytes(string, buf.length - offset), buf, offset, length);
|
1470
|
+
}
|
1471
|
+
static _hexWrite(buf, string, offset, length) {
|
1472
|
+
offset = Number(offset) || 0;
|
1473
|
+
const remaining = buf.length - offset;
|
1474
|
+
if (!length) {
|
1475
|
+
length = remaining;
|
1476
|
+
} else {
|
1477
|
+
length = Number(length);
|
1478
|
+
if (length > remaining) {
|
1479
|
+
length = remaining;
|
1480
|
+
}
|
1481
|
+
}
|
1482
|
+
const strLen = string.length;
|
1483
|
+
if (length > strLen / 2) {
|
1484
|
+
length = strLen / 2;
|
1485
|
+
}
|
1486
|
+
let i;
|
1487
|
+
for (i = 0; i < length; ++i) {
|
1488
|
+
const parsed = parseInt(string.substr(i * 2, 2), 16);
|
1489
|
+
if (parsed !== parsed) {
|
1490
|
+
return i;
|
1491
|
+
}
|
1492
|
+
buf[offset + i] = parsed;
|
1493
|
+
}
|
1494
|
+
return i;
|
1495
|
+
}
|
1496
|
+
static _utf8ToBytes(string, units) {
|
1497
|
+
units = units || Infinity;
|
1498
|
+
const length = string.length;
|
1499
|
+
const bytes = [];
|
1500
|
+
let codePoint;
|
1501
|
+
let leadSurrogate = null;
|
1502
|
+
for (let i = 0; i < length; ++i) {
|
1503
|
+
codePoint = string.charCodeAt(i);
|
1504
|
+
if (codePoint > 55295 && codePoint < 57344) {
|
1505
|
+
if (!leadSurrogate) {
|
1506
|
+
if (codePoint > 56319) {
|
1507
|
+
if ((units -= 3) > -1) {
|
1508
|
+
bytes.push(239, 191, 189);
|
1509
|
+
}
|
1510
|
+
continue;
|
1511
|
+
} else if (i + 1 === length) {
|
1512
|
+
if ((units -= 3) > -1) {
|
1513
|
+
bytes.push(239, 191, 189);
|
1514
|
+
}
|
1515
|
+
continue;
|
1516
|
+
}
|
1517
|
+
leadSurrogate = codePoint;
|
1518
|
+
continue;
|
1519
|
+
}
|
1520
|
+
if (codePoint < 56320) {
|
1521
|
+
if ((units -= 3) > -1) {
|
1522
|
+
bytes.push(239, 191, 189);
|
1523
|
+
}
|
1524
|
+
leadSurrogate = codePoint;
|
1525
|
+
continue;
|
1526
|
+
}
|
1527
|
+
codePoint = (leadSurrogate - 55296 << 10 | codePoint - 56320) + 65536;
|
1528
|
+
} else if (leadSurrogate) {
|
1529
|
+
if ((units -= 3) > -1) {
|
1530
|
+
bytes.push(239, 191, 189);
|
1531
|
+
}
|
1532
|
+
}
|
1533
|
+
leadSurrogate = null;
|
1534
|
+
if (codePoint < 128) {
|
1535
|
+
if ((units -= 1) < 0) {
|
1536
|
+
break;
|
1537
|
+
}
|
1538
|
+
bytes.push(codePoint);
|
1539
|
+
} else if (codePoint < 2048) {
|
1540
|
+
if ((units -= 2) < 0) {
|
1541
|
+
break;
|
1542
|
+
}
|
1543
|
+
bytes.push(codePoint >> 6 | 192, codePoint & 63 | 128);
|
1544
|
+
} else if (codePoint < 65536) {
|
1545
|
+
if ((units -= 3) < 0) {
|
1546
|
+
break;
|
1547
|
+
}
|
1548
|
+
bytes.push(codePoint >> 12 | 224, codePoint >> 6 & 63 | 128, codePoint & 63 | 128);
|
1549
|
+
} else if (codePoint < 1114112) {
|
1550
|
+
if ((units -= 4) < 0) {
|
1551
|
+
break;
|
1552
|
+
}
|
1553
|
+
bytes.push(
|
1554
|
+
codePoint >> 18 | 240,
|
1555
|
+
codePoint >> 12 & 63 | 128,
|
1556
|
+
codePoint >> 6 & 63 | 128,
|
1557
|
+
codePoint & 63 | 128
|
1558
|
+
);
|
1559
|
+
} else {
|
1560
|
+
throw new Error("Invalid code point");
|
1561
|
+
}
|
1562
|
+
}
|
1563
|
+
return bytes;
|
1564
|
+
}
|
1565
|
+
static _base64ToBytes(str) {
|
1566
|
+
return toByteArray(base64clean(str));
|
1567
|
+
}
|
1568
|
+
static _asciiToBytes(str) {
|
1569
|
+
const byteArray = [];
|
1570
|
+
for (let i = 0; i < str.length; ++i) {
|
1571
|
+
byteArray.push(str.charCodeAt(i) & 255);
|
1572
|
+
}
|
1573
|
+
return byteArray;
|
1574
|
+
}
|
1575
|
+
static _utf16leToBytes(str, units) {
|
1576
|
+
let c, hi, lo;
|
1577
|
+
const byteArray = [];
|
1578
|
+
for (let i = 0; i < str.length; ++i) {
|
1579
|
+
if ((units -= 2) < 0)
|
1580
|
+
break;
|
1581
|
+
c = str.charCodeAt(i);
|
1582
|
+
hi = c >> 8;
|
1583
|
+
lo = c % 256;
|
1584
|
+
byteArray.push(lo);
|
1585
|
+
byteArray.push(hi);
|
1586
|
+
}
|
1587
|
+
return byteArray;
|
1588
|
+
}
|
1589
|
+
static _hexSlice(buf, start, end) {
|
1590
|
+
const len = buf.length;
|
1591
|
+
if (!start || start < 0) {
|
1592
|
+
start = 0;
|
1593
|
+
}
|
1594
|
+
if (!end || end < 0 || end > len) {
|
1595
|
+
end = len;
|
1596
|
+
}
|
1597
|
+
let out = "";
|
1598
|
+
for (let i = start; i < end; ++i) {
|
1599
|
+
out += hexSliceLookupTable[buf[i]];
|
1600
|
+
}
|
1601
|
+
return out;
|
1602
|
+
}
|
1603
|
+
static _base64Slice(buf, start, end) {
|
1604
|
+
if (start === 0 && end === buf.length) {
|
1605
|
+
return fromByteArray(buf);
|
1606
|
+
} else {
|
1607
|
+
return fromByteArray(buf.slice(start, end));
|
1608
|
+
}
|
1609
|
+
}
|
1610
|
+
static _utf8Slice(buf, start, end) {
|
1611
|
+
end = Math.min(buf.length, end);
|
1612
|
+
const res = [];
|
1613
|
+
let i = start;
|
1614
|
+
while (i < end) {
|
1615
|
+
const firstByte = buf[i];
|
1616
|
+
let codePoint = null;
|
1617
|
+
let bytesPerSequence = firstByte > 239 ? 4 : firstByte > 223 ? 3 : firstByte > 191 ? 2 : 1;
|
1618
|
+
if (i + bytesPerSequence <= end) {
|
1619
|
+
let secondByte, thirdByte, fourthByte, tempCodePoint;
|
1620
|
+
switch (bytesPerSequence) {
|
1621
|
+
case 1:
|
1622
|
+
if (firstByte < 128) {
|
1623
|
+
codePoint = firstByte;
|
1624
|
+
}
|
1625
|
+
break;
|
1626
|
+
case 2:
|
1627
|
+
secondByte = buf[i + 1];
|
1628
|
+
if ((secondByte & 192) === 128) {
|
1629
|
+
tempCodePoint = (firstByte & 31) << 6 | secondByte & 63;
|
1630
|
+
if (tempCodePoint > 127) {
|
1631
|
+
codePoint = tempCodePoint;
|
1632
|
+
}
|
1633
|
+
}
|
1634
|
+
break;
|
1635
|
+
case 3:
|
1636
|
+
secondByte = buf[i + 1];
|
1637
|
+
thirdByte = buf[i + 2];
|
1638
|
+
if ((secondByte & 192) === 128 && (thirdByte & 192) === 128) {
|
1639
|
+
tempCodePoint = (firstByte & 15) << 12 | (secondByte & 63) << 6 | thirdByte & 63;
|
1640
|
+
if (tempCodePoint > 2047 && (tempCodePoint < 55296 || tempCodePoint > 57343)) {
|
1641
|
+
codePoint = tempCodePoint;
|
1642
|
+
}
|
1643
|
+
}
|
1644
|
+
break;
|
1645
|
+
case 4:
|
1646
|
+
secondByte = buf[i + 1];
|
1647
|
+
thirdByte = buf[i + 2];
|
1648
|
+
fourthByte = buf[i + 3];
|
1649
|
+
if ((secondByte & 192) === 128 && (thirdByte & 192) === 128 && (fourthByte & 192) === 128) {
|
1650
|
+
tempCodePoint = (firstByte & 15) << 18 | (secondByte & 63) << 12 | (thirdByte & 63) << 6 | fourthByte & 63;
|
1651
|
+
if (tempCodePoint > 65535 && tempCodePoint < 1114112) {
|
1652
|
+
codePoint = tempCodePoint;
|
1653
|
+
}
|
1654
|
+
}
|
1655
|
+
}
|
1656
|
+
}
|
1657
|
+
if (codePoint === null) {
|
1658
|
+
codePoint = 65533;
|
1659
|
+
bytesPerSequence = 1;
|
1660
|
+
} else if (codePoint > 65535) {
|
1661
|
+
codePoint -= 65536;
|
1662
|
+
res.push(codePoint >>> 10 & 1023 | 55296);
|
1663
|
+
codePoint = 56320 | codePoint & 1023;
|
1664
|
+
}
|
1665
|
+
res.push(codePoint);
|
1666
|
+
i += bytesPerSequence;
|
1667
|
+
}
|
1668
|
+
return Buffer._decodeCodePointsArray(res);
|
1669
|
+
}
|
1670
|
+
static _decodeCodePointsArray(codePoints) {
|
1671
|
+
const len = codePoints.length;
|
1672
|
+
if (len <= MAX_ARGUMENTS_LENGTH) {
|
1673
|
+
return String.fromCharCode.apply(String, codePoints);
|
1674
|
+
}
|
1675
|
+
let res = "";
|
1676
|
+
let i = 0;
|
1677
|
+
while (i < len) {
|
1678
|
+
res += String.fromCharCode.apply(String, codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH));
|
1679
|
+
}
|
1680
|
+
return res;
|
1681
|
+
}
|
1682
|
+
static _asciiSlice(buf, start, end) {
|
1683
|
+
let ret = "";
|
1684
|
+
end = Math.min(buf.length, end);
|
1685
|
+
for (let i = start; i < end; ++i) {
|
1686
|
+
ret += String.fromCharCode(buf[i] & 127);
|
1687
|
+
}
|
1688
|
+
return ret;
|
1689
|
+
}
|
1690
|
+
static _latin1Slice(buf, start, end) {
|
1691
|
+
let ret = "";
|
1692
|
+
end = Math.min(buf.length, end);
|
1693
|
+
for (let i = start; i < end; ++i) {
|
1694
|
+
ret += String.fromCharCode(buf[i]);
|
1695
|
+
}
|
1696
|
+
return ret;
|
1697
|
+
}
|
1698
|
+
static _utf16leSlice(buf, start, end) {
|
1699
|
+
const bytes = buf.slice(start, end);
|
1700
|
+
let res = "";
|
1701
|
+
for (let i = 0; i < bytes.length - 1; i += 2) {
|
1702
|
+
res += String.fromCharCode(bytes[i] + bytes[i + 1] * 256);
|
1703
|
+
}
|
1704
|
+
return res;
|
1705
|
+
}
|
1706
|
+
static _arrayIndexOf(arr, val, byteOffset, encoding, dir) {
|
1707
|
+
let indexSize = 1;
|
1708
|
+
let arrLength = arr.length;
|
1709
|
+
let valLength = val.length;
|
1710
|
+
if (encoding !== void 0) {
|
1711
|
+
encoding = Buffer._getEncoding(encoding);
|
1712
|
+
if (encoding === "ucs2" || encoding === "utf16le") {
|
1713
|
+
if (arr.length < 2 || val.length < 2) {
|
1714
|
+
return -1;
|
1715
|
+
}
|
1716
|
+
indexSize = 2;
|
1717
|
+
arrLength /= 2;
|
1718
|
+
valLength /= 2;
|
1719
|
+
byteOffset /= 2;
|
1720
|
+
}
|
1721
|
+
}
|
1722
|
+
function read(buf, i2) {
|
1723
|
+
if (indexSize === 1) {
|
1724
|
+
return buf[i2];
|
1725
|
+
} else {
|
1726
|
+
return buf.readUInt16BE(i2 * indexSize);
|
1727
|
+
}
|
1728
|
+
}
|
1729
|
+
let i;
|
1730
|
+
if (dir) {
|
1731
|
+
let foundIndex = -1;
|
1732
|
+
for (i = byteOffset; i < arrLength; i++) {
|
1733
|
+
if (read(arr, i) === read(val, foundIndex === -1 ? 0 : i - foundIndex)) {
|
1734
|
+
if (foundIndex === -1)
|
1735
|
+
foundIndex = i;
|
1736
|
+
if (i - foundIndex + 1 === valLength)
|
1737
|
+
return foundIndex * indexSize;
|
1738
|
+
} else {
|
1739
|
+
if (foundIndex !== -1)
|
1740
|
+
i -= i - foundIndex;
|
1741
|
+
foundIndex = -1;
|
1742
|
+
}
|
1743
|
+
}
|
1744
|
+
} else {
|
1745
|
+
if (byteOffset + valLength > arrLength) {
|
1746
|
+
byteOffset = arrLength - valLength;
|
1747
|
+
}
|
1748
|
+
for (i = byteOffset; i >= 0; i--) {
|
1749
|
+
let found = true;
|
1750
|
+
for (let j = 0; j < valLength; j++) {
|
1751
|
+
if (read(arr, i + j) !== read(val, j)) {
|
1752
|
+
found = false;
|
1753
|
+
break;
|
1754
|
+
}
|
1755
|
+
}
|
1756
|
+
if (found) {
|
1757
|
+
return i;
|
1758
|
+
}
|
1759
|
+
}
|
1760
|
+
}
|
1761
|
+
return -1;
|
1762
|
+
}
|
1763
|
+
static _checkOffset(offset, ext, length) {
|
1764
|
+
if (offset % 1 !== 0 || offset < 0)
|
1765
|
+
throw new RangeError("offset is not uint");
|
1766
|
+
if (offset + ext > length)
|
1767
|
+
throw new RangeError("Trying to access beyond buffer length");
|
1768
|
+
}
|
1769
|
+
static _checkInt(buf, value, offset, ext, max, min) {
|
1770
|
+
if (!Buffer.isBuffer(buf))
|
1771
|
+
throw new TypeError('"buffer" argument must be a Buffer instance');
|
1772
|
+
if (value > max || value < min)
|
1773
|
+
throw new RangeError('"value" argument is out of bounds');
|
1774
|
+
if (offset + ext > buf.length)
|
1775
|
+
throw new RangeError("Index out of range");
|
1776
|
+
}
|
1777
|
+
static _getEncoding(encoding) {
|
1778
|
+
let toLowerCase = false;
|
1779
|
+
let originalEncoding = "";
|
1780
|
+
for (; ; ) {
|
1781
|
+
switch (encoding) {
|
1782
|
+
case "hex":
|
1783
|
+
return "hex";
|
1784
|
+
case "utf8":
|
1785
|
+
return "utf8";
|
1786
|
+
case "ascii":
|
1787
|
+
return "ascii";
|
1788
|
+
case "binary":
|
1789
|
+
return "binary";
|
1790
|
+
case "latin1":
|
1791
|
+
return "latin1";
|
1792
|
+
case "ucs2":
|
1793
|
+
return "ucs2";
|
1794
|
+
case "utf16le":
|
1795
|
+
return "utf16le";
|
1796
|
+
case "base64":
|
1797
|
+
return "base64";
|
1798
|
+
default: {
|
1799
|
+
if (toLowerCase) {
|
1800
|
+
throw new TypeError("Unknown or unsupported encoding: " + originalEncoding);
|
1801
|
+
}
|
1802
|
+
toLowerCase = true;
|
1803
|
+
originalEncoding = encoding;
|
1804
|
+
encoding = encoding.toLowerCase();
|
1805
|
+
}
|
1806
|
+
}
|
1807
|
+
}
|
1808
|
+
}
|
1809
|
+
}
|
1810
|
+
const hexSliceLookupTable = function() {
|
1811
|
+
const alphabet = "0123456789abcdef";
|
1812
|
+
const table = new Array(256);
|
1813
|
+
for (let i = 0; i < 16; ++i) {
|
1814
|
+
const i16 = i * 16;
|
1815
|
+
for (let j = 0; j < 16; ++j) {
|
1816
|
+
table[i16 + j] = alphabet[i] + alphabet[j];
|
1817
|
+
}
|
1818
|
+
}
|
1819
|
+
return table;
|
1820
|
+
}();
|
1821
|
+
const INVALID_BASE64_RE = /[^+/0-9A-Za-z-_]/g;
|
1822
|
+
function base64clean(str) {
|
1823
|
+
str = str.split("=")[0];
|
1824
|
+
str = str.trim().replace(INVALID_BASE64_RE, "");
|
1825
|
+
if (str.length < 2)
|
1826
|
+
return "";
|
1827
|
+
while (str.length % 4 !== 0) {
|
1828
|
+
str = str + "=";
|
1829
|
+
}
|
1830
|
+
return str;
|
1831
|
+
}
|
1832
|
+
|
24
1833
|
function notEmpty(value) {
|
25
1834
|
return value !== null && value !== void 0;
|
26
1835
|
}
|
@@ -30,8 +1839,15 @@ function compact(arr) {
 function compactObject(obj) {
   return Object.fromEntries(Object.entries(obj).filter(([, value]) => notEmpty(value)));
 }
+function isBlob(value) {
+  try {
+    return value instanceof Blob;
+  } catch (error) {
+    return false;
+  }
+}
 function isObject(value) {
-  return Boolean(value) && typeof value === "object" && !Array.isArray(value) && !(value instanceof Date);
+  return Boolean(value) && typeof value === "object" && !Array.isArray(value) && !(value instanceof Date) && !isBlob(value);
 }
 function isDefined(value) {
   return value !== null && value !== void 0;
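The `isBlob` guard above changes what the client treats as a plain object: `Blob`/`File` values are no longer walked like record objects. A minimal sketch of the same check, for illustration only (these helpers are internal to the bundle and not exported):

```ts
// Illustrative re-implementation of the internal helpers above.
function isBlobValue(value: unknown): boolean {
  try {
    return value instanceof Blob;
  } catch {
    return false; // runtimes without a global Blob
  }
}

function isPlainObject(value: unknown): boolean {
  return (
    Boolean(value) &&
    typeof value === "object" &&
    !Array.isArray(value) &&
    !(value instanceof Date) &&
    !isBlobValue(value)
  );
}

console.log(isPlainObject({ name: "avatar.png" })); // true
console.log(isPlainObject(new Blob(["binary"])));   // false: blobs are passed through untouched
```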
@@ -234,17 +2050,11 @@ function getPreviewBranch() {
   }
 }
 
-var __defProp$8 = Object.defineProperty;
-var __defNormalProp$8 = (obj, key, value) => key in obj ? __defProp$8(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
-var __publicField$8 = (obj, key, value) => {
-  __defNormalProp$8(obj, typeof key !== "symbol" ? key + "" : key, value);
-  return value;
-};
 var __accessCheck$8 = (obj, member, msg) => {
   if (!member.has(obj))
     throw TypeError("Cannot " + msg);
 };
-var __privateGet$
+var __privateGet$7 = (obj, member, getter) => {
   __accessCheck$8(obj, member, "read from private field");
   return getter ? getter.call(obj) : member.get(obj);
 };
@@ -253,7 +2063,7 @@ var __privateAdd$8 = (obj, member, value) => {
     throw TypeError("Cannot add the same private member more than once");
   member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
 };
-var __privateSet$
+var __privateSet$6 = (obj, member, value, setter) => {
   __accessCheck$8(obj, member, "write to private field");
   setter ? setter.call(obj, value) : member.set(obj, value);
   return value;
@@ -263,14 +2073,13 @@ var __privateMethod$4 = (obj, member, method) => {
   return method;
 };
 var _fetch, _queue, _concurrency, _enqueue, enqueue_fn;
-const REQUEST_TIMEOUT =
+const REQUEST_TIMEOUT = 5 * 60 * 1e3;
 function getFetchImplementation(userFetch) {
   const globalFetch = typeof fetch !== "undefined" ? fetch : void 0;
-  const
+  const globalThisFetch = typeof globalThis !== "undefined" ? globalThis.fetch : void 0;
+  const fetchImpl = userFetch ?? globalFetch ?? globalThisFetch;
   if (!fetchImpl) {
-    throw new Error(
-      `Couldn't find \`fetch\`. Install a fetch implementation such as \`node-fetch\` and pass it explicitly.`
-    );
+    throw new Error(`Couldn't find a global \`fetch\`. Pass a fetch implementation explicitly.`);
   }
   return fetchImpl;
 }
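With the change above the bundle resolves `fetch` from the caller-supplied option, the `fetch` global, or `globalThis.fetch`, and throws only if none exists. A hedged sketch of supplying one explicitly on a runtime without a global fetch; `node-fetch` is just an example, and the option name matches how this bundle consumes it (`options.fetch`):

```ts
import fetch from "node-fetch";
import { XataApiClient } from "@xata.io/client";

// Only needed where no global fetch is available; otherwise the
// client now falls back to fetch or globalThis.fetch on its own.
const api = new XataApiClient({
  apiKey: process.env.XATA_API_KEY!,
  fetch: fetch as unknown as typeof globalThis.fetch
});
```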
@@ -280,21 +2089,19 @@ class ApiRequestPool {
|
|
280
2089
|
__privateAdd$8(this, _fetch, void 0);
|
281
2090
|
__privateAdd$8(this, _queue, void 0);
|
282
2091
|
__privateAdd$8(this, _concurrency, void 0);
|
283
|
-
|
284
|
-
|
285
|
-
__privateSet$8(this, _queue, []);
|
286
|
-
__privateSet$8(this, _concurrency, concurrency);
|
2092
|
+
__privateSet$6(this, _queue, []);
|
2093
|
+
__privateSet$6(this, _concurrency, concurrency);
|
287
2094
|
this.running = 0;
|
288
2095
|
this.started = 0;
|
289
2096
|
}
|
290
2097
|
setFetch(fetch2) {
|
291
|
-
__privateSet$
|
2098
|
+
__privateSet$6(this, _fetch, fetch2);
|
292
2099
|
}
|
293
2100
|
getFetch() {
|
294
|
-
if (!__privateGet$
|
2101
|
+
if (!__privateGet$7(this, _fetch)) {
|
295
2102
|
throw new Error("Fetch not set");
|
296
2103
|
}
|
297
|
-
return __privateGet$
|
2104
|
+
return __privateGet$7(this, _fetch);
|
298
2105
|
}
|
299
2106
|
request(url, options) {
|
300
2107
|
const start = /* @__PURE__ */ new Date();
|
@@ -326,19 +2133,19 @@ _queue = new WeakMap();
|
|
326
2133
|
_concurrency = new WeakMap();
|
327
2134
|
_enqueue = new WeakSet();
|
328
2135
|
enqueue_fn = function(task) {
|
329
|
-
const promise = new Promise((resolve) => __privateGet$
|
2136
|
+
const promise = new Promise((resolve) => __privateGet$7(this, _queue).push(resolve)).finally(() => {
|
330
2137
|
this.started--;
|
331
2138
|
this.running++;
|
332
2139
|
}).then(() => task()).finally(() => {
|
333
2140
|
this.running--;
|
334
|
-
const next = __privateGet$
|
2141
|
+
const next = __privateGet$7(this, _queue).shift();
|
335
2142
|
if (next !== void 0) {
|
336
2143
|
this.started++;
|
337
2144
|
next();
|
338
2145
|
}
|
339
2146
|
});
|
340
|
-
if (this.running + this.started < __privateGet$
|
341
|
-
const next = __privateGet$
|
2147
|
+
if (this.running + this.started < __privateGet$7(this, _concurrency)) {
|
2148
|
+
const next = __privateGet$7(this, _queue).shift();
|
342
2149
|
if (next !== void 0) {
|
343
2150
|
this.started++;
|
344
2151
|
next();
|
@@ -527,26 +2334,16 @@ function defaultOnOpen(response) {
   }
 }
 
-const VERSION = "0.
+const VERSION = "0.29.4";
 
-var __defProp$7 = Object.defineProperty;
-var __defNormalProp$7 = (obj, key, value) => key in obj ? __defProp$7(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
-var __publicField$7 = (obj, key, value) => {
-  __defNormalProp$7(obj, typeof key !== "symbol" ? key + "" : key, value);
-  return value;
-};
 class ErrorWithCause extends Error {
   constructor(message, options) {
     super(message, options);
-    __publicField$7(this, "cause");
   }
 }
 class FetcherError extends ErrorWithCause {
   constructor(status, data, requestId) {
     super(getMessage(data));
-    __publicField$7(this, "status");
-    __publicField$7(this, "requestId");
-    __publicField$7(this, "errors");
     this.status = status;
     this.errors = isBulkError(data) ? data.errors : [{ message: getMessage(data), status }];
     this.requestId = requestId;
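`ErrorWithCause` and `FetcherError` now rely on native class fields instead of the removed `__publicField$7` helper; the observable shape (`status`, `errors`, `requestId`) is unchanged. A small sketch of narrowing such an error structurally — the table name and call are illustrative, not part of this diff:

```ts
async function safeGetFirst(xata: any) {
  try {
    return await xata.db.users.getFirst(); // table name is illustrative
  } catch (error) {
    const e = error as { status?: number; requestId?: string; errors?: { message: string }[] };
    if (typeof e.status === "number") {
      // Fields assigned in the FetcherError constructor above.
      console.error(`Request ${e.requestId} failed with HTTP ${e.status}`);
      e.errors?.forEach((detail) => console.error(detail.message));
    }
    throw error;
  }
}
```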
@@ -580,6 +2377,67 @@ function getMessage(data) {
   }
 }
 
+function getHostUrl(provider, type) {
+  if (isHostProviderAlias(provider)) {
+    return providers[provider][type];
+  } else if (isHostProviderBuilder(provider)) {
+    return provider[type];
+  }
+  throw new Error("Invalid API provider");
+}
+const providers = {
+  production: {
+    main: "https://api.xata.io",
+    workspaces: "https://{workspaceId}.{region}.xata.sh"
+  },
+  staging: {
+    main: "https://api.staging-xata.dev",
+    workspaces: "https://{workspaceId}.{region}.staging-xata.dev"
+  },
+  dev: {
+    main: "https://api.dev-xata.dev",
+    workspaces: "https://{workspaceId}.{region}.dev-xata.dev"
+  },
+  local: {
+    main: "http://localhost:6001",
+    workspaces: "http://{workspaceId}.{region}.localhost:6001"
+  }
+};
+function isHostProviderAlias(alias) {
+  return isString(alias) && Object.keys(providers).includes(alias);
+}
+function isHostProviderBuilder(builder) {
+  return isObject(builder) && isString(builder.main) && isString(builder.workspaces);
+}
+function parseProviderString(provider = "production") {
+  if (isHostProviderAlias(provider)) {
+    return provider;
+  }
+  const [main, workspaces] = provider.split(",");
+  if (!main || !workspaces)
+    return null;
+  return { main, workspaces };
+}
+function buildProviderString(provider) {
+  if (isHostProviderAlias(provider))
+    return provider;
+  return `${provider.main},${provider.workspaces}`;
+}
+function parseWorkspacesUrlParts(url) {
+  if (!isString(url))
+    return null;
+  const matches = {
+    production: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.xata\.sh\/db\/([^:]+):?(.*)?/),
+    staging: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.staging-xata\.dev\/db\/([^:]+):?(.*)?/),
+    dev: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.dev-xata\.dev\/db\/([^:]+):?(.*)?/),
+    local: url.match(/(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:(?:\d+)\/db\/([^:]+):?(.*)?/)
+  };
+  const [host, match] = Object.entries(matches).find(([, match2]) => match2 !== null) ?? [];
+  if (!isHostProviderAlias(host) || !match)
+    return null;
+  return { workspace: match[1], region: match[2], database: match[3], branch: match[4], host };
+}
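The provider helpers above now know about a `local` host and can decompose a workspace URL down to database and branch. A few hedged examples of what they return; `parseProviderString` is part of the package's public surface, the other helpers are internal to the bundle, and the concrete workspace/region values are made up:

```ts
parseProviderString("production");
// => "production" (aliases pass through unchanged)

parseProviderString("https://api.example.com,https://{workspaceId}.{region}.example.com");
// => { main: "https://api.example.com", workspaces: "https://{workspaceId}.{region}.example.com" }

parseWorkspacesUrlParts("https://ws-1234.us-east-1.xata.sh/db/app:main");
// => { workspace: "ws-1234", region: "us-east-1", database: "app", branch: "main", host: "production" }
```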
|
2440
|
+
|
583
2441
|
const pool = new ApiRequestPool();
|
584
2442
|
const resolveUrl = (url, queryParams = {}, pathParams = {}) => {
|
585
2443
|
const cleanQueryParams = Object.entries(queryParams).reduce((acc, [key, value]) => {
|
@@ -595,6 +2453,7 @@ const resolveUrl = (url, queryParams = {}, pathParams = {}) => {
|
|
595
2453
|
return url.replace(/\{\w*\}/g, (key) => cleanPathParams[key.slice(1, -1)]) + queryString;
|
596
2454
|
};
|
597
2455
|
function buildBaseUrl({
|
2456
|
+
method,
|
598
2457
|
endpoint,
|
599
2458
|
path,
|
600
2459
|
workspacesApiUrl,
|
@@ -602,7 +2461,24 @@ function buildBaseUrl({
|
|
602
2461
|
pathParams = {}
|
603
2462
|
}) {
|
604
2463
|
if (endpoint === "dataPlane") {
|
605
|
-
|
2464
|
+
let url = isString(workspacesApiUrl) ? `${workspacesApiUrl}${path}` : workspacesApiUrl(path, pathParams);
|
2465
|
+
if (method.toUpperCase() === "PUT" && [
|
2466
|
+
"/db/{dbBranchName}/tables/{tableName}/data/{recordId}/column/{columnName}/file",
|
2467
|
+
"/db/{dbBranchName}/tables/{tableName}/data/{recordId}/column/{columnName}/file/{fileId}"
|
2468
|
+
].includes(path)) {
|
2469
|
+
const { host } = parseWorkspacesUrlParts(url) ?? {};
|
2470
|
+
switch (host) {
|
2471
|
+
case "production":
|
2472
|
+
url = url.replace("xata.sh", "upload.xata.sh");
|
2473
|
+
break;
|
2474
|
+
case "staging":
|
2475
|
+
url = url.replace("staging-xata.dev", "upload.staging-xata.dev");
|
2476
|
+
break;
|
2477
|
+
case "dev":
|
2478
|
+
url = url.replace("dev-xata.dev", "upload.dev-xata.dev");
|
2479
|
+
break;
|
2480
|
+
}
|
2481
|
+
}
|
606
2482
|
const urlWithWorkspace = isString(pathParams.workspace) ? url.replace("{workspaceId}", String(pathParams.workspace)) : url;
|
607
2483
|
return isString(pathParams.region) ? urlWithWorkspace.replace("{region}", String(pathParams.region)) : urlWithWorkspace;
|
608
2484
|
}
|
@@ -613,11 +2489,14 @@ function hostHeader(url) {
   const { groups } = pattern.exec(url) ?? {};
   return groups?.host ? { Host: groups.host } : {};
 }
-function parseBody(body, headers) {
+async function parseBody(body, headers) {
   if (!isDefined(body))
     return void 0;
+  if (isBlob(body) || typeof body.text === "function") {
+    return body;
+  }
   const { "Content-Type": contentType } = headers ?? {};
-  if (String(contentType).toLowerCase() === "application/json") {
+  if (String(contentType).toLowerCase() === "application/json" && isObject(body)) {
     return JSON.stringify(body);
   }
   return body;
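`parseBody` is now async and short-circuits for binary payloads: anything that is a `Blob` or exposes a `.text()` method is forwarded untouched, and only plain objects are JSON-stringified. A sketch of the decision order, for illustration only (the function itself is internal to the bundle):

```ts
// Sketch of the decision order above, not the exported API.
async function sketchParseBody(body: unknown, contentType?: string) {
  if (body === undefined || body === null) return undefined;
  if (body instanceof Blob || typeof (body as any)?.text === "function") {
    return body; // file uploads pass through unchanged
  }
  if (String(contentType).toLowerCase() === "application/json" && typeof body === "object") {
    return JSON.stringify(body);
  }
  return body;
}
```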
@@ -648,9 +2527,9 @@ async function fetch$1({
|
|
648
2527
|
return await trace(
|
649
2528
|
`${method.toUpperCase()} ${path}`,
|
650
2529
|
async ({ setAttributes }) => {
|
651
|
-
const baseUrl = buildBaseUrl({ endpoint, path, workspacesApiUrl, pathParams, apiUrl });
|
2530
|
+
const baseUrl = buildBaseUrl({ method, endpoint, path, workspacesApiUrl, pathParams, apiUrl });
|
652
2531
|
const fullUrl = resolveUrl(baseUrl, queryParams, pathParams);
|
653
|
-
const url = fullUrl.includes("localhost") ? fullUrl.replace(/^[^.]+\./, "http://") : fullUrl;
|
2532
|
+
const url = fullUrl.includes("localhost") ? fullUrl.replace(/^[^.]+\.[^.]+\./, "http://") : fullUrl;
|
654
2533
|
setAttributes({
|
655
2534
|
[TraceAttributes.HTTP_URL]: url,
|
656
2535
|
[TraceAttributes.HTTP_TARGET]: resolveUrl(path, queryParams, pathParams)
|
@@ -674,7 +2553,7 @@ async function fetch$1({
|
|
674
2553
|
const response = await pool.request(url, {
|
675
2554
|
...fetchOptions,
|
676
2555
|
method: method.toUpperCase(),
|
677
|
-
body: parseBody(body, headers),
|
2556
|
+
body: await parseBody(body, headers),
|
678
2557
|
headers,
|
679
2558
|
signal
|
680
2559
|
});
|
@@ -685,7 +2564,8 @@ async function fetch$1({
         [TraceAttributes.HTTP_REQUEST_ID]: requestId,
         [TraceAttributes.HTTP_STATUS_CODE]: response.status,
         [TraceAttributes.HTTP_HOST]: host,
-        [TraceAttributes.HTTP_SCHEME]: protocol?.replace(":", "")
+        [TraceAttributes.HTTP_SCHEME]: protocol?.replace(":", ""),
+        [TraceAttributes.CLOUDFLARE_RAY_ID]: response.headers?.get("cf-ray") ?? void 0
       });
       const message = response.headers?.get("x-xata-message");
       if (message)
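Responses now also record the Cloudflare Ray ID under the `cf.ray` trace attribute. A hedged sketch of a tracer that surfaces it; the real tracer option accepts more context than shown here, so treat this only as an outline of the `setAttributes` callback used above:

```ts
type Attrs = Record<string, string | number | boolean | undefined>;

async function loggingTrace<T>(
  name: string,
  fn: (ctx: { setAttributes: (attrs: Attrs) => void }) => Promise<T>
): Promise<T> {
  const attributes: Attrs = {};
  const result = await fn({ setAttributes: (attrs) => Object.assign(attributes, attrs) });
  if (attributes["cf.ray"]) {
    console.debug(`${name} served by Cloudflare ray ${attributes["cf.ray"]}`);
  }
  return result;
}
```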
@@ -730,7 +2610,7 @@ function fetchSSERequest({
|
|
730
2610
|
clientName,
|
731
2611
|
xataAgentExtra
|
732
2612
|
}) {
|
733
|
-
const baseUrl = buildBaseUrl({ endpoint, path, workspacesApiUrl, pathParams, apiUrl });
|
2613
|
+
const baseUrl = buildBaseUrl({ method, endpoint, path, workspacesApiUrl, pathParams, apiUrl });
|
734
2614
|
const fullUrl = resolveUrl(baseUrl, queryParams, pathParams);
|
735
2615
|
const url = fullUrl.includes("localhost") ? fullUrl.replace(/^[^.]+\./, "http://") : fullUrl;
|
736
2616
|
void fetchEventSource(url, {
|
@@ -773,12 +2653,35 @@ function parseUrl(url) {
|
|
773
2653
|
|
774
2654
|
const dataPlaneFetch = async (options) => fetch$1({ ...options, endpoint: "dataPlane" });
|
775
2655
|
|
2656
|
+
const applyMigration = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/apply", method: "post", ...variables, signal });
|
2657
|
+
const adaptTable = (variables, signal) => dataPlaneFetch({
|
2658
|
+
url: "/db/{dbBranchName}/migrations/adapt/{tableName}",
|
2659
|
+
method: "post",
|
2660
|
+
...variables,
|
2661
|
+
signal
|
2662
|
+
});
|
2663
|
+
const adaptAllTables = (variables, signal) => dataPlaneFetch({
|
2664
|
+
url: "/db/{dbBranchName}/migrations/adapt",
|
2665
|
+
method: "post",
|
2666
|
+
...variables,
|
2667
|
+
signal
|
2668
|
+
});
|
2669
|
+
const getBranchMigrationJobStatus = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/status", method: "get", ...variables, signal });
|
2670
|
+
const getMigrationJobStatus = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/jobs/{jobId}", method: "get", ...variables, signal });
|
2671
|
+
const getMigrationHistory = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/migrations/history", method: "get", ...variables, signal });
|
776
2672
|
const getBranchList = (variables, signal) => dataPlaneFetch({
|
777
2673
|
url: "/dbs/{dbName}",
|
778
2674
|
method: "get",
|
779
2675
|
...variables,
|
780
2676
|
signal
|
781
2677
|
});
|
2678
|
+
const getDatabaseSettings = (variables, signal) => dataPlaneFetch({
|
2679
|
+
url: "/dbs/{dbName}/settings",
|
2680
|
+
method: "get",
|
2681
|
+
...variables,
|
2682
|
+
signal
|
2683
|
+
});
|
2684
|
+
const updateDatabaseSettings = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/settings", method: "patch", ...variables, signal });
|
782
2685
|
const getBranchDetails = (variables, signal) => dataPlaneFetch({
|
783
2686
|
url: "/db/{dbBranchName}",
|
784
2687
|
method: "get",
|
@@ -792,6 +2695,12 @@ const deleteBranch = (variables, signal) => dataPlaneFetch({
|
|
792
2695
|
...variables,
|
793
2696
|
signal
|
794
2697
|
});
|
2698
|
+
const getSchema = (variables, signal) => dataPlaneFetch({
|
2699
|
+
url: "/db/{dbBranchName}/schema",
|
2700
|
+
method: "get",
|
2701
|
+
...variables,
|
2702
|
+
signal
|
2703
|
+
});
|
795
2704
|
const copyBranch = (variables, signal) => dataPlaneFetch({
|
796
2705
|
url: "/db/{dbBranchName}/copy",
|
797
2706
|
method: "post",
|
@@ -957,12 +2866,6 @@ const searchTable = (variables, signal) => dataPlaneFetch({
|
|
957
2866
|
...variables,
|
958
2867
|
signal
|
959
2868
|
});
|
960
|
-
const sqlQuery = (variables, signal) => dataPlaneFetch({
|
961
|
-
url: "/db/{dbBranchName}/sql",
|
962
|
-
method: "post",
|
963
|
-
...variables,
|
964
|
-
signal
|
965
|
-
});
|
966
2869
|
const vectorSearchTable = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/vectorSearch", method: "post", ...variables, signal });
|
967
2870
|
const askTable = (variables, signal) => dataPlaneFetch({
|
968
2871
|
url: "/db/{dbBranchName}/tables/{tableName}/ask",
|
@@ -979,7 +2882,38 @@ const fileAccess = (variables, signal) => dataPlaneFetch({
|
|
979
2882
|
...variables,
|
980
2883
|
signal
|
981
2884
|
});
|
2885
|
+
const fileUpload = (variables, signal) => dataPlaneFetch({
|
2886
|
+
url: "/file/{fileId}",
|
2887
|
+
method: "put",
|
2888
|
+
...variables,
|
2889
|
+
signal
|
2890
|
+
});
|
2891
|
+
const sqlQuery = (variables, signal) => dataPlaneFetch({
|
2892
|
+
url: "/db/{dbBranchName}/sql",
|
2893
|
+
method: "post",
|
2894
|
+
...variables,
|
2895
|
+
signal
|
2896
|
+
});
|
982
2897
|
const operationsByTag$2 = {
|
2898
|
+
migrations: {
|
2899
|
+
applyMigration,
|
2900
|
+
adaptTable,
|
2901
|
+
adaptAllTables,
|
2902
|
+
getBranchMigrationJobStatus,
|
2903
|
+
getMigrationJobStatus,
|
2904
|
+
getMigrationHistory,
|
2905
|
+
getSchema,
|
2906
|
+
getBranchMigrationHistory,
|
2907
|
+
getBranchMigrationPlan,
|
2908
|
+
executeBranchMigrationPlan,
|
2909
|
+
getBranchSchemaHistory,
|
2910
|
+
compareBranchWithUserSchema,
|
2911
|
+
compareBranchSchemas,
|
2912
|
+
updateBranchSchema,
|
2913
|
+
previewBranchSchemaEdit,
|
2914
|
+
applyBranchSchemaEdit,
|
2915
|
+
pushBranchMigrations
|
2916
|
+
},
|
983
2917
|
branch: {
|
984
2918
|
getBranchList,
|
985
2919
|
getBranchDetails,
|
@@ -994,18 +2928,7 @@ const operationsByTag$2 = {
|
|
994
2928
|
removeGitBranchesEntry,
|
995
2929
|
resolveBranch
|
996
2930
|
},
|
997
|
-
|
998
|
-
getBranchMigrationHistory,
|
999
|
-
getBranchMigrationPlan,
|
1000
|
-
executeBranchMigrationPlan,
|
1001
|
-
getBranchSchemaHistory,
|
1002
|
-
compareBranchWithUserSchema,
|
1003
|
-
compareBranchSchemas,
|
1004
|
-
updateBranchSchema,
|
1005
|
-
previewBranchSchemaEdit,
|
1006
|
-
applyBranchSchemaEdit,
|
1007
|
-
pushBranchMigrations
|
1008
|
-
},
|
2931
|
+
database: { getDatabaseSettings, updateDatabaseSettings },
|
1009
2932
|
migrationRequests: {
|
1010
2933
|
queryMigrationRequests,
|
1011
2934
|
createMigrationRequest,
|
@@ -1038,18 +2961,18 @@ const operationsByTag$2 = {
|
|
1038
2961
|
deleteRecord,
|
1039
2962
|
bulkInsertTableRecords
|
1040
2963
|
},
|
1041
|
-
files: { getFileItem, putFileItem, deleteFileItem, getFile, putFile, deleteFile, fileAccess },
|
2964
|
+
files: { getFileItem, putFileItem, deleteFileItem, getFile, putFile, deleteFile, fileAccess, fileUpload },
|
1042
2965
|
searchAndFilter: {
|
1043
2966
|
queryTable,
|
1044
2967
|
searchBranch,
|
1045
2968
|
searchTable,
|
1046
|
-
sqlQuery,
|
1047
2969
|
vectorSearchTable,
|
1048
2970
|
askTable,
|
1049
2971
|
askTableSession,
|
1050
2972
|
summarizeTable,
|
1051
2973
|
aggregateTable
|
1052
|
-
}
|
2974
|
+
},
|
2975
|
+
sql: { sqlQuery }
|
1053
2976
|
};
|
1054
2977
|
|
1055
2978
|
const controlPlaneFetch = async (options) => fetch$1({ ...options, endpoint: "controlPlane" });
|
@@ -1098,12 +3021,25 @@ const getUserOAuthClients = (variables, signal) => controlPlaneFetch({
|
|
1098
3021
|
...variables,
|
1099
3022
|
signal
|
1100
3023
|
});
|
3024
|
+
const deleteUserOAuthClient = (variables, signal) => controlPlaneFetch({
|
3025
|
+
url: "/user/oauth/clients/{clientId}",
|
3026
|
+
method: "delete",
|
3027
|
+
...variables,
|
3028
|
+
signal
|
3029
|
+
});
|
1101
3030
|
const getUserOAuthAccessTokens = (variables, signal) => controlPlaneFetch({
|
1102
3031
|
url: "/user/oauth/tokens",
|
1103
3032
|
method: "get",
|
1104
3033
|
...variables,
|
1105
3034
|
signal
|
1106
3035
|
});
|
3036
|
+
const deleteOAuthAccessToken = (variables, signal) => controlPlaneFetch({
|
3037
|
+
url: "/user/oauth/tokens/{token}",
|
3038
|
+
method: "delete",
|
3039
|
+
...variables,
|
3040
|
+
signal
|
3041
|
+
});
|
3042
|
+
const updateOAuthAccessToken = (variables, signal) => controlPlaneFetch({ url: "/user/oauth/tokens/{token}", method: "patch", ...variables, signal });
|
1107
3043
|
const getWorkspacesList = (variables, signal) => controlPlaneFetch({
|
1108
3044
|
url: "/workspaces",
|
1109
3045
|
method: "get",
|
@@ -1134,6 +3070,8 @@ const deleteWorkspace = (variables, signal) => controlPlaneFetch({
|
|
1134
3070
|
...variables,
|
1135
3071
|
signal
|
1136
3072
|
});
|
3073
|
+
const getWorkspaceSettings = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/settings", method: "get", ...variables, signal });
|
3074
|
+
const updateWorkspaceSettings = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/settings", method: "patch", ...variables, signal });
|
1137
3075
|
const getWorkspaceMembersList = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/members", method: "get", ...variables, signal });
|
1138
3076
|
const updateWorkspaceMemberRole = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/members/{userId}", method: "put", ...variables, signal });
|
1139
3077
|
const removeWorkspaceMember = (variables, signal) => controlPlaneFetch({
|
@@ -1147,6 +3085,15 @@ const updateWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({ u
|
|
1147
3085
|
const cancelWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/invites/{inviteId}", method: "delete", ...variables, signal });
|
1148
3086
|
const acceptWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/invites/{inviteKey}/accept", method: "post", ...variables, signal });
|
1149
3087
|
const resendWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/invites/{inviteId}/resend", method: "post", ...variables, signal });
|
3088
|
+
const listClusters = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/clusters", method: "get", ...variables, signal });
|
3089
|
+
const createCluster = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/clusters", method: "post", ...variables, signal });
|
3090
|
+
const getCluster = (variables, signal) => controlPlaneFetch({
|
3091
|
+
url: "/workspaces/{workspaceId}/clusters/{clusterId}",
|
3092
|
+
method: "get",
|
3093
|
+
...variables,
|
3094
|
+
signal
|
3095
|
+
});
|
3096
|
+
const updateCluster = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/clusters/{clusterId}", method: "patch", ...variables, signal });
|
1150
3097
|
const getDatabaseList = (variables, signal) => controlPlaneFetch({
|
1151
3098
|
url: "/workspaces/{workspaceId}/dbs",
|
1152
3099
|
method: "get",
|
@@ -1173,15 +3120,25 @@ const listRegions = (variables, signal) => controlPlaneFetch({
|
|
1173
3120
|
signal
|
1174
3121
|
});
|
1175
3122
|
const operationsByTag$1 = {
|
1176
|
-
|
3123
|
+
oAuth: {
|
3124
|
+
getAuthorizationCode,
|
3125
|
+
grantAuthorizationCode,
|
3126
|
+
getUserOAuthClients,
|
3127
|
+
deleteUserOAuthClient,
|
3128
|
+
getUserOAuthAccessTokens,
|
3129
|
+
deleteOAuthAccessToken,
|
3130
|
+
updateOAuthAccessToken
|
3131
|
+
},
|
1177
3132
|
users: { getUser, updateUser, deleteUser },
|
1178
|
-
authentication: { getUserAPIKeys, createUserAPIKey, deleteUserAPIKey
|
3133
|
+
authentication: { getUserAPIKeys, createUserAPIKey, deleteUserAPIKey },
|
1179
3134
|
workspaces: {
|
1180
3135
|
getWorkspacesList,
|
1181
3136
|
createWorkspace,
|
1182
3137
|
getWorkspace,
|
1183
3138
|
updateWorkspace,
|
1184
3139
|
deleteWorkspace,
|
3140
|
+
getWorkspaceSettings,
|
3141
|
+
updateWorkspaceSettings,
|
1185
3142
|
getWorkspaceMembersList,
|
1186
3143
|
updateWorkspaceMemberRole,
|
1187
3144
|
removeWorkspaceMember
|
@@ -1193,6 +3150,7 @@ const operationsByTag$1 = {
|
|
1193
3150
|
acceptWorkspaceMemberInvite,
|
1194
3151
|
resendWorkspaceMemberInvite
|
1195
3152
|
},
|
3153
|
+
xbcontrolOther: { listClusters, createCluster, getCluster, updateCluster },
|
1196
3154
|
databases: {
|
1197
3155
|
getDatabaseList,
|
1198
3156
|
createDatabase,
|
@@ -1209,66 +3167,11 @@ const operationsByTag$1 = {
|
|
1209
3167
|
|
1210
3168
|
const operationsByTag = deepMerge(operationsByTag$2, operationsByTag$1);
|
1211
3169
|
|
1212
|
-
function getHostUrl(provider, type) {
|
1213
|
-
if (isHostProviderAlias(provider)) {
|
1214
|
-
return providers[provider][type];
|
1215
|
-
} else if (isHostProviderBuilder(provider)) {
|
1216
|
-
return provider[type];
|
1217
|
-
}
|
1218
|
-
throw new Error("Invalid API provider");
|
1219
|
-
}
|
1220
|
-
const providers = {
|
1221
|
-
production: {
|
1222
|
-
main: "https://api.xata.io",
|
1223
|
-
workspaces: "https://{workspaceId}.{region}.xata.sh"
|
1224
|
-
},
|
1225
|
-
staging: {
|
1226
|
-
main: "https://api.staging-xata.dev",
|
1227
|
-
workspaces: "https://{workspaceId}.{region}.staging-xata.dev"
|
1228
|
-
},
|
1229
|
-
dev: {
|
1230
|
-
main: "https://api.dev-xata.dev",
|
1231
|
-
workspaces: "https://{workspaceId}.{region}.dev-xata.dev"
|
1232
|
-
}
|
1233
|
-
};
|
1234
|
-
function isHostProviderAlias(alias) {
|
1235
|
-
return isString(alias) && Object.keys(providers).includes(alias);
|
1236
|
-
}
|
1237
|
-
function isHostProviderBuilder(builder) {
|
1238
|
-
return isObject(builder) && isString(builder.main) && isString(builder.workspaces);
|
1239
|
-
}
|
1240
|
-
function parseProviderString(provider = "production") {
|
1241
|
-
if (isHostProviderAlias(provider)) {
|
1242
|
-
return provider;
|
1243
|
-
}
|
1244
|
-
const [main, workspaces] = provider.split(",");
|
1245
|
-
if (!main || !workspaces)
|
1246
|
-
return null;
|
1247
|
-
return { main, workspaces };
|
1248
|
-
}
|
1249
|
-
function buildProviderString(provider) {
|
1250
|
-
if (isHostProviderAlias(provider))
|
1251
|
-
return provider;
|
1252
|
-
return `${provider.main},${provider.workspaces}`;
|
1253
|
-
}
|
1254
|
-
function parseWorkspacesUrlParts(url) {
|
1255
|
-
if (!isString(url))
|
1256
|
-
return null;
|
1257
|
-
const regex = /(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.xata\.sh.*/;
|
1258
|
-
const regexDev = /(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.dev-xata\.dev.*/;
|
1259
|
-
const regexStaging = /(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.staging-xata\.dev.*/;
|
1260
|
-
const regexProdTesting = /(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.xata\.tech.*/;
|
1261
|
-
const match = url.match(regex) || url.match(regexDev) || url.match(regexStaging) || url.match(regexProdTesting);
|
1262
|
-
if (!match)
|
1263
|
-
return null;
|
1264
|
-
return { workspace: match[1], region: match[2] };
|
1265
|
-
}
|
1266
|
-
|
1267
3170
|
var __accessCheck$7 = (obj, member, msg) => {
|
1268
3171
|
if (!member.has(obj))
|
1269
3172
|
throw TypeError("Cannot " + msg);
|
1270
3173
|
};
|
1271
|
-
var __privateGet$
|
3174
|
+
var __privateGet$6 = (obj, member, getter) => {
|
1272
3175
|
__accessCheck$7(obj, member, "read from private field");
|
1273
3176
|
return getter ? getter.call(obj) : member.get(obj);
|
1274
3177
|
};
|
@@ -1277,7 +3180,7 @@ var __privateAdd$7 = (obj, member, value) => {
|
|
1277
3180
|
throw TypeError("Cannot add the same private member more than once");
|
1278
3181
|
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
1279
3182
|
};
|
1280
|
-
var __privateSet$
|
3183
|
+
var __privateSet$5 = (obj, member, value, setter) => {
|
1281
3184
|
__accessCheck$7(obj, member, "write to private field");
|
1282
3185
|
setter ? setter.call(obj, value) : member.set(obj, value);
|
1283
3186
|
return value;
|
@@ -1294,7 +3197,7 @@ class XataApiClient {
|
|
1294
3197
|
if (!apiKey) {
|
1295
3198
|
throw new Error("Could not resolve a valid apiKey");
|
1296
3199
|
}
|
1297
|
-
__privateSet$
|
3200
|
+
__privateSet$5(this, _extraProps, {
|
1298
3201
|
apiUrl: getHostUrl(provider, "main"),
|
1299
3202
|
workspacesApiUrl: getHostUrl(provider, "workspaces"),
|
1300
3203
|
fetch: getFetchImplementation(options.fetch),
|
@@ -1306,64 +3209,64 @@ class XataApiClient {
|
|
1306
3209
|
});
|
1307
3210
|
}
|
1308
3211
|
get user() {
|
1309
|
-
if (!__privateGet$
|
1310
|
-
__privateGet$
|
1311
|
-
return __privateGet$
|
3212
|
+
if (!__privateGet$6(this, _namespaces).user)
|
3213
|
+
__privateGet$6(this, _namespaces).user = new UserApi(__privateGet$6(this, _extraProps));
|
3214
|
+
return __privateGet$6(this, _namespaces).user;
|
1312
3215
|
}
|
1313
3216
|
get authentication() {
|
1314
|
-
if (!__privateGet$
|
1315
|
-
__privateGet$
|
1316
|
-
return __privateGet$
|
3217
|
+
if (!__privateGet$6(this, _namespaces).authentication)
|
3218
|
+
__privateGet$6(this, _namespaces).authentication = new AuthenticationApi(__privateGet$6(this, _extraProps));
|
3219
|
+
return __privateGet$6(this, _namespaces).authentication;
|
1317
3220
|
}
|
1318
3221
|
get workspaces() {
|
1319
|
-
if (!__privateGet$
|
1320
|
-
__privateGet$
|
1321
|
-
return __privateGet$
|
3222
|
+
if (!__privateGet$6(this, _namespaces).workspaces)
|
3223
|
+
__privateGet$6(this, _namespaces).workspaces = new WorkspaceApi(__privateGet$6(this, _extraProps));
|
3224
|
+
return __privateGet$6(this, _namespaces).workspaces;
|
1322
3225
|
}
|
1323
3226
|
get invites() {
|
1324
|
-
if (!__privateGet$
|
1325
|
-
__privateGet$
|
1326
|
-
return __privateGet$
|
3227
|
+
if (!__privateGet$6(this, _namespaces).invites)
|
3228
|
+
__privateGet$6(this, _namespaces).invites = new InvitesApi(__privateGet$6(this, _extraProps));
|
3229
|
+
return __privateGet$6(this, _namespaces).invites;
|
1327
3230
|
}
|
1328
3231
|
get database() {
|
1329
|
-
if (!__privateGet$
|
1330
|
-
__privateGet$
|
1331
|
-
return __privateGet$
|
3232
|
+
if (!__privateGet$6(this, _namespaces).database)
|
3233
|
+
__privateGet$6(this, _namespaces).database = new DatabaseApi(__privateGet$6(this, _extraProps));
|
3234
|
+
return __privateGet$6(this, _namespaces).database;
|
1332
3235
|
}
|
1333
3236
|
get branches() {
|
1334
|
-
if (!__privateGet$
|
1335
|
-
__privateGet$
|
1336
|
-
return __privateGet$
|
3237
|
+
if (!__privateGet$6(this, _namespaces).branches)
|
3238
|
+
__privateGet$6(this, _namespaces).branches = new BranchApi(__privateGet$6(this, _extraProps));
|
3239
|
+
return __privateGet$6(this, _namespaces).branches;
|
1337
3240
|
}
|
1338
3241
|
get migrations() {
|
1339
|
-
if (!__privateGet$
|
1340
|
-
__privateGet$
|
1341
|
-
return __privateGet$
|
3242
|
+
if (!__privateGet$6(this, _namespaces).migrations)
|
3243
|
+
__privateGet$6(this, _namespaces).migrations = new MigrationsApi(__privateGet$6(this, _extraProps));
|
3244
|
+
return __privateGet$6(this, _namespaces).migrations;
|
1342
3245
|
}
|
1343
3246
|
get migrationRequests() {
|
1344
|
-
if (!__privateGet$
|
1345
|
-
__privateGet$
|
1346
|
-
return __privateGet$
|
3247
|
+
if (!__privateGet$6(this, _namespaces).migrationRequests)
|
3248
|
+
__privateGet$6(this, _namespaces).migrationRequests = new MigrationRequestsApi(__privateGet$6(this, _extraProps));
|
3249
|
+
return __privateGet$6(this, _namespaces).migrationRequests;
|
1347
3250
|
}
|
1348
3251
|
get tables() {
|
1349
|
-
if (!__privateGet$
|
1350
|
-
__privateGet$
|
1351
|
-
return __privateGet$
|
3252
|
+
if (!__privateGet$6(this, _namespaces).tables)
|
3253
|
+
__privateGet$6(this, _namespaces).tables = new TableApi(__privateGet$6(this, _extraProps));
|
3254
|
+
return __privateGet$6(this, _namespaces).tables;
|
1352
3255
|
}
|
1353
3256
|
get records() {
|
1354
|
-
if (!__privateGet$
|
1355
|
-
__privateGet$
|
1356
|
-
return __privateGet$
|
3257
|
+
if (!__privateGet$6(this, _namespaces).records)
|
3258
|
+
__privateGet$6(this, _namespaces).records = new RecordsApi(__privateGet$6(this, _extraProps));
|
3259
|
+
return __privateGet$6(this, _namespaces).records;
|
1357
3260
|
}
|
1358
3261
|
get files() {
|
1359
|
-
if (!__privateGet$
|
1360
|
-
__privateGet$
|
1361
|
-
return __privateGet$
|
3262
|
+
if (!__privateGet$6(this, _namespaces).files)
|
3263
|
+
__privateGet$6(this, _namespaces).files = new FilesApi(__privateGet$6(this, _extraProps));
|
3264
|
+
return __privateGet$6(this, _namespaces).files;
|
1362
3265
|
}
|
1363
3266
|
get searchAndFilter() {
|
1364
|
-
if (!__privateGet$
|
1365
|
-
__privateGet$
|
1366
|
-
return __privateGet$
|
3267
|
+
if (!__privateGet$6(this, _namespaces).searchAndFilter)
|
3268
|
+
__privateGet$6(this, _namespaces).searchAndFilter = new SearchAndFilterApi(__privateGet$6(this, _extraProps));
|
3269
|
+
return __privateGet$6(this, _namespaces).searchAndFilter;
|
1367
3270
|
}
|
1368
3271
|
}
|
1369
3272
|
_extraProps = new WeakMap();
|
@@ -1665,6 +3568,30 @@ class BranchApi {
|
|
1665
3568
|
...this.extraProps
|
1666
3569
|
});
|
1667
3570
|
}
|
3571
|
+
pgRollMigrationHistory({
|
3572
|
+
workspace,
|
3573
|
+
region,
|
3574
|
+
database,
|
3575
|
+
branch
|
3576
|
+
}) {
|
3577
|
+
return operationsByTag.migrations.getMigrationHistory({
|
3578
|
+
pathParams: { workspace, region, dbBranchName: `${database}:${branch}` },
|
3579
|
+
...this.extraProps
|
3580
|
+
});
|
3581
|
+
}
|
3582
|
+
applyMigration({
|
3583
|
+
workspace,
|
3584
|
+
region,
|
3585
|
+
database,
|
3586
|
+
branch,
|
3587
|
+
migration
|
3588
|
+
}) {
|
3589
|
+
return operationsByTag.migrations.applyMigration({
|
3590
|
+
pathParams: { workspace, region, dbBranchName: `${database}:${branch}` },
|
3591
|
+
body: migration,
|
3592
|
+
...this.extraProps
|
3593
|
+
});
|
3594
|
+
}
|
1668
3595
|
}
|
1669
3596
|
class TableApi {
|
1670
3597
|
constructor(extraProps) {
|
@@ -2478,6 +4405,17 @@ class MigrationsApi {
|
|
2478
4405
|
...this.extraProps
|
2479
4406
|
});
|
2480
4407
|
}
|
4408
|
+
getSchema({
|
4409
|
+
workspace,
|
4410
|
+
region,
|
4411
|
+
database,
|
4412
|
+
branch
|
4413
|
+
}) {
|
4414
|
+
return operationsByTag.migrations.getSchema({
|
4415
|
+
pathParams: { workspace, region, dbBranchName: `${database}:${branch}` },
|
4416
|
+
...this.extraProps
|
4417
|
+
});
|
4418
|
+
}
|
2481
4419
|
}
|
2482
4420
|
class DatabaseApi {
|
2483
4421
|
constructor(extraProps) {
|
@@ -2548,100 +4486,46 @@ class DatabaseApi {
|
|
2548
4486
|
}) {
|
2549
4487
|
return operationsByTag.databases.getDatabaseGithubSettings({
|
2550
4488
|
pathParams: { workspaceId: workspace, dbName: database },
|
2551
|
-
...this.extraProps
|
2552
|
-
});
|
2553
|
-
}
|
2554
|
-
updateDatabaseGithubSettings({
|
2555
|
-
workspace,
|
2556
|
-
database,
|
2557
|
-
settings
|
2558
|
-
}) {
|
2559
|
-
return operationsByTag.databases.updateDatabaseGithubSettings({
|
2560
|
-
pathParams: { workspaceId: workspace, dbName: database },
|
2561
|
-
body: settings,
|
2562
|
-
...this.extraProps
|
2563
|
-
});
|
2564
|
-
}
|
2565
|
-
deleteDatabaseGithubSettings({
|
2566
|
-
workspace,
|
2567
|
-
database
|
2568
|
-
}) {
|
2569
|
-
return operationsByTag.databases.deleteDatabaseGithubSettings({
|
2570
|
-
pathParams: { workspaceId: workspace, dbName: database },
|
2571
|
-
...this.extraProps
|
2572
|
-
});
|
2573
|
-
}
|
2574
|
-
listRegions({ workspace }) {
|
2575
|
-
return operationsByTag.databases.listRegions({
|
2576
|
-
pathParams: { workspaceId: workspace },
|
2577
|
-
...this.extraProps
|
2578
|
-
});
|
2579
|
-
}
|
2580
|
-
}
|
2581
|
-
|
2582
|
-
class XataApiPlugin {
|
2583
|
-
build(options) {
|
2584
|
-
return new XataApiClient(options);
|
2585
|
-
}
|
2586
|
-
}
|
2587
|
-
|
2588
|
-
class XataPlugin {
|
2589
|
-
}
|
2590
|
-
|
2591
|
-
class FilesPlugin extends XataPlugin {
|
2592
|
-
build(pluginOptions) {
|
2593
|
-
return {
|
2594
|
-
download: async (location) => {
|
2595
|
-
const { table, record, column, fileId = "" } = location ?? {};
|
2596
|
-
return await getFileItem({
|
2597
|
-
pathParams: {
|
2598
|
-
workspace: "{workspaceId}",
|
2599
|
-
dbBranchName: "{dbBranch}",
|
2600
|
-
region: "{region}",
|
2601
|
-
tableName: table ?? "",
|
2602
|
-
recordId: record ?? "",
|
2603
|
-
columnName: column ?? "",
|
2604
|
-
fileId
|
2605
|
-
},
|
2606
|
-
...pluginOptions,
|
2607
|
-
rawResponse: true
|
2608
|
-
});
|
2609
|
-
},
|
2610
|
-
upload: async (location, file) => {
|
2611
|
-
const { table, record, column, fileId = "" } = location ?? {};
|
2612
|
-
return await putFileItem({
|
2613
|
-
pathParams: {
|
2614
|
-
workspace: "{workspaceId}",
|
2615
|
-
dbBranchName: "{dbBranch}",
|
2616
|
-
region: "{region}",
|
2617
|
-
tableName: table ?? "",
|
2618
|
-
recordId: record ?? "",
|
2619
|
-
columnName: column ?? "",
|
2620
|
-
fileId
|
2621
|
-
},
|
2622
|
-
body: file,
|
2623
|
-
...pluginOptions
|
2624
|
-
});
|
2625
|
-
},
|
2626
|
-
delete: async (location) => {
|
2627
|
-
const { table, record, column, fileId = "" } = location ?? {};
|
2628
|
-
return await deleteFileItem({
|
2629
|
-
pathParams: {
|
2630
|
-
workspace: "{workspaceId}",
|
2631
|
-
dbBranchName: "{dbBranch}",
|
2632
|
-
region: "{region}",
|
2633
|
-
tableName: table ?? "",
|
2634
|
-
recordId: record ?? "",
|
2635
|
-
columnName: column ?? "",
|
2636
|
-
fileId
|
2637
|
-
},
|
2638
|
-
...pluginOptions
|
2639
|
-
});
|
2640
|
-
}
|
2641
|
-
};
|
4489
|
+
...this.extraProps
|
4490
|
+
});
|
4491
|
+
}
|
4492
|
+
updateDatabaseGithubSettings({
|
4493
|
+
workspace,
|
4494
|
+
database,
|
4495
|
+
settings
|
4496
|
+
}) {
|
4497
|
+
return operationsByTag.databases.updateDatabaseGithubSettings({
|
4498
|
+
pathParams: { workspaceId: workspace, dbName: database },
|
4499
|
+
body: settings,
|
4500
|
+
...this.extraProps
|
4501
|
+
});
|
4502
|
+
}
|
4503
|
+
deleteDatabaseGithubSettings({
|
4504
|
+
workspace,
|
4505
|
+
database
|
4506
|
+
}) {
|
4507
|
+
return operationsByTag.databases.deleteDatabaseGithubSettings({
|
4508
|
+
pathParams: { workspaceId: workspace, dbName: database },
|
4509
|
+
...this.extraProps
|
4510
|
+
});
|
4511
|
+
}
|
4512
|
+
listRegions({ workspace }) {
|
4513
|
+
return operationsByTag.databases.listRegions({
|
4514
|
+
pathParams: { workspaceId: workspace },
|
4515
|
+
...this.extraProps
|
4516
|
+
});
|
4517
|
+
}
|
4518
|
+
}
|
4519
|
+
|
4520
|
+
class XataApiPlugin {
|
4521
|
+
build(options) {
|
4522
|
+
return new XataApiClient(options);
|
2642
4523
|
}
|
2643
4524
|
}
|
2644
4525
|
|
4526
|
+
class XataPlugin {
|
4527
|
+
}
|
4528
|
+
|
2645
4529
|
function buildTransformString(transformations) {
|
2646
4530
|
return transformations.flatMap(
|
2647
4531
|
(t) => Object.entries(t).map(([key, value]) => {
|
@@ -2657,71 +4541,33 @@ function buildTransformString(transformations) {
     })
   ).join(",");
 }
-function transformImage(url, transformations) {
+function transformImage(url, ...transformations) {
   if (!isDefined(url))
     return void 0;
-  const
+  const newTransformations = buildTransformString(transformations);
   const { hostname, pathname, search } = new URL(url);
-
+  const pathParts = pathname.split("/");
+  const transformIndex = pathParts.findIndex((part) => part === "transform");
+  const removedItems = transformIndex >= 0 ? pathParts.splice(transformIndex, 2) : [];
+  const transform = `/transform/${[removedItems[1], newTransformations].filter(isDefined).join(",")}`;
+  const path = pathParts.join("/");
+  return `https://${hostname}${transform}${path}${search}`;
 }
 
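`transformImage` now takes rest parameters and splices new options into any `/transform/` segment already present in the URL instead of nesting a second one; later in this diff `XataFile#transform` forwards its arguments the same way and adds metadata URLs. A hedged usage sketch — the options shown are typical Cloudflare-style image transformations, and `file` is any `XataFile` queried with its URL fields:

```ts
const { url, signedUrl, metadataUrl } = file.transform(
  { width: 200, height: 200, fit: "cover" },
  { format: "webp" }
);
// Calling transform() on an already-transformed URL merges the options
// into the existing /transform/ path segment rather than prefixing a new one.
```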
|
2668
|
-
var __defProp$6 = Object.defineProperty;
|
2669
|
-
var __defNormalProp$6 = (obj, key, value) => key in obj ? __defProp$6(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
|
2670
|
-
var __publicField$6 = (obj, key, value) => {
|
2671
|
-
__defNormalProp$6(obj, typeof key !== "symbol" ? key + "" : key, value);
|
2672
|
-
return value;
|
2673
|
-
};
|
2674
4557
|
class XataFile {
|
2675
4558
|
constructor(file) {
|
2676
|
-
|
2677
|
-
* Name of this file.
|
2678
|
-
*/
|
2679
|
-
__publicField$6(this, "name");
|
2680
|
-
/**
|
2681
|
-
* Media type of this file.
|
2682
|
-
*/
|
2683
|
-
__publicField$6(this, "mediaType");
|
2684
|
-
/**
|
2685
|
-
* Base64 encoded content of this file.
|
2686
|
-
*/
|
2687
|
-
__publicField$6(this, "base64Content");
|
2688
|
-
/**
|
2689
|
-
* Whether to enable public url for this file.
|
2690
|
-
*/
|
2691
|
-
__publicField$6(this, "enablePublicUrl");
|
2692
|
-
/**
|
2693
|
-
* Timeout for the signed url.
|
2694
|
-
*/
|
2695
|
-
__publicField$6(this, "signedUrlTimeout");
|
2696
|
-
/**
|
2697
|
-
* Size of this file.
|
2698
|
-
*/
|
2699
|
-
__publicField$6(this, "size");
|
2700
|
-
/**
|
2701
|
-
* Version of this file.
|
2702
|
-
*/
|
2703
|
-
__publicField$6(this, "version");
|
2704
|
-
/**
|
2705
|
-
* Url of this file.
|
2706
|
-
*/
|
2707
|
-
__publicField$6(this, "url");
|
2708
|
-
/**
|
2709
|
-
* Signed url of this file.
|
2710
|
-
*/
|
2711
|
-
__publicField$6(this, "signedUrl");
|
2712
|
-
/**
|
2713
|
-
* Attributes of this file.
|
2714
|
-
*/
|
2715
|
-
__publicField$6(this, "attributes");
|
4559
|
+
this.id = file.id;
|
2716
4560
|
this.name = file.name;
|
2717
|
-
this.mediaType = file.mediaType
|
4561
|
+
this.mediaType = file.mediaType;
|
2718
4562
|
this.base64Content = file.base64Content;
|
2719
4563
|
this.enablePublicUrl = file.enablePublicUrl;
|
2720
4564
|
this.signedUrlTimeout = file.signedUrlTimeout;
|
4565
|
+
this.uploadUrlTimeout = file.uploadUrlTimeout;
|
2721
4566
|
this.size = file.size;
|
2722
4567
|
this.version = file.version;
|
2723
4568
|
this.url = file.url;
|
2724
4569
|
this.signedUrl = file.signedUrl;
|
4570
|
+
this.uploadUrl = file.uploadUrl;
|
2725
4571
|
this.attributes = file.attributes;
|
2726
4572
|
}
|
2727
4573
|
static fromBuffer(buffer, options = {}) {
|
@@ -2774,8 +4620,12 @@ class XataFile {
|
|
2774
4620
|
if (!this.base64Content) {
|
2775
4621
|
throw new Error(`File content is not available, please select property "base64Content" when querying the file`);
|
2776
4622
|
}
|
2777
|
-
const
|
2778
|
-
|
4623
|
+
const binary = atob(this.base64Content);
|
4624
|
+
const uint8Array = new Uint8Array(binary.length);
|
4625
|
+
for (let i = 0; i < binary.length; i++) {
|
4626
|
+
uint8Array[i] = binary.charCodeAt(i);
|
4627
|
+
}
|
4628
|
+
return new Blob([uint8Array], { type: this.mediaType });
|
2779
4629
|
}
|
2780
4630
|
static fromString(string, options = {}) {
|
2781
4631
|
const base64Content = btoa(string);
|
@@ -2798,16 +4648,27 @@ class XataFile {
|
|
2798
4648
|
}
|
2799
4649
|
transform(...options) {
|
2800
4650
|
return {
|
2801
|
-
url: transformImage(this.url, options),
|
2802
|
-
signedUrl: transformImage(this.signedUrl, options)
|
4651
|
+
url: transformImage(this.url, ...options),
|
4652
|
+
signedUrl: transformImage(this.signedUrl, ...options),
|
4653
|
+
metadataUrl: transformImage(this.url, ...options, { format: "json" }),
|
4654
|
+
metadataSignedUrl: transformImage(this.signedUrl, ...options, { format: "json" })
|
2803
4655
|
};
|
2804
4656
|
}
|
2805
4657
|
}
|
2806
4658
|
const parseInputFileEntry = async (entry) => {
|
2807
4659
|
if (!isDefined(entry))
|
2808
4660
|
return null;
|
2809
|
-
const { id, name, mediaType, base64Content, enablePublicUrl, signedUrlTimeout } = await entry;
|
2810
|
-
return compactObject({
|
4661
|
+
const { id, name, mediaType, base64Content, enablePublicUrl, signedUrlTimeout, uploadUrlTimeout } = await entry;
|
4662
|
+
return compactObject({
|
4663
|
+
id,
|
4664
|
+
// Name cannot be an empty string in our API
|
4665
|
+
name: name ? name : void 0,
|
4666
|
+
mediaType,
|
4667
|
+
base64Content,
|
4668
|
+
enablePublicUrl,
|
4669
|
+
signedUrlTimeout,
|
4670
|
+
uploadUrlTimeout
|
4671
|
+
});
|
2811
4672
|
};
|
2812
4673
|
|
2813
4674
|
function cleanFilter(filter) {
|
@@ -2837,17 +4698,30 @@ function cleanFilter(filter) {
|
|
2837
4698
|
return Object.keys(values).length > 0 ? values : void 0;
|
2838
4699
|
}
|
2839
4700
|
|
2840
|
-
|
2841
|
-
|
2842
|
-
|
2843
|
-
|
2844
|
-
|
2845
|
-
|
4701
|
+
function stringifyJson(value) {
|
4702
|
+
if (!isDefined(value))
|
4703
|
+
return value;
|
4704
|
+
if (isString(value))
|
4705
|
+
return value;
|
4706
|
+
try {
|
4707
|
+
return JSON.stringify(value);
|
4708
|
+
} catch (e) {
|
4709
|
+
return value;
|
4710
|
+
}
|
4711
|
+
}
|
4712
|
+
function parseJson(value) {
|
4713
|
+
try {
|
4714
|
+
return JSON.parse(value);
|
4715
|
+
} catch (e) {
|
4716
|
+
return value;
|
4717
|
+
}
|
4718
|
+
}
|
4719
|
+
|
2846
4720
|
var __accessCheck$6 = (obj, member, msg) => {
|
2847
4721
|
if (!member.has(obj))
|
2848
4722
|
throw TypeError("Cannot " + msg);
|
2849
4723
|
};
|
2850
|
-
var __privateGet$
|
4724
|
+
var __privateGet$5 = (obj, member, getter) => {
|
2851
4725
|
__accessCheck$6(obj, member, "read from private field");
|
2852
4726
|
return getter ? getter.call(obj) : member.get(obj);
|
2853
4727
|
};
|
@@ -2856,7 +4730,7 @@ var __privateAdd$6 = (obj, member, value) => {
|
|
2856
4730
|
throw TypeError("Cannot add the same private member more than once");
|
2857
4731
|
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
2858
4732
|
};
|
2859
|
-
var __privateSet$
|
4733
|
+
var __privateSet$4 = (obj, member, value, setter) => {
|
2860
4734
|
__accessCheck$6(obj, member, "write to private field");
|
2861
4735
|
setter ? setter.call(obj, value) : member.set(obj, value);
|
2862
4736
|
return value;
|
@@ -2865,17 +4739,9 @@ var _query, _page;
|
|
2865
4739
|
class Page {
|
2866
4740
|
constructor(query, meta, records = []) {
|
2867
4741
|
__privateAdd$6(this, _query, void 0);
|
2868
|
-
|
2869
|
-
* Page metadata, required to retrieve additional records.
|
2870
|
-
*/
|
2871
|
-
__publicField$5(this, "meta");
|
2872
|
-
/**
|
2873
|
-
* The set of results for this page.
|
2874
|
-
*/
|
2875
|
-
__publicField$5(this, "records");
|
2876
|
-
__privateSet$6(this, _query, query);
|
4742
|
+
__privateSet$4(this, _query, query);
|
2877
4743
|
this.meta = meta;
|
2878
|
-
this.records = new
|
4744
|
+
this.records = new PageRecordArray(this, records);
|
2879
4745
|
}
|
2880
4746
|
/**
|
2881
4747
|
* Retrieves the next page of results.
|
@@ -2884,7 +4750,7 @@ class Page {
|
|
2884
4750
|
* @returns The next page or results.
|
2885
4751
|
*/
|
2886
4752
|
async nextPage(size, offset) {
|
2887
|
-
return __privateGet$
|
4753
|
+
return __privateGet$5(this, _query).getPaginated({ pagination: { size, offset, after: this.meta.page.cursor } });
|
2888
4754
|
}
|
2889
4755
|
/**
|
2890
4756
|
* Retrieves the previous page of results.
|
@@ -2893,7 +4759,7 @@ class Page {
|
|
2893
4759
|
* @returns The previous page or results.
|
2894
4760
|
*/
|
2895
4761
|
async previousPage(size, offset) {
|
2896
|
-
return __privateGet$
|
4762
|
+
return __privateGet$5(this, _query).getPaginated({ pagination: { size, offset, before: this.meta.page.cursor } });
|
2897
4763
|
}
|
2898
4764
|
/**
|
2899
4765
|
* Retrieves the start page of results.
|
@@ -2902,7 +4768,7 @@ class Page {
|
|
2902
4768
|
* @returns The start page or results.
|
2903
4769
|
*/
|
2904
4770
|
async startPage(size, offset) {
|
2905
|
-
return __privateGet$
|
4771
|
+
return __privateGet$5(this, _query).getPaginated({ pagination: { size, offset, start: this.meta.page.cursor } });
|
2906
4772
|
}
|
2907
4773
|
/**
|
2908
4774
|
* Retrieves the end page of results.
|
@@ -2911,7 +4777,7 @@ class Page {
|
|
2911
4777
|
* @returns The end page or results.
|
2912
4778
|
*/
|
2913
4779
|
async endPage(size, offset) {
|
2914
|
-
return __privateGet$
|
4780
|
+
return __privateGet$5(this, _query).getPaginated({ pagination: { size, offset, end: this.meta.page.cursor } });
|
2915
4781
|
}
|
2916
4782
|
/**
|
2917
4783
|
* Shortcut method to check if there will be additional results if the next page of results is retrieved.
|
@@ -2922,18 +4788,45 @@ class Page {
|
|
2922
4788
|
}
|
2923
4789
|
}
|
2924
4790
|
_query = new WeakMap();
|
2925
|
-
const PAGINATION_MAX_SIZE =
|
4791
|
+
const PAGINATION_MAX_SIZE = 1e3;
|
2926
4792
|
const PAGINATION_DEFAULT_SIZE = 20;
|
2927
|
-
const PAGINATION_MAX_OFFSET =
|
4793
|
+
const PAGINATION_MAX_OFFSET = 49e3;
|
2928
4794
|
const PAGINATION_DEFAULT_OFFSET = 0;
|
2929
4795
|
function isCursorPaginationOptions(options) {
|
2930
4796
|
return isDefined(options) && (isDefined(options.start) || isDefined(options.end) || isDefined(options.after) || isDefined(options.before));
|
2931
4797
|
}
|
2932
|
-
|
4798
|
+
class RecordArray extends Array {
|
4799
|
+
constructor(...args) {
|
4800
|
+
super(...RecordArray.parseConstructorParams(...args));
|
4801
|
+
}
|
4802
|
+
static parseConstructorParams(...args) {
|
4803
|
+
if (args.length === 1 && typeof args[0] === "number") {
|
4804
|
+
return new Array(args[0]);
|
4805
|
+
}
|
4806
|
+
if (args.length <= 1 && Array.isArray(args[0] ?? [])) {
|
4807
|
+
const result = args[0] ?? [];
|
4808
|
+
return new Array(...result);
|
4809
|
+
}
|
4810
|
+
return new Array(...args);
|
4811
|
+
}
|
4812
|
+
toArray() {
|
4813
|
+
return new Array(...this);
|
4814
|
+
}
|
4815
|
+
toSerializable() {
|
4816
|
+
return JSON.parse(this.toString());
|
4817
|
+
}
|
4818
|
+
toString() {
|
4819
|
+
return JSON.stringify(this.toArray());
|
4820
|
+
}
|
4821
|
+
map(callbackfn, thisArg) {
|
4822
|
+
return this.toArray().map(callbackfn, thisArg);
|
4823
|
+
}
|
4824
|
+
}
|
4825
|
+
const _PageRecordArray = class _PageRecordArray extends Array {
|
2933
4826
|
constructor(...args) {
|
2934
|
-
super(...
|
4827
|
+
super(..._PageRecordArray.parseConstructorParams(...args));
|
2935
4828
|
__privateAdd$6(this, _page, void 0);
|
2936
|
-
__privateSet$
|
4829
|
+
__privateSet$4(this, _page, isObject(args[0]?.meta) ? args[0] : { meta: { page: { cursor: "", more: false } }, records: [] });
|
2937
4830
|
}
|
2938
4831
|
static parseConstructorParams(...args) {
|
2939
4832
|
if (args.length === 1 && typeof args[0] === "number") {
|
@@ -2963,8 +4856,8 @@ const _RecordArray = class _RecordArray extends Array {
|
|
2963
4856
|
* @returns A new array of objects
|
2964
4857
|
*/
|
2965
4858
|
async nextPage(size, offset) {
|
2966
|
-
const newPage = await __privateGet$
|
2967
|
-
return new
|
4859
|
+
const newPage = await __privateGet$5(this, _page).nextPage(size, offset);
|
4860
|
+
return new _PageRecordArray(newPage);
|
2968
4861
|
}
|
2969
4862
|
/**
|
2970
4863
|
* Retrieve previous page of records
|
@@ -2972,8 +4865,8 @@ const _RecordArray = class _RecordArray extends Array {
|
|
2972
4865
|
* @returns A new array of objects
|
2973
4866
|
*/
|
2974
4867
|
async previousPage(size, offset) {
|
2975
|
-
const newPage = await __privateGet$
|
2976
|
-
return new
|
4868
|
+
const newPage = await __privateGet$5(this, _page).previousPage(size, offset);
|
4869
|
+
return new _PageRecordArray(newPage);
|
2977
4870
|
}
|
2978
4871
|
/**
|
2979
4872
|
* Retrieve start page of records
|
@@ -2981,8 +4874,8 @@ const _RecordArray = class _RecordArray extends Array {
|
|
2981
4874
|
* @returns A new array of objects
|
2982
4875
|
*/
|
2983
4876
|
async startPage(size, offset) {
|
2984
|
-
const newPage = await __privateGet$
|
2985
|
-
return new
|
4877
|
+
const newPage = await __privateGet$5(this, _page).startPage(size, offset);
|
4878
|
+
return new _PageRecordArray(newPage);
|
2986
4879
|
}
|
2987
4880
|
/**
|
2988
4881
|
* Retrieve end page of records
|
@@ -2990,30 +4883,24 @@ const _RecordArray = class _RecordArray extends Array {
|
|
2990
4883
|
* @returns A new array of objects
|
2991
4884
|
*/
|
2992
4885
|
async endPage(size, offset) {
|
2993
|
-
const newPage = await __privateGet$
|
2994
|
-
return new
|
4886
|
+
const newPage = await __privateGet$5(this, _page).endPage(size, offset);
|
4887
|
+
return new _PageRecordArray(newPage);
|
2995
4888
|
}
|
2996
4889
|
/**
|
2997
4890
|
* @returns Boolean indicating if there is a next page
|
2998
4891
|
*/
|
2999
4892
|
hasNextPage() {
|
3000
|
-
return __privateGet$
|
4893
|
+
return __privateGet$5(this, _page).meta.page.more;
|
3001
4894
|
}
|
3002
4895
|
};
|
3003
4896
|
_page = new WeakMap();
|
3004
|
-
let
|
4897
|
+
let PageRecordArray = _PageRecordArray;
|
3005
4898
|
|
3006
|
-
var __defProp$4 = Object.defineProperty;
|
3007
|
-
var __defNormalProp$4 = (obj, key, value) => key in obj ? __defProp$4(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
|
3008
|
-
var __publicField$4 = (obj, key, value) => {
|
3009
|
-
__defNormalProp$4(obj, typeof key !== "symbol" ? key + "" : key, value);
|
3010
|
-
return value;
|
3011
|
-
};
|
3012
4899
|
var __accessCheck$5 = (obj, member, msg) => {
|
3013
4900
|
if (!member.has(obj))
|
3014
4901
|
throw TypeError("Cannot " + msg);
|
3015
4902
|
};
|
3016
|
-
var __privateGet$
|
4903
|
+
var __privateGet$4 = (obj, member, getter) => {
|
3017
4904
|
__accessCheck$5(obj, member, "read from private field");
|
3018
4905
|
return getter ? getter.call(obj) : member.get(obj);
|
3019
4906
|
};
|
@@ -3022,7 +4909,7 @@ var __privateAdd$5 = (obj, member, value) => {
|
|
3022
4909
|
throw TypeError("Cannot add the same private member more than once");
|
3023
4910
|
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
3024
4911
|
};
|
3025
|
-
var __privateSet$
|
4912
|
+
var __privateSet$3 = (obj, member, value, setter) => {
|
3026
4913
|
__accessCheck$5(obj, member, "write to private field");
|
3027
4914
|
setter ? setter.call(obj, value) : member.set(obj, value);
|
3028
4915
|
return value;
|
@@ -3039,26 +4926,26 @@ const _Query = class _Query {
|
|
3039
4926
|
__privateAdd$5(this, _repository, void 0);
|
3040
4927
|
__privateAdd$5(this, _data, { filter: {} });
|
3041
4928
|
// Implements pagination
|
3042
|
-
|
3043
|
-
|
3044
|
-
__privateSet$
|
4929
|
+
this.meta = { page: { cursor: "start", more: true, size: PAGINATION_DEFAULT_SIZE } };
|
4930
|
+
this.records = new PageRecordArray(this, []);
|
4931
|
+
__privateSet$3(this, _table$1, table);
|
3045
4932
|
if (repository) {
|
3046
|
-
__privateSet$
|
4933
|
+
__privateSet$3(this, _repository, repository);
|
3047
4934
|
} else {
|
3048
|
-
__privateSet$
|
4935
|
+
__privateSet$3(this, _repository, this);
|
3049
4936
|
}
|
3050
4937
|
const parent = cleanParent(data, rawParent);
|
3051
|
-
__privateGet$
|
3052
|
-
__privateGet$
|
3053
|
-
__privateGet$
|
3054
|
-
__privateGet$
|
3055
|
-
__privateGet$
|
3056
|
-
__privateGet$
|
3057
|
-
__privateGet$
|
3058
|
-
__privateGet$
|
3059
|
-
__privateGet$
|
3060
|
-
__privateGet$
|
3061
|
-
__privateGet$
|
4938
|
+
__privateGet$4(this, _data).filter = data.filter ?? parent?.filter ?? {};
|
4939
|
+
__privateGet$4(this, _data).filter.$any = data.filter?.$any ?? parent?.filter?.$any;
|
4940
|
+
__privateGet$4(this, _data).filter.$all = data.filter?.$all ?? parent?.filter?.$all;
|
4941
|
+
__privateGet$4(this, _data).filter.$not = data.filter?.$not ?? parent?.filter?.$not;
|
4942
|
+
__privateGet$4(this, _data).filter.$none = data.filter?.$none ?? parent?.filter?.$none;
|
4943
|
+
__privateGet$4(this, _data).sort = data.sort ?? parent?.sort;
|
4944
|
+
__privateGet$4(this, _data).columns = data.columns ?? parent?.columns;
|
4945
|
+
__privateGet$4(this, _data).consistency = data.consistency ?? parent?.consistency;
|
4946
|
+
__privateGet$4(this, _data).pagination = data.pagination ?? parent?.pagination;
|
4947
|
+
__privateGet$4(this, _data).cache = data.cache ?? parent?.cache;
|
4948
|
+
__privateGet$4(this, _data).fetchOptions = data.fetchOptions ?? parent?.fetchOptions;
|
3062
4949
|
this.any = this.any.bind(this);
|
3063
4950
|
this.all = this.all.bind(this);
|
3064
4951
|
this.not = this.not.bind(this);
|
@@ -3069,10 +4956,10 @@ const _Query = class _Query {
|
|
3069
4956
|
Object.defineProperty(this, "repository", { enumerable: false });
|
3070
4957
|
}
|
3071
4958
|
getQueryOptions() {
|
3072
|
-
return __privateGet$
|
4959
|
+
return __privateGet$4(this, _data);
|
3073
4960
|
}
|
3074
4961
|
key() {
|
3075
|
-
const { columns = [], filter = {}, sort = [], pagination = {} } = __privateGet$
|
4962
|
+
const { columns = [], filter = {}, sort = [], pagination = {} } = __privateGet$4(this, _data);
|
3076
4963
|
const key = JSON.stringify({ columns, filter, sort, pagination });
|
3077
4964
|
return toBase64(key);
|
3078
4965
|
}
|
@@ -3083,7 +4970,7 @@ const _Query = class _Query {
|
|
3083
4970
|
*/
|
3084
4971
|
any(...queries) {
|
3085
4972
|
const $any = queries.map((query) => query.getQueryOptions().filter ?? {});
|
3086
|
-
return new _Query(__privateGet$
|
4973
|
+
return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { filter: { $any } }, __privateGet$4(this, _data));
|
3087
4974
|
}
|
3088
4975
|
/**
|
3089
4976
|
* Builds a new query object representing a logical AND between the given subqueries.
|
@@ -3092,7 +4979,7 @@ const _Query = class _Query {
|
|
3092
4979
|
*/
|
3093
4980
|
all(...queries) {
|
3094
4981
|
const $all = queries.map((query) => query.getQueryOptions().filter ?? {});
|
3095
|
-
return new _Query(__privateGet$
|
4982
|
+
return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { filter: { $all } }, __privateGet$4(this, _data));
|
3096
4983
|
}
|
3097
4984
|
/**
|
3098
4985
|
* Builds a new query object representing a logical OR negating each subquery. In pseudo-code: !q1 OR !q2
|
@@ -3101,7 +4988,7 @@ const _Query = class _Query {
|
|
3101
4988
|
*/
|
3102
4989
|
not(...queries) {
|
3103
4990
|
const $not = queries.map((query) => query.getQueryOptions().filter ?? {});
|
3104
|
-
return new _Query(__privateGet$
|
4991
|
+
return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { filter: { $not } }, __privateGet$4(this, _data));
|
3105
4992
|
}
|
3106
4993
|
/**
|
3107
4994
|
* Builds a new query object representing a logical AND negating each subquery. In pseudo-code: !q1 AND !q2
|
@@ -3110,25 +4997,25 @@ const _Query = class _Query {
|
|
3110
4997
|
*/
|
3111
4998
|
none(...queries) {
|
3112
4999
|
const $none = queries.map((query) => query.getQueryOptions().filter ?? {});
|
3113
|
-
return new _Query(__privateGet$
|
5000
|
+
return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { filter: { $none } }, __privateGet$4(this, _data));
|
3114
5001
|
}
|
3115
5002
|
filter(a, b) {
|
3116
5003
|
if (arguments.length === 1) {
|
3117
5004
|
const constraints = Object.entries(a ?? {}).map(([column, constraint]) => ({
|
3118
5005
|
[column]: __privateMethod$3(this, _cleanFilterConstraint, cleanFilterConstraint_fn).call(this, column, constraint)
|
3119
5006
|
}));
|
3120
|
-
const $all = compact([__privateGet$
|
3121
|
-
return new _Query(__privateGet$
|
5007
|
+
const $all = compact([__privateGet$4(this, _data).filter?.$all].flat().concat(constraints));
|
5008
|
+
return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { filter: { $all } }, __privateGet$4(this, _data));
|
3122
5009
|
} else {
|
3123
5010
|
const constraints = isDefined(a) && isDefined(b) ? [{ [a]: __privateMethod$3(this, _cleanFilterConstraint, cleanFilterConstraint_fn).call(this, a, b) }] : void 0;
|
3124
|
-
const $all = compact([__privateGet$
|
3125
|
-
return new _Query(__privateGet$
|
5011
|
+
const $all = compact([__privateGet$4(this, _data).filter?.$all].flat().concat(constraints));
|
5012
|
+
return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { filter: { $all } }, __privateGet$4(this, _data));
|
3126
5013
|
}
|
3127
5014
|
}
|
3128
5015
|
sort(column, direction = "asc") {
|
3129
|
-
const originalSort = [__privateGet$
|
5016
|
+
const originalSort = [__privateGet$4(this, _data).sort ?? []].flat();
|
3130
5017
|
const sort = [...originalSort, { column, direction }];
|
3131
|
-
return new _Query(__privateGet$
|
5018
|
+
return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { sort }, __privateGet$4(this, _data));
|
3132
5019
|
}
|
3133
5020
|
/**
|
3134
5021
|
* Builds a new query specifying the set of columns to be returned in the query response.
|
@@ -3137,15 +5024,15 @@ const _Query = class _Query {
|
|
3137
5024
|
*/
|
3138
5025
|
select(columns) {
|
3139
5026
|
return new _Query(
|
3140
|
-
__privateGet$
|
3141
|
-
__privateGet$
|
5027
|
+
__privateGet$4(this, _repository),
|
5028
|
+
__privateGet$4(this, _table$1),
|
3142
5029
|
{ columns },
|
3143
|
-
__privateGet$
|
5030
|
+
__privateGet$4(this, _data)
|
3144
5031
|
);
|
3145
5032
|
}
|
3146
5033
|
getPaginated(options = {}) {
|
3147
|
-
const query = new _Query(__privateGet$
|
3148
|
-
return __privateGet$
|
5034
|
+
const query = new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), options, __privateGet$4(this, _data));
|
5035
|
+
return __privateGet$4(this, _repository).query(query);
|
3149
5036
|
}
|
3150
5037
|
/**
|
3151
5038
|
* Get results in an iterator
|
@@ -3182,7 +5069,7 @@ const _Query = class _Query {
|
|
3182
5069
|
if (page.hasNextPage() && options.pagination?.size === void 0) {
|
3183
5070
|
console.trace("Calling getMany does not return all results. Paginate to get all results or call getAll.");
|
3184
5071
|
}
|
3185
|
-
const array = new
|
5072
|
+
const array = new PageRecordArray(page, results.slice(0, size));
|
3186
5073
|
return array;
|
3187
5074
|
}
|
3188
5075
|
async getAll(options = {}) {
|
@@ -3191,7 +5078,7 @@ const _Query = class _Query {
|
|
3191
5078
|
for await (const page of this.getIterator({ ...rest, batchSize })) {
|
3192
5079
|
results.push(...page);
|
3193
5080
|
}
|
3194
|
-
return results;
|
5081
|
+
return new RecordArray(results);
|
3195
5082
|
}
|
3196
5083
|
async getFirst(options = {}) {
|
3197
5084
|
const records = await this.getMany({ ...options, pagination: { size: 1 } });
|
@@ -3206,12 +5093,12 @@ const _Query = class _Query {
|
|
3206
5093
|
async summarize(params = {}) {
|
3207
5094
|
const { summaries, summariesFilter, ...options } = params;
|
3208
5095
|
const query = new _Query(
|
3209
|
-
__privateGet$
|
3210
|
-
__privateGet$
|
5096
|
+
__privateGet$4(this, _repository),
|
5097
|
+
__privateGet$4(this, _table$1),
|
3211
5098
|
options,
|
3212
|
-
__privateGet$
|
5099
|
+
__privateGet$4(this, _data)
|
3213
5100
|
);
|
3214
|
-
return __privateGet$
|
5101
|
+
return __privateGet$4(this, _repository).summarizeTable(query, summaries, summariesFilter);
|
3215
5102
|
}
|
3216
5103
|
/**
|
3217
5104
|
* Builds a new query object adding a cache TTL in milliseconds.
|
@@ -3219,7 +5106,7 @@ const _Query = class _Query {
|
|
3219
5106
|
* @returns A new Query object.
|
3220
5107
|
*/
|
3221
5108
|
cache(ttl) {
|
3222
|
-
return new _Query(__privateGet$
|
5109
|
+
return new _Query(__privateGet$4(this, _repository), __privateGet$4(this, _table$1), { cache: ttl }, __privateGet$4(this, _data));
|
3223
5110
|
}
|
3224
5111
|
/**
|
3225
5112
|
* Retrieve next page of records
|
@@ -3265,7 +5152,7 @@ _repository = new WeakMap();
|
|
3265
5152
|
_data = new WeakMap();
|
3266
5153
|
_cleanFilterConstraint = new WeakSet();
|
3267
5154
|
cleanFilterConstraint_fn = function(column, value) {
|
3268
|
-
const columnType = __privateGet$
|
5155
|
+
const columnType = __privateGet$4(this, _table$1).schema?.columns.find(({ name }) => name === column)?.type;
|
3269
5156
|
if (columnType === "multiple" && (isString(value) || isStringArray(value))) {
|
3270
5157
|
return { $includes: value };
|
3271
5158
|
}
|
@@ -3291,11 +5178,11 @@ const RecordColumnTypes = [
|
|
3291
5178
|
"email",
|
3292
5179
|
"multiple",
|
3293
5180
|
"link",
|
3294
|
-
"object",
|
3295
5181
|
"datetime",
|
3296
5182
|
"vector",
|
3297
5183
|
"file[]",
|
3298
|
-
"file"
|
5184
|
+
"file",
|
5185
|
+
"json"
|
3299
5186
|
];
|
3300
5187
|
function isIdentifiable(x) {
|
3301
5188
|
return isObject(x) && isString(x?.id);
|
@@ -3306,6 +5193,24 @@ function isXataRecord(x) {
|
|
3306
5193
|
return isIdentifiable(x) && isObject(metadata) && typeof metadata.version === "number";
|
3307
5194
|
}
|
3308
5195
|
|
5196
|
+
function isValidExpandedColumn(column) {
|
5197
|
+
return isObject(column) && isString(column.name);
|
5198
|
+
}
|
5199
|
+
function isValidSelectableColumns(columns) {
|
5200
|
+
if (!Array.isArray(columns)) {
|
5201
|
+
return false;
|
5202
|
+
}
|
5203
|
+
return columns.every((column) => {
|
5204
|
+
if (typeof column === "string") {
|
5205
|
+
return true;
|
5206
|
+
}
|
5207
|
+
if (typeof column === "object") {
|
5208
|
+
return isValidExpandedColumn(column);
|
5209
|
+
}
|
5210
|
+
return false;
|
5211
|
+
});
|
5212
|
+
}
|
5213
|
+
|
3309
5214
|
function isSortFilterString(value) {
|
3310
5215
|
return isString(value);
|
3311
5216
|
}
|
@@ -3337,7 +5242,7 @@ var __accessCheck$4 = (obj, member, msg) => {
|
|
3337
5242
|
if (!member.has(obj))
|
3338
5243
|
throw TypeError("Cannot " + msg);
|
3339
5244
|
};
|
3340
|
-
var __privateGet$
|
5245
|
+
var __privateGet$3 = (obj, member, getter) => {
|
3341
5246
|
__accessCheck$4(obj, member, "read from private field");
|
3342
5247
|
return getter ? getter.call(obj) : member.get(obj);
|
3343
5248
|
};
|
@@ -3346,7 +5251,7 @@ var __privateAdd$4 = (obj, member, value) => {
|
|
3346
5251
|
throw TypeError("Cannot add the same private member more than once");
|
3347
5252
|
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
3348
5253
|
};
|
3349
|
-
var __privateSet$
|
5254
|
+
var __privateSet$2 = (obj, member, value, setter) => {
|
3350
5255
|
__accessCheck$4(obj, member, "write to private field");
|
3351
5256
|
setter ? setter.call(obj, value) : member.set(obj, value);
|
3352
5257
|
return value;
|
@@ -3355,7 +5260,7 @@ var __privateMethod$2 = (obj, member, method) => {
|
|
3355
5260
|
__accessCheck$4(obj, member, "access private method");
|
3356
5261
|
return method;
|
3357
5262
|
};
|
3358
|
-
var _table, _getFetchProps, _db, _cache, _schemaTables
|
5263
|
+
var _table, _getFetchProps, _db, _cache, _schemaTables, _trace, _insertRecordWithoutId, insertRecordWithoutId_fn, _insertRecordWithId, insertRecordWithId_fn, _insertRecords, insertRecords_fn, _updateRecordWithID, updateRecordWithID_fn, _updateRecords, updateRecords_fn, _upsertRecordWithID, upsertRecordWithID_fn, _deleteRecord, deleteRecord_fn, _deleteRecords, deleteRecords_fn, _setCacheQuery, setCacheQuery_fn, _getCacheQuery, getCacheQuery_fn, _getSchemaTables, getSchemaTables_fn, _transformObjectToApi, transformObjectToApi_fn;
|
3359
5264
|
const BULK_OPERATION_MAX_SIZE = 1e3;
|
3360
5265
|
class Repository extends Query {
|
3361
5266
|
}
|
@@ -3376,62 +5281,62 @@ class RestRepository extends Query {
|
|
3376
5281
|
__privateAdd$4(this, _deleteRecords);
|
3377
5282
|
__privateAdd$4(this, _setCacheQuery);
|
3378
5283
|
__privateAdd$4(this, _getCacheQuery);
|
3379
|
-
__privateAdd$4(this, _getSchemaTables
|
5284
|
+
__privateAdd$4(this, _getSchemaTables);
|
3380
5285
|
__privateAdd$4(this, _transformObjectToApi);
|
3381
5286
|
__privateAdd$4(this, _table, void 0);
|
3382
5287
|
__privateAdd$4(this, _getFetchProps, void 0);
|
3383
5288
|
__privateAdd$4(this, _db, void 0);
|
3384
5289
|
__privateAdd$4(this, _cache, void 0);
|
3385
|
-
__privateAdd$4(this, _schemaTables
|
5290
|
+
__privateAdd$4(this, _schemaTables, void 0);
|
3386
5291
|
__privateAdd$4(this, _trace, void 0);
|
3387
|
-
__privateSet$
|
3388
|
-
__privateSet$
|
3389
|
-
__privateSet$
|
3390
|
-
__privateSet$
|
3391
|
-
__privateSet$
|
5292
|
+
__privateSet$2(this, _table, options.table);
|
5293
|
+
__privateSet$2(this, _db, options.db);
|
5294
|
+
__privateSet$2(this, _cache, options.pluginOptions.cache);
|
5295
|
+
__privateSet$2(this, _schemaTables, options.schemaTables);
|
5296
|
+
__privateSet$2(this, _getFetchProps, () => ({ ...options.pluginOptions, sessionID: generateUUID() }));
|
3392
5297
|
const trace = options.pluginOptions.trace ?? defaultTrace;
|
3393
|
-
__privateSet$
|
5298
|
+
__privateSet$2(this, _trace, async (name, fn, options2 = {}) => {
|
3394
5299
|
return trace(name, fn, {
|
3395
5300
|
...options2,
|
3396
|
-
[TraceAttributes.TABLE]: __privateGet$
|
5301
|
+
[TraceAttributes.TABLE]: __privateGet$3(this, _table),
|
3397
5302
|
[TraceAttributes.KIND]: "sdk-operation",
|
3398
5303
|
[TraceAttributes.VERSION]: VERSION
|
3399
5304
|
});
|
3400
5305
|
});
|
3401
5306
|
}
|
3402
5307
|
async create(a, b, c, d) {
|
3403
|
-
return __privateGet$
|
5308
|
+
return __privateGet$3(this, _trace).call(this, "create", async () => {
|
3404
5309
|
const ifVersion = parseIfVersion(b, c, d);
|
3405
5310
|
if (Array.isArray(a)) {
|
3406
5311
|
if (a.length === 0)
|
3407
5312
|
return [];
|
3408
5313
|
const ids = await __privateMethod$2(this, _insertRecords, insertRecords_fn).call(this, a, { ifVersion, createOnly: true });
|
3409
|
-
const columns =
|
5314
|
+
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
3410
5315
|
const result = await this.read(ids, columns);
|
3411
5316
|
return result;
|
3412
5317
|
}
|
3413
5318
|
if (isString(a) && isObject(b)) {
|
3414
5319
|
if (a === "")
|
3415
5320
|
throw new Error("The id can't be empty");
|
3416
|
-
const columns =
|
5321
|
+
const columns = isValidSelectableColumns(c) ? c : void 0;
|
3417
5322
|
return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: true, ifVersion });
|
3418
5323
|
}
|
3419
5324
|
if (isObject(a) && isString(a.id)) {
|
3420
5325
|
if (a.id === "")
|
3421
5326
|
throw new Error("The id can't be empty");
|
3422
|
-
const columns =
|
5327
|
+
const columns = isValidSelectableColumns(b) ? b : void 0;
|
3423
5328
|
return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a.id, { ...a, id: void 0 }, columns, { createOnly: true, ifVersion });
|
3424
5329
|
}
|
3425
5330
|
if (isObject(a)) {
|
3426
|
-
const columns =
|
5331
|
+
const columns = isValidSelectableColumns(b) ? b : void 0;
|
3427
5332
|
return __privateMethod$2(this, _insertRecordWithoutId, insertRecordWithoutId_fn).call(this, a, columns);
|
3428
5333
|
}
|
3429
5334
|
throw new Error("Invalid arguments for create method");
|
3430
5335
|
});
|
3431
5336
|
}
|
3432
5337
|
async read(a, b) {
|
3433
|
-
return __privateGet$
|
3434
|
-
const columns =
|
5338
|
+
return __privateGet$3(this, _trace).call(this, "read", async () => {
|
5339
|
+
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
3435
5340
|
if (Array.isArray(a)) {
|
3436
5341
|
if (a.length === 0)
|
3437
5342
|
return [];
|
@@ -3451,14 +5356,20 @@ class RestRepository extends Query {
|
|
3451
5356
|
workspace: "{workspaceId}",
|
3452
5357
|
dbBranchName: "{dbBranch}",
|
3453
5358
|
region: "{region}",
|
3454
|
-
tableName: __privateGet$
|
5359
|
+
tableName: __privateGet$3(this, _table),
|
3455
5360
|
recordId: id
|
3456
5361
|
},
|
3457
5362
|
queryParams: { columns },
|
3458
|
-
...__privateGet$
|
5363
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
3459
5364
|
});
|
3460
|
-
const schemaTables = await __privateMethod$2(this, _getSchemaTables
|
3461
|
-
return initObject(
|
5365
|
+
const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
|
5366
|
+
return initObject(
|
5367
|
+
__privateGet$3(this, _db),
|
5368
|
+
schemaTables,
|
5369
|
+
__privateGet$3(this, _table),
|
5370
|
+
response,
|
5371
|
+
columns
|
5372
|
+
);
|
3462
5373
|
} catch (e) {
|
3463
5374
|
if (isObject(e) && e.status === 404) {
|
3464
5375
|
return null;
|
@@ -3470,7 +5381,7 @@ class RestRepository extends Query {
|
|
3470
5381
|
});
|
3471
5382
|
}
|
3472
5383
|
async readOrThrow(a, b) {
|
3473
|
-
return __privateGet$
|
5384
|
+
return __privateGet$3(this, _trace).call(this, "readOrThrow", async () => {
|
3474
5385
|
const result = await this.read(a, b);
|
3475
5386
|
if (Array.isArray(result)) {
|
3476
5387
|
const missingIds = compact(
|
@@ -3489,7 +5400,7 @@ class RestRepository extends Query {
|
|
3489
5400
|
});
|
3490
5401
|
}
|
3491
5402
|
async update(a, b, c, d) {
|
3492
|
-
return __privateGet$
|
5403
|
+
return __privateGet$3(this, _trace).call(this, "update", async () => {
|
3493
5404
|
const ifVersion = parseIfVersion(b, c, d);
|
3494
5405
|
if (Array.isArray(a)) {
|
3495
5406
|
if (a.length === 0)
|
@@ -3500,17 +5411,17 @@ class RestRepository extends Query {
|
|
3500
5411
|
ifVersion,
|
3501
5412
|
upsert: false
|
3502
5413
|
});
|
3503
|
-
const columns =
|
5414
|
+
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
3504
5415
|
const result = await this.read(a, columns);
|
3505
5416
|
return result;
|
3506
5417
|
}
|
3507
5418
|
try {
|
3508
5419
|
if (isString(a) && isObject(b)) {
|
3509
|
-
const columns =
|
5420
|
+
const columns = isValidSelectableColumns(c) ? c : void 0;
|
3510
5421
|
return await __privateMethod$2(this, _updateRecordWithID, updateRecordWithID_fn).call(this, a, b, columns, { ifVersion });
|
3511
5422
|
}
|
3512
5423
|
if (isObject(a) && isString(a.id)) {
|
3513
|
-
const columns =
|
5424
|
+
const columns = isValidSelectableColumns(b) ? b : void 0;
|
3514
5425
|
return await __privateMethod$2(this, _updateRecordWithID, updateRecordWithID_fn).call(this, a.id, { ...a, id: void 0 }, columns, { ifVersion });
|
3515
5426
|
}
|
3516
5427
|
} catch (error) {
|
@@ -3522,7 +5433,7 @@ class RestRepository extends Query {
|
|
3522
5433
|
});
|
3523
5434
|
}
|
3524
5435
|
async updateOrThrow(a, b, c, d) {
|
3525
|
-
return __privateGet$
|
5436
|
+
return __privateGet$3(this, _trace).call(this, "updateOrThrow", async () => {
|
3526
5437
|
const result = await this.update(a, b, c, d);
|
3527
5438
|
if (Array.isArray(result)) {
|
3528
5439
|
const missingIds = compact(
|
@@ -3541,7 +5452,7 @@ class RestRepository extends Query {
|
|
3541
5452
|
});
|
3542
5453
|
}
|
3543
5454
|
async createOrUpdate(a, b, c, d) {
|
3544
|
-
return __privateGet$
|
5455
|
+
return __privateGet$3(this, _trace).call(this, "createOrUpdate", async () => {
|
3545
5456
|
const ifVersion = parseIfVersion(b, c, d);
|
3546
5457
|
if (Array.isArray(a)) {
|
3547
5458
|
if (a.length === 0)
|
@@ -3550,20 +5461,20 @@ class RestRepository extends Query {
|
|
3550
5461
|
ifVersion,
|
3551
5462
|
upsert: true
|
3552
5463
|
});
|
3553
|
-
const columns =
|
5464
|
+
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
3554
5465
|
const result = await this.read(a, columns);
|
3555
5466
|
return result;
|
3556
5467
|
}
|
3557
5468
|
if (isString(a) && isObject(b)) {
|
3558
5469
|
if (a === "")
|
3559
5470
|
throw new Error("The id can't be empty");
|
3560
|
-
const columns =
|
5471
|
+
const columns = isValidSelectableColumns(c) ? c : void 0;
|
3561
5472
|
return await __privateMethod$2(this, _upsertRecordWithID, upsertRecordWithID_fn).call(this, a, b, columns, { ifVersion });
|
3562
5473
|
}
|
3563
5474
|
if (isObject(a) && isString(a.id)) {
|
3564
5475
|
if (a.id === "")
|
3565
5476
|
throw new Error("The id can't be empty");
|
3566
|
-
const columns =
|
5477
|
+
const columns = isValidSelectableColumns(c) ? c : void 0;
|
3567
5478
|
return await __privateMethod$2(this, _upsertRecordWithID, upsertRecordWithID_fn).call(this, a.id, { ...a, id: void 0 }, columns, { ifVersion });
|
3568
5479
|
}
|
3569
5480
|
if (!isDefined(a) && isObject(b)) {
|
@@ -3576,26 +5487,26 @@ class RestRepository extends Query {
|
|
3576
5487
|
});
|
3577
5488
|
}
|
3578
5489
|
async createOrReplace(a, b, c, d) {
|
3579
|
-
return __privateGet$
|
5490
|
+
return __privateGet$3(this, _trace).call(this, "createOrReplace", async () => {
|
3580
5491
|
const ifVersion = parseIfVersion(b, c, d);
|
3581
5492
|
if (Array.isArray(a)) {
|
3582
5493
|
if (a.length === 0)
|
3583
5494
|
return [];
|
3584
5495
|
const ids = await __privateMethod$2(this, _insertRecords, insertRecords_fn).call(this, a, { ifVersion, createOnly: false });
|
3585
|
-
const columns =
|
5496
|
+
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
3586
5497
|
const result = await this.read(ids, columns);
|
3587
5498
|
return result;
|
3588
5499
|
}
|
3589
5500
|
if (isString(a) && isObject(b)) {
|
3590
5501
|
if (a === "")
|
3591
5502
|
throw new Error("The id can't be empty");
|
3592
|
-
const columns =
|
5503
|
+
const columns = isValidSelectableColumns(c) ? c : void 0;
|
3593
5504
|
return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: false, ifVersion });
|
3594
5505
|
}
|
3595
5506
|
if (isObject(a) && isString(a.id)) {
|
3596
5507
|
if (a.id === "")
|
3597
5508
|
throw new Error("The id can't be empty");
|
3598
|
-
const columns =
|
5509
|
+
const columns = isValidSelectableColumns(c) ? c : void 0;
|
3599
5510
|
return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a.id, { ...a, id: void 0 }, columns, { createOnly: false, ifVersion });
|
3600
5511
|
}
|
3601
5512
|
if (!isDefined(a) && isObject(b)) {
|
@@ -3608,7 +5519,7 @@ class RestRepository extends Query {
|
|
3608
5519
|
});
|
3609
5520
|
}
|
3610
5521
|
async delete(a, b) {
|
3611
|
-
return __privateGet$
|
5522
|
+
return __privateGet$3(this, _trace).call(this, "delete", async () => {
|
3612
5523
|
if (Array.isArray(a)) {
|
3613
5524
|
if (a.length === 0)
|
3614
5525
|
return [];
|
@@ -3619,7 +5530,7 @@ class RestRepository extends Query {
|
|
3619
5530
|
return o.id;
|
3620
5531
|
throw new Error("Invalid arguments for delete method");
|
3621
5532
|
});
|
3622
|
-
const columns =
|
5533
|
+
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
3623
5534
|
const result = await this.read(a, columns);
|
3624
5535
|
await __privateMethod$2(this, _deleteRecords, deleteRecords_fn).call(this, ids);
|
3625
5536
|
return result;
|
@@ -3634,7 +5545,7 @@ class RestRepository extends Query {
|
|
3634
5545
|
});
|
3635
5546
|
}
|
3636
5547
|
async deleteOrThrow(a, b) {
|
3637
|
-
return __privateGet$
|
5548
|
+
return __privateGet$3(this, _trace).call(this, "deleteOrThrow", async () => {
|
3638
5549
|
const result = await this.delete(a, b);
|
3639
5550
|
if (Array.isArray(result)) {
|
3640
5551
|
const missingIds = compact(
|
@@ -3652,13 +5563,13 @@ class RestRepository extends Query {
|
|
3652
5563
|
});
|
3653
5564
|
}
|
3654
5565
|
async search(query, options = {}) {
|
3655
|
-
return __privateGet$
|
3656
|
-
const { records } = await searchTable({
|
5566
|
+
return __privateGet$3(this, _trace).call(this, "search", async () => {
|
5567
|
+
const { records, totalCount } = await searchTable({
|
3657
5568
|
pathParams: {
|
3658
5569
|
workspace: "{workspaceId}",
|
3659
5570
|
dbBranchName: "{dbBranch}",
|
3660
5571
|
region: "{region}",
|
3661
|
-
tableName: __privateGet$
|
5572
|
+
tableName: __privateGet$3(this, _table)
|
3662
5573
|
},
|
3663
5574
|
body: {
|
3664
5575
|
query,
|
@@ -3670,20 +5581,23 @@ class RestRepository extends Query {
|
|
3670
5581
|
page: options.page,
|
3671
5582
|
target: options.target
|
3672
5583
|
},
|
3673
|
-
...__privateGet$
|
5584
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
3674
5585
|
});
|
3675
|
-
const schemaTables = await __privateMethod$2(this, _getSchemaTables
|
3676
|
-
return
|
5586
|
+
const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
|
5587
|
+
return {
|
5588
|
+
records: records.map((item) => initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), item, ["*"])),
|
5589
|
+
totalCount
|
5590
|
+
};
|
3677
5591
|
});
|
3678
5592
|
}
|
3679
5593
|
async vectorSearch(column, query, options) {
|
3680
|
-
return __privateGet$
|
3681
|
-
const { records } = await vectorSearchTable({
|
5594
|
+
return __privateGet$3(this, _trace).call(this, "vectorSearch", async () => {
|
5595
|
+
const { records, totalCount } = await vectorSearchTable({
|
3682
5596
|
pathParams: {
|
3683
5597
|
workspace: "{workspaceId}",
|
3684
5598
|
dbBranchName: "{dbBranch}",
|
3685
5599
|
region: "{region}",
|
3686
|
-
tableName: __privateGet$
|
5600
|
+
tableName: __privateGet$3(this, _table)
|
3687
5601
|
},
|
3688
5602
|
body: {
|
3689
5603
|
column,
|
@@ -3692,29 +5606,32 @@ class RestRepository extends Query {
|
|
3692
5606
|
size: options?.size,
|
3693
5607
|
filter: options?.filter
|
3694
5608
|
},
|
3695
|
-
...__privateGet$
|
5609
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
3696
5610
|
});
|
3697
|
-
const schemaTables = await __privateMethod$2(this, _getSchemaTables
|
3698
|
-
return
|
5611
|
+
const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
|
5612
|
+
return {
|
5613
|
+
records: records.map((item) => initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), item, ["*"])),
|
5614
|
+
totalCount
|
5615
|
+
};
|
3699
5616
|
});
|
3700
5617
|
}
|
3701
5618
|
async aggregate(aggs, filter) {
|
3702
|
-
return __privateGet$
|
5619
|
+
return __privateGet$3(this, _trace).call(this, "aggregate", async () => {
|
3703
5620
|
const result = await aggregateTable({
|
3704
5621
|
pathParams: {
|
3705
5622
|
workspace: "{workspaceId}",
|
3706
5623
|
dbBranchName: "{dbBranch}",
|
3707
5624
|
region: "{region}",
|
3708
|
-
tableName: __privateGet$
|
5625
|
+
tableName: __privateGet$3(this, _table)
|
3709
5626
|
},
|
3710
5627
|
body: { aggs, filter },
|
3711
|
-
...__privateGet$
|
5628
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
3712
5629
|
});
|
3713
5630
|
return result;
|
3714
5631
|
});
|
3715
5632
|
}
|
3716
5633
|
async query(query) {
|
3717
|
-
return __privateGet$
|
5634
|
+
return __privateGet$3(this, _trace).call(this, "query", async () => {
|
3718
5635
|
const cacheQuery = await __privateMethod$2(this, _getCacheQuery, getCacheQuery_fn).call(this, query);
|
3719
5636
|
if (cacheQuery)
|
3720
5637
|
return new Page(query, cacheQuery.meta, cacheQuery.records);
|
@@ -3724,7 +5641,7 @@ class RestRepository extends Query {
|
|
3724
5641
|
workspace: "{workspaceId}",
|
3725
5642
|
dbBranchName: "{dbBranch}",
|
3726
5643
|
region: "{region}",
|
3727
|
-
tableName: __privateGet$
|
5644
|
+
tableName: __privateGet$3(this, _table)
|
3728
5645
|
},
|
3729
5646
|
body: {
|
3730
5647
|
filter: cleanFilter(data.filter),
|
@@ -3734,25 +5651,31 @@ class RestRepository extends Query {
|
|
3734
5651
|
consistency: data.consistency
|
3735
5652
|
},
|
3736
5653
|
fetchOptions: data.fetchOptions,
|
3737
|
-
...__privateGet$
|
5654
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
3738
5655
|
});
|
3739
|
-
const schemaTables = await __privateMethod$2(this, _getSchemaTables
|
5656
|
+
const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
|
3740
5657
|
const records = objects.map(
|
3741
|
-
(record) => initObject(
|
5658
|
+
(record) => initObject(
|
5659
|
+
__privateGet$3(this, _db),
|
5660
|
+
schemaTables,
|
5661
|
+
__privateGet$3(this, _table),
|
5662
|
+
record,
|
5663
|
+
data.columns ?? ["*"]
|
5664
|
+
)
|
3742
5665
|
);
|
3743
5666
|
await __privateMethod$2(this, _setCacheQuery, setCacheQuery_fn).call(this, query, meta, records);
|
3744
5667
|
return new Page(query, meta, records);
|
3745
5668
|
});
|
3746
5669
|
}
|
3747
5670
|
async summarizeTable(query, summaries, summariesFilter) {
|
3748
|
-
return __privateGet$
|
5671
|
+
return __privateGet$3(this, _trace).call(this, "summarize", async () => {
|
3749
5672
|
const data = query.getQueryOptions();
|
3750
5673
|
const result = await summarizeTable({
|
3751
5674
|
pathParams: {
|
3752
5675
|
workspace: "{workspaceId}",
|
3753
5676
|
dbBranchName: "{dbBranch}",
|
3754
5677
|
region: "{region}",
|
3755
|
-
tableName: __privateGet$
|
5678
|
+
tableName: __privateGet$3(this, _table)
|
3756
5679
|
},
|
3757
5680
|
body: {
|
3758
5681
|
filter: cleanFilter(data.filter),
|
@@ -3763,9 +5686,15 @@ class RestRepository extends Query {
|
|
3763
5686
|
summaries,
|
3764
5687
|
summariesFilter
|
3765
5688
|
},
|
3766
|
-
...__privateGet$
|
5689
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
3767
5690
|
});
|
3768
|
-
|
5691
|
+
const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
|
5692
|
+
return {
|
5693
|
+
...result,
|
5694
|
+
summaries: result.summaries.map(
|
5695
|
+
(summary) => initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), summary, data.columns ?? [])
|
5696
|
+
)
|
5697
|
+
};
|
3769
5698
|
});
|
3770
5699
|
}
|
3771
5700
|
ask(question, options) {
|
@@ -3775,7 +5704,7 @@ class RestRepository extends Query {
|
|
3775
5704
|
workspace: "{workspaceId}",
|
3776
5705
|
dbBranchName: "{dbBranch}",
|
3777
5706
|
region: "{region}",
|
3778
|
-
tableName: __privateGet$
|
5707
|
+
tableName: __privateGet$3(this, _table),
|
3779
5708
|
sessionId: options?.sessionId
|
3780
5709
|
},
|
3781
5710
|
body: {
|
@@ -3785,7 +5714,7 @@ class RestRepository extends Query {
|
|
3785
5714
|
search: options?.searchType === "keyword" ? options?.search : void 0,
|
3786
5715
|
vectorSearch: options?.searchType === "vector" ? options?.vectorSearch : void 0
|
3787
5716
|
},
|
3788
|
-
...__privateGet$
|
5717
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
3789
5718
|
};
|
3790
5719
|
if (options?.onMessage) {
|
3791
5720
|
fetchSSERequest({
|
@@ -3806,7 +5735,7 @@ _table = new WeakMap();
|
|
3806
5735
|
_getFetchProps = new WeakMap();
|
3807
5736
|
_db = new WeakMap();
|
3808
5737
|
_cache = new WeakMap();
|
3809
|
-
_schemaTables
|
5738
|
+
_schemaTables = new WeakMap();
|
3810
5739
|
_trace = new WeakMap();
|
3811
5740
|
_insertRecordWithoutId = new WeakSet();
|
3812
5741
|
insertRecordWithoutId_fn = async function(object, columns = ["*"]) {
|
@@ -3816,14 +5745,14 @@ insertRecordWithoutId_fn = async function(object, columns = ["*"]) {
|
|
3816
5745
|
workspace: "{workspaceId}",
|
3817
5746
|
dbBranchName: "{dbBranch}",
|
3818
5747
|
region: "{region}",
|
3819
|
-
tableName: __privateGet$
|
5748
|
+
tableName: __privateGet$3(this, _table)
|
3820
5749
|
},
|
3821
5750
|
queryParams: { columns },
|
3822
5751
|
body: record,
|
3823
|
-
...__privateGet$
|
5752
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
3824
5753
|
});
|
3825
|
-
const schemaTables = await __privateMethod$2(this, _getSchemaTables
|
3826
|
-
return initObject(__privateGet$
|
5754
|
+
const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
|
5755
|
+
return initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), response, columns);
|
3827
5756
|
};
|
3828
5757
|
_insertRecordWithId = new WeakSet();
|
3829
5758
|
insertRecordWithId_fn = async function(recordId, object, columns = ["*"], { createOnly, ifVersion }) {
|
@@ -3835,21 +5764,21 @@ insertRecordWithId_fn = async function(recordId, object, columns = ["*"], { crea
|
|
3835
5764
|
workspace: "{workspaceId}",
|
3836
5765
|
dbBranchName: "{dbBranch}",
|
3837
5766
|
region: "{region}",
|
3838
|
-
tableName: __privateGet$
|
5767
|
+
tableName: __privateGet$3(this, _table),
|
3839
5768
|
recordId
|
3840
5769
|
},
|
3841
5770
|
body: record,
|
3842
5771
|
queryParams: { createOnly, columns, ifVersion },
|
3843
|
-
...__privateGet$
|
5772
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
3844
5773
|
});
|
3845
|
-
const schemaTables = await __privateMethod$2(this, _getSchemaTables
|
3846
|
-
return initObject(__privateGet$
|
5774
|
+
const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
|
5775
|
+
return initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), response, columns);
|
3847
5776
|
};
|
3848
5777
|
_insertRecords = new WeakSet();
|
3849
5778
|
insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
|
3850
5779
|
const operations = await promiseMap(objects, async (object) => {
|
3851
5780
|
const record = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
|
3852
|
-
return { insert: { table: __privateGet$
|
5781
|
+
return { insert: { table: __privateGet$3(this, _table), record, createOnly, ifVersion } };
|
3853
5782
|
});
|
3854
5783
|
const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
|
3855
5784
|
const ids = [];
|
@@ -3861,7 +5790,7 @@ insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
|
|
3861
5790
|
region: "{region}"
|
3862
5791
|
},
|
3863
5792
|
body: { operations: operations2 },
|
3864
|
-
...__privateGet$
|
5793
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
3865
5794
|
});
|
3866
5795
|
for (const result of results) {
|
3867
5796
|
if (result.operation === "insert") {
|
@@ -3884,15 +5813,15 @@ updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
|
|
3884
5813
|
workspace: "{workspaceId}",
|
3885
5814
|
dbBranchName: "{dbBranch}",
|
3886
5815
|
region: "{region}",
|
3887
|
-
tableName: __privateGet$
|
5816
|
+
tableName: __privateGet$3(this, _table),
|
3888
5817
|
recordId
|
3889
5818
|
},
|
3890
5819
|
queryParams: { columns, ifVersion },
|
3891
5820
|
body: record,
|
3892
|
-
...__privateGet$
|
5821
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
3893
5822
|
});
|
3894
|
-
const schemaTables = await __privateMethod$2(this, _getSchemaTables
|
3895
|
-
return initObject(__privateGet$
|
5823
|
+
const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
|
5824
|
+
return initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), response, columns);
|
3896
5825
|
} catch (e) {
|
3897
5826
|
if (isObject(e) && e.status === 404) {
|
3898
5827
|
return null;
|
@@ -3904,7 +5833,7 @@ _updateRecords = new WeakSet();
|
|
3904
5833
|
updateRecords_fn = async function(objects, { ifVersion, upsert }) {
|
3905
5834
|
const operations = await promiseMap(objects, async ({ id, ...object }) => {
|
3906
5835
|
const fields = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
|
3907
|
-
return { update: { table: __privateGet$
|
5836
|
+
return { update: { table: __privateGet$3(this, _table), id, ifVersion, upsert, fields } };
|
3908
5837
|
});
|
3909
5838
|
const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
|
3910
5839
|
const ids = [];
|
@@ -3916,7 +5845,7 @@ updateRecords_fn = async function(objects, { ifVersion, upsert }) {
|
|
3916
5845
|
region: "{region}"
|
3917
5846
|
},
|
3918
5847
|
body: { operations: operations2 },
|
3919
|
-
...__privateGet$
|
5848
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
3920
5849
|
});
|
3921
5850
|
for (const result of results) {
|
3922
5851
|
if (result.operation === "update") {
|
@@ -3937,15 +5866,15 @@ upsertRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
|
|
3937
5866
|
workspace: "{workspaceId}",
|
3938
5867
|
dbBranchName: "{dbBranch}",
|
3939
5868
|
region: "{region}",
|
3940
|
-
tableName: __privateGet$
|
5869
|
+
tableName: __privateGet$3(this, _table),
|
3941
5870
|
recordId
|
3942
5871
|
},
|
3943
5872
|
queryParams: { columns, ifVersion },
|
3944
5873
|
body: object,
|
3945
|
-
...__privateGet$
|
5874
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
3946
5875
|
});
|
3947
|
-
const schemaTables = await __privateMethod$2(this, _getSchemaTables
|
3948
|
-
return initObject(__privateGet$
|
5876
|
+
const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
|
5877
|
+
return initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), response, columns);
|
3949
5878
|
};
|
3950
5879
|
_deleteRecord = new WeakSet();
|
3951
5880
|
deleteRecord_fn = async function(recordId, columns = ["*"]) {
|
@@ -3957,14 +5886,14 @@ deleteRecord_fn = async function(recordId, columns = ["*"]) {
|
|
3957
5886
|
workspace: "{workspaceId}",
|
3958
5887
|
dbBranchName: "{dbBranch}",
|
3959
5888
|
region: "{region}",
|
3960
|
-
tableName: __privateGet$
|
5889
|
+
tableName: __privateGet$3(this, _table),
|
3961
5890
|
recordId
|
3962
5891
|
},
|
3963
5892
|
queryParams: { columns },
|
3964
|
-
...__privateGet$
|
5893
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
3965
5894
|
});
|
3966
|
-
const schemaTables = await __privateMethod$2(this, _getSchemaTables
|
3967
|
-
return initObject(__privateGet$
|
5895
|
+
const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
|
5896
|
+
return initObject(__privateGet$3(this, _db), schemaTables, __privateGet$3(this, _table), response, columns);
|
3968
5897
|
} catch (e) {
|
3969
5898
|
if (isObject(e) && e.status === 404) {
|
3970
5899
|
return null;
|
@@ -3975,7 +5904,7 @@ deleteRecord_fn = async function(recordId, columns = ["*"]) {
|
|
3975
5904
|
_deleteRecords = new WeakSet();
|
3976
5905
|
deleteRecords_fn = async function(recordIds) {
|
3977
5906
|
const chunkedOperations = chunk(
|
3978
|
-
compact(recordIds).map((id) => ({ delete: { table: __privateGet$
|
5907
|
+
compact(recordIds).map((id) => ({ delete: { table: __privateGet$3(this, _table), id } })),
|
3979
5908
|
BULK_OPERATION_MAX_SIZE
|
3980
5909
|
);
|
3981
5910
|
for (const operations of chunkedOperations) {
|
@@ -3986,44 +5915,44 @@ deleteRecords_fn = async function(recordIds) {
|
|
3986
5915
|
region: "{region}"
|
3987
5916
|
},
|
3988
5917
|
body: { operations },
|
3989
|
-
...__privateGet$
|
5918
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
3990
5919
|
});
|
3991
5920
|
}
|
3992
5921
|
};
|
3993
5922
|
_setCacheQuery = new WeakSet();
|
3994
5923
|
setCacheQuery_fn = async function(query, meta, records) {
|
3995
|
-
await __privateGet$
|
5924
|
+
await __privateGet$3(this, _cache)?.set(`query_${__privateGet$3(this, _table)}:${query.key()}`, { date: /* @__PURE__ */ new Date(), meta, records });
|
3996
5925
|
};
|
3997
5926
|
_getCacheQuery = new WeakSet();
|
3998
5927
|
getCacheQuery_fn = async function(query) {
|
3999
|
-
const key = `query_${__privateGet$
|
4000
|
-
const result = await __privateGet$
|
5928
|
+
const key = `query_${__privateGet$3(this, _table)}:${query.key()}`;
|
5929
|
+
const result = await __privateGet$3(this, _cache)?.get(key);
|
4001
5930
|
if (!result)
|
4002
5931
|
return null;
|
4003
|
-
const defaultTTL = __privateGet$
|
5932
|
+
const defaultTTL = __privateGet$3(this, _cache)?.defaultQueryTTL ?? -1;
|
4004
5933
|
const { cache: ttl = defaultTTL } = query.getQueryOptions();
|
4005
5934
|
if (ttl < 0)
|
4006
5935
|
return null;
|
4007
5936
|
const hasExpired = result.date.getTime() + ttl < Date.now();
|
4008
5937
|
return hasExpired ? null : result;
|
4009
5938
|
};
|
4010
|
-
_getSchemaTables
|
4011
|
-
getSchemaTables_fn
|
4012
|
-
if (__privateGet$
|
4013
|
-
return __privateGet$
|
5939
|
+
_getSchemaTables = new WeakSet();
|
5940
|
+
getSchemaTables_fn = async function() {
|
5941
|
+
if (__privateGet$3(this, _schemaTables))
|
5942
|
+
return __privateGet$3(this, _schemaTables);
|
4014
5943
|
const { schema } = await getBranchDetails({
|
4015
5944
|
pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
|
4016
|
-
...__privateGet$
|
5945
|
+
...__privateGet$3(this, _getFetchProps).call(this)
|
4017
5946
|
});
|
4018
|
-
__privateSet$
|
5947
|
+
__privateSet$2(this, _schemaTables, schema.tables);
|
4019
5948
|
return schema.tables;
|
4020
5949
|
};
|
4021
5950
|
_transformObjectToApi = new WeakSet();
|
4022
5951
|
transformObjectToApi_fn = async function(object) {
|
4023
|
-
const schemaTables = await __privateMethod$2(this, _getSchemaTables
|
4024
|
-
const schema = schemaTables.find((table) => table.name === __privateGet$
|
5952
|
+
const schemaTables = await __privateMethod$2(this, _getSchemaTables, getSchemaTables_fn).call(this);
|
5953
|
+
const schema = schemaTables.find((table) => table.name === __privateGet$3(this, _table));
|
4025
5954
|
if (!schema)
|
4026
|
-
throw new Error(`Table ${__privateGet$
|
5955
|
+
throw new Error(`Table ${__privateGet$3(this, _table)} not found in schema`);
|
4027
5956
|
const result = {};
|
4028
5957
|
for (const [key, value] of Object.entries(object)) {
|
4029
5958
|
if (key === "xata")
|
@@ -4044,19 +5973,15 @@ transformObjectToApi_fn = async function(object) {
|
|
4044
5973
|
case "file[]":
|
4045
5974
|
result[key] = await promiseMap(value, (item) => parseInputFileEntry(item));
|
4046
5975
|
break;
|
5976
|
+
case "json":
|
5977
|
+
result[key] = stringifyJson(value);
|
5978
|
+
break;
|
4047
5979
|
default:
|
4048
5980
|
result[key] = value;
|
4049
5981
|
}
|
4050
5982
|
}
|
4051
5983
|
return result;
|
4052
5984
|
};
|
4053
|
-
const removeLinksFromObject = (object) => {
|
4054
|
-
return Object.entries(object).reduce((acc, [key, value]) => {
|
4055
|
-
if (key === "xata")
|
4056
|
-
return acc;
|
4057
|
-
return { ...acc, [key]: isIdentifiable(value) ? value.id : value };
|
4058
|
-
}, {});
|
4059
|
-
};
|
4060
5985
|
const initObject = (db, schemaTables, table, object, selectedColumns) => {
|
4061
5986
|
const data = {};
|
4062
5987
|
const { xata, ...rest } = object ?? {};
|
@@ -4087,13 +6012,19 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
|
|
4087
6012
|
if (item === column.name) {
|
4088
6013
|
return [...acc, "*"];
|
4089
6014
|
}
|
4090
|
-
if (item.startsWith(`${column.name}.`)) {
|
6015
|
+
if (isString(item) && item.startsWith(`${column.name}.`)) {
|
4091
6016
|
const [, ...path] = item.split(".");
|
4092
6017
|
return [...acc, path.join(".")];
|
4093
6018
|
}
|
4094
6019
|
return acc;
|
4095
6020
|
}, []);
|
4096
|
-
data[column.name] = initObject(
|
6021
|
+
data[column.name] = initObject(
|
6022
|
+
db,
|
6023
|
+
schemaTables,
|
6024
|
+
linkTable,
|
6025
|
+
value,
|
6026
|
+
selectedLinkColumns
|
6027
|
+
);
|
4097
6028
|
} else {
|
4098
6029
|
data[column.name] = null;
|
4099
6030
|
}
|
@@ -4105,6 +6036,9 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
|
|
4105
6036
|
case "file[]":
|
4106
6037
|
data[column.name] = value?.map((item) => new XataFile(item)) ?? null;
|
4107
6038
|
break;
|
6039
|
+
case "json":
|
6040
|
+
data[column.name] = parseJson(value);
|
6041
|
+
break;
|
4108
6042
|
default:
|
4109
6043
|
data[column.name] = value ?? null;
|
4110
6044
|
if (column.notNull === true && value === null) {
|
@@ -4114,33 +6048,34 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
|
|
4114
6048
|
}
|
4115
6049
|
}
|
4116
6050
|
const record = { ...data };
|
4117
|
-
const serializable = { xata, ...removeLinksFromObject(data) };
|
4118
6051
|
const metadata = xata !== void 0 ? { ...xata, createdAt: new Date(xata.createdAt), updatedAt: new Date(xata.updatedAt) } : void 0;
|
4119
6052
|
record.read = function(columns2) {
|
4120
6053
|
return db[table].read(record["id"], columns2);
|
4121
6054
|
};
|
4122
6055
|
record.update = function(data2, b, c) {
|
4123
|
-
const columns2 =
|
6056
|
+
const columns2 = isValidSelectableColumns(b) ? b : ["*"];
|
4124
6057
|
const ifVersion = parseIfVersion(b, c);
|
4125
6058
|
return db[table].update(record["id"], data2, columns2, { ifVersion });
|
4126
6059
|
};
|
4127
6060
|
record.replace = function(data2, b, c) {
|
4128
|
-
const columns2 =
|
6061
|
+
const columns2 = isValidSelectableColumns(b) ? b : ["*"];
|
4129
6062
|
const ifVersion = parseIfVersion(b, c);
|
4130
6063
|
return db[table].createOrReplace(record["id"], data2, columns2, { ifVersion });
|
4131
6064
|
};
|
4132
6065
|
record.delete = function() {
|
4133
6066
|
return db[table].delete(record["id"]);
|
4134
6067
|
};
|
4135
|
-
|
6068
|
+
if (metadata !== void 0) {
|
6069
|
+
record.xata = Object.freeze(metadata);
|
6070
|
+
}
|
4136
6071
|
record.getMetadata = function() {
|
4137
6072
|
return record.xata;
|
4138
6073
|
};
|
4139
6074
|
record.toSerializable = function() {
|
4140
|
-
return JSON.parse(JSON.stringify(
|
6075
|
+
return JSON.parse(JSON.stringify(record));
|
4141
6076
|
};
|
4142
6077
|
record.toString = function() {
|
4143
|
-
return JSON.stringify(
|
6078
|
+
return JSON.stringify(record);
|
4144
6079
|
};
|
4145
6080
|
for (const prop of ["read", "update", "replace", "delete", "getMetadata", "toSerializable", "toString"]) {
|
4146
6081
|
Object.defineProperty(record, prop, { enumerable: false });
|
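The hunk above adjusts the helper methods attached to each hydrated record: `update`/`replace` fall back to `["*"]` unless a valid column list is passed, the `xata` metadata object is frozen when present, and `toSerializable`/`toString` now serialise the record itself. A usage sketch, assuming an already-initialised client `xata` and a hypothetical `posts` table:

```ts
// Sketch only: the "posts" table is an assumption.
const post = await xata.db.posts.read("rec_xyz");
if (post) {
  await post.update({ title: "Updated title" });   // column selection defaults to ["*"]
  const meta = post.getMetadata();                  // frozen metadata (createdAt, updatedAt, ...)
  const plain = post.toSerializable();              // JSON-safe copy; the non-enumerable helpers are dropped
  console.log(meta?.updatedAt, JSON.stringify(plain));
}
```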
@@ -4158,7 +6093,7 @@ function extractId(value) {
|
|
4158
6093
|
function isValidColumn(columns, column) {
|
4159
6094
|
if (columns.includes("*"))
|
4160
6095
|
return true;
|
4161
|
-
return columns.filter((item) => item.startsWith(column.name)).length > 0;
|
6096
|
+
return columns.filter((item) => isString(item) && item.startsWith(column.name)).length > 0;
|
4162
6097
|
}
|
4163
6098
|
function parseIfVersion(...args) {
|
4164
6099
|
for (const arg of args) {
|
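`parseIfVersion` above extracts an optional `{ ifVersion }` from the trailing arguments, which the record helpers forward to `update`/`createOrReplace` for optimistic concurrency. A hedged sketch, assuming an already-initialised client `xata`, a hypothetical `posts` table, and that the record's `xata` metadata carries a `version` field:

```ts
// Sketch only: the table name and the use of xata.version are assumptions for illustration.
// The update is applied only if the stored record is still at the version we read.
const post = await xata.db.posts.read("rec_xyz");
if (post) {
  await xata.db.posts.update("rec_xyz", { title: "New title" }, ["*"], { ifVersion: post.xata?.version });
}
```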
@@ -4169,17 +6104,11 @@ function parseIfVersion(...args) {
|
|
4169
6104
|
return void 0;
|
4170
6105
|
}
|
4171
6106
|
|
4172
|
-
var __defProp$3 = Object.defineProperty;
|
4173
|
-
var __defNormalProp$3 = (obj, key, value) => key in obj ? __defProp$3(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
|
4174
|
-
var __publicField$3 = (obj, key, value) => {
|
4175
|
-
__defNormalProp$3(obj, typeof key !== "symbol" ? key + "" : key, value);
|
4176
|
-
return value;
|
4177
|
-
};
|
4178
6107
|
var __accessCheck$3 = (obj, member, msg) => {
|
4179
6108
|
if (!member.has(obj))
|
4180
6109
|
throw TypeError("Cannot " + msg);
|
4181
6110
|
};
|
4182
|
-
var __privateGet$
|
6111
|
+
var __privateGet$2 = (obj, member, getter) => {
|
4183
6112
|
__accessCheck$3(obj, member, "read from private field");
|
4184
6113
|
return getter ? getter.call(obj) : member.get(obj);
|
4185
6114
|
};
|
@@ -4188,7 +6117,7 @@ var __privateAdd$3 = (obj, member, value) => {
|
|
4188
6117
|
throw TypeError("Cannot add the same private member more than once");
|
4189
6118
|
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
4190
6119
|
};
|
4191
|
-
var __privateSet$
|
6120
|
+
var __privateSet$1 = (obj, member, value, setter) => {
|
4192
6121
|
__accessCheck$3(obj, member, "write to private field");
|
4193
6122
|
setter ? setter.call(obj, value) : member.set(obj, value);
|
4194
6123
|
return value;
|
@@ -4197,31 +6126,29 @@ var _map;
|
|
4197
6126
|
class SimpleCache {
|
4198
6127
|
constructor(options = {}) {
|
4199
6128
|
__privateAdd$3(this, _map, void 0);
|
4200
|
-
|
4201
|
-
__publicField$3(this, "defaultQueryTTL");
|
4202
|
-
__privateSet$3(this, _map, /* @__PURE__ */ new Map());
|
6129
|
+
__privateSet$1(this, _map, /* @__PURE__ */ new Map());
|
4203
6130
|
this.capacity = options.max ?? 500;
|
4204
6131
|
this.defaultQueryTTL = options.defaultQueryTTL ?? 60 * 1e3;
|
4205
6132
|
}
|
4206
6133
|
async getAll() {
|
4207
|
-
return Object.fromEntries(__privateGet$
|
6134
|
+
return Object.fromEntries(__privateGet$2(this, _map));
|
4208
6135
|
}
|
4209
6136
|
async get(key) {
|
4210
|
-
return __privateGet$
|
6137
|
+
return __privateGet$2(this, _map).get(key) ?? null;
|
4211
6138
|
}
|
4212
6139
|
async set(key, value) {
|
4213
6140
|
await this.delete(key);
|
4214
|
-
__privateGet$
|
4215
|
-
if (__privateGet$
|
4216
|
-
const leastRecentlyUsed = __privateGet$
|
6141
|
+
__privateGet$2(this, _map).set(key, value);
|
6142
|
+
if (__privateGet$2(this, _map).size > this.capacity) {
|
6143
|
+
const leastRecentlyUsed = __privateGet$2(this, _map).keys().next().value;
|
4217
6144
|
await this.delete(leastRecentlyUsed);
|
4218
6145
|
}
|
4219
6146
|
}
|
4220
6147
|
async delete(key) {
|
4221
|
-
__privateGet$
|
6148
|
+
__privateGet$2(this, _map).delete(key);
|
4222
6149
|
}
|
4223
6150
|
async clear() {
|
4224
|
-
return __privateGet$
|
6151
|
+
return __privateGet$2(this, _map).clear();
|
4225
6152
|
}
|
4226
6153
|
}
|
4227
6154
|
_map = new WeakMap();
|
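`SimpleCache` above is a Map-backed cache: once more than `capacity` (the `max` option) keys are stored it evicts the oldest entry, and `defaultQueryTTL` falls back to 60 seconds. Since the client builder threads a `cache` option into every plugin, a custom instance can be supplied at construction time. A hedged sketch with placeholder credentials:

```ts
import { BaseClient, SimpleCache } from "@xata.io/client";

// Sketch only: the environment variable names are placeholders.
// Keep at most 100 entries and expire cached query results after 30 seconds.
const cache = new SimpleCache({ max: 100, defaultQueryTTL: 30_000 });

const xata = new BaseClient({
  databaseURL: process.env.XATA_DATABASE_URL,
  apiKey: process.env.XATA_API_KEY,
  cache
});
```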
@@ -4243,10 +6170,12 @@ const notExists = (column) => ({ $notExists: column });
|
|
4243
6170
|
const startsWith = (value) => ({ $startsWith: value });
|
4244
6171
|
const endsWith = (value) => ({ $endsWith: value });
|
4245
6172
|
const pattern = (value) => ({ $pattern: value });
|
6173
|
+
const iPattern = (value) => ({ $iPattern: value });
|
4246
6174
|
const is = (value) => ({ $is: value });
|
4247
6175
|
const equals = is;
|
4248
6176
|
const isNot = (value) => ({ $isNot: value });
|
4249
6177
|
const contains = (value) => ({ $contains: value });
|
6178
|
+
const iContains = (value) => ({ $iContains: value });
|
4250
6179
|
const includes = (value) => ({ $includes: value });
|
4251
6180
|
const includesAll = (value) => ({ $includesAll: value });
|
4252
6181
|
const includesNone = (value) => ({ $includesNone: value });
|
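The two helpers added above, `iPattern` and `iContains`, build the case-insensitive `$iPattern` / `$iContains` filter operators alongside the existing `pattern` and `contains`. A hedged usage sketch, assuming an already-initialised client `xata` and a hypothetical `users` table with a `name` column:

```ts
import { iContains, iPattern } from "@xata.io/client";

// Sketch only: "users" and "name" are assumptions; each helper just wraps the value
// in its operator object, e.g. iContains("smith") yields { $iContains: "smith" }.
const byContains = await xata.db.users.filter("name", iContains("smith")).getMany();
const byPattern = await xata.db.users.filter({ name: iPattern("Sm*th") }).getMany();
```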
@@ -4256,7 +6185,7 @@ var __accessCheck$2 = (obj, member, msg) => {
|
|
4256
6185
|
if (!member.has(obj))
|
4257
6186
|
throw TypeError("Cannot " + msg);
|
4258
6187
|
};
|
4259
|
-
var __privateGet$
|
6188
|
+
var __privateGet$1 = (obj, member, getter) => {
|
4260
6189
|
__accessCheck$2(obj, member, "read from private field");
|
4261
6190
|
return getter ? getter.call(obj) : member.get(obj);
|
4262
6191
|
};
|
@@ -4265,18 +6194,11 @@ var __privateAdd$2 = (obj, member, value) => {
|
|
4265
6194
|
throw TypeError("Cannot add the same private member more than once");
|
4266
6195
|
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
4267
6196
|
};
|
4268
|
-
var
|
4269
|
-
__accessCheck$2(obj, member, "write to private field");
|
4270
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
4271
|
-
return value;
|
4272
|
-
};
|
4273
|
-
var _tables, _schemaTables$1;
|
6197
|
+
var _tables;
|
4274
6198
|
class SchemaPlugin extends XataPlugin {
|
4275
|
-
constructor(
|
6199
|
+
constructor() {
|
4276
6200
|
super();
|
4277
6201
|
__privateAdd$2(this, _tables, {});
|
4278
|
-
__privateAdd$2(this, _schemaTables$1, void 0);
|
4279
|
-
__privateSet$2(this, _schemaTables$1, schemaTables);
|
4280
6202
|
}
|
4281
6203
|
build(pluginOptions) {
|
4282
6204
|
const db = new Proxy(
|
@@ -4285,102 +6207,268 @@ class SchemaPlugin extends XataPlugin {
|
|
4285
6207
|
get: (_target, table) => {
|
4286
6208
|
if (!isString(table))
|
4287
6209
|
throw new Error("Invalid table name");
|
4288
|
-
if (__privateGet$
|
4289
|
-
__privateGet$
|
6210
|
+
if (__privateGet$1(this, _tables)[table] === void 0) {
|
6211
|
+
__privateGet$1(this, _tables)[table] = new RestRepository({ db, pluginOptions, table, schemaTables: pluginOptions.tables });
|
4290
6212
|
}
|
4291
|
-
return __privateGet$
|
6213
|
+
return __privateGet$1(this, _tables)[table];
|
4292
6214
|
}
|
4293
6215
|
}
|
4294
6216
|
);
|
4295
|
-
const tableNames =
|
6217
|
+
const tableNames = pluginOptions.tables?.map(({ name }) => name) ?? [];
|
4296
6218
|
for (const table of tableNames) {
|
4297
|
-
db[table] = new RestRepository({ db, pluginOptions, table, schemaTables:
|
6219
|
+
db[table] = new RestRepository({ db, pluginOptions, table, schemaTables: pluginOptions.tables });
|
4298
6220
|
}
|
4299
6221
|
return db;
|
4300
6222
|
}
|
4301
6223
|
}
|
4302
6224
|
_tables = new WeakMap();
|
4303
|
-
|
6225
|
+
|
6226
|
+
class FilesPlugin extends XataPlugin {
|
6227
|
+
build(pluginOptions) {
|
6228
|
+
return {
|
6229
|
+
download: async (location) => {
|
6230
|
+
const { table, record, column, fileId = "" } = location ?? {};
|
6231
|
+
return await getFileItem({
|
6232
|
+
pathParams: {
|
6233
|
+
workspace: "{workspaceId}",
|
6234
|
+
dbBranchName: "{dbBranch}",
|
6235
|
+
region: "{region}",
|
6236
|
+
tableName: table ?? "",
|
6237
|
+
recordId: record ?? "",
|
6238
|
+
columnName: column ?? "",
|
6239
|
+
fileId
|
6240
|
+
},
|
6241
|
+
...pluginOptions,
|
6242
|
+
rawResponse: true
|
6243
|
+
});
|
6244
|
+
},
|
6245
|
+
upload: async (location, file, options) => {
|
6246
|
+
const { table, record, column, fileId = "" } = location ?? {};
|
6247
|
+
const resolvedFile = await file;
|
6248
|
+
const contentType = options?.mediaType || getContentType(resolvedFile);
|
6249
|
+
const body = resolvedFile instanceof XataFile ? resolvedFile.toBlob() : resolvedFile;
|
6250
|
+
return await putFileItem({
|
6251
|
+
...pluginOptions,
|
6252
|
+
pathParams: {
|
6253
|
+
workspace: "{workspaceId}",
|
6254
|
+
dbBranchName: "{dbBranch}",
|
6255
|
+
region: "{region}",
|
6256
|
+
tableName: table ?? "",
|
6257
|
+
recordId: record ?? "",
|
6258
|
+
columnName: column ?? "",
|
6259
|
+
fileId
|
6260
|
+
},
|
6261
|
+
body,
|
6262
|
+
headers: { "Content-Type": contentType }
|
6263
|
+
});
|
6264
|
+
},
|
6265
|
+
delete: async (location) => {
|
6266
|
+
const { table, record, column, fileId = "" } = location ?? {};
|
6267
|
+
return await deleteFileItem({
|
6268
|
+
pathParams: {
|
6269
|
+
workspace: "{workspaceId}",
|
6270
|
+
dbBranchName: "{dbBranch}",
|
6271
|
+
region: "{region}",
|
6272
|
+
tableName: table ?? "",
|
6273
|
+
recordId: record ?? "",
|
6274
|
+
columnName: column ?? "",
|
6275
|
+
fileId
|
6276
|
+
},
|
6277
|
+
...pluginOptions
|
6278
|
+
});
|
6279
|
+
}
|
6280
|
+
};
|
6281
|
+
}
|
6282
|
+
}
|
6283
|
+
function getContentType(file) {
|
6284
|
+
if (typeof file === "string") {
|
6285
|
+
return "text/plain";
|
6286
|
+
}
|
6287
|
+
if ("mediaType" in file && file.mediaType !== void 0) {
|
6288
|
+
return file.mediaType;
|
6289
|
+
}
|
6290
|
+
if (isBlob(file)) {
|
6291
|
+
return file.type;
|
6292
|
+
}
|
6293
|
+
try {
|
6294
|
+
return file.type;
|
6295
|
+
} catch (e) {
|
6296
|
+
}
|
6297
|
+
return "application/octet-stream";
|
6298
|
+
}
|
4304
6299
|
|
4305
6300
|
var __accessCheck$1 = (obj, member, msg) => {
|
4306
6301
|
if (!member.has(obj))
|
4307
6302
|
throw TypeError("Cannot " + msg);
|
4308
6303
|
};
|
4309
|
-
var __privateGet$1 = (obj, member, getter) => {
|
4310
|
-
__accessCheck$1(obj, member, "read from private field");
|
4311
|
-
return getter ? getter.call(obj) : member.get(obj);
|
4312
|
-
};
|
4313
6304
|
var __privateAdd$1 = (obj, member, value) => {
|
4314
6305
|
if (member.has(obj))
|
4315
6306
|
throw TypeError("Cannot add the same private member more than once");
|
4316
6307
|
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
|
4317
6308
|
};
|
4318
|
-
var __privateSet$1 = (obj, member, value, setter) => {
|
4319
|
-
__accessCheck$1(obj, member, "write to private field");
|
4320
|
-
setter ? setter.call(obj, value) : member.set(obj, value);
|
4321
|
-
return value;
|
4322
|
-
};
|
4323
6309
|
var __privateMethod$1 = (obj, member, method) => {
|
4324
6310
|
__accessCheck$1(obj, member, "access private method");
|
4325
6311
|
return method;
|
4326
6312
|
};
|
4327
|
-
var
|
6313
|
+
var _search, search_fn;
|
4328
6314
|
class SearchPlugin extends XataPlugin {
|
4329
|
-
constructor(db
|
6315
|
+
constructor(db) {
|
4330
6316
|
super();
|
4331
6317
|
this.db = db;
|
4332
6318
|
__privateAdd$1(this, _search);
|
4333
|
-
__privateAdd$1(this, _getSchemaTables);
|
4334
|
-
__privateAdd$1(this, _schemaTables, void 0);
|
4335
|
-
__privateSet$1(this, _schemaTables, schemaTables);
|
4336
6319
|
}
|
4337
6320
|
build(pluginOptions) {
|
4338
6321
|
return {
|
4339
6322
|
all: async (query, options = {}) => {
|
4340
|
-
const records = await __privateMethod$1(this, _search, search_fn).call(this, query, options, pluginOptions);
|
4341
|
-
|
4342
|
-
|
4343
|
-
|
4344
|
-
|
4345
|
-
|
6323
|
+
const { records, totalCount } = await __privateMethod$1(this, _search, search_fn).call(this, query, options, pluginOptions);
|
6324
|
+
return {
|
6325
|
+
totalCount,
|
6326
|
+
records: records.map((record) => {
|
6327
|
+
const { table = "orphan" } = record.xata;
|
6328
|
+
return { table, record: initObject(this.db, pluginOptions.tables, table, record, ["*"]) };
|
6329
|
+
})
|
6330
|
+
};
|
4346
6331
|
},
|
4347
6332
|
byTable: async (query, options = {}) => {
|
4348
|
-
const records = await __privateMethod$1(this, _search, search_fn).call(this, query, options, pluginOptions);
|
4349
|
-
const
|
4350
|
-
return records.reduce((acc, record) => {
|
6333
|
+
const { records: rawRecords, totalCount } = await __privateMethod$1(this, _search, search_fn).call(this, query, options, pluginOptions);
|
6334
|
+
const records = rawRecords.reduce((acc, record) => {
|
4351
6335
|
const { table = "orphan" } = record.xata;
|
4352
6336
|
const items = acc[table] ?? [];
|
4353
|
-
const item = initObject(this.db,
|
6337
|
+
const item = initObject(this.db, pluginOptions.tables, table, record, ["*"]);
|
4354
6338
|
return { ...acc, [table]: [...items, item] };
|
4355
6339
|
}, {});
|
6340
|
+
return { totalCount, records };
|
4356
6341
|
}
|
4357
6342
|
};
|
4358
6343
|
}
|
4359
6344
|
}
|
4360
|
-
_schemaTables = new WeakMap();
|
4361
6345
|
_search = new WeakSet();
|
4362
6346
|
search_fn = async function(query, options, pluginOptions) {
|
4363
6347
|
const { tables, fuzziness, highlight, prefix, page } = options ?? {};
|
4364
|
-
const { records } = await searchBranch({
|
6348
|
+
const { records, totalCount } = await searchBranch({
|
4365
6349
|
pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
|
4366
|
-
// @ts-
|
6350
|
+
// @ts-expect-error Filter properties do not match inferred type
|
4367
6351
|
body: { tables, query, fuzziness, prefix, highlight, page },
|
4368
6352
|
...pluginOptions
|
4369
6353
|
});
|
4370
|
-
return records;
|
4371
|
-
};
|
4372
|
-
_getSchemaTables = new WeakSet();
|
4373
|
-
getSchemaTables_fn = async function(pluginOptions) {
|
4374
|
-
if (__privateGet$1(this, _schemaTables))
|
4375
|
-
return __privateGet$1(this, _schemaTables);
|
4376
|
-
const { schema } = await getBranchDetails({
|
4377
|
-
pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
|
4378
|
-
...pluginOptions
|
4379
|
-
});
|
4380
|
-
__privateSet$1(this, _schemaTables, schema.tables);
|
4381
|
-
return schema.tables;
|
6354
|
+
return { records, totalCount };
|
4382
6355
|
};
|
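`SearchPlugin` now forwards `totalCount` from `searchBranch` through both `all` and `byTable`, alongside the hydrated records. A hedged sketch, assuming an already-initialised client `xata` and hypothetical table names:

```ts
// Sketch only: the table names are assumptions.
const { totalCount, records } = await xata.search.all("jane", { tables: ["users", "posts"], fuzziness: 1 });
console.log(`matched ${totalCount} records`);
for (const { table, record } of records) {
  console.log(table, record.id);
}

// byTable returns the same totalCount, with records grouped per table instead.
const grouped = await xata.search.byTable("jane", { tables: ["users"] });
console.log(grouped.totalCount, grouped.records.users?.length);
```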
4383
6356
|
|
6357
|
+
function escapeElement(elementRepresentation) {
|
6358
|
+
const escaped = elementRepresentation.replace(/\\/g, "\\\\").replace(/"/g, '\\"');
|
6359
|
+
return '"' + escaped + '"';
|
6360
|
+
}
|
6361
|
+
function arrayString(val) {
|
6362
|
+
let result = "{";
|
6363
|
+
for (let i = 0; i < val.length; i++) {
|
6364
|
+
if (i > 0) {
|
6365
|
+
result = result + ",";
|
6366
|
+
}
|
6367
|
+
if (val[i] === null || typeof val[i] === "undefined") {
|
6368
|
+
result = result + "NULL";
|
6369
|
+
} else if (Array.isArray(val[i])) {
|
6370
|
+
result = result + arrayString(val[i]);
|
6371
|
+
} else if (val[i] instanceof Buffer) {
|
6372
|
+
result += "\\\\x" + val[i].toString("hex");
|
6373
|
+
} else {
|
6374
|
+
result += escapeElement(prepareValue(val[i]));
|
6375
|
+
}
|
6376
|
+
}
|
6377
|
+
result = result + "}";
|
6378
|
+
return result;
|
6379
|
+
}
|
6380
|
+
function prepareValue(value) {
|
6381
|
+
if (!isDefined(value))
|
6382
|
+
return null;
|
6383
|
+
if (value instanceof Date) {
|
6384
|
+
return value.toISOString();
|
6385
|
+
}
|
6386
|
+
if (Array.isArray(value)) {
|
6387
|
+
return arrayString(value);
|
6388
|
+
}
|
6389
|
+
if (isObject(value)) {
|
6390
|
+
return JSON.stringify(value);
|
6391
|
+
}
|
6392
|
+
try {
|
6393
|
+
return value.toString();
|
6394
|
+
} catch (e) {
|
6395
|
+
return value;
|
6396
|
+
}
|
6397
|
+
}
|
6398
|
+
function prepareParams(param1, param2) {
|
6399
|
+
if (isString(param1)) {
|
6400
|
+
return { statement: param1, params: param2?.map((value) => prepareValue(value)) };
|
6401
|
+
}
|
6402
|
+
if (isStringArray(param1)) {
|
6403
|
+
const statement = param1.reduce((acc, curr, index) => {
|
6404
|
+
return acc + curr + (index < (param2?.length ?? 0) ? "$" + (index + 1) : "");
|
6405
|
+
}, "");
|
6406
|
+
return { statement, params: param2?.map((value) => prepareValue(value)) };
|
6407
|
+
}
|
6408
|
+
if (isObject(param1)) {
|
6409
|
+
const { statement, params, consistency, responseType } = param1;
|
6410
|
+
return { statement, params: params?.map((value) => prepareValue(value)), consistency, responseType };
|
6411
|
+
}
|
6412
|
+
throw new Error("Invalid query");
|
6413
|
+
}
|
6414
|
+
|
6415
|
+
class SQLPlugin extends XataPlugin {
|
6416
|
+
build(pluginOptions) {
|
6417
|
+
const sqlFunction = async (query, ...parameters) => {
|
6418
|
+
if (!isParamsObject(query) && (!isTemplateStringsArray(query) || !Array.isArray(parameters))) {
|
6419
|
+
throw new Error("Invalid usage of `xata.sql`. Please use it as a tagged template or with an object.");
|
6420
|
+
}
|
6421
|
+
const { statement, params, consistency, responseType } = prepareParams(query, parameters);
|
6422
|
+
const {
|
6423
|
+
records,
|
6424
|
+
rows,
|
6425
|
+
warning,
|
6426
|
+
columns = []
|
6427
|
+
} = await sqlQuery({
|
6428
|
+
pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
|
6429
|
+
body: { statement, params, consistency, responseType },
|
6430
|
+
...pluginOptions
|
6431
|
+
});
|
6432
|
+
return { records, rows, warning, columns };
|
6433
|
+
};
|
6434
|
+
sqlFunction.connectionString = buildConnectionString(pluginOptions);
|
6435
|
+
return sqlFunction;
|
6436
|
+
}
|
6437
|
+
}
|
6438
|
+
function isTemplateStringsArray(strings) {
|
6439
|
+
return Array.isArray(strings) && "raw" in strings && Array.isArray(strings.raw);
|
6440
|
+
}
|
6441
|
+
function isParamsObject(params) {
|
6442
|
+
return isObject(params) && "statement" in params;
|
6443
|
+
}
|
6444
|
+
function buildDomain(host, region) {
|
6445
|
+
switch (host) {
|
6446
|
+
case "production":
|
6447
|
+
return `${region}.sql.xata.sh`;
|
6448
|
+
case "staging":
|
6449
|
+
return `${region}.sql.staging-xata.dev`;
|
6450
|
+
case "dev":
|
6451
|
+
return `${region}.sql.dev-xata.dev`;
|
6452
|
+
case "local":
|
6453
|
+
return "localhost:7654";
|
6454
|
+
default:
|
6455
|
+
throw new Error("Invalid host provider");
|
6456
|
+
}
|
6457
|
+
}
|
6458
|
+
function buildConnectionString({ apiKey, workspacesApiUrl, branch }) {
|
6459
|
+
const url = isString(workspacesApiUrl) ? workspacesApiUrl : workspacesApiUrl("", {});
|
6460
|
+
const parts = parseWorkspacesUrlParts(url);
|
6461
|
+
if (!parts)
|
6462
|
+
throw new Error("Invalid workspaces URL");
|
6463
|
+
const { workspace: workspaceSlug, region, database, host } = parts;
|
6464
|
+
const domain = buildDomain(host, region);
|
6465
|
+
const workspace = workspaceSlug.split("-").pop();
|
6466
|
+
if (!workspace || !region || !database || !apiKey || !branch) {
|
6467
|
+
throw new Error("Unable to build xata connection string");
|
6468
|
+
}
|
6469
|
+
return `postgresql://${workspace}:${apiKey}@${domain}/${database}:${branch}?sslmode=require`;
|
6470
|
+
}
|
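The new `SQLPlugin` above turns `xata.sql` into a tagged template (or a call with a `{ statement, params }` object); `prepareParams` converts interpolations into positional `$1`, `$2`, … parameters, and `buildConnectionString` derives the Postgres connection string exposed as `xata.sql.connectionString`. A hedged sketch, assuming an already-initialised client `xata` and a hypothetical `users` table:

```ts
// Sketch only: the "users" table and the email value are placeholders.
const email = "jane@example.com";

// Tagged-template form: the interpolated value is sent as $1, never spliced into the SQL text.
const { records, warning, columns } = await xata.sql`SELECT * FROM "users" WHERE email = ${email}`;

// Object form with an explicit statement and params array.
const byObject = await xata.sql({ statement: 'SELECT * FROM "users" WHERE email = $1', params: [email] });

// Wire-compatible Postgres connection string for external tooling.
console.log(xata.sql.connectionString);
```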
6471
|
+
|
4384
6472
|
class TransactionPlugin extends XataPlugin {
|
4385
6473
|
build(pluginOptions) {
|
4386
6474
|
return {
|
@@ -4396,12 +6484,6 @@ class TransactionPlugin extends XataPlugin {
|
|
4396
6484
|
}
|
4397
6485
|
}
|
4398
6486
|
|
4399
|
-
var __defProp$2 = Object.defineProperty;
|
4400
|
-
var __defNormalProp$2 = (obj, key, value) => key in obj ? __defProp$2(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
|
4401
|
-
var __publicField$2 = (obj, key, value) => {
|
4402
|
-
__defNormalProp$2(obj, typeof key !== "symbol" ? key + "" : key, value);
|
4403
|
-
return value;
|
4404
|
-
};
|
4405
6487
|
var __accessCheck = (obj, member, msg) => {
|
4406
6488
|
if (!member.has(obj))
|
4407
6489
|
throw TypeError("Cannot " + msg);
|
@@ -4427,28 +6509,29 @@ var __privateMethod = (obj, member, method) => {
|
|
4427
6509
|
const buildClient = (plugins) => {
|
4428
6510
|
var _options, _parseOptions, parseOptions_fn, _getFetchProps, getFetchProps_fn, _a;
|
4429
6511
|
return _a = class {
|
4430
|
-
constructor(options = {},
|
6512
|
+
constructor(options = {}, tables) {
|
4431
6513
|
__privateAdd(this, _parseOptions);
|
4432
6514
|
__privateAdd(this, _getFetchProps);
|
4433
6515
|
__privateAdd(this, _options, void 0);
|
4434
|
-
__publicField$2(this, "db");
|
4435
|
-
__publicField$2(this, "search");
|
4436
|
-
__publicField$2(this, "transactions");
|
4437
|
-
__publicField$2(this, "files");
|
4438
6516
|
const safeOptions = __privateMethod(this, _parseOptions, parseOptions_fn).call(this, options);
|
4439
6517
|
__privateSet(this, _options, safeOptions);
|
4440
6518
|
const pluginOptions = {
|
4441
6519
|
...__privateMethod(this, _getFetchProps, getFetchProps_fn).call(this, safeOptions),
|
4442
6520
|
cache: safeOptions.cache,
|
4443
|
-
host: safeOptions.host
|
6521
|
+
host: safeOptions.host,
|
6522
|
+
tables,
|
6523
|
+
branch: safeOptions.branch
|
4444
6524
|
};
|
4445
|
-
const db = new SchemaPlugin(
|
4446
|
-
const search = new SearchPlugin(db
|
6525
|
+
const db = new SchemaPlugin().build(pluginOptions);
|
6526
|
+
const search = new SearchPlugin(db).build(pluginOptions);
|
4447
6527
|
const transactions = new TransactionPlugin().build(pluginOptions);
|
6528
|
+
const sql = new SQLPlugin().build(pluginOptions);
|
4448
6529
|
const files = new FilesPlugin().build(pluginOptions);
|
6530
|
+
this.schema = { tables };
|
4449
6531
|
this.db = db;
|
4450
6532
|
this.search = search;
|
4451
6533
|
this.transactions = transactions;
|
6534
|
+
this.sql = sql;
|
4452
6535
|
this.files = files;
|
4453
6536
|
for (const [key, namespace] of Object.entries(plugins ?? {})) {
|
4454
6537
|
if (namespace === void 0)
|
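The constructor change above means classes returned by `buildClient` now receive the table definitions as a second argument; they are threaded to every plugin through `pluginOptions.tables` and surfaced as `this.schema`, while the `SQLPlugin` and `FilesPlugin` instances are attached as `this.sql` and `this.files`. A hedged sketch of a hand-written client along the lines of what the code generator emits; the table definitions and environment variable names below are assumptions:

```ts
import { buildClient } from "@xata.io/client";

// Sketch only: a generated client normally produces this file, including the table list.
const tables = [
  { name: "posts", columns: [{ name: "title", type: "string" }, { name: "body", type: "text" }] }
];

export class XataClient extends buildClient() {
  constructor(options = {}) {
    super(
      { databaseURL: process.env.XATA_DATABASE_URL, apiKey: process.env.XATA_API_KEY, ...options },
      tables
    );
  }
}

const xata = new XataClient();
// xata.db.posts, xata.search, xata.sql and xata.files are all built from the same plugin options.
```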
@@ -4466,7 +6549,7 @@ const buildClient = (plugins) => {
|
|
4466
6549
|
const isBrowser = typeof window !== "undefined" && typeof Deno === "undefined";
|
4467
6550
|
if (isBrowser && !enableBrowser) {
|
4468
6551
|
throw new Error(
|
4469
|
-
"You are trying to use Xata from the browser, which is potentially a non-secure environment.
|
6552
|
+
"You are trying to use Xata from the browser, which is potentially a non-secure environment. How to fix: https://xata.io/docs/messages/api-key-browser-error"
|
4470
6553
|
);
|
4471
6554
|
}
|
4472
6555
|
const fetch = getFetchImplementation(options?.fetch);
|
@@ -4546,17 +6629,11 @@ const buildClient = (plugins) => {
|
|
4546
6629
|
class BaseClient extends buildClient() {
|
4547
6630
|
}
|
4548
6631
|
|
4549
|
-
var __defProp$1 = Object.defineProperty;
|
4550
|
-
var __defNormalProp$1 = (obj, key, value) => key in obj ? __defProp$1(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
|
4551
|
-
var __publicField$1 = (obj, key, value) => {
|
4552
|
-
__defNormalProp$1(obj, typeof key !== "symbol" ? key + "" : key, value);
|
4553
|
-
return value;
|
4554
|
-
};
|
4555
6632
|
const META = "__";
|
4556
6633
|
const VALUE = "___";
|
4557
6634
|
class Serializer {
|
4558
6635
|
constructor() {
|
4559
|
-
|
6636
|
+
this.classes = {};
|
4560
6637
|
}
|
4561
6638
|
add(clazz) {
|
4562
6639
|
this.classes[clazz.name] = clazz;
|
@@ -4619,34 +6696,12 @@ const deserialize = (json) => {
|
|
4619
6696
|
return defaultSerializer.fromJSON(json);
|
4620
6697
|
};
|
4621
6698
|
|
4622
|
-
function buildWorkerRunner(config) {
|
4623
|
-
return function xataWorker(name, worker) {
|
4624
|
-
return async (...args) => {
|
4625
|
-
const url = process.env.NODE_ENV === "development" ? `http://localhost:64749/${name}` : `https://dispatcher.xata.workers.dev/${config.workspace}/${config.worker}/${name}`;
|
4626
|
-
const result = await fetch(url, {
|
4627
|
-
method: "POST",
|
4628
|
-
headers: { "Content-Type": "application/json" },
|
4629
|
-
body: serialize({ args })
|
4630
|
-
});
|
4631
|
-
const text = await result.text();
|
4632
|
-
return deserialize(text);
|
4633
|
-
};
|
4634
|
-
};
|
4635
|
-
}
|
4636
|
-
|
4637
|
-
var __defProp = Object.defineProperty;
|
4638
|
-
var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
|
4639
|
-
var __publicField = (obj, key, value) => {
|
4640
|
-
__defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);
|
4641
|
-
return value;
|
4642
|
-
};
|
4643
6699
|
class XataError extends Error {
|
4644
6700
|
constructor(message, status) {
|
4645
6701
|
super(message);
|
4646
|
-
__publicField(this, "status");
|
4647
6702
|
this.status = status;
|
4648
6703
|
}
|
4649
6704
|
}
|
4650
6705
|
|
4651
|
-
export { BaseClient, FetcherError, FilesPlugin, operationsByTag as Operations, PAGINATION_DEFAULT_OFFSET, PAGINATION_DEFAULT_SIZE, PAGINATION_MAX_OFFSET, PAGINATION_MAX_SIZE, Page, Query, RecordArray, RecordColumnTypes, Repository, RestRepository, SchemaPlugin, SearchPlugin, Serializer, SimpleCache, XataApiClient, XataApiPlugin, XataError, XataFile, XataPlugin, acceptWorkspaceMemberInvite, addGitBranchesEntry, addTableColumn, aggregateTable, applyBranchSchemaEdit, askTable, askTableSession, branchTransaction, buildClient, buildPreviewBranchName, buildProviderString,
|
6706
|
+
export { BaseClient, Buffer, FetcherError, FilesPlugin, operationsByTag as Operations, PAGINATION_DEFAULT_OFFSET, PAGINATION_DEFAULT_SIZE, PAGINATION_MAX_OFFSET, PAGINATION_MAX_SIZE, Page, PageRecordArray, Query, RecordArray, RecordColumnTypes, Repository, RestRepository, SQLPlugin, SchemaPlugin, SearchPlugin, Serializer, SimpleCache, TransactionPlugin, XataApiClient, XataApiPlugin, XataError, XataFile, XataPlugin, acceptWorkspaceMemberInvite, adaptAllTables, adaptTable, addGitBranchesEntry, addTableColumn, aggregateTable, applyBranchSchemaEdit, applyMigration, askTable, askTableSession, branchTransaction, buildClient, buildPreviewBranchName, buildProviderString, bulkInsertTableRecords, cancelWorkspaceMemberInvite, compareBranchSchemas, compareBranchWithUserSchema, compareMigrationRequest, contains, copyBranch, createBranch, createCluster, createDatabase, createMigrationRequest, createTable, createUserAPIKey, createWorkspace, deleteBranch, deleteColumn, deleteDatabase, deleteDatabaseGithubSettings, deleteFile, deleteFileItem, deleteOAuthAccessToken, deleteRecord, deleteTable, deleteUser, deleteUserAPIKey, deleteUserOAuthClient, deleteWorkspace, deserialize, endsWith, equals, executeBranchMigrationPlan, exists, fileAccess, fileUpload, ge, getAPIKey, getAuthorizationCode, getBranch, getBranchDetails, getBranchList, getBranchMetadata, getBranchMigrationHistory, getBranchMigrationJobStatus, getBranchMigrationPlan, getBranchSchemaHistory, getBranchStats, getCluster, getColumn, getDatabaseGithubSettings, getDatabaseList, getDatabaseMetadata, getDatabaseSettings, getDatabaseURL, getFile, getFileItem, getGitBranchesMapping, getHostUrl, getMigrationHistory, getMigrationJobStatus, getMigrationRequest, getMigrationRequestIsMerged, getPreviewBranch, getRecord, getSchema, getTableColumns, getTableSchema, getUser, getUserAPIKeys, getUserOAuthAccessTokens, getUserOAuthClients, getWorkspace, getWorkspaceMembersList, getWorkspaceSettings, getWorkspacesList, grantAuthorizationCode, greaterEquals, greaterThan, greaterThanEquals, gt, gte, iContains, iPattern, includes, includesAll, includesAny, includesNone, insertRecord, insertRecordWithID, inviteWorkspaceMember, is, isCursorPaginationOptions, isHostProviderAlias, isHostProviderBuilder, isIdentifiable, isNot, isValidExpandedColumn, isValidSelectableColumns, isXataRecord, le, lessEquals, lessThan, lessThanEquals, listClusters, listMigrationRequestsCommits, listRegions, lt, lte, mergeMigrationRequest, notExists, operationsByTag, parseProviderString, parseWorkspacesUrlParts, pattern, previewBranchSchemaEdit, pushBranchMigrations, putFile, putFileItem, queryMigrationRequests, queryTable, removeGitBranchesEntry, removeWorkspaceMember, renameDatabase, resendWorkspaceMemberInvite, resolveBranch, searchBranch, searchTable, serialize, setTableSchema, sqlQuery, startsWith, summarizeTable, transformImage, updateBranchMetadata, updateBranchSchema, updateCluster, updateColumn, updateDatabaseGithubSettings, updateDatabaseMetadata, updateDatabaseSettings, updateMigrationRequest, updateOAuthAccessToken, updateRecordWithID, updateTable, updateUser, updateWorkspace, updateWorkspaceMemberInvite, updateWorkspaceMemberRole, updateWorkspaceSettings, upsertRecordWithID, vectorSearchTable };
|
4652
6707
|
//# sourceMappingURL=index.mjs.map
|