@xata.io/client 0.0.0-next.v6c9e627772cbacc1977ba6ba82e6b403ac64c0b2 → 0.0.0-next.v75d167190643613f39533f83608bd5d30cc66dcf
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-add-version.log +1 -1
- package/.turbo/turbo-build.log +4 -4
- package/CHANGELOG.md +21 -3
- package/dist/index.cjs +2274 -125
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.ts +784 -96
- package/dist/index.mjs +2270 -125
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.mjs
CHANGED
@@ -22,6 +22,1814 @@ const TraceAttributes = {
   CLOUDFLARE_RAY_ID: "cf.ray"
 };
 
+const lookup = [];
+const revLookup = [];
+const code = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
+for (let i = 0, len = code.length; i < len; ++i) {
+  lookup[i] = code[i];
+  revLookup[code.charCodeAt(i)] = i;
+}
+revLookup["-".charCodeAt(0)] = 62;
+revLookup["_".charCodeAt(0)] = 63;
+function getLens(b64) {
+  const len = b64.length;
+  if (len % 4 > 0) {
+    throw new Error("Invalid string. Length must be a multiple of 4");
+  }
+  let validLen = b64.indexOf("=");
+  if (validLen === -1)
+    validLen = len;
+  const placeHoldersLen = validLen === len ? 0 : 4 - validLen % 4;
+  return [validLen, placeHoldersLen];
+}
+function _byteLength(_b64, validLen, placeHoldersLen) {
+  return (validLen + placeHoldersLen) * 3 / 4 - placeHoldersLen;
+}
+function toByteArray(b64) {
+  let tmp;
+  const lens = getLens(b64);
+  const validLen = lens[0];
+  const placeHoldersLen = lens[1];
+  const arr = new Uint8Array(_byteLength(b64, validLen, placeHoldersLen));
+  let curByte = 0;
+  const len = placeHoldersLen > 0 ? validLen - 4 : validLen;
+  let i;
+  for (i = 0; i < len; i += 4) {
+    tmp = revLookup[b64.charCodeAt(i)] << 18 | revLookup[b64.charCodeAt(i + 1)] << 12 | revLookup[b64.charCodeAt(i + 2)] << 6 | revLookup[b64.charCodeAt(i + 3)];
+    arr[curByte++] = tmp >> 16 & 255;
+    arr[curByte++] = tmp >> 8 & 255;
+    arr[curByte++] = tmp & 255;
+  }
+  if (placeHoldersLen === 2) {
+    tmp = revLookup[b64.charCodeAt(i)] << 2 | revLookup[b64.charCodeAt(i + 1)] >> 4;
+    arr[curByte++] = tmp & 255;
+  }
+  if (placeHoldersLen === 1) {
+    tmp = revLookup[b64.charCodeAt(i)] << 10 | revLookup[b64.charCodeAt(i + 1)] << 4 | revLookup[b64.charCodeAt(i + 2)] >> 2;
+    arr[curByte++] = tmp >> 8 & 255;
+    arr[curByte++] = tmp & 255;
+  }
+  return arr;
+}
+function tripletToBase64(num) {
+  return lookup[num >> 18 & 63] + lookup[num >> 12 & 63] + lookup[num >> 6 & 63] + lookup[num & 63];
+}
+function encodeChunk(uint8, start, end) {
+  let tmp;
+  const output = [];
+  for (let i = start; i < end; i += 3) {
+    tmp = (uint8[i] << 16 & 16711680) + (uint8[i + 1] << 8 & 65280) + (uint8[i + 2] & 255);
+    output.push(tripletToBase64(tmp));
+  }
+  return output.join("");
+}
+function fromByteArray(uint8) {
+  let tmp;
+  const len = uint8.length;
+  const extraBytes = len % 3;
+  const parts = [];
+  const maxChunkLength = 16383;
+  for (let i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) {
+    parts.push(encodeChunk(uint8, i, i + maxChunkLength > len2 ? len2 : i + maxChunkLength));
+  }
+  if (extraBytes === 1) {
+    tmp = uint8[len - 1];
+    parts.push(lookup[tmp >> 2] + lookup[tmp << 4 & 63] + "==");
+  } else if (extraBytes === 2) {
+    tmp = (uint8[len - 2] << 8) + uint8[len - 1];
+    parts.push(lookup[tmp >> 10] + lookup[tmp >> 4 & 63] + lookup[tmp << 2 & 63] + "=");
+  }
+  return parts.join("");
+}
+
+const K_MAX_LENGTH = 2147483647;
+const MAX_ARGUMENTS_LENGTH = 4096;
+class Buffer extends Uint8Array {
+  /**
+   * Constructs a new `Buffer` instance.
+   *
+   * @param value
+   * @param encodingOrOffset
+   * @param length
+   */
+  constructor(value, encodingOrOffset, length) {
+    if (typeof value === "number") {
+      if (typeof encodingOrOffset === "string") {
+        throw new TypeError("The first argument must be of type string, received type number");
+      }
+      if (value < 0) {
+        throw new RangeError("The buffer size cannot be negative");
+      }
+      super(value < 0 ? 0 : Buffer._checked(value) | 0);
+    } else if (typeof value === "string") {
+      if (typeof encodingOrOffset !== "string") {
+        encodingOrOffset = "utf8";
+      }
+      if (!Buffer.isEncoding(encodingOrOffset)) {
+        throw new TypeError("Unknown encoding: " + encodingOrOffset);
+      }
+      const length2 = Buffer.byteLength(value, encodingOrOffset) | 0;
+      super(length2);
+      const written = this.write(value, 0, this.length, encodingOrOffset);
+      if (written !== length2) {
+        throw new TypeError(
+          "Number of bytes written did not match expected length (wrote " + written + ", expected " + length2 + ")"
+        );
+      }
+    } else if (ArrayBuffer.isView(value)) {
+      if (Buffer._isInstance(value, Uint8Array)) {
+        const copy = new Uint8Array(value);
+        const array = copy.buffer;
+        const byteOffset = copy.byteOffset;
+        const length2 = copy.byteLength;
+        if (byteOffset < 0 || array.byteLength < byteOffset) {
+          throw new RangeError("offset is outside of buffer bounds");
+        }
+        if (array.byteLength < byteOffset + (length2 || 0)) {
+          throw new RangeError("length is outside of buffer bounds");
+        }
+        super(new Uint8Array(array, byteOffset, length2));
+      } else {
+        const array = value;
+        const length2 = array.length < 0 ? 0 : Buffer._checked(array.length) | 0;
+        super(new Uint8Array(length2));
+        for (let i = 0; i < length2; i++) {
+          this[i] = array[i] & 255;
+        }
+      }
+    } else if (value == null) {
+      throw new TypeError(
+        "The first argument must be one of type string, Buffer, ArrayBuffer, Array, or Array-like Object. Received type " + typeof value
+      );
+    } else if (Buffer._isInstance(value, ArrayBuffer) || value && Buffer._isInstance(value.buffer, ArrayBuffer)) {
+      const array = value;
+      const byteOffset = encodingOrOffset;
+      if (byteOffset < 0 || array.byteLength < byteOffset) {
+        throw new RangeError("offset is outside of buffer bounds");
+      }
+      if (array.byteLength < byteOffset + (length || 0)) {
+        throw new RangeError("length is outside of buffer bounds");
+      }
+      super(new Uint8Array(array, byteOffset, length));
+    } else if (Array.isArray(value)) {
+      const array = value;
+      const length2 = array.length < 0 ? 0 : Buffer._checked(array.length) | 0;
+      super(new Uint8Array(length2));
+      for (let i = 0; i < length2; i++) {
+        this[i] = array[i] & 255;
+      }
+    } else {
+      throw new TypeError("Unable to determine the correct way to allocate buffer for type " + typeof value);
+    }
+  }
+  /**
+   * Return JSON representation of the buffer.
+   */
+  toJSON() {
+    return {
+      type: "Buffer",
+      data: Array.prototype.slice.call(this)
+    };
+  }
+  /**
+   * Writes `string` to the buffer at `offset` according to the character encoding in `encoding`. The `length`
+   * parameter is the number of bytes to write. If the buffer does not contain enough space to fit the entire string,
+   * only part of `string` will be written. However, partially encoded characters will not be written.
+   *
+   * @param string String to write to `buf`.
+   * @param offset Number of bytes to skip before starting to write `string`. Default: `0`.
+   * @param length Maximum number of bytes to write: Default: `buf.length - offset`.
+   * @param encoding The character encoding of `string`. Default: `utf8`.
+   */
+  write(string, offset, length, encoding) {
+    if (typeof offset === "undefined") {
+      encoding = "utf8";
+      length = this.length;
+      offset = 0;
+    } else if (typeof length === "undefined" && typeof offset === "string") {
+      encoding = offset;
+      length = this.length;
+      offset = 0;
+    } else if (typeof offset === "number" && isFinite(offset)) {
+      offset = offset >>> 0;
+      if (typeof length === "number" && isFinite(length)) {
+        length = length >>> 0;
+        encoding ?? (encoding = "utf8");
+      } else if (typeof length === "string") {
+        encoding = length;
+        length = void 0;
+      }
+    } else {
+      throw new Error("Buffer.write(string, encoding, offset[, length]) is no longer supported");
+    }
+    const remaining = this.length - offset;
+    if (typeof length === "undefined" || length > remaining) {
+      length = remaining;
+    }
+    if (string.length > 0 && (length < 0 || offset < 0) || offset > this.length) {
+      throw new RangeError("Attempt to write outside buffer bounds");
+    }
+    encoding || (encoding = "utf8");
+    switch (Buffer._getEncoding(encoding)) {
+      case "hex":
+        return Buffer._hexWrite(this, string, offset, length);
+      case "utf8":
+        return Buffer._utf8Write(this, string, offset, length);
+      case "ascii":
+      case "latin1":
+      case "binary":
+        return Buffer._asciiWrite(this, string, offset, length);
+      case "ucs2":
+      case "utf16le":
+        return Buffer._ucs2Write(this, string, offset, length);
+      case "base64":
+        return Buffer._base64Write(this, string, offset, length);
+    }
+  }
+  /**
+   * Decodes the buffer to a string according to the specified character encoding.
+   * Passing `start` and `end` will decode only a subset of the buffer.
+   *
+   * Note that if the encoding is `utf8` and a byte sequence in the input is not valid UTF-8, then each invalid byte
+   * will be replaced with `U+FFFD`.
+   *
+   * @param encoding
+   * @param start
+   * @param end
+   */
+  toString(encoding, start, end) {
+    const length = this.length;
+    if (length === 0) {
+      return "";
+    }
+    if (arguments.length === 0) {
+      return Buffer._utf8Slice(this, 0, length);
+    }
+    if (typeof start === "undefined" || start < 0) {
+      start = 0;
+    }
+    if (start > this.length) {
+      return "";
+    }
+    if (typeof end === "undefined" || end > this.length) {
+      end = this.length;
+    }
+    if (end <= 0) {
+      return "";
+    }
+    end >>>= 0;
+    start >>>= 0;
+    if (end <= start) {
+      return "";
+    }
+    if (!encoding) {
+      encoding = "utf8";
+    }
+    switch (Buffer._getEncoding(encoding)) {
+      case "hex":
+        return Buffer._hexSlice(this, start, end);
+      case "utf8":
+        return Buffer._utf8Slice(this, start, end);
+      case "ascii":
+        return Buffer._asciiSlice(this, start, end);
+      case "latin1":
+      case "binary":
+        return Buffer._latin1Slice(this, start, end);
+      case "ucs2":
+      case "utf16le":
+        return Buffer._utf16leSlice(this, start, end);
+      case "base64":
+        return Buffer._base64Slice(this, start, end);
+    }
+  }
+  /**
+   * Returns true if this buffer's is equal to the provided buffer, meaning they share the same exact data.
+   *
+   * @param otherBuffer
+   */
+  equals(otherBuffer) {
+    if (!Buffer.isBuffer(otherBuffer)) {
+      throw new TypeError("Argument must be a Buffer");
+    }
+    if (this === otherBuffer) {
+      return true;
+    }
+    return Buffer.compare(this, otherBuffer) === 0;
+  }
+  /**
+   * Compares the buffer with `otherBuffer` and returns a number indicating whether the buffer comes before, after,
+   * or is the same as `otherBuffer` in sort order. Comparison is based on the actual sequence of bytes in each
+   * buffer.
+   *
+   * - `0` is returned if `otherBuffer` is the same as this buffer.
+   * - `1` is returned if `otherBuffer` should come before this buffer when sorted.
+   * - `-1` is returned if `otherBuffer` should come after this buffer when sorted.
+   *
+   * @param otherBuffer The buffer to compare to.
+   * @param targetStart The offset within `otherBuffer` at which to begin comparison.
+   * @param targetEnd The offset within `otherBuffer` at which to end comparison (exclusive).
+   * @param sourceStart The offset within this buffer at which to begin comparison.
+   * @param sourceEnd The offset within this buffer at which to end the comparison (exclusive).
+   */
+  compare(otherBuffer, targetStart, targetEnd, sourceStart, sourceEnd) {
+    if (Buffer._isInstance(otherBuffer, Uint8Array)) {
+      otherBuffer = Buffer.from(otherBuffer, otherBuffer.byteOffset, otherBuffer.byteLength);
+    }
+    if (!Buffer.isBuffer(otherBuffer)) {
+      throw new TypeError("Argument must be a Buffer or Uint8Array");
+    }
+    targetStart ?? (targetStart = 0);
+    targetEnd ?? (targetEnd = otherBuffer ? otherBuffer.length : 0);
+    sourceStart ?? (sourceStart = 0);
+    sourceEnd ?? (sourceEnd = this.length);
+    if (targetStart < 0 || targetEnd > otherBuffer.length || sourceStart < 0 || sourceEnd > this.length) {
+      throw new RangeError("Out of range index");
+    }
+    if (sourceStart >= sourceEnd && targetStart >= targetEnd) {
+      return 0;
+    }
+    if (sourceStart >= sourceEnd) {
+      return -1;
+    }
+    if (targetStart >= targetEnd) {
+      return 1;
+    }
+    targetStart >>>= 0;
+    targetEnd >>>= 0;
+    sourceStart >>>= 0;
+    sourceEnd >>>= 0;
+    if (this === otherBuffer) {
+      return 0;
+    }
+    let x = sourceEnd - sourceStart;
+    let y = targetEnd - targetStart;
+    const len = Math.min(x, y);
+    const thisCopy = this.slice(sourceStart, sourceEnd);
+    const targetCopy = otherBuffer.slice(targetStart, targetEnd);
+    for (let i = 0; i < len; ++i) {
+      if (thisCopy[i] !== targetCopy[i]) {
+        x = thisCopy[i];
+        y = targetCopy[i];
+        break;
+      }
+    }
+    if (x < y)
+      return -1;
+    if (y < x)
+      return 1;
+    return 0;
+  }
+  /**
+   * Copies data from a region of this buffer to a region in `targetBuffer`, even if the `targetBuffer` memory
+   * region overlaps with this buffer.
+   *
+   * @param targetBuffer The target buffer to copy into.
+   * @param targetStart The offset within `targetBuffer` at which to begin writing.
+   * @param sourceStart The offset within this buffer at which to begin copying.
+   * @param sourceEnd The offset within this buffer at which to end copying (exclusive).
+   */
+  copy(targetBuffer, targetStart, sourceStart, sourceEnd) {
+    if (!Buffer.isBuffer(targetBuffer))
+      throw new TypeError("argument should be a Buffer");
+    if (!sourceStart)
+      sourceStart = 0;
+    if (!targetStart)
+      targetStart = 0;
+    if (!sourceEnd && sourceEnd !== 0)
+      sourceEnd = this.length;
+    if (targetStart >= targetBuffer.length)
+      targetStart = targetBuffer.length;
+    if (!targetStart)
+      targetStart = 0;
+    if (sourceEnd > 0 && sourceEnd < sourceStart)
+      sourceEnd = sourceStart;
+    if (sourceEnd === sourceStart)
+      return 0;
+    if (targetBuffer.length === 0 || this.length === 0)
+      return 0;
+    if (targetStart < 0) {
+      throw new RangeError("targetStart out of bounds");
+    }
+    if (sourceStart < 0 || sourceStart >= this.length)
+      throw new RangeError("Index out of range");
+    if (sourceEnd < 0)
+      throw new RangeError("sourceEnd out of bounds");
+    if (sourceEnd > this.length)
+      sourceEnd = this.length;
+    if (targetBuffer.length - targetStart < sourceEnd - sourceStart) {
+      sourceEnd = targetBuffer.length - targetStart + sourceStart;
+    }
+    const len = sourceEnd - sourceStart;
+    if (this === targetBuffer && typeof Uint8Array.prototype.copyWithin === "function") {
+      this.copyWithin(targetStart, sourceStart, sourceEnd);
+    } else {
+      Uint8Array.prototype.set.call(targetBuffer, this.subarray(sourceStart, sourceEnd), targetStart);
+    }
+    return len;
+  }
+  /**
+   * Returns a new `Buffer` that references the same memory as the original, but offset and cropped by the `start`
+   * and `end` indices. This is the same behavior as `buf.subarray()`.
+   *
+   * This method is not compatible with the `Uint8Array.prototype.slice()`, which is a superclass of Buffer. To copy
+   * the slice, use `Uint8Array.prototype.slice()`.
+   *
+   * @param start
+   * @param end
+   */
+  slice(start, end) {
+    if (!start) {
+      start = 0;
+    }
+    const len = this.length;
+    start = ~~start;
+    end = end === void 0 ? len : ~~end;
+    if (start < 0) {
+      start += len;
+      if (start < 0) {
+        start = 0;
+      }
+    } else if (start > len) {
+      start = len;
+    }
+    if (end < 0) {
+      end += len;
+      if (end < 0) {
+        end = 0;
+      }
+    } else if (end > len) {
+      end = len;
+    }
+    if (end < start) {
+      end = start;
+    }
+    const newBuf = this.subarray(start, end);
+    Object.setPrototypeOf(newBuf, Buffer.prototype);
+    return newBuf;
+  }
+  /**
+   * Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits
+   * of accuracy. Behavior is undefined when value is anything other than an unsigned integer.
+   *
+   * @param value Number to write.
+   * @param offset Number of bytes to skip before starting to write.
+   * @param byteLength Number of bytes to write, between 0 and 6.
+   * @param noAssert
+   * @returns `offset` plus the number of bytes written.
+   */
+  writeUIntLE(value, offset, byteLength, noAssert) {
+    value = +value;
+    offset = offset >>> 0;
+    byteLength = byteLength >>> 0;
+    if (!noAssert) {
+      const maxBytes = Math.pow(2, 8 * byteLength) - 1;
+      Buffer._checkInt(this, value, offset, byteLength, maxBytes, 0);
+    }
+    let mul = 1;
+    let i = 0;
+    this[offset] = value & 255;
+    while (++i < byteLength && (mul *= 256)) {
+      this[offset + i] = value / mul & 255;
+    }
+    return offset + byteLength;
+  }
+  /**
+   * Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits of
+   * accuracy. Behavior is undefined when `value` is anything other than an unsigned integer.
+   *
+   * @param value Number to write.
+   * @param offset Number of bytes to skip before starting to write.
+   * @param byteLength Number of bytes to write, between 0 and 6.
+   * @param noAssert
+   * @returns `offset` plus the number of bytes written.
+   */
+  writeUIntBE(value, offset, byteLength, noAssert) {
+    value = +value;
+    offset = offset >>> 0;
+    byteLength = byteLength >>> 0;
+    if (!noAssert) {
+      const maxBytes = Math.pow(2, 8 * byteLength) - 1;
+      Buffer._checkInt(this, value, offset, byteLength, maxBytes, 0);
+    }
+    let i = byteLength - 1;
+    let mul = 1;
+    this[offset + i] = value & 255;
+    while (--i >= 0 && (mul *= 256)) {
+      this[offset + i] = value / mul & 255;
+    }
+    return offset + byteLength;
+  }
+  /**
+   * Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits
+   * of accuracy. Behavior is undefined when `value` is anything other than a signed integer.
+   *
+   * @param value Number to write.
+   * @param offset Number of bytes to skip before starting to write.
+   * @param byteLength Number of bytes to write, between 0 and 6.
+   * @param noAssert
+   * @returns `offset` plus the number of bytes written.
+   */
+  writeIntLE(value, offset, byteLength, noAssert) {
+    value = +value;
+    offset = offset >>> 0;
+    if (!noAssert) {
+      const limit = Math.pow(2, 8 * byteLength - 1);
+      Buffer._checkInt(this, value, offset, byteLength, limit - 1, -limit);
+    }
+    let i = 0;
+    let mul = 1;
+    let sub = 0;
+    this[offset] = value & 255;
+    while (++i < byteLength && (mul *= 256)) {
+      if (value < 0 && sub === 0 && this[offset + i - 1] !== 0) {
+        sub = 1;
+      }
+      this[offset + i] = (value / mul >> 0) - sub & 255;
+    }
+    return offset + byteLength;
+  }
+  /**
+   * Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits
+   * of accuracy. Behavior is undefined when `value` is anything other than a signed integer.
+   *
+   * @param value Number to write.
+   * @param offset Number of bytes to skip before starting to write.
+   * @param byteLength Number of bytes to write, between 0 and 6.
+   * @param noAssert
+   * @returns `offset` plus the number of bytes written.
+   */
+  writeIntBE(value, offset, byteLength, noAssert) {
+    value = +value;
+    offset = offset >>> 0;
+    if (!noAssert) {
+      const limit = Math.pow(2, 8 * byteLength - 1);
+      Buffer._checkInt(this, value, offset, byteLength, limit - 1, -limit);
+    }
+    let i = byteLength - 1;
+    let mul = 1;
+    let sub = 0;
+    this[offset + i] = value & 255;
+    while (--i >= 0 && (mul *= 256)) {
+      if (value < 0 && sub === 0 && this[offset + i + 1] !== 0) {
+        sub = 1;
+      }
+      this[offset + i] = (value / mul >> 0) - sub & 255;
+    }
+    return offset + byteLength;
+  }
+  /**
+   * Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an
+   * unsigned, little-endian integer supporting up to 48 bits of accuracy.
+   *
+   * @param offset Number of bytes to skip before starting to read.
+   * @param byteLength Number of bytes to read, between 0 and 6.
+   * @param noAssert
+   */
+  readUIntLE(offset, byteLength, noAssert) {
+    offset = offset >>> 0;
+    byteLength = byteLength >>> 0;
+    if (!noAssert) {
+      Buffer._checkOffset(offset, byteLength, this.length);
+    }
+    let val = this[offset];
+    let mul = 1;
+    let i = 0;
+    while (++i < byteLength && (mul *= 256)) {
+      val += this[offset + i] * mul;
+    }
+    return val;
+  }
+  /**
+   * Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as an
+   * unsigned, big-endian integer supporting up to 48 bits of accuracy.
+   *
+   * @param offset Number of bytes to skip before starting to read.
+   * @param byteLength Number of bytes to read, between 0 and 6.
+   * @param noAssert
+   */
+  readUIntBE(offset, byteLength, noAssert) {
+    offset = offset >>> 0;
+    byteLength = byteLength >>> 0;
+    if (!noAssert) {
+      Buffer._checkOffset(offset, byteLength, this.length);
+    }
+    let val = this[offset + --byteLength];
+    let mul = 1;
+    while (byteLength > 0 && (mul *= 256)) {
+      val += this[offset + --byteLength] * mul;
+    }
+    return val;
+  }
+  /**
+   * Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a
+   * little-endian, two's complement signed value supporting up to 48 bits of accuracy.
+   *
+   * @param offset Number of bytes to skip before starting to read.
+   * @param byteLength Number of bytes to read, between 0 and 6.
+   * @param noAssert
+   */
+  readIntLE(offset, byteLength, noAssert) {
+    offset = offset >>> 0;
+    byteLength = byteLength >>> 0;
+    if (!noAssert) {
+      Buffer._checkOffset(offset, byteLength, this.length);
+    }
+    let val = this[offset];
+    let mul = 1;
+    let i = 0;
+    while (++i < byteLength && (mul *= 256)) {
+      val += this[offset + i] * mul;
+    }
+    mul *= 128;
+    if (val >= mul) {
+      val -= Math.pow(2, 8 * byteLength);
+    }
+    return val;
+  }
+  /**
+   * Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a
+   * big-endian, two's complement signed value supporting up to 48 bits of accuracy.
+   *
+   * @param offset Number of bytes to skip before starting to read.
+   * @param byteLength Number of bytes to read, between 0 and 6.
+   * @param noAssert
+   */
+  readIntBE(offset, byteLength, noAssert) {
+    offset = offset >>> 0;
+    byteLength = byteLength >>> 0;
+    if (!noAssert) {
+      Buffer._checkOffset(offset, byteLength, this.length);
+    }
+    let i = byteLength;
+    let mul = 1;
+    let val = this[offset + --i];
+    while (i > 0 && (mul *= 256)) {
+      val += this[offset + --i] * mul;
+    }
+    mul *= 128;
+    if (val >= mul) {
+      val -= Math.pow(2, 8 * byteLength);
+    }
+    return val;
+  }
+  /**
+   * Reads an unsigned 8-bit integer from `buf` at the specified `offset`.
+   *
+   * @param offset Number of bytes to skip before starting to read.
+   * @param noAssert
+   */
+  readUInt8(offset, noAssert) {
+    offset = offset >>> 0;
+    if (!noAssert) {
+      Buffer._checkOffset(offset, 1, this.length);
+    }
+    return this[offset];
+  }
+  /**
+   * Reads an unsigned, little-endian 16-bit integer from `buf` at the specified `offset`.
+   *
+   * @param offset Number of bytes to skip before starting to read.
+   * @param noAssert
+   */
+  readUInt16LE(offset, noAssert) {
+    offset = offset >>> 0;
+    if (!noAssert) {
+      Buffer._checkOffset(offset, 2, this.length);
+    }
+    return this[offset] | this[offset + 1] << 8;
+  }
+  /**
+   * Reads an unsigned, big-endian 16-bit integer from `buf` at the specified `offset`.
+   *
+   * @param offset Number of bytes to skip before starting to read.
+   * @param noAssert
+   */
+  readUInt16BE(offset, noAssert) {
+    offset = offset >>> 0;
+    if (!noAssert) {
+      Buffer._checkOffset(offset, 2, this.length);
+    }
+    return this[offset] << 8 | this[offset + 1];
+  }
+  /**
+   * Reads an unsigned, little-endian 32-bit integer from `buf` at the specified `offset`.
+   *
+   * @param offset Number of bytes to skip before starting to read.
+   * @param noAssert
+   */
+  readUInt32LE(offset, noAssert) {
+    offset = offset >>> 0;
+    if (!noAssert) {
+      Buffer._checkOffset(offset, 4, this.length);
+    }
+    return (this[offset] | this[offset + 1] << 8 | this[offset + 2] << 16) + this[offset + 3] * 16777216;
+  }
+  /**
+   * Reads an unsigned, big-endian 32-bit integer from `buf` at the specified `offset`.
+   *
+   * @param offset Number of bytes to skip before starting to read.
+   * @param noAssert
+   */
+  readUInt32BE(offset, noAssert) {
+    offset = offset >>> 0;
+    if (!noAssert) {
+      Buffer._checkOffset(offset, 4, this.length);
+    }
+    return this[offset] * 16777216 + (this[offset + 1] << 16 | this[offset + 2] << 8 | this[offset + 3]);
+  }
+  /**
+   * Reads a signed 8-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer` are interpreted
+   * as two's complement signed values.
+   *
+   * @param offset Number of bytes to skip before starting to read.
+   * @param noAssert
+   */
+  readInt8(offset, noAssert) {
+    offset = offset >>> 0;
+    if (!noAssert) {
+      Buffer._checkOffset(offset, 1, this.length);
+    }
+    if (!(this[offset] & 128)) {
+      return this[offset];
+    }
+    return (255 - this[offset] + 1) * -1;
+  }
+  /**
+   * Reads a signed, little-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
+   * are interpreted as two's complement signed values.
+   *
+   * @param offset Number of bytes to skip before starting to read.
+   * @param noAssert
+   */
+  readInt16LE(offset, noAssert) {
+    offset = offset >>> 0;
+    if (!noAssert) {
+      Buffer._checkOffset(offset, 2, this.length);
+    }
+    const val = this[offset] | this[offset + 1] << 8;
+    return val & 32768 ? val | 4294901760 : val;
+  }
+  /**
+   * Reads a signed, big-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
+   * are interpreted as two's complement signed values.
+   *
+   * @param offset Number of bytes to skip before starting to read.
+   * @param noAssert
+   */
+  readInt16BE(offset, noAssert) {
+    offset = offset >>> 0;
+    if (!noAssert) {
+      Buffer._checkOffset(offset, 2, this.length);
+    }
+    const val = this[offset + 1] | this[offset] << 8;
+    return val & 32768 ? val | 4294901760 : val;
+  }
+  /**
+   * Reads a signed, little-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
+   * are interpreted as two's complement signed values.
+   *
+   * @param offset Number of bytes to skip before starting to read.
+   * @param noAssert
+   */
+  readInt32LE(offset, noAssert) {
+    offset = offset >>> 0;
+    if (!noAssert) {
+      Buffer._checkOffset(offset, 4, this.length);
+    }
+    return this[offset] | this[offset + 1] << 8 | this[offset + 2] << 16 | this[offset + 3] << 24;
+  }
+  /**
+   * Reads a signed, big-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer`
+   * are interpreted as two's complement signed values.
+   *
+   * @param offset Number of bytes to skip before starting to read.
+   * @param noAssert
+   */
+  readInt32BE(offset, noAssert) {
+    offset = offset >>> 0;
+    if (!noAssert) {
+      Buffer._checkOffset(offset, 4, this.length);
+    }
+    return this[offset] << 24 | this[offset + 1] << 16 | this[offset + 2] << 8 | this[offset + 3];
+  }
+  /**
+   * Interprets `buf` as an array of unsigned 16-bit integers and swaps the byte order in-place.
+   * Throws a `RangeError` if `buf.length` is not a multiple of 2.
+   */
+  swap16() {
+    const len = this.length;
+    if (len % 2 !== 0) {
+      throw new RangeError("Buffer size must be a multiple of 16-bits");
+    }
+    for (let i = 0; i < len; i += 2) {
+      this._swap(this, i, i + 1);
+    }
+    return this;
+  }
+  /**
+   * Interprets `buf` as an array of unsigned 32-bit integers and swaps the byte order in-place.
+   * Throws a `RangeError` if `buf.length` is not a multiple of 4.
+   */
+  swap32() {
+    const len = this.length;
+    if (len % 4 !== 0) {
+      throw new RangeError("Buffer size must be a multiple of 32-bits");
+    }
+    for (let i = 0; i < len; i += 4) {
+      this._swap(this, i, i + 3);
+      this._swap(this, i + 1, i + 2);
+    }
+    return this;
+  }
+  /**
+   * Interprets `buf` as an array of unsigned 64-bit integers and swaps the byte order in-place.
+   * Throws a `RangeError` if `buf.length` is not a multiple of 8.
+   */
+  swap64() {
+    const len = this.length;
+    if (len % 8 !== 0) {
+      throw new RangeError("Buffer size must be a multiple of 64-bits");
+    }
+    for (let i = 0; i < len; i += 8) {
+      this._swap(this, i, i + 7);
+      this._swap(this, i + 1, i + 6);
+      this._swap(this, i + 2, i + 5);
+      this._swap(this, i + 3, i + 4);
+    }
+    return this;
+  }
+  /**
+   * Swaps two octets.
+   *
+   * @param b
+   * @param n
+   * @param m
+   */
+  _swap(b, n, m) {
+    const i = b[n];
+    b[n] = b[m];
+    b[m] = i;
+  }
+  /**
+   * Writes `value` to `buf` at the specified `offset`. The `value` must be a valid unsigned 8-bit integer.
+   * Behavior is undefined when `value` is anything other than an unsigned 8-bit integer.
+   *
+   * @param value Number to write.
+   * @param offset Number of bytes to skip before starting to write.
+   * @param noAssert
+   * @returns `offset` plus the number of bytes written.
+   */
+  writeUInt8(value, offset, noAssert) {
+    value = +value;
+    offset = offset >>> 0;
+    if (!noAssert) {
+      Buffer._checkInt(this, value, offset, 1, 255, 0);
+    }
+    this[offset] = value & 255;
+    return offset + 1;
+  }
+  /**
+   * Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 16-bit
+   * integer. Behavior is undefined when `value` is anything other than an unsigned 16-bit integer.
+   *
+   * @param value Number to write.
+   * @param offset Number of bytes to skip before starting to write.
+   * @param noAssert
+   * @returns `offset` plus the number of bytes written.
+   */
+  writeUInt16LE(value, offset, noAssert) {
+    value = +value;
+    offset = offset >>> 0;
+    if (!noAssert) {
+      Buffer._checkInt(this, value, offset, 2, 65535, 0);
+    }
+    this[offset] = value & 255;
+    this[offset + 1] = value >>> 8;
+    return offset + 2;
+  }
+  /**
+   * Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 16-bit
+   * integer. Behavior is undefined when `value` is anything other than an unsigned 16-bit integer.
+   *
+   * @param value Number to write.
+   * @param offset Number of bytes to skip before starting to write.
+   * @param noAssert
+   * @returns `offset` plus the number of bytes written.
+   */
+  writeUInt16BE(value, offset, noAssert) {
+    value = +value;
+    offset = offset >>> 0;
+    if (!noAssert) {
+      Buffer._checkInt(this, value, offset, 2, 65535, 0);
+    }
+    this[offset] = value >>> 8;
+    this[offset + 1] = value & 255;
+    return offset + 2;
+  }
+  /**
+   * Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid unsigned 32-bit
+   * integer. Behavior is undefined when `value` is anything other than an unsigned 32-bit integer.
+   *
+   * @param value Number to write.
+   * @param offset Number of bytes to skip before starting to write.
+   * @param noAssert
+   * @returns `offset` plus the number of bytes written.
+   */
+  writeUInt32LE(value, offset, noAssert) {
+    value = +value;
+    offset = offset >>> 0;
+    if (!noAssert) {
+      Buffer._checkInt(this, value, offset, 4, 4294967295, 0);
+    }
+    this[offset + 3] = value >>> 24;
+    this[offset + 2] = value >>> 16;
+    this[offset + 1] = value >>> 8;
+    this[offset] = value & 255;
+    return offset + 4;
+  }
+  /**
+   * Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid unsigned 32-bit
+   * integer. Behavior is undefined when `value` is anything other than an unsigned 32-bit integer.
+   *
+   * @param value Number to write.
+   * @param offset Number of bytes to skip before starting to write.
+   * @param noAssert
+   * @returns `offset` plus the number of bytes written.
+   */
+  writeUInt32BE(value, offset, noAssert) {
+    value = +value;
+    offset = offset >>> 0;
+    if (!noAssert) {
+      Buffer._checkInt(this, value, offset, 4, 4294967295, 0);
+    }
+    this[offset] = value >>> 24;
+    this[offset + 1] = value >>> 16;
+    this[offset + 2] = value >>> 8;
+    this[offset + 3] = value & 255;
+    return offset + 4;
+  }
+  /**
+   * Writes `value` to `buf` at the specified `offset`. The `value` must be a valid signed 8-bit integer.
+   * Behavior is undefined when `value` is anything other than a signed 8-bit integer.
+   *
+   * @param value Number to write.
+   * @param offset Number of bytes to skip before starting to write.
+   * @param noAssert
+   * @returns `offset` plus the number of bytes written.
+   */
+  writeInt8(value, offset, noAssert) {
+    value = +value;
+    offset = offset >>> 0;
+    if (!noAssert) {
+      Buffer._checkInt(this, value, offset, 1, 127, -128);
+    }
+    if (value < 0) {
+      value = 255 + value + 1;
+    }
+    this[offset] = value & 255;
+    return offset + 1;
+  }
+  /**
+   * Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 16-bit
+   * integer. Behavior is undefined when `value` is anything other than a signed 16-bit integer.
+   *
+   * @param value Number to write.
+   * @param offset Number of bytes to skip before starting to write.
+   * @param noAssert
+   * @returns `offset` plus the number of bytes written.
+   */
+  writeInt16LE(value, offset, noAssert) {
+    value = +value;
+    offset = offset >>> 0;
+    if (!noAssert) {
+      Buffer._checkInt(this, value, offset, 2, 32767, -32768);
+    }
+    this[offset] = value & 255;
+    this[offset + 1] = value >>> 8;
+    return offset + 2;
+  }
+  /**
+   * Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 16-bit
+   * integer. Behavior is undefined when `value` is anything other than a signed 16-bit integer.
+   *
+   * @param value Number to write.
+   * @param offset Number of bytes to skip before starting to write.
+   * @param noAssert
+   * @returns `offset` plus the number of bytes written.
+   */
+  writeInt16BE(value, offset, noAssert) {
+    value = +value;
+    offset = offset >>> 0;
+    if (!noAssert) {
+      Buffer._checkInt(this, value, offset, 2, 32767, -32768);
+    }
+    this[offset] = value >>> 8;
+    this[offset + 1] = value & 255;
+    return offset + 2;
+  }
+  /**
+   * Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a valid signed 32-bit
+   * integer. Behavior is undefined when `value` is anything other than a signed 32-bit integer.
+   *
+   * @param value Number to write.
+   * @param offset Number of bytes to skip before starting to write.
+   * @param noAssert
+   * @returns `offset` plus the number of bytes written.
+   */
+  writeInt32LE(value, offset, noAssert) {
+    value = +value;
+    offset = offset >>> 0;
+    if (!noAssert) {
+      Buffer._checkInt(this, value, offset, 4, 2147483647, -2147483648);
+    }
+    this[offset] = value & 255;
+    this[offset + 1] = value >>> 8;
+    this[offset + 2] = value >>> 16;
+    this[offset + 3] = value >>> 24;
+    return offset + 4;
+  }
+  /**
+   * Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a valid signed 32-bit
+   * integer. Behavior is undefined when `value` is anything other than a signed 32-bit integer.
+   *
+   * @param value Number to write.
+   * @param offset Number of bytes to skip before starting to write.
+   * @param noAssert
+   * @returns `offset` plus the number of bytes written.
+   */
+  writeInt32BE(value, offset, noAssert) {
+    value = +value;
+    offset = offset >>> 0;
+    if (!noAssert) {
+      Buffer._checkInt(this, value, offset, 4, 2147483647, -2147483648);
+    }
+    if (value < 0) {
+      value = 4294967295 + value + 1;
+    }
+    this[offset] = value >>> 24;
+    this[offset + 1] = value >>> 16;
+    this[offset + 2] = value >>> 8;
+    this[offset + 3] = value & 255;
+    return offset + 4;
+  }
+  /**
+   * Fills `buf` with the specified `value`. If the `offset` and `end` are not given, the entire `buf` will be
+   * filled. The `value` is coerced to a `uint32` value if it is not a string, `Buffer`, or integer. If the resulting
+   * integer is greater than `255` (decimal), then `buf` will be filled with `value & 255`.
+   *
+   * If the final write of a `fill()` operation falls on a multi-byte character, then only the bytes of that
+   * character that fit into `buf` are written.
+   *
+   * If `value` contains invalid characters, it is truncated; if no valid fill data remains, an exception is thrown.
+   *
+   * @param value
+   * @param encoding
+   */
+  fill(value, offset, end, encoding) {
+    if (typeof value === "string") {
+      if (typeof offset === "string") {
+        encoding = offset;
+        offset = 0;
+        end = this.length;
+      } else if (typeof end === "string") {
+        encoding = end;
+        end = this.length;
+      }
+      if (encoding !== void 0 && typeof encoding !== "string") {
+        throw new TypeError("encoding must be a string");
+      }
+      if (typeof encoding === "string" && !Buffer.isEncoding(encoding)) {
+        throw new TypeError("Unknown encoding: " + encoding);
+      }
+      if (value.length === 1) {
+        const code = value.charCodeAt(0);
+        if (encoding === "utf8" && code < 128) {
+          value = code;
+        }
+      }
+    } else if (typeof value === "number") {
+      value = value & 255;
+    } else if (typeof value === "boolean") {
+      value = Number(value);
+    }
+    offset ?? (offset = 0);
+    end ?? (end = this.length);
+    if (offset < 0 || this.length < offset || this.length < end) {
+      throw new RangeError("Out of range index");
+    }
+    if (end <= offset) {
+      return this;
+    }
+    offset = offset >>> 0;
+    end = end === void 0 ? this.length : end >>> 0;
+    value || (value = 0);
+    let i;
+    if (typeof value === "number") {
+      for (i = offset; i < end; ++i) {
+        this[i] = value;
+      }
+    } else {
+      const bytes = Buffer.isBuffer(value) ? value : Buffer.from(value, encoding);
+      const len = bytes.length;
+      if (len === 0) {
+        throw new TypeError('The value "' + value + '" is invalid for argument "value"');
+      }
+      for (i = 0; i < end - offset; ++i) {
+        this[i + offset] = bytes[i % len];
+      }
+    }
+    return this;
+  }
+  /**
+   * Returns the index of the specified value.
+   *
+   * If `value` is:
+   * - a string, `value` is interpreted according to the character encoding in `encoding`.
+   * - a `Buffer` or `Uint8Array`, `value` will be used in its entirety. To compare a partial Buffer, use `slice()`.
+   * - a number, `value` will be interpreted as an unsigned 8-bit integer value between `0` and `255`.
+   *
+   * Any other types will throw a `TypeError`.
+   *
+   * @param value What to search for.
+   * @param byteOffset Where to begin searching in `buf`. If negative, then calculated from the end.
+   * @param encoding If `value` is a string, this is the encoding used to search.
+   * @returns The index of the first occurrence of `value` in `buf`, or `-1` if not found.
+   */
+  indexOf(value, byteOffset, encoding) {
+    return this._bidirectionalIndexOf(this, value, byteOffset, encoding, true);
+  }
+  /**
+   * Gets the last index of the specified value.
+   *
+   * @see indexOf()
+   * @param value
+   * @param byteOffset
+   * @param encoding
+   */
+  lastIndexOf(value, byteOffset, encoding) {
+    return this._bidirectionalIndexOf(this, value, byteOffset, encoding, false);
+  }
+  _bidirectionalIndexOf(buffer, val, byteOffset, encoding, dir) {
+    if (buffer.length === 0) {
+      return -1;
+    }
+    if (typeof byteOffset === "string") {
+      encoding = byteOffset;
+      byteOffset = 0;
+    } else if (typeof byteOffset === "undefined") {
+      byteOffset = 0;
+    } else if (byteOffset > 2147483647) {
+      byteOffset = 2147483647;
+    } else if (byteOffset < -2147483648) {
+      byteOffset = -2147483648;
+    }
+    byteOffset = +byteOffset;
+    if (byteOffset !== byteOffset) {
+      byteOffset = dir ? 0 : buffer.length - 1;
+    }
+    if (byteOffset < 0) {
+      byteOffset = buffer.length + byteOffset;
+    }
+    if (byteOffset >= buffer.length) {
+      if (dir) {
+        return -1;
+      } else {
+        byteOffset = buffer.length - 1;
+      }
+    } else if (byteOffset < 0) {
+      if (dir) {
+        byteOffset = 0;
+      } else {
+        return -1;
+      }
+    }
+    if (typeof val === "string") {
+      val = Buffer.from(val, encoding);
+    }
+    if (Buffer.isBuffer(val)) {
+      if (val.length === 0) {
+        return -1;
+      }
+      return Buffer._arrayIndexOf(buffer, val, byteOffset, encoding, dir);
+    } else if (typeof val === "number") {
+      val = val & 255;
+      if (typeof Uint8Array.prototype.indexOf === "function") {
+        if (dir) {
+          return Uint8Array.prototype.indexOf.call(buffer, val, byteOffset);
+        } else {
+          return Uint8Array.prototype.lastIndexOf.call(buffer, val, byteOffset);
+        }
+      }
+      return Buffer._arrayIndexOf(buffer, Buffer.from([val]), byteOffset, encoding, dir);
+    }
+    throw new TypeError("val must be string, number or Buffer");
+  }
+  /**
+   * Equivalent to `buf.indexOf() !== -1`.
+   *
+   * @param value
+   * @param byteOffset
+   * @param encoding
+   */
+  includes(value, byteOffset, encoding) {
+    return this.indexOf(value, byteOffset, encoding) !== -1;
+  }
+  /**
+   * Creates a new buffer from the given parameters.
+   *
+   * @param data
+   * @param encoding
+   */
+  static from(a, b, c) {
+    return new Buffer(a, b, c);
+  }
+  /**
+   * Returns true if `obj` is a Buffer.
+   *
+   * @param obj
+   */
+  static isBuffer(obj) {
+    return obj != null && obj !== Buffer.prototype && Buffer._isInstance(obj, Buffer);
+  }
+  /**
+   * Returns true if `encoding` is a supported encoding.
+   *
+   * @param encoding
+   */
+  static isEncoding(encoding) {
+    switch (encoding.toLowerCase()) {
+      case "hex":
+      case "utf8":
+      case "ascii":
+      case "binary":
+      case "latin1":
+      case "ucs2":
+      case "utf16le":
+      case "base64":
+        return true;
+      default:
+        return false;
+    }
+  }
+  /**
+   * Gives the actual byte length of a string for an encoding. This is not the same as `string.length` since that
+   * returns the number of characters in the string.
+   *
+   * @param string The string to test.
+   * @param encoding The encoding to use for calculation. Defaults is `utf8`.
+   */
+  static byteLength(string, encoding) {
+    if (Buffer.isBuffer(string)) {
+      return string.length;
+    }
+    if (typeof string !== "string" && (ArrayBuffer.isView(string) || Buffer._isInstance(string, ArrayBuffer))) {
+      return string.byteLength;
+    }
+    if (typeof string !== "string") {
+      throw new TypeError(
+        'The "string" argument must be one of type string, Buffer, or ArrayBuffer. Received type ' + typeof string
+      );
+    }
+    const len = string.length;
+    const mustMatch = arguments.length > 2 && arguments[2] === true;
+    if (!mustMatch && len === 0) {
+      return 0;
+    }
+    switch (encoding?.toLowerCase()) {
+      case "ascii":
+      case "latin1":
+      case "binary":
+        return len;
+      case "utf8":
+        return Buffer._utf8ToBytes(string).length;
+      case "hex":
+        return len >>> 1;
+      case "ucs2":
+      case "utf16le":
+        return len * 2;
+      case "base64":
+        return Buffer._base64ToBytes(string).length;
+      default:
+        return mustMatch ? -1 : Buffer._utf8ToBytes(string).length;
+    }
+  }
+  /**
+   * Returns a Buffer which is the result of concatenating all the buffers in the list together.
+   *
+   * - If the list has no items, or if the `totalLength` is 0, then it returns a zero-length buffer.
+   * - If the list has exactly one item, then the first item is returned.
+   * - If the list has more than one item, then a new buffer is created.
+   *
+   * It is faster to provide the `totalLength` if it is known. However, it will be calculated if not provided at
+   * a small computational expense.
+   *
+   * @param list An array of Buffer objects to concatenate.
+   * @param totalLength Total length of the buffers when concatenated.
+   */
+  static concat(list, totalLength) {
+    if (!Array.isArray(list)) {
+      throw new TypeError('"list" argument must be an Array of Buffers');
+    }
+    if (list.length === 0) {
+      return Buffer.alloc(0);
+    }
+    let i;
+    if (totalLength === void 0) {
+      totalLength = 0;
+      for (i = 0; i < list.length; ++i) {
+        totalLength += list[i].length;
+      }
+    }
+    const buffer = Buffer.allocUnsafe(totalLength);
+    let pos = 0;
+    for (i = 0; i < list.length; ++i) {
+      let buf = list[i];
+      if (Buffer._isInstance(buf, Uint8Array)) {
+        if (pos + buf.length > buffer.length) {
+          if (!Buffer.isBuffer(buf)) {
+            buf = Buffer.from(buf);
+          }
+          buf.copy(buffer, pos);
+        } else {
+          Uint8Array.prototype.set.call(buffer, buf, pos);
+        }
+      } else if (!Buffer.isBuffer(buf)) {
+        throw new TypeError('"list" argument must be an Array of Buffers');
+      } else {
+        buf.copy(buffer, pos);
+      }
+      pos += buf.length;
+    }
+    return buffer;
+  }
+  /**
+   * The same as `buf1.compare(buf2)`.
+   */
+  static compare(buf1, buf2) {
+    if (Buffer._isInstance(buf1, Uint8Array)) {
+      buf1 = Buffer.from(buf1, buf1.byteOffset, buf1.byteLength);
+    }
+    if (Buffer._isInstance(buf2, Uint8Array)) {
+      buf2 = Buffer.from(buf2, buf2.byteOffset, buf2.byteLength);
+    }
+    if (!Buffer.isBuffer(buf1) || !Buffer.isBuffer(buf2)) {
+      throw new TypeError('The "buf1", "buf2" arguments must be one of type Buffer or Uint8Array');
+    }
+    if (buf1 === buf2) {
+      return 0;
+    }
+    let x = buf1.length;
+    let y = buf2.length;
+    for (let i = 0, len = Math.min(x, y); i < len; ++i) {
+      if (buf1[i] !== buf2[i]) {
+        x = buf1[i];
+        y = buf2[i];
+        break;
+      }
+    }
+    if (x < y) {
+      return -1;
+    }
+    if (y < x) {
+      return 1;
+    }
+    return 0;
+  }
+  /**
+   * Allocates a new buffer of `size` octets.
+   *
+   * @param size The number of octets to allocate.
+   * @param fill If specified, the buffer will be initialized by calling `buf.fill(fill)`, or with zeroes otherwise.
+   * @param encoding The encoding used for the call to `buf.fill()` while initializing.
+   */
+  static alloc(size, fill, encoding) {
+    if (typeof size !== "number") {
+      throw new TypeError('"size" argument must be of type number');
+    } else if (size < 0) {
+      throw new RangeError('The value "' + size + '" is invalid for option "size"');
+    }
+    if (size <= 0) {
+      return new Buffer(size);
+    }
+    if (fill !== void 0) {
+      return typeof encoding === "string" ? new Buffer(size).fill(fill, 0, size, encoding) : new Buffer(size).fill(fill);
+    }
+    return new Buffer(size);
+  }
+  /**
+   * Allocates a new buffer of `size` octets without initializing memory. The contents of the buffer are unknown.
+   *
+   * @param size
+   */
+  static allocUnsafe(size) {
+    if (typeof size !== "number") {
+      throw new TypeError('"size" argument must be of type number');
+    } else if (size < 0) {
+      throw new RangeError('The value "' + size + '" is invalid for option "size"');
+    }
+    return new Buffer(size < 0 ? 0 : Buffer._checked(size) | 0);
+  }
+  /**
+   * Returns true if the given `obj` is an instance of `type`.
+   *
+   * @param obj
+   * @param type
+   */
+  static _isInstance(obj, type) {
+    return obj instanceof type || obj != null && obj.constructor != null && obj.constructor.name != null && obj.constructor.name === type.name;
+  }
+  static _checked(length) {
+    if (length >= K_MAX_LENGTH) {
+      throw new RangeError(
+        "Attempt to allocate Buffer larger than maximum size: 0x" + K_MAX_LENGTH.toString(16) + " bytes"
+      );
+    }
+    return length | 0;
+  }
+  static _blitBuffer(src, dst, offset, length) {
+    let i;
+    for (i = 0; i < length; ++i) {
+      if (i + offset >= dst.length || i >= src.length) {
+        break;
+      }
+      dst[i + offset] = src[i];
+    }
+    return i;
+  }
+  static _utf8Write(buf, string, offset, length) {
+    return Buffer._blitBuffer(Buffer._utf8ToBytes(string, buf.length - offset), buf, offset, length);
+  }
+  static _asciiWrite(buf, string, offset, length) {
+    return Buffer._blitBuffer(Buffer._asciiToBytes(string), buf, offset, length);
+  }
+  static _base64Write(buf, string, offset, length) {
+    return Buffer._blitBuffer(Buffer._base64ToBytes(string), buf, offset, length);
+  }
+  static _ucs2Write(buf, string, offset, length) {
+    return Buffer._blitBuffer(Buffer._utf16leToBytes(string, buf.length - offset), buf, offset, length);
+  }
+  static _hexWrite(buf, string, offset, length) {
+    offset = Number(offset) || 0;
+    const remaining = buf.length - offset;
+    if (!length) {
+      length = remaining;
+    } else {
+      length = Number(length);
+      if (length > remaining) {
+        length = remaining;
+      }
+    }
+    const strLen = string.length;
+    if (length > strLen / 2) {
+      length = strLen / 2;
+    }
+    let i;
+    for (i = 0; i < length; ++i) {
+      const parsed = parseInt(string.substr(i * 2, 2), 16);
+      if (parsed !== parsed) {
+        return i;
+      }
+      buf[offset + i] = parsed;
+    }
+    return i;
+  }
+  static _utf8ToBytes(string, units) {
+    units = units || Infinity;
+    const length = string.length;
+    const bytes = [];
+    let codePoint;
+    let leadSurrogate = null;
+    for (let i = 0; i < length; ++i) {
+      codePoint = string.charCodeAt(i);
+      if (codePoint > 55295 && codePoint < 57344) {
+        if (!leadSurrogate) {
+          if (codePoint > 56319) {
+            if ((units -= 3) > -1) {
+              bytes.push(239, 191, 189);
+            }
+            continue;
+          } else if (i + 1 === length) {
+            if ((units -= 3) > -1) {
+              bytes.push(239, 191, 189);
+            }
+            continue;
+          }
+          leadSurrogate = codePoint;
+          continue;
|
+
}
|
1520
|
+
if (codePoint < 56320) {
|
1521
|
+
if ((units -= 3) > -1) {
|
1522
|
+
bytes.push(239, 191, 189);
|
1523
|
+
}
|
1524
|
+
leadSurrogate = codePoint;
|
1525
|
+
continue;
|
1526
|
+
}
|
1527
|
+
codePoint = (leadSurrogate - 55296 << 10 | codePoint - 56320) + 65536;
|
1528
|
+
} else if (leadSurrogate) {
|
1529
|
+
if ((units -= 3) > -1) {
|
1530
|
+
bytes.push(239, 191, 189);
|
1531
|
+
}
|
1532
|
+
}
|
1533
|
+
leadSurrogate = null;
|
1534
|
+
if (codePoint < 128) {
|
1535
|
+
if ((units -= 1) < 0) {
|
1536
|
+
break;
|
1537
|
+
}
|
1538
|
+
bytes.push(codePoint);
|
1539
|
+
} else if (codePoint < 2048) {
|
1540
|
+
if ((units -= 2) < 0) {
|
1541
|
+
break;
|
1542
|
+
}
|
1543
|
+
bytes.push(codePoint >> 6 | 192, codePoint & 63 | 128);
|
1544
|
+
} else if (codePoint < 65536) {
|
1545
|
+
if ((units -= 3) < 0) {
|
1546
|
+
break;
|
1547
|
+
}
|
1548
|
+
bytes.push(codePoint >> 12 | 224, codePoint >> 6 & 63 | 128, codePoint & 63 | 128);
|
1549
|
+
} else if (codePoint < 1114112) {
|
1550
|
+
if ((units -= 4) < 0) {
|
1551
|
+
break;
|
1552
|
+
}
|
1553
|
+
bytes.push(
|
1554
|
+
codePoint >> 18 | 240,
|
1555
|
+
codePoint >> 12 & 63 | 128,
|
1556
|
+
codePoint >> 6 & 63 | 128,
|
1557
|
+
codePoint & 63 | 128
|
1558
|
+
);
|
1559
|
+
} else {
|
1560
|
+
throw new Error("Invalid code point");
|
1561
|
+
}
|
1562
|
+
}
|
1563
|
+
return bytes;
|
1564
|
+
}
|
1565
|
+
static _base64ToBytes(str) {
|
1566
|
+
return toByteArray(base64clean(str));
|
1567
|
+
}
|
1568
|
+
static _asciiToBytes(str) {
|
1569
|
+
const byteArray = [];
|
1570
|
+
for (let i = 0; i < str.length; ++i) {
|
1571
|
+
byteArray.push(str.charCodeAt(i) & 255);
|
1572
|
+
}
|
1573
|
+
return byteArray;
|
1574
|
+
}
|
1575
|
+
static _utf16leToBytes(str, units) {
|
1576
|
+
let c, hi, lo;
|
1577
|
+
const byteArray = [];
|
1578
|
+
for (let i = 0; i < str.length; ++i) {
|
1579
|
+
if ((units -= 2) < 0)
|
1580
|
+
break;
|
1581
|
+
c = str.charCodeAt(i);
|
1582
|
+
hi = c >> 8;
|
1583
|
+
lo = c % 256;
|
1584
|
+
byteArray.push(lo);
|
1585
|
+
byteArray.push(hi);
|
1586
|
+
}
|
1587
|
+
return byteArray;
|
1588
|
+
}
|
1589
|
+
static _hexSlice(buf, start, end) {
|
1590
|
+
const len = buf.length;
|
1591
|
+
if (!start || start < 0) {
|
1592
|
+
start = 0;
|
1593
|
+
}
|
1594
|
+
if (!end || end < 0 || end > len) {
|
1595
|
+
end = len;
|
1596
|
+
}
|
1597
|
+
let out = "";
|
1598
|
+
for (let i = start; i < end; ++i) {
|
1599
|
+
out += hexSliceLookupTable[buf[i]];
|
1600
|
+
}
|
1601
|
+
return out;
|
1602
|
+
}
|
1603
|
+
static _base64Slice(buf, start, end) {
|
1604
|
+
if (start === 0 && end === buf.length) {
|
1605
|
+
return fromByteArray(buf);
|
1606
|
+
} else {
|
1607
|
+
return fromByteArray(buf.slice(start, end));
|
1608
|
+
}
|
1609
|
+
}
|
1610
|
+
static _utf8Slice(buf, start, end) {
|
1611
|
+
end = Math.min(buf.length, end);
|
1612
|
+
const res = [];
|
1613
|
+
let i = start;
|
1614
|
+
while (i < end) {
|
1615
|
+
const firstByte = buf[i];
|
1616
|
+
let codePoint = null;
|
1617
|
+
let bytesPerSequence = firstByte > 239 ? 4 : firstByte > 223 ? 3 : firstByte > 191 ? 2 : 1;
|
1618
|
+
if (i + bytesPerSequence <= end) {
|
1619
|
+
let secondByte, thirdByte, fourthByte, tempCodePoint;
|
1620
|
+
switch (bytesPerSequence) {
|
1621
|
+
case 1:
|
1622
|
+
if (firstByte < 128) {
|
1623
|
+
codePoint = firstByte;
|
1624
|
+
}
|
1625
|
+
break;
|
1626
|
+
case 2:
|
1627
|
+
secondByte = buf[i + 1];
|
1628
|
+
if ((secondByte & 192) === 128) {
|
1629
|
+
tempCodePoint = (firstByte & 31) << 6 | secondByte & 63;
|
1630
|
+
if (tempCodePoint > 127) {
|
1631
|
+
codePoint = tempCodePoint;
|
1632
|
+
}
|
1633
|
+
}
|
1634
|
+
break;
|
1635
|
+
case 3:
|
1636
|
+
secondByte = buf[i + 1];
|
1637
|
+
thirdByte = buf[i + 2];
|
1638
|
+
if ((secondByte & 192) === 128 && (thirdByte & 192) === 128) {
|
1639
|
+
tempCodePoint = (firstByte & 15) << 12 | (secondByte & 63) << 6 | thirdByte & 63;
|
1640
|
+
if (tempCodePoint > 2047 && (tempCodePoint < 55296 || tempCodePoint > 57343)) {
|
1641
|
+
codePoint = tempCodePoint;
|
1642
|
+
}
|
1643
|
+
}
|
1644
|
+
break;
|
1645
|
+
case 4:
|
1646
|
+
secondByte = buf[i + 1];
|
1647
|
+
thirdByte = buf[i + 2];
|
1648
|
+
fourthByte = buf[i + 3];
|
1649
|
+
if ((secondByte & 192) === 128 && (thirdByte & 192) === 128 && (fourthByte & 192) === 128) {
|
1650
|
+
tempCodePoint = (firstByte & 15) << 18 | (secondByte & 63) << 12 | (thirdByte & 63) << 6 | fourthByte & 63;
|
1651
|
+
if (tempCodePoint > 65535 && tempCodePoint < 1114112) {
|
1652
|
+
codePoint = tempCodePoint;
|
1653
|
+
}
|
1654
|
+
}
|
1655
|
+
}
|
1656
|
+
}
|
1657
|
+
if (codePoint === null) {
|
1658
|
+
codePoint = 65533;
|
1659
|
+
bytesPerSequence = 1;
|
1660
|
+
} else if (codePoint > 65535) {
|
1661
|
+
codePoint -= 65536;
|
1662
|
+
res.push(codePoint >>> 10 & 1023 | 55296);
|
1663
|
+
codePoint = 56320 | codePoint & 1023;
|
1664
|
+
}
|
1665
|
+
res.push(codePoint);
|
1666
|
+
i += bytesPerSequence;
|
1667
|
+
}
|
1668
|
+
return Buffer._decodeCodePointsArray(res);
|
1669
|
+
}
|
1670
|
+
static _decodeCodePointsArray(codePoints) {
|
1671
|
+
const len = codePoints.length;
|
1672
|
+
if (len <= MAX_ARGUMENTS_LENGTH) {
|
1673
|
+
return String.fromCharCode.apply(String, codePoints);
|
1674
|
+
}
|
1675
|
+
let res = "";
|
1676
|
+
let i = 0;
|
1677
|
+
while (i < len) {
|
1678
|
+
res += String.fromCharCode.apply(String, codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH));
|
1679
|
+
}
|
1680
|
+
return res;
|
1681
|
+
}
|
1682
|
+
static _asciiSlice(buf, start, end) {
|
1683
|
+
let ret = "";
|
1684
|
+
end = Math.min(buf.length, end);
|
1685
|
+
for (let i = start; i < end; ++i) {
|
1686
|
+
ret += String.fromCharCode(buf[i] & 127);
|
1687
|
+
}
|
1688
|
+
return ret;
|
1689
|
+
}
|
1690
|
+
static _latin1Slice(buf, start, end) {
|
1691
|
+
let ret = "";
|
1692
|
+
end = Math.min(buf.length, end);
|
1693
|
+
for (let i = start; i < end; ++i) {
|
1694
|
+
ret += String.fromCharCode(buf[i]);
|
1695
|
+
}
|
1696
|
+
return ret;
|
1697
|
+
}
|
1698
|
+
static _utf16leSlice(buf, start, end) {
|
1699
|
+
const bytes = buf.slice(start, end);
|
1700
|
+
let res = "";
|
1701
|
+
for (let i = 0; i < bytes.length - 1; i += 2) {
|
1702
|
+
res += String.fromCharCode(bytes[i] + bytes[i + 1] * 256);
|
1703
|
+
}
|
1704
|
+
return res;
|
1705
|
+
}
|
1706
|
+
static _arrayIndexOf(arr, val, byteOffset, encoding, dir) {
|
1707
|
+
let indexSize = 1;
|
1708
|
+
let arrLength = arr.length;
|
1709
|
+
let valLength = val.length;
|
1710
|
+
if (encoding !== void 0) {
|
1711
|
+
encoding = Buffer._getEncoding(encoding);
|
1712
|
+
if (encoding === "ucs2" || encoding === "utf16le") {
|
1713
|
+
if (arr.length < 2 || val.length < 2) {
|
1714
|
+
return -1;
|
1715
|
+
}
|
1716
|
+
indexSize = 2;
|
1717
|
+
arrLength /= 2;
|
1718
|
+
valLength /= 2;
|
1719
|
+
byteOffset /= 2;
|
1720
|
+
}
|
1721
|
+
}
|
1722
|
+
function read(buf, i2) {
|
1723
|
+
if (indexSize === 1) {
|
1724
|
+
return buf[i2];
|
1725
|
+
} else {
|
1726
|
+
return buf.readUInt16BE(i2 * indexSize);
|
1727
|
+
}
|
1728
|
+
}
|
1729
|
+
let i;
|
1730
|
+
if (dir) {
|
1731
|
+
let foundIndex = -1;
|
1732
|
+
for (i = byteOffset; i < arrLength; i++) {
|
1733
|
+
if (read(arr, i) === read(val, foundIndex === -1 ? 0 : i - foundIndex)) {
|
1734
|
+
if (foundIndex === -1)
|
1735
|
+
foundIndex = i;
|
1736
|
+
if (i - foundIndex + 1 === valLength)
|
1737
|
+
return foundIndex * indexSize;
|
1738
|
+
} else {
|
1739
|
+
if (foundIndex !== -1)
|
1740
|
+
i -= i - foundIndex;
|
1741
|
+
foundIndex = -1;
|
1742
|
+
}
|
1743
|
+
}
|
1744
|
+
} else {
|
1745
|
+
if (byteOffset + valLength > arrLength) {
|
1746
|
+
byteOffset = arrLength - valLength;
|
1747
|
+
}
|
1748
|
+
for (i = byteOffset; i >= 0; i--) {
|
1749
|
+
let found = true;
|
1750
|
+
for (let j = 0; j < valLength; j++) {
|
1751
|
+
if (read(arr, i + j) !== read(val, j)) {
|
1752
|
+
found = false;
|
1753
|
+
break;
|
1754
|
+
}
|
1755
|
+
}
|
1756
|
+
if (found) {
|
1757
|
+
return i;
|
1758
|
+
}
|
1759
|
+
}
|
1760
|
+
}
|
1761
|
+
return -1;
|
1762
|
+
}
|
1763
|
+
static _checkOffset(offset, ext, length) {
|
1764
|
+
if (offset % 1 !== 0 || offset < 0)
|
1765
|
+
throw new RangeError("offset is not uint");
|
1766
|
+
if (offset + ext > length)
|
1767
|
+
throw new RangeError("Trying to access beyond buffer length");
|
1768
|
+
}
|
1769
|
+
static _checkInt(buf, value, offset, ext, max, min) {
|
1770
|
+
if (!Buffer.isBuffer(buf))
|
1771
|
+
throw new TypeError('"buffer" argument must be a Buffer instance');
|
1772
|
+
if (value > max || value < min)
|
1773
|
+
throw new RangeError('"value" argument is out of bounds');
|
1774
|
+
if (offset + ext > buf.length)
|
1775
|
+
throw new RangeError("Index out of range");
|
1776
|
+
}
|
1777
|
+
static _getEncoding(encoding) {
|
1778
|
+
let toLowerCase = false;
|
1779
|
+
let originalEncoding = "";
|
1780
|
+
for (; ; ) {
|
1781
|
+
switch (encoding) {
|
1782
|
+
case "hex":
|
1783
|
+
return "hex";
|
1784
|
+
case "utf8":
|
1785
|
+
return "utf8";
|
1786
|
+
case "ascii":
|
1787
|
+
return "ascii";
|
1788
|
+
case "binary":
|
1789
|
+
return "binary";
|
1790
|
+
case "latin1":
|
1791
|
+
return "latin1";
|
1792
|
+
case "ucs2":
|
1793
|
+
return "ucs2";
|
1794
|
+
case "utf16le":
|
1795
|
+
return "utf16le";
|
1796
|
+
case "base64":
|
1797
|
+
return "base64";
|
1798
|
+
default: {
|
1799
|
+
if (toLowerCase) {
|
1800
|
+
throw new TypeError("Unknown or unsupported encoding: " + originalEncoding);
|
1801
|
+
}
|
1802
|
+
toLowerCase = true;
|
1803
|
+
originalEncoding = encoding;
|
1804
|
+
encoding = encoding.toLowerCase();
|
1805
|
+
}
|
1806
|
+
}
|
1807
|
+
}
|
1808
|
+
}
|
1809
|
+
}
|
1810
|
+
const hexSliceLookupTable = function() {
|
1811
|
+
const alphabet = "0123456789abcdef";
|
1812
|
+
const table = new Array(256);
|
1813
|
+
for (let i = 0; i < 16; ++i) {
|
1814
|
+
const i16 = i * 16;
|
1815
|
+
for (let j = 0; j < 16; ++j) {
|
1816
|
+
table[i16 + j] = alphabet[i] + alphabet[j];
|
1817
|
+
}
|
1818
|
+
}
|
1819
|
+
return table;
|
1820
|
+
}();
|
1821
|
+
const INVALID_BASE64_RE = /[^+/0-9A-Za-z-_]/g;
|
1822
|
+
function base64clean(str) {
|
1823
|
+
str = str.split("=")[0];
|
1824
|
+
str = str.trim().replace(INVALID_BASE64_RE, "");
|
1825
|
+
if (str.length < 2)
|
1826
|
+
return "";
|
1827
|
+
while (str.length % 4 !== 0) {
|
1828
|
+
str = str + "=";
|
1829
|
+
}
|
1830
|
+
return str;
|
1831
|
+
}
|
1832
|
+
|
25
1833
|
function notEmpty(value) {
|
26
1834
|
return value !== null && value !== void 0;
|
27
1835
|
}
|
@@ -257,7 +2065,7 @@ var __privateAdd$6 = (obj, member, value) => {
 };
 var __privateSet$4 = (obj, member, value, setter) => {
 __accessCheck$6(obj, member, "write to private field");
-
+member.set(obj, value);
 return value;
 };
 var __privateMethod$4 = (obj, member, method) => {
@@ -526,7 +2334,7 @@ function defaultOnOpen(response) {
 }
 }
 
-const VERSION = "0.29.
+const VERSION = "0.29.4";
 
 class ErrorWithCause extends Error {
 constructor(message, options) {
@@ -619,15 +2427,15 @@ function parseWorkspacesUrlParts(url) {
 if (!isString(url))
 return null;
 const matches = {
-production: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.xata\.sh
-staging: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.staging-xata\.dev
-dev: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.dev-xata\.dev
-local: url.match(/(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:(
+production: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.xata\.sh\/db\/([^:]+):?(.*)?/),
+staging: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.staging-xata\.dev\/db\/([^:]+):?(.*)?/),
+dev: url.match(/(?:https:\/\/)?([^.]+)(?:\.([^.]+))\.dev-xata\.dev\/db\/([^:]+):?(.*)?/),
+local: url.match(/(?:https?:\/\/)?([^.]+)(?:\.([^.]+))\.localhost:(?:\d+)\/db\/([^:]+):?(.*)?/)
 };
 const [host, match] = Object.entries(matches).find(([, match2]) => match2 !== null) ?? [];
 if (!isHostProviderAlias(host) || !match)
 return null;
-return { workspace: match[1], region: match[2], host };
+return { workspace: match[1], region: match[2], database: match[3], branch: match[4], host };
 }
 
 const pool = new ApiRequestPool();
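For context, a minimal sketch (not part of the published diff) of what the widened parseWorkspacesUrlParts result looks like now that the patterns above also capture the /db/... segment; the URL and names below are made up:

// Hypothetical workspace URL, matching the "production" pattern above.
const parts = parseWorkspacesUrlParts("https://my-workspace-abc123.us-east-1.xata.sh/db/mydb:main");
// parts === {
//   workspace: "my-workspace-abc123",
//   region: "us-east-1",
//   database: "mydb",
//   branch: "main",
//   host: "production"
// }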
@@ -738,6 +2546,8 @@ async function fetch$1({
 "X-Xata-Client-ID": clientID ?? defaultClientID,
 "X-Xata-Session-ID": sessionID ?? generateUUID(),
 "X-Xata-Agent": xataAgent,
+// Force field rename to xata_ internal properties
+"X-Features": compact(["feat-internal-field-rename-api=1", customHeaders?.["X-Features"]]).join(" "),
 ...customHeaders,
 ...hostHeader(fullUrl),
 Authorization: `Bearer ${apiKey}`
@@ -845,16 +2655,42 @@ function parseUrl(url) {
|
|
845
2655
|
|
846
2656
|
const dataPlaneFetch = async (options) => fetch$1({ ...options, endpoint: "dataPlane" });
|
847
2657
|
|
848
|
-
const applyMigration = (variables, signal) => dataPlaneFetch({
|
2658
|
+
const applyMigration = (variables, signal) => dataPlaneFetch({
|
2659
|
+
url: "/db/{dbBranchName}/migrations/apply",
|
2660
|
+
method: "post",
|
2661
|
+
...variables,
|
2662
|
+
signal
|
2663
|
+
});
|
849
2664
|
const adaptTable = (variables, signal) => dataPlaneFetch({
|
850
2665
|
url: "/db/{dbBranchName}/migrations/adapt/{tableName}",
|
851
2666
|
method: "post",
|
852
2667
|
...variables,
|
853
2668
|
signal
|
854
2669
|
});
|
855
|
-
const
|
856
|
-
|
857
|
-
|
2670
|
+
const adaptAllTables = (variables, signal) => dataPlaneFetch({
|
2671
|
+
url: "/db/{dbBranchName}/migrations/adapt",
|
2672
|
+
method: "post",
|
2673
|
+
...variables,
|
2674
|
+
signal
|
2675
|
+
});
|
2676
|
+
const getBranchMigrationJobStatus = (variables, signal) => dataPlaneFetch({
|
2677
|
+
url: "/db/{dbBranchName}/migrations/status",
|
2678
|
+
method: "get",
|
2679
|
+
...variables,
|
2680
|
+
signal
|
2681
|
+
});
|
2682
|
+
const getMigrationJobStatus = (variables, signal) => dataPlaneFetch({
|
2683
|
+
url: "/db/{dbBranchName}/migrations/jobs/{jobId}",
|
2684
|
+
method: "get",
|
2685
|
+
...variables,
|
2686
|
+
signal
|
2687
|
+
});
|
2688
|
+
const getMigrationHistory = (variables, signal) => dataPlaneFetch({
|
2689
|
+
url: "/db/{dbBranchName}/migrations/history",
|
2690
|
+
method: "get",
|
2691
|
+
...variables,
|
2692
|
+
signal
|
2693
|
+
});
|
858
2694
|
const getBranchList = (variables, signal) => dataPlaneFetch({
|
859
2695
|
url: "/dbs/{dbName}",
|
860
2696
|
method: "get",
|
@@ -913,12 +2749,42 @@ const getBranchStats = (variables, signal) => dataPlaneFetch({
|
|
913
2749
|
});
|
914
2750
|
const getGitBranchesMapping = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "get", ...variables, signal });
|
915
2751
|
const addGitBranchesEntry = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/gitBranches", method: "post", ...variables, signal });
|
916
|
-
const removeGitBranchesEntry = (variables, signal) => dataPlaneFetch({
|
917
|
-
|
918
|
-
|
919
|
-
|
920
|
-
|
921
|
-
|
2752
|
+
const removeGitBranchesEntry = (variables, signal) => dataPlaneFetch({
|
2753
|
+
url: "/dbs/{dbName}/gitBranches",
|
2754
|
+
method: "delete",
|
2755
|
+
...variables,
|
2756
|
+
signal
|
2757
|
+
});
|
2758
|
+
const resolveBranch = (variables, signal) => dataPlaneFetch({
|
2759
|
+
url: "/dbs/{dbName}/resolveBranch",
|
2760
|
+
method: "get",
|
2761
|
+
...variables,
|
2762
|
+
signal
|
2763
|
+
});
|
2764
|
+
const getBranchMigrationHistory = (variables, signal) => dataPlaneFetch({
|
2765
|
+
url: "/db/{dbBranchName}/migrations",
|
2766
|
+
method: "get",
|
2767
|
+
...variables,
|
2768
|
+
signal
|
2769
|
+
});
|
2770
|
+
const getBranchMigrationPlan = (variables, signal) => dataPlaneFetch({
|
2771
|
+
url: "/db/{dbBranchName}/migrations/plan",
|
2772
|
+
method: "post",
|
2773
|
+
...variables,
|
2774
|
+
signal
|
2775
|
+
});
|
2776
|
+
const executeBranchMigrationPlan = (variables, signal) => dataPlaneFetch({
|
2777
|
+
url: "/db/{dbBranchName}/migrations/execute",
|
2778
|
+
method: "post",
|
2779
|
+
...variables,
|
2780
|
+
signal
|
2781
|
+
});
|
2782
|
+
const queryMigrationRequests = (variables, signal) => dataPlaneFetch({
|
2783
|
+
url: "/dbs/{dbName}/migrations/query",
|
2784
|
+
method: "post",
|
2785
|
+
...variables,
|
2786
|
+
signal
|
2787
|
+
});
|
922
2788
|
const createMigrationRequest = (variables, signal) => dataPlaneFetch({ url: "/dbs/{dbName}/migrations", method: "post", ...variables, signal });
|
923
2789
|
const getMigrationRequest = (variables, signal) => dataPlaneFetch({
|
924
2790
|
url: "/dbs/{dbName}/migrations/{mrNumber}",
|
@@ -926,23 +2792,78 @@ const getMigrationRequest = (variables, signal) => dataPlaneFetch({
|
|
926
2792
|
...variables,
|
927
2793
|
signal
|
928
2794
|
});
|
929
|
-
const updateMigrationRequest = (variables, signal) => dataPlaneFetch({
|
930
|
-
|
931
|
-
|
932
|
-
|
2795
|
+
const updateMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2796
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}",
|
2797
|
+
method: "patch",
|
2798
|
+
...variables,
|
2799
|
+
signal
|
2800
|
+
});
|
2801
|
+
const listMigrationRequestsCommits = (variables, signal) => dataPlaneFetch({
|
2802
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/commits",
|
2803
|
+
method: "post",
|
2804
|
+
...variables,
|
2805
|
+
signal
|
2806
|
+
});
|
2807
|
+
const compareMigrationRequest = (variables, signal) => dataPlaneFetch({
|
2808
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/compare",
|
2809
|
+
method: "post",
|
2810
|
+
...variables,
|
2811
|
+
signal
|
2812
|
+
});
|
2813
|
+
const getMigrationRequestIsMerged = (variables, signal) => dataPlaneFetch({
|
2814
|
+
url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
|
2815
|
+
method: "get",
|
2816
|
+
...variables,
|
2817
|
+
signal
|
2818
|
+
});
|
933
2819
|
const mergeMigrationRequest = (variables, signal) => dataPlaneFetch({
|
934
2820
|
url: "/dbs/{dbName}/migrations/{mrNumber}/merge",
|
935
2821
|
method: "post",
|
936
2822
|
...variables,
|
937
2823
|
signal
|
938
2824
|
});
|
939
|
-
const getBranchSchemaHistory = (variables, signal) => dataPlaneFetch({
|
940
|
-
|
941
|
-
|
942
|
-
|
943
|
-
|
944
|
-
|
945
|
-
const
|
2825
|
+
const getBranchSchemaHistory = (variables, signal) => dataPlaneFetch({
|
2826
|
+
url: "/db/{dbBranchName}/schema/history",
|
2827
|
+
method: "post",
|
2828
|
+
...variables,
|
2829
|
+
signal
|
2830
|
+
});
|
2831
|
+
const compareBranchWithUserSchema = (variables, signal) => dataPlaneFetch({
|
2832
|
+
url: "/db/{dbBranchName}/schema/compare",
|
2833
|
+
method: "post",
|
2834
|
+
...variables,
|
2835
|
+
signal
|
2836
|
+
});
|
2837
|
+
const compareBranchSchemas = (variables, signal) => dataPlaneFetch({
|
2838
|
+
url: "/db/{dbBranchName}/schema/compare/{branchName}",
|
2839
|
+
method: "post",
|
2840
|
+
...variables,
|
2841
|
+
signal
|
2842
|
+
});
|
2843
|
+
const updateBranchSchema = (variables, signal) => dataPlaneFetch({
|
2844
|
+
url: "/db/{dbBranchName}/schema/update",
|
2845
|
+
method: "post",
|
2846
|
+
...variables,
|
2847
|
+
signal
|
2848
|
+
});
|
2849
|
+
const previewBranchSchemaEdit = (variables, signal) => dataPlaneFetch({
|
2850
|
+
url: "/db/{dbBranchName}/schema/preview",
|
2851
|
+
method: "post",
|
2852
|
+
...variables,
|
2853
|
+
signal
|
2854
|
+
});
|
2855
|
+
const applyBranchSchemaEdit = (variables, signal) => dataPlaneFetch({
|
2856
|
+
url: "/db/{dbBranchName}/schema/apply",
|
2857
|
+
method: "post",
|
2858
|
+
...variables,
|
2859
|
+
signal
|
2860
|
+
});
|
2861
|
+
const pushBranchMigrations = (variables, signal) => dataPlaneFetch({
|
2862
|
+
url: "/db/{dbBranchName}/schema/push",
|
2863
|
+
method: "post",
|
2864
|
+
...variables,
|
2865
|
+
signal
|
2866
|
+
});
|
946
2867
|
const createTable = (variables, signal) => dataPlaneFetch({
|
947
2868
|
url: "/db/{dbBranchName}/tables/{tableName}",
|
948
2869
|
method: "put",
|
@@ -955,14 +2876,24 @@ const deleteTable = (variables, signal) => dataPlaneFetch({
|
|
955
2876
|
...variables,
|
956
2877
|
signal
|
957
2878
|
});
|
958
|
-
const updateTable = (variables, signal) => dataPlaneFetch({
|
2879
|
+
const updateTable = (variables, signal) => dataPlaneFetch({
|
2880
|
+
url: "/db/{dbBranchName}/tables/{tableName}",
|
2881
|
+
method: "patch",
|
2882
|
+
...variables,
|
2883
|
+
signal
|
2884
|
+
});
|
959
2885
|
const getTableSchema = (variables, signal) => dataPlaneFetch({
|
960
2886
|
url: "/db/{dbBranchName}/tables/{tableName}/schema",
|
961
2887
|
method: "get",
|
962
2888
|
...variables,
|
963
2889
|
signal
|
964
2890
|
});
|
965
|
-
const setTableSchema = (variables, signal) => dataPlaneFetch({
|
2891
|
+
const setTableSchema = (variables, signal) => dataPlaneFetch({
|
2892
|
+
url: "/db/{dbBranchName}/tables/{tableName}/schema",
|
2893
|
+
method: "put",
|
2894
|
+
...variables,
|
2895
|
+
signal
|
2896
|
+
});
|
966
2897
|
const getTableColumns = (variables, signal) => dataPlaneFetch({
|
967
2898
|
url: "/db/{dbBranchName}/tables/{tableName}/columns",
|
968
2899
|
method: "get",
|
@@ -970,7 +2901,12 @@ const getTableColumns = (variables, signal) => dataPlaneFetch({
|
|
970
2901
|
signal
|
971
2902
|
});
|
972
2903
|
const addTableColumn = (variables, signal) => dataPlaneFetch(
|
973
|
-
{
|
2904
|
+
{
|
2905
|
+
url: "/db/{dbBranchName}/tables/{tableName}/columns",
|
2906
|
+
method: "post",
|
2907
|
+
...variables,
|
2908
|
+
signal
|
2909
|
+
}
|
974
2910
|
);
|
975
2911
|
const getColumn = (variables, signal) => dataPlaneFetch({
|
976
2912
|
url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
|
@@ -978,15 +2914,30 @@ const getColumn = (variables, signal) => dataPlaneFetch({
|
|
978
2914
|
...variables,
|
979
2915
|
signal
|
980
2916
|
});
|
981
|
-
const updateColumn = (variables, signal) => dataPlaneFetch({
|
2917
|
+
const updateColumn = (variables, signal) => dataPlaneFetch({
|
2918
|
+
url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
|
2919
|
+
method: "patch",
|
2920
|
+
...variables,
|
2921
|
+
signal
|
2922
|
+
});
|
982
2923
|
const deleteColumn = (variables, signal) => dataPlaneFetch({
|
983
2924
|
url: "/db/{dbBranchName}/tables/{tableName}/columns/{columnName}",
|
984
2925
|
method: "delete",
|
985
2926
|
...variables,
|
986
2927
|
signal
|
987
2928
|
});
|
988
|
-
const branchTransaction = (variables, signal) => dataPlaneFetch({
|
989
|
-
|
2929
|
+
const branchTransaction = (variables, signal) => dataPlaneFetch({
|
2930
|
+
url: "/db/{dbBranchName}/transaction",
|
2931
|
+
method: "post",
|
2932
|
+
...variables,
|
2933
|
+
signal
|
2934
|
+
});
|
2935
|
+
const insertRecord = (variables, signal) => dataPlaneFetch({
|
2936
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data",
|
2937
|
+
method: "post",
|
2938
|
+
...variables,
|
2939
|
+
signal
|
2940
|
+
});
|
990
2941
|
const getFileItem = (variables, signal) => dataPlaneFetch({
|
991
2942
|
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}/column/{columnName}/file/{fileId}",
|
992
2943
|
method: "get",
|
@@ -1029,11 +2980,36 @@ const getRecord = (variables, signal) => dataPlaneFetch({
|
|
1029
2980
|
...variables,
|
1030
2981
|
signal
|
1031
2982
|
});
|
1032
|
-
const insertRecordWithID = (variables, signal) => dataPlaneFetch({
|
1033
|
-
|
1034
|
-
|
1035
|
-
|
1036
|
-
|
2983
|
+
const insertRecordWithID = (variables, signal) => dataPlaneFetch({
|
2984
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2985
|
+
method: "put",
|
2986
|
+
...variables,
|
2987
|
+
signal
|
2988
|
+
});
|
2989
|
+
const updateRecordWithID = (variables, signal) => dataPlaneFetch({
|
2990
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2991
|
+
method: "patch",
|
2992
|
+
...variables,
|
2993
|
+
signal
|
2994
|
+
});
|
2995
|
+
const upsertRecordWithID = (variables, signal) => dataPlaneFetch({
|
2996
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
2997
|
+
method: "post",
|
2998
|
+
...variables,
|
2999
|
+
signal
|
3000
|
+
});
|
3001
|
+
const deleteRecord = (variables, signal) => dataPlaneFetch({
|
3002
|
+
url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
|
3003
|
+
method: "delete",
|
3004
|
+
...variables,
|
3005
|
+
signal
|
3006
|
+
});
|
3007
|
+
const bulkInsertTableRecords = (variables, signal) => dataPlaneFetch({
|
3008
|
+
url: "/db/{dbBranchName}/tables/{tableName}/bulk",
|
3009
|
+
method: "post",
|
3010
|
+
...variables,
|
3011
|
+
signal
|
3012
|
+
});
|
1037
3013
|
const queryTable = (variables, signal) => dataPlaneFetch({
|
1038
3014
|
url: "/db/{dbBranchName}/tables/{tableName}/query",
|
1039
3015
|
method: "post",
|
@@ -1052,16 +3028,36 @@ const searchTable = (variables, signal) => dataPlaneFetch({
|
|
1052
3028
|
...variables,
|
1053
3029
|
signal
|
1054
3030
|
});
|
1055
|
-
const vectorSearchTable = (variables, signal) => dataPlaneFetch({
|
3031
|
+
const vectorSearchTable = (variables, signal) => dataPlaneFetch({
|
3032
|
+
url: "/db/{dbBranchName}/tables/{tableName}/vectorSearch",
|
3033
|
+
method: "post",
|
3034
|
+
...variables,
|
3035
|
+
signal
|
3036
|
+
});
|
1056
3037
|
const askTable = (variables, signal) => dataPlaneFetch({
|
1057
3038
|
url: "/db/{dbBranchName}/tables/{tableName}/ask",
|
1058
3039
|
method: "post",
|
1059
3040
|
...variables,
|
1060
3041
|
signal
|
1061
3042
|
});
|
1062
|
-
const askTableSession = (variables, signal) => dataPlaneFetch({
|
1063
|
-
|
1064
|
-
|
3043
|
+
const askTableSession = (variables, signal) => dataPlaneFetch({
|
3044
|
+
url: "/db/{dbBranchName}/tables/{tableName}/ask/{sessionId}",
|
3045
|
+
method: "post",
|
3046
|
+
...variables,
|
3047
|
+
signal
|
3048
|
+
});
|
3049
|
+
const summarizeTable = (variables, signal) => dataPlaneFetch({
|
3050
|
+
url: "/db/{dbBranchName}/tables/{tableName}/summarize",
|
3051
|
+
method: "post",
|
3052
|
+
...variables,
|
3053
|
+
signal
|
3054
|
+
});
|
3055
|
+
const aggregateTable = (variables, signal) => dataPlaneFetch({
|
3056
|
+
url: "/db/{dbBranchName}/tables/{tableName}/aggregate",
|
3057
|
+
method: "post",
|
3058
|
+
...variables,
|
3059
|
+
signal
|
3060
|
+
});
|
1065
3061
|
const fileAccess = (variables, signal) => dataPlaneFetch({
|
1066
3062
|
url: "/file/{fileId}",
|
1067
3063
|
method: "get",
|
@@ -1084,6 +3080,7 @@ const operationsByTag$2 = {
 migrations: {
 applyMigration,
 adaptTable,
+adaptAllTables,
 getBranchMigrationJobStatus,
 getMigrationJobStatus,
 getMigrationHistory,
@@ -1146,7 +3143,16 @@ const operationsByTag$2 = {
 deleteRecord,
 bulkInsertTableRecords
 },
-files: {
+files: {
+getFileItem,
+putFileItem,
+deleteFileItem,
+getFile,
+putFile,
+deleteFile,
+fileAccess,
+fileUpload
+},
 searchAndFilter: {
 queryTable,
 searchBranch,
@@ -1224,7 +3230,12 @@ const deleteOAuthAccessToken = (variables, signal) => controlPlaneFetch({
 ...variables,
 signal
 });
-const updateOAuthAccessToken = (variables, signal) => controlPlaneFetch({
+const updateOAuthAccessToken = (variables, signal) => controlPlaneFetch({
+url: "/user/oauth/tokens/{token}",
+method: "patch",
+...variables,
+signal
+});
 const getWorkspacesList = (variables, signal) => controlPlaneFetch({
 url: "/workspaces",
 method: "get",
@@ -1255,47 +3266,150 @@ const deleteWorkspace = (variables, signal) => controlPlaneFetch({
|
|
1255
3266
|
...variables,
|
1256
3267
|
signal
|
1257
3268
|
});
|
1258
|
-
const
|
1259
|
-
|
3269
|
+
const getWorkspaceSettings = (variables, signal) => controlPlaneFetch({
|
3270
|
+
url: "/workspaces/{workspaceId}/settings",
|
3271
|
+
method: "get",
|
3272
|
+
...variables,
|
3273
|
+
signal
|
3274
|
+
});
|
3275
|
+
const updateWorkspaceSettings = (variables, signal) => controlPlaneFetch({
|
3276
|
+
url: "/workspaces/{workspaceId}/settings",
|
3277
|
+
method: "patch",
|
3278
|
+
...variables,
|
3279
|
+
signal
|
3280
|
+
});
|
3281
|
+
const getWorkspaceMembersList = (variables, signal) => controlPlaneFetch({
|
3282
|
+
url: "/workspaces/{workspaceId}/members",
|
3283
|
+
method: "get",
|
3284
|
+
...variables,
|
3285
|
+
signal
|
3286
|
+
});
|
3287
|
+
const updateWorkspaceMemberRole = (variables, signal) => controlPlaneFetch({
|
3288
|
+
url: "/workspaces/{workspaceId}/members/{userId}",
|
3289
|
+
method: "put",
|
3290
|
+
...variables,
|
3291
|
+
signal
|
3292
|
+
});
|
1260
3293
|
const removeWorkspaceMember = (variables, signal) => controlPlaneFetch({
|
1261
3294
|
url: "/workspaces/{workspaceId}/members/{userId}",
|
1262
3295
|
method: "delete",
|
1263
3296
|
...variables,
|
1264
3297
|
signal
|
1265
3298
|
});
|
1266
|
-
const inviteWorkspaceMember = (variables, signal) => controlPlaneFetch({
|
1267
|
-
|
1268
|
-
|
1269
|
-
|
1270
|
-
|
1271
|
-
|
1272
|
-
const
|
3299
|
+
const inviteWorkspaceMember = (variables, signal) => controlPlaneFetch({
|
3300
|
+
url: "/workspaces/{workspaceId}/invites",
|
3301
|
+
method: "post",
|
3302
|
+
...variables,
|
3303
|
+
signal
|
3304
|
+
});
|
3305
|
+
const updateWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3306
|
+
url: "/workspaces/{workspaceId}/invites/{inviteId}",
|
3307
|
+
method: "patch",
|
3308
|
+
...variables,
|
3309
|
+
signal
|
3310
|
+
});
|
3311
|
+
const cancelWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3312
|
+
url: "/workspaces/{workspaceId}/invites/{inviteId}",
|
3313
|
+
method: "delete",
|
3314
|
+
...variables,
|
3315
|
+
signal
|
3316
|
+
});
|
3317
|
+
const acceptWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3318
|
+
url: "/workspaces/{workspaceId}/invites/{inviteKey}/accept",
|
3319
|
+
method: "post",
|
3320
|
+
...variables,
|
3321
|
+
signal
|
3322
|
+
});
|
3323
|
+
const resendWorkspaceMemberInvite = (variables, signal) => controlPlaneFetch({
|
3324
|
+
url: "/workspaces/{workspaceId}/invites/{inviteId}/resend",
|
3325
|
+
method: "post",
|
3326
|
+
...variables,
|
3327
|
+
signal
|
3328
|
+
});
|
3329
|
+
const listClusters = (variables, signal) => controlPlaneFetch({
|
3330
|
+
url: "/workspaces/{workspaceId}/clusters",
|
3331
|
+
method: "get",
|
3332
|
+
...variables,
|
3333
|
+
signal
|
3334
|
+
});
|
3335
|
+
const createCluster = (variables, signal) => controlPlaneFetch({
|
3336
|
+
url: "/workspaces/{workspaceId}/clusters",
|
3337
|
+
method: "post",
|
3338
|
+
...variables,
|
3339
|
+
signal
|
3340
|
+
});
|
1273
3341
|
const getCluster = (variables, signal) => controlPlaneFetch({
|
1274
3342
|
url: "/workspaces/{workspaceId}/clusters/{clusterId}",
|
1275
3343
|
method: "get",
|
1276
3344
|
...variables,
|
1277
3345
|
signal
|
1278
3346
|
});
|
1279
|
-
const updateCluster = (variables, signal) => controlPlaneFetch({
|
3347
|
+
const updateCluster = (variables, signal) => controlPlaneFetch({
|
3348
|
+
url: "/workspaces/{workspaceId}/clusters/{clusterId}",
|
3349
|
+
method: "patch",
|
3350
|
+
...variables,
|
3351
|
+
signal
|
3352
|
+
});
|
3353
|
+
const deleteCluster = (variables, signal) => controlPlaneFetch({
|
3354
|
+
url: "/workspaces/{workspaceId}/clusters/{clusterId}",
|
3355
|
+
method: "delete",
|
3356
|
+
...variables,
|
3357
|
+
signal
|
3358
|
+
});
|
1280
3359
|
const getDatabaseList = (variables, signal) => controlPlaneFetch({
|
1281
3360
|
url: "/workspaces/{workspaceId}/dbs",
|
1282
3361
|
method: "get",
|
1283
3362
|
...variables,
|
1284
3363
|
signal
|
1285
3364
|
});
|
1286
|
-
const createDatabase = (variables, signal) => controlPlaneFetch({
|
3365
|
+
const createDatabase = (variables, signal) => controlPlaneFetch({
|
3366
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
3367
|
+
method: "put",
|
3368
|
+
...variables,
|
3369
|
+
signal
|
3370
|
+
});
|
1287
3371
|
const deleteDatabase = (variables, signal) => controlPlaneFetch({
|
1288
3372
|
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
1289
3373
|
method: "delete",
|
1290
3374
|
...variables,
|
1291
3375
|
signal
|
1292
3376
|
});
|
1293
|
-
const getDatabaseMetadata = (variables, signal) => controlPlaneFetch({
|
1294
|
-
|
1295
|
-
|
1296
|
-
|
1297
|
-
|
1298
|
-
|
3377
|
+
const getDatabaseMetadata = (variables, signal) => controlPlaneFetch({
|
3378
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
3379
|
+
method: "get",
|
3380
|
+
...variables,
|
3381
|
+
signal
|
3382
|
+
});
|
3383
|
+
const updateDatabaseMetadata = (variables, signal) => controlPlaneFetch({
|
3384
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}",
|
3385
|
+
method: "patch",
|
3386
|
+
...variables,
|
3387
|
+
signal
|
3388
|
+
});
|
3389
|
+
const renameDatabase = (variables, signal) => controlPlaneFetch({
|
3390
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/rename",
|
3391
|
+
method: "post",
|
3392
|
+
...variables,
|
3393
|
+
signal
|
3394
|
+
});
|
3395
|
+
const getDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
|
3396
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
|
3397
|
+
method: "get",
|
3398
|
+
...variables,
|
3399
|
+
signal
|
3400
|
+
});
|
3401
|
+
const updateDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
|
3402
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
|
3403
|
+
method: "put",
|
3404
|
+
...variables,
|
3405
|
+
signal
|
3406
|
+
});
|
3407
|
+
const deleteDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({
|
3408
|
+
url: "/workspaces/{workspaceId}/dbs/{dbName}/github",
|
3409
|
+
method: "delete",
|
3410
|
+
...variables,
|
3411
|
+
signal
|
3412
|
+
});
|
1299
3413
|
const listRegions = (variables, signal) => controlPlaneFetch({
|
1300
3414
|
url: "/workspaces/{workspaceId}/regions",
|
1301
3415
|
method: "get",
|
@@ -1320,6 +3434,8 @@ const operationsByTag$1 = {
 getWorkspace,
 updateWorkspace,
 deleteWorkspace,
+getWorkspaceSettings,
+updateWorkspaceSettings,
 getWorkspaceMembersList,
 updateWorkspaceMemberRole,
 removeWorkspaceMember
@@ -1331,7 +3447,13 @@ const operationsByTag$1 = {
 acceptWorkspaceMemberInvite,
 resendWorkspaceMemberInvite
 },
-xbcontrolOther: {
+xbcontrolOther: {
+listClusters,
+createCluster,
+getCluster,
+updateCluster,
+deleteCluster
+},
 databases: {
 getDatabaseList,
 createDatabase,
@@ -1608,7 +3730,7 @@ var __privateAdd$5 = (obj, member, value) => {
 };
 var __privateSet$3 = (obj, member, value, setter) => {
 __accessCheck$5(obj, member, "write to private field");
-
+member.set(obj, value);
 return value;
 };
 var _query, _page;
@@ -1787,7 +3909,7 @@ var __privateAdd$4 = (obj, member, value) => {
 };
 var __privateSet$2 = (obj, member, value, setter) => {
 __accessCheck$4(obj, member, "write to private field");
-
+member.set(obj, value);
 return value;
 };
 var __privateMethod$3 = (obj, member, method) => {
@@ -2023,8 +4145,8 @@ cleanFilterConstraint_fn = function(column, value) {
 if (columnType === "multiple" && (isString(value) || isStringArray(value))) {
 return { $includes: value };
 }
-if (columnType === "link" && isObject(value) && isString(value.
-return value.
+if (columnType === "link" && isObject(value) && isString(value.xata_id)) {
+return value.xata_id;
 }
 return value;
 };
@@ -2052,12 +4174,7 @@ const RecordColumnTypes = [
 "json"
 ];
 function isIdentifiable(x) {
-return isObject(x) && isString(x?.
-}
-function isXataRecord(x) {
-const record = x;
-const metadata = record?.getMetadata();
-return isIdentifiable(x) && isObject(metadata) && typeof metadata.version === "number";
+return isObject(x) && isString(x?.xata_id);
 }
 
 function isValidExpandedColumn(column) {
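As a quick illustration (not from the diff itself), the slimmed-down identity check now keys purely off the internal xata_id string; the record id below is made up:

// A plain object with a string xata_id is considered identifiable...
isIdentifiable({ xata_id: "rec_abc123" }); // true
// ...while objects without it are not, and the old getMetadata()-based
// isXataRecord check no longer exists in this module.
isIdentifiable({ id: "rec_abc123" });      // false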
@@ -2120,7 +4237,7 @@ var __privateAdd$3 = (obj, member, value) => {
|
|
2120
4237
|
};
|
2121
4238
|
var __privateSet$1 = (obj, member, value, setter) => {
|
2122
4239
|
__accessCheck$3(obj, member, "write to private field");
|
2123
|
-
|
4240
|
+
member.set(obj, value);
|
2124
4241
|
return value;
|
2125
4242
|
};
|
2126
4243
|
var __privateMethod$2 = (obj, member, method) => {
|
@@ -2184,11 +4301,14 @@ class RestRepository extends Query {
|
|
2184
4301
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2185
4302
|
return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: true, ifVersion });
|
2186
4303
|
}
|
2187
|
-
if (isObject(a) && isString(a.
|
2188
|
-
if (a.
|
4304
|
+
if (isObject(a) && isString(a.xata_id)) {
|
4305
|
+
if (a.xata_id === "")
|
2189
4306
|
throw new Error("The id can't be empty");
|
2190
4307
|
const columns = isValidSelectableColumns(b) ? b : void 0;
|
2191
|
-
return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a.
|
4308
|
+
return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, {
|
4309
|
+
createOnly: true,
|
4310
|
+
ifVersion
|
4311
|
+
});
|
2192
4312
|
}
|
2193
4313
|
if (isObject(a)) {
|
2194
4314
|
const columns = isValidSelectableColumns(b) ? b : void 0;
|
@@ -2204,9 +4324,9 @@ class RestRepository extends Query {
|
|
2204
4324
|
if (a.length === 0)
|
2205
4325
|
return [];
|
2206
4326
|
const ids = a.map((item) => extractId(item));
|
2207
|
-
const finalObjects = await this.getAll({ filter: {
|
4327
|
+
const finalObjects = await this.getAll({ filter: { xata_id: { $any: compact(ids) } }, columns });
|
2208
4328
|
const dictionary = finalObjects.reduce((acc, object) => {
|
2209
|
-
acc[object.
|
4329
|
+
acc[object.xata_id] = object;
|
2210
4330
|
return acc;
|
2211
4331
|
}, {});
|
2212
4332
|
return ids.map((id2) => dictionary[id2 ?? ""] ?? null);
|
@@ -2268,7 +4388,7 @@ class RestRepository extends Query {
|
|
2268
4388
|
if (Array.isArray(a)) {
|
2269
4389
|
if (a.length === 0)
|
2270
4390
|
return [];
|
2271
|
-
const existing = await this.read(a, ["
|
4391
|
+
const existing = await this.read(a, ["xata_id"]);
|
2272
4392
|
const updates = a.filter((_item, index) => existing[index] !== null);
|
2273
4393
|
await __privateMethod$2(this, _updateRecords, updateRecords_fn).call(this, updates, {
|
2274
4394
|
ifVersion,
|
@@ -2283,9 +4403,9 @@ class RestRepository extends Query {
|
|
2283
4403
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2284
4404
|
return await __privateMethod$2(this, _updateRecordWithID, updateRecordWithID_fn).call(this, a, b, columns, { ifVersion });
|
2285
4405
|
}
|
2286
|
-
if (isObject(a) && isString(a.
|
4406
|
+
if (isObject(a) && isString(a.xata_id)) {
|
2287
4407
|
const columns = isValidSelectableColumns(b) ? b : void 0;
|
2288
|
-
return await __privateMethod$2(this, _updateRecordWithID, updateRecordWithID_fn).call(this, a.
|
4408
|
+
return await __privateMethod$2(this, _updateRecordWithID, updateRecordWithID_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, { ifVersion });
|
2289
4409
|
}
|
2290
4410
|
} catch (error) {
|
2291
4411
|
if (error.status === 422)
|
@@ -2334,16 +4454,16 @@ class RestRepository extends Query {
|
|
2334
4454
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2335
4455
|
return await __privateMethod$2(this, _upsertRecordWithID, upsertRecordWithID_fn).call(this, a, b, columns, { ifVersion });
|
2336
4456
|
}
|
2337
|
-
if (isObject(a) && isString(a.
|
2338
|
-
if (a.
|
4457
|
+
if (isObject(a) && isString(a.xata_id)) {
|
4458
|
+
if (a.xata_id === "")
|
2339
4459
|
throw new Error("The id can't be empty");
|
2340
4460
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2341
|
-
return await __privateMethod$2(this, _upsertRecordWithID, upsertRecordWithID_fn).call(this, a.
|
4461
|
+
return await __privateMethod$2(this, _upsertRecordWithID, upsertRecordWithID_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, { ifVersion });
|
2342
4462
|
}
|
2343
4463
|
if (!isDefined(a) && isObject(b)) {
|
2344
4464
|
return await this.create(b, c);
|
2345
4465
|
}
|
2346
|
-
if (isObject(a) && !isDefined(a.
|
4466
|
+
if (isObject(a) && !isDefined(a.xata_id)) {
|
2347
4467
|
return await this.create(a, b);
|
2348
4468
|
}
|
2349
4469
|
throw new Error("Invalid arguments for createOrUpdate method");
|
@@ -2366,16 +4486,19 @@ class RestRepository extends Query {
|
|
2366
4486
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2367
4487
|
return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: false, ifVersion });
|
2368
4488
|
}
|
2369
|
-
if (isObject(a) && isString(a.
|
2370
|
-
if (a.
|
4489
|
+
if (isObject(a) && isString(a.xata_id)) {
|
4490
|
+
if (a.xata_id === "")
|
2371
4491
|
throw new Error("The id can't be empty");
|
2372
4492
|
const columns = isValidSelectableColumns(c) ? c : void 0;
|
2373
|
-
return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a.
|
4493
|
+
return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a.xata_id, { ...a, xata_id: void 0 }, columns, {
|
4494
|
+
createOnly: false,
|
4495
|
+
ifVersion
|
4496
|
+
});
|
2374
4497
|
}
|
2375
4498
|
if (!isDefined(a) && isObject(b)) {
|
2376
4499
|
return await this.create(b, c);
|
2377
4500
|
}
|
2378
|
-
if (isObject(a) && !isDefined(a.
|
4501
|
+
if (isObject(a) && !isDefined(a.xata_id)) {
|
2379
4502
|
return await this.create(a, b);
|
2380
4503
|
}
|
2381
4504
|
throw new Error("Invalid arguments for createOrReplace method");
|
@@ -2389,8 +4512,8 @@ class RestRepository extends Query {
|
|
2389
4512
|
const ids = a.map((o) => {
|
2390
4513
|
if (isString(o))
|
2391
4514
|
return o;
|
2392
|
-
if (isString(o.
|
2393
|
-
return o.
|
4515
|
+
if (isString(o.xata_id))
|
4516
|
+
return o.xata_id;
|
2394
4517
|
throw new Error("Invalid arguments for delete method");
|
2395
4518
|
});
|
2396
4519
|
const columns = isValidSelectableColumns(b) ? b : ["*"];
|
@@ -2401,8 +4524,8 @@ class RestRepository extends Query {
|
|
2401
4524
|
if (isString(a)) {
|
2402
4525
|
return __privateMethod$2(this, _deleteRecord, deleteRecord_fn).call(this, a, b);
|
2403
4526
|
}
|
2404
|
-
if (isObject(a) && isString(a.
|
2405
|
-
return __privateMethod$2(this, _deleteRecord, deleteRecord_fn).call(this, a.
|
4527
|
+
if (isObject(a) && isString(a.xata_id)) {
|
4528
|
+
return __privateMethod$2(this, _deleteRecord, deleteRecord_fn).call(this, a.xata_id, b);
|
2406
4529
|
}
|
2407
4530
|
throw new Error("Invalid arguments for delete method");
|
2408
4531
|
});
|
@@ -2664,7 +4787,7 @@ _updateRecordWithID = new WeakSet();
|
|
2664
4787
|
updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVersion }) {
|
2665
4788
|
if (!recordId)
|
2666
4789
|
return null;
|
2667
|
-
const {
|
4790
|
+
const { xata_id: _id, ...record } = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
|
2668
4791
|
try {
|
2669
4792
|
const response = await updateRecordWithID({
|
2670
4793
|
pathParams: {
|
@@ -2689,9 +4812,9 @@ updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
|
|
2689
4812
|
};
|
2690
4813
|
_updateRecords = new WeakSet();
|
2691
4814
|
updateRecords_fn = async function(objects, { ifVersion, upsert }) {
|
2692
|
-
const operations = await promiseMap(objects, async ({
|
4815
|
+
const operations = await promiseMap(objects, async ({ xata_id, ...object }) => {
|
2693
4816
|
const fields = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
|
2694
|
-
return { update: { table: __privateGet$2(this, _table), id, ifVersion, upsert, fields } };
|
4817
|
+
return { update: { table: __privateGet$2(this, _table), id: xata_id, ifVersion, upsert, fields } };
|
2695
4818
|
});
|
2696
4819
|
const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
|
2697
4820
|
const ids = [];
|
@@ -2796,12 +4919,12 @@ transformObjectToApi_fn = async function(object) {
 throw new Error(`Table ${__privateGet$2(this, _table)} not found in schema`);
 const result = {};
 for (const [key, value] of Object.entries(object)) {
-if (
+if (["xata_version", "xata_createdat", "xata_updatedat"].includes(key))
 continue;
 const type = schema.columns.find((column) => column.name === key)?.type;
 switch (type) {
 case "link": {
-result[key] = isIdentifiable(value) ? value.
+result[key] = isIdentifiable(value) ? value.xata_id : value;
 break;
 }
 case "datetime": {
@@ -2825,8 +4948,7 @@ transformObjectToApi_fn = async function(object) {
 };
 const initObject = (db, schemaTables, table, object, selectedColumns) => {
 const data = {};
-
-Object.assign(data, rest);
+Object.assign(data, { ...object });
 const { columns } = schemaTables.find(({ name }) => name === table) ?? {};
 if (!columns)
 console.error(`Table ${table} not found in schema`);
@@ -2889,28 +5011,21 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
 }
 }
 const record = { ...data };
-const metadata = xata !== void 0 ? { ...xata, createdAt: new Date(xata.createdAt), updatedAt: new Date(xata.updatedAt) } : void 0;
 record.read = function(columns2) {
-return db[table].read(record["
+return db[table].read(record["xata_id"], columns2);
 };
 record.update = function(data2, b, c) {
 const columns2 = isValidSelectableColumns(b) ? b : ["*"];
 const ifVersion = parseIfVersion(b, c);
-return db[table].update(record["
+return db[table].update(record["xata_id"], data2, columns2, { ifVersion });
 };
 record.replace = function(data2, b, c) {
 const columns2 = isValidSelectableColumns(b) ? b : ["*"];
 const ifVersion = parseIfVersion(b, c);
-return db[table].createOrReplace(record["
+return db[table].createOrReplace(record["xata_id"], data2, columns2, { ifVersion });
 };
 record.delete = function() {
-return db[table].delete(record["
-};
-if (metadata !== void 0) {
-record.xata = Object.freeze(metadata);
-}
-record.getMetadata = function() {
-return record.xata;
+return db[table].delete(record["xata_id"]);
 };
 record.toSerializable = function() {
 return JSON.parse(JSON.stringify(record));
@@ -2918,7 +5033,7 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
 record.toString = function() {
 return JSON.stringify(record);
 };
-for (const prop of ["read", "update", "replace", "delete", "
+for (const prop of ["read", "update", "replace", "delete", "toSerializable", "toString"]) {
 Object.defineProperty(record, prop, { enumerable: false });
 }
 Object.freeze(record);
@@ -2927,8 +5042,8 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
 function extractId(value) {
 if (isString(value))
 return value;
-if (isObject(value) && isString(value.
-return value.
+if (isObject(value) && isString(value.xata_id))
+return value.xata_id;
 return void 0;
 }
 function isValidColumn(columns, column) {
@@ -3116,7 +5231,7 @@ class SearchPlugin extends XataPlugin {
 return {
 totalCount,
 records: records.map((record) => {
-const 
+const table = record.xata_table;
 return { table, record: initObject(this.db, pluginOptions.tables, table, record, ["*"]) };
 })
 };
@@ -3124,7 +5239,7 @@ class SearchPlugin extends XataPlugin {
 byTable: async (query, options = {}) => {
 const { records: rawRecords, totalCount } = await __privateMethod$1(this, _search, search_fn).call(this, query, options, pluginOptions);
 const records = rawRecords.reduce((acc, record) => {
-const 
+const table = record.xata_table;
 const items = acc[table] ?? [];
 const item = initObject(this.db, pluginOptions.tables, table, record, ["*"]);
 return { ...acc, [table]: [...items, item] };
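A brief sketch of how the renamed field is typically consumed; the table names, query, and exact client wiring below are assumptions, not shown in this hunk:

// Each raw search hit now carries its source table as record.xata_table,
// which byTable uses as the grouping key.
const { records } = await xata.search.byTable("hello world");
// records is keyed by table name, e.g. { users: [...], posts: [...] }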
@@ -3198,19 +5313,19 @@ function prepareParams(param1, param2) {
 return { statement, params: param2?.map((value) => prepareValue(value)) };
 }
 if (isObject(param1)) {
-const { statement, params, consistency } = param1;
-return { statement, params: params?.map((value) => prepareValue(value)), consistency };
+const { statement, params, consistency, responseType } = param1;
+return { statement, params: params?.map((value) => prepareValue(value)), consistency, responseType };
 }
 throw new Error("Invalid query");
 }
 
 class SQLPlugin extends XataPlugin {
 build(pluginOptions) {
-
+const sqlFunction = async (query, ...parameters) => {
 if (!isParamsObject(query) && (!isTemplateStringsArray(query) || !Array.isArray(parameters))) {
 throw new Error("Invalid usage of `xata.sql`. Please use it as a tagged template or with an object.");
 }
-const { statement, params, consistency } = prepareParams(query, parameters);
+const { statement, params, consistency, responseType } = prepareParams(query, parameters);
 const {
 records,
 rows,
@@ -3218,11 +5333,13 @@ class SQLPlugin extends XataPlugin {
 columns = []
 } = await sqlQuery({
 pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
-body: { statement, params, consistency },
+body: { statement, params, consistency, responseType },
 ...pluginOptions
 });
 return { records, rows, warning, columns };
 };
+sqlFunction.connectionString = buildConnectionString(pluginOptions);
+return sqlFunction;
 }
 }
 function isTemplateStringsArray(strings) {
@@ -3231,6 +5348,33 @@ function isTemplateStringsArray(strings) {
 function isParamsObject(params) {
   return isObject(params) && "statement" in params;
 }
+function buildDomain(host, region) {
+  switch (host) {
+    case "production":
+      return `${region}.sql.xata.sh`;
+    case "staging":
+      return `${region}.sql.staging-xata.dev`;
+    case "dev":
+      return `${region}.sql.dev-xata.dev`;
+    case "local":
+      return "localhost:7654";
+    default:
+      throw new Error("Invalid host provider");
+  }
+}
+function buildConnectionString({ apiKey, workspacesApiUrl, branch }) {
+  const url = isString(workspacesApiUrl) ? workspacesApiUrl : workspacesApiUrl("", {});
+  const parts = parseWorkspacesUrlParts(url);
+  if (!parts)
+    throw new Error("Invalid workspaces URL");
+  const { workspace: workspaceSlug, region, database, host } = parts;
+  const domain = buildDomain(host, region);
+  const workspace = workspaceSlug.split("-").pop();
+  if (!workspace || !region || !database || !apiKey || !branch) {
+    throw new Error("Unable to build xata connection string");
+  }
+  return `postgresql://${workspace}:${apiKey}@${domain}/${database}:${branch}?sslmode=require`;
+}
 
 class TransactionPlugin extends XataPlugin {
   build(pluginOptions) {
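
The new buildDomain/buildConnectionString helpers above derive a Postgres URL from the parsed workspaces API URL plus the plugin's apiKey and branch, throwing if any piece is missing. A standalone sketch of the resulting shape, using made-up workspace, key, and database values purely for illustration:

// Hypothetical values; only the string shape mirrors buildConnectionString above.
const workspace = "ws1234";            // last "-"-separated segment of the workspace slug
const apiKey = "xau_exampleKey";       // placeholder API key
const region = "us-east-1";
const database = "mydb";
const branch = "main";

// For the "production" host, buildDomain returns `${region}.sql.xata.sh`.
const domain = `${region}.sql.xata.sh`;

const connectionString =
  `postgresql://${workspace}:${apiKey}@${domain}/${database}:${branch}?sslmode=require`;
// -> "postgresql://ws1234:xau_exampleKey@us-east-1.sql.xata.sh/mydb:main?sslmode=require"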
@@ -3262,7 +5406,7 @@ var __privateAdd = (obj, member, value) => {
 };
 var __privateSet = (obj, member, value, setter) => {
   __accessCheck(obj, member, "write to private field");
-
+  member.set(obj, value);
   return value;
 };
 var __privateMethod = (obj, member, method) => {
@@ -3281,7 +5425,8 @@ const buildClient = (plugins) => {
     const pluginOptions = {
       ...__privateMethod(this, _getFetchProps, getFetchProps_fn).call(this, safeOptions),
       host: safeOptions.host,
-      tables
+      tables,
+      branch: safeOptions.branch
     };
     const db = new SchemaPlugin().build(pluginOptions);
     const search = new SearchPlugin(db).build(pluginOptions);
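
The buildClient hunk above is what feeds the SQL plugin: branch is now copied from safeOptions into pluginOptions, and buildConnectionString throws if it is missing. A sketch of constructing a client so the branch reaches the plugins, assuming the option names (apiKey, databaseURL, branch) carried over from earlier versions; the URL and key are placeholders.

import { buildClient } from "@xata.io/client";

// Placeholder credentials and URL; only the branch pass-through is shown by this diff.
const DatabaseClient = buildClient();
const xata = new DatabaseClient({
  apiKey: "xau_exampleKey",
  databaseURL: "https://my-workspace-abc123.us-east-1.xata.sh/db/mydb",
  branch: "main" // forwarded to every plugin via pluginOptions.branch
});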
@@ -3462,5 +5607,5 @@ class XataError extends Error {
   }
 }
 
-export { BaseClient, FetcherError, FilesPlugin, operationsByTag as Operations, PAGINATION_DEFAULT_OFFSET, PAGINATION_DEFAULT_SIZE, PAGINATION_MAX_OFFSET, PAGINATION_MAX_SIZE, Page, PageRecordArray, Query, RecordArray, RecordColumnTypes, Repository, RestRepository, SQLPlugin, SchemaPlugin, SearchPlugin, Serializer, TransactionPlugin, XataApiClient, XataApiPlugin, XataError, XataFile, XataPlugin, acceptWorkspaceMemberInvite, adaptTable, addGitBranchesEntry, addTableColumn, aggregateTable, applyBranchSchemaEdit, applyMigration, askTable, askTableSession, branchTransaction, buildClient, buildPreviewBranchName, buildProviderString, bulkInsertTableRecords, cancelWorkspaceMemberInvite, compareBranchSchemas, compareBranchWithUserSchema, compareMigrationRequest, contains, copyBranch, createBranch, createCluster, createDatabase, createMigrationRequest, createTable, createUserAPIKey, createWorkspace, deleteBranch, deleteColumn, deleteDatabase, deleteDatabaseGithubSettings, deleteFile, deleteFileItem, deleteOAuthAccessToken, deleteRecord, deleteTable, deleteUser, deleteUserAPIKey, deleteUserOAuthClient, deleteWorkspace, deserialize, endsWith, equals, executeBranchMigrationPlan, exists, fileAccess, fileUpload, ge, getAPIKey, getAuthorizationCode, getBranch, getBranchDetails, getBranchList, getBranchMetadata, getBranchMigrationHistory, getBranchMigrationJobStatus, getBranchMigrationPlan, getBranchSchemaHistory, getBranchStats, getCluster, getColumn, getDatabaseGithubSettings, getDatabaseList, getDatabaseMetadata, getDatabaseSettings, getDatabaseURL, getFile, getFileItem, getGitBranchesMapping, getHostUrl, getMigrationHistory, getMigrationJobStatus, getMigrationRequest, getMigrationRequestIsMerged, getPreviewBranch, getRecord, getSchema, getTableColumns, getTableSchema, getUser, getUserAPIKeys, getUserOAuthAccessTokens, getUserOAuthClients, getWorkspace, getWorkspaceMembersList, getWorkspacesList, grantAuthorizationCode, greaterEquals, greaterThan, greaterThanEquals, gt, gte, iContains, iPattern, includes, includesAll, includesAny, includesNone, insertRecord, insertRecordWithID, inviteWorkspaceMember, is, isCursorPaginationOptions, isHostProviderAlias, isHostProviderBuilder, isIdentifiable, isNot, isValidExpandedColumn, isValidSelectableColumns,
+export { BaseClient, Buffer, FetcherError, FilesPlugin, operationsByTag as Operations, PAGINATION_DEFAULT_OFFSET, PAGINATION_DEFAULT_SIZE, PAGINATION_MAX_OFFSET, PAGINATION_MAX_SIZE, Page, PageRecordArray, Query, RecordArray, RecordColumnTypes, Repository, RestRepository, SQLPlugin, SchemaPlugin, SearchPlugin, Serializer, TransactionPlugin, XataApiClient, XataApiPlugin, XataError, XataFile, XataPlugin, acceptWorkspaceMemberInvite, adaptAllTables, adaptTable, addGitBranchesEntry, addTableColumn, aggregateTable, applyBranchSchemaEdit, applyMigration, askTable, askTableSession, branchTransaction, buildClient, buildPreviewBranchName, buildProviderString, bulkInsertTableRecords, cancelWorkspaceMemberInvite, compareBranchSchemas, compareBranchWithUserSchema, compareMigrationRequest, contains, copyBranch, createBranch, createCluster, createDatabase, createMigrationRequest, createTable, createUserAPIKey, createWorkspace, deleteBranch, deleteCluster, deleteColumn, deleteDatabase, deleteDatabaseGithubSettings, deleteFile, deleteFileItem, deleteOAuthAccessToken, deleteRecord, deleteTable, deleteUser, deleteUserAPIKey, deleteUserOAuthClient, deleteWorkspace, deserialize, endsWith, equals, executeBranchMigrationPlan, exists, fileAccess, fileUpload, ge, getAPIKey, getAuthorizationCode, getBranch, getBranchDetails, getBranchList, getBranchMetadata, getBranchMigrationHistory, getBranchMigrationJobStatus, getBranchMigrationPlan, getBranchSchemaHistory, getBranchStats, getCluster, getColumn, getDatabaseGithubSettings, getDatabaseList, getDatabaseMetadata, getDatabaseSettings, getDatabaseURL, getFile, getFileItem, getGitBranchesMapping, getHostUrl, getMigrationHistory, getMigrationJobStatus, getMigrationRequest, getMigrationRequestIsMerged, getPreviewBranch, getRecord, getSchema, getTableColumns, getTableSchema, getUser, getUserAPIKeys, getUserOAuthAccessTokens, getUserOAuthClients, getWorkspace, getWorkspaceMembersList, getWorkspaceSettings, getWorkspacesList, grantAuthorizationCode, greaterEquals, greaterThan, greaterThanEquals, gt, gte, iContains, iPattern, includes, includesAll, includesAny, includesNone, insertRecord, insertRecordWithID, inviteWorkspaceMember, is, isCursorPaginationOptions, isHostProviderAlias, isHostProviderBuilder, isIdentifiable, isNot, isValidExpandedColumn, isValidSelectableColumns, le, lessEquals, lessThan, lessThanEquals, listClusters, listMigrationRequestsCommits, listRegions, lt, lte, mergeMigrationRequest, notExists, operationsByTag, parseProviderString, parseWorkspacesUrlParts, pattern, previewBranchSchemaEdit, pushBranchMigrations, putFile, putFileItem, queryMigrationRequests, queryTable, removeGitBranchesEntry, removeWorkspaceMember, renameDatabase, resendWorkspaceMemberInvite, resolveBranch, searchBranch, searchTable, serialize, setTableSchema, sqlQuery, startsWith, summarizeTable, transformImage, updateBranchMetadata, updateBranchSchema, updateCluster, updateColumn, updateDatabaseGithubSettings, updateDatabaseMetadata, updateDatabaseSettings, updateMigrationRequest, updateOAuthAccessToken, updateRecordWithID, updateTable, updateUser, updateWorkspace, updateWorkspaceMemberInvite, updateWorkspaceMemberRole, updateWorkspaceSettings, upsertRecordWithID, vectorSearchTable };
 //# sourceMappingURL=index.mjs.map
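
Compared with the removed export line (which is cut off above at isValidSelectableColumns, so later names cannot be checked one-for-one), the new line visibly adds Buffer, adaptAllTables, deleteCluster, and getWorkspaceSettings, among others. An illustrative import of a few names from the new export line:

// Illustration only: these identifiers come straight from the new export list above.
import {
  Buffer,
  adaptAllTables,
  deleteCluster,
  getWorkspaceSettings,
  updateWorkspaceSettings
} from "@xata.io/client";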