@xata.io/client 0.0.0-alpha.vfd68d20 → 0.0.0-alpha.vfda8f2e
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +16 -0
- package/dist/index.cjs +248 -68
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.ts +82 -44
- package/dist/index.mjs +247 -47
- package/dist/index.mjs.map +1 -1
- package/package.json +2 -2
- package/tsconfig.json +1 -1
- package/mod.ts +0 -2
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,21 @@
 # @xata.io/client
 
+## 0.20.2
+
+### Patch Changes
+
+- [#756](https://github.com/xataio/client-ts/pull/756) [`27b73745`](https://github.com/xataio/client-ts/commit/27b737451227cf12774115ccb70649f6bbf76180) Thanks [@SferaDev](https://github.com/SferaDev)! - Fix update method with id in payload
+
+- [#726](https://github.com/xataio/client-ts/pull/726) [`47a1f878`](https://github.com/xataio/client-ts/commit/47a1f87850a7178dad656a16d2584eee6ce68f29) Thanks [@SferaDev](https://github.com/SferaDev)! - Use transactions API internally for bulk operations
+
+- [#726](https://github.com/xataio/client-ts/pull/726) [`adde9b10`](https://github.com/xataio/client-ts/commit/adde9b10708182295dee07c203c7bf737806e49e) Thanks [@SferaDev](https://github.com/SferaDev)! - Allow sending bulk operations of more than the limit
+
+## 0.20.1
+
+### Patch Changes
+
+- [#744](https://github.com/xataio/client-ts/pull/744) [`98298ca`](https://github.com/xataio/client-ts/commit/98298ca1312a2256ee3e9d2700a9f3d3e316abe5) Thanks [@xata-bot](https://github.com/xata-bot)! - Add deno entry point
+
 ## 0.20.0
 
 ### Minor Changes
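Note: the two #726 entries describe the same internal change from two angles. Array calls on a table (`create`, `update`, `createOrUpdate`, `delete`) are now translated into transaction operations and split into chunks of at most 1000, so payloads larger than the old single-request limit are accepted. A minimal usage sketch (the generated `./xata` client and the `users` table are assumptions, not part of this package):

```ts
// Sketch only: './xata' and the `users` table come from a generated client.
import { XataClient } from './xata';

async function main() {
  const xata = new XataClient({ apiKey: process.env.XATA_API_KEY });

  // 2500 records exceed the former per-request limit; with 0.20.2 the client
  // chunks them (BULK_OPERATION_MAX_SIZE = 1000) and submits each chunk as a
  // transaction, so a single call is enough.
  const payload = Array.from({ length: 2500 }, (_, i) => ({ name: `user-${i}` }));
  const created = await xata.db.users.create(payload);
  console.log(created.length); // 2500
}

main().catch(console.error);
```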
package/dist/index.cjs
CHANGED
@@ -1,25 +1,5 @@
 'use strict';
 
-Object.defineProperty(exports, '__esModule', { value: true });
-
-function _interopNamespace(e) {
-  if (e && e.__esModule) return e;
-  var n = Object.create(null);
-  if (e) {
-    Object.keys(e).forEach(function (k) {
-      if (k !== 'default') {
-        var d = Object.getOwnPropertyDescriptor(e, k);
-        Object.defineProperty(n, k, d.get ? d : {
-          enumerable: true,
-          get: function () { return e[k]; }
-        });
-      }
-    });
-  }
-  n["default"] = e;
-  return Object.freeze(n);
-}
-
 const defaultTrace = async (_name, fn, _options) => {
   return await fn({
     setAttributes: () => {
@@ -63,6 +43,18 @@ function isStringArray(value) {
 function isNumber(value) {
   return isDefined(value) && typeof value === "number";
 }
+function parseNumber(value) {
+  if (isNumber(value)) {
+    return value;
+  }
+  if (isString(value)) {
+    const parsed = Number(value);
+    if (!Number.isNaN(parsed)) {
+      return parsed;
+    }
+  }
+  return void 0;
+}
 function toBase64(value) {
   try {
     return btoa(value);
@@ -82,6 +74,16 @@ function deepMerge(a, b) {
   }
   return result;
 }
+function chunk(array, chunkSize) {
+  const result = [];
+  for (let i = 0; i < array.length; i += chunkSize) {
+    result.push(array.slice(i, i + chunkSize));
+  }
+  return result;
+}
+async function timeout(ms) {
+  return new Promise((resolve) => setTimeout(resolve, ms));
+}
 
 function getEnvironment() {
   try {
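The two helpers added here underpin the bulk and rate-limit changes later in this diff: `chunk` splits an operation list into fixed-size slices and `timeout` is the sleep used when backing off after a 429. An illustrative TypeScript sketch of their behaviour (not the shipped code):

```ts
// Illustrative only: typed equivalents of the chunk/timeout helpers added above.
function chunk<T>(array: T[], chunkSize: number): T[][] {
  const result: T[][] = [];
  for (let i = 0; i < array.length; i += chunkSize) {
    result.push(array.slice(i, i + chunkSize));
  }
  return result;
}

function timeout(ms: number): Promise<void> {
  return new Promise((resolve) => setTimeout(resolve, ms));
}

chunk(["a", "b", "c", "d", "e"], 2);              // => [["a", "b"], ["c", "d"], ["e"]]
timeout(1000).then(() => console.log("waited ~1s"));
```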
@@ -172,7 +174,7 @@ async function getGitBranch() {
     if (typeof require === "function") {
       return require(nodeModule).execSync(fullCmd, execOptions).trim();
     }
-    const { execSync } = await (
+    const { execSync } = await import(nodeModule);
     return execSync(fullCmd, execOptions).toString().trim();
   } catch (err) {
   }
@@ -194,6 +196,29 @@ function getAPIKey() {
   }
 }
 
+var __accessCheck$8 = (obj, member, msg) => {
+  if (!member.has(obj))
+    throw TypeError("Cannot " + msg);
+};
+var __privateGet$8 = (obj, member, getter) => {
+  __accessCheck$8(obj, member, "read from private field");
+  return getter ? getter.call(obj) : member.get(obj);
+};
+var __privateAdd$8 = (obj, member, value) => {
+  if (member.has(obj))
+    throw TypeError("Cannot add the same private member more than once");
+  member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
+};
+var __privateSet$8 = (obj, member, value, setter) => {
+  __accessCheck$8(obj, member, "write to private field");
+  setter ? setter.call(obj, value) : member.set(obj, value);
+  return value;
+};
+var __privateMethod$4 = (obj, member, method) => {
+  __accessCheck$8(obj, member, "access private method");
+  return method;
+};
+var _fetch, _queue, _concurrency, _enqueue, enqueue_fn;
 function getFetchImplementation(userFetch) {
   const globalFetch = typeof fetch !== "undefined" ? fetch : void 0;
   const fetchImpl = userFetch ?? globalFetch;
@@ -204,8 +229,70 @@ function getFetchImplementation(userFetch) {
   }
   return fetchImpl;
 }
+class ApiRequestPool {
+  constructor(concurrency = 10) {
+    __privateAdd$8(this, _enqueue);
+    __privateAdd$8(this, _fetch, void 0);
+    __privateAdd$8(this, _queue, void 0);
+    __privateAdd$8(this, _concurrency, void 0);
+    __privateSet$8(this, _queue, []);
+    __privateSet$8(this, _concurrency, concurrency);
+    this.running = 0;
+    this.started = 0;
+  }
+  setFetch(fetch2) {
+    __privateSet$8(this, _fetch, fetch2);
+  }
+  getFetch() {
+    if (!__privateGet$8(this, _fetch)) {
+      throw new Error("Fetch not set");
+    }
+    return __privateGet$8(this, _fetch);
+  }
+  request(url, options, context = { start: new Date(), stalled: false }) {
+    const fetch2 = this.getFetch();
+    return __privateMethod$4(this, _enqueue, enqueue_fn).call(this, async () => {
+      const response = await fetch2(url, options);
+      if (response.status === 429) {
+        const rateLimitReset = parseNumber(response.headers?.get("x-ratelimit-reset")) ?? 1;
+        await timeout(rateLimitReset * 1e3);
+        return await this.request(url, options, { ...context, stalled: true });
+      }
+      if (context.stalled) {
+        const stalledTime = new Date().getTime() - context.start.getTime();
+        console.warn(`A request to Xata hit your workspace limits, was retried and stalled for ${stalledTime}ms`);
+      }
+      return response;
+    });
+  }
+}
+_fetch = new WeakMap();
+_queue = new WeakMap();
+_concurrency = new WeakMap();
+_enqueue = new WeakSet();
+enqueue_fn = function(task) {
+  const promise = new Promise((resolve) => __privateGet$8(this, _queue).push(resolve)).finally(() => {
+    this.started--;
+    this.running++;
+  }).then(() => task()).finally(() => {
+    this.running--;
+    const next = __privateGet$8(this, _queue).shift();
+    if (next !== void 0) {
+      this.started++;
+      next();
+    }
+  });
+  if (this.running + this.started < __privateGet$8(this, _concurrency)) {
+    const next = __privateGet$8(this, _queue).shift();
+    if (next !== void 0) {
+      this.started++;
+      next();
+    }
+  }
+  return promise;
+};
 
-const VERSION = "0.0.0-alpha.vfd68d20";
+const VERSION = "0.0.0-alpha.vfda8f2e";
 
 class ErrorWithCause extends Error {
   constructor(message, options) {
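The transpiled private-field helpers (`__privateAdd$8`, `__privateGet$8`, and friends) make the new `ApiRequestPool` hard to read in compiled form. Conceptually it is a FIFO queue that caps concurrent requests (10 by default) and, on an HTTP 429, sleeps for the number of seconds advertised in `x-ratelimit-reset` before retrying. A simplified TypeScript sketch of the same pattern (not the shipped implementation):

```ts
// Simplified sketch of the request-pool pattern above; not the shipped code.
class RequestPool {
  private queue: Array<() => void> = [];
  private running = 0;

  constructor(
    private concurrency = 10,
    private fetchImpl: typeof fetch = (input, init) => fetch(input, init)
  ) {}

  request(url: string, options?: RequestInit): Promise<Response> {
    return this.enqueue(async () => {
      const response = await this.fetchImpl(url, options);
      if (response.status === 429) {
        // Back off for the number of seconds the server advertises, then retry.
        const reset = Number(response.headers.get("x-ratelimit-reset") ?? "1");
        await new Promise((r) => setTimeout(r, (Number.isNaN(reset) ? 1 : reset) * 1000));
        return this.request(url, options);
      }
      return response;
    });
  }

  private async enqueue<T>(task: () => Promise<T>): Promise<T> {
    // Wait for a free slot while the pool is saturated.
    while (this.running >= this.concurrency) {
      await new Promise<void>((resolve) => this.queue.push(resolve));
    }
    this.running++;
    try {
      return await task();
    } finally {
      this.running--;
      // Wake the next queued caller, if any.
      this.queue.shift()?.();
    }
  }
}
```

The shipped class additionally tracks a `started` counter for queued-but-not-yet-running tasks and, once a retried request finally completes, logs a warning with the total time it stalled.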
@@ -248,6 +335,7 @@ function getMessage(data) {
   }
 }
 
+const pool = new ApiRequestPool();
 const resolveUrl = (url, queryParams = {}, pathParams = {}) => {
   const cleanQueryParams = Object.entries(queryParams).reduce((acc, [key, value]) => {
     if (value === void 0 || value === null)
@@ -298,7 +386,8 @@ async function fetch$1({
   sessionID,
   fetchOptions = {}
 }) {
-  return await trace(
+  pool.setFetch(fetchImpl);
+  return await trace(
     `${method.toUpperCase()} ${path}`,
     async ({ setAttributes }) => {
       const baseUrl = buildBaseUrl({ endpoint, path, workspacesApiUrl, pathParams, apiUrl });
@@ -308,7 +397,7 @@ async function fetch$1({
         [TraceAttributes.HTTP_URL]: url,
         [TraceAttributes.HTTP_TARGET]: resolveUrl(path, queryParams, pathParams)
       });
-      const response = await
+      const response = await pool.request(url, {
         ...fetchOptions,
         method: method.toUpperCase(),
         body: body ? JSON.stringify(body) : void 0,
@@ -323,9 +412,6 @@ async function fetch$1({
         },
         signal
       });
-      if (response.status === 204) {
-        return {};
-      }
       const { host, protocol } = parseUrl(response.url);
       const requestId = response.headers?.get("x-request-id") ?? void 0;
       setAttributes({
@@ -335,6 +421,12 @@ async function fetch$1({
         [TraceAttributes.HTTP_HOST]: host,
         [TraceAttributes.HTTP_SCHEME]: protocol?.replace(":", "")
       });
+      if (response.status === 204) {
+        return {};
+      }
+      if (response.status === 429) {
+        throw new FetcherError(response.status, "Rate limit exceeded", requestId);
+      }
       try {
         const jsonResponse = await response.json();
         if (response.ok) {
@@ -1388,6 +1480,19 @@ class RecordsApi {
       ...this.extraProps
     });
   }
+  branchTransaction({
+    workspace,
+    region,
+    database,
+    branch,
+    operations
+  }) {
+    return operationsByTag.records.branchTransaction({
+      pathParams: { workspace, region, dbBranchName: `${database}:${branch}` },
+      body: { operations },
+      ...this.extraProps
+    });
+  }
 }
 class SearchAndFilterApi {
   constructor(extraProps) {
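The new `branchTransaction` method surfaces the transactions endpoint on the API client's records namespace; internally the repository methods call the same endpoint with `"{workspaceId}"`/`"{dbBranch}"` placeholder path params. A hedged usage sketch (workspace, region, database, branch, and the `users` table are placeholders; the operation shapes mirror the insert/update/delete objects built elsewhere in this diff):

```ts
// Sketch only: all identifiers below are placeholders, not values from this diff.
import { XataApiClient } from '@xata.io/client';

async function runTransaction() {
  const api = new XataApiClient({ apiKey: process.env.XATA_API_KEY });

  const { results } = await api.records.branchTransaction({
    workspace: 'my-workspace',
    region: 'us-east-1',
    database: 'my-db',
    branch: 'main',
    operations: [
      { insert: { table: 'users', record: { name: 'Ada' } } },
      { update: { table: 'users', id: 'rec_123', fields: { name: 'Grace' }, upsert: false } },
      { delete: { table: 'users', id: 'rec_456' } }
    ]
  });

  console.log(results.map((r) => r.operation)); // e.g. ['insert', 'update', 'delete']
}
```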
@@ -2153,7 +2258,8 @@ var __privateMethod$2 = (obj, member, method) => {
   __accessCheck$4(obj, member, "access private method");
   return method;
 };
-var _table, _getFetchProps, _db, _cache, _schemaTables$2, _trace, _insertRecordWithoutId, insertRecordWithoutId_fn, _insertRecordWithId, insertRecordWithId_fn,
+var _table, _getFetchProps, _db, _cache, _schemaTables$2, _trace, _insertRecordWithoutId, insertRecordWithoutId_fn, _insertRecordWithId, insertRecordWithId_fn, _insertRecords, insertRecords_fn, _updateRecordWithID, updateRecordWithID_fn, _updateRecords, updateRecords_fn, _upsertRecordWithID, upsertRecordWithID_fn, _deleteRecord, deleteRecord_fn, _deleteRecords, deleteRecords_fn, _setCacheQuery, setCacheQuery_fn, _getCacheQuery, getCacheQuery_fn, _getSchemaTables$1, getSchemaTables_fn$1;
+const BULK_OPERATION_MAX_SIZE = 1e3;
 class Repository extends Query {
 }
 class RestRepository extends Query {
@@ -2165,10 +2271,12 @@ class RestRepository extends Query {
     );
     __privateAdd$4(this, _insertRecordWithoutId);
     __privateAdd$4(this, _insertRecordWithId);
-    __privateAdd$4(this,
+    __privateAdd$4(this, _insertRecords);
     __privateAdd$4(this, _updateRecordWithID);
+    __privateAdd$4(this, _updateRecords);
     __privateAdd$4(this, _upsertRecordWithID);
     __privateAdd$4(this, _deleteRecord);
+    __privateAdd$4(this, _deleteRecords);
     __privateAdd$4(this, _setCacheQuery);
     __privateAdd$4(this, _getCacheQuery);
     __privateAdd$4(this, _getSchemaTables$1);
@@ -2202,20 +2310,22 @@ class RestRepository extends Query {
       if (Array.isArray(a)) {
         if (a.length === 0)
           return [];
-        const
-
+        const ids = await __privateMethod$2(this, _insertRecords, insertRecords_fn).call(this, a, { ifVersion, createOnly: true });
+        const columns = isStringArray(b) ? b : ["*"];
+        const result = await this.read(ids, columns);
+        return result;
       }
       if (isString(a) && isObject(b)) {
         if (a === "")
           throw new Error("The id can't be empty");
         const columns = isStringArray(c) ? c : void 0;
-        return __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: true, ifVersion });
+        return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: true, ifVersion });
       }
       if (isObject(a) && isString(a.id)) {
         if (a.id === "")
           throw new Error("The id can't be empty");
         const columns = isStringArray(b) ? b : void 0;
-        return __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a.id, { ...a, id: void 0 }, columns, { createOnly: true, ifVersion });
+        return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a.id, { ...a, id: void 0 }, columns, { createOnly: true, ifVersion });
       }
       if (isObject(a)) {
         const columns = isStringArray(b) ? b : void 0;
@@ -2290,11 +2400,15 @@ class RestRepository extends Query {
       if (Array.isArray(a)) {
         if (a.length === 0)
           return [];
-
-
-
+        const existing = await this.read(a, ["id"]);
+        const updates = a.filter((_item, index) => existing[index] !== null);
+        await __privateMethod$2(this, _updateRecords, updateRecords_fn).call(this, updates, {
+          ifVersion,
+          upsert: false
+        });
         const columns = isStringArray(b) ? b : ["*"];
-
+        const result = await this.read(a, columns);
+        return result;
       }
       if (isString(a) && isObject(b)) {
         const columns = isStringArray(c) ? c : void 0;
@@ -2332,11 +2446,13 @@ class RestRepository extends Query {
       if (Array.isArray(a)) {
         if (a.length === 0)
           return [];
-
-
-
+        await __privateMethod$2(this, _updateRecords, updateRecords_fn).call(this, a, {
+          ifVersion,
+          upsert: true
+        });
         const columns = isStringArray(b) ? b : ["*"];
-
+        const result = await this.read(a, columns);
+        return result;
       }
       if (isString(a) && isObject(b)) {
         const columns = isStringArray(c) ? c : void 0;
@@ -2355,8 +2471,10 @@ class RestRepository extends Query {
       if (Array.isArray(a)) {
         if (a.length === 0)
           return [];
+        const ids = await __privateMethod$2(this, _insertRecords, insertRecords_fn).call(this, a, { ifVersion, createOnly: false });
         const columns = isStringArray(b) ? b : ["*"];
-
+        const result = await this.read(ids, columns);
+        return result;
       }
       if (isString(a) && isObject(b)) {
         const columns = isStringArray(c) ? c : void 0;
@@ -2374,10 +2492,17 @@ class RestRepository extends Query {
       if (Array.isArray(a)) {
         if (a.length === 0)
           return [];
-
-
-
-
+        const ids = a.map((o) => {
+          if (isString(o))
+            return o;
+          if (isString(o.id))
+            return o.id;
+          throw new Error("Invalid arguments for delete method");
+        });
+        const columns = isStringArray(b) ? b : ["*"];
+        const result = await this.read(a, columns);
+        await __privateMethod$2(this, _deleteRecords, deleteRecords_fn).call(this, ids);
+        return result;
       }
       if (isString(a)) {
         return __privateMethod$2(this, _deleteRecord, deleteRecord_fn).call(this, a, b);
@@ -2545,31 +2670,40 @@ insertRecordWithId_fn = async function(recordId, object, columns = ["*"], { crea
   const schemaTables = await __privateMethod$2(this, _getSchemaTables$1, getSchemaTables_fn$1).call(this);
   return initObject(__privateGet$4(this, _db), schemaTables, __privateGet$4(this, _table), response, columns);
 };
-
-
+_insertRecords = new WeakSet();
+insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
   const fetchProps = await __privateGet$4(this, _getFetchProps).call(this);
-  const
-
-
-
-
-
-
-
-
-
-
-
-
-
+  const chunkedOperations = chunk(
+    objects.map((object) => ({
+      insert: { table: __privateGet$4(this, _table), record: transformObjectLinks(object), createOnly, ifVersion }
+    })),
+    BULK_OPERATION_MAX_SIZE
+  );
+  const ids = [];
+  for (const operations of chunkedOperations) {
+    const { results } = await branchTransaction({
+      pathParams: {
+        workspace: "{workspaceId}",
+        dbBranchName: "{dbBranch}",
+        region: "{region}"
+      },
+      body: { operations },
+      ...fetchProps
+    });
+    for (const result of results) {
+      if (result.operation === "insert") {
+        ids.push(result.id);
+      } else {
+        ids.push(null);
+      }
+    }
+  }
   }
-
-  return response.records?.map((item) => initObject(__privateGet$4(this, _db), schemaTables, __privateGet$4(this, _table), item, columns));
+  return ids;
 };
 _updateRecordWithID = new WeakSet();
 updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVersion }) {
   const fetchProps = await __privateGet$4(this, _getFetchProps).call(this);
-  const record = transformObjectLinks(object);
+  const { id: _id, ...record } = transformObjectLinks(object);
   try {
     const response = await updateRecordWithID({
       pathParams: {
@@ -2592,6 +2726,36 @@ updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
     throw e;
   }
 };
+_updateRecords = new WeakSet();
+updateRecords_fn = async function(objects, { ifVersion, upsert }) {
+  const fetchProps = await __privateGet$4(this, _getFetchProps).call(this);
+  const chunkedOperations = chunk(
+    objects.map(({ id, ...object }) => ({
+      update: { table: __privateGet$4(this, _table), id, ifVersion, upsert, fields: transformObjectLinks(object) }
+    })),
+    BULK_OPERATION_MAX_SIZE
+  );
+  const ids = [];
+  for (const operations of chunkedOperations) {
+    const { results } = await branchTransaction({
+      pathParams: {
+        workspace: "{workspaceId}",
+        dbBranchName: "{dbBranch}",
+        region: "{region}"
+      },
+      body: { operations },
+      ...fetchProps
+    });
+    for (const result of results) {
+      if (result.operation === "update") {
+        ids.push(result.id);
+      } else {
+        ids.push(null);
+      }
+    }
+  }
+  return ids;
+};
 _upsertRecordWithID = new WeakSet();
 upsertRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVersion }) {
   const fetchProps = await __privateGet$4(this, _getFetchProps).call(this);
@@ -2634,6 +2798,25 @@ deleteRecord_fn = async function(recordId, columns = ["*"]) {
     throw e;
   }
 };
+_deleteRecords = new WeakSet();
+deleteRecords_fn = async function(recordIds) {
+  const fetchProps = await __privateGet$4(this, _getFetchProps).call(this);
+  const chunkedOperations = chunk(
+    recordIds.map((id) => ({ delete: { table: __privateGet$4(this, _table), id } })),
+    BULK_OPERATION_MAX_SIZE
+  );
+  for (const operations of chunkedOperations) {
+    await branchTransaction({
+      pathParams: {
+        workspace: "{workspaceId}",
+        dbBranchName: "{dbBranch}",
+        region: "{region}"
+      },
+      body: { operations },
+      ...fetchProps
+    });
+  }
+};
 _setCacheQuery = new WeakSet();
 setCacheQuery_fn = async function(query, meta, records) {
   await __privateGet$4(this, _cache).set(`query_${__privateGet$4(this, _table)}:${query.key()}`, { date: new Date(), meta, records });
@@ -2744,9 +2927,6 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
   Object.freeze(result);
   return result;
 };
-function isResponseWithRecords(value) {
-  return isObject(value) && Array.isArray(value.records);
-}
 function extractId(value) {
   if (isString(value))
     return value;