@xata.io/client 0.0.0-alpha.vf9f8d99 → 0.0.0-alpha.vfa36696

This diff compares the contents of two publicly released versions of this package as they appear in their public registry, and is provided for informational purposes only.
package/dist/index.cjs CHANGED
@@ -29,8 +29,11 @@ function notEmpty(value) {
29
29
  function compact(arr) {
30
30
  return arr.filter(notEmpty);
31
31
  }
32
+ function compactObject(obj) {
33
+ return Object.fromEntries(Object.entries(obj).filter(([, value]) => notEmpty(value)));
34
+ }
32
35
  function isObject(value) {
33
- return Boolean(value) && typeof value === "object" && !Array.isArray(value);
36
+ return Boolean(value) && typeof value === "object" && !Array.isArray(value) && !(value instanceof Date);
34
37
  }
35
38
  function isDefined(value) {
36
39
  return value !== null && value !== void 0;
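The new compactObject helper is the object counterpart of compact, and isObject now excludes Date instances so date values are no longer treated as nested objects. A minimal sketch of the expected behavior of these internal helpers, assuming notEmpty rejects null and undefined as its name suggests:

compactObject({ a: 1, b: undefined, c: null, d: "x" }); // => { a: 1, d: "x" }
isObject(new Date());  // => false (Dates are passed through as plain values)
isObject({ a: 1 });    // => true
isObject([1, 2]);      // => false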
@@ -85,6 +88,15 @@ function chunk(array, chunkSize) {
85
88
  async function timeout(ms) {
86
89
  return new Promise((resolve) => setTimeout(resolve, ms));
87
90
  }
91
+ function promiseMap(inputValues, mapper) {
92
+ const reducer = (acc$, inputValue) => acc$.then(
93
+ (acc) => mapper(inputValue).then((result) => {
94
+ acc.push(result);
95
+ return acc;
96
+ })
97
+ );
98
+ return inputValues.reduce(reducer, Promise.resolve([]));
99
+ }
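promiseMap maps an async function over the inputs strictly in sequence (each mapper call starts only after the previous one resolves), in contrast to Promise.all, which runs them concurrently. A usage sketch; fetchRecord is a placeholder async function:

const ids = ["a", "b", "c"];
const results = await promiseMap(ids, (id) => fetchRecord(id));
// results preserves input order; fetchRecord("b") starts only after fetchRecord("a") resolves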
88
100
 
89
101
  function getEnvironment() {
90
102
  try {
@@ -93,8 +105,10 @@ function getEnvironment() {
93
105
  apiKey: process.env.XATA_API_KEY ?? getGlobalApiKey(),
94
106
  databaseURL: process.env.XATA_DATABASE_URL ?? getGlobalDatabaseURL(),
95
107
  branch: process.env.XATA_BRANCH ?? getGlobalBranch(),
96
- envBranch: process.env.VERCEL_GIT_COMMIT_REF ?? process.env.CF_PAGES_BRANCH ?? process.env.BRANCH,
97
- fallbackBranch: process.env.XATA_FALLBACK_BRANCH ?? getGlobalFallbackBranch()
108
+ deployPreview: process.env.XATA_PREVIEW,
109
+ deployPreviewBranch: process.env.XATA_PREVIEW_BRANCH,
110
+ vercelGitCommitRef: process.env.VERCEL_GIT_COMMIT_REF,
111
+ vercelGitRepoOwner: process.env.VERCEL_GIT_REPO_OWNER
98
112
  };
99
113
  }
100
114
  } catch (err) {
@@ -105,8 +119,10 @@ function getEnvironment() {
105
119
  apiKey: Deno.env.get("XATA_API_KEY") ?? getGlobalApiKey(),
106
120
  databaseURL: Deno.env.get("XATA_DATABASE_URL") ?? getGlobalDatabaseURL(),
107
121
  branch: Deno.env.get("XATA_BRANCH") ?? getGlobalBranch(),
108
- envBranch: Deno.env.get("VERCEL_GIT_COMMIT_REF") ?? Deno.env.get("CF_PAGES_BRANCH") ?? Deno.env.get("BRANCH"),
109
- fallbackBranch: Deno.env.get("XATA_FALLBACK_BRANCH") ?? getGlobalFallbackBranch()
122
+ deployPreview: Deno.env.get("XATA_PREVIEW"),
123
+ deployPreviewBranch: Deno.env.get("XATA_PREVIEW_BRANCH"),
124
+ vercelGitCommitRef: Deno.env.get("VERCEL_GIT_COMMIT_REF"),
125
+ vercelGitRepoOwner: Deno.env.get("VERCEL_GIT_REPO_OWNER")
110
126
  };
111
127
  }
112
128
  } catch (err) {
@@ -115,8 +131,10 @@ function getEnvironment() {
115
131
  apiKey: getGlobalApiKey(),
116
132
  databaseURL: getGlobalDatabaseURL(),
117
133
  branch: getGlobalBranch(),
118
- envBranch: void 0,
119
- fallbackBranch: getGlobalFallbackBranch()
134
+ deployPreview: void 0,
135
+ deployPreviewBranch: void 0,
136
+ vercelGitCommitRef: void 0,
137
+ vercelGitRepoOwner: void 0
120
138
  };
121
139
  }
122
140
  function getEnableBrowserVariable() {
@@ -159,39 +177,59 @@ function getGlobalBranch() {
159
177
  return void 0;
160
178
  }
161
179
  }
162
- function getGlobalFallbackBranch() {
180
+ function getDatabaseURL() {
163
181
  try {
164
- return XATA_FALLBACK_BRANCH;
182
+ const { databaseURL } = getEnvironment();
183
+ return databaseURL;
165
184
  } catch (err) {
166
185
  return void 0;
167
186
  }
168
187
  }
169
- function getDatabaseURL() {
188
+ function getAPIKey() {
170
189
  try {
171
- const { databaseURL } = getEnvironment();
172
- return databaseURL;
190
+ const { apiKey } = getEnvironment();
191
+ return apiKey;
173
192
  } catch (err) {
174
193
  return void 0;
175
194
  }
176
195
  }
177
196
  function getBranch() {
178
197
  try {
179
- const { branch, envBranch } = getEnvironment();
180
- return branch ?? envBranch;
198
+ const { branch } = getEnvironment();
199
+ return branch ?? "main";
181
200
  } catch (err) {
182
201
  return void 0;
183
202
  }
184
203
  }
185
-
186
- function getAPIKey() {
204
+ function buildPreviewBranchName({ org, branch }) {
205
+ return `preview-${org}-${branch}`;
206
+ }
207
+ function getPreviewBranch() {
187
208
  try {
188
- const { apiKey } = getEnvironment();
189
- return apiKey;
209
+ const { deployPreview, deployPreviewBranch, vercelGitCommitRef, vercelGitRepoOwner } = getEnvironment();
210
+ if (deployPreviewBranch)
211
+ return deployPreviewBranch;
212
+ switch (deployPreview) {
213
+ case "vercel": {
214
+ if (!vercelGitCommitRef || !vercelGitRepoOwner) {
215
+ console.warn("XATA_PREVIEW=vercel but VERCEL_GIT_COMMIT_REF or VERCEL_GIT_REPO_OWNER is not valid");
216
+ return void 0;
217
+ }
218
+ return buildPreviewBranchName({ org: vercelGitRepoOwner, branch: vercelGitCommitRef });
219
+ }
220
+ }
221
+ return void 0;
190
222
  } catch (err) {
191
223
  return void 0;
192
224
  }
193
225
  }
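getPreviewBranch derives a deploy-preview branch name from the environment. With XATA_PREVIEW=vercel it combines the Vercel repo owner and commit ref; an explicit XATA_PREVIEW_BRANCH always wins. A sketch with made-up environment values:

// XATA_PREVIEW=vercel
// VERCEL_GIT_REPO_OWNER=acme
// VERCEL_GIT_COMMIT_REF=feature/search
getPreviewBranch(); // => "preview-acme-feature/search"

// XATA_PREVIEW_BRANCH=my-preview
getPreviewBranch(); // => "my-preview" (takes precedence over the Vercel-derived name)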
194
226
 
227
+ var __defProp$8 = Object.defineProperty;
228
+ var __defNormalProp$8 = (obj, key, value) => key in obj ? __defProp$8(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
229
+ var __publicField$8 = (obj, key, value) => {
230
+ __defNormalProp$8(obj, typeof key !== "symbol" ? key + "" : key, value);
231
+ return value;
232
+ };
195
233
  var __accessCheck$8 = (obj, member, msg) => {
196
234
  if (!member.has(obj))
197
235
  throw TypeError("Cannot " + msg);
@@ -215,6 +253,7 @@ var __privateMethod$4 = (obj, member, method) => {
215
253
  return method;
216
254
  };
217
255
  var _fetch, _queue, _concurrency, _enqueue, enqueue_fn;
256
+ const REQUEST_TIMEOUT = 3e4;
218
257
  function getFetchImplementation(userFetch) {
219
258
  const globalFetch = typeof fetch !== "undefined" ? fetch : void 0;
220
259
  const fetchImpl = userFetch ?? globalFetch;
@@ -231,6 +270,8 @@ class ApiRequestPool {
231
270
  __privateAdd$8(this, _fetch, void 0);
232
271
  __privateAdd$8(this, _queue, void 0);
233
272
  __privateAdd$8(this, _concurrency, void 0);
273
+ __publicField$8(this, "running");
274
+ __publicField$8(this, "started");
234
275
  __privateSet$8(this, _queue, []);
235
276
  __privateSet$8(this, _concurrency, concurrency);
236
277
  this.running = 0;
@@ -246,17 +287,20 @@ class ApiRequestPool {
246
287
  return __privateGet$8(this, _fetch);
247
288
  }
248
289
  request(url, options) {
249
- const start = new Date();
290
+ const start = /* @__PURE__ */ new Date();
250
291
  const fetch2 = this.getFetch();
251
292
  const runRequest = async (stalled = false) => {
252
- const response = await fetch2(url, options);
293
+ const response = await Promise.race([fetch2(url, options), timeout(REQUEST_TIMEOUT).then(() => null)]);
294
+ if (!response) {
295
+ throw new Error("Request timed out");
296
+ }
253
297
  if (response.status === 429) {
254
298
  const rateLimitReset = parseNumber(response.headers?.get("x-ratelimit-reset")) ?? 1;
255
299
  await timeout(rateLimitReset * 1e3);
256
300
  return await runRequest(true);
257
301
  }
258
302
  if (stalled) {
259
- const stalledTime = new Date().getTime() - start.getTime();
303
+ const stalledTime = (/* @__PURE__ */ new Date()).getTime() - start.getTime();
260
304
  console.warn(`A request to Xata hit your workspace limits, was retried and stalled for ${stalledTime}ms`);
261
305
  }
262
306
  return response;
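Each request is now raced against a 30-second timeout (REQUEST_TIMEOUT = 3e4 ms); if the timer wins, the pool throws instead of hanging. The same pattern in isolation, as a self-contained sketch:

const REQUEST_TIMEOUT = 30000;
const timeout = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

async function fetchWithTimeout(url, options) {
  // null signals that the timer finished before the fetch did
  const response = await Promise.race([fetch(url, options), timeout(REQUEST_TIMEOUT).then(() => null)]);
  if (!response) throw new Error("Request timed out");
  return response;
}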
@@ -299,16 +343,199 @@ function generateUUID() {
299
343
  });
300
344
  }
301
345
 
302
- const VERSION = "0.22.3";
346
+ async function getBytes(stream, onChunk) {
347
+ const reader = stream.getReader();
348
+ let result;
349
+ while (!(result = await reader.read()).done) {
350
+ onChunk(result.value);
351
+ }
352
+ }
353
+ function getLines(onLine) {
354
+ let buffer;
355
+ let position;
356
+ let fieldLength;
357
+ let discardTrailingNewline = false;
358
+ return function onChunk(arr) {
359
+ if (buffer === void 0) {
360
+ buffer = arr;
361
+ position = 0;
362
+ fieldLength = -1;
363
+ } else {
364
+ buffer = concat(buffer, arr);
365
+ }
366
+ const bufLength = buffer.length;
367
+ let lineStart = 0;
368
+ while (position < bufLength) {
369
+ if (discardTrailingNewline) {
370
+ if (buffer[position] === 10 /* NewLine */) {
371
+ lineStart = ++position;
372
+ }
373
+ discardTrailingNewline = false;
374
+ }
375
+ let lineEnd = -1;
376
+ for (; position < bufLength && lineEnd === -1; ++position) {
377
+ switch (buffer[position]) {
378
+ case 58 /* Colon */:
379
+ if (fieldLength === -1) {
380
+ fieldLength = position - lineStart;
381
+ }
382
+ break;
383
+ case 13 /* CarriageReturn */:
384
+ discardTrailingNewline = true;
385
+ case 10 /* NewLine */:
386
+ lineEnd = position;
387
+ break;
388
+ }
389
+ }
390
+ if (lineEnd === -1) {
391
+ break;
392
+ }
393
+ onLine(buffer.subarray(lineStart, lineEnd), fieldLength);
394
+ lineStart = position;
395
+ fieldLength = -1;
396
+ }
397
+ if (lineStart === bufLength) {
398
+ buffer = void 0;
399
+ } else if (lineStart !== 0) {
400
+ buffer = buffer.subarray(lineStart);
401
+ position -= lineStart;
402
+ }
403
+ };
404
+ }
405
+ function getMessages(onId, onRetry, onMessage) {
406
+ let message = newMessage();
407
+ const decoder = new TextDecoder();
408
+ return function onLine(line, fieldLength) {
409
+ if (line.length === 0) {
410
+ onMessage?.(message);
411
+ message = newMessage();
412
+ } else if (fieldLength > 0) {
413
+ const field = decoder.decode(line.subarray(0, fieldLength));
414
+ const valueOffset = fieldLength + (line[fieldLength + 1] === 32 /* Space */ ? 2 : 1);
415
+ const value = decoder.decode(line.subarray(valueOffset));
416
+ switch (field) {
417
+ case "data":
418
+ message.data = message.data ? message.data + "\n" + value : value;
419
+ break;
420
+ case "event":
421
+ message.event = value;
422
+ break;
423
+ case "id":
424
+ onId(message.id = value);
425
+ break;
426
+ case "retry":
427
+ const retry = parseInt(value, 10);
428
+ if (!isNaN(retry)) {
429
+ onRetry(message.retry = retry);
430
+ }
431
+ break;
432
+ }
433
+ }
434
+ };
435
+ }
436
+ function concat(a, b) {
437
+ const res = new Uint8Array(a.length + b.length);
438
+ res.set(a);
439
+ res.set(b, a.length);
440
+ return res;
441
+ }
442
+ function newMessage() {
443
+ return {
444
+ data: "",
445
+ event: "",
446
+ id: "",
447
+ retry: void 0
448
+ };
449
+ }
450
+ const EventStreamContentType = "text/event-stream";
451
+ const LastEventId = "last-event-id";
452
+ function fetchEventSource(input, {
453
+ signal: inputSignal,
454
+ headers: inputHeaders,
455
+ onopen: inputOnOpen,
456
+ onmessage,
457
+ onclose,
458
+ onerror,
459
+ fetch: inputFetch,
460
+ ...rest
461
+ }) {
462
+ return new Promise((resolve, reject) => {
463
+ const headers = { ...inputHeaders };
464
+ if (!headers.accept) {
465
+ headers.accept = EventStreamContentType;
466
+ }
467
+ let curRequestController;
468
+ function dispose() {
469
+ curRequestController.abort();
470
+ }
471
+ inputSignal?.addEventListener("abort", () => {
472
+ dispose();
473
+ resolve();
474
+ });
475
+ const fetchImpl = inputFetch ?? fetch;
476
+ const onopen = inputOnOpen ?? defaultOnOpen;
477
+ async function create() {
478
+ curRequestController = new AbortController();
479
+ try {
480
+ const response = await fetchImpl(input, {
481
+ ...rest,
482
+ headers,
483
+ signal: curRequestController.signal
484
+ });
485
+ await onopen(response);
486
+ await getBytes(
487
+ response.body,
488
+ getLines(
489
+ getMessages(
490
+ (id) => {
491
+ if (id) {
492
+ headers[LastEventId] = id;
493
+ } else {
494
+ delete headers[LastEventId];
495
+ }
496
+ },
497
+ (_retry) => {
498
+ },
499
+ onmessage
500
+ )
501
+ )
502
+ );
503
+ onclose?.();
504
+ dispose();
505
+ resolve();
506
+ } catch (err) {
507
+ }
508
+ }
509
+ create();
510
+ });
511
+ }
512
+ function defaultOnOpen(response) {
513
+ const contentType = response.headers?.get("content-type");
514
+ if (!contentType?.startsWith(EventStreamContentType)) {
515
+ throw new Error(`Expected content-type to be ${EventStreamContentType}, Actual: ${contentType}`);
516
+ }
517
+ }
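fetchEventSource is a minimal Server-Sent Events reader built on fetch: getLines splits the byte stream on newlines, getMessages assembles the data/event/id/retry fields, and each complete message is handed to onmessage. A usage sketch against a hypothetical SSE endpoint (the URL and payload are illustrative, not part of the SDK surface):

await fetchEventSource("https://example.com/stream", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({ prompt: "hello" }),
  onmessage: (msg) => console.log(msg.event, msg.data), // msg: { data, event, id, retry }
  onclose: () => console.log("stream finished")
});
// The returned promise resolves once the stream closes or the abort signal fires.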
518
+
519
+ const VERSION = "0.24.3";
303
520
 
521
+ var __defProp$7 = Object.defineProperty;
522
+ var __defNormalProp$7 = (obj, key, value) => key in obj ? __defProp$7(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
523
+ var __publicField$7 = (obj, key, value) => {
524
+ __defNormalProp$7(obj, typeof key !== "symbol" ? key + "" : key, value);
525
+ return value;
526
+ };
304
527
  class ErrorWithCause extends Error {
305
528
  constructor(message, options) {
306
529
  super(message, options);
530
+ __publicField$7(this, "cause");
307
531
  }
308
532
  }
309
533
  class FetcherError extends ErrorWithCause {
310
534
  constructor(status, data, requestId) {
311
535
  super(getMessage(data));
536
+ __publicField$7(this, "status");
537
+ __publicField$7(this, "requestId");
538
+ __publicField$7(this, "errors");
312
539
  this.status = status;
313
540
  this.errors = isBulkError(data) ? data.errors : [{ message: getMessage(data), status }];
314
541
  this.requestId = requestId;
@@ -375,6 +602,15 @@ function hostHeader(url) {
375
602
  const { groups } = pattern.exec(url) ?? {};
376
603
  return groups?.host ? { Host: groups.host } : {};
377
604
  }
605
+ function parseBody(body, headers) {
606
+ if (!isDefined(body))
607
+ return void 0;
608
+ const { "Content-Type": contentType } = headers ?? {};
609
+ if (String(contentType).toLowerCase() === "application/json") {
610
+ return JSON.stringify(body);
611
+ }
612
+ return body;
613
+ }
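parseBody only JSON-stringifies the payload when the Content-Type header is application/json; anything else (for example the binary body of a file upload) is passed through untouched. Expected behavior, with binaryBuffer standing in for an arbitrary binary payload:

parseBody({ name: "file.png" }, { "Content-Type": "application/json" }); // => '{"name":"file.png"}'
parseBody(binaryBuffer, { "Content-Type": "image/png" });                // => binaryBuffer, unchanged
parseBody(undefined, {});                                                // => undefined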
378
614
  const defaultClientID = generateUUID();
379
615
  async function fetch$1({
380
616
  url: path,
@@ -394,7 +630,8 @@ async function fetch$1({
394
630
  sessionID,
395
631
  clientName,
396
632
  xataAgentExtra,
397
- fetchOptions = {}
633
+ fetchOptions = {},
634
+ rawResponse = false
398
635
  }) {
399
636
  pool.setFetch(fetch2);
400
637
  return await trace(
@@ -413,7 +650,7 @@ async function fetch$1({
413
650
  isDefined(clientName) ? ["service", clientName] : void 0,
414
651
  ...Object.entries(xataAgentExtra ?? {})
415
652
  ]).map(([key, value]) => `${key}=${value}`).join("; ");
416
- const headers = {
653
+ const headers = compactObject({
417
654
  "Accept-Encoding": "identity",
418
655
  "Content-Type": "application/json",
419
656
  "X-Xata-Client-ID": clientID ?? defaultClientID,
@@ -422,11 +659,11 @@ async function fetch$1({
422
659
  ...customHeaders,
423
660
  ...hostHeader(fullUrl),
424
661
  Authorization: `Bearer ${apiKey}`
425
- };
662
+ });
426
663
  const response = await pool.request(url, {
427
664
  ...fetchOptions,
428
665
  method: method.toUpperCase(),
429
- body: body ? JSON.stringify(body) : void 0,
666
+ body: parseBody(body, headers),
430
667
  headers,
431
668
  signal
432
669
  });
@@ -439,6 +676,9 @@ async function fetch$1({
439
676
  [TraceAttributes.HTTP_HOST]: host,
440
677
  [TraceAttributes.HTTP_SCHEME]: protocol?.replace(":", "")
441
678
  });
679
+ const message = response.headers?.get("x-xata-message");
680
+ if (message)
681
+ console.warn(message);
442
682
  if (response.status === 204) {
443
683
  return {};
444
684
  }
@@ -446,7 +686,7 @@ async function fetch$1({
446
686
  throw new FetcherError(response.status, "Rate limit exceeded", requestId);
447
687
  }
448
688
  try {
449
- const jsonResponse = await response.json();
689
+ const jsonResponse = rawResponse ? await response.blob() : await response.json();
450
690
  if (response.ok) {
451
691
  return jsonResponse;
452
692
  }
@@ -458,6 +698,59 @@ async function fetch$1({
458
698
  { [TraceAttributes.HTTP_METHOD]: method.toUpperCase(), [TraceAttributes.HTTP_ROUTE]: path }
459
699
  );
460
700
  }
701
+ function fetchSSERequest({
702
+ url: path,
703
+ method,
704
+ body,
705
+ headers: customHeaders,
706
+ pathParams,
707
+ queryParams,
708
+ fetch: fetch2,
709
+ apiKey,
710
+ endpoint,
711
+ apiUrl,
712
+ workspacesApiUrl,
713
+ onMessage,
714
+ onError,
715
+ onClose,
716
+ signal,
717
+ clientID,
718
+ sessionID,
719
+ clientName,
720
+ xataAgentExtra
721
+ }) {
722
+ const baseUrl = buildBaseUrl({ endpoint, path, workspacesApiUrl, pathParams, apiUrl });
723
+ const fullUrl = resolveUrl(baseUrl, queryParams, pathParams);
724
+ const url = fullUrl.includes("localhost") ? fullUrl.replace(/^[^.]+\./, "http://") : fullUrl;
725
+ void fetchEventSource(url, {
726
+ method,
727
+ body: JSON.stringify(body),
728
+ fetch: fetch2,
729
+ signal,
730
+ headers: {
731
+ "X-Xata-Client-ID": clientID ?? defaultClientID,
732
+ "X-Xata-Session-ID": sessionID ?? generateUUID(),
733
+ "X-Xata-Agent": compact([
734
+ ["client", "TS_SDK"],
735
+ ["version", VERSION],
736
+ isDefined(clientName) ? ["service", clientName] : void 0,
737
+ ...Object.entries(xataAgentExtra ?? {})
738
+ ]).map(([key, value]) => `${key}=${value}`).join("; "),
739
+ ...customHeaders,
740
+ Authorization: `Bearer ${apiKey}`,
741
+ "Content-Type": "application/json"
742
+ },
743
+ onmessage(ev) {
744
+ onMessage?.(JSON.parse(ev.data));
745
+ },
746
+ onerror(ev) {
747
+ onError?.(JSON.parse(ev.data));
748
+ },
749
+ onclose() {
750
+ onClose?.();
751
+ }
752
+ });
753
+ }
461
754
  function parseUrl(url) {
462
755
  try {
463
756
  const { host, protocol } = new URL(url);
@@ -488,6 +781,12 @@ const deleteBranch = (variables, signal) => dataPlaneFetch({
488
781
  ...variables,
489
782
  signal
490
783
  });
784
+ const copyBranch = (variables, signal) => dataPlaneFetch({
785
+ url: "/db/{dbBranchName}/copy",
786
+ method: "post",
787
+ ...variables,
788
+ signal
789
+ });
491
790
  const updateBranchMetadata = (variables, signal) => dataPlaneFetch({
492
791
  url: "/db/{dbBranchName}/metadata",
493
792
  method: "put",
@@ -537,6 +836,7 @@ const compareBranchSchemas = (variables, signal) => dataPlaneFetch({ url: "/db/{
537
836
  const updateBranchSchema = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/update", method: "post", ...variables, signal });
538
837
  const previewBranchSchemaEdit = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/preview", method: "post", ...variables, signal });
539
838
  const applyBranchSchemaEdit = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/apply", method: "post", ...variables, signal });
839
+ const pushBranchMigrations = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/push", method: "post", ...variables, signal });
540
840
  const createTable = (variables, signal) => dataPlaneFetch({
541
841
  url: "/db/{dbBranchName}/tables/{tableName}",
542
842
  method: "put",
@@ -581,6 +881,42 @@ const deleteColumn = (variables, signal) => dataPlaneFetch({
581
881
  });
582
882
  const branchTransaction = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/transaction", method: "post", ...variables, signal });
583
883
  const insertRecord = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/data", method: "post", ...variables, signal });
884
+ const getFileItem = (variables, signal) => dataPlaneFetch({
885
+ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}/column/{columnName}/file/{fileId}",
886
+ method: "get",
887
+ ...variables,
888
+ signal
889
+ });
890
+ const putFileItem = (variables, signal) => dataPlaneFetch({
891
+ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}/column/{columnName}/file/{fileId}",
892
+ method: "put",
893
+ ...variables,
894
+ signal
895
+ });
896
+ const deleteFileItem = (variables, signal) => dataPlaneFetch({
897
+ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}/column/{columnName}/file/{fileId}",
898
+ method: "delete",
899
+ ...variables,
900
+ signal
901
+ });
902
+ const getFile = (variables, signal) => dataPlaneFetch({
903
+ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}/column/{columnName}/file",
904
+ method: "get",
905
+ ...variables,
906
+ signal
907
+ });
908
+ const putFile = (variables, signal) => dataPlaneFetch({
909
+ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}/column/{columnName}/file",
910
+ method: "put",
911
+ ...variables,
912
+ signal
913
+ });
914
+ const deleteFile = (variables, signal) => dataPlaneFetch({
915
+ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}/column/{columnName}/file",
916
+ method: "delete",
917
+ ...variables,
918
+ signal
919
+ });
584
920
  const getRecord = (variables, signal) => dataPlaneFetch({
585
921
  url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
586
922
  method: "get",
@@ -610,6 +946,12 @@ const searchTable = (variables, signal) => dataPlaneFetch({
610
946
  ...variables,
611
947
  signal
612
948
  });
949
+ const sqlQuery = (variables, signal) => dataPlaneFetch({
950
+ url: "/db/{dbBranchName}/sql",
951
+ method: "post",
952
+ ...variables,
953
+ signal
954
+ });
613
955
  const vectorSearchTable = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/vectorSearch", method: "post", ...variables, signal });
614
956
  const askTable = (variables, signal) => dataPlaneFetch({
615
957
  url: "/db/{dbBranchName}/tables/{tableName}/ask",
@@ -617,14 +959,22 @@ const askTable = (variables, signal) => dataPlaneFetch({
617
959
  ...variables,
618
960
  signal
619
961
  });
962
+ const askTableSession = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/ask/{sessionId}", method: "post", ...variables, signal });
620
963
  const summarizeTable = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/summarize", method: "post", ...variables, signal });
621
964
  const aggregateTable = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/aggregate", method: "post", ...variables, signal });
965
+ const fileAccess = (variables, signal) => dataPlaneFetch({
966
+ url: "/file/{fileId}",
967
+ method: "get",
968
+ ...variables,
969
+ signal
970
+ });
622
971
  const operationsByTag$2 = {
623
972
  branch: {
624
973
  getBranchList,
625
974
  getBranchDetails,
626
975
  createBranch,
627
976
  deleteBranch,
977
+ copyBranch,
628
978
  updateBranchMetadata,
629
979
  getBranchMetadata,
630
980
  getBranchStats,
@@ -642,7 +992,8 @@ const operationsByTag$2 = {
642
992
  compareBranchSchemas,
643
993
  updateBranchSchema,
644
994
  previewBranchSchemaEdit,
645
- applyBranchSchemaEdit
995
+ applyBranchSchemaEdit,
996
+ pushBranchMigrations
646
997
  },
647
998
  migrationRequests: {
648
999
  queryMigrationRequests,
@@ -676,12 +1027,15 @@ const operationsByTag$2 = {
676
1027
  deleteRecord,
677
1028
  bulkInsertTableRecords
678
1029
  },
1030
+ files: { getFileItem, putFileItem, deleteFileItem, getFile, putFile, deleteFile, fileAccess },
679
1031
  searchAndFilter: {
680
1032
  queryTable,
681
1033
  searchBranch,
682
1034
  searchTable,
1035
+ sqlQuery,
683
1036
  vectorSearchTable,
684
1037
  askTable,
1038
+ askTableSession,
685
1039
  summarizeTable,
686
1040
  aggregateTable
687
1041
  }
@@ -689,6 +1043,13 @@ const operationsByTag$2 = {
689
1043
 
690
1044
  const controlPlaneFetch = async (options) => fetch$1({ ...options, endpoint: "controlPlane" });
691
1045
 
1046
+ const grantAuthorizationCode = (variables, signal) => controlPlaneFetch({ url: "/oauth/authorize", method: "post", ...variables, signal });
1047
+ const generateAccessToken = (variables, signal) => controlPlaneFetch({
1048
+ url: "/oauth/token",
1049
+ method: "post",
1050
+ ...variables,
1051
+ signal
1052
+ });
692
1053
  const getUser = (variables, signal) => controlPlaneFetch({
693
1054
  url: "/user",
694
1055
  method: "get",
@@ -783,6 +1144,7 @@ const deleteDatabase = (variables, signal) => controlPlaneFetch({
783
1144
  });
784
1145
  const getDatabaseMetadata = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}", method: "get", ...variables, signal });
785
1146
  const updateDatabaseMetadata = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}", method: "patch", ...variables, signal });
1147
+ const renameDatabase = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}/rename", method: "post", ...variables, signal });
786
1148
  const getDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}/github", method: "get", ...variables, signal });
787
1149
  const updateDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}/github", method: "put", ...variables, signal });
788
1150
  const deleteDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}/github", method: "delete", ...variables, signal });
@@ -793,6 +1155,7 @@ const listRegions = (variables, signal) => controlPlaneFetch({
793
1155
  signal
794
1156
  });
795
1157
  const operationsByTag$1 = {
1158
+ authOther: { grantAuthorizationCode, generateAccessToken },
796
1159
  users: { getUser, updateUser, deleteUser },
797
1160
  authentication: { getUserAPIKeys, createUserAPIKey, deleteUserAPIKey },
798
1161
  workspaces: {
@@ -818,6 +1181,7 @@ const operationsByTag$1 = {
818
1181
  deleteDatabase,
819
1182
  getDatabaseMetadata,
820
1183
  updateDatabaseMetadata,
1184
+ renameDatabase,
821
1185
  getDatabaseGithubSettings,
822
1186
  updateDatabaseGithubSettings,
823
1187
  deleteDatabaseGithubSettings,
@@ -843,6 +1207,10 @@ const providers = {
843
1207
  staging: {
844
1208
  main: "https://api.staging-xata.dev",
845
1209
  workspaces: "https://{workspaceId}.{region}.staging-xata.dev"
1210
+ },
1211
+ dev: {
1212
+ main: "https://api.dev-xata.dev",
1213
+ workspaces: "https://{workspaceId}.{region}.dev-xata.dev"
846
1214
  }
847
1215
  };
848
1216
  function isHostProviderAlias(alias) {
@@ -860,6 +1228,11 @@ function parseProviderString(provider = "production") {
860
1228
  return null;
861
1229
  return { main, workspaces };
862
1230
  }
1231
+ function buildProviderString(provider) {
1232
+ if (isHostProviderAlias(provider))
1233
+ return provider;
1234
+ return `${provider.main},${provider.workspaces}`;
1235
+ }
863
1236
  function parseWorkspacesUrlParts(url) {
864
1237
  if (!isString(url))
865
1238
  return null;
@@ -964,6 +1337,11 @@ class XataApiClient {
964
1337
  __privateGet$7(this, _namespaces).records = new RecordsApi(__privateGet$7(this, _extraProps));
965
1338
  return __privateGet$7(this, _namespaces).records;
966
1339
  }
1340
+ get files() {
1341
+ if (!__privateGet$7(this, _namespaces).files)
1342
+ __privateGet$7(this, _namespaces).files = new FilesApi(__privateGet$7(this, _extraProps));
1343
+ return __privateGet$7(this, _namespaces).files;
1344
+ }
967
1345
  get searchAndFilter() {
968
1346
  if (!__privateGet$7(this, _namespaces).searchAndFilter)
969
1347
  __privateGet$7(this, _namespaces).searchAndFilter = new SearchAndFilterApi(__privateGet$7(this, _extraProps));
@@ -1172,6 +1550,20 @@ class BranchApi {
1172
1550
  ...this.extraProps
1173
1551
  });
1174
1552
  }
1553
+ copyBranch({
1554
+ workspace,
1555
+ region,
1556
+ database,
1557
+ branch,
1558
+ destinationBranch,
1559
+ limit
1560
+ }) {
1561
+ return operationsByTag.branch.copyBranch({
1562
+ pathParams: { workspace, region, dbBranchName: `${database}:${branch}` },
1563
+ body: { destinationBranch, limit },
1564
+ ...this.extraProps
1565
+ });
1566
+ }
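A usage sketch of the new copyBranch wrapper, assuming an initialized XataApiClient; the workspace, region, database and branch names are placeholders, and limit is simply forwarded in the request body as shown above:

const api = new XataApiClient({ apiKey: "xau_..." });
await api.branch.copyBranch({
  workspace: "my-workspace",
  region: "us-east-1",
  database: "my-db",
  branch: "main",
  destinationBranch: "main-copy",
  limit: 1000
});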
1175
1567
  updateBranchMetadata({
1176
1568
  workspace,
1177
1569
  region,
@@ -1527,6 +1919,164 @@ class RecordsApi {
1527
1919
  });
1528
1920
  }
1529
1921
  }
1922
+ class FilesApi {
1923
+ constructor(extraProps) {
1924
+ this.extraProps = extraProps;
1925
+ }
1926
+ getFileItem({
1927
+ workspace,
1928
+ region,
1929
+ database,
1930
+ branch,
1931
+ table,
1932
+ record,
1933
+ column,
1934
+ fileId
1935
+ }) {
1936
+ return operationsByTag.files.getFileItem({
1937
+ pathParams: {
1938
+ workspace,
1939
+ region,
1940
+ dbBranchName: `${database}:${branch}`,
1941
+ tableName: table,
1942
+ recordId: record,
1943
+ columnName: column,
1944
+ fileId
1945
+ },
1946
+ ...this.extraProps
1947
+ });
1948
+ }
1949
+ putFileItem({
1950
+ workspace,
1951
+ region,
1952
+ database,
1953
+ branch,
1954
+ table,
1955
+ record,
1956
+ column,
1957
+ fileId,
1958
+ file
1959
+ }) {
1960
+ return operationsByTag.files.putFileItem({
1961
+ pathParams: {
1962
+ workspace,
1963
+ region,
1964
+ dbBranchName: `${database}:${branch}`,
1965
+ tableName: table,
1966
+ recordId: record,
1967
+ columnName: column,
1968
+ fileId
1969
+ },
1970
+ // @ts-ignore
1971
+ body: file,
1972
+ ...this.extraProps
1973
+ });
1974
+ }
1975
+ deleteFileItem({
1976
+ workspace,
1977
+ region,
1978
+ database,
1979
+ branch,
1980
+ table,
1981
+ record,
1982
+ column,
1983
+ fileId
1984
+ }) {
1985
+ return operationsByTag.files.deleteFileItem({
1986
+ pathParams: {
1987
+ workspace,
1988
+ region,
1989
+ dbBranchName: `${database}:${branch}`,
1990
+ tableName: table,
1991
+ recordId: record,
1992
+ columnName: column,
1993
+ fileId
1994
+ },
1995
+ ...this.extraProps
1996
+ });
1997
+ }
1998
+ getFile({
1999
+ workspace,
2000
+ region,
2001
+ database,
2002
+ branch,
2003
+ table,
2004
+ record,
2005
+ column
2006
+ }) {
2007
+ return operationsByTag.files.getFile({
2008
+ pathParams: {
2009
+ workspace,
2010
+ region,
2011
+ dbBranchName: `${database}:${branch}`,
2012
+ tableName: table,
2013
+ recordId: record,
2014
+ columnName: column
2015
+ },
2016
+ ...this.extraProps
2017
+ });
2018
+ }
2019
+ putFile({
2020
+ workspace,
2021
+ region,
2022
+ database,
2023
+ branch,
2024
+ table,
2025
+ record,
2026
+ column,
2027
+ file
2028
+ }) {
2029
+ return operationsByTag.files.putFile({
2030
+ pathParams: {
2031
+ workspace,
2032
+ region,
2033
+ dbBranchName: `${database}:${branch}`,
2034
+ tableName: table,
2035
+ recordId: record,
2036
+ columnName: column
2037
+ },
2038
+ body: file,
2039
+ ...this.extraProps
2040
+ });
2041
+ }
2042
+ deleteFile({
2043
+ workspace,
2044
+ region,
2045
+ database,
2046
+ branch,
2047
+ table,
2048
+ record,
2049
+ column
2050
+ }) {
2051
+ return operationsByTag.files.deleteFile({
2052
+ pathParams: {
2053
+ workspace,
2054
+ region,
2055
+ dbBranchName: `${database}:${branch}`,
2056
+ tableName: table,
2057
+ recordId: record,
2058
+ columnName: column
2059
+ },
2060
+ ...this.extraProps
2061
+ });
2062
+ }
2063
+ fileAccess({
2064
+ workspace,
2065
+ region,
2066
+ fileId,
2067
+ verify
2068
+ }) {
2069
+ return operationsByTag.files.fileAccess({
2070
+ pathParams: {
2071
+ workspace,
2072
+ region,
2073
+ fileId
2074
+ },
2075
+ queryParams: { verify },
2076
+ ...this.extraProps
2077
+ });
2078
+ }
2079
+ }
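The new FilesApi namespace wraps the file endpoints added above. A sketch of storing and reading back a file attachment, assuming an initialized XataApiClient, an existing avatar file column, and a Node Buffer as the payload (all identifiers are placeholders):

const fs = require("fs");
const file = fs.readFileSync("avatar.png");
await api.files.putFile({
  workspace: "my-workspace",
  region: "us-east-1",
  database: "my-db",
  branch: "main",
  table: "users",
  record: "rec_xyz",
  column: "avatar",
  file
});
const stored = await api.files.getFile({
  workspace: "my-workspace",
  region: "us-east-1",
  database: "my-db",
  branch: "main",
  table: "users",
  record: "rec_xyz",
  column: "avatar"
});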
1530
2080
  class SearchAndFilterApi {
1531
2081
  constructor(extraProps) {
1532
2082
  this.extraProps = extraProps;
@@ -1610,17 +2160,26 @@ class SearchAndFilterApi {
1610
2160
  database,
1611
2161
  branch,
1612
2162
  table,
1613
- question,
1614
- fuzziness,
1615
- target,
1616
- prefix,
1617
- filter,
1618
- boosters,
1619
- rules
2163
+ options
1620
2164
  }) {
1621
2165
  return operationsByTag.searchAndFilter.askTable({
1622
2166
  pathParams: { workspace, region, dbBranchName: `${database}:${branch}`, tableName: table },
1623
- body: { question, fuzziness, target, prefix, filter, boosters, rules },
2167
+ body: { ...options },
2168
+ ...this.extraProps
2169
+ });
2170
+ }
2171
+ askTableSession({
2172
+ workspace,
2173
+ region,
2174
+ database,
2175
+ branch,
2176
+ table,
2177
+ sessionId,
2178
+ message
2179
+ }) {
2180
+ return operationsByTag.searchAndFilter.askTableSession({
2181
+ pathParams: { workspace, region, dbBranchName: `${database}:${branch}`, tableName: table, sessionId },
2182
+ body: { message },
1624
2183
  ...this.extraProps
1625
2184
  });
1626
2185
  }
@@ -1888,6 +2447,19 @@ class MigrationsApi {
1888
2447
  ...this.extraProps
1889
2448
  });
1890
2449
  }
2450
+ pushBranchMigrations({
2451
+ workspace,
2452
+ region,
2453
+ database,
2454
+ branch,
2455
+ migrations
2456
+ }) {
2457
+ return operationsByTag.migrations.pushBranchMigrations({
2458
+ pathParams: { workspace, region, dbBranchName: `${database}:${branch}` },
2459
+ body: { migrations },
2460
+ ...this.extraProps
2461
+ });
2462
+ }
1891
2463
  }
1892
2464
  class DatabaseApi {
1893
2465
  constructor(extraProps) {
@@ -1902,11 +2474,13 @@ class DatabaseApi {
1902
2474
  createDatabase({
1903
2475
  workspace,
1904
2476
  database,
1905
- data
2477
+ data,
2478
+ headers
1906
2479
  }) {
1907
2480
  return operationsByTag.databases.createDatabase({
1908
2481
  pathParams: { workspaceId: workspace, dbName: database },
1909
2482
  body: data,
2483
+ headers,
1910
2484
  ...this.extraProps
1911
2485
  });
1912
2486
  }
@@ -1939,6 +2513,17 @@ class DatabaseApi {
1939
2513
  ...this.extraProps
1940
2514
  });
1941
2515
  }
2516
+ renameDatabase({
2517
+ workspace,
2518
+ database,
2519
+ newName
2520
+ }) {
2521
+ return operationsByTag.databases.renameDatabase({
2522
+ pathParams: { workspaceId: workspace, dbName: database },
2523
+ body: { newName },
2524
+ ...this.extraProps
2525
+ });
2526
+ }
1942
2527
  getDatabaseGithubSettings({
1943
2528
  workspace,
1944
2529
  database
@@ -1985,13 +2570,261 @@ class XataApiPlugin {
1985
2570
  class XataPlugin {
1986
2571
  }
1987
2572
 
2573
+ class FilesPlugin extends XataPlugin {
2574
+ build(pluginOptions) {
2575
+ return {
2576
+ download: async (location) => {
2577
+ const { table, record, column, fileId = "" } = location ?? {};
2578
+ return await getFileItem({
2579
+ pathParams: {
2580
+ workspace: "{workspaceId}",
2581
+ dbBranchName: "{dbBranch}",
2582
+ region: "{region}",
2583
+ tableName: table ?? "",
2584
+ recordId: record ?? "",
2585
+ columnName: column ?? "",
2586
+ fileId
2587
+ },
2588
+ ...pluginOptions,
2589
+ rawResponse: true
2590
+ });
2591
+ },
2592
+ upload: async (location, file) => {
2593
+ const { table, record, column, fileId = "" } = location ?? {};
2594
+ return await putFileItem({
2595
+ pathParams: {
2596
+ workspace: "{workspaceId}",
2597
+ dbBranchName: "{dbBranch}",
2598
+ region: "{region}",
2599
+ tableName: table ?? "",
2600
+ recordId: record ?? "",
2601
+ columnName: column ?? "",
2602
+ fileId
2603
+ },
2604
+ body: file,
2605
+ ...pluginOptions
2606
+ });
2607
+ },
2608
+ delete: async (location) => {
2609
+ const { table, record, column, fileId = "" } = location ?? {};
2610
+ return await deleteFileItem({
2611
+ pathParams: {
2612
+ workspace: "{workspaceId}",
2613
+ dbBranchName: "{dbBranch}",
2614
+ region: "{region}",
2615
+ tableName: table ?? "",
2616
+ recordId: record ?? "",
2617
+ columnName: column ?? "",
2618
+ fileId
2619
+ },
2620
+ ...pluginOptions
2621
+ });
2622
+ }
2623
+ };
2624
+ }
2625
+ }
2626
+
2627
+ function buildTransformString(transformations) {
2628
+ return transformations.flatMap(
2629
+ (t) => Object.entries(t).map(([key, value]) => {
2630
+ if (key === "trim") {
2631
+ const { left = 0, top = 0, right = 0, bottom = 0 } = value;
2632
+ return `${key}=${[top, right, bottom, left].join(";")}`;
2633
+ }
2634
+ if (key === "gravity" && typeof value === "object") {
2635
+ const { x = 0.5, y = 0.5 } = value;
2636
+ return `${key}=${[x, y].join("x")}`;
2637
+ }
2638
+ return `${key}=${value}`;
2639
+ })
2640
+ ).join(",");
2641
+ }
2642
+ function transformImage(url, transformations) {
2643
+ if (!isDefined(url))
2644
+ return void 0;
2645
+ const transformationsString = buildTransformString(transformations);
2646
+ const { hostname, pathname, search } = new URL(url);
2647
+ return `https://${hostname}/transform/${transformationsString}${pathname}${search}`;
2648
+ }
2649
+
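buildTransformString flattens a list of transformation objects into the comma-separated segment that transformImage splices into the file URL after /transform/. Expected output for an illustrative file URL:

transformImage("https://us-east-1.storage.example.com/abc123?verify=sig", [
  { width: 200, height: 200, fit: "cover" },
  { gravity: { x: 0.5, y: 0.2 } }
]);
// => "https://us-east-1.storage.example.com/transform/width=200,height=200,fit=cover,gravity=0.5x0.2/abc123?verify=sig"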
2650
+ var __defProp$6 = Object.defineProperty;
2651
+ var __defNormalProp$6 = (obj, key, value) => key in obj ? __defProp$6(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
2652
+ var __publicField$6 = (obj, key, value) => {
2653
+ __defNormalProp$6(obj, typeof key !== "symbol" ? key + "" : key, value);
2654
+ return value;
2655
+ };
2656
+ class XataFile {
2657
+ constructor(file) {
2658
+ /**
2659
+ * Name of this file.
2660
+ */
2661
+ __publicField$6(this, "name");
2662
+ /**
2663
+ * Media type of this file.
2664
+ */
2665
+ __publicField$6(this, "mediaType");
2666
+ /**
2667
+ * Base64 encoded content of this file.
2668
+ */
2669
+ __publicField$6(this, "base64Content");
2670
+ /**
2671
+ * Whether to enable public url for this file.
2672
+ */
2673
+ __publicField$6(this, "enablePublicUrl");
2674
+ /**
2675
+ * Timeout for the signed url.
2676
+ */
2677
+ __publicField$6(this, "signedUrlTimeout");
2678
+ /**
2679
+ * Size of this file.
2680
+ */
2681
+ __publicField$6(this, "size");
2682
+ /**
2683
+ * Version of this file.
2684
+ */
2685
+ __publicField$6(this, "version");
2686
+ /**
2687
+ * Url of this file.
2688
+ */
2689
+ __publicField$6(this, "url");
2690
+ /**
2691
+ * Signed url of this file.
2692
+ */
2693
+ __publicField$6(this, "signedUrl");
2694
+ /**
2695
+ * Attributes of this file.
2696
+ */
2697
+ __publicField$6(this, "attributes");
2698
+ this.name = file.name;
2699
+ this.mediaType = file.mediaType || "application/octet-stream";
2700
+ this.base64Content = file.base64Content;
2701
+ this.enablePublicUrl = file.enablePublicUrl;
2702
+ this.signedUrlTimeout = file.signedUrlTimeout;
2703
+ this.size = file.size;
2704
+ this.version = file.version;
2705
+ this.url = file.url;
2706
+ this.signedUrl = file.signedUrl;
2707
+ this.attributes = file.attributes;
2708
+ }
2709
+ static fromBuffer(buffer, options = {}) {
2710
+ const base64Content = buffer.toString("base64");
2711
+ return new XataFile({ ...options, base64Content });
2712
+ }
2713
+ toBuffer() {
2714
+ if (!this.base64Content) {
2715
+ throw new Error(`File content is not available, please select property "base64Content" when querying the file`);
2716
+ }
2717
+ return Buffer.from(this.base64Content, "base64");
2718
+ }
2719
+ static fromArrayBuffer(arrayBuffer, options = {}) {
2720
+ const uint8Array = new Uint8Array(arrayBuffer);
2721
+ return this.fromUint8Array(uint8Array, options);
2722
+ }
2723
+ toArrayBuffer() {
2724
+ if (!this.base64Content) {
2725
+ throw new Error(`File content is not available, please select property "base64Content" when querying the file`);
2726
+ }
2727
+ const binary = atob(this.base64Content);
2728
+ return new ArrayBuffer(binary.length);
2729
+ }
2730
+ static fromUint8Array(uint8Array, options = {}) {
2731
+ let binary = "";
2732
+ for (let i = 0; i < uint8Array.byteLength; i++) {
2733
+ binary += String.fromCharCode(uint8Array[i]);
2734
+ }
2735
+ const base64Content = btoa(binary);
2736
+ return new XataFile({ ...options, base64Content });
2737
+ }
2738
+ toUint8Array() {
2739
+ if (!this.base64Content) {
2740
+ throw new Error(`File content is not available, please select property "base64Content" when querying the file`);
2741
+ }
2742
+ const binary = atob(this.base64Content);
2743
+ const uint8Array = new Uint8Array(binary.length);
2744
+ for (let i = 0; i < binary.length; i++) {
2745
+ uint8Array[i] = binary.charCodeAt(i);
2746
+ }
2747
+ return uint8Array;
2748
+ }
2749
+ static async fromBlob(file, options = {}) {
2750
+ const name = options.name ?? file.name;
2751
+ const mediaType = file.type;
2752
+ const arrayBuffer = await file.arrayBuffer();
2753
+ return this.fromArrayBuffer(arrayBuffer, { ...options, name, mediaType });
2754
+ }
2755
+ toBlob() {
2756
+ if (!this.base64Content) {
2757
+ throw new Error(`File content is not available, please select property "base64Content" when querying the file`);
2758
+ }
2759
+ const arrayBuffer = this.toArrayBuffer();
2760
+ return new Blob([arrayBuffer], { type: this.mediaType });
2761
+ }
2762
+ static fromString(string, options = {}) {
2763
+ const base64Content = btoa(string);
2764
+ return new XataFile({ ...options, base64Content });
2765
+ }
2766
+ toString() {
2767
+ if (!this.base64Content) {
2768
+ throw new Error(`File content is not available, please select property "base64Content" when querying the file`);
2769
+ }
2770
+ return atob(this.base64Content);
2771
+ }
2772
+ static fromBase64(base64Content, options = {}) {
2773
+ return new XataFile({ ...options, base64Content });
2774
+ }
2775
+ toBase64() {
2776
+ if (!this.base64Content) {
2777
+ throw new Error(`File content is not available, please select property "base64Content" when querying the file`);
2778
+ }
2779
+ return this.base64Content;
2780
+ }
2781
+ transform(...options) {
2782
+ return {
2783
+ url: transformImage(this.url, options),
2784
+ signedUrl: transformImage(this.signedUrl, options)
2785
+ };
2786
+ }
2787
+ }
2788
+ const parseInputFileEntry = async (entry) => {
2789
+ if (!isDefined(entry))
2790
+ return null;
2791
+ const { id, name, mediaType, base64Content, enablePublicUrl, signedUrlTimeout } = await entry;
2792
+ return compactObject({ id, name, mediaType, base64Content, enablePublicUrl, signedUrlTimeout });
2793
+ };
2794
+
1988
2795
  function cleanFilter(filter) {
1989
- if (!filter)
2796
+ if (!isDefined(filter))
1990
2797
  return void 0;
1991
- const values = Object.values(filter).filter(Boolean).filter((value) => Array.isArray(value) ? value.length > 0 : true);
1992
- return values.length > 0 ? filter : void 0;
2798
+ if (!isObject(filter))
2799
+ return filter;
2800
+ const values = Object.fromEntries(
2801
+ Object.entries(filter).reduce((acc, [key, value]) => {
2802
+ if (!isDefined(value))
2803
+ return acc;
2804
+ if (Array.isArray(value)) {
2805
+ const clean = value.map((item) => cleanFilter(item)).filter((item) => isDefined(item));
2806
+ if (clean.length === 0)
2807
+ return acc;
2808
+ return [...acc, [key, clean]];
2809
+ }
2810
+ if (isObject(value)) {
2811
+ const clean = cleanFilter(value);
2812
+ if (!isDefined(clean))
2813
+ return acc;
2814
+ return [...acc, [key, clean]];
2815
+ }
2816
+ return [...acc, [key, value]];
2817
+ }, [])
2818
+ );
2819
+ return Object.keys(values).length > 0 ? values : void 0;
1993
2820
  }
1994
2821
 
2822
+ var __defProp$5 = Object.defineProperty;
2823
+ var __defNormalProp$5 = (obj, key, value) => key in obj ? __defProp$5(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
2824
+ var __publicField$5 = (obj, key, value) => {
2825
+ __defNormalProp$5(obj, typeof key !== "symbol" ? key + "" : key, value);
2826
+ return value;
2827
+ };
1995
2828
  var __accessCheck$6 = (obj, member, msg) => {
1996
2829
  if (!member.has(obj))
1997
2830
  throw TypeError("Cannot " + msg);
@@ -2014,22 +2847,58 @@ var _query, _page;
2014
2847
  class Page {
2015
2848
  constructor(query, meta, records = []) {
2016
2849
  __privateAdd$6(this, _query, void 0);
2850
+ /**
2851
+ * Page metadata, required to retrieve additional records.
2852
+ */
2853
+ __publicField$5(this, "meta");
2854
+ /**
2855
+ * The set of results for this page.
2856
+ */
2857
+ __publicField$5(this, "records");
2017
2858
  __privateSet$6(this, _query, query);
2018
2859
  this.meta = meta;
2019
2860
  this.records = new RecordArray(this, records);
2020
2861
  }
2862
+ /**
2863
+ * Retrieves the next page of results.
2864
+ * @param size Maximum number of results to be retrieved.
2865
+ * @param offset Number of results to skip when retrieving the results.
2866
+ * @returns The next page or results.
2867
+ */
2021
2868
  async nextPage(size, offset) {
2022
2869
  return __privateGet$6(this, _query).getPaginated({ pagination: { size, offset, after: this.meta.page.cursor } });
2023
2870
  }
2871
+ /**
2872
+ * Retrieves the previous page of results.
2873
+ * @param size Maximum number of results to be retrieved.
2874
+ * @param offset Number of results to skip when retrieving the results.
2875
+ * @returns The previous page or results.
2876
+ */
2024
2877
  async previousPage(size, offset) {
2025
2878
  return __privateGet$6(this, _query).getPaginated({ pagination: { size, offset, before: this.meta.page.cursor } });
2026
2879
  }
2880
+ /**
2881
+ * Retrieves the start page of results.
2882
+ * @param size Maximum number of results to be retrieved.
2883
+ * @param offset Number of results to skip when retrieving the results.
2884
+ * @returns The start page or results.
2885
+ */
2027
2886
  async startPage(size, offset) {
2028
2887
  return __privateGet$6(this, _query).getPaginated({ pagination: { size, offset, start: this.meta.page.cursor } });
2029
2888
  }
2889
+ /**
2890
+ * Retrieves the end page of results.
2891
+ * @param size Maximum number of results to be retrieved.
2892
+ * @param offset Number of results to skip when retrieving the results.
2893
+ * @returns The end page or results.
2894
+ */
2030
2895
  async endPage(size, offset) {
2031
2896
  return __privateGet$6(this, _query).getPaginated({ pagination: { size, offset, end: this.meta.page.cursor } });
2032
2897
  }
2898
+ /**
2899
+ * Shortcut method to check if there will be additional results if the next page of results is retrieved.
2900
+ * @returns Whether or not there will be additional results in the next page of results.
2901
+ */
2033
2902
  hasNextPage() {
2034
2903
  return this.meta.page.more;
2035
2904
  }
@@ -2042,7 +2911,7 @@ const PAGINATION_DEFAULT_OFFSET = 0;
2042
2911
  function isCursorPaginationOptions(options) {
2043
2912
  return isDefined(options) && (isDefined(options.start) || isDefined(options.end) || isDefined(options.after) || isDefined(options.before));
2044
2913
  }
2045
- const _RecordArray = class extends Array {
2914
+ const _RecordArray = class _RecordArray extends Array {
2046
2915
  constructor(...args) {
2047
2916
  super(..._RecordArray.parseConstructorParams(...args));
2048
2917
  __privateAdd$6(this, _page, void 0);
@@ -2070,29 +2939,58 @@ const _RecordArray = class extends Array {
2070
2939
  map(callbackfn, thisArg) {
2071
2940
  return this.toArray().map(callbackfn, thisArg);
2072
2941
  }
2942
+ /**
2943
+ * Retrieve next page of records
2944
+ *
2945
+ * @returns A new array of objects
2946
+ */
2073
2947
  async nextPage(size, offset) {
2074
2948
  const newPage = await __privateGet$6(this, _page).nextPage(size, offset);
2075
2949
  return new _RecordArray(newPage);
2076
2950
  }
2951
+ /**
2952
+ * Retrieve previous page of records
2953
+ *
2954
+ * @returns A new array of objects
2955
+ */
2077
2956
  async previousPage(size, offset) {
2078
2957
  const newPage = await __privateGet$6(this, _page).previousPage(size, offset);
2079
2958
  return new _RecordArray(newPage);
2080
2959
  }
2960
+ /**
2961
+ * Retrieve start page of records
2962
+ *
2963
+ * @returns A new array of objects
2964
+ */
2081
2965
  async startPage(size, offset) {
2082
2966
  const newPage = await __privateGet$6(this, _page).startPage(size, offset);
2083
2967
  return new _RecordArray(newPage);
2084
2968
  }
2969
+ /**
2970
+ * Retrieve end page of records
2971
+ *
2972
+ * @returns A new array of objects
2973
+ */
2085
2974
  async endPage(size, offset) {
2086
2975
  const newPage = await __privateGet$6(this, _page).endPage(size, offset);
2087
2976
  return new _RecordArray(newPage);
2088
2977
  }
2978
+ /**
2979
+ * @returns Boolean indicating if there is a next page
2980
+ */
2089
2981
  hasNextPage() {
2090
2982
  return __privateGet$6(this, _page).meta.page.more;
2091
2983
  }
2092
2984
  };
2093
- let RecordArray = _RecordArray;
2094
2985
  _page = new WeakMap();
2986
+ let RecordArray = _RecordArray;
2095
2987
 
2988
+ var __defProp$4 = Object.defineProperty;
2989
+ var __defNormalProp$4 = (obj, key, value) => key in obj ? __defProp$4(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
2990
+ var __publicField$4 = (obj, key, value) => {
2991
+ __defNormalProp$4(obj, typeof key !== "symbol" ? key + "" : key, value);
2992
+ return value;
2993
+ };
2096
2994
  var __accessCheck$5 = (obj, member, msg) => {
2097
2995
  if (!member.has(obj))
2098
2996
  throw TypeError("Cannot " + msg);
@@ -2116,14 +3014,15 @@ var __privateMethod$3 = (obj, member, method) => {
2116
3014
  return method;
2117
3015
  };
2118
3016
  var _table$1, _repository, _data, _cleanFilterConstraint, cleanFilterConstraint_fn;
2119
- const _Query = class {
3017
+ const _Query = class _Query {
2120
3018
  constructor(repository, table, data, rawParent) {
2121
3019
  __privateAdd$5(this, _cleanFilterConstraint);
2122
3020
  __privateAdd$5(this, _table$1, void 0);
2123
3021
  __privateAdd$5(this, _repository, void 0);
2124
3022
  __privateAdd$5(this, _data, { filter: {} });
2125
- this.meta = { page: { cursor: "start", more: true } };
2126
- this.records = new RecordArray(this, []);
3023
+ // Implements pagination
3024
+ __publicField$4(this, "meta", { page: { cursor: "start", more: true, size: PAGINATION_DEFAULT_SIZE } });
3025
+ __publicField$4(this, "records", new RecordArray(this, []));
2127
3026
  __privateSet$5(this, _table$1, table);
2128
3027
  if (repository) {
2129
3028
  __privateSet$5(this, _repository, repository);
@@ -2159,18 +3058,38 @@ const _Query = class {
2159
3058
  const key = JSON.stringify({ columns, filter, sort, pagination });
2160
3059
  return toBase64(key);
2161
3060
  }
3061
+ /**
3062
+ * Builds a new query object representing a logical OR between the given subqueries.
3063
+ * @param queries An array of subqueries.
3064
+ * @returns A new Query object.
3065
+ */
2162
3066
  any(...queries) {
2163
3067
  const $any = queries.map((query) => query.getQueryOptions().filter ?? {});
2164
3068
  return new _Query(__privateGet$5(this, _repository), __privateGet$5(this, _table$1), { filter: { $any } }, __privateGet$5(this, _data));
2165
3069
  }
3070
+ /**
3071
+ * Builds a new query object representing a logical AND between the given subqueries.
3072
+ * @param queries An array of subqueries.
3073
+ * @returns A new Query object.
3074
+ */
2166
3075
  all(...queries) {
2167
3076
  const $all = queries.map((query) => query.getQueryOptions().filter ?? {});
2168
3077
  return new _Query(__privateGet$5(this, _repository), __privateGet$5(this, _table$1), { filter: { $all } }, __privateGet$5(this, _data));
2169
3078
  }
3079
+ /**
3080
+ * Builds a new query object representing a logical OR negating each subquery. In pseudo-code: !q1 OR !q2
3081
+ * @param queries An array of subqueries.
3082
+ * @returns A new Query object.
3083
+ */
2170
3084
  not(...queries) {
2171
3085
  const $not = queries.map((query) => query.getQueryOptions().filter ?? {});
2172
3086
  return new _Query(__privateGet$5(this, _repository), __privateGet$5(this, _table$1), { filter: { $not } }, __privateGet$5(this, _data));
2173
3087
  }
3088
+ /**
3089
+ * Builds a new query object representing a logical AND negating each subquery. In pseudo-code: !q1 AND !q2
3090
+ * @param queries An array of subqueries.
3091
+ * @returns A new Query object.
3092
+ */
2174
3093
  none(...queries) {
2175
3094
  const $none = queries.map((query) => query.getQueryOptions().filter ?? {});
2176
3095
  return new _Query(__privateGet$5(this, _repository), __privateGet$5(this, _table$1), { filter: { $none } }, __privateGet$5(this, _data));
@@ -2193,6 +3112,11 @@ const _Query = class {
2193
3112
  const sort = [...originalSort, { column, direction }];
2194
3113
  return new _Query(__privateGet$5(this, _repository), __privateGet$5(this, _table$1), { sort }, __privateGet$5(this, _data));
2195
3114
  }
3115
+ /**
3116
+ * Builds a new query specifying the set of columns to be returned in the query response.
3117
+ * @param columns Array of column names to be returned by the query.
3118
+ * @returns A new Query object.
3119
+ */
2196
3120
  select(columns) {
2197
3121
  return new _Query(
2198
3122
  __privateGet$5(this, _repository),
@@ -2205,6 +3129,12 @@ const _Query = class {
2205
3129
  const query = new _Query(__privateGet$5(this, _repository), __privateGet$5(this, _table$1), options, __privateGet$5(this, _data));
2206
3130
  return __privateGet$5(this, _repository).query(query);
2207
3131
  }
3132
+ /**
3133
+ * Get results in an iterator
3134
+ *
3135
+ * @async
3136
+ * @returns Async interable of results
3137
+ */
2208
3138
  async *[Symbol.asyncIterator]() {
2209
3139
  for await (const [record] of this.getIterator({ batchSize: 1 })) {
2210
3140
  yield record;
@@ -2265,26 +3195,53 @@ const _Query = class {
2265
3195
  );
2266
3196
  return __privateGet$5(this, _repository).summarizeTable(query, summaries, summariesFilter);
2267
3197
  }
3198
+ /**
3199
+ * Builds a new query object adding a cache TTL in milliseconds.
3200
+ * @param ttl The cache TTL in milliseconds.
3201
+ * @returns A new Query object.
3202
+ */
2268
3203
  cache(ttl) {
2269
3204
  return new _Query(__privateGet$5(this, _repository), __privateGet$5(this, _table$1), { cache: ttl }, __privateGet$5(this, _data));
2270
3205
  }
3206
+ /**
3207
+ * Retrieve next page of records
3208
+ *
3209
+ * @returns A new page object.
3210
+ */
2271
3211
  nextPage(size, offset) {
2272
3212
  return this.startPage(size, offset);
2273
3213
  }
3214
+ /**
3215
+ * Retrieve previous page of records
3216
+ *
3217
+ * @returns A new page object
3218
+ */
2274
3219
  previousPage(size, offset) {
2275
3220
  return this.startPage(size, offset);
2276
3221
  }
3222
+ /**
3223
+ * Retrieve start page of records
3224
+ *
3225
+ * @returns A new page object
3226
+ */
2277
3227
  startPage(size, offset) {
2278
3228
  return this.getPaginated({ pagination: { size, offset } });
2279
3229
  }
3230
+ /**
3231
+ * Retrieve last page of records
3232
+ *
3233
+ * @returns A new page object
3234
+ */
2280
3235
  endPage(size, offset) {
2281
3236
  return this.getPaginated({ pagination: { size, offset, before: "end" } });
2282
3237
  }
3238
+ /**
3239
+ * @returns Boolean indicating if there is a next page
3240
+ */
2283
3241
  hasNextPage() {
2284
3242
  return this.meta.page.more;
2285
3243
  }
2286
3244
  };
2287
- let Query = _Query;
2288
3245
  _table$1 = new WeakMap();
2289
3246
  _repository = new WeakMap();
2290
3247
  _data = new WeakMap();
@@ -2299,6 +3256,7 @@ cleanFilterConstraint_fn = function(column, value) {
2299
3256
  }
2300
3257
  return value;
2301
3258
  };
3259
+ let Query = _Query;
2302
3260
  function cleanParent(data, parent) {
2303
3261
  if (isCursorPaginationOptions(data.pagination)) {
2304
3262
  return { ...parent, sort: void 0, filter: void 0 };
@@ -2306,6 +3264,21 @@ function cleanParent(data, parent) {
2306
3264
  return parent;
2307
3265
  }
2308
3266
 
3267
+ const RecordColumnTypes = [
3268
+ "bool",
3269
+ "int",
3270
+ "float",
3271
+ "string",
3272
+ "text",
3273
+ "email",
3274
+ "multiple",
3275
+ "link",
3276
+ "object",
3277
+ "datetime",
3278
+ "vector",
3279
+ "file[]",
3280
+ "file"
3281
+ ];
2309
3282
  function isIdentifiable(x) {
2310
3283
  return isObject(x) && isString(x?.id);
2311
3284
  }
@@ -2319,7 +3292,11 @@ function isSortFilterString(value) {
2319
3292
  return isString(value);
2320
3293
  }
2321
3294
  function isSortFilterBase(filter) {
2322
- return isObject(filter) && Object.values(filter).every((value) => value === "asc" || value === "desc");
3295
+ return isObject(filter) && Object.entries(filter).every(([key, value]) => {
3296
+ if (key === "*")
3297
+ return value === "random";
3298
+ return value === "asc" || value === "desc";
3299
+ });
2323
3300
  }
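Sort specifications now accept the wildcard key "*" with the value "random" in addition to per-column asc/desc. Expected behavior of the guard:

isSortFilterBase({ name: "asc" });     // => true
isSortFilterBase({ "*": "random" });   // => true, new in this version
isSortFilterBase({ name: "upwards" }); // => false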
2324
3301
  function isSortFilterObject(filter) {
2325
3302
  return isObject(filter) && !isSortFilterBase(filter) && filter.column !== void 0;
@@ -2360,7 +3337,7 @@ var __privateMethod$2 = (obj, member, method) => {
2360
3337
  __accessCheck$4(obj, member, "access private method");
2361
3338
  return method;
2362
3339
  };
2363
- var _table, _getFetchProps, _db, _cache, _schemaTables$2, _trace, _insertRecordWithoutId, insertRecordWithoutId_fn, _insertRecordWithId, insertRecordWithId_fn, _insertRecords, insertRecords_fn, _updateRecordWithID, updateRecordWithID_fn, _updateRecords, updateRecords_fn, _upsertRecordWithID, upsertRecordWithID_fn, _deleteRecord, deleteRecord_fn, _deleteRecords, deleteRecords_fn, _setCacheQuery, setCacheQuery_fn, _getCacheQuery, getCacheQuery_fn, _getSchemaTables$1, getSchemaTables_fn$1;
3340
+ var _table, _getFetchProps, _db, _cache, _schemaTables$2, _trace, _insertRecordWithoutId, insertRecordWithoutId_fn, _insertRecordWithId, insertRecordWithId_fn, _insertRecords, insertRecords_fn, _updateRecordWithID, updateRecordWithID_fn, _updateRecords, updateRecords_fn, _upsertRecordWithID, upsertRecordWithID_fn, _deleteRecord, deleteRecord_fn, _deleteRecords, deleteRecords_fn, _setCacheQuery, setCacheQuery_fn, _getCacheQuery, getCacheQuery_fn, _getSchemaTables$1, getSchemaTables_fn$1, _transformObjectToApi, transformObjectToApi_fn;
2364
3341
  const BULK_OPERATION_MAX_SIZE = 1e3;
2365
3342
  class Repository extends Query {
2366
3343
  }
@@ -2382,6 +3359,7 @@ class RestRepository extends Query {
2382
3359
  __privateAdd$4(this, _setCacheQuery);
2383
3360
  __privateAdd$4(this, _getCacheQuery);
2384
3361
  __privateAdd$4(this, _getSchemaTables$1);
3362
+ __privateAdd$4(this, _transformObjectToApi);
2385
3363
  __privateAdd$4(this, _table, void 0);
2386
3364
  __privateAdd$4(this, _getFetchProps, void 0);
2387
3365
  __privateAdd$4(this, _db, void 0);
@@ -2559,12 +3537,22 @@ class RestRepository extends Query {
2559
3537
  return result;
2560
3538
  }
2561
3539
  if (isString(a) && isObject(b)) {
3540
+ if (a === "")
3541
+ throw new Error("The id can't be empty");
2562
3542
  const columns = isStringArray(c) ? c : void 0;
2563
- return __privateMethod$2(this, _upsertRecordWithID, upsertRecordWithID_fn).call(this, a, b, columns, { ifVersion });
3543
+ return await __privateMethod$2(this, _upsertRecordWithID, upsertRecordWithID_fn).call(this, a, b, columns, { ifVersion });
2564
3544
  }
2565
3545
  if (isObject(a) && isString(a.id)) {
3546
+ if (a.id === "")
3547
+ throw new Error("The id can't be empty");
2566
3548
  const columns = isStringArray(c) ? c : void 0;
2567
- return __privateMethod$2(this, _upsertRecordWithID, upsertRecordWithID_fn).call(this, a.id, { ...a, id: void 0 }, columns, { ifVersion });
3549
+ return await __privateMethod$2(this, _upsertRecordWithID, upsertRecordWithID_fn).call(this, a.id, { ...a, id: void 0 }, columns, { ifVersion });
3550
+ }
3551
+ if (!isDefined(a) && isObject(b)) {
3552
+ return await this.create(b, c);
3553
+ }
3554
+ if (isObject(a) && !isDefined(a.id)) {
3555
+ return await this.create(a, b);
2568
3556
  }
2569
3557
  throw new Error("Invalid arguments for createOrUpdate method");
2570
3558
  });
@@ -2581,12 +3569,22 @@ class RestRepository extends Query {
2581
3569
  return result;
2582
3570
  }
2583
3571
  if (isString(a) && isObject(b)) {
3572
+ if (a === "")
3573
+ throw new Error("The id can't be empty");
2584
3574
  const columns = isStringArray(c) ? c : void 0;
2585
- return __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: false, ifVersion });
3575
+ return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: false, ifVersion });
2586
3576
  }
2587
3577
  if (isObject(a) && isString(a.id)) {
3578
+ if (a.id === "")
3579
+ throw new Error("The id can't be empty");
2588
3580
  const columns = isStringArray(c) ? c : void 0;
2589
- return __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a.id, { ...a, id: void 0 }, columns, { createOnly: false, ifVersion });
3581
+ return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a.id, { ...a, id: void 0 }, columns, { createOnly: false, ifVersion });
3582
+ }
3583
+ if (!isDefined(a) && isObject(b)) {
3584
+ return await this.create(b, c);
3585
+ }
3586
+ if (isObject(a) && !isDefined(a.id)) {
3587
+ return await this.create(a, b);
2590
3588
  }
2591
3589
  throw new Error("Invalid arguments for createOrReplace method");
2592
3590
  });
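
Note: both createOrUpdate and createOrReplace now reject an empty-string id and fall back to create() when no id is supplied at all. A sketch of the resulting call shapes (hypothetical `users` table; the client instance is assumed):

async function upsertExamples(xata) {
  // Explicit id: upserts (createOrUpdate) or overwrites (createOrReplace) the record.
  await xata.db.users.createOrUpdate('rec_ada', { name: 'Ada Lovelace' });
  await xata.db.users.createOrReplace({ id: 'rec_ada', name: 'Ada Lovelace' });

  // No id at all: the call is forwarded to create() and Xata generates the id.
  const created = await xata.db.users.createOrUpdate({ name: 'Grace Hopper' });

  // Empty-string id: rejected before any request is made.
  try {
    await xata.db.users.createOrUpdate('', { name: 'nobody' });
  } catch (err) {
    console.error(err.message); // "The id can't be empty"
  }
  return created;
}
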
@@ -2752,6 +3750,34 @@ class RestRepository extends Query {
2752
3750
  return result;
2753
3751
  });
2754
3752
  }
3753
+ ask(question, options) {
3754
+ const params = {
3755
+ pathParams: {
3756
+ workspace: "{workspaceId}",
3757
+ dbBranchName: "{dbBranch}",
3758
+ region: "{region}",
3759
+ tableName: __privateGet$4(this, _table)
3760
+ },
3761
+ body: {
3762
+ question,
3763
+ ...options
3764
+ },
3765
+ ...__privateGet$4(this, _getFetchProps).call(this)
3766
+ };
3767
+ if (options?.onMessage) {
3768
+ fetchSSERequest({
3769
+ endpoint: "dataPlane",
3770
+ url: "/db/{dbBranchName}/tables/{tableName}/ask",
3771
+ method: "POST",
3772
+ onMessage: (message) => {
3773
+ options.onMessage?.({ answer: message.text, records: message.records });
3774
+ },
3775
+ ...params
3776
+ });
3777
+ } else {
3778
+ return askTable(params);
3779
+ }
3780
+ }
2755
3781
  }
2756
3782
  _table = new WeakMap();
2757
3783
  _getFetchProps = new WeakMap();
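
Note: ask() is new on the repository. Without onMessage it resolves the askTable response in one call; with onMessage it streams partial answers over SSE. A sketch (hypothetical `docs` table; the client instance is assumed, and the exact shape of the one-shot response is not shown in this diff):

async function askExamples(xata) {
  // One-shot: resolves with the askTable response.
  const response = await xata.db.docs.ask('How do I paginate query results?');
  console.log(response);

  // Streaming: partial answers are delivered through onMessage instead.
  xata.db.docs.ask('How do I paginate query results?', {
    onMessage: (message) => {
      process.stdout.write(message.answer ?? ''); // partial answer text as it streams in
    }
  });
}
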
@@ -2761,7 +3787,7 @@ _schemaTables$2 = new WeakMap();
2761
3787
  _trace = new WeakMap();
2762
3788
  _insertRecordWithoutId = new WeakSet();
2763
3789
  insertRecordWithoutId_fn = async function(object, columns = ["*"]) {
2764
- const record = transformObjectLinks(object);
3790
+ const record = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
2765
3791
  const response = await insertRecord({
2766
3792
  pathParams: {
2767
3793
  workspace: "{workspaceId}",
@@ -2778,7 +3804,9 @@ insertRecordWithoutId_fn = async function(object, columns = ["*"]) {
2778
3804
  };
2779
3805
  _insertRecordWithId = new WeakSet();
2780
3806
  insertRecordWithId_fn = async function(recordId, object, columns = ["*"], { createOnly, ifVersion }) {
2781
- const record = transformObjectLinks(object);
3807
+ if (!recordId)
3808
+ return null;
3809
+ const record = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
2782
3810
  const response = await insertRecordWithID({
2783
3811
  pathParams: {
2784
3812
  workspace: "{workspaceId}",
@@ -2796,21 +3824,20 @@ insertRecordWithId_fn = async function(recordId, object, columns = ["*"], { crea
2796
3824
  };
2797
3825
  _insertRecords = new WeakSet();
2798
3826
  insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
2799
- const chunkedOperations = chunk(
2800
- objects.map((object) => ({
2801
- insert: { table: __privateGet$4(this, _table), record: transformObjectLinks(object), createOnly, ifVersion }
2802
- })),
2803
- BULK_OPERATION_MAX_SIZE
2804
- );
3827
+ const operations = await promiseMap(objects, async (object) => {
3828
+ const record = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
3829
+ return { insert: { table: __privateGet$4(this, _table), record, createOnly, ifVersion } };
3830
+ });
3831
+ const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
2805
3832
  const ids = [];
2806
- for (const operations of chunkedOperations) {
3833
+ for (const operations2 of chunkedOperations) {
2807
3834
  const { results } = await branchTransaction({
2808
3835
  pathParams: {
2809
3836
  workspace: "{workspaceId}",
2810
3837
  dbBranchName: "{dbBranch}",
2811
3838
  region: "{region}"
2812
3839
  },
2813
- body: { operations },
3840
+ body: { operations: operations2 },
2814
3841
  ...__privateGet$4(this, _getFetchProps).call(this)
2815
3842
  });
2816
3843
  for (const result of results) {
@@ -2825,7 +3852,9 @@ insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
2825
3852
  };
2826
3853
  _updateRecordWithID = new WeakSet();
2827
3854
  updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVersion }) {
2828
- const { id: _id, ...record } = transformObjectLinks(object);
3855
+ if (!recordId)
3856
+ return null;
3857
+ const { id: _id, ...record } = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
2829
3858
  try {
2830
3859
  const response = await updateRecordWithID({
2831
3860
  pathParams: {
@@ -2850,21 +3879,20 @@ updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
2850
3879
  };
2851
3880
  _updateRecords = new WeakSet();
2852
3881
  updateRecords_fn = async function(objects, { ifVersion, upsert }) {
2853
- const chunkedOperations = chunk(
2854
- objects.map(({ id, ...object }) => ({
2855
- update: { table: __privateGet$4(this, _table), id, ifVersion, upsert, fields: transformObjectLinks(object) }
2856
- })),
2857
- BULK_OPERATION_MAX_SIZE
2858
- );
3882
+ const operations = await promiseMap(objects, async ({ id, ...object }) => {
3883
+ const fields = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
3884
+ return { update: { table: __privateGet$4(this, _table), id, ifVersion, upsert, fields } };
3885
+ });
3886
+ const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
2859
3887
  const ids = [];
2860
- for (const operations of chunkedOperations) {
3888
+ for (const operations2 of chunkedOperations) {
2861
3889
  const { results } = await branchTransaction({
2862
3890
  pathParams: {
2863
3891
  workspace: "{workspaceId}",
2864
3892
  dbBranchName: "{dbBranch}",
2865
3893
  region: "{region}"
2866
3894
  },
2867
- body: { operations },
3895
+ body: { operations: operations2 },
2868
3896
  ...__privateGet$4(this, _getFetchProps).call(this)
2869
3897
  });
2870
3898
  for (const result of results) {
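
Note: bulk inserts and updates are now built with promiseMap, so file, link and datetime columns go through the same async transformation as single writes, and they are still chunked into branch transactions of BULK_OPERATION_MAX_SIZE (1000) operations. Sketch of a bulk create that spans several chunks (table name and array-create usage assumed):

async function bulkCreate(xata) {
  const drafts = Array.from({ length: 2500 }, (_, i) => ({ title: `Draft ${i}` }));
  // 2500 inserts are split into ceil(2500 / 1000) = 3 branch transactions.
  return xata.db.posts.create(drafts);
}
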
@@ -2879,6 +3907,8 @@ updateRecords_fn = async function(objects, { ifVersion, upsert }) {
2879
3907
  };
2880
3908
  _upsertRecordWithID = new WeakSet();
2881
3909
  upsertRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVersion }) {
3910
+ if (!recordId)
3911
+ return null;
2882
3912
  const response = await upsertRecordWithID({
2883
3913
  pathParams: {
2884
3914
  workspace: "{workspaceId}",
@@ -2896,6 +3926,8 @@ upsertRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
2896
3926
  };
2897
3927
  _deleteRecord = new WeakSet();
2898
3928
  deleteRecord_fn = async function(recordId, columns = ["*"]) {
3929
+ if (!recordId)
3930
+ return null;
2899
3931
  try {
2900
3932
  const response = await deleteRecord({
2901
3933
  pathParams: {
@@ -2920,7 +3952,7 @@ deleteRecord_fn = async function(recordId, columns = ["*"]) {
2920
3952
  _deleteRecords = new WeakSet();
2921
3953
  deleteRecords_fn = async function(recordIds) {
2922
3954
  const chunkedOperations = chunk(
2923
- recordIds.map((id) => ({ delete: { table: __privateGet$4(this, _table), id } })),
3955
+ compact(recordIds).map((id) => ({ delete: { table: __privateGet$4(this, _table), id } })),
2924
3956
  BULK_OPERATION_MAX_SIZE
2925
3957
  );
2926
3958
  for (const operations of chunkedOperations) {
@@ -2937,15 +3969,16 @@ deleteRecords_fn = async function(recordIds) {
2937
3969
  };
2938
3970
  _setCacheQuery = new WeakSet();
2939
3971
  setCacheQuery_fn = async function(query, meta, records) {
2940
- await __privateGet$4(this, _cache).set(`query_${__privateGet$4(this, _table)}:${query.key()}`, { date: new Date(), meta, records });
3972
+ await __privateGet$4(this, _cache)?.set(`query_${__privateGet$4(this, _table)}:${query.key()}`, { date: /* @__PURE__ */ new Date(), meta, records });
2941
3973
  };
2942
3974
  _getCacheQuery = new WeakSet();
2943
3975
  getCacheQuery_fn = async function(query) {
2944
3976
  const key = `query_${__privateGet$4(this, _table)}:${query.key()}`;
2945
- const result = await __privateGet$4(this, _cache).get(key);
3977
+ const result = await __privateGet$4(this, _cache)?.get(key);
2946
3978
  if (!result)
2947
3979
  return null;
2948
- const { cache: ttl = __privateGet$4(this, _cache).defaultQueryTTL } = query.getQueryOptions();
3980
+ const defaultTTL = __privateGet$4(this, _cache)?.defaultQueryTTL ?? -1;
3981
+ const { cache: ttl = defaultTTL } = query.getQueryOptions();
2949
3982
  if (ttl < 0)
2950
3983
  return null;
2951
3984
  const hasExpired = result.date.getTime() + ttl < Date.now();
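
Note: the cache lookups are now optional-chained, so a client without a cache no longer throws, and a missing defaultQueryTTL falls back to -1 (never cache). A sketch of opting into caching explicitly; the SimpleCache option names follow its constructor shown later in this file, while the per-query cache option is an assumption:

const { BaseClient, SimpleCache } = require('@xata.io/client');

const cachedClient = new BaseClient({
  databaseURL: process.env.XATA_DATABASE_URL,
  apiKey: process.env.XATA_API_KEY,
  // Keep up to 200 query results for 30 seconds each.
  cache: new SimpleCache({ max: 200, defaultQueryTTL: 30000 })
});

// A per-query TTL still overrides the cache default (table name assumed).
async function cachedTitles() {
  return cachedClient.db.posts.getMany({ cache: 15000 });
}
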
@@ -2962,7 +3995,39 @@ getSchemaTables_fn$1 = async function() {
2962
3995
  __privateSet$4(this, _schemaTables$2, schema.tables);
2963
3996
  return schema.tables;
2964
3997
  };
2965
- const transformObjectLinks = (object) => {
3998
+ _transformObjectToApi = new WeakSet();
3999
+ transformObjectToApi_fn = async function(object) {
4000
+ const schemaTables = await __privateMethod$2(this, _getSchemaTables$1, getSchemaTables_fn$1).call(this);
4001
+ const schema = schemaTables.find((table) => table.name === __privateGet$4(this, _table));
4002
+ if (!schema)
4003
+ throw new Error(`Table ${__privateGet$4(this, _table)} not found in schema`);
4004
+ const result = {};
4005
+ for (const [key, value] of Object.entries(object)) {
4006
+ if (key === "xata")
4007
+ continue;
4008
+ const type = schema.columns.find((column) => column.name === key)?.type;
4009
+ switch (type) {
4010
+ case "link": {
4011
+ result[key] = isIdentifiable(value) ? value.id : value;
4012
+ break;
4013
+ }
4014
+ case "datetime": {
4015
+ result[key] = value instanceof Date ? value.toISOString() : value;
4016
+ break;
4017
+ }
4018
+ case `file`:
4019
+ result[key] = await parseInputFileEntry(value);
4020
+ break;
4021
+ case "file[]":
4022
+ result[key] = await promiseMap(value, (item) => parseInputFileEntry(item));
4023
+ break;
4024
+ default:
4025
+ result[key] = value;
4026
+ }
4027
+ }
4028
+ return result;
4029
+ };
4030
+ const removeLinksFromObject = (object) => {
2966
4031
  return Object.entries(object).reduce((acc, [key, value]) => {
2967
4032
  if (key === "xata")
2968
4033
  return acc;
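
Note: transformObjectToApi replaces the old transformObjectLinks helper and is schema-aware: link columns are reduced to their id, datetime columns accept Date objects (serialized with toISOString), and file / file[] columns are run through parseInputFileEntry. Consumer-side sketch of what that allows; the column and table names are hypothetical:

async function createPost(xata) {
  const author = await xata.db.users.read('rec_ada'); // linked record, assumed to exist

  return xata.db.posts.create({
    title: 'Hello world',
    publishedAt: new Date(),  // datetime column: sent as value.toISOString()
    author                    // link column: reduced to author.id before sending
  });
}
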
@@ -3011,6 +4076,12 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
3011
4076
  }
3012
4077
  break;
3013
4078
  }
4079
+ case "file":
4080
+ data[column.name] = isDefined(value) ? new XataFile(value) : null;
4081
+ break;
4082
+ case "file[]":
4083
+ data[column.name] = value?.map((item) => new XataFile(item)) ?? null;
4084
+ break;
3014
4085
  default:
3015
4086
  data[column.name] = value ?? null;
3016
4087
  if (column.notNull === true && value === null) {
@@ -3020,6 +4091,8 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
3020
4091
  }
3021
4092
  }
3022
4093
  const record = { ...data };
4094
+ const serializable = { xata, ...removeLinksFromObject(data) };
4095
+ const metadata = xata !== void 0 ? { ...xata, createdAt: new Date(xata.createdAt), updatedAt: new Date(xata.updatedAt) } : void 0;
3023
4096
  record.read = function(columns2) {
3024
4097
  return db[table].read(record["id"], columns2);
3025
4098
  };
@@ -3036,14 +4109,15 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
3036
4109
  record.delete = function() {
3037
4110
  return db[table].delete(record["id"]);
3038
4111
  };
4112
+ record.xata = Object.freeze(metadata);
3039
4113
  record.getMetadata = function() {
3040
- return xata;
4114
+ return record.xata;
3041
4115
  };
3042
4116
  record.toSerializable = function() {
3043
- return JSON.parse(JSON.stringify(transformObjectLinks(data)));
4117
+ return JSON.parse(JSON.stringify(serializable));
3044
4118
  };
3045
4119
  record.toString = function() {
3046
- return JSON.stringify(transformObjectLinks(data));
4120
+ return JSON.stringify(serializable);
3047
4121
  };
3048
4122
  for (const prop of ["read", "update", "replace", "delete", "getMetadata", "toSerializable", "toString"]) {
3049
4123
  Object.defineProperty(record, prop, { enumerable: false });
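
Note: hydrated records now expose a frozen record.xata object whose createdAt/updatedAt are real Date instances, getMetadata() simply returns it, and toSerializable()/toString() serialize through removeLinksFromObject instead of the old link transform. Sketch (table and record id assumed):

async function inspectRecord(xata) {
  const post = await xata.db.posts.read('rec_hello');
  if (!post) return null;

  console.log(post.xata.createdAt instanceof Date); // true
  console.log(post.getMetadata() === post.xata);    // same frozen metadata object

  // JSON-safe copy: links are flattened and the helper methods are left out.
  return post.toSerializable();
}
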
@@ -3061,11 +4135,7 @@ function extractId(value) {
3061
4135
  function isValidColumn(columns, column) {
3062
4136
  if (columns.includes("*"))
3063
4137
  return true;
3064
- if (column.type === "link") {
3065
- const linkColumns = columns.filter((item) => item.startsWith(column.name));
3066
- return linkColumns.length > 0;
3067
- }
3068
- return columns.includes(column.name);
4138
+ return columns.filter((item) => item.startsWith(column.name)).length > 0;
3069
4139
  }
3070
4140
  function parseIfVersion(...args) {
3071
4141
  for (const arg of args) {
@@ -3076,6 +4146,12 @@ function parseIfVersion(...args) {
3076
4146
  return void 0;
3077
4147
  }
3078
4148
 
4149
+ var __defProp$3 = Object.defineProperty;
4150
+ var __defNormalProp$3 = (obj, key, value) => key in obj ? __defProp$3(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
4151
+ var __publicField$3 = (obj, key, value) => {
4152
+ __defNormalProp$3(obj, typeof key !== "symbol" ? key + "" : key, value);
4153
+ return value;
4154
+ };
3079
4155
  var __accessCheck$3 = (obj, member, msg) => {
3080
4156
  if (!member.has(obj))
3081
4157
  throw TypeError("Cannot " + msg);
@@ -3098,6 +4174,8 @@ var _map;
3098
4174
  class SimpleCache {
3099
4175
  constructor(options = {}) {
3100
4176
  __privateAdd$3(this, _map, void 0);
4177
+ __publicField$3(this, "capacity");
4178
+ __publicField$3(this, "defaultQueryTTL");
3101
4179
  __privateSet$3(this, _map, /* @__PURE__ */ new Map());
3102
4180
  this.capacity = options.max ?? 500;
3103
4181
  this.defaultQueryTTL = options.defaultQueryTTL ?? 60 * 1e3;
@@ -3262,6 +4340,7 @@ search_fn = async function(query, options, pluginOptions) {
3262
4340
  const { tables, fuzziness, highlight, prefix, page } = options ?? {};
3263
4341
  const { records } = await searchBranch({
3264
4342
  pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
4343
+ // @ts-ignore https://github.com/xataio/client-ts/issues/313
3265
4344
  body: { tables, query, fuzziness, prefix, highlight, page },
3266
4345
  ...pluginOptions
3267
4346
  });
@@ -3294,6 +4373,12 @@ class TransactionPlugin extends XataPlugin {
3294
4373
  }
3295
4374
  }
3296
4375
 
4376
+ var __defProp$2 = Object.defineProperty;
4377
+ var __defNormalProp$2 = (obj, key, value) => key in obj ? __defProp$2(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
4378
+ var __publicField$2 = (obj, key, value) => {
4379
+ __defNormalProp$2(obj, typeof key !== "symbol" ? key + "" : key, value);
4380
+ return value;
4381
+ };
3297
4382
  var __accessCheck = (obj, member, msg) => {
3298
4383
  if (!member.has(obj))
3299
4384
  throw TypeError("Cannot " + msg);
@@ -3323,29 +4408,29 @@ const buildClient = (plugins) => {
3323
4408
  __privateAdd(this, _parseOptions);
3324
4409
  __privateAdd(this, _getFetchProps);
3325
4410
  __privateAdd(this, _options, void 0);
4411
+ __publicField$2(this, "db");
4412
+ __publicField$2(this, "search");
4413
+ __publicField$2(this, "transactions");
4414
+ __publicField$2(this, "files");
3326
4415
  const safeOptions = __privateMethod(this, _parseOptions, parseOptions_fn).call(this, options);
3327
4416
  __privateSet(this, _options, safeOptions);
3328
4417
  const pluginOptions = {
3329
4418
  ...__privateMethod(this, _getFetchProps, getFetchProps_fn).call(this, safeOptions),
3330
- cache: safeOptions.cache
4419
+ cache: safeOptions.cache,
4420
+ host: safeOptions.host
3331
4421
  };
3332
4422
  const db = new SchemaPlugin(schemaTables).build(pluginOptions);
3333
4423
  const search = new SearchPlugin(db, schemaTables).build(pluginOptions);
3334
4424
  const transactions = new TransactionPlugin().build(pluginOptions);
4425
+ const files = new FilesPlugin().build(pluginOptions);
3335
4426
  this.db = db;
3336
4427
  this.search = search;
3337
4428
  this.transactions = transactions;
4429
+ this.files = files;
3338
4430
  for (const [key, namespace] of Object.entries(plugins ?? {})) {
3339
4431
  if (namespace === void 0)
3340
4432
  continue;
3341
- const result = namespace.build(pluginOptions);
3342
- if (result instanceof Promise) {
3343
- void result.then((namespace2) => {
3344
- this[key] = namespace2;
3345
- });
3346
- } else {
3347
- this[key] = result;
3348
- }
4433
+ this[key] = namespace.build(pluginOptions);
3349
4434
  }
3350
4435
  }
3351
4436
  async getConfig() {
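
Note: buildClient no longer awaits plugin namespaces: each entry in the plugins map must return its namespace synchronously from build(pluginOptions), which now also receives host. A sketch of a custom plugin under that constraint; the plugin itself is hypothetical:

const { buildClient, XataPlugin } = require('@xata.io/client');

class PingPlugin extends XataPlugin {
  build(pluginOptions) {
    // Must return the namespace directly; returning a Promise is no longer supported.
    return { ping: () => `pong (${pluginOptions.host ?? 'unknown host'})` };
  }
}

const ClientWithPing = buildClient({ ping: new PingPlugin() });
const client = new ClientWithPing({
  databaseURL: process.env.XATA_DATABASE_URL,
  apiKey: process.env.XATA_API_KEY
});

console.log(client.ping.ping());
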
@@ -3363,7 +4448,6 @@ const buildClient = (plugins) => {
3363
4448
  }
3364
4449
  const fetch = getFetchImplementation(options?.fetch);
3365
4450
  const databaseURL = options?.databaseURL || getDatabaseURL();
3366
- const branch = options?.branch || getBranch() || "main";
3367
4451
  const apiKey = options?.apiKey || getAPIKey();
3368
4452
  const cache = options?.cache ?? new SimpleCache({ defaultQueryTTL: 0 });
3369
4453
  const trace = options?.trace ?? defaultTrace;
@@ -3376,6 +4460,26 @@ const buildClient = (plugins) => {
3376
4460
  if (!databaseURL) {
3377
4461
  throw new Error("Option databaseURL is required");
3378
4462
  }
4463
+ const envBranch = getBranch();
4464
+ const previewBranch = getPreviewBranch();
4465
+ const branch = options?.branch || previewBranch || envBranch || "main";
4466
+ if (!!previewBranch && branch !== previewBranch) {
4467
+ console.warn(
4468
+ `Ignoring preview branch ${previewBranch} because branch option was passed to the client constructor with value ${branch}`
4469
+ );
4470
+ } else if (!!envBranch && branch !== envBranch) {
4471
+ console.warn(
4472
+ `Ignoring branch ${envBranch} because branch option was passed to the client constructor with value ${branch}`
4473
+ );
4474
+ } else if (!!previewBranch && !!envBranch && previewBranch !== envBranch) {
4475
+ console.warn(
4476
+ `Ignoring preview branch ${previewBranch} and branch ${envBranch} because branch option was passed to the client constructor with value ${branch}`
4477
+ );
4478
+ } else if (!previewBranch && !envBranch && options?.branch === void 0) {
4479
+ console.warn(
4480
+ `No branch was passed to the client constructor. Using default branch ${branch}. You can set the branch with the environment variable XATA_BRANCH or by passing the branch option to the client constructor.`
4481
+ );
4482
+ }
3379
4483
  return {
3380
4484
  fetch,
3381
4485
  databaseURL,
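
Note: branch resolution now prefers the explicit branch option, then the preview branch, then the environment branch, then "main", and logs a console warning when an environment-derived branch is being overridden or when no branch can be determined at all. Sketch of pinning the branch through the existing environment variable:

const { BaseClient } = require('@xata.io/client');

// Resolution order: options.branch || preview branch || env branch || 'main'.
const client = new BaseClient({
  databaseURL: process.env.XATA_DATABASE_URL,
  apiKey: process.env.XATA_API_KEY,
  branch: process.env.XATA_BRANCH // if this differs from a detected preview/env branch, a warning is logged
});
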
@@ -3403,6 +4507,7 @@ const buildClient = (plugins) => {
3403
4507
  fetch,
3404
4508
  apiKey,
3405
4509
  apiUrl: "",
4510
+ // Instead of using workspace and dbBranch, we inject a probably CNAME'd URL
3406
4511
  workspacesApiUrl: (path, params) => {
3407
4512
  const hasBranch = params.dbBranchName ?? params.branch;
3408
4513
  const newPath = path.replace(/^\/db\/[^/]+/, hasBranch !== void 0 ? `:${branch}` : "");
@@ -3418,11 +4523,17 @@ const buildClient = (plugins) => {
3418
4523
  class BaseClient extends buildClient() {
3419
4524
  }
3420
4525
 
4526
+ var __defProp$1 = Object.defineProperty;
4527
+ var __defNormalProp$1 = (obj, key, value) => key in obj ? __defProp$1(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
4528
+ var __publicField$1 = (obj, key, value) => {
4529
+ __defNormalProp$1(obj, typeof key !== "symbol" ? key + "" : key, value);
4530
+ return value;
4531
+ };
3421
4532
  const META = "__";
3422
4533
  const VALUE = "___";
3423
4534
  class Serializer {
3424
4535
  constructor() {
3425
- this.classes = {};
4536
+ __publicField$1(this, "classes", {});
3426
4537
  }
3427
4538
  add(clazz) {
3428
4539
  this.classes[clazz.name] = clazz;
@@ -3500,9 +4611,16 @@ function buildWorkerRunner(config) {
3500
4611
  };
3501
4612
  }
3502
4613
 
4614
+ var __defProp = Object.defineProperty;
4615
+ var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
4616
+ var __publicField = (obj, key, value) => {
4617
+ __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);
4618
+ return value;
4619
+ };
3503
4620
  class XataError extends Error {
3504
4621
  constructor(message, status) {
3505
4622
  super(message);
4623
+ __publicField(this, "status");
3506
4624
  this.status = status;
3507
4625
  }
3508
4626
  }
@@ -3517,6 +4635,7 @@ exports.PAGINATION_MAX_SIZE = PAGINATION_MAX_SIZE;
3517
4635
  exports.Page = Page;
3518
4636
  exports.Query = Query;
3519
4637
  exports.RecordArray = RecordArray;
4638
+ exports.RecordColumnTypes = RecordColumnTypes;
3520
4639
  exports.Repository = Repository;
3521
4640
  exports.RestRepository = RestRepository;
3522
4641
  exports.SchemaPlugin = SchemaPlugin;
@@ -3526,6 +4645,7 @@ exports.SimpleCache = SimpleCache;
3526
4645
  exports.XataApiClient = XataApiClient;
3527
4646
  exports.XataApiPlugin = XataApiPlugin;
3528
4647
  exports.XataError = XataError;
4648
+ exports.XataFile = XataFile;
3529
4649
  exports.XataPlugin = XataPlugin;
3530
4650
  exports.acceptWorkspaceMemberInvite = acceptWorkspaceMemberInvite;
3531
4651
  exports.addGitBranchesEntry = addGitBranchesEntry;
@@ -3533,8 +4653,11 @@ exports.addTableColumn = addTableColumn;
3533
4653
  exports.aggregateTable = aggregateTable;
3534
4654
  exports.applyBranchSchemaEdit = applyBranchSchemaEdit;
3535
4655
  exports.askTable = askTable;
4656
+ exports.askTableSession = askTableSession;
3536
4657
  exports.branchTransaction = branchTransaction;
3537
4658
  exports.buildClient = buildClient;
4659
+ exports.buildPreviewBranchName = buildPreviewBranchName;
4660
+ exports.buildProviderString = buildProviderString;
3538
4661
  exports.buildWorkerRunner = buildWorkerRunner;
3539
4662
  exports.bulkInsertTableRecords = bulkInsertTableRecords;
3540
4663
  exports.cancelWorkspaceMemberInvite = cancelWorkspaceMemberInvite;
@@ -3542,6 +4665,7 @@ exports.compareBranchSchemas = compareBranchSchemas;
3542
4665
  exports.compareBranchWithUserSchema = compareBranchWithUserSchema;
3543
4666
  exports.compareMigrationRequest = compareMigrationRequest;
3544
4667
  exports.contains = contains;
4668
+ exports.copyBranch = copyBranch;
3545
4669
  exports.createBranch = createBranch;
3546
4670
  exports.createDatabase = createDatabase;
3547
4671
  exports.createMigrationRequest = createMigrationRequest;
@@ -3552,6 +4676,8 @@ exports.deleteBranch = deleteBranch;
3552
4676
  exports.deleteColumn = deleteColumn;
3553
4677
  exports.deleteDatabase = deleteDatabase;
3554
4678
  exports.deleteDatabaseGithubSettings = deleteDatabaseGithubSettings;
4679
+ exports.deleteFile = deleteFile;
4680
+ exports.deleteFileItem = deleteFileItem;
3555
4681
  exports.deleteRecord = deleteRecord;
3556
4682
  exports.deleteTable = deleteTable;
3557
4683
  exports.deleteUser = deleteUser;
@@ -3562,7 +4688,9 @@ exports.endsWith = endsWith;
3562
4688
  exports.equals = equals;
3563
4689
  exports.executeBranchMigrationPlan = executeBranchMigrationPlan;
3564
4690
  exports.exists = exists;
4691
+ exports.fileAccess = fileAccess;
3565
4692
  exports.ge = ge;
4693
+ exports.generateAccessToken = generateAccessToken;
3566
4694
  exports.getAPIKey = getAPIKey;
3567
4695
  exports.getBranch = getBranch;
3568
4696
  exports.getBranchDetails = getBranchDetails;
@@ -3577,10 +4705,13 @@ exports.getDatabaseGithubSettings = getDatabaseGithubSettings;
3577
4705
  exports.getDatabaseList = getDatabaseList;
3578
4706
  exports.getDatabaseMetadata = getDatabaseMetadata;
3579
4707
  exports.getDatabaseURL = getDatabaseURL;
4708
+ exports.getFile = getFile;
4709
+ exports.getFileItem = getFileItem;
3580
4710
  exports.getGitBranchesMapping = getGitBranchesMapping;
3581
4711
  exports.getHostUrl = getHostUrl;
3582
4712
  exports.getMigrationRequest = getMigrationRequest;
3583
4713
  exports.getMigrationRequestIsMerged = getMigrationRequestIsMerged;
4714
+ exports.getPreviewBranch = getPreviewBranch;
3584
4715
  exports.getRecord = getRecord;
3585
4716
  exports.getTableColumns = getTableColumns;
3586
4717
  exports.getTableSchema = getTableSchema;
@@ -3589,6 +4720,7 @@ exports.getUserAPIKeys = getUserAPIKeys;
3589
4720
  exports.getWorkspace = getWorkspace;
3590
4721
  exports.getWorkspaceMembersList = getWorkspaceMembersList;
3591
4722
  exports.getWorkspacesList = getWorkspacesList;
4723
+ exports.grantAuthorizationCode = grantAuthorizationCode;
3592
4724
  exports.greaterEquals = greaterEquals;
3593
4725
  exports.greaterThan = greaterThan;
3594
4726
  exports.greaterThanEquals = greaterThanEquals;
@@ -3623,16 +4755,21 @@ exports.parseProviderString = parseProviderString;
3623
4755
  exports.parseWorkspacesUrlParts = parseWorkspacesUrlParts;
3624
4756
  exports.pattern = pattern;
3625
4757
  exports.previewBranchSchemaEdit = previewBranchSchemaEdit;
4758
+ exports.pushBranchMigrations = pushBranchMigrations;
4759
+ exports.putFile = putFile;
4760
+ exports.putFileItem = putFileItem;
3626
4761
  exports.queryMigrationRequests = queryMigrationRequests;
3627
4762
  exports.queryTable = queryTable;
3628
4763
  exports.removeGitBranchesEntry = removeGitBranchesEntry;
3629
4764
  exports.removeWorkspaceMember = removeWorkspaceMember;
4765
+ exports.renameDatabase = renameDatabase;
3630
4766
  exports.resendWorkspaceMemberInvite = resendWorkspaceMemberInvite;
3631
4767
  exports.resolveBranch = resolveBranch;
3632
4768
  exports.searchBranch = searchBranch;
3633
4769
  exports.searchTable = searchTable;
3634
4770
  exports.serialize = serialize;
3635
4771
  exports.setTableSchema = setTableSchema;
4772
+ exports.sqlQuery = sqlQuery;
3636
4773
  exports.startsWith = startsWith;
3637
4774
  exports.summarizeTable = summarizeTable;
3638
4775
  exports.updateBranchMetadata = updateBranchMetadata;