@xata.io/client 0.0.0-alpha.vf9f8d99 → 0.0.0-alpha.vfa36696

This diff compares the contents of two publicly released versions of the package as they were published to their public registry. It is provided for informational purposes only.
package/dist/index.mjs CHANGED
@@ -27,8 +27,11 @@ function notEmpty(value) {
27
27
  function compact(arr) {
28
28
  return arr.filter(notEmpty);
29
29
  }
30
+ function compactObject(obj) {
31
+ return Object.fromEntries(Object.entries(obj).filter(([, value]) => notEmpty(value)));
32
+ }
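The new compactObject helper is the object counterpart of compact: it drops every entry whose value fails notEmpty, so optional fields simply disappear from request payloads. A minimal standalone sketch, assuming notEmpty keeps values that are neither null nor undefined:

  // Standalone illustration; the inline check mirrors the assumed behaviour of notEmpty.
  const compactObjectSketch = (obj) =>
    Object.fromEntries(Object.entries(obj).filter(([, value]) => value !== null && value !== undefined));

  const headers = { "Content-Type": "application/json", "X-Xata-Client-ID": undefined };
  console.log(compactObjectSketch(headers)); // => { "Content-Type": "application/json" }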
30
33
  function isObject(value) {
31
- return Boolean(value) && typeof value === "object" && !Array.isArray(value);
34
+ return Boolean(value) && typeof value === "object" && !Array.isArray(value) && !(value instanceof Date);
32
35
  }
33
36
  function isDefined(value) {
34
37
  return value !== null && value !== void 0;
@@ -83,6 +86,15 @@ function chunk(array, chunkSize) {
83
86
  async function timeout(ms) {
84
87
  return new Promise((resolve) => setTimeout(resolve, ms));
85
88
  }
89
+ function promiseMap(inputValues, mapper) {
90
+ const reducer = (acc$, inputValue) => acc$.then(
91
+ (acc) => mapper(inputValue).then((result) => {
92
+ acc.push(result);
93
+ return acc;
94
+ })
95
+ );
96
+ return inputValues.reduce(reducer, Promise.resolve([]));
97
+ }
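promiseMap chains the mapper calls through a single promise, so inputs are processed one at a time and in order, unlike Promise.all, which starts them all at once. A small sketch of the same reduction, with illustrative names:

  // Sequential mapping: each mapper call starts only after the previous one resolves.
  const promiseMapSketch = (inputValues, mapper) =>
    inputValues.reduce(
      (acc$, inputValue) => acc$.then((acc) => mapper(inputValue).then((result) => [...acc, result])),
      Promise.resolve([])
    );

  const delays = [30, 10, 20];
  promiseMapSketch(delays, (ms) => new Promise((resolve) => setTimeout(() => resolve(ms), ms))).then(
    (results) => console.log(results) // => [30, 10, 20], in input order
  );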
86
98
 
87
99
  function getEnvironment() {
88
100
  try {
@@ -91,8 +103,10 @@ function getEnvironment() {
91
103
  apiKey: process.env.XATA_API_KEY ?? getGlobalApiKey(),
92
104
  databaseURL: process.env.XATA_DATABASE_URL ?? getGlobalDatabaseURL(),
93
105
  branch: process.env.XATA_BRANCH ?? getGlobalBranch(),
94
- envBranch: process.env.VERCEL_GIT_COMMIT_REF ?? process.env.CF_PAGES_BRANCH ?? process.env.BRANCH,
95
- fallbackBranch: process.env.XATA_FALLBACK_BRANCH ?? getGlobalFallbackBranch()
106
+ deployPreview: process.env.XATA_PREVIEW,
107
+ deployPreviewBranch: process.env.XATA_PREVIEW_BRANCH,
108
+ vercelGitCommitRef: process.env.VERCEL_GIT_COMMIT_REF,
109
+ vercelGitRepoOwner: process.env.VERCEL_GIT_REPO_OWNER
96
110
  };
97
111
  }
98
112
  } catch (err) {
@@ -103,8 +117,10 @@ function getEnvironment() {
103
117
  apiKey: Deno.env.get("XATA_API_KEY") ?? getGlobalApiKey(),
104
118
  databaseURL: Deno.env.get("XATA_DATABASE_URL") ?? getGlobalDatabaseURL(),
105
119
  branch: Deno.env.get("XATA_BRANCH") ?? getGlobalBranch(),
106
- envBranch: Deno.env.get("VERCEL_GIT_COMMIT_REF") ?? Deno.env.get("CF_PAGES_BRANCH") ?? Deno.env.get("BRANCH"),
107
- fallbackBranch: Deno.env.get("XATA_FALLBACK_BRANCH") ?? getGlobalFallbackBranch()
120
+ deployPreview: Deno.env.get("XATA_PREVIEW"),
121
+ deployPreviewBranch: Deno.env.get("XATA_PREVIEW_BRANCH"),
122
+ vercelGitCommitRef: Deno.env.get("VERCEL_GIT_COMMIT_REF"),
123
+ vercelGitRepoOwner: Deno.env.get("VERCEL_GIT_REPO_OWNER")
108
124
  };
109
125
  }
110
126
  } catch (err) {
@@ -113,8 +129,10 @@ function getEnvironment() {
113
129
  apiKey: getGlobalApiKey(),
114
130
  databaseURL: getGlobalDatabaseURL(),
115
131
  branch: getGlobalBranch(),
116
- envBranch: void 0,
117
- fallbackBranch: getGlobalFallbackBranch()
132
+ deployPreview: void 0,
133
+ deployPreviewBranch: void 0,
134
+ vercelGitCommitRef: void 0,
135
+ vercelGitRepoOwner: void 0
118
136
  };
119
137
  }
120
138
  function getEnableBrowserVariable() {
@@ -157,39 +175,59 @@ function getGlobalBranch() {
157
175
  return void 0;
158
176
  }
159
177
  }
160
- function getGlobalFallbackBranch() {
178
+ function getDatabaseURL() {
161
179
  try {
162
- return XATA_FALLBACK_BRANCH;
180
+ const { databaseURL } = getEnvironment();
181
+ return databaseURL;
163
182
  } catch (err) {
164
183
  return void 0;
165
184
  }
166
185
  }
167
- function getDatabaseURL() {
186
+ function getAPIKey() {
168
187
  try {
169
- const { databaseURL } = getEnvironment();
170
- return databaseURL;
188
+ const { apiKey } = getEnvironment();
189
+ return apiKey;
171
190
  } catch (err) {
172
191
  return void 0;
173
192
  }
174
193
  }
175
194
  function getBranch() {
176
195
  try {
177
- const { branch, envBranch } = getEnvironment();
178
- return branch ?? envBranch;
196
+ const { branch } = getEnvironment();
197
+ return branch ?? "main";
179
198
  } catch (err) {
180
199
  return void 0;
181
200
  }
182
201
  }
183
-
184
- function getAPIKey() {
202
+ function buildPreviewBranchName({ org, branch }) {
203
+ return `preview-${org}-${branch}`;
204
+ }
205
+ function getPreviewBranch() {
185
206
  try {
186
- const { apiKey } = getEnvironment();
187
- return apiKey;
207
+ const { deployPreview, deployPreviewBranch, vercelGitCommitRef, vercelGitRepoOwner } = getEnvironment();
208
+ if (deployPreviewBranch)
209
+ return deployPreviewBranch;
210
+ switch (deployPreview) {
211
+ case "vercel": {
212
+ if (!vercelGitCommitRef || !vercelGitRepoOwner) {
213
+ console.warn("XATA_PREVIEW=vercel but VERCEL_GIT_COMMIT_REF or VERCEL_GIT_REPO_OWNER is not valid");
214
+ return void 0;
215
+ }
216
+ return buildPreviewBranchName({ org: vercelGitRepoOwner, branch: vercelGitCommitRef });
217
+ }
218
+ }
219
+ return void 0;
188
220
  } catch (err) {
189
221
  return void 0;
190
222
  }
191
223
  }
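The envBranch/fallbackBranch resolution is replaced by an explicit deploy-preview scheme: XATA_PREVIEW_BRANCH wins outright, and XATA_PREVIEW=vercel derives a branch named preview-<repo owner>-<git ref> from the Vercel build environment. A hypothetical configuration sketch (the owner and ref values are placeholders):

  // Illustrative only: mirrors buildPreviewBranchName above.
  process.env.XATA_PREVIEW = "vercel";
  process.env.VERCEL_GIT_REPO_OWNER = "acme";
  process.env.VERCEL_GIT_COMMIT_REF = "feature/login";

  const previewBranchSketch = ({ org, branch }) => `preview-${org}-${branch}`;
  console.log(
    previewBranchSketch({
      org: process.env.VERCEL_GIT_REPO_OWNER,
      branch: process.env.VERCEL_GIT_COMMIT_REF
    })
  ); // => "preview-acme-feature/login"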
192
224
 
225
+ var __defProp$8 = Object.defineProperty;
226
+ var __defNormalProp$8 = (obj, key, value) => key in obj ? __defProp$8(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
227
+ var __publicField$8 = (obj, key, value) => {
228
+ __defNormalProp$8(obj, typeof key !== "symbol" ? key + "" : key, value);
229
+ return value;
230
+ };
193
231
  var __accessCheck$8 = (obj, member, msg) => {
194
232
  if (!member.has(obj))
195
233
  throw TypeError("Cannot " + msg);
@@ -213,6 +251,7 @@ var __privateMethod$4 = (obj, member, method) => {
213
251
  return method;
214
252
  };
215
253
  var _fetch, _queue, _concurrency, _enqueue, enqueue_fn;
254
+ const REQUEST_TIMEOUT = 3e4;
216
255
  function getFetchImplementation(userFetch) {
217
256
  const globalFetch = typeof fetch !== "undefined" ? fetch : void 0;
218
257
  const fetchImpl = userFetch ?? globalFetch;
@@ -229,6 +268,8 @@ class ApiRequestPool {
229
268
  __privateAdd$8(this, _fetch, void 0);
230
269
  __privateAdd$8(this, _queue, void 0);
231
270
  __privateAdd$8(this, _concurrency, void 0);
271
+ __publicField$8(this, "running");
272
+ __publicField$8(this, "started");
232
273
  __privateSet$8(this, _queue, []);
233
274
  __privateSet$8(this, _concurrency, concurrency);
234
275
  this.running = 0;
@@ -244,17 +285,20 @@ class ApiRequestPool {
244
285
  return __privateGet$8(this, _fetch);
245
286
  }
246
287
  request(url, options) {
247
- const start = new Date();
288
+ const start = /* @__PURE__ */ new Date();
248
289
  const fetch2 = this.getFetch();
249
290
  const runRequest = async (stalled = false) => {
250
- const response = await fetch2(url, options);
291
+ const response = await Promise.race([fetch2(url, options), timeout(REQUEST_TIMEOUT).then(() => null)]);
292
+ if (!response) {
293
+ throw new Error("Request timed out");
294
+ }
251
295
  if (response.status === 429) {
252
296
  const rateLimitReset = parseNumber(response.headers?.get("x-ratelimit-reset")) ?? 1;
253
297
  await timeout(rateLimitReset * 1e3);
254
298
  return await runRequest(true);
255
299
  }
256
300
  if (stalled) {
257
- const stalledTime = new Date().getTime() - start.getTime();
301
+ const stalledTime = (/* @__PURE__ */ new Date()).getTime() - start.getTime();
258
302
  console.warn(`A request to Xata hit your workspace limits, was retried and stalled for ${stalledTime}ms`);
259
303
  }
260
304
  return response;
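Requests now race the underlying fetch against a 30-second timer (REQUEST_TIMEOUT); a null winner is treated as a timeout and surfaces as an error, while the losing fetch is simply left to settle on its own rather than being aborted. A condensed sketch of the pattern, with an illustrative constant name:

  // Minimal reproduction of the race above; fetchImpl stands in for the pool's configured fetch.
  const REQUEST_TIMEOUT_MS = 30_000;
  const wait = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

  async function fetchWithTimeout(url, options, fetchImpl = fetch) {
    const response = await Promise.race([fetchImpl(url, options), wait(REQUEST_TIMEOUT_MS).then(() => null)]);
    if (!response) throw new Error("Request timed out");
    return response;
  }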
@@ -297,16 +341,199 @@ function generateUUID() {
297
341
  });
298
342
  }
299
343
 
300
- const VERSION = "0.22.3";
344
+ async function getBytes(stream, onChunk) {
345
+ const reader = stream.getReader();
346
+ let result;
347
+ while (!(result = await reader.read()).done) {
348
+ onChunk(result.value);
349
+ }
350
+ }
351
+ function getLines(onLine) {
352
+ let buffer;
353
+ let position;
354
+ let fieldLength;
355
+ let discardTrailingNewline = false;
356
+ return function onChunk(arr) {
357
+ if (buffer === void 0) {
358
+ buffer = arr;
359
+ position = 0;
360
+ fieldLength = -1;
361
+ } else {
362
+ buffer = concat(buffer, arr);
363
+ }
364
+ const bufLength = buffer.length;
365
+ let lineStart = 0;
366
+ while (position < bufLength) {
367
+ if (discardTrailingNewline) {
368
+ if (buffer[position] === 10 /* NewLine */) {
369
+ lineStart = ++position;
370
+ }
371
+ discardTrailingNewline = false;
372
+ }
373
+ let lineEnd = -1;
374
+ for (; position < bufLength && lineEnd === -1; ++position) {
375
+ switch (buffer[position]) {
376
+ case 58 /* Colon */:
377
+ if (fieldLength === -1) {
378
+ fieldLength = position - lineStart;
379
+ }
380
+ break;
381
+ case 13 /* CarriageReturn */:
382
+ discardTrailingNewline = true;
383
+ case 10 /* NewLine */:
384
+ lineEnd = position;
385
+ break;
386
+ }
387
+ }
388
+ if (lineEnd === -1) {
389
+ break;
390
+ }
391
+ onLine(buffer.subarray(lineStart, lineEnd), fieldLength);
392
+ lineStart = position;
393
+ fieldLength = -1;
394
+ }
395
+ if (lineStart === bufLength) {
396
+ buffer = void 0;
397
+ } else if (lineStart !== 0) {
398
+ buffer = buffer.subarray(lineStart);
399
+ position -= lineStart;
400
+ }
401
+ };
402
+ }
403
+ function getMessages(onId, onRetry, onMessage) {
404
+ let message = newMessage();
405
+ const decoder = new TextDecoder();
406
+ return function onLine(line, fieldLength) {
407
+ if (line.length === 0) {
408
+ onMessage?.(message);
409
+ message = newMessage();
410
+ } else if (fieldLength > 0) {
411
+ const field = decoder.decode(line.subarray(0, fieldLength));
412
+ const valueOffset = fieldLength + (line[fieldLength + 1] === 32 /* Space */ ? 2 : 1);
413
+ const value = decoder.decode(line.subarray(valueOffset));
414
+ switch (field) {
415
+ case "data":
416
+ message.data = message.data ? message.data + "\n" + value : value;
417
+ break;
418
+ case "event":
419
+ message.event = value;
420
+ break;
421
+ case "id":
422
+ onId(message.id = value);
423
+ break;
424
+ case "retry":
425
+ const retry = parseInt(value, 10);
426
+ if (!isNaN(retry)) {
427
+ onRetry(message.retry = retry);
428
+ }
429
+ break;
430
+ }
431
+ }
432
+ };
433
+ }
434
+ function concat(a, b) {
435
+ const res = new Uint8Array(a.length + b.length);
436
+ res.set(a);
437
+ res.set(b, a.length);
438
+ return res;
439
+ }
440
+ function newMessage() {
441
+ return {
442
+ data: "",
443
+ event: "",
444
+ id: "",
445
+ retry: void 0
446
+ };
447
+ }
448
+ const EventStreamContentType = "text/event-stream";
449
+ const LastEventId = "last-event-id";
450
+ function fetchEventSource(input, {
451
+ signal: inputSignal,
452
+ headers: inputHeaders,
453
+ onopen: inputOnOpen,
454
+ onmessage,
455
+ onclose,
456
+ onerror,
457
+ fetch: inputFetch,
458
+ ...rest
459
+ }) {
460
+ return new Promise((resolve, reject) => {
461
+ const headers = { ...inputHeaders };
462
+ if (!headers.accept) {
463
+ headers.accept = EventStreamContentType;
464
+ }
465
+ let curRequestController;
466
+ function dispose() {
467
+ curRequestController.abort();
468
+ }
469
+ inputSignal?.addEventListener("abort", () => {
470
+ dispose();
471
+ resolve();
472
+ });
473
+ const fetchImpl = inputFetch ?? fetch;
474
+ const onopen = inputOnOpen ?? defaultOnOpen;
475
+ async function create() {
476
+ curRequestController = new AbortController();
477
+ try {
478
+ const response = await fetchImpl(input, {
479
+ ...rest,
480
+ headers,
481
+ signal: curRequestController.signal
482
+ });
483
+ await onopen(response);
484
+ await getBytes(
485
+ response.body,
486
+ getLines(
487
+ getMessages(
488
+ (id) => {
489
+ if (id) {
490
+ headers[LastEventId] = id;
491
+ } else {
492
+ delete headers[LastEventId];
493
+ }
494
+ },
495
+ (_retry) => {
496
+ },
497
+ onmessage
498
+ )
499
+ )
500
+ );
501
+ onclose?.();
502
+ dispose();
503
+ resolve();
504
+ } catch (err) {
505
+ }
506
+ }
507
+ create();
508
+ });
509
+ }
510
+ function defaultOnOpen(response) {
511
+ const contentType = response.headers?.get("content-type");
512
+ if (!contentType?.startsWith(EventStreamContentType)) {
513
+ throw new Error(`Expected content-type to be ${EventStreamContentType}, Actual: ${contentType}`);
514
+ }
515
+ }
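The block above bundles a small server-sent-events client: getBytes drains the response stream, getLines reassembles SSE lines across chunk boundaries, getMessages folds them into { data, event, id, retry } records, and fetchEventSource wires it all to a fetch call. A hypothetical usage sketch (the endpoint URL is made up, and the helper is module-internal):

  const controller = new AbortController();
  void fetchEventSource("https://example.com/stream", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ query: "hello" }),
    signal: controller.signal,
    onmessage: (message) => {
      // Each message carries the raw SSE fields assembled above: { data, event, id, retry }.
      console.log(message.event, message.data);
    },
    onclose: () => console.log("stream finished")
  });
  // controller.abort() resolves the returned promise and stops the request.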
516
+
517
+ const VERSION = "0.24.3";
301
518
 
519
+ var __defProp$7 = Object.defineProperty;
520
+ var __defNormalProp$7 = (obj, key, value) => key in obj ? __defProp$7(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
521
+ var __publicField$7 = (obj, key, value) => {
522
+ __defNormalProp$7(obj, typeof key !== "symbol" ? key + "" : key, value);
523
+ return value;
524
+ };
302
525
  class ErrorWithCause extends Error {
303
526
  constructor(message, options) {
304
527
  super(message, options);
528
+ __publicField$7(this, "cause");
305
529
  }
306
530
  }
307
531
  class FetcherError extends ErrorWithCause {
308
532
  constructor(status, data, requestId) {
309
533
  super(getMessage(data));
534
+ __publicField$7(this, "status");
535
+ __publicField$7(this, "requestId");
536
+ __publicField$7(this, "errors");
310
537
  this.status = status;
311
538
  this.errors = isBulkError(data) ? data.errors : [{ message: getMessage(data), status }];
312
539
  this.requestId = requestId;
@@ -373,6 +600,15 @@ function hostHeader(url) {
373
600
  const { groups } = pattern.exec(url) ?? {};
374
601
  return groups?.host ? { Host: groups.host } : {};
375
602
  }
603
+ function parseBody(body, headers) {
604
+ if (!isDefined(body))
605
+ return void 0;
606
+ const { "Content-Type": contentType } = headers ?? {};
607
+ if (String(contentType).toLowerCase() === "application/json") {
608
+ return JSON.stringify(body);
609
+ }
610
+ return body;
611
+ }
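parseBody makes the request body content-type aware: JSON payloads are stringified as before, while anything sent with a non-JSON Content-Type (for example the binary bodies used by the new file endpoints) is passed through untouched. A standalone behaviour sketch:

  const defined = (value) => value !== null && value !== undefined;
  function parseBodySketch(body, headers) {
    if (!defined(body)) return undefined;
    const { "Content-Type": contentType } = headers ?? {};
    if (String(contentType).toLowerCase() === "application/json") return JSON.stringify(body);
    return body;
  }

  console.log(parseBodySketch({ name: "cat" }, { "Content-Type": "application/json" })); // '{"name":"cat"}'
  const bytes = new Uint8Array([137, 80, 78, 71]);
  console.log(parseBodySketch(bytes, { "Content-Type": "image/png" }) === bytes); // true: binary passes through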
376
612
  const defaultClientID = generateUUID();
377
613
  async function fetch$1({
378
614
  url: path,
@@ -392,7 +628,8 @@ async function fetch$1({
392
628
  sessionID,
393
629
  clientName,
394
630
  xataAgentExtra,
395
- fetchOptions = {}
631
+ fetchOptions = {},
632
+ rawResponse = false
396
633
  }) {
397
634
  pool.setFetch(fetch2);
398
635
  return await trace(
@@ -411,7 +648,7 @@ async function fetch$1({
411
648
  isDefined(clientName) ? ["service", clientName] : void 0,
412
649
  ...Object.entries(xataAgentExtra ?? {})
413
650
  ]).map(([key, value]) => `${key}=${value}`).join("; ");
414
- const headers = {
651
+ const headers = compactObject({
415
652
  "Accept-Encoding": "identity",
416
653
  "Content-Type": "application/json",
417
654
  "X-Xata-Client-ID": clientID ?? defaultClientID,
@@ -420,11 +657,11 @@ async function fetch$1({
420
657
  ...customHeaders,
421
658
  ...hostHeader(fullUrl),
422
659
  Authorization: `Bearer ${apiKey}`
423
- };
660
+ });
424
661
  const response = await pool.request(url, {
425
662
  ...fetchOptions,
426
663
  method: method.toUpperCase(),
427
- body: body ? JSON.stringify(body) : void 0,
664
+ body: parseBody(body, headers),
428
665
  headers,
429
666
  signal
430
667
  });
@@ -437,6 +674,9 @@ async function fetch$1({
437
674
  [TraceAttributes.HTTP_HOST]: host,
438
675
  [TraceAttributes.HTTP_SCHEME]: protocol?.replace(":", "")
439
676
  });
677
+ const message = response.headers?.get("x-xata-message");
678
+ if (message)
679
+ console.warn(message);
440
680
  if (response.status === 204) {
441
681
  return {};
442
682
  }
@@ -444,7 +684,7 @@ async function fetch$1({
444
684
  throw new FetcherError(response.status, "Rate limit exceeded", requestId);
445
685
  }
446
686
  try {
447
- const jsonResponse = await response.json();
687
+ const jsonResponse = rawResponse ? await response.blob() : await response.json();
448
688
  if (response.ok) {
449
689
  return jsonResponse;
450
690
  }
@@ -456,6 +696,59 @@ async function fetch$1({
456
696
  { [TraceAttributes.HTTP_METHOD]: method.toUpperCase(), [TraceAttributes.HTTP_ROUTE]: path }
457
697
  );
458
698
  }
699
+ function fetchSSERequest({
700
+ url: path,
701
+ method,
702
+ body,
703
+ headers: customHeaders,
704
+ pathParams,
705
+ queryParams,
706
+ fetch: fetch2,
707
+ apiKey,
708
+ endpoint,
709
+ apiUrl,
710
+ workspacesApiUrl,
711
+ onMessage,
712
+ onError,
713
+ onClose,
714
+ signal,
715
+ clientID,
716
+ sessionID,
717
+ clientName,
718
+ xataAgentExtra
719
+ }) {
720
+ const baseUrl = buildBaseUrl({ endpoint, path, workspacesApiUrl, pathParams, apiUrl });
721
+ const fullUrl = resolveUrl(baseUrl, queryParams, pathParams);
722
+ const url = fullUrl.includes("localhost") ? fullUrl.replace(/^[^.]+\./, "http://") : fullUrl;
723
+ void fetchEventSource(url, {
724
+ method,
725
+ body: JSON.stringify(body),
726
+ fetch: fetch2,
727
+ signal,
728
+ headers: {
729
+ "X-Xata-Client-ID": clientID ?? defaultClientID,
730
+ "X-Xata-Session-ID": sessionID ?? generateUUID(),
731
+ "X-Xata-Agent": compact([
732
+ ["client", "TS_SDK"],
733
+ ["version", VERSION],
734
+ isDefined(clientName) ? ["service", clientName] : void 0,
735
+ ...Object.entries(xataAgentExtra ?? {})
736
+ ]).map(([key, value]) => `${key}=${value}`).join("; "),
737
+ ...customHeaders,
738
+ Authorization: `Bearer ${apiKey}`,
739
+ "Content-Type": "application/json"
740
+ },
741
+ onmessage(ev) {
742
+ onMessage?.(JSON.parse(ev.data));
743
+ },
744
+ onerror(ev) {
745
+ onError?.(JSON.parse(ev.data));
746
+ },
747
+ onclose() {
748
+ onClose?.();
749
+ }
750
+ });
751
+ }
459
752
  function parseUrl(url) {
460
753
  try {
461
754
  const { host, protocol } = new URL(url);
@@ -486,6 +779,12 @@ const deleteBranch = (variables, signal) => dataPlaneFetch({
486
779
  ...variables,
487
780
  signal
488
781
  });
782
+ const copyBranch = (variables, signal) => dataPlaneFetch({
783
+ url: "/db/{dbBranchName}/copy",
784
+ method: "post",
785
+ ...variables,
786
+ signal
787
+ });
489
788
  const updateBranchMetadata = (variables, signal) => dataPlaneFetch({
490
789
  url: "/db/{dbBranchName}/metadata",
491
790
  method: "put",
@@ -535,6 +834,7 @@ const compareBranchSchemas = (variables, signal) => dataPlaneFetch({ url: "/db/{
535
834
  const updateBranchSchema = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/update", method: "post", ...variables, signal });
536
835
  const previewBranchSchemaEdit = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/preview", method: "post", ...variables, signal });
537
836
  const applyBranchSchemaEdit = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/apply", method: "post", ...variables, signal });
837
+ const pushBranchMigrations = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/push", method: "post", ...variables, signal });
538
838
  const createTable = (variables, signal) => dataPlaneFetch({
539
839
  url: "/db/{dbBranchName}/tables/{tableName}",
540
840
  method: "put",
@@ -579,6 +879,42 @@ const deleteColumn = (variables, signal) => dataPlaneFetch({
579
879
  });
580
880
  const branchTransaction = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/transaction", method: "post", ...variables, signal });
581
881
  const insertRecord = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/data", method: "post", ...variables, signal });
882
+ const getFileItem = (variables, signal) => dataPlaneFetch({
883
+ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}/column/{columnName}/file/{fileId}",
884
+ method: "get",
885
+ ...variables,
886
+ signal
887
+ });
888
+ const putFileItem = (variables, signal) => dataPlaneFetch({
889
+ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}/column/{columnName}/file/{fileId}",
890
+ method: "put",
891
+ ...variables,
892
+ signal
893
+ });
894
+ const deleteFileItem = (variables, signal) => dataPlaneFetch({
895
+ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}/column/{columnName}/file/{fileId}",
896
+ method: "delete",
897
+ ...variables,
898
+ signal
899
+ });
900
+ const getFile = (variables, signal) => dataPlaneFetch({
901
+ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}/column/{columnName}/file",
902
+ method: "get",
903
+ ...variables,
904
+ signal
905
+ });
906
+ const putFile = (variables, signal) => dataPlaneFetch({
907
+ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}/column/{columnName}/file",
908
+ method: "put",
909
+ ...variables,
910
+ signal
911
+ });
912
+ const deleteFile = (variables, signal) => dataPlaneFetch({
913
+ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}/column/{columnName}/file",
914
+ method: "delete",
915
+ ...variables,
916
+ signal
917
+ });
582
918
  const getRecord = (variables, signal) => dataPlaneFetch({
583
919
  url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
584
920
  method: "get",
@@ -608,6 +944,12 @@ const searchTable = (variables, signal) => dataPlaneFetch({
608
944
  ...variables,
609
945
  signal
610
946
  });
947
+ const sqlQuery = (variables, signal) => dataPlaneFetch({
948
+ url: "/db/{dbBranchName}/sql",
949
+ method: "post",
950
+ ...variables,
951
+ signal
952
+ });
611
953
  const vectorSearchTable = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/vectorSearch", method: "post", ...variables, signal });
612
954
  const askTable = (variables, signal) => dataPlaneFetch({
613
955
  url: "/db/{dbBranchName}/tables/{tableName}/ask",
@@ -615,14 +957,22 @@ const askTable = (variables, signal) => dataPlaneFetch({
615
957
  ...variables,
616
958
  signal
617
959
  });
960
+ const askTableSession = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/ask/{sessionId}", method: "post", ...variables, signal });
618
961
  const summarizeTable = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/summarize", method: "post", ...variables, signal });
619
962
  const aggregateTable = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/aggregate", method: "post", ...variables, signal });
963
+ const fileAccess = (variables, signal) => dataPlaneFetch({
964
+ url: "/file/{fileId}",
965
+ method: "get",
966
+ ...variables,
967
+ signal
968
+ });
620
969
  const operationsByTag$2 = {
621
970
  branch: {
622
971
  getBranchList,
623
972
  getBranchDetails,
624
973
  createBranch,
625
974
  deleteBranch,
975
+ copyBranch,
626
976
  updateBranchMetadata,
627
977
  getBranchMetadata,
628
978
  getBranchStats,
@@ -640,7 +990,8 @@ const operationsByTag$2 = {
640
990
  compareBranchSchemas,
641
991
  updateBranchSchema,
642
992
  previewBranchSchemaEdit,
643
- applyBranchSchemaEdit
993
+ applyBranchSchemaEdit,
994
+ pushBranchMigrations
644
995
  },
645
996
  migrationRequests: {
646
997
  queryMigrationRequests,
@@ -674,12 +1025,15 @@ const operationsByTag$2 = {
674
1025
  deleteRecord,
675
1026
  bulkInsertTableRecords
676
1027
  },
1028
+ files: { getFileItem, putFileItem, deleteFileItem, getFile, putFile, deleteFile, fileAccess },
677
1029
  searchAndFilter: {
678
1030
  queryTable,
679
1031
  searchBranch,
680
1032
  searchTable,
1033
+ sqlQuery,
681
1034
  vectorSearchTable,
682
1035
  askTable,
1036
+ askTableSession,
683
1037
  summarizeTable,
684
1038
  aggregateTable
685
1039
  }
@@ -687,6 +1041,13 @@ const operationsByTag$2 = {
687
1041
 
688
1042
  const controlPlaneFetch = async (options) => fetch$1({ ...options, endpoint: "controlPlane" });
689
1043
 
1044
+ const grantAuthorizationCode = (variables, signal) => controlPlaneFetch({ url: "/oauth/authorize", method: "post", ...variables, signal });
1045
+ const generateAccessToken = (variables, signal) => controlPlaneFetch({
1046
+ url: "/oauth/token",
1047
+ method: "post",
1048
+ ...variables,
1049
+ signal
1050
+ });
690
1051
  const getUser = (variables, signal) => controlPlaneFetch({
691
1052
  url: "/user",
692
1053
  method: "get",
@@ -781,6 +1142,7 @@ const deleteDatabase = (variables, signal) => controlPlaneFetch({
781
1142
  });
782
1143
  const getDatabaseMetadata = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}", method: "get", ...variables, signal });
783
1144
  const updateDatabaseMetadata = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}", method: "patch", ...variables, signal });
1145
+ const renameDatabase = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}/rename", method: "post", ...variables, signal });
784
1146
  const getDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}/github", method: "get", ...variables, signal });
785
1147
  const updateDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}/github", method: "put", ...variables, signal });
786
1148
  const deleteDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}/github", method: "delete", ...variables, signal });
@@ -791,6 +1153,7 @@ const listRegions = (variables, signal) => controlPlaneFetch({
791
1153
  signal
792
1154
  });
793
1155
  const operationsByTag$1 = {
1156
+ authOther: { grantAuthorizationCode, generateAccessToken },
794
1157
  users: { getUser, updateUser, deleteUser },
795
1158
  authentication: { getUserAPIKeys, createUserAPIKey, deleteUserAPIKey },
796
1159
  workspaces: {
@@ -816,6 +1179,7 @@ const operationsByTag$1 = {
816
1179
  deleteDatabase,
817
1180
  getDatabaseMetadata,
818
1181
  updateDatabaseMetadata,
1182
+ renameDatabase,
819
1183
  getDatabaseGithubSettings,
820
1184
  updateDatabaseGithubSettings,
821
1185
  deleteDatabaseGithubSettings,
@@ -841,6 +1205,10 @@ const providers = {
841
1205
  staging: {
842
1206
  main: "https://api.staging-xata.dev",
843
1207
  workspaces: "https://{workspaceId}.{region}.staging-xata.dev"
1208
+ },
1209
+ dev: {
1210
+ main: "https://api.dev-xata.dev",
1211
+ workspaces: "https://{workspaceId}.{region}.dev-xata.dev"
844
1212
  }
845
1213
  };
846
1214
  function isHostProviderAlias(alias) {
@@ -858,6 +1226,11 @@ function parseProviderString(provider = "production") {
858
1226
  return null;
859
1227
  return { main, workspaces };
860
1228
  }
1229
+ function buildProviderString(provider) {
1230
+ if (isHostProviderAlias(provider))
1231
+ return provider;
1232
+ return `${provider.main},${provider.workspaces}`;
1233
+ }
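buildProviderString is the inverse of parseProviderString: aliases pass through unchanged and custom hosts are serialized back to the "main,workspaces" form. A behaviour sketch with the alias check reduced to a plain string test:

  const buildProviderStringSketch = (provider) =>
    typeof provider === "string" ? provider : `${provider.main},${provider.workspaces}`;

  console.log(buildProviderStringSketch("staging")); // => "staging"
  console.log(
    buildProviderStringSketch({ main: "https://api.example.com", workspaces: "https://{workspaceId}.example.com" })
  ); // => "https://api.example.com,https://{workspaceId}.example.com"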
861
1234
  function parseWorkspacesUrlParts(url) {
862
1235
  if (!isString(url))
863
1236
  return null;
@@ -962,6 +1335,11 @@ class XataApiClient {
962
1335
  __privateGet$7(this, _namespaces).records = new RecordsApi(__privateGet$7(this, _extraProps));
963
1336
  return __privateGet$7(this, _namespaces).records;
964
1337
  }
1338
+ get files() {
1339
+ if (!__privateGet$7(this, _namespaces).files)
1340
+ __privateGet$7(this, _namespaces).files = new FilesApi(__privateGet$7(this, _extraProps));
1341
+ return __privateGet$7(this, _namespaces).files;
1342
+ }
965
1343
  get searchAndFilter() {
966
1344
  if (!__privateGet$7(this, _namespaces).searchAndFilter)
967
1345
  __privateGet$7(this, _namespaces).searchAndFilter = new SearchAndFilterApi(__privateGet$7(this, _extraProps));
@@ -1170,6 +1548,20 @@ class BranchApi {
1170
1548
  ...this.extraProps
1171
1549
  });
1172
1550
  }
1551
+ copyBranch({
1552
+ workspace,
1553
+ region,
1554
+ database,
1555
+ branch,
1556
+ destinationBranch,
1557
+ limit
1558
+ }) {
1559
+ return operationsByTag.branch.copyBranch({
1560
+ pathParams: { workspace, region, dbBranchName: `${database}:${branch}` },
1561
+ body: { destinationBranch, limit },
1562
+ ...this.extraProps
1563
+ });
1564
+ }
1173
1565
  updateBranchMetadata({
1174
1566
  workspace,
1175
1567
  region,
@@ -1525,6 +1917,164 @@ class RecordsApi {
1525
1917
  });
1526
1918
  }
1527
1919
  }
1920
+ class FilesApi {
1921
+ constructor(extraProps) {
1922
+ this.extraProps = extraProps;
1923
+ }
1924
+ getFileItem({
1925
+ workspace,
1926
+ region,
1927
+ database,
1928
+ branch,
1929
+ table,
1930
+ record,
1931
+ column,
1932
+ fileId
1933
+ }) {
1934
+ return operationsByTag.files.getFileItem({
1935
+ pathParams: {
1936
+ workspace,
1937
+ region,
1938
+ dbBranchName: `${database}:${branch}`,
1939
+ tableName: table,
1940
+ recordId: record,
1941
+ columnName: column,
1942
+ fileId
1943
+ },
1944
+ ...this.extraProps
1945
+ });
1946
+ }
1947
+ putFileItem({
1948
+ workspace,
1949
+ region,
1950
+ database,
1951
+ branch,
1952
+ table,
1953
+ record,
1954
+ column,
1955
+ fileId,
1956
+ file
1957
+ }) {
1958
+ return operationsByTag.files.putFileItem({
1959
+ pathParams: {
1960
+ workspace,
1961
+ region,
1962
+ dbBranchName: `${database}:${branch}`,
1963
+ tableName: table,
1964
+ recordId: record,
1965
+ columnName: column,
1966
+ fileId
1967
+ },
1968
+ // @ts-ignore
1969
+ body: file,
1970
+ ...this.extraProps
1971
+ });
1972
+ }
1973
+ deleteFileItem({
1974
+ workspace,
1975
+ region,
1976
+ database,
1977
+ branch,
1978
+ table,
1979
+ record,
1980
+ column,
1981
+ fileId
1982
+ }) {
1983
+ return operationsByTag.files.deleteFileItem({
1984
+ pathParams: {
1985
+ workspace,
1986
+ region,
1987
+ dbBranchName: `${database}:${branch}`,
1988
+ tableName: table,
1989
+ recordId: record,
1990
+ columnName: column,
1991
+ fileId
1992
+ },
1993
+ ...this.extraProps
1994
+ });
1995
+ }
1996
+ getFile({
1997
+ workspace,
1998
+ region,
1999
+ database,
2000
+ branch,
2001
+ table,
2002
+ record,
2003
+ column
2004
+ }) {
2005
+ return operationsByTag.files.getFile({
2006
+ pathParams: {
2007
+ workspace,
2008
+ region,
2009
+ dbBranchName: `${database}:${branch}`,
2010
+ tableName: table,
2011
+ recordId: record,
2012
+ columnName: column
2013
+ },
2014
+ ...this.extraProps
2015
+ });
2016
+ }
2017
+ putFile({
2018
+ workspace,
2019
+ region,
2020
+ database,
2021
+ branch,
2022
+ table,
2023
+ record,
2024
+ column,
2025
+ file
2026
+ }) {
2027
+ return operationsByTag.files.putFile({
2028
+ pathParams: {
2029
+ workspace,
2030
+ region,
2031
+ dbBranchName: `${database}:${branch}`,
2032
+ tableName: table,
2033
+ recordId: record,
2034
+ columnName: column
2035
+ },
2036
+ body: file,
2037
+ ...this.extraProps
2038
+ });
2039
+ }
2040
+ deleteFile({
2041
+ workspace,
2042
+ region,
2043
+ database,
2044
+ branch,
2045
+ table,
2046
+ record,
2047
+ column
2048
+ }) {
2049
+ return operationsByTag.files.deleteFile({
2050
+ pathParams: {
2051
+ workspace,
2052
+ region,
2053
+ dbBranchName: `${database}:${branch}`,
2054
+ tableName: table,
2055
+ recordId: record,
2056
+ columnName: column
2057
+ },
2058
+ ...this.extraProps
2059
+ });
2060
+ }
2061
+ fileAccess({
2062
+ workspace,
2063
+ region,
2064
+ fileId,
2065
+ verify
2066
+ }) {
2067
+ return operationsByTag.files.fileAccess({
2068
+ pathParams: {
2069
+ workspace,
2070
+ region,
2071
+ fileId
2072
+ },
2073
+ queryParams: { verify },
2074
+ ...this.extraProps
2075
+ });
2076
+ }
2077
+ }
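FilesApi exposes the new file endpoints through the files namespace on XataApiClient. A hypothetical usage sketch; the workspace, region, database, branch, table, record and column values are placeholders, and the exact response shapes are not spelled out here:

  import { XataApiClient } from "@xata.io/client";

  const api = new XataApiClient({ apiKey: process.env.XATA_API_KEY });

  // Attach a file to a file column on an existing record.
  await api.files.putFile({
    workspace: "my-workspace",
    region: "us-east-1",
    database: "my-db",
    branch: "main",
    table: "users",
    record: "rec_xyz",
    column: "avatar",
    file: new Blob(["hello"], { type: "text/plain" })
  });

  // Read it back through the same namespace.
  const file = await api.files.getFile({
    workspace: "my-workspace",
    region: "us-east-1",
    database: "my-db",
    branch: "main",
    table: "users",
    record: "rec_xyz",
    column: "avatar"
  });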
1528
2078
  class SearchAndFilterApi {
1529
2079
  constructor(extraProps) {
1530
2080
  this.extraProps = extraProps;
@@ -1608,17 +2158,26 @@ class SearchAndFilterApi {
1608
2158
  database,
1609
2159
  branch,
1610
2160
  table,
1611
- question,
1612
- fuzziness,
1613
- target,
1614
- prefix,
1615
- filter,
1616
- boosters,
1617
- rules
2161
+ options
1618
2162
  }) {
1619
2163
  return operationsByTag.searchAndFilter.askTable({
1620
2164
  pathParams: { workspace, region, dbBranchName: `${database}:${branch}`, tableName: table },
1621
- body: { question, fuzziness, target, prefix, filter, boosters, rules },
2165
+ body: { ...options },
2166
+ ...this.extraProps
2167
+ });
2168
+ }
2169
+ askTableSession({
2170
+ workspace,
2171
+ region,
2172
+ database,
2173
+ branch,
2174
+ table,
2175
+ sessionId,
2176
+ message
2177
+ }) {
2178
+ return operationsByTag.searchAndFilter.askTableSession({
2179
+ pathParams: { workspace, region, dbBranchName: `${database}:${branch}`, tableName: table, sessionId },
2180
+ body: { message },
1622
2181
  ...this.extraProps
1623
2182
  });
1624
2183
  }
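askTable now takes its parameters as a single options object, and the new askTableSession carries a follow-up message under an existing sessionId, which keeps conversational context on the server. A hypothetical sketch; the identifiers are placeholders and the response field names used here (answer, sessionId) are assumptions rather than a documented contract:

  import { XataApiClient } from "@xata.io/client";

  const api = new XataApiClient({ apiKey: process.env.XATA_API_KEY });
  const scope = { workspace: "my-workspace", region: "us-east-1", database: "my-db", branch: "main", table: "docs" };

  // First question: everything ask-related now travels under `options`.
  const first = await api.searchAndFilter.askTable({ ...scope, options: { question: "How do I paginate results?" } });

  // Follow-up in the same conversation, addressed by the session id returned earlier (assumed field name).
  await api.searchAndFilter.askTableSession({
    ...scope,
    sessionId: first.sessionId,
    message: "Can you show that with cursors?"
  });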
@@ -1886,6 +2445,19 @@ class MigrationsApi {
1886
2445
  ...this.extraProps
1887
2446
  });
1888
2447
  }
2448
+ pushBranchMigrations({
2449
+ workspace,
2450
+ region,
2451
+ database,
2452
+ branch,
2453
+ migrations
2454
+ }) {
2455
+ return operationsByTag.migrations.pushBranchMigrations({
2456
+ pathParams: { workspace, region, dbBranchName: `${database}:${branch}` },
2457
+ body: { migrations },
2458
+ ...this.extraProps
2459
+ });
2460
+ }
1889
2461
  }
1890
2462
  class DatabaseApi {
1891
2463
  constructor(extraProps) {
@@ -1900,11 +2472,13 @@ class DatabaseApi {
1900
2472
  createDatabase({
1901
2473
  workspace,
1902
2474
  database,
1903
- data
2475
+ data,
2476
+ headers
1904
2477
  }) {
1905
2478
  return operationsByTag.databases.createDatabase({
1906
2479
  pathParams: { workspaceId: workspace, dbName: database },
1907
2480
  body: data,
2481
+ headers,
1908
2482
  ...this.extraProps
1909
2483
  });
1910
2484
  }
@@ -1937,6 +2511,17 @@ class DatabaseApi {
1937
2511
  ...this.extraProps
1938
2512
  });
1939
2513
  }
2514
+ renameDatabase({
2515
+ workspace,
2516
+ database,
2517
+ newName
2518
+ }) {
2519
+ return operationsByTag.databases.renameDatabase({
2520
+ pathParams: { workspaceId: workspace, dbName: database },
2521
+ body: { newName },
2522
+ ...this.extraProps
2523
+ });
2524
+ }
1940
2525
  getDatabaseGithubSettings({
1941
2526
  workspace,
1942
2527
  database
@@ -1983,13 +2568,261 @@ class XataApiPlugin {
1983
2568
  class XataPlugin {
1984
2569
  }
1985
2570
 
2571
+ class FilesPlugin extends XataPlugin {
2572
+ build(pluginOptions) {
2573
+ return {
2574
+ download: async (location) => {
2575
+ const { table, record, column, fileId = "" } = location ?? {};
2576
+ return await getFileItem({
2577
+ pathParams: {
2578
+ workspace: "{workspaceId}",
2579
+ dbBranchName: "{dbBranch}",
2580
+ region: "{region}",
2581
+ tableName: table ?? "",
2582
+ recordId: record ?? "",
2583
+ columnName: column ?? "",
2584
+ fileId
2585
+ },
2586
+ ...pluginOptions,
2587
+ rawResponse: true
2588
+ });
2589
+ },
2590
+ upload: async (location, file) => {
2591
+ const { table, record, column, fileId = "" } = location ?? {};
2592
+ return await putFileItem({
2593
+ pathParams: {
2594
+ workspace: "{workspaceId}",
2595
+ dbBranchName: "{dbBranch}",
2596
+ region: "{region}",
2597
+ tableName: table ?? "",
2598
+ recordId: record ?? "",
2599
+ columnName: column ?? "",
2600
+ fileId
2601
+ },
2602
+ body: file,
2603
+ ...pluginOptions
2604
+ });
2605
+ },
2606
+ delete: async (location) => {
2607
+ const { table, record, column, fileId = "" } = location ?? {};
2608
+ return await deleteFileItem({
2609
+ pathParams: {
2610
+ workspace: "{workspaceId}",
2611
+ dbBranchName: "{dbBranch}",
2612
+ region: "{region}",
2613
+ tableName: table ?? "",
2614
+ recordId: record ?? "",
2615
+ columnName: column ?? "",
2616
+ fileId
2617
+ },
2618
+ ...pluginOptions
2619
+ });
2620
+ }
2621
+ };
2622
+ }
2623
+ }
2624
+
2625
+ function buildTransformString(transformations) {
2626
+ return transformations.flatMap(
2627
+ (t) => Object.entries(t).map(([key, value]) => {
2628
+ if (key === "trim") {
2629
+ const { left = 0, top = 0, right = 0, bottom = 0 } = value;
2630
+ return `${key}=${[top, right, bottom, left].join(";")}`;
2631
+ }
2632
+ if (key === "gravity" && typeof value === "object") {
2633
+ const { x = 0.5, y = 0.5 } = value;
2634
+ return `${key}=${[x, y].join("x")}`;
2635
+ }
2636
+ return `${key}=${value}`;
2637
+ })
2638
+ ).join(",");
2639
+ }
2640
+ function transformImage(url, transformations) {
2641
+ if (!isDefined(url))
2642
+ return void 0;
2643
+ const transformationsString = buildTransformString(transformations);
2644
+ const { hostname, pathname, search } = new URL(url);
2645
+ return `https://${hostname}/transform/${transformationsString}${pathname}${search}`;
2646
+ }
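buildTransformString flattens an array of transformation objects into the comma-separated segment that transformImage splices into the URL path after /transform/. A worked example using a standalone copy of the helper (the storage hostname is fictional):

  const buildTransformStringSketch = (transformations) =>
    transformations
      .flatMap((t) =>
        Object.entries(t).map(([key, value]) => {
          if (key === "trim") {
            const { left = 0, top = 0, right = 0, bottom = 0 } = value;
            return `${key}=${[top, right, bottom, left].join(";")}`;
          }
          if (key === "gravity" && typeof value === "object") {
            const { x = 0.5, y = 0.5 } = value;
            return `${key}=${[x, y].join("x")}`;
          }
          return `${key}=${value}`;
        })
      )
      .join(",");

  console.log(buildTransformStringSketch([{ width: 200, height: 200 }, { format: "webp" }]));
  // => "width=200,height=200,format=webp"
  // transformImage would then turn https://storage.example.com/abc.png into
  // https://storage.example.com/transform/width=200,height=200,format=webp/abc.png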
2647
+
2648
+ var __defProp$6 = Object.defineProperty;
2649
+ var __defNormalProp$6 = (obj, key, value) => key in obj ? __defProp$6(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
2650
+ var __publicField$6 = (obj, key, value) => {
2651
+ __defNormalProp$6(obj, typeof key !== "symbol" ? key + "" : key, value);
2652
+ return value;
2653
+ };
2654
+ class XataFile {
2655
+ constructor(file) {
2656
+ /**
2657
+ * Name of this file.
2658
+ */
2659
+ __publicField$6(this, "name");
2660
+ /**
2661
+ * Media type of this file.
2662
+ */
2663
+ __publicField$6(this, "mediaType");
2664
+ /**
2665
+ * Base64 encoded content of this file.
2666
+ */
2667
+ __publicField$6(this, "base64Content");
2668
+ /**
2669
+ * Whether to enable public url for this file.
2670
+ */
2671
+ __publicField$6(this, "enablePublicUrl");
2672
+ /**
2673
+ * Timeout for the signed url.
2674
+ */
2675
+ __publicField$6(this, "signedUrlTimeout");
2676
+ /**
2677
+ * Size of this file.
2678
+ */
2679
+ __publicField$6(this, "size");
2680
+ /**
2681
+ * Version of this file.
2682
+ */
2683
+ __publicField$6(this, "version");
2684
+ /**
2685
+ * Url of this file.
2686
+ */
2687
+ __publicField$6(this, "url");
2688
+ /**
2689
+ * Signed url of this file.
2690
+ */
2691
+ __publicField$6(this, "signedUrl");
2692
+ /**
2693
+ * Attributes of this file.
2694
+ */
2695
+ __publicField$6(this, "attributes");
2696
+ this.name = file.name;
2697
+ this.mediaType = file.mediaType || "application/octet-stream";
2698
+ this.base64Content = file.base64Content;
2699
+ this.enablePublicUrl = file.enablePublicUrl;
2700
+ this.signedUrlTimeout = file.signedUrlTimeout;
2701
+ this.size = file.size;
2702
+ this.version = file.version;
2703
+ this.url = file.url;
2704
+ this.signedUrl = file.signedUrl;
2705
+ this.attributes = file.attributes;
2706
+ }
2707
+ static fromBuffer(buffer, options = {}) {
2708
+ const base64Content = buffer.toString("base64");
2709
+ return new XataFile({ ...options, base64Content });
2710
+ }
2711
+ toBuffer() {
2712
+ if (!this.base64Content) {
2713
+ throw new Error(`File content is not available, please select property "base64Content" when querying the file`);
2714
+ }
2715
+ return Buffer.from(this.base64Content, "base64");
2716
+ }
2717
+ static fromArrayBuffer(arrayBuffer, options = {}) {
2718
+ const uint8Array = new Uint8Array(arrayBuffer);
2719
+ return this.fromUint8Array(uint8Array, options);
2720
+ }
2721
+ toArrayBuffer() {
2722
+ if (!this.base64Content) {
2723
+ throw new Error(`File content is not available, please select property "base64Content" when querying the file`);
2724
+ }
2725
+ const binary = atob(this.base64Content);
2726
+ return new ArrayBuffer(binary.length);
2727
+ }
2728
+ static fromUint8Array(uint8Array, options = {}) {
2729
+ let binary = "";
2730
+ for (let i = 0; i < uint8Array.byteLength; i++) {
2731
+ binary += String.fromCharCode(uint8Array[i]);
2732
+ }
2733
+ const base64Content = btoa(binary);
2734
+ return new XataFile({ ...options, base64Content });
2735
+ }
2736
+ toUint8Array() {
2737
+ if (!this.base64Content) {
2738
+ throw new Error(`File content is not available, please select property "base64Content" when querying the file`);
2739
+ }
2740
+ const binary = atob(this.base64Content);
2741
+ const uint8Array = new Uint8Array(binary.length);
2742
+ for (let i = 0; i < binary.length; i++) {
2743
+ uint8Array[i] = binary.charCodeAt(i);
2744
+ }
2745
+ return uint8Array;
2746
+ }
2747
+ static async fromBlob(file, options = {}) {
2748
+ const name = options.name ?? file.name;
2749
+ const mediaType = file.type;
2750
+ const arrayBuffer = await file.arrayBuffer();
2751
+ return this.fromArrayBuffer(arrayBuffer, { ...options, name, mediaType });
2752
+ }
2753
+ toBlob() {
2754
+ if (!this.base64Content) {
2755
+ throw new Error(`File content is not available, please select property "base64Content" when querying the file`);
2756
+ }
2757
+ const arrayBuffer = this.toArrayBuffer();
2758
+ return new Blob([arrayBuffer], { type: this.mediaType });
2759
+ }
2760
+ static fromString(string, options = {}) {
2761
+ const base64Content = btoa(string);
2762
+ return new XataFile({ ...options, base64Content });
2763
+ }
2764
+ toString() {
2765
+ if (!this.base64Content) {
2766
+ throw new Error(`File content is not available, please select property "base64Content" when querying the file`);
2767
+ }
2768
+ return atob(this.base64Content);
2769
+ }
2770
+ static fromBase64(base64Content, options = {}) {
2771
+ return new XataFile({ ...options, base64Content });
2772
+ }
2773
+ toBase64() {
2774
+ if (!this.base64Content) {
2775
+ throw new Error(`File content is not available, please select property "base64Content" when querying the file`);
2776
+ }
2777
+ return this.base64Content;
2778
+ }
2779
+ transform(...options) {
2780
+ return {
2781
+ url: transformImage(this.url, options),
2782
+ signedUrl: transformImage(this.signedUrl, options)
2783
+ };
2784
+ }
2785
+ }
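XataFile wraps a base64 payload plus its metadata and converts to and from the common binary shapes (Buffer, ArrayBuffer, Uint8Array, Blob, string). A small round-trip sketch, assuming XataFile is importable from the package:

  import { XataFile } from "@xata.io/client"; // assumed export

  const greeting = XataFile.fromString("hello world", { name: "greeting.txt", mediaType: "text/plain" });
  console.log(greeting.toBase64());    // "aGVsbG8gd29ybGQ="
  console.log(greeting.toString());    // "hello world"
  console.log(greeting.toBlob().type); // "text/plain"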
2786
+ const parseInputFileEntry = async (entry) => {
2787
+ if (!isDefined(entry))
2788
+ return null;
2789
+ const { id, name, mediaType, base64Content, enablePublicUrl, signedUrlTimeout } = await entry;
2790
+ return compactObject({ id, name, mediaType, base64Content, enablePublicUrl, signedUrlTimeout });
2791
+ };
2792
+
1986
2793
  function cleanFilter(filter) {
1987
- if (!filter)
2794
+ if (!isDefined(filter))
1988
2795
  return void 0;
1989
- const values = Object.values(filter).filter(Boolean).filter((value) => Array.isArray(value) ? value.length > 0 : true);
1990
- return values.length > 0 ? filter : void 0;
2796
+ if (!isObject(filter))
2797
+ return filter;
2798
+ const values = Object.fromEntries(
2799
+ Object.entries(filter).reduce((acc, [key, value]) => {
2800
+ if (!isDefined(value))
2801
+ return acc;
2802
+ if (Array.isArray(value)) {
2803
+ const clean = value.map((item) => cleanFilter(item)).filter((item) => isDefined(item));
2804
+ if (clean.length === 0)
2805
+ return acc;
2806
+ return [...acc, [key, clean]];
2807
+ }
2808
+ if (isObject(value)) {
2809
+ const clean = cleanFilter(value);
2810
+ if (!isDefined(clean))
2811
+ return acc;
2812
+ return [...acc, [key, clean]];
2813
+ }
2814
+ return [...acc, [key, value]];
2815
+ }, [])
2816
+ );
2817
+ return Object.keys(values).length > 0 ? values : void 0;
1991
2818
  }
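cleanFilter is now recursive: undefined leaves, empty arrays and nested objects that prune down to nothing are all dropped, and a filter that empties out entirely collapses to undefined instead of being sent to the API. A standalone copy for illustration, with isDefined and isObject inlined:

  const definedSk = (v) => v !== null && v !== undefined;
  const objectSk = (v) => Boolean(v) && typeof v === "object" && !Array.isArray(v) && !(v instanceof Date);

  function cleanFilterSketch(filter) {
    if (!definedSk(filter)) return undefined;
    if (!objectSk(filter)) return filter;
    const entries = Object.entries(filter).reduce((acc, [key, value]) => {
      if (!definedSk(value)) return acc;
      if (Array.isArray(value)) {
        const clean = value.map(cleanFilterSketch).filter(definedSk);
        return clean.length === 0 ? acc : [...acc, [key, clean]];
      }
      if (objectSk(value)) {
        const clean = cleanFilterSketch(value);
        return definedSk(clean) ? [...acc, [key, clean]] : acc;
      }
      return [...acc, [key, value]];
    }, []);
    const values = Object.fromEntries(entries);
    return Object.keys(values).length > 0 ? values : undefined;
  }

  console.log(cleanFilterSketch({ name: { $contains: "cat" }, age: undefined, tags: [], owner: { id: undefined } }));
  // => { name: { $contains: "cat" } }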
1992
2819
 
2820
+ var __defProp$5 = Object.defineProperty;
2821
+ var __defNormalProp$5 = (obj, key, value) => key in obj ? __defProp$5(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
2822
+ var __publicField$5 = (obj, key, value) => {
2823
+ __defNormalProp$5(obj, typeof key !== "symbol" ? key + "" : key, value);
2824
+ return value;
2825
+ };
1993
2826
  var __accessCheck$6 = (obj, member, msg) => {
1994
2827
  if (!member.has(obj))
1995
2828
  throw TypeError("Cannot " + msg);
@@ -2012,22 +2845,58 @@ var _query, _page;
2012
2845
  class Page {
2013
2846
  constructor(query, meta, records = []) {
2014
2847
  __privateAdd$6(this, _query, void 0);
2848
+ /**
2849
+ * Page metadata, required to retrieve additional records.
2850
+ */
2851
+ __publicField$5(this, "meta");
2852
+ /**
2853
+ * The set of results for this page.
2854
+ */
2855
+ __publicField$5(this, "records");
2015
2856
  __privateSet$6(this, _query, query);
2016
2857
  this.meta = meta;
2017
2858
  this.records = new RecordArray(this, records);
2018
2859
  }
2860
+ /**
2861
+ * Retrieves the next page of results.
2862
+ * @param size Maximum number of results to be retrieved.
2863
+ * @param offset Number of results to skip when retrieving the results.
2864
+ * @returns The next page or results.
2865
+ */
2019
2866
  async nextPage(size, offset) {
2020
2867
  return __privateGet$6(this, _query).getPaginated({ pagination: { size, offset, after: this.meta.page.cursor } });
2021
2868
  }
2869
+ /**
2870
+ * Retrieves the previous page of results.
2871
+ * @param size Maximum number of results to be retrieved.
2872
+ * @param offset Number of results to skip when retrieving the results.
2873
+ * @returns The previous page or results.
2874
+ */
2022
2875
  async previousPage(size, offset) {
2023
2876
  return __privateGet$6(this, _query).getPaginated({ pagination: { size, offset, before: this.meta.page.cursor } });
2024
2877
  }
2878
+ /**
2879
+ * Retrieves the start page of results.
2880
+ * @param size Maximum number of results to be retrieved.
2881
+ * @param offset Number of results to skip when retrieving the results.
2882
+ * @returns The start page or results.
2883
+ */
2025
2884
  async startPage(size, offset) {
2026
2885
  return __privateGet$6(this, _query).getPaginated({ pagination: { size, offset, start: this.meta.page.cursor } });
2027
2886
  }
2887
+ /**
2888
+ * Retrieves the end page of results.
2889
+ * @param size Maximum number of results to be retrieved.
2890
+ * @param offset Number of results to skip when retrieving the results.
2891
+ * @returns The end page or results.
2892
+ */
2028
2893
  async endPage(size, offset) {
2029
2894
  return __privateGet$6(this, _query).getPaginated({ pagination: { size, offset, end: this.meta.page.cursor } });
2030
2895
  }
2896
+ /**
2897
+ * Shortcut method to check if there will be additional results if the next page of results is retrieved.
2898
+ * @returns Whether or not there will be additional results in the next page of results.
2899
+ */
2031
2900
  hasNextPage() {
2032
2901
  return this.meta.page.more;
2033
2902
  }
@@ -2040,7 +2909,7 @@ const PAGINATION_DEFAULT_OFFSET = 0;
2040
2909
  function isCursorPaginationOptions(options) {
2041
2910
  return isDefined(options) && (isDefined(options.start) || isDefined(options.end) || isDefined(options.after) || isDefined(options.before));
2042
2911
  }
2043
- const _RecordArray = class extends Array {
2912
+ const _RecordArray = class _RecordArray extends Array {
2044
2913
  constructor(...args) {
2045
2914
  super(..._RecordArray.parseConstructorParams(...args));
2046
2915
  __privateAdd$6(this, _page, void 0);
@@ -2068,29 +2937,58 @@ const _RecordArray = class extends Array {
2068
2937
  map(callbackfn, thisArg) {
2069
2938
  return this.toArray().map(callbackfn, thisArg);
2070
2939
  }
2940
+ /**
2941
+ * Retrieve next page of records
2942
+ *
2943
+ * @returns A new array of objects
2944
+ */
2071
2945
  async nextPage(size, offset) {
2072
2946
  const newPage = await __privateGet$6(this, _page).nextPage(size, offset);
2073
2947
  return new _RecordArray(newPage);
2074
2948
  }
2949
+ /**
2950
+ * Retrieve previous page of records
2951
+ *
2952
+ * @returns A new array of objects
2953
+ */
2075
2954
  async previousPage(size, offset) {
2076
2955
  const newPage = await __privateGet$6(this, _page).previousPage(size, offset);
2077
2956
  return new _RecordArray(newPage);
2078
2957
  }
2958
+ /**
2959
+ * Retrieve start page of records
2960
+ *
2961
+ * @returns A new array of objects
2962
+ */
2079
2963
  async startPage(size, offset) {
2080
2964
  const newPage = await __privateGet$6(this, _page).startPage(size, offset);
2081
2965
  return new _RecordArray(newPage);
2082
2966
  }
2967
+ /**
2968
+ * Retrieve end page of records
2969
+ *
2970
+ * @returns A new array of objects
2971
+ */
2083
2972
  async endPage(size, offset) {
2084
2973
  const newPage = await __privateGet$6(this, _page).endPage(size, offset);
2085
2974
  return new _RecordArray(newPage);
2086
2975
  }
2976
+ /**
2977
+ * @returns Boolean indicating if there is a next page
2978
+ */
2087
2979
  hasNextPage() {
2088
2980
  return __privateGet$6(this, _page).meta.page.more;
2089
2981
  }
2090
2982
  };
2091
- let RecordArray = _RecordArray;
2092
2983
  _page = new WeakMap();
2984
+ let RecordArray = _RecordArray;
2093
2985
 
2986
+ var __defProp$4 = Object.defineProperty;
2987
+ var __defNormalProp$4 = (obj, key, value) => key in obj ? __defProp$4(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
2988
+ var __publicField$4 = (obj, key, value) => {
2989
+ __defNormalProp$4(obj, typeof key !== "symbol" ? key + "" : key, value);
2990
+ return value;
2991
+ };
2094
2992
  var __accessCheck$5 = (obj, member, msg) => {
2095
2993
  if (!member.has(obj))
2096
2994
  throw TypeError("Cannot " + msg);
@@ -2114,14 +3012,15 @@ var __privateMethod$3 = (obj, member, method) => {
2114
3012
  return method;
2115
3013
  };
2116
3014
  var _table$1, _repository, _data, _cleanFilterConstraint, cleanFilterConstraint_fn;
2117
- const _Query = class {
3015
+ const _Query = class _Query {
2118
3016
  constructor(repository, table, data, rawParent) {
2119
3017
  __privateAdd$5(this, _cleanFilterConstraint);
2120
3018
  __privateAdd$5(this, _table$1, void 0);
2121
3019
  __privateAdd$5(this, _repository, void 0);
2122
3020
  __privateAdd$5(this, _data, { filter: {} });
2123
- this.meta = { page: { cursor: "start", more: true } };
2124
- this.records = new RecordArray(this, []);
3021
+ // Implements pagination
3022
+ __publicField$4(this, "meta", { page: { cursor: "start", more: true, size: PAGINATION_DEFAULT_SIZE } });
3023
+ __publicField$4(this, "records", new RecordArray(this, []));
2125
3024
  __privateSet$5(this, _table$1, table);
2126
3025
  if (repository) {
2127
3026
  __privateSet$5(this, _repository, repository);
@@ -2157,18 +3056,38 @@ const _Query = class {
2157
3056
  const key = JSON.stringify({ columns, filter, sort, pagination });
2158
3057
  return toBase64(key);
2159
3058
  }
3059
+ /**
3060
+ * Builds a new query object representing a logical OR between the given subqueries.
3061
+ * @param queries An array of subqueries.
3062
+ * @returns A new Query object.
3063
+ */
2160
3064
  any(...queries) {
2161
3065
  const $any = queries.map((query) => query.getQueryOptions().filter ?? {});
2162
3066
  return new _Query(__privateGet$5(this, _repository), __privateGet$5(this, _table$1), { filter: { $any } }, __privateGet$5(this, _data));
2163
3067
  }
3068
+ /**
3069
+ * Builds a new query object representing a logical AND between the given subqueries.
3070
+ * @param queries An array of subqueries.
3071
+ * @returns A new Query object.
3072
+ */
2164
3073
  all(...queries) {
2165
3074
  const $all = queries.map((query) => query.getQueryOptions().filter ?? {});
2166
3075
  return new _Query(__privateGet$5(this, _repository), __privateGet$5(this, _table$1), { filter: { $all } }, __privateGet$5(this, _data));
2167
3076
  }
3077
+ /**
3078
+ * Builds a new query object representing a logical OR negating each subquery. In pseudo-code: !q1 OR !q2
3079
+ * @param queries An array of subqueries.
3080
+ * @returns A new Query object.
3081
+ */
2168
3082
  not(...queries) {
2169
3083
  const $not = queries.map((query) => query.getQueryOptions().filter ?? {});
2170
3084
  return new _Query(__privateGet$5(this, _repository), __privateGet$5(this, _table$1), { filter: { $not } }, __privateGet$5(this, _data));
2171
3085
  }
3086
+ /**
3087
+ * Builds a new query object representing a logical AND negating each subquery. In pseudo-code: !q1 AND !q2
3088
+ * @param queries An array of subqueries.
3089
+ * @returns A new Query object.
3090
+ */
2172
3091
  none(...queries) {
2173
3092
  const $none = queries.map((query) => query.getQueryOptions().filter ?? {});
2174
3093
  return new _Query(__privateGet$5(this, _repository), __privateGet$5(this, _table$1), { filter: { $none } }, __privateGet$5(this, _data));
@@ -2191,6 +3110,11 @@ const _Query = class {
2191
3110
  const sort = [...originalSort, { column, direction }];
2192
3111
  return new _Query(__privateGet$5(this, _repository), __privateGet$5(this, _table$1), { sort }, __privateGet$5(this, _data));
2193
3112
  }
3113
+ /**
3114
+ * Builds a new query specifying the set of columns to be returned in the query response.
3115
+ * @param columns Array of column names to be returned by the query.
3116
+ * @returns A new Query object.
3117
+ */
2194
3118
  select(columns) {
2195
3119
  return new _Query(
2196
3120
  __privateGet$5(this, _repository),
@@ -2203,6 +3127,12 @@ const _Query = class {
2203
3127
  const query = new _Query(__privateGet$5(this, _repository), __privateGet$5(this, _table$1), options, __privateGet$5(this, _data));
2204
3128
  return __privateGet$5(this, _repository).query(query);
2205
3129
  }
3130
+ /**
3131
+ * Get results in an iterator
3132
+ *
3133
+ * @async
3134
+ * @returns Async interable of results
3135
+ */
2206
3136
  async *[Symbol.asyncIterator]() {
2207
3137
  for await (const [record] of this.getIterator({ batchSize: 1 })) {
2208
3138
  yield record;
@@ -2263,26 +3193,53 @@ const _Query = class {
2263
3193
  );
2264
3194
  return __privateGet$5(this, _repository).summarizeTable(query, summaries, summariesFilter);
2265
3195
  }
3196
+ /**
3197
+ * Builds a new query object adding a cache TTL in milliseconds.
3198
+ * @param ttl The cache TTL in milliseconds.
3199
+ * @returns A new Query object.
3200
+ */
2266
3201
  cache(ttl) {
2267
3202
  return new _Query(__privateGet$5(this, _repository), __privateGet$5(this, _table$1), { cache: ttl }, __privateGet$5(this, _data));
2268
3203
  }
3204
+ /**
3205
+ * Retrieve next page of records
3206
+ *
3207
+ * @returns A new page object.
3208
+ */
2269
3209
  nextPage(size, offset) {
2270
3210
  return this.startPage(size, offset);
2271
3211
  }
3212
+ /**
3213
+ * Retrieve previous page of records
3214
+ *
3215
+ * @returns A new page object
3216
+ */
2272
3217
  previousPage(size, offset) {
2273
3218
  return this.startPage(size, offset);
2274
3219
  }
3220
+ /**
3221
+ * Retrieve start page of records
3222
+ *
3223
+ * @returns A new page object
3224
+ */
2275
3225
  startPage(size, offset) {
2276
3226
  return this.getPaginated({ pagination: { size, offset } });
2277
3227
  }
3228
+ /**
3229
+ * Retrieve last page of records
3230
+ *
3231
+ * @returns A new page object
3232
+ */
2278
3233
  endPage(size, offset) {
2279
3234
  return this.getPaginated({ pagination: { size, offset, before: "end" } });
2280
3235
  }
3236
+ /**
3237
+ * @returns Boolean indicating if there is a next page
3238
+ */
2281
3239
  hasNextPage() {
2282
3240
  return this.meta.page.more;
2283
3241
  }
2284
3242
  };
2285
- let Query = _Query;
2286
3243
  _table$1 = new WeakMap();
2287
3244
  _repository = new WeakMap();
2288
3245
  _data = new WeakMap();
@@ -2297,6 +3254,7 @@ cleanFilterConstraint_fn = function(column, value) {
2297
3254
  }
2298
3255
  return value;
2299
3256
  };
3257
+ let Query = _Query;
2300
3258
  function cleanParent(data, parent) {
2301
3259
  if (isCursorPaginationOptions(data.pagination)) {
2302
3260
  return { ...parent, sort: void 0, filter: void 0 };
@@ -2304,6 +3262,21 @@ function cleanParent(data, parent) {
2304
3262
  return parent;
2305
3263
  }
2306
3264
 
3265
+ const RecordColumnTypes = [
3266
+ "bool",
3267
+ "int",
3268
+ "float",
3269
+ "string",
3270
+ "text",
3271
+ "email",
3272
+ "multiple",
3273
+ "link",
3274
+ "object",
3275
+ "datetime",
3276
+ "vector",
3277
+ "file[]",
3278
+ "file"
3279
+ ];
2307
3280
  function isIdentifiable(x) {
2308
3281
  return isObject(x) && isString(x?.id);
2309
3282
  }
@@ -2317,7 +3290,11 @@ function isSortFilterString(value) {
2317
3290
  return isString(value);
2318
3291
  }
2319
3292
  function isSortFilterBase(filter) {
2320
- return isObject(filter) && Object.values(filter).every((value) => value === "asc" || value === "desc");
3293
+ return isObject(filter) && Object.entries(filter).every(([key, value]) => {
3294
+ if (key === "*")
3295
+ return value === "random";
3296
+ return value === "asc" || value === "desc";
3297
+ });
2321
3298
  }
2322
3299
  function isSortFilterObject(filter) {
2323
3300
  return isObject(filter) && !isSortFilterBase(filter) && filter.column !== void 0;
@@ -2358,7 +3335,7 @@ var __privateMethod$2 = (obj, member, method) => {
2358
3335
  __accessCheck$4(obj, member, "access private method");
2359
3336
  return method;
2360
3337
  };
2361
- var _table, _getFetchProps, _db, _cache, _schemaTables$2, _trace, _insertRecordWithoutId, insertRecordWithoutId_fn, _insertRecordWithId, insertRecordWithId_fn, _insertRecords, insertRecords_fn, _updateRecordWithID, updateRecordWithID_fn, _updateRecords, updateRecords_fn, _upsertRecordWithID, upsertRecordWithID_fn, _deleteRecord, deleteRecord_fn, _deleteRecords, deleteRecords_fn, _setCacheQuery, setCacheQuery_fn, _getCacheQuery, getCacheQuery_fn, _getSchemaTables$1, getSchemaTables_fn$1;
3338
+ var _table, _getFetchProps, _db, _cache, _schemaTables$2, _trace, _insertRecordWithoutId, insertRecordWithoutId_fn, _insertRecordWithId, insertRecordWithId_fn, _insertRecords, insertRecords_fn, _updateRecordWithID, updateRecordWithID_fn, _updateRecords, updateRecords_fn, _upsertRecordWithID, upsertRecordWithID_fn, _deleteRecord, deleteRecord_fn, _deleteRecords, deleteRecords_fn, _setCacheQuery, setCacheQuery_fn, _getCacheQuery, getCacheQuery_fn, _getSchemaTables$1, getSchemaTables_fn$1, _transformObjectToApi, transformObjectToApi_fn;
2362
3339
  const BULK_OPERATION_MAX_SIZE = 1e3;
2363
3340
  class Repository extends Query {
2364
3341
  }
@@ -2380,6 +3357,7 @@ class RestRepository extends Query {
2380
3357
  __privateAdd$4(this, _setCacheQuery);
2381
3358
  __privateAdd$4(this, _getCacheQuery);
2382
3359
  __privateAdd$4(this, _getSchemaTables$1);
3360
+ __privateAdd$4(this, _transformObjectToApi);
2383
3361
  __privateAdd$4(this, _table, void 0);
2384
3362
  __privateAdd$4(this, _getFetchProps, void 0);
2385
3363
  __privateAdd$4(this, _db, void 0);
@@ -2557,12 +3535,22 @@ class RestRepository extends Query {
2557
3535
  return result;
2558
3536
  }
2559
3537
  if (isString(a) && isObject(b)) {
3538
+ if (a === "")
3539
+ throw new Error("The id can't be empty");
2560
3540
  const columns = isStringArray(c) ? c : void 0;
2561
- return __privateMethod$2(this, _upsertRecordWithID, upsertRecordWithID_fn).call(this, a, b, columns, { ifVersion });
3541
+ return await __privateMethod$2(this, _upsertRecordWithID, upsertRecordWithID_fn).call(this, a, b, columns, { ifVersion });
2562
3542
  }
2563
3543
  if (isObject(a) && isString(a.id)) {
3544
+ if (a.id === "")
3545
+ throw new Error("The id can't be empty");
2564
3546
  const columns = isStringArray(c) ? c : void 0;
2565
- return __privateMethod$2(this, _upsertRecordWithID, upsertRecordWithID_fn).call(this, a.id, { ...a, id: void 0 }, columns, { ifVersion });
3547
+ return await __privateMethod$2(this, _upsertRecordWithID, upsertRecordWithID_fn).call(this, a.id, { ...a, id: void 0 }, columns, { ifVersion });
3548
+ }
3549
+ if (!isDefined(a) && isObject(b)) {
3550
+ return await this.create(b, c);
3551
+ }
3552
+ if (isObject(a) && !isDefined(a.id)) {
3553
+ return await this.create(a, b);
2566
3554
  }
2567
3555
  throw new Error("Invalid arguments for createOrUpdate method");
2568
3556
  });
@@ -2579,12 +3567,22 @@ class RestRepository extends Query {
2579
3567
  return result;
2580
3568
  }
2581
3569
  if (isString(a) && isObject(b)) {
3570
+ if (a === "")
3571
+ throw new Error("The id can't be empty");
2582
3572
  const columns = isStringArray(c) ? c : void 0;
2583
- return __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: false, ifVersion });
3573
+ return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a, b, columns, { createOnly: false, ifVersion });
2584
3574
  }
2585
3575
  if (isObject(a) && isString(a.id)) {
3576
+ if (a.id === "")
3577
+ throw new Error("The id can't be empty");
2586
3578
  const columns = isStringArray(c) ? c : void 0;
2587
- return __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a.id, { ...a, id: void 0 }, columns, { createOnly: false, ifVersion });
3579
+ return await __privateMethod$2(this, _insertRecordWithId, insertRecordWithId_fn).call(this, a.id, { ...a, id: void 0 }, columns, { createOnly: false, ifVersion });
3580
+ }
3581
+ if (!isDefined(a) && isObject(b)) {
3582
+ return await this.create(b, c);
3583
+ }
3584
+ if (isObject(a) && !isDefined(a.id)) {
3585
+ return await this.create(a, b);
2588
3586
  }
2589
3587
  throw new Error("Invalid arguments for createOrReplace method");
2590
3588
  });
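Both createOrUpdate and createOrReplace gain the same two behaviours in the hunks above: an explicit empty-string id now throws, and a call without any id falls back to a plain create. A sketch, again assuming a hypothetical `posts` table:

import { BaseClient } from '@xata.io/client';

const xata = new BaseClient(); // credentials from XATA_* env variables

// Upsert by id (unchanged behaviour).
await xata.db.posts.createOrUpdate('post_1', { title: 'Hello' });

// New: no id at all now falls through to create() instead of throwing.
const created = await xata.db.posts.createOrUpdate({ title: 'Draft without id' });
console.log(created.id);

// New: an empty id is rejected early.
try {
  await xata.db.posts.createOrReplace('', { title: 'Broken' });
} catch (err) {
  console.error(err.message); // "The id can't be empty"
}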
@@ -2750,6 +3748,34 @@ class RestRepository extends Query {
2750
3748
  return result;
2751
3749
  });
2752
3750
  }
3751
+ ask(question, options) {
3752
+ const params = {
3753
+ pathParams: {
3754
+ workspace: "{workspaceId}",
3755
+ dbBranchName: "{dbBranch}",
3756
+ region: "{region}",
3757
+ tableName: __privateGet$4(this, _table)
3758
+ },
3759
+ body: {
3760
+ question,
3761
+ ...options
3762
+ },
3763
+ ...__privateGet$4(this, _getFetchProps).call(this)
3764
+ };
3765
+ if (options?.onMessage) {
3766
+ fetchSSERequest({
3767
+ endpoint: "dataPlane",
3768
+ url: "/db/{dbBranchName}/tables/{tableName}/ask",
3769
+ method: "POST",
3770
+ onMessage: (message) => {
3771
+ options.onMessage?.({ answer: message.text, records: message.records });
3772
+ },
3773
+ ...params
3774
+ });
3775
+ } else {
3776
+ return askTable(params);
3777
+ }
3778
+ }
2753
3779
  }
2754
3780
  _table = new WeakMap();
2755
3781
  _getFetchProps = new WeakMap();
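The new ask() method wires the repository to the askTable endpoint, either as a one-shot request or, when an onMessage callback is supplied, as a server-sent-events stream whose messages are mapped to `{ answer, records }`. A sketch of both call styles; the `docs` table and the question text are placeholders, and any extra options simply pass through to the request body as shown above:

import { BaseClient } from '@xata.io/client';

const xata = new BaseClient(); // credentials from XATA_* env variables

// One-shot: resolves with the askTable response once the full answer is ready.
const result = await xata.db.docs.ask('How do I paginate results?');
console.log(result);

// Streaming: with onMessage the call switches to SSE and invokes the callback
// for every chunk; note that in this build the streaming branch does not
// return a promise, so completion is observed only through the callback.
xata.db.docs.ask('How do I paginate results?', {
  onMessage: ({ answer, records }) => {
    process.stdout.write(answer ?? '');
    if (records) console.log('\nrelated records:', records);
  }
});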
@@ -2759,7 +3785,7 @@ _schemaTables$2 = new WeakMap();
2759
3785
  _trace = new WeakMap();
2760
3786
  _insertRecordWithoutId = new WeakSet();
2761
3787
  insertRecordWithoutId_fn = async function(object, columns = ["*"]) {
2762
- const record = transformObjectLinks(object);
3788
+ const record = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
2763
3789
  const response = await insertRecord({
2764
3790
  pathParams: {
2765
3791
  workspace: "{workspaceId}",
@@ -2776,7 +3802,9 @@ insertRecordWithoutId_fn = async function(object, columns = ["*"]) {
2776
3802
  };
2777
3803
  _insertRecordWithId = new WeakSet();
2778
3804
  insertRecordWithId_fn = async function(recordId, object, columns = ["*"], { createOnly, ifVersion }) {
2779
- const record = transformObjectLinks(object);
3805
+ if (!recordId)
3806
+ return null;
3807
+ const record = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
2780
3808
  const response = await insertRecordWithID({
2781
3809
  pathParams: {
2782
3810
  workspace: "{workspaceId}",
@@ -2794,21 +3822,20 @@ insertRecordWithId_fn = async function(recordId, object, columns = ["*"], { crea
2794
3822
  };
2795
3823
  _insertRecords = new WeakSet();
2796
3824
  insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
2797
- const chunkedOperations = chunk(
2798
- objects.map((object) => ({
2799
- insert: { table: __privateGet$4(this, _table), record: transformObjectLinks(object), createOnly, ifVersion }
2800
- })),
2801
- BULK_OPERATION_MAX_SIZE
2802
- );
3825
+ const operations = await promiseMap(objects, async (object) => {
3826
+ const record = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
3827
+ return { insert: { table: __privateGet$4(this, _table), record, createOnly, ifVersion } };
3828
+ });
3829
+ const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
2803
3830
  const ids = [];
2804
- for (const operations of chunkedOperations) {
3831
+ for (const operations2 of chunkedOperations) {
2805
3832
  const { results } = await branchTransaction({
2806
3833
  pathParams: {
2807
3834
  workspace: "{workspaceId}",
2808
3835
  dbBranchName: "{dbBranch}",
2809
3836
  region: "{region}"
2810
3837
  },
2811
- body: { operations },
3838
+ body: { operations: operations2 },
2812
3839
  ...__privateGet$4(this, _getFetchProps).call(this)
2813
3840
  });
2814
3841
  for (const result of results) {
@@ -2823,7 +3850,9 @@ insertRecords_fn = async function(objects, { createOnly, ifVersion }) {
2823
3850
  };
2824
3851
  _updateRecordWithID = new WeakSet();
2825
3852
  updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVersion }) {
2826
- const { id: _id, ...record } = transformObjectLinks(object);
3853
+ if (!recordId)
3854
+ return null;
3855
+ const { id: _id, ...record } = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
2827
3856
  try {
2828
3857
  const response = await updateRecordWithID({
2829
3858
  pathParams: {
@@ -2848,21 +3877,20 @@ updateRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
2848
3877
  };
2849
3878
  _updateRecords = new WeakSet();
2850
3879
  updateRecords_fn = async function(objects, { ifVersion, upsert }) {
2851
- const chunkedOperations = chunk(
2852
- objects.map(({ id, ...object }) => ({
2853
- update: { table: __privateGet$4(this, _table), id, ifVersion, upsert, fields: transformObjectLinks(object) }
2854
- })),
2855
- BULK_OPERATION_MAX_SIZE
2856
- );
3880
+ const operations = await promiseMap(objects, async ({ id, ...object }) => {
3881
+ const fields = await __privateMethod$2(this, _transformObjectToApi, transformObjectToApi_fn).call(this, object);
3882
+ return { update: { table: __privateGet$4(this, _table), id, ifVersion, upsert, fields } };
3883
+ });
3884
+ const chunkedOperations = chunk(operations, BULK_OPERATION_MAX_SIZE);
2857
3885
  const ids = [];
2858
- for (const operations of chunkedOperations) {
3886
+ for (const operations2 of chunkedOperations) {
2859
3887
  const { results } = await branchTransaction({
2860
3888
  pathParams: {
2861
3889
  workspace: "{workspaceId}",
2862
3890
  dbBranchName: "{dbBranch}",
2863
3891
  region: "{region}"
2864
3892
  },
2865
- body: { operations },
3893
+ body: { operations: operations2 },
2866
3894
  ...__privateGet$4(this, _getFetchProps).call(this)
2867
3895
  });
2868
3896
  for (const result of results) {
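Bulk inserts and bulk updates are now built with promiseMap, so each record is transformed sequentially through the schema-aware serializer before being split into branchTransaction calls of at most BULK_OPERATION_MAX_SIZE (1000) operations. Nothing changes for the caller; a sketch of a bulk create that would span multiple transactions, assuming a hypothetical `events` table with a `happenedAt` datetime column:

import { BaseClient } from '@xata.io/client';

const xata = new BaseClient(); // credentials from XATA_* env variables

// 2500 records -> transformed one by one, then sent as 3 transactions
// (1000 + 1000 + 500 operations) under the hood.
const rows = Array.from({ length: 2500 }, (_, i) => ({
  name: `event-${i}`,
  happenedAt: new Date() // datetime columns are serialized to ISO strings
}));

const createdRecords = await xata.db.events.create(rows);
console.log(createdRecords.length); // 2500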
@@ -2877,6 +3905,8 @@ updateRecords_fn = async function(objects, { ifVersion, upsert }) {
2877
3905
  };
2878
3906
  _upsertRecordWithID = new WeakSet();
2879
3907
  upsertRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVersion }) {
3908
+ if (!recordId)
3909
+ return null;
2880
3910
  const response = await upsertRecordWithID({
2881
3911
  pathParams: {
2882
3912
  workspace: "{workspaceId}",
@@ -2894,6 +3924,8 @@ upsertRecordWithID_fn = async function(recordId, object, columns = ["*"], { ifVe
2894
3924
  };
2895
3925
  _deleteRecord = new WeakSet();
2896
3926
  deleteRecord_fn = async function(recordId, columns = ["*"]) {
3927
+ if (!recordId)
3928
+ return null;
2897
3929
  try {
2898
3930
  const response = await deleteRecord({
2899
3931
  pathParams: {
@@ -2918,7 +3950,7 @@ deleteRecord_fn = async function(recordId, columns = ["*"]) {
2918
3950
  _deleteRecords = new WeakSet();
2919
3951
  deleteRecords_fn = async function(recordIds) {
2920
3952
  const chunkedOperations = chunk(
2921
- recordIds.map((id) => ({ delete: { table: __privateGet$4(this, _table), id } })),
3953
+ compact(recordIds).map((id) => ({ delete: { table: __privateGet$4(this, _table), id } })),
2922
3954
  BULK_OPERATION_MAX_SIZE
2923
3955
  );
2924
3956
  for (const operations of chunkedOperations) {
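The single-record write helpers in the hunks above (insert-with-id, update, upsert, delete) now return null early when the record id is missing or empty, and bulk deletes run their id list through compact() so nullish entries are dropped. How the public methods surface this is not fully visible in this diff, so the sketch below only assumes that an empty string id reaches these guards and resolves to null rather than producing a request:

import { BaseClient } from '@xata.io/client';

const xata = new BaseClient(); // credentials from XATA_* env variables

// Assumption: an empty id falls through to the guarded helpers above,
// which now resolve to null instead of issuing an API call.
const deleted = await xata.db.posts.delete('');
console.log(deleted); // expected: null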
@@ -2935,15 +3967,16 @@ deleteRecords_fn = async function(recordIds) {
2935
3967
  };
2936
3968
  _setCacheQuery = new WeakSet();
2937
3969
  setCacheQuery_fn = async function(query, meta, records) {
2938
- await __privateGet$4(this, _cache).set(`query_${__privateGet$4(this, _table)}:${query.key()}`, { date: new Date(), meta, records });
3970
+ await __privateGet$4(this, _cache)?.set(`query_${__privateGet$4(this, _table)}:${query.key()}`, { date: /* @__PURE__ */ new Date(), meta, records });
2939
3971
  };
2940
3972
  _getCacheQuery = new WeakSet();
2941
3973
  getCacheQuery_fn = async function(query) {
2942
3974
  const key = `query_${__privateGet$4(this, _table)}:${query.key()}`;
2943
- const result = await __privateGet$4(this, _cache).get(key);
3975
+ const result = await __privateGet$4(this, _cache)?.get(key);
2944
3976
  if (!result)
2945
3977
  return null;
2946
- const { cache: ttl = __privateGet$4(this, _cache).defaultQueryTTL } = query.getQueryOptions();
3978
+ const defaultTTL = __privateGet$4(this, _cache)?.defaultQueryTTL ?? -1;
3979
+ const { cache: ttl = defaultTTL } = query.getQueryOptions();
2947
3980
  if (ttl < 0)
2948
3981
  return null;
2949
3982
  const hasExpired = result.date.getTime() + ttl < Date.now();
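The cache lookups now use optional chaining and fall back to a TTL of -1 when no cache is configured, which disables cache reads rather than throwing. A sketch of configuring the bundled SimpleCache explicitly; the `posts` table is hypothetical, and the per-query `cache` TTL option is inferred from the `cache: ttl` destructuring in getCacheQuery above:

import { BaseClient, SimpleCache } from '@xata.io/client';

const xata = new BaseClient({
  // Keep up to 500 query results, each valid for one minute.
  cache: new SimpleCache({ max: 500, defaultQueryTTL: 60_000 })
});

// Served from cache within the default TTL configured above.
const recent = await xata.db.posts.getMany();

// Per-query override (assumed to be the TTL in milliseconds).
const fresh = await xata.db.posts.getMany({ cache: 0 });
console.log(recent.length, fresh.length);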
@@ -2960,7 +3993,39 @@ getSchemaTables_fn$1 = async function() {
2960
3993
  __privateSet$4(this, _schemaTables$2, schema.tables);
2961
3994
  return schema.tables;
2962
3995
  };
2963
- const transformObjectLinks = (object) => {
3996
+ _transformObjectToApi = new WeakSet();
3997
+ transformObjectToApi_fn = async function(object) {
3998
+ const schemaTables = await __privateMethod$2(this, _getSchemaTables$1, getSchemaTables_fn$1).call(this);
3999
+ const schema = schemaTables.find((table) => table.name === __privateGet$4(this, _table));
4000
+ if (!schema)
4001
+ throw new Error(`Table ${__privateGet$4(this, _table)} not found in schema`);
4002
+ const result = {};
4003
+ for (const [key, value] of Object.entries(object)) {
4004
+ if (key === "xata")
4005
+ continue;
4006
+ const type = schema.columns.find((column) => column.name === key)?.type;
4007
+ switch (type) {
4008
+ case "link": {
4009
+ result[key] = isIdentifiable(value) ? value.id : value;
4010
+ break;
4011
+ }
4012
+ case "datetime": {
4013
+ result[key] = value instanceof Date ? value.toISOString() : value;
4014
+ break;
4015
+ }
4016
+ case `file`:
4017
+ result[key] = await parseInputFileEntry(value);
4018
+ break;
4019
+ case "file[]":
4020
+ result[key] = await promiseMap(value, (item) => parseInputFileEntry(item));
4021
+ break;
4022
+ default:
4023
+ result[key] = value;
4024
+ }
4025
+ }
4026
+ return result;
4027
+ };
4028
+ const removeLinksFromObject = (object) => {
2964
4029
  return Object.entries(object).reduce((acc, [key, value]) => {
2965
4030
  if (key === "xata")
2966
4031
  return acc;
@@ -3009,6 +4074,12 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
3009
4074
  }
3010
4075
  break;
3011
4076
  }
4077
+ case "file":
4078
+ data[column.name] = isDefined(value) ? new XataFile(value) : null;
4079
+ break;
4080
+ case "file[]":
4081
+ data[column.name] = value?.map((item) => new XataFile(item)) ?? null;
4082
+ break;
3012
4083
  default:
3013
4084
  data[column.name] = value ?? null;
3014
4085
  if (column.notNull === true && value === null) {
@@ -3018,6 +4089,8 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
3018
4089
  }
3019
4090
  }
3020
4091
  const record = { ...data };
4092
+ const serializable = { xata, ...removeLinksFromObject(data) };
4093
+ const metadata = xata !== void 0 ? { ...xata, createdAt: new Date(xata.createdAt), updatedAt: new Date(xata.updatedAt) } : void 0;
3021
4094
  record.read = function(columns2) {
3022
4095
  return db[table].read(record["id"], columns2);
3023
4096
  };
@@ -3034,14 +4107,15 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
3034
4107
  record.delete = function() {
3035
4108
  return db[table].delete(record["id"]);
3036
4109
  };
4110
+ record.xata = Object.freeze(metadata);
3037
4111
  record.getMetadata = function() {
3038
- return xata;
4112
+ return record.xata;
3039
4113
  };
3040
4114
  record.toSerializable = function() {
3041
- return JSON.parse(JSON.stringify(transformObjectLinks(data)));
4115
+ return JSON.parse(JSON.stringify(serializable));
3042
4116
  };
3043
4117
  record.toString = function() {
3044
- return JSON.stringify(transformObjectLinks(data));
4118
+ return JSON.stringify(serializable);
3045
4119
  };
3046
4120
  for (const prop of ["read", "update", "replace", "delete", "getMetadata", "toSerializable", "toString"]) {
3047
4121
  Object.defineProperty(record, prop, { enumerable: false });
@@ -3059,11 +4133,7 @@ function extractId(value) {
3059
4133
  function isValidColumn(columns, column) {
3060
4134
  if (columns.includes("*"))
3061
4135
  return true;
3062
- if (column.type === "link") {
3063
- const linkColumns = columns.filter((item) => item.startsWith(column.name));
3064
- return linkColumns.length > 0;
3065
- }
3066
- return columns.includes(column.name);
4136
+ return columns.filter((item) => item.startsWith(column.name)).length > 0;
3067
4137
  }
3068
4138
  function parseIfVersion(...args) {
3069
4139
  for (const arg of args) {
@@ -3074,6 +4144,12 @@ function parseIfVersion(...args) {
3074
4144
  return void 0;
3075
4145
  }
3076
4146
 
4147
+ var __defProp$3 = Object.defineProperty;
4148
+ var __defNormalProp$3 = (obj, key, value) => key in obj ? __defProp$3(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
4149
+ var __publicField$3 = (obj, key, value) => {
4150
+ __defNormalProp$3(obj, typeof key !== "symbol" ? key + "" : key, value);
4151
+ return value;
4152
+ };
3077
4153
  var __accessCheck$3 = (obj, member, msg) => {
3078
4154
  if (!member.has(obj))
3079
4155
  throw TypeError("Cannot " + msg);
@@ -3096,6 +4172,8 @@ var _map;
3096
4172
  class SimpleCache {
3097
4173
  constructor(options = {}) {
3098
4174
  __privateAdd$3(this, _map, void 0);
4175
+ __publicField$3(this, "capacity");
4176
+ __publicField$3(this, "defaultQueryTTL");
3099
4177
  __privateSet$3(this, _map, /* @__PURE__ */ new Map());
3100
4178
  this.capacity = options.max ?? 500;
3101
4179
  this.defaultQueryTTL = options.defaultQueryTTL ?? 60 * 1e3;
@@ -3260,6 +4338,7 @@ search_fn = async function(query, options, pluginOptions) {
3260
4338
  const { tables, fuzziness, highlight, prefix, page } = options ?? {};
3261
4339
  const { records } = await searchBranch({
3262
4340
  pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
4341
+ // @ts-ignore https://github.com/xataio/client-ts/issues/313
3263
4342
  body: { tables, query, fuzziness, prefix, highlight, page },
3264
4343
  ...pluginOptions
3265
4344
  });
@@ -3292,6 +4371,12 @@ class TransactionPlugin extends XataPlugin {
3292
4371
  }
3293
4372
  }
3294
4373
 
4374
+ var __defProp$2 = Object.defineProperty;
4375
+ var __defNormalProp$2 = (obj, key, value) => key in obj ? __defProp$2(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
4376
+ var __publicField$2 = (obj, key, value) => {
4377
+ __defNormalProp$2(obj, typeof key !== "symbol" ? key + "" : key, value);
4378
+ return value;
4379
+ };
3295
4380
  var __accessCheck = (obj, member, msg) => {
3296
4381
  if (!member.has(obj))
3297
4382
  throw TypeError("Cannot " + msg);
@@ -3321,29 +4406,29 @@ const buildClient = (plugins) => {
3321
4406
  __privateAdd(this, _parseOptions);
3322
4407
  __privateAdd(this, _getFetchProps);
3323
4408
  __privateAdd(this, _options, void 0);
4409
+ __publicField$2(this, "db");
4410
+ __publicField$2(this, "search");
4411
+ __publicField$2(this, "transactions");
4412
+ __publicField$2(this, "files");
3324
4413
  const safeOptions = __privateMethod(this, _parseOptions, parseOptions_fn).call(this, options);
3325
4414
  __privateSet(this, _options, safeOptions);
3326
4415
  const pluginOptions = {
3327
4416
  ...__privateMethod(this, _getFetchProps, getFetchProps_fn).call(this, safeOptions),
3328
- cache: safeOptions.cache
4417
+ cache: safeOptions.cache,
4418
+ host: safeOptions.host
3329
4419
  };
3330
4420
  const db = new SchemaPlugin(schemaTables).build(pluginOptions);
3331
4421
  const search = new SearchPlugin(db, schemaTables).build(pluginOptions);
3332
4422
  const transactions = new TransactionPlugin().build(pluginOptions);
4423
+ const files = new FilesPlugin().build(pluginOptions);
3333
4424
  this.db = db;
3334
4425
  this.search = search;
3335
4426
  this.transactions = transactions;
4427
+ this.files = files;
3336
4428
  for (const [key, namespace] of Object.entries(plugins ?? {})) {
3337
4429
  if (namespace === void 0)
3338
4430
  continue;
3339
- const result = namespace.build(pluginOptions);
3340
- if (result instanceof Promise) {
3341
- void result.then((namespace2) => {
3342
- this[key] = namespace2;
3343
- });
3344
- } else {
3345
- this[key] = result;
3346
- }
4431
+ this[key] = namespace.build(pluginOptions);
3347
4432
  }
3348
4433
  }
3349
4434
  async getConfig() {
@@ -3361,7 +4446,6 @@ const buildClient = (plugins) => {
3361
4446
  }
3362
4447
  const fetch = getFetchImplementation(options?.fetch);
3363
4448
  const databaseURL = options?.databaseURL || getDatabaseURL();
3364
- const branch = options?.branch || getBranch() || "main";
3365
4449
  const apiKey = options?.apiKey || getAPIKey();
3366
4450
  const cache = options?.cache ?? new SimpleCache({ defaultQueryTTL: 0 });
3367
4451
  const trace = options?.trace ?? defaultTrace;
@@ -3374,6 +4458,26 @@ const buildClient = (plugins) => {
3374
4458
  if (!databaseURL) {
3375
4459
  throw new Error("Option databaseURL is required");
3376
4460
  }
4461
+ const envBranch = getBranch();
4462
+ const previewBranch = getPreviewBranch();
4463
+ const branch = options?.branch || previewBranch || envBranch || "main";
4464
+ if (!!previewBranch && branch !== previewBranch) {
4465
+ console.warn(
4466
+ `Ignoring preview branch ${previewBranch} because branch option was passed to the client constructor with value ${branch}`
4467
+ );
4468
+ } else if (!!envBranch && branch !== envBranch) {
4469
+ console.warn(
4470
+ `Ignoring branch ${envBranch} because branch option was passed to the client constructor with value ${branch}`
4471
+ );
4472
+ } else if (!!previewBranch && !!envBranch && previewBranch !== envBranch) {
4473
+ console.warn(
4474
+ `Ignoring preview branch ${previewBranch} and branch ${envBranch} because branch option was passed to the client constructor with value ${branch}`
4475
+ );
4476
+ } else if (!previewBranch && !envBranch && options?.branch === void 0) {
4477
+ console.warn(
4478
+ `No branch was passed to the client constructor. Using default branch ${branch}. You can set the branch with the environment variable XATA_BRANCH or by passing the branch option to the client constructor.`
4479
+ );
4480
+ }
3377
4481
  return {
3378
4482
  fetch,
3379
4483
  databaseURL,
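Branch resolution moved into parseOptions and now layers three sources: an explicit `branch` option, a deploy-preview branch from getPreviewBranch(), and the environment branch from getBranch(), finally defaulting to "main". When the explicit option disagrees with the environment, or nothing is configured at all, a console warning is emitted rather than an error. A sketch of the precedence, assuming XATA_BRANCH is how the environment branch is supplied:

import { BaseClient, getPreviewBranch } from '@xata.io/client';

// With XATA_BRANCH=staging set and no option passed, the client resolves to
// "staging"; with nothing set anywhere it falls back to "main" and warns.
const fromEnv = new BaseClient();

// An explicit option always wins; if it differs from the environment branch
// or a detected preview branch, the constructor warns and carries on.
const pinned = new BaseClient({ branch: 'main' });

// The deploy-preview detection is exported on its own as well.
console.log(getPreviewBranch() ?? 'no preview branch detected');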
@@ -3401,6 +4505,7 @@ const buildClient = (plugins) => {
3401
4505
  fetch,
3402
4506
  apiKey,
3403
4507
  apiUrl: "",
4508
+ // Instead of using workspace and dbBranch, we inject a probably CNAME'd URL
3404
4509
  workspacesApiUrl: (path, params) => {
3405
4510
  const hasBranch = params.dbBranchName ?? params.branch;
3406
4511
  const newPath = path.replace(/^\/db\/[^/]+/, hasBranch !== void 0 ? `:${branch}` : "");
@@ -3416,11 +4521,17 @@ const buildClient = (plugins) => {
3416
4521
  class BaseClient extends buildClient() {
3417
4522
  }
3418
4523
 
4524
+ var __defProp$1 = Object.defineProperty;
4525
+ var __defNormalProp$1 = (obj, key, value) => key in obj ? __defProp$1(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
4526
+ var __publicField$1 = (obj, key, value) => {
4527
+ __defNormalProp$1(obj, typeof key !== "symbol" ? key + "" : key, value);
4528
+ return value;
4529
+ };
3419
4530
  const META = "__";
3420
4531
  const VALUE = "___";
3421
4532
  class Serializer {
3422
4533
  constructor() {
3423
- this.classes = {};
4534
+ __publicField$1(this, "classes", {});
3424
4535
  }
3425
4536
  add(clazz) {
3426
4537
  this.classes[clazz.name] = clazz;
@@ -3498,12 +4609,19 @@ function buildWorkerRunner(config) {
3498
4609
  };
3499
4610
  }
3500
4611
 
4612
+ var __defProp = Object.defineProperty;
4613
+ var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
4614
+ var __publicField = (obj, key, value) => {
4615
+ __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);
4616
+ return value;
4617
+ };
3501
4618
  class XataError extends Error {
3502
4619
  constructor(message, status) {
3503
4620
  super(message);
4621
+ __publicField(this, "status");
3504
4622
  this.status = status;
3505
4623
  }
3506
4624
  }
3507
4625
 
3508
- export { BaseClient, FetcherError, operationsByTag as Operations, PAGINATION_DEFAULT_OFFSET, PAGINATION_DEFAULT_SIZE, PAGINATION_MAX_OFFSET, PAGINATION_MAX_SIZE, Page, Query, RecordArray, Repository, RestRepository, SchemaPlugin, SearchPlugin, Serializer, SimpleCache, XataApiClient, XataApiPlugin, XataError, XataPlugin, acceptWorkspaceMemberInvite, addGitBranchesEntry, addTableColumn, aggregateTable, applyBranchSchemaEdit, askTable, branchTransaction, buildClient, buildWorkerRunner, bulkInsertTableRecords, cancelWorkspaceMemberInvite, compareBranchSchemas, compareBranchWithUserSchema, compareMigrationRequest, contains, createBranch, createDatabase, createMigrationRequest, createTable, createUserAPIKey, createWorkspace, deleteBranch, deleteColumn, deleteDatabase, deleteDatabaseGithubSettings, deleteRecord, deleteTable, deleteUser, deleteUserAPIKey, deleteWorkspace, deserialize, endsWith, equals, executeBranchMigrationPlan, exists, ge, getAPIKey, getBranch, getBranchDetails, getBranchList, getBranchMetadata, getBranchMigrationHistory, getBranchMigrationPlan, getBranchSchemaHistory, getBranchStats, getColumn, getDatabaseGithubSettings, getDatabaseList, getDatabaseMetadata, getDatabaseURL, getGitBranchesMapping, getHostUrl, getMigrationRequest, getMigrationRequestIsMerged, getRecord, getTableColumns, getTableSchema, getUser, getUserAPIKeys, getWorkspace, getWorkspaceMembersList, getWorkspacesList, greaterEquals, greaterThan, greaterThanEquals, gt, gte, includes, includesAll, includesAny, includesNone, insertRecord, insertRecordWithID, inviteWorkspaceMember, is, isCursorPaginationOptions, isHostProviderAlias, isHostProviderBuilder, isIdentifiable, isNot, isXataRecord, le, lessEquals, lessThan, lessThanEquals, listMigrationRequestsCommits, listRegions, lt, lte, mergeMigrationRequest, notExists, operationsByTag, parseProviderString, parseWorkspacesUrlParts, pattern, previewBranchSchemaEdit, queryMigrationRequests, queryTable, removeGitBranchesEntry, removeWorkspaceMember, resendWorkspaceMemberInvite, resolveBranch, searchBranch, searchTable, serialize, setTableSchema, startsWith, summarizeTable, updateBranchMetadata, updateBranchSchema, updateColumn, updateDatabaseGithubSettings, updateDatabaseMetadata, updateMigrationRequest, updateRecordWithID, updateTable, updateUser, updateWorkspace, updateWorkspaceMemberInvite, updateWorkspaceMemberRole, upsertRecordWithID, vectorSearchTable };
4626
+ export { BaseClient, FetcherError, operationsByTag as Operations, PAGINATION_DEFAULT_OFFSET, PAGINATION_DEFAULT_SIZE, PAGINATION_MAX_OFFSET, PAGINATION_MAX_SIZE, Page, Query, RecordArray, RecordColumnTypes, Repository, RestRepository, SchemaPlugin, SearchPlugin, Serializer, SimpleCache, XataApiClient, XataApiPlugin, XataError, XataFile, XataPlugin, acceptWorkspaceMemberInvite, addGitBranchesEntry, addTableColumn, aggregateTable, applyBranchSchemaEdit, askTable, askTableSession, branchTransaction, buildClient, buildPreviewBranchName, buildProviderString, buildWorkerRunner, bulkInsertTableRecords, cancelWorkspaceMemberInvite, compareBranchSchemas, compareBranchWithUserSchema, compareMigrationRequest, contains, copyBranch, createBranch, createDatabase, createMigrationRequest, createTable, createUserAPIKey, createWorkspace, deleteBranch, deleteColumn, deleteDatabase, deleteDatabaseGithubSettings, deleteFile, deleteFileItem, deleteRecord, deleteTable, deleteUser, deleteUserAPIKey, deleteWorkspace, deserialize, endsWith, equals, executeBranchMigrationPlan, exists, fileAccess, ge, generateAccessToken, getAPIKey, getBranch, getBranchDetails, getBranchList, getBranchMetadata, getBranchMigrationHistory, getBranchMigrationPlan, getBranchSchemaHistory, getBranchStats, getColumn, getDatabaseGithubSettings, getDatabaseList, getDatabaseMetadata, getDatabaseURL, getFile, getFileItem, getGitBranchesMapping, getHostUrl, getMigrationRequest, getMigrationRequestIsMerged, getPreviewBranch, getRecord, getTableColumns, getTableSchema, getUser, getUserAPIKeys, getWorkspace, getWorkspaceMembersList, getWorkspacesList, grantAuthorizationCode, greaterEquals, greaterThan, greaterThanEquals, gt, gte, includes, includesAll, includesAny, includesNone, insertRecord, insertRecordWithID, inviteWorkspaceMember, is, isCursorPaginationOptions, isHostProviderAlias, isHostProviderBuilder, isIdentifiable, isNot, isXataRecord, le, lessEquals, lessThan, lessThanEquals, listMigrationRequestsCommits, listRegions, lt, lte, mergeMigrationRequest, notExists, operationsByTag, parseProviderString, parseWorkspacesUrlParts, pattern, previewBranchSchemaEdit, pushBranchMigrations, putFile, putFileItem, queryMigrationRequests, queryTable, removeGitBranchesEntry, removeWorkspaceMember, renameDatabase, resendWorkspaceMemberInvite, resolveBranch, searchBranch, searchTable, serialize, setTableSchema, sqlQuery, startsWith, summarizeTable, updateBranchMetadata, updateBranchSchema, updateColumn, updateDatabaseGithubSettings, updateDatabaseMetadata, updateMigrationRequest, updateRecordWithID, updateTable, updateUser, updateWorkspace, updateWorkspaceMemberInvite, updateWorkspaceMemberRole, upsertRecordWithID, vectorSearchTable };
3509
4627
  //# sourceMappingURL=index.mjs.map
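The export list grows accordingly: alongside the new data-plane operations (file APIs, askTableSession, sqlQuery, and so on), the symbols used in the sketches above are all importable from the package root:

import {
  BaseClient,
  SimpleCache,
  XataFile,
  RecordColumnTypes,
  getPreviewBranch
} from '@xata.io/client';

console.log(RecordColumnTypes.includes('file')); // true
console.log(typeof BaseClient, typeof SimpleCache, typeof XataFile, typeof getPreviewBranch);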