@xata.io/client 0.0.0-alpha.vf9f8d99 → 0.0.0-alpha.vfa37ea7

This diff compares publicly available versions of the package as released to one of the supported registries. It is provided for informational purposes only and reflects the package contents and the changes between versions exactly as they appear in their respective public registries.
package/dist/index.cjs CHANGED
@@ -93,8 +93,10 @@ function getEnvironment() {
93
93
  apiKey: process.env.XATA_API_KEY ?? getGlobalApiKey(),
94
94
  databaseURL: process.env.XATA_DATABASE_URL ?? getGlobalDatabaseURL(),
95
95
  branch: process.env.XATA_BRANCH ?? getGlobalBranch(),
96
- envBranch: process.env.VERCEL_GIT_COMMIT_REF ?? process.env.CF_PAGES_BRANCH ?? process.env.BRANCH,
97
- fallbackBranch: process.env.XATA_FALLBACK_BRANCH ?? getGlobalFallbackBranch()
96
+ deployPreview: process.env.XATA_PREVIEW,
97
+ deployPreviewBranch: process.env.XATA_PREVIEW_BRANCH,
98
+ vercelGitCommitRef: process.env.VERCEL_GIT_COMMIT_REF,
99
+ vercelGitRepoOwner: process.env.VERCEL_GIT_REPO_OWNER
98
100
  };
99
101
  }
100
102
  } catch (err) {
@@ -105,8 +107,10 @@ function getEnvironment() {
105
107
  apiKey: Deno.env.get("XATA_API_KEY") ?? getGlobalApiKey(),
106
108
  databaseURL: Deno.env.get("XATA_DATABASE_URL") ?? getGlobalDatabaseURL(),
107
109
  branch: Deno.env.get("XATA_BRANCH") ?? getGlobalBranch(),
108
- envBranch: Deno.env.get("VERCEL_GIT_COMMIT_REF") ?? Deno.env.get("CF_PAGES_BRANCH") ?? Deno.env.get("BRANCH"),
109
- fallbackBranch: Deno.env.get("XATA_FALLBACK_BRANCH") ?? getGlobalFallbackBranch()
110
+ deployPreview: Deno.env.get("XATA_PREVIEW"),
111
+ deployPreviewBranch: Deno.env.get("XATA_PREVIEW_BRANCH"),
112
+ vercelGitCommitRef: Deno.env.get("VERCEL_GIT_COMMIT_REF"),
113
+ vercelGitRepoOwner: Deno.env.get("VERCEL_GIT_REPO_OWNER")
110
114
  };
111
115
  }
112
116
  } catch (err) {
@@ -115,8 +119,10 @@ function getEnvironment() {
115
119
  apiKey: getGlobalApiKey(),
116
120
  databaseURL: getGlobalDatabaseURL(),
117
121
  branch: getGlobalBranch(),
118
- envBranch: void 0,
119
- fallbackBranch: getGlobalFallbackBranch()
122
+ deployPreview: void 0,
123
+ deployPreviewBranch: void 0,
124
+ vercelGitCommitRef: void 0,
125
+ vercelGitRepoOwner: void 0
120
126
  };
121
127
  }
122
128
  function getEnableBrowserVariable() {
@@ -159,34 +165,48 @@ function getGlobalBranch() {
159
165
  return void 0;
160
166
  }
161
167
  }
162
- function getGlobalFallbackBranch() {
168
+ function getDatabaseURL() {
163
169
  try {
164
- return XATA_FALLBACK_BRANCH;
170
+ const { databaseURL } = getEnvironment();
171
+ return databaseURL;
165
172
  } catch (err) {
166
173
  return void 0;
167
174
  }
168
175
  }
169
- function getDatabaseURL() {
176
+ function getAPIKey() {
170
177
  try {
171
- const { databaseURL } = getEnvironment();
172
- return databaseURL;
178
+ const { apiKey } = getEnvironment();
179
+ return apiKey;
173
180
  } catch (err) {
174
181
  return void 0;
175
182
  }
176
183
  }
177
184
  function getBranch() {
178
185
  try {
179
- const { branch, envBranch } = getEnvironment();
180
- return branch ?? envBranch;
186
+ const { branch } = getEnvironment();
187
+ return branch ?? "main";
181
188
  } catch (err) {
182
189
  return void 0;
183
190
  }
184
191
  }
185
-
186
- function getAPIKey() {
192
+ function buildPreviewBranchName({ org, branch }) {
193
+ return `preview-${org}-${branch}`;
194
+ }
195
+ function getPreviewBranch() {
187
196
  try {
188
- const { apiKey } = getEnvironment();
189
- return apiKey;
197
+ const { deployPreview, deployPreviewBranch, vercelGitCommitRef, vercelGitRepoOwner } = getEnvironment();
198
+ if (deployPreviewBranch)
199
+ return deployPreviewBranch;
200
+ switch (deployPreview) {
201
+ case "vercel": {
202
+ if (!vercelGitCommitRef || !vercelGitRepoOwner) {
203
+ console.warn("XATA_PREVIEW=vercel but VERCEL_GIT_COMMIT_REF or VERCEL_GIT_REPO_OWNER is not valid");
204
+ return void 0;
205
+ }
206
+ return buildPreviewBranchName({ org: vercelGitRepoOwner, branch: vercelGitCommitRef });
207
+ }
208
+ }
209
+ return void 0;
190
210
  } catch (err) {
191
211
  return void 0;
192
212
  }
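The hunk above replaces the old envBranch/fallbackBranch handling with deploy-preview detection: getPreviewBranch() derives a branch name from XATA_PREVIEW/XATA_PREVIEW_BRANCH (or the Vercel variables), buildPreviewBranchName() defines its shape, and getBranch() now falls back to "main". A minimal sketch of the new behaviour (both helpers are exported by this build; the environment values below are made up):

// Assume XATA_PREVIEW=vercel, VERCEL_GIT_REPO_OWNER=acme and
// VERCEL_GIT_COMMIT_REF=feat/files are set in the environment.
const { getPreviewBranch, buildPreviewBranchName } = require('@xata.io/client');

console.log(getPreviewBranch());
// -> "preview-acme-feat/files"; if XATA_PREVIEW_BRANCH is set, it is returned as-is instead

console.log(buildPreviewBranchName({ org: 'acme', branch: 'feat/files' }));
// -> "preview-acme-feat/files"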
@@ -246,7 +266,7 @@ class ApiRequestPool {
246
266
  return __privateGet$8(this, _fetch);
247
267
  }
248
268
  request(url, options) {
249
- const start = new Date();
269
+ const start = /* @__PURE__ */ new Date();
250
270
  const fetch2 = this.getFetch();
251
271
  const runRequest = async (stalled = false) => {
252
272
  const response = await fetch2(url, options);
@@ -256,7 +276,7 @@ class ApiRequestPool {
256
276
  return await runRequest(true);
257
277
  }
258
278
  if (stalled) {
259
- const stalledTime = new Date().getTime() - start.getTime();
279
+ const stalledTime = (/* @__PURE__ */ new Date()).getTime() - start.getTime();
260
280
  console.warn(`A request to Xata hit your workspace limits, was retried and stalled for ${stalledTime}ms`);
261
281
  }
262
282
  return response;
@@ -299,7 +319,180 @@ function generateUUID() {
299
319
  });
300
320
  }
301
321
 
302
- const VERSION = "0.22.3";
322
+ async function getBytes(stream, onChunk) {
323
+ const reader = stream.getReader();
324
+ let result;
325
+ while (!(result = await reader.read()).done) {
326
+ onChunk(result.value);
327
+ }
328
+ }
329
+ function getLines(onLine) {
330
+ let buffer;
331
+ let position;
332
+ let fieldLength;
333
+ let discardTrailingNewline = false;
334
+ return function onChunk(arr) {
335
+ if (buffer === void 0) {
336
+ buffer = arr;
337
+ position = 0;
338
+ fieldLength = -1;
339
+ } else {
340
+ buffer = concat(buffer, arr);
341
+ }
342
+ const bufLength = buffer.length;
343
+ let lineStart = 0;
344
+ while (position < bufLength) {
345
+ if (discardTrailingNewline) {
346
+ if (buffer[position] === 10 /* NewLine */) {
347
+ lineStart = ++position;
348
+ }
349
+ discardTrailingNewline = false;
350
+ }
351
+ let lineEnd = -1;
352
+ for (; position < bufLength && lineEnd === -1; ++position) {
353
+ switch (buffer[position]) {
354
+ case 58 /* Colon */:
355
+ if (fieldLength === -1) {
356
+ fieldLength = position - lineStart;
357
+ }
358
+ break;
359
+ case 13 /* CarriageReturn */:
360
+ discardTrailingNewline = true;
361
+ case 10 /* NewLine */:
362
+ lineEnd = position;
363
+ break;
364
+ }
365
+ }
366
+ if (lineEnd === -1) {
367
+ break;
368
+ }
369
+ onLine(buffer.subarray(lineStart, lineEnd), fieldLength);
370
+ lineStart = position;
371
+ fieldLength = -1;
372
+ }
373
+ if (lineStart === bufLength) {
374
+ buffer = void 0;
375
+ } else if (lineStart !== 0) {
376
+ buffer = buffer.subarray(lineStart);
377
+ position -= lineStart;
378
+ }
379
+ };
380
+ }
381
+ function getMessages(onId, onRetry, onMessage) {
382
+ let message = newMessage();
383
+ const decoder = new TextDecoder();
384
+ return function onLine(line, fieldLength) {
385
+ if (line.length === 0) {
386
+ onMessage?.(message);
387
+ message = newMessage();
388
+ } else if (fieldLength > 0) {
389
+ const field = decoder.decode(line.subarray(0, fieldLength));
390
+ const valueOffset = fieldLength + (line[fieldLength + 1] === 32 /* Space */ ? 2 : 1);
391
+ const value = decoder.decode(line.subarray(valueOffset));
392
+ switch (field) {
393
+ case "data":
394
+ message.data = message.data ? message.data + "\n" + value : value;
395
+ break;
396
+ case "event":
397
+ message.event = value;
398
+ break;
399
+ case "id":
400
+ onId(message.id = value);
401
+ break;
402
+ case "retry":
403
+ const retry = parseInt(value, 10);
404
+ if (!isNaN(retry)) {
405
+ onRetry(message.retry = retry);
406
+ }
407
+ break;
408
+ }
409
+ }
410
+ };
411
+ }
412
+ function concat(a, b) {
413
+ const res = new Uint8Array(a.length + b.length);
414
+ res.set(a);
415
+ res.set(b, a.length);
416
+ return res;
417
+ }
418
+ function newMessage() {
419
+ return {
420
+ data: "",
421
+ event: "",
422
+ id: "",
423
+ retry: void 0
424
+ };
425
+ }
426
+ const EventStreamContentType = "text/event-stream";
427
+ const LastEventId = "last-event-id";
428
+ function fetchEventSource(input, {
429
+ signal: inputSignal,
430
+ headers: inputHeaders,
431
+ onopen: inputOnOpen,
432
+ onmessage,
433
+ onclose,
434
+ onerror,
435
+ fetch: inputFetch,
436
+ ...rest
437
+ }) {
438
+ return new Promise((resolve, reject) => {
439
+ const headers = { ...inputHeaders };
440
+ if (!headers.accept) {
441
+ headers.accept = EventStreamContentType;
442
+ }
443
+ let curRequestController;
444
+ function dispose() {
445
+ curRequestController.abort();
446
+ }
447
+ inputSignal?.addEventListener("abort", () => {
448
+ dispose();
449
+ resolve();
450
+ });
451
+ const fetchImpl = inputFetch ?? fetch;
452
+ const onopen = inputOnOpen ?? defaultOnOpen;
453
+ async function create() {
454
+ curRequestController = new AbortController();
455
+ try {
456
+ const response = await fetchImpl(input, {
457
+ ...rest,
458
+ headers,
459
+ signal: curRequestController.signal
460
+ });
461
+ await onopen(response);
462
+ await getBytes(
463
+ response.body,
464
+ getLines(
465
+ getMessages(
466
+ (id) => {
467
+ if (id) {
468
+ headers[LastEventId] = id;
469
+ } else {
470
+ delete headers[LastEventId];
471
+ }
472
+ },
473
+ (_retry) => {
474
+ },
475
+ onmessage
476
+ )
477
+ )
478
+ );
479
+ onclose?.();
480
+ dispose();
481
+ resolve();
482
+ } catch (err) {
483
+ }
484
+ }
485
+ create();
486
+ });
487
+ }
488
+ function defaultOnOpen(response) {
489
+ const contentType = response.headers?.get("content-type");
490
+ if (!contentType?.startsWith(EventStreamContentType)) {
491
+ throw new Error(`Expected content-type to be ${EventStreamContentType}, Actual: ${contentType}`);
492
+ }
493
+ }
494
+
495
+ const VERSION = "0.24.0";
303
496
 
304
497
  class ErrorWithCause extends Error {
305
498
  constructor(message, options) {
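The block above also vendors a small server-sent-events parser (getBytes, getLines, getMessages, fetchEventSource) used for streaming responses. These helpers are internal to the bundle and not exported, so the sketch below only illustrates the parsing behaviour they implement, assuming the functions were copied into scope (for example in a test file):

const encoder = new TextEncoder();

// getLines() splits incoming bytes into SSE lines; getMessages() assembles
// them into { data, event, id, retry } objects and flushes on each blank line.
const onChunk = getLines(
  getMessages(
    (id) => console.log('id field:', id),
    (retry) => console.log('retry field:', retry),
    (message) => console.log('message:', message)
  )
);

onChunk(encoder.encode('event: chunk\ndata: {"text":"hello"}\ndata: done\n\n'));
// -> message: { data: '{"text":"hello"}\ndone', event: 'chunk', id: '', retry: undefined }
// Consecutive data: fields are joined with "\n"; the empty line dispatches the message.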
@@ -458,6 +651,59 @@ async function fetch$1({
458
651
  { [TraceAttributes.HTTP_METHOD]: method.toUpperCase(), [TraceAttributes.HTTP_ROUTE]: path }
459
652
  );
460
653
  }
654
+ function fetchSSERequest({
655
+ url: path,
656
+ method,
657
+ body,
658
+ headers: customHeaders,
659
+ pathParams,
660
+ queryParams,
661
+ fetch: fetch2,
662
+ apiKey,
663
+ endpoint,
664
+ apiUrl,
665
+ workspacesApiUrl,
666
+ onMessage,
667
+ onError,
668
+ onClose,
669
+ signal,
670
+ clientID,
671
+ sessionID,
672
+ clientName,
673
+ xataAgentExtra
674
+ }) {
675
+ const baseUrl = buildBaseUrl({ endpoint, path, workspacesApiUrl, pathParams, apiUrl });
676
+ const fullUrl = resolveUrl(baseUrl, queryParams, pathParams);
677
+ const url = fullUrl.includes("localhost") ? fullUrl.replace(/^[^.]+\./, "http://") : fullUrl;
678
+ void fetchEventSource(url, {
679
+ method,
680
+ body: JSON.stringify(body),
681
+ fetch: fetch2,
682
+ signal,
683
+ headers: {
684
+ "X-Xata-Client-ID": clientID ?? defaultClientID,
685
+ "X-Xata-Session-ID": sessionID ?? generateUUID(),
686
+ "X-Xata-Agent": compact([
687
+ ["client", "TS_SDK"],
688
+ ["version", VERSION],
689
+ isDefined(clientName) ? ["service", clientName] : void 0,
690
+ ...Object.entries(xataAgentExtra ?? {})
691
+ ]).map(([key, value]) => `${key}=${value}`).join("; "),
692
+ ...customHeaders,
693
+ Authorization: `Bearer ${apiKey}`,
694
+ "Content-Type": "application/json"
695
+ },
696
+ onmessage(ev) {
697
+ onMessage?.(JSON.parse(ev.data));
698
+ },
699
+ onerror(ev) {
700
+ onError?.(JSON.parse(ev.data));
701
+ },
702
+ onclose() {
703
+ onClose?.();
704
+ }
705
+ });
706
+ }
461
707
  function parseUrl(url) {
462
708
  try {
463
709
  const { host, protocol } = new URL(url);
@@ -488,6 +734,12 @@ const deleteBranch = (variables, signal) => dataPlaneFetch({
488
734
  ...variables,
489
735
  signal
490
736
  });
737
+ const copyBranch = (variables, signal) => dataPlaneFetch({
738
+ url: "/db/{dbBranchName}/copy",
739
+ method: "post",
740
+ ...variables,
741
+ signal
742
+ });
491
743
  const updateBranchMetadata = (variables, signal) => dataPlaneFetch({
492
744
  url: "/db/{dbBranchName}/metadata",
493
745
  method: "put",
@@ -537,6 +789,7 @@ const compareBranchSchemas = (variables, signal) => dataPlaneFetch({ url: "/db/{
537
789
  const updateBranchSchema = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/update", method: "post", ...variables, signal });
538
790
  const previewBranchSchemaEdit = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/preview", method: "post", ...variables, signal });
539
791
  const applyBranchSchemaEdit = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/apply", method: "post", ...variables, signal });
792
+ const pushBranchMigrations = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/schema/push", method: "post", ...variables, signal });
540
793
  const createTable = (variables, signal) => dataPlaneFetch({
541
794
  url: "/db/{dbBranchName}/tables/{tableName}",
542
795
  method: "put",
@@ -581,6 +834,42 @@ const deleteColumn = (variables, signal) => dataPlaneFetch({
581
834
  });
582
835
  const branchTransaction = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/transaction", method: "post", ...variables, signal });
583
836
  const insertRecord = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/data", method: "post", ...variables, signal });
837
+ const getFileItem = (variables, signal) => dataPlaneFetch({
838
+ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}/column/{columnName}/file/{fileId}",
839
+ method: "get",
840
+ ...variables,
841
+ signal
842
+ });
843
+ const putFileItem = (variables, signal) => dataPlaneFetch({
844
+ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}/column/{columnName}/file/{fileId}",
845
+ method: "put",
846
+ ...variables,
847
+ signal
848
+ });
849
+ const deleteFileItem = (variables, signal) => dataPlaneFetch({
850
+ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}/column/{columnName}/file/{fileId}",
851
+ method: "delete",
852
+ ...variables,
853
+ signal
854
+ });
855
+ const getFile = (variables, signal) => dataPlaneFetch({
856
+ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}/column/{columnName}/file",
857
+ method: "get",
858
+ ...variables,
859
+ signal
860
+ });
861
+ const putFile = (variables, signal) => dataPlaneFetch({
862
+ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}/column/{columnName}/file",
863
+ method: "put",
864
+ ...variables,
865
+ signal
866
+ });
867
+ const deleteFile = (variables, signal) => dataPlaneFetch({
868
+ url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}/column/{columnName}/file",
869
+ method: "delete",
870
+ ...variables,
871
+ signal
872
+ });
584
873
  const getRecord = (variables, signal) => dataPlaneFetch({
585
874
  url: "/db/{dbBranchName}/tables/{tableName}/data/{recordId}",
586
875
  method: "get",
@@ -610,6 +899,12 @@ const searchTable = (variables, signal) => dataPlaneFetch({
610
899
  ...variables,
611
900
  signal
612
901
  });
902
+ const sqlQuery = (variables, signal) => dataPlaneFetch({
903
+ url: "/db/{dbBranchName}/sql",
904
+ method: "post",
905
+ ...variables,
906
+ signal
907
+ });
613
908
  const vectorSearchTable = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/vectorSearch", method: "post", ...variables, signal });
614
909
  const askTable = (variables, signal) => dataPlaneFetch({
615
910
  url: "/db/{dbBranchName}/tables/{tableName}/ask",
@@ -619,12 +914,19 @@ const askTable = (variables, signal) => dataPlaneFetch({
619
914
  });
620
915
  const summarizeTable = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/summarize", method: "post", ...variables, signal });
621
916
  const aggregateTable = (variables, signal) => dataPlaneFetch({ url: "/db/{dbBranchName}/tables/{tableName}/aggregate", method: "post", ...variables, signal });
917
+ const fileAccess = (variables, signal) => dataPlaneFetch({
918
+ url: "/file/{fileId}",
919
+ method: "get",
920
+ ...variables,
921
+ signal
922
+ });
622
923
  const operationsByTag$2 = {
623
924
  branch: {
624
925
  getBranchList,
625
926
  getBranchDetails,
626
927
  createBranch,
627
928
  deleteBranch,
929
+ copyBranch,
628
930
  updateBranchMetadata,
629
931
  getBranchMetadata,
630
932
  getBranchStats,
@@ -642,7 +944,8 @@ const operationsByTag$2 = {
642
944
  compareBranchSchemas,
643
945
  updateBranchSchema,
644
946
  previewBranchSchemaEdit,
645
- applyBranchSchemaEdit
947
+ applyBranchSchemaEdit,
948
+ pushBranchMigrations
646
949
  },
647
950
  migrationRequests: {
648
951
  queryMigrationRequests,
@@ -676,10 +979,12 @@ const operationsByTag$2 = {
676
979
  deleteRecord,
677
980
  bulkInsertTableRecords
678
981
  },
982
+ files: { getFileItem, putFileItem, deleteFileItem, getFile, putFile, deleteFile, fileAccess },
679
983
  searchAndFilter: {
680
984
  queryTable,
681
985
  searchBranch,
682
986
  searchTable,
987
+ sqlQuery,
683
988
  vectorSearchTable,
684
989
  askTable,
685
990
  summarizeTable,
@@ -783,6 +1088,7 @@ const deleteDatabase = (variables, signal) => controlPlaneFetch({
783
1088
  });
784
1089
  const getDatabaseMetadata = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}", method: "get", ...variables, signal });
785
1090
  const updateDatabaseMetadata = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}", method: "patch", ...variables, signal });
1091
+ const renameDatabase = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}/rename", method: "post", ...variables, signal });
786
1092
  const getDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}/github", method: "get", ...variables, signal });
787
1093
  const updateDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}/github", method: "put", ...variables, signal });
788
1094
  const deleteDatabaseGithubSettings = (variables, signal) => controlPlaneFetch({ url: "/workspaces/{workspaceId}/dbs/{dbName}/github", method: "delete", ...variables, signal });
@@ -818,6 +1124,7 @@ const operationsByTag$1 = {
818
1124
  deleteDatabase,
819
1125
  getDatabaseMetadata,
820
1126
  updateDatabaseMetadata,
1127
+ renameDatabase,
821
1128
  getDatabaseGithubSettings,
822
1129
  updateDatabaseGithubSettings,
823
1130
  deleteDatabaseGithubSettings,
@@ -843,6 +1150,10 @@ const providers = {
843
1150
  staging: {
844
1151
  main: "https://api.staging-xata.dev",
845
1152
  workspaces: "https://{workspaceId}.{region}.staging-xata.dev"
1153
+ },
1154
+ dev: {
1155
+ main: "https://api.dev-xata.dev",
1156
+ workspaces: "https://{workspaceId}.{region}.dev-xata.dev"
846
1157
  }
847
1158
  };
848
1159
  function isHostProviderAlias(alias) {
@@ -860,6 +1171,11 @@ function parseProviderString(provider = "production") {
860
1171
  return null;
861
1172
  return { main, workspaces };
862
1173
  }
1174
+ function buildProviderString(provider) {
1175
+ if (isHostProviderAlias(provider))
1176
+ return provider;
1177
+ return `${provider.main},${provider.workspaces}`;
1178
+ }
863
1179
  function parseWorkspacesUrlParts(url) {
864
1180
  if (!isString(url))
865
1181
  return null;
@@ -964,6 +1280,11 @@ class XataApiClient {
964
1280
  __privateGet$7(this, _namespaces).records = new RecordsApi(__privateGet$7(this, _extraProps));
965
1281
  return __privateGet$7(this, _namespaces).records;
966
1282
  }
1283
+ get files() {
1284
+ if (!__privateGet$7(this, _namespaces).files)
1285
+ __privateGet$7(this, _namespaces).files = new FilesApi(__privateGet$7(this, _extraProps));
1286
+ return __privateGet$7(this, _namespaces).files;
1287
+ }
967
1288
  get searchAndFilter() {
968
1289
  if (!__privateGet$7(this, _namespaces).searchAndFilter)
969
1290
  __privateGet$7(this, _namespaces).searchAndFilter = new SearchAndFilterApi(__privateGet$7(this, _extraProps));
@@ -1172,6 +1493,20 @@ class BranchApi {
1172
1493
  ...this.extraProps
1173
1494
  });
1174
1495
  }
1496
+ copyBranch({
1497
+ workspace,
1498
+ region,
1499
+ database,
1500
+ branch,
1501
+ destinationBranch,
1502
+ limit
1503
+ }) {
1504
+ return operationsByTag.branch.copyBranch({
1505
+ pathParams: { workspace, region, dbBranchName: `${database}:${branch}` },
1506
+ body: { destinationBranch, limit },
1507
+ ...this.extraProps
1508
+ });
1509
+ }
1175
1510
  updateBranchMetadata({
1176
1511
  workspace,
1177
1512
  region,
@@ -1527,6 +1862,164 @@ class RecordsApi {
1527
1862
  });
1528
1863
  }
1529
1864
  }
1865
+ class FilesApi {
1866
+ constructor(extraProps) {
1867
+ this.extraProps = extraProps;
1868
+ }
1869
+ getFileItem({
1870
+ workspace,
1871
+ region,
1872
+ database,
1873
+ branch,
1874
+ table,
1875
+ record,
1876
+ column,
1877
+ fileId
1878
+ }) {
1879
+ return operationsByTag.files.getFileItem({
1880
+ pathParams: {
1881
+ workspace,
1882
+ region,
1883
+ dbBranchName: `${database}:${branch}`,
1884
+ tableName: table,
1885
+ recordId: record,
1886
+ columnName: column,
1887
+ fileId
1888
+ },
1889
+ ...this.extraProps
1890
+ });
1891
+ }
1892
+ putFileItem({
1893
+ workspace,
1894
+ region,
1895
+ database,
1896
+ branch,
1897
+ table,
1898
+ record,
1899
+ column,
1900
+ fileId,
1901
+ file
1902
+ }) {
1903
+ return operationsByTag.files.putFileItem({
1904
+ pathParams: {
1905
+ workspace,
1906
+ region,
1907
+ dbBranchName: `${database}:${branch}`,
1908
+ tableName: table,
1909
+ recordId: record,
1910
+ columnName: column,
1911
+ fileId
1912
+ },
1913
+ // @ts-ignore
1914
+ body: file,
1915
+ ...this.extraProps
1916
+ });
1917
+ }
1918
+ deleteFileItem({
1919
+ workspace,
1920
+ region,
1921
+ database,
1922
+ branch,
1923
+ table,
1924
+ record,
1925
+ column,
1926
+ fileId
1927
+ }) {
1928
+ return operationsByTag.files.deleteFileItem({
1929
+ pathParams: {
1930
+ workspace,
1931
+ region,
1932
+ dbBranchName: `${database}:${branch}`,
1933
+ tableName: table,
1934
+ recordId: record,
1935
+ columnName: column,
1936
+ fileId
1937
+ },
1938
+ ...this.extraProps
1939
+ });
1940
+ }
1941
+ getFile({
1942
+ workspace,
1943
+ region,
1944
+ database,
1945
+ branch,
1946
+ table,
1947
+ record,
1948
+ column
1949
+ }) {
1950
+ return operationsByTag.files.getFile({
1951
+ pathParams: {
1952
+ workspace,
1953
+ region,
1954
+ dbBranchName: `${database}:${branch}`,
1955
+ tableName: table,
1956
+ recordId: record,
1957
+ columnName: column
1958
+ },
1959
+ ...this.extraProps
1960
+ });
1961
+ }
1962
+ putFile({
1963
+ workspace,
1964
+ region,
1965
+ database,
1966
+ branch,
1967
+ table,
1968
+ record,
1969
+ column,
1970
+ file
1971
+ }) {
1972
+ return operationsByTag.files.putFile({
1973
+ pathParams: {
1974
+ workspace,
1975
+ region,
1976
+ dbBranchName: `${database}:${branch}`,
1977
+ tableName: table,
1978
+ recordId: record,
1979
+ columnName: column
1980
+ },
1981
+ body: file,
1982
+ ...this.extraProps
1983
+ });
1984
+ }
1985
+ deleteFile({
1986
+ workspace,
1987
+ region,
1988
+ database,
1989
+ branch,
1990
+ table,
1991
+ record,
1992
+ column
1993
+ }) {
1994
+ return operationsByTag.files.deleteFile({
1995
+ pathParams: {
1996
+ workspace,
1997
+ region,
1998
+ dbBranchName: `${database}:${branch}`,
1999
+ tableName: table,
2000
+ recordId: record,
2001
+ columnName: column
2002
+ },
2003
+ ...this.extraProps
2004
+ });
2005
+ }
2006
+ fileAccess({
2007
+ workspace,
2008
+ region,
2009
+ fileId,
2010
+ verify
2011
+ }) {
2012
+ return operationsByTag.files.fileAccess({
2013
+ pathParams: {
2014
+ workspace,
2015
+ region,
2016
+ fileId
2017
+ },
2018
+ queryParams: { verify },
2019
+ ...this.extraProps
2020
+ });
2021
+ }
2022
+ }
1530
2023
  class SearchAndFilterApi {
1531
2024
  constructor(extraProps) {
1532
2025
  this.extraProps = extraProps;
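The new FilesApi class above is reachable through the files getter added to XataApiClient earlier in this diff. A minimal usage sketch, assuming an API key in the environment and made-up workspace, database, table and column names (the avatar file column is hypothetical):

const { XataApiClient } = require('@xata.io/client');
const api = new XataApiClient({ apiKey: process.env.XATA_API_KEY });

(async () => {
  // Upload the content of a file column on one record...
  await api.files.putFile({
    workspace: 'my-workspace',
    region: 'us-east-1',
    database: 'my-db',
    branch: 'main',
    table: 'users',
    record: 'rec_123',
    column: 'avatar',
    file: new Blob(['...'], { type: 'image/png' })
  });

  // ...and read it back. The *FileItem variants additionally take a fileId
  // (for columns holding several files), and fileAccess({ fileId, verify })
  // goes through the standalone /file/{fileId} endpoint.
  const file = await api.files.getFile({
    workspace: 'my-workspace',
    region: 'us-east-1',
    database: 'my-db',
    branch: 'main',
    table: 'users',
    record: 'rec_123',
    column: 'avatar'
  });
  console.log(file);
})();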
@@ -1610,17 +2103,11 @@ class SearchAndFilterApi {
1610
2103
  database,
1611
2104
  branch,
1612
2105
  table,
1613
- question,
1614
- fuzziness,
1615
- target,
1616
- prefix,
1617
- filter,
1618
- boosters,
1619
- rules
2106
+ options
1620
2107
  }) {
1621
2108
  return operationsByTag.searchAndFilter.askTable({
1622
2109
  pathParams: { workspace, region, dbBranchName: `${database}:${branch}`, tableName: table },
1623
- body: { question, fuzziness, target, prefix, filter, boosters, rules },
2110
+ body: { ...options },
1624
2111
  ...this.extraProps
1625
2112
  });
1626
2113
  }
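askTable on SearchAndFilterApi now takes a single options object that is sent as the request body, replacing the individual question/fuzziness/target/prefix/filter/boosters/rules parameters. A sketch of the new call shape (api is an XataApiClient as in the sketch above; the identifiers are made up):

api.searchAndFilter
  .askTable({
    workspace: 'my-workspace',
    region: 'us-east-1',
    database: 'my-db',
    branch: 'main',
    table: 'posts',
    options: { question: 'How do I paginate results?' }
  })
  .then((response) => console.log(response));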
@@ -1888,6 +2375,19 @@ class MigrationsApi {
1888
2375
  ...this.extraProps
1889
2376
  });
1890
2377
  }
2378
+ pushBranchMigrations({
2379
+ workspace,
2380
+ region,
2381
+ database,
2382
+ branch,
2383
+ migrations
2384
+ }) {
2385
+ return operationsByTag.migrations.pushBranchMigrations({
2386
+ pathParams: { workspace, region, dbBranchName: `${database}:${branch}` },
2387
+ body: { migrations },
2388
+ ...this.extraProps
2389
+ });
2390
+ }
1891
2391
  }
1892
2392
  class DatabaseApi {
1893
2393
  constructor(extraProps) {
@@ -1939,6 +2439,17 @@ class DatabaseApi {
1939
2439
  ...this.extraProps
1940
2440
  });
1941
2441
  }
2442
+ renameDatabase({
2443
+ workspace,
2444
+ database,
2445
+ newName
2446
+ }) {
2447
+ return operationsByTag.databases.renameDatabase({
2448
+ pathParams: { workspaceId: workspace, dbName: database },
2449
+ body: { newName },
2450
+ ...this.extraProps
2451
+ });
2452
+ }
1942
2453
  getDatabaseGithubSettings({
1943
2454
  workspace,
1944
2455
  database
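Rounding out the management surface, BranchApi gains copyBranch, MigrationsApi gains pushBranchMigrations and DatabaseApi gains renameDatabase, matching the new endpoints earlier in the diff. A sketch of the call shapes, assuming an XataApiClient instance; the namespace accessor names used below (branches, migrations, databases) follow the client's existing naming but should be checked against the package typings:

const { XataApiClient } = require('@xata.io/client');
const api = new XataApiClient({ apiKey: process.env.XATA_API_KEY });

(async () => {
  // Copy a branch into a new destination branch; `limit` is passed through
  // in the request body alongside destinationBranch.
  await api.branches.copyBranch({
    workspace: 'my-workspace',
    region: 'us-east-1',
    database: 'my-db',
    branch: 'main',
    destinationBranch: 'main-copy',
    limit: 1000
  });

  // Push an array of migration objects to a branch
  // (the /db/{dbBranchName}/schema/push endpoint).
  await api.migrations.pushBranchMigrations({
    workspace: 'my-workspace',
    region: 'us-east-1',
    database: 'my-db',
    branch: 'main',
    migrations: []
  });

  // Rename a database (a control-plane call, so no region or branch is needed).
  await api.databases.renameDatabase({
    workspace: 'my-workspace',
    database: 'my-db',
    newName: 'my-db-renamed'
  });
})();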
@@ -2018,18 +2529,46 @@ class Page {
2018
2529
  this.meta = meta;
2019
2530
  this.records = new RecordArray(this, records);
2020
2531
  }
2532
+ /**
2533
+ * Retrieves the next page of results.
2534
+ * @param size Maximum number of results to be retrieved.
2535
+ * @param offset Number of results to skip when retrieving the results.
2536
+ * @returns The next page or results.
2537
+ */
2021
2538
  async nextPage(size, offset) {
2022
2539
  return __privateGet$6(this, _query).getPaginated({ pagination: { size, offset, after: this.meta.page.cursor } });
2023
2540
  }
2541
+ /**
2542
+ * Retrieves the previous page of results.
2543
+ * @param size Maximum number of results to be retrieved.
2544
+ * @param offset Number of results to skip when retrieving the results.
2545
+ * @returns The previous page or results.
2546
+ */
2024
2547
  async previousPage(size, offset) {
2025
2548
  return __privateGet$6(this, _query).getPaginated({ pagination: { size, offset, before: this.meta.page.cursor } });
2026
2549
  }
2550
+ /**
2551
+ * Retrieves the start page of results.
2552
+ * @param size Maximum number of results to be retrieved.
2553
+ * @param offset Number of results to skip when retrieving the results.
2554
+ * @returns The start page or results.
2555
+ */
2027
2556
  async startPage(size, offset) {
2028
2557
  return __privateGet$6(this, _query).getPaginated({ pagination: { size, offset, start: this.meta.page.cursor } });
2029
2558
  }
2559
+ /**
2560
+ * Retrieves the end page of results.
2561
+ * @param size Maximum number of results to be retrieved.
2562
+ * @param offset Number of results to skip when retrieving the results.
2563
+ * @returns The end page or results.
2564
+ */
2030
2565
  async endPage(size, offset) {
2031
2566
  return __privateGet$6(this, _query).getPaginated({ pagination: { size, offset, end: this.meta.page.cursor } });
2032
2567
  }
2568
+ /**
2569
+ * Shortcut method to check if there will be additional results if the next page of results is retrieved.
2570
+ * @returns Whether or not there will be additional results in the next page of results.
2571
+ */
2033
2572
  hasNextPage() {
2034
2573
  return this.meta.page.more;
2035
2574
  }
@@ -2070,22 +2609,45 @@ const _RecordArray = class extends Array {
2070
2609
  map(callbackfn, thisArg) {
2071
2610
  return this.toArray().map(callbackfn, thisArg);
2072
2611
  }
2612
+ /**
2613
+ * Retrieve next page of records
2614
+ *
2615
+ * @returns A new array of objects
2616
+ */
2073
2617
  async nextPage(size, offset) {
2074
2618
  const newPage = await __privateGet$6(this, _page).nextPage(size, offset);
2075
2619
  return new _RecordArray(newPage);
2076
2620
  }
2621
+ /**
2622
+ * Retrieve previous page of records
2623
+ *
2624
+ * @returns A new array of objects
2625
+ */
2077
2626
  async previousPage(size, offset) {
2078
2627
  const newPage = await __privateGet$6(this, _page).previousPage(size, offset);
2079
2628
  return new _RecordArray(newPage);
2080
2629
  }
2630
+ /**
2631
+ * Retrieve start page of records
2632
+ *
2633
+ * @returns A new array of objects
2634
+ */
2081
2635
  async startPage(size, offset) {
2082
2636
  const newPage = await __privateGet$6(this, _page).startPage(size, offset);
2083
2637
  return new _RecordArray(newPage);
2084
2638
  }
2639
+ /**
2640
+ * Retrieve end page of records
2641
+ *
2642
+ * @returns A new array of objects
2643
+ */
2085
2644
  async endPage(size, offset) {
2086
2645
  const newPage = await __privateGet$6(this, _page).endPage(size, offset);
2087
2646
  return new _RecordArray(newPage);
2088
2647
  }
2648
+ /**
2649
+ * @returns Boolean indicating if there is a next page
2650
+ */
2089
2651
  hasNextPage() {
2090
2652
  return __privateGet$6(this, _page).meta.page.more;
2091
2653
  }
@@ -2122,7 +2684,8 @@ const _Query = class {
2122
2684
  __privateAdd$5(this, _table$1, void 0);
2123
2685
  __privateAdd$5(this, _repository, void 0);
2124
2686
  __privateAdd$5(this, _data, { filter: {} });
2125
- this.meta = { page: { cursor: "start", more: true } };
2687
+ // Implements pagination
2688
+ this.meta = { page: { cursor: "start", more: true, size: PAGINATION_DEFAULT_SIZE } };
2126
2689
  this.records = new RecordArray(this, []);
2127
2690
  __privateSet$5(this, _table$1, table);
2128
2691
  if (repository) {
@@ -2159,18 +2722,38 @@ const _Query = class {
2159
2722
  const key = JSON.stringify({ columns, filter, sort, pagination });
2160
2723
  return toBase64(key);
2161
2724
  }
2725
+ /**
2726
+ * Builds a new query object representing a logical OR between the given subqueries.
2727
+ * @param queries An array of subqueries.
2728
+ * @returns A new Query object.
2729
+ */
2162
2730
  any(...queries) {
2163
2731
  const $any = queries.map((query) => query.getQueryOptions().filter ?? {});
2164
2732
  return new _Query(__privateGet$5(this, _repository), __privateGet$5(this, _table$1), { filter: { $any } }, __privateGet$5(this, _data));
2165
2733
  }
2734
+ /**
2735
+ * Builds a new query object representing a logical AND between the given subqueries.
2736
+ * @param queries An array of subqueries.
2737
+ * @returns A new Query object.
2738
+ */
2166
2739
  all(...queries) {
2167
2740
  const $all = queries.map((query) => query.getQueryOptions().filter ?? {});
2168
2741
  return new _Query(__privateGet$5(this, _repository), __privateGet$5(this, _table$1), { filter: { $all } }, __privateGet$5(this, _data));
2169
2742
  }
2743
+ /**
2744
+ * Builds a new query object representing a logical OR negating each subquery. In pseudo-code: !q1 OR !q2
2745
+ * @param queries An array of subqueries.
2746
+ * @returns A new Query object.
2747
+ */
2170
2748
  not(...queries) {
2171
2749
  const $not = queries.map((query) => query.getQueryOptions().filter ?? {});
2172
2750
  return new _Query(__privateGet$5(this, _repository), __privateGet$5(this, _table$1), { filter: { $not } }, __privateGet$5(this, _data));
2173
2751
  }
2752
+ /**
2753
+ * Builds a new query object representing a logical AND negating each subquery. In pseudo-code: !q1 AND !q2
2754
+ * @param queries An array of subqueries.
2755
+ * @returns A new Query object.
2756
+ */
2174
2757
  none(...queries) {
2175
2758
  const $none = queries.map((query) => query.getQueryOptions().filter ?? {});
2176
2759
  return new _Query(__privateGet$5(this, _repository), __privateGet$5(this, _table$1), { filter: { $none } }, __privateGet$5(this, _data));
@@ -2193,6 +2776,11 @@ const _Query = class {
2193
2776
  const sort = [...originalSort, { column, direction }];
2194
2777
  return new _Query(__privateGet$5(this, _repository), __privateGet$5(this, _table$1), { sort }, __privateGet$5(this, _data));
2195
2778
  }
2779
+ /**
2780
+ * Builds a new query specifying the set of columns to be returned in the query response.
2781
+ * @param columns Array of column names to be returned by the query.
2782
+ * @returns A new Query object.
2783
+ */
2196
2784
  select(columns) {
2197
2785
  return new _Query(
2198
2786
  __privateGet$5(this, _repository),
@@ -2205,6 +2793,12 @@ const _Query = class {
2205
2793
  const query = new _Query(__privateGet$5(this, _repository), __privateGet$5(this, _table$1), options, __privateGet$5(this, _data));
2206
2794
  return __privateGet$5(this, _repository).query(query);
2207
2795
  }
2796
+ /**
2797
+ * Get results in an iterator
2798
+ *
2799
+ * @async
2800
+ * @returns Async interable of results
2801
+ */
2208
2802
  async *[Symbol.asyncIterator]() {
2209
2803
  for await (const [record] of this.getIterator({ batchSize: 1 })) {
2210
2804
  yield record;
@@ -2265,21 +2859,49 @@ const _Query = class {
2265
2859
  );
2266
2860
  return __privateGet$5(this, _repository).summarizeTable(query, summaries, summariesFilter);
2267
2861
  }
2862
+ /**
2863
+ * Builds a new query object adding a cache TTL in milliseconds.
2864
+ * @param ttl The cache TTL in milliseconds.
2865
+ * @returns A new Query object.
2866
+ */
2268
2867
  cache(ttl) {
2269
2868
  return new _Query(__privateGet$5(this, _repository), __privateGet$5(this, _table$1), { cache: ttl }, __privateGet$5(this, _data));
2270
2869
  }
2870
+ /**
2871
+ * Retrieve next page of records
2872
+ *
2873
+ * @returns A new page object.
2874
+ */
2271
2875
  nextPage(size, offset) {
2272
2876
  return this.startPage(size, offset);
2273
2877
  }
2878
+ /**
2879
+ * Retrieve previous page of records
2880
+ *
2881
+ * @returns A new page object
2882
+ */
2274
2883
  previousPage(size, offset) {
2275
2884
  return this.startPage(size, offset);
2276
2885
  }
2886
+ /**
2887
+ * Retrieve start page of records
2888
+ *
2889
+ * @returns A new page object
2890
+ */
2277
2891
  startPage(size, offset) {
2278
2892
  return this.getPaginated({ pagination: { size, offset } });
2279
2893
  }
2894
+ /**
2895
+ * Retrieve last page of records
2896
+ *
2897
+ * @returns A new page object
2898
+ */
2280
2899
  endPage(size, offset) {
2281
2900
  return this.getPaginated({ pagination: { size, offset, before: "end" } });
2282
2901
  }
2902
+ /**
2903
+ * @returns Boolean indicating if there is a next page
2904
+ */
2283
2905
  hasNextPage() {
2284
2906
  return this.meta.page.more;
2285
2907
  }
@@ -2319,7 +2941,11 @@ function isSortFilterString(value) {
2319
2941
  return isString(value);
2320
2942
  }
2321
2943
  function isSortFilterBase(filter) {
2322
- return isObject(filter) && Object.values(filter).every((value) => value === "asc" || value === "desc");
2944
+ return isObject(filter) && Object.entries(filter).every(([key, value]) => {
2945
+ if (key === "*")
2946
+ return value === "random";
2947
+ return value === "asc" || value === "desc";
2948
+ });
2323
2949
  }
2324
2950
  function isSortFilterObject(filter) {
2325
2951
  return isObject(filter) && !isSortFilterBase(filter) && filter.column !== void 0;
@@ -2752,6 +3378,34 @@ class RestRepository extends Query {
2752
3378
  return result;
2753
3379
  });
2754
3380
  }
3381
+ ask(question, options) {
3382
+ const params = {
3383
+ pathParams: {
3384
+ workspace: "{workspaceId}",
3385
+ dbBranchName: "{dbBranch}",
3386
+ region: "{region}",
3387
+ tableName: __privateGet$4(this, _table)
3388
+ },
3389
+ body: {
3390
+ question,
3391
+ ...options
3392
+ },
3393
+ ...__privateGet$4(this, _getFetchProps).call(this)
3394
+ };
3395
+ if (options?.onMessage) {
3396
+ fetchSSERequest({
3397
+ endpoint: "dataPlane",
3398
+ url: "/db/{dbBranchName}/tables/{tableName}/ask",
3399
+ method: "POST",
3400
+ onMessage: (message) => {
3401
+ options.onMessage?.({ answer: message.text, records: message.records });
3402
+ },
3403
+ ...params
3404
+ });
3405
+ } else {
3406
+ return askTable(params);
3407
+ }
3408
+ }
2755
3409
  }
2756
3410
  _table = new WeakMap();
2757
3411
  _getFetchProps = new WeakMap();
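The ask() method added to RestRepository above wraps the table /ask endpoint: without options.onMessage it issues a single askTable request and returns its promise; with onMessage it switches to the streaming SSE path (fetchSSERequest) and invokes the callback with { answer, records } for every chunk. A usage sketch, assuming a generated XataClient (the import path and the Posts table are hypothetical):

const { XataClient } = require('./xata'); // generated client, assumed
const xata = new XataClient();

// Streaming: the callback fires once per server-sent chunk.
xata.db.Posts.ask('How do I paginate results?', {
  onMessage: ({ answer }) => process.stdout.write(answer ?? '')
});

// Non-streaming: without onMessage the returned promise resolves with the
// full ask response.
xata.db.Posts.ask('How do I paginate results?').then(console.log);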
@@ -2937,15 +3591,16 @@ deleteRecords_fn = async function(recordIds) {
2937
3591
  };
2938
3592
  _setCacheQuery = new WeakSet();
2939
3593
  setCacheQuery_fn = async function(query, meta, records) {
2940
- await __privateGet$4(this, _cache).set(`query_${__privateGet$4(this, _table)}:${query.key()}`, { date: new Date(), meta, records });
3594
+ await __privateGet$4(this, _cache)?.set(`query_${__privateGet$4(this, _table)}:${query.key()}`, { date: /* @__PURE__ */ new Date(), meta, records });
2941
3595
  };
2942
3596
  _getCacheQuery = new WeakSet();
2943
3597
  getCacheQuery_fn = async function(query) {
2944
3598
  const key = `query_${__privateGet$4(this, _table)}:${query.key()}`;
2945
- const result = await __privateGet$4(this, _cache).get(key);
3599
+ const result = await __privateGet$4(this, _cache)?.get(key);
2946
3600
  if (!result)
2947
3601
  return null;
2948
- const { cache: ttl = __privateGet$4(this, _cache).defaultQueryTTL } = query.getQueryOptions();
3602
+ const defaultTTL = __privateGet$4(this, _cache)?.defaultQueryTTL ?? -1;
3603
+ const { cache: ttl = defaultTTL } = query.getQueryOptions();
2949
3604
  if (ttl < 0)
2950
3605
  return null;
2951
3606
  const hasExpired = result.date.getTime() + ttl < Date.now();
@@ -3020,6 +3675,8 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
3020
3675
  }
3021
3676
  }
3022
3677
  const record = { ...data };
3678
+ const serializable = { xata, ...transformObjectLinks(data) };
3679
+ const metadata = xata !== void 0 ? { ...xata, createdAt: new Date(xata.createdAt), updatedAt: new Date(xata.updatedAt) } : void 0;
3023
3680
  record.read = function(columns2) {
3024
3681
  return db[table].read(record["id"], columns2);
3025
3682
  };
@@ -3036,16 +3693,17 @@ const initObject = (db, schemaTables, table, object, selectedColumns) => {
3036
3693
  record.delete = function() {
3037
3694
  return db[table].delete(record["id"]);
3038
3695
  };
3696
+ record.xata = metadata;
3039
3697
  record.getMetadata = function() {
3040
- return xata;
3698
+ return metadata;
3041
3699
  };
3042
3700
  record.toSerializable = function() {
3043
- return JSON.parse(JSON.stringify(transformObjectLinks(data)));
3701
+ return JSON.parse(JSON.stringify(serializable));
3044
3702
  };
3045
3703
  record.toString = function() {
3046
- return JSON.stringify(transformObjectLinks(data));
3704
+ return JSON.stringify(transformObjectLinks(serializable));
3047
3705
  };
3048
- for (const prop of ["read", "update", "replace", "delete", "getMetadata", "toSerializable", "toString"]) {
3706
+ for (const prop of ["read", "update", "replace", "delete", "xata", "getMetadata", "toSerializable", "toString"]) {
3049
3707
  Object.defineProperty(record, prop, { enumerable: false });
3050
3708
  }
3051
3709
  Object.freeze(record);
@@ -3262,6 +3920,7 @@ search_fn = async function(query, options, pluginOptions) {
3262
3920
  const { tables, fuzziness, highlight, prefix, page } = options ?? {};
3263
3921
  const { records } = await searchBranch({
3264
3922
  pathParams: { workspace: "{workspaceId}", dbBranchName: "{dbBranch}", region: "{region}" },
3923
+ // @ts-ignore https://github.com/xataio/client-ts/issues/313
3265
3924
  body: { tables, query, fuzziness, prefix, highlight, page },
3266
3925
  ...pluginOptions
3267
3926
  });
@@ -3327,7 +3986,8 @@ const buildClient = (plugins) => {
3327
3986
  __privateSet(this, _options, safeOptions);
3328
3987
  const pluginOptions = {
3329
3988
  ...__privateMethod(this, _getFetchProps, getFetchProps_fn).call(this, safeOptions),
3330
- cache: safeOptions.cache
3989
+ cache: safeOptions.cache,
3990
+ host: safeOptions.host
3331
3991
  };
3332
3992
  const db = new SchemaPlugin(schemaTables).build(pluginOptions);
3333
3993
  const search = new SearchPlugin(db, schemaTables).build(pluginOptions);
@@ -3338,14 +3998,7 @@ const buildClient = (plugins) => {
3338
3998
  for (const [key, namespace] of Object.entries(plugins ?? {})) {
3339
3999
  if (namespace === void 0)
3340
4000
  continue;
3341
- const result = namespace.build(pluginOptions);
3342
- if (result instanceof Promise) {
3343
- void result.then((namespace2) => {
3344
- this[key] = namespace2;
3345
- });
3346
- } else {
3347
- this[key] = result;
3348
- }
4001
+ this[key] = namespace.build(pluginOptions);
3349
4002
  }
3350
4003
  }
3351
4004
  async getConfig() {
@@ -3363,7 +4016,6 @@ const buildClient = (plugins) => {
3363
4016
  }
3364
4017
  const fetch = getFetchImplementation(options?.fetch);
3365
4018
  const databaseURL = options?.databaseURL || getDatabaseURL();
3366
- const branch = options?.branch || getBranch() || "main";
3367
4019
  const apiKey = options?.apiKey || getAPIKey();
3368
4020
  const cache = options?.cache ?? new SimpleCache({ defaultQueryTTL: 0 });
3369
4021
  const trace = options?.trace ?? defaultTrace;
@@ -3376,6 +4028,26 @@ const buildClient = (plugins) => {
3376
4028
  if (!databaseURL) {
3377
4029
  throw new Error("Option databaseURL is required");
3378
4030
  }
4031
+ const envBranch = getBranch();
4032
+ const previewBranch = getPreviewBranch();
4033
+ const branch = options?.branch || previewBranch || envBranch || "main";
4034
+ if (!!previewBranch && branch !== previewBranch) {
4035
+ console.warn(
4036
+ `Ignoring preview branch ${previewBranch} because branch option was passed to the client constructor with value ${branch}`
4037
+ );
4038
+ } else if (!!envBranch && branch !== envBranch) {
4039
+ console.warn(
4040
+ `Ignoring branch ${envBranch} because branch option was passed to the client constructor with value ${branch}`
4041
+ );
4042
+ } else if (!!previewBranch && !!envBranch && previewBranch !== envBranch) {
4043
+ console.warn(
4044
+ `Ignoring preview branch ${previewBranch} and branch ${envBranch} because branch option was passed to the client constructor with value ${branch}`
4045
+ );
4046
+ } else if (!previewBranch && !envBranch && options?.branch === void 0) {
4047
+ console.warn(
4048
+ `No branch was passed to the client constructor. Using default branch ${branch}. You can set the branch with the environment variable XATA_BRANCH or by passing the branch option to the client constructor.`
4049
+ );
4050
+ }
3379
4051
  return {
3380
4052
  fetch,
3381
4053
  databaseURL,
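Branch selection now happens in buildClient itself instead of inside getBranch(): an explicit branch option takes precedence, then the deploy-preview branch from getPreviewBranch(), then the environment branch, and finally "main", with console warnings when an explicit option hides a detected preview or environment branch. A minimal sketch (XataClient is the generated client class, assumed here):

const { XataClient } = require('./xata'); // generated client, assumed
// Resolution order: options.branch > getPreviewBranch() > getBranch() > "main".
// An explicit branch that differs from a detected preview/environment branch
// still wins, but an "Ignoring ..." warning is logged.
const xata = new XataClient({ branch: 'feat/files' });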
@@ -3403,6 +4075,7 @@ const buildClient = (plugins) => {
3403
4075
  fetch,
3404
4076
  apiKey,
3405
4077
  apiUrl: "",
4078
+ // Instead of using workspace and dbBranch, we inject a probably CNAME'd URL
3406
4079
  workspacesApiUrl: (path, params) => {
3407
4080
  const hasBranch = params.dbBranchName ?? params.branch;
3408
4081
  const newPath = path.replace(/^\/db\/[^/]+/, hasBranch !== void 0 ? `:${branch}` : "");
@@ -3535,6 +4208,8 @@ exports.applyBranchSchemaEdit = applyBranchSchemaEdit;
3535
4208
  exports.askTable = askTable;
3536
4209
  exports.branchTransaction = branchTransaction;
3537
4210
  exports.buildClient = buildClient;
4211
+ exports.buildPreviewBranchName = buildPreviewBranchName;
4212
+ exports.buildProviderString = buildProviderString;
3538
4213
  exports.buildWorkerRunner = buildWorkerRunner;
3539
4214
  exports.bulkInsertTableRecords = bulkInsertTableRecords;
3540
4215
  exports.cancelWorkspaceMemberInvite = cancelWorkspaceMemberInvite;
@@ -3542,6 +4217,7 @@ exports.compareBranchSchemas = compareBranchSchemas;
3542
4217
  exports.compareBranchWithUserSchema = compareBranchWithUserSchema;
3543
4218
  exports.compareMigrationRequest = compareMigrationRequest;
3544
4219
  exports.contains = contains;
4220
+ exports.copyBranch = copyBranch;
3545
4221
  exports.createBranch = createBranch;
3546
4222
  exports.createDatabase = createDatabase;
3547
4223
  exports.createMigrationRequest = createMigrationRequest;
@@ -3552,6 +4228,8 @@ exports.deleteBranch = deleteBranch;
3552
4228
  exports.deleteColumn = deleteColumn;
3553
4229
  exports.deleteDatabase = deleteDatabase;
3554
4230
  exports.deleteDatabaseGithubSettings = deleteDatabaseGithubSettings;
4231
+ exports.deleteFile = deleteFile;
4232
+ exports.deleteFileItem = deleteFileItem;
3555
4233
  exports.deleteRecord = deleteRecord;
3556
4234
  exports.deleteTable = deleteTable;
3557
4235
  exports.deleteUser = deleteUser;
@@ -3562,6 +4240,7 @@ exports.endsWith = endsWith;
3562
4240
  exports.equals = equals;
3563
4241
  exports.executeBranchMigrationPlan = executeBranchMigrationPlan;
3564
4242
  exports.exists = exists;
4243
+ exports.fileAccess = fileAccess;
3565
4244
  exports.ge = ge;
3566
4245
  exports.getAPIKey = getAPIKey;
3567
4246
  exports.getBranch = getBranch;
@@ -3577,10 +4256,13 @@ exports.getDatabaseGithubSettings = getDatabaseGithubSettings;
3577
4256
  exports.getDatabaseList = getDatabaseList;
3578
4257
  exports.getDatabaseMetadata = getDatabaseMetadata;
3579
4258
  exports.getDatabaseURL = getDatabaseURL;
4259
+ exports.getFile = getFile;
4260
+ exports.getFileItem = getFileItem;
3580
4261
  exports.getGitBranchesMapping = getGitBranchesMapping;
3581
4262
  exports.getHostUrl = getHostUrl;
3582
4263
  exports.getMigrationRequest = getMigrationRequest;
3583
4264
  exports.getMigrationRequestIsMerged = getMigrationRequestIsMerged;
4265
+ exports.getPreviewBranch = getPreviewBranch;
3584
4266
  exports.getRecord = getRecord;
3585
4267
  exports.getTableColumns = getTableColumns;
3586
4268
  exports.getTableSchema = getTableSchema;
@@ -3623,16 +4305,21 @@ exports.parseProviderString = parseProviderString;
3623
4305
  exports.parseWorkspacesUrlParts = parseWorkspacesUrlParts;
3624
4306
  exports.pattern = pattern;
3625
4307
  exports.previewBranchSchemaEdit = previewBranchSchemaEdit;
4308
+ exports.pushBranchMigrations = pushBranchMigrations;
4309
+ exports.putFile = putFile;
4310
+ exports.putFileItem = putFileItem;
3626
4311
  exports.queryMigrationRequests = queryMigrationRequests;
3627
4312
  exports.queryTable = queryTable;
3628
4313
  exports.removeGitBranchesEntry = removeGitBranchesEntry;
3629
4314
  exports.removeWorkspaceMember = removeWorkspaceMember;
4315
+ exports.renameDatabase = renameDatabase;
3630
4316
  exports.resendWorkspaceMemberInvite = resendWorkspaceMemberInvite;
3631
4317
  exports.resolveBranch = resolveBranch;
3632
4318
  exports.searchBranch = searchBranch;
3633
4319
  exports.searchTable = searchTable;
3634
4320
  exports.serialize = serialize;
3635
4321
  exports.setTableSchema = setTableSchema;
4322
+ exports.sqlQuery = sqlQuery;
3636
4323
  exports.startsWith = startsWith;
3637
4324
  exports.summarizeTable = summarizeTable;
3638
4325
  exports.updateBranchMetadata = updateBranchMetadata;