@xata.io/client 0.17.1 → 0.18.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +22 -0
- package/README.md +1 -1
- package/Usage.md +2 -0
- package/dist/index.cjs +107 -4
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.ts +314 -33
- package/dist/index.mjs +106 -5
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,27 @@
 # @xata.io/client
 
+## 0.18.0
+
+### Minor Changes
+
+- [#562](https://github.com/xataio/client-ts/pull/562) [`699beb4`](https://github.com/xataio/client-ts/commit/699beb4bbf21cffa001d3f88a03246980e30250b) Thanks [@SferaDev](https://github.com/SferaDev)! - Return null on nullable columns
+
+### Patch Changes
+
+- [#583](https://github.com/xataio/client-ts/pull/583) [`330b076`](https://github.com/xataio/client-ts/commit/330b076a0781e3576c82afab76e3fb2a64f2e041) Thanks [@SferaDev](https://github.com/SferaDev)! - Add support for unique columns
+
+- [#598](https://github.com/xataio/client-ts/pull/598) [`c3dfb4b`](https://github.com/xataio/client-ts/commit/c3dfb4babc990634b9e9747616ed93223178a2e7) Thanks [@SferaDev](https://github.com/SferaDev)! - API: Add patch database metadata endpoint
+
+- [#602](https://github.com/xataio/client-ts/pull/602) [`74b17aa`](https://github.com/xataio/client-ts/commit/74b17aaedc0dbdd79bfdcb182b2e70b61f98f5a5) Thanks [@gimenete](https://github.com/gimenete)! - API: Make workspace slug optional on create
+
+- [#615](https://github.com/xataio/client-ts/pull/615) [`83f20cd`](https://github.com/xataio/client-ts/commit/83f20cdbe53706c16016c4db3f318e679b24ec86) Thanks [@SferaDev](https://github.com/SferaDev)! - Make `getMany` return more items than max pagination size
+
+- [#562](https://github.com/xataio/client-ts/pull/562) [`addfcc6`](https://github.com/xataio/client-ts/commit/addfcc67fca663defdd340111ea09c9188bad3ab) Thanks [@SferaDev](https://github.com/SferaDev)! - Add `orThrows` methods that instead of returning null, throw an exception.
+
+- [#583](https://github.com/xataio/client-ts/pull/583) [`eb7ba59`](https://github.com/xataio/client-ts/commit/eb7ba594be2a1f0ab90956836bbeb912e188a46d) Thanks [@SferaDev](https://github.com/SferaDev)! - Add support for non nullable columns
+
+- [#612](https://github.com/xataio/client-ts/pull/612) [`f1a0742`](https://github.com/xataio/client-ts/commit/f1a0742a04e1aefab14f46371a04a41069faec01) Thanks [@xata-bot](https://github.com/xata-bot)! - API: Add summarize table endpoint
+
 ## 0.17.1
 
 ### Patch Changes
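Taken together, the headline changes above (null on nullable columns, plus the new `orThrow` variants) change how missing data surfaces in application code. The following is a minimal sketch only: it assumes a generated `XataClient` with a hypothetical `users` table, and only the method names (`readOrThrow`, `updateOrThrow`, `getFirstOrThrow`) come from this release.

```ts
import { getXataClient } from './xata'; // assumed codegen output; path is illustrative

const xata = getXataClient();

// Nullable columns now resolve to null rather than being left undefined.
const maybeUser = await xata.db.users.read('rec_example');
console.log(maybeUser?.name ?? 'no record');

// The *OrThrow variants throw instead of returning null when a record is missing.
const user = await xata.db.users.readOrThrow('rec_example');
await xata.db.users.updateOrThrow(user.id, { name: 'Ada' });

// getFirstOrThrow mirrors getFirst() but raises if the query has no results.
const first = await xata.db.users.filter({ name: 'Ada' }).getFirstOrThrow();
```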
package/README.md
CHANGED
@@ -246,7 +246,7 @@ const api = new XataApiClient({ apiKey: process.env.XATA_API_KEY });
 Once you have initialized the API client, the operations are organized following the same hiearchy as in the [official documentation](https://docs.xata.io). You have different namespaces for each entity (ie. `workspaces`, `databases`, `tables`, `branches`, `users`, `records`...).
 
 ```ts
-const { id: workspace } = await api.workspaces.createWorkspace({ name: 'example'
+const { id: workspace } = await api.workspaces.createWorkspace({ name: 'example' });
 const { databaseName } = await api.databases.createDatabase(workspace, 'database');
 
 await api.branches.createBranch(workspace, databaseName, 'branch');
package/Usage.md
CHANGED
@@ -41,6 +41,8 @@ To get a collection of records, you can use the `Query` object. It provides the
 - `getAll()`: returns all the records in the query results by making multiple requests to iterate over all the pages which exist. If the query is not filtered and the table is a large dataset, this operation can affect the performance.
 - `getMany()`: returns an array with a subset of the first results in the query. The default [pagination](#page) size (20) is used and can be customised by passing a different `{ pagination: { size: number } }` in its options. To learn more about default values, see [helper variables](#helper-variables).
 
+Both the `getAll()` and `getMany()` will produce multiple requests to the server if the query should return more than the maximum page size. We perform the minimum number of requests to get the desired number of records.
+
 All these methods allow customising its filters, column selection, column ordering, pagination or cache TTL. For example:
 
 ```ts
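To make the new note concrete, here is a rough sketch of the difference between the two calls, assuming a repository exposed as `db.users` (table and column names are illustrative):

```ts
// Default pagination size: a single request, up to 20 records.
const firstPage = await db.users.getMany();

// Ask for more than the maximum page size: the client now issues the minimum
// number of page requests needed to collect the requested amount.
const lots = await db.users.getMany({ pagination: { size: 500 } });

// getAll() keeps requesting pages until every matching record is returned;
// batchSize controls how many records each underlying request fetches.
const everyone = await db.users.filter({ active: true }).getAll({ batchSize: 200 });
```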
package/dist/index.cjs
CHANGED
@@ -172,7 +172,7 @@ function getFetchImplementation(userFetch) {
   return fetchImpl;
 }
 
-const VERSION = "0.17.1";
+const VERSION = "0.18.0";
 
 class ErrorWithCause extends Error {
   constructor(message, options) {
@@ -408,6 +408,7 @@ const getDatabaseMetadata = (variables) => fetch$1({
   method: "get",
   ...variables
 });
+const updateDatabaseMetadata = (variables) => fetch$1({ url: "/dbs/{dbName}/metadata", method: "patch", ...variables });
 const getGitBranchesMapping = (variables) => fetch$1({ url: "/dbs/{dbName}/gitBranches", method: "get", ...variables });
 const addGitBranchesEntry = (variables) => fetch$1({ url: "/dbs/{dbName}/gitBranches", method: "post", ...variables });
 const removeGitBranchesEntry = (variables) => fetch$1({ url: "/dbs/{dbName}/gitBranches", method: "delete", ...variables });
@@ -551,6 +552,11 @@ const searchBranch = (variables) => fetch$1({
   method: "post",
   ...variables
 });
+const summarizeTable = (variables) => fetch$1({
+  url: "/db/{dbBranchName}/tables/{tableName}/summarize",
+  method: "post",
+  ...variables
+});
 const operationsByTag = {
   users: { getUser, updateUser, deleteUser, getUserAPIKeys, createUserAPIKey, deleteUserAPIKey },
   workspaces: {
@@ -573,6 +579,7 @@ const operationsByTag = {
     createDatabase,
     deleteDatabase,
     getDatabaseMetadata,
+    updateDatabaseMetadata,
     getGitBranchesMapping,
     addGitBranchesEntry,
     removeGitBranchesEntry,
@@ -630,7 +637,8 @@ const operationsByTag = {
     bulkInsertTableRecords,
     queryTable,
     searchTable,
-    searchBranch
+    searchBranch,
+    summarizeTable
   }
 };
 
@@ -881,6 +889,13 @@ class DatabaseApi {
       ...this.extraProps
     });
   }
+  updateDatabaseMetadata(workspace, dbName, options = {}) {
+    return operationsByTag.database.updateDatabaseMetadata({
+      pathParams: { workspace, dbName },
+      body: options,
+      ...this.extraProps
+    });
+  }
   getGitBranchesMapping(workspace, dbName) {
     return operationsByTag.database.getGitBranchesMapping({
       pathParams: { workspace, dbName },
@@ -1107,6 +1122,13 @@ class RecordsApi {
      ...this.extraProps
    });
  }
+  summarizeTable(workspace, database, branch, tableName, query) {
+    return operationsByTag.records.summarizeTable({
+      pathParams: { workspace, dbBranchName: `${database}:${branch}`, tableName },
+      body: query,
+      ...this.extraProps
+    });
+  }
 }
 class MigrationRequestsApi {
   constructor(extraProps) {
@@ -1466,11 +1488,20 @@ const _Query = class {
     }
   }
   async getMany(options = {}) {
-    const
+    const { pagination = {}, ...rest } = options;
+    const { size = PAGINATION_DEFAULT_SIZE, offset } = pagination;
+    const batchSize = size <= PAGINATION_MAX_SIZE ? size : PAGINATION_MAX_SIZE;
+    let page = await this.getPaginated({ ...rest, pagination: { size: batchSize, offset } });
+    const results = [...page.records];
+    while (page.hasNextPage() && results.length < size) {
+      page = await page.nextPage();
+      results.push(...page.records);
+    }
     if (page.hasNextPage() && options.pagination?.size === void 0) {
       console.trace("Calling getMany does not return all results. Paginate to get all results or call getAll.");
     }
-
+    const array = new RecordArray(page, results.slice(0, size));
+    return array;
   }
   async getAll(options = {}) {
     const { batchSize = PAGINATION_MAX_SIZE, ...rest } = options;
@@ -1484,6 +1515,12 @@ const _Query = class {
     const records = await this.getMany({ ...options, pagination: { size: 1 } });
     return records[0] ?? null;
   }
+  async getFirstOrThrow(options = {}) {
+    const records = await this.getMany({ ...options, pagination: { size: 1 } });
+    if (records[0] === void 0)
+      throw new Error("No results found.");
+    return records[0];
+  }
   cache(ttl) {
     return new _Query(__privateGet$5(this, _repository), __privateGet$5(this, _table$1), { cache: ttl }, __privateGet$5(this, _data));
   }
@@ -1675,6 +1712,25 @@ class RestRepository extends Query {
       return null;
     });
   }
+  async readOrThrow(a, b) {
+    return __privateGet$4(this, _trace).call(this, "readOrThrow", async () => {
+      const result = await this.read(a, b);
+      if (Array.isArray(result)) {
+        const missingIds = compact(
+          a.filter((_item, index) => result[index] === null).map((item) => extractId(item))
+        );
+        if (missingIds.length > 0) {
+          throw new Error(`Could not find records with ids: ${missingIds.join(", ")}`);
+        }
+        return result;
+      }
+      if (result === null) {
+        const id = extractId(a) ?? "unknown";
+        throw new Error(`Record with id ${id} not found`);
+      }
+      return result;
+    });
+  }
   async update(a, b, c) {
     return __privateGet$4(this, _trace).call(this, "update", async () => {
       if (Array.isArray(a)) {
@@ -1697,6 +1753,25 @@ class RestRepository extends Query {
       throw new Error("Invalid arguments for update method");
     });
   }
+  async updateOrThrow(a, b, c) {
+    return __privateGet$4(this, _trace).call(this, "updateOrThrow", async () => {
+      const result = await this.update(a, b, c);
+      if (Array.isArray(result)) {
+        const missingIds = compact(
+          a.filter((_item, index) => result[index] === null).map((item) => extractId(item))
+        );
+        if (missingIds.length > 0) {
+          throw new Error(`Could not find records with ids: ${missingIds.join(", ")}`);
+        }
+        return result;
+      }
+      if (result === null) {
+        const id = extractId(a) ?? "unknown";
+        throw new Error(`Record with id ${id} not found`);
+      }
+      return result;
+    });
+  }
   async createOrUpdate(a, b, c) {
     return __privateGet$4(this, _trace).call(this, "createOrUpdate", async () => {
       if (Array.isArray(a)) {
@@ -1738,6 +1813,24 @@ class RestRepository extends Query {
       throw new Error("Invalid arguments for delete method");
     });
   }
+  async deleteOrThrow(a, b) {
+    return __privateGet$4(this, _trace).call(this, "deleteOrThrow", async () => {
+      const result = await this.delete(a, b);
+      if (Array.isArray(result)) {
+        const missingIds = compact(
+          a.filter((_item, index) => result[index] === null).map((item) => extractId(item))
+        );
+        if (missingIds.length > 0) {
+          throw new Error(`Could not find records with ids: ${missingIds.join(", ")}`);
+        }
+        return result;
+      } else if (result === null) {
+        const id = extractId(a) ?? "unknown";
+        throw new Error(`Record with id ${id} not found`);
+      }
+      return result;
+    });
+  }
   async search(query, options = {}) {
     return __privateGet$4(this, _trace).call(this, "search", async () => {
       const fetchProps = await __privateGet$4(this, _getFetchProps).call(this);
@@ -1949,9 +2042,17 @@ const initObject = (db, schemaTables, table, object) => {
          console.error(`Failed to parse link for field ${column.name}`);
        } else if (isObject(value)) {
          result[column.name] = initObject(db, schemaTables, linkTable, value);
+        } else {
+          result[column.name] = null;
        }
        break;
      }
+      default:
+        result[column.name] = value ?? null;
+        if (column.notNull === true && value === null) {
+          console.error(`Parse error, column ${column.name} is non nullable and value resolves null`);
+        }
+        break;
    }
  }
  result.read = function(columns2) {
@@ -2591,9 +2692,11 @@ exports.searchTable = searchTable;
 exports.serialize = serialize;
 exports.setTableSchema = setTableSchema;
 exports.startsWith = startsWith;
+exports.summarizeTable = summarizeTable;
 exports.updateBranchMetadata = updateBranchMetadata;
 exports.updateBranchSchema = updateBranchSchema;
 exports.updateColumn = updateColumn;
+exports.updateDatabaseMetadata = updateDatabaseMetadata;
 exports.updateMigrationRequest = updateMigrationRequest;
 exports.updateRecordWithID = updateRecordWithID;
 exports.updateTable = updateTable;