dexie-cloud-addon 4.0.0-beta.17 → 4.0.0-beta.20

This diff compares the published contents of two package versions as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (42)
  1. package/dist/modern/dexie-cloud-addon.js +1630 -1572
  2. package/dist/modern/dexie-cloud-addon.js.map +1 -1
  3. package/dist/modern/dexie-cloud-addon.min.js +1 -1
  4. package/dist/modern/dexie-cloud-addon.min.js.map +1 -1
  5. package/dist/modern/service-worker.js +1694 -1630
  6. package/dist/modern/service-worker.js.map +1 -1
  7. package/dist/modern/service-worker.min.js +1 -1
  8. package/dist/modern/service-worker.min.js.map +1 -1
  9. package/dist/module-es5/dexie-cloud-addon.js +701 -662
  10. package/dist/module-es5/dexie-cloud-addon.js.map +1 -1
  11. package/dist/module-es5/dexie-cloud-addon.min.js +1 -1
  12. package/dist/module-es5/dexie-cloud-addon.min.js.map +1 -1
  13. package/dist/types/DexieCloudAPI.d.ts +1 -0
  14. package/dist/types/DexieCloudEntity.d.ts +8 -0
  15. package/dist/types/DexieCloudOptions.d.ts +1 -0
  16. package/dist/types/WebSocketStatus.d.ts +1 -0
  17. package/dist/types/createMyMembersObservable.d.ts +14 -0
  18. package/dist/types/currentUserObservable.d.ts +3 -0
  19. package/dist/types/default-ui/Dialog.d.ts +2 -1
  20. package/dist/types/helpers/BroadcastedLocalEvent.d.ts +8 -0
  21. package/dist/types/helpers/visibleState.d.ts +1 -0
  22. package/dist/types/permissionsLookup.d.ts +9 -0
  23. package/dist/types/permissionsLookupObservable.d.ts +14 -0
  24. package/dist/types/sync/globalizePrivateIds.d.ts +4 -0
  25. package/dist/types/sync/syncServerToClientOnly.d.ts +3 -0
  26. package/dist/types/types/CloudConnectionStatus.d.ts +0 -0
  27. package/dist/types/types/ConnectionStatus.d.ts +0 -0
  28. package/dist/types/types/LoginState.d.ts +41 -0
  29. package/dist/types/types/SyncConnectionStatus.d.ts +1 -0
  30. package/dist/types/types/SyncFlowStatus.d.ts +6 -0
  31. package/dist/types/types/SyncStatus.d.ts +6 -0
  32. package/dist/umd/dexie-cloud-addon.js +701 -662
  33. package/dist/umd/dexie-cloud-addon.js.map +1 -1
  34. package/dist/umd/dexie-cloud-addon.min.js +1 -1
  35. package/dist/umd/dexie-cloud-addon.min.js.map +1 -1
  36. package/dist/umd/service-worker.js +1694 -1630
  37. package/dist/umd/service-worker.js.map +1 -1
  38. package/dist/umd/service-worker.min.js +1 -1
  39. package/dist/umd/service-worker.min.js.map +1 -1
  40. package/dist/umd-modern/dexie-cloud-addon.js +1627 -1569
  41. package/dist/umd-modern/dexie-cloud-addon.js.map +1 -1
  42. package/package.json +2 -2
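Most of the line churn in the hunks below is mechanical: the beta.20 bundles add a tslib-style __awaiter helper (first hunk) and re-emit the same code for an older compile target, so native async/await, optional chaining (?.), nullish coalescing (??), object spread and optional catch bindings are replaced by a generator-based helper, explicit null/undefined checks with _a/_b temporaries, Object.assign and named catch bindings. The file list above also shows a set of new type-declaration files (LoginState.d.ts, SyncStatus.d.ts, SyncFlowStatus.d.ts, permissionsLookup.d.ts, createMyMembersObservable.d.ts and others), each added without removals. As a rough sketch of the non-async part of the rewrite (the identifiers defaults, overrides and expiration are illustrative, not taken from the package):

    // beta.17 emit (modern syntax)
    const type = ev.data?.type;
    const merged = { ...defaults, ...overrides };
    const exp = expiration?.getTime() ?? Infinity;

    // beta.20 emit (downleveled, as seen throughout this diff)
    var _a, _b;
    const type = (_a = ev.data) === null || _a === void 0 ? void 0 : _a.type;
    const merged = Object.assign(Object.assign({}, defaults), overrides);
    const exp = (_b = expiration === null || expiration === void 0 ? void 0 : expiration.getTime()) !== null && _b !== void 0 ? _b : Infinity;

Both emits are behaviorally equivalent; only the output syntax changes.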
@@ -8,6 +8,31 @@
8
8
 
9
9
  var Dexie__default = /*#__PURE__*/_interopDefaultLegacy(Dexie);
10
10
 
11
+ /*! *****************************************************************************
12
+ Copyright (c) Microsoft Corporation.
13
+
14
+ Permission to use, copy, modify, and/or distribute this software for any
15
+ purpose with or without fee is hereby granted.
16
+
17
+ THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
18
+ REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
19
+ AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
20
+ INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
21
+ LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
22
+ OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
23
+ PERFORMANCE OF THIS SOFTWARE.
24
+ ***************************************************************************** */
25
+
26
+ function __awaiter$1(thisArg, _arguments, P, generator) {
27
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
28
+ return new (P || (P = Promise))(function (resolve, reject) {
29
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
30
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
31
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
32
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
33
+ });
34
+ }
35
+
11
36
  const UNAUTHORIZED_USER = {
12
37
  userId: "unauthorized",
13
38
  name: "Unauthorized",
@@ -20,7 +45,7 @@
20
45
  Object.freeze(UNAUTHORIZED_USER);
21
46
  Object.freeze(UNAUTHORIZED_USER.claims);
22
47
  }
23
- catch { }
48
+ catch (_a) { }
24
49
 
25
50
  const swHolder = {};
26
51
  const swContainer = self.document && navigator.serviceWorker; // self.document is to verify we're not the SW ourself
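The catch { } → catch (_a) { } change in this hunk is part of the same downleveling: an optional catch binding is ES2019 syntax, so the older-target emit has to introduce a binding even though nothing reads it. The surrounding logic is unchanged:

    try {
      Object.freeze(UNAUTHORIZED_USER);
    }
    catch (_a) { } // same as `catch { }`; the binding is required by pre-ES2019 grammar but never used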
@@ -29,8 +54,9 @@
29
54
  if (typeof self !== 'undefined' && 'clients' in self && !self.document) {
30
55
  // We are the service worker. Propagate messages to all our clients.
31
56
  addEventListener('message', (ev) => {
32
- if (ev.data?.type?.startsWith('sw-broadcast-')) {
33
- [...self['clients'].matchAll({ includeUncontrolled: true })].forEach((client) => client.id !== ev.source?.id && client.postMessage(ev.data));
57
+ var _a, _b;
58
+ if ((_b = (_a = ev.data) === null || _a === void 0 ? void 0 : _a.type) === null || _b === void 0 ? void 0 : _b.startsWith('sw-broadcast-')) {
59
+ [...self['clients'].matchAll({ includeUncontrolled: true })].forEach((client) => { var _a; return client.id !== ((_a = ev.source) === null || _a === void 0 ? void 0 : _a.id) && client.postMessage(ev.data); });
34
60
  }
35
61
  });
36
62
  }
@@ -42,7 +68,8 @@
42
68
  if (!swContainer)
43
69
  return () => { };
44
70
  const forwarder = (ev) => {
45
- if (ev.data?.type === `sw-broadcast-${this.name}`) {
71
+ var _a;
72
+ if (((_a = ev.data) === null || _a === void 0 ? void 0 : _a.type) === `sw-broadcast-${this.name}`) {
46
73
  listener(ev.data.message);
47
74
  }
48
75
  };
@@ -50,6 +77,7 @@
50
77
  return () => swContainer.removeEventListener('message', forwarder);
51
78
  }
52
79
  postMessage(message) {
80
+ var _a;
53
81
  if (typeof self['clients'] === 'object') {
54
82
  // We're a service worker. Propagate to our browser clients.
55
83
  [...self['clients'].matchAll({ includeUncontrolled: true })].forEach((client) => client.postMessage({
@@ -60,7 +88,7 @@
60
88
  else if (swHolder.registration) {
61
89
  // We're a client (browser window or other worker)
62
90
  // Post to SW so it can repost to all its clients and to itself
63
- swHolder.registration.active?.postMessage({
91
+ (_a = swHolder.registration.active) === null || _a === void 0 ? void 0 : _a.postMessage({
64
92
  type: `sw-broadcast-${this.name}`,
65
93
  message
66
94
  });
@@ -103,7 +131,7 @@
103
131
  this.bc = bc;
104
132
  }
105
133
  next(message) {
106
- console.debug("BroadcastedAndLocalEvent: bc.postMessage()", { ...message }, "bc is a", this.bc);
134
+ console.debug("BroadcastedAndLocalEvent: bc.postMessage()", Object.assign({}, message), "bc is a", this.bc);
107
135
  this.bc.postMessage(message);
108
136
  const ev = new CustomEvent(`lbc-${this.name}`, { detail: message });
109
137
  self.dispatchEvent(ev);
@@ -1696,55 +1724,60 @@
1696
1724
 
1697
1725
  //const hasSW = 'serviceWorker' in navigator;
1698
1726
  let hasComplainedAboutSyncEvent = false;
1699
- async function registerSyncEvent(db, purpose) {
1700
- try {
1701
- // Send sync event to SW:
1702
- const sw = await navigator.serviceWorker.ready;
1703
- if (purpose === "push" && sw.sync) {
1704
- await sw.sync.register(`dexie-cloud:${db.name}`);
1705
- }
1706
- if (sw.active) {
1707
- // Use postMessage for pull syncs and for browsers not supporting sync event (Firefox, Safari).
1708
- // Also chromium based browsers with sw.sync as a fallback for sleepy sync events not taking action for a while.
1709
- sw.active.postMessage({
1710
- type: 'dexie-cloud-sync',
1711
- dbName: db.name,
1712
- purpose
1713
- });
1714
- }
1715
- else {
1716
- throw new Error(`Failed to trigger sync - there's no active service worker`);
1727
+ function registerSyncEvent(db, purpose) {
1728
+ return __awaiter$1(this, void 0, void 0, function* () {
1729
+ try {
1730
+ // Send sync event to SW:
1731
+ const sw = yield navigator.serviceWorker.ready;
1732
+ if (purpose === "push" && sw.sync) {
1733
+ yield sw.sync.register(`dexie-cloud:${db.name}`);
1734
+ }
1735
+ if (sw.active) {
1736
+ // Use postMessage for pull syncs and for browsers not supporting sync event (Firefox, Safari).
1737
+ // Also chromium based browsers with sw.sync as a fallback for sleepy sync events not taking action for a while.
1738
+ sw.active.postMessage({
1739
+ type: 'dexie-cloud-sync',
1740
+ dbName: db.name,
1741
+ purpose
1742
+ });
1743
+ }
1744
+ else {
1745
+ throw new Error(`Failed to trigger sync - there's no active service worker`);
1746
+ }
1747
+ return;
1717
1748
  }
1718
- return;
1719
- }
1720
- catch (e) {
1721
- if (!hasComplainedAboutSyncEvent) {
1722
- console.debug(`Dexie Cloud: Could not register sync event`, e);
1723
- hasComplainedAboutSyncEvent = true;
1749
+ catch (e) {
1750
+ if (!hasComplainedAboutSyncEvent) {
1751
+ console.debug(`Dexie Cloud: Could not register sync event`, e);
1752
+ hasComplainedAboutSyncEvent = true;
1753
+ }
1724
1754
  }
1725
- }
1755
+ });
1726
1756
  }
1727
- async function registerPeriodicSyncEvent(db) {
1728
- try {
1729
- // Register periodicSync event to SW:
1730
- // @ts-ignore
1731
- const { periodicSync } = await navigator.serviceWorker.ready;
1732
- if (periodicSync) {
1733
- try {
1734
- await periodicSync.register(`dexie-cloud:${db.name}`, db.cloud.options?.periodicSync);
1735
- console.debug(`Dexie Cloud: Successfully registered periodicsync event for ${db.name}`);
1757
+ function registerPeriodicSyncEvent(db) {
1758
+ var _a;
1759
+ return __awaiter$1(this, void 0, void 0, function* () {
1760
+ try {
1761
+ // Register periodicSync event to SW:
1762
+ // @ts-ignore
1763
+ const { periodicSync } = yield navigator.serviceWorker.ready;
1764
+ if (periodicSync) {
1765
+ try {
1766
+ yield periodicSync.register(`dexie-cloud:${db.name}`, (_a = db.cloud.options) === null || _a === void 0 ? void 0 : _a.periodicSync);
1767
+ console.debug(`Dexie Cloud: Successfully registered periodicsync event for ${db.name}`);
1768
+ }
1769
+ catch (e) {
1770
+ console.debug(`Dexie Cloud: Failed to register periodic sync. Your PWA must be installed to allow background sync.`, e);
1771
+ }
1736
1772
  }
1737
- catch (e) {
1738
- console.debug(`Dexie Cloud: Failed to register periodic sync. Your PWA must be installed to allow background sync.`, e);
1773
+ else {
1774
+ console.debug(`Dexie Cloud: periodicSync not supported.`);
1739
1775
  }
1740
1776
  }
1741
- else {
1742
- console.debug(`Dexie Cloud: periodicSync not supported.`);
1777
+ catch (e) {
1778
+ console.debug(`Dexie Cloud: Could not register periodicSync for ${db.name}`, e);
1743
1779
  }
1744
- }
1745
- catch (e) {
1746
- console.debug(`Dexie Cloud: Could not register periodicSync for ${db.name}`, e);
1747
- }
1780
+ });
1748
1781
  }
1749
1782
 
1750
1783
  function triggerSync(db, purpose) {
@@ -1773,15 +1806,17 @@
1773
1806
  : Buffer.from(b).toString("base64")
1774
1807
  : (b) => btoa(String.fromCharCode.apply(null, b));
1775
1808
 
1776
- async function computeRealmSetHash({ realms, inviteRealms, }) {
1777
- const data = JSON.stringify([
1778
- ...realms.map((realmId) => ({ realmId, accepted: true })),
1779
- ...inviteRealms.map((realmId) => ({ realmId, accepted: false })),
1780
- ].sort((a, b) => a.realmId < b.realmId ? -1 : a.realmId > b.realmId ? 1 : 0));
1781
- const byteArray = new TextEncoder().encode(data);
1782
- const digestBytes = await crypto.subtle.digest('SHA-1', byteArray);
1783
- const base64 = b64encode(digestBytes);
1784
- return base64;
1809
+ function computeRealmSetHash({ realms, inviteRealms, }) {
1810
+ return __awaiter$1(this, void 0, void 0, function* () {
1811
+ const data = JSON.stringify([
1812
+ ...realms.map((realmId) => ({ realmId, accepted: true })),
1813
+ ...inviteRealms.map((realmId) => ({ realmId, accepted: false })),
1814
+ ].sort((a, b) => a.realmId < b.realmId ? -1 : a.realmId > b.realmId ? 1 : 0));
1815
+ const byteArray = new TextEncoder().encode(data);
1816
+ const digestBytes = yield crypto.subtle.digest('SHA-1', byteArray);
1817
+ const base64 = b64encode(digestBytes);
1818
+ return base64;
1819
+ });
1785
1820
  }
1786
1821
 
1787
1822
  function getSyncableTables(db) {
@@ -1796,7 +1831,8 @@
1796
1831
  }
1797
1832
 
1798
1833
  function getTableFromMutationTable(mutationTable) {
1799
- const tableName = /^\$(.*)_mutations$/.exec(mutationTable)?.[1];
1834
+ var _a;
1835
+ const tableName = (_a = /^\$(.*)_mutations$/.exec(mutationTable)) === null || _a === void 0 ? void 0 : _a[1];
1800
1836
  if (!tableName)
1801
1837
  throw new Error(`Given mutationTable ${mutationTable} is not correct`);
1802
1838
  return tableName;
@@ -1807,49 +1843,51 @@
1807
1843
  return concat.apply([], a);
1808
1844
  }
1809
1845
 
1810
- async function listClientChanges(mutationTables, db, { since = {}, limit = Infinity } = {}) {
1811
- const allMutsOnTables = await Promise.all(mutationTables.map(async (mutationTable) => {
1812
- const tableName = getTableFromMutationTable(mutationTable.name);
1813
- const lastRevision = since[tableName];
1814
- let query = lastRevision
1815
- ? mutationTable.where('rev').above(lastRevision)
1816
- : mutationTable;
1817
- if (limit < Infinity)
1818
- query = query.limit(limit);
1819
- const muts = await query.toArray();
1820
- //const objTable = db.table(tableName);
1821
- /*for (const mut of muts) {
1822
- if (mut.type === "insert" || mut.type === "upsert") {
1823
- mut.values = await objTable.bulkGet(mut.keys);
1824
- }
1825
- }*/
1826
- return muts.map((mut) => ({
1827
- table: tableName,
1828
- mut,
1829
- }));
1830
- }));
1831
- // Sort by time to get a true order of the operations (between tables)
1832
- const sorted = flatten(allMutsOnTables).sort((a, b) => a.mut.ts - b.mut.ts);
1833
- const result = [];
1834
- let currentEntry = null;
1835
- let currentTxid = null;
1836
- for (const { table, mut } of sorted) {
1837
- if (currentEntry &&
1838
- currentEntry.table === table &&
1839
- currentTxid === mut.txid) {
1840
- currentEntry.muts.push(mut);
1841
- }
1842
- else {
1843
- currentEntry = {
1844
- table,
1845
- muts: [mut],
1846
- };
1847
- currentTxid = mut.txid;
1848
- result.push(currentEntry);
1846
+ function listClientChanges(mutationTables, db, { since = {}, limit = Infinity } = {}) {
1847
+ return __awaiter$1(this, void 0, void 0, function* () {
1848
+ const allMutsOnTables = yield Promise.all(mutationTables.map((mutationTable) => __awaiter$1(this, void 0, void 0, function* () {
1849
+ const tableName = getTableFromMutationTable(mutationTable.name);
1850
+ const lastRevision = since[tableName];
1851
+ let query = lastRevision
1852
+ ? mutationTable.where('rev').above(lastRevision)
1853
+ : mutationTable;
1854
+ if (limit < Infinity)
1855
+ query = query.limit(limit);
1856
+ const muts = yield query.toArray();
1857
+ //const objTable = db.table(tableName);
1858
+ /*for (const mut of muts) {
1859
+ if (mut.type === "insert" || mut.type === "upsert") {
1860
+ mut.values = await objTable.bulkGet(mut.keys);
1861
+ }
1862
+ }*/
1863
+ return muts.map((mut) => ({
1864
+ table: tableName,
1865
+ mut,
1866
+ }));
1867
+ })));
1868
+ // Sort by time to get a true order of the operations (between tables)
1869
+ const sorted = flatten(allMutsOnTables).sort((a, b) => a.mut.ts - b.mut.ts);
1870
+ const result = [];
1871
+ let currentEntry = null;
1872
+ let currentTxid = null;
1873
+ for (const { table, mut } of sorted) {
1874
+ if (currentEntry &&
1875
+ currentEntry.table === table &&
1876
+ currentTxid === mut.txid) {
1877
+ currentEntry.muts.push(mut);
1878
+ }
1879
+ else {
1880
+ currentEntry = {
1881
+ table,
1882
+ muts: [mut],
1883
+ };
1884
+ currentTxid = mut.txid;
1885
+ result.push(currentEntry);
1886
+ }
1849
1887
  }
1850
- }
1851
- // Filter out those tables that doesn't have any mutations:
1852
- return result;
1888
+ // Filter out those tables that doesn't have any mutations:
1889
+ return result;
1890
+ });
1853
1891
  }
1854
1892
 
1855
1893
  function randomString$1(bytes) {
@@ -2155,58 +2193,60 @@
2155
2193
  : url.pathname.split('/')[1];
2156
2194
  }
2157
2195
 
2158
- async function listSyncifiedChanges(tablesToSyncify, currentUser, schema, alreadySyncedRealms) {
2159
- const txid = `upload-${randomString$1(8)}`;
2160
- if (currentUser.isLoggedIn) {
2161
- if (tablesToSyncify.length > 0) {
2162
- const ignoredRealms = new Set(alreadySyncedRealms || []);
2163
- const upserts = await Promise.all(tablesToSyncify.map(async (table) => {
2164
- const { extractKey } = table.core.schema.primaryKey;
2165
- if (!extractKey)
2166
- return { table: table.name, muts: [] }; // Outbound tables are not synced.
2167
- const dexieCloudTableSchema = schema[table.name];
2168
- const query = dexieCloudTableSchema?.generatedGlobalId
2169
- ? table.filter((item) => {
2170
- const id = extractKey(item);
2171
- return (!ignoredRealms.has(item.realmId || '') &&
2172
- //(id[0] !== '#' || !!item.$ts) && // Private obj need no sync if not changed
2173
- isValidSyncableID(id));
2174
- })
2175
- : table.filter((item) => {
2176
- extractKey(item);
2177
- return (!ignoredRealms.has(item.realmId || '') &&
2178
- //(id[0] !== '#' || !!item.$ts) && // Private obj need no sync if not changed
2179
- isValidAtID(extractKey(item), dexieCloudTableSchema?.idPrefix));
2180
- });
2181
- const unsyncedObjects = await query.toArray();
2182
- if (unsyncedObjects.length > 0) {
2183
- const mut = {
2184
- type: 'upsert',
2185
- values: unsyncedObjects,
2186
- keys: unsyncedObjects.map(extractKey),
2187
- userId: currentUser.userId,
2188
- txid,
2189
- };
2190
- return {
2191
- table: table.name,
2192
- muts: [mut],
2193
- };
2194
- }
2195
- else {
2196
- return {
2197
- table: table.name,
2198
- muts: [],
2199
- };
2200
- }
2201
- }));
2202
- return upserts.filter((op) => op.muts.length > 0);
2196
+ function listSyncifiedChanges(tablesToSyncify, currentUser, schema, alreadySyncedRealms) {
2197
+ return __awaiter$1(this, void 0, void 0, function* () {
2198
+ const txid = `upload-${randomString$1(8)}`;
2199
+ if (currentUser.isLoggedIn) {
2200
+ if (tablesToSyncify.length > 0) {
2201
+ const ignoredRealms = new Set(alreadySyncedRealms || []);
2202
+ const upserts = yield Promise.all(tablesToSyncify.map((table) => __awaiter$1(this, void 0, void 0, function* () {
2203
+ const { extractKey } = table.core.schema.primaryKey;
2204
+ if (!extractKey)
2205
+ return { table: table.name, muts: [] }; // Outbound tables are not synced.
2206
+ const dexieCloudTableSchema = schema[table.name];
2207
+ const query = (dexieCloudTableSchema === null || dexieCloudTableSchema === void 0 ? void 0 : dexieCloudTableSchema.generatedGlobalId)
2208
+ ? table.filter((item) => {
2209
+ const id = extractKey(item);
2210
+ return (!ignoredRealms.has(item.realmId || '') &&
2211
+ //(id[0] !== '#' || !!item.$ts) && // Private obj need no sync if not changed
2212
+ isValidSyncableID(id));
2213
+ })
2214
+ : table.filter((item) => {
2215
+ extractKey(item);
2216
+ return (!ignoredRealms.has(item.realmId || '') &&
2217
+ //(id[0] !== '#' || !!item.$ts) && // Private obj need no sync if not changed
2218
+ isValidAtID(extractKey(item), dexieCloudTableSchema === null || dexieCloudTableSchema === void 0 ? void 0 : dexieCloudTableSchema.idPrefix));
2219
+ });
2220
+ const unsyncedObjects = yield query.toArray();
2221
+ if (unsyncedObjects.length > 0) {
2222
+ const mut = {
2223
+ type: 'upsert',
2224
+ values: unsyncedObjects,
2225
+ keys: unsyncedObjects.map(extractKey),
2226
+ userId: currentUser.userId,
2227
+ txid,
2228
+ };
2229
+ return {
2230
+ table: table.name,
2231
+ muts: [mut],
2232
+ };
2233
+ }
2234
+ else {
2235
+ return {
2236
+ table: table.name,
2237
+ muts: [],
2238
+ };
2239
+ }
2240
+ })));
2241
+ return upserts.filter((op) => op.muts.length > 0);
2242
+ }
2203
2243
  }
2204
- }
2205
- return [];
2244
+ return [];
2245
+ });
2206
2246
  }
2207
2247
 
2208
2248
  function getTablesToSyncify(db, syncState) {
2209
- const syncedTables = syncState?.syncedTables || [];
2249
+ const syncedTables = (syncState === null || syncState === void 0 ? void 0 : syncState.syncedTables) || [];
2210
2250
  const syncableTables = getSyncableTables(db);
2211
2251
  const tablesToSyncify = syncableTables.filter((tbl) => !syncedTables.includes(tbl.name));
2212
2252
  return tablesToSyncify;
@@ -2215,19 +2255,15 @@
2215
2255
  function interactWithUser(userInteraction, req) {
2216
2256
  let done = false;
2217
2257
  return new Promise((resolve, reject) => {
2218
- const interactionProps = {
2219
- ...req,
2220
- onSubmit: (res) => {
2258
+ const interactionProps = Object.assign(Object.assign({}, req), { onSubmit: (res) => {
2221
2259
  userInteraction.next(undefined);
2222
2260
  done = true;
2223
2261
  resolve(res);
2224
- },
2225
- onCancel: () => {
2262
+ }, onCancel: () => {
2226
2263
  userInteraction.next(undefined);
2227
2264
  done = true;
2228
2265
  reject(new Dexie__default["default"].AbortError("User cancelled"));
2229
- },
2230
- };
2266
+ } });
2231
2267
  userInteraction.next(interactionProps);
2232
2268
  // Start subscribing for external updates to db.cloud.userInteraction, and if so, cancel this request.
2233
2269
  /*const subscription = userInteraction.subscribe((currentInteractionProps) => {
@@ -2248,180 +2284,193 @@
2248
2284
  fields: {}
2249
2285
  });
2250
2286
  }
2251
- async function promptForEmail(userInteraction, title, emailHint) {
2252
- let email = emailHint || '';
2253
- while (!email || !/^[\w-\.]+@([\w-]+\.)+[\w-]{2,10}$/.test(email)) {
2254
- email = (await interactWithUser(userInteraction, {
2255
- type: 'email',
2256
- title,
2257
- alerts: email
2258
- ? [
2259
- {
2260
- type: 'error',
2261
- messageCode: 'INVALID_EMAIL',
2262
- message: 'Please enter a valid email address',
2263
- messageParams: {},
2287
+ function promptForEmail(userInteraction, title, emailHint) {
2288
+ return __awaiter$1(this, void 0, void 0, function* () {
2289
+ let email = emailHint || '';
2290
+ while (!email || !/^[\w-\.]+@([\w-]+\.)+[\w-]{2,10}$/.test(email)) {
2291
+ email = (yield interactWithUser(userInteraction, {
2292
+ type: 'email',
2293
+ title,
2294
+ alerts: email
2295
+ ? [
2296
+ {
2297
+ type: 'error',
2298
+ messageCode: 'INVALID_EMAIL',
2299
+ message: 'Please enter a valid email address',
2300
+ messageParams: {},
2301
+ },
2302
+ ]
2303
+ : [],
2304
+ fields: {
2305
+ email: {
2306
+ type: 'email',
2307
+ placeholder: 'you@somedomain.com',
2264
2308
  },
2265
- ]
2266
- : [],
2267
- fields: {
2268
- email: {
2269
- type: 'email',
2270
- placeholder: 'you@somedomain.com',
2271
2309
  },
2272
- },
2273
- })).email;
2274
- }
2275
- return email;
2310
+ })).email;
2311
+ }
2312
+ return email;
2313
+ });
2276
2314
  }
2277
- async function promptForOTP(userInteraction, email, alert) {
2278
- const alerts = [
2279
- {
2280
- type: 'info',
2281
- messageCode: 'OTP_SENT',
2282
- message: `A One-Time password has been sent to {email}`,
2283
- messageParams: { email },
2284
- },
2285
- ];
2286
- if (alert) {
2287
- alerts.push(alert);
2288
- }
2289
- const { otp } = await interactWithUser(userInteraction, {
2290
- type: 'otp',
2291
- title: 'Enter OTP',
2292
- alerts,
2293
- fields: {
2294
- otp: {
2295
- type: 'otp',
2296
- label: 'OTP',
2297
- placeholder: 'Paste OTP here',
2315
+ function promptForOTP(userInteraction, email, alert) {
2316
+ return __awaiter$1(this, void 0, void 0, function* () {
2317
+ const alerts = [
2318
+ {
2319
+ type: 'info',
2320
+ messageCode: 'OTP_SENT',
2321
+ message: `A One-Time password has been sent to {email}`,
2322
+ messageParams: { email },
2298
2323
  },
2299
- },
2324
+ ];
2325
+ if (alert) {
2326
+ alerts.push(alert);
2327
+ }
2328
+ const { otp } = yield interactWithUser(userInteraction, {
2329
+ type: 'otp',
2330
+ title: 'Enter OTP',
2331
+ alerts,
2332
+ fields: {
2333
+ otp: {
2334
+ type: 'otp',
2335
+ label: 'OTP',
2336
+ placeholder: 'Paste OTP here',
2337
+ },
2338
+ },
2339
+ });
2340
+ return otp;
2300
2341
  });
2301
- return otp;
2302
2342
  }
2303
2343
 
2304
- async function loadAccessToken(db) {
2305
- const currentUser = await db.getCurrentUser();
2306
- const { accessToken, accessTokenExpiration, refreshToken, refreshTokenExpiration, claims, } = currentUser;
2307
- if (!accessToken)
2308
- return;
2309
- const expTime = accessTokenExpiration?.getTime() ?? Infinity;
2310
- if (expTime > Date.now()) {
2311
- return accessToken;
2312
- }
2313
- if (!refreshToken) {
2314
- throw new Error(`Refresh token missing`);
2315
- }
2316
- const refreshExpTime = refreshTokenExpiration?.getTime() ?? Infinity;
2317
- if (refreshExpTime <= Date.now()) {
2318
- throw new Error(`Refresh token has expired`);
2319
- }
2320
- const refreshedLogin = await refreshAccessToken(db.cloud.options.databaseUrl, currentUser);
2321
- await db.table('$logins').update(claims.sub, {
2322
- accessToken: refreshedLogin.accessToken,
2323
- accessTokenExpiration: refreshedLogin.accessTokenExpiration,
2344
+ function loadAccessToken(db) {
2345
+ var _a, _b;
2346
+ return __awaiter$1(this, void 0, void 0, function* () {
2347
+ const currentUser = yield db.getCurrentUser();
2348
+ const { accessToken, accessTokenExpiration, refreshToken, refreshTokenExpiration, claims, } = currentUser;
2349
+ if (!accessToken)
2350
+ return;
2351
+ const expTime = (_a = accessTokenExpiration === null || accessTokenExpiration === void 0 ? void 0 : accessTokenExpiration.getTime()) !== null && _a !== void 0 ? _a : Infinity;
2352
+ if (expTime > Date.now()) {
2353
+ return accessToken;
2354
+ }
2355
+ if (!refreshToken) {
2356
+ throw new Error(`Refresh token missing`);
2357
+ }
2358
+ const refreshExpTime = (_b = refreshTokenExpiration === null || refreshTokenExpiration === void 0 ? void 0 : refreshTokenExpiration.getTime()) !== null && _b !== void 0 ? _b : Infinity;
2359
+ if (refreshExpTime <= Date.now()) {
2360
+ throw new Error(`Refresh token has expired`);
2361
+ }
2362
+ const refreshedLogin = yield refreshAccessToken(db.cloud.options.databaseUrl, currentUser);
2363
+ yield db.table('$logins').update(claims.sub, {
2364
+ accessToken: refreshedLogin.accessToken,
2365
+ accessTokenExpiration: refreshedLogin.accessTokenExpiration,
2366
+ });
2367
+ return refreshedLogin.accessToken;
2324
2368
  });
2325
- return refreshedLogin.accessToken;
2326
- }
2327
- async function authenticate(url, context, fetchToken, userInteraction, hints) {
2328
- if (context.accessToken &&
2329
- context.accessTokenExpiration.getTime() > Date.now()) {
2330
- return context;
2331
- }
2332
- else if (context.refreshToken &&
2333
- (!context.refreshTokenExpiration ||
2334
- context.refreshTokenExpiration.getTime() > Date.now())) {
2335
- return await refreshAccessToken(url, context);
2336
- }
2337
- else {
2338
- return await userAuthenticate(context, fetchToken, userInteraction, hints);
2339
- }
2340
2369
  }
2341
- async function refreshAccessToken(url, login) {
2342
- if (!login.refreshToken)
2343
- throw new Error(`Cannot refresh token - refresh token is missing.`);
2344
- if (!login.nonExportablePrivateKey)
2345
- throw new Error(`login.nonExportablePrivateKey is missing - cannot sign refresh token without a private key.`);
2346
- const time_stamp = Date.now();
2347
- const signing_algorithm = 'RSASSA-PKCS1-v1_5';
2348
- const textEncoder = new TextEncoder();
2349
- const data = textEncoder.encode(login.refreshToken + time_stamp);
2350
- const binarySignature = await crypto.subtle.sign(signing_algorithm, login.nonExportablePrivateKey, data);
2351
- const signature = b64encode(binarySignature);
2352
- const tokenRequest = {
2353
- grant_type: 'refresh_token',
2354
- refresh_token: login.refreshToken,
2355
- scopes: ['ACCESS_DB'],
2356
- signature,
2357
- signing_algorithm,
2358
- time_stamp,
2359
- };
2360
- const res = await fetch(`${url}/token`, {
2361
- body: JSON.stringify(tokenRequest),
2362
- method: 'post',
2363
- headers: { 'Content-Type': 'application/json' },
2364
- mode: 'cors',
2370
+ function authenticate(url, context, fetchToken, userInteraction, hints) {
2371
+ return __awaiter$1(this, void 0, void 0, function* () {
2372
+ if (context.accessToken &&
2373
+ context.accessTokenExpiration.getTime() > Date.now()) {
2374
+ return context;
2375
+ }
2376
+ else if (context.refreshToken &&
2377
+ (!context.refreshTokenExpiration ||
2378
+ context.refreshTokenExpiration.getTime() > Date.now())) {
2379
+ return yield refreshAccessToken(url, context);
2380
+ }
2381
+ else {
2382
+ return yield userAuthenticate(context, fetchToken, userInteraction, hints);
2383
+ }
2365
2384
  });
2366
- if (res.status !== 200)
2367
- throw new Error(`RefreshToken: Status ${res.status} from ${url}/token`);
2368
- const response = await res.json();
2369
- login.accessToken = response.accessToken;
2370
- login.accessTokenExpiration = response.accessTokenExpiration
2371
- ? new Date(response.accessTokenExpiration)
2372
- : undefined;
2373
- return login;
2374
- }
2375
- async function userAuthenticate(context, fetchToken, userInteraction, hints) {
2376
- const { privateKey, publicKey } = await crypto.subtle.generateKey({
2377
- name: 'RSASSA-PKCS1-v1_5',
2378
- modulusLength: 2048,
2379
- publicExponent: new Uint8Array([0x01, 0x00, 0x01]),
2380
- hash: { name: 'SHA-256' },
2381
- }, false, // Non-exportable...
2382
- ['sign', 'verify']);
2383
- if (!privateKey || !publicKey)
2384
- throw new Error(`Could not generate RSA keypair`); // Typings suggest these can be undefined...
2385
- context.nonExportablePrivateKey = privateKey; //...but storable!
2386
- const publicKeySPKI = await crypto.subtle.exportKey('spki', publicKey);
2387
- const publicKeyPEM = spkiToPEM(publicKeySPKI);
2388
- context.publicKey = publicKey;
2389
- try {
2390
- const response2 = await fetchToken({
2391
- public_key: publicKeyPEM,
2392
- hints,
2385
+ }
2386
+ function refreshAccessToken(url, login) {
2387
+ return __awaiter$1(this, void 0, void 0, function* () {
2388
+ if (!login.refreshToken)
2389
+ throw new Error(`Cannot refresh token - refresh token is missing.`);
2390
+ if (!login.nonExportablePrivateKey)
2391
+ throw new Error(`login.nonExportablePrivateKey is missing - cannot sign refresh token without a private key.`);
2392
+ const time_stamp = Date.now();
2393
+ const signing_algorithm = 'RSASSA-PKCS1-v1_5';
2394
+ const textEncoder = new TextEncoder();
2395
+ const data = textEncoder.encode(login.refreshToken + time_stamp);
2396
+ const binarySignature = yield crypto.subtle.sign(signing_algorithm, login.nonExportablePrivateKey, data);
2397
+ const signature = b64encode(binarySignature);
2398
+ const tokenRequest = {
2399
+ grant_type: 'refresh_token',
2400
+ refresh_token: login.refreshToken,
2401
+ scopes: ['ACCESS_DB'],
2402
+ signature,
2403
+ signing_algorithm,
2404
+ time_stamp,
2405
+ };
2406
+ const res = yield fetch(`${url}/token`, {
2407
+ body: JSON.stringify(tokenRequest),
2408
+ method: 'post',
2409
+ headers: { 'Content-Type': 'application/json' },
2410
+ mode: 'cors',
2393
2411
  });
2394
- if (response2.type !== 'tokens')
2395
- throw new Error(`Unexpected response type from token endpoint: ${response2.type}`);
2396
- context.accessToken = response2.accessToken;
2397
- context.accessTokenExpiration = new Date(response2.accessTokenExpiration);
2398
- context.refreshToken = response2.refreshToken;
2399
- if (response2.refreshTokenExpiration) {
2400
- context.refreshTokenExpiration = new Date(response2.refreshTokenExpiration);
2401
- }
2402
- context.userId = response2.claims.sub;
2403
- context.email = response2.claims.email;
2404
- context.name = response2.claims.name;
2405
- context.claims = response2.claims;
2406
- if (response2.alerts && response2.alerts.length > 0) {
2407
- await interactWithUser(userInteraction, {
2408
- type: 'message-alert',
2409
- title: 'Authentication Alert',
2410
- fields: {},
2411
- alerts: response2.alerts,
2412
+ if (res.status !== 200)
2413
+ throw new Error(`RefreshToken: Status ${res.status} from ${url}/token`);
2414
+ const response = yield res.json();
2415
+ login.accessToken = response.accessToken;
2416
+ login.accessTokenExpiration = response.accessTokenExpiration
2417
+ ? new Date(response.accessTokenExpiration)
2418
+ : undefined;
2419
+ return login;
2420
+ });
2421
+ }
2422
+ function userAuthenticate(context, fetchToken, userInteraction, hints) {
2423
+ return __awaiter$1(this, void 0, void 0, function* () {
2424
+ const { privateKey, publicKey } = yield crypto.subtle.generateKey({
2425
+ name: 'RSASSA-PKCS1-v1_5',
2426
+ modulusLength: 2048,
2427
+ publicExponent: new Uint8Array([0x01, 0x00, 0x01]),
2428
+ hash: { name: 'SHA-256' },
2429
+ }, false, // Non-exportable...
2430
+ ['sign', 'verify']);
2431
+ if (!privateKey || !publicKey)
2432
+ throw new Error(`Could not generate RSA keypair`); // Typings suggest these can be undefined...
2433
+ context.nonExportablePrivateKey = privateKey; //...but storable!
2434
+ const publicKeySPKI = yield crypto.subtle.exportKey('spki', publicKey);
2435
+ const publicKeyPEM = spkiToPEM(publicKeySPKI);
2436
+ context.publicKey = publicKey;
2437
+ try {
2438
+ const response2 = yield fetchToken({
2439
+ public_key: publicKeyPEM,
2440
+ hints,
2412
2441
  });
2442
+ if (response2.type !== 'tokens')
2443
+ throw new Error(`Unexpected response type from token endpoint: ${response2.type}`);
2444
+ context.accessToken = response2.accessToken;
2445
+ context.accessTokenExpiration = new Date(response2.accessTokenExpiration);
2446
+ context.refreshToken = response2.refreshToken;
2447
+ if (response2.refreshTokenExpiration) {
2448
+ context.refreshTokenExpiration = new Date(response2.refreshTokenExpiration);
2449
+ }
2450
+ context.userId = response2.claims.sub;
2451
+ context.email = response2.claims.email;
2452
+ context.name = response2.claims.name;
2453
+ context.claims = response2.claims;
2454
+ if (response2.alerts && response2.alerts.length > 0) {
2455
+ yield interactWithUser(userInteraction, {
2456
+ type: 'message-alert',
2457
+ title: 'Authentication Alert',
2458
+ fields: {},
2459
+ alerts: response2.alerts,
2460
+ });
2461
+ }
2462
+ return context;
2413
2463
  }
2414
- return context;
2415
- }
2416
- catch (error) {
2417
- await alertUser(userInteraction, 'Authentication Failed', {
2418
- type: 'error',
2419
- messageCode: 'GENERIC_ERROR',
2420
- message: `We're having a problem authenticating right now.`,
2421
- messageParams: {}
2422
- }).catch(() => { });
2423
- throw error;
2424
- }
2464
+ catch (error) {
2465
+ yield alertUser(userInteraction, 'Authentication Failed', {
2466
+ type: 'error',
2467
+ messageCode: 'GENERIC_ERROR',
2468
+ message: `We're having a problem authenticating right now.`,
2469
+ messageParams: {}
2470
+ }).catch(() => { });
2471
+ throw error;
2472
+ }
2473
+ });
2425
2474
  }
2426
2475
  function spkiToPEM(keydata) {
2427
2476
  const keydataB64 = b64encode(keydata);
@@ -2954,23 +3003,17 @@
2954
3003
  return this.v;
2955
3004
  }
2956
3005
  }
2957
- const defs = {
2958
- ...undefinedDef,
2959
- ...(hasBigIntSupport
2960
- ? {}
2961
- : {
2962
- bigint: {
2963
- test: (val) => val instanceof FakeBigInt,
2964
- replace: (fakeBigInt) => {
2965
- return {
2966
- $t: 'bigint',
2967
- ...fakeBigInt
2968
- };
2969
- },
2970
- revive: ({ v, }) => new FakeBigInt(v)
2971
- }
2972
- })
2973
- };
3006
+ const defs = Object.assign(Object.assign({}, undefinedDef), (hasBigIntSupport
3007
+ ? {}
3008
+ : {
3009
+ bigint: {
3010
+ test: (val) => val instanceof FakeBigInt,
3011
+ replace: (fakeBigInt) => {
3012
+ return Object.assign({ $t: 'bigint' }, fakeBigInt);
3013
+ },
3014
+ revive: ({ v, }) => new FakeBigInt(v)
3015
+ }
3016
+ }));
2974
3017
  const TSON = TypesonSimplified(builtin, defs);
2975
3018
  const BISON = Bison(defs);
2976
3019
 
@@ -3029,110 +3072,107 @@
3029
3072
  }
3030
3073
  function cloneChange(change, rewriteValues) {
3031
3074
  // clone on demand:
3032
- return {
3033
- ...change,
3034
- muts: rewriteValues
3035
- ? change.muts.map((m) => ({
3036
- ...m,
3037
- keys: m.keys.slice(),
3038
- values: m.values.slice(),
3039
- }))
3040
- : change.muts.map((m) => ({ ...m, keys: m.keys.slice() })),
3041
- };
3075
+ return Object.assign(Object.assign({}, change), { muts: rewriteValues
3076
+ ? change.muts.map((m) => (Object.assign(Object.assign({}, m), { keys: m.keys.slice(), values: m.values.slice() })))
3077
+ : change.muts.map((m) => (Object.assign(Object.assign({}, m), { keys: m.keys.slice() }))) });
3042
3078
  }
3043
3079
 
3044
3080
  //import {BisonWebStreamReader} from "dreambase-library/dist/typeson-simplified/BisonWebStreamReader";
3045
- async function syncWithServer(changes, syncState, baseRevs, db, databaseUrl, schema, clientIdentity, currentUser) {
3046
- //
3047
- // Push changes to server using fetch
3048
- //
3049
- const headers = {
3050
- Accept: 'application/json, application/x-bison, application/x-bison-stream',
3051
- 'Content-Type': 'application/tson'
3052
- };
3053
- const accessToken = await loadAccessToken(db);
3054
- if (accessToken) {
3055
- headers.Authorization = `Bearer ${accessToken}`;
3056
- }
3057
- const syncRequest = {
3058
- v: 2,
3059
- dbID: syncState?.remoteDbId,
3060
- clientIdentity,
3061
- schema: schema || {},
3062
- lastPull: syncState ? {
3063
- serverRevision: syncState.serverRevision,
3064
- realms: syncState.realms,
3065
- inviteRealms: syncState.inviteRealms
3066
- } : undefined,
3067
- baseRevs,
3068
- changes: encodeIdsForServer(db.dx.core.schema, currentUser, changes)
3069
- };
3070
- console.debug("Sync request", syncRequest);
3071
- db.syncStateChangedEvent.next({
3072
- phase: 'pushing',
3073
- });
3074
- const res = await fetch(`${databaseUrl}/sync`, {
3075
- method: 'post',
3076
- headers,
3077
- body: TSON.stringify(syncRequest)
3078
- });
3079
- //const contentLength = Number(res.headers.get('content-length'));
3080
- db.syncStateChangedEvent.next({
3081
- phase: 'pulling'
3082
- });
3083
- if (!res.ok) {
3084
- throw new HttpError(res);
3085
- }
3086
- switch (res.headers.get('content-type')) {
3087
- case 'application/x-bison':
3088
- return BISON.fromBinary(await res.blob());
3089
- case 'application/x-bison-stream': //return BisonWebStreamReader(BISON, res);
3090
- default:
3091
- case 'application/json': {
3092
- const text = await res.text();
3093
- const syncRes = TSON.parse(text);
3094
- return syncRes;
3081
+ function syncWithServer(changes, syncState, baseRevs, db, databaseUrl, schema, clientIdentity, currentUser) {
3082
+ return __awaiter$1(this, void 0, void 0, function* () {
3083
+ //
3084
+ // Push changes to server using fetch
3085
+ //
3086
+ const headers = {
3087
+ Accept: 'application/json, application/x-bison, application/x-bison-stream',
3088
+ 'Content-Type': 'application/tson'
3089
+ };
3090
+ const accessToken = yield loadAccessToken(db);
3091
+ if (accessToken) {
3092
+ headers.Authorization = `Bearer ${accessToken}`;
3095
3093
  }
3096
- }
3097
- }
3098
-
3099
- async function modifyLocalObjectsWithNewUserId(syncifiedTables, currentUser, alreadySyncedRealms) {
3100
- const ignoredRealms = new Set(alreadySyncedRealms || []);
3101
- for (const table of syncifiedTables) {
3102
- if (table.name === "members") {
3103
- // members
3104
- await table.toCollection().modify((member) => {
3105
- if (!ignoredRealms.has(member.realmId) && (!member.userId || member.userId === UNAUTHORIZED_USER.userId)) {
3106
- member.userId = currentUser.userId;
3107
- }
3108
- });
3094
+ const syncRequest = {
3095
+ v: 2,
3096
+ dbID: syncState === null || syncState === void 0 ? void 0 : syncState.remoteDbId,
3097
+ clientIdentity,
3098
+ schema: schema || {},
3099
+ lastPull: syncState ? {
3100
+ serverRevision: syncState.serverRevision,
3101
+ realms: syncState.realms,
3102
+ inviteRealms: syncState.inviteRealms
3103
+ } : undefined,
3104
+ baseRevs,
3105
+ changes: encodeIdsForServer(db.dx.core.schema, currentUser, changes)
3106
+ };
3107
+ console.debug("Sync request", syncRequest);
3108
+ db.syncStateChangedEvent.next({
3109
+ phase: 'pushing',
3110
+ });
3111
+ const res = yield fetch(`${databaseUrl}/sync`, {
3112
+ method: 'post',
3113
+ headers,
3114
+ body: TSON.stringify(syncRequest)
3115
+ });
3116
+ //const contentLength = Number(res.headers.get('content-length'));
3117
+ db.syncStateChangedEvent.next({
3118
+ phase: 'pulling'
3119
+ });
3120
+ if (!res.ok) {
3121
+ throw new HttpError(res);
3109
3122
  }
3110
- else if (table.name === "roles") ;
3111
- else if (table.name === "realms") {
3112
- // realms
3113
- await table.toCollection().modify((realm) => {
3114
- if (!ignoredRealms.has(realm.realmId) && (realm.owner === undefined || realm.owner === UNAUTHORIZED_USER.userId)) {
3115
- realm.owner = currentUser.userId;
3116
- }
3117
- });
3123
+ switch (res.headers.get('content-type')) {
3124
+ case 'application/x-bison':
3125
+ return BISON.fromBinary(yield res.blob());
3126
+ case 'application/x-bison-stream': //return BisonWebStreamReader(BISON, res);
3127
+ default:
3128
+ case 'application/json': {
3129
+ const text = yield res.text();
3130
+ const syncRes = TSON.parse(text);
3131
+ return syncRes;
3132
+ }
3118
3133
  }
3119
- else {
3120
- // application entities
3121
- await table.toCollection().modify((obj) => {
3122
- if (!obj.realmId || !ignoredRealms.has(obj.realmId)) {
3123
- if (!obj.owner || obj.owner === UNAUTHORIZED_USER.userId)
3124
- obj.owner = currentUser.userId;
3125
- if (!obj.realmId || obj.realmId === UNAUTHORIZED_USER.userId) {
3126
- obj.realmId = currentUser.userId;
3134
+ });
3135
+ }
3136
+
3137
+ function modifyLocalObjectsWithNewUserId(syncifiedTables, currentUser, alreadySyncedRealms) {
3138
+ return __awaiter$1(this, void 0, void 0, function* () {
3139
+ const ignoredRealms = new Set(alreadySyncedRealms || []);
3140
+ for (const table of syncifiedTables) {
3141
+ if (table.name === "members") {
3142
+ // members
3143
+ yield table.toCollection().modify((member) => {
3144
+ if (!ignoredRealms.has(member.realmId) && (!member.userId || member.userId === UNAUTHORIZED_USER.userId)) {
3145
+ member.userId = currentUser.userId;
3127
3146
  }
3128
- }
3129
- });
3147
+ });
3148
+ }
3149
+ else if (table.name === "roles") ;
3150
+ else if (table.name === "realms") {
3151
+ // realms
3152
+ yield table.toCollection().modify((realm) => {
3153
+ if (!ignoredRealms.has(realm.realmId) && (realm.owner === undefined || realm.owner === UNAUTHORIZED_USER.userId)) {
3154
+ realm.owner = currentUser.userId;
3155
+ }
3156
+ });
3157
+ }
3158
+ else {
3159
+ // application entities
3160
+ yield table.toCollection().modify((obj) => {
3161
+ if (!obj.realmId || !ignoredRealms.has(obj.realmId)) {
3162
+ if (!obj.owner || obj.owner === UNAUTHORIZED_USER.userId)
3163
+ obj.owner = currentUser.userId;
3164
+ if (!obj.realmId || obj.realmId === UNAUTHORIZED_USER.userId) {
3165
+ obj.realmId = currentUser.userId;
3166
+ }
3167
+ }
3168
+ });
3169
+ }
3130
3170
  }
3131
- }
3171
+ });
3132
3172
  }
3133
3173
 
3134
3174
  function throwIfCancelled(cancelToken) {
3135
- if (cancelToken?.cancelled)
3175
+ if (cancelToken === null || cancelToken === void 0 ? void 0 : cancelToken.cancelled)
3136
3176
  throw new Dexie__default["default"].AbortError(`Operation was cancelled`);
3137
3177
  }
3138
3178
 
@@ -3144,17 +3184,19 @@
3144
3184
  self.addEventListener('online', () => isOnline = true);
3145
3185
  self.addEventListener('offline', () => isOnline = false);
3146
3186
 
3147
- async function updateBaseRevs(db, schema, latestRevisions, serverRev) {
3148
- await db.$baseRevs.bulkPut(Object.keys(schema)
3149
- .filter((table) => schema[table].markedForSync)
3150
- .map((tableName) => {
3151
- const lastClientRevOnPreviousServerRev = latestRevisions[tableName] || 0;
3152
- return {
3153
- tableName,
3154
- clientRev: lastClientRevOnPreviousServerRev + 1,
3155
- serverRev,
3156
- };
3157
- }));
3187
+ function updateBaseRevs(db, schema, latestRevisions, serverRev) {
3188
+ return __awaiter$1(this, void 0, void 0, function* () {
3189
+ yield db.$baseRevs.bulkPut(Object.keys(schema)
3190
+ .filter((table) => schema[table].markedForSync)
3191
+ .map((tableName) => {
3192
+ const lastClientRevOnPreviousServerRev = latestRevisions[tableName] || 0;
3193
+ return {
3194
+ tableName,
3195
+ clientRev: lastClientRevOnPreviousServerRev + 1,
3196
+ serverRev,
3197
+ };
3198
+ }));
3199
+ });
3158
3200
  }
3159
3201
 
3160
3202
  function getLatestRevisionsPerTable(clientChangeSet, lastRevisions = {}) {
@@ -3165,119 +3207,123 @@
3165
3207
  return lastRevisions;
3166
3208
  }
3167
3209
 
3168
- async function bulkUpdate(table, keys, changeSpecs) {
3169
- const objs = await table.bulkGet(keys);
3170
- const resultKeys = [];
3171
- const resultObjs = [];
3172
- keys.forEach((key, idx) => {
3173
- const obj = objs[idx];
3174
- if (obj) {
3175
- for (const [keyPath, value] of Object.entries(changeSpecs[idx])) {
3176
- if (keyPath === table.schema.primKey.keyPath) {
3177
- if (Dexie.cmp(value, key) !== 0) {
3178
- throw new Error(`Cannot change primary key`);
3179
- }
3180
- }
3181
- else {
3182
- Dexie__default["default"].setByKeyPath(obj, keyPath, value);
3183
- }
3184
- }
3185
- resultKeys.push(key);
3186
- resultObjs.push(obj);
3187
- }
3188
- });
3189
- await (table.schema.primKey.keyPath == null
3190
- ? table.bulkPut(resultObjs, resultKeys)
3191
- : table.bulkPut(resultObjs));
3192
- }
3193
-
3194
- async function applyServerChanges(changes, db) {
3195
- console.debug('Applying server changes', changes, Dexie__default["default"].currentTransaction);
3196
- for (const { table: tableName, muts } of changes) {
3197
- const table = db.table(tableName);
3198
- if (!table)
3199
- continue; // If server sends changes on a table we don't have, ignore it.
3200
- const { primaryKey } = table.core.schema;
3201
- const keyDecoder = (key) => {
3202
- switch (key[0]) {
3203
- case '[':
3204
- // Decode JSON array
3205
- if (key.endsWith(']'))
3206
- try {
3207
- // On server, array keys are transformed to JSON string representation
3208
- return JSON.parse(key);
3210
+ function bulkUpdate(table, keys, changeSpecs) {
3211
+ return __awaiter$1(this, void 0, void 0, function* () {
3212
+ const objs = yield table.bulkGet(keys);
3213
+ const resultKeys = [];
3214
+ const resultObjs = [];
3215
+ keys.forEach((key, idx) => {
3216
+ const obj = objs[idx];
3217
+ if (obj) {
3218
+ for (const [keyPath, value] of Object.entries(changeSpecs[idx])) {
3219
+ if (keyPath === table.schema.primKey.keyPath) {
3220
+ if (Dexie.cmp(value, key) !== 0) {
3221
+ throw new Error(`Cannot change primary key`);
3209
3222
  }
3210
- catch { }
3211
- return key;
3212
- case '#':
3213
- // Decode private ID (do the opposite from what's done in encodeIdsForServer())
3214
- if (key.endsWith(':' + db.cloud.currentUserId)) {
3215
- return key.substr(0, key.length - db.cloud.currentUserId.length - 1);
3216
- }
3217
- return key;
3218
- default:
3219
- return key;
3220
- }
3221
- };
3222
- for (const mut of muts) {
3223
- const keys = mut.keys.map(keyDecoder);
3224
- switch (mut.type) {
3225
- case 'insert':
3226
- if (primaryKey.outbound) {
3227
- await table.bulkAdd(mut.values, keys);
3228
- }
3229
- else {
3230
- keys.forEach((key, i) => {
3231
- // Make sure inbound keys are consistent
3232
- Dexie__default["default"].setByKeyPath(mut.values[i], primaryKey.keyPath, key);
3233
- });
3234
- await table.bulkAdd(mut.values);
3235
- }
3236
- break;
3237
- case 'upsert':
3238
- if (primaryKey.outbound) {
3239
- await table.bulkPut(mut.values, keys);
3240
- }
3241
- else {
3242
- keys.forEach((key, i) => {
3243
- // Make sure inbound keys are consistent
3244
- Dexie__default["default"].setByKeyPath(mut.values[i], primaryKey.keyPath, key);
3245
- });
3246
- await table.bulkPut(mut.values);
3247
- }
3248
- break;
3249
- case 'modify':
3250
- if (keys.length === 1) {
3251
- await table.update(keys[0], mut.changeSpec);
3252
3223
  }
3253
3224
  else {
3254
- await table.where(':id').anyOf(keys).modify(mut.changeSpec);
3225
+ Dexie__default["default"].setByKeyPath(obj, keyPath, value);
3255
3226
  }
3256
- break;
3257
- case 'update':
3258
- await bulkUpdate(table, keys, mut.changeSpecs);
3259
- break;
3260
- case 'delete':
3261
- await table.bulkDelete(keys);
3262
- break;
3227
+ }
3228
+ resultKeys.push(key);
3229
+ resultObjs.push(obj);
3263
3230
  }
3264
- }
3265
- }
3231
+ });
3232
+ yield (table.schema.primKey.keyPath == null
3233
+ ? table.bulkPut(resultObjs, resultKeys)
3234
+ : table.bulkPut(resultObjs));
3235
+ });
3266
3236
  }
3267
3237
 
3268
- const CURRENT_SYNC_WORKER = 'currentSyncWorker';
3269
- function sync(db, options, schema, syncOptions) {
3270
- return _sync
3271
- .apply(this, arguments)
3272
- .then(() => {
3273
- if (!syncOptions?.justCheckIfNeeded) {
3274
- db.syncStateChangedEvent.next({
3238
+ function applyServerChanges(changes, db) {
3239
+ return __awaiter$1(this, void 0, void 0, function* () {
3240
+ console.debug('Applying server changes', changes, Dexie__default["default"].currentTransaction);
3241
+ for (const { table: tableName, muts } of changes) {
3242
+ const table = db.table(tableName);
3243
+ if (!table)
3244
+ continue; // If server sends changes on a table we don't have, ignore it.
3245
+ const { primaryKey } = table.core.schema;
3246
+ const keyDecoder = (key) => {
3247
+ switch (key[0]) {
3248
+ case '[':
3249
+ // Decode JSON array
3250
+ if (key.endsWith(']'))
3251
+ try {
3252
+ // On server, array keys are transformed to JSON string representation
3253
+ return JSON.parse(key);
3254
+ }
3255
+ catch (_a) { }
3256
+ return key;
3257
+ case '#':
3258
+ // Decode private ID (do the opposite from what's done in encodeIdsForServer())
3259
+ if (key.endsWith(':' + db.cloud.currentUserId)) {
3260
+ return key.substr(0, key.length - db.cloud.currentUserId.length - 1);
3261
+ }
3262
+ return key;
3263
+ default:
3264
+ return key;
3265
+ }
3266
+ };
3267
+ for (const mut of muts) {
3268
+ const keys = mut.keys.map(keyDecoder);
3269
+ switch (mut.type) {
3270
+ case 'insert':
3271
+ if (primaryKey.outbound) {
3272
+ yield table.bulkAdd(mut.values, keys);
3273
+ }
3274
+ else {
3275
+ keys.forEach((key, i) => {
3276
+ // Make sure inbound keys are consistent
3277
+ Dexie__default["default"].setByKeyPath(mut.values[i], primaryKey.keyPath, key);
3278
+ });
3279
+ yield table.bulkAdd(mut.values);
3280
+ }
3281
+ break;
3282
+ case 'upsert':
3283
+ if (primaryKey.outbound) {
3284
+ yield table.bulkPut(mut.values, keys);
3285
+ }
3286
+ else {
3287
+ keys.forEach((key, i) => {
3288
+ // Make sure inbound keys are consistent
3289
+ Dexie__default["default"].setByKeyPath(mut.values[i], primaryKey.keyPath, key);
3290
+ });
3291
+ yield table.bulkPut(mut.values);
3292
+ }
3293
+ break;
3294
+ case 'modify':
3295
+ if (keys.length === 1) {
3296
+ yield table.update(keys[0], mut.changeSpec);
3297
+ }
3298
+ else {
3299
+ yield table.where(':id').anyOf(keys).modify(mut.changeSpec);
3300
+ }
3301
+ break;
3302
+ case 'update':
3303
+ yield bulkUpdate(table, keys, mut.changeSpecs);
3304
+ break;
3305
+ case 'delete':
3306
+ yield table.bulkDelete(keys);
3307
+ break;
3308
+ }
3309
+ }
3310
+ }
3311
+ });
3312
+ }
3313
+
3314
+ const CURRENT_SYNC_WORKER = 'currentSyncWorker';
3315
+ function sync(db, options, schema, syncOptions) {
3316
+ return _sync
3317
+ .apply(this, arguments)
3318
+ .then(() => {
3319
+ if (!(syncOptions === null || syncOptions === void 0 ? void 0 : syncOptions.justCheckIfNeeded)) {
3320
+ db.syncStateChangedEvent.next({
3275
3321
  phase: 'in-sync',
3276
3322
  });
3277
3323
  }
3278
3324
  })
3279
- .catch(async (error) => {
3280
- if (syncOptions?.justCheckIfNeeded)
3325
+ .catch((error) => __awaiter$1(this, void 0, void 0, function* () {
3326
+ if (syncOptions === null || syncOptions === void 0 ? void 0 : syncOptions.justCheckIfNeeded)
3281
3327
  return Promise.reject(error); // Just rethrow.
3282
3328
  console.debug('Error from _sync', {
3283
3329
  isOnline,
@@ -3285,23 +3331,20 @@
3285
3331
  error,
3286
3332
  });
3287
3333
  if (isOnline &&
3288
- syncOptions?.retryImmediatelyOnFetchError &&
3289
- error?.name === 'TypeError' &&
3290
- /fetch/.test(error?.message)) {
3334
+ (syncOptions === null || syncOptions === void 0 ? void 0 : syncOptions.retryImmediatelyOnFetchError) &&
3335
+ (error === null || error === void 0 ? void 0 : error.name) === 'TypeError' &&
3336
+ /fetch/.test(error === null || error === void 0 ? void 0 : error.message)) {
3291
3337
  db.syncStateChangedEvent.next({
3292
3338
  phase: 'error',
3293
3339
  error,
3294
3340
  });
3295
3341
  // Retry again in 500 ms but if it fails again, don't retry.
3296
- await new Promise((resolve) => setTimeout(resolve, 500));
3297
- return await sync(db, options, schema, {
3298
- ...syncOptions,
3299
- retryImmediatelyOnFetchError: false,
3300
- });
3342
+ yield new Promise((resolve) => setTimeout(resolve, 500));
3343
+ return yield sync(db, options, schema, Object.assign(Object.assign({}, syncOptions), { retryImmediatelyOnFetchError: false }));
3301
3344
  }
3302
3345
  // Make sure that no matter whether sync() explodes or not,
3303
3346
  // always update the timestamp. Also store the error.
3304
- await db.$syncState.update('syncState', {
3347
+ yield db.$syncState.update('syncState', {
3305
3348
  timestamp: new Date(),
3306
3349
  error: '' + error,
3307
3350
  });
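One detail worth checking in the retry path above: the nested Object.assign expression that replaces the object spread does not mutate the caller's syncOptions, so the behavior is unchanged — after a fetch-related TypeError while online, the code still waits 500 ms and retries exactly once with retryImmediatelyOnFetchError forced to false. A short walkthrough of the merged options (retryOptions is an illustrative name, not from the package):

    // Object.assign({}, syncOptions) shallow-copies syncOptions into a fresh object;
    // the outer Object.assign then overrides one flag on that copy, leaving syncOptions untouched,
    // which matches the original { ...syncOptions, retryImmediatelyOnFetchError: false }
    const retryOptions = Object.assign(
      Object.assign({}, syncOptions),
      { retryImmediatelyOnFetchError: false }
    );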
@@ -3310,234 +3353,239 @@
3310
3353
  error,
3311
3354
  });
3312
3355
  return Promise.reject(error);
3313
- });
3356
+ }));
3314
3357
  }
3315
- async function _sync(db, options, schema, { isInitialSync, cancelToken, justCheckIfNeeded, purpose } = {
3358
+ function _sync(db, options, schema, { isInitialSync, cancelToken, justCheckIfNeeded, purpose } = {
3316
3359
  isInitialSync: false,
3317
3360
  }) {
3318
- if (!justCheckIfNeeded) {
3319
- console.debug('SYNC STARTED', { isInitialSync, purpose });
3320
- }
3321
- if (!db.cloud.options?.databaseUrl)
3322
- throw new Error(`Internal error: sync must not be called when no databaseUrl is configured`);
3323
- const { databaseUrl } = options;
3324
- const currentUser = await db.getCurrentUser(); // Keep same value across entire sync flow:
3325
- const tablesToSync = currentUser.isLoggedIn ? getSyncableTables(db) : [];
3326
- const mutationTables = tablesToSync.map((tbl) => db.table(getMutationTable(tbl.name)));
3327
- // If this is not the initial sync,
3328
- // go through tables that were previously not synced but should now be according to
3329
- // logged in state and the sync table whitelist in db.cloud.options.
3330
- //
3331
- // Prepare for syncification by modifying locally unauthorized objects:
3332
- //
3333
- const persistedSyncState = await db.getPersistedSyncState();
3334
- const tablesToSyncify = !isInitialSync && currentUser.isLoggedIn
3335
- ? getTablesToSyncify(db, persistedSyncState)
3336
- : [];
3337
- throwIfCancelled(cancelToken);
3338
- const doSyncify = tablesToSyncify.length > 0;
3339
- if (doSyncify) {
3340
- if (justCheckIfNeeded)
3341
- return true;
3342
- //console.debug('sync doSyncify is true');
3343
- await db.transaction('rw', tablesToSyncify, async (tx) => {
3344
- // @ts-ignore
3345
- tx.idbtrans.disableChangeTracking = true;
3346
- // @ts-ignore
3347
- tx.idbtrans.disableAccessControl = true; // TODO: Take care of this flag in access control middleware!
3348
- await modifyLocalObjectsWithNewUserId(tablesToSyncify, currentUser, persistedSyncState?.realms);
3349
- });
3350
- throwIfCancelled(cancelToken);
3351
- }
3352
- //
3353
- // List changes to sync
3354
- //
3355
- const [clientChangeSet, syncState, baseRevs] = await db.transaction('r', db.tables, async () => {
3356
- const syncState = await db.getPersistedSyncState();
3357
- const baseRevs = await db.$baseRevs.toArray();
3358
- let clientChanges = await listClientChanges(mutationTables);
3361
+ var _a;
3362
+ return __awaiter$1(this, void 0, void 0, function* () {
3363
+ if (!justCheckIfNeeded) {
3364
+ console.debug('SYNC STARTED', { isInitialSync, purpose });
3365
+ }
3366
+ if (!((_a = db.cloud.options) === null || _a === void 0 ? void 0 : _a.databaseUrl))
3367
+ throw new Error(`Internal error: sync must not be called when no databaseUrl is configured`);
3368
+ const { databaseUrl } = options;
3369
+ const currentUser = yield db.getCurrentUser(); // Keep same value across entire sync flow:
3370
+ const tablesToSync = currentUser.isLoggedIn ? getSyncableTables(db) : [];
3371
+ const mutationTables = tablesToSync.map((tbl) => db.table(getMutationTable(tbl.name)));
3372
+ // If this is not the initial sync,
3373
+ // go through tables that were previously not synced but should now be according to
3374
+ // logged in state and the sync table whitelist in db.cloud.options.
3375
+ //
3376
+ // Prepare for syncification by modifying locally unauthorized objects:
3377
+ //
3378
+ const persistedSyncState = yield db.getPersistedSyncState();
3379
+ const tablesToSyncify = !isInitialSync && currentUser.isLoggedIn
3380
+ ? getTablesToSyncify(db, persistedSyncState)
3381
+ : [];
3359
3382
  throwIfCancelled(cancelToken);
3383
+ const doSyncify = tablesToSyncify.length > 0;
3360
3384
  if (doSyncify) {
3361
- const alreadySyncedRealms = [
3362
- ...(persistedSyncState?.realms || []),
3363
- ...(persistedSyncState?.inviteRealms || []),
3364
- ];
3365
- const syncificationInserts = await listSyncifiedChanges(tablesToSyncify, currentUser, schema, alreadySyncedRealms);
3385
+ if (justCheckIfNeeded)
3386
+ return true;
3387
+ //console.debug('sync doSyncify is true');
3388
+ yield db.transaction('rw', tablesToSyncify, (tx) => __awaiter$1(this, void 0, void 0, function* () {
3389
+ // @ts-ignore
3390
+ tx.idbtrans.disableChangeTracking = true;
3391
+ // @ts-ignore
3392
+ tx.idbtrans.disableAccessControl = true; // TODO: Take care of this flag in access control middleware!
3393
+ yield modifyLocalObjectsWithNewUserId(tablesToSyncify, currentUser, persistedSyncState === null || persistedSyncState === void 0 ? void 0 : persistedSyncState.realms);
3394
+ }));
3366
3395
  throwIfCancelled(cancelToken);
3367
- clientChanges = clientChanges.concat(syncificationInserts);
3368
- return [clientChanges, syncState, baseRevs];
3369
3396
  }
3370
- return [clientChanges, syncState, baseRevs];
3371
- });
3372
- const syncIsNeeded = clientChangeSet.some((set) => set.muts.some((mut) => mut.keys.length > 0));
3373
- if (justCheckIfNeeded) {
3374
- console.debug('Sync is needed:', syncIsNeeded);
3375
- return syncIsNeeded;
3376
- }
3377
- if (purpose === 'push' && !syncIsNeeded) {
3378
- // The purpose of this request was to push changes
3379
- return false;
3380
- }
3381
- const latestRevisions = getLatestRevisionsPerTable(clientChangeSet, syncState?.latestRevisions);
3382
- const clientIdentity = syncState?.clientIdentity || randomString(16);
3383
- //
3384
- // Push changes to server
3385
- //
3386
- throwIfCancelled(cancelToken);
3387
- const res = await syncWithServer(clientChangeSet, syncState, baseRevs, db, databaseUrl, schema, clientIdentity, currentUser);
3388
- console.debug('Sync response', res);
3389
- //
3390
- // Apply changes locally and clear old change entries:
3391
- //
3392
- const done = await db.transaction('rw', db.tables, async (tx) => {
3393
- // @ts-ignore
3394
- tx.idbtrans.disableChangeTracking = true;
3395
- // @ts-ignore
3396
- tx.idbtrans.disableAccessControl = true; // TODO: Take care of this flag in access control middleware!
3397
- // Update db.cloud.schema from server response.
3398
- // Local schema MAY include a subset of tables, so do not force all tables into local schema.
3399
- for (const tableName of Object.keys(schema)) {
3400
- if (res.schema[tableName]) {
3401
- // Write directly into configured schema. This code can only be executed alone.
3402
- schema[tableName] = res.schema[tableName];
3403
- }
3404
- }
3405
- await db.$syncState.put(schema, 'schema');
3406
- // List mutations that happened during our exchange with the server:
3407
- const addedClientChanges = await listClientChanges(mutationTables, db, {
3408
- since: latestRevisions,
3409
- });
3410
3397
  //
3411
- // Delete changes now as the server has returned success
3412
- // (but keep changes that haven't reached server yet)
3398
+ // List changes to sync
3413
3399
  //
3414
- for (const mutTable of mutationTables) {
3415
- const tableName = getTableFromMutationTable(mutTable.name);
3416
- if (!addedClientChanges.some((ch) => ch.table === tableName && ch.muts.length > 0)) {
3417
- // No added mutations for this table during the time we sent changes
3418
- // to the server.
3419
- // It is therefore safe to clear all changes (which is faster than
3420
- // deleting a range)
3421
- await Promise.all([
3422
- mutTable.clear(),
3423
- db.$baseRevs.where({ tableName }).delete(),
3424
- ]);
3425
- }
3426
- else if (latestRevisions[tableName]) {
3427
- const latestRev = latestRevisions[tableName] || 0;
3428
- await Promise.all([
3429
- mutTable.where('rev').belowOrEqual(latestRev).delete(),
3430
- db.$baseRevs
3431
- .where(':id')
3432
- .between([tableName, -Infinity], [tableName, latestRev + 1], true, true)
3433
- .reverse()
3434
- .offset(1) // Keep one entry (the one mapping muts that came during fetch --> previous server revision)
3435
- .delete(),
3436
- ]);
3400
+ const [clientChangeSet, syncState, baseRevs] = yield db.transaction('r', db.tables, () => __awaiter$1(this, void 0, void 0, function* () {
3401
+ const syncState = yield db.getPersistedSyncState();
3402
+ const baseRevs = yield db.$baseRevs.toArray();
3403
+ let clientChanges = yield listClientChanges(mutationTables);
3404
+ throwIfCancelled(cancelToken);
3405
+ if (doSyncify) {
3406
+ const alreadySyncedRealms = [
3407
+ ...((persistedSyncState === null || persistedSyncState === void 0 ? void 0 : persistedSyncState.realms) || []),
3408
+ ...((persistedSyncState === null || persistedSyncState === void 0 ? void 0 : persistedSyncState.inviteRealms) || []),
3409
+ ];
3410
+ const syncificationInserts = yield listSyncifiedChanges(tablesToSyncify, currentUser, schema, alreadySyncedRealms);
3411
+ throwIfCancelled(cancelToken);
3412
+ clientChanges = clientChanges.concat(syncificationInserts);
3413
+ return [clientChanges, syncState, baseRevs];
3437
3414
  }
3438
- else ;
3439
- }
3440
- // Update latestRevisions object according to additional changes:
3441
- getLatestRevisionsPerTable(addedClientChanges, latestRevisions);
3442
- // Update/add new entries into baseRevs map.
3443
- // * On tables without mutations since last serverRevision,
3444
- // this will update existing entry.
3445
- // * On tables where mutations have been recorded since last
3446
- // serverRevision, this will create a new entry.
3447
- // The purpose of this operation is to mark a start revision (per table)
3448
- // so that all client-mutations that come after this, will be mapped to current
3449
- // server revision.
3450
- await updateBaseRevs(db, schema, latestRevisions, res.serverRevision);
3451
- const syncState = await db.getPersistedSyncState();
3452
- //
3453
- // Delete objects from removed realms
3454
- //
3455
- await deleteObjectsFromRemovedRealms(db, res, syncState);
3456
- //
3457
- // Update syncState
3458
- //
3459
- const newSyncState = syncState || {
3460
- syncedTables: [],
3461
- latestRevisions: {},
3462
- realms: [],
3463
- inviteRealms: [],
3464
- clientIdentity,
3465
- };
3466
- newSyncState.syncedTables = tablesToSync
3467
- .map((tbl) => tbl.name)
3468
- .concat(tablesToSyncify.map((tbl) => tbl.name));
3469
- newSyncState.latestRevisions = latestRevisions;
3470
- newSyncState.remoteDbId = res.dbId;
3471
- newSyncState.initiallySynced = true;
3472
- newSyncState.realms = res.realms;
3473
- newSyncState.inviteRealms = res.inviteRealms;
3474
- newSyncState.serverRevision = res.serverRevision;
3475
- newSyncState.timestamp = new Date();
3476
- delete newSyncState.error;
3477
- const filteredChanges = filterServerChangesThroughAddedClientChanges(res.changes, addedClientChanges);
3415
+ return [clientChanges, syncState, baseRevs];
3416
+ }));
3417
+ const syncIsNeeded = clientChangeSet.some((set) => set.muts.some((mut) => mut.keys.length > 0));
3418
+ if (justCheckIfNeeded) {
3419
+ console.debug('Sync is needed:', syncIsNeeded);
3420
+ return syncIsNeeded;
3421
+ }
3422
+ if (purpose === 'push' && !syncIsNeeded) {
3423
+ // The purpose of this request was to push changes
3424
+ return false;
3425
+ }
3426
+ const latestRevisions = getLatestRevisionsPerTable(clientChangeSet, syncState === null || syncState === void 0 ? void 0 : syncState.latestRevisions);
3427
+ const clientIdentity = (syncState === null || syncState === void 0 ? void 0 : syncState.clientIdentity) || randomString(16);
3478
3428
  //
3479
- // apply server changes
3429
+ // Push changes to server
3480
3430
  //
3481
- await applyServerChanges(filteredChanges, db);
3431
+ throwIfCancelled(cancelToken);
3432
+ const res = yield syncWithServer(clientChangeSet, syncState, baseRevs, db, databaseUrl, schema, clientIdentity, currentUser);
3433
+ console.debug('Sync response', res);
3482
3434
  //
3483
- // Update syncState
3435
+ // Apply changes locally and clear old change entries:
3484
3436
  //
3485
- db.$syncState.put(newSyncState, 'syncState');
3486
- return addedClientChanges.length === 0;
3437
+ const done = yield db.transaction('rw', db.tables, (tx) => __awaiter$1(this, void 0, void 0, function* () {
3438
+ // @ts-ignore
3439
+ tx.idbtrans.disableChangeTracking = true;
3440
+ // @ts-ignore
3441
+ tx.idbtrans.disableAccessControl = true; // TODO: Take care of this flag in access control middleware!
3442
+ // Update db.cloud.schema from server response.
3443
+ // Local schema MAY include a subset of tables, so do not force all tables into local schema.
3444
+ for (const tableName of Object.keys(schema)) {
3445
+ if (res.schema[tableName]) {
3446
+ // Write directly into configured schema. This code can only be executed alone.
3447
+ schema[tableName] = res.schema[tableName];
3448
+ }
3449
+ }
3450
+ yield db.$syncState.put(schema, 'schema');
3451
+ // List mutations that happened during our exchange with the server:
3452
+ const addedClientChanges = yield listClientChanges(mutationTables, db, {
3453
+ since: latestRevisions,
3454
+ });
3455
+ //
3456
+ // Delete changes now as the server has returned success
3457
+ // (but keep changes that haven't reached server yet)
3458
+ //
3459
+ for (const mutTable of mutationTables) {
3460
+ const tableName = getTableFromMutationTable(mutTable.name);
3461
+ if (!addedClientChanges.some((ch) => ch.table === tableName && ch.muts.length > 0)) {
3462
+ // No added mutations for this table during the time we sent changes
3463
+ // to the server.
3464
+ // It is therefore safe to clear all changes (which is faster than
3465
+ // deleting a range)
3466
+ yield Promise.all([
3467
+ mutTable.clear(),
3468
+ db.$baseRevs.where({ tableName }).delete(),
3469
+ ]);
3470
+ }
3471
+ else if (latestRevisions[tableName]) {
3472
+ const latestRev = latestRevisions[tableName] || 0;
3473
+ yield Promise.all([
3474
+ mutTable.where('rev').belowOrEqual(latestRev).delete(),
3475
+ db.$baseRevs
3476
+ .where(':id')
3477
+ .between([tableName, -Infinity], [tableName, latestRev + 1], true, true)
3478
+ .reverse()
3479
+ .offset(1) // Keep one entry (the one mapping muts that came during fetch --> previous server revision)
3480
+ .delete(),
3481
+ ]);
3482
+ }
3483
+ else ;
3484
+ }
3485
+ // Update latestRevisions object according to additional changes:
3486
+ getLatestRevisionsPerTable(addedClientChanges, latestRevisions);
3487
+ // Update/add new entries into baseRevs map.
3488
+ // * On tables without mutations since last serverRevision,
3489
+ // this will update existing entry.
3490
+ // * On tables where mutations have been recorded since last
3491
+ // serverRevision, this will create a new entry.
3492
+ // The purpose of this operation is to mark a start revision (per table)
3493
+ // so that all client-mutations that come after this, will be mapped to current
3494
+ // server revision.
3495
+ yield updateBaseRevs(db, schema, latestRevisions, res.serverRevision);
3496
+ const syncState = yield db.getPersistedSyncState();
3497
+ //
3498
+ // Delete objects from removed realms
3499
+ //
3500
+ yield deleteObjectsFromRemovedRealms(db, res, syncState);
3501
+ //
3502
+ // Update syncState
3503
+ //
3504
+ const newSyncState = syncState || {
3505
+ syncedTables: [],
3506
+ latestRevisions: {},
3507
+ realms: [],
3508
+ inviteRealms: [],
3509
+ clientIdentity,
3510
+ };
3511
+ newSyncState.syncedTables = tablesToSync
3512
+ .map((tbl) => tbl.name)
3513
+ .concat(tablesToSyncify.map((tbl) => tbl.name));
3514
+ newSyncState.latestRevisions = latestRevisions;
3515
+ newSyncState.remoteDbId = res.dbId;
3516
+ newSyncState.initiallySynced = true;
3517
+ newSyncState.realms = res.realms;
3518
+ newSyncState.inviteRealms = res.inviteRealms;
3519
+ newSyncState.serverRevision = res.serverRevision;
3520
+ newSyncState.timestamp = new Date();
3521
+ delete newSyncState.error;
3522
+ const filteredChanges = filterServerChangesThroughAddedClientChanges(res.changes, addedClientChanges);
3523
+ //
3524
+ // apply server changes
3525
+ //
3526
+ yield applyServerChanges(filteredChanges, db);
3527
+ //
3528
+ // Update syncState
3529
+ //
3530
+ db.$syncState.put(newSyncState, 'syncState');
3531
+ return addedClientChanges.length === 0;
3532
+ }));
3533
+ if (!done) {
3534
+ console.debug('MORE SYNC NEEDED. Go for it again!');
3535
+ return yield _sync(db, options, schema, { isInitialSync, cancelToken });
3536
+ }
3537
+ console.debug('SYNC DONE', { isInitialSync });
3538
+ return false; // Not needed anymore
3487
3539
  });
3488
- if (!done) {
3489
- console.debug('MORE SYNC NEEDED. Go for it again!');
3490
- return await _sync(db, options, schema, { isInitialSync, cancelToken });
3491
- }
3492
- console.debug('SYNC DONE', { isInitialSync });
3493
- return false; // Not needed anymore
3494
- }
3495
- async function deleteObjectsFromRemovedRealms(db, res, prevState) {
3496
- const deletedRealms = new Set();
3497
- const rejectedRealms = new Set();
3498
- const previousRealmSet = prevState ? prevState.realms : [];
3499
- const previousInviteRealmSet = prevState ? prevState.inviteRealms : [];
3500
- const updatedRealmSet = new Set(res.realms);
3501
- const updatedTotalRealmSet = new Set(res.realms.concat(res.inviteRealms));
3502
- for (const realmId of previousRealmSet) {
3503
- if (!updatedRealmSet.has(realmId)) {
3504
- rejectedRealms.add(realmId);
3540
+ }
3541
+ function deleteObjectsFromRemovedRealms(db, res, prevState) {
3542
+ return __awaiter$1(this, void 0, void 0, function* () {
3543
+ const deletedRealms = new Set();
3544
+ const rejectedRealms = new Set();
3545
+ const previousRealmSet = prevState ? prevState.realms : [];
3546
+ const previousInviteRealmSet = prevState ? prevState.inviteRealms : [];
3547
+ const updatedRealmSet = new Set(res.realms);
3548
+ const updatedTotalRealmSet = new Set(res.realms.concat(res.inviteRealms));
3549
+ for (const realmId of previousRealmSet) {
3550
+ if (!updatedRealmSet.has(realmId)) {
3551
+ rejectedRealms.add(realmId);
3552
+ if (!updatedTotalRealmSet.has(realmId)) {
3553
+ deletedRealms.add(realmId);
3554
+ }
3555
+ }
3556
+ }
3557
+ for (const realmId of previousInviteRealmSet.concat(previousRealmSet)) {
3505
3558
  if (!updatedTotalRealmSet.has(realmId)) {
3506
3559
  deletedRealms.add(realmId);
3507
3560
  }
3508
3561
  }
3509
- }
3510
- for (const realmId of previousInviteRealmSet.concat(previousRealmSet)) {
3511
- if (!updatedTotalRealmSet.has(realmId)) {
3512
- deletedRealms.add(realmId);
3513
- }
3514
- }
3515
- if (deletedRealms.size > 0 || rejectedRealms.size > 0) {
3516
- const tables = getSyncableTables(db);
3517
- for (const table of tables) {
3518
- let realmsToDelete = ['realms', 'members', 'roles'].includes(table.name)
3519
- ? deletedRealms // These tables should spare rejected ones.
3520
- : rejectedRealms; // All other tables should delete rejected+deleted ones
3521
- if (realmsToDelete.size === 0)
3522
- continue;
3523
- if (table.schema.indexes.some((idx) => idx.keyPath === 'realmId' ||
3524
- (Array.isArray(idx.keyPath) && idx.keyPath[0] === 'realmId'))) {
3525
- // There's an index to use:
3526
- //console.debug(`REMOVAL: deleting all ${table.name} where realmId anyOf `, JSON.stringify([...realmsToDelete]));
3527
- await table
3528
- .where('realmId')
3529
- .anyOf([...realmsToDelete])
3530
- .delete();
3531
- }
3532
- else {
3533
- // No index to use:
3534
- //console.debug(`REMOVAL: deleting all ${table.name} where realmId is any of `, JSON.stringify([...realmsToDelete]), realmsToDelete.size);
3535
- await table
3536
- .filter((obj) => !!obj?.realmId && realmsToDelete.has(obj.realmId))
3537
- .delete();
3562
+ if (deletedRealms.size > 0 || rejectedRealms.size > 0) {
3563
+ const tables = getSyncableTables(db);
3564
+ for (const table of tables) {
3565
+ let realmsToDelete = ['realms', 'members', 'roles'].includes(table.name)
3566
+ ? deletedRealms // These tables should spare rejected ones.
3567
+ : rejectedRealms; // All other tables should delete rejected+deleted ones
3568
+ if (realmsToDelete.size === 0)
3569
+ continue;
3570
+ if (table.schema.indexes.some((idx) => idx.keyPath === 'realmId' ||
3571
+ (Array.isArray(idx.keyPath) && idx.keyPath[0] === 'realmId'))) {
3572
+ // There's an index to use:
3573
+ //console.debug(`REMOVAL: deleting all ${table.name} where realmId anyOf `, JSON.stringify([...realmsToDelete]));
3574
+ yield table
3575
+ .where('realmId')
3576
+ .anyOf([...realmsToDelete])
3577
+ .delete();
3578
+ }
3579
+ else {
3580
+ // No index to use:
3581
+ //console.debug(`REMOVAL: deleting all ${table.name} where realmId is any of `, JSON.stringify([...realmsToDelete]), realmsToDelete.size);
3582
+ yield table
3583
+ .filter((obj) => !!(obj === null || obj === void 0 ? void 0 : obj.realmId) && realmsToDelete.has(obj.realmId))
3584
+ .delete();
3585
+ }
3538
3586
  }
3539
3587
  }
3540
- }
3588
+ });
3541
3589
  }
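The same downleveling explains the noisier one-line changes in the hunk above: optional chaining and object spread from the source are expanded into explicit null checks and Object.assign calls. Illustrative before/after pairs (hypothetical values, not lines taken from the bundle):

    // Hypothetical inputs, only to make the snippet self-contained.
    const persistedSyncState: { realms?: string[] } | null | undefined = undefined;
    const core = { name: 'dbcore' };
    const table = (tableName: string) => tableName;

    // Source: const realms = persistedSyncState?.realms;
    const realms = persistedSyncState === null || persistedSyncState === void 0
      ? void 0
      : persistedSyncState.realms;

    // Source: const extended = { ...core, table };
    const extended = Object.assign(Object.assign({}, core), { table });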
3542
3590
  function filterServerChangesThroughAddedClientChanges(serverChanges, addedClientChanges) {
3543
3591
  const changes = {};
@@ -3555,7 +3603,7 @@
3555
3603
  let isWorking = false;
3556
3604
  let loopWarning = 0;
3557
3605
  let loopDetection = [0, 0, 0, 0, 0, 0, 0, 0, 0, Date.now()];
3558
- event.subscribe(async () => {
3606
+ event.subscribe(() => __awaiter$1(this, void 0, void 0, function* () {
3559
3607
  if (isWorking)
3560
3608
  return;
3561
3609
  if (queue.length > 0) {
@@ -3564,7 +3612,7 @@
3564
3612
  loopDetection.push(Date.now());
3565
3613
  readyToServe.next(false);
3566
3614
  try {
3567
- await consumeQueue();
3615
+ yield consumeQueue();
3568
3616
  }
3569
3617
  finally {
3570
3618
  if (loopDetection[loopDetection.length - 1] - loopDetection[0] <
@@ -3574,170 +3622,173 @@
3574
3622
  // Last time we did this, we ended up here too. Wait for a minute.
3575
3623
  console.warn(`Slowing down websocket loop for one minute`);
3576
3624
  loopWarning = Date.now() + 60000;
3577
- await new Promise((resolve) => setTimeout(resolve, 60000));
3625
+ yield new Promise((resolve) => setTimeout(resolve, 60000));
3578
3626
  }
3579
3627
  else {
3580
3628
  // This is a one-time event. Just pause 10 seconds.
3581
3629
  console.warn(`Slowing down websocket loop for 10 seconds`);
3582
3630
  loopWarning = Date.now() + 10000;
3583
- await new Promise((resolve) => setTimeout(resolve, 10000));
3631
+ yield new Promise((resolve) => setTimeout(resolve, 10000));
3584
3632
  }
3585
3633
  }
3586
3634
  isWorking = false;
3587
3635
  readyToServe.next(true);
3588
3636
  }
3589
3637
  }
3590
- });
3638
+ }));
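The subscription above also guards against a hot loop: it keeps a short window of recent run timestamps and, when they are packed too closely together, pauses for ten seconds the first time and a full minute if it happens again shortly after (the exact threshold sits outside this hunk). A stripped-down sketch of that backoff, with names of my own choosing:

    // Keep the N most recent run timestamps; if they all fall inside windowMs,
    // back off: 10 s at first, 60 s when the loop repeats soon afterwards.
    const recentRuns: number[] = new Array(10).fill(0);
    let backoffUntil = 0;

    async function maybeBackOff(windowMs = 60_000): Promise<void> {
      recentRuns.shift();
      recentRuns.push(Date.now());
      const looping = recentRuns[recentRuns.length - 1] - recentRuns[0] < windowMs;
      if (!looping) return;
      const delay = Date.now() < backoffUntil + windowMs ? 60_000 : 10_000; // escalate on repeat
      backoffUntil = Date.now() + delay;
      await new Promise((resolve) => setTimeout(resolve, delay));
    }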
3591
3639
  function enqueue(msg) {
3592
3640
  queue.push(msg);
3593
3641
  event.next(null);
3594
3642
  }
3595
- async function consumeQueue() {
3596
- while (queue.length > 0) {
3597
- const msg = queue.shift();
3598
- try {
3599
- // If the sync worker or service worker is syncing, wait 'til they're done.
3600
- // There's no need to have two channels at the same time - even though it wouldn't
3601
- // be a problem - this is an optimization.
3602
- await db.cloud.syncState
3603
- .pipe(filter(({ phase }) => phase === 'in-sync' || phase === 'error'), take(1))
3604
- .toPromise();
3605
- console.debug('processing msg', msg);
3606
- const persistedSyncState = db.cloud.persistedSyncState.value;
3607
- //syncState.
3608
- if (!msg)
3609
- continue;
3610
- switch (msg.type) {
3611
- case 'token-expired':
3612
- console.debug('WebSocket observable: Token expired. Refreshing token...');
3613
- const user = db.cloud.currentUser.value;
3614
- // Refresh access token
3615
- const refreshedLogin = await refreshAccessToken(db.cloud.options.databaseUrl, user);
3616
- // Persist updated access token
3617
- await db.table('$logins').update(user.userId, {
3618
- accessToken: refreshedLogin.accessToken,
3619
- accessTokenExpiration: refreshedLogin.accessTokenExpiration,
3620
- });
3621
- // Updating $logins will trigger emission of db.cloud.currentUser observable, which
3622
- // in turn will lead to connectWebSocket.ts reconnecting the socket with the
3623
- // new token. So we don't need to do anything more here.
3624
- break;
3625
- case 'realm-added':
3626
- //if (!persistedSyncState?.realms?.includes(msg.realm) && !persistedSyncState?.inviteRealms?.includes(msg.realm)) {
3627
- triggerSync(db, 'pull');
3628
- //}
3629
- break;
3630
- case 'realm-accepted':
3631
- //if (!persistedSyncState?.realms?.includes(msg.realm)) {
3632
- triggerSync(db, 'pull');
3633
- //}
3634
- break;
3635
- case 'realm-removed':
3636
- //if (
3637
- persistedSyncState?.realms?.includes(msg.realm) ||
3638
- persistedSyncState?.inviteRealms?.includes(msg.realm);
3639
- //) {
3640
- triggerSync(db, 'pull');
3641
- //}
3642
- break;
3643
- case 'realms-changed':
3644
- triggerSync(db, 'pull');
3645
- break;
3646
- case 'changes':
3647
- console.debug('changes');
3648
- if (db.cloud.syncState.value?.phase === 'error') {
3643
+ function consumeQueue() {
3644
+ var _a, _b, _c;
3645
+ return __awaiter$1(this, void 0, void 0, function* () {
3646
+ while (queue.length > 0) {
3647
+ const msg = queue.shift();
3648
+ try {
3649
+ // If the sync worker or service worker is syncing, wait 'til they're done.
3650
+ // There's no need to have two channels at the same time - even though it wouldn't
3651
+ // be a problem - this is an optimization.
3652
+ yield db.cloud.syncState
3653
+ .pipe(filter(({ phase }) => phase === 'in-sync' || phase === 'error'), take(1))
3654
+ .toPromise();
3655
+ console.debug('processing msg', msg);
3656
+ const persistedSyncState = db.cloud.persistedSyncState.value;
3657
+ //syncState.
3658
+ if (!msg)
3659
+ continue;
3660
+ switch (msg.type) {
3661
+ case 'token-expired':
3662
+ console.debug('WebSocket observable: Token expired. Refreshing token...');
3663
+ const user = db.cloud.currentUser.value;
3664
+ // Refresh access token
3665
+ const refreshedLogin = yield refreshAccessToken(db.cloud.options.databaseUrl, user);
3666
+ // Persist updated access token
3667
+ yield db.table('$logins').update(user.userId, {
3668
+ accessToken: refreshedLogin.accessToken,
3669
+ accessTokenExpiration: refreshedLogin.accessTokenExpiration,
3670
+ });
3671
+ // Updating $logins will trigger emission of db.cloud.currentUser observable, which
3672
+ // in turn will lead to connectWebSocket.ts reconnecting the socket with the
3673
+ // new token. So we don't need to do anything more here.
3674
+ break;
3675
+ case 'realm-added':
3676
+ //if (!persistedSyncState?.realms?.includes(msg.realm) && !persistedSyncState?.inviteRealms?.includes(msg.realm)) {
3649
3677
  triggerSync(db, 'pull');
3678
+ //}
3650
3679
  break;
3651
- }
3652
- await db.transaction('rw', db.dx.tables, async (tx) => {
3653
- // @ts-ignore
3654
- tx.idbtrans.disableChangeTracking = true;
3655
- // @ts-ignore
3656
- tx.idbtrans.disableAccessControl = true;
3657
- const [schema, syncState, currentUser] = await Promise.all([
3658
- db.getSchema(),
3659
- db.getPersistedSyncState(),
3660
- db.getCurrentUser(),
3661
- ]);
3662
- console.debug('ws message queue: in transaction');
3663
- if (!syncState || !schema || !currentUser) {
3664
- console.debug('required vars not present', {
3665
- syncState,
3666
- schema,
3667
- currentUser,
3668
- });
3669
- return; // Initial sync must have taken place - otherwise, ignore this.
3680
+ case 'realm-accepted':
3681
+ //if (!persistedSyncState?.realms?.includes(msg.realm)) {
3682
+ triggerSync(db, 'pull');
3683
+ //}
3684
+ break;
3685
+ case 'realm-removed':
3686
+ //if (
3687
+ ((_a = persistedSyncState === null || persistedSyncState === void 0 ? void 0 : persistedSyncState.realms) === null || _a === void 0 ? void 0 : _a.includes(msg.realm)) ||
3688
+ ((_b = persistedSyncState === null || persistedSyncState === void 0 ? void 0 : persistedSyncState.inviteRealms) === null || _b === void 0 ? void 0 : _b.includes(msg.realm));
3689
+ //) {
3690
+ triggerSync(db, 'pull');
3691
+ //}
3692
+ break;
3693
+ case 'realms-changed':
3694
+ triggerSync(db, 'pull');
3695
+ break;
3696
+ case 'changes':
3697
+ console.debug('changes');
3698
+ if (((_c = db.cloud.syncState.value) === null || _c === void 0 ? void 0 : _c.phase) === 'error') {
3699
+ triggerSync(db, 'pull');
3700
+ break;
3670
3701
  }
3671
- // Verify again in ACID tx that we're on same server revision.
3672
- if (msg.baseRev !== syncState.serverRevision) {
3673
- console.debug(`baseRev (${msg.baseRev}) differs from our serverRevision in syncState (${syncState.serverRevision})`);
3674
- // Should we trigger a sync now? No. This is a normal case
3675
- // when another local peer (such as the SW or a websocket channel on other tab) has
3676
- // updated syncState from new server information but we are not aware yet. It would
3677
- // be unnecessary to do a sync in that case. Instead, the caller of this consumeQueue()
3678
- // function will do readyToServe.next(true) right after this return, which will lead
3679
- // to a "ready" message being sent to server with the new accurate serverRev we have,
3680
- // so that the next message indeed will be correct.
3681
- if (typeof msg.baseRev === 'string' && // v2 format
3682
- (typeof syncState.serverRevision === 'bigint' || // v1 format
3683
- typeof syncState.serverRevision === 'object') // v1 format old browser
3684
- ) {
3685
- // The reason for the diff seems to be that server has migrated the revision format.
3686
- // Do a full sync to update revision format.
3687
- // If we don't do a sync request now, we could get stuck in an endless loop.
3702
+ yield db.transaction('rw', db.dx.tables, (tx) => __awaiter$1(this, void 0, void 0, function* () {
3703
+ // @ts-ignore
3704
+ tx.idbtrans.disableChangeTracking = true;
3705
+ // @ts-ignore
3706
+ tx.idbtrans.disableAccessControl = true;
3707
+ const [schema, syncState, currentUser] = yield Promise.all([
3708
+ db.getSchema(),
3709
+ db.getPersistedSyncState(),
3710
+ db.getCurrentUser(),
3711
+ ]);
3712
+ console.debug('ws message queue: in transaction');
3713
+ if (!syncState || !schema || !currentUser) {
3714
+ console.debug('required vars not present', {
3715
+ syncState,
3716
+ schema,
3717
+ currentUser,
3718
+ });
3719
+ return; // Initial sync must have taken place - otherwise, ignore this.
3720
+ }
3721
+ // Verify again in ACID tx that we're on same server revision.
3722
+ if (msg.baseRev !== syncState.serverRevision) {
3723
+ console.debug(`baseRev (${msg.baseRev}) differs from our serverRevision in syncState (${syncState.serverRevision})`);
3724
+ // Should we trigger a sync now? No. This is a normal case
3725
+ // when another local peer (such as the SW or a websocket channel on other tab) has
3726
+ // updated syncState from new server information but we are not aware yet. It would
3727
+ // be unnecessary to do a sync in that case. Instead, the caller of this consumeQueue()
3728
+ // function will do readyToServe.next(true) right after this return, which will lead
3729
+ // to a "ready" message being sent to server with the new accurate serverRev we have,
3730
+ // so that the next message indeed will be correct.
3731
+ if (typeof msg.baseRev === 'string' && // v2 format
3732
+ (typeof syncState.serverRevision === 'bigint' || // v1 format
3733
+ typeof syncState.serverRevision === 'object') // v1 format old browser
3734
+ ) {
3735
+ // The reason for the diff seems to be that server has migrated the revision format.
3736
+ // Do a full sync to update revision format.
3737
+ // If we don't do a sync request now, we could get stuck in an endless loop.
3738
+ triggerSync(db, 'pull');
3739
+ }
3740
+ return; // Ignore message
3741
+ }
3742
+ // Verify also that the message is based on the exact same set of realms
3743
+ const ourRealmSetHash = yield Dexie__default["default"].waitFor(
3744
+ // Keep TX in non-IDB work
3745
+ computeRealmSetHash(syncState));
3746
+ console.debug('ourRealmSetHash', ourRealmSetHash);
3747
+ if (ourRealmSetHash !== msg.realmSetHash) {
3748
+ console.debug('not same realmSetHash', msg.realmSetHash);
3688
3749
  triggerSync(db, 'pull');
3750
+ // The message isn't based on the same realms.
3751
+ // Trigger a sync instead to resolve all things up.
3752
+ return;
3689
3753
  }
3690
- return; // Ignore message
3691
- }
3692
- // Verify also that the message is based on the exact same set of realms
3693
- const ourRealmSetHash = await Dexie__default["default"].waitFor(
3694
- // Keep TX in non-IDB work
3695
- computeRealmSetHash(syncState));
3696
- console.debug('ourRealmSetHash', ourRealmSetHash);
3697
- if (ourRealmSetHash !== msg.realmSetHash) {
3698
- console.debug('not same realmSetHash', msg.realmSetHash);
3699
- triggerSync(db, 'pull');
3700
- // The message isn't based on the same realms.
3701
- // Trigger a sync instead to resolve all things up.
3702
- return;
3703
- }
3704
- // Get clientChanges
3705
- let clientChanges = [];
3706
- if (currentUser.isLoggedIn) {
3707
- const mutationTables = getSyncableTables(db).map((tbl) => db.table(getMutationTable(tbl.name)));
3708
- clientChanges = await listClientChanges(mutationTables, db);
3709
- console.debug('msg queue: client changes', clientChanges);
3710
- }
3711
- if (msg.changes.length > 0) {
3712
- const filteredChanges = filterServerChangesThroughAddedClientChanges(msg.changes, clientChanges);
3754
+ // Get clientChanges
3755
+ let clientChanges = [];
3756
+ if (currentUser.isLoggedIn) {
3757
+ const mutationTables = getSyncableTables(db).map((tbl) => db.table(getMutationTable(tbl.name)));
3758
+ clientChanges = yield listClientChanges(mutationTables, db);
3759
+ console.debug('msg queue: client changes', clientChanges);
3760
+ }
3761
+ if (msg.changes.length > 0) {
3762
+ const filteredChanges = filterServerChangesThroughAddedClientChanges(msg.changes, clientChanges);
3763
+ //
3764
+ // apply server changes
3765
+ //
3766
+ console.debug('applying filtered server changes', filteredChanges);
3767
+ yield applyServerChanges(filteredChanges, db);
3768
+ }
3769
+ // Update latest revisions per table in case there are unsynced changes
3770
+ // This can be a real case in the future when we allow non-eager sync.
3771
+ // And it can actually be realistic now also, but very rare.
3772
+ syncState.latestRevisions = getLatestRevisionsPerTable(clientChanges, syncState.latestRevisions);
3773
+ syncState.serverRevision = msg.newRev;
3774
+ // Update base revs
3775
+ console.debug('Updating baseRefs', syncState.latestRevisions);
3776
+ yield updateBaseRevs(db, schema, syncState.latestRevisions, msg.newRev);
3713
3777
  //
3714
- // apply server changes
3778
+ // Update syncState
3715
3779
  //
3716
- console.debug('applying filtered server changes', filteredChanges);
3717
- await applyServerChanges(filteredChanges, db);
3718
- }
3719
- // Update latest revisions per table in case there are unsynced changes
3720
- // This can be a real case in the future when we allow non-eager sync.
3721
- // And it can actually be realistic now also, but very rare.
3722
- syncState.latestRevisions = getLatestRevisionsPerTable(clientChanges, syncState.latestRevisions);
3723
- syncState.serverRevision = msg.newRev;
3724
- // Update base revs
3725
- console.debug('Updating baseRefs', syncState.latestRevisions);
3726
- await updateBaseRevs(db, schema, syncState.latestRevisions, msg.newRev);
3727
- //
3728
- // Update syncState
3729
- //
3730
- console.debug('Updating syncState', syncState);
3731
- await db.$syncState.put(syncState, 'syncState');
3732
- });
3733
- console.debug('msg queue: done with rw transaction');
3734
- break;
3780
+ console.debug('Updating syncState', syncState);
3781
+ yield db.$syncState.put(syncState, 'syncState');
3782
+ }));
3783
+ console.debug('msg queue: done with rw transaction');
3784
+ break;
3785
+ }
3786
+ }
3787
+ catch (error) {
3788
+ console.error(`Error in msg queue`, error);
3735
3789
  }
3736
3790
  }
3737
- catch (error) {
3738
- console.error(`Error in msg queue`, error);
3739
- }
3740
- }
3791
+ });
3741
3792
  }
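Worth noting in consumeQueue above: each message waits for db.cloud.syncState to report 'in-sync' or 'error' before being processed, using the RxJS 6 idiom pipe(filter(...), take(1)).toPromise(). Under RxJS 7+ the same wait is usually written with firstValueFrom; a small sketch under that assumption:

    import { BehaviorSubject, firstValueFrom } from 'rxjs';
    import { filter } from 'rxjs/operators';

    // Resolves once the observable reports an idle-like phase.
    async function waitUntilIdle(syncState: BehaviorSubject<{ phase: string }>): Promise<void> {
      await firstValueFrom(
        syncState.pipe(filter(({ phase }) => phase === 'in-sync' || phase === 'error'))
      );
    }

    // Usage: const state = new BehaviorSubject({ phase: 'pulling' });
    //        waitUntilIdle(state) resolves after state.next({ phase: 'in-sync' }).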
3742
3793
  return {
3743
3794
  enqueue,
@@ -3856,100 +3907,105 @@
3856
3907
  lastLogin: new Date(0)
3857
3908
  }));
3858
3909
  }
3859
- async save() {
3860
- const db = wm.get(this);
3861
- db.table("$logins").put(this);
3910
+ save() {
3911
+ return __awaiter$1(this, void 0, void 0, function* () {
3912
+ const db = wm.get(this);
3913
+ db.table("$logins").put(this);
3914
+ });
3862
3915
  }
3863
3916
  }
3864
3917
 
3865
3918
  function otpFetchTokenCallback(db) {
3866
3919
  const { userInteraction } = db.cloud;
3867
- return async function otpAuthenticate({ public_key, hints }) {
3868
- let tokenRequest;
3869
- const url = db.cloud.options?.databaseUrl;
3870
- if (!url)
3871
- throw new Error(`No database URL given.`);
3872
- if (hints?.grant_type === 'demo') {
3873
- const demo_user = await promptForEmail(userInteraction, 'Enter a demo user email', hints?.email || hints?.userId);
3874
- tokenRequest = {
3875
- demo_user,
3876
- grant_type: 'demo',
3877
- scopes: ['ACCESS_DB'],
3878
- public_key,
3879
- };
3880
- }
3881
- else {
3882
- const email = await promptForEmail(userInteraction, 'Enter email address', hints?.email);
3883
- tokenRequest = {
3884
- email,
3885
- grant_type: 'otp',
3886
- scopes: ['ACCESS_DB'],
3887
- public_key,
3888
- };
3889
- }
3890
- const res1 = await fetch(`${url}/token`, {
3891
- body: JSON.stringify(tokenRequest),
3892
- method: 'post',
3893
- headers: { 'Content-Type': 'application/json', mode: 'cors' },
3894
- });
3895
- if (res1.status !== 200) {
3896
- const errMsg = await res1.text();
3897
- await alertUser(userInteraction, "Token request failed", {
3898
- type: 'error',
3899
- messageCode: 'GENERIC_ERROR',
3900
- message: errMsg,
3901
- messageParams: {}
3902
- }).catch(() => { });
3903
- throw new HttpError(res1, errMsg);
3904
- }
3905
- const response = await res1.json();
3906
- if (response.type === 'tokens') {
3907
- // Demo user request can get a "tokens" response right away
3908
- return response;
3909
- }
3910
- else if (tokenRequest.grant_type === 'otp') {
3911
- if (response.type !== 'otp-sent')
3912
- throw new Error(`Unexpected response from ${url}/token`);
3913
- const otp = await promptForOTP(userInteraction, tokenRequest.email);
3914
- tokenRequest.otp = otp || '';
3915
- tokenRequest.otp_id = response.otp_id;
3916
- let res2 = await fetch(`${url}/token`, {
3920
+ return function otpAuthenticate({ public_key, hints }) {
3921
+ var _a;
3922
+ return __awaiter$1(this, void 0, void 0, function* () {
3923
+ let tokenRequest;
3924
+ const url = (_a = db.cloud.options) === null || _a === void 0 ? void 0 : _a.databaseUrl;
3925
+ if (!url)
3926
+ throw new Error(`No database URL given.`);
3927
+ if ((hints === null || hints === void 0 ? void 0 : hints.grant_type) === 'demo') {
3928
+ const demo_user = yield promptForEmail(userInteraction, 'Enter a demo user email', (hints === null || hints === void 0 ? void 0 : hints.email) || (hints === null || hints === void 0 ? void 0 : hints.userId));
3929
+ tokenRequest = {
3930
+ demo_user,
3931
+ grant_type: 'demo',
3932
+ scopes: ['ACCESS_DB'],
3933
+ public_key,
3934
+ };
3935
+ }
3936
+ else {
3937
+ const email = yield promptForEmail(userInteraction, 'Enter email address', hints === null || hints === void 0 ? void 0 : hints.email);
3938
+ tokenRequest = {
3939
+ email,
3940
+ grant_type: 'otp',
3941
+ scopes: ['ACCESS_DB'],
3942
+ public_key,
3943
+ };
3944
+ }
3945
+ const res1 = yield fetch(`${url}/token`, {
3917
3946
  body: JSON.stringify(tokenRequest),
3918
3947
  method: 'post',
3919
- headers: { 'Content-Type': 'application/json' },
3920
- mode: 'cors',
3948
+ headers: { 'Content-Type': 'application/json', mode: 'cors' },
3921
3949
  });
3922
- while (res2.status === 401) {
3923
- const errorText = await res2.text();
3924
- tokenRequest.otp = await promptForOTP(userInteraction, tokenRequest.email, {
3950
+ if (res1.status !== 200) {
3951
+ const errMsg = yield res1.text();
3952
+ yield alertUser(userInteraction, "Token request failed", {
3925
3953
  type: 'error',
3926
- messageCode: 'INVALID_OTP',
3927
- message: errorText,
3954
+ messageCode: 'GENERIC_ERROR',
3955
+ message: errMsg,
3928
3956
  messageParams: {}
3929
- });
3930
- res2 = await fetch(`${url}/token`, {
3957
+ }).catch(() => { });
3958
+ throw new HttpError(res1, errMsg);
3959
+ }
3960
+ const response = yield res1.json();
3961
+ if (response.type === 'tokens') {
3962
+ // Demo user request can get a "tokens" response right away
3963
+ return response;
3964
+ }
3965
+ else if (tokenRequest.grant_type === 'otp') {
3966
+ if (response.type !== 'otp-sent')
3967
+ throw new Error(`Unexpected response from ${url}/token`);
3968
+ const otp = yield promptForOTP(userInteraction, tokenRequest.email);
3969
+ tokenRequest.otp = otp || '';
3970
+ tokenRequest.otp_id = response.otp_id;
3971
+ let res2 = yield fetch(`${url}/token`, {
3931
3972
  body: JSON.stringify(tokenRequest),
3932
3973
  method: 'post',
3933
3974
  headers: { 'Content-Type': 'application/json' },
3934
3975
  mode: 'cors',
3935
3976
  });
3977
+ while (res2.status === 401) {
3978
+ const errorText = yield res2.text();
3979
+ tokenRequest.otp = yield promptForOTP(userInteraction, tokenRequest.email, {
3980
+ type: 'error',
3981
+ messageCode: 'INVALID_OTP',
3982
+ message: errorText,
3983
+ messageParams: {}
3984
+ });
3985
+ res2 = yield fetch(`${url}/token`, {
3986
+ body: JSON.stringify(tokenRequest),
3987
+ method: 'post',
3988
+ headers: { 'Content-Type': 'application/json' },
3989
+ mode: 'cors',
3990
+ });
3991
+ }
3992
+ if (res2.status !== 200) {
3993
+ const errMsg = yield res2.text();
3994
+ yield alertUser(userInteraction, "OTP Authentication Failed", {
3995
+ type: 'error',
3996
+ messageCode: 'GENERIC_ERROR',
3997
+ message: errMsg,
3998
+ messageParams: {}
3999
+ }).catch(() => { });
4000
+ throw new HttpError(res2, errMsg);
4001
+ }
4002
+ const response2 = yield res2.json();
4003
+ return response2;
3936
4004
  }
3937
- if (res2.status !== 200) {
3938
- const errMsg = await res2.text();
3939
- await alertUser(userInteraction, "OTP Authentication Failed", {
3940
- type: 'error',
3941
- messageCode: 'GENERIC_ERROR',
3942
- message: errMsg,
3943
- messageParams: {}
3944
- }).catch(() => { });
3945
- throw new HttpError(res2, errMsg);
4005
+ else {
4006
+ throw new Error(`Unexpected response from ${url}/token`);
3946
4007
  }
3947
- const response2 = await res2.json();
3948
- return response2;
3949
- }
3950
- else {
3951
- throw new Error(`Unexpected response from ${url}/token`);
3952
- }
4008
+ });
3953
4009
  };
3954
4010
  }
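The default token callback above performs the OTP exchange in two round trips against ${databaseUrl}/token: the first request (grant_type 'otp') returns an 'otp-sent' response carrying an otp_id, and the second repeats the payload with the user-entered otp, retrying while the server answers 401. A condensed sketch of that exchange, with field names taken from the code above and promptOtp standing in for whatever UI collects the one-time password:

    // Condensed two-step OTP exchange mirroring the flow above (sketch, not the bundled code).
    async function otpExchange(
      databaseUrl: string,
      email: string,
      publicKey: string,
      promptOtp: () => Promise<string>
    ): Promise<unknown> {
      const post = (body: object) =>
        fetch(`${databaseUrl}/token`, {
          method: 'post',
          headers: { 'Content-Type': 'application/json' },
          mode: 'cors',
          body: JSON.stringify(body),
        });

      const request = { email, grant_type: 'otp', scopes: ['ACCESS_DB'], public_key: publicKey };
      const res1 = await post(request);
      if (res1.status !== 200) throw new Error(await res1.text());
      const step1 = await res1.json();
      if (step1.type !== 'otp-sent') return step1; // e.g. demo users get tokens right away

      // Second round trip: same payload plus the OTP and the otp_id issued by the server.
      let res2 = await post({ ...request, otp: await promptOtp(), otp_id: step1.otp_id });
      while (res2.status === 401) {
        // Wrong code: ask again and retry, as the bundled callback does.
        res2 = await post({ ...request, otp: await promptOtp(), otp_id: step1.otp_id });
      }
      if (res2.status !== 200) throw new Error(await res2.text());
      return res2.json(); // a 'tokens' response on success
    }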
3955
4011
 
@@ -3964,83 +4020,87 @@
3964
4020
  * @param db
3965
4021
  * @param newUser
3966
4022
  */
3967
- async function setCurrentUser(db, user) {
3968
- if (user.userId === db.cloud.currentUserId)
3969
- return; // Already this user.
3970
- const $logins = db.table('$logins');
3971
- await db.transaction('rw', $logins, async (tx) => {
3972
- const existingLogins = await $logins.toArray();
3973
- await Promise.all(existingLogins
3974
- .filter((login) => login.userId !== user.userId && login.isLoggedIn)
3975
- .map((login) => {
3976
- login.isLoggedIn = false;
3977
- return $logins.put(login);
4023
+ function setCurrentUser(db, user) {
4024
+ return __awaiter$1(this, void 0, void 0, function* () {
4025
+ if (user.userId === db.cloud.currentUserId)
4026
+ return; // Already this user.
4027
+ const $logins = db.table('$logins');
4028
+ yield db.transaction('rw', $logins, (tx) => __awaiter$1(this, void 0, void 0, function* () {
4029
+ const existingLogins = yield $logins.toArray();
4030
+ yield Promise.all(existingLogins
4031
+ .filter((login) => login.userId !== user.userId && login.isLoggedIn)
4032
+ .map((login) => {
4033
+ login.isLoggedIn = false;
4034
+ return $logins.put(login);
4035
+ }));
4036
+ user.isLoggedIn = true;
4037
+ user.lastLogin = new Date();
4038
+ yield user.save();
4039
+ console.debug('Saved new user', user.email);
3978
4040
  }));
3979
- user.isLoggedIn = true;
3980
- user.lastLogin = new Date();
3981
- await user.save();
3982
- console.debug('Saved new user', user.email);
4041
+ yield new Promise((resolve) => {
4042
+ if (db.cloud.currentUserId === user.userId) {
4043
+ resolve(null);
4044
+ }
4045
+ else {
4046
+ const subscription = db.cloud.currentUser.subscribe((currentUser) => {
4047
+ if (currentUser.userId === user.userId) {
4048
+ subscription.unsubscribe();
4049
+ resolve(null);
4050
+ }
4051
+ });
4052
+ }
4053
+ });
4054
+ // THOUGHTS!!!!
4055
+ // V: The service worker won't have access to currentUserObservable unless it is instead derived from a liveQuery.
4056
+ // V: Same with other windows.
4057
+ // V: So maybe rework it to derive from a liveQuery that reads $logins.orderBy('lastLogin').last().
4058
+ // V: Then just be aware that:
4059
+ // V: Such an observable starts fetching data on the first subscribe
4060
+ // V: We have no "initial value" but can emulate it as ANONYMOUS_USER
4061
+ // V: If requireAuth is true, db.on(ready) should keep the database closed for everything except this observable.
4062
+ // V: If not, it doesn't need to block.
4063
+ // Other thoughts:
4064
+ // * You cannot switch users while offline. Realms could be moved to a stash table on user change,
4065
+ // but it's probably not worth it.
4066
+ // * Instead: don't advertise switch-user functionality; only talk about logged-in vs not-logged-in mode.
4067
+ // * Populate $logins with ANONYMOUS so that a started login doesn't count, or alternatively have a boolean prop!
4068
+ // Probably best to have a boolean prop!
4069
+ // * Alternative switch-user functionality:
4070
+ // * DBCore hides data from realms you don't have access to.
4071
+ // * A cursor implementation is also needed then.
4072
+ // * That would make user switching fast.
4073
+ // * The claims set sent to the server becomes the sum of all claims. The server must then support multiple tokens, or
4074
+ // a single combined token.
3983
4075
  });
3984
- await new Promise((resolve) => {
3985
- if (db.cloud.currentUserId === user.userId) {
3986
- resolve(null);
3987
- }
3988
- else {
3989
- const subscription = db.cloud.currentUser.subscribe((currentUser) => {
3990
- if (currentUser.userId === user.userId) {
3991
- subscription.unsubscribe();
3992
- resolve(null);
4076
+ }
4077
+
4078
+ function login(db, hints) {
4079
+ return __awaiter$1(this, void 0, void 0, function* () {
4080
+ const currentUser = yield db.getCurrentUser();
4081
+ if (currentUser.isLoggedIn) {
4082
+ if (hints) {
4083
+ if (hints.email && db.cloud.currentUser.value.email !== hints.email) {
4084
+ throw new Error(`Must logout before changing user`);
3993
4085
  }
3994
- });
4086
+ if (hints.userId && db.cloud.currentUserId !== hints.userId) {
4087
+ throw new Error(`Must logout before changing user`);
4088
+ }
4089
+ }
4090
+ // Already authenticated according to given hints.
4091
+ return;
3995
4092
  }
4093
+ const context = new AuthPersistedContext(db, {
4094
+ claims: {},
4095
+ lastLogin: new Date(0),
4096
+ });
4097
+ yield authenticate(db.cloud.options.databaseUrl, context, db.cloud.options.fetchTokens || otpFetchTokenCallback(db), db.cloud.userInteraction, hints);
4098
+ yield context.save();
4099
+ yield setCurrentUser(db, context);
4100
+ // Make sure to resync as the new login will be authorized
4101
+ // for new realms.
4102
+ triggerSync(db, "pull");
3996
4103
  });
3997
- // THOUGHTS!!!!
3998
- // V: The service worker won't have access to currentUserObservable unless it is instead derived from a liveQuery.
3999
- // V: Same with other windows.
4000
- // V: So maybe rework it to derive from a liveQuery that reads $logins.orderBy('lastLogin').last().
4001
- // V: Then just be aware that:
4002
- // V: Such an observable starts fetching data on the first subscribe
4003
- // V: We have no "initial value" but can emulate it as ANONYMOUS_USER
4004
- // V: If requireAuth is true, db.on(ready) should keep the database closed for everything except this observable.
4005
- // V: If not, it doesn't need to block.
4006
- // Other thoughts:
4007
- // * You cannot switch users while offline. Realms could be moved to a stash table on user change,
4008
- // but it's probably not worth it.
4009
- // * Instead: don't advertise switch-user functionality; only talk about logged-in vs not-logged-in mode.
4010
- // * Populate $logins with ANONYMOUS so that a started login doesn't count, or alternatively have a boolean prop!
4011
- // Probably best to have a boolean prop!
4012
- // * Alternative switch-user functionality:
4013
- // * DBCore hides data from realms you don't have access to.
4014
- // * A cursor implementation is also needed then.
4015
- // * That would make user switching fast.
4016
- // * The claims set sent to the server becomes the sum of all claims. The server must then support multiple tokens, or
4017
- // a single combined token.
4018
- }
4019
-
4020
- async function login(db, hints) {
4021
- const currentUser = await db.getCurrentUser();
4022
- if (currentUser.isLoggedIn) {
4023
- if (hints) {
4024
- if (hints.email && db.cloud.currentUser.value.email !== hints.email) {
4025
- throw new Error(`Must logout before changing user`);
4026
- }
4027
- if (hints.userId && db.cloud.currentUserId !== hints.userId) {
4028
- throw new Error(`Must logout before changing user`);
4029
- }
4030
- }
4031
- // Already authenticated according to given hints.
4032
- return;
4033
- }
4034
- const context = new AuthPersistedContext(db, {
4035
- claims: {},
4036
- lastLogin: new Date(0),
4037
- });
4038
- await authenticate(db.cloud.options.databaseUrl, context, db.cloud.options.fetchTokens || otpFetchTokenCallback(db), db.cloud.userInteraction, hints);
4039
- await context.save();
4040
- await setCurrentUser(db, context);
4041
- // Make sure to resync as the new login will be authorized
4042
- // for new realms.
4043
- triggerSync(db, "pull");
4044
4104
  }
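login() above is what the addon runs under the hood: authenticate against the configured databaseUrl (with the OTP callback unless options.fetchTokens overrides it), persist the login context, mark it as the current user, and trigger a pull sync. From application code this is normally reached through the addon's public API; a usage sketch based on the documented dexie-cloud-addon surface, with the database name and URL as placeholders:

    import Dexie from 'dexie';
    import dexieCloud from 'dexie-cloud-addon';

    // Placeholder database name and URL; '@id' asks Dexie Cloud to generate global ids.
    const db = new Dexie('myAppDB', { addons: [dexieCloud] });
    db.version(1).stores({ todos: '@id, title' });
    db.cloud.configure({
      databaseUrl: 'https://xxxxx.dexie.cloud',
      requireAuth: false, // stay anonymous until login() is called explicitly
    });

    async function signIn(email: string) {
      // Runs the OTP flow above via the addon's default UI, then triggers a resync.
      await db.cloud.login({ email });
    }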
4045
4105
 
4046
4106
  // @ts-ignore
@@ -4088,9 +4148,10 @@
4088
4148
  return toString.call(o).slice(8, -1);
4089
4149
  }
4090
4150
  function getEffectiveKeys(primaryKey, req) {
4151
+ var _a;
4091
4152
  if (req.type === 'delete')
4092
4153
  return req.keys;
4093
- return req.keys?.slice() || req.values.map(primaryKey.extractKey);
4154
+ return ((_a = req.keys) === null || _a === void 0 ? void 0 : _a.slice()) || req.values.map(primaryKey.extractKey);
4094
4155
  }
4095
4156
  function applyToUpperBitFix(orig, bits) {
4096
4157
  return ((bits & 1 ? orig[0].toUpperCase() : orig[0].toLowerCase()) +
@@ -4181,9 +4242,7 @@
4181
4242
  name: 'idGenerationMiddleware',
4182
4243
  level: 1,
4183
4244
  create: (core) => {
4184
- return {
4185
- ...core,
4186
- table: (tableName) => {
4245
+ return Object.assign(Object.assign({}, core), { table: (tableName) => {
4187
4246
  const table = core.table(tableName);
4188
4247
  function generateOrVerifyAtKeys(req, idPrefix) {
4189
4248
  let valueClones = null;
@@ -4209,24 +4268,19 @@
4209
4268
  `If you want to generate IDs programmatically, remove '@' from the schema to get rid of this constraint. Dexie Cloud supports custom IDs as long as they are random and globally unique.`);
4210
4269
  }
4211
4270
  });
4212
- return table.mutate({
4213
- ...req,
4214
- keys,
4215
- values: valueClones || req.values,
4216
- });
4271
+ return table.mutate(Object.assign(Object.assign({}, req), { keys, values: valueClones || req.values }));
4217
4272
  }
4218
- return {
4219
- ...table,
4220
- mutate: (req) => {
4273
+ return Object.assign(Object.assign({}, table), { mutate: (req) => {
4274
+ var _a, _b;
4221
4275
  // @ts-ignore
4222
4276
  if (req.trans.disableChangeTracking) {
4223
4277
  // Disable ID policy checks and ID generation
4224
4278
  return table.mutate(req);
4225
4279
  }
4226
4280
  if (req.type === 'add' || req.type === 'put') {
4227
- const cloudTableSchema = db.cloud.schema?.[tableName];
4228
- if (!cloudTableSchema?.generatedGlobalId) {
4229
- if (cloudTableSchema?.markedForSync) {
4281
+ const cloudTableSchema = (_a = db.cloud.schema) === null || _a === void 0 ? void 0 : _a[tableName];
4282
+ if (!(cloudTableSchema === null || cloudTableSchema === void 0 ? void 0 : cloudTableSchema.generatedGlobalId)) {
4283
+ if (cloudTableSchema === null || cloudTableSchema === void 0 ? void 0 : cloudTableSchema.markedForSync) {
4230
4284
  // Just make sure primary key is of a supported type:
4231
4285
  const keys = getEffectiveKeys(table.schema.primaryKey, req);
4232
4286
  keys.forEach((key, idx) => {
@@ -4240,7 +4294,7 @@
4240
4294
  }
4241
4295
  }
4242
4296
  else {
4243
- if (db.cloud.options?.databaseUrl && !db.initiallySynced) {
4297
+ if (((_b = db.cloud.options) === null || _b === void 0 ? void 0 : _b.databaseUrl) && !db.initiallySynced) {
4244
4298
  // A database URL is configured but no initial sync has been performed.
4245
4299
  const keys = getEffectiveKeys(table.schema.primaryKey, req);
4246
4300
  // Check if the operation would yield any INSERT. If so, complain! We never want wrong ID prefixes stored.
@@ -4261,10 +4315,8 @@
4261
4315
  }
4262
4316
  }
4263
4317
  return table.mutate(req);
4264
- },
4265
- };
4266
- },
4267
- };
4318
+ } });
4319
+ } });
4268
4320
  },
4269
4321
  };
4270
4322
  }
@@ -4275,19 +4327,16 @@
4275
4327
  name: 'implicitPropSetterMiddleware',
4276
4328
  level: 1,
4277
4329
  create: (core) => {
4278
- return {
4279
- ...core,
4280
- table: (tableName) => {
4330
+ return Object.assign(Object.assign({}, core), { table: (tableName) => {
4281
4331
  const table = core.table(tableName);
4282
- return {
4283
- ...table,
4284
- mutate: (req) => {
4332
+ return Object.assign(Object.assign({}, table), { mutate: (req) => {
4333
+ var _a, _b, _c, _d;
4285
4334
  // @ts-ignore
4286
4335
  if (req.trans.disableChangeTracking) {
4287
4336
  return table.mutate(req);
4288
4337
  }
4289
4338
  const trans = req.trans;
4290
- if (db.cloud.schema?.[tableName]?.markedForSync) {
4339
+ if ((_b = (_a = db.cloud.schema) === null || _a === void 0 ? void 0 : _a[tableName]) === null || _b === void 0 ? void 0 : _b.markedForSync) {
4291
4340
  if (req.type === 'add' || req.type === 'put') {
4292
4341
  // No matter if user is logged in or not, make sure "owner" and "realmId" props are set properly.
4293
4342
  // If not logged in, this will be changed upon syncification of the tables (next sync after login),
@@ -4301,7 +4350,7 @@
4301
4350
  if (!obj.realmId) {
4302
4351
  obj.realmId = trans.currentUser.userId;
4303
4352
  }
4304
- const key = table.schema.primaryKey.extractKey?.(obj);
4353
+ const key = (_d = (_c = table.schema.primaryKey).extractKey) === null || _d === void 0 ? void 0 : _d.call(_c, obj);
4305
4354
  if (typeof key === 'string' && key[0] === '#') {
4306
4355
  // Add $ts prop for put operations and
4307
4356
  // disable update operations as well as consistent
@@ -4328,10 +4377,8 @@
4328
4377
  }
4329
4378
  }
4330
4379
  return table.mutate(req);
4331
- },
4332
- };
4333
- },
4334
- };
4380
+ } });
4381
+ } });
4335
4382
  },
4336
4383
  };
4337
4384
  }
@@ -4350,15 +4397,7 @@
4350
4397
  let counter$1 = 0;
4351
4398
  function guardedTable(table) {
4352
4399
  const prop = "$lock" + (++counter$1);
4353
- return {
4354
- ...table,
4355
- count: readLock(table.count, prop),
4356
- get: readLock(table.get, prop),
4357
- getMany: readLock(table.getMany, prop),
4358
- openCursor: readLock(table.openCursor, prop),
4359
- query: readLock(table.query, prop),
4360
- mutate: writeLock(table.mutate, prop),
4361
- };
4400
+ return Object.assign(Object.assign({}, table), { count: readLock(table.count, prop), get: readLock(table.get, prop), getMany: readLock(table.getMany, prop), openCursor: readLock(table.openCursor, prop), query: readLock(table.query, prop), mutate: writeLock(table.mutate, prop) });
4362
4401
  }
4363
4402
  function readLock(fn, prop) {
4364
4403
  return function readLocker(req) {
@@ -4408,16 +4447,14 @@
4408
4447
  core.table(`$${tbl.name}_mutations`)
4409
4448
  ]));
4410
4449
  }
4411
- catch {
4450
+ catch (_a) {
4412
4451
  throwVersionIncrementNeeded();
4413
4452
  }
4414
- return {
4415
- ...core,
4416
- transaction: (tables, mode) => {
4453
+ return Object.assign(Object.assign({}, core), { transaction: (tables, mode) => {
4417
4454
  let tx;
4418
4455
  if (mode === 'readwrite') {
4419
4456
  const mutationTables = tables
4420
- .filter((tbl) => db.cloud.schema?.[tbl]?.markedForSync)
4457
+ .filter((tbl) => { var _a, _b; return (_b = (_a = db.cloud.schema) === null || _a === void 0 ? void 0 : _a[tbl]) === null || _b === void 0 ? void 0 : _b.markedForSync; })
4421
4458
  .map((tbl) => getMutationTable(tbl));
4422
4459
  tx = core.transaction([...tables, ...mutationTables], mode);
4423
4460
  }
@@ -4440,7 +4477,8 @@
4440
4477
  outstandingTransactions.next(outstandingTransactions.value);
4441
4478
  };
4442
4479
  const txComplete = () => {
4443
- if (tx.mutationsAdded && db.cloud.options?.databaseUrl) {
4480
+ var _a;
4481
+ if (tx.mutationsAdded && ((_a = db.cloud.options) === null || _a === void 0 ? void 0 : _a.databaseUrl)) {
4444
4482
  if (db.cloud.usingServiceWorker) {
4445
4483
  console.debug('registering sync event');
4446
4484
  registerSyncEvent(db, "push");
@@ -4456,8 +4494,7 @@
4456
4494
  tx.addEventListener('abort', removeTransaction);
4457
4495
  }
4458
4496
  return tx;
4459
- },
4460
- table: (tableName) => {
4497
+ }, table: (tableName) => {
4461
4498
  const table = core.table(tableName);
4462
4499
  if (/^\$/.test(tableName)) {
4463
4500
  if (tableName.endsWith('_mutations')) {
@@ -4465,20 +4502,15 @@
4465
4502
  // make sure to set the mutationsAdded flag on transaction.
4466
4503
  // This is also done in mutateAndLog() as that function talks to a
4467
4504
  // lower level DBCore and wouldn't be caught by this code.
4468
- return {
4469
- ...table,
4470
- mutate: (req) => {
4505
+ return Object.assign(Object.assign({}, table), { mutate: (req) => {
4471
4506
  if (req.type === 'add' || req.type === 'put') {
4472
4507
  req.trans.mutationsAdded = true;
4473
4508
  }
4474
4509
  return table.mutate(req);
4475
- }
4476
- };
4510
+ } });
4477
4511
  }
4478
4512
  else if (tableName === '$logins') {
4479
- return {
4480
- ...table,
4481
- mutate: (req) => {
4513
+ return Object.assign(Object.assign({}, table), { mutate: (req) => {
4482
4514
  //console.debug('Mutating $logins table', req);
4483
4515
  return table
4484
4516
  .mutate(req)
@@ -4492,8 +4524,7 @@
4492
4524
  console.debug('Failed mutation $logins', err);
4493
4525
  return Promise.reject(err);
4494
4526
  });
4495
- }
4496
- };
4527
+ } });
4497
4528
  }
4498
4529
  else {
4499
4530
  return table;
@@ -4501,17 +4532,16 @@
4501
4532
  }
4502
4533
  const { schema } = table;
4503
4534
  const mutsTable = mutTableMap.get(tableName);
4504
- return guardedTable({
4505
- ...table,
4506
- mutate: (req) => {
4535
+ return guardedTable(Object.assign(Object.assign({}, table), { mutate: (req) => {
4536
+ var _a, _b, _c;
4507
4537
  const trans = req.trans;
4508
4538
  if (!trans.txid)
4509
4539
  return table.mutate(req); // Upgrade transactions not guarded by us.
4510
4540
  if (trans.disableChangeTracking)
4511
4541
  return table.mutate(req);
4512
- if (!db.cloud.schema?.[tableName]?.markedForSync)
4542
+ if (!((_b = (_a = db.cloud.schema) === null || _a === void 0 ? void 0 : _a[tableName]) === null || _b === void 0 ? void 0 : _b.markedForSync))
4513
4543
  return table.mutate(req);
4514
- if (!trans.currentUser?.isLoggedIn) {
4544
+ if (!((_c = trans.currentUser) === null || _c === void 0 ? void 0 : _c.isLoggedIn)) {
4515
4545
  // Unauthorized user should not log mutations.
4516
4546
  // Instead, after login all local data should be logged at once.
4517
4547
  return table.mutate(req);
@@ -4534,8 +4564,7 @@
4534
4564
  });
4535
4565
  })
4536
4566
  : mutateAndLog(req);
4537
- }
4538
- });
4567
+ } }));
4539
4568
  function mutateAndLog(req) {
4540
4569
  const trans = req.trans;
4541
4570
  trans.mutationsAdded = true;
@@ -4606,18 +4635,14 @@
4606
4635
  : res;
4607
4636
  });
4608
4637
  }
4609
- }
4610
- };
4638
+ } });
4611
4639
  }
4612
4640
  };
4613
4641
  }
4614
4642
 
4615
4643
  function overrideParseStoresSpec(origFunc, dexie) {
4616
4644
  return function (stores, dbSchema) {
4617
- const storesClone = {
4618
- ...DEXIE_CLOUD_SCHEMA,
4619
- ...stores,
4620
- };
4645
+ const storesClone = Object.assign(Object.assign({}, DEXIE_CLOUD_SCHEMA), stores);
4621
4646
  const cloudSchema = dexie.cloud.schema || (dexie.cloud.schema = {});
4622
4647
  const allPrefixes = new Set();
4623
4648
  Object.keys(storesClone).forEach(tableName => {
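`overrideParseStoresSpec` merges the addon's internal tables (`DEXIE_CLOUD_SCHEMA`, the `$`-prefixed tables seen throughout this file such as `$logins`, `$jobs` and `$syncState`) into whatever the application declared in `version().stores()`, and records per-table cloud metadata such as `idPrefix` and `markedForSync`. From the application side nothing changes; a typical declaration still looks like this (example app schema, using dexie-cloud's `@` prefix for generated global ids):

```js
import Dexie from 'dexie';
import dexieCloud from 'dexie-cloud-addon';

const db = new Dexie('todo-app', { addons: [dexieCloud] });

db.version(1).stores({
  // '@' asks dexie-cloud to generate a prefixed, globally unique primary key.
  todoLists: '@id, title',
  todoItems: '@id, todoListId, done',
  // The addon appends its own $-tables when parsing this spec,
  // so they never need to be declared here.
});
```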
@@ -4649,10 +4674,12 @@
4649
4674
  };
4650
4675
  }
4651
4676
 
4652
- async function performInitialSync(db, cloudOptions, cloudSchema) {
4653
- console.debug('Performing initial sync');
4654
- await sync(db, cloudOptions, cloudSchema, { isInitialSync: true });
4655
- console.debug('Done initial sync');
4677
+ function performInitialSync(db, cloudOptions, cloudSchema) {
4678
+ return __awaiter$1(this, void 0, void 0, function* () {
4679
+ console.debug('Performing initial sync');
4680
+ yield sync(db, cloudOptions, cloudSchema, { isInitialSync: true });
4681
+ console.debug('Done initial sync');
4682
+ });
4656
4683
  }
4657
4684
 
4658
4685
  const USER_INACTIVITY_TIMEOUT = 180000; // 3 minutes
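`performInitialSync` above is the first of many functions rewritten from native `async/await` to the tslib `__awaiter` helper driving a generator. The behavior is the same; each `yield` is awaited by the helper. A side-by-side sketch (here `sync` stands in for the addon's internal function and is assumed, not defined by this bundle excerpt):

```js
import { __awaiter } from 'tslib';

// Stand-in for the addon's internal sync() (assumed for this sketch):
const sync = async (db, opts, schema, flags) => { /* ... */ };

// Native form (previous output):
async function performInitialSyncModern(db, opts, schema) {
  await sync(db, opts, schema, { isInitialSync: true });
}

// Downleveled form (new output), same behavior:
function performInitialSyncDownleveled(db, opts, schema) {
  return __awaiter(this, void 0, void 0, function* () {
    yield sync(db, opts, schema, { isInitialSync: true });
  });
}
```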
@@ -4770,7 +4797,7 @@
4770
4797
  try {
4771
4798
  this.ws.close();
4772
4799
  }
4773
- catch { }
4800
+ catch (_a) { }
4774
4801
  }
4775
4802
  this.ws = null;
4776
4803
  if (this.messageProducerSubscription) {
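Bare `catch { }` (the ES2019 optional catch binding) is likewise rewritten to `catch (_a) { }` with an unused binding, since older parsers reject the binding-less form. The behavior is identical:

```js
function closeQuietly(ws) {
  try {
    ws.close();
  } catch (_ignored) {
    // equivalent to the modern `catch { }`: the error is deliberately dropped
  }
}
```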
@@ -4785,168 +4812,174 @@
4785
4812
  try {
4786
4813
  this.disconnect();
4787
4814
  }
4788
- catch { }
4815
+ catch (_a) { }
4789
4816
  this.connect()
4790
4817
  .catch(() => { })
4791
4818
  .then(() => (this.reconnecting = false)); // finally()
4792
4819
  }
4793
- async connect() {
4794
- this.lastServerActivity = new Date();
4795
- if (this.pauseUntil && this.pauseUntil > new Date()) {
4796
- console.debug('WS not reconnecting just yet', {
4797
- id: this.id,
4798
- pauseUntil: this.pauseUntil,
4799
- });
4800
- return;
4801
- }
4802
- if (this.ws) {
4803
- throw new Error(`Called connect() when a connection is already open`);
4804
- }
4805
- if (!this.databaseUrl)
4806
- throw new Error(`Cannot connect without a database URL`);
4807
- if (this.closed) {
4808
- //console.debug('SyncStatus: DUBB: Ooops it was closed!');
4809
- return;
4810
- }
4811
- if (this.tokenExpiration && this.tokenExpiration < new Date()) {
4812
- this.subscriber.error(new TokenExpiredError()); // Will be handled in connectWebSocket.ts.
4813
- return;
4814
- }
4815
- this.webSocketStatus.next('connecting');
4816
- this.pinger = setInterval(async () => {
4817
- if (this.closed) {
4818
- console.debug('pinger check', this.id, 'CLOSED.');
4819
- this.teardown();
4820
+ connect() {
4821
+ return __awaiter$1(this, void 0, void 0, function* () {
4822
+ this.lastServerActivity = new Date();
4823
+ if (this.pauseUntil && this.pauseUntil > new Date()) {
4824
+ console.debug('WS not reconnecting just yet', {
4825
+ id: this.id,
4826
+ pauseUntil: this.pauseUntil,
4827
+ });
4820
4828
  return;
4821
4829
  }
4822
4830
  if (this.ws) {
4823
- try {
4824
- this.ws.send(JSON.stringify({ type: 'ping' }));
4825
- setTimeout(() => {
4826
- console.debug('pinger setTimeout', this.id, this.pinger ? `alive` : 'dead');
4827
- if (!this.pinger)
4828
- return;
4829
- if (this.closed) {
4830
- console.debug('pinger setTimeout', this.id, 'subscription is closed');
4831
- this.teardown();
4832
- return;
4833
- }
4834
- if (this.lastServerActivity <
4835
- new Date(Date.now() - SERVER_PING_TIMEOUT)) {
4836
- // Server inactive. Reconnect if user is active.
4837
- console.debug('pinger: server is inactive');
4838
- console.debug('pinger reconnecting');
4839
- this.reconnect();
4840
- }
4841
- else {
4842
- console.debug('pinger: server still active');
4843
- }
4844
- }, SERVER_PING_TIMEOUT);
4845
- }
4846
- catch {
4847
- console.debug('pinger catch error', this.id, 'reconnecting');
4848
- this.reconnect();
4849
- }
4850
- }
4851
- else {
4852
- console.debug('pinger', this.id, 'reconnecting');
4853
- this.reconnect();
4831
+ throw new Error(`Called connect() when a connection is already open`);
4854
4832
  }
4855
- }, CLIENT_PING_INTERVAL);
4856
- // The following vars are needed because we must know which callback to ack when server sends it's ack to us.
4857
- const wsUrl = new URL(this.databaseUrl);
4858
- wsUrl.protocol = wsUrl.protocol === 'http:' ? 'ws' : 'wss';
4859
- const searchParams = new URLSearchParams();
4860
- if (this.subscriber.closed)
4861
- return;
4862
- searchParams.set('v', '2');
4863
- searchParams.set('rev', this.rev);
4864
- searchParams.set('realmsHash', this.realmSetHash);
4865
- searchParams.set('clientId', this.clientIdentity);
4866
- if (this.token) {
4867
- searchParams.set('token', this.token);
4868
- }
4869
- // Connect the WebSocket to given url:
4870
- console.debug('dexie-cloud WebSocket create');
4871
- const ws = (this.ws = new WebSocket(`${wsUrl}/changes?${searchParams}`));
4872
- //ws.binaryType = "arraybuffer"; // For future when subscribing to actual changes.
4873
- ws.onclose = (event) => {
4874
- if (!this.pinger)
4833
+ if (!this.databaseUrl)
4834
+ throw new Error(`Cannot connect without a database URL`);
4835
+ if (this.closed) {
4836
+ //console.debug('SyncStatus: DUBB: Ooops it was closed!');
4875
4837
  return;
4876
- console.debug('dexie-cloud WebSocket onclosed', this.id);
4877
- this.reconnect();
4878
- };
4879
- ws.onmessage = (event) => {
4880
- if (!this.pinger)
4838
+ }
4839
+ if (this.tokenExpiration && this.tokenExpiration < new Date()) {
4840
+ this.subscriber.error(new TokenExpiredError()); // Will be handled in connectWebSocket.ts.
4881
4841
  return;
4882
- console.debug('dexie-cloud WebSocket onmessage', event.data);
4883
- this.lastServerActivity = new Date();
4884
- try {
4885
- const msg = TSON.parse(event.data);
4886
- if (msg.type === 'error') {
4887
- throw new Error(`Error message from dexie-cloud: ${msg.error}`);
4842
+ }
4843
+ this.webSocketStatus.next('connecting');
4844
+ this.pinger = setInterval(() => __awaiter$1(this, void 0, void 0, function* () {
4845
+ if (this.closed) {
4846
+ console.debug('pinger check', this.id, 'CLOSED.');
4847
+ this.teardown();
4848
+ return;
4888
4849
  }
4889
- if (msg.type === 'rev') {
4890
- this.rev = msg.rev; // No meaning but seems reasonable.
4850
+ if (this.ws) {
4851
+ try {
4852
+ this.ws.send(JSON.stringify({ type: 'ping' }));
4853
+ setTimeout(() => {
4854
+ console.debug('pinger setTimeout', this.id, this.pinger ? `alive` : 'dead');
4855
+ if (!this.pinger)
4856
+ return;
4857
+ if (this.closed) {
4858
+ console.debug('pinger setTimeout', this.id, 'subscription is closed');
4859
+ this.teardown();
4860
+ return;
4861
+ }
4862
+ if (this.lastServerActivity <
4863
+ new Date(Date.now() - SERVER_PING_TIMEOUT)) {
4864
+ // Server inactive. Reconnect if user is active.
4865
+ console.debug('pinger: server is inactive');
4866
+ console.debug('pinger reconnecting');
4867
+ this.reconnect();
4868
+ }
4869
+ else {
4870
+ console.debug('pinger: server still active');
4871
+ }
4872
+ }, SERVER_PING_TIMEOUT);
4873
+ }
4874
+ catch (_a) {
4875
+ console.debug('pinger catch error', this.id, 'reconnecting');
4876
+ this.reconnect();
4877
+ }
4891
4878
  }
4892
- if (msg.type !== 'pong') {
4893
- this.subscriber.next(msg);
4879
+ else {
4880
+ console.debug('pinger', this.id, 'reconnecting');
4881
+ this.reconnect();
4894
4882
  }
4883
+ }), CLIENT_PING_INTERVAL);
4884
+ // The following vars are needed because we must know which callback to ack when server sends it's ack to us.
4885
+ const wsUrl = new URL(this.databaseUrl);
4886
+ wsUrl.protocol = wsUrl.protocol === 'http:' ? 'ws' : 'wss';
4887
+ const searchParams = new URLSearchParams();
4888
+ if (this.subscriber.closed)
4889
+ return;
4890
+ searchParams.set('v', '2');
4891
+ searchParams.set('rev', this.rev);
4892
+ searchParams.set('realmsHash', this.realmSetHash);
4893
+ searchParams.set('clientId', this.clientIdentity);
4894
+ if (this.token) {
4895
+ searchParams.set('token', this.token);
4895
4896
  }
4896
- catch (e) {
4897
- this.subscriber.error(e);
4898
- }
4899
- };
4900
- try {
4901
- let everConnected = false;
4902
- await new Promise((resolve, reject) => {
4903
- ws.onopen = (event) => {
4904
- console.debug('dexie-cloud WebSocket onopen');
4905
- everConnected = true;
4906
- resolve(null);
4907
- };
4908
- ws.onerror = (event) => {
4909
- if (!everConnected) {
4910
- const error = event.error || new Error('WebSocket Error');
4911
- this.subscriber.error(error);
4912
- this.webSocketStatus.next('error');
4913
- reject(error);
4897
+ // Connect the WebSocket to given url:
4898
+ console.debug('dexie-cloud WebSocket create');
4899
+ const ws = (this.ws = new WebSocket(`${wsUrl}/changes?${searchParams}`));
4900
+ //ws.binaryType = "arraybuffer"; // For future when subscribing to actual changes.
4901
+ ws.onclose = (event) => {
4902
+ if (!this.pinger)
4903
+ return;
4904
+ console.debug('dexie-cloud WebSocket onclosed', this.id);
4905
+ this.reconnect();
4906
+ };
4907
+ ws.onmessage = (event) => {
4908
+ if (!this.pinger)
4909
+ return;
4910
+ console.debug('dexie-cloud WebSocket onmessage', event.data);
4911
+ this.lastServerActivity = new Date();
4912
+ try {
4913
+ const msg = TSON.parse(event.data);
4914
+ if (msg.type === 'error') {
4915
+ throw new Error(`Error message from dexie-cloud: ${msg.error}`);
4914
4916
  }
4915
- else {
4916
- this.reconnect();
4917
+ if (msg.type === 'rev') {
4918
+ this.rev = msg.rev; // No meaning but seems reasonable.
4917
4919
  }
4918
- };
4919
- });
4920
- this.messageProducerSubscription = this.messageProducer.subscribe((msg) => {
4921
- if (!this.closed) {
4922
- if (msg.type === 'ready' &&
4923
- this.webSocketStatus.value !== 'connected') {
4924
- this.webSocketStatus.next('connected');
4920
+ if (msg.type !== 'pong') {
4921
+ this.subscriber.next(msg);
4925
4922
  }
4926
- this.ws?.send(TSON.stringify(msg));
4927
4923
  }
4928
- });
4929
- }
4930
- catch (error) {
4931
- this.pauseUntil = new Date(Date.now() + FAIL_RETRY_WAIT_TIME);
4932
- }
4924
+ catch (e) {
4925
+ this.subscriber.error(e);
4926
+ }
4927
+ };
4928
+ try {
4929
+ let everConnected = false;
4930
+ yield new Promise((resolve, reject) => {
4931
+ ws.onopen = (event) => {
4932
+ console.debug('dexie-cloud WebSocket onopen');
4933
+ everConnected = true;
4934
+ resolve(null);
4935
+ };
4936
+ ws.onerror = (event) => {
4937
+ if (!everConnected) {
4938
+ const error = event.error || new Error('WebSocket Error');
4939
+ this.subscriber.error(error);
4940
+ this.webSocketStatus.next('error');
4941
+ reject(error);
4942
+ }
4943
+ else {
4944
+ this.reconnect();
4945
+ }
4946
+ };
4947
+ });
4948
+ this.messageProducerSubscription = this.messageProducer.subscribe((msg) => {
4949
+ var _a;
4950
+ if (!this.closed) {
4951
+ if (msg.type === 'ready' &&
4952
+ this.webSocketStatus.value !== 'connected') {
4953
+ this.webSocketStatus.next('connected');
4954
+ }
4955
+ (_a = this.ws) === null || _a === void 0 ? void 0 : _a.send(TSON.stringify(msg));
4956
+ }
4957
+ });
4958
+ }
4959
+ catch (error) {
4960
+ this.pauseUntil = new Date(Date.now() + FAIL_RETRY_WAIT_TIME);
4961
+ }
4962
+ });
4933
4963
  }
4934
4964
  }
4935
4965
 
4936
4966
  function sleep$1(ms) {
4937
4967
  return new Promise((resolve) => setTimeout(resolve, ms));
4938
4968
  }
4939
- async function waitAndReconnectWhenUserDoesSomething(error) {
4940
- console.error(`WebSocket observable: error but revive when user does some active thing...`, error);
4941
- // Sleep some seconds...
4942
- await sleep$1(3000);
4943
- // Wait til user does something (move mouse, tap, scroll, click etc)
4944
- console.debug('waiting for someone to do something');
4945
- await userDoesSomething.pipe(take(1)).toPromise();
4946
- console.debug('someone did something!');
4969
+ function waitAndReconnectWhenUserDoesSomething(error) {
4970
+ return __awaiter$1(this, void 0, void 0, function* () {
4971
+ console.error(`WebSocket observable: error but revive when user does some active thing...`, error);
4972
+ // Sleep some seconds...
4973
+ yield sleep$1(3000);
4974
+ // Wait til user does something (move mouse, tap, scroll, click etc)
4975
+ console.debug('waiting for someone to do something');
4976
+ yield userDoesSomething.pipe(take(1)).toPromise();
4977
+ console.debug('someone did something!');
4978
+ });
4947
4979
  }
4948
4980
  function connectWebSocket(db) {
4949
- if (!db.cloud.options?.databaseUrl) {
4981
+ var _a;
4982
+ if (!((_a = db.cloud.options) === null || _a === void 0 ? void 0 : _a.databaseUrl)) {
4950
4983
  throw new Error(`No database URL to connect WebSocket to`);
4951
4984
  }
4952
4985
  const messageProducer = db.messageConsumer.readyToServe.pipe(filter((isReady) => isReady), // When consumer is ready for new messages, produce such a message to inform server about it
@@ -4958,27 +4991,27 @@
4958
4991
  rev: syncState.serverRevision,
4959
4992
  })));
4960
4993
  function createObservable() {
4961
- return db.cloud.persistedSyncState.pipe(filter((syncState) => syncState?.serverRevision), // Don't connect before there's no initial sync performed.
4994
+ return db.cloud.persistedSyncState.pipe(filter((syncState) => syncState === null || syncState === void 0 ? void 0 : syncState.serverRevision), // Don't connect before there's no initial sync performed.
4962
4995
  take(1), // Don't continue waking up whenever syncState change
4963
- switchMap((syncState) => db.cloud.currentUser.pipe(map((userLogin) => [userLogin, syncState]))), switchMap(([userLogin, syncState]) => userIsReallyActive.pipe(map((isActive) => [isActive ? userLogin : null, syncState]))), switchMap(async ([userLogin, syncState]) => [userLogin, await computeRealmSetHash(syncState)]), switchMap(([userLogin, realmSetHash]) =>
4996
+ switchMap((syncState) => db.cloud.currentUser.pipe(map((userLogin) => [userLogin, syncState]))), switchMap(([userLogin, syncState]) => userIsReallyActive.pipe(map((isActive) => [isActive ? userLogin : null, syncState]))), switchMap(([userLogin, syncState]) => __awaiter$1(this, void 0, void 0, function* () { return [userLogin, yield computeRealmSetHash(syncState)]; })), switchMap(([userLogin, realmSetHash]) =>
4964
4997
  // Let server end query changes from last entry of same client-ID and forward.
4965
4998
  // If no new entries, server won't bother the client. If new entries, server sends only those
4966
4999
  // and the baseRev of the last from same client-ID.
4967
5000
  userLogin
4968
5001
  ? new WSObservable(db.cloud.options.databaseUrl, db.cloud.persistedSyncState.value.serverRevision, realmSetHash, db.cloud.persistedSyncState.value.clientIdentity, messageProducer, db.cloud.webSocketStatus, userLogin.accessToken, userLogin.accessTokenExpiration)
4969
5002
  : rxjs.from([])), catchError((error) => {
4970
- if (error?.name === 'TokenExpiredError') {
5003
+ if ((error === null || error === void 0 ? void 0 : error.name) === 'TokenExpiredError') {
4971
5004
  console.debug('WebSocket observable: Token expired. Refreshing token...');
4972
- return rxjs.of(true).pipe(switchMap(async () => {
5005
+ return rxjs.of(true).pipe(switchMap(() => __awaiter$1(this, void 0, void 0, function* () {
4973
5006
  // Refresh access token
4974
- const user = await db.getCurrentUser();
4975
- const refreshedLogin = await refreshAccessToken(db.cloud.options.databaseUrl, user);
5007
+ const user = yield db.getCurrentUser();
5008
+ const refreshedLogin = yield refreshAccessToken(db.cloud.options.databaseUrl, user);
4976
5009
  // Persist updated access token
4977
- await db.table('$logins').update(user.userId, {
5010
+ yield db.table('$logins').update(user.userId, {
4978
5011
  accessToken: refreshedLogin.accessToken,
4979
5012
  accessTokenExpiration: refreshedLogin.accessTokenExpiration,
4980
5013
  });
4981
- }), switchMap(() => createObservable()));
5014
+ })), switchMap(() => createObservable()));
4982
5015
  }
4983
5016
  else {
4984
5017
  return rxjs.throwError(error);
@@ -5000,10 +5033,13 @@
5000
5033
  });
5001
5034
  }
5002
5035
 
5003
- async function isSyncNeeded(db) {
5004
- return db.cloud.options?.databaseUrl && db.cloud.schema
5005
- ? await sync(db, db.cloud.options, db.cloud.schema, { justCheckIfNeeded: true })
5006
- : false;
5036
+ function isSyncNeeded(db) {
5037
+ var _a;
5038
+ return __awaiter$1(this, void 0, void 0, function* () {
5039
+ return ((_a = db.cloud.options) === null || _a === void 0 ? void 0 : _a.databaseUrl) && db.cloud.schema
5040
+ ? yield sync(db, db.cloud.options, db.cloud.schema, { justCheckIfNeeded: true })
5041
+ : false;
5042
+ });
5007
5043
  }
5008
5044
 
5009
5045
  const SECONDS = 1000;
@@ -5013,93 +5049,97 @@
5013
5049
 
5014
5050
  const GUARDED_JOB_HEARTBEAT = 1 * SECONDS;
5015
5051
  const GUARDED_JOB_TIMEOUT = 1 * MINUTES;
5016
- async function performGuardedJob(db, jobName, jobsTableName, job, { awaitRemoteJob } = {}) {
5017
- // Start working.
5018
- //
5019
- // Check if someone else is working on this already.
5020
- //
5021
- const jobsTable = db.table(jobsTableName);
5022
- async function aquireLock() {
5023
- const gotTheLock = await db.transaction('rw!', jobsTableName, async () => {
5024
- const currentWork = await jobsTable.get(jobName);
5025
- if (!currentWork) {
5026
- // No one else is working. Let's record that we are.
5027
- await jobsTable.add({
5028
- nodeId: myId,
5029
- started: new Date(),
5030
- heartbeat: new Date()
5031
- }, jobName);
5032
- return true;
5033
- }
5034
- else if (currentWork.heartbeat.getTime() <
5035
- Date.now() - GUARDED_JOB_TIMEOUT) {
5036
- console.warn(`Latest ${jobName} worker seem to have died.\n`, `The dead job started:`, currentWork.started, `\n`, `Last heart beat was:`, currentWork.heartbeat, '\n', `We're now taking over!`);
5037
- // Now, take over!
5038
- await jobsTable.put({
5039
- nodeId: myId,
5040
- started: new Date(),
5041
- heartbeat: new Date()
5042
- }, jobName);
5043
- return true;
5044
- }
5045
- return false;
5046
- });
5047
- if (gotTheLock)
5048
- return true;
5049
- // Someone else took the job.
5050
- if (awaitRemoteJob) {
5051
- try {
5052
- const jobDoneObservable = rxjs.from(Dexie.liveQuery(() => jobsTable.get(jobName))).pipe(timeout(GUARDED_JOB_TIMEOUT), filter((job) => !job)); // Wait til job is not there anymore.
5053
- await jobDoneObservable.toPromise();
5054
- return false;
5055
- }
5056
- catch (err) {
5057
- if (err.name !== 'TimeoutError') {
5058
- throw err;
5059
- }
5060
- // Timeout stopped us! Try aquire the lock now.
5061
- // It will likely succeed this time unless
5062
- // another client took it.
5063
- return await aquireLock();
5064
- }
5065
- }
5066
- return false;
5067
- }
5068
- if (await aquireLock()) {
5069
- // We own the lock entry and can do our job undisturbed.
5070
- // We're not within a transaction, but these type of locks
5071
- // spans over transactions.
5072
- // Start our heart beat during the job.
5073
- // Use setInterval to make sure we are updating heartbeat even during long-lived fetch calls.
5074
- const heartbeat = setInterval(() => {
5075
- jobsTable.update(jobName, (job) => {
5076
- if (job.nodeId === myId) {
5077
- job.heartbeat = new Date();
5052
+ function performGuardedJob(db, jobName, jobsTableName, job, { awaitRemoteJob } = {}) {
5053
+ return __awaiter$1(this, void 0, void 0, function* () {
5054
+ // Start working.
5055
+ //
5056
+ // Check if someone else is working on this already.
5057
+ //
5058
+ const jobsTable = db.table(jobsTableName);
5059
+ function aquireLock() {
5060
+ return __awaiter$1(this, void 0, void 0, function* () {
5061
+ const gotTheLock = yield db.transaction('rw!', jobsTableName, () => __awaiter$1(this, void 0, void 0, function* () {
5062
+ const currentWork = yield jobsTable.get(jobName);
5063
+ if (!currentWork) {
5064
+ // No one else is working. Let's record that we are.
5065
+ yield jobsTable.add({
5066
+ nodeId: myId,
5067
+ started: new Date(),
5068
+ heartbeat: new Date()
5069
+ }, jobName);
5070
+ return true;
5071
+ }
5072
+ else if (currentWork.heartbeat.getTime() <
5073
+ Date.now() - GUARDED_JOB_TIMEOUT) {
5074
+ console.warn(`Latest ${jobName} worker seem to have died.\n`, `The dead job started:`, currentWork.started, `\n`, `Last heart beat was:`, currentWork.heartbeat, '\n', `We're now taking over!`);
5075
+ // Now, take over!
5076
+ yield jobsTable.put({
5077
+ nodeId: myId,
5078
+ started: new Date(),
5079
+ heartbeat: new Date()
5080
+ }, jobName);
5081
+ return true;
5082
+ }
5083
+ return false;
5084
+ }));
5085
+ if (gotTheLock)
5086
+ return true;
5087
+ // Someone else took the job.
5088
+ if (awaitRemoteJob) {
5089
+ try {
5090
+ const jobDoneObservable = rxjs.from(Dexie.liveQuery(() => jobsTable.get(jobName))).pipe(timeout(GUARDED_JOB_TIMEOUT), filter((job) => !job)); // Wait til job is not there anymore.
5091
+ yield jobDoneObservable.toPromise();
5092
+ return false;
5093
+ }
5094
+ catch (err) {
5095
+ if (err.name !== 'TimeoutError') {
5096
+ throw err;
5097
+ }
5098
+ // Timeout stopped us! Try aquire the lock now.
5099
+ // It will likely succeed this time unless
5100
+ // another client took it.
5101
+ return yield aquireLock();
5102
+ }
5078
5103
  }
5104
+ return false;
5079
5105
  });
5080
- }, GUARDED_JOB_HEARTBEAT);
5081
- try {
5082
- return await job();
5083
5106
  }
5084
- finally {
5085
- // Stop heartbeat
5086
- clearInterval(heartbeat);
5087
- // Remove the persisted job state:
5088
- await db.transaction('rw!', jobsTableName, async () => {
5089
- const currentWork = await jobsTable.get(jobName);
5090
- if (currentWork && currentWork.nodeId === myId) {
5091
- await jobsTable.delete(jobName);
5092
- }
5093
- });
5107
+ if (yield aquireLock()) {
5108
+ // We own the lock entry and can do our job undisturbed.
5109
+ // We're not within a transaction, but these type of locks
5110
+ // spans over transactions.
5111
+ // Start our heart beat during the job.
5112
+ // Use setInterval to make sure we are updating heartbeat even during long-lived fetch calls.
5113
+ const heartbeat = setInterval(() => {
5114
+ jobsTable.update(jobName, (job) => {
5115
+ if (job.nodeId === myId) {
5116
+ job.heartbeat = new Date();
5117
+ }
5118
+ });
5119
+ }, GUARDED_JOB_HEARTBEAT);
5120
+ try {
5121
+ return yield job();
5122
+ }
5123
+ finally {
5124
+ // Stop heartbeat
5125
+ clearInterval(heartbeat);
5126
+ // Remove the persisted job state:
5127
+ yield db.transaction('rw!', jobsTableName, () => __awaiter$1(this, void 0, void 0, function* () {
5128
+ const currentWork = yield jobsTable.get(jobName);
5129
+ if (currentWork && currentWork.nodeId === myId) {
5130
+ yield jobsTable.delete(jobName);
5131
+ }
5132
+ }));
5133
+ }
5094
5134
  }
5095
- }
5135
+ });
5096
5136
  }
5097
5137
 
5098
5138
  const ongoingSyncs = new WeakMap();
5099
5139
  function syncIfPossible(db, cloudOptions, cloudSchema, options) {
5100
5140
  const ongoing = ongoingSyncs.get(db);
5101
5141
  if (ongoing) {
5102
- if (ongoing.pull || options?.purpose === 'push') {
5142
+ if (ongoing.pull || (options === null || options === void 0 ? void 0 : options.purpose) === 'push') {
5103
5143
  console.debug('syncIfPossible(): returning the ongoing sync promise.');
5104
5144
  return ongoing.promise;
5105
5145
  }
@@ -5141,32 +5181,37 @@
5141
5181
  }
5142
5182
  }
5143
5183
  const promise = _syncIfPossible();
5144
- ongoingSyncs.set(db, { promise, pull: options?.purpose !== 'push' });
5184
+ ongoingSyncs.set(db, { promise, pull: (options === null || options === void 0 ? void 0 : options.purpose) !== 'push' });
5145
5185
  return promise;
5146
- async function _syncIfPossible() {
5147
- try {
5148
- if (db.cloud.usingServiceWorker) {
5149
- if (IS_SERVICE_WORKER) {
5150
- await sync(db, cloudOptions, cloudSchema, options);
5186
+ function _syncIfPossible() {
5187
+ return __awaiter$1(this, void 0, void 0, function* () {
5188
+ try {
5189
+ if (db.cloud.isServiceWorkerDB) {
5190
+ // We are the dedicated sync SW:
5191
+ yield sync(db, cloudOptions, cloudSchema, options);
5151
5192
  }
5193
+ else if (!db.cloud.usingServiceWorker) {
5194
+ // We use a flow that is better suited for the case when multiple workers want to
5195
+ // do the same thing.
5196
+ yield performGuardedJob(db, CURRENT_SYNC_WORKER, '$jobs', () => sync(db, cloudOptions, cloudSchema, options));
5197
+ }
5198
+ else {
5199
+ assert(false);
5200
+ throw new Error('Internal _syncIfPossible() - invalid precondition - should not have been called.');
5201
+ }
5202
+ ongoingSyncs.delete(db);
5203
+ console.debug('Done sync');
5152
5204
  }
5153
- else {
5154
- // We use a flow that is better suited for the case when multiple workers want to
5155
- // do the same thing.
5156
- await performGuardedJob(db, CURRENT_SYNC_WORKER, '$jobs', () => sync(db, cloudOptions, cloudSchema, options));
5205
+ catch (error) {
5206
+ ongoingSyncs.delete(db);
5207
+ console.error(`Failed to sync client changes`, error);
5208
+ throw error; // Make sure we rethrow error so that sync event is retried.
5209
+ // I don't think we should setTimout or so here.
5210
+ // Unless server tells us to in some response.
5211
+ // Then we could follow that advice but not by waiting here but by registering
5212
+ // Something that triggers an event listened to in startPushWorker()
5157
5213
  }
5158
- ongoingSyncs.delete(db);
5159
- console.debug('Done sync');
5160
- }
5161
- catch (error) {
5162
- ongoingSyncs.delete(db);
5163
- console.error(`Failed to sync client changes`, error);
5164
- throw error; // Make sure we rethrow error so that sync event is retried.
5165
- // I don't think we should setTimout or so here.
5166
- // Unless server tells us to in some response.
5167
- // Then we could follow that advice but not by waiting here but by registering
5168
- // Something that triggers an event listened to in startPushWorker()
5169
- }
5214
+ });
5170
5215
  }
5171
5216
  }
5172
5217
 
@@ -5236,8 +5281,9 @@
5236
5281
  }
5237
5282
 
5238
5283
  function verifySchema(db) {
5284
+ var _a, _b;
5239
5285
  for (const table of db.tables) {
5240
- if (db.cloud.schema?.[table.name]?.markedForSync) {
5286
+ if ((_b = (_a = db.cloud.schema) === null || _a === void 0 ? void 0 : _a[table.name]) === null || _b === void 0 ? void 0 : _b.markedForSync) {
5241
5287
  if (table.schema.primKey.auto) {
5242
5288
  throw new Dexie__default["default"].SchemaError(`Table ${table.name} is both autoIncremented and synced. ` +
5243
5289
  `Use db.cloud.configure({unsyncedTables: [${JSON.stringify(table.name)}]}) to blacklist it from sync`);
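`verifySchema` still rejects tables that are both auto-incremented and synced, and points users at `unsyncedTables`. For reference, the configuration surface read throughout this bundle looks like this from application code (all option names below appear in this file; `disableWebSocket` appears to be new in this version range, and the URL is a placeholder):

```js
// Example configuration matching the options this bundle reads:
db.cloud.configure({
  databaseUrl: 'https://YOUR-DB.dexie.cloud',   // placeholder URL
  requireAuth: true,               // force login during db.open()
  tryUseServiceWorker: true,       // prefer SW background sync when a registration exists
  unsyncedTables: ['localOnlyLog'],// keep an auto-incremented or local table out of sync
  // disableWebSocket: true,       // skip the live 'changes' socket entirely
});
```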
@@ -5314,8 +5360,8 @@
5314
5360
  }
5315
5361
  };
5316
5362
 
5317
- function Dialog({ children }) {
5318
- return (a$1("div", null,
5363
+ function Dialog({ children, className }) {
5364
+ return (a$1("div", { className: className },
5319
5365
  a$1("div", { style: Styles.Darken }),
5320
5366
  a$1("div", { style: Styles.DialogOuter },
5321
5367
  a$1("div", { style: Styles.DialogInner }, children))));
@@ -5330,8 +5376,8 @@
5330
5376
  function LoginDialog({ title, alerts, fields, onCancel, onSubmit, }) {
5331
5377
  const [params, setParams] = l({});
5332
5378
  const firstFieldRef = s();
5333
- h(() => firstFieldRef.current?.focus(), []);
5334
- return (a$1(Dialog, null,
5379
+ h(() => { var _a; return (_a = firstFieldRef.current) === null || _a === void 0 ? void 0 : _a.focus(); }, []);
5380
+ return (a$1(Dialog, { className: "dxc-login-dlg" },
5335
5381
  a$1(y, null,
5336
5382
  a$1("h3", { style: Styles.WindowHeader }, title),
5337
5383
  alerts.map((alert) => (a$1("p", { style: Styles.Alert[alert.type] }, resolveText(alert)))),
@@ -5340,7 +5386,7 @@
5340
5386
  onSubmit(params);
5341
5387
  } }, Object.entries(fields).map(([fieldName, { type, label, placeholder }], idx) => (a$1("label", { style: Styles.Label },
5342
5388
  label ? `${label}: ` : '',
5343
- a$1("input", { ref: idx === 0 ? firstFieldRef : undefined, type: type, name: fieldName, autoComplete: "on", style: Styles.Input, autoFocus: true, placeholder: placeholder, value: params[fieldName] || '', onInput: (ev) => setParams({ ...params, [fieldName]: valueTransformer(type, ev.target?.['value']) }) })))))),
5389
+ a$1("input", { ref: idx === 0 ? firstFieldRef : undefined, type: type, name: fieldName, autoComplete: "on", style: Styles.Input, autoFocus: true, placeholder: placeholder, value: params[fieldName] || '', onInput: (ev) => { var _a; return setParams(Object.assign(Object.assign({}, params), { [fieldName]: valueTransformer(type, (_a = ev.target) === null || _a === void 0 ? void 0 : _a['value']) })); } })))))),
5344
5390
  a$1("div", { style: Styles.ButtonsDiv },
5345
5391
  a$1("button", { type: "submit", style: Styles.Button, onClick: () => onSubmit(params) }, "Submit"),
5346
5392
  a$1("button", { style: Styles.Button, onClick: onCancel }, "Cancel"))));
@@ -5372,7 +5418,7 @@
5372
5418
  if (!userInteraction)
5373
5419
  return null;
5374
5420
  //if (props.db.cloud.userInteraction.observers.length > 1) return null; // Someone else subscribes.
5375
- return a$1(LoginDialog, { ...userInteraction });
5421
+ return a$1(LoginDialog, Object.assign({}, userInteraction));
5376
5422
  }
5377
5423
  }
5378
5424
  function setupDefaultGUI(db) {
@@ -5558,7 +5604,7 @@
5558
5604
  if (permissions.length === 0)
5559
5605
  return {};
5560
5606
  const reduced = permissions.reduce((result, next) => {
5561
- const ret = { ...result };
5607
+ const ret = Object.assign({}, result);
5562
5608
  for (const [verb, rights] of Object.entries(next)) {
5563
5609
  if (verb in ret && ret[verb]) {
5564
5610
  if (ret[verb] === '*')
@@ -5640,14 +5686,11 @@
5640
5686
  .map((role) => globalRoles[role])
5641
5687
  .filter((role) => role)
5642
5688
  .map((role) => role.permissions);
5643
- return {
5644
- ...realm,
5645
- permissions: realm.owner === userId
5689
+ return Object.assign(Object.assign({}, realm), { permissions: realm.owner === userId
5646
5690
  ? { manage: '*' }
5647
- : mergePermissions(...directPermissionSets, ...rolePermissionSets),
5648
- };
5691
+ : mergePermissions(...directPermissionSets, ...rolePermissionSets) });
5649
5692
  })
5650
- .reduce((p, c) => ({ ...p, [c.realmId]: c }), {
5693
+ .reduce((p, c) => (Object.assign(Object.assign({}, p), { [c.realmId]: c })), {
5651
5694
  [userId]: {
5652
5695
  realmId: userId,
5653
5696
  owner: userId,
@@ -5666,47 +5709,50 @@
5666
5709
  this.isOwner = isOwner;
5667
5710
  }
5668
5711
  add(...tableNames) {
5712
+ var _a;
5669
5713
  // If user can manage the whole realm, return true.
5670
5714
  if (this.permissions.manage === '*')
5671
5715
  return true;
5672
5716
  // If user can manage given table in realm, return true
5673
- if (this.permissions.manage?.includes(this.tableName))
5717
+ if ((_a = this.permissions.manage) === null || _a === void 0 ? void 0 : _a.includes(this.tableName))
5674
5718
  return true;
5675
5719
  // If user can add any type, return true
5676
5720
  if (this.permissions.add === '*')
5677
5721
  return true;
5678
5722
  // If user can add objects into given table names in the realm, return true
5679
- if (tableNames.every((tableName) => this.permissions.add?.includes(tableName))) {
5723
+ if (tableNames.every((tableName) => { var _a; return (_a = this.permissions.add) === null || _a === void 0 ? void 0 : _a.includes(tableName); })) {
5680
5724
  return true;
5681
5725
  }
5682
5726
  return false;
5683
5727
  }
5684
5728
  update(...props) {
5729
+ var _a, _b;
5685
5730
  // If user is owner of this object, or if user can manage the whole realm, return true.
5686
5731
  if (this.isOwner || this.permissions.manage === '*')
5687
5732
  return true;
5688
5733
  // If user can manage given table in realm, return true
5689
- if (this.permissions.manage?.includes(this.tableName))
5734
+ if ((_a = this.permissions.manage) === null || _a === void 0 ? void 0 : _a.includes(this.tableName))
5690
5735
  return true;
5691
5736
  // If user can update any prop in any table in this realm, return true unless
5692
5737
  // it regards to ownership change:
5693
5738
  if (this.permissions.update === '*') {
5694
5739
  return props.every((prop) => prop !== 'owner');
5695
5740
  }
5696
- const tablePermissions = this.permissions.update?.[this.tableName];
5741
+ const tablePermissions = (_b = this.permissions.update) === null || _b === void 0 ? void 0 : _b[this.tableName];
5697
5742
  // If user can update any prop in table and realm, return true unless
5698
5743
  // accessing special props owner or realmId
5699
5744
  if (tablePermissions === '*')
5700
5745
  return props.every((prop) => prop !== 'owner');
5701
5746
  // Explicitely listed properties to allow updates on:
5702
- return props.every((prop) => tablePermissions?.some((permittedProp) => permittedProp === prop || (permittedProp === '*' && prop !== 'owner')));
5747
+ return props.every((prop) => tablePermissions === null || tablePermissions === void 0 ? void 0 : tablePermissions.some((permittedProp) => permittedProp === prop || (permittedProp === '*' && prop !== 'owner')));
5703
5748
  }
5704
5749
  delete() {
5750
+ var _a;
5705
5751
  // If user is owner of this object, or if user can manage the whole realm, return true.
5706
5752
  if (this.isOwner || this.permissions.manage === '*')
5707
5753
  return true;
5708
5754
  // If user can manage given table in realm, return true
5709
- if (this.permissions.manage?.includes(this.tableName))
5755
+ if ((_a = this.permissions.manage) === null || _a === void 0 ? void 0 : _a.includes(this.tableName))
5710
5756
  return true;
5711
5757
  return false;
5712
5758
  }
@@ -5742,7 +5788,7 @@
5742
5788
  const permissions = getPermissionsLookupObservable(db._novip);
5743
5789
  const accessControl = getInternalAccessControlObservable(db._novip);
5744
5790
  return createSharedValueObservable(rxjs.combineLatest([membersByEmail, accessControl, permissions]).pipe(rxjs.map(([membersByEmail, accessControl, realmLookup]) => {
5745
- const reducer = (result, m) => ({ ...result, [m.id]: { ...m, realm: realmLookup[m.realmId] } });
5791
+ const reducer = (result, m) => (Object.assign(Object.assign({}, result), { [m.id]: Object.assign(Object.assign({}, m), { realm: realmLookup[m.realmId] }) }));
5746
5792
  const emailMembersById = membersByEmail.reduce(reducer, {});
5747
5793
  const membersById = accessControl.selfMembers.reduce(reducer, emailMembersById);
5748
5794
  return Object.values(membersById).filter(m => !m.accepted);
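The invites observable above combines member rows addressed to the current user's email with the access-control snapshot, attaches the resolved realm to each member, and keeps only memberships that have not been accepted yet. From an app it is consumed as a plain observable:

```js
// Example: listing pending realm invitations for the signed-in user.
const subscription = db.cloud.invites.subscribe((invites) => {
  for (const invite of invites) {
    console.log('invited to realm', invite.realmId, invite.realm);
  }
});
// Call subscription.unsubscribe() when the view goes away.
```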
@@ -5762,15 +5808,15 @@
5762
5808
  let configuredProgramatically = false;
5763
5809
  // local sync worker - used when there's no service worker.
5764
5810
  let localSyncWorker = null;
5765
- dexie.on('ready', async (dexie) => {
5811
+ dexie.on('ready', (dexie) => __awaiter$1(this, void 0, void 0, function* () {
5766
5812
  try {
5767
- await onDbReady(dexie);
5813
+ yield onDbReady(dexie);
5768
5814
  }
5769
5815
  catch (error) {
5770
5816
  console.error(error);
5771
5817
  // Make sure to succeed with database open even if network is down.
5772
5818
  }
5773
- }, true // true = sticky
5819
+ }), true // true = sticky
5774
5820
  );
5775
5821
  /** Void starting subscribers after a close has happened. */
5776
5822
  let closed = false;
@@ -5786,8 +5832,8 @@
5786
5832
  currentUserEmitter.next(UNAUTHORIZED_USER);
5787
5833
  });
5788
5834
  dexie.cloud = {
5789
- version: '4.0.0-beta.17',
5790
- options: { ...DEFAULT_OPTIONS },
5835
+ version: '4.0.0-beta.20',
5836
+ options: Object.assign({}, DEFAULT_OPTIONS),
5791
5837
  schema: null,
5792
5838
  serverState: null,
5793
5839
  get currentUserId() {
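The hunks below rewrite `login()` and `sync()` into the `__awaiter` form; the public `db.cloud` surface itself is unchanged. Typical usage of the members assembled in this object (a sketch; field names on the emitted values are only lightly shown in this file):

```js
// Example usage of the db.cloud surface configured above:
await db.open();

// Interactive login (resolves immediately if already authenticated):
await db.cloud.login();

// Reactive state exposed as rx BehaviorSubjects:
db.cloud.currentUser.subscribe((user) => console.log('user', user.userId, user.isLoggedIn));
db.cloud.syncState.subscribe((state) => console.log('sync state', state));

// Ask for an explicit round trip and wait for it:
await db.cloud.sync({ wait: true, purpose: 'pull' });
```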
@@ -5801,15 +5847,17 @@
5801
5847
  persistedSyncState: new rxjs.BehaviorSubject(undefined),
5802
5848
  userInteraction: new rxjs.BehaviorSubject(undefined),
5803
5849
  webSocketStatus: new rxjs.BehaviorSubject('not-started'),
5804
- async login(hint) {
5805
- const db = DexieCloudDB(dexie);
5806
- await db.cloud.sync();
5807
- await login(db, hint);
5850
+ login(hint) {
5851
+ return __awaiter$1(this, void 0, void 0, function* () {
5852
+ const db = DexieCloudDB(dexie);
5853
+ yield db.cloud.sync();
5854
+ yield login(db, hint);
5855
+ });
5808
5856
  },
5809
5857
  invites: getInvitesObservable(dexie),
5810
5858
  roles: getGlobalRolesObservable(dexie),
5811
5859
  configure(options) {
5812
- options = dexie.cloud.options = { ...dexie.cloud.options, ...options };
5860
+ options = dexie.cloud.options = Object.assign(Object.assign({}, dexie.cloud.options), options);
5813
5861
  configuredProgramatically = true;
5814
5862
  if (options.databaseUrl && options.nameSuffix) {
5815
5863
  // @ts-ignore
@@ -5818,41 +5866,43 @@
5818
5866
  }
5819
5867
  updateSchemaFromOptions(dexie.cloud.schema, dexie.cloud.options);
5820
5868
  },
5821
- async sync({ wait, purpose } = { wait: true, purpose: 'push' }) {
5822
- if (wait === undefined)
5823
- wait = true;
5824
- const db = DexieCloudDB(dexie);
5825
- if (purpose === 'pull') {
5826
- const syncState = db.cloud.persistedSyncState.value;
5827
- triggerSync(db, purpose);
5828
- if (wait) {
5829
- const newSyncState = await db.cloud.persistedSyncState
5830
- .pipe(filter((newSyncState) => newSyncState?.timestamp != null &&
5831
- (!syncState || newSyncState.timestamp > syncState.timestamp)), take(1))
5832
- .toPromise();
5833
- if (newSyncState?.error) {
5834
- throw new Error(`Sync error: ` + newSyncState.error);
5869
+ sync({ wait, purpose } = { wait: true, purpose: 'push' }) {
5870
+ return __awaiter$1(this, void 0, void 0, function* () {
5871
+ if (wait === undefined)
5872
+ wait = true;
5873
+ const db = DexieCloudDB(dexie);
5874
+ if (purpose === 'pull') {
5875
+ const syncState = db.cloud.persistedSyncState.value;
5876
+ triggerSync(db, purpose);
5877
+ if (wait) {
5878
+ const newSyncState = yield db.cloud.persistedSyncState
5879
+ .pipe(filter((newSyncState) => (newSyncState === null || newSyncState === void 0 ? void 0 : newSyncState.timestamp) != null &&
5880
+ (!syncState || newSyncState.timestamp > syncState.timestamp)), take(1))
5881
+ .toPromise();
5882
+ if (newSyncState === null || newSyncState === void 0 ? void 0 : newSyncState.error) {
5883
+ throw new Error(`Sync error: ` + newSyncState.error);
5884
+ }
5835
5885
  }
5836
5886
  }
5837
- }
5838
- else if (await isSyncNeeded(db)) {
5839
- const syncState = db.cloud.persistedSyncState.value;
5840
- triggerSync(db, purpose);
5841
- if (wait) {
5842
- console.debug('db.cloud.login() is waiting for sync completion...');
5843
- await rxjs.from(Dexie.liveQuery(async () => {
5844
- const syncNeeded = await isSyncNeeded(db);
5845
- const newSyncState = await db.getPersistedSyncState();
5846
- if (newSyncState?.timestamp !== syncState?.timestamp &&
5847
- newSyncState?.error)
5848
- throw new Error(`Sync error: ` + newSyncState.error);
5849
- return syncNeeded;
5850
- }))
5851
- .pipe(filter((isNeeded) => !isNeeded), take(1))
5852
- .toPromise();
5853
- console.debug('Done waiting for sync completion because we have nothing to push anymore');
5887
+ else if (yield isSyncNeeded(db)) {
5888
+ const syncState = db.cloud.persistedSyncState.value;
5889
+ triggerSync(db, purpose);
5890
+ if (wait) {
5891
+ console.debug('db.cloud.login() is waiting for sync completion...');
5892
+ yield rxjs.from(Dexie.liveQuery(() => __awaiter$1(this, void 0, void 0, function* () {
5893
+ const syncNeeded = yield isSyncNeeded(db);
5894
+ const newSyncState = yield db.getPersistedSyncState();
5895
+ if ((newSyncState === null || newSyncState === void 0 ? void 0 : newSyncState.timestamp) !== (syncState === null || syncState === void 0 ? void 0 : syncState.timestamp) &&
5896
+ (newSyncState === null || newSyncState === void 0 ? void 0 : newSyncState.error))
5897
+ throw new Error(`Sync error: ` + newSyncState.error);
5898
+ return syncNeeded;
5899
+ })))
5900
+ .pipe(filter((isNeeded) => !isNeeded), take(1))
5901
+ .toPromise();
5902
+ console.debug('Done waiting for sync completion because we have nothing to push anymore');
5903
+ }
5854
5904
  }
5855
- }
5905
+ });
5856
5906
  },
5857
5907
  permissions(obj, tableName) {
5858
5908
  return permissions(dexie._novip, obj, tableName);
@@ -5864,7 +5914,8 @@
5864
5914
  return generateKey(dexie.cloud.schema[this.name].idPrefix || '', shardKey);
5865
5915
  };
5866
5916
  dexie.Table.prototype.idPrefix = function () {
5867
- return this.db.cloud.schema?.[this.name]?.idPrefix || '';
5917
+ var _a, _b;
5918
+ return ((_b = (_a = this.db.cloud.schema) === null || _a === void 0 ? void 0 : _a[this.name]) === null || _b === void 0 ? void 0 : _b.idPrefix) || '';
5868
5919
  };
5869
5920
  dexie.use(createMutationTrackingMiddleware({
5870
5921
  currentUserObservable: dexie.cloud.currentUser,
@@ -5872,163 +5923,170 @@
5872
5923
  }));
5873
5924
  dexie.use(createImplicitPropSetterMiddleware(DexieCloudDB(dexie)));
5874
5925
  dexie.use(createIdGenerationMiddleware(DexieCloudDB(dexie)));
5875
- async function onDbReady(dexie) {
5876
- closed = false; // As Dexie calls us, we are not closed anymore. Maybe reopened? Remember db.ready event is registered with sticky flag!
5877
- const db = DexieCloudDB(dexie);
5878
- // Setup default GUI:
5879
- if (!IS_SERVICE_WORKER) {
5880
- if (!db.cloud.options?.customLoginGui) {
5881
- subscriptions.push(setupDefaultGUI(dexie));
5882
- }
5883
- subscriptions.push(computeSyncState(db).subscribe(dexie.cloud.syncState));
5884
- }
5885
- //verifyConfig(db.cloud.options); Not needed (yet at least!)
5886
- // Verify the user has allowed version increment.
5887
- if (!db.tables.every((table) => table.core)) {
5888
- throwVersionIncrementNeeded();
5889
- }
5890
- const swRegistrations = 'serviceWorker' in navigator
5891
- ? await navigator.serviceWorker.getRegistrations()
5892
- : [];
5893
- const initiallySynced = await db.transaction('rw', db.$syncState, async () => {
5894
- const { options, schema } = db.cloud;
5895
- const [persistedOptions, persistedSchema, persistedSyncState] = await Promise.all([
5896
- db.getOptions(),
5897
- db.getSchema(),
5898
- db.getPersistedSyncState(),
5899
- ]);
5900
- if (!configuredProgramatically) {
5901
- // Options not specified programatically (use case for SW!)
5902
- // Take persisted options:
5903
- db.cloud.options = persistedOptions || null;
5904
- }
5905
- else if (!persistedOptions ||
5906
- JSON.stringify(persistedOptions) !== JSON.stringify(options)) {
5907
- // Update persisted options:
5908
- if (!options)
5909
- throw new Error(`Internal error`); // options cannot be null if configuredProgramatically is set.
5910
- await db.$syncState.put(options, 'options');
5911
- }
5912
- if (db.cloud.options?.tryUseServiceWorker &&
5913
- 'serviceWorker' in navigator &&
5914
- swRegistrations.length > 0 &&
5915
- !DISABLE_SERVICEWORKER_STRATEGY) {
5916
- // * Configured for using service worker if available.
5917
- // * Browser supports service workers
5918
- // * There are at least one service worker registration
5919
- console.debug('Dexie Cloud Addon: Using service worker');
5920
- db.cloud.usingServiceWorker = true;
5926
+ function onDbReady(dexie) {
5927
+ var _a, _b, _c, _d, _e, _f, _g;
5928
+ return __awaiter$1(this, void 0, void 0, function* () {
5929
+ closed = false; // As Dexie calls us, we are not closed anymore. Maybe reopened? Remember db.ready event is registered with sticky flag!
5930
+ const db = DexieCloudDB(dexie);
5931
+ // Setup default GUI:
5932
+ if (typeof window !== 'undefined' && typeof document !== 'undefined') {
5933
+ if (!((_a = db.cloud.options) === null || _a === void 0 ? void 0 : _a.customLoginGui)) {
5934
+ subscriptions.push(setupDefaultGUI(dexie));
5935
+ }
5921
5936
  }
5922
- else {
5923
- // Not configured for using service worker or no service worker
5924
- // registration exists. Don't rely on service worker to do any job.
5925
- // Use LocalSyncWorker instead.
5926
- if (db.cloud.options?.tryUseServiceWorker && !IS_SERVICE_WORKER) {
5927
- console.debug('dexie-cloud-addon: Not using service worker.', swRegistrations.length === 0
5928
- ? 'No SW registrations found.'
5929
- : 'serviceWorker' in navigator && DISABLE_SERVICEWORKER_STRATEGY
5930
- ? 'Avoiding SW background sync and SW periodic bg sync for this browser due to browser bugs.'
5931
- : 'navigator.serviceWorker not present');
5937
+ if (!db.cloud.isServiceWorkerDB) {
5938
+ subscriptions.push(computeSyncState(db).subscribe(dexie.cloud.syncState));
5939
+ }
5940
+ //verifyConfig(db.cloud.options); Not needed (yet at least!)
5941
+ // Verify the user has allowed version increment.
5942
+ if (!db.tables.every((table) => table.core)) {
5943
+ throwVersionIncrementNeeded();
5944
+ }
5945
+ const swRegistrations = 'serviceWorker' in navigator
5946
+ ? yield navigator.serviceWorker.getRegistrations()
5947
+ : [];
5948
+ const initiallySynced = yield db.transaction('rw', db.$syncState, () => __awaiter$1(this, void 0, void 0, function* () {
5949
+ var _h, _j;
5950
+ const { options, schema } = db.cloud;
5951
+ const [persistedOptions, persistedSchema, persistedSyncState] = yield Promise.all([
5952
+ db.getOptions(),
5953
+ db.getSchema(),
5954
+ db.getPersistedSyncState(),
5955
+ ]);
5956
+ if (!configuredProgramatically) {
5957
+ // Options not specified programatically (use case for SW!)
5958
+ // Take persisted options:
5959
+ db.cloud.options = persistedOptions || null;
5960
+ }
5961
+ else if (!persistedOptions ||
5962
+ JSON.stringify(persistedOptions) !== JSON.stringify(options)) {
5963
+ // Update persisted options:
5964
+ if (!options)
5965
+ throw new Error(`Internal error`); // options cannot be null if configuredProgramatically is set.
5966
+ yield db.$syncState.put(options, 'options');
5932
5967
  }
5933
- db.cloud.usingServiceWorker = false;
5934
- }
5935
- updateSchemaFromOptions(schema, db.cloud.options);
5936
- updateSchemaFromOptions(persistedSchema, db.cloud.options);
5937
- if (!schema) {
5938
- // Database opened dynamically (use case for SW!)
5939
- // Take persisted schema:
5940
- db.cloud.schema = persistedSchema || null;
5941
- }
5942
- else if (!persistedSchema ||
5943
- JSON.stringify(persistedSchema) !== JSON.stringify(schema)) {
5944
- // Update persisted schema (but don't overwrite table prefixes)
5945
- const newPersistedSchema = persistedSchema || {};
5946
- for (const [table, tblSchema] of Object.entries(schema)) {
5947
- const newTblSchema = newPersistedSchema[table];
5948
- if (!newTblSchema) {
5949
- newPersistedSchema[table] = { ...tblSchema };
5968
+ if (((_h = db.cloud.options) === null || _h === void 0 ? void 0 : _h.tryUseServiceWorker) &&
5969
+ 'serviceWorker' in navigator &&
5970
+ swRegistrations.length > 0 &&
5971
+ !DISABLE_SERVICEWORKER_STRATEGY) {
5972
+ // * Configured for using service worker if available.
5973
+ // * Browser supports service workers
5974
+ // * There are at least one service worker registration
5975
+ console.debug('Dexie Cloud Addon: Using service worker');
5976
+ db.cloud.usingServiceWorker = true;
5977
+ }
5978
+ else {
5979
+ // Not configured for using service worker or no service worker
5980
+ // registration exists. Don't rely on service worker to do any job.
5981
+ // Use LocalSyncWorker instead.
5982
+ if (((_j = db.cloud.options) === null || _j === void 0 ? void 0 : _j.tryUseServiceWorker) &&
5983
+ !db.cloud.isServiceWorkerDB) {
5984
+ console.debug('dexie-cloud-addon: Not using service worker.', swRegistrations.length === 0
5985
+ ? 'No SW registrations found.'
5986
+ : 'serviceWorker' in navigator && DISABLE_SERVICEWORKER_STRATEGY
5987
+ ? 'Avoiding SW background sync and SW periodic bg sync for this browser due to browser bugs.'
5988
+ : 'navigator.serviceWorker not present');
5950
5989
  }
5951
- else {
5952
- newTblSchema.markedForSync = tblSchema.markedForSync;
5953
- tblSchema.deleted = newTblSchema.deleted;
5954
- newTblSchema.generatedGlobalId = tblSchema.generatedGlobalId;
5990
+ db.cloud.usingServiceWorker = false;
5991
+ }
5992
+ updateSchemaFromOptions(schema, db.cloud.options);
5993
+ updateSchemaFromOptions(persistedSchema, db.cloud.options);
5994
+ if (!schema) {
5995
+ // Database opened dynamically (use case for SW!)
5996
+ // Take persisted schema:
5997
+ db.cloud.schema = persistedSchema || null;
5998
+ }
5999
+ else if (!persistedSchema ||
6000
+ JSON.stringify(persistedSchema) !== JSON.stringify(schema)) {
6001
+ // Update persisted schema (but don't overwrite table prefixes)
6002
+ const newPersistedSchema = persistedSchema || {};
6003
+ for (const [table, tblSchema] of Object.entries(schema)) {
6004
+ const newTblSchema = newPersistedSchema[table];
6005
+ if (!newTblSchema) {
6006
+ newPersistedSchema[table] = Object.assign({}, tblSchema);
6007
+ }
6008
+ else {
6009
+ newTblSchema.markedForSync = tblSchema.markedForSync;
6010
+ tblSchema.deleted = newTblSchema.deleted;
6011
+ newTblSchema.generatedGlobalId = tblSchema.generatedGlobalId;
6012
+ }
5955
6013
  }
6014
+ yield db.$syncState.put(newPersistedSchema, 'schema');
6015
+ // Make sure persisted table prefixes are being used instead of computed ones:
6016
+ // Let's assign all props as the newPersistedSchems should be what we should be working with.
6017
+ Object.assign(schema, newPersistedSchema);
5956
6018
  }
5957
- await db.$syncState.put(newPersistedSchema, 'schema');
5958
- // Make sure persisted table prefixes are being used instead of computed ones:
5959
- // Let's assign all props as the newPersistedSchems should be what we should be working with.
5960
- Object.assign(schema, newPersistedSchema);
6019
+ return persistedSyncState === null || persistedSyncState === void 0 ? void 0 : persistedSyncState.initiallySynced;
6020
+ }));
6021
+ if (initiallySynced) {
6022
+ db.setInitiallySynced(true);
5961
6023
  }
5962
- return persistedSyncState?.initiallySynced;
5963
- });
5964
- if (initiallySynced) {
5965
- db.setInitiallySynced(true);
5966
- }
5967
- verifySchema(db);
5968
- if (db.cloud.options?.databaseUrl && !initiallySynced) {
5969
- await performInitialSync(db, db.cloud.options, db.cloud.schema);
5970
- db.setInitiallySynced(true);
5971
- }
5972
- // Manage CurrentUser observable:
5973
- throwIfClosed();
5974
- if (!IS_SERVICE_WORKER) {
5975
- subscriptions.push(Dexie.liveQuery(() => db.getCurrentUser()).subscribe(currentUserEmitter));
5976
- // Manage PersistendSyncState observable:
5977
- subscriptions.push(Dexie.liveQuery(() => db.getPersistedSyncState()).subscribe(db.cloud.persistedSyncState));
5978
- // Wait till currentUser and persistedSyncState gets populated
5979
- // with things from the database and not just the default values.
5980
- // This is so that when db.open() completes, user should be safe
5981
- // to subscribe to these observables and get actual data.
5982
- await rxjs.combineLatest([
5983
- currentUserEmitter.pipe(skip(1), take(1)),
5984
- db.cloud.persistedSyncState.pipe(skip(1), take(1)),
5985
- ]).toPromise();
5986
- }
5987
- // HERE: If requireAuth, do athentication now.
5988
- if (db.cloud.options?.requireAuth) {
5989
- await login(db);
5990
- }
5991
- if (localSyncWorker)
5992
- localSyncWorker.stop();
5993
- localSyncWorker = null;
5994
- throwIfClosed();
5995
- if (db.cloud.usingServiceWorker && db.cloud.options?.databaseUrl) {
5996
- registerSyncEvent(db, 'push').catch(() => { });
5997
- registerPeriodicSyncEvent(db).catch(() => { });
5998
- }
5999
- else if (db.cloud.options?.databaseUrl &&
6000
- db.cloud.schema &&
6001
- !IS_SERVICE_WORKER) {
6002
- // There's no SW. Start SyncWorker instead.
6003
- localSyncWorker = LocalSyncWorker(db, db.cloud.options, db.cloud.schema);
6004
- localSyncWorker.start();
6005
- triggerSync(db, 'push');
6006
- }
6007
- // Listen to online event and do sync.
6008
- throwIfClosed();
6009
- if (!IS_SERVICE_WORKER) {
6010
- subscriptions.push(rxjs.fromEvent(self, 'online').subscribe(() => {
6011
- console.debug('online!');
6012
- db.syncStateChangedEvent.next({
6013
- phase: 'not-in-sync',
6014
- });
6024
+ verifySchema(db);
6025
+ if (((_b = db.cloud.options) === null || _b === void 0 ? void 0 : _b.databaseUrl) && !initiallySynced) {
6026
+ yield performInitialSync(db, db.cloud.options, db.cloud.schema);
6027
+ db.setInitiallySynced(true);
6028
+ }
6029
+ // Manage CurrentUser observable:
6030
+ throwIfClosed();
6031
+ if (!db.cloud.isServiceWorkerDB) {
6032
+ subscriptions.push(Dexie.liveQuery(() => db.getCurrentUser()).subscribe(currentUserEmitter));
6033
+ // Manage PersistendSyncState observable:
6034
+ subscriptions.push(Dexie.liveQuery(() => db.getPersistedSyncState()).subscribe(db.cloud.persistedSyncState));
6035
+ // Wait till currentUser and persistedSyncState gets populated
6036
+ // with things from the database and not just the default values.
6037
+ // This is so that when db.open() completes, user should be safe
6038
+ // to subscribe to these observables and get actual data.
6039
+ yield rxjs.combineLatest([
6040
+ currentUserEmitter.pipe(skip(1), take(1)),
6041
+ db.cloud.persistedSyncState.pipe(skip(1), take(1)),
6042
+ ]).toPromise();
6043
+ }
6044
+ // HERE: If requireAuth, do athentication now.
6045
+ if ((_c = db.cloud.options) === null || _c === void 0 ? void 0 : _c.requireAuth) {
6046
+ yield login(db);
6047
+ }
6048
+ if (localSyncWorker)
6049
+ localSyncWorker.stop();
6050
+ localSyncWorker = null;
6051
+ throwIfClosed();
6052
+ if (db.cloud.usingServiceWorker && ((_d = db.cloud.options) === null || _d === void 0 ? void 0 : _d.databaseUrl)) {
6053
+ registerSyncEvent(db, 'push').catch(() => { });
6054
+ registerPeriodicSyncEvent(db).catch(() => { });
6055
+ }
6056
+ else if (((_e = db.cloud.options) === null || _e === void 0 ? void 0 : _e.databaseUrl) &&
6057
+ db.cloud.schema &&
6058
+ !db.cloud.isServiceWorkerDB) {
6059
+ // There's no SW. Start SyncWorker instead.
6060
+ localSyncWorker = LocalSyncWorker(db, db.cloud.options, db.cloud.schema);
6061
+ localSyncWorker.start();
6015
6062
  triggerSync(db, 'push');
6016
- }), rxjs.fromEvent(self, 'offline').subscribe(() => {
6017
- console.debug('offline!');
6018
- db.syncStateChangedEvent.next({
6019
- phase: 'offline',
6020
- });
6021
- }));
6022
- }
6023
- // Connect WebSocket only if we're a browser window
6024
- if (typeof window !== 'undefined' &&
6025
- !IS_SERVICE_WORKER &&
6026
- db.cloud.options?.databaseUrl) {
6027
- subscriptions.push(connectWebSocket(db));
6028
- }
6063
+ }
6064
+ // Listen to online event and do sync.
6065
+ throwIfClosed();
6066
+ if (!db.cloud.isServiceWorkerDB) {
6067
+ subscriptions.push(rxjs.fromEvent(self, 'online').subscribe(() => {
6068
+ console.debug('online!');
6069
+ db.syncStateChangedEvent.next({
6070
+ phase: 'not-in-sync',
6071
+ });
6072
+ triggerSync(db, 'push');
6073
+ }), rxjs.fromEvent(self, 'offline').subscribe(() => {
6074
+ console.debug('offline!');
6075
+ db.syncStateChangedEvent.next({
6076
+ phase: 'offline',
6077
+ });
6078
+ }));
6079
+ }
6080
+ // Connect WebSocket unless we
6081
+ if (((_f = db.cloud.options) === null || _f === void 0 ? void 0 : _f.databaseUrl) &&
6082
+ !((_g = db.cloud.options) === null || _g === void 0 ? void 0 : _g.disableWebSocket) &&
6083
+ !IS_SERVICE_WORKER) {
6084
+ subscriptions.push(connectWebSocket(db));
6085
+ }
6086
+ });
6029
6087
  }
6030
6088
  }
6031
- dexieCloud.version = '4.0.0-beta.17';
6089
+ dexieCloud.version = '4.0.0-beta.20';
6032
6090
  Dexie__default["default"].Cloud = dexieCloud;
6033
6091
 
6034
6092
  // In case the SW lives for a while, let it reuse already opened connections:
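In the service-worker build, sync requests are de-duplicated per database and purpose: `syncDBSemaphore` keys the in-flight promise by `"<dbName>/<purpose>"`, and the hunk below additionally tags each SW-opened database with `isServiceWorkerDB` so it takes the dedicated-SW sync path seen earlier in this file. The keyed de-duplication boils down to this pattern (sketch; cleanup timing in the bundle may differ):

```js
// Sketch: reuse one in-flight promise per db/purpose key.
const inFlight = new Map();

function syncOnce(dbName, purpose, doSync) {
  const key = `${dbName}/${purpose}`;
  let promise = inFlight.get(key);
  if (!promise) {
    promise = doSync(dbName, purpose).finally(() => inFlight.delete(key));
    inFlight.set(key, promise);
  }
  return promise;
}
```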
@@ -6057,55 +6115,61 @@
6057
6115
  syncDBSemaphore.set(dbName + '/' + purpose, promise);
6058
6116
  }
6059
6117
  return promise;
6060
- async function _syncDB(dbName, purpose) {
6061
- let db = managedDBs.get(dbName);
6062
- if (!db) {
6063
- console.debug('Dexie Cloud SW: Creating new Dexie instance for', dbName);
6064
- const dexie = new Dexie__default["default"](dbName, { addons: [dexieCloud] });
6065
- db = DexieCloudDB(dexie);
6066
- dexie.on('versionchange', stopManagingDB);
6067
- await db.dx.open(); // Makes sure db.cloud.options and db.cloud.schema are read from db,
6068
- if (!managedDBs.get(dbName)) {
6069
- // Avoid race conditions.
6118
+ function _syncDB(dbName, purpose) {
6119
+ var _a;
6120
+ return __awaiter$1(this, void 0, void 0, function* () {
6121
+ let db = managedDBs.get(dbName);
6122
+ if (!db) {
6123
+ console.debug('Dexie Cloud SW: Creating new Dexie instance for', dbName);
6124
+ const dexie = new Dexie__default["default"](dbName, { addons: [dexieCloud] });
6125
+ db = DexieCloudDB(dexie);
6126
+ db.cloud.isServiceWorkerDB = true;
6127
+ dexie.on('versionchange', stopManagingDB);
6128
+ yield db.dx.open(); // Makes sure db.cloud.options and db.cloud.schema are read from db,
6129
+ if (managedDBs.get(dbName)) {
6130
+ // Avoid race conditions.
6131
+ db.close();
6132
+ return yield _syncDB(dbName, purpose);
6133
+ }
6070
6134
  managedDBs.set(dbName, db);
6071
6135
  }
6072
- }
6073
- if (!db.cloud.options?.databaseUrl) {
6074
- console.error(`Dexie Cloud: No databaseUrl configured`);
6075
- return; // Nothing to sync.
6076
- }
6077
- if (!db.cloud.schema) {
6078
- console.error(`Dexie Cloud: No schema persisted`);
6079
- return; // Nothing to sync.
6080
- }
6081
- function stopManagingDB() {
6082
- db.dx.on.versionchange.unsubscribe(stopManagingDB);
6083
- if (managedDBs.get(db.name) === db) {
6084
- // Avoid race conditions.
6085
- managedDBs.delete(db.name);
6136
+ if (!((_a = db.cloud.options) === null || _a === void 0 ? void 0 : _a.databaseUrl)) {
6137
+ console.error(`Dexie Cloud: No databaseUrl configured`);
6138
+ return; // Nothing to sync.
6086
6139
  }
6087
- console.debug(`Dexie Cloud SW: Closing Dexie instance for ${dbName}`);
6088
- db.dx.close();
6089
- return false;
6090
- }
6091
- try {
6092
- console.debug('Dexie Cloud SW: Syncing');
6093
- await syncIfPossible(db, db.cloud.options, db.cloud.schema, {
6094
- retryImmediatelyOnFetchError: true,
6095
- purpose,
6096
- });
6097
- console.debug('Dexie Cloud SW: Done Syncing');
6098
- }
6099
- catch (e) {
6100
- console.error(`Dexie Cloud SW Error`, e);
6101
- // Error occured. Stop managing this DB until we wake up again by a sync event,
6102
- // which will open a new Dexie and start trying to sync it.
6103
- stopManagingDB();
6104
- if (e.name !== Dexie__default["default"].errnames.NoSuchDatabase) {
6105
- // Unless the error was that DB doesn't exist, rethrow to trigger sync retry.
6106
- throw e; // Throw e to make syncEvent.waitUntil() receive a rejected promis, so it will retry.
6140
+ if (!db.cloud.schema) {
6141
+ console.error(`Dexie Cloud: No schema persisted`);
6142
+ return; // Nothing to sync.
6107
6143
  }
6108
- }
6144
+ function stopManagingDB() {
6145
+ db.dx.on.versionchange.unsubscribe(stopManagingDB);
6146
+ if (managedDBs.get(db.name) === db) {
6147
+ // Avoid race conditions.
6148
+ managedDBs.delete(db.name);
6149
+ }
6150
+ console.debug(`Dexie Cloud SW: Closing Dexie instance for ${dbName}`);
6151
+ db.dx.close();
6152
+ return false;
6153
+ }
6154
+ try {
6155
+ console.debug('Dexie Cloud SW: Syncing');
6156
+ yield syncIfPossible(db, db.cloud.options, db.cloud.schema, {
6157
+ retryImmediatelyOnFetchError: true,
6158
+ purpose,
6159
+ });
6160
+ console.debug('Dexie Cloud SW: Done Syncing');
6161
+ }
6162
+ catch (e) {
6163
+ console.error(`Dexie Cloud SW Error`, e);
6164
+ // Error occurred. Stop managing this DB until we wake up again by a sync event,
6165
+ // which will open a new Dexie and start trying to sync it.
6166
+ stopManagingDB();
6167
+ if (e.name !== Dexie__default["default"].errnames.NoSuchDatabase) {
6168
+ // Unless the error was that DB doesn't exist, rethrow to trigger sync retry.
6169
+ throw e; // Throw e to make syncEvent.waitUntil() receive a rejected promise, so it will retry.
6170
+ }
6171
+ }
6172
+ });
6109
6173
  }
6110
6174
  }
6111
6175
  // Avoid handling events if the browser bugs out by using dexie cloud from a service worker.
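[Editor's note] The hunk below rewrites the message-driven retry into the down-leveled __awaiter form; functionally it still mimics Background Sync by retrying a failed sync after a one-minute pause, up to three attempts, while event.waitUntil() keeps the service worker alive. A standalone sketch of the same idea (illustrative names only; unlike the packaged code, this version awaits each retry so the final failure propagates):

// --- Editor's sketch, not part of the published bundle ---
const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

async function retryWithDelay(job, maxAttempts = 3, delayMs = 60000) {
  for (let attempt = 1; ; attempt++) {
    try {
      return await job(); // success - done
    } catch (e) {
      if (attempt === maxAttempts) throw e; // give up; let waitUntil() see the rejection
      await sleep(delayMs); // pause before the next attempt
    }
  }
}

// e.g. inside the 'message' handler:
// event.waitUntil(retryWithDelay(() => syncDB(dbName, 'pull')).catch(console.error));
// --- end sketch ---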
@@ -6131,12 +6195,12 @@
6131
6195
  // Mimic background sync behavior - retry in X minutes on failure.
6132
6196
  // But with a shorter timeout and more retries.
6133
6197
  const syncAndRetry = (num = 1) => {
6134
- return syncDB(dbName, event.data.purpose || "pull").catch(async (e) => {
6198
+ return syncDB(dbName, event.data.purpose || "pull").catch((e) => __awaiter$1(void 0, void 0, void 0, function* () {
6135
6199
  if (num === 3)
6136
6200
  throw e;
6137
- await sleep(60000); // 1 minute
6201
+ yield sleep(60000); // 1 minute
6138
6202
  syncAndRetry(num + 1);
6139
- });
6203
+ }));
6140
6204
  };
6141
6205
  if ('waitUntil' in event) {
6142
6206
  event.waitUntil(syncAndRetry().catch(error => console.error(error)));