dexie-cloud-addon 4.0.0-beta.15 → 4.0.0-beta.18

This diff shows the changes between the publicly released contents of these two package versions as published to their public registry. It is provided for informational purposes only.
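
Most of this diff is mechanical: between 4.0.0-beta.15 and 4.0.0-beta.18 the published bundle is compiled for a lower JavaScript target. Native async/await becomes the TypeScript __awaiter helper driving a generator, optional chaining (?.) and nullish coalescing (??) become explicit null / void 0 checks using temporaries such as _a, bare catch blocks gain a binding (catch (_a)), and object spread becomes Object.assign. The sketch below is an illustration of that lowering, not code from the package; the helper is the same __awaiter the new bundle embeds, while delayedDouble is a hypothetical example function.

// Minimal __awaiter helper, equivalent to the one embedded in the beta.18 bundle.
function __awaiter(thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
}

// Source form, as the library code is written (hypothetical example function):
async function delayedDouble(x) {
    const v = await Promise.resolve(x);
    return v * 2;
}

// Downleveled form, as beta.18 ships it: each `await` becomes a `yield` inside a generator.
function delayedDoubleCompiled(x) {
    return __awaiter(this, void 0, void 0, function* () {
        const v = yield Promise.resolve(x);
        return v * 2;
    });
}

// Both forms resolve to the same value:
delayedDouble(21).then(console.log);         // 42
delayedDoubleCompiled(21).then(console.log); // 42

// The same lowering applies elsewhere in the diff: source `ev.source?.id` and `{ ...message }`
// appear in the new bundle as
//   var _a; (_a = ev.source) === null || _a === void 0 ? void 0 : _a.id
// and
//   Object.assign({}, message)
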
@@ -1,5 +1,30 @@
1
1
  import Dexie, { cmp, liveQuery } from 'dexie';
2
- import { Observable as Observable$1, BehaviorSubject, Subject, from as from$1, fromEvent, of, merge, Subscription as Subscription$1, throwError, combineLatest, map as map$1, share, timer as timer$1, switchMap as switchMap$1 } from 'rxjs';
2
+ import { Observable as Observable$1, BehaviorSubject, Subject, fromEvent, of, merge, Subscription as Subscription$1, from as from$1, throwError, combineLatest, map as map$1, share, timer as timer$1, switchMap as switchMap$1 } from 'rxjs';
3
+
4
+ /*! *****************************************************************************
5
+ Copyright (c) Microsoft Corporation.
6
+
7
+ Permission to use, copy, modify, and/or distribute this software for any
8
+ purpose with or without fee is hereby granted.
9
+
10
+ THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
11
+ REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
12
+ AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
13
+ INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
14
+ LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
15
+ OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
16
+ PERFORMANCE OF THIS SOFTWARE.
17
+ ***************************************************************************** */
18
+
19
+ function __awaiter$1(thisArg, _arguments, P, generator) {
20
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
21
+ return new (P || (P = Promise))(function (resolve, reject) {
22
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
23
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
24
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
25
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
26
+ });
27
+ }
3
28
 
4
29
  const UNAUTHORIZED_USER = {
5
30
  userId: "unauthorized",
@@ -13,7 +38,7 @@ try {
13
38
  Object.freeze(UNAUTHORIZED_USER);
14
39
  Object.freeze(UNAUTHORIZED_USER.claims);
15
40
  }
16
- catch { }
41
+ catch (_a) { }
17
42
 
18
43
  const swHolder = {};
19
44
  const swContainer = self.document && navigator.serviceWorker; // self.document is to verify we're not the SW ourself
@@ -22,8 +47,9 @@ if (swContainer)
22
47
  if (typeof self !== 'undefined' && 'clients' in self && !self.document) {
23
48
  // We are the service worker. Propagate messages to all our clients.
24
49
  addEventListener('message', (ev) => {
25
- if (ev.data?.type?.startsWith('sw-broadcast-')) {
26
- [...self['clients'].matchAll({ includeUncontrolled: true })].forEach((client) => client.id !== ev.source?.id && client.postMessage(ev.data));
50
+ var _a, _b;
51
+ if ((_b = (_a = ev.data) === null || _a === void 0 ? void 0 : _a.type) === null || _b === void 0 ? void 0 : _b.startsWith('sw-broadcast-')) {
52
+ [...self['clients'].matchAll({ includeUncontrolled: true })].forEach((client) => { var _a; return client.id !== ((_a = ev.source) === null || _a === void 0 ? void 0 : _a.id) && client.postMessage(ev.data); });
27
53
  }
28
54
  });
29
55
  }
@@ -35,7 +61,8 @@ class SWBroadcastChannel {
35
61
  if (!swContainer)
36
62
  return () => { };
37
63
  const forwarder = (ev) => {
38
- if (ev.data?.type === `sw-broadcast-${this.name}`) {
64
+ var _a;
65
+ if (((_a = ev.data) === null || _a === void 0 ? void 0 : _a.type) === `sw-broadcast-${this.name}`) {
39
66
  listener(ev.data.message);
40
67
  }
41
68
  };
@@ -43,6 +70,7 @@ class SWBroadcastChannel {
43
70
  return () => swContainer.removeEventListener('message', forwarder);
44
71
  }
45
72
  postMessage(message) {
73
+ var _a;
46
74
  if (typeof self['clients'] === 'object') {
47
75
  // We're a service worker. Propagate to our browser clients.
48
76
  [...self['clients'].matchAll({ includeUncontrolled: true })].forEach((client) => client.postMessage({
@@ -53,7 +81,7 @@ class SWBroadcastChannel {
53
81
  else if (swHolder.registration) {
54
82
  // We're a client (browser window or other worker)
55
83
  // Post to SW so it can repost to all its clients and to itself
56
- swHolder.registration.active?.postMessage({
84
+ (_a = swHolder.registration.active) === null || _a === void 0 ? void 0 : _a.postMessage({
57
85
  type: `sw-broadcast-${this.name}`,
58
86
  message
59
87
  });
@@ -96,7 +124,7 @@ class BroadcastedAndLocalEvent extends Observable$1 {
96
124
  this.bc = bc;
97
125
  }
98
126
  next(message) {
99
- console.debug("BroadcastedAndLocalEvent: bc.postMessage()", { ...message }, "bc is a", this.bc);
127
+ console.debug("BroadcastedAndLocalEvent: bc.postMessage()", Object.assign({}, message), "bc is a", this.bc);
100
128
  this.bc.postMessage(message);
101
129
  const ev = new CustomEvent(`lbc-${this.name}`, { detail: message });
102
130
  self.dispatchEvent(ev);
@@ -1689,55 +1717,60 @@ function timeoutErrorFactory(info) {
1689
1717
 
1690
1718
  //const hasSW = 'serviceWorker' in navigator;
1691
1719
  let hasComplainedAboutSyncEvent = false;
1692
- async function registerSyncEvent(db, purpose) {
1693
- try {
1694
- // Send sync event to SW:
1695
- const sw = await navigator.serviceWorker.ready;
1696
- if (purpose === "push" && sw.sync) {
1697
- await sw.sync.register(`dexie-cloud:${db.name}`);
1698
- }
1699
- if (sw.active) {
1700
- // Use postMessage for pull syncs and for browsers not supporting sync event (Firefox, Safari).
1701
- // Also chromium based browsers with sw.sync as a fallback for sleepy sync events not taking action for a while.
1702
- sw.active.postMessage({
1703
- type: 'dexie-cloud-sync',
1704
- dbName: db.name,
1705
- purpose
1706
- });
1707
- }
1708
- else {
1709
- throw new Error(`Failed to trigger sync - there's no active service worker`);
1720
+ function registerSyncEvent(db, purpose) {
1721
+ return __awaiter$1(this, void 0, void 0, function* () {
1722
+ try {
1723
+ // Send sync event to SW:
1724
+ const sw = yield navigator.serviceWorker.ready;
1725
+ if (purpose === "push" && sw.sync) {
1726
+ yield sw.sync.register(`dexie-cloud:${db.name}`);
1727
+ }
1728
+ if (sw.active) {
1729
+ // Use postMessage for pull syncs and for browsers not supporting sync event (Firefox, Safari).
1730
+ // Also chromium based browsers with sw.sync as a fallback for sleepy sync events not taking action for a while.
1731
+ sw.active.postMessage({
1732
+ type: 'dexie-cloud-sync',
1733
+ dbName: db.name,
1734
+ purpose
1735
+ });
1736
+ }
1737
+ else {
1738
+ throw new Error(`Failed to trigger sync - there's no active service worker`);
1739
+ }
1740
+ return;
1710
1741
  }
1711
- return;
1712
- }
1713
- catch (e) {
1714
- if (!hasComplainedAboutSyncEvent) {
1715
- console.debug(`Dexie Cloud: Could not register sync event`, e);
1716
- hasComplainedAboutSyncEvent = true;
1742
+ catch (e) {
1743
+ if (!hasComplainedAboutSyncEvent) {
1744
+ console.debug(`Dexie Cloud: Could not register sync event`, e);
1745
+ hasComplainedAboutSyncEvent = true;
1746
+ }
1717
1747
  }
1718
- }
1748
+ });
1719
1749
  }
1720
- async function registerPeriodicSyncEvent(db) {
1721
- try {
1722
- // Register periodicSync event to SW:
1723
- // @ts-ignore
1724
- const { periodicSync } = await navigator.serviceWorker.ready;
1725
- if (periodicSync) {
1726
- try {
1727
- await periodicSync.register(`dexie-cloud:${db.name}`, db.cloud.options?.periodicSync);
1728
- console.debug(`Dexie Cloud: Successfully registered periodicsync event for ${db.name}`);
1750
+ function registerPeriodicSyncEvent(db) {
1751
+ var _a;
1752
+ return __awaiter$1(this, void 0, void 0, function* () {
1753
+ try {
1754
+ // Register periodicSync event to SW:
1755
+ // @ts-ignore
1756
+ const { periodicSync } = yield navigator.serviceWorker.ready;
1757
+ if (periodicSync) {
1758
+ try {
1759
+ yield periodicSync.register(`dexie-cloud:${db.name}`, (_a = db.cloud.options) === null || _a === void 0 ? void 0 : _a.periodicSync);
1760
+ console.debug(`Dexie Cloud: Successfully registered periodicsync event for ${db.name}`);
1761
+ }
1762
+ catch (e) {
1763
+ console.debug(`Dexie Cloud: Failed to register periodic sync. Your PWA must be installed to allow background sync.`, e);
1764
+ }
1729
1765
  }
1730
- catch (e) {
1731
- console.debug(`Dexie Cloud: Failed to register periodic sync. Your PWA must be installed to allow background sync.`, e);
1766
+ else {
1767
+ console.debug(`Dexie Cloud: periodicSync not supported.`);
1732
1768
  }
1733
1769
  }
1734
- else {
1735
- console.debug(`Dexie Cloud: periodicSync not supported.`);
1770
+ catch (e) {
1771
+ console.debug(`Dexie Cloud: Could not register periodicSync for ${db.name}`, e);
1736
1772
  }
1737
- }
1738
- catch (e) {
1739
- console.debug(`Dexie Cloud: Could not register periodicSync for ${db.name}`, e);
1740
- }
1773
+ });
1741
1774
  }
1742
1775
 
1743
1776
  function triggerSync(db, purpose) {
@@ -1766,15 +1799,17 @@ const b64encode = typeof Buffer !== "undefined"
1766
1799
  : Buffer.from(b).toString("base64")
1767
1800
  : (b) => btoa(String.fromCharCode.apply(null, b));
1768
1801
 
1769
- async function computeRealmSetHash({ realms, inviteRealms, }) {
1770
- const data = JSON.stringify([
1771
- ...realms.map((realmId) => ({ realmId, accepted: true })),
1772
- ...inviteRealms.map((realmId) => ({ realmId, accepted: false })),
1773
- ].sort((a, b) => a.realmId < b.realmId ? -1 : a.realmId > b.realmId ? 1 : 0));
1774
- const byteArray = new TextEncoder().encode(data);
1775
- const digestBytes = await crypto.subtle.digest('SHA-1', byteArray);
1776
- const base64 = b64encode(digestBytes);
1777
- return base64;
1802
+ function computeRealmSetHash({ realms, inviteRealms, }) {
1803
+ return __awaiter$1(this, void 0, void 0, function* () {
1804
+ const data = JSON.stringify([
1805
+ ...realms.map((realmId) => ({ realmId, accepted: true })),
1806
+ ...inviteRealms.map((realmId) => ({ realmId, accepted: false })),
1807
+ ].sort((a, b) => a.realmId < b.realmId ? -1 : a.realmId > b.realmId ? 1 : 0));
1808
+ const byteArray = new TextEncoder().encode(data);
1809
+ const digestBytes = yield crypto.subtle.digest('SHA-1', byteArray);
1810
+ const base64 = b64encode(digestBytes);
1811
+ return base64;
1812
+ });
1778
1813
  }
1779
1814
 
1780
1815
  function getSyncableTables(db) {
@@ -1789,7 +1824,8 @@ function getMutationTable(tableName) {
1789
1824
  }
1790
1825
 
1791
1826
  function getTableFromMutationTable(mutationTable) {
1792
- const tableName = /^\$(.*)_mutations$/.exec(mutationTable)?.[1];
1827
+ var _a;
1828
+ const tableName = (_a = /^\$(.*)_mutations$/.exec(mutationTable)) === null || _a === void 0 ? void 0 : _a[1];
1793
1829
  if (!tableName)
1794
1830
  throw new Error(`Given mutationTable ${mutationTable} is not correct`);
1795
1831
  return tableName;
@@ -1800,49 +1836,51 @@ function flatten(a) {
1800
1836
  return concat.apply([], a);
1801
1837
  }
1802
1838
 
1803
- async function listClientChanges(mutationTables, db, { since = {}, limit = Infinity } = {}) {
1804
- const allMutsOnTables = await Promise.all(mutationTables.map(async (mutationTable) => {
1805
- const tableName = getTableFromMutationTable(mutationTable.name);
1806
- const lastRevision = since[tableName];
1807
- let query = lastRevision
1808
- ? mutationTable.where('rev').above(lastRevision)
1809
- : mutationTable;
1810
- if (limit < Infinity)
1811
- query = query.limit(limit);
1812
- const muts = await query.toArray();
1813
- //const objTable = db.table(tableName);
1814
- /*for (const mut of muts) {
1815
- if (mut.type === "insert" || mut.type === "upsert") {
1816
- mut.values = await objTable.bulkGet(mut.keys);
1817
- }
1818
- }*/
1819
- return muts.map((mut) => ({
1820
- table: tableName,
1821
- mut,
1822
- }));
1823
- }));
1824
- // Sort by time to get a true order of the operations (between tables)
1825
- const sorted = flatten(allMutsOnTables).sort((a, b) => a.mut.ts - b.mut.ts);
1826
- const result = [];
1827
- let currentEntry = null;
1828
- let currentTxid = null;
1829
- for (const { table, mut } of sorted) {
1830
- if (currentEntry &&
1831
- currentEntry.table === table &&
1832
- currentTxid === mut.txid) {
1833
- currentEntry.muts.push(mut);
1834
- }
1835
- else {
1836
- currentEntry = {
1837
- table,
1838
- muts: [mut],
1839
- };
1840
- currentTxid = mut.txid;
1841
- result.push(currentEntry);
1839
+ function listClientChanges(mutationTables, db, { since = {}, limit = Infinity } = {}) {
1840
+ return __awaiter$1(this, void 0, void 0, function* () {
1841
+ const allMutsOnTables = yield Promise.all(mutationTables.map((mutationTable) => __awaiter$1(this, void 0, void 0, function* () {
1842
+ const tableName = getTableFromMutationTable(mutationTable.name);
1843
+ const lastRevision = since[tableName];
1844
+ let query = lastRevision
1845
+ ? mutationTable.where('rev').above(lastRevision)
1846
+ : mutationTable;
1847
+ if (limit < Infinity)
1848
+ query = query.limit(limit);
1849
+ const muts = yield query.toArray();
1850
+ //const objTable = db.table(tableName);
1851
+ /*for (const mut of muts) {
1852
+ if (mut.type === "insert" || mut.type === "upsert") {
1853
+ mut.values = await objTable.bulkGet(mut.keys);
1854
+ }
1855
+ }*/
1856
+ return muts.map((mut) => ({
1857
+ table: tableName,
1858
+ mut,
1859
+ }));
1860
+ })));
1861
+ // Sort by time to get a true order of the operations (between tables)
1862
+ const sorted = flatten(allMutsOnTables).sort((a, b) => a.mut.ts - b.mut.ts);
1863
+ const result = [];
1864
+ let currentEntry = null;
1865
+ let currentTxid = null;
1866
+ for (const { table, mut } of sorted) {
1867
+ if (currentEntry &&
1868
+ currentEntry.table === table &&
1869
+ currentTxid === mut.txid) {
1870
+ currentEntry.muts.push(mut);
1871
+ }
1872
+ else {
1873
+ currentEntry = {
1874
+ table,
1875
+ muts: [mut],
1876
+ };
1877
+ currentTxid = mut.txid;
1878
+ result.push(currentEntry);
1879
+ }
1842
1880
  }
1843
- }
1844
- // Filter out those tables that doesn't have any mutations:
1845
- return result;
1881
+ // Filter out those tables that doesn't have any mutations:
1882
+ return result;
1883
+ });
1846
1884
  }
1847
1885
 
1848
1886
  function randomString$1(bytes) {
@@ -2148,58 +2186,60 @@ function getDbNameFromDbUrl(dbUrl) {
2148
2186
  : url.pathname.split('/')[1];
2149
2187
  }
2150
2188
 
2151
- async function listSyncifiedChanges(tablesToSyncify, currentUser, schema, alreadySyncedRealms) {
2152
- const txid = `upload-${randomString$1(8)}`;
2153
- if (currentUser.isLoggedIn) {
2154
- if (tablesToSyncify.length > 0) {
2155
- const ignoredRealms = new Set(alreadySyncedRealms || []);
2156
- const upserts = await Promise.all(tablesToSyncify.map(async (table) => {
2157
- const { extractKey } = table.core.schema.primaryKey;
2158
- if (!extractKey)
2159
- return { table: table.name, muts: [] }; // Outbound tables are not synced.
2160
- const dexieCloudTableSchema = schema[table.name];
2161
- const query = dexieCloudTableSchema?.generatedGlobalId
2162
- ? table.filter((item) => {
2163
- const id = extractKey(item);
2164
- return (!ignoredRealms.has(item.realmId || '') &&
2165
- //(id[0] !== '#' || !!item.$ts) && // Private obj need no sync if not changed
2166
- isValidSyncableID(id));
2167
- })
2168
- : table.filter((item) => {
2169
- extractKey(item);
2170
- return (!ignoredRealms.has(item.realmId || '') &&
2171
- //(id[0] !== '#' || !!item.$ts) && // Private obj need no sync if not changed
2172
- isValidAtID(extractKey(item), dexieCloudTableSchema?.idPrefix));
2173
- });
2174
- const unsyncedObjects = await query.toArray();
2175
- if (unsyncedObjects.length > 0) {
2176
- const mut = {
2177
- type: 'upsert',
2178
- values: unsyncedObjects,
2179
- keys: unsyncedObjects.map(extractKey),
2180
- userId: currentUser.userId,
2181
- txid,
2182
- };
2183
- return {
2184
- table: table.name,
2185
- muts: [mut],
2186
- };
2187
- }
2188
- else {
2189
- return {
2190
- table: table.name,
2191
- muts: [],
2192
- };
2193
- }
2194
- }));
2195
- return upserts.filter((op) => op.muts.length > 0);
2189
+ function listSyncifiedChanges(tablesToSyncify, currentUser, schema, alreadySyncedRealms) {
2190
+ return __awaiter$1(this, void 0, void 0, function* () {
2191
+ const txid = `upload-${randomString$1(8)}`;
2192
+ if (currentUser.isLoggedIn) {
2193
+ if (tablesToSyncify.length > 0) {
2194
+ const ignoredRealms = new Set(alreadySyncedRealms || []);
2195
+ const upserts = yield Promise.all(tablesToSyncify.map((table) => __awaiter$1(this, void 0, void 0, function* () {
2196
+ const { extractKey } = table.core.schema.primaryKey;
2197
+ if (!extractKey)
2198
+ return { table: table.name, muts: [] }; // Outbound tables are not synced.
2199
+ const dexieCloudTableSchema = schema[table.name];
2200
+ const query = (dexieCloudTableSchema === null || dexieCloudTableSchema === void 0 ? void 0 : dexieCloudTableSchema.generatedGlobalId)
2201
+ ? table.filter((item) => {
2202
+ const id = extractKey(item);
2203
+ return (!ignoredRealms.has(item.realmId || '') &&
2204
+ //(id[0] !== '#' || !!item.$ts) && // Private obj need no sync if not changed
2205
+ isValidSyncableID(id));
2206
+ })
2207
+ : table.filter((item) => {
2208
+ extractKey(item);
2209
+ return (!ignoredRealms.has(item.realmId || '') &&
2210
+ //(id[0] !== '#' || !!item.$ts) && // Private obj need no sync if not changed
2211
+ isValidAtID(extractKey(item), dexieCloudTableSchema === null || dexieCloudTableSchema === void 0 ? void 0 : dexieCloudTableSchema.idPrefix));
2212
+ });
2213
+ const unsyncedObjects = yield query.toArray();
2214
+ if (unsyncedObjects.length > 0) {
2215
+ const mut = {
2216
+ type: 'upsert',
2217
+ values: unsyncedObjects,
2218
+ keys: unsyncedObjects.map(extractKey),
2219
+ userId: currentUser.userId,
2220
+ txid,
2221
+ };
2222
+ return {
2223
+ table: table.name,
2224
+ muts: [mut],
2225
+ };
2226
+ }
2227
+ else {
2228
+ return {
2229
+ table: table.name,
2230
+ muts: [],
2231
+ };
2232
+ }
2233
+ })));
2234
+ return upserts.filter((op) => op.muts.length > 0);
2235
+ }
2196
2236
  }
2197
- }
2198
- return [];
2237
+ return [];
2238
+ });
2199
2239
  }
2200
2240
 
2201
2241
  function getTablesToSyncify(db, syncState) {
2202
- const syncedTables = syncState?.syncedTables || [];
2242
+ const syncedTables = (syncState === null || syncState === void 0 ? void 0 : syncState.syncedTables) || [];
2203
2243
  const syncableTables = getSyncableTables(db);
2204
2244
  const tablesToSyncify = syncableTables.filter((tbl) => !syncedTables.includes(tbl.name));
2205
2245
  return tablesToSyncify;
@@ -2208,19 +2248,15 @@ function getTablesToSyncify(db, syncState) {
2208
2248
  function interactWithUser(userInteraction, req) {
2209
2249
  let done = false;
2210
2250
  return new Promise((resolve, reject) => {
2211
- const interactionProps = {
2212
- ...req,
2213
- onSubmit: (res) => {
2251
+ const interactionProps = Object.assign(Object.assign({}, req), { onSubmit: (res) => {
2214
2252
  userInteraction.next(undefined);
2215
2253
  done = true;
2216
2254
  resolve(res);
2217
- },
2218
- onCancel: () => {
2255
+ }, onCancel: () => {
2219
2256
  userInteraction.next(undefined);
2220
2257
  done = true;
2221
2258
  reject(new Dexie.AbortError("User cancelled"));
2222
- },
2223
- };
2259
+ } });
2224
2260
  userInteraction.next(interactionProps);
2225
2261
  // Start subscribing for external updates to db.cloud.userInteraction, and if so, cancel this request.
2226
2262
  /*const subscription = userInteraction.subscribe((currentInteractionProps) => {
@@ -2241,180 +2277,193 @@ function alertUser(userInteraction, title, ...alerts) {
2241
2277
  fields: {}
2242
2278
  });
2243
2279
  }
2244
- async function promptForEmail(userInteraction, title, emailHint) {
2245
- let email = emailHint || '';
2246
- while (!email || !/^[\w-\.]+@([\w-]+\.)+[\w-]{2,10}$/.test(email)) {
2247
- email = (await interactWithUser(userInteraction, {
2248
- type: 'email',
2249
- title,
2250
- alerts: email
2251
- ? [
2252
- {
2253
- type: 'error',
2254
- messageCode: 'INVALID_EMAIL',
2255
- message: 'Please enter a valid email address',
2256
- messageParams: {},
2280
+ function promptForEmail(userInteraction, title, emailHint) {
2281
+ return __awaiter$1(this, void 0, void 0, function* () {
2282
+ let email = emailHint || '';
2283
+ while (!email || !/^[\w-\.]+@([\w-]+\.)+[\w-]{2,10}$/.test(email)) {
2284
+ email = (yield interactWithUser(userInteraction, {
2285
+ type: 'email',
2286
+ title,
2287
+ alerts: email
2288
+ ? [
2289
+ {
2290
+ type: 'error',
2291
+ messageCode: 'INVALID_EMAIL',
2292
+ message: 'Please enter a valid email address',
2293
+ messageParams: {},
2294
+ },
2295
+ ]
2296
+ : [],
2297
+ fields: {
2298
+ email: {
2299
+ type: 'email',
2300
+ placeholder: 'you@somedomain.com',
2257
2301
  },
2258
- ]
2259
- : [],
2260
- fields: {
2261
- email: {
2262
- type: 'email',
2263
- placeholder: 'you@somedomain.com',
2264
2302
  },
2265
- },
2266
- })).email;
2267
- }
2268
- return email;
2303
+ })).email;
2304
+ }
2305
+ return email;
2306
+ });
2269
2307
  }
2270
- async function promptForOTP(userInteraction, email, alert) {
2271
- const alerts = [
2272
- {
2273
- type: 'info',
2274
- messageCode: 'OTP_SENT',
2275
- message: `A One-Time password has been sent to {email}`,
2276
- messageParams: { email },
2277
- },
2278
- ];
2279
- if (alert) {
2280
- alerts.push(alert);
2281
- }
2282
- const { otp } = await interactWithUser(userInteraction, {
2283
- type: 'otp',
2284
- title: 'Enter OTP',
2285
- alerts,
2286
- fields: {
2287
- otp: {
2288
- type: 'otp',
2289
- label: 'OTP',
2290
- placeholder: 'Paste OTP here',
2308
+ function promptForOTP(userInteraction, email, alert) {
2309
+ return __awaiter$1(this, void 0, void 0, function* () {
2310
+ const alerts = [
2311
+ {
2312
+ type: 'info',
2313
+ messageCode: 'OTP_SENT',
2314
+ message: `A One-Time password has been sent to {email}`,
2315
+ messageParams: { email },
2291
2316
  },
2292
- },
2317
+ ];
2318
+ if (alert) {
2319
+ alerts.push(alert);
2320
+ }
2321
+ const { otp } = yield interactWithUser(userInteraction, {
2322
+ type: 'otp',
2323
+ title: 'Enter OTP',
2324
+ alerts,
2325
+ fields: {
2326
+ otp: {
2327
+ type: 'otp',
2328
+ label: 'OTP',
2329
+ placeholder: 'Paste OTP here',
2330
+ },
2331
+ },
2332
+ });
2333
+ return otp;
2293
2334
  });
2294
- return otp;
2295
2335
  }
2296
2336
 
2297
- async function loadAccessToken(db) {
2298
- const currentUser = await db.getCurrentUser();
2299
- const { accessToken, accessTokenExpiration, refreshToken, refreshTokenExpiration, claims, } = currentUser;
2300
- if (!accessToken)
2301
- return;
2302
- const expTime = accessTokenExpiration?.getTime() ?? Infinity;
2303
- if (expTime > Date.now()) {
2304
- return accessToken;
2305
- }
2306
- if (!refreshToken) {
2307
- throw new Error(`Refresh token missing`);
2308
- }
2309
- const refreshExpTime = refreshTokenExpiration?.getTime() ?? Infinity;
2310
- if (refreshExpTime <= Date.now()) {
2311
- throw new Error(`Refresh token has expired`);
2312
- }
2313
- const refreshedLogin = await refreshAccessToken(db.cloud.options.databaseUrl, currentUser);
2314
- await db.table('$logins').update(claims.sub, {
2315
- accessToken: refreshedLogin.accessToken,
2316
- accessTokenExpiration: refreshedLogin.accessTokenExpiration,
2337
+ function loadAccessToken(db) {
2338
+ var _a, _b;
2339
+ return __awaiter$1(this, void 0, void 0, function* () {
2340
+ const currentUser = yield db.getCurrentUser();
2341
+ const { accessToken, accessTokenExpiration, refreshToken, refreshTokenExpiration, claims, } = currentUser;
2342
+ if (!accessToken)
2343
+ return;
2344
+ const expTime = (_a = accessTokenExpiration === null || accessTokenExpiration === void 0 ? void 0 : accessTokenExpiration.getTime()) !== null && _a !== void 0 ? _a : Infinity;
2345
+ if (expTime > Date.now()) {
2346
+ return accessToken;
2347
+ }
2348
+ if (!refreshToken) {
2349
+ throw new Error(`Refresh token missing`);
2350
+ }
2351
+ const refreshExpTime = (_b = refreshTokenExpiration === null || refreshTokenExpiration === void 0 ? void 0 : refreshTokenExpiration.getTime()) !== null && _b !== void 0 ? _b : Infinity;
2352
+ if (refreshExpTime <= Date.now()) {
2353
+ throw new Error(`Refresh token has expired`);
2354
+ }
2355
+ const refreshedLogin = yield refreshAccessToken(db.cloud.options.databaseUrl, currentUser);
2356
+ yield db.table('$logins').update(claims.sub, {
2357
+ accessToken: refreshedLogin.accessToken,
2358
+ accessTokenExpiration: refreshedLogin.accessTokenExpiration,
2359
+ });
2360
+ return refreshedLogin.accessToken;
2317
2361
  });
2318
- return refreshedLogin.accessToken;
2319
- }
2320
- async function authenticate(url, context, fetchToken, userInteraction, hints) {
2321
- if (context.accessToken &&
2322
- context.accessTokenExpiration.getTime() > Date.now()) {
2323
- return context;
2324
- }
2325
- else if (context.refreshToken &&
2326
- (!context.refreshTokenExpiration ||
2327
- context.refreshTokenExpiration.getTime() > Date.now())) {
2328
- return await refreshAccessToken(url, context);
2329
- }
2330
- else {
2331
- return await userAuthenticate(context, fetchToken, userInteraction, hints);
2332
- }
2333
2362
  }
2334
- async function refreshAccessToken(url, login) {
2335
- if (!login.refreshToken)
2336
- throw new Error(`Cannot refresh token - refresh token is missing.`);
2337
- if (!login.nonExportablePrivateKey)
2338
- throw new Error(`login.nonExportablePrivateKey is missing - cannot sign refresh token without a private key.`);
2339
- const time_stamp = Date.now();
2340
- const signing_algorithm = 'RSASSA-PKCS1-v1_5';
2341
- const textEncoder = new TextEncoder();
2342
- const data = textEncoder.encode(login.refreshToken + time_stamp);
2343
- const binarySignature = await crypto.subtle.sign(signing_algorithm, login.nonExportablePrivateKey, data);
2344
- const signature = b64encode(binarySignature);
2345
- const tokenRequest = {
2346
- grant_type: 'refresh_token',
2347
- refresh_token: login.refreshToken,
2348
- scopes: ['ACCESS_DB'],
2349
- signature,
2350
- signing_algorithm,
2351
- time_stamp,
2352
- };
2353
- const res = await fetch(`${url}/token`, {
2354
- body: JSON.stringify(tokenRequest),
2355
- method: 'post',
2356
- headers: { 'Content-Type': 'application/json' },
2357
- mode: 'cors',
2363
+ function authenticate(url, context, fetchToken, userInteraction, hints) {
2364
+ return __awaiter$1(this, void 0, void 0, function* () {
2365
+ if (context.accessToken &&
2366
+ context.accessTokenExpiration.getTime() > Date.now()) {
2367
+ return context;
2368
+ }
2369
+ else if (context.refreshToken &&
2370
+ (!context.refreshTokenExpiration ||
2371
+ context.refreshTokenExpiration.getTime() > Date.now())) {
2372
+ return yield refreshAccessToken(url, context);
2373
+ }
2374
+ else {
2375
+ return yield userAuthenticate(context, fetchToken, userInteraction, hints);
2376
+ }
2358
2377
  });
2359
- if (res.status !== 200)
2360
- throw new Error(`RefreshToken: Status ${res.status} from ${url}/token`);
2361
- const response = await res.json();
2362
- login.accessToken = response.accessToken;
2363
- login.accessTokenExpiration = response.accessTokenExpiration
2364
- ? new Date(response.accessTokenExpiration)
2365
- : undefined;
2366
- return login;
2367
- }
2368
- async function userAuthenticate(context, fetchToken, userInteraction, hints) {
2369
- const { privateKey, publicKey } = await crypto.subtle.generateKey({
2370
- name: 'RSASSA-PKCS1-v1_5',
2371
- modulusLength: 2048,
2372
- publicExponent: new Uint8Array([0x01, 0x00, 0x01]),
2373
- hash: { name: 'SHA-256' },
2374
- }, false, // Non-exportable...
2375
- ['sign', 'verify']);
2376
- if (!privateKey || !publicKey)
2377
- throw new Error(`Could not generate RSA keypair`); // Typings suggest these can be undefined...
2378
- context.nonExportablePrivateKey = privateKey; //...but storable!
2379
- const publicKeySPKI = await crypto.subtle.exportKey('spki', publicKey);
2380
- const publicKeyPEM = spkiToPEM(publicKeySPKI);
2381
- context.publicKey = publicKey;
2382
- try {
2383
- const response2 = await fetchToken({
2384
- public_key: publicKeyPEM,
2385
- hints,
2378
+ }
2379
+ function refreshAccessToken(url, login) {
2380
+ return __awaiter$1(this, void 0, void 0, function* () {
2381
+ if (!login.refreshToken)
2382
+ throw new Error(`Cannot refresh token - refresh token is missing.`);
2383
+ if (!login.nonExportablePrivateKey)
2384
+ throw new Error(`login.nonExportablePrivateKey is missing - cannot sign refresh token without a private key.`);
2385
+ const time_stamp = Date.now();
2386
+ const signing_algorithm = 'RSASSA-PKCS1-v1_5';
2387
+ const textEncoder = new TextEncoder();
2388
+ const data = textEncoder.encode(login.refreshToken + time_stamp);
2389
+ const binarySignature = yield crypto.subtle.sign(signing_algorithm, login.nonExportablePrivateKey, data);
2390
+ const signature = b64encode(binarySignature);
2391
+ const tokenRequest = {
2392
+ grant_type: 'refresh_token',
2393
+ refresh_token: login.refreshToken,
2394
+ scopes: ['ACCESS_DB'],
2395
+ signature,
2396
+ signing_algorithm,
2397
+ time_stamp,
2398
+ };
2399
+ const res = yield fetch(`${url}/token`, {
2400
+ body: JSON.stringify(tokenRequest),
2401
+ method: 'post',
2402
+ headers: { 'Content-Type': 'application/json' },
2403
+ mode: 'cors',
2386
2404
  });
2387
- if (response2.type !== 'tokens')
2388
- throw new Error(`Unexpected response type from token endpoint: ${response2.type}`);
2389
- context.accessToken = response2.accessToken;
2390
- context.accessTokenExpiration = new Date(response2.accessTokenExpiration);
2391
- context.refreshToken = response2.refreshToken;
2392
- if (response2.refreshTokenExpiration) {
2393
- context.refreshTokenExpiration = new Date(response2.refreshTokenExpiration);
2394
- }
2395
- context.userId = response2.claims.sub;
2396
- context.email = response2.claims.email;
2397
- context.name = response2.claims.name;
2398
- context.claims = response2.claims;
2399
- if (response2.alerts && response2.alerts.length > 0) {
2400
- await interactWithUser(userInteraction, {
2401
- type: 'message-alert',
2402
- title: 'Authentication Alert',
2403
- fields: {},
2404
- alerts: response2.alerts,
2405
+ if (res.status !== 200)
2406
+ throw new Error(`RefreshToken: Status ${res.status} from ${url}/token`);
2407
+ const response = yield res.json();
2408
+ login.accessToken = response.accessToken;
2409
+ login.accessTokenExpiration = response.accessTokenExpiration
2410
+ ? new Date(response.accessTokenExpiration)
2411
+ : undefined;
2412
+ return login;
2413
+ });
2414
+ }
2415
+ function userAuthenticate(context, fetchToken, userInteraction, hints) {
2416
+ return __awaiter$1(this, void 0, void 0, function* () {
2417
+ const { privateKey, publicKey } = yield crypto.subtle.generateKey({
2418
+ name: 'RSASSA-PKCS1-v1_5',
2419
+ modulusLength: 2048,
2420
+ publicExponent: new Uint8Array([0x01, 0x00, 0x01]),
2421
+ hash: { name: 'SHA-256' },
2422
+ }, false, // Non-exportable...
2423
+ ['sign', 'verify']);
2424
+ if (!privateKey || !publicKey)
2425
+ throw new Error(`Could not generate RSA keypair`); // Typings suggest these can be undefined...
2426
+ context.nonExportablePrivateKey = privateKey; //...but storable!
2427
+ const publicKeySPKI = yield crypto.subtle.exportKey('spki', publicKey);
2428
+ const publicKeyPEM = spkiToPEM(publicKeySPKI);
2429
+ context.publicKey = publicKey;
2430
+ try {
2431
+ const response2 = yield fetchToken({
2432
+ public_key: publicKeyPEM,
2433
+ hints,
2405
2434
  });
2435
+ if (response2.type !== 'tokens')
2436
+ throw new Error(`Unexpected response type from token endpoint: ${response2.type}`);
2437
+ context.accessToken = response2.accessToken;
2438
+ context.accessTokenExpiration = new Date(response2.accessTokenExpiration);
2439
+ context.refreshToken = response2.refreshToken;
2440
+ if (response2.refreshTokenExpiration) {
2441
+ context.refreshTokenExpiration = new Date(response2.refreshTokenExpiration);
2442
+ }
2443
+ context.userId = response2.claims.sub;
2444
+ context.email = response2.claims.email;
2445
+ context.name = response2.claims.name;
2446
+ context.claims = response2.claims;
2447
+ if (response2.alerts && response2.alerts.length > 0) {
2448
+ yield interactWithUser(userInteraction, {
2449
+ type: 'message-alert',
2450
+ title: 'Authentication Alert',
2451
+ fields: {},
2452
+ alerts: response2.alerts,
2453
+ });
2454
+ }
2455
+ return context;
2406
2456
  }
2407
- return context;
2408
- }
2409
- catch (error) {
2410
- await alertUser(userInteraction, 'Authentication Failed', {
2411
- type: 'error',
2412
- messageCode: 'GENERIC_ERROR',
2413
- message: `We're having a problem authenticating right now.`,
2414
- messageParams: {}
2415
- }).catch(() => { });
2416
- throw error;
2417
- }
2457
+ catch (error) {
2458
+ yield alertUser(userInteraction, 'Authentication Failed', {
2459
+ type: 'error',
2460
+ messageCode: 'GENERIC_ERROR',
2461
+ message: `We're having a problem authenticating right now.`,
2462
+ messageParams: {}
2463
+ }).catch(() => { });
2464
+ throw error;
2465
+ }
2466
+ });
2418
2467
  }
2419
2468
  function spkiToPEM(keydata) {
2420
2469
  const keydataB64 = b64encode(keydata);
@@ -2947,23 +2996,17 @@ class FakeBigInt {
2947
2996
  return this.v;
2948
2997
  }
2949
2998
  }
2950
- const defs = {
2951
- ...undefinedDef,
2952
- ...(hasBigIntSupport
2953
- ? {}
2954
- : {
2955
- bigint: {
2956
- test: (val) => val instanceof FakeBigInt,
2957
- replace: (fakeBigInt) => {
2958
- return {
2959
- $t: 'bigint',
2960
- ...fakeBigInt
2961
- };
2962
- },
2963
- revive: ({ v, }) => new FakeBigInt(v)
2964
- }
2965
- })
2966
- };
2999
+ const defs = Object.assign(Object.assign({}, undefinedDef), (hasBigIntSupport
3000
+ ? {}
3001
+ : {
3002
+ bigint: {
3003
+ test: (val) => val instanceof FakeBigInt,
3004
+ replace: (fakeBigInt) => {
3005
+ return Object.assign({ $t: 'bigint' }, fakeBigInt);
3006
+ },
3007
+ revive: ({ v, }) => new FakeBigInt(v)
3008
+ }
3009
+ }));
2967
3010
  const TSON = TypesonSimplified(builtin, defs);
2968
3011
  const BISON = Bison(defs);
2969
3012
 
@@ -3022,110 +3065,107 @@ function encodeIdsForServer(schema, currentUser, changes) {
3022
3065
  }
3023
3066
  function cloneChange(change, rewriteValues) {
3024
3067
  // clone on demand:
3025
- return {
3026
- ...change,
3027
- muts: rewriteValues
3028
- ? change.muts.map((m) => ({
3029
- ...m,
3030
- keys: m.keys.slice(),
3031
- values: m.values.slice(),
3032
- }))
3033
- : change.muts.map((m) => ({ ...m, keys: m.keys.slice() })),
3034
- };
3068
+ return Object.assign(Object.assign({}, change), { muts: rewriteValues
3069
+ ? change.muts.map((m) => (Object.assign(Object.assign({}, m), { keys: m.keys.slice(), values: m.values.slice() })))
3070
+ : change.muts.map((m) => (Object.assign(Object.assign({}, m), { keys: m.keys.slice() }))) });
3035
3071
  }
3036
3072
 
3037
3073
  //import {BisonWebStreamReader} from "dreambase-library/dist/typeson-simplified/BisonWebStreamReader";
3038
- async function syncWithServer(changes, syncState, baseRevs, db, databaseUrl, schema, clientIdentity, currentUser) {
3039
- //
3040
- // Push changes to server using fetch
3041
- //
3042
- const headers = {
3043
- Accept: 'application/json, application/x-bison, application/x-bison-stream',
3044
- 'Content-Type': 'application/tson'
3045
- };
3046
- const accessToken = await loadAccessToken(db);
3047
- if (accessToken) {
3048
- headers.Authorization = `Bearer ${accessToken}`;
3049
- }
3050
- const syncRequest = {
3051
- v: 2,
3052
- dbID: syncState?.remoteDbId,
3053
- clientIdentity,
3054
- schema: schema || {},
3055
- lastPull: syncState ? {
3056
- serverRevision: syncState.serverRevision,
3057
- realms: syncState.realms,
3058
- inviteRealms: syncState.inviteRealms
3059
- } : undefined,
3060
- baseRevs,
3061
- changes: encodeIdsForServer(db.dx.core.schema, currentUser, changes)
3062
- };
3063
- console.debug("Sync request", syncRequest);
3064
- db.syncStateChangedEvent.next({
3065
- phase: 'pushing',
3066
- });
3067
- const res = await fetch(`${databaseUrl}/sync`, {
3068
- method: 'post',
3069
- headers,
3070
- body: TSON.stringify(syncRequest)
3071
- });
3072
- //const contentLength = Number(res.headers.get('content-length'));
3073
- db.syncStateChangedEvent.next({
3074
- phase: 'pulling'
3075
- });
3076
- if (!res.ok) {
3077
- throw new HttpError(res);
3078
- }
3079
- switch (res.headers.get('content-type')) {
3080
- case 'application/x-bison':
3081
- return BISON.fromBinary(await res.blob());
3082
- case 'application/x-bison-stream': //return BisonWebStreamReader(BISON, res);
3083
- default:
3084
- case 'application/json': {
3085
- const text = await res.text();
3086
- const syncRes = TSON.parse(text);
3087
- return syncRes;
3074
+ function syncWithServer(changes, syncState, baseRevs, db, databaseUrl, schema, clientIdentity, currentUser) {
3075
+ return __awaiter$1(this, void 0, void 0, function* () {
3076
+ //
3077
+ // Push changes to server using fetch
3078
+ //
3079
+ const headers = {
3080
+ Accept: 'application/json, application/x-bison, application/x-bison-stream',
3081
+ 'Content-Type': 'application/tson'
3082
+ };
3083
+ const accessToken = yield loadAccessToken(db);
3084
+ if (accessToken) {
3085
+ headers.Authorization = `Bearer ${accessToken}`;
3088
3086
  }
3089
- }
3090
- }
3091
-
3092
- async function modifyLocalObjectsWithNewUserId(syncifiedTables, currentUser, alreadySyncedRealms) {
3093
- const ignoredRealms = new Set(alreadySyncedRealms || []);
3094
- for (const table of syncifiedTables) {
3095
- if (table.name === "members") {
3096
- // members
3097
- await table.toCollection().modify((member) => {
3098
- if (!ignoredRealms.has(member.realmId) && (!member.userId || member.userId === UNAUTHORIZED_USER.userId)) {
3099
- member.userId = currentUser.userId;
3100
- }
3101
- });
3087
+ const syncRequest = {
3088
+ v: 2,
3089
+ dbID: syncState === null || syncState === void 0 ? void 0 : syncState.remoteDbId,
3090
+ clientIdentity,
3091
+ schema: schema || {},
3092
+ lastPull: syncState ? {
3093
+ serverRevision: syncState.serverRevision,
3094
+ realms: syncState.realms,
3095
+ inviteRealms: syncState.inviteRealms
3096
+ } : undefined,
3097
+ baseRevs,
3098
+ changes: encodeIdsForServer(db.dx.core.schema, currentUser, changes)
3099
+ };
3100
+ console.debug("Sync request", syncRequest);
3101
+ db.syncStateChangedEvent.next({
3102
+ phase: 'pushing',
3103
+ });
3104
+ const res = yield fetch(`${databaseUrl}/sync`, {
3105
+ method: 'post',
3106
+ headers,
3107
+ body: TSON.stringify(syncRequest)
3108
+ });
3109
+ //const contentLength = Number(res.headers.get('content-length'));
3110
+ db.syncStateChangedEvent.next({
3111
+ phase: 'pulling'
3112
+ });
3113
+ if (!res.ok) {
3114
+ throw new HttpError(res);
3102
3115
  }
3103
- else if (table.name === "roles") ;
3104
- else if (table.name === "realms") {
3105
- // realms
3106
- await table.toCollection().modify((realm) => {
3107
- if (!ignoredRealms.has(realm.realmId) && (realm.owner === undefined || realm.owner === UNAUTHORIZED_USER.userId)) {
3108
- realm.owner = currentUser.userId;
3109
- }
3110
- });
3116
+ switch (res.headers.get('content-type')) {
3117
+ case 'application/x-bison':
3118
+ return BISON.fromBinary(yield res.blob());
3119
+ case 'application/x-bison-stream': //return BisonWebStreamReader(BISON, res);
3120
+ default:
3121
+ case 'application/json': {
3122
+ const text = yield res.text();
3123
+ const syncRes = TSON.parse(text);
3124
+ return syncRes;
3125
+ }
3111
3126
  }
3112
- else {
3113
- // application entities
3114
- await table.toCollection().modify((obj) => {
3115
- if (!obj.realmId || !ignoredRealms.has(obj.realmId)) {
3116
- if (!obj.owner || obj.owner === UNAUTHORIZED_USER.userId)
3117
- obj.owner = currentUser.userId;
3118
- if (!obj.realmId || obj.realmId === UNAUTHORIZED_USER.userId) {
3119
- obj.realmId = currentUser.userId;
3127
+ });
3128
+ }
3129
+
3130
+ function modifyLocalObjectsWithNewUserId(syncifiedTables, currentUser, alreadySyncedRealms) {
3131
+ return __awaiter$1(this, void 0, void 0, function* () {
3132
+ const ignoredRealms = new Set(alreadySyncedRealms || []);
3133
+ for (const table of syncifiedTables) {
3134
+ if (table.name === "members") {
3135
+ // members
3136
+ yield table.toCollection().modify((member) => {
3137
+ if (!ignoredRealms.has(member.realmId) && (!member.userId || member.userId === UNAUTHORIZED_USER.userId)) {
3138
+ member.userId = currentUser.userId;
3120
3139
  }
3121
- }
3122
- });
3140
+ });
3141
+ }
3142
+ else if (table.name === "roles") ;
3143
+ else if (table.name === "realms") {
3144
+ // realms
3145
+ yield table.toCollection().modify((realm) => {
3146
+ if (!ignoredRealms.has(realm.realmId) && (realm.owner === undefined || realm.owner === UNAUTHORIZED_USER.userId)) {
3147
+ realm.owner = currentUser.userId;
3148
+ }
3149
+ });
3150
+ }
3151
+ else {
3152
+ // application entities
3153
+ yield table.toCollection().modify((obj) => {
3154
+ if (!obj.realmId || !ignoredRealms.has(obj.realmId)) {
3155
+ if (!obj.owner || obj.owner === UNAUTHORIZED_USER.userId)
3156
+ obj.owner = currentUser.userId;
3157
+ if (!obj.realmId || obj.realmId === UNAUTHORIZED_USER.userId) {
3158
+ obj.realmId = currentUser.userId;
3159
+ }
3160
+ }
3161
+ });
3162
+ }
3123
3163
  }
3124
- }
3164
+ });
3125
3165
  }
3126
3166
 
3127
3167
  function throwIfCancelled(cancelToken) {
3128
- if (cancelToken?.cancelled)
3168
+ if (cancelToken === null || cancelToken === void 0 ? void 0 : cancelToken.cancelled)
3129
3169
  throw new Dexie.AbortError(`Operation was cancelled`);
3130
3170
  }
3131
3171
 
@@ -3137,17 +3177,19 @@ let isOnline = navigator.onLine;
3137
3177
  self.addEventListener('online', () => isOnline = true);
3138
3178
  self.addEventListener('offline', () => isOnline = false);
3139
3179
 
3140
- async function updateBaseRevs(db, schema, latestRevisions, serverRev) {
3141
- await db.$baseRevs.bulkPut(Object.keys(schema)
3142
- .filter((table) => schema[table].markedForSync)
3143
- .map((tableName) => {
3144
- const lastClientRevOnPreviousServerRev = latestRevisions[tableName] || 0;
3145
- return {
3146
- tableName,
3147
- clientRev: lastClientRevOnPreviousServerRev + 1,
3148
- serverRev,
3149
- };
3150
- }));
3180
+ function updateBaseRevs(db, schema, latestRevisions, serverRev) {
3181
+ return __awaiter$1(this, void 0, void 0, function* () {
3182
+ yield db.$baseRevs.bulkPut(Object.keys(schema)
3183
+ .filter((table) => schema[table].markedForSync)
3184
+ .map((tableName) => {
3185
+ const lastClientRevOnPreviousServerRev = latestRevisions[tableName] || 0;
3186
+ return {
3187
+ tableName,
3188
+ clientRev: lastClientRevOnPreviousServerRev + 1,
3189
+ serverRev,
3190
+ };
3191
+ }));
3192
+ });
3151
3193
  }
3152
3194
 
3153
3195
  function getLatestRevisionsPerTable(clientChangeSet, lastRevisions = {}) {
@@ -3158,119 +3200,123 @@ function getLatestRevisionsPerTable(clientChangeSet, lastRevisions = {}) {
3158
3200
  return lastRevisions;
3159
3201
  }
3160
3202
 
3161
- async function bulkUpdate(table, keys, changeSpecs) {
3162
- const objs = await table.bulkGet(keys);
3163
- const resultKeys = [];
3164
- const resultObjs = [];
3165
- keys.forEach((key, idx) => {
3166
- const obj = objs[idx];
3167
- if (obj) {
3168
- for (const [keyPath, value] of Object.entries(changeSpecs[idx])) {
3169
- if (keyPath === table.schema.primKey.keyPath) {
3170
- if (cmp(value, key) !== 0) {
3171
- throw new Error(`Cannot change primary key`);
3172
- }
3173
- }
3174
- else {
3175
- Dexie.setByKeyPath(obj, keyPath, value);
3176
- }
3177
- }
3178
- resultKeys.push(key);
3179
- resultObjs.push(obj);
3180
- }
3181
- });
3182
- await (table.schema.primKey.keyPath == null
3183
- ? table.bulkPut(resultObjs, resultKeys)
3184
- : table.bulkPut(resultObjs));
3185
- }
3186
-
3187
- async function applyServerChanges(changes, db) {
3188
- console.debug('Applying server changes', changes, Dexie.currentTransaction);
3189
- for (const { table: tableName, muts } of changes) {
3190
- const table = db.table(tableName);
3191
- if (!table)
3192
- continue; // If server sends changes on a table we don't have, ignore it.
3193
- const { primaryKey } = table.core.schema;
3194
- const keyDecoder = (key) => {
3195
- switch (key[0]) {
3196
- case '[':
3197
- // Decode JSON array
3198
- if (key.endsWith(']'))
3199
- try {
3200
- // On server, array keys are transformed to JSON string representation
3201
- return JSON.parse(key);
3203
+ function bulkUpdate(table, keys, changeSpecs) {
3204
+ return __awaiter$1(this, void 0, void 0, function* () {
3205
+ const objs = yield table.bulkGet(keys);
3206
+ const resultKeys = [];
3207
+ const resultObjs = [];
3208
+ keys.forEach((key, idx) => {
3209
+ const obj = objs[idx];
3210
+ if (obj) {
3211
+ for (const [keyPath, value] of Object.entries(changeSpecs[idx])) {
3212
+ if (keyPath === table.schema.primKey.keyPath) {
3213
+ if (cmp(value, key) !== 0) {
3214
+ throw new Error(`Cannot change primary key`);
3202
3215
  }
3203
- catch { }
3204
- return key;
3205
- case '#':
3206
- // Decode private ID (do the opposite from what's done in encodeIdsForServer())
3207
- if (key.endsWith(':' + db.cloud.currentUserId)) {
3208
- return key.substr(0, key.length - db.cloud.currentUserId.length - 1);
3209
- }
3210
- return key;
3211
- default:
3212
- return key;
3213
- }
3214
- };
3215
- for (const mut of muts) {
3216
- const keys = mut.keys.map(keyDecoder);
3217
- switch (mut.type) {
3218
- case 'insert':
3219
- if (primaryKey.outbound) {
3220
- await table.bulkAdd(mut.values, keys);
3221
- }
3222
- else {
3223
- keys.forEach((key, i) => {
3224
- // Make sure inbound keys are consistent
3225
- Dexie.setByKeyPath(mut.values[i], primaryKey.keyPath, key);
3226
- });
3227
- await table.bulkAdd(mut.values);
3228
- }
3229
- break;
3230
- case 'upsert':
3231
- if (primaryKey.outbound) {
3232
- await table.bulkPut(mut.values, keys);
3233
- }
3234
- else {
3235
- keys.forEach((key, i) => {
3236
- // Make sure inbound keys are consistent
3237
- Dexie.setByKeyPath(mut.values[i], primaryKey.keyPath, key);
3238
- });
3239
- await table.bulkPut(mut.values);
3240
- }
3241
- break;
3242
- case 'modify':
3243
- if (keys.length === 1) {
3244
- await table.update(keys[0], mut.changeSpec);
3245
3216
  }
3246
3217
  else {
3247
- await table.where(':id').anyOf(keys).modify(mut.changeSpec);
3218
+ Dexie.setByKeyPath(obj, keyPath, value);
3248
3219
  }
3249
- break;
3250
- case 'update':
3251
- await bulkUpdate(table, keys, mut.changeSpecs);
3252
- break;
3253
- case 'delete':
3254
- await table.bulkDelete(keys);
3255
- break;
3220
+ }
3221
+ resultKeys.push(key);
3222
+ resultObjs.push(obj);
3256
3223
  }
3257
- }
3258
- }
3224
+ });
3225
+ yield (table.schema.primKey.keyPath == null
3226
+ ? table.bulkPut(resultObjs, resultKeys)
3227
+ : table.bulkPut(resultObjs));
3228
+ });
3259
3229
  }
3260
3230
 
3261
- const CURRENT_SYNC_WORKER = 'currentSyncWorker';
3262
- function sync(db, options, schema, syncOptions) {
3263
- return _sync
3264
- .apply(this, arguments)
3265
- .then(() => {
3266
- if (!syncOptions?.justCheckIfNeeded) {
3267
- db.syncStateChangedEvent.next({
3231
+ function applyServerChanges(changes, db) {
3232
+ return __awaiter$1(this, void 0, void 0, function* () {
3233
+ console.debug('Applying server changes', changes, Dexie.currentTransaction);
3234
+ for (const { table: tableName, muts } of changes) {
3235
+ const table = db.table(tableName);
3236
+ if (!table)
3237
+ continue; // If server sends changes on a table we don't have, ignore it.
3238
+ const { primaryKey } = table.core.schema;
3239
+ const keyDecoder = (key) => {
3240
+ switch (key[0]) {
3241
+ case '[':
3242
+ // Decode JSON array
3243
+ if (key.endsWith(']'))
3244
+ try {
3245
+ // On server, array keys are transformed to JSON string representation
3246
+ return JSON.parse(key);
3247
+ }
3248
+ catch (_a) { }
3249
+ return key;
3250
+ case '#':
3251
+ // Decode private ID (do the opposite from what's done in encodeIdsForServer())
3252
+ if (key.endsWith(':' + db.cloud.currentUserId)) {
3253
+ return key.substr(0, key.length - db.cloud.currentUserId.length - 1);
3254
+ }
3255
+ return key;
3256
+ default:
3257
+ return key;
3258
+ }
3259
+ };
3260
+ for (const mut of muts) {
3261
+ const keys = mut.keys.map(keyDecoder);
3262
+ switch (mut.type) {
3263
+ case 'insert':
3264
+ if (primaryKey.outbound) {
3265
+ yield table.bulkAdd(mut.values, keys);
3266
+ }
3267
+ else {
3268
+ keys.forEach((key, i) => {
3269
+ // Make sure inbound keys are consistent
3270
+ Dexie.setByKeyPath(mut.values[i], primaryKey.keyPath, key);
3271
+ });
3272
+ yield table.bulkAdd(mut.values);
3273
+ }
3274
+ break;
3275
+ case 'upsert':
3276
+ if (primaryKey.outbound) {
3277
+ yield table.bulkPut(mut.values, keys);
3278
+ }
3279
+ else {
3280
+ keys.forEach((key, i) => {
3281
+ // Make sure inbound keys are consistent
3282
+ Dexie.setByKeyPath(mut.values[i], primaryKey.keyPath, key);
3283
+ });
3284
+ yield table.bulkPut(mut.values);
3285
+ }
3286
+ break;
3287
+ case 'modify':
3288
+ if (keys.length === 1) {
3289
+ yield table.update(keys[0], mut.changeSpec);
3290
+ }
3291
+ else {
3292
+ yield table.where(':id').anyOf(keys).modify(mut.changeSpec);
3293
+ }
3294
+ break;
3295
+ case 'update':
3296
+ yield bulkUpdate(table, keys, mut.changeSpecs);
3297
+ break;
3298
+ case 'delete':
3299
+ yield table.bulkDelete(keys);
3300
+ break;
3301
+ }
3302
+ }
3303
+ }
3304
+ });
3305
+ }
3306
+
3307
+ const CURRENT_SYNC_WORKER = 'currentSyncWorker';
3308
+ function sync(db, options, schema, syncOptions) {
3309
+ return _sync
3310
+ .apply(this, arguments)
3311
+ .then(() => {
3312
+ if (!(syncOptions === null || syncOptions === void 0 ? void 0 : syncOptions.justCheckIfNeeded)) {
3313
+ db.syncStateChangedEvent.next({
3268
3314
  phase: 'in-sync',
3269
3315
  });
3270
3316
  }
3271
3317
  })
3272
- .catch(async (error) => {
3273
- if (syncOptions?.justCheckIfNeeded)
3318
+ .catch((error) => __awaiter$1(this, void 0, void 0, function* () {
3319
+ if (syncOptions === null || syncOptions === void 0 ? void 0 : syncOptions.justCheckIfNeeded)
3274
3320
  return Promise.reject(error); // Just rethrow.
3275
3321
  console.debug('Error from _sync', {
3276
3322
  isOnline,
@@ -3278,23 +3324,20 @@ function sync(db, options, schema, syncOptions) {
3278
3324
  error,
3279
3325
  });
3280
3326
  if (isOnline &&
3281
- syncOptions?.retryImmediatelyOnFetchError &&
3282
- error?.name === 'TypeError' &&
3283
- /fetch/.test(error?.message)) {
3327
+ (syncOptions === null || syncOptions === void 0 ? void 0 : syncOptions.retryImmediatelyOnFetchError) &&
3328
+ (error === null || error === void 0 ? void 0 : error.name) === 'TypeError' &&
3329
+ /fetch/.test(error === null || error === void 0 ? void 0 : error.message)) {
3284
3330
  db.syncStateChangedEvent.next({
3285
3331
  phase: 'error',
3286
3332
  error,
3287
3333
  });
3288
3334
  // Retry again in 500 ms but if it fails again, don't retry.
3289
- await new Promise((resolve) => setTimeout(resolve, 500));
3290
- return await sync(db, options, schema, {
3291
- ...syncOptions,
3292
- retryImmediatelyOnFetchError: false,
3293
- });
3335
+ yield new Promise((resolve) => setTimeout(resolve, 500));
3336
+ return yield sync(db, options, schema, Object.assign(Object.assign({}, syncOptions), { retryImmediatelyOnFetchError: false }));
3294
3337
  }
3295
3338
  // Make sure that no matter whether sync() explodes or not,
3296
3339
  // always update the timestamp. Also store the error.
3297
- await db.$syncState.update('syncState', {
3340
+ yield db.$syncState.update('syncState', {
3298
3341
  timestamp: new Date(),
3299
3342
  error: '' + error,
3300
3343
  });
@@ -3303,234 +3346,239 @@ function sync(db, options, schema, syncOptions) {
3303
3346
  error,
3304
3347
  });
3305
3348
  return Promise.reject(error);
3306
- });
3349
+ }));
3307
3350
  }
3308
- async function _sync(db, options, schema, { isInitialSync, cancelToken, justCheckIfNeeded, purpose } = {
3351
+ function _sync(db, options, schema, { isInitialSync, cancelToken, justCheckIfNeeded, purpose } = {
3309
3352
  isInitialSync: false,
3310
3353
  }) {
3311
- if (!justCheckIfNeeded) {
3312
- console.debug('SYNC STARTED', { isInitialSync, purpose });
3313
- }
3314
- if (!db.cloud.options?.databaseUrl)
3315
- throw new Error(`Internal error: sync must not be called when no databaseUrl is configured`);
3316
- const { databaseUrl } = options;
3317
- const currentUser = await db.getCurrentUser(); // Keep same value across entire sync flow:
3318
- const tablesToSync = currentUser.isLoggedIn ? getSyncableTables(db) : [];
3319
- const mutationTables = tablesToSync.map((tbl) => db.table(getMutationTable(tbl.name)));
3320
- // If this is not the initial sync,
3321
- // go through tables that were previously not synced but should now be according to
3322
- // logged in state and the sync table whitelist in db.cloud.options.
3323
- //
3324
- // Prepare for syncification by modifying locally unauthorized objects:
3325
- //
3326
- const persistedSyncState = await db.getPersistedSyncState();
3327
- const tablesToSyncify = !isInitialSync && currentUser.isLoggedIn
3328
- ? getTablesToSyncify(db, persistedSyncState)
3329
- : [];
3330
- throwIfCancelled(cancelToken);
3331
- const doSyncify = tablesToSyncify.length > 0;
3332
- if (doSyncify) {
3333
- if (justCheckIfNeeded)
3334
- return true;
3335
- //console.debug('sync doSyncify is true');
3336
- await db.transaction('rw', tablesToSyncify, async (tx) => {
3337
- // @ts-ignore
3338
- tx.idbtrans.disableChangeTracking = true;
3339
- // @ts-ignore
3340
- tx.idbtrans.disableAccessControl = true; // TODO: Take care of this flag in access control middleware!
3341
- await modifyLocalObjectsWithNewUserId(tablesToSyncify, currentUser, persistedSyncState?.realms);
3342
- });
3343
- throwIfCancelled(cancelToken);
3344
- }
3345
- //
3346
- // List changes to sync
3347
- //
3348
- const [clientChangeSet, syncState, baseRevs] = await db.transaction('r', db.tables, async () => {
3349
- const syncState = await db.getPersistedSyncState();
3350
- const baseRevs = await db.$baseRevs.toArray();
3351
- let clientChanges = await listClientChanges(mutationTables);
3354
+ var _a;
3355
+ return __awaiter$1(this, void 0, void 0, function* () {
3356
+ if (!justCheckIfNeeded) {
3357
+ console.debug('SYNC STARTED', { isInitialSync, purpose });
3358
+ }
3359
+ if (!((_a = db.cloud.options) === null || _a === void 0 ? void 0 : _a.databaseUrl))
3360
+ throw new Error(`Internal error: sync must not be called when no databaseUrl is configured`);
3361
+ const { databaseUrl } = options;
3362
+ const currentUser = yield db.getCurrentUser(); // Keep same value across entire sync flow:
3363
+ const tablesToSync = currentUser.isLoggedIn ? getSyncableTables(db) : [];
3364
+ const mutationTables = tablesToSync.map((tbl) => db.table(getMutationTable(tbl.name)));
3365
+ // If this is not the initial sync,
3366
+ // go through tables that were previously not synced but should now be according to
3367
+ // logged in state and the sync table whitelist in db.cloud.options.
3368
+ //
3369
+ // Prepare for syncification by modifying locally unauthorized objects:
3370
+ //
3371
+ const persistedSyncState = yield db.getPersistedSyncState();
3372
+ const tablesToSyncify = !isInitialSync && currentUser.isLoggedIn
3373
+ ? getTablesToSyncify(db, persistedSyncState)
3374
+ : [];
3352
3375
  throwIfCancelled(cancelToken);
3376
+ const doSyncify = tablesToSyncify.length > 0;
3353
3377
  if (doSyncify) {
3354
- const alreadySyncedRealms = [
3355
- ...(persistedSyncState?.realms || []),
3356
- ...(persistedSyncState?.inviteRealms || []),
3357
- ];
3358
- const syncificationInserts = await listSyncifiedChanges(tablesToSyncify, currentUser, schema, alreadySyncedRealms);
3378
+ if (justCheckIfNeeded)
3379
+ return true;
3380
+ //console.debug('sync doSyncify is true');
3381
+ yield db.transaction('rw', tablesToSyncify, (tx) => __awaiter$1(this, void 0, void 0, function* () {
3382
+ // @ts-ignore
3383
+ tx.idbtrans.disableChangeTracking = true;
3384
+ // @ts-ignore
3385
+ tx.idbtrans.disableAccessControl = true; // TODO: Take care of this flag in access control middleware!
3386
+ yield modifyLocalObjectsWithNewUserId(tablesToSyncify, currentUser, persistedSyncState === null || persistedSyncState === void 0 ? void 0 : persistedSyncState.realms);
3387
+ }));
3359
3388
  throwIfCancelled(cancelToken);
3360
- clientChanges = clientChanges.concat(syncificationInserts);
3361
- return [clientChanges, syncState, baseRevs];
3362
3389
  }
3363
- return [clientChanges, syncState, baseRevs];
3364
- });
3365
- const syncIsNeeded = clientChangeSet.some((set) => set.muts.some((mut) => mut.keys.length > 0));
3366
- if (justCheckIfNeeded) {
3367
- console.debug('Sync is needed:', syncIsNeeded);
3368
- return syncIsNeeded;
3369
- }
3370
- if (purpose === 'push' && !syncIsNeeded) {
3371
- // The purpose of this request was to push changes
3372
- return false;
3373
- }
3374
- const latestRevisions = getLatestRevisionsPerTable(clientChangeSet, syncState?.latestRevisions);
3375
- const clientIdentity = syncState?.clientIdentity || randomString(16);
3376
- //
3377
- // Push changes to server
3378
- //
3379
- throwIfCancelled(cancelToken);
3380
- const res = await syncWithServer(clientChangeSet, syncState, baseRevs, db, databaseUrl, schema, clientIdentity, currentUser);
3381
- console.debug('Sync response', res);
3382
- //
3383
- // Apply changes locally and clear old change entries:
3384
- //
3385
- const done = await db.transaction('rw', db.tables, async (tx) => {
3386
- // @ts-ignore
3387
- tx.idbtrans.disableChangeTracking = true;
3388
- // @ts-ignore
3389
- tx.idbtrans.disableAccessControl = true; // TODO: Take care of this flag in access control middleware!
3390
- // Update db.cloud.schema from server response.
3391
- // Local schema MAY include a subset of tables, so do not force all tables into local schema.
3392
- for (const tableName of Object.keys(schema)) {
3393
- if (res.schema[tableName]) {
3394
- // Write directly into configured schema. This code can only be executed alone.
3395
- schema[tableName] = res.schema[tableName];
3396
- }
3397
- }
3398
- await db.$syncState.put(schema, 'schema');
3399
- // List mutations that happened during our exchange with the server:
3400
- const addedClientChanges = await listClientChanges(mutationTables, db, {
3401
- since: latestRevisions,
3402
- });
3403
3390
  //
3404
- // Delete changes now as the server has returned success
3405
- // (but keep changes that haven't reached server yet)
3391
+ // List changes to sync
3406
3392
  //
3407
- for (const mutTable of mutationTables) {
3408
- const tableName = getTableFromMutationTable(mutTable.name);
3409
- if (!addedClientChanges.some((ch) => ch.table === tableName && ch.muts.length > 0)) {
3410
- // No added mutations for this table during the time we sent changes
3411
- // to the server.
3412
- // It is therefore safe to clear all changes (which is faster than
3413
- // deleting a range)
3414
- await Promise.all([
3415
- mutTable.clear(),
3416
- db.$baseRevs.where({ tableName }).delete(),
3417
- ]);
3418
- }
3419
- else if (latestRevisions[tableName]) {
3420
- const latestRev = latestRevisions[tableName] || 0;
3421
- await Promise.all([
3422
- mutTable.where('rev').belowOrEqual(latestRev).delete(),
3423
- db.$baseRevs
3424
- .where(':id')
3425
- .between([tableName, -Infinity], [tableName, latestRev + 1], true, true)
3426
- .reverse()
3427
- .offset(1) // Keep one entry (the one mapping muts that came during fetch --> previous server revision)
3428
- .delete(),
3429
- ]);
3393
+ const [clientChangeSet, syncState, baseRevs] = yield db.transaction('r', db.tables, () => __awaiter$1(this, void 0, void 0, function* () {
3394
+ const syncState = yield db.getPersistedSyncState();
3395
+ const baseRevs = yield db.$baseRevs.toArray();
3396
+ let clientChanges = yield listClientChanges(mutationTables);
3397
+ throwIfCancelled(cancelToken);
3398
+ if (doSyncify) {
3399
+ const alreadySyncedRealms = [
3400
+ ...((persistedSyncState === null || persistedSyncState === void 0 ? void 0 : persistedSyncState.realms) || []),
3401
+ ...((persistedSyncState === null || persistedSyncState === void 0 ? void 0 : persistedSyncState.inviteRealms) || []),
3402
+ ];
3403
+ const syncificationInserts = yield listSyncifiedChanges(tablesToSyncify, currentUser, schema, alreadySyncedRealms);
3404
+ throwIfCancelled(cancelToken);
3405
+ clientChanges = clientChanges.concat(syncificationInserts);
3406
+ return [clientChanges, syncState, baseRevs];
3430
3407
  }
3431
- else ;
3432
- }
3433
- // Update latestRevisions object according to additional changes:
3434
- getLatestRevisionsPerTable(addedClientChanges, latestRevisions);
3435
- // Update/add new entries into baseRevs map.
3436
- // * On tables without mutations since last serverRevision,
3437
- // this will update existing entry.
3438
- // * On tables where mutations have been recorded since last
3439
- // serverRevision, this will create a new entry.
3440
- // The purpose of this operation is to mark a start revision (per table)
3441
- // so that all client-mutations that come after this will be mapped to the current
3442
- // server revision.
3443
- await updateBaseRevs(db, schema, latestRevisions, res.serverRevision);
3444
- const syncState = await db.getPersistedSyncState();
3445
- //
3446
- // Delete objects from removed realms
3447
- //
3448
- await deleteObjectsFromRemovedRealms(db, res, syncState);
3449
- //
3450
- // Update syncState
3451
- //
3452
- const newSyncState = syncState || {
3453
- syncedTables: [],
3454
- latestRevisions: {},
3455
- realms: [],
3456
- inviteRealms: [],
3457
- clientIdentity,
3458
- };
3459
- newSyncState.syncedTables = tablesToSync
3460
- .map((tbl) => tbl.name)
3461
- .concat(tablesToSyncify.map((tbl) => tbl.name));
3462
- newSyncState.latestRevisions = latestRevisions;
3463
- newSyncState.remoteDbId = res.dbId;
3464
- newSyncState.initiallySynced = true;
3465
- newSyncState.realms = res.realms;
3466
- newSyncState.inviteRealms = res.inviteRealms;
3467
- newSyncState.serverRevision = res.serverRevision;
3468
- newSyncState.timestamp = new Date();
3469
- delete newSyncState.error;
3470
- const filteredChanges = filterServerChangesThroughAddedClientChanges(res.changes, addedClientChanges);
3408
+ return [clientChanges, syncState, baseRevs];
3409
+ }));
3410
+ const syncIsNeeded = clientChangeSet.some((set) => set.muts.some((mut) => mut.keys.length > 0));
3411
+ if (justCheckIfNeeded) {
3412
+ console.debug('Sync is needed:', syncIsNeeded);
3413
+ return syncIsNeeded;
3414
+ }
3415
+ if (purpose === 'push' && !syncIsNeeded) {
3416
+ // The purpose of this request was to push changes
3417
+ return false;
3418
+ }
3419
+ const latestRevisions = getLatestRevisionsPerTable(clientChangeSet, syncState === null || syncState === void 0 ? void 0 : syncState.latestRevisions);
3420
+ const clientIdentity = (syncState === null || syncState === void 0 ? void 0 : syncState.clientIdentity) || randomString(16);
3471
3421
  //
3472
- // apply server changes
3422
+ // Push changes to server
3473
3423
  //
3474
- await applyServerChanges(filteredChanges, db);
3424
+ throwIfCancelled(cancelToken);
3425
+ const res = yield syncWithServer(clientChangeSet, syncState, baseRevs, db, databaseUrl, schema, clientIdentity, currentUser);
3426
+ console.debug('Sync response', res);
3475
3427
  //
3476
- // Update syncState
3428
+ // Apply changes locally and clear old change entries:
3477
3429
  //
3478
- db.$syncState.put(newSyncState, 'syncState');
3479
- return addedClientChanges.length === 0;
3430
+ const done = yield db.transaction('rw', db.tables, (tx) => __awaiter$1(this, void 0, void 0, function* () {
3431
+ // @ts-ignore
3432
+ tx.idbtrans.disableChangeTracking = true;
3433
+ // @ts-ignore
3434
+ tx.idbtrans.disableAccessControl = true; // TODO: Take care of this flag in access control middleware!
3435
+ // Update db.cloud.schema from server response.
3436
+ // Local schema MAY include a subset of tables, so do not force all tables into local schema.
3437
+ for (const tableName of Object.keys(schema)) {
3438
+ if (res.schema[tableName]) {
3439
+ // Write directly into configured schema. This code can only be executed alone.
3440
+ schema[tableName] = res.schema[tableName];
3441
+ }
3442
+ }
3443
+ yield db.$syncState.put(schema, 'schema');
3444
+ // List mutations that happened during our exchange with the server:
3445
+ const addedClientChanges = yield listClientChanges(mutationTables, db, {
3446
+ since: latestRevisions,
3447
+ });
3448
+ //
3449
+ // Delete changes now as the server has returned success
3450
+ // (but keep changes that haven't reached server yet)
3451
+ //
3452
+ for (const mutTable of mutationTables) {
3453
+ const tableName = getTableFromMutationTable(mutTable.name);
3454
+ if (!addedClientChanges.some((ch) => ch.table === tableName && ch.muts.length > 0)) {
3455
+ // No added mutations for this table during the time we sent changes
3456
+ // to the server.
3457
+ // It is therefore safe to clear all changes (which is faster than
3458
+ // deleting a range)
3459
+ yield Promise.all([
3460
+ mutTable.clear(),
3461
+ db.$baseRevs.where({ tableName }).delete(),
3462
+ ]);
3463
+ }
3464
+ else if (latestRevisions[tableName]) {
3465
+ const latestRev = latestRevisions[tableName] || 0;
3466
+ yield Promise.all([
3467
+ mutTable.where('rev').belowOrEqual(latestRev).delete(),
3468
+ db.$baseRevs
3469
+ .where(':id')
3470
+ .between([tableName, -Infinity], [tableName, latestRev + 1], true, true)
3471
+ .reverse()
3472
+ .offset(1) // Keep one entry (the one mapping muts that came during fetch --> previous server revision)
3473
+ .delete(),
3474
+ ]);
3475
+ }
3476
+ else ;
3477
+ }
3478
+ // Update latestRevisions object according to additional changes:
3479
+ getLatestRevisionsPerTable(addedClientChanges, latestRevisions);
3480
+ // Update/add new entries into baseRevs map.
3481
+ // * On tables without mutations since last serverRevision,
3482
+ // this will update existing entry.
3483
+ // * On tables where mutations have been recorded since last
3484
+ // serverRevision, this will create a new entry.
3485
+ // The purpose of this operation is to mark a start revision (per table)
3486
+ // so that all client-mutations that come after this will be mapped to the current
3487
+ // server revision.
3488
+ yield updateBaseRevs(db, schema, latestRevisions, res.serverRevision);
3489
+ const syncState = yield db.getPersistedSyncState();
3490
+ //
3491
+ // Delete objects from removed realms
3492
+ //
3493
+ yield deleteObjectsFromRemovedRealms(db, res, syncState);
3494
+ //
3495
+ // Update syncState
3496
+ //
3497
+ const newSyncState = syncState || {
3498
+ syncedTables: [],
3499
+ latestRevisions: {},
3500
+ realms: [],
3501
+ inviteRealms: [],
3502
+ clientIdentity,
3503
+ };
3504
+ newSyncState.syncedTables = tablesToSync
3505
+ .map((tbl) => tbl.name)
3506
+ .concat(tablesToSyncify.map((tbl) => tbl.name));
3507
+ newSyncState.latestRevisions = latestRevisions;
3508
+ newSyncState.remoteDbId = res.dbId;
3509
+ newSyncState.initiallySynced = true;
3510
+ newSyncState.realms = res.realms;
3511
+ newSyncState.inviteRealms = res.inviteRealms;
3512
+ newSyncState.serverRevision = res.serverRevision;
3513
+ newSyncState.timestamp = new Date();
3514
+ delete newSyncState.error;
3515
+ const filteredChanges = filterServerChangesThroughAddedClientChanges(res.changes, addedClientChanges);
3516
+ //
3517
+ // apply server changes
3518
+ //
3519
+ yield applyServerChanges(filteredChanges, db);
3520
+ //
3521
+ // Update syncState
3522
+ //
3523
+ db.$syncState.put(newSyncState, 'syncState');
3524
+ return addedClientChanges.length === 0;
3525
+ }));
3526
+ if (!done) {
3527
+ console.debug('MORE SYNC NEEDED. Go for it again!');
3528
+ return yield _sync(db, options, schema, { isInitialSync, cancelToken });
3529
+ }
3530
+ console.debug('SYNC DONE', { isInitialSync });
3531
+ return false; // Not needed anymore
3480
3532
  });
3481
- if (!done) {
3482
- console.debug('MORE SYNC NEEDED. Go for it again!');
3483
- return await _sync(db, options, schema, { isInitialSync, cancelToken });
3484
- }
3485
- console.debug('SYNC DONE', { isInitialSync });
3486
- return false; // Not needed anymore
3487
- }
3488
- async function deleteObjectsFromRemovedRealms(db, res, prevState) {
3489
- const deletedRealms = new Set();
3490
- const rejectedRealms = new Set();
3491
- const previousRealmSet = prevState ? prevState.realms : [];
3492
- const previousInviteRealmSet = prevState ? prevState.inviteRealms : [];
3493
- const updatedRealmSet = new Set(res.realms);
3494
- const updatedTotalRealmSet = new Set(res.realms.concat(res.inviteRealms));
3495
- for (const realmId of previousRealmSet) {
3496
- if (!updatedRealmSet.has(realmId)) {
3497
- rejectedRealms.add(realmId);
3533
+ }
3534
+ function deleteObjectsFromRemovedRealms(db, res, prevState) {
3535
+ return __awaiter$1(this, void 0, void 0, function* () {
3536
+ const deletedRealms = new Set();
3537
+ const rejectedRealms = new Set();
3538
+ const previousRealmSet = prevState ? prevState.realms : [];
3539
+ const previousInviteRealmSet = prevState ? prevState.inviteRealms : [];
3540
+ const updatedRealmSet = new Set(res.realms);
3541
+ const updatedTotalRealmSet = new Set(res.realms.concat(res.inviteRealms));
3542
+ for (const realmId of previousRealmSet) {
3543
+ if (!updatedRealmSet.has(realmId)) {
3544
+ rejectedRealms.add(realmId);
3545
+ if (!updatedTotalRealmSet.has(realmId)) {
3546
+ deletedRealms.add(realmId);
3547
+ }
3548
+ }
3549
+ }
3550
+ for (const realmId of previousInviteRealmSet.concat(previousRealmSet)) {
3498
3551
  if (!updatedTotalRealmSet.has(realmId)) {
3499
3552
  deletedRealms.add(realmId);
3500
3553
  }
3501
3554
  }
3502
- }
3503
- for (const realmId of previousInviteRealmSet.concat(previousRealmSet)) {
3504
- if (!updatedTotalRealmSet.has(realmId)) {
3505
- deletedRealms.add(realmId);
3506
- }
3507
- }
3508
- if (deletedRealms.size > 0 || rejectedRealms.size > 0) {
3509
- const tables = getSyncableTables(db);
3510
- for (const table of tables) {
3511
- let realmsToDelete = ['realms', 'members', 'roles'].includes(table.name)
3512
- ? deletedRealms // These tables should spare rejected ones.
3513
- : rejectedRealms; // All other tables should delete rejected+deleted ones
3514
- if (realmsToDelete.size === 0)
3515
- continue;
3516
- if (table.schema.indexes.some((idx) => idx.keyPath === 'realmId' ||
3517
- (Array.isArray(idx.keyPath) && idx.keyPath[0] === 'realmId'))) {
3518
- // There's an index to use:
3519
- //console.debug(`REMOVAL: deleting all ${table.name} where realmId anyOf `, JSON.stringify([...realmsToDelete]));
3520
- await table
3521
- .where('realmId')
3522
- .anyOf([...realmsToDelete])
3523
- .delete();
3524
- }
3525
- else {
3526
- // No index to use:
3527
- //console.debug(`REMOVAL: deleting all ${table.name} where realmId is any of `, JSON.stringify([...realmsToDelete]), realmsToDelete.size);
3528
- await table
3529
- .filter((obj) => !!obj?.realmId && realmsToDelete.has(obj.realmId))
3530
- .delete();
3555
+ if (deletedRealms.size > 0 || rejectedRealms.size > 0) {
3556
+ const tables = getSyncableTables(db);
3557
+ for (const table of tables) {
3558
+ let realmsToDelete = ['realms', 'members', 'roles'].includes(table.name)
3559
+ ? deletedRealms // These tables should spare rejected ones.
3560
+ : rejectedRealms; // All other tables should delete rejected+deleted ones
3561
+ if (realmsToDelete.size === 0)
3562
+ continue;
3563
+ if (table.schema.indexes.some((idx) => idx.keyPath === 'realmId' ||
3564
+ (Array.isArray(idx.keyPath) && idx.keyPath[0] === 'realmId'))) {
3565
+ // There's an index to use:
3566
+ //console.debug(`REMOVAL: deleting all ${table.name} where realmId anyOf `, JSON.stringify([...realmsToDelete]));
3567
+ yield table
3568
+ .where('realmId')
3569
+ .anyOf([...realmsToDelete])
3570
+ .delete();
3571
+ }
3572
+ else {
3573
+ // No index to use:
3574
+ //console.debug(`REMOVAL: deleting all ${table.name} where realmId is any of `, JSON.stringify([...realmsToDelete]), realmsToDelete.size);
3575
+ yield table
3576
+ .filter((obj) => !!(obj === null || obj === void 0 ? void 0 : obj.realmId) && realmsToDelete.has(obj.realmId))
3577
+ .delete();
3578
+ }
3531
3579
  }
3532
3580
  }
3533
- }
3581
+ });
3534
3582
  }
3535
3583
  function filterServerChangesThroughAddedClientChanges(serverChanges, addedClientChanges) {
3536
3584
  const changes = {};
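
Most of the hunks above and below follow a single mechanical pattern: the newer (beta.18) bundle appears to be emitted for an older JavaScript target, so native syntax from the previous bundle is down-leveled into helper calls while the surrounding logic stays the same. A minimal illustrative sketch of the recurring rewrites (the names f and g are hypothetical, not taken from the package):

// Source-style code as it appears in the older bundle:
async function f(x) {
    return await g(x.y?.z);
}
// The same code after down-leveling, matching the patterns seen throughout this diff:
function f(x) {
    var _a;
    return __awaiter$1(this, void 0, void 0, function* () {
        return yield g((_a = x.y) === null || _a === void 0 ? void 0 : _a.z);
    });
}
// Likewise, object spread such as { ...a, b: 1 } becomes Object.assign(Object.assign({}, a), { b: 1 }),
// and a bare catch { } becomes catch (_a) { }.

Hunks that change more than syntax are the ones worth reviewing closely.
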
@@ -3548,7 +3596,7 @@ function MessagesFromServerConsumer(db) {
3548
3596
  let isWorking = false;
3549
3597
  let loopWarning = 0;
3550
3598
  let loopDetection = [0, 0, 0, 0, 0, 0, 0, 0, 0, Date.now()];
3551
- event.subscribe(async () => {
3599
+ event.subscribe(() => __awaiter$1(this, void 0, void 0, function* () {
3552
3600
  if (isWorking)
3553
3601
  return;
3554
3602
  if (queue.length > 0) {
@@ -3557,7 +3605,7 @@ function MessagesFromServerConsumer(db) {
3557
3605
  loopDetection.push(Date.now());
3558
3606
  readyToServe.next(false);
3559
3607
  try {
3560
- await consumeQueue();
3608
+ yield consumeQueue();
3561
3609
  }
3562
3610
  finally {
3563
3611
  if (loopDetection[loopDetection.length - 1] - loopDetection[0] <
@@ -3567,170 +3615,173 @@ function MessagesFromServerConsumer(db) {
3567
3615
  // Last time we did this, we ended up here too. Wait for a minute.
3568
3616
  console.warn(`Slowing down websocket loop for one minute`);
3569
3617
  loopWarning = Date.now() + 60000;
3570
- await new Promise((resolve) => setTimeout(resolve, 60000));
3618
+ yield new Promise((resolve) => setTimeout(resolve, 60000));
3571
3619
  }
3572
3620
  else {
3573
3621
  // This is a one-time event. Just pause 10 seconds.
3574
3622
  console.warn(`Slowing down websocket loop for 10 seconds`);
3575
3623
  loopWarning = Date.now() + 10000;
3576
- await new Promise((resolve) => setTimeout(resolve, 10000));
3624
+ yield new Promise((resolve) => setTimeout(resolve, 10000));
3577
3625
  }
3578
3626
  }
3579
3627
  isWorking = false;
3580
3628
  readyToServe.next(true);
3581
3629
  }
3582
3630
  }
3583
- });
3631
+ }));
3584
3632
  function enqueue(msg) {
3585
3633
  queue.push(msg);
3586
3634
  event.next(null);
3587
3635
  }
3588
- async function consumeQueue() {
3589
- while (queue.length > 0) {
3590
- const msg = queue.shift();
3591
- try {
3592
- // If the sync worker or service worker is syncing, wait 'til they're done.
3593
- // There's no need to have two channels at the same time - even though it wouldn't
3594
- // be a problem - this is an optimization.
3595
- await db.cloud.syncState
3596
- .pipe(filter(({ phase }) => phase === 'in-sync' || phase === 'error'), take(1))
3597
- .toPromise();
3598
- console.debug('processing msg', msg);
3599
- const persistedSyncState = db.cloud.persistedSyncState.value;
3600
- //syncState.
3601
- if (!msg)
3602
- continue;
3603
- switch (msg.type) {
3604
- case 'token-expired':
3605
- console.debug('WebSocket observable: Token expired. Refreshing token...');
3606
- const user = db.cloud.currentUser.value;
3607
- // Refresh access token
3608
- const refreshedLogin = await refreshAccessToken(db.cloud.options.databaseUrl, user);
3609
- // Persist updated access token
3610
- await db.table('$logins').update(user.userId, {
3611
- accessToken: refreshedLogin.accessToken,
3612
- accessTokenExpiration: refreshedLogin.accessTokenExpiration,
3613
- });
3614
- // Updating $logins will trigger emission of db.cloud.currentUser observable, which
3615
- // in turn will lead connectWebSocket.ts to reconnect the socket with the
3616
- // new token. So we don't need to do anything more here.
3617
- break;
3618
- case 'realm-added':
3619
- //if (!persistedSyncState?.realms?.includes(msg.realm) && !persistedSyncState?.inviteRealms?.includes(msg.realm)) {
3620
- triggerSync(db, 'pull');
3621
- //}
3622
- break;
3623
- case 'realm-accepted':
3624
- //if (!persistedSyncState?.realms?.includes(msg.realm)) {
3625
- triggerSync(db, 'pull');
3626
- //}
3627
- break;
3628
- case 'realm-removed':
3629
- //if (
3630
- persistedSyncState?.realms?.includes(msg.realm) ||
3631
- persistedSyncState?.inviteRealms?.includes(msg.realm);
3632
- //) {
3633
- triggerSync(db, 'pull');
3634
- //}
3635
- break;
3636
- case 'realms-changed':
3637
- triggerSync(db, 'pull');
3638
- break;
3639
- case 'changes':
3640
- console.debug('changes');
3641
- if (db.cloud.syncState.value?.phase === 'error') {
3636
+ function consumeQueue() {
3637
+ var _a, _b, _c;
3638
+ return __awaiter$1(this, void 0, void 0, function* () {
3639
+ while (queue.length > 0) {
3640
+ const msg = queue.shift();
3641
+ try {
3642
+ // If the sync worker or service worker is syncing, wait 'til they're done.
3643
+ // There's no need to have two channels at the same time - even though it wouldn't
3644
+ // be a problem - this is an optimization.
3645
+ yield db.cloud.syncState
3646
+ .pipe(filter(({ phase }) => phase === 'in-sync' || phase === 'error'), take(1))
3647
+ .toPromise();
3648
+ console.debug('processing msg', msg);
3649
+ const persistedSyncState = db.cloud.persistedSyncState.value;
3650
+ //syncState.
3651
+ if (!msg)
3652
+ continue;
3653
+ switch (msg.type) {
3654
+ case 'token-expired':
3655
+ console.debug('WebSocket observable: Token expired. Refreshing token...');
3656
+ const user = db.cloud.currentUser.value;
3657
+ // Refresh access token
3658
+ const refreshedLogin = yield refreshAccessToken(db.cloud.options.databaseUrl, user);
3659
+ // Persist updated access token
3660
+ yield db.table('$logins').update(user.userId, {
3661
+ accessToken: refreshedLogin.accessToken,
3662
+ accessTokenExpiration: refreshedLogin.accessTokenExpiration,
3663
+ });
3664
+ // Updating $logins will trigger emission of db.cloud.currentUser observable, which
3665
+ // in turn will lead connectWebSocket.ts to reconnect the socket with the
3666
+ // new token. So we don't need to do anything more here.
3667
+ break;
3668
+ case 'realm-added':
3669
+ //if (!persistedSyncState?.realms?.includes(msg.realm) && !persistedSyncState?.inviteRealms?.includes(msg.realm)) {
3642
3670
  triggerSync(db, 'pull');
3671
+ //}
3643
3672
  break;
3644
- }
3645
- await db.transaction('rw', db.dx.tables, async (tx) => {
3646
- // @ts-ignore
3647
- tx.idbtrans.disableChangeTracking = true;
3648
- // @ts-ignore
3649
- tx.idbtrans.disableAccessControl = true;
3650
- const [schema, syncState, currentUser] = await Promise.all([
3651
- db.getSchema(),
3652
- db.getPersistedSyncState(),
3653
- db.getCurrentUser(),
3654
- ]);
3655
- console.debug('ws message queue: in transaction');
3656
- if (!syncState || !schema || !currentUser) {
3657
- console.debug('required vars not present', {
3658
- syncState,
3659
- schema,
3660
- currentUser,
3661
- });
3662
- return; // Initial sync must have taken place - otherwise, ignore this.
3673
+ case 'realm-accepted':
3674
+ //if (!persistedSyncState?.realms?.includes(msg.realm)) {
3675
+ triggerSync(db, 'pull');
3676
+ //}
3677
+ break;
3678
+ case 'realm-removed':
3679
+ //if (
3680
+ ((_a = persistedSyncState === null || persistedSyncState === void 0 ? void 0 : persistedSyncState.realms) === null || _a === void 0 ? void 0 : _a.includes(msg.realm)) ||
3681
+ ((_b = persistedSyncState === null || persistedSyncState === void 0 ? void 0 : persistedSyncState.inviteRealms) === null || _b === void 0 ? void 0 : _b.includes(msg.realm));
3682
+ //) {
3683
+ triggerSync(db, 'pull');
3684
+ //}
3685
+ break;
3686
+ case 'realms-changed':
3687
+ triggerSync(db, 'pull');
3688
+ break;
3689
+ case 'changes':
3690
+ console.debug('changes');
3691
+ if (((_c = db.cloud.syncState.value) === null || _c === void 0 ? void 0 : _c.phase) === 'error') {
3692
+ triggerSync(db, 'pull');
3693
+ break;
3663
3694
  }
3664
- // Verify again in ACID tx that we're on same server revision.
3665
- if (msg.baseRev !== syncState.serverRevision) {
3666
- console.debug(`baseRev (${msg.baseRev}) differs from our serverRevision in syncState (${syncState.serverRevision})`);
3667
- // Should we trigger a sync now? No. This is a normal case
3668
- // when another local peer (such as the SW or a websocket channel on another tab) has
3669
- // updated syncState from new server information but we are not aware yet. It would
3670
- // be unnecessary to do a sync in that case. Instead, the caller of this consumeQueue()
3671
- // function will do readyToServe.next(true) right after this return, which will lead
3672
- // to a "ready" message being sent to server with the new accurate serverRev we have,
3673
- // so that the next message indeed will be correct.
3674
- if (typeof msg.baseRev === 'string' && // v2 format
3675
- (typeof syncState.serverRevision === 'bigint' || // v1 format
3676
- typeof syncState.serverRevision === 'object') // v1 format old browser
3677
- ) {
3678
- // The reason for the diff seems to be that server has migrated the revision format.
3679
- // Do a full sync to update revision format.
3680
- // If we don't do a sync request now, we could get stuck in an endless loop.
3695
+ yield db.transaction('rw', db.dx.tables, (tx) => __awaiter$1(this, void 0, void 0, function* () {
3696
+ // @ts-ignore
3697
+ tx.idbtrans.disableChangeTracking = true;
3698
+ // @ts-ignore
3699
+ tx.idbtrans.disableAccessControl = true;
3700
+ const [schema, syncState, currentUser] = yield Promise.all([
3701
+ db.getSchema(),
3702
+ db.getPersistedSyncState(),
3703
+ db.getCurrentUser(),
3704
+ ]);
3705
+ console.debug('ws message queue: in transaction');
3706
+ if (!syncState || !schema || !currentUser) {
3707
+ console.debug('required vars not present', {
3708
+ syncState,
3709
+ schema,
3710
+ currentUser,
3711
+ });
3712
+ return; // Initial sync must have taken place - otherwise, ignore this.
3713
+ }
3714
+ // Verify again in ACID tx that we're on same server revision.
3715
+ if (msg.baseRev !== syncState.serverRevision) {
3716
+ console.debug(`baseRev (${msg.baseRev}) differs from our serverRevision in syncState (${syncState.serverRevision})`);
3717
+ // Should we trigger a sync now? No. This is a normal case
3718
+ // when another local peer (such as the SW or a websocket channel on another tab) has
3719
+ // updated syncState from new server information but we are not aware yet. It would
3720
+ // be unnecessary to do a sync in that case. Instead, the caller of this consumeQueue()
3721
+ // function will do readyToServe.next(true) right after this return, which will lead
3722
+ // to a "ready" message being sent to server with the new accurate serverRev we have,
3723
+ // so that the next message indeed will be correct.
3724
+ if (typeof msg.baseRev === 'string' && // v2 format
3725
+ (typeof syncState.serverRevision === 'bigint' || // v1 format
3726
+ typeof syncState.serverRevision === 'object') // v1 format old browser
3727
+ ) {
3728
+ // The reason for the diff seems to be that server has migrated the revision format.
3729
+ // Do a full sync to update revision format.
3730
+ // If we don't do a sync request now, we could get stuck in an endless loop.
3731
+ triggerSync(db, 'pull');
3732
+ }
3733
+ return; // Ignore message
3734
+ }
3735
+ // Verify also that the message is based on the exact same set of realms
3736
+ const ourRealmSetHash = yield Dexie.waitFor(
3737
+ // Keep TX in non-IDB work
3738
+ computeRealmSetHash(syncState));
3739
+ console.debug('ourRealmSetHash', ourRealmSetHash);
3740
+ if (ourRealmSetHash !== msg.realmSetHash) {
3741
+ console.debug('not same realmSetHash', msg.realmSetHash);
3681
3742
  triggerSync(db, 'pull');
3743
+ // The message isn't based on the same realms.
3744
+ // Trigger a sync instead to resolve all things up.
3745
+ return;
3682
3746
  }
3683
- return; // Ignore message
3684
- }
3685
- // Verify also that the message is based on the exact same set of realms
3686
- const ourRealmSetHash = await Dexie.waitFor(
3687
- // Keep TX in non-IDB work
3688
- computeRealmSetHash(syncState));
3689
- console.debug('ourRealmSetHash', ourRealmSetHash);
3690
- if (ourRealmSetHash !== msg.realmSetHash) {
3691
- console.debug('not same realmSetHash', msg.realmSetHash);
3692
- triggerSync(db, 'pull');
3693
- // The message isn't based on the same realms.
3694
- // Trigger a sync instead to resolve all things up.
3695
- return;
3696
- }
3697
- // Get clientChanges
3698
- let clientChanges = [];
3699
- if (currentUser.isLoggedIn) {
3700
- const mutationTables = getSyncableTables(db).map((tbl) => db.table(getMutationTable(tbl.name)));
3701
- clientChanges = await listClientChanges(mutationTables, db);
3702
- console.debug('msg queue: client changes', clientChanges);
3703
- }
3704
- if (msg.changes.length > 0) {
3705
- const filteredChanges = filterServerChangesThroughAddedClientChanges(msg.changes, clientChanges);
3747
+ // Get clientChanges
3748
+ let clientChanges = [];
3749
+ if (currentUser.isLoggedIn) {
3750
+ const mutationTables = getSyncableTables(db).map((tbl) => db.table(getMutationTable(tbl.name)));
3751
+ clientChanges = yield listClientChanges(mutationTables, db);
3752
+ console.debug('msg queue: client changes', clientChanges);
3753
+ }
3754
+ if (msg.changes.length > 0) {
3755
+ const filteredChanges = filterServerChangesThroughAddedClientChanges(msg.changes, clientChanges);
3756
+ //
3757
+ // apply server changes
3758
+ //
3759
+ console.debug('applying filtered server changes', filteredChanges);
3760
+ yield applyServerChanges(filteredChanges, db);
3761
+ }
3762
+ // Update latest revisions per table in case there are unsynced changes
3763
+ // This can be a real case in the future when we allow non-eager sync.
3764
+ // And it can actually be realistic now also, but very rare.
3765
+ syncState.latestRevisions = getLatestRevisionsPerTable(clientChanges, syncState.latestRevisions);
3766
+ syncState.serverRevision = msg.newRev;
3767
+ // Update base revs
3768
+ console.debug('Updating baseRefs', syncState.latestRevisions);
3769
+ yield updateBaseRevs(db, schema, syncState.latestRevisions, msg.newRev);
3706
3770
  //
3707
- // apply server changes
3771
+ // Update syncState
3708
3772
  //
3709
- console.debug('applying filtered server changes', filteredChanges);
3710
- await applyServerChanges(filteredChanges, db);
3711
- }
3712
- // Update latest revisions per table in case there are unsynced changes
3713
- // This can be a real case in the future when we allow non-eager sync.
3714
- // And it can actually be realistic now also, but very rare.
3715
- syncState.latestRevisions = getLatestRevisionsPerTable(clientChanges, syncState.latestRevisions);
3716
- syncState.serverRevision = msg.newRev;
3717
- // Update base revs
3718
- console.debug('Updating baseRefs', syncState.latestRevisions);
3719
- await updateBaseRevs(db, schema, syncState.latestRevisions, msg.newRev);
3720
- //
3721
- // Update syncState
3722
- //
3723
- console.debug('Updating syncState', syncState);
3724
- await db.$syncState.put(syncState, 'syncState');
3725
- });
3726
- console.debug('msg queue: done with rw transaction');
3727
- break;
3773
+ console.debug('Updating syncState', syncState);
3774
+ yield db.$syncState.put(syncState, 'syncState');
3775
+ }));
3776
+ console.debug('msg queue: done with rw transaction');
3777
+ break;
3778
+ }
3779
+ }
3780
+ catch (error) {
3781
+ console.error(`Error in msg queue`, error);
3728
3782
  }
3729
3783
  }
3730
- catch (error) {
3731
- console.error(`Error in msg queue`, error);
3732
- }
3733
- }
3784
+ });
3734
3785
  }
3735
3786
  return {
3736
3787
  enqueue,
@@ -3849,100 +3900,105 @@ class AuthPersistedContext {
3849
3900
  lastLogin: new Date(0)
3850
3901
  }));
3851
3902
  }
3852
- async save() {
3853
- const db = wm.get(this);
3854
- db.table("$logins").put(this);
3903
+ save() {
3904
+ return __awaiter$1(this, void 0, void 0, function* () {
3905
+ const db = wm.get(this);
3906
+ db.table("$logins").put(this);
3907
+ });
3855
3908
  }
3856
3909
  }
3857
3910
 
3858
3911
  function otpFetchTokenCallback(db) {
3859
3912
  const { userInteraction } = db.cloud;
3860
- return async function otpAuthenticate({ public_key, hints }) {
3861
- let tokenRequest;
3862
- const url = db.cloud.options?.databaseUrl;
3863
- if (!url)
3864
- throw new Error(`No database URL given.`);
3865
- if (hints?.grant_type === 'demo') {
3866
- const demo_user = await promptForEmail(userInteraction, 'Enter a demo user email', hints?.email || hints?.userId);
3867
- tokenRequest = {
3868
- demo_user,
3869
- grant_type: 'demo',
3870
- scopes: ['ACCESS_DB'],
3871
- public_key,
3872
- };
3873
- }
3874
- else {
3875
- const email = await promptForEmail(userInteraction, 'Enter email address', hints?.email);
3876
- tokenRequest = {
3877
- email,
3878
- grant_type: 'otp',
3879
- scopes: ['ACCESS_DB'],
3880
- public_key,
3881
- };
3882
- }
3883
- const res1 = await fetch(`${url}/token`, {
3884
- body: JSON.stringify(tokenRequest),
3885
- method: 'post',
3886
- headers: { 'Content-Type': 'application/json', mode: 'cors' },
3887
- });
3888
- if (res1.status !== 200) {
3889
- const errMsg = await res1.text();
3890
- await alertUser(userInteraction, "Token request failed", {
3891
- type: 'error',
3892
- messageCode: 'GENERIC_ERROR',
3893
- message: errMsg,
3894
- messageParams: {}
3895
- }).catch(() => { });
3896
- throw new HttpError(res1, errMsg);
3897
- }
3898
- const response = await res1.json();
3899
- if (response.type === 'tokens') {
3900
- // Demo user request can get a "tokens" response right away
3901
- return response;
3902
- }
3903
- else if (tokenRequest.grant_type === 'otp') {
3904
- if (response.type !== 'otp-sent')
3905
- throw new Error(`Unexpected response from ${url}/token`);
3906
- const otp = await promptForOTP(userInteraction, tokenRequest.email);
3907
- tokenRequest.otp = otp || '';
3908
- tokenRequest.otp_id = response.otp_id;
3909
- let res2 = await fetch(`${url}/token`, {
3913
+ return function otpAuthenticate({ public_key, hints }) {
3914
+ var _a;
3915
+ return __awaiter$1(this, void 0, void 0, function* () {
3916
+ let tokenRequest;
3917
+ const url = (_a = db.cloud.options) === null || _a === void 0 ? void 0 : _a.databaseUrl;
3918
+ if (!url)
3919
+ throw new Error(`No database URL given.`);
3920
+ if ((hints === null || hints === void 0 ? void 0 : hints.grant_type) === 'demo') {
3921
+ const demo_user = yield promptForEmail(userInteraction, 'Enter a demo user email', (hints === null || hints === void 0 ? void 0 : hints.email) || (hints === null || hints === void 0 ? void 0 : hints.userId));
3922
+ tokenRequest = {
3923
+ demo_user,
3924
+ grant_type: 'demo',
3925
+ scopes: ['ACCESS_DB'],
3926
+ public_key,
3927
+ };
3928
+ }
3929
+ else {
3930
+ const email = yield promptForEmail(userInteraction, 'Enter email address', hints === null || hints === void 0 ? void 0 : hints.email);
3931
+ tokenRequest = {
3932
+ email,
3933
+ grant_type: 'otp',
3934
+ scopes: ['ACCESS_DB'],
3935
+ public_key,
3936
+ };
3937
+ }
3938
+ const res1 = yield fetch(`${url}/token`, {
3910
3939
  body: JSON.stringify(tokenRequest),
3911
3940
  method: 'post',
3912
- headers: { 'Content-Type': 'application/json' },
3913
- mode: 'cors',
3941
+ headers: { 'Content-Type': 'application/json', mode: 'cors' },
3914
3942
  });
3915
- while (res2.status === 401) {
3916
- const errorText = await res2.text();
3917
- tokenRequest.otp = await promptForOTP(userInteraction, tokenRequest.email, {
3943
+ if (res1.status !== 200) {
3944
+ const errMsg = yield res1.text();
3945
+ yield alertUser(userInteraction, "Token request failed", {
3918
3946
  type: 'error',
3919
- messageCode: 'INVALID_OTP',
3920
- message: errorText,
3947
+ messageCode: 'GENERIC_ERROR',
3948
+ message: errMsg,
3921
3949
  messageParams: {}
3922
- });
3923
- res2 = await fetch(`${url}/token`, {
3950
+ }).catch(() => { });
3951
+ throw new HttpError(res1, errMsg);
3952
+ }
3953
+ const response = yield res1.json();
3954
+ if (response.type === 'tokens') {
3955
+ // Demo user request can get a "tokens" response right away
3956
+ return response;
3957
+ }
3958
+ else if (tokenRequest.grant_type === 'otp') {
3959
+ if (response.type !== 'otp-sent')
3960
+ throw new Error(`Unexpected response from ${url}/token`);
3961
+ const otp = yield promptForOTP(userInteraction, tokenRequest.email);
3962
+ tokenRequest.otp = otp || '';
3963
+ tokenRequest.otp_id = response.otp_id;
3964
+ let res2 = yield fetch(`${url}/token`, {
3924
3965
  body: JSON.stringify(tokenRequest),
3925
3966
  method: 'post',
3926
3967
  headers: { 'Content-Type': 'application/json' },
3927
3968
  mode: 'cors',
3928
3969
  });
3970
+ while (res2.status === 401) {
3971
+ const errorText = yield res2.text();
3972
+ tokenRequest.otp = yield promptForOTP(userInteraction, tokenRequest.email, {
3973
+ type: 'error',
3974
+ messageCode: 'INVALID_OTP',
3975
+ message: errorText,
3976
+ messageParams: {}
3977
+ });
3978
+ res2 = yield fetch(`${url}/token`, {
3979
+ body: JSON.stringify(tokenRequest),
3980
+ method: 'post',
3981
+ headers: { 'Content-Type': 'application/json' },
3982
+ mode: 'cors',
3983
+ });
3984
+ }
3985
+ if (res2.status !== 200) {
3986
+ const errMsg = yield res2.text();
3987
+ yield alertUser(userInteraction, "OTP Authentication Failed", {
3988
+ type: 'error',
3989
+ messageCode: 'GENERIC_ERROR',
3990
+ message: errMsg,
3991
+ messageParams: {}
3992
+ }).catch(() => { });
3993
+ throw new HttpError(res2, errMsg);
3994
+ }
3995
+ const response2 = yield res2.json();
3996
+ return response2;
3929
3997
  }
3930
- if (res2.status !== 200) {
3931
- const errMsg = await res2.text();
3932
- await alertUser(userInteraction, "OTP Authentication Failed", {
3933
- type: 'error',
3934
- messageCode: 'GENERIC_ERROR',
3935
- message: errMsg,
3936
- messageParams: {}
3937
- }).catch(() => { });
3938
- throw new HttpError(res2, errMsg);
3998
+ else {
3999
+ throw new Error(`Unexpected response from ${url}/token`);
3939
4000
  }
3940
- const response2 = await res2.json();
3941
- return response2;
3942
- }
3943
- else {
3944
- throw new Error(`Unexpected response from ${url}/token`);
3945
- }
4001
+ });
3946
4002
  };
3947
4003
  }
3948
4004
 
@@ -3957,83 +4013,87 @@ function otpFetchTokenCallback(db) {
3957
4013
  * @param db
3958
4014
  * @param newUser
3959
4015
  */
3960
- async function setCurrentUser(db, user) {
3961
- if (user.userId === db.cloud.currentUserId)
3962
- return; // Already this user.
3963
- const $logins = db.table('$logins');
3964
- await db.transaction('rw', $logins, async (tx) => {
3965
- const existingLogins = await $logins.toArray();
3966
- await Promise.all(existingLogins
3967
- .filter((login) => login.userId !== user.userId && login.isLoggedIn)
3968
- .map((login) => {
3969
- login.isLoggedIn = false;
3970
- return $logins.put(login);
4016
+ function setCurrentUser(db, user) {
4017
+ return __awaiter$1(this, void 0, void 0, function* () {
4018
+ if (user.userId === db.cloud.currentUserId)
4019
+ return; // Already this user.
4020
+ const $logins = db.table('$logins');
4021
+ yield db.transaction('rw', $logins, (tx) => __awaiter$1(this, void 0, void 0, function* () {
4022
+ const existingLogins = yield $logins.toArray();
4023
+ yield Promise.all(existingLogins
4024
+ .filter((login) => login.userId !== user.userId && login.isLoggedIn)
4025
+ .map((login) => {
4026
+ login.isLoggedIn = false;
4027
+ return $logins.put(login);
4028
+ }));
4029
+ user.isLoggedIn = true;
4030
+ user.lastLogin = new Date();
4031
+ yield user.save();
4032
+ console.debug('Saved new user', user.email);
3971
4033
  }));
3972
- user.isLoggedIn = true;
3973
- user.lastLogin = new Date();
3974
- await user.save();
3975
- console.debug('Saved new user', user.email);
4034
+ yield new Promise((resolve) => {
4035
+ if (db.cloud.currentUserId === user.userId) {
4036
+ resolve(null);
4037
+ }
4038
+ else {
4039
+ const subscription = db.cloud.currentUser.subscribe((currentUser) => {
4040
+ if (currentUser.userId === user.userId) {
4041
+ subscription.unsubscribe();
4042
+ resolve(null);
4043
+ }
4044
+ });
4045
+ }
4046
+ });
4047
+ // THOUGHTS!!!!
4048
+ // V: The service worker will not have access to currentUserObservable unless it is instead derived from a liveQuery.
4049
+ // V: Same goes for other windows.
4050
+ // V: So maybe rework it to be derived from a liveQuery that reads $logins.orderBy('lastLogin').last().
4051
+ // V: Then just be aware that:
4052
+ // V: Such an observable starts fetching data on the first subscribe
4053
+ // V: We have no "initial value" but can emulate it as ANONYMOUS_USER
4054
+ // V: If requireAuth is true, db.on(ready) should keep the database closed for everyone except this observable.
4055
+ // V: If not, it does not need to block.
4056
+ // Other thoughts:
4057
+ // * You cannot switch users while offline. Realms could be moved to a stash table on user change,
4058
+ // but it is probably not worth it.
4059
+ // * Instead: do not advertise switch-user functionality; only talk about logged-in vs not-logged-in mode.
4060
+ // * populate $logins with ANONYMOUS so that a started login does not count, or alternatively have a boolean prop!
4061
+ // Probably best to have a boolean prop!
4062
+ // * Alternative switch-user functionality:
4063
+ // * DBCore hides data from realms the user does not have access to.
4064
+ // * A cursor impl is also needed then.
4065
+ // * That would make user switching fast.
4066
+ // * The claims set sent to the server becomes the union of all claims. The server must then support multiple tokens, or
4067
+ // one's token must be a combined one.
3976
4068
  });
3977
- await new Promise((resolve) => {
3978
- if (db.cloud.currentUserId === user.userId) {
3979
- resolve(null);
3980
- }
3981
- else {
3982
- const subscription = db.cloud.currentUser.subscribe((currentUser) => {
3983
- if (currentUser.userId === user.userId) {
3984
- subscription.unsubscribe();
3985
- resolve(null);
4069
+ }
4070
+
4071
+ function login(db, hints) {
4072
+ return __awaiter$1(this, void 0, void 0, function* () {
4073
+ const currentUser = yield db.getCurrentUser();
4074
+ if (currentUser.isLoggedIn) {
4075
+ if (hints) {
4076
+ if (hints.email && db.cloud.currentUser.value.email !== hints.email) {
4077
+ throw new Error(`Must logout before changing user`);
3986
4078
  }
3987
- });
4079
+ if (hints.userId && db.cloud.currentUserId !== hints.userId) {
4080
+ throw new Error(`Must logout before changing user`);
4081
+ }
4082
+ }
4083
+ // Already authenticated according to given hints.
4084
+ return;
3988
4085
  }
4086
+ const context = new AuthPersistedContext(db, {
4087
+ claims: {},
4088
+ lastLogin: new Date(0),
4089
+ });
4090
+ yield authenticate(db.cloud.options.databaseUrl, context, db.cloud.options.fetchTokens || otpFetchTokenCallback(db), db.cloud.userInteraction, hints);
4091
+ yield context.save();
4092
+ yield setCurrentUser(db, context);
4093
+ // Make sure to resync as the new login will be authorized
4094
+ // for new realms.
4095
+ triggerSync(db, "pull");
3989
4096
  });
3990
- // THOUGHTS!!!!
3991
- // V: The service worker will not have access to currentUserObservable unless it is instead derived from a liveQuery.
3992
- // V: Same goes for other windows.
3993
- // V: So maybe rework it to be derived from a liveQuery that reads $logins.orderBy('lastLogin').last().
3994
- // V: Then just be aware that:
3995
- // V: Such an observable starts fetching data on the first subscribe
3996
- // V: We have no "initial value" but can emulate it as ANONYMOUS_USER
3997
- // V: If requireAuth is true, db.on(ready) should keep the database closed for everyone except this observable.
3998
- // V: If not, it does not need to block.
3999
- // Other thoughts:
4000
- // * You cannot switch users while offline. Realms could be moved to a stash table on user change,
4001
- // but it is probably not worth it.
4002
- // * Instead: do not advertise switch-user functionality; only talk about logged-in vs not-logged-in mode.
4003
- // * populate $logins with ANONYMOUS so that a started login does not count, or alternatively have a boolean prop!
4004
- // Probably best to have a boolean prop!
4005
- // * Alternative switch-user functionality:
4006
- // * DBCore hides data from realms the user does not have access to.
4007
- // * A cursor impl is also needed then.
4008
- // * That would make user switching fast.
4009
- // * The claims set sent to the server becomes the union of all claims. The server must then support multiple tokens, or
4010
- // one's token must be a combined one.
4011
- }
4012
-
4013
- async function login(db, hints) {
4014
- const currentUser = await db.getCurrentUser();
4015
- if (currentUser.isLoggedIn) {
4016
- if (hints) {
4017
- if (hints.email && db.cloud.currentUser.value.email !== hints.email) {
4018
- throw new Error(`Must logout before changing user`);
4019
- }
4020
- if (hints.userId && db.cloud.currentUserId !== hints.userId) {
4021
- throw new Error(`Must logout before changing user`);
4022
- }
4023
- }
4024
- // Already authenticated according to given hints.
4025
- return;
4026
- }
4027
- const context = new AuthPersistedContext(db, {
4028
- claims: {},
4029
- lastLogin: new Date(0),
4030
- });
4031
- await authenticate(db.cloud.options.databaseUrl, context, db.cloud.options.fetchTokens || otpFetchTokenCallback(db), db.cloud.userInteraction, hints);
4032
- await context.save();
4033
- await setCurrentUser(db, context);
4034
- // Make sure to resync as the new login will be authorized
4035
- // for new realms.
4036
- triggerSync(db, "pull");
4037
4097
  }
4038
4098
 
4039
4099
  // @ts-ignore
@@ -4081,9 +4141,10 @@ function toStringTag(o) {
4081
4141
  return toString.call(o).slice(8, -1);
4082
4142
  }
4083
4143
  function getEffectiveKeys(primaryKey, req) {
4144
+ var _a;
4084
4145
  if (req.type === 'delete')
4085
4146
  return req.keys;
4086
- return req.keys?.slice() || req.values.map(primaryKey.extractKey);
4147
+ return ((_a = req.keys) === null || _a === void 0 ? void 0 : _a.slice()) || req.values.map(primaryKey.extractKey);
4087
4148
  }
4088
4149
  function applyToUpperBitFix(orig, bits) {
4089
4150
  return ((bits & 1 ? orig[0].toUpperCase() : orig[0].toLowerCase()) +
@@ -4174,9 +4235,7 @@ function createIdGenerationMiddleware(db) {
4174
4235
  name: 'idGenerationMiddleware',
4175
4236
  level: 1,
4176
4237
  create: (core) => {
4177
- return {
4178
- ...core,
4179
- table: (tableName) => {
4238
+ return Object.assign(Object.assign({}, core), { table: (tableName) => {
4180
4239
  const table = core.table(tableName);
4181
4240
  function generateOrVerifyAtKeys(req, idPrefix) {
4182
4241
  let valueClones = null;
@@ -4202,24 +4261,19 @@ function createIdGenerationMiddleware(db) {
4202
4261
  `If you want to generate IDs programmatically, remove '@' from the schema to get rid of this constraint. Dexie Cloud supports custom IDs as long as they are random and globally unique.`);
4203
4262
  }
4204
4263
  });
4205
- return table.mutate({
4206
- ...req,
4207
- keys,
4208
- values: valueClones || req.values,
4209
- });
4264
+ return table.mutate(Object.assign(Object.assign({}, req), { keys, values: valueClones || req.values }));
4210
4265
  }
4211
- return {
4212
- ...table,
4213
- mutate: (req) => {
4266
+ return Object.assign(Object.assign({}, table), { mutate: (req) => {
4267
+ var _a, _b;
4214
4268
  // @ts-ignore
4215
4269
  if (req.trans.disableChangeTracking) {
4216
4270
  // Disable ID policy checks and ID generation
4217
4271
  return table.mutate(req);
4218
4272
  }
4219
4273
  if (req.type === 'add' || req.type === 'put') {
4220
- const cloudTableSchema = db.cloud.schema?.[tableName];
4221
- if (!cloudTableSchema?.generatedGlobalId) {
4222
- if (cloudTableSchema?.markedForSync) {
4274
+ const cloudTableSchema = (_a = db.cloud.schema) === null || _a === void 0 ? void 0 : _a[tableName];
4275
+ if (!(cloudTableSchema === null || cloudTableSchema === void 0 ? void 0 : cloudTableSchema.generatedGlobalId)) {
4276
+ if (cloudTableSchema === null || cloudTableSchema === void 0 ? void 0 : cloudTableSchema.markedForSync) {
4223
4277
  // Just make sure primary key is of a supported type:
4224
4278
  const keys = getEffectiveKeys(table.schema.primaryKey, req);
4225
4279
  keys.forEach((key, idx) => {
@@ -4233,7 +4287,7 @@ function createIdGenerationMiddleware(db) {
4233
4287
  }
4234
4288
  }
4235
4289
  else {
4236
- if (db.cloud.options?.databaseUrl && !db.initiallySynced) {
4290
+ if (((_b = db.cloud.options) === null || _b === void 0 ? void 0 : _b.databaseUrl) && !db.initiallySynced) {
4237
4291
  // A database URL is configured but no initial sync has been performed.
4238
4292
  const keys = getEffectiveKeys(table.schema.primaryKey, req);
4239
4293
  // Check if the operation would yield any INSERT. If so, complain! We never want wrong ID prefixes stored.
@@ -4254,10 +4308,8 @@ function createIdGenerationMiddleware(db) {
4254
4308
  }
4255
4309
  }
4256
4310
  return table.mutate(req);
4257
- },
4258
- };
4259
- },
4260
- };
4311
+ } });
4312
+ } });
4261
4313
  },
4262
4314
  };
4263
4315
  }
@@ -4268,19 +4320,16 @@ function createImplicitPropSetterMiddleware(db) {
4268
4320
  name: 'implicitPropSetterMiddleware',
4269
4321
  level: 1,
4270
4322
  create: (core) => {
4271
- return {
4272
- ...core,
4273
- table: (tableName) => {
4323
+ return Object.assign(Object.assign({}, core), { table: (tableName) => {
4274
4324
  const table = core.table(tableName);
4275
- return {
4276
- ...table,
4277
- mutate: (req) => {
4325
+ return Object.assign(Object.assign({}, table), { mutate: (req) => {
4326
+ var _a, _b, _c, _d;
4278
4327
  // @ts-ignore
4279
4328
  if (req.trans.disableChangeTracking) {
4280
4329
  return table.mutate(req);
4281
4330
  }
4282
4331
  const trans = req.trans;
4283
- if (db.cloud.schema?.[tableName]?.markedForSync) {
4332
+ if ((_b = (_a = db.cloud.schema) === null || _a === void 0 ? void 0 : _a[tableName]) === null || _b === void 0 ? void 0 : _b.markedForSync) {
4284
4333
  if (req.type === 'add' || req.type === 'put') {
4285
4334
  // No matter if user is logged in or not, make sure "owner" and "realmId" props are set properly.
4286
4335
  // If not logged in, this will be changed upon syncification of the tables (next sync after login),
@@ -4294,7 +4343,7 @@ function createImplicitPropSetterMiddleware(db) {
4294
4343
  if (!obj.realmId) {
4295
4344
  obj.realmId = trans.currentUser.userId;
4296
4345
  }
4297
- const key = table.schema.primaryKey.extractKey?.(obj);
4346
+ const key = (_d = (_c = table.schema.primaryKey).extractKey) === null || _d === void 0 ? void 0 : _d.call(_c, obj);
4298
4347
  if (typeof key === 'string' && key[0] === '#') {
4299
4348
  // Add $ts prop for put operations and
4300
4349
  // disable update operations as well as consistent
@@ -4321,10 +4370,8 @@ function createImplicitPropSetterMiddleware(db) {
4321
4370
  }
4322
4371
  }
4323
4372
  return table.mutate(req);
4324
- },
4325
- };
4326
- },
4327
- };
4373
+ } });
4374
+ } });
4328
4375
  },
4329
4376
  };
4330
4377
  }
@@ -4343,15 +4390,7 @@ function allSettled(possiblePromises) {
4343
4390
  let counter$1 = 0;
4344
4391
  function guardedTable(table) {
4345
4392
  const prop = "$lock" + (++counter$1);
4346
- return {
4347
- ...table,
4348
- count: readLock(table.count, prop),
4349
- get: readLock(table.get, prop),
4350
- getMany: readLock(table.getMany, prop),
4351
- openCursor: readLock(table.openCursor, prop),
4352
- query: readLock(table.query, prop),
4353
- mutate: writeLock(table.mutate, prop),
4354
- };
4393
+ return Object.assign(Object.assign({}, table), { count: readLock(table.count, prop), get: readLock(table.get, prop), getMany: readLock(table.getMany, prop), openCursor: readLock(table.openCursor, prop), query: readLock(table.query, prop), mutate: writeLock(table.mutate, prop) });
4355
4394
  }
4356
4395
  function readLock(fn, prop) {
4357
4396
  return function readLocker(req) {
@@ -4401,16 +4440,14 @@ function createMutationTrackingMiddleware({ currentUserObservable, db }) {
4401
4440
  core.table(`$${tbl.name}_mutations`)
4402
4441
  ]));
4403
4442
  }
4404
- catch {
4443
+ catch (_a) {
4405
4444
  throwVersionIncrementNeeded();
4406
4445
  }
4407
- return {
4408
- ...core,
4409
- transaction: (tables, mode) => {
4446
+ return Object.assign(Object.assign({}, core), { transaction: (tables, mode) => {
4410
4447
  let tx;
4411
4448
  if (mode === 'readwrite') {
4412
4449
  const mutationTables = tables
4413
- .filter((tbl) => db.cloud.schema?.[tbl]?.markedForSync)
4450
+ .filter((tbl) => { var _a, _b; return (_b = (_a = db.cloud.schema) === null || _a === void 0 ? void 0 : _a[tbl]) === null || _b === void 0 ? void 0 : _b.markedForSync; })
4414
4451
  .map((tbl) => getMutationTable(tbl));
4415
4452
  tx = core.transaction([...tables, ...mutationTables], mode);
4416
4453
  }
@@ -4433,7 +4470,8 @@ function createMutationTrackingMiddleware({ currentUserObservable, db }) {
4433
4470
  outstandingTransactions.next(outstandingTransactions.value);
4434
4471
  };
4435
4472
  const txComplete = () => {
4436
- if (tx.mutationsAdded && db.cloud.options?.databaseUrl) {
4473
+ var _a;
4474
+ if (tx.mutationsAdded && ((_a = db.cloud.options) === null || _a === void 0 ? void 0 : _a.databaseUrl)) {
4437
4475
  if (db.cloud.usingServiceWorker) {
4438
4476
  console.debug('registering sync event');
4439
4477
  registerSyncEvent(db, "push");
@@ -4449,8 +4487,7 @@ function createMutationTrackingMiddleware({ currentUserObservable, db }) {
4449
4487
  tx.addEventListener('abort', removeTransaction);
4450
4488
  }
4451
4489
  return tx;
4452
- },
4453
- table: (tableName) => {
4490
+ }, table: (tableName) => {
4454
4491
  const table = core.table(tableName);
4455
4492
  if (/^\$/.test(tableName)) {
4456
4493
  if (tableName.endsWith('_mutations')) {
@@ -4458,20 +4495,15 @@ function createMutationTrackingMiddleware({ currentUserObservable, db }) {
4458
4495
  // make sure to set the mutationsAdded flag on transaction.
4459
4496
  // This is also done in mutateAndLog() as that function talks to a
4460
4497
  // lower level DBCore and wouldn't be caught by this code.
4461
- return {
4462
- ...table,
4463
- mutate: (req) => {
4498
+ return Object.assign(Object.assign({}, table), { mutate: (req) => {
4464
4499
  if (req.type === 'add' || req.type === 'put') {
4465
4500
  req.trans.mutationsAdded = true;
4466
4501
  }
4467
4502
  return table.mutate(req);
4468
- }
4469
- };
4503
+ } });
4470
4504
  }
4471
4505
  else if (tableName === '$logins') {
4472
- return {
4473
- ...table,
4474
- mutate: (req) => {
4506
+ return Object.assign(Object.assign({}, table), { mutate: (req) => {
4475
4507
  //console.debug('Mutating $logins table', req);
4476
4508
  return table
4477
4509
  .mutate(req)
@@ -4485,8 +4517,7 @@ function createMutationTrackingMiddleware({ currentUserObservable, db }) {
4485
4517
  console.debug('Failed mutation $logins', err);
4486
4518
  return Promise.reject(err);
4487
4519
  });
4488
- }
4489
- };
4520
+ } });
4490
4521
  }
4491
4522
  else {
4492
4523
  return table;
@@ -4494,17 +4525,16 @@ function createMutationTrackingMiddleware({ currentUserObservable, db }) {
4494
4525
  }
4495
4526
  const { schema } = table;
4496
4527
  const mutsTable = mutTableMap.get(tableName);
4497
- return guardedTable({
4498
- ...table,
4499
- mutate: (req) => {
4528
+ return guardedTable(Object.assign(Object.assign({}, table), { mutate: (req) => {
4529
+ var _a, _b, _c;
4500
4530
  const trans = req.trans;
4501
4531
  if (!trans.txid)
4502
4532
  return table.mutate(req); // Upgrade transactions not guarded by us.
4503
4533
  if (trans.disableChangeTracking)
4504
4534
  return table.mutate(req);
4505
- if (!db.cloud.schema?.[tableName]?.markedForSync)
4535
+ if (!((_b = (_a = db.cloud.schema) === null || _a === void 0 ? void 0 : _a[tableName]) === null || _b === void 0 ? void 0 : _b.markedForSync))
4506
4536
  return table.mutate(req);
4507
- if (!trans.currentUser?.isLoggedIn) {
4537
+ if (!((_c = trans.currentUser) === null || _c === void 0 ? void 0 : _c.isLoggedIn)) {
4508
4538
  // Unauthorized user should not log mutations.
4509
4539
  // Instead, after login all local data should be logged at once.
4510
4540
  return table.mutate(req);
@@ -4527,8 +4557,7 @@ function createMutationTrackingMiddleware({ currentUserObservable, db }) {
4527
4557
  });
4528
4558
  })
4529
4559
  : mutateAndLog(req);
4530
- }
4531
- });
4560
+ } }));
4532
4561
  function mutateAndLog(req) {
4533
4562
  const trans = req.trans;
4534
4563
  trans.mutationsAdded = true;
@@ -4599,18 +4628,14 @@ function createMutationTrackingMiddleware({ currentUserObservable, db }) {
4599
4628
  : res;
4600
4629
  });
4601
4630
  }
4602
- }
4603
- };
4631
+ } });
4604
4632
  }
4605
4633
  };
4606
4634
  }
4607
4635
 
4608
4636
  function overrideParseStoresSpec(origFunc, dexie) {
4609
4637
  return function (stores, dbSchema) {
4610
- const storesClone = {
4611
- ...DEXIE_CLOUD_SCHEMA,
4612
- ...stores,
4613
- };
4638
+ const storesClone = Object.assign(Object.assign({}, DEXIE_CLOUD_SCHEMA), stores);
4614
4639
  const cloudSchema = dexie.cloud.schema || (dexie.cloud.schema = {});
4615
4640
  const allPrefixes = new Set();
4616
4641
  Object.keys(storesClone).forEach(tableName => {
@@ -4642,110 +4667,15 @@ function overrideParseStoresSpec(origFunc, dexie) {
4642
4667
  };
4643
4668
  }
4644
4669
 
4645
- const SECONDS = 1000;
4646
- const MINUTES = 60 * SECONDS;
4647
-
4648
- const myId = randomString$1(16);
4649
-
4650
- const GUARDED_JOB_HEARTBEAT = 1 * SECONDS;
4651
- const GUARDED_JOB_TIMEOUT = 1 * MINUTES;
4652
- async function performGuardedJob(db, jobName, jobsTableName, job, { awaitRemoteJob } = {}) {
4653
- // Start working.
4654
- //
4655
- // Check if someone else is working on this already.
4656
- //
4657
- const jobsTable = db.table(jobsTableName);
4658
- async function aquireLock() {
4659
- const gotTheLock = await db.transaction('rw!', jobsTableName, async () => {
4660
- const currentWork = await jobsTable.get(jobName);
4661
- if (!currentWork) {
4662
- // No one else is working. Let's record that we are.
4663
- await jobsTable.add({
4664
- nodeId: myId,
4665
- started: new Date(),
4666
- heartbeat: new Date()
4667
- }, jobName);
4668
- return true;
4669
- }
4670
- else if (currentWork.heartbeat.getTime() <
4671
- Date.now() - GUARDED_JOB_TIMEOUT) {
4672
- console.warn(`Latest ${jobName} worker seem to have died.\n`, `The dead job started:`, currentWork.started, `\n`, `Last heart beat was:`, currentWork.heartbeat, '\n', `We're now taking over!`);
4673
- // Now, take over!
4674
- await jobsTable.put({
4675
- nodeId: myId,
4676
- started: new Date(),
4677
- heartbeat: new Date()
4678
- }, jobName);
4679
- return true;
4680
- }
4681
- return false;
4682
- });
4683
- if (gotTheLock)
4684
- return true;
4685
- // Someone else took the job.
4686
- if (awaitRemoteJob) {
4687
- try {
4688
- const jobDoneObservable = from$1(liveQuery(() => jobsTable.get(jobName))).pipe(timeout(GUARDED_JOB_TIMEOUT), filter((job) => !job)); // Wait til job is not there anymore.
4689
- await jobDoneObservable.toPromise();
4690
- return false;
4691
- }
4692
- catch (err) {
4693
- if (err.name !== 'TimeoutError') {
4694
- throw err;
4695
- }
4696
- // Timeout stopped us! Try aquire the lock now.
4697
- // It will likely succeed this time unless
4698
- // another client took it.
4699
- return await aquireLock();
4700
- }
4701
- }
4702
- return false;
4703
- }
4704
- if (await aquireLock()) {
4705
- // We own the lock entry and can do our job undisturbed.
4706
- // We're not within a transaction, but these type of locks
4707
- // spans over transactions.
4708
- // Start our heart beat during the job.
4709
- // Use setInterval to make sure we are updating heartbeat even during long-lived fetch calls.
4710
- const heartbeat = setInterval(() => {
4711
- jobsTable.update(jobName, (job) => {
4712
- if (job.nodeId === myId) {
4713
- job.heartbeat = new Date();
4714
- }
4715
- });
4716
- }, GUARDED_JOB_HEARTBEAT);
4717
- try {
4718
- return await job();
4719
- }
4720
- finally {
4721
- // Stop heartbeat
4722
- clearInterval(heartbeat);
4723
- // Remove the persisted job state:
4724
- await db.transaction('rw!', jobsTableName, async () => {
4725
- const currentWork = await jobsTable.get(jobName);
4726
- if (currentWork && currentWork.nodeId === myId) {
4727
- jobsTable.delete(jobName);
4728
- }
4729
- });
4730
- }
4731
- }
4732
- }
4733
-
4734
- async function performInitialSync(db, cloudOptions, cloudSchema) {
4735
- console.debug("Performing initial sync");
4736
- await performGuardedJob(db, 'initialSync', '$jobs', async () => {
4737
- // Even though caller has already checked it,
4738
- // Do check again (now within a transaction) that we really do not have a sync state:
4739
- const syncState = await db.getPersistedSyncState();
4740
- if (!syncState?.initiallySynced) {
4741
- await sync(db, cloudOptions, cloudSchema, { isInitialSync: true });
4742
- }
4743
- }, { awaitRemoteJob: true } // Don't return until the job is done!
4744
- );
4745
- console.debug("Done initial sync");
4670
+ function performInitialSync(db, cloudOptions, cloudSchema) {
4671
+ return __awaiter$1(this, void 0, void 0, function* () {
4672
+ console.debug('Performing initial sync');
4673
+ yield sync(db, cloudOptions, cloudSchema, { isInitialSync: true });
4674
+ console.debug('Done initial sync');
4675
+ });
4746
4676
  }
4747
4677
 
4748
- const USER_INACTIVITY_TIMEOUT = 300000; // 300_000;
4678
+ const USER_INACTIVITY_TIMEOUT = 180000; // 3 minutes
4749
4679
  const INACTIVE_WAIT_TIME = 20000;
4750
4680
  // This observable will be emitted to later down....
4751
4681
  const userIsActive = new BehaviorSubject(true);
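Two functional changes land in this hunk: `performInitialSync()` no longer wraps the first sync in `performGuardedJob()` (that helper moves further down in the bundle), and `USER_INACTIVITY_TIMEOUT` drops from 300000 ms (5 minutes) to 180000 ms (3 minutes). The `userIsActive`/`userIsReallyActive` pair that follows implements an inactivity debounce: activity propagates immediately, inactivity only after `INACTIVE_WAIT_TIME`. A standalone sketch of that pattern with rxjs in a browser context; the constants and the pipeline mirror the bundle, while the event wiring and idle timer at the end are illustrative assumptions:

```js
import { BehaviorSubject, fromEvent, merge, of } from 'rxjs';
import { delay, distinctUntilChanged, switchMap } from 'rxjs/operators';

const INACTIVE_WAIT_TIME = 20000;       // same value as in the bundle
const USER_INACTIVITY_TIMEOUT = 180000; // new beta.18 value (3 minutes)

const userIsActive = new BehaviorSubject(true);
const userIsReallyActive = new BehaviorSubject(true);

userIsActive
  .pipe(
    // `true` passes through at once; `false` is delayed and gets cancelled
    // by switchMap if activity resumes within the wait window.
    switchMap((isActive) =>
      isActive ? of(true) : of(false).pipe(delay(INACTIVE_WAIT_TIME))
    ),
    distinctUntilChanged()
  )
  .subscribe(userIsReallyActive);

// Illustrative wiring: mark the user active on input and idle after the timeout.
let idleTimer;
merge(fromEvent(window, 'mousedown'), fromEvent(window, 'keydown')).subscribe(() => {
  userIsActive.next(true);
  clearTimeout(idleTimer);
  idleTimer = setTimeout(() => userIsActive.next(false), USER_INACTIVITY_TIMEOUT);
});
```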
@@ -4759,9 +4689,13 @@ const userIsActive = new BehaviorSubject(true);
4759
4689
  // for just a short time.
4760
4690
  const userIsReallyActive = new BehaviorSubject(true);
4761
4691
  userIsActive
4762
- .pipe(switchMap((isActive) => isActive
4763
- ? of(true)
4764
- : of(false).pipe(delay(INACTIVE_WAIT_TIME))), distinctUntilChanged())
4692
+ .pipe(switchMap((isActive) => {
4693
+ //console.debug('SyncStatus: DUBB: isActive changed to', isActive);
4694
+ return isActive
4695
+ ? of(true)
4696
+ : of(false).pipe(delay(INACTIVE_WAIT_TIME))
4697
+ ;
4698
+ }), distinctUntilChanged())
4765
4699
  .subscribe(userIsReallyActive);
4766
4700
  //
4767
4701
  // First create some corner-stone observables to build the flow on
@@ -4776,7 +4710,7 @@ const documentBecomesHidden = visibilityStateIsChanged.pipe(filter(() => documen
4776
4710
  const documentBecomesVisible = visibilityStateIsChanged.pipe(filter(() => document.visibilityState === 'visible'));
4777
4711
  // Any of various user-activity-related events happen:
4778
4712
  const userDoesSomething = typeof window !== 'undefined'
4779
- ? merge(documentBecomesVisible, fromEvent(window, 'mousemove'), fromEvent(window, 'keydown'), fromEvent(window, 'wheel'), fromEvent(window, 'touchmove'))
4713
+ ? merge(documentBecomesVisible, fromEvent(window, 'mousedown'), fromEvent(window, 'mousemove'), fromEvent(window, 'keydown'), fromEvent(window, 'wheel'), fromEvent(window, 'touchmove'))
4780
4714
  : of({});
4781
4715
  if (typeof document !== 'undefined') {
4782
4716
  //
@@ -4827,6 +4761,7 @@ class WSConnection extends Subscription$1 {
4827
4761
  constructor(databaseUrl, rev, realmSetHash, clientIdentity, token, tokenExpiration, subscriber, messageProducer, webSocketStatus) {
4828
4762
  super(() => this.teardown());
4829
4763
  this.id = ++counter;
4764
+ this.reconnecting = false;
4830
4765
  console.debug('New WebSocket Connection', this.id, token ? 'authorized' : 'unauthorized');
4831
4766
  this.databaseUrl = databaseUrl;
4832
4767
  this.rev = rev;
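The new `reconnecting` field initialized in this hunk is used by `reconnect()` (see the later hunk) to coalesce overlapping reconnect requests coming from `onclose`, `onerror` and the ping timer. A stripped-down sketch of that guard, with placeholder `connect()`/`disconnect()` bodies (this is not the addon's actual class, just the pattern it follows):

```js
class ReconnectingSocket {
  constructor() {
    this.reconnecting = false;
  }

  disconnect() {
    // close the underlying socket, clear timers, emit 'disconnected' ...
  }

  async connect() {
    // open the socket; may throw and schedules its own retries on failure ...
  }

  reconnect() {
    if (this.reconnecting) return; // ignore re-entrant calls
    this.reconnecting = true;
    try {
      this.disconnect();
    } catch (_a) {
      // ignore teardown errors
    }
    this.connect()
      .catch(() => {})                          // connect() handles its own errors
      .then(() => (this.reconnecting = false)); // acts as a finally()
  }
}
```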
@@ -4846,7 +4781,7 @@ class WSConnection extends Subscription$1 {
4846
4781
  this.disconnect();
4847
4782
  }
4848
4783
  disconnect() {
4849
- this.webSocketStatus.next("disconnected");
4784
+ this.webSocketStatus.next('disconnected');
4850
4785
  if (this.pinger) {
4851
4786
  clearInterval(this.pinger);
4852
4787
  this.pinger = null;
@@ -4855,7 +4790,7 @@ class WSConnection extends Subscription$1 {
4855
4790
  try {
4856
4791
  this.ws.close();
4857
4792
  }
4858
- catch { }
4793
+ catch (_a) { }
4859
4794
  }
4860
4795
  this.ws = null;
4861
4796
  if (this.messageProducerSubscription) {
@@ -4864,158 +4799,180 @@ class WSConnection extends Subscription$1 {
4864
4799
  }
4865
4800
  }
4866
4801
  reconnect() {
4867
- this.disconnect();
4868
- this.connect();
4869
- }
4870
- async connect() {
4871
- this.webSocketStatus.next("connecting");
4872
- this.lastServerActivity = new Date();
4873
- if (this.pauseUntil && this.pauseUntil > new Date()) {
4874
- console.debug('WS not reconnecting just yet', {
4875
- id: this.id,
4876
- pauseUntil: this.pauseUntil,
4877
- });
4878
- return;
4879
- }
4880
- if (this.ws) {
4881
- throw new Error(`Called connect() when a connection is already open`);
4882
- }
4883
- if (!this.databaseUrl)
4884
- throw new Error(`Cannot connect without a database URL`);
4885
- if (this.closed) {
4886
- return;
4887
- }
4888
- if (this.tokenExpiration && this.tokenExpiration < new Date()) {
4889
- this.subscriber.error(new TokenExpiredError()); // Will be handled in connectWebSocket.ts.
4802
+ if (this.reconnecting)
4890
4803
  return;
4804
+ this.reconnecting = true;
4805
+ try {
4806
+ this.disconnect();
4891
4807
  }
4892
- this.pinger = setInterval(async () => {
4893
- if (this.closed) {
4894
- console.debug('pinger check', this.id, 'CLOSED.');
4895
- this.teardown();
4808
+ catch (_a) { }
4809
+ this.connect()
4810
+ .catch(() => { })
4811
+ .then(() => (this.reconnecting = false)); // finally()
4812
+ }
4813
+ connect() {
4814
+ return __awaiter$1(this, void 0, void 0, function* () {
4815
+ this.lastServerActivity = new Date();
4816
+ if (this.pauseUntil && this.pauseUntil > new Date()) {
4817
+ console.debug('WS not reconnecting just yet', {
4818
+ id: this.id,
4819
+ pauseUntil: this.pauseUntil,
4820
+ });
4896
4821
  return;
4897
4822
  }
4898
4823
  if (this.ws) {
4899
- try {
4900
- this.ws.send(JSON.stringify({ type: 'ping' }));
4901
- setTimeout(() => {
4902
- console.debug('pinger setTimeout', this.id, this.pinger ? `alive` : 'dead');
4903
- if (!this.pinger)
4904
- return;
4905
- if (this.closed) {
4906
- console.debug('pinger setTimeout', this.id, 'subscription is closed');
4907
- this.teardown();
4908
- return;
4909
- }
4910
- if (this.lastServerActivity <
4911
- new Date(Date.now() - SERVER_PING_TIMEOUT)) {
4912
- // Server inactive. Reconnect if user is active.
4913
- console.debug('pinger: server is inactive');
4914
- console.debug('pinger reconnecting');
4915
- this.reconnect();
4916
- }
4917
- else {
4918
- console.debug('pinger: server still active');
4919
- }
4920
- }, SERVER_PING_TIMEOUT);
4921
- }
4922
- catch {
4923
- console.debug('pinger catch error', this.id, 'reconnecting');
4924
- this.reconnect();
4925
- }
4824
+ throw new Error(`Called connect() when a connection is already open`);
4926
4825
  }
4927
- else {
4928
- console.debug('pinger', this.id, 'reconnecting');
4929
- this.reconnect();
4930
- }
4931
- }, CLIENT_PING_INTERVAL);
4932
- // The following vars are needed because we must know which callback to ack when server sends it's ack to us.
4933
- const wsUrl = new URL(this.databaseUrl);
4934
- wsUrl.protocol = wsUrl.protocol === 'http:' ? 'ws' : 'wss';
4935
- const searchParams = new URLSearchParams();
4936
- if (this.subscriber.closed)
4937
- return;
4938
- searchParams.set('v', "2");
4939
- searchParams.set('rev', this.rev);
4940
- searchParams.set('realmsHash', this.realmSetHash);
4941
- searchParams.set('clientId', this.clientIdentity);
4942
- if (this.token) {
4943
- searchParams.set('token', this.token);
4944
- }
4945
- // Connect the WebSocket to given url:
4946
- console.debug('dexie-cloud WebSocket create');
4947
- const ws = (this.ws = new WebSocket(`${wsUrl}/changes?${searchParams}`));
4948
- //ws.binaryType = "arraybuffer"; // For future when subscribing to actual changes.
4949
- ws.onclose = (event) => {
4950
- if (!this.pinger)
4826
+ if (!this.databaseUrl)
4827
+ throw new Error(`Cannot connect without a database URL`);
4828
+ if (this.closed) {
4829
+ //console.debug('SyncStatus: DUBB: Ooops it was closed!');
4951
4830
  return;
4952
- console.debug('dexie-cloud WebSocket onclosed', this.id);
4953
- this.reconnect();
4954
- };
4955
- ws.onmessage = (event) => {
4956
- if (!this.pinger)
4831
+ }
4832
+ if (this.tokenExpiration && this.tokenExpiration < new Date()) {
4833
+ this.subscriber.error(new TokenExpiredError()); // Will be handled in connectWebSocket.ts.
4957
4834
  return;
4958
- console.debug('dexie-cloud WebSocket onmessage', event.data);
4959
- this.lastServerActivity = new Date();
4960
- try {
4961
- const msg = TSON.parse(event.data);
4962
- if (msg.type === 'error') {
4963
- throw new Error(`Error message from dexie-cloud: ${msg.error}`);
4835
+ }
4836
+ this.webSocketStatus.next('connecting');
4837
+ this.pinger = setInterval(() => __awaiter$1(this, void 0, void 0, function* () {
4838
+ if (this.closed) {
4839
+ console.debug('pinger check', this.id, 'CLOSED.');
4840
+ this.teardown();
4841
+ return;
4964
4842
  }
4965
- if (msg.type === 'rev') {
4966
- this.rev = msg.rev; // No meaning but seems reasonable.
4843
+ if (this.ws) {
4844
+ try {
4845
+ this.ws.send(JSON.stringify({ type: 'ping' }));
4846
+ setTimeout(() => {
4847
+ console.debug('pinger setTimeout', this.id, this.pinger ? `alive` : 'dead');
4848
+ if (!this.pinger)
4849
+ return;
4850
+ if (this.closed) {
4851
+ console.debug('pinger setTimeout', this.id, 'subscription is closed');
4852
+ this.teardown();
4853
+ return;
4854
+ }
4855
+ if (this.lastServerActivity <
4856
+ new Date(Date.now() - SERVER_PING_TIMEOUT)) {
4857
+ // Server inactive. Reconnect if user is active.
4858
+ console.debug('pinger: server is inactive');
4859
+ console.debug('pinger reconnecting');
4860
+ this.reconnect();
4861
+ }
4862
+ else {
4863
+ console.debug('pinger: server still active');
4864
+ }
4865
+ }, SERVER_PING_TIMEOUT);
4866
+ }
4867
+ catch (_a) {
4868
+ console.debug('pinger catch error', this.id, 'reconnecting');
4869
+ this.reconnect();
4870
+ }
4967
4871
  }
4968
- if (msg.type !== 'pong') {
4969
- this.subscriber.next(msg);
4872
+ else {
4873
+ console.debug('pinger', this.id, 'reconnecting');
4874
+ this.reconnect();
4970
4875
  }
4876
+ }), CLIENT_PING_INTERVAL);
4877
+ // The following vars are needed because we must know which callback to ack when server sends it's ack to us.
4878
+ const wsUrl = new URL(this.databaseUrl);
4879
+ wsUrl.protocol = wsUrl.protocol === 'http:' ? 'ws' : 'wss';
4880
+ const searchParams = new URLSearchParams();
4881
+ if (this.subscriber.closed)
4882
+ return;
4883
+ searchParams.set('v', '2');
4884
+ searchParams.set('rev', this.rev);
4885
+ searchParams.set('realmsHash', this.realmSetHash);
4886
+ searchParams.set('clientId', this.clientIdentity);
4887
+ if (this.token) {
4888
+ searchParams.set('token', this.token);
4971
4889
  }
4972
- catch (e) {
4973
- this.subscriber.error(e);
4974
- }
4975
- };
4976
- try {
4977
- await new Promise((resolve, reject) => {
4978
- ws.onopen = (event) => {
4979
- console.debug('dexie-cloud WebSocket onopen');
4980
- resolve(null);
4981
- };
4982
- ws.onerror = (event) => {
4983
- const error = event.error || new Error('WebSocket Error');
4984
- this.disconnect();
4985
- this.subscriber.error(error);
4986
- this.webSocketStatus.next("error");
4987
- reject(error);
4988
- };
4989
- });
4990
- this.messageProducerSubscription = this.messageProducer.subscribe(msg => {
4991
- if (!this.closed) {
4992
- if (msg.type === 'ready' && this.webSocketStatus.value !== 'connected') {
4993
- this.webSocketStatus.next("connected");
4890
+ // Connect the WebSocket to given url:
4891
+ console.debug('dexie-cloud WebSocket create');
4892
+ const ws = (this.ws = new WebSocket(`${wsUrl}/changes?${searchParams}`));
4893
+ //ws.binaryType = "arraybuffer"; // For future when subscribing to actual changes.
4894
+ ws.onclose = (event) => {
4895
+ if (!this.pinger)
4896
+ return;
4897
+ console.debug('dexie-cloud WebSocket onclosed', this.id);
4898
+ this.reconnect();
4899
+ };
4900
+ ws.onmessage = (event) => {
4901
+ if (!this.pinger)
4902
+ return;
4903
+ console.debug('dexie-cloud WebSocket onmessage', event.data);
4904
+ this.lastServerActivity = new Date();
4905
+ try {
4906
+ const msg = TSON.parse(event.data);
4907
+ if (msg.type === 'error') {
4908
+ throw new Error(`Error message from dexie-cloud: ${msg.error}`);
4909
+ }
4910
+ if (msg.type === 'rev') {
4911
+ this.rev = msg.rev; // No meaning but seems reasonable.
4912
+ }
4913
+ if (msg.type !== 'pong') {
4914
+ this.subscriber.next(msg);
4994
4915
  }
4995
- this.ws?.send(TSON.stringify(msg));
4996
4916
  }
4997
- });
4998
- }
4999
- catch (error) {
5000
- this.pauseUntil = new Date(Date.now() + FAIL_RETRY_WAIT_TIME);
5001
- }
4917
+ catch (e) {
4918
+ this.subscriber.error(e);
4919
+ }
4920
+ };
4921
+ try {
4922
+ let everConnected = false;
4923
+ yield new Promise((resolve, reject) => {
4924
+ ws.onopen = (event) => {
4925
+ console.debug('dexie-cloud WebSocket onopen');
4926
+ everConnected = true;
4927
+ resolve(null);
4928
+ };
4929
+ ws.onerror = (event) => {
4930
+ if (!everConnected) {
4931
+ const error = event.error || new Error('WebSocket Error');
4932
+ this.subscriber.error(error);
4933
+ this.webSocketStatus.next('error');
4934
+ reject(error);
4935
+ }
4936
+ else {
4937
+ this.reconnect();
4938
+ }
4939
+ };
4940
+ });
4941
+ this.messageProducerSubscription = this.messageProducer.subscribe((msg) => {
4942
+ var _a;
4943
+ if (!this.closed) {
4944
+ if (msg.type === 'ready' &&
4945
+ this.webSocketStatus.value !== 'connected') {
4946
+ this.webSocketStatus.next('connected');
4947
+ }
4948
+ (_a = this.ws) === null || _a === void 0 ? void 0 : _a.send(TSON.stringify(msg));
4949
+ }
4950
+ });
4951
+ }
4952
+ catch (error) {
4953
+ this.pauseUntil = new Date(Date.now() + FAIL_RETRY_WAIT_TIME);
4954
+ }
4955
+ });
5002
4956
  }
5003
4957
  }
5004
4958
 
5005
4959
  function sleep$1(ms) {
5006
4960
  return new Promise((resolve) => setTimeout(resolve, ms));
5007
4961
  }
5008
- async function waitAndReconnectWhenUserDoesSomething(error) {
5009
- console.error(`WebSocket observable: error but revive when user does some active thing...`, error);
5010
- // Sleep some seconds...
5011
- await sleep$1(3000);
5012
- // Wait til user does something (move mouse, tap, scroll, click etc)
5013
- console.debug('waiting for someone to do something');
5014
- await userDoesSomething.pipe(take(1)).toPromise();
5015
- console.debug('someone did something!');
4962
+ function waitAndReconnectWhenUserDoesSomething(error) {
4963
+ return __awaiter$1(this, void 0, void 0, function* () {
4964
+ console.error(`WebSocket observable: error but revive when user does some active thing...`, error);
4965
+ // Sleep some seconds...
4966
+ yield sleep$1(3000);
4967
+ // Wait til user does something (move mouse, tap, scroll, click etc)
4968
+ console.debug('waiting for someone to do something');
4969
+ yield userDoesSomething.pipe(take(1)).toPromise();
4970
+ console.debug('someone did something!');
4971
+ });
5016
4972
  }
5017
4973
  function connectWebSocket(db) {
5018
- if (!db.cloud.options?.databaseUrl) {
4974
+ var _a;
4975
+ if (!((_a = db.cloud.options) === null || _a === void 0 ? void 0 : _a.databaseUrl)) {
5019
4976
  throw new Error(`No database URL to connect WebSocket to`);
5020
4977
  }
5021
4978
  const messageProducer = db.messageConsumer.readyToServe.pipe(filter((isReady) => isReady), // When consumer is ready for new messages, produce such a message to inform server about it
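The dominant change in the large hunk above is how `connect()` treats socket errors: a new `everConnected` flag means an error before `onopen` still rejects the connect promise and surfaces the error (status `'error'`), while an error after a successful open just calls `reconnect()` instead of tearing the subscriber down. A reduced sketch of that handshake, assuming a browser `WebSocket`; the function name, URL and callbacks are illustrative:

```js
function openWithRetrySemantics(url, { onFatalError, scheduleReconnect }) {
  const ws = new WebSocket(url);
  let everConnected = false;

  return new Promise((resolve, reject) => {
    ws.onopen = () => {
      everConnected = true;
      resolve(ws);
    };
    ws.onerror = (event) => {
      if (!everConnected) {
        // The initial handshake failed: fail the connect() attempt loudly.
        const error = event.error || new Error('WebSocket Error');
        onFatalError(error);
        reject(error);
      } else {
        // The connection was up and then broke: quietly reconnect instead.
        scheduleReconnect();
      }
    };
  });
}

// Usage sketch (illustrative URL):
// const ws = await openWithRetrySemantics('wss://example.dexie.cloud/changes', {
//   onFatalError: (err) => console.error(err),
//   scheduleReconnect: () => console.debug('reconnect soon'),
// });
```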
@@ -5027,32 +4984,35 @@ function connectWebSocket(db) {
5027
4984
  rev: syncState.serverRevision,
5028
4985
  })));
5029
4986
  function createObservable() {
5030
- return db.cloud.persistedSyncState.pipe(filter(syncState => syncState?.serverRevision), // Don't connect before there's no initial sync performed.
4987
+ return db.cloud.persistedSyncState.pipe(filter((syncState) => syncState === null || syncState === void 0 ? void 0 : syncState.serverRevision), // Don't connect before there's no initial sync performed.
5031
4988
  take(1), // Don't continue waking up whenever syncState change
5032
- switchMap((syncState) => db.cloud.currentUser.pipe(map(userLogin => [userLogin, syncState]))), switchMap(([userLogin, syncState]) => userIsReallyActive.pipe(map((isActive) => [isActive ? userLogin : null, syncState]))), switchMap(async ([userLogin, syncState]) => [userLogin, await computeRealmSetHash(syncState)]), switchMap(([userLogin, realmSetHash]) =>
4989
+ switchMap((syncState) => db.cloud.currentUser.pipe(map((userLogin) => [userLogin, syncState]))), switchMap(([userLogin, syncState]) => userIsReallyActive.pipe(map((isActive) => [isActive ? userLogin : null, syncState]))), switchMap(([userLogin, syncState]) => __awaiter$1(this, void 0, void 0, function* () { return [userLogin, yield computeRealmSetHash(syncState)]; })), switchMap(([userLogin, realmSetHash]) =>
5033
4990
  // Let server end query changes from last entry of same client-ID and forward.
5034
4991
  // If no new entries, server won't bother the client. If new entries, server sends only those
5035
4992
  // and the baseRev of the last from same client-ID.
5036
4993
  userLogin
5037
4994
  ? new WSObservable(db.cloud.options.databaseUrl, db.cloud.persistedSyncState.value.serverRevision, realmSetHash, db.cloud.persistedSyncState.value.clientIdentity, messageProducer, db.cloud.webSocketStatus, userLogin.accessToken, userLogin.accessTokenExpiration)
5038
4995
  : from$1([])), catchError((error) => {
5039
- if (error?.name === 'TokenExpiredError') {
4996
+ if ((error === null || error === void 0 ? void 0 : error.name) === 'TokenExpiredError') {
5040
4997
  console.debug('WebSocket observable: Token expired. Refreshing token...');
5041
- return of(true).pipe(switchMap(async () => {
4998
+ return of(true).pipe(switchMap(() => __awaiter$1(this, void 0, void 0, function* () {
5042
4999
  // Refresh access token
5043
- const user = await db.getCurrentUser();
5044
- const refreshedLogin = await refreshAccessToken(db.cloud.options.databaseUrl, user);
5000
+ const user = yield db.getCurrentUser();
5001
+ const refreshedLogin = yield refreshAccessToken(db.cloud.options.databaseUrl, user);
5045
5002
  // Persist updated access token
5046
- await db.table('$logins').update(user.userId, {
5003
+ yield db.table('$logins').update(user.userId, {
5047
5004
  accessToken: refreshedLogin.accessToken,
5048
5005
  accessTokenExpiration: refreshedLogin.accessTokenExpiration,
5049
5006
  });
5050
- }), switchMap(() => createObservable()));
5007
+ })), switchMap(() => createObservable()));
5051
5008
  }
5052
5009
  else {
5053
5010
  return throwError(error);
5054
5011
  }
5055
- }), catchError((error) => from$1(waitAndReconnectWhenUserDoesSomething(error)).pipe(switchMap(() => createObservable()))));
5012
+ }), catchError((error) => {
5013
+ db.cloud.webSocketStatus.next("error");
5014
+ return from$1(waitAndReconnectWhenUserDoesSomething(error)).pipe(switchMap(() => createObservable()));
5015
+ }));
5056
5016
  }
5057
5017
  return createObservable().subscribe((msg) => {
5058
5018
  if (msg) {
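Two error paths are distinguished in `createObservable()` above: a `TokenExpiredError` refreshes the access token through `refreshAccessToken()`, persists it to `$logins` and resubscribes, while every other error now also pushes `'error'` onto `db.cloud.webSocketStatus` before waiting for user activity and retrying. A sketch of that catch/refresh/resubscribe shape with rxjs; `createSource`, `refreshToken` and `onOtherError` are stand-ins for the addon's internals, and the `throwError` factory form assumes rxjs 7:

```js
import { of, throwError } from 'rxjs';
import { catchError, switchMap } from 'rxjs/operators';

function connectWithTokenRefresh(createSource, refreshToken, onOtherError) {
  const create = () =>
    createSource().pipe(
      catchError((error) => {
        if (error && error.name === 'TokenExpiredError') {
          // Refresh and persist the token, then build a fresh source observable.
          return of(true).pipe(
            switchMap(() => refreshToken()), // a Promise is accepted as ObservableInput
            switchMap(() => create())
          );
        }
        onOtherError(error); // e.g. webSocketStatus.next('error')
        return throwError(() => error);
      })
    );
  return create();
}
```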
@@ -5066,17 +5026,113 @@ function connectWebSocket(db) {
5066
5026
  });
5067
5027
  }
5068
5028
 
5069
- async function isSyncNeeded(db) {
5070
- return db.cloud.options?.databaseUrl && db.cloud.schema
5071
- ? await sync(db, db.cloud.options, db.cloud.schema, { justCheckIfNeeded: true })
5072
- : false;
5029
+ function isSyncNeeded(db) {
5030
+ var _a;
5031
+ return __awaiter$1(this, void 0, void 0, function* () {
5032
+ return ((_a = db.cloud.options) === null || _a === void 0 ? void 0 : _a.databaseUrl) && db.cloud.schema
5033
+ ? yield sync(db, db.cloud.options, db.cloud.schema, { justCheckIfNeeded: true })
5034
+ : false;
5035
+ });
5036
+ }
5037
+
5038
+ const SECONDS = 1000;
5039
+ const MINUTES = 60 * SECONDS;
5040
+
5041
+ const myId = randomString$1(16);
5042
+
5043
+ const GUARDED_JOB_HEARTBEAT = 1 * SECONDS;
5044
+ const GUARDED_JOB_TIMEOUT = 1 * MINUTES;
5045
+ function performGuardedJob(db, jobName, jobsTableName, job, { awaitRemoteJob } = {}) {
5046
+ return __awaiter$1(this, void 0, void 0, function* () {
5047
+ // Start working.
5048
+ //
5049
+ // Check if someone else is working on this already.
5050
+ //
5051
+ const jobsTable = db.table(jobsTableName);
5052
+ function aquireLock() {
5053
+ return __awaiter$1(this, void 0, void 0, function* () {
5054
+ const gotTheLock = yield db.transaction('rw!', jobsTableName, () => __awaiter$1(this, void 0, void 0, function* () {
5055
+ const currentWork = yield jobsTable.get(jobName);
5056
+ if (!currentWork) {
5057
+ // No one else is working. Let's record that we are.
5058
+ yield jobsTable.add({
5059
+ nodeId: myId,
5060
+ started: new Date(),
5061
+ heartbeat: new Date()
5062
+ }, jobName);
5063
+ return true;
5064
+ }
5065
+ else if (currentWork.heartbeat.getTime() <
5066
+ Date.now() - GUARDED_JOB_TIMEOUT) {
5067
+ console.warn(`Latest ${jobName} worker seem to have died.\n`, `The dead job started:`, currentWork.started, `\n`, `Last heart beat was:`, currentWork.heartbeat, '\n', `We're now taking over!`);
5068
+ // Now, take over!
5069
+ yield jobsTable.put({
5070
+ nodeId: myId,
5071
+ started: new Date(),
5072
+ heartbeat: new Date()
5073
+ }, jobName);
5074
+ return true;
5075
+ }
5076
+ return false;
5077
+ }));
5078
+ if (gotTheLock)
5079
+ return true;
5080
+ // Someone else took the job.
5081
+ if (awaitRemoteJob) {
5082
+ try {
5083
+ const jobDoneObservable = from$1(liveQuery(() => jobsTable.get(jobName))).pipe(timeout(GUARDED_JOB_TIMEOUT), filter((job) => !job)); // Wait til job is not there anymore.
5084
+ yield jobDoneObservable.toPromise();
5085
+ return false;
5086
+ }
5087
+ catch (err) {
5088
+ if (err.name !== 'TimeoutError') {
5089
+ throw err;
5090
+ }
5091
+ // Timeout stopped us! Try aquire the lock now.
5092
+ // It will likely succeed this time unless
5093
+ // another client took it.
5094
+ return yield aquireLock();
5095
+ }
5096
+ }
5097
+ return false;
5098
+ });
5099
+ }
5100
+ if (yield aquireLock()) {
5101
+ // We own the lock entry and can do our job undisturbed.
5102
+ // We're not within a transaction, but these type of locks
5103
+ // spans over transactions.
5104
+ // Start our heart beat during the job.
5105
+ // Use setInterval to make sure we are updating heartbeat even during long-lived fetch calls.
5106
+ const heartbeat = setInterval(() => {
5107
+ jobsTable.update(jobName, (job) => {
5108
+ if (job.nodeId === myId) {
5109
+ job.heartbeat = new Date();
5110
+ }
5111
+ });
5112
+ }, GUARDED_JOB_HEARTBEAT);
5113
+ try {
5114
+ return yield job();
5115
+ }
5116
+ finally {
5117
+ // Stop heartbeat
5118
+ clearInterval(heartbeat);
5119
+ // Remove the persisted job state:
5120
+ yield db.transaction('rw!', jobsTableName, () => __awaiter$1(this, void 0, void 0, function* () {
5121
+ const currentWork = yield jobsTable.get(jobName);
5122
+ if (currentWork && currentWork.nodeId === myId) {
5123
+ yield jobsTable.delete(jobName);
5124
+ }
5125
+ }));
5126
+ }
5127
+ }
5128
+ });
5073
5129
  }
5074
5130
 
5075
5131
  const ongoingSyncs = new WeakMap();
5076
5132
  function syncIfPossible(db, cloudOptions, cloudSchema, options) {
5077
5133
  const ongoing = ongoingSyncs.get(db);
5078
5134
  if (ongoing) {
5079
- if (ongoing.pull || options?.purpose === 'push') {
5135
+ if (ongoing.pull || (options === null || options === void 0 ? void 0 : options.purpose) === 'push') {
5080
5136
  console.debug('syncIfPossible(): returning the ongoing sync promise.');
5081
5137
  return ongoing.promise;
5082
5138
  }
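`performGuardedJob()` reappears below (it moved within the bundle) and is still the cross-tab mutex used when no service worker coordinates sync: the lock lives in the `$jobs` table, is taken inside an exclusive `'rw!'` transaction, is heart-beaten every second, can be taken over once the previous holder's heartbeat is older than a minute, and is deleted when the job finishes. A compact sketch of the same idea against a plain Dexie table; database and table names are illustrative, not the addon's:

```js
import Dexie from 'dexie';

const db = new Dexie('guarded-jobs-demo');
db.version(1).stores({ jobs: '' }); // out-of-line primary key = the job name

const NODE_ID = Math.random().toString(36).slice(2);
const HEARTBEAT_MS = 1000; // GUARDED_JOB_HEARTBEAT in the addon
const TIMEOUT_MS = 60000;  // GUARDED_JOB_TIMEOUT in the addon

async function runGuarded(jobName, job) {
  // Take (or take over) the lock in an exclusive top-level transaction.
  const gotLock = await db.transaction('rw!', db.jobs, async () => {
    const current = await db.jobs.get(jobName);
    if (current && current.heartbeat.getTime() >= Date.now() - TIMEOUT_MS) {
      return false; // someone else holds a live lock
    }
    await db.jobs.put({ nodeId: NODE_ID, started: new Date(), heartbeat: new Date() }, jobName);
    return true;
  });
  if (!gotLock) return false;

  // Keep the lock alive while the job runs (callback form of Table.update, as in the addon).
  const beat = setInterval(() => {
    db.jobs.update(jobName, (j) => {
      if (j.nodeId === NODE_ID) j.heartbeat = new Date();
    });
  }, HEARTBEAT_MS);

  try {
    await job();
    return true;
  } finally {
    clearInterval(beat);
    // Release the lock only if we still own it.
    await db.transaction('rw!', db.jobs, async () => {
      const current = await db.jobs.get(jobName);
      if (current && current.nodeId === NODE_ID) await db.jobs.delete(jobName);
    });
  }
}
```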
@@ -5118,32 +5174,34 @@ function syncIfPossible(db, cloudOptions, cloudSchema, options) {
5118
5174
  }
5119
5175
  }
5120
5176
  const promise = _syncIfPossible();
5121
- ongoingSyncs.set(db, { promise, pull: options?.purpose !== 'push' });
5177
+ ongoingSyncs.set(db, { promise, pull: (options === null || options === void 0 ? void 0 : options.purpose) !== 'push' });
5122
5178
  return promise;
5123
- async function _syncIfPossible() {
5124
- try {
5125
- if (db.cloud.usingServiceWorker) {
5126
- if (IS_SERVICE_WORKER) {
5127
- await sync(db, cloudOptions, cloudSchema, options);
5179
+ function _syncIfPossible() {
5180
+ return __awaiter$1(this, void 0, void 0, function* () {
5181
+ try {
5182
+ if (db.cloud.usingServiceWorker) {
5183
+ if (IS_SERVICE_WORKER) {
5184
+ yield sync(db, cloudOptions, cloudSchema, options);
5185
+ }
5186
+ }
5187
+ else {
5188
+ // We use a flow that is better suited for the case when multiple workers want to
5189
+ // do the same thing.
5190
+ yield performGuardedJob(db, CURRENT_SYNC_WORKER, '$jobs', () => sync(db, cloudOptions, cloudSchema, options));
5128
5191
  }
5192
+ ongoingSyncs.delete(db);
5193
+ console.debug('Done sync');
5129
5194
  }
5130
- else {
5131
- // We use a flow that is better suited for the case when multiple workers want to
5132
- // do the same thing.
5133
- await performGuardedJob(db, CURRENT_SYNC_WORKER, '$jobs', () => sync(db, cloudOptions, cloudSchema, options));
5195
+ catch (error) {
5196
+ ongoingSyncs.delete(db);
5197
+ console.error(`Failed to sync client changes`, error);
5198
+ throw error; // Make sure we rethrow error so that sync event is retried.
5199
+ // I don't think we should setTimout or so here.
5200
+ // Unless server tells us to in some response.
5201
+ // Then we could follow that advice but not by waiting here but by registering
5202
+ // Something that triggers an event listened to in startPushWorker()
5134
5203
  }
5135
- ongoingSyncs.delete(db);
5136
- console.debug('Done sync');
5137
- }
5138
- catch (error) {
5139
- ongoingSyncs.delete(db);
5140
- console.error(`Failed to sync client changes`, error);
5141
- throw error; // Make sure we rethrow error so that sync event is retried.
5142
- // I don't think we should setTimout or so here.
5143
- // Unless server tells us to in some response.
5144
- // Then we could follow that advice but not by waiting here but by registering
5145
- // Something that triggers an event listened to in startPushWorker()
5146
- }
5204
+ });
5147
5205
  }
5148
5206
  }
5149
5207
 
@@ -5213,8 +5271,9 @@ function updateSchemaFromOptions(schema, options) {
5213
5271
  }
5214
5272
 
5215
5273
  function verifySchema(db) {
5274
+ var _a, _b;
5216
5275
  for (const table of db.tables) {
5217
- if (db.cloud.schema?.[table.name]?.markedForSync) {
5276
+ if ((_b = (_a = db.cloud.schema) === null || _a === void 0 ? void 0 : _a[table.name]) === null || _b === void 0 ? void 0 : _b.markedForSync) {
5218
5277
  if (table.schema.primKey.auto) {
5219
5278
  throw new Dexie.SchemaError(`Table ${table.name} is both autoIncremented and synced. ` +
5220
5279
  `Use db.cloud.configure({unsyncedTables: [${JSON.stringify(table.name)}]}) to blacklist it from sync`);
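`verifySchema()` still rejects tables that are both auto-incremented (`++` primary key) and marked for sync, since locally generated numeric keys would collide across devices. On the application side the fix is to give synced tables a globally unique string key (the `'@'` prefix asks dexie-cloud to generate one) or to blacklist the table from sync, as the error message suggests. A hedged example of both options; the table names and the database URL are illustrative:

```js
import Dexie from 'dexie';
import dexieCloud from 'dexie-cloud-addon';

const db = new Dexie('mydb', { addons: [dexieCloud] });

db.version(1).stores({
  todoItems: '@id, title', // '@' = cloud-generated global id => syncable
  localLog: '++id, when',  // auto-increment is fine for local-only data
});

db.cloud.configure({
  databaseUrl: 'https://zXXXXXXXX.dexie.cloud', // illustrative URL
  unsyncedTables: ['localLog'],                 // keep the ++id table out of sync
});
```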
@@ -5307,7 +5366,7 @@ function resolveText({ message, messageCode, messageParams }) {
5307
5366
  function LoginDialog({ title, alerts, fields, onCancel, onSubmit, }) {
5308
5367
  const [params, setParams] = l({});
5309
5368
  const firstFieldRef = s();
5310
- h(() => firstFieldRef.current?.focus(), []);
5369
+ h(() => { var _a; return (_a = firstFieldRef.current) === null || _a === void 0 ? void 0 : _a.focus(); }, []);
5311
5370
  return (a$1(Dialog, null,
5312
5371
  a$1(y, null,
5313
5372
  a$1("h3", { style: Styles.WindowHeader }, title),
@@ -5317,7 +5376,7 @@ function LoginDialog({ title, alerts, fields, onCancel, onSubmit, }) {
5317
5376
  onSubmit(params);
5318
5377
  } }, Object.entries(fields).map(([fieldName, { type, label, placeholder }], idx) => (a$1("label", { style: Styles.Label },
5319
5378
  label ? `${label}: ` : '',
5320
- a$1("input", { ref: idx === 0 ? firstFieldRef : undefined, type: type, name: fieldName, autoComplete: "on", style: Styles.Input, autoFocus: true, placeholder: placeholder, value: params[fieldName] || '', onInput: (ev) => setParams({ ...params, [fieldName]: valueTransformer(type, ev.target?.['value']) }) })))))),
5379
+ a$1("input", { ref: idx === 0 ? firstFieldRef : undefined, type: type, name: fieldName, autoComplete: "on", style: Styles.Input, autoFocus: true, placeholder: placeholder, value: params[fieldName] || '', onInput: (ev) => { var _a; return setParams(Object.assign(Object.assign({}, params), { [fieldName]: valueTransformer(type, (_a = ev.target) === null || _a === void 0 ? void 0 : _a['value']) })); } })))))),
5321
5380
  a$1("div", { style: Styles.ButtonsDiv },
5322
5381
  a$1("button", { type: "submit", style: Styles.Button, onClick: () => onSubmit(params) }, "Submit"),
5323
5382
  a$1("button", { style: Styles.Button, onClick: onCancel }, "Cancel"))));
@@ -5349,7 +5408,7 @@ class LoginGui extends p$1 {
5349
5408
  if (!userInteraction)
5350
5409
  return null;
5351
5410
  //if (props.db.cloud.userInteraction.observers.length > 1) return null; // Someone else subscribes.
5352
- return a$1(LoginDialog, { ...userInteraction });
5411
+ return a$1(LoginDialog, Object.assign({}, userInteraction));
5353
5412
  }
5354
5413
  }
5355
5414
  function setupDefaultGUI(db) {
@@ -5476,6 +5535,21 @@ function createSharedValueObservable(o, defaultValue) {
5476
5535
  return rv;
5477
5536
  }
5478
5537
 
5538
+ const getGlobalRolesObservable = associate((db) => {
5539
+ return createSharedValueObservable(liveQuery(() => db.roles
5540
+ .where({ realmId: 'rlm-public' })
5541
+ .toArray()
5542
+ .then((roles) => {
5543
+ const rv = {};
5544
+ for (const role of roles
5545
+ .slice()
5546
+ .sort((a, b) => (a.sortOrder || 0) - (b.sortOrder || 0))) {
5547
+ rv[role.name] = role;
5548
+ }
5549
+ return rv;
5550
+ })), {});
5551
+ });
5552
+
5479
5553
  const getCurrentUserEmitter = associate((db) => new BehaviorSubject(UNAUTHORIZED_USER));
5480
5554
 
5481
5555
  const getInternalAccessControlObservable = associate((db) => {
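`getGlobalRolesObservable()` is new in beta.18: a shared live query over the `roles` table in the public realm (`'rlm-public'`), sorted by `sortOrder` and keyed by role name. It is also exposed further down in this diff as `db.cloud.roles`. A small sketch of consuming that observable, assuming the database was opened with the cloud addon (property names follow the code above):

```js
// Log the globally defined roles whenever they change.
const subscription = db.cloud.roles.subscribe((roles) => {
  for (const [name, role] of Object.entries(roles)) {
    console.log(name, role.permissions);
  }
});
// ... later:
subscription.unsubscribe();
```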
@@ -5520,7 +5594,7 @@ function mergePermissions(...permissions) {
5520
5594
  if (permissions.length === 0)
5521
5595
  return {};
5522
5596
  const reduced = permissions.reduce((result, next) => {
5523
- const ret = { ...result };
5597
+ const ret = Object.assign({}, result);
5524
5598
  for (const [verb, rights] of Object.entries(next)) {
5525
5599
  if (verb in ret && ret[verb]) {
5526
5600
  if (ret[verb] === '*')
@@ -5577,19 +5651,36 @@ function mergePermissions(...permissions) {
5577
5651
  }
5578
5652
 
5579
5653
  const getPermissionsLookupObservable = associate((db) => {
5580
- const o = getInternalAccessControlObservable(db._novip);
5581
- return mapValueObservable(o, ({ selfMembers, realms, userId }) => {
5654
+ const o = createSharedValueObservable(combineLatest([
5655
+ getInternalAccessControlObservable(db._novip),
5656
+ getGlobalRolesObservable(db._novip),
5657
+ ]).pipe(map(([{ selfMembers, realms, userId }, globalRoles]) => ({
5658
+ selfMembers,
5659
+ realms,
5660
+ userId,
5661
+ globalRoles,
5662
+ }))), {
5663
+ selfMembers: [],
5664
+ realms: [],
5665
+ userId: UNAUTHORIZED_USER.userId,
5666
+ globalRoles: {},
5667
+ });
5668
+ return mapValueObservable(o, ({ selfMembers, realms, userId, globalRoles }) => {
5582
5669
  const rv = realms
5583
- .map((realm) => ({
5584
- ...realm,
5585
- permissions: realm.owner === userId
5586
- ? { manage: '*' }
5587
- : mergePermissions(...selfMembers
5588
- .filter((m) => m.realmId === realm.realmId)
5589
- .map((m) => m.permissions)
5590
- .filter((p) => p)),
5591
- }))
5592
- .reduce((p, c) => ({ ...p, [c.realmId]: c }), {
5670
+ .map((realm) => {
5671
+ const selfRealmMembers = selfMembers.filter((m) => m.realmId === realm.realmId);
5672
+ const directPermissionSets = selfRealmMembers
5673
+ .map((m) => m.permissions)
5674
+ .filter((p) => p);
5675
+ const rolePermissionSets = flatten(selfRealmMembers.map((m) => m.roles).filter((roleName) => roleName))
5676
+ .map((role) => globalRoles[role])
5677
+ .filter((role) => role)
5678
+ .map((role) => role.permissions);
5679
+ return Object.assign(Object.assign({}, realm), { permissions: realm.owner === userId
5680
+ ? { manage: '*' }
5681
+ : mergePermissions(...directPermissionSets, ...rolePermissionSets) });
5682
+ })
5683
+ .reduce((p, c) => (Object.assign(Object.assign({}, p), { [c.realmId]: c })), {
5593
5684
  [userId]: {
5594
5685
  realmId: userId,
5595
5686
  owner: userId,
@@ -5608,47 +5699,50 @@ class PermissionChecker {
5608
5699
  this.isOwner = isOwner;
5609
5700
  }
5610
5701
  add(...tableNames) {
5702
+ var _a;
5611
5703
  // If user can manage the whole realm, return true.
5612
5704
  if (this.permissions.manage === '*')
5613
5705
  return true;
5614
5706
  // If user can manage given table in realm, return true
5615
- if (this.permissions.manage?.includes(this.tableName))
5707
+ if ((_a = this.permissions.manage) === null || _a === void 0 ? void 0 : _a.includes(this.tableName))
5616
5708
  return true;
5617
5709
  // If user can add any type, return true
5618
5710
  if (this.permissions.add === '*')
5619
5711
  return true;
5620
5712
  // If user can add objects into given table names in the realm, return true
5621
- if (tableNames.every((tableName) => this.permissions.add?.includes(tableName))) {
5713
+ if (tableNames.every((tableName) => { var _a; return (_a = this.permissions.add) === null || _a === void 0 ? void 0 : _a.includes(tableName); })) {
5622
5714
  return true;
5623
5715
  }
5624
5716
  return false;
5625
5717
  }
5626
5718
  update(...props) {
5719
+ var _a, _b;
5627
5720
  // If user is owner of this object, or if user can manage the whole realm, return true.
5628
5721
  if (this.isOwner || this.permissions.manage === '*')
5629
5722
  return true;
5630
5723
  // If user can manage given table in realm, return true
5631
- if (this.permissions.manage?.includes(this.tableName))
5724
+ if ((_a = this.permissions.manage) === null || _a === void 0 ? void 0 : _a.includes(this.tableName))
5632
5725
  return true;
5633
5726
  // If user can update any prop in any table in this realm, return true unless
5634
5727
  // it regards to ownership change:
5635
5728
  if (this.permissions.update === '*') {
5636
5729
  return props.every((prop) => prop !== 'owner');
5637
5730
  }
5638
- const tablePermissions = this.permissions.update?.[this.tableName];
5731
+ const tablePermissions = (_b = this.permissions.update) === null || _b === void 0 ? void 0 : _b[this.tableName];
5639
5732
  // If user can update any prop in table and realm, return true unless
5640
5733
  // accessing special props owner or realmId
5641
5734
  if (tablePermissions === '*')
5642
5735
  return props.every((prop) => prop !== 'owner');
5643
5736
  // Explicitely listed properties to allow updates on:
5644
- return props.every((prop) => tablePermissions?.some((permittedProp) => permittedProp === prop || (permittedProp === '*' && prop !== 'owner')));
5737
+ return props.every((prop) => tablePermissions === null || tablePermissions === void 0 ? void 0 : tablePermissions.some((permittedProp) => permittedProp === prop || (permittedProp === '*' && prop !== 'owner')));
5645
5738
  }
5646
5739
  delete() {
5740
+ var _a;
5647
5741
  // If user is owner of this object, or if user can manage the whole realm, return true.
5648
5742
  if (this.isOwner || this.permissions.manage === '*')
5649
5743
  return true;
5650
5744
  // If user can manage given table in realm, return true
5651
- if (this.permissions.manage?.includes(this.tableName))
5745
+ if ((_a = this.permissions.manage) === null || _a === void 0 ? void 0 : _a.includes(this.tableName))
5652
5746
  return true;
5653
5747
  return false;
5654
5748
  }
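`PermissionChecker` answers "may the current user add/update/delete in this realm and table" from the merged permission sets, and the addon exposes it through `db.cloud.permissions(obj, tableName)` (defined in the next hunk), which returns an observable of checkers. A hedged usage sketch; the `todoLists` table, the id and the related `members` table are illustrative application-level assumptions:

```js
// Inside some async application code, with `db` opened via dexie-cloud-addon:
const listId = 'lst-xxxxxxxx'; // illustrative id
const todoList = await db.todoLists.get(listId);

const sub = db.cloud.permissions(todoList, 'todoLists').subscribe((can) => {
  console.log({
    canRename: can.update('title'), // property-level update check
    canShare: can.add('members'),   // may the user add rows to a related table?
    canDelete: can.delete(),
  });
});
// ... later:
sub.unsubscribe();
```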
@@ -5672,7 +5766,7 @@ function permissions(dexie, obj, tableName) {
5672
5766
  const realm = permissionsLookup[realmId || dexie.cloud.currentUserId];
5673
5767
  if (!realm)
5674
5768
  return new PermissionChecker({}, tableName, !owner || owner === dexie.cloud.currentUserId);
5675
- return new PermissionChecker(realm.permissions, tableName, !owner || owner === dexie.cloud.currentUserId);
5769
+ return new PermissionChecker(realm.permissions, tableName, realmId === dexie.cloud.currentUserId || owner === dexie.cloud.currentUserId);
5676
5770
  };
5677
5771
  const o = source.pipe(map(mapper));
5678
5772
  o.getValue = () => mapper(source.getValue());
@@ -5684,7 +5778,7 @@ const getInvitesObservable = associate((db) => {
5684
5778
  const permissions = getPermissionsLookupObservable(db._novip);
5685
5779
  const accessControl = getInternalAccessControlObservable(db._novip);
5686
5780
  return createSharedValueObservable(combineLatest([membersByEmail, accessControl, permissions]).pipe(map$1(([membersByEmail, accessControl, realmLookup]) => {
5687
- const reducer = (result, m) => ({ ...result, [m.id]: { ...m, realm: realmLookup[m.realmId] } });
5781
+ const reducer = (result, m) => (Object.assign(Object.assign({}, result), { [m.id]: Object.assign(Object.assign({}, m), { realm: realmLookup[m.realmId] }) }));
5688
5782
  const emailMembersById = membersByEmail.reduce(reducer, {});
5689
5783
  const membersById = accessControl.selfMembers.reduce(reducer, emailMembersById);
5690
5784
  return Object.values(membersById).filter(m => !m.accepted);
@@ -5704,15 +5798,15 @@ function dexieCloud(dexie) {
5704
5798
  let configuredProgramatically = false;
5705
5799
  // local sync worker - used when there's no service worker.
5706
5800
  let localSyncWorker = null;
5707
- dexie.on('ready', async (dexie) => {
5801
+ dexie.on('ready', (dexie) => __awaiter$1(this, void 0, void 0, function* () {
5708
5802
  try {
5709
- await onDbReady(dexie);
5803
+ yield onDbReady(dexie);
5710
5804
  }
5711
5805
  catch (error) {
5712
5806
  console.error(error);
5713
5807
  // Make sure to succeed with database open even if network is down.
5714
5808
  }
5715
- }, true // true = sticky
5809
+ }), true // true = sticky
5716
5810
  );
5717
5811
  /** Void starting subscribers after a close has happened. */
5718
5812
  let closed = false;
@@ -5728,8 +5822,8 @@ function dexieCloud(dexie) {
5728
5822
  currentUserEmitter.next(UNAUTHORIZED_USER);
5729
5823
  });
5730
5824
  dexie.cloud = {
5731
- version: '4.0.0-beta.15',
5732
- options: { ...DEFAULT_OPTIONS },
5825
+ version: '4.0.0-beta.18',
5826
+ options: Object.assign({}, DEFAULT_OPTIONS),
5733
5827
  schema: null,
5734
5828
  serverState: null,
5735
5829
  get currentUserId() {
@@ -5743,14 +5837,17 @@ function dexieCloud(dexie) {
5743
5837
  persistedSyncState: new BehaviorSubject(undefined),
5744
5838
  userInteraction: new BehaviorSubject(undefined),
5745
5839
  webSocketStatus: new BehaviorSubject('not-started'),
5746
- async login(hint) {
5747
- const db = DexieCloudDB(dexie);
5748
- await db.cloud.sync();
5749
- await login(db, hint);
5840
+ login(hint) {
5841
+ return __awaiter$1(this, void 0, void 0, function* () {
5842
+ const db = DexieCloudDB(dexie);
5843
+ yield db.cloud.sync();
5844
+ yield login(db, hint);
5845
+ });
5750
5846
  },
5751
5847
  invites: getInvitesObservable(dexie),
5848
+ roles: getGlobalRolesObservable(dexie),
5752
5849
  configure(options) {
5753
- options = dexie.cloud.options = { ...dexie.cloud.options, ...options };
5850
+ options = dexie.cloud.options = Object.assign(Object.assign({}, dexie.cloud.options), options);
5754
5851
  configuredProgramatically = true;
5755
5852
  if (options.databaseUrl && options.nameSuffix) {
5756
5853
  // @ts-ignore
@@ -5759,41 +5856,43 @@ function dexieCloud(dexie) {
5759
5856
  }
5760
5857
  updateSchemaFromOptions(dexie.cloud.schema, dexie.cloud.options);
5761
5858
  },
5762
- async sync({ wait, purpose } = { wait: true, purpose: 'push' }) {
5763
- if (wait === undefined)
5764
- wait = true;
5765
- const db = DexieCloudDB(dexie);
5766
- if (purpose === 'pull') {
5767
- const syncState = db.cloud.persistedSyncState.value;
5768
- triggerSync(db, purpose);
5769
- if (wait) {
5770
- const newSyncState = await db.cloud.persistedSyncState
5771
- .pipe(filter((newSyncState) => newSyncState?.timestamp != null &&
5772
- (!syncState || newSyncState.timestamp > syncState.timestamp)), take(1))
5773
- .toPromise();
5774
- if (newSyncState?.error) {
5775
- throw new Error(`Sync error: ` + newSyncState.error);
5859
+ sync({ wait, purpose } = { wait: true, purpose: 'push' }) {
5860
+ return __awaiter$1(this, void 0, void 0, function* () {
5861
+ if (wait === undefined)
5862
+ wait = true;
5863
+ const db = DexieCloudDB(dexie);
5864
+ if (purpose === 'pull') {
5865
+ const syncState = db.cloud.persistedSyncState.value;
5866
+ triggerSync(db, purpose);
5867
+ if (wait) {
5868
+ const newSyncState = yield db.cloud.persistedSyncState
5869
+ .pipe(filter((newSyncState) => (newSyncState === null || newSyncState === void 0 ? void 0 : newSyncState.timestamp) != null &&
5870
+ (!syncState || newSyncState.timestamp > syncState.timestamp)), take(1))
5871
+ .toPromise();
5872
+ if (newSyncState === null || newSyncState === void 0 ? void 0 : newSyncState.error) {
5873
+ throw new Error(`Sync error: ` + newSyncState.error);
5874
+ }
5776
5875
  }
5777
5876
  }
5778
- }
5779
- else if (await isSyncNeeded(db)) {
5780
- const syncState = db.cloud.persistedSyncState.value;
5781
- triggerSync(db, purpose);
5782
- if (wait) {
5783
- console.debug('db.cloud.login() is waiting for sync completion...');
5784
- await from$1(liveQuery(async () => {
5785
- const syncNeeded = await isSyncNeeded(db);
5786
- const newSyncState = await db.getPersistedSyncState();
5787
- if (newSyncState?.timestamp !== syncState?.timestamp &&
5788
- newSyncState?.error)
5789
- throw new Error(`Sync error: ` + newSyncState.error);
5790
- return syncNeeded;
5791
- }))
5792
- .pipe(filter((isNeeded) => !isNeeded), take(1))
5793
- .toPromise();
5794
- console.debug('Done waiting for sync completion because we have nothing to push anymore');
5877
+ else if (yield isSyncNeeded(db)) {
5878
+ const syncState = db.cloud.persistedSyncState.value;
5879
+ triggerSync(db, purpose);
5880
+ if (wait) {
5881
+ console.debug('db.cloud.login() is waiting for sync completion...');
5882
+ yield from$1(liveQuery(() => __awaiter$1(this, void 0, void 0, function* () {
5883
+ const syncNeeded = yield isSyncNeeded(db);
5884
+ const newSyncState = yield db.getPersistedSyncState();
5885
+ if ((newSyncState === null || newSyncState === void 0 ? void 0 : newSyncState.timestamp) !== (syncState === null || syncState === void 0 ? void 0 : syncState.timestamp) &&
5886
+ (newSyncState === null || newSyncState === void 0 ? void 0 : newSyncState.error))
5887
+ throw new Error(`Sync error: ` + newSyncState.error);
5888
+ return syncNeeded;
5889
+ })))
5890
+ .pipe(filter((isNeeded) => !isNeeded), take(1))
5891
+ .toPromise();
5892
+ console.debug('Done waiting for sync completion because we have nothing to push anymore');
5893
+ }
5795
5894
  }
5796
- }
5895
+ });
5797
5896
  },
5798
5897
  permissions(obj, tableName) {
5799
5898
  return permissions(dexie._novip, obj, tableName);
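The rewritten `db.cloud.sync()` keeps its existing contract (default `{ wait: true, purpose: 'push' }`): a `'pull'` waits for a newer persisted sync state, a `'push'` waits until `isSyncNeeded()` turns false, and either path rethrows a persisted sync error. A short usage example of those options, unchanged between beta.15 and beta.18:

```js
// Push local changes and wait until there is nothing left to push:
await db.cloud.sync(); // same as { wait: true, purpose: 'push' }

// Force a round trip that also pulls remote changes before continuing:
await db.cloud.sync({ wait: true, purpose: 'pull' });

// Fire-and-forget push:
db.cloud.sync({ wait: false }).catch(console.error);
```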
@@ -5805,7 +5904,8 @@ function dexieCloud(dexie) {
5805
5904
  return generateKey(dexie.cloud.schema[this.name].idPrefix || '', shardKey);
5806
5905
  };
5807
5906
  dexie.Table.prototype.idPrefix = function () {
5808
- return this.db.cloud.schema?.[this.name]?.idPrefix || '';
5907
+ var _a, _b;
5908
+ return ((_b = (_a = this.db.cloud.schema) === null || _a === void 0 ? void 0 : _a[this.name]) === null || _b === void 0 ? void 0 : _b.idPrefix) || '';
5809
5909
  };
5810
5910
  dexie.use(createMutationTrackingMiddleware({
5811
5911
  currentUserObservable: dexie.cloud.currentUser,
@@ -5813,163 +5913,167 @@ function dexieCloud(dexie) {
5813
5913
  }));
5814
5914
  dexie.use(createImplicitPropSetterMiddleware(DexieCloudDB(dexie)));
5815
5915
  dexie.use(createIdGenerationMiddleware(DexieCloudDB(dexie)));
5816
- async function onDbReady(dexie) {
5817
- closed = false; // As Dexie calls us, we are not closed anymore. Maybe reopened? Remember db.ready event is registered with sticky flag!
5818
- const db = DexieCloudDB(dexie);
5819
- // Setup default GUI:
5820
- if (!IS_SERVICE_WORKER) {
5821
- if (!db.cloud.options?.customLoginGui) {
5822
- subscriptions.push(setupDefaultGUI(dexie));
5823
- }
5824
- subscriptions.push(computeSyncState(db).subscribe(dexie.cloud.syncState));
5825
- }
5826
- //verifyConfig(db.cloud.options); Not needed (yet at least!)
5827
- // Verify the user has allowed version increment.
5828
- if (!db.tables.every((table) => table.core)) {
5829
- throwVersionIncrementNeeded();
5830
- }
5831
- const swRegistrations = 'serviceWorker' in navigator
5832
- ? await navigator.serviceWorker.getRegistrations()
5833
- : [];
5834
- const initiallySynced = await db.transaction('rw', db.$syncState, async () => {
5835
- const { options, schema } = db.cloud;
5836
- const [persistedOptions, persistedSchema, persistedSyncState] = await Promise.all([
5837
- db.getOptions(),
5838
- db.getSchema(),
5839
- db.getPersistedSyncState(),
5840
- ]);
5841
- if (!configuredProgramatically) {
5842
- // Options not specified programatically (use case for SW!)
5843
- // Take persisted options:
5844
- db.cloud.options = persistedOptions || null;
5845
- }
5846
- else if (!persistedOptions ||
5847
- JSON.stringify(persistedOptions) !== JSON.stringify(options)) {
5848
- // Update persisted options:
5849
- if (!options)
5850
- throw new Error(`Internal error`); // options cannot be null if configuredProgramatically is set.
5851
- await db.$syncState.put(options, 'options');
5852
- }
5853
- if (db.cloud.options?.tryUseServiceWorker &&
5854
- 'serviceWorker' in navigator &&
5855
- swRegistrations.length > 0 &&
5856
- !DISABLE_SERVICEWORKER_STRATEGY) {
5857
- // * Configured for using service worker if available.
5858
- // * Browser supports service workers
5859
- // * There are at least one service worker registration
5860
- console.debug('Dexie Cloud Addon: Using service worker');
5861
- db.cloud.usingServiceWorker = true;
5916
+ function onDbReady(dexie) {
5917
+ var _a, _b, _c, _d, _e, _f;
5918
+ return __awaiter$1(this, void 0, void 0, function* () {
5919
+ closed = false; // As Dexie calls us, we are not closed anymore. Maybe reopened? Remember db.ready event is registered with sticky flag!
5920
+ const db = DexieCloudDB(dexie);
5921
+ // Setup default GUI:
5922
+ if (!IS_SERVICE_WORKER) {
5923
+ if (!((_a = db.cloud.options) === null || _a === void 0 ? void 0 : _a.customLoginGui)) {
5924
+ subscriptions.push(setupDefaultGUI(dexie));
5925
+ }
5926
+ subscriptions.push(computeSyncState(db).subscribe(dexie.cloud.syncState));
5862
5927
  }
5863
- else {
5864
- // Not configured for using service worker or no service worker
5865
- // registration exists. Don't rely on service worker to do any job.
5866
- // Use LocalSyncWorker instead.
5867
- if (db.cloud.options?.tryUseServiceWorker && !IS_SERVICE_WORKER) {
5868
- console.debug('dexie-cloud-addon: Not using service worker.', swRegistrations.length === 0
5869
- ? 'No SW registrations found.'
5870
- : 'serviceWorker' in navigator && DISABLE_SERVICEWORKER_STRATEGY
5871
- ? 'Avoiding SW background sync and SW periodic bg sync for this browser due to browser bugs.'
5872
- : 'navigator.serviceWorker not present');
5928
+ //verifyConfig(db.cloud.options); Not needed (yet at least!)
5929
+ // Verify the user has allowed version increment.
5930
+ if (!db.tables.every((table) => table.core)) {
5931
+ throwVersionIncrementNeeded();
5932
+ }
5933
+ const swRegistrations = 'serviceWorker' in navigator
5934
+ ? yield navigator.serviceWorker.getRegistrations()
5935
+ : [];
5936
+ const initiallySynced = yield db.transaction('rw', db.$syncState, () => __awaiter$1(this, void 0, void 0, function* () {
5937
+ var _g, _h;
5938
+ const { options, schema } = db.cloud;
5939
+ const [persistedOptions, persistedSchema, persistedSyncState] = yield Promise.all([
5940
+ db.getOptions(),
5941
+ db.getSchema(),
5942
+ db.getPersistedSyncState(),
5943
+ ]);
5944
+ if (!configuredProgramatically) {
5945
+ // Options not specified programatically (use case for SW!)
5946
+ // Take persisted options:
5947
+ db.cloud.options = persistedOptions || null;
5948
+ }
5949
+ else if (!persistedOptions ||
5950
+ JSON.stringify(persistedOptions) !== JSON.stringify(options)) {
5951
+ // Update persisted options:
5952
+ if (!options)
5953
+ throw new Error(`Internal error`); // options cannot be null if configuredProgramatically is set.
5954
+ yield db.$syncState.put(options, 'options');
5955
+ }
5956
+ if (((_g = db.cloud.options) === null || _g === void 0 ? void 0 : _g.tryUseServiceWorker) &&
5957
+ 'serviceWorker' in navigator &&
5958
+ swRegistrations.length > 0 &&
5959
+ !DISABLE_SERVICEWORKER_STRATEGY) {
5960
+ // * Configured for using service worker if available.
5961
+ // * Browser supports service workers
5962
+ // * There are at least one service worker registration
5963
+ console.debug('Dexie Cloud Addon: Using service worker');
5964
+ db.cloud.usingServiceWorker = true;
5873
5965
  }
5874
- db.cloud.usingServiceWorker = false;
5875
- }
5876
- updateSchemaFromOptions(schema, db.cloud.options);
5877
- updateSchemaFromOptions(persistedSchema, db.cloud.options);
5878
- if (!schema) {
5879
- // Database opened dynamically (use case for SW!)
5880
- // Take persisted schema:
5881
- db.cloud.schema = persistedSchema || null;
5882
- }
5883
- else if (!persistedSchema ||
5884
- JSON.stringify(persistedSchema) !== JSON.stringify(schema)) {
5885
- // Update persisted schema (but don't overwrite table prefixes)
5886
- const newPersistedSchema = persistedSchema || {};
5887
- for (const [table, tblSchema] of Object.entries(schema)) {
5888
- const newTblSchema = newPersistedSchema[table];
5889
- if (!newTblSchema) {
5890
- newPersistedSchema[table] = { ...tblSchema };
5966
+ else {
5967
+ // Not configured for using service worker or no service worker
5968
+ // registration exists. Don't rely on service worker to do any job.
5969
+ // Use LocalSyncWorker instead.
5970
+ if (((_h = db.cloud.options) === null || _h === void 0 ? void 0 : _h.tryUseServiceWorker) && !IS_SERVICE_WORKER) {
5971
+ console.debug('dexie-cloud-addon: Not using service worker.', swRegistrations.length === 0
5972
+ ? 'No SW registrations found.'
5973
+ : 'serviceWorker' in navigator && DISABLE_SERVICEWORKER_STRATEGY
5974
+ ? 'Avoiding SW background sync and SW periodic bg sync for this browser due to browser bugs.'
5975
+ : 'navigator.serviceWorker not present');
5891
5976
  }
5892
- else {
5893
- newTblSchema.markedForSync = tblSchema.markedForSync;
5894
- tblSchema.deleted = newTblSchema.deleted;
5895
- newTblSchema.generatedGlobalId = tblSchema.generatedGlobalId;
5977
+ db.cloud.usingServiceWorker = false;
5978
+ }
5979
+ updateSchemaFromOptions(schema, db.cloud.options);
5980
+ updateSchemaFromOptions(persistedSchema, db.cloud.options);
5981
+ if (!schema) {
5982
+ // Database opened dynamically (use case for SW!)
5983
+ // Take persisted schema:
5984
+ db.cloud.schema = persistedSchema || null;
5985
+ }
5986
+ else if (!persistedSchema ||
5987
+ JSON.stringify(persistedSchema) !== JSON.stringify(schema)) {
5988
+ // Update persisted schema (but don't overwrite table prefixes)
5989
+ const newPersistedSchema = persistedSchema || {};
5990
+ for (const [table, tblSchema] of Object.entries(schema)) {
5991
+ const newTblSchema = newPersistedSchema[table];
5992
+ if (!newTblSchema) {
5993
+ newPersistedSchema[table] = Object.assign({}, tblSchema);
5994
+ }
5995
+ else {
5996
+ newTblSchema.markedForSync = tblSchema.markedForSync;
5997
+ tblSchema.deleted = newTblSchema.deleted;
5998
+ newTblSchema.generatedGlobalId = tblSchema.generatedGlobalId;
5999
+ }
5896
6000
  }
6001
+ yield db.$syncState.put(newPersistedSchema, 'schema');
6002
+ // Make sure persisted table prefixes are being used instead of computed ones:
6003
+ // Let's assign all props as the newPersistedSchems should be what we should be working with.
6004
+ Object.assign(schema, newPersistedSchema);
5897
6005
  }
5898
- await db.$syncState.put(newPersistedSchema, 'schema');
5899
- // Make sure persisted table prefixes are being used instead of computed ones:
5900
- // Let's assign all props as the newPersistedSchems should be what we should be working with.
5901
- Object.assign(schema, newPersistedSchema);
6006
+ return persistedSyncState === null || persistedSyncState === void 0 ? void 0 : persistedSyncState.initiallySynced;
6007
+ }));
6008
+ if (initiallySynced) {
6009
+ db.setInitiallySynced(true);
5902
6010
  }
5903
- return persistedSyncState?.initiallySynced;
5904
- });
5905
- if (initiallySynced) {
5906
- db.setInitiallySynced(true);
5907
- }
5908
- verifySchema(db);
5909
- if (db.cloud.options?.databaseUrl && !initiallySynced) {
5910
- await performInitialSync(db, db.cloud.options, db.cloud.schema);
5911
- db.setInitiallySynced(true);
5912
- }
5913
- // Manage CurrentUser observable:
5914
- throwIfClosed();
5915
- if (!IS_SERVICE_WORKER) {
5916
- subscriptions.push(liveQuery(() => db.getCurrentUser()).subscribe(currentUserEmitter));
5917
- // Manage PersistendSyncState observable:
5918
- subscriptions.push(liveQuery(() => db.getPersistedSyncState()).subscribe(db.cloud.persistedSyncState));
5919
- // Wait till currentUser and persistedSyncState gets populated
5920
- // with things from the database and not just the default values.
5921
- // This is so that when db.open() completes, user should be safe
5922
- // to subscribe to these observables and get actual data.
5923
- await combineLatest([
5924
- currentUserEmitter.pipe(skip(1), take(1)),
5925
- db.cloud.persistedSyncState.pipe(skip(1), take(1)),
5926
- ]).toPromise();
5927
- }
5928
- // HERE: If requireAuth, do athentication now.
5929
- if (db.cloud.options?.requireAuth) {
5930
- await login(db);
5931
- }
5932
- if (localSyncWorker)
5933
- localSyncWorker.stop();
5934
- localSyncWorker = null;
5935
- throwIfClosed();
5936
- if (db.cloud.usingServiceWorker && db.cloud.options?.databaseUrl) {
5937
- registerSyncEvent(db, 'push').catch(() => { });
5938
- registerPeriodicSyncEvent(db).catch(() => { });
5939
- }
5940
- else if (db.cloud.options?.databaseUrl &&
5941
- db.cloud.schema &&
5942
- !IS_SERVICE_WORKER) {
5943
- // There's no SW. Start SyncWorker instead.
5944
- localSyncWorker = LocalSyncWorker(db, db.cloud.options, db.cloud.schema);
5945
- localSyncWorker.start();
5946
- triggerSync(db, 'push');
5947
- }
5948
- // Listen to online event and do sync.
5949
- throwIfClosed();
5950
- if (!IS_SERVICE_WORKER) {
5951
- subscriptions.push(fromEvent(self, 'online').subscribe(() => {
5952
- console.debug('online!');
5953
- db.syncStateChangedEvent.next({
5954
- phase: 'not-in-sync',
5955
- });
6011
+ verifySchema(db);
6012
+ if (((_b = db.cloud.options) === null || _b === void 0 ? void 0 : _b.databaseUrl) && !initiallySynced) {
6013
+ yield performInitialSync(db, db.cloud.options, db.cloud.schema);
6014
+ db.setInitiallySynced(true);
6015
+ }
6016
+ // Manage CurrentUser observable:
6017
+ throwIfClosed();
6018
+ if (!IS_SERVICE_WORKER) {
6019
+ subscriptions.push(liveQuery(() => db.getCurrentUser()).subscribe(currentUserEmitter));
6020
+ // Manage PersistedSyncState observable:
6021
+ subscriptions.push(liveQuery(() => db.getPersistedSyncState()).subscribe(db.cloud.persistedSyncState));
6022
+ // Wait until currentUser and persistedSyncState get populated
6023
+ // with things from the database and not just the default values.
6024
+ // This is so that when db.open() completes, the user should be safe
6025
+ // to subscribe to these observables and get actual data.
6026
+ yield combineLatest([
6027
+ currentUserEmitter.pipe(skip(1), take(1)),
6028
+ db.cloud.persistedSyncState.pipe(skip(1), take(1)),
6029
+ ]).toPromise();
6030
+ }
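The skip(1)/take(1) combination above makes db.open() resolve only after currentUser and persistedSyncState have emitted real database-backed values rather than their initial defaults. A hypothetical consumer-side sketch of why that matters (database name, store definition and configure options are illustrative, not taken from this diff):

```js
// Illustrative app code; 'mytodos' and the store definition are made up.
import Dexie from 'dexie';
import dexieCloud from 'dexie-cloud-addon';

const db = new Dexie('mytodos', { addons: [dexieCloud] });
db.version(1).stores({ todoItems: '@id, title' });
db.cloud.configure({ databaseUrl: 'https://example.dexie.cloud' });

async function start() {
  await db.open();
  // Because open() waited for the first real emissions, these subscriptions
  // immediately observe actual data instead of placeholder defaults:
  db.cloud.currentUser.subscribe((user) => console.log('user:', user.userId));
  db.cloud.persistedSyncState.subscribe((state) => console.log('sync state:', state));
}

start().catch(console.error);
```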
6031
+ // HERE: If requireAuth, do authentication now.
6032
+ if ((_c = db.cloud.options) === null || _c === void 0 ? void 0 : _c.requireAuth) {
6033
+ yield login(db);
6034
+ }
6035
+ if (localSyncWorker)
6036
+ localSyncWorker.stop();
6037
+ localSyncWorker = null;
6038
+ throwIfClosed();
6039
+ if (db.cloud.usingServiceWorker && ((_d = db.cloud.options) === null || _d === void 0 ? void 0 : _d.databaseUrl)) {
6040
+ registerSyncEvent(db, 'push').catch(() => { });
6041
+ registerPeriodicSyncEvent(db).catch(() => { });
6042
+ }
6043
+ else if (((_e = db.cloud.options) === null || _e === void 0 ? void 0 : _e.databaseUrl) &&
6044
+ db.cloud.schema &&
6045
+ !IS_SERVICE_WORKER) {
6046
+ // There's no SW. Start SyncWorker instead.
6047
+ localSyncWorker = LocalSyncWorker(db, db.cloud.options, db.cloud.schema);
6048
+ localSyncWorker.start();
5956
6049
  triggerSync(db, 'push');
5957
- }), fromEvent(self, 'offline').subscribe(() => {
5958
- console.debug('offline!');
5959
- db.syncStateChangedEvent.next({
5960
- phase: 'offline',
5961
- });
5962
- }));
5963
- }
5964
- // Connect WebSocket only if we're a browser window
5965
- if (typeof window !== 'undefined' &&
5966
- !IS_SERVICE_WORKER &&
5967
- db.cloud.options?.databaseUrl) {
5968
- subscriptions.push(connectWebSocket(db));
5969
- }
6050
+ }
6051
+ // Listen to online event and do sync.
6052
+ throwIfClosed();
6053
+ if (!IS_SERVICE_WORKER) {
6054
+ subscriptions.push(fromEvent(self, 'online').subscribe(() => {
6055
+ console.debug('online!');
6056
+ db.syncStateChangedEvent.next({
6057
+ phase: 'not-in-sync',
6058
+ });
6059
+ triggerSync(db, 'push');
6060
+ }), fromEvent(self, 'offline').subscribe(() => {
6061
+ console.debug('offline!');
6062
+ db.syncStateChangedEvent.next({
6063
+ phase: 'offline',
6064
+ });
6065
+ }));
6066
+ }
6067
+ // Connect WebSocket only if we're a browser window
6068
+ if (typeof window !== 'undefined' &&
6069
+ !IS_SERVICE_WORKER &&
6070
+ ((_f = db.cloud.options) === null || _f === void 0 ? void 0 : _f.databaseUrl)) {
6071
+ subscriptions.push(connectWebSocket(db));
6072
+ }
6073
+ });
5970
6074
  }
5971
6075
  }
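Most of the churn in the hunk above is mechanical: between beta.15 and beta.18 the published bundle down-levels modern syntax, so async/await becomes the __awaiter$1 helper with a generator and yield, and optional chaining such as db.cloud.options?.databaseUrl becomes explicit null/undefined guards via _a temporaries. A rough sketch of the equivalence; the function body is invented for illustration, and only the emitted shape mirrors the diff:

```js
// What the source roughly looks like:
async function hasDatabaseUrl(cloud) {
  return Boolean(cloud.options?.databaseUrl);
}

// Roughly what beta.18 emits for the same code:
function hasDatabaseUrlDownleveled(cloud) {
  var _a;
  return __awaiter$1(this, void 0, void 0, function* () {
    return Boolean((_a = cloud.options) === null || _a === void 0 ? void 0 : _a.databaseUrl);
  });
}

// Simplified stand-in for the bundle's __awaiter$1 helper so the sketch is
// self-contained (the real helper also routes rejections through generator.throw):
async function __awaiter$1(thisArg, args, _P, generator) {
  const gen = generator.apply(thisArg, args || []);
  let result = gen.next();
  while (!result.done) result = gen.next(await result.value);
  return result.value;
}
```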
5972
- dexieCloud.version = '4.0.0-beta.15';
6076
+ dexieCloud.version = '4.0.0-beta.18';
5973
6077
  Dexie.Cloud = dexieCloud;
5974
6078
 
5975
6079
  // In case the SW lives for a while, let it reuse already opened connections:
@@ -5998,55 +6102,58 @@ function syncDB(dbName, purpose) {
5998
6102
  syncDBSemaphore.set(dbName + '/' + purpose, promise);
5999
6103
  }
6000
6104
  return promise;
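For context on the lines just above: syncDB memoizes the in-flight promise in syncDBSemaphore under a dbName + '/' + purpose key, so overlapping sync requests for the same database and purpose reuse one _syncDB call instead of racing. A stripped-down sketch of that de-duplication pattern; the cleanup in finally is an assumption here, since this hunk doesn't show where the real code clears the entry:

```js
// Promise de-duplication keyed by database name and sync purpose (sketch).
const inFlightSyncs = new Map();

function syncOnce(dbName, purpose, doSync) {
  const key = `${dbName}/${purpose}`;
  let promise = inFlightSyncs.get(key);
  if (!promise) {
    promise = doSync(dbName, purpose).finally(() => {
      // Assumed cleanup: allow a new sync once this round has settled.
      inFlightSyncs.delete(key);
    });
    inFlightSyncs.set(key, promise);
  }
  return promise;
}
```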
6001
- async function _syncDB(dbName, purpose) {
6002
- let db = managedDBs.get(dbName);
6003
- if (!db) {
6004
- console.debug('Dexie Cloud SW: Creating new Dexie instance for', dbName);
6005
- const dexie = new Dexie(dbName, { addons: [dexieCloud] });
6006
- db = DexieCloudDB(dexie);
6007
- dexie.on('versionchange', stopManagingDB);
6008
- await db.dx.open(); // Makes sure db.cloud.options and db.cloud.schema are read from db,
6009
- if (!managedDBs.get(dbName)) {
6010
- // Avoid race conditions.
6011
- managedDBs.set(dbName, db);
6012
- }
6013
- }
6014
- if (!db.cloud.options?.databaseUrl) {
6015
- console.error(`Dexie Cloud: No databaseUrl configured`);
6016
- return; // Nothing to sync.
6017
- }
6018
- if (!db.cloud.schema) {
6019
- console.error(`Dexie Cloud: No schema persisted`);
6020
- return; // Nothing to sync.
6021
- }
6022
- function stopManagingDB() {
6023
- db.dx.on.versionchange.unsubscribe(stopManagingDB);
6024
- if (managedDBs.get(db.name) === db) {
6025
- // Avoid race conditions.
6026
- managedDBs.delete(db.name);
6027
- }
6028
- console.debug(`Dexie Cloud SW: Closing Dexie instance for ${dbName}`);
6029
- db.dx.close();
6030
- return false;
6031
- }
6032
- try {
6033
- console.debug('Dexie Cloud SW: Syncing');
6034
- await syncIfPossible(db, db.cloud.options, db.cloud.schema, {
6035
- retryImmediatelyOnFetchError: true,
6036
- purpose,
6037
- });
6038
- console.debug('Dexie Cloud SW: Done Syncing');
6039
- }
6040
- catch (e) {
6041
- console.error(`Dexie Cloud SW Error`, e);
6042
- // Error occurred. Stop managing this DB until we wake up again by a sync event,
6043
- // which will open a new Dexie and start trying to sync it.
6044
- stopManagingDB();
6045
- if (e.name !== Dexie.errnames.NoSuchDatabase) {
6046
- // Unless the error was that DB doesn't exist, rethrow to trigger sync retry.
6047
- throw e; // Throw e to make syncEvent.waitUntil() receive a rejected promise, so it will retry.
6105
+ function _syncDB(dbName, purpose) {
6106
+ var _a;
6107
+ return __awaiter$1(this, void 0, void 0, function* () {
6108
+ let db = managedDBs.get(dbName);
6109
+ if (!db) {
6110
+ console.debug('Dexie Cloud SW: Creating new Dexie instance for', dbName);
6111
+ const dexie = new Dexie(dbName, { addons: [dexieCloud] });
6112
+ db = DexieCloudDB(dexie);
6113
+ dexie.on('versionchange', stopManagingDB);
6114
+ yield db.dx.open(); // Makes sure db.cloud.options and db.cloud.schema are read from db,
6115
+ if (!managedDBs.get(dbName)) {
6116
+ // Avoid race conditions.
6117
+ managedDBs.set(dbName, db);
6118
+ }
6048
6119
  }
6049
- }
6120
+ if (!((_a = db.cloud.options) === null || _a === void 0 ? void 0 : _a.databaseUrl)) {
6121
+ console.error(`Dexie Cloud: No databaseUrl configured`);
6122
+ return; // Nothing to sync.
6123
+ }
6124
+ if (!db.cloud.schema) {
6125
+ console.error(`Dexie Cloud: No schema persisted`);
6126
+ return; // Nothing to sync.
6127
+ }
6128
+ function stopManagingDB() {
6129
+ db.dx.on.versionchange.unsubscribe(stopManagingDB);
6130
+ if (managedDBs.get(db.name) === db) {
6131
+ // Avoid race conditions.
6132
+ managedDBs.delete(db.name);
6133
+ }
6134
+ console.debug(`Dexie Cloud SW: Closing Dexie instance for ${dbName}`);
6135
+ db.dx.close();
6136
+ return false;
6137
+ }
6138
+ try {
6139
+ console.debug('Dexie Cloud SW: Syncing');
6140
+ yield syncIfPossible(db, db.cloud.options, db.cloud.schema, {
6141
+ retryImmediatelyOnFetchError: true,
6142
+ purpose,
6143
+ });
6144
+ console.debug('Dexie Cloud SW: Done Syncing');
6145
+ }
6146
+ catch (e) {
6147
+ console.error(`Dexie Cloud SW Error`, e);
6148
+ // Error occurred. Stop managing this DB until we wake up again by a sync event,
6149
+ // which will open a new Dexie and start trying to sync it.
6150
+ stopManagingDB();
6151
+ if (e.name !== Dexie.errnames.NoSuchDatabase) {
6152
+ // Unless the error was that DB doesn't exist, rethrow to trigger sync retry.
6153
+ throw e; // Throw e to make syncEvent.waitUntil() receive a rejected promise, so it will retry.
6154
+ }
6155
+ }
6156
+ });
6050
6157
  }
6051
6158
  }
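The rewritten _syncDB above keeps one managed Dexie instance per database name and tears it down on versionchange or on a failed sync: it unsubscribes, drops the managedDBs entry, closes the connection, and rethrows unless the error was NoSuchDatabase, so that the service worker's waitUntil() sees a rejection and can retry. A reduced sketch of that error path (runManagedSync and the sync callback are illustrative names):

```js
import Dexie from 'dexie';

// Reduced sketch of the error handling around one sync attempt.
async function runManagedSync(db, managedDBs, sync) {
  try {
    await sync(db);
  } catch (e) {
    // Stop managing this DB until the next sync event re-creates it.
    if (managedDBs.get(db.name) === db) managedDBs.delete(db.name);
    db.dx.close();
    if (e.name !== Dexie.errnames.NoSuchDatabase) {
      // Rethrow so event.waitUntil() receives a rejected promise and retries.
      throw e;
    }
  }
}
```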
6052
6159
  // Avoid taking care of events if browser bugs out by using dexie cloud from a service worker.
@@ -6072,12 +6179,12 @@ if (!DISABLE_SERVICEWORKER_STRATEGY) {
6072
6179
  // Mimic background sync behavior - retry in X minutes on failure.
6073
6180
  // But with a shorter timeout and more retries.
6074
6181
  const syncAndRetry = (num = 1) => {
6075
- return syncDB(dbName, event.data.purpose || "pull").catch(async (e) => {
6182
+ return syncDB(dbName, event.data.purpose || "pull").catch((e) => __awaiter$1(void 0, void 0, void 0, function* () {
6076
6183
  if (num === 3)
6077
6184
  throw e;
6078
- await sleep(60000); // 1 minute
6185
+ yield sleep(60000); // 1 minute
6079
6186
  syncAndRetry(num + 1);
6080
- });
6187
+ }));
6081
6188
  };
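syncAndRetry above gives up after the third attempt and otherwise sleeps a minute before recursing. A generic, fully awaited variant of the same retry idea (names and defaults are illustrative, not part of the package's API):

```js
// Simple retry helper: up to maxAttempts tries, pausing delayMs between them.
const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

async function retryWithDelay(task, maxAttempts = 3, delayMs = 60000) {
  for (let attempt = 1; ; attempt++) {
    try {
      return await task(attempt);
    } catch (e) {
      if (attempt >= maxAttempts) throw e; // give up; let waitUntil() see the rejection
      await sleep(delayMs);
    }
  }
}

// Comparable usage shape (event and syncDB as in the surrounding code):
// event.waitUntil(retryWithDelay(() => syncDB(dbName, 'pull')).catch(console.error));
```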
6082
6189
  if ('waitUntil' in event) {
6083
6190
  event.waitUntil(syncAndRetry().catch(error => console.error(error)));