dexie-cloud-addon 4.0.0-beta.15 → 4.0.0-beta.18

This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
@@ -8,7 +8,7 @@
  *
  * ==========================================================================
  *
- * Version 4.0.0-beta.15, Mon Dec 20 2021
+ * Version 4.0.0-beta.18, Sun Apr 10 2022
  *
  * https://dexie.org
  *
@@ -17,7 +17,32 @@
  */

  import Dexie, { cmp, liveQuery } from 'dexie';
- import { Observable as Observable$1, BehaviorSubject, Subject, from as from$1, fromEvent, of, merge, Subscription as Subscription$1, throwError, combineLatest, map as map$1, share, timer as timer$1, switchMap as switchMap$1 } from 'rxjs';
+ import { Observable as Observable$1, BehaviorSubject, Subject, fromEvent, of, merge, Subscription as Subscription$1, from as from$1, throwError, combineLatest, map as map$1, share, timer as timer$1, switchMap as switchMap$1 } from 'rxjs';
+
+ /*! *****************************************************************************
+ Copyright (c) Microsoft Corporation.
+
+ Permission to use, copy, modify, and/or distribute this software for any
+ purpose with or without fee is hereby granted.
+
+ THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
+ REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+ AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
+ INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+ LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
+ OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+ PERFORMANCE OF THIS SOFTWARE.
+ ***************************************************************************** */
+
+ function __awaiter$1(thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+ }

  //@ts-check
  const randomFillSync = crypto.getRandomValues;
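The reordered rxjs import is cosmetic. The substantive change is the newly inlined __awaiter$1 helper (carrying the Microsoft license banner used by TypeScript's tslib helpers), which indicates the beta.18 bundle appears to have been built for a lower output target: the async functions in the remaining hunks are rewritten to return __awaiter$1(...) around a generator instead of using native async/await. A minimal sketch of the pattern, with an illustrative function name:

    // beta.15 output: native async/await
    async function getReadyRegistration() {
      const reg = await navigator.serviceWorker.ready;
      return reg;
    }

    // beta.18 output: the same logic routed through the inlined __awaiter$1 helper;
    // await becomes yield inside a generator function
    function getReadyRegistration() {
      return __awaiter$1(this, void 0, void 0, function* () {
        const reg = yield navigator.serviceWorker.ready;
        return reg;
      });
    }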
@@ -1902,55 +1927,60 @@ function timeoutErrorFactory(info) {

  //const hasSW = 'serviceWorker' in navigator;
  let hasComplainedAboutSyncEvent = false;
- async function registerSyncEvent(db, purpose) {
- try {
- // Send sync event to SW:
- const sw = await navigator.serviceWorker.ready;
- if (purpose === "push" && sw.sync) {
- await sw.sync.register(`dexie-cloud:${db.name}`);
- }
- if (sw.active) {
- // Use postMessage for pull syncs and for browsers not supporting sync event (Firefox, Safari).
- // Also chromium based browsers with sw.sync as a fallback for sleepy sync events not taking action for a while.
- sw.active.postMessage({
- type: 'dexie-cloud-sync',
- dbName: db.name,
- purpose
- });
- }
- else {
- throw new Error(`Failed to trigger sync - there's no active service worker`);
+ function registerSyncEvent(db, purpose) {
+ return __awaiter$1(this, void 0, void 0, function* () {
+ try {
+ // Send sync event to SW:
+ const sw = yield navigator.serviceWorker.ready;
+ if (purpose === "push" && sw.sync) {
+ yield sw.sync.register(`dexie-cloud:${db.name}`);
+ }
+ if (sw.active) {
+ // Use postMessage for pull syncs and for browsers not supporting sync event (Firefox, Safari).
+ // Also chromium based browsers with sw.sync as a fallback for sleepy sync events not taking action for a while.
+ sw.active.postMessage({
+ type: 'dexie-cloud-sync',
+ dbName: db.name,
+ purpose
+ });
+ }
+ else {
+ throw new Error(`Failed to trigger sync - there's no active service worker`);
+ }
+ return;
  }
- return;
- }
- catch (e) {
- if (!hasComplainedAboutSyncEvent) {
- console.debug(`Dexie Cloud: Could not register sync event`, e);
- hasComplainedAboutSyncEvent = true;
+ catch (e) {
+ if (!hasComplainedAboutSyncEvent) {
+ console.debug(`Dexie Cloud: Could not register sync event`, e);
+ hasComplainedAboutSyncEvent = true;
+ }
  }
- }
+ });
  }
- async function registerPeriodicSyncEvent(db) {
- try {
- // Register periodicSync event to SW:
- // @ts-ignore
- const { periodicSync } = await navigator.serviceWorker.ready;
- if (periodicSync) {
- try {
- await periodicSync.register(`dexie-cloud:${db.name}`, db.cloud.options?.periodicSync);
- console.debug(`Dexie Cloud: Successfully registered periodicsync event for ${db.name}`);
+ function registerPeriodicSyncEvent(db) {
+ var _a;
+ return __awaiter$1(this, void 0, void 0, function* () {
+ try {
+ // Register periodicSync event to SW:
+ // @ts-ignore
+ const { periodicSync } = yield navigator.serviceWorker.ready;
+ if (periodicSync) {
+ try {
+ yield periodicSync.register(`dexie-cloud:${db.name}`, (_a = db.cloud.options) === null || _a === void 0 ? void 0 : _a.periodicSync);
+ console.debug(`Dexie Cloud: Successfully registered periodicsync event for ${db.name}`);
+ }
+ catch (e) {
+ console.debug(`Dexie Cloud: Failed to register periodic sync. Your PWA must be installed to allow background sync.`, e);
+ }
  }
- catch (e) {
- console.debug(`Dexie Cloud: Failed to register periodic sync. Your PWA must be installed to allow background sync.`, e);
+ else {
+ console.debug(`Dexie Cloud: periodicSync not supported.`);
  }
  }
- else {
- console.debug(`Dexie Cloud: periodicSync not supported.`);
+ catch (e) {
+ console.debug(`Dexie Cloud: Could not register periodicSync for ${db.name}`, e);
  }
- }
- catch (e) {
- console.debug(`Dexie Cloud: Could not register periodicSync for ${db.name}`, e);
- }
+ });
  }

  function triggerSync(db, purpose) {
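The same build-target change explains the rewritten optional chaining above: db.cloud.options?.periodicSync becomes a conditional over a temporary _a that is hoisted to the top of the enclosing function (the added var _a; line). A self-contained sketch of that transform, using an illustrative helper name:

    // beta.15 output: native optional chaining
    function getPeriodicSyncOptions(db) {
      return db.cloud.options?.periodicSync;
    }

    // beta.18 output: the ?. operator expanded into explicit null/undefined checks
    function getPeriodicSyncOptions(db) {
      var _a;
      return (_a = db.cloud.options) === null || _a === void 0 ? void 0 : _a.periodicSync;
    }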
@@ -1982,19 +2012,15 @@ const b64encode = typeof Buffer !== "undefined"
  function interactWithUser(userInteraction, req) {
  let done = false;
  return new Promise((resolve, reject) => {
- const interactionProps = {
- ...req,
- onSubmit: (res) => {
+ const interactionProps = Object.assign(Object.assign({}, req), { onSubmit: (res) => {
  userInteraction.next(undefined);
  done = true;
  resolve(res);
- },
- onCancel: () => {
+ }, onCancel: () => {
  userInteraction.next(undefined);
  done = true;
  reject(new Dexie.AbortError("User cancelled"));
- },
- };
+ } });
  userInteraction.next(interactionProps);
  // Start subscribing for external updates to db.cloud.userInteraction, and if so, cancel this request.
  /*const subscription = userInteraction.subscribe((currentInteractionProps) => {
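The interactWithUser change is likewise mechanical: object spread is emitted as nested Object.assign calls for the older target, preserving the property-override order. An illustrative sketch (the wrapper and parameter names are hypothetical):

    // beta.15 output: object spread
    function buildProps(req, onSubmit, onCancel) {
      return { ...req, onSubmit, onCancel };
    }

    // beta.18 output: each spread layer becomes one Object.assign call,
    // so later properties still win over earlier ones
    function buildProps(req, onSubmit, onCancel) {
      return Object.assign(Object.assign({}, req), { onSubmit, onCancel });
    }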
@@ -2015,180 +2041,193 @@ function alertUser(userInteraction, title, ...alerts) {
2015
2041
  fields: {}
2016
2042
  });
2017
2043
  }
2018
- async function promptForEmail(userInteraction, title, emailHint) {
2019
- let email = emailHint || '';
2020
- while (!email || !/^[\w-\.]+@([\w-]+\.)+[\w-]{2,10}$/.test(email)) {
2021
- email = (await interactWithUser(userInteraction, {
2022
- type: 'email',
2023
- title,
2024
- alerts: email
2025
- ? [
2026
- {
2027
- type: 'error',
2028
- messageCode: 'INVALID_EMAIL',
2029
- message: 'Please enter a valid email address',
2030
- messageParams: {},
2044
+ function promptForEmail(userInteraction, title, emailHint) {
2045
+ return __awaiter$1(this, void 0, void 0, function* () {
2046
+ let email = emailHint || '';
2047
+ while (!email || !/^[\w-\.]+@([\w-]+\.)+[\w-]{2,10}$/.test(email)) {
2048
+ email = (yield interactWithUser(userInteraction, {
2049
+ type: 'email',
2050
+ title,
2051
+ alerts: email
2052
+ ? [
2053
+ {
2054
+ type: 'error',
2055
+ messageCode: 'INVALID_EMAIL',
2056
+ message: 'Please enter a valid email address',
2057
+ messageParams: {},
2058
+ },
2059
+ ]
2060
+ : [],
2061
+ fields: {
2062
+ email: {
2063
+ type: 'email',
2064
+ placeholder: 'you@somedomain.com',
2031
2065
  },
2032
- ]
2033
- : [],
2034
- fields: {
2035
- email: {
2036
- type: 'email',
2037
- placeholder: 'you@somedomain.com',
2038
2066
  },
2039
- },
2040
- })).email;
2041
- }
2042
- return email;
2067
+ })).email;
2068
+ }
2069
+ return email;
2070
+ });
2043
2071
  }
2044
- async function promptForOTP(userInteraction, email, alert) {
2045
- const alerts = [
2046
- {
2047
- type: 'info',
2048
- messageCode: 'OTP_SENT',
2049
- message: `A One-Time password has been sent to {email}`,
2050
- messageParams: { email },
2051
- },
2052
- ];
2053
- if (alert) {
2054
- alerts.push(alert);
2055
- }
2056
- const { otp } = await interactWithUser(userInteraction, {
2057
- type: 'otp',
2058
- title: 'Enter OTP',
2059
- alerts,
2060
- fields: {
2061
- otp: {
2062
- type: 'otp',
2063
- label: 'OTP',
2064
- placeholder: 'Paste OTP here',
2072
+ function promptForOTP(userInteraction, email, alert) {
2073
+ return __awaiter$1(this, void 0, void 0, function* () {
2074
+ const alerts = [
2075
+ {
2076
+ type: 'info',
2077
+ messageCode: 'OTP_SENT',
2078
+ message: `A One-Time password has been sent to {email}`,
2079
+ messageParams: { email },
2065
2080
  },
2066
- },
2081
+ ];
2082
+ if (alert) {
2083
+ alerts.push(alert);
2084
+ }
2085
+ const { otp } = yield interactWithUser(userInteraction, {
2086
+ type: 'otp',
2087
+ title: 'Enter OTP',
2088
+ alerts,
2089
+ fields: {
2090
+ otp: {
2091
+ type: 'otp',
2092
+ label: 'OTP',
2093
+ placeholder: 'Paste OTP here',
2094
+ },
2095
+ },
2096
+ });
2097
+ return otp;
2067
2098
  });
2068
- return otp;
2069
2099
  }
2070
2100
 
2071
- async function loadAccessToken(db) {
2072
- const currentUser = await db.getCurrentUser();
2073
- const { accessToken, accessTokenExpiration, refreshToken, refreshTokenExpiration, claims, } = currentUser;
2074
- if (!accessToken)
2075
- return;
2076
- const expTime = accessTokenExpiration?.getTime() ?? Infinity;
2077
- if (expTime > Date.now()) {
2078
- return accessToken;
2079
- }
2080
- if (!refreshToken) {
2081
- throw new Error(`Refresh token missing`);
2082
- }
2083
- const refreshExpTime = refreshTokenExpiration?.getTime() ?? Infinity;
2084
- if (refreshExpTime <= Date.now()) {
2085
- throw new Error(`Refresh token has expired`);
2086
- }
2087
- const refreshedLogin = await refreshAccessToken(db.cloud.options.databaseUrl, currentUser);
2088
- await db.table('$logins').update(claims.sub, {
2089
- accessToken: refreshedLogin.accessToken,
2090
- accessTokenExpiration: refreshedLogin.accessTokenExpiration,
2101
+ function loadAccessToken(db) {
2102
+ var _a, _b;
2103
+ return __awaiter$1(this, void 0, void 0, function* () {
2104
+ const currentUser = yield db.getCurrentUser();
2105
+ const { accessToken, accessTokenExpiration, refreshToken, refreshTokenExpiration, claims, } = currentUser;
2106
+ if (!accessToken)
2107
+ return;
2108
+ const expTime = (_a = accessTokenExpiration === null || accessTokenExpiration === void 0 ? void 0 : accessTokenExpiration.getTime()) !== null && _a !== void 0 ? _a : Infinity;
2109
+ if (expTime > Date.now()) {
2110
+ return accessToken;
2111
+ }
2112
+ if (!refreshToken) {
2113
+ throw new Error(`Refresh token missing`);
2114
+ }
2115
+ const refreshExpTime = (_b = refreshTokenExpiration === null || refreshTokenExpiration === void 0 ? void 0 : refreshTokenExpiration.getTime()) !== null && _b !== void 0 ? _b : Infinity;
2116
+ if (refreshExpTime <= Date.now()) {
2117
+ throw new Error(`Refresh token has expired`);
2118
+ }
2119
+ const refreshedLogin = yield refreshAccessToken(db.cloud.options.databaseUrl, currentUser);
2120
+ yield db.table('$logins').update(claims.sub, {
2121
+ accessToken: refreshedLogin.accessToken,
2122
+ accessTokenExpiration: refreshedLogin.accessTokenExpiration,
2123
+ });
2124
+ return refreshedLogin.accessToken;
2091
2125
  });
2092
- return refreshedLogin.accessToken;
2093
- }
2094
- async function authenticate(url, context, fetchToken, userInteraction, hints) {
2095
- if (context.accessToken &&
2096
- context.accessTokenExpiration.getTime() > Date.now()) {
2097
- return context;
2098
- }
2099
- else if (context.refreshToken &&
2100
- (!context.refreshTokenExpiration ||
2101
- context.refreshTokenExpiration.getTime() > Date.now())) {
2102
- return await refreshAccessToken(url, context);
2103
- }
2104
- else {
2105
- return await userAuthenticate(context, fetchToken, userInteraction, hints);
2106
- }
2107
2126
  }
2108
- async function refreshAccessToken(url, login) {
2109
- if (!login.refreshToken)
2110
- throw new Error(`Cannot refresh token - refresh token is missing.`);
2111
- if (!login.nonExportablePrivateKey)
2112
- throw new Error(`login.nonExportablePrivateKey is missing - cannot sign refresh token without a private key.`);
2113
- const time_stamp = Date.now();
2114
- const signing_algorithm = 'RSASSA-PKCS1-v1_5';
2115
- const textEncoder = new TextEncoder();
2116
- const data = textEncoder.encode(login.refreshToken + time_stamp);
2117
- const binarySignature = await crypto.subtle.sign(signing_algorithm, login.nonExportablePrivateKey, data);
2118
- const signature = b64encode(binarySignature);
2119
- const tokenRequest = {
2120
- grant_type: 'refresh_token',
2121
- refresh_token: login.refreshToken,
2122
- scopes: ['ACCESS_DB'],
2123
- signature,
2124
- signing_algorithm,
2125
- time_stamp,
2126
- };
2127
- const res = await fetch(`${url}/token`, {
2128
- body: JSON.stringify(tokenRequest),
2129
- method: 'post',
2130
- headers: { 'Content-Type': 'application/json' },
2131
- mode: 'cors',
2127
+ function authenticate(url, context, fetchToken, userInteraction, hints) {
2128
+ return __awaiter$1(this, void 0, void 0, function* () {
2129
+ if (context.accessToken &&
2130
+ context.accessTokenExpiration.getTime() > Date.now()) {
2131
+ return context;
2132
+ }
2133
+ else if (context.refreshToken &&
2134
+ (!context.refreshTokenExpiration ||
2135
+ context.refreshTokenExpiration.getTime() > Date.now())) {
2136
+ return yield refreshAccessToken(url, context);
2137
+ }
2138
+ else {
2139
+ return yield userAuthenticate(context, fetchToken, userInteraction, hints);
2140
+ }
2132
2141
  });
2133
- if (res.status !== 200)
2134
- throw new Error(`RefreshToken: Status ${res.status} from ${url}/token`);
2135
- const response = await res.json();
2136
- login.accessToken = response.accessToken;
2137
- login.accessTokenExpiration = response.accessTokenExpiration
2138
- ? new Date(response.accessTokenExpiration)
2139
- : undefined;
2140
- return login;
2141
- }
2142
- async function userAuthenticate(context, fetchToken, userInteraction, hints) {
2143
- const { privateKey, publicKey } = await crypto.subtle.generateKey({
2144
- name: 'RSASSA-PKCS1-v1_5',
2145
- modulusLength: 2048,
2146
- publicExponent: new Uint8Array([0x01, 0x00, 0x01]),
2147
- hash: { name: 'SHA-256' },
2148
- }, false, // Non-exportable...
2149
- ['sign', 'verify']);
2150
- if (!privateKey || !publicKey)
2151
- throw new Error(`Could not generate RSA keypair`); // Typings suggest these can be undefined...
2152
- context.nonExportablePrivateKey = privateKey; //...but storable!
2153
- const publicKeySPKI = await crypto.subtle.exportKey('spki', publicKey);
2154
- const publicKeyPEM = spkiToPEM(publicKeySPKI);
2155
- context.publicKey = publicKey;
2156
- try {
2157
- const response2 = await fetchToken({
2158
- public_key: publicKeyPEM,
2159
- hints,
2142
+ }
2143
+ function refreshAccessToken(url, login) {
2144
+ return __awaiter$1(this, void 0, void 0, function* () {
2145
+ if (!login.refreshToken)
2146
+ throw new Error(`Cannot refresh token - refresh token is missing.`);
2147
+ if (!login.nonExportablePrivateKey)
2148
+ throw new Error(`login.nonExportablePrivateKey is missing - cannot sign refresh token without a private key.`);
2149
+ const time_stamp = Date.now();
2150
+ const signing_algorithm = 'RSASSA-PKCS1-v1_5';
2151
+ const textEncoder = new TextEncoder();
2152
+ const data = textEncoder.encode(login.refreshToken + time_stamp);
2153
+ const binarySignature = yield crypto.subtle.sign(signing_algorithm, login.nonExportablePrivateKey, data);
2154
+ const signature = b64encode(binarySignature);
2155
+ const tokenRequest = {
2156
+ grant_type: 'refresh_token',
2157
+ refresh_token: login.refreshToken,
2158
+ scopes: ['ACCESS_DB'],
2159
+ signature,
2160
+ signing_algorithm,
2161
+ time_stamp,
2162
+ };
2163
+ const res = yield fetch(`${url}/token`, {
2164
+ body: JSON.stringify(tokenRequest),
2165
+ method: 'post',
2166
+ headers: { 'Content-Type': 'application/json' },
2167
+ mode: 'cors',
2160
2168
  });
2161
- if (response2.type !== 'tokens')
2162
- throw new Error(`Unexpected response type from token endpoint: ${response2.type}`);
2163
- context.accessToken = response2.accessToken;
2164
- context.accessTokenExpiration = new Date(response2.accessTokenExpiration);
2165
- context.refreshToken = response2.refreshToken;
2166
- if (response2.refreshTokenExpiration) {
2167
- context.refreshTokenExpiration = new Date(response2.refreshTokenExpiration);
2168
- }
2169
- context.userId = response2.claims.sub;
2170
- context.email = response2.claims.email;
2171
- context.name = response2.claims.name;
2172
- context.claims = response2.claims;
2173
- if (response2.alerts && response2.alerts.length > 0) {
2174
- await interactWithUser(userInteraction, {
2175
- type: 'message-alert',
2176
- title: 'Authentication Alert',
2177
- fields: {},
2178
- alerts: response2.alerts,
2169
+ if (res.status !== 200)
2170
+ throw new Error(`RefreshToken: Status ${res.status} from ${url}/token`);
2171
+ const response = yield res.json();
2172
+ login.accessToken = response.accessToken;
2173
+ login.accessTokenExpiration = response.accessTokenExpiration
2174
+ ? new Date(response.accessTokenExpiration)
2175
+ : undefined;
2176
+ return login;
2177
+ });
2178
+ }
2179
+ function userAuthenticate(context, fetchToken, userInteraction, hints) {
2180
+ return __awaiter$1(this, void 0, void 0, function* () {
2181
+ const { privateKey, publicKey } = yield crypto.subtle.generateKey({
2182
+ name: 'RSASSA-PKCS1-v1_5',
2183
+ modulusLength: 2048,
2184
+ publicExponent: new Uint8Array([0x01, 0x00, 0x01]),
2185
+ hash: { name: 'SHA-256' },
2186
+ }, false, // Non-exportable...
2187
+ ['sign', 'verify']);
2188
+ if (!privateKey || !publicKey)
2189
+ throw new Error(`Could not generate RSA keypair`); // Typings suggest these can be undefined...
2190
+ context.nonExportablePrivateKey = privateKey; //...but storable!
2191
+ const publicKeySPKI = yield crypto.subtle.exportKey('spki', publicKey);
2192
+ const publicKeyPEM = spkiToPEM(publicKeySPKI);
2193
+ context.publicKey = publicKey;
2194
+ try {
2195
+ const response2 = yield fetchToken({
2196
+ public_key: publicKeyPEM,
2197
+ hints,
2179
2198
  });
2199
+ if (response2.type !== 'tokens')
2200
+ throw new Error(`Unexpected response type from token endpoint: ${response2.type}`);
2201
+ context.accessToken = response2.accessToken;
2202
+ context.accessTokenExpiration = new Date(response2.accessTokenExpiration);
2203
+ context.refreshToken = response2.refreshToken;
2204
+ if (response2.refreshTokenExpiration) {
2205
+ context.refreshTokenExpiration = new Date(response2.refreshTokenExpiration);
2206
+ }
2207
+ context.userId = response2.claims.sub;
2208
+ context.email = response2.claims.email;
2209
+ context.name = response2.claims.name;
2210
+ context.claims = response2.claims;
2211
+ if (response2.alerts && response2.alerts.length > 0) {
2212
+ yield interactWithUser(userInteraction, {
2213
+ type: 'message-alert',
2214
+ title: 'Authentication Alert',
2215
+ fields: {},
2216
+ alerts: response2.alerts,
2217
+ });
2218
+ }
2219
+ return context;
2180
2220
  }
2181
- return context;
2182
- }
2183
- catch (error) {
2184
- await alertUser(userInteraction, 'Authentication Failed', {
2185
- type: 'error',
2186
- messageCode: 'GENERIC_ERROR',
2187
- message: `We're having a problem authenticating right now.`,
2188
- messageParams: {}
2189
- }).catch(() => { });
2190
- throw error;
2191
- }
2221
+ catch (error) {
2222
+ yield alertUser(userInteraction, 'Authentication Failed', {
2223
+ type: 'error',
2224
+ messageCode: 'GENERIC_ERROR',
2225
+ message: `We're having a problem authenticating right now.`,
2226
+ messageParams: {}
2227
+ }).catch(() => { });
2228
+ throw error;
2229
+ }
2230
+ });
2192
2231
  }
2193
2232
  function spkiToPEM(keydata) {
2194
2233
  const keydataB64 = b64encode(keydata);
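The loadAccessToken rewrite in this hunk also shows how nullish coalescing is handled: accessTokenExpiration?.getTime() ?? Infinity expands into a two-step check over a hoisted temporary. A sketch with an illustrative wrapper function:

    // beta.15 output: ?. and ??
    function getExpiryTime(accessTokenExpiration) {
      return accessTokenExpiration?.getTime() ?? Infinity;
    }

    // beta.18 output: both operators expanded into explicit comparisons
    // against null and void 0 (undefined)
    function getExpiryTime(accessTokenExpiration) {
      var _a;
      return (_a = accessTokenExpiration === null || accessTokenExpiration === void 0
        ? void 0
        : accessTokenExpiration.getTime()) !== null && _a !== void 0 ? _a : Infinity;
    }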
@@ -2224,9 +2263,11 @@ class AuthPersistedContext {
  lastLogin: new Date(0)
  }));
  }
- async save() {
- const db = wm$1.get(this);
- db.table("$logins").put(this);
+ save() {
+ return __awaiter$1(this, void 0, void 0, function* () {
+ const db = wm$1.get(this);
+ db.table("$logins").put(this);
+ });
  }
  }

@@ -2242,92 +2283,95 @@ class HttpError extends Error {
2242
2283
 
2243
2284
  function otpFetchTokenCallback(db) {
2244
2285
  const { userInteraction } = db.cloud;
2245
- return async function otpAuthenticate({ public_key, hints }) {
2246
- let tokenRequest;
2247
- const url = db.cloud.options?.databaseUrl;
2248
- if (!url)
2249
- throw new Error(`No database URL given.`);
2250
- if (hints?.grant_type === 'demo') {
2251
- const demo_user = await promptForEmail(userInteraction, 'Enter a demo user email', hints?.email || hints?.userId);
2252
- tokenRequest = {
2253
- demo_user,
2254
- grant_type: 'demo',
2255
- scopes: ['ACCESS_DB'],
2256
- public_key,
2257
- };
2258
- }
2259
- else {
2260
- const email = await promptForEmail(userInteraction, 'Enter email address', hints?.email);
2261
- tokenRequest = {
2262
- email,
2263
- grant_type: 'otp',
2264
- scopes: ['ACCESS_DB'],
2265
- public_key,
2266
- };
2267
- }
2268
- const res1 = await fetch(`${url}/token`, {
2269
- body: JSON.stringify(tokenRequest),
2270
- method: 'post',
2271
- headers: { 'Content-Type': 'application/json', mode: 'cors' },
2272
- });
2273
- if (res1.status !== 200) {
2274
- const errMsg = await res1.text();
2275
- await alertUser(userInteraction, "Token request failed", {
2276
- type: 'error',
2277
- messageCode: 'GENERIC_ERROR',
2278
- message: errMsg,
2279
- messageParams: {}
2280
- }).catch(() => { });
2281
- throw new HttpError(res1, errMsg);
2282
- }
2283
- const response = await res1.json();
2284
- if (response.type === 'tokens') {
2285
- // Demo user request can get a "tokens" response right away
2286
- return response;
2287
- }
2288
- else if (tokenRequest.grant_type === 'otp') {
2289
- if (response.type !== 'otp-sent')
2290
- throw new Error(`Unexpected response from ${url}/token`);
2291
- const otp = await promptForOTP(userInteraction, tokenRequest.email);
2292
- tokenRequest.otp = otp || '';
2293
- tokenRequest.otp_id = response.otp_id;
2294
- let res2 = await fetch(`${url}/token`, {
2286
+ return function otpAuthenticate({ public_key, hints }) {
2287
+ var _a;
2288
+ return __awaiter$1(this, void 0, void 0, function* () {
2289
+ let tokenRequest;
2290
+ const url = (_a = db.cloud.options) === null || _a === void 0 ? void 0 : _a.databaseUrl;
2291
+ if (!url)
2292
+ throw new Error(`No database URL given.`);
2293
+ if ((hints === null || hints === void 0 ? void 0 : hints.grant_type) === 'demo') {
2294
+ const demo_user = yield promptForEmail(userInteraction, 'Enter a demo user email', (hints === null || hints === void 0 ? void 0 : hints.email) || (hints === null || hints === void 0 ? void 0 : hints.userId));
2295
+ tokenRequest = {
2296
+ demo_user,
2297
+ grant_type: 'demo',
2298
+ scopes: ['ACCESS_DB'],
2299
+ public_key,
2300
+ };
2301
+ }
2302
+ else {
2303
+ const email = yield promptForEmail(userInteraction, 'Enter email address', hints === null || hints === void 0 ? void 0 : hints.email);
2304
+ tokenRequest = {
2305
+ email,
2306
+ grant_type: 'otp',
2307
+ scopes: ['ACCESS_DB'],
2308
+ public_key,
2309
+ };
2310
+ }
2311
+ const res1 = yield fetch(`${url}/token`, {
2295
2312
  body: JSON.stringify(tokenRequest),
2296
2313
  method: 'post',
2297
- headers: { 'Content-Type': 'application/json' },
2298
- mode: 'cors',
2314
+ headers: { 'Content-Type': 'application/json', mode: 'cors' },
2299
2315
  });
2300
- while (res2.status === 401) {
2301
- const errorText = await res2.text();
2302
- tokenRequest.otp = await promptForOTP(userInteraction, tokenRequest.email, {
2316
+ if (res1.status !== 200) {
2317
+ const errMsg = yield res1.text();
2318
+ yield alertUser(userInteraction, "Token request failed", {
2303
2319
  type: 'error',
2304
- messageCode: 'INVALID_OTP',
2305
- message: errorText,
2320
+ messageCode: 'GENERIC_ERROR',
2321
+ message: errMsg,
2306
2322
  messageParams: {}
2307
- });
2308
- res2 = await fetch(`${url}/token`, {
2323
+ }).catch(() => { });
2324
+ throw new HttpError(res1, errMsg);
2325
+ }
2326
+ const response = yield res1.json();
2327
+ if (response.type === 'tokens') {
2328
+ // Demo user request can get a "tokens" response right away
2329
+ return response;
2330
+ }
2331
+ else if (tokenRequest.grant_type === 'otp') {
2332
+ if (response.type !== 'otp-sent')
2333
+ throw new Error(`Unexpected response from ${url}/token`);
2334
+ const otp = yield promptForOTP(userInteraction, tokenRequest.email);
2335
+ tokenRequest.otp = otp || '';
2336
+ tokenRequest.otp_id = response.otp_id;
2337
+ let res2 = yield fetch(`${url}/token`, {
2309
2338
  body: JSON.stringify(tokenRequest),
2310
2339
  method: 'post',
2311
2340
  headers: { 'Content-Type': 'application/json' },
2312
2341
  mode: 'cors',
2313
2342
  });
2343
+ while (res2.status === 401) {
2344
+ const errorText = yield res2.text();
2345
+ tokenRequest.otp = yield promptForOTP(userInteraction, tokenRequest.email, {
2346
+ type: 'error',
2347
+ messageCode: 'INVALID_OTP',
2348
+ message: errorText,
2349
+ messageParams: {}
2350
+ });
2351
+ res2 = yield fetch(`${url}/token`, {
2352
+ body: JSON.stringify(tokenRequest),
2353
+ method: 'post',
2354
+ headers: { 'Content-Type': 'application/json' },
2355
+ mode: 'cors',
2356
+ });
2357
+ }
2358
+ if (res2.status !== 200) {
2359
+ const errMsg = yield res2.text();
2360
+ yield alertUser(userInteraction, "OTP Authentication Failed", {
2361
+ type: 'error',
2362
+ messageCode: 'GENERIC_ERROR',
2363
+ message: errMsg,
2364
+ messageParams: {}
2365
+ }).catch(() => { });
2366
+ throw new HttpError(res2, errMsg);
2367
+ }
2368
+ const response2 = yield res2.json();
2369
+ return response2;
2314
2370
  }
2315
- if (res2.status !== 200) {
2316
- const errMsg = await res2.text();
2317
- await alertUser(userInteraction, "OTP Authentication Failed", {
2318
- type: 'error',
2319
- messageCode: 'GENERIC_ERROR',
2320
- message: errMsg,
2321
- messageParams: {}
2322
- }).catch(() => { });
2323
- throw new HttpError(res2, errMsg);
2371
+ else {
2372
+ throw new Error(`Unexpected response from ${url}/token`);
2324
2373
  }
2325
- const response2 = await res2.json();
2326
- return response2;
2327
- }
2328
- else {
2329
- throw new Error(`Unexpected response from ${url}/token`);
2330
- }
2374
+ });
2331
2375
  };
2332
2376
  }
2333
2377
 
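otpFetchTokenCallback shows the same rewrite applied to an async function expression: the returned async function otpAuthenticate becomes a plain function whose body is wrapped in __awaiter$1, and the _a temporary for the hints?.* accesses is declared in that returned function, outside the generator. The shape, excerpted from the hunk above with the bodies elided:

    // beta.15 output
    return async function otpAuthenticate({ public_key, hints }) {
      const url = db.cloud.options?.databaseUrl;
      // ...
    };

    // beta.18 output
    return function otpAuthenticate({ public_key, hints }) {
      var _a;
      return __awaiter$1(this, void 0, void 0, function* () {
        const url = (_a = db.cloud.options) === null || _a === void 0 ? void 0 : _a.databaseUrl;
        // ...
      });
    };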
@@ -2342,83 +2386,87 @@ function otpFetchTokenCallback(db) {
2342
2386
  * @param db
2343
2387
  * @param newUser
2344
2388
  */
2345
- async function setCurrentUser(db, user) {
2346
- if (user.userId === db.cloud.currentUserId)
2347
- return; // Already this user.
2348
- const $logins = db.table('$logins');
2349
- await db.transaction('rw', $logins, async (tx) => {
2350
- const existingLogins = await $logins.toArray();
2351
- await Promise.all(existingLogins
2352
- .filter((login) => login.userId !== user.userId && login.isLoggedIn)
2353
- .map((login) => {
2354
- login.isLoggedIn = false;
2355
- return $logins.put(login);
2389
+ function setCurrentUser(db, user) {
2390
+ return __awaiter$1(this, void 0, void 0, function* () {
2391
+ if (user.userId === db.cloud.currentUserId)
2392
+ return; // Already this user.
2393
+ const $logins = db.table('$logins');
2394
+ yield db.transaction('rw', $logins, (tx) => __awaiter$1(this, void 0, void 0, function* () {
2395
+ const existingLogins = yield $logins.toArray();
2396
+ yield Promise.all(existingLogins
2397
+ .filter((login) => login.userId !== user.userId && login.isLoggedIn)
2398
+ .map((login) => {
2399
+ login.isLoggedIn = false;
2400
+ return $logins.put(login);
2401
+ }));
2402
+ user.isLoggedIn = true;
2403
+ user.lastLogin = new Date();
2404
+ yield user.save();
2405
+ console.debug('Saved new user', user.email);
2356
2406
  }));
2357
- user.isLoggedIn = true;
2358
- user.lastLogin = new Date();
2359
- await user.save();
2360
- console.debug('Saved new user', user.email);
2407
+ yield new Promise((resolve) => {
2408
+ if (db.cloud.currentUserId === user.userId) {
2409
+ resolve(null);
2410
+ }
2411
+ else {
2412
+ const subscription = db.cloud.currentUser.subscribe((currentUser) => {
2413
+ if (currentUser.userId === user.userId) {
2414
+ subscription.unsubscribe();
2415
+ resolve(null);
2416
+ }
2417
+ });
2418
+ }
2419
+ });
2420
+ // TANKAR!!!!
2421
+ // V: Service workern kommer inte ha tillgång till currentUserObservable om den inte istället härrör från ett liveQuery.
2422
+ // V: Samma med andra windows.
2423
+ // V: Så kanske göra om den till att häröra från liveQuery som läser $logins.orderBy('lastLogin').last().
2424
+ // V: Då bara vara medveten om:
2425
+ // V: En sån observable börjar hämta data vid första subscribe
2426
+ // V: Vi har inget "inital value" men kan emulera det till att vara ANONYMOUS_USER
2427
+ // V: Om requireAuth är true, så borde db.on(ready) hålla databasen stängd för alla utom denna observable.
2428
+ // V: Om inte så behöver den inte blocka.
2429
+ // Andra tankar:
2430
+ // * Man kan inte byta användare när man är offline. Skulle gå att flytta realms till undanstuff-tabell vid user-change.
2431
+ // men troligen inte värt det.
2432
+ // * Istället: sälj inte inte switch-user funktionalitet utan tala enbart om inloggat vs icke inloggat läge.
2433
+ // * populate $logins med ANONYMOUS så att en påbörjad inloggning inte räknas, alternativt ha en boolean prop!
2434
+ // Kanske bäst ha en boolean prop!
2435
+ // * Alternativ switch-user funktionalitet:
2436
+ // * DBCore gömmer data från realms man inte har tillgång till.
2437
+ // * Cursor impl behövs också då.
2438
+ // * Då blir det snabba user switch.
2439
+ // * claims-settet som skickas till servern blir summan av alla claims. Då måste servern stödja multipla tokens eller
2440
+ // att ens token är ett samlad.
2361
2441
  });
2362
- await new Promise((resolve) => {
2363
- if (db.cloud.currentUserId === user.userId) {
2364
- resolve(null);
2365
- }
2366
- else {
2367
- const subscription = db.cloud.currentUser.subscribe((currentUser) => {
2368
- if (currentUser.userId === user.userId) {
2369
- subscription.unsubscribe();
2370
- resolve(null);
2442
+ }
2443
+
2444
+ function login(db, hints) {
2445
+ return __awaiter$1(this, void 0, void 0, function* () {
2446
+ const currentUser = yield db.getCurrentUser();
2447
+ if (currentUser.isLoggedIn) {
2448
+ if (hints) {
2449
+ if (hints.email && db.cloud.currentUser.value.email !== hints.email) {
2450
+ throw new Error(`Must logout before changing user`);
2451
+ }
2452
+ if (hints.userId && db.cloud.currentUserId !== hints.userId) {
2453
+ throw new Error(`Must logout before changing user`);
2371
2454
  }
2372
- });
2373
- }
2374
- });
2375
- // TANKAR!!!!
2376
- // V: Service workern kommer inte ha tillgång till currentUserObservable om den inte istället härrör från ett liveQuery.
2377
- // V: Samma med andra windows.
2378
- // V: Så kanske göra om den till att häröra från liveQuery som läser $logins.orderBy('lastLogin').last().
2379
- // V: Då bara vara medveten om:
2380
- // V: En sån observable börjar hämta data vid första subscribe
2381
- // V: Vi har inget "inital value" men kan emulera det till att vara ANONYMOUS_USER
2382
- // V: Om requireAuth är true, så borde db.on(ready) hålla databasen stängd för alla utom denna observable.
2383
- // V: Om inte så behöver den inte blocka.
2384
- // Andra tankar:
2385
- // * Man kan inte byta användare när man är offline. Skulle gå att flytta realms till undanstuff-tabell vid user-change.
2386
- // men troligen inte värt det.
2387
- // * Istället: sälj inte inte switch-user funktionalitet utan tala enbart om inloggat vs icke inloggat läge.
2388
- // * populate $logins med ANONYMOUS så att en påbörjad inloggning inte räknas, alternativt ha en boolean prop!
2389
- // Kanske bäst ha en boolean prop!
2390
- // * Alternativ switch-user funktionalitet:
2391
- // * DBCore gömmer data från realms man inte har tillgång till.
2392
- // * Cursor impl behövs också då.
2393
- // * Då blir det snabba user switch.
2394
- // * claims-settet som skickas till servern blir summan av alla claims. Då måste servern stödja multipla tokens eller
2395
- // att ens token är ett samlad.
2396
- }
2397
-
2398
- async function login(db, hints) {
2399
- const currentUser = await db.getCurrentUser();
2400
- if (currentUser.isLoggedIn) {
2401
- if (hints) {
2402
- if (hints.email && db.cloud.currentUser.value.email !== hints.email) {
2403
- throw new Error(`Must logout before changing user`);
2404
- }
2405
- if (hints.userId && db.cloud.currentUserId !== hints.userId) {
2406
- throw new Error(`Must logout before changing user`);
2407
2455
  }
2456
+ // Already authenticated according to given hints.
2457
+ return;
2408
2458
  }
2409
- // Already authenticated according to given hints.
2410
- return;
2411
- }
2412
- const context = new AuthPersistedContext(db, {
2413
- claims: {},
2414
- lastLogin: new Date(0),
2459
+ const context = new AuthPersistedContext(db, {
2460
+ claims: {},
2461
+ lastLogin: new Date(0),
2462
+ });
2463
+ yield authenticate(db.cloud.options.databaseUrl, context, db.cloud.options.fetchTokens || otpFetchTokenCallback(db), db.cloud.userInteraction, hints);
2464
+ yield context.save();
2465
+ yield setCurrentUser(db, context);
2466
+ // Make sure to resync as the new login will be authorized
2467
+ // for new realms.
2468
+ triggerSync(db, "pull");
2415
2469
  });
2416
- await authenticate(db.cloud.options.databaseUrl, context, db.cloud.options.fetchTokens || otpFetchTokenCallback(db), db.cloud.userInteraction, hints);
2417
- await context.save();
2418
- await setCurrentUser(db, context);
2419
- // Make sure to resync as the new login will be authorized
2420
- // for new realms.
2421
- triggerSync(db, "pull");
2422
2470
  }
2423
2471
 
2424
2472
  const UNAUTHORIZED_USER = {
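setCurrentUser shows how async arrow functions fare under the new target: the callback passed to db.transaction keeps its arrow form but immediately returns an __awaiter$1-wrapped generator, so the transaction still receives a promise-returning function. Excerpted from the hunk above, bodies shortened:

    // beta.15 output
    await db.transaction('rw', $logins, async (tx) => {
      const existingLogins = await $logins.toArray();
      // ...
    });

    // beta.18 output
    yield db.transaction('rw', $logins, (tx) => __awaiter$1(this, void 0, void 0, function* () {
      const existingLogins = yield $logins.toArray();
      // ...
    }));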
@@ -2433,7 +2481,7 @@ try {
  Object.freeze(UNAUTHORIZED_USER);
  Object.freeze(UNAUTHORIZED_USER.claims);
  }
- catch { }
+ catch (_a) { }

  const swHolder = {};
  const swContainer = self.document && navigator.serviceWorker; // self.document is to verify we're not the SW ourself
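The catch { } to catch (_a) { } change is the ES2019 optional catch binding being compiled away: older targets require a named parameter, so an unused _a binding is introduced. In isolation:

    // beta.15 output: optional catch binding
    try {
      Object.freeze(UNAUTHORIZED_USER);
    }
    catch { }

    // beta.18 output: an unused binding added for older engines
    try {
      Object.freeze(UNAUTHORIZED_USER);
    }
    catch (_a) { }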
@@ -2442,8 +2490,9 @@ if (swContainer)
  if (typeof self !== 'undefined' && 'clients' in self && !self.document) {
  // We are the service worker. Propagate messages to all our clients.
  addEventListener('message', (ev) => {
- if (ev.data?.type?.startsWith('sw-broadcast-')) {
- [...self['clients'].matchAll({ includeUncontrolled: true })].forEach((client) => client.id !== ev.source?.id && client.postMessage(ev.data));
+ var _a, _b;
+ if ((_b = (_a = ev.data) === null || _a === void 0 ? void 0 : _a.type) === null || _b === void 0 ? void 0 : _b.startsWith('sw-broadcast-')) {
+ [...self['clients'].matchAll({ includeUncontrolled: true })].forEach((client) => { var _a; return client.id !== ((_a = ev.source) === null || _a === void 0 ? void 0 : _a.id) && client.postMessage(ev.data); });
  }
  });
  }
@@ -2455,7 +2504,8 @@ class SWBroadcastChannel {
  if (!swContainer)
  return () => { };
  const forwarder = (ev) => {
- if (ev.data?.type === `sw-broadcast-${this.name}`) {
+ var _a;
+ if (((_a = ev.data) === null || _a === void 0 ? void 0 : _a.type) === `sw-broadcast-${this.name}`) {
  listener(ev.data.message);
  }
  };
@@ -2463,6 +2513,7 @@ class SWBroadcastChannel {
  return () => swContainer.removeEventListener('message', forwarder);
  }
  postMessage(message) {
+ var _a;
  if (typeof self['clients'] === 'object') {
  // We're a service worker. Propagate to our browser clients.
  [...self['clients'].matchAll({ includeUncontrolled: true })].forEach((client) => client.postMessage({
@@ -2473,7 +2524,7 @@ class SWBroadcastChannel {
  else if (swHolder.registration) {
  // We're a client (browser window or other worker)
  // Post to SW so it can repost to all its clients and to itself
- swHolder.registration.active?.postMessage({
+ (_a = swHolder.registration.active) === null || _a === void 0 ? void 0 : _a.postMessage({
  type: `sw-broadcast-${this.name}`,
  message
  });
@@ -2516,22 +2567,24 @@ class BroadcastedAndLocalEvent extends Observable$1 {
  this.bc = bc;
  }
  next(message) {
- console.debug("BroadcastedAndLocalEvent: bc.postMessage()", { ...message }, "bc is a", this.bc);
+ console.debug("BroadcastedAndLocalEvent: bc.postMessage()", Object.assign({}, message), "bc is a", this.bc);
  this.bc.postMessage(message);
  const ev = new CustomEvent(`lbc-${this.name}`, { detail: message });
  self.dispatchEvent(ev);
  }
  }

- async function computeRealmSetHash({ realms, inviteRealms, }) {
- const data = JSON.stringify([
- ...realms.map((realmId) => ({ realmId, accepted: true })),
- ...inviteRealms.map((realmId) => ({ realmId, accepted: false })),
- ].sort((a, b) => a.realmId < b.realmId ? -1 : a.realmId > b.realmId ? 1 : 0));
- const byteArray = new TextEncoder().encode(data);
- const digestBytes = await crypto.subtle.digest('SHA-1', byteArray);
- const base64 = b64encode(digestBytes);
- return base64;
+ function computeRealmSetHash({ realms, inviteRealms, }) {
+ return __awaiter$1(this, void 0, void 0, function* () {
+ const data = JSON.stringify([
+ ...realms.map((realmId) => ({ realmId, accepted: true })),
+ ...inviteRealms.map((realmId) => ({ realmId, accepted: false })),
+ ].sort((a, b) => a.realmId < b.realmId ? -1 : a.realmId > b.realmId ? 1 : 0));
+ const byteArray = new TextEncoder().encode(data);
+ const digestBytes = yield crypto.subtle.digest('SHA-1', byteArray);
+ const base64 = b64encode(digestBytes);
+ return base64;
+ });
  }

  function getSyncableTables(db) {
@@ -2546,7 +2599,8 @@ function getMutationTable(tableName) {
  }

  function getTableFromMutationTable(mutationTable) {
- const tableName = /^\$(.*)_mutations$/.exec(mutationTable)?.[1];
+ var _a;
+ const tableName = (_a = /^\$(.*)_mutations$/.exec(mutationTable)) === null || _a === void 0 ? void 0 : _a[1];
  if (!tableName)
  throw new Error(`Given mutationTable ${mutationTable} is not correct`);
  return tableName;
@@ -2557,49 +2611,51 @@ function flatten(a) {
2557
2611
  return concat.apply([], a);
2558
2612
  }
2559
2613
 
2560
- async function listClientChanges(mutationTables, db, { since = {}, limit = Infinity } = {}) {
2561
- const allMutsOnTables = await Promise.all(mutationTables.map(async (mutationTable) => {
2562
- const tableName = getTableFromMutationTable(mutationTable.name);
2563
- const lastRevision = since[tableName];
2564
- let query = lastRevision
2565
- ? mutationTable.where('rev').above(lastRevision)
2566
- : mutationTable;
2567
- if (limit < Infinity)
2568
- query = query.limit(limit);
2569
- const muts = await query.toArray();
2570
- //const objTable = db.table(tableName);
2571
- /*for (const mut of muts) {
2572
- if (mut.type === "insert" || mut.type === "upsert") {
2573
- mut.values = await objTable.bulkGet(mut.keys);
2574
- }
2575
- }*/
2576
- return muts.map((mut) => ({
2577
- table: tableName,
2578
- mut,
2579
- }));
2580
- }));
2581
- // Sort by time to get a true order of the operations (between tables)
2582
- const sorted = flatten(allMutsOnTables).sort((a, b) => a.mut.ts - b.mut.ts);
2583
- const result = [];
2584
- let currentEntry = null;
2585
- let currentTxid = null;
2586
- for (const { table, mut } of sorted) {
2587
- if (currentEntry &&
2588
- currentEntry.table === table &&
2589
- currentTxid === mut.txid) {
2590
- currentEntry.muts.push(mut);
2591
- }
2592
- else {
2593
- currentEntry = {
2594
- table,
2595
- muts: [mut],
2596
- };
2597
- currentTxid = mut.txid;
2598
- result.push(currentEntry);
2614
+ function listClientChanges(mutationTables, db, { since = {}, limit = Infinity } = {}) {
2615
+ return __awaiter$1(this, void 0, void 0, function* () {
2616
+ const allMutsOnTables = yield Promise.all(mutationTables.map((mutationTable) => __awaiter$1(this, void 0, void 0, function* () {
2617
+ const tableName = getTableFromMutationTable(mutationTable.name);
2618
+ const lastRevision = since[tableName];
2619
+ let query = lastRevision
2620
+ ? mutationTable.where('rev').above(lastRevision)
2621
+ : mutationTable;
2622
+ if (limit < Infinity)
2623
+ query = query.limit(limit);
2624
+ const muts = yield query.toArray();
2625
+ //const objTable = db.table(tableName);
2626
+ /*for (const mut of muts) {
2627
+ if (mut.type === "insert" || mut.type === "upsert") {
2628
+ mut.values = await objTable.bulkGet(mut.keys);
2629
+ }
2630
+ }*/
2631
+ return muts.map((mut) => ({
2632
+ table: tableName,
2633
+ mut,
2634
+ }));
2635
+ })));
2636
+ // Sort by time to get a true order of the operations (between tables)
2637
+ const sorted = flatten(allMutsOnTables).sort((a, b) => a.mut.ts - b.mut.ts);
2638
+ const result = [];
2639
+ let currentEntry = null;
2640
+ let currentTxid = null;
2641
+ for (const { table, mut } of sorted) {
2642
+ if (currentEntry &&
2643
+ currentEntry.table === table &&
2644
+ currentTxid === mut.txid) {
2645
+ currentEntry.muts.push(mut);
2646
+ }
2647
+ else {
2648
+ currentEntry = {
2649
+ table,
2650
+ muts: [mut],
2651
+ };
2652
+ currentTxid = mut.txid;
2653
+ result.push(currentEntry);
2654
+ }
2599
2655
  }
2600
- }
2601
- // Filter out those tables that doesn't have any mutations:
2602
- return result;
2656
+ // Filter out those tables that doesn't have any mutations:
2657
+ return result;
2658
+ });
2603
2659
  }
2604
2660
 
2605
2661
  function randomString(bytes) {
@@ -2608,58 +2664,60 @@ function randomString(bytes) {
2608
2664
  return btoa(String.fromCharCode.apply(null, buf));
2609
2665
  }
2610
2666
 
2611
- async function listSyncifiedChanges(tablesToSyncify, currentUser, schema, alreadySyncedRealms) {
2612
- const txid = `upload-${randomString(8)}`;
2613
- if (currentUser.isLoggedIn) {
2614
- if (tablesToSyncify.length > 0) {
2615
- const ignoredRealms = new Set(alreadySyncedRealms || []);
2616
- const upserts = await Promise.all(tablesToSyncify.map(async (table) => {
2617
- const { extractKey } = table.core.schema.primaryKey;
2618
- if (!extractKey)
2619
- return { table: table.name, muts: [] }; // Outbound tables are not synced.
2620
- const dexieCloudTableSchema = schema[table.name];
2621
- const query = dexieCloudTableSchema?.generatedGlobalId
2622
- ? table.filter((item) => {
2623
- const id = extractKey(item);
2624
- return (!ignoredRealms.has(item.realmId || '') &&
2625
- //(id[0] !== '#' || !!item.$ts) && // Private obj need no sync if not changed
2626
- isValidSyncableID(id));
2627
- })
2628
- : table.filter((item) => {
2629
- extractKey(item);
2630
- return (!ignoredRealms.has(item.realmId || '') &&
2631
- //(id[0] !== '#' || !!item.$ts) && // Private obj need no sync if not changed
2632
- isValidAtID(extractKey(item), dexieCloudTableSchema?.idPrefix));
2633
- });
2634
- const unsyncedObjects = await query.toArray();
2635
- if (unsyncedObjects.length > 0) {
2636
- const mut = {
2637
- type: 'upsert',
2638
- values: unsyncedObjects,
2639
- keys: unsyncedObjects.map(extractKey),
2640
- userId: currentUser.userId,
2641
- txid,
2642
- };
2643
- return {
2644
- table: table.name,
2645
- muts: [mut],
2646
- };
2647
- }
2648
- else {
2649
- return {
2650
- table: table.name,
2651
- muts: [],
2652
- };
2653
- }
2654
- }));
2655
- return upserts.filter((op) => op.muts.length > 0);
2667
+ function listSyncifiedChanges(tablesToSyncify, currentUser, schema, alreadySyncedRealms) {
2668
+ return __awaiter$1(this, void 0, void 0, function* () {
2669
+ const txid = `upload-${randomString(8)}`;
2670
+ if (currentUser.isLoggedIn) {
2671
+ if (tablesToSyncify.length > 0) {
2672
+ const ignoredRealms = new Set(alreadySyncedRealms || []);
2673
+ const upserts = yield Promise.all(tablesToSyncify.map((table) => __awaiter$1(this, void 0, void 0, function* () {
2674
+ const { extractKey } = table.core.schema.primaryKey;
2675
+ if (!extractKey)
2676
+ return { table: table.name, muts: [] }; // Outbound tables are not synced.
2677
+ const dexieCloudTableSchema = schema[table.name];
2678
+ const query = (dexieCloudTableSchema === null || dexieCloudTableSchema === void 0 ? void 0 : dexieCloudTableSchema.generatedGlobalId)
2679
+ ? table.filter((item) => {
2680
+ const id = extractKey(item);
2681
+ return (!ignoredRealms.has(item.realmId || '') &&
2682
+ //(id[0] !== '#' || !!item.$ts) && // Private obj need no sync if not changed
2683
+ isValidSyncableID(id));
2684
+ })
2685
+ : table.filter((item) => {
2686
+ extractKey(item);
2687
+ return (!ignoredRealms.has(item.realmId || '') &&
2688
+ //(id[0] !== '#' || !!item.$ts) && // Private obj need no sync if not changed
2689
+ isValidAtID(extractKey(item), dexieCloudTableSchema === null || dexieCloudTableSchema === void 0 ? void 0 : dexieCloudTableSchema.idPrefix));
2690
+ });
2691
+ const unsyncedObjects = yield query.toArray();
2692
+ if (unsyncedObjects.length > 0) {
2693
+ const mut = {
2694
+ type: 'upsert',
2695
+ values: unsyncedObjects,
2696
+ keys: unsyncedObjects.map(extractKey),
2697
+ userId: currentUser.userId,
2698
+ txid,
2699
+ };
2700
+ return {
2701
+ table: table.name,
2702
+ muts: [mut],
2703
+ };
2704
+ }
2705
+ else {
2706
+ return {
2707
+ table: table.name,
2708
+ muts: [],
2709
+ };
2710
+ }
2711
+ })));
2712
+ return upserts.filter((op) => op.muts.length > 0);
2713
+ }
2656
2714
  }
2657
- }
2658
- return [];
2715
+ return [];
2716
+ });
2659
2717
  }
2660
2718
 
2661
2719
  function getTablesToSyncify(db, syncState) {
2662
- const syncedTables = syncState?.syncedTables || [];
2720
+ const syncedTables = (syncState === null || syncState === void 0 ? void 0 : syncState.syncedTables) || [];
2663
2721
  const syncableTables = getSyncableTables(db);
2664
2722
  const tablesToSyncify = syncableTables.filter((tbl) => !syncedTables.includes(tbl.name));
2665
2723
  return tablesToSyncify;
@@ -3181,23 +3239,17 @@ class FakeBigInt {
  return this.v;
  }
  }
- const defs = {
- ...undefinedDef,
- ...(hasBigIntSupport
- ? {}
- : {
- bigint: {
- test: (val) => val instanceof FakeBigInt,
- replace: (fakeBigInt) => {
- return {
- $t: 'bigint',
- ...fakeBigInt
- };
- },
- revive: ({ v, }) => new FakeBigInt(v)
- }
- })
- };
+ const defs = Object.assign(Object.assign({}, undefinedDef), (hasBigIntSupport
+ ? {}
+ : {
+ bigint: {
+ test: (val) => val instanceof FakeBigInt,
+ replace: (fakeBigInt) => {
+ return Object.assign({ $t: 'bigint' }, fakeBigInt);
+ },
+ revive: ({ v, }) => new FakeBigInt(v)
+ }
+ }));
  const TSON = TypesonSimplified(builtin, defs);
  const BISON = Bison(defs);

@@ -3246,110 +3298,107 @@ function encodeIdsForServer(schema, currentUser, changes) {
3246
3298
  }
3247
3299
  function cloneChange(change, rewriteValues) {
3248
3300
  // clone on demand:
3249
- return {
3250
- ...change,
3251
- muts: rewriteValues
3252
- ? change.muts.map((m) => ({
3253
- ...m,
3254
- keys: m.keys.slice(),
3255
- values: m.values.slice(),
3256
- }))
3257
- : change.muts.map((m) => ({ ...m, keys: m.keys.slice() })),
3258
- };
3301
+ return Object.assign(Object.assign({}, change), { muts: rewriteValues
3302
+ ? change.muts.map((m) => (Object.assign(Object.assign({}, m), { keys: m.keys.slice(), values: m.values.slice() })))
3303
+ : change.muts.map((m) => (Object.assign(Object.assign({}, m), { keys: m.keys.slice() }))) });
3259
3304
  }
3260
3305
 
3261
3306
  //import {BisonWebStreamReader} from "dreambase-library/dist/typeson-simplified/BisonWebStreamReader";
3262
- async function syncWithServer(changes, syncState, baseRevs, db, databaseUrl, schema, clientIdentity, currentUser) {
3263
- //
3264
- // Push changes to server using fetch
3265
- //
3266
- const headers = {
3267
- Accept: 'application/json, application/x-bison, application/x-bison-stream',
3268
- 'Content-Type': 'application/tson'
3269
- };
3270
- const accessToken = await loadAccessToken(db);
3271
- if (accessToken) {
3272
- headers.Authorization = `Bearer ${accessToken}`;
3273
- }
3274
- const syncRequest = {
3275
- v: 2,
3276
- dbID: syncState?.remoteDbId,
3277
- clientIdentity,
3278
- schema: schema || {},
3279
- lastPull: syncState ? {
3280
- serverRevision: syncState.serverRevision,
3281
- realms: syncState.realms,
3282
- inviteRealms: syncState.inviteRealms
3283
- } : undefined,
3284
- baseRevs,
3285
- changes: encodeIdsForServer(db.dx.core.schema, currentUser, changes)
3286
- };
3287
- console.debug("Sync request", syncRequest);
3288
- db.syncStateChangedEvent.next({
3289
- phase: 'pushing',
3290
- });
3291
- const res = await fetch(`${databaseUrl}/sync`, {
3292
- method: 'post',
3293
- headers,
3294
- body: TSON.stringify(syncRequest)
3295
- });
3296
- //const contentLength = Number(res.headers.get('content-length'));
3297
- db.syncStateChangedEvent.next({
3298
- phase: 'pulling'
3299
- });
3300
- if (!res.ok) {
3301
- throw new HttpError(res);
3302
- }
3303
- switch (res.headers.get('content-type')) {
3304
- case 'application/x-bison':
3305
- return BISON.fromBinary(await res.blob());
3306
- case 'application/x-bison-stream': //return BisonWebStreamReader(BISON, res);
3307
- default:
3308
- case 'application/json': {
3309
- const text = await res.text();
3310
- const syncRes = TSON.parse(text);
3311
- return syncRes;
3307
+ function syncWithServer(changes, syncState, baseRevs, db, databaseUrl, schema, clientIdentity, currentUser) {
3308
+ return __awaiter$1(this, void 0, void 0, function* () {
3309
+ //
3310
+ // Push changes to server using fetch
3311
+ //
3312
+ const headers = {
3313
+ Accept: 'application/json, application/x-bison, application/x-bison-stream',
3314
+ 'Content-Type': 'application/tson'
3315
+ };
3316
+ const accessToken = yield loadAccessToken(db);
3317
+ if (accessToken) {
3318
+ headers.Authorization = `Bearer ${accessToken}`;
3312
3319
  }
3313
- }
3320
+ const syncRequest = {
3321
+ v: 2,
3322
+ dbID: syncState === null || syncState === void 0 ? void 0 : syncState.remoteDbId,
3323
+ clientIdentity,
3324
+ schema: schema || {},
3325
+ lastPull: syncState ? {
3326
+ serverRevision: syncState.serverRevision,
3327
+ realms: syncState.realms,
3328
+ inviteRealms: syncState.inviteRealms
3329
+ } : undefined,
3330
+ baseRevs,
3331
+ changes: encodeIdsForServer(db.dx.core.schema, currentUser, changes)
3332
+ };
3333
+ console.debug("Sync request", syncRequest);
3334
+ db.syncStateChangedEvent.next({
3335
+ phase: 'pushing',
3336
+ });
3337
+ const res = yield fetch(`${databaseUrl}/sync`, {
3338
+ method: 'post',
3339
+ headers,
3340
+ body: TSON.stringify(syncRequest)
3341
+ });
3342
+ //const contentLength = Number(res.headers.get('content-length'));
3343
+ db.syncStateChangedEvent.next({
3344
+ phase: 'pulling'
3345
+ });
3346
+ if (!res.ok) {
3347
+ throw new HttpError(res);
3348
+ }
3349
+ switch (res.headers.get('content-type')) {
3350
+ case 'application/x-bison':
3351
+ return BISON.fromBinary(yield res.blob());
3352
+ case 'application/x-bison-stream': //return BisonWebStreamReader(BISON, res);
3353
+ default:
3354
+ case 'application/json': {
3355
+ const text = yield res.text();
3356
+ const syncRes = TSON.parse(text);
3357
+ return syncRes;
3358
+ }
3359
+ }
3360
+ });
3314
3361
  }
3315
3362
 
3316
- async function modifyLocalObjectsWithNewUserId(syncifiedTables, currentUser, alreadySyncedRealms) {
3317
- const ignoredRealms = new Set(alreadySyncedRealms || []);
3318
- for (const table of syncifiedTables) {
3319
- if (table.name === "members") {
3320
- // members
3321
- await table.toCollection().modify((member) => {
3322
- if (!ignoredRealms.has(member.realmId) && (!member.userId || member.userId === UNAUTHORIZED_USER.userId)) {
3323
- member.userId = currentUser.userId;
3324
- }
3325
- });
3326
- }
3327
- else if (table.name === "roles") ;
3328
- else if (table.name === "realms") {
3329
- // realms
3330
- await table.toCollection().modify((realm) => {
3331
- if (!ignoredRealms.has(realm.realmId) && (realm.owner === undefined || realm.owner === UNAUTHORIZED_USER.userId)) {
3332
- realm.owner = currentUser.userId;
3333
- }
3334
- });
3335
- }
3336
- else {
3337
- // application entities
3338
- await table.toCollection().modify((obj) => {
3339
- if (!obj.realmId || !ignoredRealms.has(obj.realmId)) {
3340
- if (!obj.owner || obj.owner === UNAUTHORIZED_USER.userId)
3341
- obj.owner = currentUser.userId;
3342
- if (!obj.realmId || obj.realmId === UNAUTHORIZED_USER.userId) {
3343
- obj.realmId = currentUser.userId;
3363
+ function modifyLocalObjectsWithNewUserId(syncifiedTables, currentUser, alreadySyncedRealms) {
3364
+ return __awaiter$1(this, void 0, void 0, function* () {
3365
+ const ignoredRealms = new Set(alreadySyncedRealms || []);
3366
+ for (const table of syncifiedTables) {
3367
+ if (table.name === "members") {
3368
+ // members
3369
+ yield table.toCollection().modify((member) => {
3370
+ if (!ignoredRealms.has(member.realmId) && (!member.userId || member.userId === UNAUTHORIZED_USER.userId)) {
3371
+ member.userId = currentUser.userId;
3344
3372
  }
3345
- }
3346
- });
3373
+ });
3374
+ }
3375
+ else if (table.name === "roles") ;
3376
+ else if (table.name === "realms") {
3377
+ // realms
3378
+ yield table.toCollection().modify((realm) => {
3379
+ if (!ignoredRealms.has(realm.realmId) && (realm.owner === undefined || realm.owner === UNAUTHORIZED_USER.userId)) {
3380
+ realm.owner = currentUser.userId;
3381
+ }
3382
+ });
3383
+ }
3384
+ else {
3385
+ // application entities
3386
+ yield table.toCollection().modify((obj) => {
3387
+ if (!obj.realmId || !ignoredRealms.has(obj.realmId)) {
3388
+ if (!obj.owner || obj.owner === UNAUTHORIZED_USER.userId)
3389
+ obj.owner = currentUser.userId;
3390
+ if (!obj.realmId || obj.realmId === UNAUTHORIZED_USER.userId) {
3391
+ obj.realmId = currentUser.userId;
3392
+ }
3393
+ }
3394
+ });
3395
+ }
3347
3396
  }
3348
- }
3397
+ });
3349
3398
  }
3350
3399
 
3351
3400
  function throwIfCancelled(cancelToken) {
3352
- if (cancelToken?.cancelled)
3401
+ if (cancelToken === null || cancelToken === void 0 ? void 0 : cancelToken.cancelled)
3353
3402
  throw new Dexie.AbortError(`Operation was cancelled`);
3354
3403
  }
3355
3404
 
@@ -3361,17 +3410,19 @@ let isOnline = navigator.onLine;
3361
3410
  self.addEventListener('online', () => isOnline = true);
3362
3411
  self.addEventListener('offline', () => isOnline = false);
3363
3412
 
3364
- async function updateBaseRevs(db, schema, latestRevisions, serverRev) {
3365
- await db.$baseRevs.bulkPut(Object.keys(schema)
3366
- .filter((table) => schema[table].markedForSync)
3367
- .map((tableName) => {
3368
- const lastClientRevOnPreviousServerRev = latestRevisions[tableName] || 0;
3369
- return {
3370
- tableName,
3371
- clientRev: lastClientRevOnPreviousServerRev + 1,
3372
- serverRev,
3373
- };
3374
- }));
3413
+ function updateBaseRevs(db, schema, latestRevisions, serverRev) {
3414
+ return __awaiter$1(this, void 0, void 0, function* () {
3415
+ yield db.$baseRevs.bulkPut(Object.keys(schema)
3416
+ .filter((table) => schema[table].markedForSync)
3417
+ .map((tableName) => {
3418
+ const lastClientRevOnPreviousServerRev = latestRevisions[tableName] || 0;
3419
+ return {
3420
+ tableName,
3421
+ clientRev: lastClientRevOnPreviousServerRev + 1,
3422
+ serverRev,
3423
+ };
3424
+ }));
3425
+ });
3375
3426
  }
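updateBaseRevs records, for every table that is marked for sync, which local mutation revision should be mapped onto the new server revision. A small sketch of the entries it writes; the plain return value stands in for the $baseRevs table, while the field names mirror the code above:

// Hedged sketch: compute the baseRev bookkeeping entries after a successful sync.
function computeBaseRevEntries(schema, latestRevisions, serverRev) {
  return Object.keys(schema)
    .filter((tableName) => schema[tableName].markedForSync)
    .map((tableName) => ({
      tableName,
      clientRev: (latestRevisions[tableName] || 0) + 1, // first local rev that maps...
      serverRev,                                        // ...to this server revision
    }));
}

// computeBaseRevEntries({ todos: { markedForSync: true } }, { todos: 12 }, "rev42")
//   -> [{ tableName: "todos", clientRev: 13, serverRev: "rev42" }]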
3376
3427
 
3377
3428
  function getLatestRevisionsPerTable(clientChangeSet, lastRevisions = {}) {
@@ -3382,104 +3433,108 @@ function getLatestRevisionsPerTable(clientChangeSet, lastRevisions = {}) {
3382
3433
  return lastRevisions;
3383
3434
  }
3384
3435
 
3385
- async function bulkUpdate(table, keys, changeSpecs) {
3386
- const objs = await table.bulkGet(keys);
3387
- const resultKeys = [];
3388
- const resultObjs = [];
3389
- keys.forEach((key, idx) => {
3390
- const obj = objs[idx];
3391
- if (obj) {
3392
- for (const [keyPath, value] of Object.entries(changeSpecs[idx])) {
3393
- if (keyPath === table.schema.primKey.keyPath) {
3394
- if (cmp(value, key) !== 0) {
3395
- throw new Error(`Cannot change primary key`);
3436
+ function bulkUpdate(table, keys, changeSpecs) {
3437
+ return __awaiter$1(this, void 0, void 0, function* () {
3438
+ const objs = yield table.bulkGet(keys);
3439
+ const resultKeys = [];
3440
+ const resultObjs = [];
3441
+ keys.forEach((key, idx) => {
3442
+ const obj = objs[idx];
3443
+ if (obj) {
3444
+ for (const [keyPath, value] of Object.entries(changeSpecs[idx])) {
3445
+ if (keyPath === table.schema.primKey.keyPath) {
3446
+ if (cmp(value, key) !== 0) {
3447
+ throw new Error(`Cannot change primary key`);
3448
+ }
3449
+ }
3450
+ else {
3451
+ Dexie.setByKeyPath(obj, keyPath, value);
3396
3452
  }
3397
3453
  }
3398
- else {
3399
- Dexie.setByKeyPath(obj, keyPath, value);
3400
- }
3454
+ resultKeys.push(key);
3455
+ resultObjs.push(obj);
3401
3456
  }
3402
- resultKeys.push(key);
3403
- resultObjs.push(obj);
3404
- }
3457
+ });
3458
+ yield (table.schema.primKey.keyPath == null
3459
+ ? table.bulkPut(resultObjs, resultKeys)
3460
+ : table.bulkPut(resultObjs));
3405
3461
  });
3406
- await (table.schema.primKey.keyPath == null
3407
- ? table.bulkPut(resultObjs, resultKeys)
3408
- : table.bulkPut(resultObjs));
3409
- }
3410
-
3411
- async function applyServerChanges(changes, db) {
3412
- console.debug('Applying server changes', changes, Dexie.currentTransaction);
3413
- for (const { table: tableName, muts } of changes) {
3414
- const table = db.table(tableName);
3415
- if (!table)
3416
- continue; // If server sends changes on a table we don't have, ignore it.
3417
- const { primaryKey } = table.core.schema;
3418
- const keyDecoder = (key) => {
3419
- switch (key[0]) {
3420
- case '[':
3421
- // Decode JSON array
3422
- if (key.endsWith(']'))
3423
- try {
3424
- // On server, array keys are transformed to JSON string representation
3425
- return JSON.parse(key);
3462
+ }
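bulkUpdate handles server-sent 'update' mutations: fetch the current objects, merge each change spec by key path (refusing to change the primary key), and write the merged objects back with bulkPut. The merge step is sketched below on plain objects with a simplified dotted-path setter; Dexie.setByKeyPath is the real helper used above, and applyChangeSpec is only an illustration:

// Hedged sketch: merge a change spec onto a fetched object.
function applyChangeSpec(obj, changeSpec, primKeyPath) {
  for (const [keyPath, value] of Object.entries(changeSpec)) {
    if (keyPath === primKeyPath) continue;               // primary key must not change (the real code throws)
    const parts = keyPath.split(".");                    // simplified dotted-path support
    let target = obj;
    for (const part of parts.slice(0, -1)) target = target[part] = target[part] || {};
    target[parts[parts.length - 1]] = value;
  }
  return obj;
}

// applyChangeSpec({ id: 1, done: false }, { done: true }, "id") -> { id: 1, done: true }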
3463
+
3464
+ function applyServerChanges(changes, db) {
3465
+ return __awaiter$1(this, void 0, void 0, function* () {
3466
+ console.debug('Applying server changes', changes, Dexie.currentTransaction);
3467
+ for (const { table: tableName, muts } of changes) {
3468
+ const table = db.table(tableName);
3469
+ if (!table)
3470
+ continue; // If server sends changes on a table we don't have, ignore it.
3471
+ const { primaryKey } = table.core.schema;
3472
+ const keyDecoder = (key) => {
3473
+ switch (key[0]) {
3474
+ case '[':
3475
+ // Decode JSON array
3476
+ if (key.endsWith(']'))
3477
+ try {
3478
+ // On server, array keys are transformed to JSON string representation
3479
+ return JSON.parse(key);
3480
+ }
3481
+ catch (_a) { }
3482
+ return key;
3483
+ case '#':
3484
+ // Decode private ID (do the opposite from what's done in encodeIdsForServer())
3485
+ if (key.endsWith(':' + db.cloud.currentUserId)) {
3486
+ return key.substr(0, key.length - db.cloud.currentUserId.length - 1);
3426
3487
  }
3427
- catch { }
3428
- return key;
3429
- case '#':
3430
- // Decode private ID (do the opposite from what's done in encodeIdsForServer())
3431
- if (key.endsWith(':' + db.cloud.currentUserId)) {
3432
- return key.substr(0, key.length - db.cloud.currentUserId.length - 1);
3433
- }
3434
- return key;
3435
- default:
3436
- return key;
3437
- }
3438
- };
3439
- for (const mut of muts) {
3440
- const keys = mut.keys.map(keyDecoder);
3441
- switch (mut.type) {
3442
- case 'insert':
3443
- if (primaryKey.outbound) {
3444
- await table.bulkAdd(mut.values, keys);
3445
- }
3446
- else {
3447
- keys.forEach((key, i) => {
3448
- // Make sure inbound keys are consistent
3449
- Dexie.setByKeyPath(mut.values[i], primaryKey.keyPath, key);
3450
- });
3451
- await table.bulkAdd(mut.values);
3452
- }
3453
- break;
3454
- case 'upsert':
3455
- if (primaryKey.outbound) {
3456
- await table.bulkPut(mut.values, keys);
3457
- }
3458
- else {
3459
- keys.forEach((key, i) => {
3460
- // Make sure inbound keys are consistent
3461
- Dexie.setByKeyPath(mut.values[i], primaryKey.keyPath, key);
3462
- });
3463
- await table.bulkPut(mut.values);
3464
- }
3465
- break;
3466
- case 'modify':
3467
- if (keys.length === 1) {
3468
- await table.update(keys[0], mut.changeSpec);
3469
- }
3470
- else {
3471
- await table.where(':id').anyOf(keys).modify(mut.changeSpec);
3472
- }
3473
- break;
3474
- case 'update':
3475
- await bulkUpdate(table, keys, mut.changeSpecs);
3476
- break;
3477
- case 'delete':
3478
- await table.bulkDelete(keys);
3479
- break;
3488
+ return key;
3489
+ default:
3490
+ return key;
3491
+ }
3492
+ };
3493
+ for (const mut of muts) {
3494
+ const keys = mut.keys.map(keyDecoder);
3495
+ switch (mut.type) {
3496
+ case 'insert':
3497
+ if (primaryKey.outbound) {
3498
+ yield table.bulkAdd(mut.values, keys);
3499
+ }
3500
+ else {
3501
+ keys.forEach((key, i) => {
3502
+ // Make sure inbound keys are consistent
3503
+ Dexie.setByKeyPath(mut.values[i], primaryKey.keyPath, key);
3504
+ });
3505
+ yield table.bulkAdd(mut.values);
3506
+ }
3507
+ break;
3508
+ case 'upsert':
3509
+ if (primaryKey.outbound) {
3510
+ yield table.bulkPut(mut.values, keys);
3511
+ }
3512
+ else {
3513
+ keys.forEach((key, i) => {
3514
+ // Make sure inbound keys are consistent
3515
+ Dexie.setByKeyPath(mut.values[i], primaryKey.keyPath, key);
3516
+ });
3517
+ yield table.bulkPut(mut.values);
3518
+ }
3519
+ break;
3520
+ case 'modify':
3521
+ if (keys.length === 1) {
3522
+ yield table.update(keys[0], mut.changeSpec);
3523
+ }
3524
+ else {
3525
+ yield table.where(':id').anyOf(keys).modify(mut.changeSpec);
3526
+ }
3527
+ break;
3528
+ case 'update':
3529
+ yield bulkUpdate(table, keys, mut.changeSpecs);
3530
+ break;
3531
+ case 'delete':
3532
+ yield table.bulkDelete(keys);
3533
+ break;
3534
+ }
3480
3535
  }
3481
3536
  }
3482
- }
3537
+ });
3483
3538
  }
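Before applying mutations, applyServerChanges decodes the keys the server sends: array keys travel as their JSON string representation, and private ids (starting with '#') are suffixed with ':' plus the user id on the server and must have that suffix stripped locally. A stand-alone sketch of that decoder:

// Hedged sketch: reverse the server-side key encoding before writing locally.
function decodeServerKey(key, currentUserId) {
  switch (key[0]) {
    case "[": // array keys arrive as JSON
      if (key.endsWith("]")) {
        try { return JSON.parse(key); } catch (_e) { /* not valid JSON - keep as-is */ }
      }
      return key;
    case "#": // private ids arrive as "#localId:<userId>"
      return key.endsWith(":" + currentUserId)
        ? key.slice(0, key.length - currentUserId.length - 1)
        : key;
    default:
      return key;
  }
}

// decodeServerKey("#todo1:usr123", "usr123") -> "#todo1"
// decodeServerKey("[1,2]", "usr123")         -> [1, 2]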
3484
3539
 
3485
3540
  const CURRENT_SYNC_WORKER = 'currentSyncWorker';
@@ -3487,14 +3542,14 @@ function sync(db, options, schema, syncOptions) {
3487
3542
  return _sync
3488
3543
  .apply(this, arguments)
3489
3544
  .then(() => {
3490
- if (!syncOptions?.justCheckIfNeeded) {
3545
+ if (!(syncOptions === null || syncOptions === void 0 ? void 0 : syncOptions.justCheckIfNeeded)) {
3491
3546
  db.syncStateChangedEvent.next({
3492
3547
  phase: 'in-sync',
3493
3548
  });
3494
3549
  }
3495
3550
  })
3496
- .catch(async (error) => {
3497
- if (syncOptions?.justCheckIfNeeded)
3551
+ .catch((error) => __awaiter$1(this, void 0, void 0, function* () {
3552
+ if (syncOptions === null || syncOptions === void 0 ? void 0 : syncOptions.justCheckIfNeeded)
3498
3553
  return Promise.reject(error); // Just rethrow.
3499
3554
  console.debug('Error from _sync', {
3500
3555
  isOnline,
@@ -3502,23 +3557,20 @@ function sync(db, options, schema, syncOptions) {
3502
3557
  error,
3503
3558
  });
3504
3559
  if (isOnline &&
3505
- syncOptions?.retryImmediatelyOnFetchError &&
3506
- error?.name === 'TypeError' &&
3507
- /fetch/.test(error?.message)) {
3560
+ (syncOptions === null || syncOptions === void 0 ? void 0 : syncOptions.retryImmediatelyOnFetchError) &&
3561
+ (error === null || error === void 0 ? void 0 : error.name) === 'TypeError' &&
3562
+ /fetch/.test(error === null || error === void 0 ? void 0 : error.message)) {
3508
3563
  db.syncStateChangedEvent.next({
3509
3564
  phase: 'error',
3510
3565
  error,
3511
3566
  });
3512
3567
  // Retry again in 500 ms but if it fails again, don't retry.
3513
- await new Promise((resolve) => setTimeout(resolve, 500));
3514
- return await sync(db, options, schema, {
3515
- ...syncOptions,
3516
- retryImmediatelyOnFetchError: false,
3517
- });
3568
+ yield new Promise((resolve) => setTimeout(resolve, 500));
3569
+ return yield sync(db, options, schema, Object.assign(Object.assign({}, syncOptions), { retryImmediatelyOnFetchError: false }));
3518
3570
  }
3519
3571
  // Make sure that no matter whether sync() explodes or not,
3520
3572
  // always update the timestamp. Also store the error.
3521
- await db.$syncState.update('syncState', {
3573
+ yield db.$syncState.update('syncState', {
3522
3574
  timestamp: new Date(),
3523
3575
  error: '' + error,
3524
3576
  });
@@ -3527,234 +3579,239 @@ function sync(db, options, schema, syncOptions) {
3527
3579
  error,
3528
3580
  });
3529
3581
  return Promise.reject(error);
3530
- });
3582
+ }));
3531
3583
  }
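The catch branch in sync() retries exactly once when the failure looks like a network-level fetch error while the browser still reports itself online, then falls through to persisting the error. The same pattern, reduced to a generic helper; the predicate mirrors the TypeError + /fetch/ check above, but withOneFetchRetry itself is only an illustration:

// Hedged sketch: retry an operation once on a fetch-level TypeError.
async function withOneFetchRetry(operation, delayMs = 500) {
  try {
    return await operation();
  } catch (error) {
    const looksLikeFetchError =
      navigator.onLine &&
      error?.name === "TypeError" &&
      /fetch/.test(error?.message || "");
    if (!looksLikeFetchError) throw error;
    await new Promise((resolve) => setTimeout(resolve, delayMs)); // brief pause, then one retry
    return await operation();                                     // a second failure propagates
  }
}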
3532
- async function _sync(db, options, schema, { isInitialSync, cancelToken, justCheckIfNeeded, purpose } = {
3584
+ function _sync(db, options, schema, { isInitialSync, cancelToken, justCheckIfNeeded, purpose } = {
3533
3585
  isInitialSync: false,
3534
3586
  }) {
3535
- if (!justCheckIfNeeded) {
3536
- console.debug('SYNC STARTED', { isInitialSync, purpose });
3537
- }
3538
- if (!db.cloud.options?.databaseUrl)
3539
- throw new Error(`Internal error: sync must not be called when no databaseUrl is configured`);
3540
- const { databaseUrl } = options;
3541
- const currentUser = await db.getCurrentUser(); // Keep same value across entire sync flow:
3542
- const tablesToSync = currentUser.isLoggedIn ? getSyncableTables(db) : [];
3543
- const mutationTables = tablesToSync.map((tbl) => db.table(getMutationTable(tbl.name)));
3544
- // If this is not the initial sync,
3545
- // go through tables that were previously not synced but should now be according to
3546
- // logged in state and the sync table whitelist in db.cloud.options.
3547
- //
3548
- // Prepare for syncification by modifying locally unauthorized objects:
3549
- //
3550
- const persistedSyncState = await db.getPersistedSyncState();
3551
- const tablesToSyncify = !isInitialSync && currentUser.isLoggedIn
3552
- ? getTablesToSyncify(db, persistedSyncState)
3553
- : [];
3554
- throwIfCancelled(cancelToken);
3555
- const doSyncify = tablesToSyncify.length > 0;
3556
- if (doSyncify) {
3557
- if (justCheckIfNeeded)
3558
- return true;
3559
- //console.debug('sync doSyncify is true');
3560
- await db.transaction('rw', tablesToSyncify, async (tx) => {
3561
- // @ts-ignore
3562
- tx.idbtrans.disableChangeTracking = true;
3563
- // @ts-ignore
3564
- tx.idbtrans.disableAccessControl = true; // TODO: Take care of this flag in access control middleware!
3565
- await modifyLocalObjectsWithNewUserId(tablesToSyncify, currentUser, persistedSyncState?.realms);
3566
- });
3567
- throwIfCancelled(cancelToken);
3568
- }
3569
- //
3570
- // List changes to sync
3571
- //
3572
- const [clientChangeSet, syncState, baseRevs] = await db.transaction('r', db.tables, async () => {
3573
- const syncState = await db.getPersistedSyncState();
3574
- const baseRevs = await db.$baseRevs.toArray();
3575
- let clientChanges = await listClientChanges(mutationTables);
3587
+ var _a;
3588
+ return __awaiter$1(this, void 0, void 0, function* () {
3589
+ if (!justCheckIfNeeded) {
3590
+ console.debug('SYNC STARTED', { isInitialSync, purpose });
3591
+ }
3592
+ if (!((_a = db.cloud.options) === null || _a === void 0 ? void 0 : _a.databaseUrl))
3593
+ throw new Error(`Internal error: sync must not be called when no databaseUrl is configured`);
3594
+ const { databaseUrl } = options;
3595
+ const currentUser = yield db.getCurrentUser(); // Keep same value across entire sync flow:
3596
+ const tablesToSync = currentUser.isLoggedIn ? getSyncableTables(db) : [];
3597
+ const mutationTables = tablesToSync.map((tbl) => db.table(getMutationTable(tbl.name)));
3598
+ // If this is not the initial sync,
3599
+ // go through tables that were previously not synced but should now be according to
3600
+ // logged in state and the sync table whitelist in db.cloud.options.
3601
+ //
3602
+ // Prepare for syncification by modifying locally unauthorized objects:
3603
+ //
3604
+ const persistedSyncState = yield db.getPersistedSyncState();
3605
+ const tablesToSyncify = !isInitialSync && currentUser.isLoggedIn
3606
+ ? getTablesToSyncify(db, persistedSyncState)
3607
+ : [];
3576
3608
  throwIfCancelled(cancelToken);
3609
+ const doSyncify = tablesToSyncify.length > 0;
3577
3610
  if (doSyncify) {
3578
- const alreadySyncedRealms = [
3579
- ...(persistedSyncState?.realms || []),
3580
- ...(persistedSyncState?.inviteRealms || []),
3581
- ];
3582
- const syncificationInserts = await listSyncifiedChanges(tablesToSyncify, currentUser, schema, alreadySyncedRealms);
3611
+ if (justCheckIfNeeded)
3612
+ return true;
3613
+ //console.debug('sync doSyncify is true');
3614
+ yield db.transaction('rw', tablesToSyncify, (tx) => __awaiter$1(this, void 0, void 0, function* () {
3615
+ // @ts-ignore
3616
+ tx.idbtrans.disableChangeTracking = true;
3617
+ // @ts-ignore
3618
+ tx.idbtrans.disableAccessControl = true; // TODO: Take care of this flag in access control middleware!
3619
+ yield modifyLocalObjectsWithNewUserId(tablesToSyncify, currentUser, persistedSyncState === null || persistedSyncState === void 0 ? void 0 : persistedSyncState.realms);
3620
+ }));
3583
3621
  throwIfCancelled(cancelToken);
3584
- clientChanges = clientChanges.concat(syncificationInserts);
3585
- return [clientChanges, syncState, baseRevs];
3586
- }
3587
- return [clientChanges, syncState, baseRevs];
3588
- });
3589
- const syncIsNeeded = clientChangeSet.some((set) => set.muts.some((mut) => mut.keys.length > 0));
3590
- if (justCheckIfNeeded) {
3591
- console.debug('Sync is needed:', syncIsNeeded);
3592
- return syncIsNeeded;
3593
- }
3594
- if (purpose === 'push' && !syncIsNeeded) {
3595
- // The purpose of this request was to push changes
3596
- return false;
3597
- }
3598
- const latestRevisions = getLatestRevisionsPerTable(clientChangeSet, syncState?.latestRevisions);
3599
- const clientIdentity = syncState?.clientIdentity || randomString$1(16);
3600
- //
3601
- // Push changes to server
3602
- //
3603
- throwIfCancelled(cancelToken);
3604
- const res = await syncWithServer(clientChangeSet, syncState, baseRevs, db, databaseUrl, schema, clientIdentity, currentUser);
3605
- console.debug('Sync response', res);
3606
- //
3607
- // Apply changes locally and clear old change entries:
3608
- //
3609
- const done = await db.transaction('rw', db.tables, async (tx) => {
3610
- // @ts-ignore
3611
- tx.idbtrans.disableChangeTracking = true;
3612
- // @ts-ignore
3613
- tx.idbtrans.disableAccessControl = true; // TODO: Take care of this flag in access control middleware!
3614
- // Update db.cloud.schema from server response.
3615
- // Local schema MAY include a subset of tables, so do not force all tables into local schema.
3616
- for (const tableName of Object.keys(schema)) {
3617
- if (res.schema[tableName]) {
3618
- // Write directly into configured schema. This code can only be executed alone.
3619
- schema[tableName] = res.schema[tableName];
3620
- }
3621
3622
  }
3622
- await db.$syncState.put(schema, 'schema');
3623
- // List mutations that happened during our exchange with the server:
3624
- const addedClientChanges = await listClientChanges(mutationTables, db, {
3625
- since: latestRevisions,
3626
- });
3627
3623
  //
3628
- // Delete changes now as server has returned success
3629
- // (but keep changes that haven't reached server yet)
3624
+ // List changes to sync
3630
3625
  //
3631
- for (const mutTable of mutationTables) {
3632
- const tableName = getTableFromMutationTable(mutTable.name);
3633
- if (!addedClientChanges.some((ch) => ch.table === tableName && ch.muts.length > 0)) {
3634
- // No added mutations for this table during the time we sent changes
3635
- // to the server.
3636
- // It is therefore safe to clear all changes (which is faster than
3637
- // deleting a range)
3638
- await Promise.all([
3639
- mutTable.clear(),
3640
- db.$baseRevs.where({ tableName }).delete(),
3641
- ]);
3642
- }
3643
- else if (latestRevisions[tableName]) {
3644
- const latestRev = latestRevisions[tableName] || 0;
3645
- await Promise.all([
3646
- mutTable.where('rev').belowOrEqual(latestRev).delete(),
3647
- db.$baseRevs
3648
- .where(':id')
3649
- .between([tableName, -Infinity], [tableName, latestRev + 1], true, true)
3650
- .reverse()
3651
- .offset(1) // Keep one entry (the one mapping muts that came during fetch --> previous server revision)
3652
- .delete(),
3653
- ]);
3626
+ const [clientChangeSet, syncState, baseRevs] = yield db.transaction('r', db.tables, () => __awaiter$1(this, void 0, void 0, function* () {
3627
+ const syncState = yield db.getPersistedSyncState();
3628
+ const baseRevs = yield db.$baseRevs.toArray();
3629
+ let clientChanges = yield listClientChanges(mutationTables);
3630
+ throwIfCancelled(cancelToken);
3631
+ if (doSyncify) {
3632
+ const alreadySyncedRealms = [
3633
+ ...((persistedSyncState === null || persistedSyncState === void 0 ? void 0 : persistedSyncState.realms) || []),
3634
+ ...((persistedSyncState === null || persistedSyncState === void 0 ? void 0 : persistedSyncState.inviteRealms) || []),
3635
+ ];
3636
+ const syncificationInserts = yield listSyncifiedChanges(tablesToSyncify, currentUser, schema, alreadySyncedRealms);
3637
+ throwIfCancelled(cancelToken);
3638
+ clientChanges = clientChanges.concat(syncificationInserts);
3639
+ return [clientChanges, syncState, baseRevs];
3654
3640
  }
3655
- else ;
3656
- }
3657
- // Update latestRevisions object according to additional changes:
3658
- getLatestRevisionsPerTable(addedClientChanges, latestRevisions);
3659
- // Update/add new entries into baseRevs map.
3660
- // * On tables without mutations since last serverRevision,
3661
- // this will update existing entry.
3662
- // * On tables where mutations have been recorded since last
3663
- // serverRevision, this will create a new entry.
3664
- // The purpose of this operation is to mark a start revision (per table)
3665
- // so that all client-mutations that come after this, will be mapped to current
3666
- // server revision.
3667
- await updateBaseRevs(db, schema, latestRevisions, res.serverRevision);
3668
- const syncState = await db.getPersistedSyncState();
3669
- //
3670
- // Delete objects from removed realms
3671
- //
3672
- await deleteObjectsFromRemovedRealms(db, res, syncState);
3673
- //
3674
- // Update syncState
3675
- //
3676
- const newSyncState = syncState || {
3677
- syncedTables: [],
3678
- latestRevisions: {},
3679
- realms: [],
3680
- inviteRealms: [],
3681
- clientIdentity,
3682
- };
3683
- newSyncState.syncedTables = tablesToSync
3684
- .map((tbl) => tbl.name)
3685
- .concat(tablesToSyncify.map((tbl) => tbl.name));
3686
- newSyncState.latestRevisions = latestRevisions;
3687
- newSyncState.remoteDbId = res.dbId;
3688
- newSyncState.initiallySynced = true;
3689
- newSyncState.realms = res.realms;
3690
- newSyncState.inviteRealms = res.inviteRealms;
3691
- newSyncState.serverRevision = res.serverRevision;
3692
- newSyncState.timestamp = new Date();
3693
- delete newSyncState.error;
3694
- const filteredChanges = filterServerChangesThroughAddedClientChanges(res.changes, addedClientChanges);
3641
+ return [clientChanges, syncState, baseRevs];
3642
+ }));
3643
+ const syncIsNeeded = clientChangeSet.some((set) => set.muts.some((mut) => mut.keys.length > 0));
3644
+ if (justCheckIfNeeded) {
3645
+ console.debug('Sync is needed:', syncIsNeeded);
3646
+ return syncIsNeeded;
3647
+ }
3648
+ if (purpose === 'push' && !syncIsNeeded) {
3649
+ // The purpose of this request was to push changes
3650
+ return false;
3651
+ }
3652
+ const latestRevisions = getLatestRevisionsPerTable(clientChangeSet, syncState === null || syncState === void 0 ? void 0 : syncState.latestRevisions);
3653
+ const clientIdentity = (syncState === null || syncState === void 0 ? void 0 : syncState.clientIdentity) || randomString$1(16);
3695
3654
  //
3696
- // apply server changes
3655
+ // Push changes to server
3697
3656
  //
3698
- await applyServerChanges(filteredChanges, db);
3657
+ throwIfCancelled(cancelToken);
3658
+ const res = yield syncWithServer(clientChangeSet, syncState, baseRevs, db, databaseUrl, schema, clientIdentity, currentUser);
3659
+ console.debug('Sync response', res);
3699
3660
  //
3700
- // Update syncState
3661
+ // Apply changes locally and clear old change entries:
3701
3662
  //
3702
- db.$syncState.put(newSyncState, 'syncState');
3703
- return addedClientChanges.length === 0;
3663
+ const done = yield db.transaction('rw', db.tables, (tx) => __awaiter$1(this, void 0, void 0, function* () {
3664
+ // @ts-ignore
3665
+ tx.idbtrans.disableChangeTracking = true;
3666
+ // @ts-ignore
3667
+ tx.idbtrans.disableAccessControl = true; // TODO: Take care of this flag in access control middleware!
3668
+ // Update db.cloud.schema from server response.
3669
+ // Local schema MAY include a subset of tables, so do not force all tables into local schema.
3670
+ for (const tableName of Object.keys(schema)) {
3671
+ if (res.schema[tableName]) {
3672
+ // Write directly into configured schema. This code can only be executed alone.
3673
+ schema[tableName] = res.schema[tableName];
3674
+ }
3675
+ }
3676
+ yield db.$syncState.put(schema, 'schema');
3677
+ // List mutations that happened during our exchange with the server:
3678
+ const addedClientChanges = yield listClientChanges(mutationTables, db, {
3679
+ since: latestRevisions,
3680
+ });
3681
+ //
3682
+ // Delete changes now as server has returned success
3683
+ // (but keep changes that haven't reached server yet)
3684
+ //
3685
+ for (const mutTable of mutationTables) {
3686
+ const tableName = getTableFromMutationTable(mutTable.name);
3687
+ if (!addedClientChanges.some((ch) => ch.table === tableName && ch.muts.length > 0)) {
3688
+ // No added mutations for this table during the time we sent changes
3689
+ // to the server.
3690
+ // It is therefore safe to clear all changes (which is faster than
3691
+ // deleting a range)
3692
+ yield Promise.all([
3693
+ mutTable.clear(),
3694
+ db.$baseRevs.where({ tableName }).delete(),
3695
+ ]);
3696
+ }
3697
+ else if (latestRevisions[tableName]) {
3698
+ const latestRev = latestRevisions[tableName] || 0;
3699
+ yield Promise.all([
3700
+ mutTable.where('rev').belowOrEqual(latestRev).delete(),
3701
+ db.$baseRevs
3702
+ .where(':id')
3703
+ .between([tableName, -Infinity], [tableName, latestRev + 1], true, true)
3704
+ .reverse()
3705
+ .offset(1) // Keep one entry (the one mapping muts that came during fetch --> previous server revision)
3706
+ .delete(),
3707
+ ]);
3708
+ }
3709
+ else ;
3710
+ }
3711
+ // Update latestRevisions object according to additional changes:
3712
+ getLatestRevisionsPerTable(addedClientChanges, latestRevisions);
3713
+ // Update/add new entries into baseRevs map.
3714
+ // * On tables without mutations since last serverRevision,
3715
+ // this will update existing entry.
3716
+ // * On tables where mutations have been recorded since last
3717
+ // serverRevision, this will create a new entry.
3718
+ // The purpose of this operation is to mark a start revision (per table)
3719
+ // so that all client-mutations that come after this, will be mapped to current
3720
+ // server revision.
3721
+ yield updateBaseRevs(db, schema, latestRevisions, res.serverRevision);
3722
+ const syncState = yield db.getPersistedSyncState();
3723
+ //
3724
+ // Delete objects from removed realms
3725
+ //
3726
+ yield deleteObjectsFromRemovedRealms(db, res, syncState);
3727
+ //
3728
+ // Update syncState
3729
+ //
3730
+ const newSyncState = syncState || {
3731
+ syncedTables: [],
3732
+ latestRevisions: {},
3733
+ realms: [],
3734
+ inviteRealms: [],
3735
+ clientIdentity,
3736
+ };
3737
+ newSyncState.syncedTables = tablesToSync
3738
+ .map((tbl) => tbl.name)
3739
+ .concat(tablesToSyncify.map((tbl) => tbl.name));
3740
+ newSyncState.latestRevisions = latestRevisions;
3741
+ newSyncState.remoteDbId = res.dbId;
3742
+ newSyncState.initiallySynced = true;
3743
+ newSyncState.realms = res.realms;
3744
+ newSyncState.inviteRealms = res.inviteRealms;
3745
+ newSyncState.serverRevision = res.serverRevision;
3746
+ newSyncState.timestamp = new Date();
3747
+ delete newSyncState.error;
3748
+ const filteredChanges = filterServerChangesThroughAddedClientChanges(res.changes, addedClientChanges);
3749
+ //
3750
+ // apply server changes
3751
+ //
3752
+ yield applyServerChanges(filteredChanges, db);
3753
+ //
3754
+ // Update syncState
3755
+ //
3756
+ db.$syncState.put(newSyncState, 'syncState');
3757
+ return addedClientChanges.length === 0;
3758
+ }));
3759
+ if (!done) {
3760
+ console.debug('MORE SYNC NEEDED. Go for it again!');
3761
+ return yield _sync(db, options, schema, { isInitialSync, cancelToken });
3762
+ }
3763
+ console.debug('SYNC DONE', { isInitialSync });
3764
+ return false; // Not needed anymore
3704
3765
  });
3705
- if (!done) {
3706
- console.debug('MORE SYNC NEEDED. Go for it again!');
3707
- return await _sync(db, options, schema, { isInitialSync, cancelToken });
3708
- }
3709
- console.debug('SYNC DONE', { isInitialSync });
3710
- return false; // Not needed anymore
3711
- }
3712
- async function deleteObjectsFromRemovedRealms(db, res, prevState) {
3713
- const deletedRealms = new Set();
3714
- const rejectedRealms = new Set();
3715
- const previousRealmSet = prevState ? prevState.realms : [];
3716
- const previousInviteRealmSet = prevState ? prevState.inviteRealms : [];
3717
- const updatedRealmSet = new Set(res.realms);
3718
- const updatedTotalRealmSet = new Set(res.realms.concat(res.inviteRealms));
3719
- for (const realmId of previousRealmSet) {
3720
- if (!updatedRealmSet.has(realmId)) {
3721
- rejectedRealms.add(realmId);
3766
+ }
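_sync is unchanged in substance by the rewrite: collect unsynced local mutations, push them to the server, apply the server's changes inside a change-tracking-disabled transaction, clear acknowledged mutations, and run another round if new local mutations arrived in the meantime. A heavily reduced skeleton of that loop follows; every method on ctx is a placeholder for the corresponding step above, not a real export of this package:

// Hedged sketch: the sync round-trip, with each phase stubbed out.
async function syncRoundTrip(ctx) {
  const clientChanges = await ctx.listLocalMutations();   // 1. unsynced local mutations
  const response = await ctx.pushToServer(clientChanges); // 2. push, receive server changes
  await ctx.applyServerChanges(response.changes);         // 3. apply server changes locally
  await ctx.clearAcknowledgedMutations(clientChanges);    // 4. drop what the server has acked
  if (await ctx.hasNewLocalMutations()) {                 // 5. written to while we were syncing?
    return syncRoundTrip(ctx);                            // 6. then go another round
  }
}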
3767
+ function deleteObjectsFromRemovedRealms(db, res, prevState) {
3768
+ return __awaiter$1(this, void 0, void 0, function* () {
3769
+ const deletedRealms = new Set();
3770
+ const rejectedRealms = new Set();
3771
+ const previousRealmSet = prevState ? prevState.realms : [];
3772
+ const previousInviteRealmSet = prevState ? prevState.inviteRealms : [];
3773
+ const updatedRealmSet = new Set(res.realms);
3774
+ const updatedTotalRealmSet = new Set(res.realms.concat(res.inviteRealms));
3775
+ for (const realmId of previousRealmSet) {
3776
+ if (!updatedRealmSet.has(realmId)) {
3777
+ rejectedRealms.add(realmId);
3778
+ if (!updatedTotalRealmSet.has(realmId)) {
3779
+ deletedRealms.add(realmId);
3780
+ }
3781
+ }
3782
+ }
3783
+ for (const realmId of previousInviteRealmSet.concat(previousRealmSet)) {
3722
3784
  if (!updatedTotalRealmSet.has(realmId)) {
3723
3785
  deletedRealms.add(realmId);
3724
3786
  }
3725
3787
  }
3726
- }
3727
- for (const realmId of previousInviteRealmSet.concat(previousRealmSet)) {
3728
- if (!updatedTotalRealmSet.has(realmId)) {
3729
- deletedRealms.add(realmId);
3730
- }
3731
- }
3732
- if (deletedRealms.size > 0 || rejectedRealms.size > 0) {
3733
- const tables = getSyncableTables(db);
3734
- for (const table of tables) {
3735
- let realmsToDelete = ['realms', 'members', 'roles'].includes(table.name)
3736
- ? deletedRealms // These tables should spare rejected ones.
3737
- : rejectedRealms; // All other tables should delete rejected+deleted ones
3738
- if (realmsToDelete.size === 0)
3739
- continue;
3740
- if (table.schema.indexes.some((idx) => idx.keyPath === 'realmId' ||
3741
- (Array.isArray(idx.keyPath) && idx.keyPath[0] === 'realmId'))) {
3742
- // There's an index to use:
3743
- //console.debug(`REMOVAL: deleting all ${table.name} where realmId anyOf `, JSON.stringify([...realmsToDelete]));
3744
- await table
3745
- .where('realmId')
3746
- .anyOf([...realmsToDelete])
3747
- .delete();
3748
- }
3749
- else {
3750
- // No index to use:
3751
- //console.debug(`REMOVAL: deleting all ${table.name} where realmId is any of `, JSON.stringify([...realmsToDelete]), realmsToDelete.size);
3752
- await table
3753
- .filter((obj) => !!obj?.realmId && realmsToDelete.has(obj.realmId))
3754
- .delete();
3788
+ if (deletedRealms.size > 0 || rejectedRealms.size > 0) {
3789
+ const tables = getSyncableTables(db);
3790
+ for (const table of tables) {
3791
+ let realmsToDelete = ['realms', 'members', 'roles'].includes(table.name)
3792
+ ? deletedRealms // These tables should spare rejected ones.
3793
+ : rejectedRealms; // All other tables should delete rejected+deleted ones
3794
+ if (realmsToDelete.size === 0)
3795
+ continue;
3796
+ if (table.schema.indexes.some((idx) => idx.keyPath === 'realmId' ||
3797
+ (Array.isArray(idx.keyPath) && idx.keyPath[0] === 'realmId'))) {
3798
+ // There's an index to use:
3799
+ //console.debug(`REMOVAL: deleting all ${table.name} where realmId anyOf `, JSON.stringify([...realmsToDelete]));
3800
+ yield table
3801
+ .where('realmId')
3802
+ .anyOf([...realmsToDelete])
3803
+ .delete();
3804
+ }
3805
+ else {
3806
+ // No index to use:
3807
+ //console.debug(`REMOVAL: deleting all ${table.name} where realmId is any of `, JSON.stringify([...realmsToDelete]), realmsToDelete.size);
3808
+ yield table
3809
+ .filter((obj) => !!(obj === null || obj === void 0 ? void 0 : obj.realmId) && realmsToDelete.has(obj.realmId))
3810
+ .delete();
3811
+ }
3755
3812
  }
3756
3813
  }
3757
- }
3814
+ });
3758
3815
  }
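deleteObjectsFromRemovedRealms splits realms that disappeared from the server response into two sets: rejected (no longer in the member realm list) and deleted (gone from both the member and invite lists), then purges local rows accordingly - realms/members/roles only for deleted realms, all other tables for rejected ones. The classification step as a pure-data sketch:

// Hedged sketch: classify realms that vanished from the server's realm sets.
function classifyRemovedRealms(prevRealms, prevInviteRealms, nextRealms, nextInviteRealms) {
  const keptMember = new Set(nextRealms);
  const keptTotal = new Set([...nextRealms, ...nextInviteRealms]);
  const rejected = new Set();
  const deleted = new Set();
  for (const realmId of prevRealms) {
    if (!keptMember.has(realmId)) rejected.add(realmId);
  }
  for (const realmId of [...prevInviteRealms, ...prevRealms]) {
    if (!keptTotal.has(realmId)) deleted.add(realmId);
  }
  return { rejected, deleted };
}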
3759
3816
  function filterServerChangesThroughAddedClientChanges(serverChanges, addedClientChanges) {
3760
3817
  const changes = {};
@@ -3772,7 +3829,7 @@ function MessagesFromServerConsumer(db) {
3772
3829
  let isWorking = false;
3773
3830
  let loopWarning = 0;
3774
3831
  let loopDetection = [0, 0, 0, 0, 0, 0, 0, 0, 0, Date.now()];
3775
- event.subscribe(async () => {
3832
+ event.subscribe(() => __awaiter$1(this, void 0, void 0, function* () {
3776
3833
  if (isWorking)
3777
3834
  return;
3778
3835
  if (queue.length > 0) {
@@ -3781,7 +3838,7 @@ function MessagesFromServerConsumer(db) {
3781
3838
  loopDetection.push(Date.now());
3782
3839
  readyToServe.next(false);
3783
3840
  try {
3784
- await consumeQueue();
3841
+ yield consumeQueue();
3785
3842
  }
3786
3843
  finally {
3787
3844
  if (loopDetection[loopDetection.length - 1] - loopDetection[0] <
@@ -3791,170 +3848,173 @@ function MessagesFromServerConsumer(db) {
3791
3848
  // Last time we did this, we ended up here too. Wait for a minute.
3792
3849
  console.warn(`Slowing down websocket loop for one minute`);
3793
3850
  loopWarning = Date.now() + 60000;
3794
- await new Promise((resolve) => setTimeout(resolve, 60000));
3851
+ yield new Promise((resolve) => setTimeout(resolve, 60000));
3795
3852
  }
3796
3853
  else {
3797
3854
  // This is a one-time event. Just pause 10 seconds.
3798
3855
  console.warn(`Slowing down websocket loop for 10 seconds`);
3799
3856
  loopWarning = Date.now() + 10000;
3800
- await new Promise((resolve) => setTimeout(resolve, 10000));
3857
+ yield new Promise((resolve) => setTimeout(resolve, 10000));
3801
3858
  }
3802
3859
  }
3803
3860
  isWorking = false;
3804
3861
  readyToServe.next(true);
3805
3862
  }
3806
3863
  }
3807
- });
3864
+ }));
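The subscriber above also protects against a hot websocket loop: it remembers the timestamps of its most recent wake-ups and, when they are packed too tightly, pauses before signalling readiness again (a longer pause on a repeat offence). The heuristic, reduced to a reusable guard with illustrative thresholds:

// Hedged sketch: throttle a consumer that wakes up too often within a short window.
function createLoopGuard(maxWakeups = 10, windowMs = 60000, pauseMs = 10000) {
  const stamps = [];
  return async function guard() {
    stamps.push(Date.now());
    if (stamps.length > maxWakeups) stamps.shift();
    const tooHot = stamps.length === maxWakeups &&
      stamps[stamps.length - 1] - stamps[0] < windowMs;
    if (tooHot) {
      console.warn("Slowing down consumer loop");
      await new Promise((resolve) => setTimeout(resolve, pauseMs));
    }
  };
}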
3808
3865
  function enqueue(msg) {
3809
3866
  queue.push(msg);
3810
3867
  event.next(null);
3811
3868
  }
3812
- async function consumeQueue() {
3813
- while (queue.length > 0) {
3814
- const msg = queue.shift();
3815
- try {
3816
- // If the sync worker or service worker is syncing, wait 'til they're done.
3817
- // There's no need to have two channels at the same time - even though it wouldn't
3818
- // be a problem - this is an optimization.
3819
- await db.cloud.syncState
3820
- .pipe(filter(({ phase }) => phase === 'in-sync' || phase === 'error'), take(1))
3821
- .toPromise();
3822
- console.debug('processing msg', msg);
3823
- const persistedSyncState = db.cloud.persistedSyncState.value;
3824
- //syncState.
3825
- if (!msg)
3826
- continue;
3827
- switch (msg.type) {
3828
- case 'token-expired':
3829
- console.debug('WebSocket observable: Token expired. Refreshing token...');
3830
- const user = db.cloud.currentUser.value;
3831
- // Refresh access token
3832
- const refreshedLogin = await refreshAccessToken(db.cloud.options.databaseUrl, user);
3833
- // Persist updated access token
3834
- await db.table('$logins').update(user.userId, {
3835
- accessToken: refreshedLogin.accessToken,
3836
- accessTokenExpiration: refreshedLogin.accessTokenExpiration,
3837
- });
3838
- // Updating $logins will trigger emission of db.cloud.currentUser observable, which
3839
- // in turn will lead to that connectWebSocket.ts will reconnect the socket with the
3840
- // new token. So we don't need to do anything more here.
3841
- break;
3842
- case 'realm-added':
3843
- //if (!persistedSyncState?.realms?.includes(msg.realm) && !persistedSyncState?.inviteRealms?.includes(msg.realm)) {
3844
- triggerSync(db, 'pull');
3845
- //}
3846
- break;
3847
- case 'realm-accepted':
3848
- //if (!persistedSyncState?.realms?.includes(msg.realm)) {
3849
- triggerSync(db, 'pull');
3850
- //}
3851
- break;
3852
- case 'realm-removed':
3853
- //if (
3854
- persistedSyncState?.realms?.includes(msg.realm) ||
3855
- persistedSyncState?.inviteRealms?.includes(msg.realm);
3856
- //) {
3857
- triggerSync(db, 'pull');
3858
- //}
3859
- break;
3860
- case 'realms-changed':
3861
- triggerSync(db, 'pull');
3862
- break;
3863
- case 'changes':
3864
- console.debug('changes');
3865
- if (db.cloud.syncState.value?.phase === 'error') {
3869
+ function consumeQueue() {
3870
+ var _a, _b, _c;
3871
+ return __awaiter$1(this, void 0, void 0, function* () {
3872
+ while (queue.length > 0) {
3873
+ const msg = queue.shift();
3874
+ try {
3875
+ // If the sync worker or service worker is syncing, wait 'til they're done.
3876
+ // There's no need to have two channels at the same time - even though it wouldn't
3877
+ // be a problem - this is an optimization.
3878
+ yield db.cloud.syncState
3879
+ .pipe(filter(({ phase }) => phase === 'in-sync' || phase === 'error'), take(1))
3880
+ .toPromise();
3881
+ console.debug('processing msg', msg);
3882
+ const persistedSyncState = db.cloud.persistedSyncState.value;
3883
+ //syncState.
3884
+ if (!msg)
3885
+ continue;
3886
+ switch (msg.type) {
3887
+ case 'token-expired':
3888
+ console.debug('WebSocket observable: Token expired. Refreshing token...');
3889
+ const user = db.cloud.currentUser.value;
3890
+ // Refresh access token
3891
+ const refreshedLogin = yield refreshAccessToken(db.cloud.options.databaseUrl, user);
3892
+ // Persist updated access token
3893
+ yield db.table('$logins').update(user.userId, {
3894
+ accessToken: refreshedLogin.accessToken,
3895
+ accessTokenExpiration: refreshedLogin.accessTokenExpiration,
3896
+ });
3897
+ // Updating $logins will trigger emission of db.cloud.currentUser observable, which
3898
+ // in turn will lead to that connectWebSocket.ts will reconnect the socket with the
3899
+ // new token. So we don't need to do anything more here.
3900
+ break;
3901
+ case 'realm-added':
3902
+ //if (!persistedSyncState?.realms?.includes(msg.realm) && !persistedSyncState?.inviteRealms?.includes(msg.realm)) {
3866
3903
  triggerSync(db, 'pull');
3904
+ //}
3867
3905
  break;
3868
- }
3869
- await db.transaction('rw', db.dx.tables, async (tx) => {
3870
- // @ts-ignore
3871
- tx.idbtrans.disableChangeTracking = true;
3872
- // @ts-ignore
3873
- tx.idbtrans.disableAccessControl = true;
3874
- const [schema, syncState, currentUser] = await Promise.all([
3875
- db.getSchema(),
3876
- db.getPersistedSyncState(),
3877
- db.getCurrentUser(),
3878
- ]);
3879
- console.debug('ws message queue: in transaction');
3880
- if (!syncState || !schema || !currentUser) {
3881
- console.debug('required vars not present', {
3882
- syncState,
3883
- schema,
3884
- currentUser,
3885
- });
3886
- return; // Initial sync must have taken place - otherwise, ignore this.
3906
+ case 'realm-accepted':
3907
+ //if (!persistedSyncState?.realms?.includes(msg.realm)) {
3908
+ triggerSync(db, 'pull');
3909
+ //}
3910
+ break;
3911
+ case 'realm-removed':
3912
+ //if (
3913
+ ((_a = persistedSyncState === null || persistedSyncState === void 0 ? void 0 : persistedSyncState.realms) === null || _a === void 0 ? void 0 : _a.includes(msg.realm)) ||
3914
+ ((_b = persistedSyncState === null || persistedSyncState === void 0 ? void 0 : persistedSyncState.inviteRealms) === null || _b === void 0 ? void 0 : _b.includes(msg.realm));
3915
+ //) {
3916
+ triggerSync(db, 'pull');
3917
+ //}
3918
+ break;
3919
+ case 'realms-changed':
3920
+ triggerSync(db, 'pull');
3921
+ break;
3922
+ case 'changes':
3923
+ console.debug('changes');
3924
+ if (((_c = db.cloud.syncState.value) === null || _c === void 0 ? void 0 : _c.phase) === 'error') {
3925
+ triggerSync(db, 'pull');
3926
+ break;
3887
3927
  }
3888
- // Verify again in ACID tx that we're on the same server revision.
3889
- if (msg.baseRev !== syncState.serverRevision) {
3890
- console.debug(`baseRev (${msg.baseRev}) differs from our serverRevision in syncState (${syncState.serverRevision})`);
3891
- // Should we trigger a sync now? No. This is a normal case
3892
- // when another local peer (such as the SW or a websocket channel on other tab) has
3893
- // updated syncState from new server information but we are not aware yet. It would
3894
- // be unnecessary to do a sync in that case. Instead, the caller of this consumeQueue()
3895
- // function will do readyToServe.next(true) right after this return, which will lead
3896
- // to a "ready" message being sent to server with the new accurate serverRev we have,
3897
- // so that the next message indeed will be correct.
3898
- if (typeof msg.baseRev === 'string' && // v2 format
3899
- (typeof syncState.serverRevision === 'bigint' || // v1 format
3900
- typeof syncState.serverRevision === 'object') // v1 format old browser
3901
- ) {
3902
- // The reason for the diff seems to be that server has migrated the revision format.
3903
- // Do a full sync to update revision format.
3904
- // If we don't do a sync request now, we could get stuck in an endless loop.
3928
+ yield db.transaction('rw', db.dx.tables, (tx) => __awaiter$1(this, void 0, void 0, function* () {
3929
+ // @ts-ignore
3930
+ tx.idbtrans.disableChangeTracking = true;
3931
+ // @ts-ignore
3932
+ tx.idbtrans.disableAccessControl = true;
3933
+ const [schema, syncState, currentUser] = yield Promise.all([
3934
+ db.getSchema(),
3935
+ db.getPersistedSyncState(),
3936
+ db.getCurrentUser(),
3937
+ ]);
3938
+ console.debug('ws message queue: in transaction');
3939
+ if (!syncState || !schema || !currentUser) {
3940
+ console.debug('required vars not present', {
3941
+ syncState,
3942
+ schema,
3943
+ currentUser,
3944
+ });
3945
+ return; // Initial sync must have taken place - otherwise, ignore this.
3946
+ }
3947
+ // Verify again in ACID tx that we're on the same server revision.
3948
+ if (msg.baseRev !== syncState.serverRevision) {
3949
+ console.debug(`baseRev (${msg.baseRev}) differs from our serverRevision in syncState (${syncState.serverRevision})`);
3950
+ // Should we trigger a sync now? No. This is a normal case
3951
+ // when another local peer (such as the SW or a websocket channel on other tab) has
3952
+ // updated syncState from new server information but we are not aware yet. It would
3953
+ // be unnecessary to do a sync in that case. Instead, the caller of this consumeQueue()
3954
+ // function will do readyToServe.next(true) right after this return, which will lead
3955
+ // to a "ready" message being sent to server with the new accurate serverRev we have,
3956
+ // so that the next message indeed will be correct.
3957
+ if (typeof msg.baseRev === 'string' && // v2 format
3958
+ (typeof syncState.serverRevision === 'bigint' || // v1 format
3959
+ typeof syncState.serverRevision === 'object') // v1 format old browser
3960
+ ) {
3961
+ // The reason for the diff seems to be that server has migrated the revision format.
3962
+ // Do a full sync to update revision format.
3963
+ // If we don't do a sync request now, we could get stuck in an endless loop.
3964
+ triggerSync(db, 'pull');
3965
+ }
3966
+ return; // Ignore message
3967
+ }
3968
+ // Verify also that the message is based on the exact same set of realms
3969
+ const ourRealmSetHash = yield Dexie.waitFor(
3970
+ // Keep TX in non-IDB work
3971
+ computeRealmSetHash(syncState));
3972
+ console.debug('ourRealmSetHash', ourRealmSetHash);
3973
+ if (ourRealmSetHash !== msg.realmSetHash) {
3974
+ console.debug('not same realmSetHash', msg.realmSetHash);
3905
3975
  triggerSync(db, 'pull');
3976
+ // The message isn't based on the same realms.
3977
+ // Trigger a sync instead to resolve all things up.
3978
+ return;
3906
3979
  }
3907
- return; // Ignore message
3908
- }
3909
- // Verify also that the message is based on the exact same set of realms
3910
- const ourRealmSetHash = await Dexie.waitFor(
3911
- // Keep TX in non-IDB work
3912
- computeRealmSetHash(syncState));
3913
- console.debug('ourRealmSetHash', ourRealmSetHash);
3914
- if (ourRealmSetHash !== msg.realmSetHash) {
3915
- console.debug('not same realmSetHash', msg.realmSetHash);
3916
- triggerSync(db, 'pull');
3917
- // The message isn't based on the same realms.
3918
- // Trigger a sync instead to resolve all things up.
3919
- return;
3920
- }
3921
- // Get clientChanges
3922
- let clientChanges = [];
3923
- if (currentUser.isLoggedIn) {
3924
- const mutationTables = getSyncableTables(db).map((tbl) => db.table(getMutationTable(tbl.name)));
3925
- clientChanges = await listClientChanges(mutationTables, db);
3926
- console.debug('msg queue: client changes', clientChanges);
3927
- }
3928
- if (msg.changes.length > 0) {
3929
- const filteredChanges = filterServerChangesThroughAddedClientChanges(msg.changes, clientChanges);
3980
+ // Get clientChanges
3981
+ let clientChanges = [];
3982
+ if (currentUser.isLoggedIn) {
3983
+ const mutationTables = getSyncableTables(db).map((tbl) => db.table(getMutationTable(tbl.name)));
3984
+ clientChanges = yield listClientChanges(mutationTables, db);
3985
+ console.debug('msg queue: client changes', clientChanges);
3986
+ }
3987
+ if (msg.changes.length > 0) {
3988
+ const filteredChanges = filterServerChangesThroughAddedClientChanges(msg.changes, clientChanges);
3989
+ //
3990
+ // apply server changes
3991
+ //
3992
+ console.debug('applying filtered server changes', filteredChanges);
3993
+ yield applyServerChanges(filteredChanges, db);
3994
+ }
3995
+ // Update latest revisions per table in case there are unsynced changes
3996
+ // This can be a real case in the future when we allow non-eager sync.
3997
+ // And it can actually be realistic now also, but very rare.
3998
+ syncState.latestRevisions = getLatestRevisionsPerTable(clientChanges, syncState.latestRevisions);
3999
+ syncState.serverRevision = msg.newRev;
4000
+ // Update base revs
4001
+ console.debug('Updating baseRefs', syncState.latestRevisions);
4002
+ yield updateBaseRevs(db, schema, syncState.latestRevisions, msg.newRev);
3930
4003
  //
3931
- // apply server changes
4004
+ // Update syncState
3932
4005
  //
3933
- console.debug('applying filtered server changes', filteredChanges);
3934
- await applyServerChanges(filteredChanges, db);
3935
- }
3936
- // Update latest revisions per table in case there are unsynced changes
3937
- // This can be a real case in the future when we allow non-eager sync.
3938
- // And it can actually be realistic now also, but very rare.
3939
- syncState.latestRevisions = getLatestRevisionsPerTable(clientChanges, syncState.latestRevisions);
3940
- syncState.serverRevision = msg.newRev;
3941
- // Update base revs
3942
- console.debug('Updating baseRefs', syncState.latestRevisions);
3943
- await updateBaseRevs(db, schema, syncState.latestRevisions, msg.newRev);
3944
- //
3945
- // Update syncState
3946
- //
3947
- console.debug('Updating syncState', syncState);
3948
- await db.$syncState.put(syncState, 'syncState');
3949
- });
3950
- console.debug('msg queue: done with rw transaction');
3951
- break;
4006
+ console.debug('Updating syncState', syncState);
4007
+ yield db.$syncState.put(syncState, 'syncState');
4008
+ }));
4009
+ console.debug('msg queue: done with rw transaction');
4010
+ break;
4011
+ }
4012
+ }
4013
+ catch (error) {
4014
+ console.error(`Error in msg queue`, error);
3952
4015
  }
3953
4016
  }
3954
- catch (error) {
3955
- console.error(`Error in msg queue`, error);
3956
- }
3957
- }
4017
+ });
3958
4018
  }
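One branch worth calling out in consumeQueue is 'token-expired': the consumer refreshes the access token and persists it on the $logins row, and persisting it is what re-emits the current-user observable and makes the websocket reconnect with the new token. A reduced sketch of that handler; loginsStore and refreshAccessToken are stand-ins passed in by the caller, not the package's actual wiring:

// Hedged sketch: refresh an expired access token and persist it.
async function handleTokenExpired(loginsStore, refreshAccessToken, databaseUrl, user) {
  const refreshed = await refreshAccessToken(databaseUrl, user); // server round-trip
  await loginsStore.update(user.userId, {
    accessToken: refreshed.accessToken,
    accessTokenExpiration: refreshed.accessTokenExpiration,
  });
  // No explicit reconnect here: the persisted login re-emits the
  // current-user observable, which triggers the websocket reconnect.
}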
3959
4019
  return {
3960
4020
  enqueue,
@@ -4099,9 +4159,10 @@ function toStringTag(o) {
4099
4159
  return toString.call(o).slice(8, -1);
4100
4160
  }
4101
4161
  function getEffectiveKeys(primaryKey, req) {
4162
+ var _a;
4102
4163
  if (req.type === 'delete')
4103
4164
  return req.keys;
4104
- return req.keys?.slice() || req.values.map(primaryKey.extractKey);
4165
+ return ((_a = req.keys) === null || _a === void 0 ? void 0 : _a.slice()) || req.values.map(primaryKey.extractKey);
4105
4166
  }
4106
4167
  function applyToUpperBitFix(orig, bits) {
4107
4168
  return ((bits & 1 ? orig[0].toUpperCase() : orig[0].toLowerCase()) +
@@ -4192,9 +4253,7 @@ function createIdGenerationMiddleware(db) {
4192
4253
  name: 'idGenerationMiddleware',
4193
4254
  level: 1,
4194
4255
  create: (core) => {
4195
- return {
4196
- ...core,
4197
- table: (tableName) => {
4256
+ return Object.assign(Object.assign({}, core), { table: (tableName) => {
4198
4257
  const table = core.table(tableName);
4199
4258
  function generateOrVerifyAtKeys(req, idPrefix) {
4200
4259
  let valueClones = null;
@@ -4220,24 +4279,19 @@ function createIdGenerationMiddleware(db) {
4220
4279
  `If you want to generate IDs programmatically, remove '@' from the schema to get rid of this constraint. Dexie Cloud supports custom IDs as long as they are random and globally unique.`);
4221
4280
  }
4222
4281
  });
4223
- return table.mutate({
4224
- ...req,
4225
- keys,
4226
- values: valueClones || req.values,
4227
- });
4282
+ return table.mutate(Object.assign(Object.assign({}, req), { keys, values: valueClones || req.values }));
4228
4283
  }
4229
- return {
4230
- ...table,
4231
- mutate: (req) => {
4284
+ return Object.assign(Object.assign({}, table), { mutate: (req) => {
4285
+ var _a, _b;
4232
4286
  // @ts-ignore
4233
4287
  if (req.trans.disableChangeTracking) {
4234
4288
  // Disable ID policy checks and ID generation
4235
4289
  return table.mutate(req);
4236
4290
  }
4237
4291
  if (req.type === 'add' || req.type === 'put') {
4238
- const cloudTableSchema = db.cloud.schema?.[tableName];
4239
- if (!cloudTableSchema?.generatedGlobalId) {
4240
- if (cloudTableSchema?.markedForSync) {
4292
+ const cloudTableSchema = (_a = db.cloud.schema) === null || _a === void 0 ? void 0 : _a[tableName];
4293
+ if (!(cloudTableSchema === null || cloudTableSchema === void 0 ? void 0 : cloudTableSchema.generatedGlobalId)) {
4294
+ if (cloudTableSchema === null || cloudTableSchema === void 0 ? void 0 : cloudTableSchema.markedForSync) {
4241
4295
  // Just make sure primary key is of a supported type:
4242
4296
  const keys = getEffectiveKeys(table.schema.primaryKey, req);
4243
4297
  keys.forEach((key, idx) => {
@@ -4251,7 +4305,7 @@ function createIdGenerationMiddleware(db) {
4251
4305
  }
4252
4306
  }
4253
4307
  else {
4254
- if (db.cloud.options?.databaseUrl && !db.initiallySynced) {
4308
+ if (((_b = db.cloud.options) === null || _b === void 0 ? void 0 : _b.databaseUrl) && !db.initiallySynced) {
4255
4309
  // A database URL is configured but no initial sync has been performed.
4256
4310
  const keys = getEffectiveKeys(table.schema.primaryKey, req);
4257
4311
  // Check if the operation would yield any INSERT. If so, complain! We never want wrong ID prefixes stored.
@@ -4272,10 +4326,8 @@ function createIdGenerationMiddleware(db) {
4272
4326
  }
4273
4327
  }
4274
4328
  return table.mutate(req);
4275
- },
4276
- };
4277
- },
4278
- };
4329
+ } });
4330
+ } });
4279
4331
  },
4280
4332
  };
4281
4333
  }
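The remaining mechanical rewrite in this release replaces object spread with nested Object.assign calls, as seen in the middleware above; for own enumerable properties the two forms produce the same object. A toy comparison (not package code):

// Hedged sketch: object spread vs. the Object.assign form emitted in beta.18.
const core = { name: "core", level: 1 };

const spreadForm = { ...core, table: (name) => name };
const assignForm = Object.assign(Object.assign({}, core), { table: (name) => name });

console.log(Object.keys(spreadForm)); // ["name", "level", "table"]
console.log(Object.keys(assignForm)); // same keys, same values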
@@ -4286,19 +4338,16 @@ function createImplicitPropSetterMiddleware(db) {
4286
4338
  name: 'implicitPropSetterMiddleware',
4287
4339
  level: 1,
4288
4340
  create: (core) => {
4289
- return {
4290
- ...core,
4291
- table: (tableName) => {
4341
+ return Object.assign(Object.assign({}, core), { table: (tableName) => {
4292
4342
  const table = core.table(tableName);
4293
- return {
4294
- ...table,
4295
- mutate: (req) => {
4343
+ return Object.assign(Object.assign({}, table), { mutate: (req) => {
4344
+ var _a, _b, _c, _d;
4296
4345
  // @ts-ignore
4297
4346
  if (req.trans.disableChangeTracking) {
4298
4347
  return table.mutate(req);
4299
4348
  }
4300
4349
  const trans = req.trans;
4301
- if (db.cloud.schema?.[tableName]?.markedForSync) {
4350
+ if ((_b = (_a = db.cloud.schema) === null || _a === void 0 ? void 0 : _a[tableName]) === null || _b === void 0 ? void 0 : _b.markedForSync) {
4302
4351
  if (req.type === 'add' || req.type === 'put') {
4303
4352
  // No matter if user is logged in or not, make sure "owner" and "realmId" props are set properly.
4304
4353
  // If not logged in, this will be changed upon syncification of the tables (next sync after login),
@@ -4312,7 +4361,7 @@ function createImplicitPropSetterMiddleware(db) {
4312
4361
  if (!obj.realmId) {
4313
4362
  obj.realmId = trans.currentUser.userId;
4314
4363
  }
4315
- const key = table.schema.primaryKey.extractKey?.(obj);
4364
+ const key = (_d = (_c = table.schema.primaryKey).extractKey) === null || _d === void 0 ? void 0 : _d.call(_c, obj);
4316
4365
  if (typeof key === 'string' && key[0] === '#') {
4317
4366
  // Add $ts prop for put operations and
4318
4367
  // disable update operations as well as consistent
@@ -4339,10 +4388,8 @@ function createImplicitPropSetterMiddleware(db) {
4339
4388
  }
4340
4389
  }
4341
4390
  return table.mutate(req);
4342
- },
4343
- };
4344
- },
4345
- };
4391
+ } });
4392
+ } });
4346
4393
  },
4347
4394
  };
4348
4395
  }
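Both middlewares follow Dexie's DBCore middleware shape: a descriptor whose create(core) returns the same core with selected tables' mutate wrapped. A minimal, self-contained sketch of that shape; the single owner-stamping line stands in for the richer logic above, and the middleware itself is illustrative rather than part of dexie-cloud-addon:

// Hedged sketch: a DBCore middleware that stamps an owner on every add/put.
function createStampingMiddleware(getCurrentUserId) {
  return {
    stack: "dbcore",
    name: "stampingMiddleware",
    level: 1,
    create: (core) =>
      Object.assign(Object.assign({}, core), {
        table: (tableName) => {
          const table = core.table(tableName);
          return Object.assign(Object.assign({}, table), {
            mutate: (req) => {
              if (req.type === "add" || req.type === "put") {
                for (const obj of req.values) {
                  if (!obj.owner) obj.owner = getCurrentUserId(); // stamp ownership
                }
              }
              return table.mutate(req); // delegate to the underlying core
            },
          });
        },
      }),
  };
}

// Registered on a Dexie instance with db.use(createStampingMiddleware(() => "usr123")).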
@@ -4361,15 +4408,7 @@ function allSettled(possiblePromises) {
4361
4408
  let counter$1 = 0;
4362
4409
  function guardedTable(table) {
4363
4410
  const prop = "$lock" + (++counter$1);
4364
- return {
4365
- ...table,
4366
- count: readLock(table.count, prop),
4367
- get: readLock(table.get, prop),
4368
- getMany: readLock(table.getMany, prop),
4369
- openCursor: readLock(table.openCursor, prop),
4370
- query: readLock(table.query, prop),
4371
- mutate: writeLock(table.mutate, prop),
4372
- };
4411
+ return Object.assign(Object.assign({}, table), { count: readLock(table.count, prop), get: readLock(table.get, prop), getMany: readLock(table.getMany, prop), openCursor: readLock(table.openCursor, prop), query: readLock(table.query, prop), mutate: writeLock(table.mutate, prop) });
4373
4412
  }
4374
4413
  function readLock(fn, prop) {
4375
4414
  return function readLocker(req) {
@@ -4419,16 +4458,14 @@ function createMutationTrackingMiddleware({ currentUserObservable, db }) {
4419
4458
  core.table(`$${tbl.name}_mutations`)
4420
4459
  ]));
4421
4460
  }
4422
- catch {
4461
+ catch (_a) {
4423
4462
  throwVersionIncrementNeeded();
4424
4463
  }
4425
- return {
4426
- ...core,
4427
- transaction: (tables, mode) => {
4464
+ return Object.assign(Object.assign({}, core), { transaction: (tables, mode) => {
4428
4465
  let tx;
4429
4466
  if (mode === 'readwrite') {
4430
4467
  const mutationTables = tables
4431
- .filter((tbl) => db.cloud.schema?.[tbl]?.markedForSync)
4468
+ .filter((tbl) => { var _a, _b; return (_b = (_a = db.cloud.schema) === null || _a === void 0 ? void 0 : _a[tbl]) === null || _b === void 0 ? void 0 : _b.markedForSync; })
4432
4469
  .map((tbl) => getMutationTable(tbl));
4433
4470
  tx = core.transaction([...tables, ...mutationTables], mode);
4434
4471
  }
@@ -4451,7 +4488,8 @@ function createMutationTrackingMiddleware({ currentUserObservable, db }) {
4451
4488
  outstandingTransactions.next(outstandingTransactions.value);
4452
4489
  };
4453
4490
  const txComplete = () => {
4454
- if (tx.mutationsAdded && db.cloud.options?.databaseUrl) {
4491
+ var _a;
4492
+ if (tx.mutationsAdded && ((_a = db.cloud.options) === null || _a === void 0 ? void 0 : _a.databaseUrl)) {
4455
4493
  if (db.cloud.usingServiceWorker) {
4456
4494
  console.debug('registering sync event');
4457
4495
  registerSyncEvent(db, "push");
@@ -4467,8 +4505,7 @@ function createMutationTrackingMiddleware({ currentUserObservable, db }) {
4467
4505
  tx.addEventListener('abort', removeTransaction);
4468
4506
  }
4469
4507
  return tx;
4470
- },
4471
- table: (tableName) => {
4508
+ }, table: (tableName) => {
4472
4509
  const table = core.table(tableName);
4473
4510
  if (/^\$/.test(tableName)) {
4474
4511
  if (tableName.endsWith('_mutations')) {
@@ -4476,20 +4513,15 @@ function createMutationTrackingMiddleware({ currentUserObservable, db }) {
4476
4513
  // make sure to set the mutationsAdded flag on transaction.
4477
4514
  // This is also done in mutateAndLog() as that function talks to a
4478
4515
  // lower level DBCore and wouldn't be caught by this code.
4479
- return {
4480
- ...table,
4481
- mutate: (req) => {
4516
+ return Object.assign(Object.assign({}, table), { mutate: (req) => {
4482
4517
  if (req.type === 'add' || req.type === 'put') {
4483
4518
  req.trans.mutationsAdded = true;
4484
4519
  }
4485
4520
  return table.mutate(req);
4486
- }
4487
- };
4521
+ } });
4488
4522
  }
4489
4523
  else if (tableName === '$logins') {
4490
- return {
4491
- ...table,
4492
- mutate: (req) => {
4524
+ return Object.assign(Object.assign({}, table), { mutate: (req) => {
4493
4525
  //console.debug('Mutating $logins table', req);
4494
4526
  return table
4495
4527
  .mutate(req)
@@ -4503,8 +4535,7 @@ function createMutationTrackingMiddleware({ currentUserObservable, db }) {
4503
4535
  console.debug('Failed mutation $logins', err);
4504
4536
  return Promise.reject(err);
4505
4537
  });
4506
- }
4507
- };
4538
+ } });
4508
4539
  }
4509
4540
  else {
4510
4541
  return table;
@@ -4512,17 +4543,16 @@ function createMutationTrackingMiddleware({ currentUserObservable, db }) {
4512
4543
  }
4513
4544
  const { schema } = table;
4514
4545
  const mutsTable = mutTableMap.get(tableName);
4515
- return guardedTable({
4516
- ...table,
4517
- mutate: (req) => {
4546
+ return guardedTable(Object.assign(Object.assign({}, table), { mutate: (req) => {
4547
+ var _a, _b, _c;
4518
4548
  const trans = req.trans;
4519
4549
  if (!trans.txid)
4520
4550
  return table.mutate(req); // Upgrade transactions not guarded by us.
4521
4551
  if (trans.disableChangeTracking)
4522
4552
  return table.mutate(req);
4523
- if (!db.cloud.schema?.[tableName]?.markedForSync)
4553
+ if (!((_b = (_a = db.cloud.schema) === null || _a === void 0 ? void 0 : _a[tableName]) === null || _b === void 0 ? void 0 : _b.markedForSync))
4524
4554
  return table.mutate(req);
4525
- if (!trans.currentUser?.isLoggedIn) {
4555
+ if (!((_c = trans.currentUser) === null || _c === void 0 ? void 0 : _c.isLoggedIn)) {
4526
4556
  // Unauthorized user should not log mutations.
4527
4557
  // Instead, after login all local data should be logged at once.
4528
4558
  return table.mutate(req);
@@ -4545,8 +4575,7 @@ function createMutationTrackingMiddleware({ currentUserObservable, db }) {
4545
4575
  });
4546
4576
  })
4547
4577
  : mutateAndLog(req);
4548
- }
4549
- });
4578
+ } }));
4550
4579
  function mutateAndLog(req) {
4551
4580
  const trans = req.trans;
4552
4581
  trans.mutationsAdded = true;
@@ -4617,18 +4646,14 @@ function createMutationTrackingMiddleware({ currentUserObservable, db }) {
4617
4646
  : res;
4618
4647
  });
4619
4648
  }
4620
- }
4621
- };
4649
+ } });
4622
4650
  }
4623
4651
  };
4624
4652
  }
4625
4653
 
4626
4654
  function overrideParseStoresSpec(origFunc, dexie) {
4627
4655
  return function (stores, dbSchema) {
4628
- const storesClone = {
4629
- ...DEXIE_CLOUD_SCHEMA,
4630
- ...stores,
4631
- };
4656
+ const storesClone = Object.assign(Object.assign({}, DEXIE_CLOUD_SCHEMA), stores);
4632
4657
  const cloudSchema = dexie.cloud.schema || (dexie.cloud.schema = {});
4633
4658
  const allPrefixes = new Set();
4634
4659
  Object.keys(storesClone).forEach(tableName => {
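Editor's note: the most visible difference in this and most other hunks of this release is not behavioral; the beta.18 bundle appears to be compiled for an older syntax target, so object spreads become nested Object.assign calls and async/await becomes the __awaiter$1 helper. A minimal sketch of the spread equivalence, using hypothetical store specs rather than the addon's real schema constant:

// Illustrative only; the keys below are made up, not the actual DEXIE_CLOUD_SCHEMA.
const baseStores = { realms: '@realmId', members: '@id' };
const appStores = { todoItems: '@id, title' };
const withSpread = { ...baseStores, ...appStores };
const withAssign = Object.assign(Object.assign({}, baseStores), appStores);
console.assert(JSON.stringify(withSpread) === JSON.stringify(withAssign)); // identical merge result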
@@ -4660,110 +4685,15 @@ function overrideParseStoresSpec(origFunc, dexie) {
4660
4685
  };
4661
4686
  }
4662
4687
 
4663
- const SECONDS = 1000;
4664
- const MINUTES = 60 * SECONDS;
4665
-
4666
- const myId = randomString(16);
4667
-
4668
- const GUARDED_JOB_HEARTBEAT = 1 * SECONDS;
4669
- const GUARDED_JOB_TIMEOUT = 1 * MINUTES;
4670
- async function performGuardedJob(db, jobName, jobsTableName, job, { awaitRemoteJob } = {}) {
4671
- // Start working.
4672
- //
4673
- // Check if someone else is working on this already.
4674
- //
4675
- const jobsTable = db.table(jobsTableName);
4676
- async function aquireLock() {
4677
- const gotTheLock = await db.transaction('rw!', jobsTableName, async () => {
4678
- const currentWork = await jobsTable.get(jobName);
4679
- if (!currentWork) {
4680
- // No one else is working. Let's record that we are.
4681
- await jobsTable.add({
4682
- nodeId: myId,
4683
- started: new Date(),
4684
- heartbeat: new Date()
4685
- }, jobName);
4686
- return true;
4687
- }
4688
- else if (currentWork.heartbeat.getTime() <
4689
- Date.now() - GUARDED_JOB_TIMEOUT) {
4690
- console.warn(`Latest ${jobName} worker seem to have died.\n`, `The dead job started:`, currentWork.started, `\n`, `Last heart beat was:`, currentWork.heartbeat, '\n', `We're now taking over!`);
4691
- // Now, take over!
4692
- await jobsTable.put({
4693
- nodeId: myId,
4694
- started: new Date(),
4695
- heartbeat: new Date()
4696
- }, jobName);
4697
- return true;
4698
- }
4699
- return false;
4700
- });
4701
- if (gotTheLock)
4702
- return true;
4703
- // Someone else took the job.
4704
- if (awaitRemoteJob) {
4705
- try {
4706
- const jobDoneObservable = from$1(liveQuery(() => jobsTable.get(jobName))).pipe(timeout(GUARDED_JOB_TIMEOUT), filter((job) => !job)); // Wait til job is not there anymore.
4707
- await jobDoneObservable.toPromise();
4708
- return false;
4709
- }
4710
- catch (err) {
4711
- if (err.name !== 'TimeoutError') {
4712
- throw err;
4713
- }
4714
- // Timeout stopped us! Try to acquire the lock now.
4715
- // It will likely succeed this time unless
4716
- // another client took it.
4717
- return await aquireLock();
4718
- }
4719
- }
4720
- return false;
4721
- }
4722
- if (await aquireLock()) {
4723
- // We own the lock entry and can do our job undisturbed.
4724
- // We're not within a transaction, but this type of lock
4725
- // spans over transactions.
4726
- // Start our heart beat during the job.
4727
- // Use setInterval to make sure we are updating heartbeat even during long-lived fetch calls.
4728
- const heartbeat = setInterval(() => {
4729
- jobsTable.update(jobName, (job) => {
4730
- if (job.nodeId === myId) {
4731
- job.heartbeat = new Date();
4732
- }
4733
- });
4734
- }, GUARDED_JOB_HEARTBEAT);
4735
- try {
4736
- return await job();
4737
- }
4738
- finally {
4739
- // Stop heartbeat
4740
- clearInterval(heartbeat);
4741
- // Remove the persisted job state:
4742
- await db.transaction('rw!', jobsTableName, async () => {
4743
- const currentWork = await jobsTable.get(jobName);
4744
- if (currentWork && currentWork.nodeId === myId) {
4745
- jobsTable.delete(jobName);
4746
- }
4747
- });
4748
- }
4749
- }
4750
- }
4751
-
4752
- async function performInitialSync(db, cloudOptions, cloudSchema) {
4753
- console.debug("Performing initial sync");
4754
- await performGuardedJob(db, 'initialSync', '$jobs', async () => {
4755
- // Even though caller has already checked it,
4756
- // Do check again (now within a transaction) that we really do not have a sync state:
4757
- const syncState = await db.getPersistedSyncState();
4758
- if (!syncState?.initiallySynced) {
4759
- await sync(db, cloudOptions, cloudSchema, { isInitialSync: true });
4760
- }
4761
- }, { awaitRemoteJob: true } // Don't return until the job is done!
4762
- );
4763
- console.debug("Done initial sync");
4688
+ function performInitialSync(db, cloudOptions, cloudSchema) {
4689
+ return __awaiter$1(this, void 0, void 0, function* () {
4690
+ console.debug('Performing initial sync');
4691
+ yield sync(db, cloudOptions, cloudSchema, { isInitialSync: true });
4692
+ console.debug('Done initial sync');
4693
+ });
4764
4694
  }
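Editor's note: beta.18 also stops running the initial sync under performGuardedJob (that helper now appears later in the bundle and is still used by syncIfPossible). The generator form above is just the downleveled output of async/await; an untranspiled equivalent, shown purely for readability and not part of the published bundle, would be:

async function performInitialSyncReadable(db, cloudOptions, cloudSchema) {
    console.debug('Performing initial sync');
    await sync(db, cloudOptions, cloudSchema, { isInitialSync: true }); // sync() is defined elsewhere in this file
    console.debug('Done initial sync');
}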
4765
4695
 
4766
- const USER_INACTIVITY_TIMEOUT = 300000; // 300_000;
4696
+ const USER_INACTIVITY_TIMEOUT = 180000; // 3 minutes
4767
4697
  const INACTIVE_WAIT_TIME = 20000;
4768
4698
  // This observable will be emitted to later down....
4769
4699
  const userIsActive = new BehaviorSubject(true);
@@ -4777,9 +4707,13 @@ const userIsActive = new BehaviorSubject(true);
4777
4707
  // for just a short time.
4778
4708
  const userIsReallyActive = new BehaviorSubject(true);
4779
4709
  userIsActive
4780
- .pipe(switchMap((isActive) => isActive
4781
- ? of(true)
4782
- : of(false).pipe(delay(INACTIVE_WAIT_TIME))), distinctUntilChanged())
4710
+ .pipe(switchMap((isActive) => {
4711
+ //console.debug('SyncStatus: DUBB: isActive changed to', isActive);
4712
+ return isActive
4713
+ ? of(true)
4714
+ : of(false).pipe(delay(INACTIVE_WAIT_TIME))
4715
+ ;
4716
+ }), distinctUntilChanged())
4783
4717
  .subscribe(userIsReallyActive);
4784
4718
  //
4785
4719
  // First create some corner-stone observables to build the flow on
@@ -4794,7 +4728,7 @@ const documentBecomesHidden = visibilityStateIsChanged.pipe(filter(() => documen
4794
4728
  const documentBecomesVisible = visibilityStateIsChanged.pipe(filter(() => document.visibilityState === 'visible'));
4795
4729
  // Any of various user-activity-related events happen:
4796
4730
  const userDoesSomething = typeof window !== 'undefined'
4797
- ? merge(documentBecomesVisible, fromEvent(window, 'mousemove'), fromEvent(window, 'keydown'), fromEvent(window, 'wheel'), fromEvent(window, 'touchmove'))
4731
+ ? merge(documentBecomesVisible, fromEvent(window, 'mousedown'), fromEvent(window, 'mousemove'), fromEvent(window, 'keydown'), fromEvent(window, 'wheel'), fromEvent(window, 'touchmove'))
4798
4732
  : of({});
4799
4733
  if (typeof document !== 'undefined') {
4800
4734
  //
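Editor's note: the hunks above tighten the inactivity window (USER_INACTIVITY_TIMEOUT drops from 5 to 3 minutes) and add 'mousedown' to the activity events. Reduced to a standalone sketch with the same rxjs operators, the idle-detection shape looks roughly like this (names below are illustrative):

import { BehaviorSubject, fromEvent, merge, of, switchMap, delay, distinctUntilChanged } from 'rxjs';

const INACTIVE_WAIT_MS = 20000; // mirrors INACTIVE_WAIT_TIME above
const active = new BehaviorSubject(true);
const reallyActive = new BehaviorSubject(true);

// Only report "inactive" after the user has been quiet for INACTIVE_WAIT_MS.
active.pipe(
    switchMap((isActive) => (isActive ? of(true) : of(false).pipe(delay(INACTIVE_WAIT_MS)))),
    distinctUntilChanged()
).subscribe(reallyActive);

// Any of these gestures flips the subject back to active (same idea as the merge(...) above).
merge(fromEvent(window, 'mousedown'), fromEvent(window, 'keydown'), fromEvent(window, 'touchmove'))
    .subscribe(() => active.next(true));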
@@ -4845,6 +4779,7 @@ class WSConnection extends Subscription$1 {
4845
4779
  constructor(databaseUrl, rev, realmSetHash, clientIdentity, token, tokenExpiration, subscriber, messageProducer, webSocketStatus) {
4846
4780
  super(() => this.teardown());
4847
4781
  this.id = ++counter;
4782
+ this.reconnecting = false;
4848
4783
  console.debug('New WebSocket Connection', this.id, token ? 'authorized' : 'unauthorized');
4849
4784
  this.databaseUrl = databaseUrl;
4850
4785
  this.rev = rev;
@@ -4864,7 +4799,7 @@ class WSConnection extends Subscription$1 {
4864
4799
  this.disconnect();
4865
4800
  }
4866
4801
  disconnect() {
4867
- this.webSocketStatus.next("disconnected");
4802
+ this.webSocketStatus.next('disconnected');
4868
4803
  if (this.pinger) {
4869
4804
  clearInterval(this.pinger);
4870
4805
  this.pinger = null;
@@ -4873,7 +4808,7 @@ class WSConnection extends Subscription$1 {
4873
4808
  try {
4874
4809
  this.ws.close();
4875
4810
  }
4876
- catch { }
4811
+ catch (_a) { }
4877
4812
  }
4878
4813
  this.ws = null;
4879
4814
  if (this.messageProducerSubscription) {
@@ -4882,158 +4817,180 @@ class WSConnection extends Subscription$1 {
4882
4817
  }
4883
4818
  }
4884
4819
  reconnect() {
4885
- this.disconnect();
4886
- this.connect();
4887
- }
4888
- async connect() {
4889
- this.webSocketStatus.next("connecting");
4890
- this.lastServerActivity = new Date();
4891
- if (this.pauseUntil && this.pauseUntil > new Date()) {
4892
- console.debug('WS not reconnecting just yet', {
4893
- id: this.id,
4894
- pauseUntil: this.pauseUntil,
4895
- });
4896
- return;
4897
- }
4898
- if (this.ws) {
4899
- throw new Error(`Called connect() when a connection is already open`);
4900
- }
4901
- if (!this.databaseUrl)
4902
- throw new Error(`Cannot connect without a database URL`);
4903
- if (this.closed) {
4904
- return;
4905
- }
4906
- if (this.tokenExpiration && this.tokenExpiration < new Date()) {
4907
- this.subscriber.error(new TokenExpiredError()); // Will be handled in connectWebSocket.ts.
4820
+ if (this.reconnecting)
4908
4821
  return;
4822
+ this.reconnecting = true;
4823
+ try {
4824
+ this.disconnect();
4909
4825
  }
4910
- this.pinger = setInterval(async () => {
4911
- if (this.closed) {
4912
- console.debug('pinger check', this.id, 'CLOSED.');
4913
- this.teardown();
4826
+ catch (_a) { }
4827
+ this.connect()
4828
+ .catch(() => { })
4829
+ .then(() => (this.reconnecting = false)); // finally()
4830
+ }
4831
+ connect() {
4832
+ return __awaiter$1(this, void 0, void 0, function* () {
4833
+ this.lastServerActivity = new Date();
4834
+ if (this.pauseUntil && this.pauseUntil > new Date()) {
4835
+ console.debug('WS not reconnecting just yet', {
4836
+ id: this.id,
4837
+ pauseUntil: this.pauseUntil,
4838
+ });
4914
4839
  return;
4915
4840
  }
4916
4841
  if (this.ws) {
4917
- try {
4918
- this.ws.send(JSON.stringify({ type: 'ping' }));
4919
- setTimeout(() => {
4920
- console.debug('pinger setTimeout', this.id, this.pinger ? `alive` : 'dead');
4921
- if (!this.pinger)
4922
- return;
4923
- if (this.closed) {
4924
- console.debug('pinger setTimeout', this.id, 'subscription is closed');
4925
- this.teardown();
4926
- return;
4927
- }
4928
- if (this.lastServerActivity <
4929
- new Date(Date.now() - SERVER_PING_TIMEOUT)) {
4930
- // Server inactive. Reconnect if user is active.
4931
- console.debug('pinger: server is inactive');
4932
- console.debug('pinger reconnecting');
4933
- this.reconnect();
4934
- }
4935
- else {
4936
- console.debug('pinger: server still active');
4937
- }
4938
- }, SERVER_PING_TIMEOUT);
4939
- }
4940
- catch {
4941
- console.debug('pinger catch error', this.id, 'reconnecting');
4942
- this.reconnect();
4943
- }
4842
+ throw new Error(`Called connect() when a connection is already open`);
4944
4843
  }
4945
- else {
4946
- console.debug('pinger', this.id, 'reconnecting');
4947
- this.reconnect();
4948
- }
4949
- }, CLIENT_PING_INTERVAL);
4950
- // The following vars are needed because we must know which callback to ack when server sends its ack to us.
4951
- const wsUrl = new URL(this.databaseUrl);
4952
- wsUrl.protocol = wsUrl.protocol === 'http:' ? 'ws' : 'wss';
4953
- const searchParams = new URLSearchParams();
4954
- if (this.subscriber.closed)
4955
- return;
4956
- searchParams.set('v', "2");
4957
- searchParams.set('rev', this.rev);
4958
- searchParams.set('realmsHash', this.realmSetHash);
4959
- searchParams.set('clientId', this.clientIdentity);
4960
- if (this.token) {
4961
- searchParams.set('token', this.token);
4962
- }
4963
- // Connect the WebSocket to given url:
4964
- console.debug('dexie-cloud WebSocket create');
4965
- const ws = (this.ws = new WebSocket(`${wsUrl}/changes?${searchParams}`));
4966
- //ws.binaryType = "arraybuffer"; // For future when subscribing to actual changes.
4967
- ws.onclose = (event) => {
4968
- if (!this.pinger)
4844
+ if (!this.databaseUrl)
4845
+ throw new Error(`Cannot connect without a database URL`);
4846
+ if (this.closed) {
4847
+ //console.debug('SyncStatus: DUBB: Ooops it was closed!');
4969
4848
  return;
4970
- console.debug('dexie-cloud WebSocket onclosed', this.id);
4971
- this.reconnect();
4972
- };
4973
- ws.onmessage = (event) => {
4974
- if (!this.pinger)
4849
+ }
4850
+ if (this.tokenExpiration && this.tokenExpiration < new Date()) {
4851
+ this.subscriber.error(new TokenExpiredError()); // Will be handled in connectWebSocket.ts.
4975
4852
  return;
4976
- console.debug('dexie-cloud WebSocket onmessage', event.data);
4977
- this.lastServerActivity = new Date();
4978
- try {
4979
- const msg = TSON.parse(event.data);
4980
- if (msg.type === 'error') {
4981
- throw new Error(`Error message from dexie-cloud: ${msg.error}`);
4853
+ }
4854
+ this.webSocketStatus.next('connecting');
4855
+ this.pinger = setInterval(() => __awaiter$1(this, void 0, void 0, function* () {
4856
+ if (this.closed) {
4857
+ console.debug('pinger check', this.id, 'CLOSED.');
4858
+ this.teardown();
4859
+ return;
4982
4860
  }
4983
- if (msg.type === 'rev') {
4984
- this.rev = msg.rev; // No meaning but seems reasonable.
4861
+ if (this.ws) {
4862
+ try {
4863
+ this.ws.send(JSON.stringify({ type: 'ping' }));
4864
+ setTimeout(() => {
4865
+ console.debug('pinger setTimeout', this.id, this.pinger ? `alive` : 'dead');
4866
+ if (!this.pinger)
4867
+ return;
4868
+ if (this.closed) {
4869
+ console.debug('pinger setTimeout', this.id, 'subscription is closed');
4870
+ this.teardown();
4871
+ return;
4872
+ }
4873
+ if (this.lastServerActivity <
4874
+ new Date(Date.now() - SERVER_PING_TIMEOUT)) {
4875
+ // Server inactive. Reconnect if user is active.
4876
+ console.debug('pinger: server is inactive');
4877
+ console.debug('pinger reconnecting');
4878
+ this.reconnect();
4879
+ }
4880
+ else {
4881
+ console.debug('pinger: server still active');
4882
+ }
4883
+ }, SERVER_PING_TIMEOUT);
4884
+ }
4885
+ catch (_a) {
4886
+ console.debug('pinger catch error', this.id, 'reconnecting');
4887
+ this.reconnect();
4888
+ }
4985
4889
  }
4986
- if (msg.type !== 'pong') {
4987
- this.subscriber.next(msg);
4890
+ else {
4891
+ console.debug('pinger', this.id, 'reconnecting');
4892
+ this.reconnect();
4988
4893
  }
4894
+ }), CLIENT_PING_INTERVAL);
4895
+ // The following vars are needed because we must know which callback to ack when server sends its ack to us.
4896
+ const wsUrl = new URL(this.databaseUrl);
4897
+ wsUrl.protocol = wsUrl.protocol === 'http:' ? 'ws' : 'wss';
4898
+ const searchParams = new URLSearchParams();
4899
+ if (this.subscriber.closed)
4900
+ return;
4901
+ searchParams.set('v', '2');
4902
+ searchParams.set('rev', this.rev);
4903
+ searchParams.set('realmsHash', this.realmSetHash);
4904
+ searchParams.set('clientId', this.clientIdentity);
4905
+ if (this.token) {
4906
+ searchParams.set('token', this.token);
4989
4907
  }
4990
- catch (e) {
4991
- this.subscriber.error(e);
4992
- }
4993
- };
4994
- try {
4995
- await new Promise((resolve, reject) => {
4996
- ws.onopen = (event) => {
4997
- console.debug('dexie-cloud WebSocket onopen');
4998
- resolve(null);
4999
- };
5000
- ws.onerror = (event) => {
5001
- const error = event.error || new Error('WebSocket Error');
5002
- this.disconnect();
5003
- this.subscriber.error(error);
5004
- this.webSocketStatus.next("error");
5005
- reject(error);
5006
- };
5007
- });
5008
- this.messageProducerSubscription = this.messageProducer.subscribe(msg => {
5009
- if (!this.closed) {
5010
- if (msg.type === 'ready' && this.webSocketStatus.value !== 'connected') {
5011
- this.webSocketStatus.next("connected");
4908
+ // Connect the WebSocket to given url:
4909
+ console.debug('dexie-cloud WebSocket create');
4910
+ const ws = (this.ws = new WebSocket(`${wsUrl}/changes?${searchParams}`));
4911
+ //ws.binaryType = "arraybuffer"; // For future when subscribing to actual changes.
4912
+ ws.onclose = (event) => {
4913
+ if (!this.pinger)
4914
+ return;
4915
+ console.debug('dexie-cloud WebSocket onclosed', this.id);
4916
+ this.reconnect();
4917
+ };
4918
+ ws.onmessage = (event) => {
4919
+ if (!this.pinger)
4920
+ return;
4921
+ console.debug('dexie-cloud WebSocket onmessage', event.data);
4922
+ this.lastServerActivity = new Date();
4923
+ try {
4924
+ const msg = TSON.parse(event.data);
4925
+ if (msg.type === 'error') {
4926
+ throw new Error(`Error message from dexie-cloud: ${msg.error}`);
4927
+ }
4928
+ if (msg.type === 'rev') {
4929
+ this.rev = msg.rev; // No meaning but seems reasonable.
4930
+ }
4931
+ if (msg.type !== 'pong') {
4932
+ this.subscriber.next(msg);
5012
4933
  }
5013
- this.ws?.send(TSON.stringify(msg));
5014
4934
  }
5015
- });
5016
- }
5017
- catch (error) {
5018
- this.pauseUntil = new Date(Date.now() + FAIL_RETRY_WAIT_TIME);
5019
- }
4935
+ catch (e) {
4936
+ this.subscriber.error(e);
4937
+ }
4938
+ };
4939
+ try {
4940
+ let everConnected = false;
4941
+ yield new Promise((resolve, reject) => {
4942
+ ws.onopen = (event) => {
4943
+ console.debug('dexie-cloud WebSocket onopen');
4944
+ everConnected = true;
4945
+ resolve(null);
4946
+ };
4947
+ ws.onerror = (event) => {
4948
+ if (!everConnected) {
4949
+ const error = event.error || new Error('WebSocket Error');
4950
+ this.subscriber.error(error);
4951
+ this.webSocketStatus.next('error');
4952
+ reject(error);
4953
+ }
4954
+ else {
4955
+ this.reconnect();
4956
+ }
4957
+ };
4958
+ });
4959
+ this.messageProducerSubscription = this.messageProducer.subscribe((msg) => {
4960
+ var _a;
4961
+ if (!this.closed) {
4962
+ if (msg.type === 'ready' &&
4963
+ this.webSocketStatus.value !== 'connected') {
4964
+ this.webSocketStatus.next('connected');
4965
+ }
4966
+ (_a = this.ws) === null || _a === void 0 ? void 0 : _a.send(TSON.stringify(msg));
4967
+ }
4968
+ });
4969
+ }
4970
+ catch (error) {
4971
+ this.pauseUntil = new Date(Date.now() + FAIL_RETRY_WAIT_TIME);
4972
+ }
4973
+ });
5020
4974
  }
5021
4975
  }
5022
4976
 
5023
4977
  function sleep(ms) {
5024
4978
  return new Promise((resolve) => setTimeout(resolve, ms));
5025
4979
  }
5026
- async function waitAndReconnectWhenUserDoesSomething(error) {
5027
- console.error(`WebSocket observable: error but revive when user does some active thing...`, error);
5028
- // Sleep some seconds...
5029
- await sleep(3000);
5030
- // Wait til user does something (move mouse, tap, scroll, click etc)
5031
- console.debug('waiting for someone to do something');
5032
- await userDoesSomething.pipe(take(1)).toPromise();
5033
- console.debug('someone did something!');
4980
+ function waitAndReconnectWhenUserDoesSomething(error) {
4981
+ return __awaiter$1(this, void 0, void 0, function* () {
4982
+ console.error(`WebSocket observable: error but revive when user does some active thing...`, error);
4983
+ // Sleep some seconds...
4984
+ yield sleep(3000);
4985
+ // Wait til user does something (move mouse, tap, scroll, click etc)
4986
+ console.debug('waiting for someone to do something');
4987
+ yield userDoesSomething.pipe(take(1)).toPromise();
4988
+ console.debug('someone did something!');
4989
+ });
5034
4990
  }
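Editor's note: the WSConnection changes above boil down to two ideas: a keepalive pinger that reconnects when the server stays silent past a timeout, and (new in beta.18) a reconnecting flag plus an everConnected check so overlapping onclose/onerror/pinger triggers cannot race each other into opening two sockets. A deliberately simplified, hypothetical sketch of that pattern:

class KeepaliveSocket {
    constructor(url, pingIntervalMs, serverTimeoutMs) {
        this.url = url;
        this.pingIntervalMs = pingIntervalMs;
        this.serverTimeoutMs = serverTimeoutMs;
        this.reconnecting = false;
        this.lastActivity = Date.now();
    }
    connect() {
        this.ws = new WebSocket(this.url);
        this.ws.onmessage = () => { this.lastActivity = Date.now(); };
        this.ws.onclose = () => this.reconnect();
        this.pinger = setInterval(() => {
            if (Date.now() - this.lastActivity > this.serverTimeoutMs) {
                this.reconnect(); // server went quiet: tear down and retry
            }
            else if (this.ws && this.ws.readyState === WebSocket.OPEN) {
                this.ws.send(JSON.stringify({ type: 'ping' }));
            }
        }, this.pingIntervalMs);
    }
    reconnect() {
        if (this.reconnecting)
            return; // re-entrancy guard, as introduced in this version
        this.reconnecting = true;
        clearInterval(this.pinger);
        if (this.ws) {
            this.ws.onclose = null; // avoid re-triggering ourselves from the close we cause
            try { this.ws.close(); } catch (_) { }
            this.ws = null;
        }
        Promise.resolve()
            .then(() => this.connect())
            .finally(() => { this.reconnecting = false; });
    }
}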
5035
4991
  function connectWebSocket(db) {
5036
- if (!db.cloud.options?.databaseUrl) {
4992
+ var _a;
4993
+ if (!((_a = db.cloud.options) === null || _a === void 0 ? void 0 : _a.databaseUrl)) {
5037
4994
  throw new Error(`No database URL to connect WebSocket to`);
5038
4995
  }
5039
4996
  const messageProducer = db.messageConsumer.readyToServe.pipe(filter((isReady) => isReady), // When consumer is ready for new messages, produce such a message to inform server about it
@@ -5045,32 +5002,35 @@ function connectWebSocket(db) {
5045
5002
  rev: syncState.serverRevision,
5046
5003
  })));
5047
5004
  function createObservable() {
5048
- return db.cloud.persistedSyncState.pipe(filter(syncState => syncState?.serverRevision), // Don't connect before an initial sync has been performed.
5005
+ return db.cloud.persistedSyncState.pipe(filter((syncState) => syncState === null || syncState === void 0 ? void 0 : syncState.serverRevision), // Don't connect before an initial sync has been performed.
5049
5006
  take(1), // Don't continue waking up whenever syncState change
5050
- switchMap((syncState) => db.cloud.currentUser.pipe(map(userLogin => [userLogin, syncState]))), switchMap(([userLogin, syncState]) => userIsReallyActive.pipe(map((isActive) => [isActive ? userLogin : null, syncState]))), switchMap(async ([userLogin, syncState]) => [userLogin, await computeRealmSetHash(syncState)]), switchMap(([userLogin, realmSetHash]) =>
5007
+ switchMap((syncState) => db.cloud.currentUser.pipe(map((userLogin) => [userLogin, syncState]))), switchMap(([userLogin, syncState]) => userIsReallyActive.pipe(map((isActive) => [isActive ? userLogin : null, syncState]))), switchMap(([userLogin, syncState]) => __awaiter$1(this, void 0, void 0, function* () { return [userLogin, yield computeRealmSetHash(syncState)]; })), switchMap(([userLogin, realmSetHash]) =>
5051
5008
  // Let server end query changes from last entry of same client-ID and forward.
5052
5009
  // If no new entries, server won't bother the client. If new entries, server sends only those
5053
5010
  // and the baseRev of the last from same client-ID.
5054
5011
  userLogin
5055
5012
  ? new WSObservable(db.cloud.options.databaseUrl, db.cloud.persistedSyncState.value.serverRevision, realmSetHash, db.cloud.persistedSyncState.value.clientIdentity, messageProducer, db.cloud.webSocketStatus, userLogin.accessToken, userLogin.accessTokenExpiration)
5056
5013
  : from$1([])), catchError((error) => {
5057
- if (error?.name === 'TokenExpiredError') {
5014
+ if ((error === null || error === void 0 ? void 0 : error.name) === 'TokenExpiredError') {
5058
5015
  console.debug('WebSocket observable: Token expired. Refreshing token...');
5059
- return of(true).pipe(switchMap(async () => {
5016
+ return of(true).pipe(switchMap(() => __awaiter$1(this, void 0, void 0, function* () {
5060
5017
  // Refresh access token
5061
- const user = await db.getCurrentUser();
5062
- const refreshedLogin = await refreshAccessToken(db.cloud.options.databaseUrl, user);
5018
+ const user = yield db.getCurrentUser();
5019
+ const refreshedLogin = yield refreshAccessToken(db.cloud.options.databaseUrl, user);
5063
5020
  // Persist updated access token
5064
- await db.table('$logins').update(user.userId, {
5021
+ yield db.table('$logins').update(user.userId, {
5065
5022
  accessToken: refreshedLogin.accessToken,
5066
5023
  accessTokenExpiration: refreshedLogin.accessTokenExpiration,
5067
5024
  });
5068
- }), switchMap(() => createObservable()));
5025
+ })), switchMap(() => createObservable()));
5069
5026
  }
5070
5027
  else {
5071
5028
  return throwError(error);
5072
5029
  }
5073
- }), catchError((error) => from$1(waitAndReconnectWhenUserDoesSomething(error)).pipe(switchMap(() => createObservable()))));
5030
+ }), catchError((error) => {
5031
+ db.cloud.webSocketStatus.next("error");
5032
+ return from$1(waitAndReconnectWhenUserDoesSomething(error)).pipe(switchMap(() => createObservable()));
5033
+ }));
5074
5034
  }
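Editor's note: the catchError branch above implements "refresh the access token, then rebuild the whole socket observable", and the second catchError (new here) also surfaces an 'error' webSocketStatus before waiting for user activity. The retry shape, extracted into a hedged standalone sketch with hypothetical helper names:

import { of, throwError, catchError, switchMap } from 'rxjs';

// createSource(): an Observable factory; refreshToken(): a Promise that renews and persists the token.
function withTokenRefresh(createSource, refreshToken) {
    return createSource().pipe(catchError((error) => (error && error.name === 'TokenExpiredError')
        ? of(true).pipe(
            switchMap(() => refreshToken()), // renew the token first...
            switchMap(() => withTokenRefresh(createSource, refreshToken))) // ...then resubscribe from scratch
        : throwError(() => error)));
}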
5075
5035
  return createObservable().subscribe((msg) => {
5076
5036
  if (msg) {
@@ -5084,17 +5044,113 @@ function connectWebSocket(db) {
5084
5044
  });
5085
5045
  }
5086
5046
 
5087
- async function isSyncNeeded(db) {
5088
- return db.cloud.options?.databaseUrl && db.cloud.schema
5089
- ? await sync(db, db.cloud.options, db.cloud.schema, { justCheckIfNeeded: true })
5090
- : false;
5047
+ function isSyncNeeded(db) {
5048
+ var _a;
5049
+ return __awaiter$1(this, void 0, void 0, function* () {
5050
+ return ((_a = db.cloud.options) === null || _a === void 0 ? void 0 : _a.databaseUrl) && db.cloud.schema
5051
+ ? yield sync(db, db.cloud.options, db.cloud.schema, { justCheckIfNeeded: true })
5052
+ : false;
5053
+ });
5054
+ }
5055
+
5056
+ const SECONDS = 1000;
5057
+ const MINUTES = 60 * SECONDS;
5058
+
5059
+ const myId = randomString(16);
5060
+
5061
+ const GUARDED_JOB_HEARTBEAT = 1 * SECONDS;
5062
+ const GUARDED_JOB_TIMEOUT = 1 * MINUTES;
5063
+ function performGuardedJob(db, jobName, jobsTableName, job, { awaitRemoteJob } = {}) {
5064
+ return __awaiter$1(this, void 0, void 0, function* () {
5065
+ // Start working.
5066
+ //
5067
+ // Check if someone else is working on this already.
5068
+ //
5069
+ const jobsTable = db.table(jobsTableName);
5070
+ function aquireLock() {
5071
+ return __awaiter$1(this, void 0, void 0, function* () {
5072
+ const gotTheLock = yield db.transaction('rw!', jobsTableName, () => __awaiter$1(this, void 0, void 0, function* () {
5073
+ const currentWork = yield jobsTable.get(jobName);
5074
+ if (!currentWork) {
5075
+ // No one else is working. Let's record that we are.
5076
+ yield jobsTable.add({
5077
+ nodeId: myId,
5078
+ started: new Date(),
5079
+ heartbeat: new Date()
5080
+ }, jobName);
5081
+ return true;
5082
+ }
5083
+ else if (currentWork.heartbeat.getTime() <
5084
+ Date.now() - GUARDED_JOB_TIMEOUT) {
5085
+ console.warn(`Latest ${jobName} worker seem to have died.\n`, `The dead job started:`, currentWork.started, `\n`, `Last heart beat was:`, currentWork.heartbeat, '\n', `We're now taking over!`);
5086
+ // Now, take over!
5087
+ yield jobsTable.put({
5088
+ nodeId: myId,
5089
+ started: new Date(),
5090
+ heartbeat: new Date()
5091
+ }, jobName);
5092
+ return true;
5093
+ }
5094
+ return false;
5095
+ }));
5096
+ if (gotTheLock)
5097
+ return true;
5098
+ // Someone else took the job.
5099
+ if (awaitRemoteJob) {
5100
+ try {
5101
+ const jobDoneObservable = from$1(liveQuery(() => jobsTable.get(jobName))).pipe(timeout(GUARDED_JOB_TIMEOUT), filter((job) => !job)); // Wait til job is not there anymore.
5102
+ yield jobDoneObservable.toPromise();
5103
+ return false;
5104
+ }
5105
+ catch (err) {
5106
+ if (err.name !== 'TimeoutError') {
5107
+ throw err;
5108
+ }
5109
+ // Timeout stopped us! Try to acquire the lock now.
5110
+ // It will likely succeed this time unless
5111
+ // another client took it.
5112
+ return yield aquireLock();
5113
+ }
5114
+ }
5115
+ return false;
5116
+ });
5117
+ }
5118
+ if (yield aquireLock()) {
5119
+ // We own the lock entry and can do our job undisturbed.
5120
+ // We're not within a transaction, but this type of lock
5121
+ // spans over transactions.
5122
+ // Start our heart beat during the job.
5123
+ // Use setInterval to make sure we are updating heartbeat even during long-lived fetch calls.
5124
+ const heartbeat = setInterval(() => {
5125
+ jobsTable.update(jobName, (job) => {
5126
+ if (job.nodeId === myId) {
5127
+ job.heartbeat = new Date();
5128
+ }
5129
+ });
5130
+ }, GUARDED_JOB_HEARTBEAT);
5131
+ try {
5132
+ return yield job();
5133
+ }
5134
+ finally {
5135
+ // Stop heartbeat
5136
+ clearInterval(heartbeat);
5137
+ // Remove the persisted job state:
5138
+ yield db.transaction('rw!', jobsTableName, () => __awaiter$1(this, void 0, void 0, function* () {
5139
+ const currentWork = yield jobsTable.get(jobName);
5140
+ if (currentWork && currentWork.nodeId === myId) {
5141
+ yield jobsTable.delete(jobName);
5142
+ }
5143
+ }));
5144
+ }
5145
+ }
5146
+ });
5091
5147
  }
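Editor's note: performGuardedJob implements a small cross-tab/worker lock: a row in the '$jobs' table records the current owner and a heartbeat, and another client may steal the lock only when that heartbeat is older than GUARDED_JOB_TIMEOUT. The acquisition step alone, as a minimal sketch using the same table and field names as above:

const LOCK_TIMEOUT_MS = 60000; // same one-minute budget as GUARDED_JOB_TIMEOUT

async function tryAcquireJob(db, jobName, nodeId) {
    return db.transaction('rw!', '$jobs', async () => {
        const current = await db.table('$jobs').get(jobName);
        const stale = current && current.heartbeat.getTime() < Date.now() - LOCK_TIMEOUT_MS;
        if (current && !stale)
            return false; // someone else holds a live lock
        await db.table('$jobs').put({ nodeId, started: new Date(), heartbeat: new Date() }, jobName);
        return true; // caller now owns the job and should keep bumping the heartbeat
    });
}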
5092
5148
 
5093
5149
  const ongoingSyncs = new WeakMap();
5094
5150
  function syncIfPossible(db, cloudOptions, cloudSchema, options) {
5095
5151
  const ongoing = ongoingSyncs.get(db);
5096
5152
  if (ongoing) {
5097
- if (ongoing.pull || options?.purpose === 'push') {
5153
+ if (ongoing.pull || (options === null || options === void 0 ? void 0 : options.purpose) === 'push') {
5098
5154
  console.debug('syncIfPossible(): returning the ongoing sync promise.');
5099
5155
  return ongoing.promise;
5100
5156
  }
@@ -5136,32 +5192,34 @@ function syncIfPossible(db, cloudOptions, cloudSchema, options) {
5136
5192
  }
5137
5193
  }
5138
5194
  const promise = _syncIfPossible();
5139
- ongoingSyncs.set(db, { promise, pull: options?.purpose !== 'push' });
5195
+ ongoingSyncs.set(db, { promise, pull: (options === null || options === void 0 ? void 0 : options.purpose) !== 'push' });
5140
5196
  return promise;
5141
- async function _syncIfPossible() {
5142
- try {
5143
- if (db.cloud.usingServiceWorker) {
5144
- if (IS_SERVICE_WORKER) {
5145
- await sync(db, cloudOptions, cloudSchema, options);
5197
+ function _syncIfPossible() {
5198
+ return __awaiter$1(this, void 0, void 0, function* () {
5199
+ try {
5200
+ if (db.cloud.usingServiceWorker) {
5201
+ if (IS_SERVICE_WORKER) {
5202
+ yield sync(db, cloudOptions, cloudSchema, options);
5203
+ }
5204
+ }
5205
+ else {
5206
+ // We use a flow that is better suited for the case when multiple workers want to
5207
+ // do the same thing.
5208
+ yield performGuardedJob(db, CURRENT_SYNC_WORKER, '$jobs', () => sync(db, cloudOptions, cloudSchema, options));
5146
5209
  }
5210
+ ongoingSyncs.delete(db);
5211
+ console.debug('Done sync');
5147
5212
  }
5148
- else {
5149
- // We use a flow that is better suited for the case when multiple workers want to
5150
- // do the same thing.
5151
- await performGuardedJob(db, CURRENT_SYNC_WORKER, '$jobs', () => sync(db, cloudOptions, cloudSchema, options));
5213
+ catch (error) {
5214
+ ongoingSyncs.delete(db);
5215
+ console.error(`Failed to sync client changes`, error);
5216
+ throw error; // Make sure we rethrow error so that sync event is retried.
5217
+ // I don't think we should setTimeout or so here.
5218
+ // Unless server tells us to in some response.
5219
+ // Then we could follow that advice but not by waiting here but by registering
5220
+ // Something that triggers an event listened to in startPushWorker()
5152
5221
  }
5153
- ongoingSyncs.delete(db);
5154
- console.debug('Done sync');
5155
- }
5156
- catch (error) {
5157
- ongoingSyncs.delete(db);
5158
- console.error(`Failed to sync client changes`, error);
5159
- throw error; // Make sure we rethrow error so that sync event is retried.
5160
- // I don't think we should setTimeout or so here.
5161
- // Unless server tells us to in some response.
5162
- // Then we could follow that advice but not by waiting here but by registering
5163
- // Something that triggers an event listened to in startPushWorker()
5164
- }
5222
+ });
5165
5223
  }
5166
5224
  }
5167
5225
 
@@ -5231,8 +5289,9 @@ function updateSchemaFromOptions(schema, options) {
5231
5289
  }
5232
5290
 
5233
5291
  function verifySchema(db) {
5292
+ var _a, _b;
5234
5293
  for (const table of db.tables) {
5235
- if (db.cloud.schema?.[table.name]?.markedForSync) {
5294
+ if ((_b = (_a = db.cloud.schema) === null || _a === void 0 ? void 0 : _a[table.name]) === null || _b === void 0 ? void 0 : _b.markedForSync) {
5236
5295
  if (table.schema.primKey.auto) {
5237
5296
  throw new Dexie.SchemaError(`Table ${table.name} is both autoIncremented and synced. ` +
5238
5297
  `Use db.cloud.configure({unsyncedTables: [${JSON.stringify(table.name)}]}) to blacklist it from sync`);
@@ -5325,7 +5384,7 @@ function resolveText({ message, messageCode, messageParams }) {
5325
5384
  function LoginDialog({ title, alerts, fields, onCancel, onSubmit, }) {
5326
5385
  const [params, setParams] = l({});
5327
5386
  const firstFieldRef = s();
5328
- h(() => firstFieldRef.current?.focus(), []);
5387
+ h(() => { var _a; return (_a = firstFieldRef.current) === null || _a === void 0 ? void 0 : _a.focus(); }, []);
5329
5388
  return (a$1(Dialog, null,
5330
5389
  a$1(y, null,
5331
5390
  a$1("h3", { style: Styles.WindowHeader }, title),
@@ -5335,7 +5394,7 @@ function LoginDialog({ title, alerts, fields, onCancel, onSubmit, }) {
5335
5394
  onSubmit(params);
5336
5395
  } }, Object.entries(fields).map(([fieldName, { type, label, placeholder }], idx) => (a$1("label", { style: Styles.Label },
5337
5396
  label ? `${label}: ` : '',
5338
- a$1("input", { ref: idx === 0 ? firstFieldRef : undefined, type: type, name: fieldName, autoComplete: "on", style: Styles.Input, autoFocus: true, placeholder: placeholder, value: params[fieldName] || '', onInput: (ev) => setParams({ ...params, [fieldName]: valueTransformer(type, ev.target?.['value']) }) })))))),
5397
+ a$1("input", { ref: idx === 0 ? firstFieldRef : undefined, type: type, name: fieldName, autoComplete: "on", style: Styles.Input, autoFocus: true, placeholder: placeholder, value: params[fieldName] || '', onInput: (ev) => { var _a; return setParams(Object.assign(Object.assign({}, params), { [fieldName]: valueTransformer(type, (_a = ev.target) === null || _a === void 0 ? void 0 : _a['value']) })); } })))))),
5339
5398
  a$1("div", { style: Styles.ButtonsDiv },
5340
5399
  a$1("button", { type: "submit", style: Styles.Button, onClick: () => onSubmit(params) }, "Submit"),
5341
5400
  a$1("button", { style: Styles.Button, onClick: onCancel }, "Cancel"))));
@@ -5367,7 +5426,7 @@ class LoginGui extends p$1 {
5367
5426
  if (!userInteraction)
5368
5427
  return null;
5369
5428
  //if (props.db.cloud.userInteraction.observers.length > 1) return null; // Someone else subscribes.
5370
- return a$1(LoginDialog, { ...userInteraction });
5429
+ return a$1(LoginDialog, Object.assign({}, userInteraction));
5371
5430
  }
5372
5431
  }
5373
5432
  function setupDefaultGUI(db) {
@@ -5494,6 +5553,21 @@ function createSharedValueObservable(o, defaultValue) {
5494
5553
  return rv;
5495
5554
  }
5496
5555
 
5556
+ const getGlobalRolesObservable = associate((db) => {
5557
+ return createSharedValueObservable(liveQuery(() => db.roles
5558
+ .where({ realmId: 'rlm-public' })
5559
+ .toArray()
5560
+ .then((roles) => {
5561
+ const rv = {};
5562
+ for (const role of roles
5563
+ .slice()
5564
+ .sort((a, b) => (a.sortOrder || 0) - (b.sortOrder || 0))) {
5565
+ rv[role.name] = role;
5566
+ }
5567
+ return rv;
5568
+ })), {});
5569
+ });
5570
+
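Editor's note: this observable is new in beta.18 and backs the role support added to the permission lookup further down: a member may now carry a roles array whose names are resolved against these 'rlm-public' role objects and merged with its direct permissions. A rough, hedged sketch of that resolution step:

// Illustrative only; mergePermissions is the helper defined later in this file.
function resolveMemberPermissions(member, globalRoles, mergePermissions) {
    const direct = member.permissions ? [member.permissions] : [];
    const fromRoles = (member.roles || [])
        .map((roleName) => globalRoles[roleName]) // look the name up in the rlm-public role map
        .filter((role) => role)
        .map((role) => role.permissions);
    return mergePermissions(...direct, ...fromRoles);
}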
5497
5571
  const getCurrentUserEmitter = associate((db) => new BehaviorSubject(UNAUTHORIZED_USER));
5498
5572
 
5499
5573
  const getInternalAccessControlObservable = associate((db) => {
@@ -5538,7 +5612,7 @@ function mergePermissions(...permissions) {
5538
5612
  if (permissions.length === 0)
5539
5613
  return {};
5540
5614
  const reduced = permissions.reduce((result, next) => {
5541
- const ret = { ...result };
5615
+ const ret = Object.assign({}, result);
5542
5616
  for (const [verb, rights] of Object.entries(next)) {
5543
5617
  if (verb in ret && ret[verb]) {
5544
5618
  if (ret[verb] === '*')
@@ -5595,19 +5669,36 @@ function mergePermissions(...permissions) {
5595
5669
  }
5596
5670
 
5597
5671
  const getPermissionsLookupObservable = associate((db) => {
5598
- const o = getInternalAccessControlObservable(db._novip);
5599
- return mapValueObservable(o, ({ selfMembers, realms, userId }) => {
5672
+ const o = createSharedValueObservable(combineLatest([
5673
+ getInternalAccessControlObservable(db._novip),
5674
+ getGlobalRolesObservable(db._novip),
5675
+ ]).pipe(map(([{ selfMembers, realms, userId }, globalRoles]) => ({
5676
+ selfMembers,
5677
+ realms,
5678
+ userId,
5679
+ globalRoles,
5680
+ }))), {
5681
+ selfMembers: [],
5682
+ realms: [],
5683
+ userId: UNAUTHORIZED_USER.userId,
5684
+ globalRoles: {},
5685
+ });
5686
+ return mapValueObservable(o, ({ selfMembers, realms, userId, globalRoles }) => {
5600
5687
  const rv = realms
5601
- .map((realm) => ({
5602
- ...realm,
5603
- permissions: realm.owner === userId
5604
- ? { manage: '*' }
5605
- : mergePermissions(...selfMembers
5606
- .filter((m) => m.realmId === realm.realmId)
5607
- .map((m) => m.permissions)
5608
- .filter((p) => p)),
5609
- }))
5610
- .reduce((p, c) => ({ ...p, [c.realmId]: c }), {
5688
+ .map((realm) => {
5689
+ const selfRealmMembers = selfMembers.filter((m) => m.realmId === realm.realmId);
5690
+ const directPermissionSets = selfRealmMembers
5691
+ .map((m) => m.permissions)
5692
+ .filter((p) => p);
5693
+ const rolePermissionSets = flatten(selfRealmMembers.map((m) => m.roles).filter((roleName) => roleName))
5694
+ .map((role) => globalRoles[role])
5695
+ .filter((role) => role)
5696
+ .map((role) => role.permissions);
5697
+ return Object.assign(Object.assign({}, realm), { permissions: realm.owner === userId
5698
+ ? { manage: '*' }
5699
+ : mergePermissions(...directPermissionSets, ...rolePermissionSets) });
5700
+ })
5701
+ .reduce((p, c) => (Object.assign(Object.assign({}, p), { [c.realmId]: c })), {
5611
5702
  [userId]: {
5612
5703
  realmId: userId,
5613
5704
  owner: userId,
@@ -5626,47 +5717,50 @@ class PermissionChecker {
5626
5717
  this.isOwner = isOwner;
5627
5718
  }
5628
5719
  add(...tableNames) {
5720
+ var _a;
5629
5721
  // If user can manage the whole realm, return true.
5630
5722
  if (this.permissions.manage === '*')
5631
5723
  return true;
5632
5724
  // If user can manage given table in realm, return true
5633
- if (this.permissions.manage?.includes(this.tableName))
5725
+ if ((_a = this.permissions.manage) === null || _a === void 0 ? void 0 : _a.includes(this.tableName))
5634
5726
  return true;
5635
5727
  // If user can add any type, return true
5636
5728
  if (this.permissions.add === '*')
5637
5729
  return true;
5638
5730
  // If user can add objects into given table names in the realm, return true
5639
- if (tableNames.every((tableName) => this.permissions.add?.includes(tableName))) {
5731
+ if (tableNames.every((tableName) => { var _a; return (_a = this.permissions.add) === null || _a === void 0 ? void 0 : _a.includes(tableName); })) {
5640
5732
  return true;
5641
5733
  }
5642
5734
  return false;
5643
5735
  }
5644
5736
  update(...props) {
5737
+ var _a, _b;
5645
5738
  // If user is owner of this object, or if user can manage the whole realm, return true.
5646
5739
  if (this.isOwner || this.permissions.manage === '*')
5647
5740
  return true;
5648
5741
  // If user can manage given table in realm, return true
5649
- if (this.permissions.manage?.includes(this.tableName))
5742
+ if ((_a = this.permissions.manage) === null || _a === void 0 ? void 0 : _a.includes(this.tableName))
5650
5743
  return true;
5651
5744
  // If user can update any prop in any table in this realm, return true unless
5652
5745
  // it regards to ownership change:
5653
5746
  if (this.permissions.update === '*') {
5654
5747
  return props.every((prop) => prop !== 'owner');
5655
5748
  }
5656
- const tablePermissions = this.permissions.update?.[this.tableName];
5749
+ const tablePermissions = (_b = this.permissions.update) === null || _b === void 0 ? void 0 : _b[this.tableName];
5657
5750
  // If user can update any prop in table and realm, return true unless
5658
5751
  // accessing special props owner or realmId
5659
5752
  if (tablePermissions === '*')
5660
5753
  return props.every((prop) => prop !== 'owner');
5661
5754
  // Explicitly listed properties to allow updates on:
5662
- return props.every((prop) => tablePermissions?.some((permittedProp) => permittedProp === prop || (permittedProp === '*' && prop !== 'owner')));
5755
+ return props.every((prop) => tablePermissions === null || tablePermissions === void 0 ? void 0 : tablePermissions.some((permittedProp) => permittedProp === prop || (permittedProp === '*' && prop !== 'owner')));
5663
5756
  }
5664
5757
  delete() {
5758
+ var _a;
5665
5759
  // If user is owner of this object, or if user can manage the whole realm, return true.
5666
5760
  if (this.isOwner || this.permissions.manage === '*')
5667
5761
  return true;
5668
5762
  // If user can manage given table in realm, return true
5669
- if (this.permissions.manage?.includes(this.tableName))
5763
+ if ((_a = this.permissions.manage) === null || _a === void 0 ? void 0 : _a.includes(this.tableName))
5670
5764
  return true;
5671
5765
  return false;
5672
5766
  }
@@ -5690,7 +5784,7 @@ function permissions(dexie, obj, tableName) {
5690
5784
  const realm = permissionsLookup[realmId || dexie.cloud.currentUserId];
5691
5785
  if (!realm)
5692
5786
  return new PermissionChecker({}, tableName, !owner || owner === dexie.cloud.currentUserId);
5693
- return new PermissionChecker(realm.permissions, tableName, !owner || owner === dexie.cloud.currentUserId);
5787
+ return new PermissionChecker(realm.permissions, tableName, realmId === dexie.cloud.currentUserId || owner === dexie.cloud.currentUserId);
5694
5788
  };
5695
5789
  const o = source.pipe(map(mapper));
5696
5790
  o.getValue = () => mapper(source.getValue());
@@ -5702,7 +5796,7 @@ const getInvitesObservable = associate((db) => {
5702
5796
  const permissions = getPermissionsLookupObservable(db._novip);
5703
5797
  const accessControl = getInternalAccessControlObservable(db._novip);
5704
5798
  return createSharedValueObservable(combineLatest([membersByEmail, accessControl, permissions]).pipe(map$1(([membersByEmail, accessControl, realmLookup]) => {
5705
- const reducer = (result, m) => ({ ...result, [m.id]: { ...m, realm: realmLookup[m.realmId] } });
5799
+ const reducer = (result, m) => (Object.assign(Object.assign({}, result), { [m.id]: Object.assign(Object.assign({}, m), { realm: realmLookup[m.realmId] }) }));
5706
5800
  const emailMembersById = membersByEmail.reduce(reducer, {});
5707
5801
  const membersById = accessControl.selfMembers.reduce(reducer, emailMembersById);
5708
5802
  return Object.values(membersById).filter(m => !m.accepted);
@@ -5729,15 +5823,15 @@ function dexieCloud(dexie) {
5729
5823
  let configuredProgramatically = false;
5730
5824
  // local sync worker - used when there's no service worker.
5731
5825
  let localSyncWorker = null;
5732
- dexie.on('ready', async (dexie) => {
5826
+ dexie.on('ready', (dexie) => __awaiter$1(this, void 0, void 0, function* () {
5733
5827
  try {
5734
- await onDbReady(dexie);
5828
+ yield onDbReady(dexie);
5735
5829
  }
5736
5830
  catch (error) {
5737
5831
  console.error(error);
5738
5832
  // Make sure to succeed with database open even if network is down.
5739
5833
  }
5740
- }, true // true = sticky
5834
+ }), true // true = sticky
5741
5835
  );
5742
5836
  /** Void starting subscribers after a close has happened. */
5743
5837
  let closed = false;
@@ -5753,8 +5847,8 @@ function dexieCloud(dexie) {
5753
5847
  currentUserEmitter.next(UNAUTHORIZED_USER);
5754
5848
  });
5755
5849
  dexie.cloud = {
5756
- version: '4.0.0-beta.15',
5757
- options: { ...DEFAULT_OPTIONS },
5850
+ version: '4.0.0-beta.18',
5851
+ options: Object.assign({}, DEFAULT_OPTIONS),
5758
5852
  schema: null,
5759
5853
  serverState: null,
5760
5854
  get currentUserId() {
@@ -5768,14 +5862,17 @@ function dexieCloud(dexie) {
5768
5862
  persistedSyncState: new BehaviorSubject(undefined),
5769
5863
  userInteraction: new BehaviorSubject(undefined),
5770
5864
  webSocketStatus: new BehaviorSubject('not-started'),
5771
- async login(hint) {
5772
- const db = DexieCloudDB(dexie);
5773
- await db.cloud.sync();
5774
- await login(db, hint);
5865
+ login(hint) {
5866
+ return __awaiter$1(this, void 0, void 0, function* () {
5867
+ const db = DexieCloudDB(dexie);
5868
+ yield db.cloud.sync();
5869
+ yield login(db, hint);
5870
+ });
5775
5871
  },
5776
5872
  invites: getInvitesObservable(dexie),
5873
+ roles: getGlobalRolesObservable(dexie),
5777
5874
  configure(options) {
5778
- options = dexie.cloud.options = { ...dexie.cloud.options, ...options };
5875
+ options = dexie.cloud.options = Object.assign(Object.assign({}, dexie.cloud.options), options);
5779
5876
  configuredProgramatically = true;
5780
5877
  if (options.databaseUrl && options.nameSuffix) {
5781
5878
  // @ts-ignore
@@ -5784,41 +5881,43 @@ function dexieCloud(dexie) {
5784
5881
  }
5785
5882
  updateSchemaFromOptions(dexie.cloud.schema, dexie.cloud.options);
5786
5883
  },
5787
- async sync({ wait, purpose } = { wait: true, purpose: 'push' }) {
5788
- if (wait === undefined)
5789
- wait = true;
5790
- const db = DexieCloudDB(dexie);
5791
- if (purpose === 'pull') {
5792
- const syncState = db.cloud.persistedSyncState.value;
5793
- triggerSync(db, purpose);
5794
- if (wait) {
5795
- const newSyncState = await db.cloud.persistedSyncState
5796
- .pipe(filter((newSyncState) => newSyncState?.timestamp != null &&
5797
- (!syncState || newSyncState.timestamp > syncState.timestamp)), take(1))
5798
- .toPromise();
5799
- if (newSyncState?.error) {
5800
- throw new Error(`Sync error: ` + newSyncState.error);
5884
+ sync({ wait, purpose } = { wait: true, purpose: 'push' }) {
5885
+ return __awaiter$1(this, void 0, void 0, function* () {
5886
+ if (wait === undefined)
5887
+ wait = true;
5888
+ const db = DexieCloudDB(dexie);
5889
+ if (purpose === 'pull') {
5890
+ const syncState = db.cloud.persistedSyncState.value;
5891
+ triggerSync(db, purpose);
5892
+ if (wait) {
5893
+ const newSyncState = yield db.cloud.persistedSyncState
5894
+ .pipe(filter((newSyncState) => (newSyncState === null || newSyncState === void 0 ? void 0 : newSyncState.timestamp) != null &&
5895
+ (!syncState || newSyncState.timestamp > syncState.timestamp)), take(1))
5896
+ .toPromise();
5897
+ if (newSyncState === null || newSyncState === void 0 ? void 0 : newSyncState.error) {
5898
+ throw new Error(`Sync error: ` + newSyncState.error);
5899
+ }
5801
5900
  }
5802
5901
  }
5803
- }
5804
- else if (await isSyncNeeded(db)) {
5805
- const syncState = db.cloud.persistedSyncState.value;
5806
- triggerSync(db, purpose);
5807
- if (wait) {
5808
- console.debug('db.cloud.login() is waiting for sync completion...');
5809
- await from$1(liveQuery(async () => {
5810
- const syncNeeded = await isSyncNeeded(db);
5811
- const newSyncState = await db.getPersistedSyncState();
5812
- if (newSyncState?.timestamp !== syncState?.timestamp &&
5813
- newSyncState?.error)
5814
- throw new Error(`Sync error: ` + newSyncState.error);
5815
- return syncNeeded;
5816
- }))
5817
- .pipe(filter((isNeeded) => !isNeeded), take(1))
5818
- .toPromise();
5819
- console.debug('Done waiting for sync completion because we have nothing to push anymore');
5902
+ else if (yield isSyncNeeded(db)) {
5903
+ const syncState = db.cloud.persistedSyncState.value;
5904
+ triggerSync(db, purpose);
5905
+ if (wait) {
5906
+ console.debug('db.cloud.login() is waiting for sync completion...');
5907
+ yield from$1(liveQuery(() => __awaiter$1(this, void 0, void 0, function* () {
5908
+ const syncNeeded = yield isSyncNeeded(db);
5909
+ const newSyncState = yield db.getPersistedSyncState();
5910
+ if ((newSyncState === null || newSyncState === void 0 ? void 0 : newSyncState.timestamp) !== (syncState === null || syncState === void 0 ? void 0 : syncState.timestamp) &&
5911
+ (newSyncState === null || newSyncState === void 0 ? void 0 : newSyncState.error))
5912
+ throw new Error(`Sync error: ` + newSyncState.error);
5913
+ return syncNeeded;
5914
+ })))
5915
+ .pipe(filter((isNeeded) => !isNeeded), take(1))
5916
+ .toPromise();
5917
+ console.debug('Done waiting for sync completion because we have nothing to push anymore');
5918
+ }
5820
5919
  }
5821
- }
5920
+ });
5822
5921
  },
5823
5922
  permissions(obj, tableName) {
5824
5923
  return permissions(dexie._novip, obj, tableName);
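Editor's note: the object assembled in this function is the public db.cloud surface. Under the usual dexie-cloud setup (database name, URL and table names below are placeholders, not values taken from this diff), application code consumes it roughly like this:

import Dexie from 'dexie';
import dexieCloud from 'dexie-cloud-addon';

const appDb = new Dexie('myAppDb', { addons: [dexieCloud] });
appDb.version(1).stores({ todoItems: '@id, title' }); // '@' asks the addon to generate global ids

appDb.cloud.configure({
    databaseUrl: 'https://xxxxxxxx.dexie.cloud', // placeholder URL
    requireAuth: false,
});

async function demo() {
    await appDb.cloud.login(); // uses the default login GUI unless customLoginGui is configured
    await appDb.cloud.sync({ wait: true, purpose: 'pull' }); // wait for a round-trip with the server
    const item = await appDb.table('todoItems').get('some-id');
    if (!item) return;
    const can = appDb.cloud.permissions(item, 'todoItems');
    console.log('may update title?', can.update('title'));
}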
@@ -5830,7 +5929,8 @@ function dexieCloud(dexie) {
5830
5929
  return generateKey(dexie.cloud.schema[this.name].idPrefix || '', shardKey);
5831
5930
  };
5832
5931
  dexie.Table.prototype.idPrefix = function () {
5833
- return this.db.cloud.schema?.[this.name]?.idPrefix || '';
5932
+ var _a, _b;
5933
+ return ((_b = (_a = this.db.cloud.schema) === null || _a === void 0 ? void 0 : _a[this.name]) === null || _b === void 0 ? void 0 : _b.idPrefix) || '';
5834
5934
  };
5835
5935
  dexie.use(createMutationTrackingMiddleware({
5836
5936
  currentUserObservable: dexie.cloud.currentUser,
@@ -5838,163 +5938,167 @@ function dexieCloud(dexie) {
5838
5938
  }));
5839
5939
  dexie.use(createImplicitPropSetterMiddleware(DexieCloudDB(dexie)));
5840
5940
  dexie.use(createIdGenerationMiddleware(DexieCloudDB(dexie)));
5841
- async function onDbReady(dexie) {
5842
- closed = false; // As Dexie calls us, we are not closed anymore. Maybe reopened? Remember db.ready event is registered with sticky flag!
5843
- const db = DexieCloudDB(dexie);
5844
- // Setup default GUI:
5845
- if (!IS_SERVICE_WORKER) {
5846
- if (!db.cloud.options?.customLoginGui) {
5847
- subscriptions.push(setupDefaultGUI(dexie));
5848
- }
5849
- subscriptions.push(computeSyncState(db).subscribe(dexie.cloud.syncState));
5850
- }
5851
- //verifyConfig(db.cloud.options); Not needed (yet at least!)
5852
- // Verify the user has allowed version increment.
5853
- if (!db.tables.every((table) => table.core)) {
5854
- throwVersionIncrementNeeded();
5855
- }
5856
- const swRegistrations = 'serviceWorker' in navigator
5857
- ? await navigator.serviceWorker.getRegistrations()
5858
- : [];
5859
- const initiallySynced = await db.transaction('rw', db.$syncState, async () => {
5860
- const { options, schema } = db.cloud;
5861
- const [persistedOptions, persistedSchema, persistedSyncState] = await Promise.all([
5862
- db.getOptions(),
5863
- db.getSchema(),
5864
- db.getPersistedSyncState(),
5865
- ]);
5866
- if (!configuredProgramatically) {
5867
- // Options not specified programmatically (use case for SW!)
5868
- // Take persisted options:
5869
- db.cloud.options = persistedOptions || null;
5870
- }
5871
- else if (!persistedOptions ||
5872
- JSON.stringify(persistedOptions) !== JSON.stringify(options)) {
5873
- // Update persisted options:
5874
- if (!options)
5875
- throw new Error(`Internal error`); // options cannot be null if configuredProgramatically is set.
5876
- await db.$syncState.put(options, 'options');
5877
- }
5878
- if (db.cloud.options?.tryUseServiceWorker &&
5879
- 'serviceWorker' in navigator &&
5880
- swRegistrations.length > 0 &&
5881
- !DISABLE_SERVICEWORKER_STRATEGY) {
5882
- // * Configured for using service worker if available.
5883
- // * Browser supports service workers
5884
- // * There is at least one service worker registration
5885
- console.debug('Dexie Cloud Addon: Using service worker');
5886
- db.cloud.usingServiceWorker = true;
5887
- }
5888
- else {
5889
- // Not configured for using service worker or no service worker
5890
- // registration exists. Don't rely on service worker to do any job.
5891
- // Use LocalSyncWorker instead.
5892
- if (db.cloud.options?.tryUseServiceWorker && !IS_SERVICE_WORKER) {
5893
- console.debug('dexie-cloud-addon: Not using service worker.', swRegistrations.length === 0
5894
- ? 'No SW registrations found.'
5895
- : 'serviceWorker' in navigator && DISABLE_SERVICEWORKER_STRATEGY
5896
- ? 'Avoiding SW background sync and SW periodic bg sync for this browser due to browser bugs.'
5897
- : 'navigator.serviceWorker not present');
5941
+ function onDbReady(dexie) {
5942
+ var _a, _b, _c, _d, _e, _f;
5943
+ return __awaiter$1(this, void 0, void 0, function* () {
5944
+ closed = false; // As Dexie calls us, we are not closed anymore. Maybe reopened? Remember db.ready event is registered with sticky flag!
5945
+ const db = DexieCloudDB(dexie);
5946
+ // Setup default GUI:
5947
+ if (!IS_SERVICE_WORKER) {
5948
+ if (!((_a = db.cloud.options) === null || _a === void 0 ? void 0 : _a.customLoginGui)) {
5949
+ subscriptions.push(setupDefaultGUI(dexie));
5898
5950
  }
5899
- db.cloud.usingServiceWorker = false;
5951
+ subscriptions.push(computeSyncState(db).subscribe(dexie.cloud.syncState));
5900
5952
  }
5901
- updateSchemaFromOptions(schema, db.cloud.options);
5902
- updateSchemaFromOptions(persistedSchema, db.cloud.options);
5903
- if (!schema) {
5904
- // Database opened dynamically (use case for SW!)
5905
- // Take persisted schema:
5906
- db.cloud.schema = persistedSchema || null;
5953
+ //verifyConfig(db.cloud.options); Not needed (yet at least!)
5954
+ // Verify the user has allowed version increment.
5955
+ if (!db.tables.every((table) => table.core)) {
5956
+ throwVersionIncrementNeeded();
5907
5957
  }
5908
- else if (!persistedSchema ||
5909
- JSON.stringify(persistedSchema) !== JSON.stringify(schema)) {
5910
- // Update persisted schema (but don't overwrite table prefixes)
5911
- const newPersistedSchema = persistedSchema || {};
5912
- for (const [table, tblSchema] of Object.entries(schema)) {
5913
- const newTblSchema = newPersistedSchema[table];
5914
- if (!newTblSchema) {
5915
- newPersistedSchema[table] = { ...tblSchema };
5958
+ const swRegistrations = 'serviceWorker' in navigator
5959
+ ? yield navigator.serviceWorker.getRegistrations()
5960
+ : [];
5961
+ const initiallySynced = yield db.transaction('rw', db.$syncState, () => __awaiter$1(this, void 0, void 0, function* () {
5962
+ var _g, _h;
5963
+ const { options, schema } = db.cloud;
5964
+ const [persistedOptions, persistedSchema, persistedSyncState] = yield Promise.all([
5965
+ db.getOptions(),
5966
+ db.getSchema(),
5967
+ db.getPersistedSyncState(),
5968
+ ]);
5969
+ if (!configuredProgramatically) {
5970
+ // Options not specified programmatically (use case for SW!)
5971
+ // Take persisted options:
5972
+ db.cloud.options = persistedOptions || null;
5973
+ }
5974
+ else if (!persistedOptions ||
5975
+ JSON.stringify(persistedOptions) !== JSON.stringify(options)) {
5976
+ // Update persisted options:
5977
+ if (!options)
5978
+ throw new Error(`Internal error`); // options cannot be null if configuredProgramatically is set.
5979
+ yield db.$syncState.put(options, 'options');
5980
+ }
5981
+ if (((_g = db.cloud.options) === null || _g === void 0 ? void 0 : _g.tryUseServiceWorker) &&
5982
+ 'serviceWorker' in navigator &&
5983
+ swRegistrations.length > 0 &&
5984
+ !DISABLE_SERVICEWORKER_STRATEGY) {
5985
+ // * Configured for using service worker if available.
5986
+ // * Browser supports service workers
5987
+ // * There is at least one service worker registration
5988
+ console.debug('Dexie Cloud Addon: Using service worker');
5989
+ db.cloud.usingServiceWorker = true;
5990
+ }
5991
+ else {
5992
+ // Not configured for using service worker or no service worker
5993
+ // registration exists. Don't rely on service worker to do any job.
5994
+ // Use LocalSyncWorker instead.
5995
+ if (((_h = db.cloud.options) === null || _h === void 0 ? void 0 : _h.tryUseServiceWorker) && !IS_SERVICE_WORKER) {
5996
+ console.debug('dexie-cloud-addon: Not using service worker.', swRegistrations.length === 0
5997
+ ? 'No SW registrations found.'
5998
+ : 'serviceWorker' in navigator && DISABLE_SERVICEWORKER_STRATEGY
5999
+ ? 'Avoiding SW background sync and SW periodic bg sync for this browser due to browser bugs.'
6000
+ : 'navigator.serviceWorker not present');
5916
6001
  }
5917
- else {
5918
- newTblSchema.markedForSync = tblSchema.markedForSync;
5919
- tblSchema.deleted = newTblSchema.deleted;
5920
- newTblSchema.generatedGlobalId = tblSchema.generatedGlobalId;
6002
+ db.cloud.usingServiceWorker = false;
6003
+ }
6004
+ updateSchemaFromOptions(schema, db.cloud.options);
6005
+ updateSchemaFromOptions(persistedSchema, db.cloud.options);
6006
+ if (!schema) {
6007
+ // Database opened dynamically (use case for SW!)
6008
+ // Take persisted schema:
6009
+ db.cloud.schema = persistedSchema || null;
6010
+ }
6011
+ else if (!persistedSchema ||
6012
+ JSON.stringify(persistedSchema) !== JSON.stringify(schema)) {
6013
+ // Update persisted schema (but don't overwrite table prefixes)
6014
+ const newPersistedSchema = persistedSchema || {};
6015
+ for (const [table, tblSchema] of Object.entries(schema)) {
6016
+ const newTblSchema = newPersistedSchema[table];
6017
+ if (!newTblSchema) {
6018
+ newPersistedSchema[table] = Object.assign({}, tblSchema);
6019
+ }
6020
+ else {
6021
+ newTblSchema.markedForSync = tblSchema.markedForSync;
6022
+ tblSchema.deleted = newTblSchema.deleted;
6023
+ newTblSchema.generatedGlobalId = tblSchema.generatedGlobalId;
6024
+ }
5921
6025
  }
6026
+ yield db.$syncState.put(newPersistedSchema, 'schema');
6027
+ // Make sure persisted table prefixes are being used instead of computed ones:
6028
+ // Let's assign all props as the newPersistedSchema should be what we should be working with.
6029
+ Object.assign(schema, newPersistedSchema);
5922
6030
  }
5923
- await db.$syncState.put(newPersistedSchema, 'schema');
5924
- // Make sure persisted table prefixes are being used instead of computed ones:
5925
- // Let's assign all props as the newPersistedSchema should be what we should be working with.
5926
- Object.assign(schema, newPersistedSchema);
6031
+ return persistedSyncState === null || persistedSyncState === void 0 ? void 0 : persistedSyncState.initiallySynced;
6032
+ }));
6033
+ if (initiallySynced) {
6034
+ db.setInitiallySynced(true);
5927
6035
  }
5928
- return persistedSyncState?.initiallySynced;
5929
- });
5930
- if (initiallySynced) {
5931
- db.setInitiallySynced(true);
5932
- }
5933
- verifySchema(db);
5934
- if (db.cloud.options?.databaseUrl && !initiallySynced) {
5935
- await performInitialSync(db, db.cloud.options, db.cloud.schema);
5936
- db.setInitiallySynced(true);
5937
- }
5938
- // Manage CurrentUser observable:
5939
- throwIfClosed();
5940
- if (!IS_SERVICE_WORKER) {
5941
- subscriptions.push(liveQuery(() => db.getCurrentUser()).subscribe(currentUserEmitter));
5942
- // Manage PersistedSyncState observable:
5943
- subscriptions.push(liveQuery(() => db.getPersistedSyncState()).subscribe(db.cloud.persistedSyncState));
5944
- // Wait till currentUser and persistedSyncState get populated
5945
- // with things from the database and not just the default values.
5946
- // This is so that when db.open() completes, user should be safe
5947
- // to subscribe to these observables and get actual data.
5948
- await combineLatest([
5949
- currentUserEmitter.pipe(skip(1), take(1)),
5950
- db.cloud.persistedSyncState.pipe(skip(1), take(1)),
5951
- ]).toPromise();
5952
- }
5953
- // HERE: If requireAuth, do authentication now.
5954
- if (db.cloud.options?.requireAuth) {
5955
- await login(db);
5956
- }
5957
- if (localSyncWorker)
5958
- localSyncWorker.stop();
5959
- localSyncWorker = null;
5960
- throwIfClosed();
5961
- if (db.cloud.usingServiceWorker && db.cloud.options?.databaseUrl) {
5962
- registerSyncEvent(db, 'push').catch(() => { });
5963
- registerPeriodicSyncEvent(db).catch(() => { });
5964
- }
5965
- else if (db.cloud.options?.databaseUrl &&
5966
- db.cloud.schema &&
5967
- !IS_SERVICE_WORKER) {
5968
- // There's no SW. Start SyncWorker instead.
5969
- localSyncWorker = LocalSyncWorker(db, db.cloud.options, db.cloud.schema);
5970
- localSyncWorker.start();
5971
- triggerSync(db, 'push');
5972
- }
5973
- // Listen to online event and do sync.
5974
- throwIfClosed();
5975
- if (!IS_SERVICE_WORKER) {
5976
- subscriptions.push(fromEvent(self, 'online').subscribe(() => {
5977
- console.debug('online!');
5978
- db.syncStateChangedEvent.next({
5979
- phase: 'not-in-sync',
5980
- });
6036
+ verifySchema(db);
6037
+ if (((_b = db.cloud.options) === null || _b === void 0 ? void 0 : _b.databaseUrl) && !initiallySynced) {
6038
+ yield performInitialSync(db, db.cloud.options, db.cloud.schema);
6039
+ db.setInitiallySynced(true);
6040
+ }
6041
+ // Manage CurrentUser observable:
6042
+ throwIfClosed();
6043
+ if (!IS_SERVICE_WORKER) {
6044
+ subscriptions.push(liveQuery(() => db.getCurrentUser()).subscribe(currentUserEmitter));
6045
+ // Manage PersistedSyncState observable:
6046
+ subscriptions.push(liveQuery(() => db.getPersistedSyncState()).subscribe(db.cloud.persistedSyncState));
6047
+ // Wait till currentUser and persistedSyncState get populated
6048
+ // with things from the database and not just the default values.
6049
+ // This is so that when db.open() completes, the user should be safe
6050
+ // to subscribe to these observables and get actual data.
6051
+ yield combineLatest([
6052
+ currentUserEmitter.pipe(skip(1), take(1)),
6053
+ db.cloud.persistedSyncState.pipe(skip(1), take(1)),
6054
+ ]).toPromise();
6055
+ }
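
The skip(1)/take(1) wait above works because both observables are BehaviorSubject-backed and therefore replay a default value synchronously on subscribe; skipping that seed and taking the next emission means db.open() only resolves once real data has arrived from the database. A hedged, self-contained sketch of the same pattern, assuming RxJS 7's firstValueFrom (the code above uses the older toPromise()); the subject names below are illustrative stand-ins, not the addon's API:

import { BehaviorSubject, combineLatest, firstValueFrom } from 'rxjs';
import { skip, take } from 'rxjs/operators';

// Each BehaviorSubject replays its seed value immediately on subscribe.
const currentUser$ = new BehaviorSubject<{ userId: string }>({ userId: 'unauthorized' });
const persistedSyncState$ = new BehaviorSubject<object | undefined>(undefined);

async function waitForRealValues() {
  // skip(1) drops the seed, take(1) completes on the next emission, and
  // combineLatest only emits once *both* sources have produced a post-seed value.
  await firstValueFrom(combineLatest([
    currentUser$.pipe(skip(1), take(1)),
    persistedSyncState$.pipe(skip(1), take(1)),
  ]));
}
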
6056
+ // HERE: If requireAuth, do authentication now.
6057
+ if ((_c = db.cloud.options) === null || _c === void 0 ? void 0 : _c.requireAuth) {
6058
+ yield login(db);
6059
+ }
6060
+ if (localSyncWorker)
6061
+ localSyncWorker.stop();
6062
+ localSyncWorker = null;
6063
+ throwIfClosed();
6064
+ if (db.cloud.usingServiceWorker && ((_d = db.cloud.options) === null || _d === void 0 ? void 0 : _d.databaseUrl)) {
6065
+ registerSyncEvent(db, 'push').catch(() => { });
6066
+ registerPeriodicSyncEvent(db).catch(() => { });
6067
+ }
6068
+ else if (((_e = db.cloud.options) === null || _e === void 0 ? void 0 : _e.databaseUrl) &&
6069
+ db.cloud.schema &&
6070
+ !IS_SERVICE_WORKER) {
6071
+ // There's no SW. Start SyncWorker instead.
6072
+ localSyncWorker = LocalSyncWorker(db, db.cloud.options, db.cloud.schema);
6073
+ localSyncWorker.start();
5981
6074
  triggerSync(db, 'push');
5982
- }), fromEvent(self, 'offline').subscribe(() => {
5983
- console.debug('offline!');
5984
- db.syncStateChangedEvent.next({
5985
- phase: 'offline',
5986
- });
5987
- }));
5988
- }
5989
- // Connect WebSocket only if we're a browser window
5990
- if (typeof window !== 'undefined' &&
5991
- !IS_SERVICE_WORKER &&
5992
- db.cloud.options?.databaseUrl) {
5993
- subscriptions.push(connectWebSocket(db));
5994
- }
6075
+ }
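
The branch above chooses a sync driver: when a service worker is handling sync, the page only signals it (background-sync registration plus periodic sync), and failures are swallowed so they cannot block db.open(); when there is no service worker, a page-local sync worker is started instead. A schematic of that decision under those assumptions; every name below is an illustrative stand-in rather than the addon's API:

type StopFn = () => void;

function startSyncDriver(opts: {
  usingServiceWorker: boolean;
  databaseUrl?: string;
  signalServiceWorker: () => Promise<void>; // e.g. register a background sync / post a message
  startLocalWorker: () => StopFn;           // page-local sync loop; returns its stop function
}): StopFn {
  if (!opts.databaseUrl) return () => {};   // nothing to sync against
  if (opts.usingServiceWorker) {
    // Fire-and-forget: registration failures must not break database opening.
    opts.signalServiceWorker().catch(() => {});
    return () => {};
  }
  // No service worker owns syncing: run the loop in this context instead.
  return opts.startLocalWorker();
}
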
6076
+ // Listen to online event and do sync.
6077
+ throwIfClosed();
6078
+ if (!IS_SERVICE_WORKER) {
6079
+ subscriptions.push(fromEvent(self, 'online').subscribe(() => {
6080
+ console.debug('online!');
6081
+ db.syncStateChangedEvent.next({
6082
+ phase: 'not-in-sync',
6083
+ });
6084
+ triggerSync(db, 'push');
6085
+ }), fromEvent(self, 'offline').subscribe(() => {
6086
+ console.debug('offline!');
6087
+ db.syncStateChangedEvent.next({
6088
+ phase: 'offline',
6089
+ });
6090
+ }));
6091
+ }
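
The connectivity handling above follows a simple rule: on 'online', the phase is reported as not-in-sync and a push sync is triggered (local changes may have queued up while offline); on 'offline', only the reported phase changes. A minimal sketch of that wiring, with syncPhase$ and requestPushSync as illustrative stand-ins for the addon's event and sync trigger:

import { fromEvent, Subject } from 'rxjs';

const syncPhase$ = new Subject<{ phase: string }>(); // stand-in for the sync-state event
const requestPushSync = () => { /* stand-in for triggering a push sync */ };

const connectivitySubs = [
  fromEvent(self, 'online').subscribe(() => {
    syncPhase$.next({ phase: 'not-in-sync' }); // data may have diverged while offline
    requestPushSync();
  }),
  fromEvent(self, 'offline').subscribe(() => {
    syncPhase$.next({ phase: 'offline' });
  }),
];

// On close, these subscriptions are expected to be torn down:
// connectivitySubs.forEach((s) => s.unsubscribe());
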
6092
+ // Connect WebSocket only if we're a browser window
6093
+ if (typeof window !== 'undefined' &&
6094
+ !IS_SERVICE_WORKER &&
6095
+ ((_f = db.cloud.options) === null || _f === void 0 ? void 0 : _f.databaseUrl)) {
6096
+ subscriptions.push(connectWebSocket(db));
6097
+ }
6098
+ });
5995
6099
  }
5996
6100
  }
5997
- dexieCloud.version = '4.0.0-beta.15';
6101
+ dexieCloud.version = '4.0.0-beta.18';
5998
6102
  Dexie.Cloud = dexieCloud;
5999
6103
 
6000
6104
  export { dexieCloud as default, dexieCloud, getTiedObjectId, getTiedRealmId };
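
A note for readers scanning the large add/remove blocks in this section: most of the churn between the two versions here is a syntax downlevel rather than a behavioral change. `await` expressions become `yield` inside a generator passed to TypeScript's emitted __awaiter helper, and optional chaining such as db.cloud.options?.databaseUrl is expanded into an explicit null/undefined test on a temporary. A reduced, self-contained example of the optional-chaining expansion (getDatabaseUrl is an illustrative name, and the URL is a placeholder):

// Source-level form:
//   const url = options?.databaseUrl;
// Downleveled form, equivalent for null/undefined and object inputs alike:
function getDatabaseUrl(options?: { databaseUrl?: string }) {
  var _a;
  return (_a = options) === null || _a === void 0 ? void 0 : _a.databaseUrl;
}

getDatabaseUrl({ databaseUrl: 'https://example.dexie.cloud' }); // -> the URL
getDatabaseUrl(undefined);                                      // -> undefined
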