dexie-cloud-addon 1.0.0-beta.10 → 1.0.0-beta.11

This diff shows the changes between publicly released versions of the package, as published to their respective public registries. It is provided for informational purposes only.
Files changed (34)
  1. package/dist/modern/dexie-cloud-addon.js +189 -158
  2. package/dist/modern/dexie-cloud-addon.js.map +1 -1
  3. package/dist/modern/dexie-cloud-addon.min.js +1 -1
  4. package/dist/modern/dexie-cloud-addon.min.js.map +1 -1
  5. package/dist/modern/service-worker.js +1208 -1176
  6. package/dist/modern/service-worker.js.map +1 -1
  7. package/dist/modern/service-worker.min.js +1 -1
  8. package/dist/modern/service-worker.min.js.map +1 -1
  9. package/dist/module-es5/dexie-cloud-addon.js +259 -207
  10. package/dist/module-es5/dexie-cloud-addon.js.map +1 -1
  11. package/dist/module-es5/dexie-cloud-addon.min.js +1 -1
  12. package/dist/module-es5/dexie-cloud-addon.min.js.map +1 -1
  13. package/dist/types/WSObservable.d.ts +11 -6
  14. package/dist/types/WebSocketStatus.d.ts +1 -0
  15. package/dist/types/helpers/BroadcastedLocalEvent.d.ts +8 -0
  16. package/dist/types/helpers/visibleState.d.ts +1 -0
  17. package/dist/types/sync/syncServerToClientOnly.d.ts +3 -0
  18. package/dist/types/types/CloudConnectionStatus.d.ts +0 -0
  19. package/dist/types/types/ConnectionStatus.d.ts +0 -0
  20. package/dist/types/types/LoginState.d.ts +41 -0
  21. package/dist/types/types/SyncConnectionStatus.d.ts +1 -0
  22. package/dist/types/types/SyncFlowStatus.d.ts +6 -0
  23. package/dist/types/types/SyncStatus.d.ts +6 -0
  24. package/dist/umd/dexie-cloud-addon.js +259 -207
  25. package/dist/umd/dexie-cloud-addon.js.map +1 -1
  26. package/dist/umd/dexie-cloud-addon.min.js +1 -1
  27. package/dist/umd/dexie-cloud-addon.min.js.map +1 -1
  28. package/dist/umd/service-worker.js +1208 -1176
  29. package/dist/umd/service-worker.js.map +1 -1
  30. package/dist/umd/service-worker.min.js +1 -1
  31. package/dist/umd/service-worker.min.js.map +1 -1
  32. package/dist/umd-modern/dexie-cloud-addon.js +188 -157
  33. package/dist/umd-modern/dexie-cloud-addon.js.map +1 -1
  34. package/package.json +2 -2
@@ -2120,1234 +2120,1218 @@ function timeout(due, scheduler) {
  return timeoutWith(due, throwError(new TimeoutError()), scheduler);
  }

- const { toString: toStr } = {};
- function getToStringTag(val) {
- return toStr.call(val).slice(8, -1);
- }
- function escapeDollarProps(value) {
- const keys = Object.keys(value);
- let dollarKeys = null;
- for (let i = 0, l = keys.length; i < l; ++i) {
- if (keys[i][0] === "$") {
- dollarKeys = dollarKeys || [];
- dollarKeys.push(keys[i]);
+ //const hasSW = 'serviceWorker' in navigator;
+ let hasComplainedAboutSyncEvent = false;
+ async function registerSyncEvent(db, purpose) {
+ try {
+ // Send sync event to SW:
+ const sw = await navigator.serviceWorker.ready;
+ if (purpose === "push" && sw.sync) {
+ await sw.sync.register(`dexie-cloud:${db.name}`);
+ }
+ if (sw.active) {
+ // Use postMessage for pull syncs and for browsers not supporting sync event (Firefox, Safari).
+ // Also chromium based browsers with sw.sync as a fallback for sleepy sync events not taking action for a while.
+ sw.active.postMessage({
+ type: 'dexie-cloud-sync',
+ dbName: db.name,
+ purpose
+ });
  }
+ else {
+ console.error(`Dexie Cloud: There's no active service worker. Can this ever happen??`);
+ }
+ return;
  }
- if (!dollarKeys)
- return value;
- const clone = { ...value };
- for (const k of dollarKeys) {
- delete clone[k];
- clone["$" + k] = value[k];
+ catch (e) {
+ if (!hasComplainedAboutSyncEvent) {
+ console.debug(`Dexie Cloud: Could not register sync event`, e);
+ hasComplainedAboutSyncEvent = true;
+ }
  }
- return clone;
  }
- const ObjectDef = {
- replace: escapeDollarProps,
- };
- function TypesonSimplified(...typeDefsInputs) {
- const typeDefs = typeDefsInputs.reduce((p, c) => ({ ...p, ...c }), typeDefsInputs.reduce((p, c) => ({ ...c, ...p }), {}));
- const protoMap = new WeakMap();
- return {
- stringify(value, alternateChannel, space) {
- const json = JSON.stringify(value, function (key) {
- const realVal = this[key];
- const typeDef = getTypeDef(realVal);
- return typeDef
- ? typeDef.replace(realVal, alternateChannel, typeDefs)
- : realVal;
- }, space);
- return json;
- },
- parse(tson, alternateChannel) {
- let parent = null;
- let unescapeParentKeys = [];
- return JSON.parse(tson, function (key, value) {
- //
- // Parent Part
- //
- const type = value === null || value === void 0 ? void 0 : value.$t;
- if (type) {
- const typeDef = typeDefs[type];
- value = typeDef
- ? typeDef.revive(value, alternateChannel, typeDefs)
- : value;
- }
- if (value === parent) {
- // Do what the kid told us to
- if (unescapeParentKeys.length > 0) {
- // Unescape dollar props
- value = { ...value };
- for (const k of unescapeParentKeys) {
- value[k.substr(1)] = value[k];
- delete value[k];
- }
- }
- unescapeParentKeys = [];
- return value;
- }
- //
- // Child part
- //
- if (key[0] === "$" && key !== "$t") {
- parent = this;
- unescapeParentKeys.push(key);
- }
- return value;
- });
- },
- };
- function getTypeDef(realVal) {
- const type = typeof realVal;
- switch (typeof realVal) {
- case "object":
- case "function": {
- // "object", "function", null
- if (realVal === null)
- return null;
- const proto = Object.getPrototypeOf(realVal);
- if (!proto)
- return ObjectDef;
- let typeDef = protoMap.get(proto);
- if (typeDef !== undefined)
- return typeDef; // Null counts to! So the caching of Array.prototype also counts.
- const toStringTag = getToStringTag(realVal);
- const entry = Object.entries(typeDefs).find(([typeName, typeDef]) => { var _a, _b; return (_b = (_a = typeDef === null || typeDef === void 0 ? void 0 : typeDef.test) === null || _a === void 0 ? void 0 : _a.call(typeDef, realVal, toStringTag)) !== null && _b !== void 0 ? _b : typeName === toStringTag; });
- typeDef = entry === null || entry === void 0 ? void 0 : entry[1];
- if (!typeDef) {
- typeDef = Array.isArray(realVal)
- ? null
- : typeof realVal === "function"
- ? typeDefs.function || null
- : ObjectDef;
- }
- protoMap.set(proto, typeDef);
- return typeDef;
+ async function registerPeriodicSyncEvent(db) {
+ var _a;
+ try {
+ // Register periodicSync event to SW:
+ // @ts-ignore
+ const { periodicSync } = await navigator.serviceWorker.ready;
+ if (periodicSync) {
+ try {
+ await periodicSync.register(`dexie-cloud:${db.name}`, (_a = db.cloud.options) === null || _a === void 0 ? void 0 : _a.periodicSync);
+ console.debug(`Dexie Cloud: Successfully registered periodicsync event for ${db.name}`);
+ }
+ catch (e) {
+ console.debug(`Dexie Cloud: Failed to register periodic sync. Your PWA must be installed to allow background sync.`, e);
  }
- default:
- return typeDefs[type];
  }
+ else {
+ console.debug(`Dexie Cloud: periodicSync not supported.`);
+ }
+ }
+ catch (e) {
+ console.debug(`Dexie Cloud: Could not register periodicSync for ${db.name}`, e);
  }
  }

- const BisonBinaryTypes = {
- Blob: {
- test: (blob, toStringTag) => toStringTag === "Blob",
- replace: (blob, altChannel) => {
- const i = altChannel.length;
- altChannel.push(blob);
- return {
- $t: "Blob",
- mimeType: blob.type,
- i,
- };
- },
- revive: ({ i, mimeType }, altChannel) => new Blob([altChannel[i]], { type: mimeType }),
- },
- };
-
- var numberDef = {
- number: {
- replace: (num) => {
- switch (true) {
- case isNaN(num):
- return { $t: "number", v: "NaN" };
- case num === Infinity:
- return { $t: "number", v: "Infinity" };
- case num === -Infinity:
- return { $t: "number", v: "-Infinity" };
- default:
- return num;
- }
- },
- revive: ({ v }) => Number(v),
- },
- };
+ function triggerSync(db, purpose) {
+ if (db.cloud.usingServiceWorker) {
+ registerSyncEvent(db, purpose);
+ }
+ else {
+ db.localSyncEvent.next({ purpose });
+ }
+ }

- const bigIntDef = {
- bigint: {
- replace: (realVal) => {
- return { $t: "bigint", v: "" + realVal };
- },
- revive: (obj) => BigInt(obj.v),
- },
- };
+ const b64decode = typeof Buffer !== "undefined"
+ ? (base64) => Buffer.from(base64, "base64")
+ : (base64) => {
+ const binary_string = atob(base64);
+ const len = binary_string.length;
+ const bytes = new Uint8Array(len);
+ for (var i = 0; i < len; i++) {
+ bytes[i] = binary_string.charCodeAt(i);
+ }
+ return bytes;
+ };
+ const b64encode = typeof Buffer !== "undefined"
+ ? (b) => ArrayBuffer.isView(b)
+ ? Buffer.from(b.buffer, b.byteOffset, b.byteLength).toString("base64")
+ : Buffer.from(b).toString("base64")
+ : (b) => btoa(String.fromCharCode.apply(null, b));

- var DateDef = {
- Date: {
- replace: (date) => ({
- $t: "Date",
- v: isNaN(date.getTime()) ? "NaN" : date.toISOString(),
- }),
- revive: ({ v }) => new Date(v === "NaN" ? NaN : Date.parse(v)),
- },
- };
+ async function computeRealmSetHash({ realms, inviteRealms, }) {
+ const data = JSON.stringify([
+ ...realms.map((realmId) => ({ realmId, accepted: true })),
+ ...inviteRealms.map((realmId) => ({ realmId, accepted: false })),
+ ].sort((a, b) => a.realmId < b.realmId ? -1 : a.realmId > b.realmId ? 1 : 0));
+ const byteArray = new TextEncoder().encode(data);
+ const digestBytes = await crypto.subtle.digest('SHA-1', byteArray);
+ const base64 = b64encode(digestBytes);
+ return base64;
+ }

- var SetDef = {
- Set: {
- replace: (set) => ({
- $t: "Set",
- v: Array.from(set.entries()),
- }),
- revive: ({ v }) => new Set(v),
- },
- };
+ function getSyncableTables(db) {
+ return Object.entries(db.cloud.schema || {})
+ .filter(([, { markedForSync }]) => markedForSync)
+ .map(([tbl]) => db.table(tbl));
+ }

- var MapDef = {
- Map: {
- replace: (map) => ({
- $t: "Map",
- v: Array.from(map.entries()),
- }),
- revive: ({ v }) => new Map(v),
- },
- };
+ function getMutationTable(tableName) {
+ return `$${tableName}_mutations`;
+ }

- const _global = typeof globalThis !== "undefined"
- ? globalThis
- : typeof self !== "undefined"
- ? self
- : typeof global === "undefined"
- ? global
- : undefined;
+ function getTableFromMutationTable(mutationTable) {
+ var _a;
+ const tableName = (_a = /^\$(.*)_mutations$/.exec(mutationTable)) === null || _a === void 0 ? void 0 : _a[1];
+ if (!tableName)
+ throw new Error(`Given mutationTable ${mutationTable} is not correct`);
+ return tableName;
+ }

- var TypedArraysDefs = [
- "Int8Array",
- "Uint8Array",
- "Uint8ClampedArray",
- "Int16Array",
- "Uint16Array",
- "Int32Array",
- "Uint32Array",
- "Float32Array",
- "Float64Array",
- "DataView",
- "BigInt64Array",
- "BigUint64Array",
- ].reduce((specs, typeName) => ({
- ...specs,
- [typeName]: {
- // Replace passes the the typed array into $t, buffer so that
- // the ArrayBuffer typedef takes care of further handling of the buffer:
- // {$t:"Uint8Array",buffer:{$t:"ArrayBuffer",idx:0}}
- // CHANGED ABOVE! Now shortcutting that for more sparse format of the typed arrays
- // to contain the b64 property directly.
- replace: (a, _, typeDefs) => {
- const result = {
- $t: typeName,
- v: typeDefs.ArrayBuffer.replace(a.byteOffset === 0 && a.byteLength === a.buffer.byteLength
- ? a.buffer
- : a.buffer.slice(a.byteOffset, a.byteOffset + a.byteLength), _, typeDefs).v,
- };
- return result;
- },
- revive: ({ v }, _, typeDefs) => {
- const TypedArray = _global[typeName];
- return (TypedArray &&
- new TypedArray(typeDefs.ArrayBuffer.revive({ v }, _, typeDefs)));
- },
- },
- }), {});
+ async function listClientChanges(mutationTables, db, { since = {}, limit = Infinity } = {}) {
+ const allMutsOnTables = await Promise.all(mutationTables.map(async (mutationTable) => {
+ const tableName = getTableFromMutationTable(mutationTable.name);
+ const lastRevision = since[tableName];
+ let query = lastRevision
+ ? mutationTable.where("rev").above(lastRevision)
+ : mutationTable;
+ if (limit < Infinity)
+ query = query.limit(limit);
+ const muts = await query.toArray();
+ //const objTable = db.table(tableName);
+ /*for (const mut of muts) {
+ if (mut.type === "insert" || mut.type === "upsert") {
+ mut.values = await objTable.bulkGet(mut.keys);
+ }
+ }*/
+ return {
+ table: tableName,
+ muts,
+ };
+ }));
+ // Filter out those tables that doesn't have any mutations:
+ return allMutsOnTables.filter(({ muts }) => muts.length > 0);
+ }

- const b64decode = typeof Buffer !== "undefined"
- ? (base64) => Buffer.from(base64, "base64")
- : (base64) => {
- const binary_string = atob(base64);
- const len = binary_string.length;
- const bytes = new Uint8Array(len);
- for (var i = 0; i < len; i++) {
- bytes[i] = binary_string.charCodeAt(i);
- }
- return bytes;
- };
- const b64encode = typeof Buffer !== "undefined"
- ? (b) => ArrayBuffer.isView(b)
- ? Buffer.from(b.buffer, b.byteOffset, b.byteLength).toString("base64")
- : Buffer.from(b).toString("base64")
- : (b) => btoa(String.fromCharCode.apply(null, b));
+ //@ts-check
+ const randomFillSync = crypto.getRandomValues;

- function b64LexEncode(b) {
- return b64ToLex(b64encode(b));
- }
- function b64LexDecode(b64Lex) {
- return b64decode(lexToB64(b64Lex));
- }
- function b64ToLex(base64) {
- var encoded = "";
- for (var i = 0, length = base64.length; i < length; i++) {
- encoded += ENCODE_TABLE[base64[i]];
- }
- return encoded;
+ function assert(b) {
+ if (!b)
+ throw new Error('Assertion Failed');
  }
- function lexToB64(base64lex) {
- // only accept string input
- if (typeof base64lex !== "string") {
- throw new Error("invalid decoder input: " + base64lex);
+ function setByKeyPath(obj, keyPath, value) {
+ if (!obj || keyPath === undefined)
+ return;
+ if ('isFrozen' in Object && Object.isFrozen(obj))
+ return;
+ if (typeof keyPath !== 'string' && 'length' in keyPath) {
+ assert(typeof value !== 'string' && 'length' in value);
+ for (var i = 0, l = keyPath.length; i < l; ++i) {
+ setByKeyPath(obj, keyPath[i], value[i]);
+ }
  }
- var base64 = "";
- for (var i = 0, length = base64lex.length; i < length; i++) {
- base64 += DECODE_TABLE[base64lex[i]];
+ else {
+ var period = keyPath.indexOf('.');
+ if (period !== -1) {
+ var currentKeyPath = keyPath.substr(0, period);
+ var remainingKeyPath = keyPath.substr(period + 1);
+ if (remainingKeyPath === '')
+ if (value === undefined) {
+ if (Array.isArray(obj)) {
+ if (!isNaN(parseInt(currentKeyPath)))
+ obj.splice(parseInt(currentKeyPath), 1);
+ }
+ else
+ delete obj[currentKeyPath];
+ // @ts-ignore: even if currentKeyPath would be numeric string and obj would be array - it works.
+ }
+ else
+ obj[currentKeyPath] = value;
+ else {
+ //@ts-ignore: even if currentKeyPath would be numeric string and obj would be array - it works.
+ var innerObj = obj[currentKeyPath];
+ //@ts-ignore: even if currentKeyPath would be numeric string and obj would be array - it works.
+ if (!innerObj)
+ innerObj = obj[currentKeyPath] = {};
+ setByKeyPath(innerObj, remainingKeyPath, value);
+ }
+ }
+ else {
+ if (value === undefined) {
+ if (Array.isArray(obj) && !isNaN(parseInt(keyPath)))
+ // @ts-ignore: even if currentKeyPath would be numeric string and obj would be array - it works.
+ obj.splice(keyPath, 1);
+ //@ts-ignore: even if currentKeyPath would be numeric string and obj would be array - it works.
+ else
+ delete obj[keyPath];
+ //@ts-ignore: even if currentKeyPath would be numeric string and obj would be array - it works.
+ }
+ else
+ obj[keyPath] = value;
+ }
  }
- return base64;
  }
- const DECODE_TABLE = {
- "-": "=",
- "0": "A",
- "1": "B",
- "2": "C",
- "3": "D",
- "4": "E",
- "5": "F",
- "6": "G",
- "7": "H",
- "8": "I",
- "9": "J",
- A: "K",
- B: "L",
- C: "M",
- D: "N",
- E: "O",
- F: "P",
- G: "Q",
- H: "R",
- I: "S",
- J: "T",
- K: "U",
- L: "V",
- M: "W",
- N: "X",
- O: "Y",
- P: "Z",
- Q: "a",
- R: "b",
- S: "c",
- T: "d",
- U: "e",
- V: "f",
- W: "g",
- X: "h",
- Y: "i",
- Z: "j",
- _: "k",
- a: "l",
- b: "m",
- c: "n",
- d: "o",
- e: "p",
- f: "q",
- g: "r",
- h: "s",
- i: "t",
- j: "u",
- k: "v",
- l: "w",
- m: "x",
- n: "y",
- o: "z",
- p: "0",
- q: "1",
- r: "2",
- s: "3",
- t: "4",
- u: "5",
- v: "6",
- w: "7",
- x: "8",
- y: "9",
- z: "+",
- "|": "/",
+ const randomString$1 = typeof self === 'undefined' ? (bytes) => {
+ // Node
+ const buf = Buffer.alloc(bytes);
+ randomFillSync(buf);
+ return buf.toString("base64");
+ } : (bytes) => {
+ // Web
+ const buf = new Uint8Array(bytes);
+ crypto.getRandomValues(buf);
+ return btoa(String.fromCharCode.apply(null, buf));
  };
- const ENCODE_TABLE = {};
- for (const c of Object.keys(DECODE_TABLE)) {
- ENCODE_TABLE[DECODE_TABLE[c]] = c;
- }

- var ArrayBufferDef = {
- ArrayBuffer: {
- replace: (ab) => ({
- $t: "ArrayBuffer",
- v: b64LexEncode(ab),
- }),
- revive: ({ v }) => {
- const ba = b64LexDecode(v);
- return ba.buffer.byteLength === ba.byteLength
- ? ba.buffer
- : ba.buffer.slice(ba.byteOffset, ba.byteOffset + ba.byteLength);
- },
- },
- };
-
- class FakeBlob {
- constructor(buf, type) {
- this.buf = buf;
- this.type = type;
- }
+ /** Verifies that given primary key is valid.
+ * The reason we narrow validity for valid keys are twofold:
+ * 1: Make sure to only support types that can be used as an object index in DBKeyMutationSet.
+ * For example, ArrayBuffer cannot be used (gives "object ArrayBuffer") but Uint8Array can be
+ * used (gives comma-delimited list of included bytes).
+ * 2: Avoid using plain numbers and Dates as keys when they are synced, as they are not globally unique.
+ * 3: Since we store the key as a VARCHAR server side in current version, try not promote types that stringifies to become very long server side.
+ *
+ * @param id
+ * @returns
+ */
+ function isValidSyncableID(id) {
+ if (typeof id === "string")
+ return true;
+ //if (validIDTypes[toStringTag(id)]) return true;
+ //if (Array.isArray(id)) return id.every((part) => isValidSyncableID(part));
+ if (Array.isArray(id) && id.some(key => isValidSyncableID(key)) && id.every(isValidSyncableIDPart))
+ return true;
+ return false;
+ }
+ /** Verifies that given key part is valid.
+ * 1: Make sure that arrays of this types are stringified correclty and works with DBKeyMutationSet.
+ * For example, ArrayBuffer cannot be used (gives "object ArrayBuffer") but Uint8Array can be
+ * used (gives comma-delimited list of included bytes).
+ * 2: Since we store the key as a VARCHAR server side in current version, try not promote types that stringifies to become very long server side.
+ */
+ function isValidSyncableIDPart(part) {
+ return typeof part === "string" || typeof part === "number" || Array.isArray(part) && part.every(isValidSyncableIDPart);
+ }
+ function isValidAtID(id, idPrefix) {
+ return !idPrefix || (typeof id === "string" && id.startsWith(idPrefix));
  }

- function readBlobSync(b) {
- const req = new XMLHttpRequest();
- req.overrideMimeType("text/plain; charset=x-user-defined");
- req.open("GET", URL.createObjectURL(b), false); // Sync
- req.send();
- if (req.status !== 200 && req.status !== 0) {
- throw new Error("Bad Blob access: " + req.status);
+ function applyOperation(target, table, op) {
+ const tbl = target[table] || (target[table] = {});
+ switch (op.type) {
+ case "insert":
+ // TODO: Don't treat insert and upsert the same?
+ case "upsert":
+ op.keys.forEach((key, idx) => {
+ tbl[key] = {
+ type: "ups",
+ val: op.values[idx],
+ };
+ });
+ break;
+ case "update":
+ case "modify": {
+ op.keys.forEach((key, idx) => {
+ const changeSpec = op.type === "update"
+ ? op.changeSpecs[idx]
+ : op.changeSpec;
+ const entry = tbl[key];
+ if (!entry) {
+ tbl[key] = {
+ type: "upd",
+ mod: changeSpec,
+ };
+ }
+ else {
+ switch (entry.type) {
+ case "ups":
+ // Adjust the existing upsert with additional updates
+ for (const [propPath, value] of Object.entries(changeSpec)) {
+ setByKeyPath(entry.val, propPath, value);
+ }
+ break;
+ case "del":
+ // No action.
+ break;
+ case "upd":
+ // Adjust existing update with additional updates
+ Object.assign(entry.mod, changeSpec); // May work for deep props as well - new keys is added later, right? Does the prop order persist along TSON and all? But it will not be 100% when combined with some server code (seach for "address.city": "Stockholm" comment)
+ break;
+ }
+ }
+ });
+ break;
+ }
+ case "delete":
+ op.keys.forEach((key) => {
+ tbl[key] = {
+ type: "del",
+ };
+ });
+ break;
  }
- return req.responseText;
+ return target;
  }

- function string2ArrayBuffer(str) {
- const array = new Uint8Array(str.length);
- for (let i = 0; i < str.length; ++i) {
- array[i] = str.charCodeAt(i); // & 0xff;
+ function applyOperations(target, ops) {
+ for (const { table, muts } of ops) {
+ for (const mut of muts) {
+ applyOperation(target, table, mut);
+ }
  }
- return array.buffer;
  }

- var BlobDef = {
- Blob: {
- test: (blob, toStringTag) => toStringTag === "Blob" || blob instanceof FakeBlob,
- replace: (blob) => ({
- $t: "Blob",
- v: blob instanceof FakeBlob
- ? b64encode(blob.buf)
- : b64encode(string2ArrayBuffer(readBlobSync(blob))),
- type: blob.type,
- }),
- revive: ({ type, v }) => {
- const ab = b64decode(v);
- return typeof Blob !== undefined
- ? new Blob([ab])
- : new FakeBlob(ab.buffer, type);
- },
- },
- };
-
- const builtin = {
- ...numberDef,
- ...bigIntDef,
- ...DateDef,
- ...SetDef,
- ...MapDef,
- ...TypedArraysDefs,
- ...ArrayBufferDef,
- ...BlobDef,
- };
-
- function Bison(...typeDefsInputs) {
- const tson = TypesonSimplified(builtin, BisonBinaryTypes, ...typeDefsInputs);
- return {
- toBinary(value) {
- const [blob, json] = this.stringify(value);
- const lenBuf = new ArrayBuffer(4);
- new DataView(lenBuf).setUint32(0, blob.size);
- return new Blob([lenBuf, blob, json]);
- },
- stringify(value) {
- const binaries = [];
- const json = tson.stringify(value, binaries);
- const blob = new Blob(binaries.map((b) => {
- const lenBuf = new ArrayBuffer(4);
- new DataView(lenBuf).setUint32(0, "byteLength" in b ? b.byteLength : b.size);
- return new Blob([lenBuf, b]);
- }));
- return [blob, json];
- },
- async parse(json, binData) {
- let pos = 0;
- const arrayBuffers = [];
- const buf = await readBlobBinary(binData);
- const view = new DataView(buf);
- while (pos < buf.byteLength) {
- const len = view.getUint32(pos);
- pos += 4;
- const ab = buf.slice(pos, pos + len);
- pos += len;
- arrayBuffers.push(ab);
+ function subtractChanges(target, // Server change set
+ changesToSubtract // additional mutations on client during syncWithServer()
+ ) {
+ var _a, _b, _c;
+ for (const [table, mutationSet] of Object.entries(changesToSubtract)) {
+ for (const [key, mut] of Object.entries(mutationSet)) {
+ switch (mut.type) {
+ case 'ups':
+ {
+ const targetMut = (_a = target[table]) === null || _a === void 0 ? void 0 : _a[key];
+ if (targetMut) {
+ switch (targetMut.type) {
+ case 'ups':
+ delete target[table][key];
+ break;
+ case 'del':
+ // Leave delete operation.
+ // (Don't resurrect objects unintenionally (using tx(get, put) pattern locally))
+ break;
+ case 'upd':
+ delete target[table][key];
+ break;
+ }
+ }
+ }
+ break;
+ case 'del':
+ (_b = target[table]) === null || _b === void 0 ? true : delete _b[key];
+ break;
+ case 'upd': {
+ const targetMut = (_c = target[table]) === null || _c === void 0 ? void 0 : _c[key];
+ if (targetMut) {
+ switch (targetMut.type) {
+ case 'ups':
+ // Adjust the server upsert with locally updated values.
+ for (const [propPath, value] of Object.entries(mut.mod)) {
+ setByKeyPath(targetMut.val, propPath, value);
+ }
+ break;
+ case 'del':
+ // Leave delete.
+ break;
+ case 'upd':
+ // Remove the local update props from the server update mutation.
+ for (const propPath of Object.keys(mut.mod)) {
+ delete targetMut.mod[propPath];
+ }
+ break;
+ }
+ }
+ break;
+ }
  }
- return tson.parse(json, arrayBuffers);
- },
- async fromBinary(blob) {
- const len = new DataView(await readBlobBinary(blob.slice(0, 4))).getUint32(0);
- const binData = blob.slice(4, len + 4);
- const json = await readBlob(blob.slice(len + 4));
- return await this.parse(json, binData);
- },
- };
- }
- function readBlob(blob) {
- return new Promise((resolve, reject) => {
- const reader = new FileReader();
- reader.onabort = (ev) => reject(new Error("file read aborted"));
- reader.onerror = (ev) => reject(ev.target.error);
- reader.onload = (ev) => resolve(ev.target.result);
- reader.readAsText(blob);
- });
- }
- function readBlobBinary(blob) {
- return new Promise((resolve, reject) => {
- const reader = new FileReader();
- reader.onabort = (ev) => reject(new Error("file read aborted"));
- reader.onerror = (ev) => reject(ev.target.error);
- reader.onload = (ev) => resolve(ev.target.result);
- reader.readAsArrayBuffer(blob);
- });
+ }
+ }
  }

- /** The undefined type is not part of builtin but can be manually added.
- * The reason for supporting undefined is if the following object should be revived correctly:
- *
- * {foo: undefined}
+ /** Convert a DBKeyMutationSet (which is an internal format capable of looking up changes per ID)
+ * ...into a DBOperationsSet (which is more optimal for performing DB operations into DB (bulkAdd() etc))
  *
- * Without including this typedef, the revived object would just be {}.
- * If including this typedef, the revived object would be {foo: undefined}.
+ * @param inSet
+ * @returns DBOperationsSet representing inSet
  */
- var undefinedDef = {
- undefined: {
- replace: () => {
- },
- revive: () => undefined,
- },
- };
-
- // Since server revisions are stored in bigints, we need to handle clients without
- // bigint support to not fail when serverRevision is passed over to client.
- // We need to not fail when reviving it and we need to somehow store the information.
- // Since the revived version will later on be put into indexedDB we have another
- // issue: When reading it back from indexedDB we will get a poco object that we
- // cannot replace correctly when sending it to server. So we will also need
- // to do an explicit workaround in the protocol where a bigint is supported.
- // The workaround should be there regardless if browser supports BigInt or not, because
- // the serverRev might have been stored in IDB before the browser was upgraded to support bigint.
- //
- // if (typeof serverRev.rev !== "bigint")
- // if (hasBigIntSupport)
- // serverRev.rev = bigIntDef.bigint.revive(server.rev)
- // else
- // serverRev.rev = new FakeBigInt(server.rev)
- const hasBigIntSupport = typeof BigInt(0) === 'bigint';
- function getValueOfBigInt(x) {
- if (typeof x === 'bigint') {
- return x;
- }
- if (hasBigIntSupport) {
- return typeof x === 'string' ? BigInt(x) : BigInt(x.v);
- }
- else {
- return typeof x === 'string' ? Number(x) : Number(x.v);
- }
- }
- function compareBigInts(a, b) {
- const valA = getValueOfBigInt(a);
- const valB = getValueOfBigInt(b);
- return valA < valB ? -1 : valA > valB ? 1 : 0;
- }
- class FakeBigInt {
- constructor(value) {
- this.v = value;
- }
- toString() {
- return this.v;
- }
- }
- const defs = {
- ...undefinedDef,
- ...(hasBigIntSupport
- ? {}
- : {
- bigint: {
- test: (val) => val instanceof FakeBigInt,
- replace: (fakeBigInt) => {
- return {
- $t: 'bigint',
- ...fakeBigInt
- };
- },
- revive: ({ v, }) => new FakeBigInt(v)
- }
- })
- };
- const TSON = TypesonSimplified(builtin, defs);
- const BISON = Bison(defs);
-
- //const hasSW = 'serviceWorker' in navigator;
- let hasComplainedAboutSyncEvent = false;
- async function registerSyncEvent(db, purpose) {
- try {
- // Send sync event to SW:
- const sw = await navigator.serviceWorker.ready;
- if (purpose === "push" && sw.sync) {
- await sw.sync.register(`dexie-cloud:${db.name}`);
- }
- if (sw.active) {
- // Use postMessage for pull syncs and for browsers not supporting sync event (Firefox, Safari).
- // Also chromium based browsers with sw.sync as a fallback for sleepy sync events not taking action for a while.
- sw.active.postMessage({
- type: 'dexie-cloud-sync',
- dbName: db.name,
- purpose
- });
- }
- else {
- console.error(`Dexie Cloud: There's no active service worker. Can this ever happen??`);
- }
- return;
- }
- catch (e) {
- if (!hasComplainedAboutSyncEvent) {
- console.debug(`Dexie Cloud: Could not register sync event`, e);
- hasComplainedAboutSyncEvent = true;
+ function toDBOperationSet(inSet) {
+ // Fictive transaction:
+ const txid = randomString$1(16);
+ // Convert data into a temporary map to collect mutations of same table and type
+ const map = {};
+ for (const [table, ops] of Object.entries(inSet)) {
+ for (const [key, op] of Object.entries(ops)) {
+ const mapEntry = map[table] || (map[table] = {});
+ const ops = mapEntry[op.type] || (mapEntry[op.type] = []);
+ ops.push(Object.assign({ key }, op)); // DBKeyMutation doesn't contain key, so we need to bring it in.
  }
  }
- }
- async function registerPeriodicSyncEvent(db) {
- var _a;
- try {
- // Register periodicSync event to SW:
- // @ts-ignore
- const { periodicSync } = await navigator.serviceWorker.ready;
- if (periodicSync) {
- try {
- await periodicSync.register(`dexie-cloud:${db.name}`, (_a = db.cloud.options) === null || _a === void 0 ? void 0 : _a.periodicSync);
- console.debug(`Dexie Cloud: Successfully registered periodicsync event for ${db.name}`);
- }
- catch (e) {
- console.debug(`Dexie Cloud: Failed to register periodic sync. Your PWA must be installed to allow background sync.`, e);
+ // Start computing the resulting format:
+ const result = [];
+ for (const [table, ops] of Object.entries(map)) {
+ const resultEntry = {
+ table,
+ muts: [],
+ };
+ for (const [optype, muts] of Object.entries(ops)) {
+ switch (optype) {
+ case "ups": {
+ const op = {
+ type: "upsert",
+ keys: muts.map(mut => mut.key),
+ values: muts.map(mut => mut.val),
+ txid
+ };
+ resultEntry.muts.push(op);
+ break;
+ }
+ case "upd": {
+ const op = {
+ type: "update",
+ keys: muts.map(mut => mut.key),
+ changeSpecs: muts.map(mut => mut.mod),
+ txid
+ };
+ resultEntry.muts.push(op);
+ break;
+ }
+ case "del": {
+ const op = {
+ type: "delete",
+ keys: muts.map(mut => mut.key),
+ txid,
+ };
+ resultEntry.muts.push(op);
+ break;
+ }
  }
  }
- else {
- console.debug(`Dexie Cloud: periodicSync not supported.`);
- }
- }
- catch (e) {
- console.debug(`Dexie Cloud: Could not register periodicSync for ${db.name}`, e);
+ result.push(resultEntry);
  }
+ return result;
  }

- function triggerSync(db, purpose) {
- if (db.cloud.usingServiceWorker) {
- registerSyncEvent(db, purpose);
- }
- else {
- db.localSyncEvent.next({ purpose });
- }
+ function getDbNameFromDbUrl(dbUrl) {
+ const url = new URL(dbUrl);
+ return url.pathname === "/"
+ ? url.hostname.split('.')[0]
+ : url.pathname.split('/')[1];
  }

- async function computeRealmSetHash({ realms, inviteRealms, }) {
- const data = JSON.stringify([
- ...realms.map((realmId) => ({ realmId, accepted: true })),
- ...inviteRealms.map((realmId) => ({ realmId, accepted: false })),
- ].sort((a, b) => a.realmId < b.realmId ? -1 : a.realmId > b.realmId ? 1 : 0));
- const byteArray = new TextEncoder().encode(data);
- const digestBytes = await crypto.subtle.digest('SHA-1', byteArray);
- const base64 = b64encode(digestBytes);
- return base64;
+ async function listSyncifiedChanges(tablesToSyncify, currentUser, schema, alreadySyncedRealms) {
+ if (currentUser.isLoggedIn) {
+ if (tablesToSyncify.length > 0) {
+ const ignoredRealms = new Set(alreadySyncedRealms || []);
+ const inserts = await Promise.all(tablesToSyncify.map(async (table) => {
+ const { extractKey } = table.core.schema.primaryKey;
+ if (!extractKey)
+ return { table: table.name, muts: [] }; // Outbound tables are not synced.
+ const dexieCloudTableSchema = schema[table.name];
+ const query = (dexieCloudTableSchema === null || dexieCloudTableSchema === void 0 ? void 0 : dexieCloudTableSchema.generatedGlobalId)
+ ? table.filter((item) => !ignoredRealms.has(item.realmId || "") && isValidSyncableID(extractKey(item)))
+ : table.filter((item) => !ignoredRealms.has(item.realmId || "") && isValidAtID(extractKey(item), dexieCloudTableSchema === null || dexieCloudTableSchema === void 0 ? void 0 : dexieCloudTableSchema.idPrefix));
+ const unsyncedObjects = await query.toArray();
+ if (unsyncedObjects.length > 0) {
+ const mut = {
+ type: "insert",
+ values: unsyncedObjects,
+ keys: unsyncedObjects.map(extractKey),
+ userId: currentUser.userId,
+ };
+ return {
+ table: table.name,
+ muts: [mut],
+ };
+ }
+ else {
+ return {
+ table: table.name,
+ muts: []
+ };
+ }
+ }));
+ return inserts.filter(op => op.muts.length > 0);
+ }
+ }
+ return [];
  }

- function getSyncableTables(db) {
- return Object.entries(db.cloud.schema || {})
- .filter(([, { markedForSync }]) => markedForSync)
- .map(([tbl]) => db.table(tbl));
+ function getTablesToSyncify(db, syncState) {
+ const syncedTables = (syncState === null || syncState === void 0 ? void 0 : syncState.syncedTables) || [];
+ const syncableTables = getSyncableTables(db);
+ const tablesToSyncify = syncableTables.filter((tbl) => !syncedTables.includes(tbl.name));
+ return tablesToSyncify;
  }

- function getMutationTable(tableName) {
- return `$${tableName}_mutations`;
+ function interactWithUser(userInteraction, req) {
+ return new Promise((resolve, reject) => {
+ const interactionProps = {
+ ...req,
+ onSubmit: (res) => {
+ userInteraction.next(undefined);
+ resolve(res);
+ },
+ onCancel: () => {
+ userInteraction.next(undefined);
+ reject(new Dexie.AbortError("User cancelled"));
+ },
+ };
+ userInteraction.next(interactionProps);
+ // Start subscribing for external updates to db.cloud.userInteraction, and if so, cancel this request.
+ /*const subscription = userInteraction.subscribe((currentInteractionProps) => {
+ if (currentInteractionProps !== interactionProps) {
+ if (subscription) subscription.unsubscribe();
+ if (!done) {
+ reject(new Dexie.AbortError("User cancelled"));
+ }
+ }
+ });*/
+ });
  }
-
- function getTableFromMutationTable(mutationTable) {
- var _a;
- const tableName = (_a = /^\$(.*)_mutations$/.exec(mutationTable)) === null || _a === void 0 ? void 0 : _a[1];
- if (!tableName)
- throw new Error(`Given mutationTable ${mutationTable} is not correct`);
- return tableName;
+ function alertUser(userInteraction, title, ...alerts) {
+ return interactWithUser(userInteraction, {
+ type: 'message-alert',
+ title,
+ alerts,
+ fields: {}
+ });
  }
-
- async function listClientChanges(mutationTables, db, { since = {}, limit = Infinity } = {}) {
- const allMutsOnTables = await Promise.all(mutationTables.map(async (mutationTable) => {
- const tableName = getTableFromMutationTable(mutationTable.name);
- const lastRevision = since[tableName];
- let query = lastRevision
- ? mutationTable.where("rev").above(lastRevision)
- : mutationTable;
- if (limit < Infinity)
- query = query.limit(limit);
- const muts = await query.toArray();
- //const objTable = db.table(tableName);
- /*for (const mut of muts) {
- if (mut.type === "insert" || mut.type === "upsert") {
- mut.values = await objTable.bulkGet(mut.keys);
- }
- }*/
- return {
- table: tableName,
- muts,
- };
- }));
- // Filter out those tables that doesn't have any mutations:
- return allMutsOnTables.filter(({ muts }) => muts.length > 0);
+ async function promptForEmail(userInteraction, title, emailHint) {
+ let email = emailHint || '';
+ while (!email || !/^[\w-\.]+@([\w-]+\.)+[\w-]{2,10}$/.test(email)) {
+ email = (await interactWithUser(userInteraction, {
+ type: 'email',
+ title,
+ alerts: email
+ ? [
+ {
+ type: 'error',
+ messageCode: 'INVALID_EMAIL',
+ message: 'Please enter a valid email address',
+ messageParams: {},
+ },
+ ]
+ : [],
+ fields: {
+ email: {
+ type: 'email',
+ placeholder: 'you@somedomain.com',
+ },
+ },
+ })).email;
+ }
+ return email;
  }
-
- //@ts-check
- const randomFillSync = crypto.getRandomValues;
-
- function assert(b) {
- if (!b)
- throw new Error('Assertion Failed');
+ async function promptForOTP(userInteraction, email, alert) {
+ const alerts = [
+ {
+ type: 'info',
+ messageCode: 'OTP_SENT',
+ message: `A One-Time password has been sent to {email}`,
+ messageParams: { email },
+ },
+ ];
+ if (alert) {
+ alerts.push(alert);
+ }
+ const { otp } = await interactWithUser(userInteraction, {
+ type: 'otp',
+ title: 'Enter OTP',
+ alerts,
+ fields: {
+ otp: {
+ type: 'otp',
+ label: 'OTP',
+ placeholder: 'Paste OTP here',
+ },
+ },
+ });
+ return otp;
  }
- function setByKeyPath(obj, keyPath, value) {
- if (!obj || keyPath === undefined)
- return;
- if ('isFrozen' in Object && Object.isFrozen(obj))
+
+ async function loadAccessToken(db) {
+ var _a, _b;
+ const currentUser = await db.getCurrentUser();
+ const { accessToken, accessTokenExpiration, refreshToken, refreshTokenExpiration, claims, } = currentUser;
+ if (!accessToken)
  return;
- if (typeof keyPath !== 'string' && 'length' in keyPath) {
- assert(typeof value !== 'string' && 'length' in value);
- for (var i = 0, l = keyPath.length; i < l; ++i) {
- setByKeyPath(obj, keyPath[i], value[i]);
- }
+ const expTime = (_a = accessTokenExpiration === null || accessTokenExpiration === void 0 ? void 0 : accessTokenExpiration.getTime()) !== null && _a !== void 0 ? _a : Infinity;
+ if (expTime > Date.now()) {
+ return accessToken;
+ }
+ if (!refreshToken) {
+ throw new Error(`Refresh token missing`);
+ }
+ const refreshExpTime = (_b = refreshTokenExpiration === null || refreshTokenExpiration === void 0 ? void 0 : refreshTokenExpiration.getTime()) !== null && _b !== void 0 ? _b : Infinity;
+ if (refreshExpTime <= Date.now()) {
+ throw new Error(`Refresh token has expired`);
+ }
+ const refreshedLogin = await refreshAccessToken(db.cloud.options.databaseUrl, currentUser);
+ await db.table('$logins').update(claims.sub, {
+ accessToken: refreshedLogin.accessToken,
+ accessTokenExpiration: refreshedLogin.accessTokenExpiration,
+ });
+ return refreshedLogin.accessToken;
+ }
+ async function authenticate(url, context, fetchToken, userInteraction, hints) {
+ if (context.accessToken &&
+ context.accessTokenExpiration.getTime() > Date.now()) {
+ return context;
+ }
+ else if (context.refreshToken &&
+ (!context.refreshTokenExpiration ||
+ context.refreshTokenExpiration.getTime() > Date.now())) {
+ return await refreshAccessToken(url, context);
  }
  else {
- var period = keyPath.indexOf('.');
- if (period !== -1) {
- var currentKeyPath = keyPath.substr(0, period);
- var remainingKeyPath = keyPath.substr(period + 1);
- if (remainingKeyPath === '')
- if (value === undefined) {
- if (Array.isArray(obj)) {
- if (!isNaN(parseInt(currentKeyPath)))
- obj.splice(parseInt(currentKeyPath), 1);
- }
- else
- delete obj[currentKeyPath];
- // @ts-ignore: even if currentKeyPath would be numeric string and obj would be array - it works.
- }
- else
- obj[currentKeyPath] = value;
- else {
- //@ts-ignore: even if currentKeyPath would be numeric string and obj would be array - it works.
- var innerObj = obj[currentKeyPath];
- //@ts-ignore: even if currentKeyPath would be numeric string and obj would be array - it works.
- if (!innerObj)
- innerObj = obj[currentKeyPath] = {};
- setByKeyPath(innerObj, remainingKeyPath, value);
- }
+ return await userAuthenticate(context, fetchToken, userInteraction, hints);
+ }
+ }
+ async function refreshAccessToken(url, login) {
+ if (!login.refreshToken)
+ throw new Error(`Cannot refresh token - refresh token is missing.`);
+ if (!login.nonExportablePrivateKey)
+ throw new Error(`login.nonExportablePrivateKey is missing - cannot sign refresh token without a private key.`);
+ const time_stamp = Date.now();
+ const signing_algorithm = 'RSASSA-PKCS1-v1_5';
+ const textEncoder = new TextEncoder();
+ const data = textEncoder.encode(login.refreshToken + time_stamp);
+ const binarySignature = await crypto.subtle.sign(signing_algorithm, login.nonExportablePrivateKey, data);
+ const signature = b64encode(binarySignature);
+ const tokenRequest = {
+ grant_type: 'refresh_token',
+ refresh_token: login.refreshToken,
+ scopes: ['ACCESS_DB'],
+ signature,
+ signing_algorithm,
+ time_stamp,
+ };
+ const res = await fetch(`${url}/token`, {
+ body: JSON.stringify(tokenRequest),
+ method: 'post',
+ headers: { 'Content-Type': 'application/json' },
+ mode: 'cors',
+ });
+ if (res.status !== 200)
+ throw new Error(`RefreshToken: Status ${res.status} from ${url}/token`);
+ const response = await res.json();
+ login.accessToken = response.accessToken;
+ login.accessTokenExpiration = response.accessTokenExpiration
+ ? new Date(response.accessTokenExpiration)
+ : undefined;
+ return login;
+ }
+ async function userAuthenticate(context, fetchToken, userInteraction, hints) {
+ const { privateKey, publicKey } = await crypto.subtle.generateKey({
+ name: 'RSASSA-PKCS1-v1_5',
+ modulusLength: 2048,
+ publicExponent: new Uint8Array([0x01, 0x00, 0x01]),
+ hash: { name: 'SHA-256' },
+ }, false, // Non-exportable...
+ ['sign', 'verify']);
+ context.nonExportablePrivateKey = privateKey; //...but storable!
+ const publicKeySPKI = await crypto.subtle.exportKey('spki', publicKey);
+ const publicKeyPEM = spkiToPEM(publicKeySPKI);
+ context.publicKey = publicKey;
+ try {
+ const response2 = await fetchToken({
+ public_key: publicKeyPEM,
+ hints,
+ });
+ if (response2.type !== 'tokens')
+ throw new Error(`Unexpected response type from token endpoint: ${response2.type}`);
+ context.accessToken = response2.accessToken;
+ context.accessTokenExpiration = new Date(response2.accessTokenExpiration);
+ context.refreshToken = response2.refreshToken;
+ if (response2.refreshTokenExpiration) {
+ context.refreshTokenExpiration = new Date(response2.refreshTokenExpiration);
  }
- else {
- if (value === undefined) {
- if (Array.isArray(obj) && !isNaN(parseInt(keyPath)))
- // @ts-ignore: even if currentKeyPath would be numeric string and obj would be array - it works.
- obj.splice(keyPath, 1);
- //@ts-ignore: even if currentKeyPath would be numeric string and obj would be array - it works.
- else
- delete obj[keyPath];
- //@ts-ignore: even if currentKeyPath would be numeric string and obj would be array - it works.
- }
- else
- obj[keyPath] = value;
+ context.userId = response2.claims.sub;
+ context.email = response2.claims.email;
+ context.name = response2.claims.name;
+ context.claims = response2.claims;
+ if (response2.alerts && response2.alerts.length > 0) {
+ await interactWithUser(userInteraction, {
+ type: 'message-alert',
+ title: 'Authentication Alert',
+ fields: {},
+ alerts: response2.alerts,
+ });
  }
+ return context;
+ }
+ catch (error) {
+ await alertUser(userInteraction, 'Authentication Failed', {
+ type: 'error',
+ messageCode: 'GENERIC_ERROR',
+ message: `We're having a problem to authenticate rigth now.`,
+ messageParams: {}
+ }).catch(() => { });
+ throw error;
  }
  }
- const randomString$1 = typeof self === 'undefined' ? (bytes) => {
- // Node
- const buf = Buffer.alloc(bytes);
- randomFillSync(buf);
- return buf.toString("base64");
- } : (bytes) => {
- // Web
- const buf = new Uint8Array(bytes);
- crypto.getRandomValues(buf);
- return btoa(String.fromCharCode.apply(null, buf));
- };
-
- /** Verifies that given primary key is valid.
- * The reason we narrow validity for valid keys are twofold:
- * 1: Make sure to only support types that can be used as an object index in DBKeyMutationSet.
- * For example, ArrayBuffer cannot be used (gives "object ArrayBuffer") but Uint8Array can be
- * used (gives comma-delimited list of included bytes).
- * 2: Avoid using plain numbers and Dates as keys when they are synced, as they are not globally unique.
- * 3: Since we store the key as a VARCHAR server side in current version, try not promote types that stringifies to become very long server side.
- *
- * @param id
- * @returns
- */
- function isValidSyncableID(id) {
- if (typeof id === "string")
- return true;
- //if (validIDTypes[toStringTag(id)]) return true;
- //if (Array.isArray(id)) return id.every((part) => isValidSyncableID(part));
- if (Array.isArray(id) && id.some(key => isValidSyncableID(key)) && id.every(isValidSyncableIDPart))
- return true;
- return false;
- }
- /** Verifies that given key part is valid.
- * 1: Make sure that arrays of this types are stringified correclty and works with DBKeyMutationSet.
- * For example, ArrayBuffer cannot be used (gives "object ArrayBuffer") but Uint8Array can be
- * used (gives comma-delimited list of included bytes).
- * 2: Since we store the key as a VARCHAR server side in current version, try not promote types that stringifies to become very long server side.
- */
- function isValidSyncableIDPart(part) {
- return typeof part === "string" || typeof part === "number" || Array.isArray(part) && part.every(isValidSyncableIDPart);
+ function spkiToPEM(keydata) {
+ const keydataB64 = b64encode(keydata);
+ const keydataB64Pem = formatAsPem(keydataB64);
+ return keydataB64Pem;
  }
- function isValidAtID(id, idPrefix) {
- return !idPrefix || (typeof id === "string" && id.startsWith(idPrefix));
+ function formatAsPem(str) {
+ let finalString = '-----BEGIN PUBLIC KEY-----\n';
+ while (str.length > 0) {
+ finalString += str.substring(0, 64) + '\n';
+ str = str.substring(64);
+ }
+ finalString = finalString + '-----END PUBLIC KEY-----';
+ return finalString;
  }

2893
- function applyOperation(target, table, op) {
2894
- const tbl = target[table] || (target[table] = {});
2895
- switch (op.type) {
2896
- case "insert":
2897
- // TODO: Don't treat insert and upsert the same?
2898
- case "upsert":
2899
- op.keys.forEach((key, idx) => {
2900
- tbl[key] = {
2901
- type: "ups",
2902
- val: op.values[idx],
2903
- };
2904
- });
2905
- break;
2906
- case "update":
2907
- case "modify": {
2908
- op.keys.forEach((key, idx) => {
2909
- const changeSpec = op.type === "update"
2910
- ? op.changeSpecs[idx]
2911
- : op.changeSpec;
2912
- const entry = tbl[key];
2913
- if (!entry) {
2914
- tbl[key] = {
2915
- type: "upd",
2916
- mod: changeSpec,
2917
- };
2820
+ const { toString: toStr } = {};
2821
+ function getToStringTag(val) {
2822
+ return toStr.call(val).slice(8, -1);
2823
+ }
2824
+ function escapeDollarProps(value) {
2825
+ const keys = Object.keys(value);
2826
+ let dollarKeys = null;
2827
+ for (let i = 0, l = keys.length; i < l; ++i) {
2828
+ if (keys[i][0] === "$") {
+ dollarKeys = dollarKeys || [];
+ dollarKeys.push(keys[i]);
+ }
+ }
+ if (!dollarKeys)
+ return value;
+ const clone = { ...value };
+ for (const k of dollarKeys) {
+ delete clone[k];
+ clone["$" + k] = value[k];
+ }
+ return clone;
+ }
+ const ObjectDef = {
+ replace: escapeDollarProps,
+ };
+ function TypesonSimplified(...typeDefsInputs) {
+ const typeDefs = typeDefsInputs.reduce((p, c) => ({ ...p, ...c }), typeDefsInputs.reduce((p, c) => ({ ...c, ...p }), {}));
+ const protoMap = new WeakMap();
+ return {
+ stringify(value, alternateChannel, space) {
+ const json = JSON.stringify(value, function (key) {
+ const realVal = this[key];
+ const typeDef = getTypeDef(realVal);
+ return typeDef
+ ? typeDef.replace(realVal, alternateChannel, typeDefs)
+ : realVal;
+ }, space);
+ return json;
+ },
+ parse(tson, alternateChannel) {
+ let parent = null;
+ let unescapeParentKeys = [];
+ return JSON.parse(tson, function (key, value) {
+ //
+ // Parent Part
+ //
+ const type = value === null || value === void 0 ? void 0 : value.$t;
+ if (type) {
+ const typeDef = typeDefs[type];
+ value = typeDef
+ ? typeDef.revive(value, alternateChannel, typeDefs)
+ : value;
  }
- else {
- switch (entry.type) {
- case "ups":
- // Adjust the existing upsert with additional updates
- for (const [propPath, value] of Object.entries(changeSpec)) {
- setByKeyPath(entry.val, propPath, value);
- }
- break;
- case "del":
- // No action.
- break;
- case "upd":
- // Adjust existing update with additional updates
- Object.assign(entry.mod, changeSpec); // May work for deep props as well - new keys is added later, right? Does the prop order persist along TSON and all? But it will not be 100% when combined with some server code (seach for "address.city": "Stockholm" comment)
- break;
+ if (value === parent) {
+ // Do what the kid told us to
+ if (unescapeParentKeys.length > 0) {
+ // Unescape dollar props
+ value = { ...value };
+ for (const k of unescapeParentKeys) {
+ value[k.substr(1)] = value[k];
+ delete value[k];
+ }
  }
+ unescapeParentKeys = [];
+ return value;
  }
+ //
+ // Child part
+ //
+ if (key[0] === "$" && key !== "$t") {
+ parent = this;
+ unescapeParentKeys.push(key);
+ }
+ return value;
  });
- break;
+ },
+ };
+ function getTypeDef(realVal) {
+ const type = typeof realVal;
+ switch (typeof realVal) {
+ case "object":
+ case "function": {
+ // "object", "function", null
+ if (realVal === null)
+ return null;
+ const proto = Object.getPrototypeOf(realVal);
+ if (!proto)
+ return ObjectDef;
+ let typeDef = protoMap.get(proto);
+ if (typeDef !== undefined)
+ return typeDef; // Null counts too! So the caching of Array.prototype also counts.
+ const toStringTag = getToStringTag(realVal);
+ const entry = Object.entries(typeDefs).find(([typeName, typeDef]) => { var _a, _b; return (_b = (_a = typeDef === null || typeDef === void 0 ? void 0 : typeDef.test) === null || _a === void 0 ? void 0 : _a.call(typeDef, realVal, toStringTag)) !== null && _b !== void 0 ? _b : typeName === toStringTag; });
+ typeDef = entry === null || entry === void 0 ? void 0 : entry[1];
+ if (!typeDef) {
+ typeDef = Array.isArray(realVal)
+ ? null
+ : typeof realVal === "function"
+ ? typeDefs.function || null
+ : ObjectDef;
+ }
+ protoMap.set(proto, typeDef);
+ return typeDef;
+ }
+ default:
+ return typeDefs[type];
  }
- case "delete":
- op.keys.forEach((key) => {
- tbl[key] = {
- type: "del",
- };
- });
- break;
  }
- return target;
  }
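
The pair above is what makes the `$`-escaping safe: stringify() (via escapeDollarProps) renames every key beginning with `$` to `$$…`, so user data can never masquerade as a `$t` type tag, and parse() records such keys in its child pass and strips the extra `$` in the parent pass. A minimal round-trip sketch in TypeScript (the sample object is illustrative only):

    // Assumes the TypesonSimplified factory defined above, with no extra type defs.
    const tson = TypesonSimplified();
    const json = tson.stringify({ $t: "user data", plain: 1 });
    // In the JSON, the "$t" key is escaped to "$$t" so it cannot collide with type tags.
    const back = tson.parse(json);
    // back deep-equals { $t: "user data", plain: 1 }; the escape is undone on parse.
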

- function applyOperations(target, ops) {
- for (const { table, muts } of ops) {
- for (const mut of muts) {
- applyOperation(target, table, mut);
- }
- }
- }
+ const BisonBinaryTypes = {
+ Blob: {
+ test: (blob, toStringTag) => toStringTag === "Blob",
+ replace: (blob, altChannel) => {
+ const i = altChannel.length;
+ altChannel.push(blob);
+ return {
+ $t: "Blob",
+ mimeType: blob.type,
+ i,
+ };
+ },
+ revive: ({ i, mimeType }, altChannel) => new Blob([altChannel[i]], { type: mimeType }),
+ },
+ };

- function subtractChanges(target, // Server change set
- changesToSubtract // additional mutations on client during syncWithServer()
- ) {
- var _a, _b, _c;
- for (const [table, mutationSet] of Object.entries(changesToSubtract)) {
- for (const [key, mut] of Object.entries(mutationSet)) {
- switch (mut.type) {
- case 'ups':
- {
- const targetMut = (_a = target[table]) === null || _a === void 0 ? void 0 : _a[key];
- if (targetMut) {
- switch (targetMut.type) {
- case 'ups':
- delete target[table][key];
- break;
- case 'del':
- // Leave delete operation.
- // (Don't resurrect objects unintenionally (using tx(get, put) pattern locally))
- break;
- case 'upd':
- delete target[table][key];
- break;
- }
- }
- }
- break;
- case 'del':
- (_b = target[table]) === null || _b === void 0 ? true : delete _b[key];
- break;
- case 'upd': {
- const targetMut = (_c = target[table]) === null || _c === void 0 ? void 0 : _c[key];
- if (targetMut) {
- switch (targetMut.type) {
- case 'ups':
- // Adjust the server upsert with locally updated values.
- for (const [propPath, value] of Object.entries(mut.mod)) {
- setByKeyPath(targetMut.val, propPath, value);
- }
- break;
- case 'del':
- // Leave delete.
- break;
- case 'upd':
- // Remove the local update props from the server update mutation.
- for (const propPath of Object.keys(mut.mod)) {
- delete targetMut.mod[propPath];
- }
- break;
- }
- }
- break;
- }
+ var numberDef = {
+ number: {
+ replace: (num) => {
+ switch (true) {
+ case isNaN(num):
+ return { $t: "number", v: "NaN" };
+ case num === Infinity:
+ return { $t: "number", v: "Infinity" };
+ case num === -Infinity:
+ return { $t: "number", v: "-Infinity" };
+ default:
+ return num;
  }
- }
+ },
+ revive: ({ v }) => Number(v),
+ },
+ };
+
+ const bigIntDef = {
+ bigint: {
+ replace: (realVal) => {
+ return { $t: "bigint", v: "" + realVal };
+ },
+ revive: (obj) => BigInt(obj.v),
+ },
+ };
+
+ var DateDef = {
+ Date: {
+ replace: (date) => ({
+ $t: "Date",
+ v: isNaN(date.getTime()) ? "NaN" : date.toISOString(),
+ }),
+ revive: ({ v }) => new Date(v === "NaN" ? NaN : Date.parse(v)),
+ },
+ };
+
+ var SetDef = {
+ Set: {
+ replace: (set) => ({
+ $t: "Set",
+ v: Array.from(set.entries()),
+ }),
+ revive: ({ v }) => new Set(v),
+ },
+ };
+
+ var MapDef = {
+ Map: {
+ replace: (map) => ({
+ $t: "Map",
+ v: Array.from(map.entries()),
+ }),
+ revive: ({ v }) => new Map(v),
+ },
+ };
+
+ const _global = typeof globalThis !== "undefined"
+ ? globalThis
+ : typeof self !== "undefined"
+ ? self
+ : typeof global !== "undefined"
+ ? global
+ : undefined;
+
+ var TypedArraysDefs = [
+ "Int8Array",
+ "Uint8Array",
+ "Uint8ClampedArray",
+ "Int16Array",
+ "Uint16Array",
+ "Int32Array",
+ "Uint32Array",
+ "Float32Array",
+ "Float64Array",
+ "DataView",
+ "BigInt64Array",
+ "BigUint64Array",
+ ].reduce((specs, typeName) => ({
+ ...specs,
+ [typeName]: {
+ // Replace passes the typed array into $t, buffer so that
+ // the ArrayBuffer typedef takes care of further handling of the buffer:
+ // {$t:"Uint8Array",buffer:{$t:"ArrayBuffer",idx:0}}
+ // CHANGED ABOVE! Now shortcutting that: the sparser typed-array format
+ // contains the b64 property directly.
+ replace: (a, _, typeDefs) => {
+ const result = {
+ $t: typeName,
+ v: typeDefs.ArrayBuffer.replace(a.byteOffset === 0 && a.byteLength === a.buffer.byteLength
+ ? a.buffer
+ : a.buffer.slice(a.byteOffset, a.byteOffset + a.byteLength), _, typeDefs).v,
+ };
+ return result;
+ },
+ revive: ({ v }, _, typeDefs) => {
+ const TypedArray = _global[typeName];
+ return (TypedArray &&
+ new TypedArray(typeDefs.ArrayBuffer.revive({ v }, _, typeDefs)));
+ },
+ },
+ }), {});
+
+ function b64LexEncode(b) {
+ return b64ToLex(b64encode(b));
+ }
+ function b64LexDecode(b64Lex) {
+ return b64decode(lexToB64(b64Lex));
+ }
+ function b64ToLex(base64) {
+ var encoded = "";
+ for (var i = 0, length = base64.length; i < length; i++) {
+ encoded += ENCODE_TABLE[base64[i]];
  }
+ return encoded;
+ }
+ function lexToB64(base64lex) {
+ // only accept string input
+ if (typeof base64lex !== "string") {
+ throw new Error("invalid decoder input: " + base64lex);
+ }
+ var base64 = "";
+ for (var i = 0, length = base64lex.length; i < length; i++) {
+ base64 += DECODE_TABLE[base64lex[i]];
+ }
+ return base64;
+ }
+ const DECODE_TABLE = {
+ "-": "=",
+ "0": "A",
+ "1": "B",
+ "2": "C",
+ "3": "D",
+ "4": "E",
+ "5": "F",
+ "6": "G",
+ "7": "H",
+ "8": "I",
+ "9": "J",
+ A: "K",
+ B: "L",
+ C: "M",
+ D: "N",
+ E: "O",
+ F: "P",
+ G: "Q",
+ H: "R",
+ I: "S",
+ J: "T",
+ K: "U",
+ L: "V",
+ M: "W",
+ N: "X",
+ O: "Y",
+ P: "Z",
+ Q: "a",
+ R: "b",
+ S: "c",
+ T: "d",
+ U: "e",
+ V: "f",
+ W: "g",
+ X: "h",
+ Y: "i",
+ Z: "j",
+ _: "k",
+ a: "l",
+ b: "m",
+ c: "n",
+ d: "o",
+ e: "p",
+ f: "q",
+ g: "r",
+ h: "s",
+ i: "t",
+ j: "u",
+ k: "v",
+ l: "w",
+ m: "x",
+ n: "y",
+ o: "z",
+ p: "0",
+ q: "1",
+ r: "2",
+ s: "3",
+ t: "4",
+ u: "5",
+ v: "6",
+ w: "7",
+ x: "8",
+ y: "9",
+ z: "+",
+ "|": "/",
+ };
+ const ENCODE_TABLE = {};
+ for (const c of Object.keys(DECODE_TABLE)) {
+ ENCODE_TABLE[DECODE_TABLE[c]] = c;
  }

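The remapped alphabet above is what makes the encoding order-preserving: '-' (padding) sorts before the digits, which sort before upper-case, '_' and lower-case letters, mirroring byte order, so encoded keys can be compared as plain strings. A sketch of the property (assuming this bundle's b64encode/b64decode helpers take and return byte buffers):

    // Illustrative only; not part of the package:
    const a = b64LexEncode(new Uint8Array([1, 2]));
    const b = b64LexEncode(new Uint8Array([1, 3]));
    console.assert(a < b); // string order matches byte order
    console.assert(lexToB64(b64ToLex("QUJD")) === "QUJD"); // the two tables invert each other
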
- /** Convert a DBKeyMutationSet (which is an internal format capable of looking up changes per ID)
- * ...into a DBOperationsSet (which is more optimal for performing DB operations into DB (bulkAdd() etc))
- *
- * @param inSet
- * @returns DBOperationsSet representing inSet
- */
- function toDBOperationSet(inSet) {
- // Fictive transaction:
- const txid = randomString$1(16);
- // Convert data into a temporary map to collect mutations of same table and type
- const map = {};
- for (const [table, ops] of Object.entries(inSet)) {
- for (const [key, op] of Object.entries(ops)) {
- const mapEntry = map[table] || (map[table] = {});
- const ops = mapEntry[op.type] || (mapEntry[op.type] = []);
- ops.push(Object.assign({ key }, op)); // DBKeyMutation doesn't contain key, so we need to bring it in.
- }
- }
- // Start computing the resulting format:
- const result = [];
- for (const [table, ops] of Object.entries(map)) {
- const resultEntry = {
- table,
- muts: [],
- };
- for (const [optype, muts] of Object.entries(ops)) {
- switch (optype) {
- case "ups": {
- const op = {
- type: "upsert",
- keys: muts.map(mut => mut.key),
- values: muts.map(mut => mut.val),
- txid
- };
- resultEntry.muts.push(op);
- break;
- }
- case "upd": {
- const op = {
- type: "update",
- keys: muts.map(mut => mut.key),
- changeSpecs: muts.map(mut => mut.mod),
- txid
- };
- resultEntry.muts.push(op);
- break;
- }
- case "del": {
- const op = {
- type: "delete",
- keys: muts.map(mut => mut.key),
- txid,
- };
- resultEntry.muts.push(op);
- break;
- }
- }
- }
- result.push(resultEntry);
+ var ArrayBufferDef = {
+ ArrayBuffer: {
+ replace: (ab) => ({
+ $t: "ArrayBuffer",
+ v: b64LexEncode(ab),
+ }),
+ revive: ({ v }) => {
+ const ba = b64LexDecode(v);
+ return ba.buffer.byteLength === ba.byteLength
+ ? ba.buffer
+ : ba.buffer.slice(ba.byteOffset, ba.byteOffset + ba.byteLength);
+ },
+ },
+ };
+
+ class FakeBlob {
+ constructor(buf, type) {
+ this.buf = buf;
+ this.type = type;
  }
- return result;
  }

- function getDbNameFromDbUrl(dbUrl) {
- const url = new URL(dbUrl);
- return url.pathname === "/"
- ? url.hostname.split('.')[0]
- : url.pathname.split('/')[1];
+ function readBlobSync(b) {
+ const req = new XMLHttpRequest();
+ req.overrideMimeType("text/plain; charset=x-user-defined");
+ req.open("GET", URL.createObjectURL(b), false); // Sync
+ req.send();
+ if (req.status !== 200 && req.status !== 0) {
+ throw new Error("Bad Blob access: " + req.status);
+ }
+ return req.responseText;
  }

- async function listSyncifiedChanges(tablesToSyncify, currentUser, schema, alreadySyncedRealms) {
- if (currentUser.isLoggedIn) {
- if (tablesToSyncify.length > 0) {
- const ignoredRealms = new Set(alreadySyncedRealms || []);
- const inserts = await Promise.all(tablesToSyncify.map(async (table) => {
- const { extractKey } = table.core.schema.primaryKey;
- if (!extractKey)
- return { table: table.name, muts: [] }; // Outbound tables are not synced.
- const dexieCloudTableSchema = schema[table.name];
- const query = (dexieCloudTableSchema === null || dexieCloudTableSchema === void 0 ? void 0 : dexieCloudTableSchema.generatedGlobalId)
- ? table.filter((item) => !ignoredRealms.has(item.realmId || "") && isValidSyncableID(extractKey(item)))
- : table.filter((item) => !ignoredRealms.has(item.realmId || "") && isValidAtID(extractKey(item), dexieCloudTableSchema === null || dexieCloudTableSchema === void 0 ? void 0 : dexieCloudTableSchema.idPrefix));
- const unsyncedObjects = await query.toArray();
- if (unsyncedObjects.length > 0) {
- const mut = {
- type: "insert",
- values: unsyncedObjects,
- keys: unsyncedObjects.map(extractKey),
- userId: currentUser.userId,
- };
- return {
- table: table.name,
- muts: [mut],
- };
- }
- else {
- return {
- table: table.name,
- muts: []
- };
- }
- }));
- return inserts.filter(op => op.muts.length > 0);
- }
+ function string2ArrayBuffer(str) {
+ const array = new Uint8Array(str.length);
+ for (let i = 0; i < str.length; ++i) {
+ array[i] = str.charCodeAt(i); // & 0xff;
  }
- return [];
+ return array.buffer;
  }

- function getTablesToSyncify(db, syncState) {
- const syncedTables = (syncState === null || syncState === void 0 ? void 0 : syncState.syncedTables) || [];
- const syncableTables = getSyncableTables(db);
- const tablesToSyncify = syncableTables.filter((tbl) => !syncedTables.includes(tbl.name));
- return tablesToSyncify;
- }
+ var BlobDef = {
+ Blob: {
+ test: (blob, toStringTag) => toStringTag === "Blob" || blob instanceof FakeBlob,
+ replace: (blob) => ({
+ $t: "Blob",
+ v: blob instanceof FakeBlob
+ ? b64encode(blob.buf)
+ : b64encode(string2ArrayBuffer(readBlobSync(blob))),
+ type: blob.type,
+ }),
+ revive: ({ type, v }) => {
+ const ab = b64decode(v);
+ return typeof Blob !== "undefined"
+ ? new Blob([ab])
+ : new FakeBlob(ab.buffer, type);
+ },
+ },
+ };

- function interactWithUser(userInteraction, req) {
- return new Promise((resolve, reject) => {
- const interactionProps = {
- ...req,
- onSubmit: (res) => {
- userInteraction.next(undefined);
- resolve(res);
- },
- onCancel: () => {
- userInteraction.next(undefined);
- reject(new Dexie.AbortError("User cancelled"));
- },
- };
- userInteraction.next(interactionProps);
- // Start subscribing for external updates to db.cloud.userInteraction, and if so, cancel this request.
- /*const subscription = userInteraction.subscribe((currentInteractionProps) => {
- if (currentInteractionProps !== interactionProps) {
- if (subscription) subscription.unsubscribe();
- if (!done) {
- reject(new Dexie.AbortError("User cancelled"));
- }
- }
- });*/
- });
- }
- function alertUser(userInteraction, title, ...alerts) {
- return interactWithUser(userInteraction, {
- type: 'message-alert',
- title,
- alerts,
- fields: {}
- });
- }
- async function promptForEmail(userInteraction, title, emailHint) {
- let email = emailHint || '';
- while (!email || !/^[\w-\.]+@([\w-]+\.)+[\w-]{2,10}$/.test(email)) {
- email = (await interactWithUser(userInteraction, {
- type: 'email',
- title,
- alerts: email
- ? [
- {
- type: 'error',
- messageCode: 'INVALID_EMAIL',
- message: 'Please enter a valid email address',
- messageParams: {},
- },
- ]
- : [],
- fields: {
- email: {
- type: 'email',
- placeholder: 'you@somedomain.com',
- },
- },
- })).email;
- }
- return email;
- }
- async function promptForOTP(userInteraction, email, alert) {
- const alerts = [
- {
- type: 'info',
- messageCode: 'OTP_SENT',
- message: `A One-Time password has been sent to {email}`,
- messageParams: { email },
+ const builtin = {
+ ...numberDef,
+ ...bigIntDef,
+ ...DateDef,
+ ...SetDef,
+ ...MapDef,
+ ...TypedArraysDefs,
+ ...ArrayBufferDef,
+ ...BlobDef,
+ };
+
+ function Bison(...typeDefsInputs) {
+ const tson = TypesonSimplified(builtin, BisonBinaryTypes, ...typeDefsInputs);
+ return {
+ toBinary(value) {
+ const [blob, json] = this.stringify(value);
+ const lenBuf = new ArrayBuffer(4);
+ new DataView(lenBuf).setUint32(0, blob.size);
+ return new Blob([lenBuf, blob, json]);
+ },
+ stringify(value) {
+ const binaries = [];
+ const json = tson.stringify(value, binaries);
+ const blob = new Blob(binaries.map((b) => {
+ const lenBuf = new ArrayBuffer(4);
+ new DataView(lenBuf).setUint32(0, "byteLength" in b ? b.byteLength : b.size);
+ return new Blob([lenBuf, b]);
+ }));
+ return [blob, json];
  },
- ];
- if (alert) {
- alerts.push(alert);
- }
- const { otp } = await interactWithUser(userInteraction, {
- type: 'otp',
- title: 'Enter OTP',
- alerts,
- fields: {
- otp: {
- type: 'otp',
- label: 'OTP',
- placeholder: 'Paste OTP here',
- },
+ async parse(json, binData) {
+ let pos = 0;
+ const arrayBuffers = [];
+ const buf = await readBlobBinary(binData);
+ const view = new DataView(buf);
+ while (pos < buf.byteLength) {
+ const len = view.getUint32(pos);
+ pos += 4;
+ const ab = buf.slice(pos, pos + len);
+ pos += len;
+ arrayBuffers.push(ab);
+ }
+ return tson.parse(json, arrayBuffers);
  },
- });
- return otp;
+ async fromBinary(blob) {
+ const len = new DataView(await readBlobBinary(blob.slice(0, 4))).getUint32(0);
+ const binData = blob.slice(4, len + 4);
+ const json = await readBlob(blob.slice(len + 4));
+ return await this.parse(json, binData);
+ },
+ };
  }
-
- async function loadAccessToken(db) {
- var _a, _b;
- const currentUser = await db.getCurrentUser();
- const { accessToken, accessTokenExpiration, refreshToken, refreshTokenExpiration, claims, } = currentUser;
- if (!accessToken)
- return;
- const expTime = (_a = accessTokenExpiration === null || accessTokenExpiration === void 0 ? void 0 : accessTokenExpiration.getTime()) !== null && _a !== void 0 ? _a : Infinity;
- if (expTime > Date.now()) {
- return accessToken;
- }
- if (!refreshToken) {
- throw new Error(`Refresh token missing`);
- }
- const refreshExpTime = (_b = refreshTokenExpiration === null || refreshTokenExpiration === void 0 ? void 0 : refreshTokenExpiration.getTime()) !== null && _b !== void 0 ? _b : Infinity;
- if (refreshExpTime <= Date.now()) {
- throw new Error(`Refresh token has expired`);
- }
- const refreshedLogin = await refreshAccessToken(db.cloud.options.databaseUrl, currentUser);
- await db.table('$logins').update(claims.sub, {
- accessToken: refreshedLogin.accessToken,
- accessTokenExpiration: refreshedLogin.accessTokenExpiration,
+ function readBlob(blob) {
+ return new Promise((resolve, reject) => {
+ const reader = new FileReader();
+ reader.onabort = (ev) => reject(new Error("file read aborted"));
+ reader.onerror = (ev) => reject(ev.target.error);
+ reader.onload = (ev) => resolve(ev.target.result);
+ reader.readAsText(blob);
  });
- return refreshedLogin.accessToken;
- }
- async function authenticate(url, context, fetchToken, userInteraction, hints) {
- if (context.accessToken &&
- context.accessTokenExpiration.getTime() > Date.now()) {
- return context;
- }
- else if (context.refreshToken &&
- (!context.refreshTokenExpiration ||
- context.refreshTokenExpiration.getTime() > Date.now())) {
- return await refreshAccessToken(url, context);
- }
- else {
- return await userAuthenticate(context, fetchToken, userInteraction, hints);
- }
  }
- async function refreshAccessToken(url, login) {
- if (!login.refreshToken)
- throw new Error(`Cannot refresh token - refresh token is missing.`);
- if (!login.nonExportablePrivateKey)
- throw new Error(`login.nonExportablePrivateKey is missing - cannot sign refresh token without a private key.`);
- const time_stamp = Date.now();
- const signing_algorithm = 'RSASSA-PKCS1-v1_5';
- const textEncoder = new TextEncoder();
- const data = textEncoder.encode(login.refreshToken + time_stamp);
- const binarySignature = await crypto.subtle.sign(signing_algorithm, login.nonExportablePrivateKey, data);
- const signature = b64encode(binarySignature);
- const tokenRequest = {
- grant_type: 'refresh_token',
- refresh_token: login.refreshToken,
- scopes: ['ACCESS_DB'],
- signature,
- signing_algorithm,
- time_stamp,
- };
- const res = await fetch(`${url}/token`, {
- body: JSON.stringify(tokenRequest),
- method: 'post',
- headers: { 'Content-Type': 'application/json' },
- mode: 'cors',
+ function readBlobBinary(blob) {
+ return new Promise((resolve, reject) => {
+ const reader = new FileReader();
+ reader.onabort = (ev) => reject(new Error("file read aborted"));
+ reader.onerror = (ev) => reject(ev.target.error);
+ reader.onload = (ev) => resolve(ev.target.result);
+ reader.readAsArrayBuffer(blob);
  });
- if (res.status !== 200)
- throw new Error(`RefreshToken: Status ${res.status} from ${url}/token`);
- const response = await res.json();
- login.accessToken = response.accessToken;
- login.accessTokenExpiration = response.accessTokenExpiration
- ? new Date(response.accessTokenExpiration)
- : undefined;
- return login;
  }
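
toBinary()/fromBinary() above define Bison's container layout: a 4-byte big-endian length for the binary section, then the binary section itself (a sequence of 4-byte-length-prefixed chunks collected during stringify), then the JSON text whose {$t:"Blob",i} placeholders index into those chunks. A round-trip sketch (run inside an async function; with no extra type defs, only Blobs travel on the binary channel):

    const bison = Bison();
    const payload = { file: new Blob(["hello"], { type: "text/plain" }) };
    const packed = bison.toBinary(payload); // [u32 length][binary chunks][json]
    const back = await bison.fromBinary(packed);
    // back.file is a Blob again, with its type restored from the placeholder's mimeType.
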
- async function userAuthenticate(context, fetchToken, userInteraction, hints) {
- const { privateKey, publicKey } = await crypto.subtle.generateKey({
- name: 'RSASSA-PKCS1-v1_5',
- modulusLength: 2048,
- publicExponent: new Uint8Array([0x01, 0x00, 0x01]),
- hash: { name: 'SHA-256' },
- }, false, // Non-exportable...
- ['sign', 'verify']);
- context.nonExportablePrivateKey = privateKey; //...but storable!
- const publicKeySPKI = await crypto.subtle.exportKey('spki', publicKey);
- const publicKeyPEM = spkiToPEM(publicKeySPKI);
- context.publicKey = publicKey;
- try {
- const response2 = await fetchToken({
- public_key: publicKeyPEM,
- hints,
- });
- if (response2.type !== 'tokens')
- throw new Error(`Unexpected response type from token endpoint: ${response2.type}`);
- context.accessToken = response2.accessToken;
- context.accessTokenExpiration = new Date(response2.accessTokenExpiration);
- context.refreshToken = response2.refreshToken;
- if (response2.refreshTokenExpiration) {
- context.refreshTokenExpiration = new Date(response2.refreshTokenExpiration);
- }
- context.userId = response2.claims.sub;
- context.email = response2.claims.email;
- context.name = response2.claims.name;
- context.claims = response2.claims;
- if (response2.alerts && response2.alerts.length > 0) {
- await interactWithUser(userInteraction, {
- type: 'message-alert',
- title: 'Authentication Alert',
- fields: {},
- alerts: response2.alerts,
- });
- }
- return context;
- }
- catch (error) {
- await alertUser(userInteraction, 'Authentication Failed', {
- type: 'error',
- messageCode: 'GENERIC_ERROR',
- message: `We're having a problem to authenticate rigth now.`,
- messageParams: {}
- }).catch(() => { });
- throw error;
+
+ /** The undefined type is not part of builtin but can be manually added.
+ * The reason for supporting undefined is so that the following object is revived correctly:
+ *
+ * {foo: undefined}
+ *
+ * Without including this typedef, the revived object would just be {}.
+ * With this typedef included, the revived object is {foo: undefined}.
+ */
+ var undefinedDef = {
+ undefined: {
+ replace: () => {
+ },
+ revive: () => undefined,
+ },
+ };
+
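
All defs in this file follow the same three-slot contract. Sketched as a TypeScript interface for orientation (a summary of the convention as observed above, not a type exported by the package):

    interface TypeDefSketch<T> {
      // Optional custom match; when absent, a value matches when its
      // Object.prototype.toString tag equals the def's key name.
      test?: (val: unknown, toStringTag: string) => boolean;
      // Turn the value into a JSON-friendly token, optionally pushing
      // binaries onto the alternate channel (as BisonBinaryTypes.Blob does).
      replace: (val: T, altChannel?: unknown[], typeDefs?: object) => unknown;
      // Recreate the value from the token produced by replace().
      revive: (token: any, altChannel?: unknown[], typeDefs?: object) => T;
    }
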
+ // Since server revisions are stored in bigints, we need to handle clients without
+ // bigint support to not fail when serverRevision is passed over to client.
+ // We need to not fail when reviving it and we need to somehow store the information.
+ // Since the revived version will later on be put into indexedDB we have another
+ // issue: When reading it back from indexedDB we will get a poco object that we
+ // cannot replace correctly when sending it to server. So we will also need
+ // to do an explicit workaround in the protocol where a bigint is supported.
+ // The workaround should be there regardless if browser supports BigInt or not, because
+ // the serverRev might have been stored in IDB before the browser was upgraded to support bigint.
+ //
+ // if (typeof serverRev.rev !== "bigint")
+ // if (hasBigIntSupport)
+ // serverRev.rev = bigIntDef.bigint.revive(server.rev)
+ // else
+ // serverRev.rev = new FakeBigInt(server.rev)
+ const hasBigIntSupport = typeof BigInt(0) === 'bigint';
+ class FakeBigInt {
+ constructor(value) {
+ this.v = value;
  }
- }
- function spkiToPEM(keydata) {
- const keydataB64 = b64encode(keydata);
- const keydataB64Pem = formatAsPem(keydataB64);
- return keydataB64Pem;
- }
- function formatAsPem(str) {
- let finalString = '-----BEGIN PUBLIC KEY-----\n';
- while (str.length > 0) {
- finalString += str.substring(0, 64) + '\n';
- str = str.substring(64);
+ toString() {
+ return this.v;
  }
- finalString = finalString + '-----END PUBLIC KEY-----';
- return finalString;
  }
+ const defs = {
+ ...undefinedDef,
+ ...(hasBigIntSupport
+ ? {}
+ : {
+ bigint: {
+ test: (val) => val instanceof FakeBigInt,
+ replace: (fakeBigInt) => {
+ return {
+ $t: 'bigint',
+ ...fakeBigInt
+ };
+ },
+ revive: ({ v, }) => new FakeBigInt(v)
+ }
+ })
+ };
+ const TSON = TypesonSimplified(builtin, defs);
+ const BISON = Bison(defs);

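With those two instances in place, TSON covers the JSON transport and BISON the binary-capable one. A quick round-trip with TSON (sketch; the payload shape follows the DateDef earlier in this diff):

    const s = TSON.stringify({ when: new Date(1500000000000) });
    // → '{"when":{"$t":"Date","v":"2017-07-14T02:40:00.000Z"}}'
    const o = TSON.parse(s);
    // o.when is a Date again; o.when.getTime() === 1500000000000
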
  class HttpError extends Error {
  constructor(res, message) {
@@ -3373,6 +3357,7 @@ async function syncWithServer(changes, syncState, baseRevs, db, databaseUrl, sch
  headers.Authorization = `Bearer ${accessToken}`;
  }
  const syncRequest = {
+ v: 2,
  dbID: syncState === null || syncState === void 0 ? void 0 : syncState.remoteDbId,
  clientIdentity,
  schema: schema || {},
@@ -3499,8 +3484,8 @@ async function updateBaseRevs(db, schema, latestRevisions, serverRev) {

  function getLatestRevisionsPerTable(clientChangeSet, lastRevisions = {}) {
  for (const { table, muts } of clientChangeSet) {
- const lastRev = muts.length > 0 ? muts[muts.length - 1].rev || 0 : 0;
- lastRevisions[table] = lastRev;
+ const lastRev = muts.length > 0 ? muts[muts.length - 1].rev : null;
+ lastRevisions[table] = lastRev || lastRevisions[table] || 0;
  }
  return lastRevisions;
  }
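
The two replaced lines fix a revision regression: a table whose mutation list is empty, or whose last mutation carries no rev, no longer clobbers the previously known revision with 0. Illustrated (values are hypothetical):

    // Given lastRevisions = { todos: 42 } and clientChangeSet = [{ table: "todos", muts: [] }]:
    // beta.10: lastRev = 0    → lastRevisions.todos = 0                    (revision lost)
    // beta.11: lastRev = null → lastRevisions.todos = null || 42 || 0 = 42 (revision kept)
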
@@ -3821,16 +3806,35 @@ function MessagesFromServerConsumer(db) {
  const readyToServe = new BehaviorSubject(true);
  const event = new BehaviorSubject(null);
  let isWorking = false;
+ let loopWarning = 0;
+ let loopDetection = [0, 0, 0, 0, 0, 0, 0, 0, 0, Date.now()];
  event.subscribe(async () => {
  if (isWorking)
  return;
  if (queue.length > 0) {
  isWorking = true;
+ loopDetection.shift();
+ loopDetection.push(Date.now());
  readyToServe.next(false);
  try {
  await consumeQueue();
  }
  finally {
+ if (loopDetection[loopDetection.length - 1] - loopDetection[0] < 10000) {
+ // Ten loops within 10 seconds. Slow down!
+ if (Date.now() - loopWarning < 5000) {
+ // Last time we did this, we ended up here too. Wait for a minute.
+ console.warn(`Slowing down websocket loop for one minute`);
+ loopWarning = Date.now() + 60000;
+ await new Promise(resolve => setTimeout(resolve, 60000));
+ }
+ else {
+ // This is a one-time event. Just pause 10 seconds.
+ console.warn(`Slowing down websocket loop for 10 seconds`);
+ loopWarning = Date.now() + 10000;
+ await new Promise(resolve => setTimeout(resolve, 10000));
+ }
+ }
  isWorking = false;
  readyToServe.next(true);
  }
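
The detector added here is a fixed-size sliding window over queue-run timestamps: if the tenth-from-last run happened less than ten seconds ago, the loop is spinning and gets paused (10 s the first time, 60 s when it trips again shortly after a pause). The core mechanism, extracted as a TypeScript sketch:

    const timestamps: number[] = new Array(10).fill(0);
    function loopDetected(): boolean {
      timestamps.shift();          // drop the oldest of the last 10 runs
      timestamps.push(Date.now()); // record this run
      return timestamps[timestamps.length - 1] - timestamps[0] < 10_000;
    }
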
@@ -3846,6 +3850,9 @@ function MessagesFromServerConsumer(db) {
  const msg = queue.shift();
  try {
  console.debug('processing msg', msg);
+ // If the sync worker or service worker is syncing, wait 'til they're done.
+ // There's no need to have two channels at the same time - even though it wouldn't
+ // be a problem - this is an optimization.
  await db.cloud.syncState
  .pipe(filter(({ phase }) => phase === 'in-sync' || phase === 'error'), take(1))
  .toPromise();
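
Awaiting a terminal sync phase this way is the usual rxjs "first matching value as a promise" idiom. The same pattern, self-contained with a hypothetical subject:

    import { BehaviorSubject } from 'rxjs';
    import { filter, take } from 'rxjs/operators';

    const phase$ = new BehaviorSubject<'pulling' | 'in-sync' | 'error'>('pulling');
    const settled = phase$
      .pipe(filter((p) => p === 'in-sync' || p === 'error'), take(1))
      .toPromise(); // rxjs 6 style, matching this bundle
    phase$.next('in-sync'); // settles the promise with 'in-sync'
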
@@ -3869,26 +3876,23 @@ function MessagesFromServerConsumer(db) {
  // in turn will lead to that connectWebSocket.ts will reconnect the socket with the
  // new token. So we don't need to do anything more here.
  break;
- case 'rev':
- if (!(persistedSyncState === null || persistedSyncState === void 0 ? void 0 : persistedSyncState.serverRevision) ||
- compareBigInts(persistedSyncState.serverRevision, msg.rev) < 0) {
- triggerSync(db, "pull");
- }
- break;
  case 'realm-added':
  if (!((_a = persistedSyncState === null || persistedSyncState === void 0 ? void 0 : persistedSyncState.realms) === null || _a === void 0 ? void 0 : _a.includes(msg.realm))) {
- triggerSync(db, "pull");
+ triggerSync(db, 'pull');
  }
  break;
  case 'realm-removed':
  if ((_b = persistedSyncState === null || persistedSyncState === void 0 ? void 0 : persistedSyncState.realms) === null || _b === void 0 ? void 0 : _b.includes(msg.realm)) {
- triggerSync(db, "pull");
+ triggerSync(db, 'pull');
  }
  break;
+ case 'realms-changed':
+ triggerSync(db, 'pull');
+ break;
  case 'changes':
  console.debug('changes');
  if (((_c = db.cloud.syncState.value) === null || _c === void 0 ? void 0 : _c.phase) === 'error') {
- triggerSync(db, "pull");
+ triggerSync(db, 'pull');
  break;
  }
  await db.transaction('rw', db.dx.tables, async (tx) => {
@@ -3911,16 +3915,34 @@ function MessagesFromServerConsumer(db) {
  return; // Initial sync must have taken place - otherwise, ignore this.
  }
  // Verify again in ACID tx that we're on same server revision.
- if (compareBigInts(msg.baseRev, syncState.serverRevision) !== 0) {
+ if (msg.baseRev !== syncState.serverRevision) {
  console.debug(`baseRev (${msg.baseRev}) differs from our serverRevision in syncState (${syncState.serverRevision})`);
+ // Should we trigger a sync now? No. This is a normal case
+ // when another local peer (such as the SW or a websocket channel on another tab) has
+ // updated syncState from new server information but we are not aware yet. It would
+ // be unnecessary to do a sync in that case. Instead, the caller of this consumeQueue()
+ // function will do readyToServe.next(true) right after this return, which will lead
+ // to a "ready" message being sent to server with the new accurate serverRev we have,
+ // so that the next message indeed will be correct.
+ if (typeof msg.baseRev === 'string' && // v2 format
+ (typeof syncState.serverRevision === 'bigint' || // v1 format
+ typeof syncState.serverRevision === 'object') // v1 format old browser
+ ) {
+ // The reason for the diff seems to be that the server has migrated the revision format.
+ // Do a full sync to update the revision format.
+ // If we don't do a sync request now, we could get stuck in an endless loop.
+ triggerSync(db, 'pull');
+ }
  return; // Ignore message
  }
  // Verify also that the message is based on the exact same set of realms
- const ourRealmSetHash = await Dexie.waitFor(computeRealmSetHash(syncState));
+ const ourRealmSetHash = await Dexie.waitFor(
+ // Keep the transaction alive during this non-IDB work
+ computeRealmSetHash(syncState));
  console.debug('ourRealmSetHash', ourRealmSetHash);
  if (ourRealmSetHash !== msg.realmSetHash) {
  console.debug('not same realmSetHash', msg.realmSetHash);
- triggerSync(db, "pull");
+ triggerSync(db, 'pull');
  // The message isn't based on the same realms.
  // Trigger a sync instead to resolve all things up.
  return;
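
The typeof test above doubles as a wire-format version check: v2 server revisions arrive as strings, while v1 stored them as bigints (or as FakeBigInt plain objects on engines without BigInt support). Only that v1-to-v2 mismatch forces a pull; any other baseRev difference is left to the subsequent "ready" handshake. As a sketch:

    // Mirrors the condition above (illustrative helper, not part of the package):
    function isMigratedRevisionFormat(baseRev: unknown, stored: unknown): boolean {
      return (
        typeof baseRev === 'string' && // v2 format
        (typeof stored === 'bigint' || typeof stored === 'object') // v1 formats
      );
    }
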
@@ -3932,12 +3954,14 @@ function MessagesFromServerConsumer(db) {
  clientChanges = await listClientChanges(mutationTables, db);
  console.debug('msg queue: client changes', clientChanges);
  }
- const filteredChanges = filterServerChangesThroughAddedClientChanges(msg.changes, clientChanges);
- //
- // apply server changes
- //
- console.debug('applying filtered server changes', filteredChanges);
- await applyServerChanges(filteredChanges, db);
+ if (msg.changes.length > 0) {
+ const filteredChanges = filterServerChangesThroughAddedClientChanges(msg.changes, clientChanges);
+ //
+ // apply server changes
+ //
+ console.debug('applying filtered server changes', filteredChanges);
+ await applyServerChanges(filteredChanges, db);
+ }
  // Update latest revisions per table in case there are unsynced changes
  // This can be a real case in future when we allow non-eager sync.
  // It can actually happen now too, but very rarely.
@@ -5018,18 +5042,19 @@ const SERVER_PING_TIMEOUT = 20000;
  const CLIENT_PING_INTERVAL = 30000;
  const FAIL_RETRY_WAIT_TIME = 60000;
  class WSObservable extends Observable$1 {
- constructor(databaseUrl, rev, clientIdentity, messageProducer, webSocketStatus, token, tokenExpiration) {
- super((subscriber) => new WSConnection(databaseUrl, rev, clientIdentity, token, tokenExpiration, subscriber, messageProducer, webSocketStatus));
+ constructor(databaseUrl, rev, realmSetHash, clientIdentity, messageProducer, webSocketStatus, token, tokenExpiration) {
+ super((subscriber) => new WSConnection(databaseUrl, rev, realmSetHash, clientIdentity, token, tokenExpiration, subscriber, messageProducer, webSocketStatus));
  }
  }
  let counter = 0;
  class WSConnection extends Subscription$1 {
- constructor(databaseUrl, rev, clientIdentity, token, tokenExpiration, subscriber, messageProducer, webSocketStatus) {
+ constructor(databaseUrl, rev, realmSetHash, clientIdentity, token, tokenExpiration, subscriber, messageProducer, webSocketStatus) {
  super(() => this.teardown());
  this.id = ++counter;
  console.debug('New WebSocket Connection', this.id, token ? 'authorized' : 'unauthorized');
  this.databaseUrl = databaseUrl;
  this.rev = rev;
+ this.realmSetHash = realmSetHash;
  this.clientIdentity = clientIdentity;
  this.token = token;
  this.tokenExpiration = tokenExpiration;
@@ -5134,7 +5159,9 @@ class WSConnection extends Subscription$1 {
  const searchParams = new URLSearchParams();
  if (this.subscriber.closed)
  return;
+ searchParams.set('v', "2");
  searchParams.set('rev', this.rev);
+ searchParams.set('realmsHash', this.realmSetHash);
  searchParams.set('clientId', this.clientIdentity);
  if (this.token) {
  searchParams.set('token', this.token);
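
The v2 handshake therefore carries up to five query parameters (token only when logged in). Illustrative output of the code above, with placeholder values; the host and path come from databaseUrl elsewhere in this file:

    const searchParams = new URLSearchParams();
    searchParams.set('v', '2');
    searchParams.set('rev', 'AAAB17');          // placeholder revision
    searchParams.set('realmsHash', 'Zm9vYmFy'); // placeholder hash
    searchParams.set('clientId', 'client-123'); // placeholder identity
    // searchParams.toString() → "v=2&rev=AAAB17&realmsHash=Zm9vYmFy&clientId=client-123"
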
@@ -5228,12 +5255,12 @@ function connectWebSocket(db) {
  function createObservable() {
  return db.cloud.persistedSyncState.pipe(filter(syncState => syncState === null || syncState === void 0 ? void 0 : syncState.serverRevision), // Don't connect before an initial sync has been performed.
  take(1), // Don't continue waking up whenever syncState changes
- switchMap(() => db.cloud.currentUser), switchMap((userLogin) => userIsReallyActive.pipe(map((isActive) => (isActive ? userLogin : null)))), switchMap((userLogin) =>
+ switchMap((syncState) => db.cloud.currentUser.pipe(map(userLogin => [userLogin, syncState]))), switchMap(([userLogin, syncState]) => userIsReallyActive.pipe(map((isActive) => [isActive ? userLogin : null, syncState]))), switchMap(async ([userLogin, syncState]) => [userLogin, await computeRealmSetHash(syncState)]), switchMap(([userLogin, realmSetHash]) =>
  // Let the server end query changes from the last entry of the same client-ID and forward.
  // If no new entries, server won't bother the client. If new entries, server sends only those
  // and the baseRev of the last from same client-ID.
  userLogin
- ? new WSObservable(db.cloud.options.databaseUrl, db.cloud.persistedSyncState.value.serverRevision, db.cloud.persistedSyncState.value.clientIdentity, messageProducer, db.cloud.webSocketStatus, userLogin.accessToken, userLogin.accessTokenExpiration)
+ ? new WSObservable(db.cloud.options.databaseUrl, db.cloud.persistedSyncState.value.serverRevision, realmSetHash, db.cloud.persistedSyncState.value.clientIdentity, messageProducer, db.cloud.webSocketStatus, userLogin.accessToken, userLogin.accessTokenExpiration)
  : from$1([])), catchError((error) => {
  if ((error === null || error === void 0 ? void 0 : error.name) === 'TokenExpiredError') {
  console.debug('WebSocket observable: Token expired. Refreshing token...');
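
The rewritten pipe threads syncState through every step so the realm-set hash is computed before the socket opens; note that switchMap accepts a Promise-returning (async) projector. The pattern in isolation, with hypothetical names:

    import { of } from 'rxjs';
    import { switchMap } from 'rxjs/operators';

    declare function computeHash(state: { realms: string[] }): Promise<string>;
    declare function openConnection(state: { realms: string[] }, hash: string): Promise<unknown>;

    of({ realms: ['rlm-1'] }).pipe(
      switchMap(async (state) => [state, await computeHash(state)] as const),
      switchMap(([state, hash]) => openConnection(state, hash)) // a Promise is a valid ObservableInput
    );
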
@@ -5353,22 +5380,26 @@ function LocalSyncWorker(db, cloudOptions, cloudSchema) {
  //let periodicSyncHandler: ((event: Event) => void) | null = null;
  let cancelToken = { cancelled: false };
  function syncAndRetry(purpose, retryNum = 1) {
- syncIfPossible(db, cloudOptions, cloudSchema, {
- cancelToken,
- retryImmediatelyOnFetchError: true,
- purpose
- }).catch((e) => {
- console.error('error in syncIfPossible()', e);
- if (cancelToken.cancelled) {
- stop();
- }
- else if (retryNum < 3) {
- // Mimic service worker sync event: retry 3 times
- // * first retry after 5 minutes
- // * second retry 15 minutes later
- setTimeout(() => syncAndRetry(purpose, retryNum + 1), [0, 5, 15][retryNum] * MINUTES);
- }
- });
+ // Use setTimeout() to get onto a clean stack and
+ // break free from possible active transaction:
+ setTimeout(() => {
+ syncIfPossible(db, cloudOptions, cloudSchema, {
+ cancelToken,
+ retryImmediatelyOnFetchError: true,
+ purpose,
+ }).catch((e) => {
+ console.error('error in syncIfPossible()', e);
+ if (cancelToken.cancelled) {
+ stop();
+ }
+ else if (retryNum < 3) {
+ // Mimic service worker sync event: retry 3 times
+ // * first retry after 5 minutes
+ // * second retry 15 minutes later
+ setTimeout(() => syncAndRetry(purpose, retryNum + 1), [0, 5, 15][retryNum] * MINUTES);
+ }
+ });
+ }, 0);
  }
  const start = () => {
  // Sync eagerly whenever a change has happened (+ initially when there's no syncState yet)
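
The setTimeout(..., 0) wrapper is the substance of this hunk: Dexie tracks the active transaction through the calling context, so a sync started synchronously from a change callback would run inside that still-open transaction. Deferring one macrotask starts it on a clean stack. Sketch (doSync stands in for the syncIfPossible call):

    setTimeout(() => {
      // Runs outside whatever Dexie transaction was active at schedule time.
      doSync().catch((e) => console.error('sync failed', e));
    }, 0);
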
@@ -5376,7 +5407,7 @@ function LocalSyncWorker(db, cloudOptions, cloudSchema) {
  console.debug('Starting LocalSyncWorker', db.localSyncEvent['id']);
  localSyncEventSubscription = db.localSyncEvent.subscribe(({ purpose }) => {
  try {
- syncAndRetry(purpose || "pull");
+ syncAndRetry(purpose || 'pull');
  }
  catch (err) {
  console.error('What-the....', err);
@@ -5954,6 +5985,7 @@ function syncDB(dbName, purpose) {
  // Avoid race conditions.
  managedDBs.delete(db.name);
  }
+ console.debug(`Dexie Cloud SW: Closing Dexie instance for ${dbName}`);
  db.dx.close();
  return false;
  }
@@ -6000,7 +6032,7 @@ if (!DISABLE_SERVICEWORKER_STRATEGY) {
  // Mimic background sync behavior - retry in X minutes on failure.
  // But with a shorter timeout and more attempts.
  const syncAndRetry = (num = 1) => {
- return syncDB(dbName, event.data.purpuse || "pull").catch(async (e) => {
+ return syncDB(dbName, event.data.purpose || "pull").catch(async (e) => {
  if (num === 3)
  throw e;
  await sleep(60000); // 1 minute
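
The service-worker ladder is denser than the page-side [0, 5, 15]-minute schedule: up to three attempts with a one-minute sleep between them. The hunk is truncated here, so the recursive continuation in this sketch is inferred rather than quoted:

    const syncAndRetry = (num = 1): Promise<unknown> =>
      syncDB(dbName, event.data.purpose || "pull").catch(async (e) => {
        if (num === 3) throw e;
        await sleep(60000); // 1 minute
        return syncAndRetry(num + 1); // inferred continuation
      });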