dexie-cloud-addon 1.0.0-beta.10 → 1.0.0-beta.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/modern/dexie-cloud-addon.js +189 -158
- package/dist/modern/dexie-cloud-addon.js.map +1 -1
- package/dist/modern/dexie-cloud-addon.min.js +1 -1
- package/dist/modern/dexie-cloud-addon.min.js.map +1 -1
- package/dist/modern/service-worker.js +1208 -1176
- package/dist/modern/service-worker.js.map +1 -1
- package/dist/modern/service-worker.min.js +1 -1
- package/dist/modern/service-worker.min.js.map +1 -1
- package/dist/module-es5/dexie-cloud-addon.js +259 -207
- package/dist/module-es5/dexie-cloud-addon.js.map +1 -1
- package/dist/module-es5/dexie-cloud-addon.min.js +1 -1
- package/dist/module-es5/dexie-cloud-addon.min.js.map +1 -1
- package/dist/types/WSObservable.d.ts +11 -6
- package/dist/types/WebSocketStatus.d.ts +1 -0
- package/dist/types/helpers/BroadcastedLocalEvent.d.ts +8 -0
- package/dist/types/helpers/visibleState.d.ts +1 -0
- package/dist/types/sync/syncServerToClientOnly.d.ts +3 -0
- package/dist/types/types/CloudConnectionStatus.d.ts +0 -0
- package/dist/types/types/ConnectionStatus.d.ts +0 -0
- package/dist/types/types/LoginState.d.ts +41 -0
- package/dist/types/types/SyncConnectionStatus.d.ts +1 -0
- package/dist/types/types/SyncFlowStatus.d.ts +6 -0
- package/dist/types/types/SyncStatus.d.ts +6 -0
- package/dist/umd/dexie-cloud-addon.js +259 -207
- package/dist/umd/dexie-cloud-addon.js.map +1 -1
- package/dist/umd/dexie-cloud-addon.min.js +1 -1
- package/dist/umd/dexie-cloud-addon.min.js.map +1 -1
- package/dist/umd/service-worker.js +1208 -1176
- package/dist/umd/service-worker.js.map +1 -1
- package/dist/umd/service-worker.min.js +1 -1
- package/dist/umd/service-worker.min.js.map +1 -1
- package/dist/umd-modern/dexie-cloud-addon.js +188 -157
- package/dist/umd-modern/dexie-cloud-addon.js.map +1 -1
- package/package.json +2 -2
|
@@ -2127,1234 +2127,1218 @@
|
|
|
2127
2127
|
return timeoutWith(due, throwError(new TimeoutError()), scheduler);
|
|
2128
2128
|
}
|
|
2129
2129
|
|
|
2130
|
-
const
|
|
2131
|
-
|
|
2132
|
-
|
|
2133
|
-
|
|
2134
|
-
|
|
2135
|
-
|
|
2136
|
-
|
|
2137
|
-
|
|
2138
|
-
|
|
2139
|
-
|
|
2140
|
-
|
|
2130
|
+
//const hasSW = 'serviceWorker' in navigator;
|
|
2131
|
+
let hasComplainedAboutSyncEvent = false;
|
|
2132
|
+
async function registerSyncEvent(db, purpose) {
|
|
2133
|
+
try {
|
|
2134
|
+
// Send sync event to SW:
|
|
2135
|
+
const sw = await navigator.serviceWorker.ready;
|
|
2136
|
+
if (purpose === "push" && sw.sync) {
|
|
2137
|
+
await sw.sync.register(`dexie-cloud:${db.name}`);
|
|
2138
|
+
}
|
|
2139
|
+
if (sw.active) {
|
|
2140
|
+
// Use postMessage for pull syncs and for browsers not supporting sync event (Firefox, Safari).
|
|
2141
|
+
// Also chromium based browsers with sw.sync as a fallback for sleepy sync events not taking action for a while.
|
|
2142
|
+
sw.active.postMessage({
|
|
2143
|
+
type: 'dexie-cloud-sync',
|
|
2144
|
+
dbName: db.name,
|
|
2145
|
+
purpose
|
|
2146
|
+
});
|
|
2141
2147
|
}
|
|
2148
|
+
else {
|
|
2149
|
+
console.error(`Dexie Cloud: There's no active service worker. Can this ever happen??`);
|
|
2150
|
+
}
|
|
2151
|
+
return;
|
|
2142
2152
|
}
|
|
2143
|
-
|
|
2144
|
-
|
|
2145
|
-
|
|
2146
|
-
|
|
2147
|
-
|
|
2148
|
-
clone["$" + k] = value[k];
|
|
2153
|
+
catch (e) {
|
|
2154
|
+
if (!hasComplainedAboutSyncEvent) {
|
|
2155
|
+
console.debug(`Dexie Cloud: Could not register sync event`, e);
|
|
2156
|
+
hasComplainedAboutSyncEvent = true;
|
|
2157
|
+
}
|
|
2149
2158
|
}
|
|
2150
|
-
return clone;
|
|
2151
2159
|
}
|
|
2152
|
-
|
|
2153
|
-
|
|
2154
|
-
|
|
2155
|
-
|
|
2156
|
-
|
|
2157
|
-
|
|
2158
|
-
|
|
2159
|
-
|
|
2160
|
-
|
|
2161
|
-
|
|
2162
|
-
|
|
2163
|
-
|
|
2164
|
-
|
|
2165
|
-
: realVal;
|
|
2166
|
-
}, space);
|
|
2167
|
-
return json;
|
|
2168
|
-
},
|
|
2169
|
-
parse(tson, alternateChannel) {
|
|
2170
|
-
let parent = null;
|
|
2171
|
-
let unescapeParentKeys = [];
|
|
2172
|
-
return JSON.parse(tson, function (key, value) {
|
|
2173
|
-
//
|
|
2174
|
-
// Parent Part
|
|
2175
|
-
//
|
|
2176
|
-
const type = value === null || value === void 0 ? void 0 : value.$t;
|
|
2177
|
-
if (type) {
|
|
2178
|
-
const typeDef = typeDefs[type];
|
|
2179
|
-
value = typeDef
|
|
2180
|
-
? typeDef.revive(value, alternateChannel, typeDefs)
|
|
2181
|
-
: value;
|
|
2182
|
-
}
|
|
2183
|
-
if (value === parent) {
|
|
2184
|
-
// Do what the kid told us to
|
|
2185
|
-
if (unescapeParentKeys.length > 0) {
|
|
2186
|
-
// Unescape dollar props
|
|
2187
|
-
value = { ...value };
|
|
2188
|
-
for (const k of unescapeParentKeys) {
|
|
2189
|
-
value[k.substr(1)] = value[k];
|
|
2190
|
-
delete value[k];
|
|
2191
|
-
}
|
|
2192
|
-
}
|
|
2193
|
-
unescapeParentKeys = [];
|
|
2194
|
-
return value;
|
|
2195
|
-
}
|
|
2196
|
-
//
|
|
2197
|
-
// Child part
|
|
2198
|
-
//
|
|
2199
|
-
if (key[0] === "$" && key !== "$t") {
|
|
2200
|
-
parent = this;
|
|
2201
|
-
unescapeParentKeys.push(key);
|
|
2202
|
-
}
|
|
2203
|
-
return value;
|
|
2204
|
-
});
|
|
2205
|
-
},
|
|
2206
|
-
};
|
|
2207
|
-
function getTypeDef(realVal) {
|
|
2208
|
-
const type = typeof realVal;
|
|
2209
|
-
switch (typeof realVal) {
|
|
2210
|
-
case "object":
|
|
2211
|
-
case "function": {
|
|
2212
|
-
// "object", "function", null
|
|
2213
|
-
if (realVal === null)
|
|
2214
|
-
return null;
|
|
2215
|
-
const proto = Object.getPrototypeOf(realVal);
|
|
2216
|
-
if (!proto)
|
|
2217
|
-
return ObjectDef;
|
|
2218
|
-
let typeDef = protoMap.get(proto);
|
|
2219
|
-
if (typeDef !== undefined)
|
|
2220
|
-
return typeDef; // Null counts to! So the caching of Array.prototype also counts.
|
|
2221
|
-
const toStringTag = getToStringTag(realVal);
|
|
2222
|
-
const entry = Object.entries(typeDefs).find(([typeName, typeDef]) => { var _a, _b; return (_b = (_a = typeDef === null || typeDef === void 0 ? void 0 : typeDef.test) === null || _a === void 0 ? void 0 : _a.call(typeDef, realVal, toStringTag)) !== null && _b !== void 0 ? _b : typeName === toStringTag; });
|
|
2223
|
-
typeDef = entry === null || entry === void 0 ? void 0 : entry[1];
|
|
2224
|
-
if (!typeDef) {
|
|
2225
|
-
typeDef = Array.isArray(realVal)
|
|
2226
|
-
? null
|
|
2227
|
-
: typeof realVal === "function"
|
|
2228
|
-
? typeDefs.function || null
|
|
2229
|
-
: ObjectDef;
|
|
2230
|
-
}
|
|
2231
|
-
protoMap.set(proto, typeDef);
|
|
2232
|
-
return typeDef;
|
|
2160
|
+
async function registerPeriodicSyncEvent(db) {
|
|
2161
|
+
var _a;
|
|
2162
|
+
try {
|
|
2163
|
+
// Register periodicSync event to SW:
|
|
2164
|
+
// @ts-ignore
|
|
2165
|
+
const { periodicSync } = await navigator.serviceWorker.ready;
|
|
2166
|
+
if (periodicSync) {
|
|
2167
|
+
try {
|
|
2168
|
+
await periodicSync.register(`dexie-cloud:${db.name}`, (_a = db.cloud.options) === null || _a === void 0 ? void 0 : _a.periodicSync);
|
|
2169
|
+
console.debug(`Dexie Cloud: Successfully registered periodicsync event for ${db.name}`);
|
|
2170
|
+
}
|
|
2171
|
+
catch (e) {
|
|
2172
|
+
console.debug(`Dexie Cloud: Failed to register periodic sync. Your PWA must be installed to allow background sync.`, e);
|
|
2233
2173
|
}
|
|
2234
|
-
default:
|
|
2235
|
-
return typeDefs[type];
|
|
2236
2174
|
}
|
|
2175
|
+
else {
|
|
2176
|
+
console.debug(`Dexie Cloud: periodicSync not supported.`);
|
|
2177
|
+
}
|
|
2178
|
+
}
|
|
2179
|
+
catch (e) {
|
|
2180
|
+
console.debug(`Dexie Cloud: Could not register periodicSync for ${db.name}`, e);
|
|
2237
2181
|
}
|
|
2238
2182
|
}
|
|
2239
2183
|
|
|
2240
|
-
|
|
2241
|
-
|
|
2242
|
-
|
|
2243
|
-
|
|
2244
|
-
|
|
2245
|
-
|
|
2246
|
-
|
|
2247
|
-
|
|
2248
|
-
mimeType: blob.type,
|
|
2249
|
-
i,
|
|
2250
|
-
};
|
|
2251
|
-
},
|
|
2252
|
-
revive: ({ i, mimeType }, altChannel) => new Blob([altChannel[i]], { type: mimeType }),
|
|
2253
|
-
},
|
|
2254
|
-
};
|
|
2255
|
-
|
|
2256
|
-
var numberDef = {
|
|
2257
|
-
number: {
|
|
2258
|
-
replace: (num) => {
|
|
2259
|
-
switch (true) {
|
|
2260
|
-
case isNaN(num):
|
|
2261
|
-
return { $t: "number", v: "NaN" };
|
|
2262
|
-
case num === Infinity:
|
|
2263
|
-
return { $t: "number", v: "Infinity" };
|
|
2264
|
-
case num === -Infinity:
|
|
2265
|
-
return { $t: "number", v: "-Infinity" };
|
|
2266
|
-
default:
|
|
2267
|
-
return num;
|
|
2268
|
-
}
|
|
2269
|
-
},
|
|
2270
|
-
revive: ({ v }) => Number(v),
|
|
2271
|
-
},
|
|
2272
|
-
};
|
|
2184
|
+
function triggerSync(db, purpose) {
|
|
2185
|
+
if (db.cloud.usingServiceWorker) {
|
|
2186
|
+
registerSyncEvent(db, purpose);
|
|
2187
|
+
}
|
|
2188
|
+
else {
|
|
2189
|
+
db.localSyncEvent.next({ purpose });
|
|
2190
|
+
}
|
|
2191
|
+
}
|
|
2273
2192
|
|
|
2274
|
-
const
|
|
2275
|
-
|
|
2276
|
-
|
|
2277
|
-
|
|
2278
|
-
|
|
2279
|
-
|
|
2280
|
-
|
|
2281
|
-
|
|
2193
|
+
const b64decode = typeof Buffer !== "undefined"
|
|
2194
|
+
? (base64) => Buffer.from(base64, "base64")
|
|
2195
|
+
: (base64) => {
|
|
2196
|
+
const binary_string = atob(base64);
|
|
2197
|
+
const len = binary_string.length;
|
|
2198
|
+
const bytes = new Uint8Array(len);
|
|
2199
|
+
for (var i = 0; i < len; i++) {
|
|
2200
|
+
bytes[i] = binary_string.charCodeAt(i);
|
|
2201
|
+
}
|
|
2202
|
+
return bytes;
|
|
2203
|
+
};
|
|
2204
|
+
const b64encode = typeof Buffer !== "undefined"
|
|
2205
|
+
? (b) => ArrayBuffer.isView(b)
|
|
2206
|
+
? Buffer.from(b.buffer, b.byteOffset, b.byteLength).toString("base64")
|
|
2207
|
+
: Buffer.from(b).toString("base64")
|
|
2208
|
+
: (b) => btoa(String.fromCharCode.apply(null, b));
|
|
2282
2209
|
|
|
2283
|
-
|
|
2284
|
-
|
|
2285
|
-
|
|
2286
|
-
|
|
2287
|
-
|
|
2288
|
-
|
|
2289
|
-
|
|
2290
|
-
|
|
2291
|
-
|
|
2210
|
+
async function computeRealmSetHash({ realms, inviteRealms, }) {
|
|
2211
|
+
const data = JSON.stringify([
|
|
2212
|
+
...realms.map((realmId) => ({ realmId, accepted: true })),
|
|
2213
|
+
...inviteRealms.map((realmId) => ({ realmId, accepted: false })),
|
|
2214
|
+
].sort((a, b) => a.realmId < b.realmId ? -1 : a.realmId > b.realmId ? 1 : 0));
|
|
2215
|
+
const byteArray = new TextEncoder().encode(data);
|
|
2216
|
+
const digestBytes = await crypto.subtle.digest('SHA-1', byteArray);
|
|
2217
|
+
const base64 = b64encode(digestBytes);
|
|
2218
|
+
return base64;
|
|
2219
|
+
}
|
|
2292
2220
|
|
|
2293
|
-
|
|
2294
|
-
|
|
2295
|
-
|
|
2296
|
-
|
|
2297
|
-
|
|
2298
|
-
}),
|
|
2299
|
-
revive: ({ v }) => new Set(v),
|
|
2300
|
-
},
|
|
2301
|
-
};
|
|
2221
|
+
function getSyncableTables(db) {
|
|
2222
|
+
return Object.entries(db.cloud.schema || {})
|
|
2223
|
+
.filter(([, { markedForSync }]) => markedForSync)
|
|
2224
|
+
.map(([tbl]) => db.table(tbl));
|
|
2225
|
+
}
|
|
2302
2226
|
|
|
2303
|
-
|
|
2304
|
-
|
|
2305
|
-
|
|
2306
|
-
$t: "Map",
|
|
2307
|
-
v: Array.from(map.entries()),
|
|
2308
|
-
}),
|
|
2309
|
-
revive: ({ v }) => new Map(v),
|
|
2310
|
-
},
|
|
2311
|
-
};
|
|
2227
|
+
function getMutationTable(tableName) {
|
|
2228
|
+
return `$${tableName}_mutations`;
|
|
2229
|
+
}
|
|
2312
2230
|
|
|
2313
|
-
|
|
2314
|
-
|
|
2315
|
-
|
|
2316
|
-
|
|
2317
|
-
|
|
2318
|
-
|
|
2319
|
-
|
|
2231
|
+
function getTableFromMutationTable(mutationTable) {
|
|
2232
|
+
var _a;
|
|
2233
|
+
const tableName = (_a = /^\$(.*)_mutations$/.exec(mutationTable)) === null || _a === void 0 ? void 0 : _a[1];
|
|
2234
|
+
if (!tableName)
|
|
2235
|
+
throw new Error(`Given mutationTable ${mutationTable} is not correct`);
|
|
2236
|
+
return tableName;
|
|
2237
|
+
}
|
|
2320
2238
|
|
|
2321
|
-
|
|
2322
|
-
|
|
2323
|
-
|
|
2324
|
-
|
|
2325
|
-
|
|
2326
|
-
|
|
2327
|
-
|
|
2328
|
-
|
|
2329
|
-
|
|
2330
|
-
|
|
2331
|
-
|
|
2332
|
-
|
|
2333
|
-
|
|
2334
|
-
|
|
2335
|
-
|
|
2336
|
-
|
|
2337
|
-
|
|
2338
|
-
|
|
2339
|
-
|
|
2340
|
-
|
|
2341
|
-
|
|
2342
|
-
|
|
2343
|
-
|
|
2344
|
-
|
|
2345
|
-
v: typeDefs.ArrayBuffer.replace(a.byteOffset === 0 && a.byteLength === a.buffer.byteLength
|
|
2346
|
-
? a.buffer
|
|
2347
|
-
: a.buffer.slice(a.byteOffset, a.byteOffset + a.byteLength), _, typeDefs).v,
|
|
2348
|
-
};
|
|
2349
|
-
return result;
|
|
2350
|
-
},
|
|
2351
|
-
revive: ({ v }, _, typeDefs) => {
|
|
2352
|
-
const TypedArray = _global[typeName];
|
|
2353
|
-
return (TypedArray &&
|
|
2354
|
-
new TypedArray(typeDefs.ArrayBuffer.revive({ v }, _, typeDefs)));
|
|
2355
|
-
},
|
|
2356
|
-
},
|
|
2357
|
-
}), {});
|
|
2239
|
+
async function listClientChanges(mutationTables, db, { since = {}, limit = Infinity } = {}) {
|
|
2240
|
+
const allMutsOnTables = await Promise.all(mutationTables.map(async (mutationTable) => {
|
|
2241
|
+
const tableName = getTableFromMutationTable(mutationTable.name);
|
|
2242
|
+
const lastRevision = since[tableName];
|
|
2243
|
+
let query = lastRevision
|
|
2244
|
+
? mutationTable.where("rev").above(lastRevision)
|
|
2245
|
+
: mutationTable;
|
|
2246
|
+
if (limit < Infinity)
|
|
2247
|
+
query = query.limit(limit);
|
|
2248
|
+
const muts = await query.toArray();
|
|
2249
|
+
//const objTable = db.table(tableName);
|
|
2250
|
+
/*for (const mut of muts) {
|
|
2251
|
+
if (mut.type === "insert" || mut.type === "upsert") {
|
|
2252
|
+
mut.values = await objTable.bulkGet(mut.keys);
|
|
2253
|
+
}
|
|
2254
|
+
}*/
|
|
2255
|
+
return {
|
|
2256
|
+
table: tableName,
|
|
2257
|
+
muts,
|
|
2258
|
+
};
|
|
2259
|
+
}));
|
|
2260
|
+
// Filter out those tables that doesn't have any mutations:
|
|
2261
|
+
return allMutsOnTables.filter(({ muts }) => muts.length > 0);
|
|
2262
|
+
}
|
|
2358
2263
|
|
|
2359
|
-
|
|
2360
|
-
|
|
2361
|
-
: (base64) => {
|
|
2362
|
-
const binary_string = atob(base64);
|
|
2363
|
-
const len = binary_string.length;
|
|
2364
|
-
const bytes = new Uint8Array(len);
|
|
2365
|
-
for (var i = 0; i < len; i++) {
|
|
2366
|
-
bytes[i] = binary_string.charCodeAt(i);
|
|
2367
|
-
}
|
|
2368
|
-
return bytes;
|
|
2369
|
-
};
|
|
2370
|
-
const b64encode = typeof Buffer !== "undefined"
|
|
2371
|
-
? (b) => ArrayBuffer.isView(b)
|
|
2372
|
-
? Buffer.from(b.buffer, b.byteOffset, b.byteLength).toString("base64")
|
|
2373
|
-
: Buffer.from(b).toString("base64")
|
|
2374
|
-
: (b) => btoa(String.fromCharCode.apply(null, b));
|
|
2264
|
+
//@ts-check
|
|
2265
|
+
const randomFillSync = crypto.getRandomValues;
|
|
2375
2266
|
|
|
2376
|
-
function
|
|
2377
|
-
|
|
2378
|
-
|
|
2379
|
-
function b64LexDecode(b64Lex) {
|
|
2380
|
-
return b64decode(lexToB64(b64Lex));
|
|
2381
|
-
}
|
|
2382
|
-
function b64ToLex(base64) {
|
|
2383
|
-
var encoded = "";
|
|
2384
|
-
for (var i = 0, length = base64.length; i < length; i++) {
|
|
2385
|
-
encoded += ENCODE_TABLE[base64[i]];
|
|
2386
|
-
}
|
|
2387
|
-
return encoded;
|
|
2267
|
+
function assert(b) {
|
|
2268
|
+
if (!b)
|
|
2269
|
+
throw new Error('Assertion Failed');
|
|
2388
2270
|
}
|
|
2389
|
-
function
|
|
2390
|
-
|
|
2391
|
-
|
|
2392
|
-
|
|
2271
|
+
function setByKeyPath(obj, keyPath, value) {
|
|
2272
|
+
if (!obj || keyPath === undefined)
|
|
2273
|
+
return;
|
|
2274
|
+
if ('isFrozen' in Object && Object.isFrozen(obj))
|
|
2275
|
+
return;
|
|
2276
|
+
if (typeof keyPath !== 'string' && 'length' in keyPath) {
|
|
2277
|
+
assert(typeof value !== 'string' && 'length' in value);
|
|
2278
|
+
for (var i = 0, l = keyPath.length; i < l; ++i) {
|
|
2279
|
+
setByKeyPath(obj, keyPath[i], value[i]);
|
|
2280
|
+
}
|
|
2393
2281
|
}
|
|
2394
|
-
|
|
2395
|
-
|
|
2396
|
-
|
|
2282
|
+
else {
|
|
2283
|
+
var period = keyPath.indexOf('.');
|
|
2284
|
+
if (period !== -1) {
|
|
2285
|
+
var currentKeyPath = keyPath.substr(0, period);
|
|
2286
|
+
var remainingKeyPath = keyPath.substr(period + 1);
|
|
2287
|
+
if (remainingKeyPath === '')
|
|
2288
|
+
if (value === undefined) {
|
|
2289
|
+
if (Array.isArray(obj)) {
|
|
2290
|
+
if (!isNaN(parseInt(currentKeyPath)))
|
|
2291
|
+
obj.splice(parseInt(currentKeyPath), 1);
|
|
2292
|
+
}
|
|
2293
|
+
else
|
|
2294
|
+
delete obj[currentKeyPath];
|
|
2295
|
+
// @ts-ignore: even if currentKeyPath would be numeric string and obj would be array - it works.
|
|
2296
|
+
}
|
|
2297
|
+
else
|
|
2298
|
+
obj[currentKeyPath] = value;
|
|
2299
|
+
else {
|
|
2300
|
+
//@ts-ignore: even if currentKeyPath would be numeric string and obj would be array - it works.
|
|
2301
|
+
var innerObj = obj[currentKeyPath];
|
|
2302
|
+
//@ts-ignore: even if currentKeyPath would be numeric string and obj would be array - it works.
|
|
2303
|
+
if (!innerObj)
|
|
2304
|
+
innerObj = obj[currentKeyPath] = {};
|
|
2305
|
+
setByKeyPath(innerObj, remainingKeyPath, value);
|
|
2306
|
+
}
|
|
2307
|
+
}
|
|
2308
|
+
else {
|
|
2309
|
+
if (value === undefined) {
|
|
2310
|
+
if (Array.isArray(obj) && !isNaN(parseInt(keyPath)))
|
|
2311
|
+
// @ts-ignore: even if currentKeyPath would be numeric string and obj would be array - it works.
|
|
2312
|
+
obj.splice(keyPath, 1);
|
|
2313
|
+
//@ts-ignore: even if currentKeyPath would be numeric string and obj would be array - it works.
|
|
2314
|
+
else
|
|
2315
|
+
delete obj[keyPath];
|
|
2316
|
+
//@ts-ignore: even if currentKeyPath would be numeric string and obj would be array - it works.
|
|
2317
|
+
}
|
|
2318
|
+
else
|
|
2319
|
+
obj[keyPath] = value;
|
|
2320
|
+
}
|
|
2397
2321
|
}
|
|
2398
|
-
return base64;
|
|
2399
2322
|
}
|
|
2400
|
-
const
|
|
2401
|
-
|
|
2402
|
-
|
|
2403
|
-
|
|
2404
|
-
|
|
2405
|
-
|
|
2406
|
-
|
|
2407
|
-
|
|
2408
|
-
|
|
2409
|
-
|
|
2410
|
-
"8": "I",
|
|
2411
|
-
"9": "J",
|
|
2412
|
-
A: "K",
|
|
2413
|
-
B: "L",
|
|
2414
|
-
C: "M",
|
|
2415
|
-
D: "N",
|
|
2416
|
-
E: "O",
|
|
2417
|
-
F: "P",
|
|
2418
|
-
G: "Q",
|
|
2419
|
-
H: "R",
|
|
2420
|
-
I: "S",
|
|
2421
|
-
J: "T",
|
|
2422
|
-
K: "U",
|
|
2423
|
-
L: "V",
|
|
2424
|
-
M: "W",
|
|
2425
|
-
N: "X",
|
|
2426
|
-
O: "Y",
|
|
2427
|
-
P: "Z",
|
|
2428
|
-
Q: "a",
|
|
2429
|
-
R: "b",
|
|
2430
|
-
S: "c",
|
|
2431
|
-
T: "d",
|
|
2432
|
-
U: "e",
|
|
2433
|
-
V: "f",
|
|
2434
|
-
W: "g",
|
|
2435
|
-
X: "h",
|
|
2436
|
-
Y: "i",
|
|
2437
|
-
Z: "j",
|
|
2438
|
-
_: "k",
|
|
2439
|
-
a: "l",
|
|
2440
|
-
b: "m",
|
|
2441
|
-
c: "n",
|
|
2442
|
-
d: "o",
|
|
2443
|
-
e: "p",
|
|
2444
|
-
f: "q",
|
|
2445
|
-
g: "r",
|
|
2446
|
-
h: "s",
|
|
2447
|
-
i: "t",
|
|
2448
|
-
j: "u",
|
|
2449
|
-
k: "v",
|
|
2450
|
-
l: "w",
|
|
2451
|
-
m: "x",
|
|
2452
|
-
n: "y",
|
|
2453
|
-
o: "z",
|
|
2454
|
-
p: "0",
|
|
2455
|
-
q: "1",
|
|
2456
|
-
r: "2",
|
|
2457
|
-
s: "3",
|
|
2458
|
-
t: "4",
|
|
2459
|
-
u: "5",
|
|
2460
|
-
v: "6",
|
|
2461
|
-
w: "7",
|
|
2462
|
-
x: "8",
|
|
2463
|
-
y: "9",
|
|
2464
|
-
z: "+",
|
|
2465
|
-
"|": "/",
|
|
2323
|
+
const randomString$1 = typeof self === 'undefined' ? (bytes) => {
|
|
2324
|
+
// Node
|
|
2325
|
+
const buf = Buffer.alloc(bytes);
|
|
2326
|
+
randomFillSync(buf);
|
|
2327
|
+
return buf.toString("base64");
|
|
2328
|
+
} : (bytes) => {
|
|
2329
|
+
// Web
|
|
2330
|
+
const buf = new Uint8Array(bytes);
|
|
2331
|
+
crypto.getRandomValues(buf);
|
|
2332
|
+
return btoa(String.fromCharCode.apply(null, buf));
|
|
2466
2333
|
};
|
|
2467
|
-
const ENCODE_TABLE = {};
|
|
2468
|
-
for (const c of Object.keys(DECODE_TABLE)) {
|
|
2469
|
-
ENCODE_TABLE[DECODE_TABLE[c]] = c;
|
|
2470
|
-
}
|
|
2471
2334
|
|
|
2472
|
-
|
|
2473
|
-
|
|
2474
|
-
|
|
2475
|
-
|
|
2476
|
-
|
|
2477
|
-
|
|
2478
|
-
|
|
2479
|
-
|
|
2480
|
-
|
|
2481
|
-
|
|
2482
|
-
|
|
2483
|
-
|
|
2484
|
-
|
|
2485
|
-
|
|
2486
|
-
|
|
2487
|
-
|
|
2488
|
-
|
|
2489
|
-
|
|
2490
|
-
|
|
2491
|
-
|
|
2335
|
+
/** Verifies that given primary key is valid.
|
|
2336
|
+
* The reason we narrow validity for valid keys are twofold:
|
|
2337
|
+
* 1: Make sure to only support types that can be used as an object index in DBKeyMutationSet.
|
|
2338
|
+
* For example, ArrayBuffer cannot be used (gives "object ArrayBuffer") but Uint8Array can be
|
|
2339
|
+
* used (gives comma-delimited list of included bytes).
|
|
2340
|
+
* 2: Avoid using plain numbers and Dates as keys when they are synced, as they are not globally unique.
|
|
2341
|
+
* 3: Since we store the key as a VARCHAR server side in current version, try not promote types that stringifies to become very long server side.
|
|
2342
|
+
*
|
|
2343
|
+
* @param id
|
|
2344
|
+
* @returns
|
|
2345
|
+
*/
|
|
2346
|
+
function isValidSyncableID(id) {
|
|
2347
|
+
if (typeof id === "string")
|
|
2348
|
+
return true;
|
|
2349
|
+
//if (validIDTypes[toStringTag(id)]) return true;
|
|
2350
|
+
//if (Array.isArray(id)) return id.every((part) => isValidSyncableID(part));
|
|
2351
|
+
if (Array.isArray(id) && id.some(key => isValidSyncableID(key)) && id.every(isValidSyncableIDPart))
|
|
2352
|
+
return true;
|
|
2353
|
+
return false;
|
|
2354
|
+
}
|
|
2355
|
+
/** Verifies that given key part is valid.
|
|
2356
|
+
* 1: Make sure that arrays of this types are stringified correclty and works with DBKeyMutationSet.
|
|
2357
|
+
* For example, ArrayBuffer cannot be used (gives "object ArrayBuffer") but Uint8Array can be
|
|
2358
|
+
* used (gives comma-delimited list of included bytes).
|
|
2359
|
+
* 2: Since we store the key as a VARCHAR server side in current version, try not promote types that stringifies to become very long server side.
|
|
2360
|
+
*/
|
|
2361
|
+
function isValidSyncableIDPart(part) {
|
|
2362
|
+
return typeof part === "string" || typeof part === "number" || Array.isArray(part) && part.every(isValidSyncableIDPart);
|
|
2363
|
+
}
|
|
2364
|
+
function isValidAtID(id, idPrefix) {
|
|
2365
|
+
return !idPrefix || (typeof id === "string" && id.startsWith(idPrefix));
|
|
2492
2366
|
}
|
|
2493
2367
|
|
|
2494
|
-
function
|
|
2495
|
-
const
|
|
2496
|
-
|
|
2497
|
-
|
|
2498
|
-
|
|
2499
|
-
|
|
2500
|
-
|
|
2368
|
+
function applyOperation(target, table, op) {
|
|
2369
|
+
const tbl = target[table] || (target[table] = {});
|
|
2370
|
+
switch (op.type) {
|
|
2371
|
+
case "insert":
|
|
2372
|
+
// TODO: Don't treat insert and upsert the same?
|
|
2373
|
+
case "upsert":
|
|
2374
|
+
op.keys.forEach((key, idx) => {
|
|
2375
|
+
tbl[key] = {
|
|
2376
|
+
type: "ups",
|
|
2377
|
+
val: op.values[idx],
|
|
2378
|
+
};
|
|
2379
|
+
});
|
|
2380
|
+
break;
|
|
2381
|
+
case "update":
|
|
2382
|
+
case "modify": {
|
|
2383
|
+
op.keys.forEach((key, idx) => {
|
|
2384
|
+
const changeSpec = op.type === "update"
|
|
2385
|
+
? op.changeSpecs[idx]
|
|
2386
|
+
: op.changeSpec;
|
|
2387
|
+
const entry = tbl[key];
|
|
2388
|
+
if (!entry) {
|
|
2389
|
+
tbl[key] = {
|
|
2390
|
+
type: "upd",
|
|
2391
|
+
mod: changeSpec,
|
|
2392
|
+
};
|
|
2393
|
+
}
|
|
2394
|
+
else {
|
|
2395
|
+
switch (entry.type) {
|
|
2396
|
+
case "ups":
|
|
2397
|
+
// Adjust the existing upsert with additional updates
|
|
2398
|
+
for (const [propPath, value] of Object.entries(changeSpec)) {
|
|
2399
|
+
setByKeyPath(entry.val, propPath, value);
|
|
2400
|
+
}
|
|
2401
|
+
break;
|
|
2402
|
+
case "del":
|
|
2403
|
+
// No action.
|
|
2404
|
+
break;
|
|
2405
|
+
case "upd":
|
|
2406
|
+
// Adjust existing update with additional updates
|
|
2407
|
+
Object.assign(entry.mod, changeSpec); // May work for deep props as well - new keys is added later, right? Does the prop order persist along TSON and all? But it will not be 100% when combined with some server code (seach for "address.city": "Stockholm" comment)
|
|
2408
|
+
break;
|
|
2409
|
+
}
|
|
2410
|
+
}
|
|
2411
|
+
});
|
|
2412
|
+
break;
|
|
2413
|
+
}
|
|
2414
|
+
case "delete":
|
|
2415
|
+
op.keys.forEach((key) => {
|
|
2416
|
+
tbl[key] = {
|
|
2417
|
+
type: "del",
|
|
2418
|
+
};
|
|
2419
|
+
});
|
|
2420
|
+
break;
|
|
2501
2421
|
}
|
|
2502
|
-
return
|
|
2422
|
+
return target;
|
|
2503
2423
|
}
|
|
2504
2424
|
|
|
2505
|
-
function
|
|
2506
|
-
const
|
|
2507
|
-
|
|
2508
|
-
|
|
2425
|
+
function applyOperations(target, ops) {
|
|
2426
|
+
for (const { table, muts } of ops) {
|
|
2427
|
+
for (const mut of muts) {
|
|
2428
|
+
applyOperation(target, table, mut);
|
|
2429
|
+
}
|
|
2509
2430
|
}
|
|
2510
|
-
return array.buffer;
|
|
2511
2431
|
}
|
|
2512
2432
|
|
|
2513
|
-
|
|
2514
|
-
|
|
2515
|
-
|
|
2516
|
-
|
|
2517
|
-
|
|
2518
|
-
|
|
2519
|
-
|
|
2520
|
-
:
|
|
2521
|
-
|
|
2522
|
-
|
|
2523
|
-
|
|
2524
|
-
|
|
2525
|
-
|
|
2526
|
-
|
|
2527
|
-
|
|
2528
|
-
|
|
2529
|
-
|
|
2530
|
-
|
|
2531
|
-
|
|
2532
|
-
|
|
2533
|
-
|
|
2534
|
-
|
|
2535
|
-
|
|
2536
|
-
|
|
2537
|
-
|
|
2538
|
-
|
|
2539
|
-
|
|
2540
|
-
|
|
2541
|
-
|
|
2542
|
-
|
|
2543
|
-
|
|
2544
|
-
|
|
2545
|
-
|
|
2546
|
-
|
|
2547
|
-
|
|
2548
|
-
|
|
2549
|
-
|
|
2550
|
-
|
|
2551
|
-
|
|
2552
|
-
|
|
2553
|
-
|
|
2554
|
-
|
|
2555
|
-
|
|
2556
|
-
|
|
2557
|
-
|
|
2558
|
-
|
|
2559
|
-
|
|
2560
|
-
|
|
2561
|
-
|
|
2562
|
-
|
|
2563
|
-
|
|
2564
|
-
|
|
2565
|
-
const buf = await readBlobBinary(binData);
|
|
2566
|
-
const view = new DataView(buf);
|
|
2567
|
-
while (pos < buf.byteLength) {
|
|
2568
|
-
const len = view.getUint32(pos);
|
|
2569
|
-
pos += 4;
|
|
2570
|
-
const ab = buf.slice(pos, pos + len);
|
|
2571
|
-
pos += len;
|
|
2572
|
-
arrayBuffers.push(ab);
|
|
2433
|
+
function subtractChanges(target, // Server change set
|
|
2434
|
+
changesToSubtract // additional mutations on client during syncWithServer()
|
|
2435
|
+
) {
|
|
2436
|
+
var _a, _b, _c;
|
|
2437
|
+
for (const [table, mutationSet] of Object.entries(changesToSubtract)) {
|
|
2438
|
+
for (const [key, mut] of Object.entries(mutationSet)) {
|
|
2439
|
+
switch (mut.type) {
|
|
2440
|
+
case 'ups':
|
|
2441
|
+
{
|
|
2442
|
+
const targetMut = (_a = target[table]) === null || _a === void 0 ? void 0 : _a[key];
|
|
2443
|
+
if (targetMut) {
|
|
2444
|
+
switch (targetMut.type) {
|
|
2445
|
+
case 'ups':
|
|
2446
|
+
delete target[table][key];
|
|
2447
|
+
break;
|
|
2448
|
+
case 'del':
|
|
2449
|
+
// Leave delete operation.
|
|
2450
|
+
// (Don't resurrect objects unintenionally (using tx(get, put) pattern locally))
|
|
2451
|
+
break;
|
|
2452
|
+
case 'upd':
|
|
2453
|
+
delete target[table][key];
|
|
2454
|
+
break;
|
|
2455
|
+
}
|
|
2456
|
+
}
|
|
2457
|
+
}
|
|
2458
|
+
break;
|
|
2459
|
+
case 'del':
|
|
2460
|
+
(_b = target[table]) === null || _b === void 0 ? true : delete _b[key];
|
|
2461
|
+
break;
|
|
2462
|
+
case 'upd': {
|
|
2463
|
+
const targetMut = (_c = target[table]) === null || _c === void 0 ? void 0 : _c[key];
|
|
2464
|
+
if (targetMut) {
|
|
2465
|
+
switch (targetMut.type) {
|
|
2466
|
+
case 'ups':
|
|
2467
|
+
// Adjust the server upsert with locally updated values.
|
|
2468
|
+
for (const [propPath, value] of Object.entries(mut.mod)) {
|
|
2469
|
+
setByKeyPath(targetMut.val, propPath, value);
|
|
2470
|
+
}
|
|
2471
|
+
break;
|
|
2472
|
+
case 'del':
|
|
2473
|
+
// Leave delete.
|
|
2474
|
+
break;
|
|
2475
|
+
case 'upd':
|
|
2476
|
+
// Remove the local update props from the server update mutation.
|
|
2477
|
+
for (const propPath of Object.keys(mut.mod)) {
|
|
2478
|
+
delete targetMut.mod[propPath];
|
|
2479
|
+
}
|
|
2480
|
+
break;
|
|
2481
|
+
}
|
|
2482
|
+
}
|
|
2483
|
+
break;
|
|
2484
|
+
}
|
|
2573
2485
|
}
|
|
2574
|
-
|
|
2575
|
-
|
|
2576
|
-
async fromBinary(blob) {
|
|
2577
|
-
const len = new DataView(await readBlobBinary(blob.slice(0, 4))).getUint32(0);
|
|
2578
|
-
const binData = blob.slice(4, len + 4);
|
|
2579
|
-
const json = await readBlob(blob.slice(len + 4));
|
|
2580
|
-
return await this.parse(json, binData);
|
|
2581
|
-
},
|
|
2582
|
-
};
|
|
2583
|
-
}
|
|
2584
|
-
function readBlob(blob) {
|
|
2585
|
-
return new Promise((resolve, reject) => {
|
|
2586
|
-
const reader = new FileReader();
|
|
2587
|
-
reader.onabort = (ev) => reject(new Error("file read aborted"));
|
|
2588
|
-
reader.onerror = (ev) => reject(ev.target.error);
|
|
2589
|
-
reader.onload = (ev) => resolve(ev.target.result);
|
|
2590
|
-
reader.readAsText(blob);
|
|
2591
|
-
});
|
|
2592
|
-
}
|
|
2593
|
-
function readBlobBinary(blob) {
|
|
2594
|
-
return new Promise((resolve, reject) => {
|
|
2595
|
-
const reader = new FileReader();
|
|
2596
|
-
reader.onabort = (ev) => reject(new Error("file read aborted"));
|
|
2597
|
-
reader.onerror = (ev) => reject(ev.target.error);
|
|
2598
|
-
reader.onload = (ev) => resolve(ev.target.result);
|
|
2599
|
-
reader.readAsArrayBuffer(blob);
|
|
2600
|
-
});
|
|
2486
|
+
}
|
|
2487
|
+
}
|
|
2601
2488
|
}
|
|
2602
2489
|
|
|
2603
|
-
/**
|
|
2604
|
-
*
|
|
2605
|
-
*
|
|
2606
|
-
* {foo: undefined}
|
|
2490
|
+
/** Convert a DBKeyMutationSet (which is an internal format capable of looking up changes per ID)
|
|
2491
|
+
* ...into a DBOperationsSet (which is more optimal for performing DB operations into DB (bulkAdd() etc))
|
|
2607
2492
|
*
|
|
2608
|
-
*
|
|
2609
|
-
*
|
|
2493
|
+
* @param inSet
|
|
2494
|
+
* @returns DBOperationsSet representing inSet
|
|
2610
2495
|
*/
|
|
2611
|
-
|
|
2612
|
-
|
|
2613
|
-
|
|
2614
|
-
|
|
2615
|
-
|
|
2616
|
-
|
|
2617
|
-
|
|
2618
|
-
|
|
2619
|
-
|
|
2620
|
-
|
|
2621
|
-
// We need to not fail when reviving it and we need to somehow store the information.
|
|
2622
|
-
// Since the revived version will later on be put into indexedDB we have another
|
|
2623
|
-
// issue: When reading it back from indexedDB we will get a poco object that we
|
|
2624
|
-
// cannot replace correctly when sending it to server. So we will also need
|
|
2625
|
-
// to do an explicit workaround in the protocol where a bigint is supported.
|
|
2626
|
-
// The workaround should be there regardless if browser supports BigInt or not, because
|
|
2627
|
-
// the serverRev might have been stored in IDB before the browser was upgraded to support bigint.
|
|
2628
|
-
//
|
|
2629
|
-
// if (typeof serverRev.rev !== "bigint")
|
|
2630
|
-
// if (hasBigIntSupport)
|
|
2631
|
-
// serverRev.rev = bigIntDef.bigint.revive(server.rev)
|
|
2632
|
-
// else
|
|
2633
|
-
// serverRev.rev = new FakeBigInt(server.rev)
|
|
2634
|
-
const hasBigIntSupport = typeof BigInt(0) === 'bigint';
|
|
2635
|
-
function getValueOfBigInt(x) {
|
|
2636
|
-
if (typeof x === 'bigint') {
|
|
2637
|
-
return x;
|
|
2638
|
-
}
|
|
2639
|
-
if (hasBigIntSupport) {
|
|
2640
|
-
return typeof x === 'string' ? BigInt(x) : BigInt(x.v);
|
|
2641
|
-
}
|
|
2642
|
-
else {
|
|
2643
|
-
return typeof x === 'string' ? Number(x) : Number(x.v);
|
|
2644
|
-
}
|
|
2645
|
-
}
|
|
2646
|
-
function compareBigInts(a, b) {
|
|
2647
|
-
const valA = getValueOfBigInt(a);
|
|
2648
|
-
const valB = getValueOfBigInt(b);
|
|
2649
|
-
return valA < valB ? -1 : valA > valB ? 1 : 0;
|
|
2650
|
-
}
|
|
2651
|
-
class FakeBigInt {
|
|
2652
|
-
constructor(value) {
|
|
2653
|
-
this.v = value;
|
|
2654
|
-
}
|
|
2655
|
-
toString() {
|
|
2656
|
-
return this.v;
|
|
2657
|
-
}
|
|
2658
|
-
}
|
|
2659
|
-
const defs = {
|
|
2660
|
-
...undefinedDef,
|
|
2661
|
-
...(hasBigIntSupport
|
|
2662
|
-
? {}
|
|
2663
|
-
: {
|
|
2664
|
-
bigint: {
|
|
2665
|
-
test: (val) => val instanceof FakeBigInt,
|
|
2666
|
-
replace: (fakeBigInt) => {
|
|
2667
|
-
return {
|
|
2668
|
-
$t: 'bigint',
|
|
2669
|
-
...fakeBigInt
|
|
2670
|
-
};
|
|
2671
|
-
},
|
|
2672
|
-
revive: ({ v, }) => new FakeBigInt(v)
|
|
2673
|
-
}
|
|
2674
|
-
})
|
|
2675
|
-
};
|
|
2676
|
-
const TSON = TypesonSimplified(builtin, defs);
|
|
2677
|
-
const BISON = Bison(defs);
|
|
2678
|
-
|
|
2679
|
-
//const hasSW = 'serviceWorker' in navigator;
|
|
2680
|
-
let hasComplainedAboutSyncEvent = false;
|
|
2681
|
-
async function registerSyncEvent(db, purpose) {
|
|
2682
|
-
try {
|
|
2683
|
-
// Send sync event to SW:
|
|
2684
|
-
const sw = await navigator.serviceWorker.ready;
|
|
2685
|
-
if (purpose === "push" && sw.sync) {
|
|
2686
|
-
await sw.sync.register(`dexie-cloud:${db.name}`);
|
|
2687
|
-
}
|
|
2688
|
-
if (sw.active) {
|
|
2689
|
-
// Use postMessage for pull syncs and for browsers not supporting sync event (Firefox, Safari).
|
|
2690
|
-
// Also chromium based browsers with sw.sync as a fallback for sleepy sync events not taking action for a while.
|
|
2691
|
-
sw.active.postMessage({
|
|
2692
|
-
type: 'dexie-cloud-sync',
|
|
2693
|
-
dbName: db.name,
|
|
2694
|
-
purpose
|
|
2695
|
-
});
|
|
2696
|
-
}
|
|
2697
|
-
else {
|
|
2698
|
-
console.error(`Dexie Cloud: There's no active service worker. Can this ever happen??`);
|
|
2699
|
-
}
|
|
2700
|
-
return;
|
|
2701
|
-
}
|
|
2702
|
-
catch (e) {
|
|
2703
|
-
if (!hasComplainedAboutSyncEvent) {
|
|
2704
|
-
console.debug(`Dexie Cloud: Could not register sync event`, e);
|
|
2705
|
-
hasComplainedAboutSyncEvent = true;
|
|
2496
|
+
function toDBOperationSet(inSet) {
|
|
2497
|
+
// Fictive transaction:
|
|
2498
|
+
const txid = randomString$1(16);
|
|
2499
|
+
// Convert data into a temporary map to collect mutations of same table and type
|
|
2500
|
+
const map = {};
|
|
2501
|
+
for (const [table, ops] of Object.entries(inSet)) {
|
|
2502
|
+
for (const [key, op] of Object.entries(ops)) {
|
|
2503
|
+
const mapEntry = map[table] || (map[table] = {});
|
|
2504
|
+
const ops = mapEntry[op.type] || (mapEntry[op.type] = []);
|
|
2505
|
+
ops.push(Object.assign({ key }, op)); // DBKeyMutation doesn't contain key, so we need to bring it in.
|
|
2706
2506
|
}
|
|
2707
2507
|
}
|
|
2708
|
-
|
|
2709
|
-
|
|
2710
|
-
|
|
2711
|
-
|
|
2712
|
-
|
|
2713
|
-
|
|
2714
|
-
|
|
2715
|
-
|
|
2716
|
-
|
|
2717
|
-
|
|
2718
|
-
|
|
2719
|
-
|
|
2720
|
-
|
|
2721
|
-
|
|
2508
|
+
// Start computing the resulting format:
|
|
2509
|
+
const result = [];
|
|
2510
|
+
for (const [table, ops] of Object.entries(map)) {
|
|
2511
|
+
const resultEntry = {
|
|
2512
|
+
table,
|
|
2513
|
+
muts: [],
|
|
2514
|
+
};
|
|
2515
|
+
for (const [optype, muts] of Object.entries(ops)) {
|
|
2516
|
+
switch (optype) {
|
|
2517
|
+
case "ups": {
|
|
2518
|
+
const op = {
|
|
2519
|
+
type: "upsert",
|
|
2520
|
+
keys: muts.map(mut => mut.key),
|
|
2521
|
+
values: muts.map(mut => mut.val),
|
|
2522
|
+
txid
|
|
2523
|
+
};
|
|
2524
|
+
resultEntry.muts.push(op);
|
|
2525
|
+
break;
|
|
2526
|
+
}
|
|
2527
|
+
case "upd": {
|
|
2528
|
+
const op = {
|
|
2529
|
+
type: "update",
|
|
2530
|
+
keys: muts.map(mut => mut.key),
|
|
2531
|
+
changeSpecs: muts.map(mut => mut.mod),
|
|
2532
|
+
txid
|
|
2533
|
+
};
|
|
2534
|
+
resultEntry.muts.push(op);
|
|
2535
|
+
break;
|
|
2536
|
+
}
|
|
2537
|
+
case "del": {
|
|
2538
|
+
const op = {
|
|
2539
|
+
type: "delete",
|
|
2540
|
+
keys: muts.map(mut => mut.key),
|
|
2541
|
+
txid,
|
|
2542
|
+
};
|
|
2543
|
+
resultEntry.muts.push(op);
|
|
2544
|
+
break;
|
|
2545
|
+
}
|
|
2722
2546
|
}
|
|
2723
2547
|
}
|
|
2724
|
-
|
|
2725
|
-
console.debug(`Dexie Cloud: periodicSync not supported.`);
|
|
2726
|
-
}
|
|
2727
|
-
}
|
|
2728
|
-
catch (e) {
|
|
2729
|
-
console.debug(`Dexie Cloud: Could not register periodicSync for ${db.name}`, e);
|
|
2548
|
+
result.push(resultEntry);
|
|
2730
2549
|
}
|
|
2550
|
+
return result;
|
|
2731
2551
|
}
|
|
2732
2552
|
|
|
2733
|
-
function
|
|
2734
|
-
|
|
2735
|
-
|
|
2736
|
-
|
|
2737
|
-
|
|
2738
|
-
db.localSyncEvent.next({ purpose });
|
|
2739
|
-
}
|
|
2553
|
+
function getDbNameFromDbUrl(dbUrl) {
|
|
2554
|
+
const url = new URL(dbUrl);
|
|
2555
|
+
return url.pathname === "/"
|
|
2556
|
+
? url.hostname.split('.')[0]
|
|
2557
|
+
: url.pathname.split('/')[1];
|
|
2740
2558
|
}
|
|
2741
2559
|
|
|
2742
|
-
async function
|
|
2743
|
-
|
|
2744
|
-
|
|
2745
|
-
|
|
2746
|
-
|
|
2747
|
-
|
|
2748
|
-
|
|
2749
|
-
|
|
2750
|
-
|
|
2560
|
+
async function listSyncifiedChanges(tablesToSyncify, currentUser, schema, alreadySyncedRealms) {
|
|
2561
|
+
if (currentUser.isLoggedIn) {
|
|
2562
|
+
if (tablesToSyncify.length > 0) {
|
|
2563
|
+
const ignoredRealms = new Set(alreadySyncedRealms || []);
|
|
2564
|
+
const inserts = await Promise.all(tablesToSyncify.map(async (table) => {
|
|
2565
|
+
const { extractKey } = table.core.schema.primaryKey;
|
|
2566
|
+
if (!extractKey)
|
|
2567
|
+
return { table: table.name, muts: [] }; // Outbound tables are not synced.
|
|
2568
|
+
const dexieCloudTableSchema = schema[table.name];
|
|
2569
|
+
const query = (dexieCloudTableSchema === null || dexieCloudTableSchema === void 0 ? void 0 : dexieCloudTableSchema.generatedGlobalId)
|
|
2570
|
+
? table.filter((item) => !ignoredRealms.has(item.realmId || "") && isValidSyncableID(extractKey(item)))
|
|
2571
|
+
: table.filter((item) => !ignoredRealms.has(item.realmId || "") && isValidAtID(extractKey(item), dexieCloudTableSchema === null || dexieCloudTableSchema === void 0 ? void 0 : dexieCloudTableSchema.idPrefix));
|
|
2572
|
+
const unsyncedObjects = await query.toArray();
|
|
2573
|
+
if (unsyncedObjects.length > 0) {
|
|
2574
|
+
const mut = {
|
|
2575
|
+
type: "insert",
|
|
2576
|
+
values: unsyncedObjects,
|
|
2577
|
+
keys: unsyncedObjects.map(extractKey),
|
|
2578
|
+
userId: currentUser.userId,
|
|
2579
|
+
};
|
|
2580
|
+
return {
|
|
2581
|
+
table: table.name,
|
|
2582
|
+
muts: [mut],
|
|
2583
|
+
};
|
|
2584
|
+
}
|
|
2585
|
+
else {
|
|
2586
|
+
return {
|
|
2587
|
+
table: table.name,
|
|
2588
|
+
muts: []
|
|
2589
|
+
};
|
|
2590
|
+
}
|
|
2591
|
+
}));
|
|
2592
|
+
return inserts.filter(op => op.muts.length > 0);
|
|
2593
|
+
}
|
|
2594
|
+
}
|
|
2595
|
+
return [];
|
|
2751
2596
|
}
|
|
2752
2597
|
|
|
2753
|
-
function
|
|
2754
|
-
|
|
2755
|
-
|
|
2756
|
-
|
|
2598
|
+
function getTablesToSyncify(db, syncState) {
|
|
2599
|
+
const syncedTables = (syncState === null || syncState === void 0 ? void 0 : syncState.syncedTables) || [];
|
|
2600
|
+
const syncableTables = getSyncableTables(db);
|
|
2601
|
+
const tablesToSyncify = syncableTables.filter((tbl) => !syncedTables.includes(tbl.name));
|
|
2602
|
+
return tablesToSyncify;
|
|
2757
2603
|
}
|
|
2758
2604
|
|
|
2759
|
-
function
|
|
2760
|
-
return
|
|
2605
|
+
function interactWithUser(userInteraction, req) {
|
|
2606
|
+
return new Promise((resolve, reject) => {
|
|
2607
|
+
const interactionProps = {
|
|
2608
|
+
...req,
|
|
2609
|
+
onSubmit: (res) => {
|
|
2610
|
+
userInteraction.next(undefined);
|
|
2611
|
+
resolve(res);
|
|
2612
|
+
},
|
|
2613
|
+
onCancel: () => {
|
|
2614
|
+
userInteraction.next(undefined);
|
|
2615
|
+
reject(new Dexie__default['default'].AbortError("User cancelled"));
|
|
2616
|
+
},
|
|
2617
|
+
};
|
|
2618
|
+
userInteraction.next(interactionProps);
|
|
2619
|
+
// Start subscribing for external updates to db.cloud.userInteraction, and if so, cancel this request.
|
|
2620
|
+
/*const subscription = userInteraction.subscribe((currentInteractionProps) => {
|
|
2621
|
+
if (currentInteractionProps !== interactionProps) {
|
|
2622
|
+
if (subscription) subscription.unsubscribe();
|
|
2623
|
+
if (!done) {
|
|
2624
|
+
reject(new Dexie.AbortError("User cancelled"));
|
|
2625
|
+
}
|
|
2626
|
+
}
|
|
2627
|
+
});*/
|
|
2628
|
+
});
|
|
2761
2629
|
}
|
|
2762
|
-
|
|
2763
|
-
|
|
2764
|
-
|
|
2765
|
-
|
|
2766
|
-
|
|
2767
|
-
|
|
2768
|
-
|
|
2630
|
+
function alertUser(userInteraction, title, ...alerts) {
|
|
2631
|
+
return interactWithUser(userInteraction, {
|
|
2632
|
+
type: 'message-alert',
|
|
2633
|
+
title,
|
|
2634
|
+
alerts,
|
|
2635
|
+
fields: {}
|
|
2636
|
+
});
|
|
2769
2637
|
}
|
|
2770
|
-
|
|
2771
|
-
|
|
2772
|
-
|
|
2773
|
-
|
|
2774
|
-
|
|
2775
|
-
|
|
2776
|
-
|
|
2777
|
-
|
|
2778
|
-
|
|
2779
|
-
|
|
2780
|
-
|
|
2781
|
-
|
|
2782
|
-
|
|
2783
|
-
|
|
2784
|
-
|
|
2785
|
-
|
|
2786
|
-
|
|
2787
|
-
|
|
2788
|
-
|
|
2789
|
-
|
|
2790
|
-
|
|
2791
|
-
|
|
2792
|
-
|
|
2793
|
-
|
|
2638
|
+
async function promptForEmail(userInteraction, title, emailHint) {
|
|
2639
|
+
let email = emailHint || '';
|
|
2640
|
+
while (!email || !/^[\w-\.]+@([\w-]+\.)+[\w-]{2,10}$/.test(email)) {
|
|
2641
|
+
email = (await interactWithUser(userInteraction, {
|
|
2642
|
+
type: 'email',
|
|
2643
|
+
title,
|
|
2644
|
+
alerts: email
|
|
2645
|
+
? [
|
|
2646
|
+
{
|
|
2647
|
+
type: 'error',
|
|
2648
|
+
messageCode: 'INVALID_EMAIL',
|
|
2649
|
+
message: 'Please enter a valid email address',
|
|
2650
|
+
messageParams: {},
|
|
2651
|
+
},
|
|
2652
|
+
]
|
|
2653
|
+
: [],
|
|
2654
|
+
fields: {
|
|
2655
|
+
email: {
|
|
2656
|
+
type: 'email',
|
|
2657
|
+
placeholder: 'you@somedomain.com',
|
|
2658
|
+
},
|
|
2659
|
+
},
|
|
2660
|
+
})).email;
|
|
2661
|
+
}
|
|
2662
|
+
return email;
|
|
2794
2663
|
}
|
|
2795
|
-
|
|
2796
|
-
|
|
2797
|
-
|
|
2798
|
-
|
|
2799
|
-
|
|
2800
|
-
|
|
2801
|
-
|
|
2664
|
+
async function promptForOTP(userInteraction, email, alert) {
|
|
2665
|
+
const alerts = [
|
|
2666
|
+
{
|
|
2667
|
+
type: 'info',
|
|
2668
|
+
messageCode: 'OTP_SENT',
|
|
2669
|
+
message: `A One-Time password has been sent to {email}`,
|
|
2670
|
+
messageParams: { email },
|
|
2671
|
+
},
|
|
2672
|
+
];
|
|
2673
|
+
if (alert) {
|
|
2674
|
+
alerts.push(alert);
|
|
2675
|
+
}
|
|
2676
|
+
const { otp } = await interactWithUser(userInteraction, {
|
|
2677
|
+
type: 'otp',
|
|
2678
|
+
title: 'Enter OTP',
|
|
2679
|
+
alerts,
|
|
2680
|
+
fields: {
|
|
2681
|
+
otp: {
|
|
2682
|
+
type: 'otp',
|
|
2683
|
+
label: 'OTP',
|
|
2684
|
+
placeholder: 'Paste OTP here',
|
|
2685
|
+
},
|
|
2686
|
+
},
|
|
2687
|
+
});
|
|
2688
|
+
return otp;
|
|
2802
2689
|
}
|
|
2803
|
-
|
|
2804
|
-
|
|
2805
|
-
|
|
2806
|
-
|
|
2690
|
+
|
|
2691
|
+
async function loadAccessToken(db) {
|
|
2692
|
+
var _a, _b;
|
|
2693
|
+
const currentUser = await db.getCurrentUser();
|
|
2694
|
+
const { accessToken, accessTokenExpiration, refreshToken, refreshTokenExpiration, claims, } = currentUser;
|
|
2695
|
+
if (!accessToken)
|
|
2807
2696
|
return;
|
|
2808
|
-
|
|
2809
|
-
|
|
2810
|
-
|
|
2811
|
-
|
|
2812
|
-
|
|
2697
|
+
const expTime = (_a = accessTokenExpiration === null || accessTokenExpiration === void 0 ? void 0 : accessTokenExpiration.getTime()) !== null && _a !== void 0 ? _a : Infinity;
|
|
2698
|
+
if (expTime > Date.now()) {
|
|
2699
|
+
return accessToken;
|
|
2700
|
+
}
|
|
2701
|
+
if (!refreshToken) {
|
|
2702
|
+
throw new Error(`Refresh token missing`);
|
|
2703
|
+
}
|
|
2704
|
+
const refreshExpTime = (_b = refreshTokenExpiration === null || refreshTokenExpiration === void 0 ? void 0 : refreshTokenExpiration.getTime()) !== null && _b !== void 0 ? _b : Infinity;
|
|
2705
|
+
if (refreshExpTime <= Date.now()) {
|
|
2706
|
+
throw new Error(`Refresh token has expired`);
|
|
2707
|
+
}
|
|
2708
|
+
const refreshedLogin = await refreshAccessToken(db.cloud.options.databaseUrl, currentUser);
|
|
2709
|
+
await db.table('$logins').update(claims.sub, {
|
|
2710
|
+
accessToken: refreshedLogin.accessToken,
|
|
2711
|
+
accessTokenExpiration: refreshedLogin.accessTokenExpiration,
|
|
2712
|
+
});
|
|
2713
|
+
return refreshedLogin.accessToken;
|
|
2714
|
+
}
|
|
2715
|
+
async function authenticate(url, context, fetchToken, userInteraction, hints) {
|
|
2716
|
+
if (context.accessToken &&
|
|
2717
|
+
context.accessTokenExpiration.getTime() > Date.now()) {
|
|
2718
|
+
return context;
|
|
2719
|
+
}
|
|
2720
|
+
else if (context.refreshToken &&
|
|
2721
|
+
(!context.refreshTokenExpiration ||
|
|
2722
|
+
context.refreshTokenExpiration.getTime() > Date.now())) {
|
|
2723
|
+
return await refreshAccessToken(url, context);
|
|
2813
2724
|
}
|
|
2814
2725
|
else {
|
|
2815
|
-
|
|
2816
|
-
|
|
2817
|
-
|
|
2818
|
-
|
|
2819
|
-
|
|
2820
|
-
|
|
2821
|
-
|
|
2822
|
-
|
|
2823
|
-
|
|
2824
|
-
|
|
2825
|
-
|
|
2826
|
-
|
|
2827
|
-
|
|
2828
|
-
|
|
2829
|
-
|
|
2830
|
-
|
|
2831
|
-
|
|
2832
|
-
|
|
2833
|
-
|
|
2834
|
-
|
|
2835
|
-
|
|
2836
|
-
|
|
2837
|
-
|
|
2838
|
-
|
|
2726
|
+
return await userAuthenticate(context, fetchToken, userInteraction, hints);
|
|
2727
|
+
}
|
|
2728
|
+
}
|
|
2729
|
+
async function refreshAccessToken(url, login) {
|
|
2730
|
+
if (!login.refreshToken)
|
|
2731
|
+
throw new Error(`Cannot refresh token - refresh token is missing.`);
|
|
2732
|
+
if (!login.nonExportablePrivateKey)
|
|
2733
|
+
throw new Error(`login.nonExportablePrivateKey is missing - cannot sign refresh token without a private key.`);
|
|
2734
|
+
const time_stamp = Date.now();
|
|
2735
|
+
const signing_algorithm = 'RSASSA-PKCS1-v1_5';
|
|
2736
|
+
const textEncoder = new TextEncoder();
|
|
2737
|
+
const data = textEncoder.encode(login.refreshToken + time_stamp);
|
|
2738
|
+
const binarySignature = await crypto.subtle.sign(signing_algorithm, login.nonExportablePrivateKey, data);
|
|
2739
|
+
const signature = b64encode(binarySignature);
|
|
2740
|
+
const tokenRequest = {
|
|
2741
|
+
grant_type: 'refresh_token',
|
|
2742
|
+
refresh_token: login.refreshToken,
|
|
2743
|
+
scopes: ['ACCESS_DB'],
|
|
2744
|
+
signature,
|
|
2745
|
+
signing_algorithm,
|
|
2746
|
+
time_stamp,
|
|
2747
|
+
};
|
|
2748
|
+
const res = await fetch(`${url}/token`, {
|
|
2749
|
+
body: JSON.stringify(tokenRequest),
|
|
2750
|
+
method: 'post',
|
|
2751
|
+
headers: { 'Content-Type': 'application/json' },
|
|
2752
|
+
mode: 'cors',
|
|
2753
|
+
});
|
|
2754
|
+
if (res.status !== 200)
|
|
2755
|
+
throw new Error(`RefreshToken: Status ${res.status} from ${url}/token`);
|
|
2756
|
+
const response = await res.json();
|
|
2757
|
+
login.accessToken = response.accessToken;
|
|
2758
|
+
login.accessTokenExpiration = response.accessTokenExpiration
|
|
2759
|
+
? new Date(response.accessTokenExpiration)
|
|
2760
|
+
: undefined;
|
|
2761
|
+
return login;
|
|
2762
|
+
}
|
|
2763
|
+
async function userAuthenticate(context, fetchToken, userInteraction, hints) {
|
|
2764
|
+
const { privateKey, publicKey } = await crypto.subtle.generateKey({
|
|
2765
|
+
name: 'RSASSA-PKCS1-v1_5',
|
|
2766
|
+
modulusLength: 2048,
|
|
2767
|
+
publicExponent: new Uint8Array([0x01, 0x00, 0x01]),
|
|
2768
|
+
hash: { name: 'SHA-256' },
|
|
2769
|
+
}, false, // Non-exportable...
|
|
2770
|
+
['sign', 'verify']);
|
|
2771
|
+
context.nonExportablePrivateKey = privateKey; //...but storable!
|
|
2772
|
+
const publicKeySPKI = await crypto.subtle.exportKey('spki', publicKey);
|
|
2773
|
+
const publicKeyPEM = spkiToPEM(publicKeySPKI);
|
|
2774
|
+
context.publicKey = publicKey;
|
|
2775
|
+
try {
|
|
2776
|
+
const response2 = await fetchToken({
|
|
2777
|
+
public_key: publicKeyPEM,
|
|
2778
|
+
hints,
|
|
2779
|
+
});
|
|
2780
|
+
if (response2.type !== 'tokens')
|
|
2781
|
+
throw new Error(`Unexpected response type from token endpoint: ${response2.type}`);
|
|
2782
|
+
context.accessToken = response2.accessToken;
|
|
2783
|
+
context.accessTokenExpiration = new Date(response2.accessTokenExpiration);
|
|
2784
|
+
context.refreshToken = response2.refreshToken;
|
|
2785
|
+
if (response2.refreshTokenExpiration) {
|
|
2786
|
+
context.refreshTokenExpiration = new Date(response2.refreshTokenExpiration);
|
|
2839
2787
|
}
|
|
2840
|
-
|
|
2841
|
-
|
|
2842
|
-
|
|
2843
|
-
|
|
2844
|
-
|
|
2845
|
-
|
|
2846
|
-
|
|
2847
|
-
|
|
2848
|
-
|
|
2849
|
-
|
|
2850
|
-
|
|
2851
|
-
obj[keyPath] = value;
|
|
2788
|
+
context.userId = response2.claims.sub;
|
|
2789
|
+
context.email = response2.claims.email;
|
|
2790
|
+
context.name = response2.claims.name;
|
|
2791
|
+
context.claims = response2.claims;
|
|
2792
|
+
if (response2.alerts && response2.alerts.length > 0) {
|
|
2793
|
+
await interactWithUser(userInteraction, {
|
|
2794
|
+
type: 'message-alert',
|
|
2795
|
+
title: 'Authentication Alert',
|
|
2796
|
+
fields: {},
|
|
2797
|
+
alerts: response2.alerts,
|
|
2798
|
+
});
|
|
2852
2799
|
}
|
|
2800
|
+
return context;
|
|
2801
|
+
}
|
|
2802
|
+
catch (error) {
|
|
2803
|
+
await alertUser(userInteraction, 'Authentication Failed', {
|
|
2804
|
+
type: 'error',
|
|
2805
|
+
messageCode: 'GENERIC_ERROR',
|
|
2806
|
+
message: `We're having a problem to authenticate rigth now.`,
|
|
2807
|
+
messageParams: {}
|
|
2808
|
+
}).catch(() => { });
|
|
2809
|
+
throw error;
|
|
2853
2810
|
}
|
|
2854
2811
|
}
|
|
2855
|
-
|
|
2856
|
-
|
|
2857
|
-
const
|
|
2858
|
-
|
|
2859
|
-
return buf.toString("base64");
|
|
2860
|
-
} : (bytes) => {
|
|
2861
|
-
// Web
|
|
2862
|
-
const buf = new Uint8Array(bytes);
|
|
2863
|
-
crypto.getRandomValues(buf);
|
|
2864
|
-
return btoa(String.fromCharCode.apply(null, buf));
|
|
2865
|
-
};
|
|
2866
|
-
|
|
2867
|
-
/** Verifies that given primary key is valid.
|
|
2868
|
-
* The reason we narrow validity for valid keys are twofold:
|
|
2869
|
-
* 1: Make sure to only support types that can be used as an object index in DBKeyMutationSet.
|
|
2870
|
-
* For example, ArrayBuffer cannot be used (gives "object ArrayBuffer") but Uint8Array can be
|
|
2871
|
-
* used (gives comma-delimited list of included bytes).
|
|
2872
|
-
* 2: Avoid using plain numbers and Dates as keys when they are synced, as they are not globally unique.
|
|
2873
|
-
* 3: Since we store the key as a VARCHAR server side in current version, try not promote types that stringifies to become very long server side.
|
|
2874
|
-
*
|
|
2875
|
-
* @param id
|
|
2876
|
-
* @returns
|
|
2877
|
-
*/
|
|
2878
|
-
function isValidSyncableID(id) {
|
|
2879
|
-
if (typeof id === "string")
|
|
2880
|
-
return true;
|
|
2881
|
-
//if (validIDTypes[toStringTag(id)]) return true;
|
|
2882
|
-
//if (Array.isArray(id)) return id.every((part) => isValidSyncableID(part));
|
|
2883
|
-
if (Array.isArray(id) && id.some(key => isValidSyncableID(key)) && id.every(isValidSyncableIDPart))
|
|
2884
|
-
return true;
|
|
2885
|
-
return false;
|
|
2886
|
-
}
|
|
2887
|
-
/** Verifies that given key part is valid.
|
|
2888
|
-
* 1: Make sure that arrays of this types are stringified correclty and works with DBKeyMutationSet.
|
|
2889
|
-
* For example, ArrayBuffer cannot be used (gives "object ArrayBuffer") but Uint8Array can be
|
|
2890
|
-
* used (gives comma-delimited list of included bytes).
|
|
2891
|
-
* 2: Since we store the key as a VARCHAR server side in current version, try not promote types that stringifies to become very long server side.
|
|
2892
|
-
*/
|
|
2893
|
-
function isValidSyncableIDPart(part) {
|
|
2894
|
-
return typeof part === "string" || typeof part === "number" || Array.isArray(part) && part.every(isValidSyncableIDPart);
|
|
2812
|
+
function spkiToPEM(keydata) {
|
|
2813
|
+
const keydataB64 = b64encode(keydata);
|
|
2814
|
+
const keydataB64Pem = formatAsPem(keydataB64);
|
|
2815
|
+
return keydataB64Pem;
|
|
2895
2816
|
}
|
|
2896
|
-
function
|
|
2897
|
-
|
|
2817
|
+
function formatAsPem(str) {
|
|
2818
|
+
let finalString = '-----BEGIN PUBLIC KEY-----\n';
|
|
2819
|
+
while (str.length > 0) {
|
|
2820
|
+
finalString += str.substring(0, 64) + '\n';
|
|
2821
|
+
str = str.substring(64);
|
|
2822
|
+
}
|
|
2823
|
+
finalString = finalString + '-----END PUBLIC KEY-----';
|
|
2824
|
+
return finalString;
|
|
2898
2825
|
}
|
|
2899
2826
|
|
|
2900
|
-
|
|
2901
|
-
|
|
2902
|
-
|
|
2903
|
-
|
|
2904
|
-
|
|
2905
|
-
|
|
2906
|
-
|
|
2907
|
-
|
|
2908
|
-
|
|
2909
|
-
|
|
2910
|
-
|
|
2911
|
-
|
|
2912
|
-
|
|
2913
|
-
|
|
2914
|
-
|
|
2915
|
-
|
|
2916
|
-
|
|
2917
|
-
|
|
2918
|
-
|
|
2919
|
-
|
|
2920
|
-
|
|
2921
|
-
|
|
2922
|
-
|
|
2923
|
-
|
|
2924
|
-
|
|
2827
|
+
const { toString: toStr } = {};
|
|
2828
|
+
function getToStringTag(val) {
|
|
2829
|
+
return toStr.call(val).slice(8, -1);
|
|
2830
|
+
}
|
|
2831
|
+
function escapeDollarProps(value) {
|
|
2832
|
+
const keys = Object.keys(value);
|
|
2833
|
+
let dollarKeys = null;
|
|
2834
|
+
for (let i = 0, l = keys.length; i < l; ++i) {
|
|
2835
|
+
if (keys[i][0] === "$") {
|
|
2836
|
+
dollarKeys = dollarKeys || [];
|
|
2837
|
+
dollarKeys.push(keys[i]);
|
|
2838
|
+
}
|
|
2839
|
+
}
|
|
2840
|
+
if (!dollarKeys)
|
|
2841
|
+
return value;
|
|
2842
|
+
const clone = { ...value };
|
|
2843
|
+
for (const k of dollarKeys) {
|
|
2844
|
+
delete clone[k];
|
|
2845
|
+
clone["$" + k] = value[k];
|
|
2846
|
+
}
|
|
2847
|
+
return clone;
|
|
2848
|
+
}
|
|
2849
|
+
const ObjectDef = {
|
|
2850
|
+
replace: escapeDollarProps,
|
|
2851
|
+
};
|
|
2852
|
+
function TypesonSimplified(...typeDefsInputs) {
|
|
2853
|
+
const typeDefs = typeDefsInputs.reduce((p, c) => ({ ...p, ...c }), typeDefsInputs.reduce((p, c) => ({ ...c, ...p }), {}));
|
|
2854
|
+
const protoMap = new WeakMap();
|
|
2855
|
+
return {
|
|
2856
|
+
stringify(value, alternateChannel, space) {
|
|
2857
|
+
const json = JSON.stringify(value, function (key) {
|
|
2858
|
+
const realVal = this[key];
|
|
2859
|
+
const typeDef = getTypeDef(realVal);
|
|
2860
|
+
return typeDef
|
|
2861
|
+
? typeDef.replace(realVal, alternateChannel, typeDefs)
|
|
2862
|
+
: realVal;
|
|
2863
|
+
}, space);
|
|
2864
|
+
return json;
|
|
2865
|
+
},
|
|
2866
|
+
parse(tson, alternateChannel) {
|
|
2867
|
+
let parent = null;
|
|
2868
|
+
let unescapeParentKeys = [];
|
|
2869
|
+
return JSON.parse(tson, function (key, value) {
|
|
2870
|
+
//
|
|
2871
|
+
// Parent Part
|
|
2872
|
+
//
|
|
2873
|
+
const type = value === null || value === void 0 ? void 0 : value.$t;
|
|
2874
|
+
if (type) {
|
|
2875
|
+
const typeDef = typeDefs[type];
|
|
2876
|
+
value = typeDef
|
|
2877
|
+
? typeDef.revive(value, alternateChannel, typeDefs)
|
|
2878
|
+
: value;
|
|
2925
2879
|
}
|
|
2926
|
-
|
|
2927
|
-
|
|
2928
|
-
|
|
2929
|
-
|
|
2930
|
-
|
|
2931
|
-
|
|
2932
|
-
|
|
2933
|
-
|
|
2934
|
-
|
|
2935
|
-
// No action.
|
|
2936
|
-
break;
|
|
2937
|
-
case "upd":
|
|
2938
|
-
// Adjust existing update with additional updates
|
|
2939
|
-
Object.assign(entry.mod, changeSpec); // May work for deep props as well - new keys is added later, right? Does the prop order persist along TSON and all? But it will not be 100% when combined with some server code (seach for "address.city": "Stockholm" comment)
|
|
2940
|
-
break;
|
|
2880
|
+
if (value === parent) {
|
|
2881
|
+
// Do what the kid told us to
|
|
2882
|
+
if (unescapeParentKeys.length > 0) {
|
|
2883
|
+
// Unescape dollar props
|
|
2884
|
+
value = { ...value };
|
|
2885
|
+
for (const k of unescapeParentKeys) {
|
|
2886
|
+
value[k.substr(1)] = value[k];
|
|
2887
|
+
delete value[k];
|
|
2888
|
+
}
|
|
2941
2889
|
}
|
|
2890
|
+
unescapeParentKeys = [];
|
|
2891
|
+
return value;
|
|
2942
2892
|
}
|
|
2893
|
+
//
|
|
2894
|
+
// Child part
|
|
2895
|
+
//
|
|
2896
|
+
if (key[0] === "$" && key !== "$t") {
|
|
2897
|
+
parent = this;
|
|
2898
|
+
unescapeParentKeys.push(key);
|
|
2899
|
+
}
|
|
2900
|
+
return value;
|
|
2943
2901
|
});
|
|
2944
|
-
|
|
2902
|
+
},
|
|
2903
|
+
};
|
|
2904
|
+
function getTypeDef(realVal) {
|
|
2905
|
+
const type = typeof realVal;
|
|
2906
|
+
switch (typeof realVal) {
|
|
2907
|
+
case "object":
|
|
2908
|
+
case "function": {
|
|
2909
|
+
// "object", "function", null
|
|
2910
|
+
if (realVal === null)
|
|
2911
|
+
return null;
|
|
2912
|
+
const proto = Object.getPrototypeOf(realVal);
|
|
2913
|
+
if (!proto)
|
|
2914
|
+
return ObjectDef;
|
|
2915
|
+
let typeDef = protoMap.get(proto);
|
|
2916
|
+
if (typeDef !== undefined)
|
|
2917
|
+
return typeDef; // Null counts to! So the caching of Array.prototype also counts.
|
|
2918
|
+
const toStringTag = getToStringTag(realVal);
|
|
2919
|
+
const entry = Object.entries(typeDefs).find(([typeName, typeDef]) => { var _a, _b; return (_b = (_a = typeDef === null || typeDef === void 0 ? void 0 : typeDef.test) === null || _a === void 0 ? void 0 : _a.call(typeDef, realVal, toStringTag)) !== null && _b !== void 0 ? _b : typeName === toStringTag; });
|
|
2920
|
+
typeDef = entry === null || entry === void 0 ? void 0 : entry[1];
|
|
2921
|
+
if (!typeDef) {
|
|
2922
|
+
typeDef = Array.isArray(realVal)
|
|
2923
|
+
? null
|
|
2924
|
+
: typeof realVal === "function"
|
|
2925
|
+
? typeDefs.function || null
|
|
2926
|
+
: ObjectDef;
|
|
2927
|
+
}
|
|
2928
|
+
protoMap.set(proto, typeDef);
|
|
2929
|
+
return typeDef;
|
|
2930
|
+
}
|
|
2931
|
+
default:
|
|
2932
|
+
return typeDefs[type];
|
|
2945
2933
|
}
|
|
2946
|
-
case "delete":
|
|
2947
|
-
op.keys.forEach((key) => {
|
|
2948
|
-
tbl[key] = {
|
|
2949
|
-
type: "del",
|
|
2950
|
-
};
|
|
2951
|
-
});
|
|
2952
|
-
break;
|
|
2953
2934
|
}
|
|
2954
|
-
return target;
|
|
2955
2935
|
}
|
|
2956
2936
|
|
|
2957
|
-
|
|
2958
|
-
|
|
2959
|
-
|
|
2960
|
-
|
|
2961
|
-
|
|
2962
|
-
|
|
2963
|
-
|
|
2937
|
+
const BisonBinaryTypes = {
|
|
2938
|
+
Blob: {
|
|
2939
|
+
test: (blob, toStringTag) => toStringTag === "Blob",
|
|
2940
|
+
replace: (blob, altChannel) => {
|
|
2941
|
+
const i = altChannel.length;
|
|
2942
|
+
altChannel.push(blob);
|
|
2943
|
+
return {
|
|
2944
|
+
$t: "Blob",
|
|
2945
|
+
mimeType: blob.type,
|
|
2946
|
+
i,
|
|
2947
|
+
};
|
|
2948
|
+
},
|
|
2949
|
+
revive: ({ i, mimeType }, altChannel) => new Blob([altChannel[i]], { type: mimeType }),
|
|
2950
|
+
},
|
|
2951
|
+
};
|
|
2964
2952
|
|
|
-
-
-
-
-
-
-
-
-
-
-
-
-                        case 'ups':
-                            delete target[table][key];
-                            break;
-                        case 'del':
-                            // Leave delete operation.
-                            // (Don't resurrect objects unintentionally (using tx(get, put) pattern locally))
-                            break;
-                        case 'upd':
-                            delete target[table][key];
-                            break;
-                    }
-                }
-            }
-            break;
-        case 'del':
-            (_b = target[table]) === null || _b === void 0 ? true : delete _b[key];
-            break;
-        case 'upd': {
-            const targetMut = (_c = target[table]) === null || _c === void 0 ? void 0 : _c[key];
-            if (targetMut) {
-                switch (targetMut.type) {
-                    case 'ups':
-                        // Adjust the server upsert with locally updated values.
-                        for (const [propPath, value] of Object.entries(mut.mod)) {
-                            setByKeyPath(targetMut.val, propPath, value);
-                        }
-                        break;
-                    case 'del':
-                        // Leave delete.
-                        break;
-                    case 'upd':
-                        // Remove the local update props from the server update mutation.
-                        for (const propPath of Object.keys(mut.mod)) {
-                            delete targetMut.mod[propPath];
-                        }
-                        break;
-                }
-            }
-            break;
-        }
+var numberDef = {
+    number: {
+        replace: (num) => {
+            switch (true) {
+                case isNaN(num):
+                    return { $t: "number", v: "NaN" };
+                case num === Infinity:
+                    return { $t: "number", v: "Infinity" };
+                case num === -Infinity:
+                    return { $t: "number", v: "-Infinity" };
+                default:
+                    return num;
             }
-    }
+        },
+        revive: ({ v }) => Number(v),
+    },
+};
+
+const bigIntDef = {
+    bigint: {
+        replace: (realVal) => {
+            return { $t: "bigint", v: "" + realVal };
+        },
+        revive: (obj) => BigInt(obj.v),
+    },
+};
+
+var DateDef = {
+    Date: {
+        replace: (date) => ({
+            $t: "Date",
+            v: isNaN(date.getTime()) ? "NaN" : date.toISOString(),
+        }),
+        revive: ({ v }) => new Date(v === "NaN" ? NaN : Date.parse(v)),
+    },
+};
+
+var SetDef = {
+    Set: {
+        replace: (set) => ({
+            $t: "Set",
+            v: Array.from(set.entries()),
+        }),
+        revive: ({ v }) => new Set(v),
+    },
+};
+
+var MapDef = {
+    Map: {
+        replace: (map) => ({
+            $t: "Map",
+            v: Array.from(map.entries()),
+        }),
+        revive: ({ v }) => new Map(v),
+    },
+};
+
+const _global = typeof globalThis !== "undefined"
+    ? globalThis
+    : typeof self !== "undefined"
+        ? self
+        : typeof global === "undefined"
+            ? global
+            : undefined;
+
+var TypedArraysDefs = [
+    "Int8Array",
+    "Uint8Array",
+    "Uint8ClampedArray",
+    "Int16Array",
+    "Uint16Array",
+    "Int32Array",
+    "Uint32Array",
+    "Float32Array",
+    "Float64Array",
+    "DataView",
+    "BigInt64Array",
+    "BigUint64Array",
+].reduce((specs, typeName) => ({
+    ...specs,
+    [typeName]: {
+        // Replace passes the typed array into $t, buffer so that
+        // the ArrayBuffer typedef takes care of further handling of the buffer:
+        // {$t:"Uint8Array",buffer:{$t:"ArrayBuffer",idx:0}}
+        // CHANGED ABOVE! Now shortcutting that for a more sparse format where the typed
+        // arrays contain the b64 property directly.
+        replace: (a, _, typeDefs) => {
+            const result = {
+                $t: typeName,
+                v: typeDefs.ArrayBuffer.replace(a.byteOffset === 0 && a.byteLength === a.buffer.byteLength
+                    ? a.buffer
+                    : a.buffer.slice(a.byteOffset, a.byteOffset + a.byteLength), _, typeDefs).v,
+            };
+            return result;
+        },
+        revive: ({ v }, _, typeDefs) => {
+            const TypedArray = _global[typeName];
+            return (TypedArray &&
+                new TypedArray(typeDefs.ArrayBuffer.revive({ v }, _, typeDefs)));
+        },
+    },
+}), {});
+
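The sparse format described in the comment above means a typed array serializes to a single object whose v property is the lex-encoded base64 of its backing bytes. A minimal sketch of the expected output, assuming the TSON instance defined later in this bundle; the "0G83" value is hand-derived from b64encode of bytes [1,2,3] ("AQID") run through the remapping tables below, and is illustrative only:

// Illustrative only: how a Uint8Array should serialize under the sparse typed-array format.
const json = TSON.stringify({ a: new Uint8Array([1, 2, 3]) });
// => '{"a":{"$t":"Uint8Array","v":"0G83"}}'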
+function b64LexEncode(b) {
+    return b64ToLex(b64encode(b));
+}
+function b64LexDecode(b64Lex) {
+    return b64decode(lexToB64(b64Lex));
+}
+function b64ToLex(base64) {
+    var encoded = "";
+    for (var i = 0, length = base64.length; i < length; i++) {
+        encoded += ENCODE_TABLE[base64[i]];
     }
+    return encoded;
+}
+function lexToB64(base64lex) {
+    // only accept string input
+    if (typeof base64lex !== "string") {
+        throw new Error("invalid decoder input: " + base64lex);
+    }
+    var base64 = "";
+    for (var i = 0, length = base64lex.length; i < length; i++) {
+        base64 += DECODE_TABLE[base64lex[i]];
+    }
+    return base64;
+}
+const DECODE_TABLE = {
+    "-": "=",
+    "0": "A",
+    "1": "B",
+    "2": "C",
+    "3": "D",
+    "4": "E",
+    "5": "F",
+    "6": "G",
+    "7": "H",
+    "8": "I",
+    "9": "J",
+    A: "K",
+    B: "L",
+    C: "M",
+    D: "N",
+    E: "O",
+    F: "P",
+    G: "Q",
+    H: "R",
+    I: "S",
+    J: "T",
+    K: "U",
+    L: "V",
+    M: "W",
+    N: "X",
+    O: "Y",
+    P: "Z",
+    Q: "a",
+    R: "b",
+    S: "c",
+    T: "d",
+    U: "e",
+    V: "f",
+    W: "g",
+    X: "h",
+    Y: "i",
+    Z: "j",
+    _: "k",
+    a: "l",
+    b: "m",
+    c: "n",
+    d: "o",
+    e: "p",
+    f: "q",
+    g: "r",
+    h: "s",
+    i: "t",
+    j: "u",
+    k: "v",
+    l: "w",
+    m: "x",
+    n: "y",
+    o: "z",
+    p: "0",
+    q: "1",
+    r: "2",
+    s: "3",
+    t: "4",
+    u: "5",
+    v: "6",
+    w: "7",
+    x: "8",
+    y: "9",
+    z: "+",
+    "|": "/",
+};
+const ENCODE_TABLE = {};
+for (const c of Object.keys(DECODE_TABLE)) {
+    ENCODE_TABLE[DECODE_TABLE[c]] = c;
 }
 
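The reason for the remapping tables: the standard base64 alphabet does not sort in digit-value order (for example '/' is ASCII 47, below 'A' at 65), so base64-encoded binary keys would not sort like the underlying bytes. The remapped alphabet, '-' < '0'-'9' < 'A'-'Z' < '_' < 'a'-'z' < '|', restores byte order. A small sketch using the functions above (btoa stands in for the bundle's b64encode):

const lo = btoa(String.fromCharCode(0x00)); // "AA=="
const hi = btoa(String.fromCharCode(0xff)); // "/w=="
console.log(hi < lo);                     // true - plain base64 sorts 0xff before 0x00
console.log(b64ToLex(hi) > b64ToLex(lo)); // true - "|7--" > "00--", matching byte order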
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    const result = [];
-    for (const [table, ops] of Object.entries(map)) {
-        const resultEntry = {
-            table,
-            muts: [],
-        };
-        for (const [optype, muts] of Object.entries(ops)) {
-            switch (optype) {
-                case "ups": {
-                    const op = {
-                        type: "upsert",
-                        keys: muts.map(mut => mut.key),
-                        values: muts.map(mut => mut.val),
-                        txid
-                    };
-                    resultEntry.muts.push(op);
-                    break;
-                }
-                case "upd": {
-                    const op = {
-                        type: "update",
-                        keys: muts.map(mut => mut.key),
-                        changeSpecs: muts.map(mut => mut.mod),
-                        txid
-                    };
-                    resultEntry.muts.push(op);
-                    break;
-                }
-                case "del": {
-                    const op = {
-                        type: "delete",
-                        keys: muts.map(mut => mut.key),
-                        txid,
-                    };
-                    resultEntry.muts.push(op);
-                    break;
-                }
-            }
-        }
-        result.push(resultEntry);
+var ArrayBufferDef = {
+    ArrayBuffer: {
+        replace: (ab) => ({
+            $t: "ArrayBuffer",
+            v: b64LexEncode(ab),
+        }),
+        revive: ({ v }) => {
+            const ba = b64LexDecode(v);
+            return ba.buffer.byteLength === ba.byteLength
+                ? ba.buffer
+                : ba.buffer.slice(ba.byteOffset, ba.byteOffset + ba.byteLength);
+        },
+    },
+};
+
+class FakeBlob {
+    constructor(buf, type) {
+        this.buf = buf;
+        this.type = type;
     }
-    return result;
 }
 
-    function
-    const
-
-
-
+function readBlobSync(b) {
+    const req = new XMLHttpRequest();
+    req.overrideMimeType("text/plain; charset=x-user-defined");
+    req.open("GET", URL.createObjectURL(b), false); // Sync
+    req.send();
+    if (req.status !== 200 && req.status !== 0) {
+        throw new Error("Bad Blob access: " + req.status);
+    }
+    return req.responseText;
 }
 
-
-
-
-
-    const inserts = await Promise.all(tablesToSyncify.map(async (table) => {
-        const { extractKey } = table.core.schema.primaryKey;
-        if (!extractKey)
-            return { table: table.name, muts: [] }; // Outbound tables are not synced.
-        const dexieCloudTableSchema = schema[table.name];
-        const query = (dexieCloudTableSchema === null || dexieCloudTableSchema === void 0 ? void 0 : dexieCloudTableSchema.generatedGlobalId)
-            ? table.filter((item) => !ignoredRealms.has(item.realmId || "") && isValidSyncableID(extractKey(item)))
-            : table.filter((item) => !ignoredRealms.has(item.realmId || "") && isValidAtID(extractKey(item), dexieCloudTableSchema === null || dexieCloudTableSchema === void 0 ? void 0 : dexieCloudTableSchema.idPrefix));
-        const unsyncedObjects = await query.toArray();
-        if (unsyncedObjects.length > 0) {
-            const mut = {
-                type: "insert",
-                values: unsyncedObjects,
-                keys: unsyncedObjects.map(extractKey),
-                userId: currentUser.userId,
-            };
-            return {
-                table: table.name,
-                muts: [mut],
-            };
-        }
-        else {
-            return {
-                table: table.name,
-                muts: []
-            };
-        }
-    }));
-    return inserts.filter(op => op.muts.length > 0);
-}
+function string2ArrayBuffer(str) {
+    const array = new Uint8Array(str.length);
+    for (let i = 0; i < str.length; ++i) {
+        array[i] = str.charCodeAt(i); // & 0xff;
     }
-    return
+    return array.buffer;
 }
 
-
-
-
-
-
-
+var BlobDef = {
+    Blob: {
+        test: (blob, toStringTag) => toStringTag === "Blob" || blob instanceof FakeBlob,
+        replace: (blob) => ({
+            $t: "Blob",
+            v: blob instanceof FakeBlob
+                ? b64encode(blob.buf)
+                : b64encode(string2ArrayBuffer(readBlobSync(blob))),
+            type: blob.type,
+        }),
+        revive: ({ type, v }) => {
+            const ab = b64decode(v);
+            return typeof Blob !== undefined
+                ? new Blob([ab])
+                : new FakeBlob(ab.buffer, type);
+        },
+    },
+};
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        alerts,
-        fields: {}
-    });
-}
-async function promptForEmail(userInteraction, title, emailHint) {
-    let email = emailHint || '';
-    while (!email || !/^[\w-\.]+@([\w-]+\.)+[\w-]{2,10}$/.test(email)) {
-        email = (await interactWithUser(userInteraction, {
-            type: 'email',
-            title,
-            alerts: email
-                ? [
-                    {
-                        type: 'error',
-                        messageCode: 'INVALID_EMAIL',
-                        message: 'Please enter a valid email address',
-                        messageParams: {},
-                    },
-                ]
-                : [],
-            fields: {
-                email: {
-                    type: 'email',
-                    placeholder: 'you@somedomain.com',
-                },
-            },
-        })).email;
-    }
-    return email;
-}
-async function promptForOTP(userInteraction, email, alert) {
-    const alerts = [
-        {
-            type: 'info',
-            messageCode: 'OTP_SENT',
-            message: `A One-Time password has been sent to {email}`,
-            messageParams: { email },
+const builtin = {
+    ...numberDef,
+    ...bigIntDef,
+    ...DateDef,
+    ...SetDef,
+    ...MapDef,
+    ...TypedArraysDefs,
+    ...ArrayBufferDef,
+    ...BlobDef,
+};
+
+function Bison(...typeDefsInputs) {
+    const tson = TypesonSimplified(builtin, BisonBinaryTypes, ...typeDefsInputs);
+    return {
+        toBinary(value) {
+            const [blob, json] = this.stringify(value);
+            const lenBuf = new ArrayBuffer(4);
+            new DataView(lenBuf).setUint32(0, blob.size);
+            return new Blob([lenBuf, blob, json]);
+        },
+        stringify(value) {
+            const binaries = [];
+            const json = tson.stringify(value, binaries);
+            const blob = new Blob(binaries.map((b) => {
+                const lenBuf = new ArrayBuffer(4);
+                new DataView(lenBuf).setUint32(0, "byteLength" in b ? b.byteLength : b.size);
+                return new Blob([lenBuf, b]);
+            }));
+            return [blob, json];
         },
-
-
-
-
-
-
-
-
-
-
-
-
-
-    },
+        async parse(json, binData) {
+            let pos = 0;
+            const arrayBuffers = [];
+            const buf = await readBlobBinary(binData);
+            const view = new DataView(buf);
+            while (pos < buf.byteLength) {
+                const len = view.getUint32(pos);
+                pos += 4;
+                const ab = buf.slice(pos, pos + len);
+                pos += len;
+                arrayBuffers.push(ab);
+            }
+            return tson.parse(json, arrayBuffers);
         },
-
-
+        async fromBinary(blob) {
+            const len = new DataView(await readBlobBinary(blob.slice(0, 4))).getUint32(0);
+            const binData = blob.slice(4, len + 4);
+            const json = await readBlob(blob.slice(len + 4));
+            return await this.parse(json, binData);
+        },
+    };
 }
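The Bison container frames everything with 4-byte big-endian lengths: toBinary() emits [u32 side-channel length][side channel][JSON text], and the side channel is itself a sequence of u32-length-prefixed binaries that parse() walks back out. A hypothetical round-trip with the factory above:

const bison = Bison();
const original = { name: "photo", data: new Blob(["raw bytes"], { type: "image/png" }) };
const packed = bison.toBinary(original); // one Blob: [len][binaries][json]
bison.fromBinary(packed).then((revived) => {
  // revived.data is a Blob again, rebuilt from the binary side channel
  console.log(revived.name, revived.data.type); // "photo" "image/png"
});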
-
-
-
-
-
-
-
-    const expTime = (_a = accessTokenExpiration === null || accessTokenExpiration === void 0 ? void 0 : accessTokenExpiration.getTime()) !== null && _a !== void 0 ? _a : Infinity;
-    if (expTime > Date.now()) {
-        return accessToken;
-    }
-    if (!refreshToken) {
-        throw new Error(`Refresh token missing`);
-    }
-    const refreshExpTime = (_b = refreshTokenExpiration === null || refreshTokenExpiration === void 0 ? void 0 : refreshTokenExpiration.getTime()) !== null && _b !== void 0 ? _b : Infinity;
-    if (refreshExpTime <= Date.now()) {
-        throw new Error(`Refresh token has expired`);
-    }
-    const refreshedLogin = await refreshAccessToken(db.cloud.options.databaseUrl, currentUser);
-    await db.table('$logins').update(claims.sub, {
-        accessToken: refreshedLogin.accessToken,
-        accessTokenExpiration: refreshedLogin.accessTokenExpiration,
+function readBlob(blob) {
+    return new Promise((resolve, reject) => {
+        const reader = new FileReader();
+        reader.onabort = (ev) => reject(new Error("file read aborted"));
+        reader.onerror = (ev) => reject(ev.target.error);
+        reader.onload = (ev) => resolve(ev.target.result);
+        reader.readAsText(blob);
     });
-    return refreshedLogin.accessToken;
-}
-async function authenticate(url, context, fetchToken, userInteraction, hints) {
-    if (context.accessToken &&
-        context.accessTokenExpiration.getTime() > Date.now()) {
-        return context;
-    }
-    else if (context.refreshToken &&
-        (!context.refreshTokenExpiration ||
-            context.refreshTokenExpiration.getTime() > Date.now())) {
-        return await refreshAccessToken(url, context);
-    }
-    else {
-        return await userAuthenticate(context, fetchToken, userInteraction, hints);
-    }
 }
-
-
-
-
-
-
-
-    const textEncoder = new TextEncoder();
-    const data = textEncoder.encode(login.refreshToken + time_stamp);
-    const binarySignature = await crypto.subtle.sign(signing_algorithm, login.nonExportablePrivateKey, data);
-    const signature = b64encode(binarySignature);
-    const tokenRequest = {
-        grant_type: 'refresh_token',
-        refresh_token: login.refreshToken,
-        scopes: ['ACCESS_DB'],
-        signature,
-        signing_algorithm,
-        time_stamp,
-    };
-    const res = await fetch(`${url}/token`, {
-        body: JSON.stringify(tokenRequest),
-        method: 'post',
-        headers: { 'Content-Type': 'application/json' },
-        mode: 'cors',
+function readBlobBinary(blob) {
+    return new Promise((resolve, reject) => {
+        const reader = new FileReader();
+        reader.onabort = (ev) => reject(new Error("file read aborted"));
+        reader.onerror = (ev) => reject(ev.target.error);
+        reader.onload = (ev) => resolve(ev.target.result);
+        reader.readAsArrayBuffer(blob);
     });
-    if (res.status !== 200)
-        throw new Error(`RefreshToken: Status ${res.status} from ${url}/token`);
-    const response = await res.json();
-    login.accessToken = response.accessToken;
-    login.accessTokenExpiration = response.accessTokenExpiration
-        ? new Date(response.accessTokenExpiration)
-        : undefined;
-    return login;
 }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    }
-    return context;
-}
-catch (error) {
-    await alertUser(userInteraction, 'Authentication Failed', {
-        type: 'error',
-        messageCode: 'GENERIC_ERROR',
-        message: `We're having a problem to authenticate right now.`,
-        messageParams: {}
-    }).catch(() => { });
-    throw error;
+
+/** The undefined type is not part of builtin but can be manually added.
+ * The reason for supporting undefined is if the following object should be revived correctly:
+ *
+ *    {foo: undefined}
+ *
+ * Without including this typedef, the revived object would just be {}.
+ * If including this typedef, the revived object would be {foo: undefined}.
+ */
+var undefinedDef = {
+    undefined: {
+        replace: () => {
+        },
+        revive: () => undefined,
+    },
+};
+
+// Since server revisions are stored in bigints, we need to handle clients without
+// bigint support to not fail when serverRevision is passed over to client.
+// We need to not fail when reviving it and we need to somehow store the information.
+// Since the revived version will later on be put into indexedDB we have another
+// issue: When reading it back from indexedDB we will get a poco object that we
+// cannot replace correctly when sending it to server. So we will also need
+// to do an explicit workaround in the protocol where a bigint is supported.
+// The workaround should be there regardless if browser supports BigInt or not, because
+// the serverRev might have been stored in IDB before the browser was upgraded to support bigint.
+//
+// if (typeof serverRev.rev !== "bigint")
+//   if (hasBigIntSupport)
+//     serverRev.rev = bigIntDef.bigint.revive(server.rev)
+//   else
+//     serverRev.rev = new FakeBigInt(server.rev)
+const hasBigIntSupport = typeof BigInt(0) === 'bigint';
+class FakeBigInt {
+    constructor(value) {
+        this.v = value;
     }
-
-
-    const keydataB64 = b64encode(keydata);
-    const keydataB64Pem = formatAsPem(keydataB64);
-    return keydataB64Pem;
-}
-function formatAsPem(str) {
-    let finalString = '-----BEGIN PUBLIC KEY-----\n';
-    while (str.length > 0) {
-        finalString += str.substring(0, 64) + '\n';
-        str = str.substring(64);
+    toString() {
+        return this.v;
     }
-    finalString = finalString + '-----END PUBLIC KEY-----';
-    return finalString;
 }
+const defs = {
+    ...undefinedDef,
+    ...(hasBigIntSupport
+        ? {}
+        : {
+            bigint: {
+                test: (val) => val instanceof FakeBigInt,
+                replace: (fakeBigInt) => {
+                    return {
+                        $t: 'bigint',
+                        ...fakeBigInt
+                    };
+                },
+                revive: ({ v, }) => new FakeBigInt(v)
+            }
+        })
+};
+const TSON = TypesonSimplified(builtin, defs);
+const BISON = Bison(defs);
 
 class HttpError extends Error {
     constructor(res, message) {
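A sketch of what the FakeBigInt fallback buys, assuming TSON.parse accepts JSON text the same way tson.parse does in the Bison code above (the revision value is made up):

const json = '{"serverRevision":{"$t":"bigint","v":"383"}}';
const revived = TSON.parse(json);
// With BigInt support:    revived.serverRevision === 383n (via the builtin bigIntDef)
// Without BigInt support: revived.serverRevision is a FakeBigInt whose toString() is "383",
// so it survives IndexedDB storage and is re-tagged as {$t:'bigint'} when sent back.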
@@ -3380,6 +3364,7 @@
         headers.Authorization = `Bearer ${accessToken}`;
     }
     const syncRequest = {
+        v: 2,
         dbID: syncState === null || syncState === void 0 ? void 0 : syncState.remoteDbId,
         clientIdentity,
         schema: schema || {},
@@ -3506,8 +3491,8 @@
 
 function getLatestRevisionsPerTable(clientChangeSet, lastRevisions = {}) {
     for (const { table, muts } of clientChangeSet) {
-        const lastRev = muts.length > 0 ? muts[muts.length - 1].rev
-        lastRevisions[table] = lastRev;
+        const lastRev = muts.length > 0 ? muts[muts.length - 1].rev : null;
+        lastRevisions[table] = lastRev || lastRevisions[table] || 0;
     }
     return lastRevisions;
 }
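The rewritten fallback chain keeps a table's previous revision when a changeset carries no new mutations for it. A worked example with made-up revision values:

// Hypothetical call: 'todos' has a new mutation, 'lists' does not.
getLatestRevisionsPerTable(
  [{ table: "todos", muts: [{ rev: 42 }] }, { table: "lists", muts: [] }],
  { lists: 40 }
);
// => { todos: 42, lists: 40 } - and a table never seen before would get 0.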
@@ -3828,16 +3813,35 @@
     const readyToServe = new rxjs.BehaviorSubject(true);
     const event = new rxjs.BehaviorSubject(null);
     let isWorking = false;
+    let loopWarning = 0;
+    let loopDetection = [0, 0, 0, 0, 0, 0, 0, 0, 0, Date.now()];
     event.subscribe(async () => {
         if (isWorking)
             return;
         if (queue.length > 0) {
             isWorking = true;
+            loopDetection.shift();
+            loopDetection.push(Date.now());
             readyToServe.next(false);
             try {
                 await consumeQueue();
             }
             finally {
+                if (loopDetection[loopDetection.length - 1] - loopDetection[0] < 10000) {
+                    // Ten loops within 10 seconds. Slow down!
+                    if (Date.now() - loopWarning < 5000) {
+                        // Last time we did this, we ended up here too. Wait for a minute.
+                        console.warn(`Slowing down websocket loop for one minute`);
+                        loopWarning = Date.now() + 60000;
+                        await new Promise(resolve => setTimeout(resolve, 60000));
+                    }
+                    else {
+                        // This is a one-time event. Just pause 10 seconds.
+                        console.warn(`Slowing down websocket loop for 10 seconds`);
+                        loopWarning = Date.now() + 10000;
+                        await new Promise(resolve => setTimeout(resolve, 10000));
+                    }
+                }
                 isWorking = false;
                 readyToServe.next(true);
             }
@@ -3853,6 +3857,9 @@
         const msg = queue.shift();
         try {
             console.debug('processing msg', msg);
+            // If the sync worker or service worker is syncing, wait until they're done.
+            // There's no need to have two channels at the same time - even though it wouldn't
+            // be a problem - this is an optimization.
             await db.cloud.syncState
                 .pipe(filter(({ phase }) => phase === 'in-sync' || phase === 'error'), take(1))
                 .toPromise();
@@ -3876,26 +3883,23 @@
                 // in turn will lead to that connectWebSocket.ts will reconnect the socket with the
                 // new token. So we don't need to do anything more here.
                 break;
-            case 'rev':
-                if (!(persistedSyncState === null || persistedSyncState === void 0 ? void 0 : persistedSyncState.serverRevision) ||
-                    compareBigInts(persistedSyncState.serverRevision, msg.rev) < 0) {
-                    triggerSync(db, "pull");
-                }
-                break;
             case 'realm-added':
                 if (!((_a = persistedSyncState === null || persistedSyncState === void 0 ? void 0 : persistedSyncState.realms) === null || _a === void 0 ? void 0 : _a.includes(msg.realm))) {
-                    triggerSync(db,
+                    triggerSync(db, 'pull');
                 }
                 break;
             case 'realm-removed':
                 if ((_b = persistedSyncState === null || persistedSyncState === void 0 ? void 0 : persistedSyncState.realms) === null || _b === void 0 ? void 0 : _b.includes(msg.realm)) {
-                    triggerSync(db,
+                    triggerSync(db, 'pull');
                 }
                 break;
+            case 'realms-changed':
+                triggerSync(db, 'pull');
+                break;
             case 'changes':
                 console.debug('changes');
                 if (((_c = db.cloud.syncState.value) === null || _c === void 0 ? void 0 : _c.phase) === 'error') {
-                    triggerSync(db,
+                    triggerSync(db, 'pull');
                     break;
                 }
                 await db.transaction('rw', db.dx.tables, async (tx) => {
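For orientation, a reconstructed sketch of the message kinds this switch now handles (field names taken from the cases in this and the next hunk; the removed 'rev' case is presumably obsolete because the v2 connect parameters let the server decide when a pull is needed):

// { type: 'realm-added', realm }   -> pull-sync unless the realm is already known locally
// { type: 'realm-removed', realm } -> pull-sync if the realm was known locally
// { type: 'realms-changed' }       -> pull-sync unconditionally (new in this version)
// { type: 'changes', baseRev, realmSetHash, changes } -> applied in place (next hunk)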
@@ -3918,16 +3922,34 @@
                         return; // Initial sync must have taken place - otherwise, ignore this.
                     }
                     // Verify again in ACID tx that we're on same server revision.
-                    if (
+                    if (msg.baseRev !== syncState.serverRevision) {
                         console.debug(`baseRev (${msg.baseRev}) differs from our serverRevision in syncState (${syncState.serverRevision})`);
+                        // Should we trigger a sync now? No. This is a normal case
+                        // when another local peer (such as the SW or a websocket channel on other tab) has
+                        // updated syncState from new server information but we are not aware yet. It would
+                        // be unnecessary to do a sync in that case. Instead, the caller of this consumeQueue()
+                        // function will do readyToServe.next(true) right after this return, which will lead
+                        // to a "ready" message being sent to server with the new accurate serverRev we have,
+                        // so that the next message indeed will be correct.
+                        if (typeof msg.baseRev === 'string' && // v2 format
+                            (typeof syncState.serverRevision === 'bigint' || // v1 format
+                                typeof syncState.serverRevision === 'object') // v1 format old browser
+                        ) {
+                            // The reason for the diff seems to be that server has migrated the revision format.
+                            // Do a full sync to update revision format.
+                            // If we don't do a sync request now, we could get stuck in an endless loop.
+                            triggerSync(db, 'pull');
+                        }
                         return; // Ignore message
                     }
                     // Verify also that the message is based on the exact same set of realms
-                    const ourRealmSetHash = await Dexie__default['default'].waitFor(
+                    const ourRealmSetHash = await Dexie__default['default'].waitFor(
+                    // Keep TX in non-IDB work
+                    computeRealmSetHash(syncState));
                     console.debug('ourRealmSetHash', ourRealmSetHash);
                     if (ourRealmSetHash !== msg.realmSetHash) {
                         console.debug('not same realmSetHash', msg.realmSetHash);
-                        triggerSync(db,
+                        triggerSync(db, 'pull');
                         // The message isn't based on the same realms.
                         // Trigger a sync instead to resolve all things up.
                         return;
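The v1-vs-v2 probe above boils down to a type check on the two revision values. A standalone restatement (hypothetical helper, not part of the bundle):

// v2 servers send string revisions; a pre-migration client still holds a bigint,
// or a FakeBigInt/plain object on browsers without BigInt support.
function revisionFormatMigrated(msgBaseRev, localServerRevision) {
  return typeof msgBaseRev === 'string' &&
    (typeof localServerRevision === 'bigint' || typeof localServerRevision === 'object');
}
// revisionFormatMigrated("0000017f", 383n) === true -> do one full pull sync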
@@ -3939,12 +3961,14 @@
                         clientChanges = await listClientChanges(mutationTables, db);
                         console.debug('msg queue: client changes', clientChanges);
                     }
-
-
-
-
-
-
+                    if (msg.changes.length > 0) {
+                        const filteredChanges = filterServerChangesThroughAddedClientChanges(msg.changes, clientChanges);
+                        //
+                        // apply server changes
+                        //
+                        console.debug('applying filtered server changes', filteredChanges);
+                        await applyServerChanges(filteredChanges, db);
+                    }
                     // Update latest revisions per table in case there are unsynced changes
                     // This can be a real case in future when we allow non-eager sync.
                     // And it can actually be realistic now also, but very rare.
@@ -5025,18 +5049,19 @@
 const CLIENT_PING_INTERVAL = 30000;
 const FAIL_RETRY_WAIT_TIME = 60000;
 class WSObservable extends rxjs.Observable {
-    constructor(databaseUrl, rev, clientIdentity, messageProducer, webSocketStatus, token, tokenExpiration) {
-        super((subscriber) => new WSConnection(databaseUrl, rev, clientIdentity, token, tokenExpiration, subscriber, messageProducer, webSocketStatus));
+    constructor(databaseUrl, rev, realmSetHash, clientIdentity, messageProducer, webSocketStatus, token, tokenExpiration) {
+        super((subscriber) => new WSConnection(databaseUrl, rev, realmSetHash, clientIdentity, token, tokenExpiration, subscriber, messageProducer, webSocketStatus));
     }
 }
 let counter = 0;
 class WSConnection extends rxjs.Subscription {
-    constructor(databaseUrl, rev, clientIdentity, token, tokenExpiration, subscriber, messageProducer, webSocketStatus) {
+    constructor(databaseUrl, rev, realmSetHash, clientIdentity, token, tokenExpiration, subscriber, messageProducer, webSocketStatus) {
         super(() => this.teardown());
         this.id = ++counter;
         console.debug('New WebSocket Connection', this.id, token ? 'authorized' : 'unauthorized');
         this.databaseUrl = databaseUrl;
         this.rev = rev;
+        this.realmSetHash = realmSetHash;
         this.clientIdentity = clientIdentity;
         this.token = token;
         this.tokenExpiration = tokenExpiration;
@@ -5141,7 +5166,9 @@
         const searchParams = new URLSearchParams();
         if (this.subscriber.closed)
             return;
+        searchParams.set('v', "2");
         searchParams.set('rev', this.rev);
+        searchParams.set('realmsHash', this.realmSetHash);
         searchParams.set('clientId', this.clientIdentity);
         if (this.token) {
             searchParams.set('token', this.token);
@@ -5235,12 +5262,12 @@
     function createObservable() {
         return db.cloud.persistedSyncState.pipe(filter(syncState => syncState === null || syncState === void 0 ? void 0 : syncState.serverRevision), // Don't connect before there's no initial sync performed.
         take(1), // Don't continue waking up whenever syncState change
-        switchMap(() => db.cloud.currentUser), switchMap((userLogin) => userIsReallyActive.pipe(map((isActive) =>
+        switchMap((syncState) => db.cloud.currentUser.pipe(map(userLogin => [userLogin, syncState]))), switchMap(([userLogin, syncState]) => userIsReallyActive.pipe(map((isActive) => [isActive ? userLogin : null, syncState]))), switchMap(async ([userLogin, syncState]) => [userLogin, await computeRealmSetHash(syncState)]), switchMap(([userLogin, realmSetHash]) =>
         // Let server end query changes from last entry of same client-ID and forward.
         // If no new entries, server won't bother the client. If new entries, server sends only those
         // and the baseRev of the last from same client-ID.
         userLogin
-            ? new WSObservable(db.cloud.options.databaseUrl, db.cloud.persistedSyncState.value.serverRevision, db.cloud.persistedSyncState.value.clientIdentity, messageProducer, db.cloud.webSocketStatus, userLogin.accessToken, userLogin.accessTokenExpiration)
+            ? new WSObservable(db.cloud.options.databaseUrl, db.cloud.persistedSyncState.value.serverRevision, realmSetHash, db.cloud.persistedSyncState.value.clientIdentity, messageProducer, db.cloud.webSocketStatus, userLogin.accessToken, userLogin.accessTokenExpiration)
             : rxjs.from([])), catchError((error) => {
             if ((error === null || error === void 0 ? void 0 : error.name) === 'TokenExpiredError') {
                 console.debug('WebSocket observable: Token expired. Refreshing token...');
@@ -5360,22 +5387,26 @@
     //let periodicSyncHandler: ((event: Event) => void) | null = null;
     let cancelToken = { cancelled: false };
     function syncAndRetry(purpose, retryNum = 1) {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        // Use setTimeout() to get onto a clean stack and
+        // break free from possible active transaction:
+        setTimeout(() => {
+            syncIfPossible(db, cloudOptions, cloudSchema, {
+                cancelToken,
+                retryImmediatelyOnFetchError: true,
+                purpose,
+            }).catch((e) => {
+                console.error('error in syncIfPossible()', e);
+                if (cancelToken.cancelled) {
+                    stop();
+                }
+                else if (retryNum < 3) {
+                    // Mimic service worker sync event: retry 3 times
+                    // * first retry after 5 minutes
+                    // * second retry 15 minutes later
+                    setTimeout(() => syncAndRetry(purpose, retryNum + 1), [0, 5, 15][retryNum] * MINUTES);
+                }
+            });
+        }, 0);
     }
     const start = () => {
         // Sync eagerly whenever a change has happened (+ initially when there's no syncState yet)
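The retry cadence encoded in [0, 5, 15][retryNum] * MINUTES, spelled out (hypothetical helper; MINUTES is the bundle's minute constant):

const retryDelay = (retryNum) => [0, 5, 15][retryNum] * MINUTES;
// retryDelay(1) ->  5 minutes before the first retry
// retryDelay(2) -> 15 minutes before the second retry
// retryNum reaches 3 -> no further retries are scheduled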
@@ -5383,7 +5414,7 @@
         console.debug('Starting LocalSyncWorker', db.localSyncEvent['id']);
         localSyncEventSubscription = db.localSyncEvent.subscribe(({ purpose }) => {
             try {
-                syncAndRetry(purpose ||
+                syncAndRetry(purpose || 'pull');
             }
             catch (err) {
                 console.error('What-the....', err);
@@ -5961,6 +5992,7 @@
         // Avoid race conditions.
         managedDBs.delete(db.name);
     }
+    console.debug(`Dexie Cloud SW: Closing Dexie instance for ${dbName}`);
     db.dx.close();
     return false;
 }
@@ -6007,7 +6039,7 @@
     // Mimic background sync behavior - retry in X minutes on failure,
     // but with a shorter timeout and a higher number of retries.
     const syncAndRetry = (num = 1) => {
-        return syncDB(dbName, event.data.
+        return syncDB(dbName, event.data.purpose || "pull").catch(async (e) => {
             if (num === 3)
                 throw e;
             await sleep(60000); // 1 minute