dexie-cloud-addon 4.3.9 → 4.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/modern/DexieCloudAPI.d.ts +17 -0
- package/dist/modern/DexieCloudOptions.d.ts +9 -0
- package/dist/modern/TSON.d.ts +0 -6
- package/dist/modern/db/DexieCloudDB.d.ts +2 -0
- package/dist/modern/db/entities/EntityCommon.d.ts +1 -0
- package/dist/modern/dexie-cloud-addon.js +3692 -2356
- package/dist/modern/dexie-cloud-addon.js.map +1 -1
- package/dist/modern/dexie-cloud-addon.min.js +1 -1
- package/dist/modern/dexie-cloud-addon.min.js.map +1 -1
- package/dist/modern/middlewares/blobResolveMiddleware.d.ts +21 -0
- package/dist/modern/service-worker.js +2377 -1041
- package/dist/modern/service-worker.js.map +1 -1
- package/dist/modern/service-worker.min.js +1 -1
- package/dist/modern/service-worker.min.js.map +1 -1
- package/dist/modern/sync/BlobDownloadTracker.d.ts +33 -0
- package/dist/modern/sync/BlobSavingQueue.d.ts +35 -0
- package/dist/modern/sync/blobOffloading.d.ts +37 -0
- package/dist/modern/sync/blobProgress.d.ts +25 -0
- package/dist/modern/sync/blobResolve.d.ts +85 -0
- package/dist/modern/sync/eagerBlobDownloader.d.ts +20 -0
- package/dist/modern/sync/loadCachedAccessToken.d.ts +2 -0
- package/dist/modern/types/DXCAlert.d.ts +6 -0
- package/dist/modern/types/TXExpandos.d.ts +1 -0
- package/dist/umd/dexie-cloud-addon.js +3778 -2442
- package/dist/umd/dexie-cloud-addon.js.map +1 -1
- package/dist/umd/dexie-cloud-addon.min.js +1 -1
- package/dist/umd/dexie-cloud-addon.min.js.map +1 -1
- package/dist/umd/service-worker.js +2262 -926
- package/dist/umd/service-worker.js.map +1 -1
- package/dist/umd/service-worker.min.js +1 -1
- package/dist/umd/service-worker.min.js.map +1 -1
- package/package.json +5 -6
- package/dist/modern/default-ui/AuthProviderButton.d.ts +0 -21
- package/dist/modern/default-ui/ProviderSelectionDialog.d.ts +0 -7
- package/dist/modern/default-ui/SelectDialog.d.ts +0 -10
- package/dist/modern/dexie-cloud-addon.min.js.gz +0 -0
- package/dist/umd/DISABLE_SERVICEWORKER_STRATEGY.d.ts +0 -1
- package/dist/umd/DXCWebSocketStatus.d.ts +0 -1
- package/dist/umd/DexieCloudAPI.d.ts +0 -75
- package/dist/umd/DexieCloudOptions.d.ts +0 -27
- package/dist/umd/DexieCloudSyncOptions.d.ts +0 -4
- package/dist/umd/DexieCloudTable.d.ts +0 -18
- package/dist/umd/InvalidLicenseError.d.ts +0 -5
- package/dist/umd/Invite.d.ts +0 -8
- package/dist/umd/PermissionChecker.d.ts +0 -15
- package/dist/umd/TSON.d.ts +0 -17
- package/dist/umd/WSObservable.d.ts +0 -72
- package/dist/umd/associate.d.ts +0 -1
- package/dist/umd/authentication/AuthPersistedContext.d.ts +0 -9
- package/dist/umd/authentication/TokenErrorResponseError.d.ts +0 -10
- package/dist/umd/authentication/TokenExpiredError.d.ts +0 -3
- package/dist/umd/authentication/UNAUTHORIZED_USER.d.ts +0 -2
- package/dist/umd/authentication/authenticate.d.ts +0 -13
- package/dist/umd/authentication/interactWithUser.d.ts +0 -21
- package/dist/umd/authentication/login.d.ts +0 -3
- package/dist/umd/authentication/logout.d.ts +0 -5
- package/dist/umd/authentication/otpFetchTokenCallback.d.ts +0 -3
- package/dist/umd/authentication/setCurrentUser.d.ts +0 -14
- package/dist/umd/authentication/waitUntil.d.ts +0 -3
- package/dist/umd/computeSyncState.d.ts +0 -4
- package/dist/umd/createSharedValueObservable.d.ts +0 -3
- package/dist/umd/currentUserEmitter.d.ts +0 -3
- package/dist/umd/db/DexieCloudDB.d.ts +0 -61
- package/dist/umd/db/entities/BaseRevisionMapEntry.d.ts +0 -5
- package/dist/umd/db/entities/EntityCommon.d.ts +0 -5
- package/dist/umd/db/entities/GuardedJob.d.ts +0 -5
- package/dist/umd/db/entities/Member.d.ts +0 -19
- package/dist/umd/db/entities/PersistedSyncState.d.ts +0 -22
- package/dist/umd/db/entities/Realm.d.ts +0 -14
- package/dist/umd/db/entities/Role.d.ts +0 -11
- package/dist/umd/db/entities/UserLogin.d.ts +0 -23
- package/dist/umd/default-ui/Dialog.d.ts +0 -5
- package/dist/umd/default-ui/LoginDialog.d.ts +0 -3
- package/dist/umd/default-ui/Styles.d.ts +0 -3
- package/dist/umd/default-ui/index.d.ts +0 -24
- package/dist/umd/define-ydoc-trigger.d.ts +0 -3
- package/dist/umd/dexie-cloud-addon.d.ts +0 -3
- package/dist/umd/dexie-cloud-addon.js.gz +0 -0
- package/dist/umd/dexie-cloud-addon.min.js.gz +0 -0
- package/dist/umd/dexie-cloud-client.d.ts +0 -23
- package/dist/umd/errors/HttpError.d.ts +0 -5
- package/dist/umd/extend-dexie-interface.d.ts +0 -23
- package/dist/umd/getGlobalRolesObservable.d.ts +0 -5
- package/dist/umd/getInternalAccessControlObservable.d.ts +0 -12
- package/dist/umd/getInvitesObservable.d.ts +0 -23
- package/dist/umd/getPermissionsLookupObservable.d.ts +0 -16
- package/dist/umd/getTiedRealmId.d.ts +0 -2
- package/dist/umd/helpers/BroadcastedAndLocalEvent.d.ts +0 -8
- package/dist/umd/helpers/CancelToken.d.ts +0 -4
- package/dist/umd/helpers/IS_SERVICE_WORKER.d.ts +0 -1
- package/dist/umd/helpers/SWBroadcastChannel.d.ts +0 -12
- package/dist/umd/helpers/allSettled.d.ts +0 -1
- package/dist/umd/helpers/bulkUpdate.d.ts +0 -4
- package/dist/umd/helpers/computeRealmSetHash.d.ts +0 -2
- package/dist/umd/helpers/date-constants.d.ts +0 -5
- package/dist/umd/helpers/flatten.d.ts +0 -1
- package/dist/umd/helpers/getMutationTable.d.ts +0 -1
- package/dist/umd/helpers/getSyncableTables.d.ts +0 -4
- package/dist/umd/helpers/getTableFromMutationTable.d.ts +0 -1
- package/dist/umd/helpers/makeArray.d.ts +0 -1
- package/dist/umd/helpers/randomString.d.ts +0 -1
- package/dist/umd/helpers/resolveText.d.ts +0 -16
- package/dist/umd/helpers/throwVersionIncrementNeeded.d.ts +0 -1
- package/dist/umd/helpers/visibilityState.d.ts +0 -1
- package/dist/umd/isEagerSyncDisabled.d.ts +0 -2
- package/dist/umd/isFirefox.d.ts +0 -1
- package/dist/umd/isSafari.d.ts +0 -2
- package/dist/umd/mapValueObservable.d.ts +0 -5
- package/dist/umd/mergePermissions.d.ts +0 -2
- package/dist/umd/middleware-helpers/guardedTable.d.ts +0 -11
- package/dist/umd/middleware-helpers/idGenerationHelpers.d.ts +0 -18
- package/dist/umd/middlewares/createIdGenerationMiddleware.d.ts +0 -3
- package/dist/umd/middlewares/createImplicitPropSetterMiddleware.d.ts +0 -3
- package/dist/umd/middlewares/createMutationTrackingMiddleware.d.ts +0 -17
- package/dist/umd/middlewares/outstandingTransaction.d.ts +0 -4
- package/dist/umd/overrideParseStoresSpec.d.ts +0 -4
- package/dist/umd/performInitialSync.d.ts +0 -4
- package/dist/umd/permissions.d.ts +0 -9
- package/dist/umd/prodLog.d.ts +0 -9
- package/dist/umd/service-worker.d.ts +0 -1
- package/dist/umd/sync/DEXIE_CLOUD_SYNCER_ID.d.ts +0 -1
- package/dist/umd/sync/LocalSyncWorker.d.ts +0 -7
- package/dist/umd/sync/SyncRequiredError.d.ts +0 -3
- package/dist/umd/sync/applyServerChanges.d.ts +0 -3
- package/dist/umd/sync/connectWebSocket.d.ts +0 -2
- package/dist/umd/sync/encodeIdsForServer.d.ts +0 -4
- package/dist/umd/sync/extractRealm.d.ts +0 -2
- package/dist/umd/sync/getLatestRevisionsPerTable.d.ts +0 -6
- package/dist/umd/sync/getTablesToSyncify.d.ts +0 -3
- package/dist/umd/sync/isOnline.d.ts +0 -1
- package/dist/umd/sync/isSyncNeeded.d.ts +0 -2
- package/dist/umd/sync/listClientChanges.d.ts +0 -9
- package/dist/umd/sync/listSyncifiedChanges.d.ts +0 -5
- package/dist/umd/sync/messageConsumerIsReady.d.ts +0 -2
- package/dist/umd/sync/messagesFromServerQueue.d.ts +0 -8
- package/dist/umd/sync/modifyLocalObjectsWithNewUserId.d.ts +0 -4
- package/dist/umd/sync/myId.d.ts +0 -1
- package/dist/umd/sync/numUnsyncedMutations.d.ts +0 -2
- package/dist/umd/sync/old_startSyncingClientChanges.d.ts +0 -39
- package/dist/umd/sync/performGuardedJob.d.ts +0 -2
- package/dist/umd/sync/ratelimit.d.ts +0 -3
- package/dist/umd/sync/registerSyncEvent.d.ts +0 -3
- package/dist/umd/sync/sync.d.ts +0 -15
- package/dist/umd/sync/syncIfPossible.d.ts +0 -5
- package/dist/umd/sync/syncWithServer.d.ts +0 -6
- package/dist/umd/sync/triggerSync.d.ts +0 -2
- package/dist/umd/sync/updateBaseRevs.d.ts +0 -5
- package/dist/umd/types/DXCAlert.d.ts +0 -25
- package/dist/umd/types/DXCInputField.d.ts +0 -11
- package/dist/umd/types/DXCUserInteraction.d.ts +0 -93
- package/dist/umd/types/NewIdOptions.d.ts +0 -3
- package/dist/umd/types/SWMessageEvent.d.ts +0 -3
- package/dist/umd/types/SWSyncEvent.d.ts +0 -4
- package/dist/umd/types/SyncState.d.ts +0 -9
- package/dist/umd/types/TXExpandos.d.ts +0 -11
- package/dist/umd/updateSchemaFromOptions.d.ts +0 -3
- package/dist/umd/userIsActive.d.ts +0 -7
- package/dist/umd/verifyConfig.d.ts +0 -2
- package/dist/umd/verifySchema.d.ts +0 -2
- package/dist/umd/yjs/YDexieCloudSyncState.d.ts +0 -3
- package/dist/umd/yjs/YTable.d.ts +0 -3
- package/dist/umd/yjs/applyYMessages.d.ts +0 -9
- package/dist/umd/yjs/awareness.d.ts +0 -3
- package/dist/umd/yjs/createYClientUpdateObservable.d.ts +0 -4
- package/dist/umd/yjs/createYHandler.d.ts +0 -2
- package/dist/umd/yjs/downloadYDocsFromServer.d.ts +0 -3
- package/dist/umd/yjs/getUpdatesTable.d.ts +0 -3
- package/dist/umd/yjs/listUpdatesSince.d.ts +0 -3
- package/dist/umd/yjs/listYClientMessagesAndStateVector.d.ts +0 -26
- package/dist/umd/yjs/reopenDocSignal.d.ts +0 -10
- package/dist/umd/yjs/updateYSyncStates.d.ts +0 -6
- /package/dist/{umd/authentication/currentUserObservable.d.ts → modern/sync/blobOffloading.test.d.ts} +0 -0
|
@@ -8,7 +8,7 @@
|
|
|
8
8
|
*
|
|
9
9
|
* ==========================================================================
|
|
10
10
|
*
|
|
11
|
-
* Version 4.
|
|
11
|
+
* Version 4.4.0, Wed Mar 18 2026
|
|
12
12
|
*
|
|
13
13
|
* https://dexie.org
|
|
14
14
|
*
|
|
@@ -16,9 +16,9 @@
|
|
|
16
16
|
*
|
|
17
17
|
*/
|
|
18
18
|
|
|
19
|
-
import Dexie, { PropModification, cmp,
|
|
20
|
-
import { Observable, BehaviorSubject, firstValueFrom, Subject, from, filter as filter$1, of, fromEvent, merge, switchMap as switchMap$1, tap as tap$1, mergeMap, Subscription, throwError,
|
|
21
|
-
import { filter, switchMap, delay, distinctUntilChanged,
|
|
19
|
+
import Dexie, { PropModification, cmp, liveQuery, RangeSet } from 'dexie';
|
|
20
|
+
import { Observable, BehaviorSubject, firstValueFrom, Subject, from, combineLatest, timer, filter as filter$1, of, fromEvent, merge, switchMap as switchMap$1, tap as tap$1, mergeMap, Subscription, throwError, map as map$1, share as share$1, startWith as startWith$1 } from 'rxjs';
|
|
21
|
+
import { filter, map, share, switchMap, delay, distinctUntilChanged, tap, take, catchError, debounceTime, startWith, skip } from 'rxjs/operators';
|
|
22
22
|
import { Encoder, writeVarString, writeAny, writeVarUint8Array, writeBigUint64, toUint8Array } from 'lib0/encoding';
|
|
23
23
|
import { Decoder, readVarString, readAny, readVarUint8Array, readBigUint64, hasContent, readUint8 } from 'lib0/decoding';
|
|
24
24
|
import * as Y from 'yjs';
|
|
@@ -325,6 +325,409 @@ function triggerSync(db, purpose) {
|
|
|
325
325
|
}
|
|
326
326
|
}
|
|
327
327
|
|
|
328
|
+
const { toString: toStr } = {};
|
|
329
|
+
function getToStringTag(val) {
|
|
330
|
+
return toStr.call(val).slice(8, -1);
|
|
331
|
+
}
|
|
332
|
+
function escapeDollarProps(value) {
|
|
333
|
+
const keys = Object.keys(value);
|
|
334
|
+
let dollarKeys = null;
|
|
335
|
+
for (let i = 0, l = keys.length; i < l; ++i) {
|
|
336
|
+
if (keys[i][0] === "$") {
|
|
337
|
+
dollarKeys = dollarKeys || [];
|
|
338
|
+
dollarKeys.push(keys[i]);
|
|
339
|
+
}
|
|
340
|
+
}
|
|
341
|
+
if (!dollarKeys)
|
|
342
|
+
return value;
|
|
343
|
+
const clone = { ...value };
|
|
344
|
+
for (const k of dollarKeys) {
|
|
345
|
+
delete clone[k];
|
|
346
|
+
}
|
|
347
|
+
for (const k of dollarKeys) {
|
|
348
|
+
clone["$" + k] = value[k];
|
|
349
|
+
}
|
|
350
|
+
return clone;
|
|
351
|
+
}
|
|
352
|
+
const ObjectDef = {
|
|
353
|
+
replace: escapeDollarProps,
|
|
354
|
+
};
|
|
355
|
+
function TypesonSimplified(...typeDefsInputs) {
|
|
356
|
+
const typeDefs = typeDefsInputs.reduce((p, c) => ({ ...p, ...c }), typeDefsInputs.reduce((p, c) => ({ ...c, ...p }), {}));
|
|
357
|
+
const protoMap = new WeakMap();
|
|
358
|
+
return {
|
|
359
|
+
stringify(value, alternateChannel, space) {
|
|
360
|
+
const json = JSON.stringify(value, function (key) {
|
|
361
|
+
const realVal = this[key];
|
|
362
|
+
const typeDef = getTypeDef(realVal);
|
|
363
|
+
return typeDef
|
|
364
|
+
? typeDef.replace(realVal, alternateChannel, typeDefs)
|
|
365
|
+
: realVal;
|
|
366
|
+
}, space);
|
|
367
|
+
return json;
|
|
368
|
+
},
|
|
369
|
+
parse(tson, alternateChannel) {
|
|
370
|
+
const stack = [];
|
|
371
|
+
return JSON.parse(tson, function (key, value) {
|
|
372
|
+
//
|
|
373
|
+
// Parent Part
|
|
374
|
+
//
|
|
375
|
+
const type = value?.$t;
|
|
376
|
+
if (type) {
|
|
377
|
+
const typeDef = typeDefs[type];
|
|
378
|
+
value = typeDef
|
|
379
|
+
? typeDef.revive(value, alternateChannel, typeDefs)
|
|
380
|
+
: value;
|
|
381
|
+
}
|
|
382
|
+
let top = stack[stack.length - 1];
|
|
383
|
+
if (top && top[0] === value) {
|
|
384
|
+
// Do what the kid told us to
|
|
385
|
+
// Unescape dollar props
|
|
386
|
+
value = { ...value };
|
|
387
|
+
// Delete keys that children wanted us to delete
|
|
388
|
+
for (const k of top[1])
|
|
389
|
+
delete value[k];
|
|
390
|
+
// Set keys that children wanted us to set
|
|
391
|
+
for (const [k, v] of Object.entries(top[2])) {
|
|
392
|
+
value[k] = v;
|
|
393
|
+
}
|
|
394
|
+
stack.pop();
|
|
395
|
+
}
|
|
396
|
+
//
|
|
397
|
+
// Child part
|
|
398
|
+
//
|
|
399
|
+
if (value === undefined || (key[0] === "$" && key !== "$t")) {
|
|
400
|
+
top = stack[stack.length - 1];
|
|
401
|
+
let deletes;
|
|
402
|
+
let mods;
|
|
403
|
+
if (top && top[0] === this) {
|
|
404
|
+
deletes = top[1];
|
|
405
|
+
mods = top[2];
|
|
406
|
+
}
|
|
407
|
+
else {
|
|
408
|
+
stack.push([this, (deletes = []), (mods = {})]);
|
|
409
|
+
}
|
|
410
|
+
if (key[0] === "$" && key !== "$t") {
|
|
411
|
+
// Unescape props (also preserves undefined if this is a combo)
|
|
412
|
+
deletes.push(key);
|
|
413
|
+
mods[key.substr(1)] = value;
|
|
414
|
+
}
|
|
415
|
+
else {
|
|
416
|
+
// Preserve undefined
|
|
417
|
+
mods[key] = undefined;
|
|
418
|
+
}
|
|
419
|
+
}
|
|
420
|
+
return value;
|
|
421
|
+
});
|
|
422
|
+
},
|
|
423
|
+
};
|
|
424
|
+
function getTypeDef(realVal) {
|
|
425
|
+
const type = typeof realVal;
|
|
426
|
+
switch (typeof realVal) {
|
|
427
|
+
case "object":
|
|
428
|
+
case "function": {
|
|
429
|
+
// "object", "function", null
|
|
430
|
+
if (realVal === null)
|
|
431
|
+
return null;
|
|
432
|
+
const proto = Object.getPrototypeOf(realVal);
|
|
433
|
+
if (!proto)
|
|
434
|
+
return ObjectDef;
|
|
435
|
+
let typeDef = protoMap.get(proto);
|
|
436
|
+
if (typeDef !== undefined)
|
|
437
|
+
return typeDef; // Null counts to! So the caching of Array.prototype also counts.
|
|
438
|
+
const toStringTag = getToStringTag(realVal);
|
|
439
|
+
const entry = Object.entries(typeDefs).find(([typeName, typeDef]) => typeDef?.test?.(realVal, toStringTag) ?? typeName === toStringTag);
|
|
440
|
+
typeDef = entry?.[1];
|
|
441
|
+
if (!typeDef) {
|
|
442
|
+
typeDef = Array.isArray(realVal)
|
|
443
|
+
? null
|
|
444
|
+
: typeof realVal === "function"
|
|
445
|
+
? typeDefs.function || null
|
|
446
|
+
: ObjectDef;
|
|
447
|
+
}
|
|
448
|
+
protoMap.set(proto, typeDef);
|
|
449
|
+
return typeDef;
|
|
450
|
+
}
|
|
451
|
+
default:
|
|
452
|
+
return typeDefs[type];
|
|
453
|
+
}
|
|
454
|
+
}
|
|
455
|
+
}
|
|
456
|
+
|
|
457
|
+
class FakeBlob {
|
|
458
|
+
constructor(buf, type) {
|
|
459
|
+
this.buf = buf;
|
|
460
|
+
this.type = type;
|
|
461
|
+
}
|
|
462
|
+
}
|
|
463
|
+
|
|
464
|
+
/**
|
|
465
|
+
* TSONRef - Reference to a blob stored separately from the main data.
|
|
466
|
+
*
|
|
467
|
+
* When TSON parses data containing blob references, it creates TSONRef
|
|
468
|
+
* instances instead of the actual binary data. The client can then
|
|
469
|
+
* resolve these refs asynchronously.
|
|
470
|
+
*
|
|
471
|
+
* @example
|
|
472
|
+
* ```typescript
|
|
473
|
+
* // Configure resolver
|
|
474
|
+
* TSONRef.resolver = async (ref) => {
|
|
475
|
+
* const response = await fetch(`/blob/${ref.ref}`);
|
|
476
|
+
* return response.arrayBuffer();
|
|
477
|
+
* };
|
|
478
|
+
*
|
|
479
|
+
* // After parsing, resolve all refs in an object
|
|
480
|
+
* await resolveAllRefs(data);
|
|
481
|
+
* ```
|
|
482
|
+
*/
|
|
483
|
+
var _a;
|
|
484
|
+
/** Symbol for type checking TSONRef instances */
|
|
485
|
+
const TSON_REF_SYMBOL = Symbol.for('TSONRef');
|
|
486
|
+
/**
|
|
487
|
+
* TSONRef represents a reference to binary data stored as a blob.
|
|
488
|
+
*/
|
|
489
|
+
class TSONRef {
|
|
490
|
+
constructor(
|
|
491
|
+
/** Original TSON type: 'ArrayBuffer', 'Blob', 'Uint8Array', etc */
|
|
492
|
+
type,
|
|
493
|
+
/** Blob reference ID (UUID) */
|
|
494
|
+
ref,
|
|
495
|
+
/** Size in bytes */
|
|
496
|
+
size,
|
|
497
|
+
/** Content-Type (for Blob type) */
|
|
498
|
+
contentType) {
|
|
499
|
+
this.type = type;
|
|
500
|
+
this.ref = ref;
|
|
501
|
+
this.size = size;
|
|
502
|
+
this.contentType = contentType;
|
|
503
|
+
/** Type brand for runtime identification */
|
|
504
|
+
this[_a] = true;
|
|
505
|
+
Object.freeze(this);
|
|
506
|
+
}
|
|
507
|
+
/**
|
|
508
|
+
* Resolve this reference to actual data.
|
|
509
|
+
* Requires TSONRef.resolver to be configured.
|
|
510
|
+
*/
|
|
511
|
+
async resolve() {
|
|
512
|
+
if (!TSONRef.resolver) {
|
|
513
|
+
throw new Error('TSONRef.resolver not configured. ' +
|
|
514
|
+
'Set TSONRef.resolver to a function that fetches blobs.');
|
|
515
|
+
}
|
|
516
|
+
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
517
|
+
const data = await TSONRef.resolver(this);
|
|
518
|
+
return this.reconstruct(data);
|
|
519
|
+
}
|
|
520
|
+
/**
|
|
521
|
+
* Reconstruct the original type from ArrayBuffer.
|
|
522
|
+
* Validates byte alignment for TypedArrays that require it.
|
|
523
|
+
*/
|
|
524
|
+
reconstruct(data) {
|
|
525
|
+
// Helper to validate alignment for multi-byte TypedArrays
|
|
526
|
+
const validateAlignment = (bytesPerElement, typeName) => {
|
|
527
|
+
if (data.byteLength % bytesPerElement !== 0) {
|
|
528
|
+
throw new RangeError(`Buffer length ${data.byteLength} is not aligned to ${bytesPerElement} bytes for ${typeName}`);
|
|
529
|
+
}
|
|
530
|
+
};
|
|
531
|
+
switch (this.type) {
|
|
532
|
+
case 'ArrayBuffer':
|
|
533
|
+
return data;
|
|
534
|
+
case 'Uint8Array':
|
|
535
|
+
return new Uint8Array(data);
|
|
536
|
+
case 'Blob':
|
|
537
|
+
return new Blob([data], { type: this.contentType });
|
|
538
|
+
// Handle other TypedArrays with alignment validation
|
|
539
|
+
case 'Int8Array':
|
|
540
|
+
return new Int8Array(data);
|
|
541
|
+
case 'Uint8ClampedArray':
|
|
542
|
+
return new Uint8ClampedArray(data);
|
|
543
|
+
case 'Int16Array':
|
|
544
|
+
validateAlignment(2, 'Int16Array');
|
|
545
|
+
return new Int16Array(data);
|
|
546
|
+
case 'Uint16Array':
|
|
547
|
+
validateAlignment(2, 'Uint16Array');
|
|
548
|
+
return new Uint16Array(data);
|
|
549
|
+
case 'Int32Array':
|
|
550
|
+
validateAlignment(4, 'Int32Array');
|
|
551
|
+
return new Int32Array(data);
|
|
552
|
+
case 'Uint32Array':
|
|
553
|
+
validateAlignment(4, 'Uint32Array');
|
|
554
|
+
return new Uint32Array(data);
|
|
555
|
+
case 'Float32Array':
|
|
556
|
+
validateAlignment(4, 'Float32Array');
|
|
557
|
+
return new Float32Array(data);
|
|
558
|
+
case 'Float64Array':
|
|
559
|
+
validateAlignment(8, 'Float64Array');
|
|
560
|
+
return new Float64Array(data);
|
|
561
|
+
case 'BigInt64Array':
|
|
562
|
+
validateAlignment(8, 'BigInt64Array');
|
|
563
|
+
return new BigInt64Array(data);
|
|
564
|
+
case 'BigUint64Array':
|
|
565
|
+
validateAlignment(8, 'BigUint64Array');
|
|
566
|
+
return new BigUint64Array(data);
|
|
567
|
+
default:
|
|
568
|
+
console.warn(`Unknown TSONRef type: ${this.type}, returning ArrayBuffer`);
|
|
569
|
+
return data;
|
|
570
|
+
}
|
|
571
|
+
}
|
|
572
|
+
/**
|
|
573
|
+
* Check if a value is a TSONRef instance.
|
|
574
|
+
*/
|
|
575
|
+
static isTSONRef(value) {
|
|
576
|
+
return (value !== null &&
|
|
577
|
+
typeof value === 'object' &&
|
|
578
|
+
TSON_REF_SYMBOL in value &&
|
|
579
|
+
value[TSON_REF_SYMBOL] === true);
|
|
580
|
+
}
|
|
581
|
+
/**
|
|
582
|
+
* Check if a value is TSONRef serialized data (has $ref).
|
|
583
|
+
*/
|
|
584
|
+
static isTSONRefData(value) {
|
|
585
|
+
return (value !== null &&
|
|
586
|
+
typeof value === 'object' &&
|
|
587
|
+
'$ref' in value &&
|
|
588
|
+
'$t' in value &&
|
|
589
|
+
'$size' in value);
|
|
590
|
+
}
|
|
591
|
+
/**
|
|
592
|
+
* Create TSONRef from serialized data.
|
|
593
|
+
*/
|
|
594
|
+
static fromData(data) {
|
|
595
|
+
return new TSONRef(data.$t, data.$ref, data.$size, data.$ct);
|
|
596
|
+
}
|
|
597
|
+
/**
|
|
598
|
+
* Serialize to JSON-compatible format.
|
|
599
|
+
*/
|
|
600
|
+
toJSON() {
|
|
601
|
+
const result = {
|
|
602
|
+
$t: this.type,
|
|
603
|
+
$ref: this.ref,
|
|
604
|
+
$size: this.size,
|
|
605
|
+
};
|
|
606
|
+
if (this.contentType) {
|
|
607
|
+
result.$ct = this.contentType;
|
|
608
|
+
}
|
|
609
|
+
return result;
|
|
610
|
+
}
|
|
611
|
+
}
|
|
612
|
+
_a = TSON_REF_SYMBOL;
|
|
613
|
+
/** Symbol for type checking */
|
|
614
|
+
TSONRef.TYPE_SYMBOL = TSON_REF_SYMBOL;
|
|
615
|
+
/** Global resolver function - must be configured before resolving */
|
|
616
|
+
TSONRef.resolver = null;
|
|
617
|
+
|
|
618
|
+
function readBlobSync(b) {
|
|
619
|
+
const req = new XMLHttpRequest();
|
|
620
|
+
req.overrideMimeType("text/plain; charset=x-user-defined");
|
|
621
|
+
const url = URL.createObjectURL(b);
|
|
622
|
+
try {
|
|
623
|
+
req.open("GET", url, false); // Sync
|
|
624
|
+
req.send();
|
|
625
|
+
if (req.status !== 200 && req.status !== 0) {
|
|
626
|
+
throw new Error("Bad Blob access: " + req.status);
|
|
627
|
+
}
|
|
628
|
+
return req.responseText;
|
|
629
|
+
}
|
|
630
|
+
finally {
|
|
631
|
+
URL.revokeObjectURL(url);
|
|
632
|
+
}
|
|
633
|
+
}
|
|
634
|
+
|
|
635
|
+
const numberTypeDef = {
|
|
636
|
+
number: {
|
|
637
|
+
replace: (num) => {
|
|
638
|
+
switch (true) {
|
|
639
|
+
case isNaN(num):
|
|
640
|
+
return { $t: "number", v: "NaN" };
|
|
641
|
+
case num === Infinity:
|
|
642
|
+
return { $t: "number", v: "Infinity" };
|
|
643
|
+
case num === -Infinity:
|
|
644
|
+
return { $t: "number", v: "-Infinity" };
|
|
645
|
+
default:
|
|
646
|
+
return num;
|
|
647
|
+
}
|
|
648
|
+
},
|
|
649
|
+
revive: ({ v }) => Number(v),
|
|
650
|
+
},
|
|
651
|
+
};
|
|
652
|
+
|
|
653
|
+
const dateTypeDef = {
|
|
654
|
+
Date: {
|
|
655
|
+
replace: (date) => ({
|
|
656
|
+
$t: "Date",
|
|
657
|
+
v: isNaN(date.getTime()) ? "NaN" : date.toISOString(),
|
|
658
|
+
}),
|
|
659
|
+
revive: ({ v }) => new Date(v === "NaN" ? NaN : Date.parse(v)),
|
|
660
|
+
},
|
|
661
|
+
};
|
|
662
|
+
|
|
663
|
+
const setTypeDef = {
|
|
664
|
+
Set: {
|
|
665
|
+
replace: (set) => ({
|
|
666
|
+
$t: "Set",
|
|
667
|
+
v: Array.from(set),
|
|
668
|
+
}),
|
|
669
|
+
revive: ({ v }) => new Set(v),
|
|
670
|
+
},
|
|
671
|
+
};
|
|
672
|
+
|
|
673
|
+
const mapTypeDef = {
|
|
674
|
+
Map: {
|
|
675
|
+
replace: (map) => ({
|
|
676
|
+
$t: "Map",
|
|
677
|
+
v: Array.from(map.entries()),
|
|
678
|
+
}),
|
|
679
|
+
revive: ({ v }) => new Map(v),
|
|
680
|
+
},
|
|
681
|
+
};
|
|
682
|
+
|
|
683
|
+
const _global = typeof globalThis !== "undefined" // All modern environments (node, bun, deno, browser, workers, webview etc)
|
|
684
|
+
? globalThis
|
|
685
|
+
: typeof self !== "undefined" // Older browsers, workers, webview, window etc
|
|
686
|
+
? self
|
|
687
|
+
: typeof global !== "undefined" // Older versions of node
|
|
688
|
+
? global
|
|
689
|
+
: undefined; // Unsupported environment. No idea to return 'this' since we are in a module or a function scope anyway.
|
|
690
|
+
|
|
691
|
+
const typedArrayTypeDefs = [
|
|
692
|
+
"Int8Array",
|
|
693
|
+
"Uint8Array",
|
|
694
|
+
"Uint8ClampedArray",
|
|
695
|
+
"Int16Array",
|
|
696
|
+
"Uint16Array",
|
|
697
|
+
"Int32Array",
|
|
698
|
+
"Uint32Array",
|
|
699
|
+
"Float32Array",
|
|
700
|
+
"Float64Array",
|
|
701
|
+
"DataView",
|
|
702
|
+
"BigInt64Array",
|
|
703
|
+
"BigUint64Array",
|
|
704
|
+
].reduce((specs, typeName) => ({
|
|
705
|
+
...specs,
|
|
706
|
+
[typeName]: {
|
|
707
|
+
// Replace passes the typed array into $t, buffer so that
|
|
708
|
+
// the ArrayBuffer typedef takes care of further handling of the buffer:
|
|
709
|
+
// {$t:"Uint8Array",buffer:{$t:"ArrayBuffer",idx:0}}
|
|
710
|
+
// CHANGED ABOVE! Now shortcutting that for more sparse format of the typed arrays
|
|
711
|
+
// to contain the b64 property directly.
|
|
712
|
+
replace: (a, _, typeDefs) => {
|
|
713
|
+
const buffer = a.buffer;
|
|
714
|
+
const slicedBuffer = a.byteOffset === 0 && a.byteLength === buffer.byteLength
|
|
715
|
+
? buffer
|
|
716
|
+
: buffer.slice(a.byteOffset, a.byteOffset + a.byteLength);
|
|
717
|
+
const result = {
|
|
718
|
+
$t: typeName,
|
|
719
|
+
v: typeDefs.ArrayBuffer.replace(slicedBuffer, _, typeDefs).v,
|
|
720
|
+
};
|
|
721
|
+
return result;
|
|
722
|
+
},
|
|
723
|
+
revive: ({ v }, _, typeDefs) => {
|
|
724
|
+
const TypedArray = _global[typeName];
|
|
725
|
+
return (TypedArray &&
|
|
726
|
+
new TypedArray(typeDefs.ArrayBuffer.revive({ v }, _, typeDefs)));
|
|
727
|
+
},
|
|
728
|
+
},
|
|
729
|
+
}), {});
|
|
730
|
+
|
|
328
731
|
const hasArrayBufferFromBase64 = "fromBase64" in Uint8Array; // https://github.com/tc39/proposal-arraybuffer-base64;
|
|
329
732
|
const hasArrayBufferToBase64 = "toBase64" in Uint8Array.prototype; // https://github.com/tc39/proposal-arraybuffer-base64;
|
|
330
733
|
const b64decode = typeof Buffer !== "undefined"
|
|
@@ -366,183 +769,261 @@ const b64encode = typeof Buffer !== "undefined"
|
|
|
366
769
|
const strs = [];
|
|
367
770
|
for (let i = 0, l = u8a.length; i < l; i += CHUNK_SIZE) {
|
|
368
771
|
const chunk = u8a.subarray(i, i + CHUNK_SIZE);
|
|
369
|
-
strs.push(String.fromCharCode.apply(null, chunk));
|
|
772
|
+
strs.push(String.fromCharCode.apply(null, Array.from(chunk)));
|
|
370
773
|
}
|
|
371
774
|
return btoa(strs.join(""));
|
|
372
775
|
};
|
|
373
776
|
|
|
374
|
-
function
|
|
375
|
-
return
|
|
376
|
-
const data = JSON.stringify([
|
|
377
|
-
...realms.map((realmId) => ({ realmId, accepted: true })),
|
|
378
|
-
...inviteRealms.map((realmId) => ({ realmId, accepted: false })),
|
|
379
|
-
].sort((a, b) => a.realmId < b.realmId ? -1 : a.realmId > b.realmId ? 1 : 0));
|
|
380
|
-
const byteArray = new TextEncoder().encode(data);
|
|
381
|
-
const digestBytes = yield crypto.subtle.digest('SHA-1', byteArray);
|
|
382
|
-
const base64 = b64encode(digestBytes);
|
|
383
|
-
return base64;
|
|
384
|
-
});
|
|
777
|
+
function b64LexEncode(b) {
|
|
778
|
+
return b64ToLex(b64encode(b));
|
|
385
779
|
}
|
|
386
|
-
|
|
387
|
-
|
|
388
|
-
return Object.entries(db.cloud.schema || {})
|
|
389
|
-
.filter(([, { markedForSync }]) => markedForSync)
|
|
390
|
-
.map(([tbl]) => db.tables.filter(({ name }) => name === tbl)[0])
|
|
391
|
-
.filter(cloudTableSchema => cloudTableSchema);
|
|
780
|
+
function b64LexDecode(b64Lex) {
|
|
781
|
+
return b64decode(lexToB64(b64Lex));
|
|
392
782
|
}
|
|
393
|
-
|
|
394
|
-
|
|
395
|
-
|
|
783
|
+
function b64ToLex(base64) {
|
|
784
|
+
var encoded = "";
|
|
785
|
+
for (var i = 0, length = base64.length; i < length; i++) {
|
|
786
|
+
encoded += ENCODE_TABLE[base64[i]];
|
|
787
|
+
}
|
|
788
|
+
return encoded;
|
|
396
789
|
}
|
|
397
|
-
|
|
398
|
-
|
|
399
|
-
|
|
400
|
-
|
|
401
|
-
|
|
402
|
-
|
|
403
|
-
|
|
790
|
+
function lexToB64(base64lex) {
|
|
791
|
+
// only accept string input
|
|
792
|
+
if (typeof base64lex !== "string") {
|
|
793
|
+
throw new Error("invalid decoder input: " + base64lex);
|
|
794
|
+
}
|
|
795
|
+
var base64 = "";
|
|
796
|
+
for (var i = 0, length = base64lex.length; i < length; i++) {
|
|
797
|
+
base64 += DECODE_TABLE[base64lex[i]];
|
|
798
|
+
}
|
|
799
|
+
return base64;
|
|
404
800
|
}
|
|
405
|
-
|
|
406
|
-
|
|
407
|
-
|
|
408
|
-
|
|
801
|
+
const DECODE_TABLE = {
|
|
802
|
+
"-": "=",
|
|
803
|
+
"0": "A",
|
|
804
|
+
"1": "B",
|
|
805
|
+
"2": "C",
|
|
806
|
+
"3": "D",
|
|
807
|
+
"4": "E",
|
|
808
|
+
"5": "F",
|
|
809
|
+
"6": "G",
|
|
810
|
+
"7": "H",
|
|
811
|
+
"8": "I",
|
|
812
|
+
"9": "J",
|
|
813
|
+
A: "K",
|
|
814
|
+
B: "L",
|
|
815
|
+
C: "M",
|
|
816
|
+
D: "N",
|
|
817
|
+
E: "O",
|
|
818
|
+
F: "P",
|
|
819
|
+
G: "Q",
|
|
820
|
+
H: "R",
|
|
821
|
+
I: "S",
|
|
822
|
+
J: "T",
|
|
823
|
+
K: "U",
|
|
824
|
+
L: "V",
|
|
825
|
+
M: "W",
|
|
826
|
+
N: "X",
|
|
827
|
+
O: "Y",
|
|
828
|
+
P: "Z",
|
|
829
|
+
Q: "a",
|
|
830
|
+
R: "b",
|
|
831
|
+
S: "c",
|
|
832
|
+
T: "d",
|
|
833
|
+
U: "e",
|
|
834
|
+
V: "f",
|
|
835
|
+
W: "g",
|
|
836
|
+
X: "h",
|
|
837
|
+
Y: "i",
|
|
838
|
+
Z: "j",
|
|
839
|
+
_: "k",
|
|
840
|
+
a: "l",
|
|
841
|
+
b: "m",
|
|
842
|
+
c: "n",
|
|
843
|
+
d: "o",
|
|
844
|
+
e: "p",
|
|
845
|
+
f: "q",
|
|
846
|
+
g: "r",
|
|
847
|
+
h: "s",
|
|
848
|
+
i: "t",
|
|
849
|
+
j: "u",
|
|
850
|
+
k: "v",
|
|
851
|
+
l: "w",
|
|
852
|
+
m: "x",
|
|
853
|
+
n: "y",
|
|
854
|
+
o: "z",
|
|
855
|
+
p: "0",
|
|
856
|
+
q: "1",
|
|
857
|
+
r: "2",
|
|
858
|
+
s: "3",
|
|
859
|
+
t: "4",
|
|
860
|
+
u: "5",
|
|
861
|
+
v: "6",
|
|
862
|
+
w: "7",
|
|
863
|
+
x: "8",
|
|
864
|
+
y: "9",
|
|
865
|
+
z: "+",
|
|
866
|
+
"|": "/",
|
|
867
|
+
};
|
|
868
|
+
const ENCODE_TABLE = {};
|
|
869
|
+
for (const c of Object.keys(DECODE_TABLE)) {
|
|
870
|
+
ENCODE_TABLE[DECODE_TABLE[c]] = c;
|
|
409
871
|
}
|
|
410
872
|
|
|
411
|
-
|
|
412
|
-
|
|
413
|
-
|
|
414
|
-
|
|
415
|
-
|
|
416
|
-
|
|
417
|
-
|
|
418
|
-
|
|
419
|
-
|
|
420
|
-
|
|
421
|
-
|
|
422
|
-
|
|
423
|
-
|
|
424
|
-
|
|
425
|
-
|
|
426
|
-
|
|
427
|
-
|
|
428
|
-
|
|
429
|
-
|
|
430
|
-
|
|
431
|
-
const sorted = flatten(allMutsOnTables).sort((a, b) => a.mut.txid === b.mut.txid
|
|
432
|
-
? a.mut.opNo - b.mut.opNo // Within same transaction, sort by opNo
|
|
433
|
-
: a.mut.ts - b.mut.ts // Different transactions - sort by timestamp when mutation resolved
|
|
434
|
-
);
|
|
435
|
-
const result = [];
|
|
436
|
-
let currentEntry = null;
|
|
437
|
-
let currentTxid = null;
|
|
438
|
-
for (const { table, mut } of sorted) {
|
|
439
|
-
if (currentEntry &&
|
|
440
|
-
currentEntry.table === table &&
|
|
441
|
-
currentTxid === mut.txid) {
|
|
442
|
-
currentEntry.muts.push(mut);
|
|
443
|
-
}
|
|
444
|
-
else {
|
|
445
|
-
currentEntry = {
|
|
446
|
-
table,
|
|
447
|
-
muts: [mut],
|
|
448
|
-
};
|
|
449
|
-
currentTxid = mut.txid;
|
|
450
|
-
result.push(currentEntry);
|
|
451
|
-
}
|
|
452
|
-
}
|
|
453
|
-
// Filter out those tables that doesn't have any mutations:
|
|
454
|
-
return result;
|
|
455
|
-
});
|
|
456
|
-
}
|
|
457
|
-
function removeRedundantUpdateOps(muts) {
|
|
458
|
-
const updateCoverage = new Map();
|
|
459
|
-
for (const mut of muts) {
|
|
460
|
-
if (mut.type === 'update') {
|
|
461
|
-
if (mut.keys.length !== 1 || mut.changeSpecs.length !== 1) {
|
|
462
|
-
continue; // Don't optimize multi-key updates
|
|
463
|
-
}
|
|
464
|
-
const strKey = '' + mut.keys[0];
|
|
465
|
-
const changeSpecs = mut.changeSpecs[0];
|
|
466
|
-
if (Object.values(changeSpecs).some(v => typeof v === "object" && v && "@@propmod" in v)) {
|
|
467
|
-
continue; // Cannot optimize if any PropModification is present
|
|
468
|
-
}
|
|
469
|
-
let keyCoverage = updateCoverage.get(strKey);
|
|
470
|
-
if (keyCoverage) {
|
|
471
|
-
keyCoverage.push({ txid: mut.txid, updateSpec: changeSpecs });
|
|
472
|
-
}
|
|
473
|
-
else {
|
|
474
|
-
updateCoverage.set(strKey, [{ txid: mut.txid, updateSpec: changeSpecs }]);
|
|
475
|
-
}
|
|
476
|
-
}
|
|
873
|
+
// TSON type definition for serializing ArrayBuffer values.
// Serialized form: { $t: "ArrayBuffer", v: <lexicographically-sortable base64> }.
// Revive returns a plain ArrayBuffer, copying only when the decoded view does
// not span its entire backing buffer.
const arrayBufferTypeDef = {
    ArrayBuffer: {
        replace: (ab) => ({
            $t: "ArrayBuffer",
            v: b64LexEncode(ab),
        }),
        revive: ({ v }) => {
            const bytes = b64LexDecode(v);
            // If the view covers the whole backing buffer, hand the buffer out
            // directly; otherwise slice out exactly the viewed region.
            if (bytes.buffer.byteLength === bytes.byteLength) {
                return bytes.buffer;
            }
            return bytes.buffer.slice(bytes.byteOffset, bytes.byteOffset + bytes.byteLength);
        },
    },
};
|
|
888
|
+
|
|
889
|
+
/**
 * Converts a binary string (one character per byte, e.g. the result of
 * reading a Blob as a binary string) into an ArrayBuffer.
 * Typed-array assignment keeps only the low 8 bits of each char code,
 * matching the original's implicit `& 0xff` behavior.
 */
function string2ArrayBuffer(str) {
    const len = str.length;
    const bytes = new Uint8Array(len);
    let i = 0;
    while (i < len) {
        bytes[i] = str.charCodeAt(i); // truncated to low byte by Uint8Array
        ++i;
    }
    return bytes.buffer;
}
|
|
527
896
|
|
|
528
|
-
|
|
529
|
-
|
|
530
|
-
|
|
531
|
-
|
|
532
|
-
|
|
533
|
-
|
|
534
|
-
|
|
535
|
-
|
|
536
|
-
|
|
537
|
-
|
|
538
|
-
|
|
539
|
-
|
|
540
|
-
|
|
541
|
-
|
|
897
|
+
// TSON type definition for serializing Blob values (also supports the
// node-side FakeBlob shim). Serialized form:
// { $t: "Blob", v: <base64 payload>, type: <mime type> }.
const blobTypeDef = {
    Blob: {
        test: (blob, toStringTag) => toStringTag === "Blob" || blob instanceof FakeBlob,
        replace: (blob) => {
            const v = blob instanceof FakeBlob
                ? b64encode(blob.buf)
                : b64encode(string2ArrayBuffer(readBlobSync(blob)));
            return {
                $t: "Blob",
                v,
                type: blob.type,
            };
        },
        revive: ({ type, v }) => {
            const bytes = b64decode(v);
            const buf = bytes.buffer.byteLength === bytes.byteLength
                ? bytes.buffer
                : bytes.buffer.slice(bytes.byteOffset, bytes.byteOffset + bytes.byteLength);
            // Prefer a real Blob when the platform has one; otherwise fall
            // back to FakeBlob (node environments without Blob).
            if (typeof Blob !== "undefined") {
                return new Blob([new Uint8Array(buf)], { type });
            }
            return new FakeBlob(buf, type);
        },
    },
};
|
|
918
|
+
|
|
919
|
+
// NOTE(review): dead expression left over from bundling/tree-shaking — the
// combined "builtin" typedef object is constructed and its value immediately
// discarded. Kept as-is because the spreads would observe property getters if
// any of the spread sources defined them — TODO confirm they are all plain
// objects, in which case this statement could be dropped entirely.
({
    ...numberTypeDef,
    ...dateTypeDef,
    ...setTypeDef,
    ...mapTypeDef,
    ...typedArrayTypeDefs,
    ...arrayBufferTypeDef,
    ...blobTypeDef, // Should be moved to another preset for DOM types (or universal? since it supports node as well with FakeBlob)
});
|
|
928
|
+
|
|
929
|
+
// TSON type definition for serializing File objects, preserving payload,
// mime type, file name, and lastModified (stored as an ISO-8601 string).
const fileTypeDef = {
    File: {
        test: (file, toStringTag) => toStringTag === "File",
        replace: (file) => {
            const v = b64encode(string2ArrayBuffer(readBlobSync(file)));
            return {
                $t: "File",
                v,
                type: file.type,
                name: file.name,
                lastModified: new Date(file.lastModified).toISOString(),
            };
        },
        revive: ({ type, v, name, lastModified }) => {
            const bytes = b64decode(v);
            // Copy out the viewed region unless the view already spans the
            // whole backing buffer.
            const buf = bytes.buffer.byteLength === bytes.byteLength
                ? bytes.buffer
                : bytes.buffer.slice(bytes.byteOffset, bytes.byteOffset + bytes.byteLength);
            return new File([new Uint8Array(buf)], name, {
                type,
                lastModified: new Date(lastModified).getTime(),
            });
        },
    },
};
|
|
951
|
+
|
|
952
|
+
/**
 * The undefined type is not part of builtin but can be manually added.
 *
 * The reason for supporting undefined is so that an object such as
 * {foo: undefined} round-trips correctly: without this typedef the revived
 * object would be just {}; with it, the revived object is {foo: undefined}.
 */
const undefinedTypeDef = {
    undefined: {
        replace: () => {
            return { $t: "undefined" };
        },
        revive: () => undefined,
    },
};
|
|
968
|
+
|
|
969
|
+
// Random fill for a typed array: cryptographically strong when WebCrypto is
// available; otherwise a Math.random fallback (NOT crypto-safe) for exotic
// environments without a `crypto` global.
const getRandomValues = typeof crypto !== "undefined"
    ? crypto.getRandomValues.bind(crypto)
    : (buf) => {
        let i = buf.length;
        while (i--) {
            buf[i] = Math.floor(Math.random() * 256);
        }
    };
|
|
976
|
+
// Last timestamp handed out by newId(); monotonically increasing so IDs stay
// sorted even when many are generated within the same millisecond.
let time$1 = 0;
/**
 * Generates a universally unique, lexicographically sortable ID where bytes
 * 0-5 hold a big-endian "timestampish" value (milliseconds since 1970 — may
 * run slightly into the future during bulk creation, which is intentional)
 * and bytes 6-17 hold 12 random bytes (96 bits claimed entropy in the
 * original design notes; see the "UUID v6" proposal at
 * https://bradleypeabody.github.io/uuidv6/ for the inspiration).
 * The result is b64Lex-encoded so string ordering matches creation order.
 */
function newId() {
    const id = new Uint8Array(18);
    const now = Date.now(); // fits in 6 bytes until year 10 895
    // Never reuse a timestamp: when bulk-creating within one millisecond,
    // borrow milliseconds from the future to keep generated IDs sorted.
    // (1,000,000 rows created instantly puts the last stamp ~1000 s ahead —
    // harmless, since client clocks are untrusted anyway.)
    if (time$1 >= now) {
        ++time$1;
    }
    else {
        time$1 = now;
    }
    // Write the 48-bit timestamp big-endian into bytes 0..5.
    let t = time$1;
    for (let i = 5; i >= 0; --i) {
        id[i] = t; // Uint8Array assignment keeps only the low 8 bits
        t = Math.floor(t / 256);
    }
    // Fill the remaining 12 bytes with randomness.
    const randomPart = new Uint8Array(id.buffer, 6);
    getRandomValues(randomPart);
    return b64LexEncode(id);
}
|
|
547
1028
|
|
|
548
1029
|
function assert(b) {
|
|
@@ -605,7 +1086,7 @@ function setByKeyPath(obj, keyPath, value) {
|
|
|
605
1086
|
}
|
|
606
1087
|
}
|
|
607
1088
|
}
|
|
608
|
-
const randomString = typeof self !== 'undefined' && typeof crypto !== 'undefined' ? (bytes, randomFill = crypto.getRandomValues.bind(crypto)) => {
|
|
1089
|
+
const randomString$1 = typeof self !== 'undefined' && typeof crypto !== 'undefined' ? (bytes, randomFill = crypto.getRandomValues.bind(crypto)) => {
|
|
609
1090
|
// Web
|
|
610
1091
|
const buf = new Uint8Array(bytes);
|
|
611
1092
|
randomFill(buf);
|
|
@@ -1076,9 +1557,183 @@ function getFetchResponseBodyGenerator(res) {
|
|
|
1076
1557
|
};
|
|
1077
1558
|
}
|
|
1078
1559
|
|
|
1560
|
+
/**
 * Computes a stable base64-encoded SHA-1 digest over the combined set of
 * accepted realms and pending realm invites. The entries are sorted by
 * realmId before hashing so the digest is order-independent and can be used
 * to detect realm-set changes.
 */
function computeRealmSetHash(_a) {
    return __awaiter(this, arguments, void 0, function* ({ realms, inviteRealms, }) {
        const entries = realms
            .map((realmId) => ({ realmId, accepted: true }))
            .concat(inviteRealms.map((realmId) => ({ realmId, accepted: false })))
            .sort((x, y) => (x.realmId < y.realmId ? -1 : x.realmId > y.realmId ? 1 : 0));
        const byteArray = new TextEncoder().encode(JSON.stringify(entries));
        const digestBytes = yield crypto.subtle.digest('SHA-1', byteArray);
        return b64encode(digestBytes);
    });
}
|
|
1572
|
+
|
|
1573
|
+
/**
 * Returns the Dexie Table objects for every table marked for cloud sync in
 * the db's cloud schema, silently skipping schema entries that have no
 * matching table instance.
 */
function getSyncableTables(db) {
    const schema = db.cloud.schema || {};
    const result = [];
    for (const [tableName, { markedForSync }] of Object.entries(schema)) {
        if (!markedForSync)
            continue;
        const table = db.tables.find(({ name }) => name === tableName);
        if (table)
            result.push(table);
    }
    return result;
}
|
|
1579
|
+
|
|
1580
|
+
/** Returns the name of the shadow mutations table for a given table name. */
function getMutationTable(tableName) {
    return '$' + tableName + '_mutations';
}
|
|
1583
|
+
|
|
1584
|
+
/**
 * Inverse of getMutationTable(): extracts the original table name from a
 * "$<table>_mutations" shadow-table name.
 * Throws when the given name does not match that pattern (an empty captured
 * name also throws, same as the original).
 */
function getTableFromMutationTable(mutationTable) {
    const match = /^\$(.*)_mutations$/.exec(mutationTable);
    const tableName = match && match[1];
    if (!tableName)
        throw new Error(`Given mutationTable ${mutationTable} is not correct`);
    return tableName;
}
|
|
1591
|
+
|
|
1592
|
+
// Cached Array.prototype.concat, applied below so flatten needs no receiver
// array per call.
const concat = [].concat;
// Flattens an array of arrays one level deep, e.g. [[1],[2,3]] -> [1,2,3].
// concat.apply spreads each element, so non-array elements are appended as-is.
function flatten(a) {
    return concat.apply([], a);
}
|
|
1596
|
+
|
|
1597
|
+
/**
 * Collects unsynced client-side mutations from the given Dexie mutation
 * tables, optimizes them (canonicalizing primary-key 'modify' ops to
 * 'update' and dropping fully-overwritten updates), orders them globally,
 * and groups consecutive mutations that share (table, txid) into entries of
 * shape { table, muts: [...] }.
 *
 * `since` maps table name -> last synced revision (only mutations with a
 * higher 'rev' are fetched); `limit` caps the number fetched per table.
 */
function listClientChanges(mutationTables_1, db_1) {
    return __awaiter(this, arguments, void 0, function* (mutationTables, db, { since = {}, limit = Infinity } = {}) {
        // Query each mutation table in parallel.
        const allMutsOnTables = yield Promise.all(mutationTables.map((mutationTable) => __awaiter(this, void 0, void 0, function* () {
            const tableName = getTableFromMutationTable(mutationTable.name);
            const lastRevision = since[tableName];
            let query = lastRevision
                ? mutationTable.where('rev').above(lastRevision)
                : mutationTable;
            if (limit < Infinity)
                query = query.limit(limit);
            let muts = yield query.toArray();
            muts = canonicalizeToUpdateOps(muts);
            muts = removeRedundantUpdateOps(muts);
            // Tag each mutation with its originating table.
            const rv = muts.map((mut) => ({
                table: tableName,
                mut,
            }));
            return rv;
        })));
        // Sort by time to get a true order of the operations (between tables)
        const sorted = flatten(allMutsOnTables).sort((a, b) => a.mut.txid === b.mut.txid
            ? a.mut.opNo - b.mut.opNo // Within same transaction, sort by opNo
            : a.mut.ts - b.mut.ts // Different transactions - sort by timestamp when mutation resolved
        );
        // Group consecutive mutations sharing (table, txid) into one entry,
        // so each entry corresponds to one transaction against one table.
        const result = [];
        let currentEntry = null;
        let currentTxid = null;
        for (const { table, mut } of sorted) {
            if (currentEntry &&
                currentEntry.table === table &&
                currentTxid === mut.txid) {
                currentEntry.muts.push(mut);
            }
            else {
                currentEntry = {
                    table,
                    muts: [mut],
                };
                currentTxid = mut.txid;
                result.push(currentEntry);
            }
        }
        // Filter out those tables that doesn't have any mutations:
        return result;
    });
}
|
|
1643
|
+
/**
 * Removes 'update' mutations whose every updated property is overwritten by
 * later transactions on the same primary key — uploading them would have no
 * effect on the server's end state.
 *
 * Only single-key, single-changeSpec updates without any PropModification
 * ("@@propmod") values are candidates; everything else is kept untouched.
 */
function removeRedundantUpdateOps(muts) {
    // True when mut is a single-key update without any PropModification.
    const isOptimizableUpdate = (mut) => {
        if (mut.type !== 'update')
            return false;
        if (mut.keys.length !== 1 || mut.changeSpecs.length !== 1)
            return false;
        const spec = mut.changeSpecs[0];
        return !Object.values(spec).some((v) => typeof v === "object" && v && "@@propmod" in v);
    };
    // Pass 1: per string-coerced primary key, collect the chronological list
    // of { txid, updateSpec } entries.
    const updateCoverage = new Map();
    for (const mut of muts) {
        if (!isOptimizableUpdate(mut))
            continue;
        const strKey = '' + mut.keys[0];
        const entry = { txid: mut.txid, updateSpec: mut.changeSpecs[0] };
        const existing = updateCoverage.get(strKey);
        if (existing) {
            existing.push(entry);
        }
        else {
            updateCoverage.set(strKey, [entry]);
        }
    }
    // Pass 2: drop a mutation when every property it sets is overwritten by a
    // later transaction on the same key.
    return muts.filter((mut) => {
        if (!isOptimizableUpdate(mut))
            return true;
        const keyCoverage = updateCoverage.get('' + mut.keys[0]);
        if (!keyCoverage)
            return true; // no coverage info — cannot optimize
        // Properties this mutation sets that no later transaction overwrites.
        const unoverlappedProps = new Set(Object.keys(mut.changeSpecs[0]));
        // Walk from the newest entry back to this mutation's own txid,
        // erasing every property the later specs overwrite.
        for (let i = keyCoverage.length - 1; i >= 0; --i) {
            const { txid, updateSpec } = keyCoverage[i];
            if (txid === mut.txid)
                break;
            for (const keyPath of Object.keys(updateSpec)) {
                unoverlappedProps.delete(keyPath);
            }
        }
        // Fully overlapped => redundant => remove.
        return unoverlappedProps.size > 0;
    });
}
|
|
1699
|
+
/**
 * Converts 'modify' mutations whose criteria target the primary key
 * (criteria.index === null) into equivalent 'update' mutations, which are
 * simpler and cheaper for the server to apply. All other mutations pass
 * through untouched (same object reference).
 */
function canonicalizeToUpdateOps(muts) {
    return muts.map((mut) => {
        if (mut.type !== 'modify' || mut.criteria.index !== null) {
            return mut;
        }
        const updateMut = {
            ...mut,
            type: 'update',
            keys: mut.keys,
            changeSpecs: [mut.changeSpec],
        };
        // Remove the modify-specific fields entirely rather than leaving
        // them behind as undefined-valued properties.
        delete updateMut.criteria;
        delete updateMut.changeSpec;
        return updateMut;
    });
}
|
|
1713
|
+
|
|
1714
|
+
/**
 * Generates `bytes` random bytes and returns them base64-encoded.
 * Uses WebCrypto when available; otherwise falls back to Math.random
 * (NOT cryptographically safe). Throws when neither Buffer nor btoa exists
 * to perform the base64 encoding.
 */
function randomString(bytes) {
    const buf = new Uint8Array(bytes);
    if (typeof crypto === 'undefined') {
        // Insecure fallback for environments without a crypto global.
        for (let i = 0; i < bytes; ++i) {
            buf[i] = Math.floor(Math.random() * 256);
        }
    }
    else {
        crypto.getRandomValues(buf);
    }
    if (typeof Buffer !== 'undefined' && Buffer.from) {
        return Buffer.from(buf).toString('base64'); // node path
    }
    if (typeof btoa !== 'undefined') {
        return btoa(String.fromCharCode.apply(null, buf)); // browser path
    }
    throw new Error('No btoa or Buffer available');
}
|
|
1733
|
+
|
|
1079
1734
|
function listSyncifiedChanges(tablesToSyncify, currentUser, schema, alreadySyncedRealms) {
|
|
1080
1735
|
return __awaiter(this, void 0, void 0, function* () {
|
|
1081
|
-
const txid = `upload-${randomString
|
|
1736
|
+
const txid = `upload-${randomString(8)}`;
|
|
1082
1737
|
if (currentUser.isLoggedIn) {
|
|
1083
1738
|
if (tablesToSyncify.length > 0) {
|
|
1084
1739
|
const ignoredRealms = new Set(alreadySyncedRealms || []);
|
|
@@ -1246,861 +1901,360 @@ function promptForOTP(userInteraction, email, alert) {
|
|
|
1246
1901
|
type: 'info',
|
|
1247
1902
|
messageCode: 'OTP_SENT',
|
|
1248
1903
|
message: `A One-Time password has been sent to {email}`,
|
|
1249
|
-
messageParams: { email },
|
|
1250
|
-
},
|
|
1251
|
-
];
|
|
1252
|
-
if (alert) {
|
|
1253
|
-
alerts.push(alert);
|
|
1254
|
-
}
|
|
1255
|
-
const { otp } = yield interactWithUser(userInteraction, {
|
|
1256
|
-
type: 'otp',
|
|
1257
|
-
title: 'Enter OTP',
|
|
1258
|
-
alerts,
|
|
1259
|
-
fields: {
|
|
1260
|
-
otp: {
|
|
1261
|
-
type: 'otp',
|
|
1262
|
-
label: 'OTP',
|
|
1263
|
-
placeholder: 'Paste OTP here',
|
|
1264
|
-
},
|
|
1265
|
-
},
|
|
1266
|
-
});
|
|
1267
|
-
return otp;
|
|
1268
|
-
});
|
|
1269
|
-
}
|
|
1270
|
-
function confirmLogout(userInteraction, currentUserId, numUnsyncedChanges) {
|
|
1271
|
-
return __awaiter(this, void 0, void 0, function* () {
|
|
1272
|
-
const alerts = [
|
|
1273
|
-
{
|
|
1274
|
-
type: 'warning',
|
|
1275
|
-
messageCode: 'LOGOUT_CONFIRMATION',
|
|
1276
|
-
message: `{numUnsyncedChanges} unsynced changes will get lost!
|
|
1277
|
-
Logout anyway?`,
|
|
1278
|
-
messageParams: {
|
|
1279
|
-
currentUserId,
|
|
1280
|
-
numUnsyncedChanges: numUnsyncedChanges.toString(),
|
|
1281
|
-
}
|
|
1282
|
-
},
|
|
1283
|
-
];
|
|
1284
|
-
return yield interactWithUser(userInteraction, {
|
|
1285
|
-
type: 'logout-confirmation',
|
|
1286
|
-
title: 'Confirm Logout',
|
|
1287
|
-
alerts,
|
|
1288
|
-
fields: {},
|
|
1289
|
-
submitLabel: 'Confirm logout',
|
|
1290
|
-
cancelLabel: 'Cancel'
|
|
1291
|
-
})
|
|
1292
|
-
.then(() => true)
|
|
1293
|
-
.catch(() => false);
|
|
1294
|
-
});
|
|
1295
|
-
}
|
|
1296
|
-
/**
|
|
1297
|
-
* Prompts the user to select an authentication method (OAuth provider or OTP).
|
|
1298
|
-
*
|
|
1299
|
-
* This function converts OAuth providers and OTP option into generic DXCOption[]
|
|
1300
|
-
* for the DXCSelect interaction, handling icon fetching and style hints.
|
|
1301
|
-
*
|
|
1302
|
-
* @param userInteraction - The user interaction BehaviorSubject
|
|
1303
|
-
* @param providers - Available OAuth providers
|
|
1304
|
-
* @param otpEnabled - Whether OTP is available
|
|
1305
|
-
* @param title - Dialog title
|
|
1306
|
-
* @param alerts - Optional alerts to display
|
|
1307
|
-
* @returns Promise resolving to the user's selection
|
|
1308
|
-
*/
|
|
1309
|
-
function promptForProvider(userInteraction_1, providers_1, otpEnabled_1) {
|
|
1310
|
-
return __awaiter(this, arguments, void 0, function* (userInteraction, providers, otpEnabled, title = 'Choose login method', alerts = []) {
|
|
1311
|
-
// Convert providers to generic options
|
|
1312
|
-
const providerOptions = providers.map(providerToOption);
|
|
1313
|
-
// Build the options array
|
|
1314
|
-
const options = [...providerOptions];
|
|
1315
|
-
// Add OTP option if enabled
|
|
1316
|
-
if (otpEnabled) {
|
|
1317
|
-
options.push({
|
|
1318
|
-
name: 'otp',
|
|
1319
|
-
value: 'email',
|
|
1320
|
-
displayName: 'Continue with email',
|
|
1321
|
-
iconUrl: EmailIcon,
|
|
1322
|
-
styleHint: 'otp',
|
|
1323
|
-
});
|
|
1324
|
-
}
|
|
1325
|
-
return new Promise((resolve, reject) => {
|
|
1326
|
-
const interactionProps = {
|
|
1327
|
-
type: 'generic',
|
|
1328
|
-
title,
|
|
1329
|
-
alerts,
|
|
1330
|
-
options,
|
|
1331
|
-
fields: {},
|
|
1332
|
-
submitLabel: '', // No submit button - just options
|
|
1333
|
-
cancelLabel: 'Cancel',
|
|
1334
|
-
onSubmit: (params) => {
|
|
1335
|
-
userInteraction.next(undefined);
|
|
1336
|
-
// Check which option was selected
|
|
1337
|
-
if ('otp' in params) {
|
|
1338
|
-
resolve({ type: 'otp' });
|
|
1339
|
-
}
|
|
1340
|
-
else if ('provider' in params) {
|
|
1341
|
-
resolve({ type: 'provider', provider: params.provider });
|
|
1342
|
-
}
|
|
1343
|
-
else {
|
|
1344
|
-
// Unknown - default to OTP
|
|
1345
|
-
resolve({ type: 'otp' });
|
|
1346
|
-
}
|
|
1347
|
-
},
|
|
1348
|
-
onCancel: () => {
|
|
1349
|
-
userInteraction.next(undefined);
|
|
1350
|
-
reject(new Dexie.AbortError('User cancelled'));
|
|
1351
|
-
},
|
|
1352
|
-
};
|
|
1353
|
-
userInteraction.next(interactionProps);
|
|
1354
|
-
});
|
|
1355
|
-
});
|
|
1356
|
-
}
|
|
1357
|
-
|
|
1358
|
-
/**
|
|
1359
|
-
* Error thrown when initiating an OAuth redirect.
|
|
1360
|
-
*
|
|
1361
|
-
* This is not a real error - it's used to signal that the page is
|
|
1362
|
-
* navigating away to an OAuth provider. It should be caught and
|
|
1363
|
-
* silently ignored at the appropriate level.
|
|
1364
|
-
*/
|
|
1365
|
-
/**
 * Sentinel "error" thrown when an OAuth redirect has been initiated.
 * Not a real failure: it signals that the page is navigating away to the
 * OAuth provider, so callers should catch it and silently ignore it.
 * Carries the provider identifier on `this.provider`.
 */
class OAuthRedirectError extends Error {
    constructor(provider) {
        super(`OAuth redirect initiated for provider: ${provider}`);
        this.name = 'OAuthRedirectError';
        this.provider = provider;
    }
}
|
|
1372
|
-
|
|
1373
|
-
function loadAccessToken(db) {
|
|
1374
|
-
return __awaiter(this, void 0, void 0, function* () {
|
|
1375
|
-
var _a, _b, _c;
|
|
1376
|
-
const currentUser = yield db.getCurrentUser();
|
|
1377
|
-
const { accessToken, accessTokenExpiration, refreshToken, refreshTokenExpiration, claims, } = currentUser;
|
|
1378
|
-
if (!accessToken)
|
|
1379
|
-
return null;
|
|
1380
|
-
const expTime = (_a = accessTokenExpiration === null || accessTokenExpiration === void 0 ? void 0 : accessTokenExpiration.getTime()) !== null && _a !== void 0 ? _a : Infinity;
|
|
1381
|
-
if (expTime > Date.now() && (((_b = currentUser.license) === null || _b === void 0 ? void 0 : _b.status) || 'ok') === 'ok') {
|
|
1382
|
-
return currentUser;
|
|
1383
|
-
}
|
|
1384
|
-
if (!refreshToken) {
|
|
1385
|
-
throw new Error(`Refresh token missing`);
|
|
1386
|
-
}
|
|
1387
|
-
const refreshExpTime = (_c = refreshTokenExpiration === null || refreshTokenExpiration === void 0 ? void 0 : refreshTokenExpiration.getTime()) !== null && _c !== void 0 ? _c : Infinity;
|
|
1388
|
-
if (refreshExpTime <= Date.now()) {
|
|
1389
|
-
throw new Error(`Refresh token has expired`);
|
|
1390
|
-
}
|
|
1391
|
-
const refreshedLogin = yield refreshAccessToken(db.cloud.options.databaseUrl, currentUser);
|
|
1392
|
-
yield db.table('$logins').update(claims.sub, {
|
|
1393
|
-
accessToken: refreshedLogin.accessToken,
|
|
1394
|
-
accessTokenExpiration: refreshedLogin.accessTokenExpiration,
|
|
1395
|
-
claims: refreshedLogin.claims,
|
|
1396
|
-
license: refreshedLogin.license,
|
|
1397
|
-
data: refreshedLogin.data,
|
|
1398
|
-
});
|
|
1399
|
-
return refreshedLogin;
|
|
1400
|
-
});
|
|
1401
|
-
}
|
|
1402
|
-
function authenticate(url, context, fetchToken, userInteraction, hints) {
|
|
1403
|
-
return __awaiter(this, void 0, void 0, function* () {
|
|
1404
|
-
if (context.accessToken &&
|
|
1405
|
-
context.accessTokenExpiration.getTime() > Date.now()) {
|
|
1406
|
-
return context;
|
|
1407
|
-
}
|
|
1408
|
-
else if (context.refreshToken &&
|
|
1409
|
-
(!context.refreshTokenExpiration ||
|
|
1410
|
-
context.refreshTokenExpiration.getTime() > Date.now())) {
|
|
1411
|
-
return yield refreshAccessToken(url, context);
|
|
1412
|
-
}
|
|
1413
|
-
else {
|
|
1414
|
-
return yield userAuthenticate(context, fetchToken, userInteraction, hints);
|
|
1415
|
-
}
|
|
1416
|
-
});
|
|
1417
|
-
}
|
|
1418
|
-
function refreshAccessToken(url, login) {
|
|
1419
|
-
return __awaiter(this, void 0, void 0, function* () {
|
|
1420
|
-
if (!login.refreshToken)
|
|
1421
|
-
throw new Error(`Cannot refresh token - refresh token is missing.`);
|
|
1422
|
-
if (!login.nonExportablePrivateKey)
|
|
1423
|
-
throw new Error(`login.nonExportablePrivateKey is missing - cannot sign refresh token without a private key.`);
|
|
1424
|
-
const time_stamp = Date.now();
|
|
1425
|
-
const signing_algorithm = 'RSASSA-PKCS1-v1_5';
|
|
1426
|
-
const textEncoder = new TextEncoder();
|
|
1427
|
-
const data = textEncoder.encode(login.refreshToken + time_stamp);
|
|
1428
|
-
const binarySignature = yield crypto.subtle.sign(signing_algorithm, login.nonExportablePrivateKey, data);
|
|
1429
|
-
const signature = b64encode(binarySignature);
|
|
1430
|
-
const tokenRequest = {
|
|
1431
|
-
grant_type: 'refresh_token',
|
|
1432
|
-
refresh_token: login.refreshToken,
|
|
1433
|
-
scopes: ['ACCESS_DB'],
|
|
1434
|
-
signature,
|
|
1435
|
-
signing_algorithm,
|
|
1436
|
-
time_stamp,
|
|
1437
|
-
};
|
|
1438
|
-
const res = yield fetch(`${url}/token`, {
|
|
1439
|
-
body: JSON.stringify(tokenRequest),
|
|
1440
|
-
method: 'post',
|
|
1441
|
-
headers: { 'Content-Type': 'application/json' },
|
|
1442
|
-
mode: 'cors',
|
|
1443
|
-
});
|
|
1444
|
-
if (res.status !== 200)
|
|
1445
|
-
throw new Error(`RefreshToken: Status ${res.status} from ${url}/token`);
|
|
1446
|
-
const response = yield res.json();
|
|
1447
|
-
if (response.type === 'error') {
|
|
1448
|
-
throw new TokenErrorResponseError(response);
|
|
1449
|
-
}
|
|
1450
|
-
login.accessToken = response.accessToken;
|
|
1451
|
-
login.accessTokenExpiration = response.accessTokenExpiration
|
|
1452
|
-
? new Date(response.accessTokenExpiration)
|
|
1453
|
-
: undefined;
|
|
1454
|
-
login.claims = response.claims;
|
|
1455
|
-
login.license = {
|
|
1456
|
-
type: response.userType,
|
|
1457
|
-
status: response.claims.license || 'ok',
|
|
1458
|
-
};
|
|
1459
|
-
if (response.evalDaysLeft != null) {
|
|
1460
|
-
login.license.evalDaysLeft = response.evalDaysLeft;
|
|
1461
|
-
}
|
|
1462
|
-
if (response.userValidUntil != null) {
|
|
1463
|
-
login.license.validUntil = new Date(response.userValidUntil);
|
|
1464
|
-
}
|
|
1465
|
-
if (response.data) {
|
|
1466
|
-
login.data = response.data;
|
|
1467
|
-
}
|
|
1468
|
-
return login;
|
|
1469
|
-
});
|
|
1470
|
-
}
|
|
1471
|
-
function userAuthenticate(context, fetchToken, userInteraction, hints) {
|
|
1472
|
-
return __awaiter(this, void 0, void 0, function* () {
|
|
1473
|
-
if (!crypto.subtle) {
|
|
1474
|
-
if (typeof location !== 'undefined' && location.protocol === 'http:') {
|
|
1475
|
-
throw new Error(`Dexie Cloud Addon needs to use WebCrypto, but your browser has disabled it due to being served from an insecure location. Please serve it from https or http://localhost:<port> (See https://stackoverflow.com/questions/46670556/how-to-enable-crypto-subtle-for-unsecure-origins-in-chrome/46671627#46671627)`);
|
|
1476
|
-
}
|
|
1477
|
-
else {
|
|
1478
|
-
throw new Error(`This browser does not support WebCrypto.`);
|
|
1479
|
-
}
|
|
1480
|
-
}
|
|
1481
|
-
const { privateKey, publicKey } = yield crypto.subtle.generateKey({
|
|
1482
|
-
name: 'RSASSA-PKCS1-v1_5',
|
|
1483
|
-
modulusLength: 2048,
|
|
1484
|
-
publicExponent: new Uint8Array([0x01, 0x00, 0x01]),
|
|
1485
|
-
hash: { name: 'SHA-256' },
|
|
1486
|
-
}, false, // Non-exportable...
|
|
1487
|
-
['sign', 'verify']);
|
|
1488
|
-
if (!privateKey || !publicKey)
|
|
1489
|
-
throw new Error(`Could not generate RSA keypair`); // Typings suggest these can be undefined...
|
|
1490
|
-
context.nonExportablePrivateKey = privateKey; //...but storable!
|
|
1491
|
-
const publicKeySPKI = yield crypto.subtle.exportKey('spki', publicKey);
|
|
1492
|
-
const publicKeyPEM = spkiToPEM(publicKeySPKI);
|
|
1493
|
-
context.publicKey = publicKey;
|
|
1494
|
-
try {
|
|
1495
|
-
const response2 = yield fetchToken({
|
|
1496
|
-
public_key: publicKeyPEM,
|
|
1497
|
-
hints,
|
|
1498
|
-
});
|
|
1499
|
-
if (response2.type === 'error') {
|
|
1500
|
-
throw new TokenErrorResponseError(response2);
|
|
1501
|
-
}
|
|
1502
|
-
if (response2.type !== 'tokens')
|
|
1503
|
-
throw new Error(`Unexpected response type from token endpoint: ${response2.type}`);
|
|
1504
|
-
/*const licenseStatus = response2.claims.license || 'ok';
|
|
1505
|
-
if (licenseStatus !== 'ok') {
|
|
1506
|
-
throw new InvalidLicenseError(licenseStatus);
|
|
1507
|
-
}*/
|
|
1508
|
-
context.accessToken = response2.accessToken;
|
|
1509
|
-
context.accessTokenExpiration = new Date(response2.accessTokenExpiration);
|
|
1510
|
-
context.refreshToken = response2.refreshToken;
|
|
1511
|
-
if (response2.refreshTokenExpiration) {
|
|
1512
|
-
context.refreshTokenExpiration = new Date(response2.refreshTokenExpiration);
|
|
1513
|
-
}
|
|
1514
|
-
context.userId = response2.claims.sub;
|
|
1515
|
-
context.email = response2.claims.email;
|
|
1516
|
-
context.name = response2.claims.name;
|
|
1517
|
-
context.claims = response2.claims;
|
|
1518
|
-
context.license = {
|
|
1519
|
-
type: response2.userType,
|
|
1520
|
-
status: response2.claims.license || 'ok',
|
|
1521
|
-
};
|
|
1522
|
-
context.data = response2.data;
|
|
1523
|
-
if (response2.evalDaysLeft != null) {
|
|
1524
|
-
context.license.evalDaysLeft = response2.evalDaysLeft;
|
|
1525
|
-
}
|
|
1526
|
-
if (response2.userValidUntil != null) {
|
|
1527
|
-
context.license.validUntil = new Date(response2.userValidUntil);
|
|
1528
|
-
}
|
|
1529
|
-
if (response2.alerts && response2.alerts.length > 0) {
|
|
1530
|
-
yield interactWithUser(userInteraction, {
|
|
1531
|
-
type: 'message-alert',
|
|
1532
|
-
title: 'Authentication Alert',
|
|
1533
|
-
fields: {},
|
|
1534
|
-
alerts: response2.alerts,
|
|
1535
|
-
});
|
|
1536
|
-
}
|
|
1537
|
-
return context;
|
|
1538
|
-
}
|
|
1539
|
-
catch (error) {
|
|
1540
|
-
// OAuth redirect is not an error - page is navigating away
|
|
1541
|
-
if (error instanceof OAuthRedirectError || (error === null || error === void 0 ? void 0 : error.name) === 'OAuthRedirectError') {
|
|
1542
|
-
throw error; // Re-throw without logging
|
|
1543
|
-
}
|
|
1544
|
-
if (error instanceof TokenErrorResponseError) {
|
|
1545
|
-
yield alertUser(userInteraction, error.title, {
|
|
1546
|
-
type: 'error',
|
|
1547
|
-
messageCode: error.messageCode,
|
|
1548
|
-
message: error.message,
|
|
1549
|
-
messageParams: {},
|
|
1550
|
-
});
|
|
1551
|
-
throw error;
|
|
1552
|
-
}
|
|
1553
|
-
let message = `We're having a problem authenticating right now.`;
|
|
1554
|
-
console.error(`Error authenticating`, error);
|
|
1555
|
-
if (error instanceof TypeError) {
|
|
1556
|
-
const isOffline = typeof navigator !== undefined && !navigator.onLine;
|
|
1557
|
-
if (isOffline) {
|
|
1558
|
-
message = `You seem to be offline. Please connect to the internet and try again.`;
|
|
1559
|
-
}
|
|
1560
|
-
else if (Dexie.debug || (typeof location !== 'undefined' && (location.hostname === 'localhost' || location.hostname === '127.0.0.1'))) {
|
|
1561
|
-
// The audience is most likely the developer. Suggest to whitelist the localhost origin:
|
|
1562
|
-
message = `Could not connect to server. Please verify that your origin '${location.origin}' is whitelisted using \`npx dexie-cloud whitelist\``;
|
|
1563
|
-
}
|
|
1564
|
-
else {
|
|
1565
|
-
message = `Could not connect to server. Please verify the connection.`;
|
|
1566
|
-
}
|
|
1567
|
-
yield alertUser(userInteraction, 'Authentication Failed', {
|
|
1568
|
-
type: 'error',
|
|
1569
|
-
messageCode: 'GENERIC_ERROR',
|
|
1570
|
-
message,
|
|
1571
|
-
messageParams: {},
|
|
1572
|
-
}).catch(() => { });
|
|
1573
|
-
}
|
|
1574
|
-
throw error;
|
|
1575
|
-
}
|
|
1576
|
-
});
|
|
1577
|
-
}
|
|
1578
|
-
function spkiToPEM(keydata) {
|
|
1579
|
-
const keydataB64 = b64encode(keydata);
|
|
1580
|
-
const keydataB64Pem = formatAsPem(keydataB64);
|
|
1581
|
-
return keydataB64Pem;
|
|
1582
|
-
}
|
|
1583
|
-
function formatAsPem(str) {
|
|
1584
|
-
let finalString = '-----BEGIN PUBLIC KEY-----\n';
|
|
1585
|
-
while (str.length > 0) {
|
|
1586
|
-
finalString += str.substring(0, 64) + '\n';
|
|
1587
|
-
str = str.substring(64);
|
|
1588
|
-
}
|
|
1589
|
-
finalString = finalString + '-----END PUBLIC KEY-----';
|
|
1590
|
-
return finalString;
|
|
1591
|
-
}
|
|
1592
|
-
|
|
1593
|
-
const { toString: toStr } = {};
|
|
1594
|
-
function getToStringTag(val) {
|
|
1595
|
-
return toStr.call(val).slice(8, -1);
|
|
1596
|
-
}
|
|
1597
|
-
function escapeDollarProps(value) {
|
|
1598
|
-
const keys = Object.keys(value);
|
|
1599
|
-
let dollarKeys = null;
|
|
1600
|
-
for (let i = 0, l = keys.length; i < l; ++i) {
|
|
1601
|
-
if (keys[i][0] === "$") {
|
|
1602
|
-
dollarKeys = dollarKeys || [];
|
|
1603
|
-
dollarKeys.push(keys[i]);
|
|
1604
|
-
}
|
|
1605
|
-
}
|
|
1606
|
-
if (!dollarKeys)
|
|
1607
|
-
return value;
|
|
1608
|
-
const clone = { ...value };
|
|
1609
|
-
for (const k of dollarKeys) {
|
|
1610
|
-
delete clone[k];
|
|
1611
|
-
}
|
|
1612
|
-
for (const k of dollarKeys) {
|
|
1613
|
-
clone["$" + k] = value[k];
|
|
1614
|
-
}
|
|
1615
|
-
return clone;
|
|
1616
|
-
}
|
|
1617
|
-
const ObjectDef = {
|
|
1618
|
-
replace: escapeDollarProps,
|
|
1619
|
-
};
|
|
1620
|
-
function TypesonSimplified(...typeDefsInputs) {
|
|
1621
|
-
const typeDefs = typeDefsInputs.reduce((p, c) => ({ ...p, ...c }), typeDefsInputs.reduce((p, c) => ({ ...c, ...p }), {}));
|
|
1622
|
-
const protoMap = new WeakMap();
|
|
1623
|
-
return {
|
|
1624
|
-
stringify(value, alternateChannel, space) {
|
|
1625
|
-
const json = JSON.stringify(value, function (key) {
|
|
1626
|
-
const realVal = this[key];
|
|
1627
|
-
const typeDef = getTypeDef(realVal);
|
|
1628
|
-
return typeDef
|
|
1629
|
-
? typeDef.replace(realVal, alternateChannel, typeDefs)
|
|
1630
|
-
: realVal;
|
|
1631
|
-
}, space);
|
|
1632
|
-
return json;
|
|
1633
|
-
},
|
|
1634
|
-
parse(tson, alternateChannel) {
|
|
1635
|
-
const stack = [];
|
|
1636
|
-
return JSON.parse(tson, function (key, value) {
|
|
1637
|
-
//
|
|
1638
|
-
// Parent Part
|
|
1639
|
-
//
|
|
1640
|
-
const type = value === null || value === void 0 ? void 0 : value.$t;
|
|
1641
|
-
if (type) {
|
|
1642
|
-
const typeDef = typeDefs[type];
|
|
1643
|
-
value = typeDef
|
|
1644
|
-
? typeDef.revive(value, alternateChannel, typeDefs)
|
|
1645
|
-
: value;
|
|
1646
|
-
}
|
|
1647
|
-
let top = stack[stack.length - 1];
|
|
1648
|
-
if (top && top[0] === value) {
|
|
1649
|
-
// Do what the kid told us to
|
|
1650
|
-
// Unescape dollar props
|
|
1651
|
-
value = { ...value };
|
|
1652
|
-
// Delete keys that children wanted us to delete
|
|
1653
|
-
for (const k of top[1])
|
|
1654
|
-
delete value[k];
|
|
1655
|
-
// Set keys that children wanted us to set
|
|
1656
|
-
for (const [k, v] of Object.entries(top[2])) {
|
|
1657
|
-
value[k] = v;
|
|
1658
|
-
}
|
|
1659
|
-
stack.pop();
|
|
1660
|
-
}
|
|
1661
|
-
//
|
|
1662
|
-
// Child part
|
|
1663
|
-
//
|
|
1664
|
-
if (value === undefined || (key[0] === "$" && key !== "$t")) {
|
|
1665
|
-
top = stack[stack.length - 1];
|
|
1666
|
-
let deletes;
|
|
1667
|
-
let mods;
|
|
1668
|
-
if (top && top[0] === this) {
|
|
1669
|
-
deletes = top[1];
|
|
1670
|
-
mods = top[2];
|
|
1671
|
-
}
|
|
1672
|
-
else {
|
|
1673
|
-
stack.push([this, (deletes = []), (mods = {})]);
|
|
1674
|
-
}
|
|
1675
|
-
if (key[0] === "$" && key !== "$t") {
|
|
1676
|
-
// Unescape props (also preserves undefined if this is a combo)
|
|
1677
|
-
deletes.push(key);
|
|
1678
|
-
mods[key.substr(1)] = value;
|
|
1679
|
-
}
|
|
1680
|
-
else {
|
|
1681
|
-
// Preserve undefined
|
|
1682
|
-
mods[key] = undefined;
|
|
1683
|
-
}
|
|
1684
|
-
}
|
|
1685
|
-
return value;
|
|
1686
|
-
});
|
|
1687
|
-
},
|
|
1688
|
-
};
|
|
1689
|
-
function getTypeDef(realVal) {
|
|
1690
|
-
const type = typeof realVal;
|
|
1691
|
-
switch (typeof realVal) {
|
|
1692
|
-
case "object":
|
|
1693
|
-
case "function": {
|
|
1694
|
-
// "object", "function", null
|
|
1695
|
-
if (realVal === null)
|
|
1696
|
-
return null;
|
|
1697
|
-
const proto = Object.getPrototypeOf(realVal);
|
|
1698
|
-
if (!proto)
|
|
1699
|
-
return ObjectDef;
|
|
1700
|
-
let typeDef = protoMap.get(proto);
|
|
1701
|
-
if (typeDef !== undefined)
|
|
1702
|
-
return typeDef; // Null counts to! So the caching of Array.prototype also counts.
|
|
1703
|
-
const toStringTag = getToStringTag(realVal);
|
|
1704
|
-
const entry = Object.entries(typeDefs).find(([typeName, typeDef]) => { var _a, _b; return (_b = (_a = typeDef === null || typeDef === void 0 ? void 0 : typeDef.test) === null || _a === void 0 ? void 0 : _a.call(typeDef, realVal, toStringTag)) !== null && _b !== void 0 ? _b : typeName === toStringTag; });
|
|
1705
|
-
typeDef = entry === null || entry === void 0 ? void 0 : entry[1];
|
|
1706
|
-
if (!typeDef) {
|
|
1707
|
-
typeDef = Array.isArray(realVal)
|
|
1708
|
-
? null
|
|
1709
|
-
: typeof realVal === "function"
|
|
1710
|
-
? typeDefs.function || null
|
|
1711
|
-
: ObjectDef;
|
|
1712
|
-
}
|
|
1713
|
-
protoMap.set(proto, typeDef);
|
|
1714
|
-
return typeDef;
|
|
1715
|
-
}
|
|
1716
|
-
default:
|
|
1717
|
-
return typeDefs[type];
|
|
1718
|
-
}
|
|
1719
|
-
}
|
|
1720
|
-
}
|
|
1721
|
-
|
|
1722
|
-
const BisonBinaryTypes = {
|
|
1723
|
-
Blob: {
|
|
1724
|
-
test: (blob, toStringTag) => toStringTag === "Blob",
|
|
1725
|
-
replace: (blob, altChannel) => {
|
|
1726
|
-
const i = altChannel.length;
|
|
1727
|
-
altChannel.push(blob);
|
|
1728
|
-
return {
|
|
1729
|
-
$t: "Blob",
|
|
1730
|
-
mimeType: blob.type,
|
|
1731
|
-
i,
|
|
1732
|
-
};
|
|
1733
|
-
},
|
|
1734
|
-
revive: ({ i, mimeType }, altChannel) => new Blob([altChannel[i]], { type: mimeType }),
|
|
1735
|
-
},
|
|
1736
|
-
};
|
|
1737
|
-
|
|
1738
|
-
var numberDef = {
|
|
1739
|
-
number: {
|
|
1740
|
-
replace: (num) => {
|
|
1741
|
-
switch (true) {
|
|
1742
|
-
case isNaN(num):
|
|
1743
|
-
return { $t: "number", v: "NaN" };
|
|
1744
|
-
case num === Infinity:
|
|
1745
|
-
return { $t: "number", v: "Infinity" };
|
|
1746
|
-
case num === -Infinity:
|
|
1747
|
-
return { $t: "number", v: "-Infinity" };
|
|
1748
|
-
default:
|
|
1749
|
-
return num;
|
|
1750
|
-
}
|
|
1751
|
-
},
|
|
1752
|
-
revive: ({ v }) => Number(v),
|
|
1753
|
-
},
|
|
1754
|
-
};
|
|
1755
|
-
|
|
1756
|
-
const bigIntDef$1 = {
|
|
1757
|
-
bigint: {
|
|
1758
|
-
replace: (realVal) => {
|
|
1759
|
-
return { $t: "bigint", v: "" + realVal };
|
|
1760
|
-
},
|
|
1761
|
-
revive: (obj) => BigInt(obj.v),
|
|
1762
|
-
},
|
|
1763
|
-
};
|
|
1764
|
-
|
|
1765
|
-
var DateDef = {
|
|
1766
|
-
Date: {
|
|
1767
|
-
replace: (date) => ({
|
|
1768
|
-
$t: "Date",
|
|
1769
|
-
v: isNaN(date.getTime()) ? "NaN" : date.toISOString(),
|
|
1770
|
-
}),
|
|
1771
|
-
revive: ({ v }) => new Date(v === "NaN" ? NaN : Date.parse(v)),
|
|
1772
|
-
},
|
|
1773
|
-
};
|
|
1774
|
-
|
|
1775
|
-
var SetDef = {
|
|
1776
|
-
Set: {
|
|
1777
|
-
replace: (set) => ({
|
|
1778
|
-
$t: "Set",
|
|
1779
|
-
v: Array.from(set.entries()),
|
|
1780
|
-
}),
|
|
1781
|
-
revive: ({ v }) => new Set(v),
|
|
1782
|
-
},
|
|
1783
|
-
};
|
|
1784
|
-
|
|
1785
|
-
var MapDef = {
|
|
1786
|
-
Map: {
|
|
1787
|
-
replace: (map) => ({
|
|
1788
|
-
$t: "Map",
|
|
1789
|
-
v: Array.from(map.entries()),
|
|
1790
|
-
}),
|
|
1791
|
-
revive: ({ v }) => new Map(v),
|
|
1792
|
-
},
|
|
1793
|
-
};
|
|
1794
|
-
|
|
1795
|
-
const _global = typeof globalThis !== "undefined" // All modern environments (node, bun, deno, browser, workers, webview etc)
|
|
1796
|
-
? globalThis
|
|
1797
|
-
: typeof self !== "undefined" // Older browsers, workers, webview, window etc
|
|
1798
|
-
? self
|
|
1799
|
-
: typeof global !== "undefined" // Older versions of node
|
|
1800
|
-
? global
|
|
1801
|
-
: undefined; // Unsupported environment. No idea to return 'this' since we are in a module or a function scope anyway.
|
|
1802
|
-
|
|
1803
|
-
var TypedArraysDefs = [
|
|
1804
|
-
"Int8Array",
|
|
1805
|
-
"Uint8Array",
|
|
1806
|
-
"Uint8ClampedArray",
|
|
1807
|
-
"Int16Array",
|
|
1808
|
-
"Uint16Array",
|
|
1809
|
-
"Int32Array",
|
|
1810
|
-
"Uint32Array",
|
|
1811
|
-
"Float32Array",
|
|
1812
|
-
"Float64Array",
|
|
1813
|
-
"DataView",
|
|
1814
|
-
"BigInt64Array",
|
|
1815
|
-
"BigUint64Array",
|
|
1816
|
-
].reduce((specs, typeName) => ({
|
|
1817
|
-
...specs,
|
|
1818
|
-
[typeName]: {
|
|
1819
|
-
// Replace passes the the typed array into $t, buffer so that
|
|
1820
|
-
// the ArrayBuffer typedef takes care of further handling of the buffer:
|
|
1821
|
-
// {$t:"Uint8Array",buffer:{$t:"ArrayBuffer",idx:0}}
|
|
1822
|
-
// CHANGED ABOVE! Now shortcutting that for more sparse format of the typed arrays
|
|
1823
|
-
// to contain the b64 property directly.
|
|
1824
|
-
replace: (a, _, typeDefs) => {
|
|
1825
|
-
const result = {
|
|
1826
|
-
$t: typeName,
|
|
1827
|
-
v: typeDefs.ArrayBuffer.replace(a.byteOffset === 0 && a.byteLength === a.buffer.byteLength
|
|
1828
|
-
? a.buffer
|
|
1829
|
-
: a.buffer.slice(a.byteOffset, a.byteOffset + a.byteLength), _, typeDefs).v,
|
|
1830
|
-
};
|
|
1831
|
-
return result;
|
|
1832
|
-
},
|
|
1833
|
-
revive: ({ v }, _, typeDefs) => {
|
|
1834
|
-
const TypedArray = _global[typeName];
|
|
1835
|
-
return (TypedArray &&
|
|
1836
|
-
new TypedArray(typeDefs.ArrayBuffer.revive({ v }, _, typeDefs)));
|
|
1837
|
-
},
|
|
1838
|
-
},
|
|
1839
|
-
}), {});
|
|
1840
|
-
|
|
1841
|
-
function b64LexEncode(b) {
|
|
1842
|
-
return b64ToLex(b64encode(b));
|
|
1843
|
-
}
|
|
1844
|
-
function b64LexDecode(b64Lex) {
|
|
1845
|
-
return b64decode(lexToB64(b64Lex));
|
|
1846
|
-
}
|
|
1847
|
-
function b64ToLex(base64) {
|
|
1848
|
-
var encoded = "";
|
|
1849
|
-
for (var i = 0, length = base64.length; i < length; i++) {
|
|
1850
|
-
encoded += ENCODE_TABLE[base64[i]];
|
|
1851
|
-
}
|
|
1852
|
-
return encoded;
|
|
1853
|
-
}
|
|
1854
|
-
function lexToB64(base64lex) {
|
|
1855
|
-
// only accept string input
|
|
1856
|
-
if (typeof base64lex !== "string") {
|
|
1857
|
-
throw new Error("invalid decoder input: " + base64lex);
|
|
1858
|
-
}
|
|
1859
|
-
var base64 = "";
|
|
1860
|
-
for (var i = 0, length = base64lex.length; i < length; i++) {
|
|
1861
|
-
base64 += DECODE_TABLE[base64lex[i]];
|
|
1862
|
-
}
|
|
1863
|
-
return base64;
|
|
1864
|
-
}
|
|
1865
|
-
const DECODE_TABLE = {
|
|
1866
|
-
"-": "=",
|
|
1867
|
-
"0": "A",
|
|
1868
|
-
"1": "B",
|
|
1869
|
-
"2": "C",
|
|
1870
|
-
"3": "D",
|
|
1871
|
-
"4": "E",
|
|
1872
|
-
"5": "F",
|
|
1873
|
-
"6": "G",
|
|
1874
|
-
"7": "H",
|
|
1875
|
-
"8": "I",
|
|
1876
|
-
"9": "J",
|
|
1877
|
-
A: "K",
|
|
1878
|
-
B: "L",
|
|
1879
|
-
C: "M",
|
|
1880
|
-
D: "N",
|
|
1881
|
-
E: "O",
|
|
1882
|
-
F: "P",
|
|
1883
|
-
G: "Q",
|
|
1884
|
-
H: "R",
|
|
1885
|
-
I: "S",
|
|
1886
|
-
J: "T",
|
|
1887
|
-
K: "U",
|
|
1888
|
-
L: "V",
|
|
1889
|
-
M: "W",
|
|
1890
|
-
N: "X",
|
|
1891
|
-
O: "Y",
|
|
1892
|
-
P: "Z",
|
|
1893
|
-
Q: "a",
|
|
1894
|
-
R: "b",
|
|
1895
|
-
S: "c",
|
|
1896
|
-
T: "d",
|
|
1897
|
-
U: "e",
|
|
1898
|
-
V: "f",
|
|
1899
|
-
W: "g",
|
|
1900
|
-
X: "h",
|
|
1901
|
-
Y: "i",
|
|
1902
|
-
Z: "j",
|
|
1903
|
-
_: "k",
|
|
1904
|
-
a: "l",
|
|
1905
|
-
b: "m",
|
|
1906
|
-
c: "n",
|
|
1907
|
-
d: "o",
|
|
1908
|
-
e: "p",
|
|
1909
|
-
f: "q",
|
|
1910
|
-
g: "r",
|
|
1911
|
-
h: "s",
|
|
1912
|
-
i: "t",
|
|
1913
|
-
j: "u",
|
|
1914
|
-
k: "v",
|
|
1915
|
-
l: "w",
|
|
1916
|
-
m: "x",
|
|
1917
|
-
n: "y",
|
|
1918
|
-
o: "z",
|
|
1919
|
-
p: "0",
|
|
1920
|
-
q: "1",
|
|
1921
|
-
r: "2",
|
|
1922
|
-
s: "3",
|
|
1923
|
-
t: "4",
|
|
1924
|
-
u: "5",
|
|
1925
|
-
v: "6",
|
|
1926
|
-
w: "7",
|
|
1927
|
-
x: "8",
|
|
1928
|
-
y: "9",
|
|
1929
|
-
z: "+",
|
|
1930
|
-
"|": "/",
|
|
1931
|
-
};
|
|
1932
|
-
const ENCODE_TABLE = {};
|
|
1933
|
-
for (const c of Object.keys(DECODE_TABLE)) {
|
|
1934
|
-
ENCODE_TABLE[DECODE_TABLE[c]] = c;
|
|
1904
|
+
messageParams: { email },
|
|
1905
|
+
},
|
|
1906
|
+
];
|
|
1907
|
+
if (alert) {
|
|
1908
|
+
alerts.push(alert);
|
|
1909
|
+
}
|
|
1910
|
+
const { otp } = yield interactWithUser(userInteraction, {
|
|
1911
|
+
type: 'otp',
|
|
1912
|
+
title: 'Enter OTP',
|
|
1913
|
+
alerts,
|
|
1914
|
+
fields: {
|
|
1915
|
+
otp: {
|
|
1916
|
+
type: 'otp',
|
|
1917
|
+
label: 'OTP',
|
|
1918
|
+
placeholder: 'Paste OTP here',
|
|
1919
|
+
},
|
|
1920
|
+
},
|
|
1921
|
+
});
|
|
1922
|
+
return otp;
|
|
1923
|
+
});
|
|
1935
1924
|
}
|
|
1936
|
-
|
|
1937
|
-
|
|
1938
|
-
|
|
1939
|
-
|
|
1940
|
-
|
|
1941
|
-
|
|
1942
|
-
|
|
1943
|
-
|
|
1944
|
-
|
|
1945
|
-
|
|
1946
|
-
|
|
1947
|
-
|
|
1948
|
-
|
|
1949
|
-
|
|
1950
|
-
|
|
1951
|
-
|
|
1952
|
-
|
|
1953
|
-
|
|
1954
|
-
|
|
1955
|
-
|
|
1956
|
-
|
|
1925
|
+
function confirmLogout(userInteraction, currentUserId, numUnsyncedChanges) {
|
|
1926
|
+
return __awaiter(this, void 0, void 0, function* () {
|
|
1927
|
+
const alerts = [
|
|
1928
|
+
{
|
|
1929
|
+
type: 'warning',
|
|
1930
|
+
messageCode: 'LOGOUT_CONFIRMATION',
|
|
1931
|
+
message: `{numUnsyncedChanges} unsynced changes will get lost!
|
|
1932
|
+
Logout anyway?`,
|
|
1933
|
+
messageParams: {
|
|
1934
|
+
currentUserId,
|
|
1935
|
+
numUnsyncedChanges: numUnsyncedChanges.toString(),
|
|
1936
|
+
}
|
|
1937
|
+
},
|
|
1938
|
+
];
|
|
1939
|
+
return yield interactWithUser(userInteraction, {
|
|
1940
|
+
type: 'logout-confirmation',
|
|
1941
|
+
title: 'Confirm Logout',
|
|
1942
|
+
alerts,
|
|
1943
|
+
fields: {},
|
|
1944
|
+
submitLabel: 'Confirm logout',
|
|
1945
|
+
cancelLabel: 'Cancel'
|
|
1946
|
+
})
|
|
1947
|
+
.then(() => true)
|
|
1948
|
+
.catch(() => false);
|
|
1949
|
+
});
|
|
1957
1950
|
}
|
|
1958
|
-
|
|
1959
|
-
|
|
1960
|
-
|
|
1961
|
-
|
|
1962
|
-
|
|
1963
|
-
|
|
1964
|
-
|
|
1965
|
-
|
|
1966
|
-
|
|
1967
|
-
|
|
1951
|
+
/**
|
|
1952
|
+
* Prompts the user to select an authentication method (OAuth provider or OTP).
|
|
1953
|
+
*
|
|
1954
|
+
* This function converts OAuth providers and OTP option into generic DXCOption[]
|
|
1955
|
+
* for the DXCSelect interaction, handling icon fetching and style hints.
|
|
1956
|
+
*
|
|
1957
|
+
* @param userInteraction - The user interaction BehaviorSubject
|
|
1958
|
+
* @param providers - Available OAuth providers
|
|
1959
|
+
* @param otpEnabled - Whether OTP is available
|
|
1960
|
+
* @param title - Dialog title
|
|
1961
|
+
* @param alerts - Optional alerts to display
|
|
1962
|
+
* @returns Promise resolving to the user's selection
|
|
1963
|
+
*/
|
|
1964
|
+
function promptForProvider(userInteraction_1, providers_1, otpEnabled_1) {
|
|
1965
|
+
return __awaiter(this, arguments, void 0, function* (userInteraction, providers, otpEnabled, title = 'Choose login method', alerts = []) {
|
|
1966
|
+
// Convert providers to generic options
|
|
1967
|
+
const providerOptions = providers.map(providerToOption);
|
|
1968
|
+
// Build the options array
|
|
1969
|
+
const options = [...providerOptions];
|
|
1970
|
+
// Add OTP option if enabled
|
|
1971
|
+
if (otpEnabled) {
|
|
1972
|
+
options.push({
|
|
1973
|
+
name: 'otp',
|
|
1974
|
+
value: 'email',
|
|
1975
|
+
displayName: 'Continue with email',
|
|
1976
|
+
iconUrl: EmailIcon,
|
|
1977
|
+
styleHint: 'otp',
|
|
1978
|
+
});
|
|
1979
|
+
}
|
|
1980
|
+
return new Promise((resolve, reject) => {
|
|
1981
|
+
const interactionProps = {
|
|
1982
|
+
type: 'generic',
|
|
1983
|
+
title,
|
|
1984
|
+
alerts,
|
|
1985
|
+
options,
|
|
1986
|
+
fields: {},
|
|
1987
|
+
submitLabel: '', // No submit button - just options
|
|
1988
|
+
cancelLabel: 'Cancel',
|
|
1989
|
+
onSubmit: (params) => {
|
|
1990
|
+
userInteraction.next(undefined);
|
|
1991
|
+
// Check which option was selected
|
|
1992
|
+
if ('otp' in params) {
|
|
1993
|
+
resolve({ type: 'otp' });
|
|
1994
|
+
}
|
|
1995
|
+
else if ('provider' in params) {
|
|
1996
|
+
resolve({ type: 'provider', provider: params.provider });
|
|
1997
|
+
}
|
|
1998
|
+
else {
|
|
1999
|
+
// Unknown - default to OTP
|
|
2000
|
+
resolve({ type: 'otp' });
|
|
2001
|
+
}
|
|
2002
|
+
},
|
|
2003
|
+
onCancel: () => {
|
|
2004
|
+
userInteraction.next(undefined);
|
|
2005
|
+
reject(new Dexie.AbortError('User cancelled'));
|
|
2006
|
+
},
|
|
2007
|
+
};
|
|
2008
|
+
userInteraction.next(interactionProps);
|
|
2009
|
+
});
|
|
2010
|
+
});
|
|
1968
2011
|
}
|
|
1969
2012
|
|
|
1970
|
-
|
|
1971
|
-
|
|
1972
|
-
|
|
1973
|
-
|
|
2013
|
+
/**
|
|
2014
|
+
* Error thrown when initiating an OAuth redirect.
|
|
2015
|
+
*
|
|
2016
|
+
* This is not a real error - it's used to signal that the page is
|
|
2017
|
+
* navigating away to an OAuth provider. It should be caught and
|
|
2018
|
+
* silently ignored at the appropriate level.
|
|
2019
|
+
*/
|
|
2020
|
+
class OAuthRedirectError extends Error {
|
|
2021
|
+
constructor(provider) {
|
|
2022
|
+
super(`OAuth redirect initiated for provider: ${provider}`);
|
|
2023
|
+
this.name = 'OAuthRedirectError';
|
|
2024
|
+
this.provider = provider;
|
|
1974
2025
|
}
|
|
1975
|
-
return array.buffer;
|
|
1976
2026
|
}
|
|
1977
2027
|
|
|
1978
|
-
|
|
1979
|
-
|
|
1980
|
-
test: (blob, toStringTag) => toStringTag === "Blob" || blob instanceof FakeBlob,
|
|
1981
|
-
replace: (blob) => ({
|
|
1982
|
-
$t: "Blob",
|
|
1983
|
-
v: blob instanceof FakeBlob
|
|
1984
|
-
? b64encode(blob.buf)
|
|
1985
|
-
: b64encode(string2ArrayBuffer(readBlobSync(blob))),
|
|
1986
|
-
type: blob.type,
|
|
1987
|
-
}),
|
|
1988
|
-
revive: ({ type, v }) => {
|
|
1989
|
-
const ab = b64decode(v);
|
|
1990
|
-
return typeof Blob !== undefined
|
|
1991
|
-
? new Blob([ab])
|
|
1992
|
-
: new FakeBlob(ab.buffer, type);
|
|
1993
|
-
},
|
|
1994
|
-
},
|
|
1995
|
-
};
|
|
1996
|
-
|
|
1997
|
-
const builtin = {
|
|
1998
|
-
...numberDef,
|
|
1999
|
-
...bigIntDef$1,
|
|
2000
|
-
...DateDef,
|
|
2001
|
-
...SetDef,
|
|
2002
|
-
...MapDef,
|
|
2003
|
-
...TypedArraysDefs,
|
|
2004
|
-
...ArrayBufferDef,
|
|
2005
|
-
...BlobDef, // Should be moved to another preset for DOM types (or universal? since it supports node as well with FakeBlob)
|
|
2006
|
-
};
|
|
2028
|
+
const SECONDS = 1000;
|
|
2029
|
+
const MINUTES = 60 * SECONDS;
|
|
2007
2030
|
|
|
2008
|
-
function
|
|
2009
|
-
|
|
2010
|
-
|
|
2011
|
-
|
|
2012
|
-
|
|
2013
|
-
|
|
2014
|
-
|
|
2015
|
-
|
|
2016
|
-
|
|
2017
|
-
|
|
2018
|
-
|
|
2019
|
-
|
|
2020
|
-
|
|
2021
|
-
|
|
2022
|
-
|
|
2023
|
-
|
|
2024
|
-
|
|
2025
|
-
|
|
2026
|
-
|
|
2027
|
-
|
|
2028
|
-
|
|
2029
|
-
|
|
2030
|
-
|
|
2031
|
-
|
|
2032
|
-
|
|
2033
|
-
|
|
2034
|
-
|
|
2035
|
-
|
|
2036
|
-
pos += len;
|
|
2037
|
-
arrayBuffers.push(ab);
|
|
2038
|
-
}
|
|
2039
|
-
return tson.parse(json, arrayBuffers);
|
|
2040
|
-
},
|
|
2041
|
-
async fromBinary(blob) {
|
|
2042
|
-
const len = new DataView(await readBlobBinary(blob.slice(0, 4))).getUint32(0);
|
|
2043
|
-
const binData = blob.slice(4, len + 4);
|
|
2044
|
-
const json = await readBlob(blob.slice(len + 4));
|
|
2045
|
-
return await this.parse(json, binData);
|
|
2046
|
-
},
|
|
2047
|
-
};
|
|
2031
|
+
function loadAccessToken(db) {
|
|
2032
|
+
return __awaiter(this, void 0, void 0, function* () {
|
|
2033
|
+
var _a, _b, _c;
|
|
2034
|
+
const currentUser = yield db.getCurrentUser();
|
|
2035
|
+
const { accessToken, accessTokenExpiration, refreshToken, refreshTokenExpiration, claims, } = currentUser;
|
|
2036
|
+
if (!accessToken)
|
|
2037
|
+
return null;
|
|
2038
|
+
const expTime = (_a = accessTokenExpiration === null || accessTokenExpiration === void 0 ? void 0 : accessTokenExpiration.getTime()) !== null && _a !== void 0 ? _a : Infinity;
|
|
2039
|
+
if (expTime > (Date.now() + 5 * MINUTES) && (((_b = currentUser.license) === null || _b === void 0 ? void 0 : _b.status) || 'ok') === 'ok') {
|
|
2040
|
+
return currentUser;
|
|
2041
|
+
}
|
|
2042
|
+
if (!refreshToken) {
|
|
2043
|
+
throw new Error(`Refresh token missing`);
|
|
2044
|
+
}
|
|
2045
|
+
const refreshExpTime = (_c = refreshTokenExpiration === null || refreshTokenExpiration === void 0 ? void 0 : refreshTokenExpiration.getTime()) !== null && _c !== void 0 ? _c : Infinity;
|
|
2046
|
+
if (refreshExpTime <= Date.now()) {
|
|
2047
|
+
throw new Error(`Refresh token has expired`);
|
|
2048
|
+
}
|
|
2049
|
+
const refreshedLogin = yield refreshAccessToken(db.cloud.options.databaseUrl, currentUser);
|
|
2050
|
+
yield db.table('$logins').update(claims.sub, {
|
|
2051
|
+
accessToken: refreshedLogin.accessToken,
|
|
2052
|
+
accessTokenExpiration: refreshedLogin.accessTokenExpiration,
|
|
2053
|
+
claims: refreshedLogin.claims,
|
|
2054
|
+
license: refreshedLogin.license,
|
|
2055
|
+
data: refreshedLogin.data,
|
|
2056
|
+
});
|
|
2057
|
+
return refreshedLogin;
|
|
2058
|
+
});
|
|
2048
2059
|
}
|
|
2049
|
-
function
|
|
2050
|
-
return
|
|
2051
|
-
|
|
2052
|
-
|
|
2053
|
-
|
|
2054
|
-
|
|
2055
|
-
|
|
2060
|
+
function authenticate(url, context, fetchToken, userInteraction, hints) {
|
|
2061
|
+
return __awaiter(this, void 0, void 0, function* () {
|
|
2062
|
+
if (context.accessToken &&
|
|
2063
|
+
context.accessTokenExpiration.getTime() > Date.now()) {
|
|
2064
|
+
return context;
|
|
2065
|
+
}
|
|
2066
|
+
else if (context.refreshToken &&
|
|
2067
|
+
(!context.refreshTokenExpiration ||
|
|
2068
|
+
context.refreshTokenExpiration.getTime() > Date.now())) {
|
|
2069
|
+
return yield refreshAccessToken(url, context);
|
|
2070
|
+
}
|
|
2071
|
+
else {
|
|
2072
|
+
return yield userAuthenticate(context, fetchToken, userInteraction, hints);
|
|
2073
|
+
}
|
|
2074
|
+
});
|
|
2075
|
+
}
|
|
2076
|
+
function refreshAccessToken(url, login) {
|
|
2077
|
+
return __awaiter(this, void 0, void 0, function* () {
|
|
2078
|
+
if (!login.refreshToken)
|
|
2079
|
+
throw new Error(`Cannot refresh token - refresh token is missing.`);
|
|
2080
|
+
if (!login.nonExportablePrivateKey)
|
|
2081
|
+
throw new Error(`login.nonExportablePrivateKey is missing - cannot sign refresh token without a private key.`);
|
|
2082
|
+
const time_stamp = Date.now();
|
|
2083
|
+
const signing_algorithm = 'RSASSA-PKCS1-v1_5';
|
|
2084
|
+
const textEncoder = new TextEncoder();
|
|
2085
|
+
const data = textEncoder.encode(login.refreshToken + time_stamp);
|
|
2086
|
+
const binarySignature = yield crypto.subtle.sign(signing_algorithm, login.nonExportablePrivateKey, data);
|
|
2087
|
+
const signature = b64encode(binarySignature);
|
|
2088
|
+
const tokenRequest = {
|
|
2089
|
+
grant_type: 'refresh_token',
|
|
2090
|
+
refresh_token: login.refreshToken,
|
|
2091
|
+
scopes: ['ACCESS_DB'],
|
|
2092
|
+
signature,
|
|
2093
|
+
signing_algorithm,
|
|
2094
|
+
time_stamp,
|
|
2095
|
+
};
|
|
2096
|
+
const res = yield fetch(`${url}/token`, {
|
|
2097
|
+
body: JSON.stringify(tokenRequest),
|
|
2098
|
+
method: 'post',
|
|
2099
|
+
headers: { 'Content-Type': 'application/json' },
|
|
2100
|
+
mode: 'cors',
|
|
2101
|
+
});
|
|
2102
|
+
if (res.status !== 200)
|
|
2103
|
+
throw new Error(`RefreshToken: Status ${res.status} from ${url}/token`);
|
|
2104
|
+
const response = yield res.json();
|
|
2105
|
+
if (response.type === 'error') {
|
|
2106
|
+
throw new TokenErrorResponseError(response);
|
|
2107
|
+
}
|
|
2108
|
+
login.accessToken = response.accessToken;
|
|
2109
|
+
login.accessTokenExpiration = response.accessTokenExpiration
|
|
2110
|
+
? new Date(response.accessTokenExpiration)
|
|
2111
|
+
: undefined;
|
|
2112
|
+
login.claims = response.claims;
|
|
2113
|
+
login.license = {
|
|
2114
|
+
type: response.userType,
|
|
2115
|
+
status: response.claims.license || 'ok',
|
|
2116
|
+
};
|
|
2117
|
+
if (response.evalDaysLeft != null) {
|
|
2118
|
+
login.license.evalDaysLeft = response.evalDaysLeft;
|
|
2119
|
+
}
|
|
2120
|
+
if (response.userValidUntil != null) {
|
|
2121
|
+
login.license.validUntil = new Date(response.userValidUntil);
|
|
2122
|
+
}
|
|
2123
|
+
if (response.data) {
|
|
2124
|
+
login.data = response.data;
|
|
2125
|
+
}
|
|
2126
|
+
return login;
|
|
2056
2127
|
});
|
|
2057
2128
|
}
|
|
2058
|
-
function
|
|
2059
|
-
return
|
|
2060
|
-
|
|
2061
|
-
|
|
2062
|
-
|
|
2063
|
-
|
|
2064
|
-
|
|
2129
|
+
function userAuthenticate(context, fetchToken, userInteraction, hints) {
|
|
2130
|
+
return __awaiter(this, void 0, void 0, function* () {
|
|
2131
|
+
if (!crypto.subtle) {
|
|
2132
|
+
if (typeof location !== 'undefined' && location.protocol === 'http:') {
|
|
2133
|
+
throw new Error(`Dexie Cloud Addon needs to use WebCrypto, but your browser has disabled it due to being served from an insecure location. Please serve it from https or http://localhost:<port> (See https://stackoverflow.com/questions/46670556/how-to-enable-crypto-subtle-for-unsecure-origins-in-chrome/46671627#46671627)`);
|
|
2134
|
+
}
|
|
2135
|
+
else {
|
|
2136
|
+
throw new Error(`This browser does not support WebCrypto.`);
|
|
2137
|
+
}
|
|
2138
|
+
}
|
|
2139
|
+
const { privateKey, publicKey } = yield crypto.subtle.generateKey({
|
|
2140
|
+
name: 'RSASSA-PKCS1-v1_5',
|
|
2141
|
+
modulusLength: 2048,
|
|
2142
|
+
publicExponent: new Uint8Array([0x01, 0x00, 0x01]),
|
|
2143
|
+
hash: { name: 'SHA-256' },
|
|
2144
|
+
}, false, // Non-exportable...
|
|
2145
|
+
['sign', 'verify']);
|
|
2146
|
+
if (!privateKey || !publicKey)
|
|
2147
|
+
throw new Error(`Could not generate RSA keypair`); // Typings suggest these can be undefined...
|
|
2148
|
+
context.nonExportablePrivateKey = privateKey; //...but storable!
|
|
2149
|
+
const publicKeySPKI = yield crypto.subtle.exportKey('spki', publicKey);
|
|
2150
|
+
const publicKeyPEM = spkiToPEM(publicKeySPKI);
|
|
2151
|
+
context.publicKey = publicKey;
|
|
2152
|
+
try {
|
|
2153
|
+
const response2 = yield fetchToken({
|
|
2154
|
+
public_key: publicKeyPEM,
|
|
2155
|
+
hints,
|
|
2156
|
+
});
|
|
2157
|
+
if (response2.type === 'error') {
|
|
2158
|
+
throw new TokenErrorResponseError(response2);
|
|
2159
|
+
}
|
|
2160
|
+
if (response2.type !== 'tokens')
|
|
2161
|
+
throw new Error(`Unexpected response type from token endpoint: ${response2.type}`);
|
|
2162
|
+
/*const licenseStatus = response2.claims.license || 'ok';
|
|
2163
|
+
if (licenseStatus !== 'ok') {
|
|
2164
|
+
throw new InvalidLicenseError(licenseStatus);
|
|
2165
|
+
}*/
|
|
2166
|
+
context.accessToken = response2.accessToken;
|
|
2167
|
+
context.accessTokenExpiration = new Date(response2.accessTokenExpiration);
|
|
2168
|
+
context.refreshToken = response2.refreshToken;
|
|
2169
|
+
if (response2.refreshTokenExpiration) {
|
|
2170
|
+
context.refreshTokenExpiration = new Date(response2.refreshTokenExpiration);
|
|
2171
|
+
}
|
|
2172
|
+
context.userId = response2.claims.sub;
|
|
2173
|
+
context.email = response2.claims.email;
|
|
2174
|
+
context.name = response2.claims.name;
|
|
2175
|
+
context.claims = response2.claims;
|
|
2176
|
+
context.license = {
|
|
2177
|
+
type: response2.userType,
|
|
2178
|
+
status: response2.claims.license || 'ok',
|
|
2179
|
+
};
|
|
2180
|
+
context.data = response2.data;
|
|
2181
|
+
if (response2.evalDaysLeft != null) {
|
|
2182
|
+
context.license.evalDaysLeft = response2.evalDaysLeft;
|
|
2183
|
+
}
|
|
2184
|
+
if (response2.userValidUntil != null) {
|
|
2185
|
+
context.license.validUntil = new Date(response2.userValidUntil);
|
|
2186
|
+
}
|
|
2187
|
+
if (response2.alerts && response2.alerts.length > 0) {
|
|
2188
|
+
yield interactWithUser(userInteraction, {
|
|
2189
|
+
type: 'message-alert',
|
|
2190
|
+
title: 'Authentication Alert',
|
|
2191
|
+
fields: {},
|
|
2192
|
+
alerts: response2.alerts,
|
|
2193
|
+
});
|
|
2194
|
+
}
|
|
2195
|
+
return context;
|
|
2196
|
+
}
|
|
2197
|
+
catch (error) {
|
|
2198
|
+
// OAuth redirect is not an error - page is navigating away
|
|
2199
|
+
if (error instanceof OAuthRedirectError || (error === null || error === void 0 ? void 0 : error.name) === 'OAuthRedirectError') {
|
|
2200
|
+
throw error; // Re-throw without logging
|
|
2201
|
+
}
|
|
2202
|
+
if (error instanceof TokenErrorResponseError) {
|
|
2203
|
+
yield alertUser(userInteraction, error.title, {
|
|
2204
|
+
type: 'error',
|
|
2205
|
+
messageCode: error.messageCode,
|
|
2206
|
+
message: error.message,
|
|
2207
|
+
messageParams: {},
|
|
2208
|
+
});
|
|
2209
|
+
throw error;
|
|
2210
|
+
}
|
|
2211
|
+
let message = `We're having a problem authenticating right now.`;
|
|
2212
|
+
console.error(`Error authenticating`, error);
|
|
2213
|
+
if (error instanceof TypeError) {
|
|
2214
|
+
const isOffline = typeof navigator !== 'undefined' && !navigator.onLine;
|
|
2215
|
+
if (isOffline) {
|
|
2216
|
+
message = `You seem to be offline. Please connect to the internet and try again.`;
|
|
2217
|
+
}
|
|
2218
|
+
else if (typeof location !== 'undefined' && (Dexie.debug || location.hostname === 'localhost' || location.hostname === '127.0.0.1')) {
|
|
2219
|
+
// The audience is most likely the developer. Suggest to whitelist the localhost origin:
|
|
2220
|
+
const whitelistCommand = `npx dexie-cloud whitelist ${location.origin}`;
|
|
2221
|
+
message = `Could not connect to server. Please verify that your origin '${location.origin}' is whitelisted using \`npx dexie-cloud whitelist\``;
|
|
2222
|
+
yield alertUser(userInteraction, 'Authentication Failed', {
|
|
2223
|
+
type: 'error',
|
|
2224
|
+
messageCode: 'GENERIC_ERROR',
|
|
2225
|
+
message,
|
|
2226
|
+
messageParams: {},
|
|
2227
|
+
copyText: whitelistCommand,
|
|
2228
|
+
}).catch(() => { });
|
|
2229
|
+
}
|
|
2230
|
+
else {
|
|
2231
|
+
message = `Could not connect to server. Please verify the connection.`;
|
|
2232
|
+
yield alertUser(userInteraction, 'Authentication Failed', {
|
|
2233
|
+
type: 'error',
|
|
2234
|
+
messageCode: 'GENERIC_ERROR',
|
|
2235
|
+
message,
|
|
2236
|
+
messageParams: {},
|
|
2237
|
+
}).catch(() => { });
|
|
2238
|
+
}
|
|
2239
|
+
}
|
|
2240
|
+
throw error;
|
|
2241
|
+
}
|
|
2065
2242
|
});
|
|
2066
2243
|
}
|
|
2067
|
-
|
|
2068
|
-
|
|
2069
|
-
|
|
2070
|
-
|
|
2071
|
-
|
|
2072
|
-
|
|
2073
|
-
|
|
2074
|
-
|
|
2075
|
-
|
|
2076
|
-
|
|
2077
|
-
|
|
2078
|
-
|
|
2079
|
-
|
|
2080
|
-
|
|
2081
|
-
revive: () => undefined,
|
|
2082
|
-
},
|
|
2083
|
-
};
|
|
2084
|
-
|
|
2085
|
-
var FileDef = {
|
|
2086
|
-
File: {
|
|
2087
|
-
test: (file, toStringTag) => toStringTag === "File",
|
|
2088
|
-
replace: (file) => ({
|
|
2089
|
-
$t: "File",
|
|
2090
|
-
v: b64encode(string2ArrayBuffer(readBlobSync(file))),
|
|
2091
|
-
type: file.type,
|
|
2092
|
-
name: file.name,
|
|
2093
|
-
lastModified: new Date(file.lastModified).toISOString(),
|
|
2094
|
-
}),
|
|
2095
|
-
revive: ({ type, v, name, lastModified }) => {
|
|
2096
|
-
const ab = b64decode(v);
|
|
2097
|
-
return new File([ab], name, {
|
|
2098
|
-
type,
|
|
2099
|
-
lastModified: new Date(lastModified).getTime(),
|
|
2100
|
-
});
|
|
2101
|
-
},
|
|
2102
|
-
},
|
|
2103
|
-
};
|
|
2244
|
+
/**
 * Convert raw SPKI public-key bytes into a PEM-formatted string.
 * Base64-encodes the key material and wraps it in PEM header/footer lines.
 */
function spkiToPEM(keydata) {
    return formatAsPem(b64encode(keydata));
}
|
|
2249
|
+
/**
 * Wrap a base64 string in PEM public-key format:
 * a BEGIN header, 64-character body lines, and an END footer.
 */
function formatAsPem(str) {
    const lines = ['-----BEGIN PUBLIC KEY-----'];
    for (let pos = 0; pos < str.length; pos += 64) {
        lines.push(str.substring(pos, pos + 64));
    }
    return lines.join('\n') + '\n-----END PUBLIC KEY-----';
}
|
|
2104
2258
|
|
|
2105
2259
|
// Since server revisions are stored in bigints, we need to handle clients without
|
|
2106
2260
|
// bigint support to not fail when serverRevision is passed over to client.
|
|
@@ -2137,7 +2291,7 @@ const bigIntDef = hasBigIntSupport
|
|
|
2137
2291
|
revive: ({ v }) => new FakeBigInt(v),
|
|
2138
2292
|
},
|
|
2139
2293
|
};
|
|
2140
|
-
const defs = Object.assign(Object.assign(Object.assign(Object.assign({},
|
|
2294
|
+
const defs = Object.assign(Object.assign(Object.assign(Object.assign({}, undefinedTypeDef), bigIntDef), fileTypeDef), { PropModification: {
|
|
2141
2295
|
test: (val) => val instanceof PropModification,
|
|
2142
2296
|
replace: (propModification) => {
|
|
2143
2297
|
return Object.assign({ $t: 'PropModification' }, propModification['@@propmod']);
|
|
@@ -2149,8 +2303,14 @@ const defs = Object.assign(Object.assign(Object.assign(Object.assign({}, undefin
|
|
|
2149
2303
|
return new PropModification(propModSpec);
|
|
2150
2304
|
},
|
|
2151
2305
|
} });
|
|
2152
|
-
const TSON = TypesonSimplified(
|
|
2153
|
-
|
|
2306
|
+
const TSON = TypesonSimplified(
|
|
2307
|
+
// Standard type definitions - TSON is transparent to BlobRefs
|
|
2308
|
+
// BlobRefs use _bt convention and are handled by blobResolveMiddleware, not TSON
|
|
2309
|
+
typedArrayTypeDefs, arrayBufferTypeDef, blobTypeDef,
|
|
2310
|
+
// Non-binary built-in types
|
|
2311
|
+
numberTypeDef, dateTypeDef, setTypeDef, mapTypeDef,
|
|
2312
|
+
// Custom type definitions
|
|
2313
|
+
defs);
|
|
2154
2314
|
|
|
2155
2315
|
class HttpError extends Error {
|
|
2156
2316
|
constructor(res, message) {
|
|
@@ -2266,7 +2426,7 @@ function syncWithServer(changes, y, syncState, baseRevs, db, databaseUrl, schema
|
|
|
2266
2426
|
// Push changes to server using fetch
|
|
2267
2427
|
//
|
|
2268
2428
|
const headers = {
|
|
2269
|
-
Accept: 'application/json
|
|
2429
|
+
Accept: 'application/json',
|
|
2270
2430
|
'Content-Type': 'application/tson',
|
|
2271
2431
|
};
|
|
2272
2432
|
const updatedUser = yield loadAccessToken(db);
|
|
@@ -2285,7 +2445,7 @@ function syncWithServer(changes, y, syncState, baseRevs, db, databaseUrl, schema
|
|
|
2285
2445
|
headers.Authorization = `Bearer ${accessToken}`;
|
|
2286
2446
|
}
|
|
2287
2447
|
const syncRequest = {
|
|
2288
|
-
v:
|
|
2448
|
+
v: 3, // v3 = supports BlobRef
|
|
2289
2449
|
dbID: syncState === null || syncState === void 0 ? void 0 : syncState.remoteDbId,
|
|
2290
2450
|
clientIdentity,
|
|
2291
2451
|
schema: schema || {},
|
|
@@ -2323,8 +2483,9 @@ function syncWithServer(changes, y, syncState, baseRevs, db, databaseUrl, schema
|
|
|
2323
2483
|
}
|
|
2324
2484
|
switch (res.headers.get('content-type')) {
|
|
2325
2485
|
case 'application/x-bison':
|
|
2326
|
-
|
|
2327
|
-
|
|
2486
|
+
case 'application/x-bison-stream':
|
|
2487
|
+
// BISON format deprecated - throw error if server sends it
|
|
2488
|
+
throw new Error('BISON format no longer supported. Server should send application/json.');
|
|
2328
2489
|
default:
|
|
2329
2490
|
case 'application/json': {
|
|
2330
2491
|
const text = yield res.text();
|
|
@@ -2728,13 +2889,462 @@ function applyYServerMessages(yMessages, db) {
|
|
|
2728
2889
|
console.error(`Failed to apply YMessage`, m, e);
|
|
2729
2890
|
}
|
|
2730
2891
|
}
|
|
2731
|
-
return {
|
|
2732
|
-
receivedUntils,
|
|
2733
|
-
resyncNeeded,
|
|
2734
|
-
yServerRevision,
|
|
2735
|
-
};
|
|
2892
|
+
return {
|
|
2893
|
+
receivedUntils,
|
|
2894
|
+
resyncNeeded,
|
|
2895
|
+
yServerRevision,
|
|
2896
|
+
};
|
|
2897
|
+
});
|
|
2898
|
+
}
|
|
2899
|
+
|
|
2900
|
+
/**
|
|
2901
|
+
* Check if a value is a BlobRef (offloaded binary data)
|
|
2902
|
+
* A BlobRef has _bt (type), ref (blob ID), but no v (inline data)
|
|
2903
|
+
*/
|
|
2904
|
+
function isBlobRef(value) {
    if (value === null || typeof value !== 'object') {
        return false;
    }
    // A BlobRef carries a `_bt` type tag and a `ref` blob id, but no inline
    // payload `v` — presence of `v` means the data is stored inline instead.
    const candidate = value;
    return (typeof candidate._bt === 'string' &&
        typeof candidate.ref === 'string' &&
        candidate.v === undefined);
}
|
|
2913
|
+
/**
|
|
2914
|
+
* Check if a value is a serialized TSONRef (after IndexedDB storage)
|
|
2915
|
+
* Has 'type' instead of '$t', and no Symbol marker
|
|
2916
|
+
*/
|
|
2917
|
+
function isSerializedTSONRef(value) {
    if (value === null || typeof value !== 'object') {
        return false;
    }
    // Serialized form uses `type` (instead of `$t`) plus `ref` and `size`,
    // and must not carry the raw-BlobRef `_bt` marker.
    const candidate = value;
    return (typeof candidate.type === 'string' &&
        typeof candidate.ref === 'string' &&
        typeof candidate.size === 'number' &&
        candidate._bt === undefined);
}
|
|
2927
|
+
/**
|
|
2928
|
+
* Convert downloaded Uint8Array to the original type specified in BlobRef
|
|
2929
|
+
*/
|
|
2930
|
+
function convertToOriginalType(data, ref) {
    // Obtain an ArrayBuffer covering exactly the bytes of `data`.
    // If the view spans its whole backing buffer we can reuse it directly;
    // otherwise (shared/offset view) copy out just the relevant slice.
    const spansWholeBuffer = data.buffer.byteLength === data.byteLength;
    const buffer = spansWholeBuffer
        ? data.buffer
        : data.buffer.slice(data.byteOffset, data.byteOffset + data.byteLength);
    if (ref._bt === 'Blob') {
        // `ct` holds the original content type (only recorded for Blobs)
        return new Blob([new Uint8Array(buffer)], { type: ref.ct || '' });
    }
    if (ref._bt === 'ArrayBuffer') {
        return buffer;
    }
    if (ref._bt === 'Uint8Array') {
        // Already the right type — return the downloaded view as-is.
        return data;
    }
    // Remaining TypedArray / DataView variants share the same construction.
    const viewCtors = {
        Int8Array,
        Uint8ClampedArray,
        Int16Array,
        Uint16Array,
        Int32Array,
        Uint32Array,
        Float32Array,
        Float64Array,
        BigInt64Array,
        BigUint64Array,
        DataView,
    };
    // Object.hasOwn guards against inherited keys (e.g. _bt === 'toString').
    const Ctor = Object.hasOwn(viewCtors, ref._bt) ? viewCtors[ref._bt] : undefined;
    // Unknown type tags fall back to the raw Uint8Array.
    return Ctor ? new Ctor(buffer) : data;
}
|
|
2969
|
+
/**
|
|
2970
|
+
* Recursively resolve all BlobRefs in an object and collect them for queueing.
|
|
2971
|
+
* Returns a new object with BlobRefs replaced by their original type data,
|
|
2972
|
+
* and populates the resolvedBlobs array with keyPath info for each blob.
|
|
2973
|
+
*
|
|
2974
|
+
* @param obj - Object to resolve
|
|
2975
|
+
* @param dbUrl - Base URL for the database
|
|
2976
|
+
 * @param tracker - BlobDownloadTracker used to download each BlobRef
|
|
2977
|
+
* @param resolvedBlobs - Array to collect resolved blob info
|
|
2978
|
+
* @param currentPath - Current property path (for tracking)
|
|
2979
|
+
* @param visited - WeakMap for circular reference detection
|
|
2980
|
+
*/
|
|
2981
|
+
function resolveAllBlobRefs(obj_1, dbUrl_1) {
    // Transpiled async function. Effective signature:
    //   (obj, dbUrl, resolvedBlobs = [], currentPath = '', visited = new WeakMap(), tracker)
    // `tracker` has no default — callers must pass a BlobDownloadTracker.
    // Returns a new structure; the input is never mutated.
    return __awaiter(this, arguments, void 0, function* (obj, dbUrl, resolvedBlobs = [], currentPath = '', visited = new WeakMap(), tracker) {
        if (obj == null) { // null or undefined
            return obj;
        }
        // Check if this is a BlobRef - resolve it and track it
        if (isBlobRef(obj)) {
            // Download (deduplicated by the tracker), then rebuild the
            // original binary type recorded in the `_bt` tag.
            const rawData = yield tracker.download(obj, dbUrl);
            const data = convertToOriginalType(rawData, obj);
            resolvedBlobs.push({ keyPath: currentPath, data, ref: obj.ref });
            return data;
        }
        // Handle arrays
        if (Array.isArray(obj)) {
            // Avoid circular references - check and set BEFORE iterating.
            // `visited` maps original -> copy so cycles resolve to the copy.
            if (visited.has(obj)) {
                return visited.get(obj);
            }
            const result = [];
            visited.set(obj, result); // Set before iterating to handle self-references
            for (let i = 0; i < obj.length; i++) {
                // Array indices become numeric dotted-path segments, e.g. "items.0"
                const itemPath = currentPath ? `${currentPath}.${i}` : `${i}`;
                result.push(yield resolveAllBlobRefs(obj[i], dbUrl, resolvedBlobs, itemPath, visited, tracker));
            }
            return result;
        }
        // Handle POJO objects only (not Date, RegExp, Blob, ArrayBuffer, etc.)
        if (typeof obj === 'object' && obj.constructor === Object) {
            // Avoid circular references
            if (visited.has(obj)) {
                return visited.get(obj);
            }
            const result = {};
            visited.set(obj, result);
            for (const [propName, value] of Object.entries(obj)) {
                // Skip the _hasBlobRefs marker itself — the resolved copy no
                // longer contains unresolved refs, so the flag is dropped.
                if (propName === '_hasBlobRefs') {
                    continue;
                }
                const propPath = currentPath ? `${currentPath}.${propName}` : propName;
                result[propName] = yield resolveAllBlobRefs(value, dbUrl, resolvedBlobs, propPath, visited, tracker);
            }
            return result;
        }
        // Primitives and non-POJO objects pass through unchanged.
        return obj;
    });
}
|
|
3028
|
+
/**
|
|
3029
|
+
* Check if an object has unresolved BlobRefs
|
|
3030
|
+
*/
|
|
3031
|
+
function hasUnresolvedBlobRefs(obj) {
    if (obj === null || typeof obj !== 'object') {
        return false;
    }
    // The marker is set (as the number 1) when blobs were offloaded into
    // BlobRefs that still need resolving on read.
    return obj._hasBlobRefs === 1;
}
|
|
3036
|
+
|
|
3037
|
+
/**
|
|
3038
|
+
* Blob Offloading for Dexie Cloud
|
|
3039
|
+
*
|
|
3040
|
+
* Handles uploading large blobs to blob storage before sync,
|
|
3041
|
+
* and resolving BlobRefs when reading from the database.
|
|
3042
|
+
*/
|
|
3043
|
+
// Blobs >= 4KB are offloaded to blob storage
|
|
3044
|
+
// Offload threshold for raw buffer-like values (ArrayBuffer/TypedArray/DataView).
// NOTE: Blob/File values are ALWAYS offloaded regardless of size — see
// shouldOffloadBlob(); this threshold applies only to buffer-like values.
const BLOB_OFFLOAD_THRESHOLD = 4096;
// Cache: once we know the server doesn't support blob storage, skip future uploads.
// Maps databaseUrl → boolean (true = supported, false = not supported).
const blobEndpointSupported = new Map();
/**
 * Cross-realm type detection helpers (performance-optimized)
 *
 * When code runs in different JavaScript realms (e.g., Service Worker context),
 * `instanceof` checks can fail because each realm has its own global constructors.
 * We use Object.prototype.toString which works reliably across realms.
 *
 * Performance considerations (this is a hot path - every property is checked):
 * - Early return for primitives via typeof
 * - Static Set for O(1) TypedArray tag lookup
 * - Single typeTag call per check
 */
// TypedArray/DataView tags for size check
const ARRAYBUFFER_VIEW_TAGS = new Set([
    'Int8Array', 'Uint8Array', 'Uint8ClampedArray',
    'Int16Array', 'Uint16Array', 'Int32Array', 'Uint32Array',
    'Float32Array', 'Float64Array', 'BigInt64Array', 'BigUint64Array',
    'DataView'
]);
// Static Set for O(1) lookup of binary type tags (Blob/File/ArrayBuffer plus all views)
const BINARY_TYPE_TAGS = new Set([
    'Blob',
    'File',
    'ArrayBuffer',
    ...ARRAYBUFFER_VIEW_TAGS,
]);
|
|
3074
|
+
/**
|
|
3075
|
+
* Get the [[Class]] internal property via Object.prototype.toString
|
|
3076
|
+
*/
|
|
3077
|
+
function getTypeTag(value) {
    // "[object Foo]" → "Foo"; reliable across realms, unlike instanceof.
    const wrapped = Object.prototype.toString.call(value);
    return wrapped.substring(8, wrapped.length - 1);
}
|
|
3080
|
+
/**
|
|
3081
|
+
* Get the original type name for a value
|
|
3082
|
+
*/
|
|
3083
|
+
function getOrigType(value) {
    const tag = getTypeTag(value);
    // Files are normalized to 'Blob'; every other tag (including
    // 'ArrayBuffer', which the original special-cased to itself) is used
    // verbatim as the `_bt` type name.
    switch (tag) {
        case 'Blob':
        case 'File':
            return 'Blob';
        default:
            return tag;
    }
}
|
|
3091
|
+
/**
|
|
3092
|
+
* Check if a value should be offloaded to blob storage
|
|
3093
|
+
* Performance-optimized for hot path traversal.
|
|
3094
|
+
*/
|
|
3095
|
+
function shouldOffloadBlob(value) {
    // Fast path: primitives can never be binary payloads (hot code path —
    // this runs for every property during change-set traversal).
    if (value === null || typeof value !== 'object') {
        return false;
    }
    // Single cross-realm-safe tag lookup per check.
    const tag = getTypeTag(value);
    if (!BINARY_TYPE_TAGS.has(tag)) {
        return false;
    }
    // Blob/File: always offload regardless of size.
    // This ensures blobs are never stored inline in IndexedDB, which avoids
    // issues with synchronous blob reading (e.g. in service workers where
    // XMLHttpRequest is unavailable — see #2182).
    if (tag === 'Blob' || tag === 'File') {
        return true;
    }
    // ArrayBuffer, TypedArrays and DataView: offload only above the threshold.
    return value.byteLength >= BLOB_OFFLOAD_THRESHOLD;
}
|
|
3120
|
+
/**
|
|
3121
|
+
* Upload a blob to the blob storage endpoint
|
|
3122
|
+
*/
|
|
3123
|
+
function uploadBlob(databaseUrl, getCachedAccessToken, blob) {
    // Uploads one binary value to the server's blob endpoint via PUT.
    // Resolves to a BlobRef ({_bt, ref, size[, ct]}), or null when the
    // server lacks the blob endpoint (404/405) so the caller can fall back
    // to inline storage. Throws on auth failure or other HTTP errors.
    return __awaiter(this, void 0, void 0, function* () {
        const accessToken = yield getCachedAccessToken();
        if (!accessToken) {
            throw new Error('Failed to load access token for blob upload');
        }
        // The client generates the blob id; the server may respond with a
        // version-prefixed ref (see below).
        const blobId = newId();
        // URL format: {databaseUrl}/blob/{blobId}
        const url = `${databaseUrl}/blob/${blobId}`;
        let body;
        let contentType;
        let size;
        const origType = getOrigType(blob);
        // Use type tag for cross-realm compatible checks
        const tag = getTypeTag(blob);
        if (tag === 'Blob' || tag === 'File') {
            body = blob;
            contentType = blob.type || 'application/octet-stream';
            size = blob.size;
        }
        else if (tag === 'ArrayBuffer') {
            body = blob;
            contentType = 'application/octet-stream';
            size = blob.byteLength;
        }
        else if (ARRAYBUFFER_VIEW_TAGS.has(tag)) {
            // ArrayBufferView (TypedArray or DataView) - create a proper ArrayBuffer copy
            // (the view may cover only part of a larger shared buffer).
            const view = blob;
            const arrayBuffer = new ArrayBuffer(view.byteLength);
            new Uint8Array(arrayBuffer).set(new Uint8Array(view.buffer, view.byteOffset, view.byteLength));
            body = arrayBuffer;
            contentType = 'application/octet-stream';
            size = view.byteLength;
        }
        else {
            throw new Error(`Unsupported blob type: ${tag}`);
        }
        // Add content type as query param for the server to store
        const uploadUrl = `${url}?ct=${encodeURIComponent(contentType)}`;
        const response = yield fetch(uploadUrl, {
            method: 'PUT',
            headers: {
                'Authorization': `Bearer ${accessToken}`,
                'Content-Type': contentType,
            },
            body,
        });
        if (!response.ok) {
            if (response.status === 404 || response.status === 405) {
                // Server doesn't support blob storage endpoint — fall back to inline storage.
                // This happens when a new client connects to an older server (pre-3.0).
                return null;
            }
            throw new Error(`Failed to upload blob: ${response.status} ${response.statusText}`);
        }
        // The server returns the ref with version prefix (e.g., "1:blobId")
        const result = yield response.json();
        // Return BlobRef with server's ref (includes version) and original type preserved in _bt
        return Object.assign({ _bt: origType, ref: result.ref, size: size }, (origType === 'Blob' ? { ct: contentType } : {}) // Only include content type for Blobs
        );
    });
}
|
|
3185
|
+
/**
 * Offload any large blobs inside `obj` and, when at least one blob was
 * replaced by a BlobRef, stamp the resulting POJO with `_hasBlobRefs = 1`
 * so later reads know it contains refs pending resolution.
 */
function offloadBlobsAndMarkDirty(obj, databaseUrl, getCachedAccessToken) {
    return __awaiter(this, void 0, void 0, function* () {
        const offloadState = { dirty: false };
        const processed = yield offloadBlobs(obj, databaseUrl, getCachedAccessToken, offloadState);
        // Only plain objects can carry the marker property.
        const isPojo = processed !== null &&
            typeof processed === 'object' &&
            processed.constructor === Object;
        if (offloadState.dirty && isPojo) {
            processed._hasBlobRefs = 1;
        }
        return processed;
    });
}
|
|
3196
|
+
/**
|
|
3197
|
+
* Recursively scan an object for large blobs and upload them
|
|
3198
|
+
* Returns a new object with blobs replaced by BlobRefs
|
|
3199
|
+
*/
|
|
3200
|
+
function offloadBlobs(obj_1, databaseUrl_1, getCachedAccessToken_1) {
    // Transpiled async function. Effective signature:
    //   (obj, databaseUrl, getCachedAccessToken, dirtyFlag = {dirty:false}, visited = new WeakSet())
    // Returns a new structure with offloadable blobs replaced by BlobRefs;
    // sets dirtyFlag.dirty when at least one upload happened.
    return __awaiter(this, arguments, void 0, function* (obj, databaseUrl, getCachedAccessToken, dirtyFlag = { dirty: false }, visited = new WeakSet()) {
        if (obj === null || obj === undefined) {
            return obj;
        }
        // Check if this is a blob that should be offloaded
        if (shouldOffloadBlob(obj)) {
            if (blobEndpointSupported.get(databaseUrl) === false) {
                // Server known to not support blob storage — keep inline
                return obj;
            }
            const blobRef = yield uploadBlob(databaseUrl, getCachedAccessToken, obj);
            if (blobRef === null) {
                // Server doesn't support blob storage — keep original inline
                // and cache that fact so later values skip the failing upload.
                blobEndpointSupported.set(databaseUrl, false);
                return obj;
            }
            blobEndpointSupported.set(databaseUrl, true);
            // Signal to the caller that at least one blob became a ref.
            dirtyFlag.dirty = true;
            return blobRef;
        }
        if (typeof obj !== 'object') {
            return obj;
        }
        // Avoid circular references - check BEFORE processing
        if (visited.has(obj)) {
            return obj;
        }
        visited.add(obj);
        // Handle arrays
        if (Array.isArray(obj)) {
            const result = [];
            for (const item of obj) {
                // Sequential by construction: each yield awaits before the next
                // item (and therefore each upload) starts.
                result.push(yield offloadBlobs(item, databaseUrl, getCachedAccessToken, dirtyFlag, visited));
            }
            return result;
        }
        // Traverse plain objects (POJO-like) - use prototype check since IndexedDB
        // may return objects where constructor !== Object
        const proto = Object.getPrototypeOf(obj);
        if (proto !== Object.prototype && proto !== null) {
            // Non-POJO objects (Date, RegExp, class instances, …) are left untouched.
            return obj;
        }
        const result = {};
        for (const [key, value] of Object.entries(obj)) {
            result[key] = yield offloadBlobs(value, databaseUrl, getCachedAccessToken, dirtyFlag, visited);
        }
        return result;
    });
}
|
|
3250
|
+
/**
|
|
3251
|
+
* Process a DBOperationsSet and offload any large blobs
|
|
3252
|
+
* Returns a new DBOperationsSet with blobs replaced by BlobRefs
|
|
3253
|
+
*/
|
|
3254
|
+
/**
 * Walk a DBOperationsSet and offload any large blobs found in its mutations.
 * Produces a new array of {table, muts}; the input set is not mutated.
 */
function offloadBlobsInOperations(operations, databaseUrl, getCachedAccessToken) {
    return __awaiter(this, void 0, void 0, function* () {
        const processedTables = [];
        for (const { table, muts } of operations) {
            const rewrittenMuts = [];
            for (const mut of muts) {
                // Sequential: each mutation may trigger blob uploads.
                rewrittenMuts.push(yield offloadBlobsInOperation(mut, databaseUrl, getCachedAccessToken));
            }
            processedTables.push({ table, muts: rewrittenMuts });
        }
        return processedTables;
    });
}
|
|
3271
|
+
/**
 * Offload large blobs inside a single mutation. Insert/upsert values and
 * update change-specs are processed in parallel (Promise.all, preserving
 * the original semantics); delete and unknown operations pass through.
 */
function offloadBlobsInOperation(op, databaseUrl, getCachedAccessToken) {
    return __awaiter(this, void 0, void 0, function* () {
        const offload = (x) => offloadBlobsAndMarkDirty(x, databaseUrl, getCachedAccessToken);
        switch (op.type) {
            case 'insert':
            case 'upsert':
                return Object.assign(Object.assign({}, op), { values: yield Promise.all(op.values.map(offload)) });
            case 'update':
                return Object.assign(Object.assign({}, op), { changeSpecs: yield Promise.all(op.changeSpecs.map(offload)) });
            case 'modify':
                return Object.assign(Object.assign({}, op), { changeSpec: yield offload(op.changeSpec) });
            default:
                // 'delete' and unrecognized op types carry no blob payloads.
                return op;
        }
    });
}
|
|
3295
|
+
/**
|
|
3296
|
+
* Check if there are any large blobs in the operations that need offloading
|
|
3297
|
+
* This is a quick check to avoid unnecessary processing
|
|
3298
|
+
*/
|
|
3299
|
+
/**
 * Quick pre-check: does any mutation in the operations set contain a
 * binary value that qualifies for offloading? Lets sync skip the full
 * offload pass for change sets without blobs.
 */
function hasLargeBlobsInOperations(operations) {
    return operations.some((tableOps) => tableOps.muts.some((mut) => hasLargeBlobsInOperation(mut)));
}
|
|
3309
|
+
/**
 * Does a single mutation contain any offloadable binary values?
 * Only insert/upsert/update/modify payloads are scanned.
 */
function hasLargeBlobsInOperation(op) {
    if (op.type === 'insert' || op.type === 'upsert') {
        return op.values.some((value) => hasLargeBlobs(value));
    }
    if (op.type === 'update') {
        return op.changeSpecs.some((spec) => hasLargeBlobs(spec));
    }
    if (op.type === 'modify') {
        return hasLargeBlobs(op.changeSpec);
    }
    // 'delete' and unknown op types carry no blob payloads.
    return false;
}
|
|
3322
|
+
/**
 * Deep-scan a value for binary data that qualifies for offloading.
 * Cycle-safe via the `visited` WeakSet. Only plain objects and arrays
 * are descended into.
 */
function hasLargeBlobs(obj, visited = new WeakSet()) {
    if (obj == null) {
        return false; // null or undefined
    }
    if (shouldOffloadBlob(obj)) {
        return true;
    }
    if (typeof obj !== 'object') {
        return false;
    }
    // Cycle guard — must be registered before any recursion.
    if (visited.has(obj)) {
        return false;
    }
    visited.add(obj);
    if (Array.isArray(obj)) {
        return obj.some((element) => hasLargeBlobs(element, visited));
    }
    // Only descend into POJO-like objects; IndexedDB results may have
    // constructor !== Object, so compare prototypes instead.
    const proto = Object.getPrototypeOf(obj);
    return (proto === Object.prototype || proto === null)
        ? Object.values(obj).some((propValue) => hasLargeBlobs(propValue, visited))
        : false;
}
|
|
2738
3348
|
|
|
2739
3349
|
function updateYSyncStates(lastUpdateIdsBeforeSync, receivedUntilsAfterSync, db) {
|
|
2740
3350
|
return __awaiter(this, void 0, void 0, function* () {
|
|
@@ -2917,6 +3527,33 @@ function downloadYDocsFromServer(db_1, databaseUrl_1, _a) {
|
|
|
2917
3527
|
});
|
|
2918
3528
|
}
|
|
2919
3529
|
|
|
3530
|
+
// Per-db cache of the most recently loaded access token and its expiration (ms).
const wm$3 = new WeakMap();
/**
 * Load an access token for `db`, reusing a cached value while it remains
 * valid for at least 5 more minutes. Falls back to the current user's token,
 * then to loadAccessToken() (run outside any ongoing Dexie transaction).
 * Resolves to the token string, or null when none is available.
 */
function loadCachedAccessToken(db) {
    var _a, _b, _c, _d;
    let cached = wm$3.get(db);
    // Fast path: cached token still has >5 minutes of validity left.
    if (cached && cached.expiration > Date.now() + 5 * MINUTES) {
        return Promise.resolve(cached.accessToken);
    }
    const currentUser = db.cloud.currentUser.value;
    // A token without an expiration date is treated as never expiring (Infinity).
    if (currentUser && currentUser.accessToken && ((_b = (_a = currentUser.accessTokenExpiration) === null || _a === void 0 ? void 0 : _a.getTime()) !== null && _b !== void 0 ? _b : Infinity) > Date.now() + 5 * MINUTES) {
        wm$3.set(db, {
            accessToken: currentUser.accessToken,
            expiration: (_d = (_c = currentUser.accessTokenExpiration) === null || _c === void 0 ? void 0 : _c.getTime()) !== null && _d !== void 0 ? _d : Infinity
        });
        return Promise.resolve(currentUser.accessToken);
    }
    // Slow path: refresh via loadAccessToken(). ignoreTransaction lets this be
    // called from inside a Dexie transaction without joining (and blocking) it.
    return Dexie.ignoreTransaction(() => loadAccessToken(db).then(user => {
        var _a, _b;
        if (user === null || user === void 0 ? void 0 : user.accessToken) {
            wm$3.set(db, {
                accessToken: user.accessToken,
                expiration: (_b = (_a = user.accessTokenExpiration) === null || _a === void 0 ? void 0 : _a.getTime()) !== null && _b !== void 0 ? _b : Infinity
            });
        }
        return (user === null || user === void 0 ? void 0 : user.accessToken) || null;
    }));
}
|
|
3556
|
+
|
|
2920
3557
|
const CURRENT_SYNC_WORKER = 'currentSyncWorker';
|
|
2921
3558
|
function sync(db, options, schema, syncOptions) {
|
|
2922
3559
|
return _sync(db, options, schema, syncOptions)
|
|
@@ -3034,12 +3671,20 @@ function _sync(db_1, options_1, schema_1) {
|
|
|
3034
3671
|
return false;
|
|
3035
3672
|
}
|
|
3036
3673
|
const latestRevisions = getLatestRevisionsPerTable(clientChangeSet, syncState === null || syncState === void 0 ? void 0 : syncState.latestRevisions);
|
|
3037
|
-
const clientIdentity = (syncState === null || syncState === void 0 ? void 0 : syncState.clientIdentity) || randomString(16);
|
|
3674
|
+
const clientIdentity = (syncState === null || syncState === void 0 ? void 0 : syncState.clientIdentity) || randomString$1(16);
|
|
3675
|
+
//
|
|
3676
|
+
// Offload large blobs to blob storage before sync
|
|
3677
|
+
//
|
|
3678
|
+
let processedChangeSet = clientChangeSet;
|
|
3679
|
+
const hasLargeBlobs = hasLargeBlobsInOperations(clientChangeSet);
|
|
3680
|
+
if (hasLargeBlobs) {
|
|
3681
|
+
processedChangeSet = yield offloadBlobsInOperations(clientChangeSet, databaseUrl, () => loadCachedAccessToken(db));
|
|
3682
|
+
}
|
|
3038
3683
|
//
|
|
3039
3684
|
// Push changes to server
|
|
3040
3685
|
//
|
|
3041
3686
|
throwIfCancelled(cancelToken);
|
|
3042
|
-
const res = yield syncWithServer(
|
|
3687
|
+
const res = yield syncWithServer(processedChangeSet, yMessages, syncState, baseRevs, db, databaseUrl, schema, clientIdentity, currentUser);
|
|
3043
3688
|
console.debug('Sync response', res);
|
|
3044
3689
|
//
|
|
3045
3690
|
// Apply changes locally and clear old change entries:
|
|
@@ -3442,6 +4087,65 @@ function MessagesFromServerConsumer(db) {
|
|
|
3442
4087
|
};
|
|
3443
4088
|
}
|
|
3444
4089
|
|
|
4090
|
+
/**
|
|
4091
|
+
* Deduplicates in-flight blob downloads.
|
|
4092
|
+
*
|
|
4093
|
+
* Both the blob-resolve middleware and the eager blob downloader may
|
|
4094
|
+
* try to fetch the same blob concurrently. This tracker ensures each
|
|
4095
|
+
* unique blob ref is only downloaded once — subsequent requests for
|
|
4096
|
+
* the same ref piggyback on the existing promise.
|
|
4097
|
+
*
|
|
4098
|
+
* Instantiate once per DexieCloudDB.
|
|
4099
|
+
*/
|
|
4100
|
+
class BlobDownloadTracker {
    constructor(db) {
        // Maps blob ref -> in-flight download promise.
        this.inFlight = new Map();
        this.db = db;
    }
    /**
     * Download a blob, deduplicating concurrent requests for the same ref.
     *
     * @param blobRef - The BlobRef to download
     * @param dbUrl - Base URL for the database (e.g., 'https://mydb.dexie.cloud')
     */
    download(blobRef, dbUrl) {
        const existing = this.inFlight.get(blobRef.ref);
        if (existing) {
            // Piggyback on the download already in progress for this ref.
            return existing;
        }
        const pending = loadCachedAccessToken(this.db)
            .then((accessToken) => {
                if (!accessToken) {
                    throw new Error("No access token available for blob download");
                }
                return downloadBlob(blobRef, dbUrl, accessToken);
            })
            // Once settled (fulfilled or rejected), drop the in-flight entry so
            // a later request triggers a fresh download.
            .finally(() => this.inFlight.delete(blobRef.ref));
        this.inFlight.set(blobRef.ref, pending);
        return pending;
    }
}
|
|
4125
|
+
/**
|
|
4126
|
+
* Download blob data from server via proxy endpoint.
|
|
4127
|
+
* Uses auth header for authentication (same as sync).
|
|
4128
|
+
*
|
|
4129
|
+
* @param blobRef - The BlobRef to download
|
|
4130
|
+
* @param dbUrl - Base URL for the database (e.g., 'https://mydb.dexie.cloud')
|
|
4131
|
+
* @param accessToken - Access token for authentication
|
|
4132
|
+
*/
|
|
4133
|
+
function downloadBlob(blobRef, dbUrl, accessToken) {
    return __awaiter(this, void 0, void 0, function* () {
        // Blob proxy endpoint: {dbUrl}/blob/{ref}; bearer auth like sync.
        const response = yield fetch(`${dbUrl}/blob/${blobRef.ref}`, {
            headers: {
                'Authorization': `Bearer ${accessToken}`
            }
        });
        if (!response.ok) {
            throw new Error(`Failed to download blob ${blobRef.ref}: ${response.status} ${response.statusText}`);
        }
        // Normalize the payload to raw bytes; convertToOriginalType() rebuilds
        // the original binary type from this Uint8Array.
        const payload = yield response.arrayBuffer();
        return new Uint8Array(payload);
    });
}
|
|
4148
|
+
|
|
3445
4149
|
const wm$2 = new WeakMap();
|
|
3446
4150
|
const DEXIE_CLOUD_SCHEMA = {
|
|
3447
4151
|
members: '@id, [userId+realmId], [email+realmId], realmId',
|
|
@@ -3456,7 +4160,7 @@ let static_counter = 0;
|
|
|
3456
4160
|
function DexieCloudDB(dx) {
|
|
3457
4161
|
if ('vip' in dx)
|
|
3458
4162
|
dx = dx['vip']; // Avoid race condition. Always map to a vipped dexie that don't block during db.on.ready().
|
|
3459
|
-
let db = wm$2.get(dx
|
|
4163
|
+
let db = wm$2.get(dx);
|
|
3460
4164
|
if (!db) {
|
|
3461
4165
|
const localSyncEvent = new Subject();
|
|
3462
4166
|
let syncStateChangedEvent = new BroadcastedAndLocalEvent(`syncstatechanged-${dx.name}`);
|
|
@@ -3475,7 +4179,9 @@ function DexieCloudDB(dx) {
|
|
|
3475
4179
|
get tables() {
|
|
3476
4180
|
return dx.tables;
|
|
3477
4181
|
},
|
|
3478
|
-
cloud
|
|
4182
|
+
get cloud() {
|
|
4183
|
+
return dx.cloud;
|
|
4184
|
+
},
|
|
3479
4185
|
get $jobs() {
|
|
3480
4186
|
return dx.table('$jobs');
|
|
3481
4187
|
},
|
|
@@ -3544,7 +4250,8 @@ function DexieCloudDB(dx) {
|
|
|
3544
4250
|
Object.assign(db, helperMethods);
|
|
3545
4251
|
db.messageConsumer = MessagesFromServerConsumer(db);
|
|
3546
4252
|
db.messageProducer = new Subject();
|
|
3547
|
-
|
|
4253
|
+
db.blobDownloadTracker = new BlobDownloadTracker(db);
|
|
4254
|
+
wm$2.set(dx, db);
|
|
3548
4255
|
}
|
|
3549
4256
|
return db;
|
|
3550
4257
|
}
|
|
@@ -3554,6 +4261,221 @@ function nameFromKeyPath(keyPath) {
|
|
|
3554
4261
|
keyPath ? ('[' + [].join.call(keyPath, '+') + ']') : "";
|
|
3555
4262
|
}
|
|
3556
4263
|
|
|
4264
|
+
/**
|
|
4265
|
+
* Blob Progress Tracking
|
|
4266
|
+
*
|
|
4267
|
+
* Uses liveQuery to reactively track unresolved blob refs.
|
|
4268
|
+
* Any change to _hasBlobRefs in any syncable table automatically
|
|
4269
|
+
* triggers a re-scan — no manual updateBlobProgress() needed.
|
|
4270
|
+
*/
|
|
4271
|
+
/**
|
|
4272
|
+
* BehaviorSubject for the isDownloading flag, controlled by eagerBlobDownloader.
|
|
4273
|
+
*/
|
|
4274
|
+
/**
 * Build the reactive isDownloading flag for blob progress tracking.
 * Returns a BehaviorSubject seeded with `false`; the eager blob
 * downloader flips it via setDownloadingState().
 */
function createDownloadingState() {
    const initiallyDownloading = false;
    return new BehaviorSubject(initiallyDownloading);
}
|
|
4277
|
+
/**
|
|
4278
|
+
* Set downloading state.
|
|
4279
|
+
*/
|
|
4280
|
+
/**
 * Update the shared downloading flag, emitting on the subject only when
 * the value actually changes so subscribers never see duplicate states.
 *
 * @param downloading$ - BehaviorSubject holding the current boolean state
 * @param isDownloading - Desired new state
 */
function setDownloadingState(downloading$, isDownloading) {
    const unchanged = downloading$.value === isDownloading;
    if (unchanged)
        return;
    downloading$.next(isDownloading);
}
|
|
4285
|
+
/**
|
|
4286
|
+
* Create a liveQuery-based Observable<BlobProgress>.
|
|
4287
|
+
*
|
|
4288
|
+
* Combines a liveQuery (blobsRemaining, bytesRemaining) with an external
|
|
4289
|
+
* isDownloading flag controlled by the eager downloader.
|
|
4290
|
+
*/
|
|
4291
|
+
/**
 * Create a shared Observable of blob-download progress.
 *
 * A liveQuery scans every syncable table that has the `_hasBlobRefs` index,
 * summing the count and byte size of unresolved refs; any write that touches
 * `_hasBlobRefs` re-triggers the scan automatically. The scan result is then
 * combined with the external `downloading$` flag (driven by the eager
 * downloader) into a { isDownloading, blobsRemaining, bytesRemaining } object.
 *
 * @param db - DexieCloudDB wrapper (db.dx is the underlying Dexie instance)
 * @param downloading$ - BehaviorSubject<boolean> controlled by the eager downloader
 * @returns shared rxjs Observable of blob progress snapshots
 */
function observeBlobProgress(db, downloading$) {
    const blobStats$ = from(liveQuery(() => __awaiter(this, void 0, void 0, function* () {
        let blobsRemaining = 0;
        let bytesRemaining = 0;
        const syncedTables = getSyncableTables(db);
        // Read all tables in one 'r' transaction for a consistent snapshot.
        yield db.dx.transaction('r', syncedTables, (tx) => __awaiter(this, void 0, void 0, function* () {
            // Don't let the blobResolve middleware kick in for this internal scan -
            // we want to count unresolved refs, not resolve them.
            tx.idbtrans.disableBlobResolve = true;
            for (const table of syncedTables) {
                // Tables without the sparse _hasBlobRefs index can't be queried this way.
                const hasIndex = !!table.schema.idxByName['_hasBlobRefs'];
                if (!hasIndex)
                    continue;
                const unresolvedObjects = yield table
                    .where('_hasBlobRefs')
                    .equals(1)
                    .toArray();
                for (const obj of unresolvedObjects) {
                    const blobs = findBlobRefs(obj);
                    blobsRemaining += blobs.length;
                    // blob.size may be absent; count it as 0 bytes then.
                    bytesRemaining += blobs.reduce((sum, blob) => sum + (blob.size || 0), 0);
                }
            }
        }));
        return { blobsRemaining, bytesRemaining };
    })));
    // isDownloading is only reported true while there is actually work left.
    return combineLatest([blobStats$, downloading$]).pipe(map(([stats, isDownloading]) => ({
        isDownloading: isDownloading && stats.blobsRemaining > 0,
        blobsRemaining: stats.blobsRemaining,
        bytesRemaining: stats.bytesRemaining,
    })), share({ resetOnRefCountZero: () => timer(2000) }) // Keep alive for 2s after last unsubscription to avoid rapid re-subscriptions during UI updates
    );
}
|
|
4322
|
+
/**
|
|
4323
|
+
* Find all unresolved refs (BlobRef or TSONRef) in an object (recursive).
|
|
4324
|
+
* Handles both live TSONRef instances and serialized TSONRefs (after IndexedDB).
|
|
4325
|
+
*/
|
|
4326
|
+
/**
 * Recursively collect all unresolved refs found anywhere in an object graph.
 * Recognizes live TSONRef instances, serialized TSONRefs (as they look after
 * an IndexedDB round-trip), and plain BlobRefs.
 *
 * @param obj - Any value; non-objects and null/undefined yield no refs
 * @returns Array of { ref, size } descriptors in depth-first order
 */
function findBlobRefs(obj) {
    const collected = [];
    const visit = (node) => {
        // Primitives, null and undefined can't contain refs.
        if (node == null || typeof node !== 'object')
            return;
        if (TSONRef.isTSONRef(node)) {
            collected.push({ ref: node.ref, size: node.size });
        }
        else if (isSerializedTSONRef(node)) {
            collected.push({ ref: node.ref, size: node.size });
        }
        else if (isBlobRef(node)) {
            // BlobRef sizes default to 0 when missing.
            collected.push({ ref: node.ref, size: node.size || 0 });
        }
        else if (Array.isArray(node)) {
            for (const element of node) {
                visit(element);
            }
        }
        else if (node.constructor === Object) {
            // Only descend into plain objects - skip Dates, Blobs, typed arrays etc.
            for (const propValue of Object.values(node)) {
                visit(propValue);
            }
        }
    };
    visit(obj);
    return collected;
}
|
|
4356
|
+
|
|
4357
|
+
/**
|
|
4358
|
+
* Eager Blob Downloader
|
|
4359
|
+
*
|
|
4360
|
+
* Downloads unresolved blobs in the background when blobMode='eager'.
|
|
4361
|
+
* Called after sync completes to prefetch blobs for offline access.
|
|
4362
|
+
*
|
|
4363
|
+
* Progress is tracked automatically via liveQuery in blobProgress.ts —
|
|
4364
|
+
* no manual progress reporting needed here.
|
|
4365
|
+
*/
|
|
4366
|
+
/**
|
|
4367
|
+
* Download all unresolved blobs in the background.
|
|
4368
|
+
*
|
|
4369
|
+
* This is called when blobMode='eager' (default) after sync completes.
|
|
4370
|
+
* BlobRef URLs are signed (SAS tokens) so no auth header needed.
|
|
4371
|
+
*
|
|
4372
|
+
* Each blob is saved atomically using Table.update() to avoid race conditions.
|
|
4373
|
+
*/
|
|
4374
|
+
/**
 * Download all unresolved blobs in the background (blobMode='eager').
 *
 * Called after sync completes to prefetch blobs for offline access. For each
 * syncable table with the `_hasBlobRefs` index, objects carrying unresolved
 * refs are resolved via resolveAllBlobRefs() and patched back with
 * Table.update() (atomic per object). Progress is tracked automatically by
 * the liveQuery in observeBlobProgress - no manual reporting here.
 *
 * Bug fix: the abort checks previously compiled to empty statements
 * (`if (signal?.aborted) ;`), so cancellation was silently ignored. Aborting
 * the signal now stops both the per-table loop and the download workers.
 *
 * @param db - DexieCloudDB wrapper (provides tables, cloud options, blobDownloadTracker)
 * @param downloading$ - BehaviorSubject flag flipped around the download session
 * @param signal - Optional AbortSignal; when aborted, downloading stops early
 */
async function downloadUnresolvedBlobs(db, downloading$, signal) {
    const debugLog = (msg) => console.debug(`[dexie-cloud] ${msg}`);
    debugLog('Eager download: Starting...');
    // Cheap pre-scan: bail out without touching the downloading flag
    // if no table has any unresolved refs.
    const syncedTables = getSyncableTables(db);
    let hasWork = false;
    for (const table of syncedTables) {
        try {
            const hasIndex = !!table.schema.idxByName['_hasBlobRefs'];
            if (!hasIndex)
                continue;
            const count = await table.where('_hasBlobRefs').equals(1).count();
            if (count > 0) {
                hasWork = true;
                break;
            }
        }
        catch (_b) {
            // Table could not be scanned (e.g. missing index) - skip it.
        }
    }
    if (!hasWork) {
        debugLog('Eager download: No blobs remaining, exiting');
        return;
    }
    setDownloadingState(downloading$, true);
    try {
        debugLog(`Eager download: Found ${syncedTables.length} syncable tables: ${syncedTables.map(t => t.name).join(', ')}`);
        for (const table of syncedTables) {
            // FIX: honor cancellation (previously an empty statement).
            if (signal && signal.aborted)
                return;
            try {
                // Check if table has _hasBlobRefs index
                const hasIndex = table.schema.indexes.some(idx => idx.name === '_hasBlobRefs');
                if (!hasIndex)
                    continue;
                // Query objects with the _hasBlobRefs marker
                const unresolvedObjects = await table
                    .where('_hasBlobRefs')
                    .equals(1)
                    .toArray();
                debugLog(`Eager download: Table ${table.name} has ${unresolvedObjects.length} unresolved objects`);
                const databaseUrl = db.cloud.options && db.cloud.options.databaseUrl;
                if (!databaseUrl)
                    throw new Error('Database URL is required to download blobs');
                // Download up to MAX_CONCURRENT blobs in parallel
                const MAX_CONCURRENT = 6;
                const primaryKey = table.schema.primKey;
                // Only objects that still have unresolved refs AND an extractable
                // primary key are actionable.
                const pending = unresolvedObjects.filter(obj => {
                    if (!hasUnresolvedBlobRefs(obj))
                        return false;
                    const key = primaryKey.keyPath
                        ? Dexie.getByKeyPath(obj, primaryKey.keyPath)
                        : undefined;
                    return key !== undefined;
                });
                // Shared index lets the worker pool pull items without overlap.
                let i = 0;
                const runNext = async () => {
                    while (i < pending.length) {
                        // FIX: stop this worker on abort (previously an empty statement).
                        if (signal && signal.aborted)
                            return;
                        const obj = pending[i++];
                        const key = Dexie.getByKeyPath(obj, primaryKey.keyPath);
                        try {
                            const resolvedBlobs = [];
                            await resolveAllBlobRefs(obj, databaseUrl, resolvedBlobs, '', new WeakMap(), db.blobDownloadTracker);
                            // undefined clears the _hasBlobRefs marker on update.
                            const updateSpec = {
                                _hasBlobRefs: undefined,
                            };
                            for (const blob of resolvedBlobs) {
                                updateSpec[blob.keyPath] = blob.data;
                            }
                            debugLog(`Eager download: Updating ${table.name}:${key} with ${resolvedBlobs.length} blobs`);
                            await table.update(key, updateSpec);
                            // liveQuery in blobProgress.ts auto-detects this change
                        }
                        catch (err) {
                            // Per-object failure must not kill the whole session.
                            console.error(`Failed to download blobs for ${table.name}:${key}:`, err);
                        }
                    }
                };
                // Launch up to MAX_CONCURRENT workers draining the shared queue.
                const workers = [];
                for (let w = 0; w < Math.min(MAX_CONCURRENT, pending.length); w++) {
                    workers.push(runNext());
                }
                await Promise.all(workers);
            }
            catch (err) {
                // Table might not have _hasBlobRefs index or other issues - skip silently
            }
        }
    }
    finally {
        // Always clear the flag, even on abort or unexpected error.
        setDownloadingState(downloading$, false);
    }
}
|
|
4478
|
+
|
|
3557
4479
|
// Emulate true-private property db. Why? So it's not stored in DB.
|
|
3558
4480
|
const wm$1 = new WeakMap();
|
|
3559
4481
|
class AuthPersistedContext {
|
|
@@ -4512,7 +5434,7 @@ function createMutationTrackingMiddleware({ currentUserObservable, db, }) {
|
|
|
4512
5434
|
}
|
|
4513
5435
|
if (mode === 'readwrite') {
|
|
4514
5436
|
// Give each transaction a globally unique id.
|
|
4515
|
-
tx.txid = randomString
|
|
5437
|
+
tx.txid = randomString(16);
|
|
4516
5438
|
tx.opCount = 0;
|
|
4517
5439
|
// Introduce the concept of current user that lasts through the entire transaction.
|
|
4518
5440
|
// This is important because the tracked mutations must be connected to the user.
|
|
@@ -4810,6 +5732,318 @@ function createMutationTrackingMiddleware({ currentUserObservable, db, }) {
|
|
|
4810
5732
|
};
|
|
4811
5733
|
}
|
|
4812
5734
|
|
|
5735
|
+
/**
|
|
5736
|
+
* BlobSavingQueue - Queues resolved blobs for saving back to IndexedDB
|
|
5737
|
+
*
|
|
5738
|
+
* Uses setTimeout(fn, 0) instead of queueMicrotask to completely isolate
|
|
5739
|
+
* from Dexie's Promise.PSD context. This prevents the save operation
|
|
5740
|
+
* from inheriting any ongoing transaction.
|
|
5741
|
+
*
|
|
5742
|
+
* Each blob is saved atomically using downCore transaction with the specific
|
|
5743
|
+
* keyPath to avoid race conditions with other property changes.
|
|
5744
|
+
*/
|
|
5745
|
+
class BlobSavingQueue {
    /**
     * @param db - Dexie database used to open the isolated 'rw' save transactions.
     */
    constructor(db) {
        // FIFO of { tableName, primaryKey, resolvedBlobs } save requests.
        this.queue = [];
        // True while a consumer loop is draining the queue (single consumer).
        this.isProcessing = false;
        this.db = db;
    }
    /**
     * Queue a resolved blob for saving.
     * Only the specific blob properties will be updated atomically.
     *
     * @param tableName - Target table
     * @param primaryKey - Primary key of the object to patch
     * @param resolvedBlobs - Array of { keyPath, ref, data } entries to write back
     */
    saveBlobs(tableName, primaryKey, resolvedBlobs) {
        this.queue.push({ tableName, primaryKey, resolvedBlobs });
        this.startConsumer();
    }
    /**
     * Start the consumer if not already processing.
     * Uses setTimeout(fn, 0) to completely break out of any
     * Dexie transaction context (Promise.PSD); queueMicrotask would
     * risk inheriting the caller's ongoing transaction.
     */
    startConsumer() {
        if (this.isProcessing)
            return;
        this.isProcessing = true;
        // Use setTimeout to completely isolate from Dexie's PSD context
        setTimeout(() => {
            this.processQueue();
        }, 0);
    }
    /**
     * Process the next queued item, then re-invoke itself until the queue
     * is empty. Runs in an isolated context (no inherited transaction) and
     * patches each object atomically inside its own 'rw' transaction.
     */
    processQueue() {
        const item = this.queue.shift();
        if (!item) {
            // Queue drained - allow a future saveBlobs() to restart the consumer.
            this.isProcessing = false;
            return;
        }
        // Atomic update of just the blob property
        this.db.transaction('rw', item.tableName, (tx) => {
            const trans = tx.idbtrans;
            trans.disableChangeTracking = true; // Don't regard this as a change for sync purposes
            trans.disableAccessControl = true; // Bypass any access control checks since this is an internal operation
            trans.disableBlobResolve = true; // Custom flag to skip blob resolve middleware during this transaction
            // NOTE(review): updateSpec is built here but never used - the update()
            // modifier callback below performs the actual writes. TODO confirm dead code.
            const updateSpec = {};
            for (const blob of item.resolvedBlobs) {
                updateSpec[blob.keyPath] = blob.data;
            }
            tx.table(item.tableName).update(item.primaryKey, obj => {
                // Check that object still has the same unresolved blob refs before applying update (i.e. it hasn't been modified since we read it)
                for (const blob of item.resolvedBlobs) {
                    // Verify atomicity - none of the blob properties has been modified since we read it. If any of them was modified, skip updating this item to avoid overwriting user changes.
                    const currentValue = Dexie.getByKeyPath(obj, blob.keyPath);
                    if (currentValue === undefined) {
                        // Blob property was removed - skip updating this blob
                        continue;
                    }
                    if (!isBlobRef(currentValue)) {
                        // Blob property was modified to a non-blob-ref value - skip updating this blob
                        continue;
                    }
                    if (currentValue.ref !== blob.ref) {
                        // Blob property was modified - skip updating this blob
                        return; // Stop. Another item has been queued to fully fix the object.
                    }
                    Dexie.setByKeyPath(obj, blob.keyPath, blob.data);
                }
                delete obj._hasBlobRefs; // Clear the _hasBlobRefs marker if all refs were resolved.
            });
        }).catch((error) => {
            // Best-effort: log and keep draining the queue.
            console.error(`Error saving resolved blobs on ${item.tableName}:${item.primaryKey}:`, error);
        }).finally(() => {
            // Process next item in the queue
            return this.processQueue();
        });
    }
}
|
|
5824
|
+
|
|
5825
|
+
/**
|
|
5826
|
+
* DBCore Middleware for resolving BlobRefs on read
|
|
5827
|
+
*
|
|
5828
|
+
* This middleware intercepts read operations and resolves any BlobRefs
|
|
5829
|
+
* found in objects marked with _hasBlobRefs.
|
|
5830
|
+
*
|
|
5831
|
+
* Important: Avoids async/await to preserve Dexie's Promise.PSD context.
|
|
5832
|
+
* Uses Dexie.waitFor() only for explicit rw transactions to keep them alive.
|
|
5833
|
+
* For readonly or implicit transactions, resolves directly (no waitFor needed).
|
|
5834
|
+
*
|
|
5835
|
+
* Resolved blobs are queued for saving via BlobSavingQueue, which uses
|
|
5836
|
+
* setTimeout(fn, 0) to completely isolate from Dexie's transaction context.
|
|
5837
|
+
* Each blob is saved atomically using Table.update() with its keyPath to
|
|
5838
|
+
* avoid race conditions with other property changes.
|
|
5839
|
+
*
|
|
5840
|
+
* Blob downloads use Authorization header (same as sync) via the server
|
|
5841
|
+
* proxy endpoint: GET /blob/{ref}
|
|
5842
|
+
*/
|
|
5843
|
+
/**
 * DBCore middleware that resolves BlobRefs on read.
 *
 * Wraps get/getMany/query/openCursor on every non-internal table; when a
 * returned object carries unresolved refs (per hasUnresolvedBlobRefs), it is
 * passed to resolveAndSave(). A per-database BlobSavingQueue persists resolved
 * data back to IndexedDB outside the reading transaction. Transactions flagged
 * with `disableBlobResolve` (internal scans/saves) bypass the middleware.
 *
 * @param db - DexieCloudDB wrapper (may not have db.cloud initialized yet)
 * @returns a Dexie middleware descriptor for dexie.use()
 */
function createBlobResolveMiddleware(db) {
    return {
        stack: 'dbcore',
        name: 'blobResolve',
        level: -2, // Run below other middlewares and after sync and caching middlewares
        create(downlevelDatabase) {
            // Create a single queue instance for this database
            const blobSavingQueue = new BlobSavingQueue(db);
            return Object.assign(Object.assign({}, downlevelDatabase), { table(tableName) {
                    var _a;
                    if (!db.cloud) {
                        // db.cloud not yet initialized - skip blob resolution
                        // Fall through to downlevel table to avoid crash
                        return downlevelDatabase.table(tableName);
                    }
                    // NOTE(review): dbUrl is only checked in openCursor below;
                    // get/getMany/query call resolveAndSave regardless - confirm intended.
                    const dbUrl = (_a = db.cloud.options) === null || _a === void 0 ? void 0 : _a.databaseUrl;
                    const downlevelTable = downlevelDatabase.table(tableName);
                    // Skip internal tables
                    if (tableName.startsWith('$')) {
                        return downlevelTable;
                    }
                    return Object.assign(Object.assign({}, downlevelTable), { get(req) {
                            var _a;
                            // Internal transactions opt out of resolution entirely.
                            if ((_a = req.trans) === null || _a === void 0 ? void 0 : _a.disableBlobResolve) {
                                return downlevelTable.get(req);
                            }
                            return downlevelTable.get(req).then(result => {
                                if (result && hasUnresolvedBlobRefs(result)) {
                                    return resolveAndSave(downlevelTable, req.trans, req.key, result, blobSavingQueue, db);
                                }
                                return result;
                            });
                        },
                        getMany(req) {
                            var _a;
                            if ((_a = req.trans) === null || _a === void 0 ? void 0 : _a.disableBlobResolve) {
                                return downlevelTable.getMany(req);
                            }
                            return downlevelTable.getMany(req).then(results => {
                                // Check if any results need resolution
                                const needsResolution = results.some(r => r && hasUnresolvedBlobRefs(r));
                                if (!needsResolution)
                                    return results;
                                // Dexie.Promise preserves the PSD (transaction) context.
                                return Dexie.Promise.all(results.map((result, index) => {
                                    if (result && hasUnresolvedBlobRefs(result)) {
                                        return resolveAndSave(downlevelTable, req.trans, req.keys[index], result, blobSavingQueue, db);
                                    }
                                    return result;
                                }));
                            });
                        },
                        query(req) {
                            var _a;
                            if ((_a = req.trans) === null || _a === void 0 ? void 0 : _a.disableBlobResolve) {
                                return downlevelTable.query(req);
                            }
                            return downlevelTable.query(req).then(result => {
                                // Key-only queries have no values to resolve.
                                if (!result.result || !Array.isArray(result.result))
                                    return result;
                                // Check if any results need resolution
                                const needsResolution = result.result.some(r => r && hasUnresolvedBlobRefs(r));
                                if (!needsResolution)
                                    return result;
                                return Dexie.Promise.all(result.result.map(item => {
                                    if (item && hasUnresolvedBlobRefs(item)) {
                                        // No key is passed; resolveAndSave extracts it from the object.
                                        return resolveAndSave(downlevelTable, req.trans, undefined, item, blobSavingQueue, db);
                                    }
                                    return item;
                                })).then(resolved => (Object.assign(Object.assign({}, result), { result: resolved })));
                            });
                        },
                        openCursor(req) {
                            var _a;
                            if ((_a = req.trans) === null || _a === void 0 ? void 0 : _a.disableBlobResolve) {
                                return downlevelTable.openCursor(req);
                            }
                            return downlevelTable.openCursor(req).then(cursor => {
                                if (!cursor)
                                    return cursor; // No results, so no resolution needed
                                if (!req.values)
                                    return cursor; // No values requested, so no resolution needed
                                if (!dbUrl)
                                    return cursor; // No database URL configured, can't resolve blobs
                                return createBlobResolvingCursor(cursor, downlevelTable, blobSavingQueue, db);
                            });
                        } });
                } });
        },
    };
}
|
|
5933
|
+
/**
|
|
5934
|
+
* Create a cursor wrapper that resolves BlobRefs in values synchronously.
|
|
5935
|
+
*
|
|
5936
|
+
* Uses Object.create() to inherit all cursor methods, only overriding:
|
|
5937
|
+
* - start(): Resolves BlobRefs before calling the callback
|
|
5938
|
+
* - value: Getter that returns the resolved value
|
|
5939
|
+
*
|
|
5940
|
+
* Returns the cursor synchronously. Resolution happens in start() before
|
|
5941
|
+
* each onNext callback, ensuring cursor.value is always available.
|
|
5942
|
+
*/
|
|
5943
|
+
/**
 * Create a cursor wrapper that resolves BlobRefs in values before each
 * iteration callback.
 *
 * Uses Object.create() so the wrapper inherits every cursor method/property,
 * overriding only:
 * - `value`: an own writable property holding the (possibly resolved) value
 * - `start(onNext)`: resolves BlobRefs before invoking the caller's onNext
 *
 * Returns the wrapped cursor synchronously; resolution happens inside
 * start() so cursor.value is always populated when onNext runs.
 *
 * @param cursor - The downlevel DBCore cursor to wrap
 * @param table - Downlevel table (used by resolveAndSave for key extraction)
 * @param blobSavingQueue - Queue that persists resolved blobs out-of-band
 * @param db - DexieCloudDB wrapper
 */
function createBlobResolvingCursor(cursor, table, blobSavingQueue, db) {
    // Create wrapped cursor using Object.create() - inherits everything
    const wrappedCursor = Object.create(cursor, {
        value: {
            value: cursor.value,
            enumerable: true,
            writable: true
        },
        start: {
            value(onNext) {
                // Override start to resolve BlobRefs before each callback
                return cursor.start(() => {
                    const rawValue = cursor.value;
                    // Fast path: nothing to resolve, invoke callback directly.
                    if (!rawValue || !hasUnresolvedBlobRefs(rawValue)) {
                        onNext();
                        return;
                    }
                    // isCursorValue=true: resolveAndSave must use waitFor to keep
                    // the cursor's transaction alive during the fetch.
                    resolveAndSave(table, cursor.trans, cursor.primaryKey, rawValue, blobSavingQueue, db, true).then(resolved => {
                        wrappedCursor.value = resolved;
                        onNext();
                    }, err => {
                        // On failure, proceed with the raw (unresolved) value
                        // rather than stalling the iteration.
                        console.error('Failed to resolve BlobRefs for cursor value:', err);
                        onNext();
                    });
                });
            }
        }
    });
    return wrappedCursor;
}
|
|
5973
|
+
/**
|
|
5974
|
+
* Resolve BlobRefs in an object and queue each blob for atomic saving.
|
|
5975
|
+
*
|
|
5976
|
+
* Uses Dexie.waitFor() only when needed:
|
|
5977
|
+
* - Skip waitFor for readonly ('r') transactions
|
|
5978
|
+
* - Skip waitFor for implicit transactions (most common in liveQuery)
|
|
5979
|
+
* - Use waitFor only for explicit rw transactions that need to stay alive
|
|
5980
|
+
*
|
|
5981
|
+
* Each resolved blob is queued individually with its keyPath for atomic
|
|
5982
|
+
* update using downCore transaction with the specific keyPath - this avoids race conditions.
|
|
5983
|
+
*
|
|
5984
|
+
* Returns Dexie.Promise to preserve PSD context.
|
|
5985
|
+
*/
|
|
5986
|
+
/**
 * Resolve BlobRefs in an object and arrange for the resolved data to be
 * persisted, returning the resolved object to the read pipeline.
 *
 * waitFor strategy: Dexie.waitFor() is used only when a current transaction
 * exists AND it is not an implicit readonly one (implicit readonly - the
 * common liveQuery case - survives the async fetch without help). Cursor
 * values always use waitFor (isCursorValue=true).
 *
 * Persistence: in readonly contexts the blobs are handed to blobSavingQueue
 * (saved later, outside this transaction); in rw contexts the object is put
 * back directly via table.mutate() while still inside the transaction.
 *
 * Never throws into the read pipeline - on any error the original object
 * is returned unchanged.
 *
 * @param table - Downlevel DBCore table
 * @param trans - The read request's transaction (passed through to mutate)
 * @param pKey - Primary key, if known by the caller
 * @param obj - The object containing unresolved BlobRefs
 * @param blobSavingQueue - Out-of-band persistence queue
 * @param db - DexieCloudDB wrapper
 * @param isCursorValue - True when resolving a cursor value
 * @returns Dexie.Promise resolving to the object with blobs substituted
 */
function resolveAndSave(table, trans, pKey, // optional. If missing, tries to extract from object using primary key path
obj, blobSavingQueue, db, isCursorValue = false // Flag to indicate if we're resolving a cursor value (which may not have a primary key)
) {
    var _a;
    try {
        // Determine if we need waitFor:
        // Skip waitFor ONLY if BOTH conditions are met:
        // 1. readonly transaction
        // 2. implicit (non-explicit) transaction
        //
        // Transaction.explicit is true when user called db.transaction() explicitly.
        // For implicit transactions (auto-created for single operations),
        // Dexie handles async automatically so no waitFor needed.
        const currentTx = Dexie.currentTransaction;
        const isReadonly = (currentTx === null || currentTx === void 0 ? void 0 : currentTx.mode) === 'readonly';
        const isExplicit = (currentTx === null || currentTx === void 0 ? void 0 : currentTx.explicit) === true;
        // Skip waitFor only for implicit readonly (most common case: liveQuery)
        const skipWaitFor = isReadonly && !isExplicit && !isCursorValue;
        const needsWaitFor = currentTx && !skipWaitFor;
        const dbUrl = ((_a = db.cloud.options) === null || _a === void 0 ? void 0 : _a.databaseUrl) || '';
        // Collect resolved blobs with their keyPaths
        const resolvedBlobs = [];
        // Create the resolution promise with auth info
        const resolutionPromise = resolveAllBlobRefs(obj, dbUrl, resolvedBlobs, '', new WeakMap(), db.blobDownloadTracker);
        // Wrap with waitFor to keep transaction alive during fetch
        const resolvePromise = needsWaitFor
            ? Dexie.waitFor(resolutionPromise)
            : Dexie.Promise.resolve(resolutionPromise);
        return resolvePromise.then(resolved => {
            // Get primary key from the object
            const primaryKey = table.schema.primaryKey;
            const key = pKey !== undefined ? pKey : primaryKey.keyPath
                ? Dexie.getByKeyPath(obj, primaryKey.keyPath)
                : undefined;
            if (key !== undefined) {
                // Queue each resolved blob individually for atomic update
                // This uses setTimeout(fn, 0) to completely isolate from
                // Dexie's transaction context (avoids inheriting PSD)
                if (isReadonly) {
                    blobSavingQueue.saveBlobs(table.name, key, resolvedBlobs);
                }
                else {
                    // For rw transactions, we can save directly without queueing
                    // since we're still in the same transaction context
                    table.mutate({ type: 'put', keys: [key], values: [resolved], trans }).catch(err => {
                        console.error(`Failed to save resolved blob on ${table.name}:${key}:`, err);
                    });
                }
            }
            return resolved;
        }).catch(err => {
            console.error(`[dexie-cloud:blobResolve] Failed to resolve BlobRefs on ${table.name}:`, err);
            return obj; // Return original object on error - never block the read pipeline
        });
    }
    catch (err) {
        // Defensive: even synchronous failures (e.g. schema access) must not
        // break the read - return the object as-is.
        console.error(`[dexie-cloud:blobResolve] Sync error in resolveAndSave on ${table.name}:`, err);
        return Dexie.Promise.resolve(obj); // Never block reads
    }
}
|
|
6046
|
+
|
|
4813
6047
|
function overrideParseStoresSpec(origFunc, dexie) {
|
|
4814
6048
|
return function (stores, dbSchema) {
|
|
4815
6049
|
var _a;
|
|
@@ -4862,6 +6096,11 @@ function overrideParseStoresSpec(origFunc, dexie) {
|
|
|
4862
6096
|
if (!/^\$/.test(tableName)) {
|
|
4863
6097
|
storesClone[`$${tableName}_mutations`] = '++rev';
|
|
4864
6098
|
cloudTableSchema.markedForSync = true;
|
|
6099
|
+
// Add sparse index for _hasBlobRefs (for BlobRef resolution tracking)
|
|
6100
|
+
// IndexedDB sparse indexes have zero overhead when the property doesn't exist
|
|
6101
|
+
if (!storesClone[tableName].includes('_hasBlobRefs')) {
|
|
6102
|
+
storesClone[tableName] += ',_hasBlobRefs';
|
|
6103
|
+
}
|
|
4865
6104
|
}
|
|
4866
6105
|
if (cloudTableSchema.deleted) {
|
|
4867
6106
|
cloudTableSchema.deleted = false;
|
|
@@ -5501,7 +6740,6 @@ function syncIfPossible(db, cloudOptions, cloudSchema, options) {
|
|
|
5501
6740
|
yield checkSyncRateLimitDelay(db);
|
|
5502
6741
|
yield performGuardedJob(db, CURRENT_SYNC_WORKER, () => sync(db, cloudOptions, cloudSchema, options));
|
|
5503
6742
|
ongoingSyncs.delete(db);
|
|
5504
|
-
console.debug('Done sync');
|
|
5505
6743
|
}
|
|
5506
6744
|
catch (error) {
|
|
5507
6745
|
ongoingSyncs.delete(db);
|
|
@@ -5516,8 +6754,6 @@ function syncIfPossible(db, cloudOptions, cloudSchema, options) {
|
|
|
5516
6754
|
}
|
|
5517
6755
|
}
|
|
5518
6756
|
|
|
5519
|
-
const SECONDS = 1000;
|
|
5520
|
-
|
|
5521
6757
|
function LocalSyncWorker(db, cloudOptions, cloudSchema) {
|
|
5522
6758
|
let localSyncEventSubscription = null;
|
|
5523
6759
|
let cancelToken = { cancelled: false };
|
|
@@ -5854,6 +7090,38 @@ const Styles = {
|
|
|
5854
7090
|
color: "#374151",
|
|
5855
7091
|
transition: "all 0.2s ease",
|
|
5856
7092
|
gap: "12px"
|
|
7093
|
+
},
|
|
7094
|
+
// Copy button for alerts with copyText
|
|
7095
|
+
CopyButton: {
|
|
7096
|
+
display: "inline-flex",
|
|
7097
|
+
alignItems: "center",
|
|
7098
|
+
gap: "4px",
|
|
7099
|
+
padding: "4px 10px",
|
|
7100
|
+
marginTop: "8px",
|
|
7101
|
+
border: "1px solid #d1d5db",
|
|
7102
|
+
borderRadius: "4px",
|
|
7103
|
+
backgroundColor: "#f9fafb",
|
|
7104
|
+
cursor: "pointer",
|
|
7105
|
+
fontSize: "12px",
|
|
7106
|
+
fontWeight: "500",
|
|
7107
|
+
color: "#374151",
|
|
7108
|
+
transition: "all 0.15s ease",
|
|
7109
|
+
fontFamily: "monospace"
|
|
7110
|
+
},
|
|
7111
|
+
CopyButtonCopied: {
|
|
7112
|
+
display: "inline-flex",
|
|
7113
|
+
alignItems: "center",
|
|
7114
|
+
gap: "4px",
|
|
7115
|
+
padding: "4px 10px",
|
|
7116
|
+
marginTop: "8px",
|
|
7117
|
+
border: "1px solid #22c55e",
|
|
7118
|
+
borderRadius: "4px",
|
|
7119
|
+
backgroundColor: "#f0fdf4",
|
|
7120
|
+
cursor: "default",
|
|
7121
|
+
fontSize: "12px",
|
|
7122
|
+
fontWeight: "500",
|
|
7123
|
+
color: "#16a34a",
|
|
7124
|
+
fontFamily: "monospace"
|
|
5857
7125
|
}};
|
|
5858
7126
|
|
|
5859
7127
|
function Dialog({ children, className }) {
|
|
@@ -5964,7 +7232,9 @@ function LoginDialog({ title, alerts, fields, options, submitLabel, cancelLabel,
|
|
|
5964
7232
|
return (_$1(Dialog, { className: "dxc-login-dlg" },
|
|
5965
7233
|
_$1(k$1, null,
|
|
5966
7234
|
_$1("h3", { style: Styles.WindowHeader }, title),
|
|
5967
|
-
alerts.map((alert, idx) => (_$1("
|
|
7235
|
+
alerts.map((alert, idx) => (_$1("div", { key: idx },
|
|
7236
|
+
_$1("p", { style: Styles.Alert[alert.type] }, resolveText(alert)),
|
|
7237
|
+
alert.copyText && _$1(CopyButton, { text: alert.copyText })))),
|
|
5968
7238
|
hasOptions && (_$1("div", { class: "dxc-options" }, hasMultipleGroups ? (
|
|
5969
7239
|
// Render with dividers between groups
|
|
5970
7240
|
Array.from(optionGroups.entries()).map(([groupName, groupOptions], groupIdx) => (_$1(k$1, { key: groupName },
|
|
@@ -6003,6 +7273,50 @@ function valueTransformer(type, value) {
|
|
|
6003
7273
|
return value;
|
|
6004
7274
|
}
|
|
6005
7275
|
}
|
|
7276
|
+
/**
 * Copy-to-clipboard button for login-dialog alerts that carry copyText.
 *
 * Shows the text with a clipboard icon; on click, copies via the async
 * Clipboard API with a legacy fallbackCopy() fallback, then shows a
 * "Copied!" confirmation for 2 seconds.
 *
 * Note: d/A/_/_$1 appear to be minified aliases of useState/useRef/
 * useEffect/h from the bundled preact build - TODO confirm against the
 * bundle's import map.
 *
 * @param text - The text to copy and display on the button
 */
function CopyButton({ text }) {
    const [copied, setCopied] = d(false);
    // Holds the pending reset-timer id so it can be cleared on re-click/unmount.
    const timeoutRef = A(null);
    // Cleanup timeout on unmount
    _(() => {
        return () => {
            if (timeoutRef.current !== null)
                clearTimeout(timeoutRef.current);
        };
    }, []);
    // Flip to "copied" state and schedule the revert after 2s,
    // replacing any previously pending timer.
    const scheduleCopiedReset = () => {
        if (timeoutRef.current !== null)
            clearTimeout(timeoutRef.current);
        setCopied(true);
        timeoutRef.current = setTimeout(() => {
            timeoutRef.current = null;
            setCopied(false);
        }, 2000);
    };
    const handleClick = () => {
        var _a;
        // Prefer the async Clipboard API when available; fall back to the
        // execCommand-based path if it's missing or its promise rejects.
        if (typeof navigator !== 'undefined' && ((_a = navigator.clipboard) === null || _a === void 0 ? void 0 : _a.writeText)) {
            navigator.clipboard.writeText(text).then(scheduleCopiedReset).catch(() => {
                fallbackCopy(text, scheduleCopiedReset);
            });
        }
        else {
            fallbackCopy(text, scheduleCopiedReset);
        }
    };
    return (_$1("button", { type: "button", style: copied ? Styles.CopyButtonCopied : Styles.CopyButton, onClick: handleClick, title: "Copy to clipboard" }, copied ? '✓ Copied!' : `📋 ${text}`));
}
|
|
7308
|
+
/**
 * Legacy clipboard fallback: select the text in a hidden textarea and run
 * document.execCommand('copy'). Used when the async Clipboard API is
 * unavailable or its promise rejects.
 *
 * Fixes over the previous version:
 * - The temporary textarea is removed in a `finally` block, so it no longer
 *   leaks into the DOM if execCommand throws.
 * - A throwing execCommand is treated as a failed copy instead of
 *   propagating (callers invoke this inside a .catch handler, where a throw
 *   would surface as an unhandled rejection).
 *
 * @param text - Text to copy to the clipboard
 * @param onSuccess - Invoked only when the copy command reports success
 */
function fallbackCopy(text, onSuccess) {
    const textarea = document.createElement('textarea');
    textarea.value = text;
    // Keep the element out of view without display:none,
    // which would prevent selection in some browsers.
    textarea.style.position = 'fixed';
    textarea.style.opacity = '0';
    document.body.appendChild(textarea);
    let success = false;
    try {
        textarea.select();
        success = document.execCommand('copy');
    }
    catch (_err) {
        success = false; // execCommand may throw - treat as a failed, best-effort copy
    }
    finally {
        document.body.removeChild(textarea);
    }
    if (success)
        onSuccess();
}
|
|
6006
7320
|
|
|
6007
7321
|
class LoginGui extends x {
|
|
6008
7322
|
constructor(props) {
|
|
@@ -6147,7 +7461,7 @@ function computeSyncState(db) {
|
|
|
6147
7461
|
|
|
6148
7462
|
function createSharedValueObservable(o, defaultValue) {
|
|
6149
7463
|
let currentValue = defaultValue;
|
|
6150
|
-
let shared = from(o).pipe(map$1((x) => (currentValue = x)), share({ resetOnRefCountZero: () => timer(1000) }));
|
|
7464
|
+
let shared = from(o).pipe(map$1((x) => (currentValue = x)), share$1({ resetOnRefCountZero: () => timer(1000) }));
|
|
6151
7465
|
const rv = new Observable((observer) => {
|
|
6152
7466
|
let didEmit = false;
|
|
6153
7467
|
const subscription = shared.subscribe({
|
|
@@ -6701,9 +8015,10 @@ function dexieCloud(dexie) {
|
|
|
6701
8015
|
currentUserEmitter.next(UNAUTHORIZED_USER);
|
|
6702
8016
|
});
|
|
6703
8017
|
const syncComplete = new Subject();
|
|
8018
|
+
const downloading$ = createDownloadingState();
|
|
6704
8019
|
dexie.cloud = {
|
|
6705
8020
|
// @ts-ignore
|
|
6706
|
-
version: "4.
|
|
8021
|
+
version: "4.4.0",
|
|
6707
8022
|
options: Object.assign({}, DEFAULT_OPTIONS),
|
|
6708
8023
|
schema: null,
|
|
6709
8024
|
get currentUserId() {
|
|
@@ -6718,6 +8033,7 @@ function dexieCloud(dexie) {
|
|
|
6718
8033
|
syncComplete,
|
|
6719
8034
|
},
|
|
6720
8035
|
persistedSyncState: new BehaviorSubject(undefined),
|
|
8036
|
+
blobProgress: observeBlobProgress(DexieCloudDB(dexie), downloading$),
|
|
6721
8037
|
userInteraction: new BehaviorSubject(undefined),
|
|
6722
8038
|
webSocketStatus: new BehaviorSubject('not-started'),
|
|
6723
8039
|
login(hint) {
|
|
@@ -6831,6 +8147,7 @@ function dexieCloud(dexie) {
|
|
|
6831
8147
|
var _a, _b;
|
|
6832
8148
|
return ((_b = (_a = this.db.cloud.schema) === null || _a === void 0 ? void 0 : _a[this.name]) === null || _b === void 0 ? void 0 : _b.idPrefix) || '';
|
|
6833
8149
|
};
|
|
8150
|
+
dexie.use(createBlobResolveMiddleware(DexieCloudDB(dexie)));
|
|
6834
8151
|
dexie.use(createMutationTrackingMiddleware({
|
|
6835
8152
|
currentUserObservable: dexie.cloud.currentUser,
|
|
6836
8153
|
db: DexieCloudDB(dexie),
|
|
@@ -6839,7 +8156,7 @@ function dexieCloud(dexie) {
|
|
|
6839
8156
|
dexie.use(createIdGenerationMiddleware(DexieCloudDB(dexie)));
|
|
6840
8157
|
function onDbReady(dexie) {
|
|
6841
8158
|
return __awaiter(this, void 0, void 0, function* () {
|
|
6842
|
-
var _a, _b, _c, _d, _e, _f, _g;
|
|
8159
|
+
var _a, _b, _c, _d, _e, _f, _g, _h, _j;
|
|
6843
8160
|
closed = false; // As Dexie calls us, we are not closed anymore. Maybe reopened? Remember db.ready event is registered with sticky flag!
|
|
6844
8161
|
const db = DexieCloudDB(dexie);
|
|
6845
8162
|
// Setup default GUI:
|
|
@@ -6853,6 +8170,25 @@ function dexieCloud(dexie) {
|
|
|
6853
8170
|
}
|
|
6854
8171
|
// Forward db.syncCompleteEvent to be publicly consumable via db.cloud.events.syncComplete:
|
|
6855
8172
|
subscriptions.push(db.syncCompleteEvent.subscribe(syncComplete));
|
|
8173
|
+
// Eager blob download: When blobMode='eager' (default), download unresolved blobs after sync
|
|
8174
|
+
const blobMode = (_c = (_b = db.cloud.options) === null || _b === void 0 ? void 0 : _b.blobMode) !== null && _c !== void 0 ? _c : 'eager';
|
|
8175
|
+
if (blobMode === 'eager') {
|
|
8176
|
+
let eagerBlobDownloadInFlight = null;
|
|
8177
|
+
const downloadBlobs = () => {
|
|
8178
|
+
if (eagerBlobDownloadInFlight)
|
|
8179
|
+
return;
|
|
8180
|
+
eagerBlobDownloadInFlight = Dexie.ignoreTransaction(() => downloadUnresolvedBlobs(db, downloading$))
|
|
8181
|
+
.catch(err => {
|
|
8182
|
+
console.error('[dexie-cloud] Eager blob download failed:', err);
|
|
8183
|
+
})
|
|
8184
|
+
.finally(() => {
|
|
8185
|
+
eagerBlobDownloadInFlight = null;
|
|
8186
|
+
});
|
|
8187
|
+
};
|
|
8188
|
+
setTimeout(downloadBlobs, 0); // Don't block ready event. Start downloading blobs in the background right after.
|
|
8189
|
+
// And also after every sync completes:
|
|
8190
|
+
subscriptions.push(db.syncCompleteEvent.subscribe(downloadBlobs));
|
|
8191
|
+
}
|
|
6856
8192
|
//verifyConfig(db.cloud.options); Not needed (yet at least!)
|
|
6857
8193
|
// Verify the user has allowed version increment.
|
|
6858
8194
|
if (!db.tables.every((table) => table.core)) {
|
|
@@ -7008,7 +8344,7 @@ function dexieCloud(dexie) {
|
|
|
7008
8344
|
// Continue with normal flow - user can try again
|
|
7009
8345
|
}
|
|
7010
8346
|
}
|
|
7011
|
-
const requireAuth = (
|
|
8347
|
+
const requireAuth = (_d = db.cloud.options) === null || _d === void 0 ? void 0 : _d.requireAuth;
|
|
7012
8348
|
if (requireAuth) {
|
|
7013
8349
|
if (db.cloud.isServiceWorkerDB) {
|
|
7014
8350
|
// If this is a service worker DB, we can't do authentication here,
|
|
@@ -7045,20 +8381,20 @@ function dexieCloud(dexie) {
|
|
|
7045
8381
|
localSyncWorker.stop();
|
|
7046
8382
|
localSyncWorker = null;
|
|
7047
8383
|
throwIfClosed();
|
|
7048
|
-
const doInitialSync = ((
|
|
8384
|
+
const doInitialSync = ((_e = db.cloud.options) === null || _e === void 0 ? void 0 : _e.databaseUrl) && (!initiallySynced || changedUser);
|
|
7049
8385
|
if (doInitialSync) {
|
|
7050
8386
|
// Do the initial sync directly in the browser thread no matter if we are using service worker or not.
|
|
7051
8387
|
yield performInitialSync(db, db.cloud.options, db.cloud.schema);
|
|
7052
8388
|
db.setInitiallySynced(true);
|
|
7053
8389
|
}
|
|
7054
8390
|
throwIfClosed();
|
|
7055
|
-
if (db.cloud.usingServiceWorker && ((
|
|
8391
|
+
if (db.cloud.usingServiceWorker && ((_f = db.cloud.options) === null || _f === void 0 ? void 0 : _f.databaseUrl)) {
|
|
7056
8392
|
if (!doInitialSync) {
|
|
7057
8393
|
registerSyncEvent(db, 'push').catch(() => { });
|
|
7058
8394
|
}
|
|
7059
8395
|
registerPeriodicSyncEvent(db).catch(() => { });
|
|
7060
8396
|
}
|
|
7061
|
-
else if (((
|
|
8397
|
+
else if (((_g = db.cloud.options) === null || _g === void 0 ? void 0 : _g.databaseUrl) &&
|
|
7062
8398
|
db.cloud.schema &&
|
|
7063
8399
|
!db.cloud.isServiceWorkerDB) {
|
|
7064
8400
|
// There's no SW. Start SyncWorker instead.
|
|
@@ -7087,8 +8423,8 @@ function dexieCloud(dexie) {
|
|
|
7087
8423
|
}));
|
|
7088
8424
|
}
|
|
7089
8425
|
// Connect WebSocket unless we are in a service worker or websocket is disabled.
|
|
7090
|
-
if (((
|
|
7091
|
-
!((
|
|
8426
|
+
if (((_h = db.cloud.options) === null || _h === void 0 ? void 0 : _h.databaseUrl) &&
|
|
8427
|
+
!((_j = db.cloud.options) === null || _j === void 0 ? void 0 : _j.disableWebSocket) &&
|
|
7092
8428
|
!IS_SERVICE_WORKER) {
|
|
7093
8429
|
subscriptions.push(connectWebSocket(db));
|
|
7094
8430
|
}
|
|
@@ -7096,7 +8432,7 @@ function dexieCloud(dexie) {
|
|
|
7096
8432
|
}
|
|
7097
8433
|
}
|
|
7098
8434
|
// @ts-ignore
|
|
7099
|
-
dexieCloud.version = "4.
|
|
8435
|
+
dexieCloud.version = "4.4.0";
|
|
7100
8436
|
Dexie.Cloud = dexieCloud;
|
|
7101
8437
|
|
|
7102
8438
|
// In case the SW lives for a while, let it reuse already opened connections:
|