dexie-cloud-addon 4.0.7 → 4.1.0-alpha.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. package/dist/modern/DexieCloudOptions.d.ts +4 -2
  2. package/dist/modern/WSObservable.d.ts +9 -6
  3. package/dist/modern/db/DexieCloudDB.d.ts +2 -0
  4. package/dist/modern/db/entities/PersistedSyncState.d.ts +7 -0
  5. package/dist/modern/dexie-cloud-addon.js +1752 -79
  6. package/dist/modern/dexie-cloud-addon.js.map +1 -1
  7. package/dist/modern/dexie-cloud-addon.min.js +1 -1
  8. package/dist/modern/dexie-cloud-addon.min.js.map +1 -1
  9. package/dist/modern/service-worker.js +1752 -79
  10. package/dist/modern/service-worker.js.map +1 -1
  11. package/dist/modern/service-worker.min.js +1 -1
  12. package/dist/modern/service-worker.min.js.map +1 -1
  13. package/dist/modern/sync/DEXIE_CLOUD_SYNCER_ID.d.ts +1 -0
  14. package/dist/modern/sync/syncWithServer.d.ts +2 -2
  15. package/dist/modern/yjs/Y.d.ts +3 -0
  16. package/dist/modern/yjs/YDexieCloudSyncState.d.ts +4 -0
  17. package/dist/modern/yjs/YTable.d.ts +2 -0
  18. package/dist/modern/yjs/applyYMessages.d.ts +5 -0
  19. package/dist/modern/yjs/awareness.d.ts +4 -0
  20. package/dist/modern/yjs/createYClientUpdateObservable.d.ts +4 -0
  21. package/dist/modern/yjs/createYHandler.d.ts +5 -0
  22. package/dist/modern/yjs/downloadYDocsFromServer.d.ts +3 -0
  23. package/dist/modern/yjs/getUpdatesTable.d.ts +3 -0
  24. package/dist/modern/yjs/listUpdatesSince.d.ts +2 -0
  25. package/dist/modern/yjs/listYClientMessagesAndStateVector.d.ts +26 -0
  26. package/dist/modern/yjs/updateYSyncStates.d.ts +6 -0
  27. package/dist/umd/DexieCloudOptions.d.ts +4 -2
  28. package/dist/umd/WSObservable.d.ts +9 -6
  29. package/dist/umd/db/DexieCloudDB.d.ts +2 -0
  30. package/dist/umd/db/entities/PersistedSyncState.d.ts +7 -0
  31. package/dist/umd/dexie-cloud-addon.js +1750 -77
  32. package/dist/umd/dexie-cloud-addon.js.map +1 -1
  33. package/dist/umd/dexie-cloud-addon.min.js +1 -1
  34. package/dist/umd/dexie-cloud-addon.min.js.map +1 -1
  35. package/dist/umd/service-worker.js +1750 -77
  36. package/dist/umd/service-worker.js.map +1 -1
  37. package/dist/umd/service-worker.min.js +1 -1
  38. package/dist/umd/service-worker.min.js.map +1 -1
  39. package/dist/umd/sync/DEXIE_CLOUD_SYNCER_ID.d.ts +1 -0
  40. package/dist/umd/sync/syncWithServer.d.ts +2 -2
  41. package/dist/umd/yjs/Y.d.ts +3 -0
  42. package/dist/umd/yjs/YDexieCloudSyncState.d.ts +4 -0
  43. package/dist/umd/yjs/YTable.d.ts +2 -0
  44. package/dist/umd/yjs/applyYMessages.d.ts +5 -0
  45. package/dist/umd/yjs/awareness.d.ts +4 -0
  46. package/dist/umd/yjs/createYClientUpdateObservable.d.ts +4 -0
  47. package/dist/umd/yjs/createYHandler.d.ts +5 -0
  48. package/dist/umd/yjs/downloadYDocsFromServer.d.ts +3 -0
  49. package/dist/umd/yjs/getUpdatesTable.d.ts +3 -0
  50. package/dist/umd/yjs/listUpdatesSince.d.ts +2 -0
  51. package/dist/umd/yjs/listYClientMessagesAndStateVector.d.ts +26 -0
  52. package/dist/umd/yjs/updateYSyncStates.d.ts +6 -0
  53. package/package.json +5 -4
  54. package/dist/modern/helpers/dbOnClosed.d.ts +0 -2
  55. package/dist/umd/helpers/dbOnClosed.d.ts +0 -2
@@ -8,7 +8,7 @@
8
8
  *
9
9
  * ==========================================================================
10
10
  *
11
- * Version 4.0.7, Sun May 26 2024
11
+ * Version 4.1.0-alpha.10, Wed Oct 16 2024
12
12
  *
13
13
  * https://dexie.org
14
14
  *
@@ -2245,6 +2245,1075 @@
2245
2245
  : url.pathname.split('/')[1];
2246
2246
  }
2247
2247
 
2248
/**
 * Common Math expressions.
 *
 * @module math
 */

// Cache Math helpers locally for brevity.
const floor = Math.floor;
const abs = Math.abs;

/**
 * @function
 * @param {number} a
 * @param {number} b
 * @return {number} The smaller element of a and b
 */
const min = (a, b) => a < b ? a : b;

/**
 * @function
 * @param {number} a
 * @param {number} b
 * @return {number} The bigger element of a and b
 */
const max = (a, b) => a > b ? a : b;

/**
 * @param {number} n
 * @return {boolean} Whether n is negative. This function also differentiates between -0 and +0
 */
const isNegativeZero = n => n !== 0 ? n < 0 : 1 / n < 0;

/* eslint-env browser */

// Bit masks used by the variable-length integer codec below.
const BIT7 = 64;   // 0b01000000 - sign bit in the first byte of a var-int
const BIT8 = 128;  // 0b10000000 - continuation bit
const BITS6 = 63;  // 0b00111111 - payload mask of a var-int's first byte
const BITS7 = 127; // 0b01111111 - payload mask of subsequent bytes
/**
 * Largest 32-bit signed integer.
 * @type {number}
 */
const BITS31 = 0x7FFFFFFF;

/**
 * Utility helpers for working with numbers.
 *
 * @module number
 */


const MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER;

/* c8 ignore next */
// Fallback for very old environments that lack Number.isInteger.
const isInteger = Number.isInteger || (num => typeof num === 'number' && isFinite(num) && floor(num) === num);

/**
 * Utility module to work with Arrays.
 *
 * @module array
 */


const isArray = Array.isArray;
2310
+
2311
/**
 * Encode a string as UTF-8 bytes without relying on TextEncoder.
 *
 * @param {string} str
 * @return {Uint8Array}
 */
const _encodeUtf8Polyfill = str => {
  // unescape(encodeURIComponent(..)) yields one char per UTF-8 byte.
  const encodedString = unescape(encodeURIComponent(str));
  const len = encodedString.length;
  const buf = new Uint8Array(len);
  for (let i = 0; i < len; i++) {
    buf[i] = /** @type {number} */ (encodedString.codePointAt(i));
  }
  return buf
};

/* c8 ignore next */
// null when TextEncoder is unavailable (very old environments).
const utf8TextEncoder = /** @type {TextEncoder} */ (typeof TextEncoder !== 'undefined' ? new TextEncoder() : null);

/**
 * @param {string} str
 * @return {Uint8Array}
 */
const _encodeUtf8Native = str => utf8TextEncoder.encode(str);

/**
 * Encode a string as UTF-8, preferring the native TextEncoder.
 *
 * @param {string} str
 * @return {Uint8Array}
 */
/* c8 ignore next */
const encodeUtf8 = utf8TextEncoder ? _encodeUtf8Native : _encodeUtf8Polyfill;

/* c8 ignore next */
// Mutable (`let`) because the Safari workaround below may null it out.
let utf8TextDecoder = typeof TextDecoder === 'undefined' ? null : new TextDecoder('utf-8', { fatal: true, ignoreBOM: true });

/* c8 ignore start */
if (utf8TextDecoder && utf8TextDecoder.decode(new Uint8Array()).length === 1) {
  // Safari doesn't handle BOM correctly.
  // This fixes a bug in Safari 13.0.5 where it produces a BOM the first time it is called.
  // utf8TextDecoder.decode(new Uint8Array()).length === 1 on the first call and
  // utf8TextDecoder.decode(new Uint8Array()).length === 1 on the second call
  // Another issue is that from then on no BOM chars are recognized anymore
  /* c8 ignore next */
  utf8TextDecoder = null; // fall back to the polyfill string reader
}
2354
+
2355
/**
 * Efficient schema-less binary encoding with support for variable length encoding.
 *
 * Use [lib0/encoding] with [lib0/decoding]. Every encoding function has a corresponding decoding function.
 *
 * Encodes numbers in little-endian order (least to most significant byte order)
 * and is compatible with Golang's binary encoding (https://golang.org/pkg/encoding/binary/)
 * which is also used in Protocol Buffers.
 *
 * ```js
 * // encoding step
 * const encoder = encoding.createEncoder()
 * encoding.writeVarUint(encoder, 256)
 * encoding.writeVarString(encoder, 'Hello world!')
 * const buf = encoding.toUint8Array(encoder)
 * ```
 *
 * ```js
 * // decoding step
 * const decoder = decoding.createDecoder(buf)
 * decoding.readVarUint(decoder) // => 256
 * decoding.readVarString(decoder) // => 'Hello world!'
 * decoding.hasContent(decoder) // => false - all data is read
 * ```
 *
 * @module encoding
 */


/**
 * A BinaryEncoder handles the encoding to an Uint8Array.
 */
class Encoder {
  constructor () {
    // Next write position within `cbuf`.
    this.cpos = 0;
    // Current working buffer; grown geometrically by the write helpers below.
    this.cbuf = new Uint8Array(100);
    /**
     * Completed (full) buffers, in write order.
     * @type {Array<Uint8Array>}
     */
    this.bufs = [];
  }
}
2397
+
2398
/**
 * The current length of the encoded data.
 *
 * Sums the sizes of all completed buffers plus the written prefix of the
 * working buffer.
 *
 * @function
 * @param {Encoder} encoder
 * @return {number} Total number of bytes written so far.
 */
const length = encoder =>
  encoder.bufs.reduce((total, chunk) => total + chunk.length, encoder.cpos);
2412
+
2413
/**
 * Transform to Uint8Array.
 *
 * Concatenates every completed buffer plus the written prefix of the working
 * buffer into one contiguous array.
 *
 * @function
 * @param {Encoder} encoder
 * @return {Uint8Array} The created ArrayBuffer.
 */
const toUint8Array = encoder => {
  const result = new Uint8Array(length(encoder));
  let offset = 0;
  for (const chunk of encoder.bufs) {
    result.set(chunk, offset);
    offset += chunk.length;
  }
  // Finally append the in-progress buffer (only its written prefix).
  result.set(new Uint8Array(encoder.cbuf.buffer, 0, encoder.cpos), offset);
  return result;
};
2431
+
2432
/**
 * Verify that it is possible to write `len` bytes without checking. If
 * necessary, a new Buffer with the required length is attached.
 *
 * @param {Encoder} encoder
 * @param {number} len
 */
const verifyLen = (encoder, len) => {
  const bufferLen = encoder.cbuf.length;
  if (bufferLen - encoder.cpos < len) {
    // Not enough room: archive the written prefix and allocate a buffer that
    // is at least twice the larger of (current size, requested length).
    encoder.bufs.push(new Uint8Array(encoder.cbuf.buffer, 0, encoder.cpos));
    encoder.cbuf = new Uint8Array(max(bufferLen, len) * 2);
    encoder.cpos = 0;
  }
};
2447
+
2448
/**
 * Write one byte to the encoder.
 *
 * @function
 * @param {Encoder} encoder
 * @param {number} num The byte that is to be encoded.
 */
const write = (encoder, num) => {
  const capacity = encoder.cbuf.length;
  if (encoder.cpos === capacity) {
    // Working buffer is full: archive it and start a fresh one, twice as big.
    encoder.bufs.push(encoder.cbuf);
    encoder.cbuf = new Uint8Array(capacity * 2);
    encoder.cpos = 0;
  }
  encoder.cbuf[encoder.cpos++] = num;
};
2464
+
2465
/**
 * Write a variable length unsigned integer. Max encodable integer is 2^53.
 *
 * Emits 7 payload bits per byte, least-significant group first; the high bit
 * of each byte signals that another byte follows.
 *
 * @function
 * @param {Encoder} encoder
 * @param {number} num The number that is to be encoded.
 */
const writeVarUint = (encoder, num) => {
  // Division instead of >>> 7 so values above 32 bits stay correct.
  for (; num > BITS7; num = floor(num / 128)) {
    write(encoder, BIT8 | (BITS7 & num));
  }
  write(encoder, BITS7 & num);
};
2479
+
2480
/**
 * Write a variable length integer.
 *
 * We use the 7th bit instead for signaling that this is a negative number.
 *
 * @function
 * @param {Encoder} encoder
 * @param {number} num The number that is to be encoded.
 */
const writeVarInt = (encoder, num) => {
  const isNegative = isNegativeZero(num);
  if (isNegative) {
    num = -num;
  }
  // |- whether to continue reading |- whether is negative |- number
  write(encoder, (num > BITS6 ? BIT8 : 0) | (isNegative ? BIT7 : 0) | (BITS6 & num));
  num = floor(num / 64); // shift >>> 6
  // We don't need to consider the case of num === 0 so we can use a different
  // pattern here than above.
  while (num > 0) {
    write(encoder, (num > BITS7 ? BIT8 : 0) | (BITS7 & num));
    num = floor(num / 128); // shift >>> 7
  }
};

/**
 * A cache to store strings temporarily
 */
const _strBuffer = new Uint8Array(30000);
// Guard for encodeInto: only strings short enough that their UTF-8 form is
// guaranteed to fit in _strBuffer take the fast path below.
const _maxStrBSize = _strBuffer.length / 3;

/**
 * Write a variable length string using TextEncoder.encodeInto when the string
 * is small enough for the shared scratch buffer.
 *
 * @function
 * @param {Encoder} encoder
 * @param {String} str The string that is to be encoded.
 */
const _writeVarStringNative = (encoder, str) => {
  if (str.length < _maxStrBSize) {
    // We can encode the string into the existing buffer
    /* c8 ignore next */
    const written = utf8TextEncoder.encodeInto(str, _strBuffer).written || 0;
    writeVarUint(encoder, written);
    for (let i = 0; i < written; i++) {
      write(encoder, _strBuffer[i]);
    }
  } else {
    writeVarUint8Array(encoder, encodeUtf8(str));
  }
};

/**
 * Write a variable length string (byte length prefix, then UTF-8 bytes).
 *
 * @function
 * @param {Encoder} encoder
 * @param {String} str The string that is to be encoded.
 */
const _writeVarStringPolyfill = (encoder, str) => {
  const encodedString = unescape(encodeURIComponent(str));
  const len = encodedString.length;
  writeVarUint(encoder, len);
  for (let i = 0; i < len; i++) {
    write(encoder, /** @type {number} */ (encodedString.codePointAt(i)));
  }
};

/**
 * Write a variable length string.
 *
 * Uses the native encodeInto path when available, the polyfill otherwise.
 *
 * @function
 * @param {Encoder} encoder
 * @param {String} str The string that is to be encoded.
 */
/* c8 ignore next */
const writeVarString = (utf8TextEncoder && /** @type {any} */ (utf8TextEncoder).encodeInto) ? _writeVarStringNative : _writeVarStringPolyfill;
2557
+
2558
/**
 * Append fixed-length Uint8Array to the encoder.
 *
 * @function
 * @param {Encoder} encoder
 * @param {Uint8Array} uint8Array
 */
const writeUint8Array = (encoder, uint8Array) => {
  const bufferLen = encoder.cbuf.length;
  const cpos = encoder.cpos;
  // Split the copy: what fits in the current buffer vs. what must spill over.
  const leftCopyLen = min(bufferLen - cpos, uint8Array.length);
  const rightCopyLen = uint8Array.length - leftCopyLen;
  encoder.cbuf.set(uint8Array.subarray(0, leftCopyLen), cpos);
  encoder.cpos += leftCopyLen;
  if (rightCopyLen > 0) {
    // Still something to write, write right half..
    // Append new buffer
    encoder.bufs.push(encoder.cbuf);
    // must have at least size of remaining buffer
    encoder.cbuf = new Uint8Array(max(bufferLen * 2, rightCopyLen));
    // copy array
    encoder.cbuf.set(uint8Array.subarray(leftCopyLen));
    encoder.cpos = rightCopyLen;
  }
};

/**
 * Append an Uint8Array to Encoder.
 *
 * The byte length is prefixed as a var-uint so the decoder knows how much to
 * read (see readVarUint8Array).
 *
 * @function
 * @param {Encoder} encoder
 * @param {Uint8Array} uint8Array
 */
const writeVarUint8Array = (encoder, uint8Array) => {
  writeVarUint(encoder, uint8Array.byteLength);
  writeUint8Array(encoder, uint8Array);
};
2595
+
2596
/**
 * Create an DataView of the next `len` bytes. Use it to write data after
 * calling this function.
 *
 * ```js
 * // write float32 using DataView
 * const dv = writeOnDataView(encoder, 4)
 * dv.setFloat32(0, 1.1)
 * // read float32 using DataView
 * const dv = readFromDataView(encoder, 4)
 * dv.getFloat32(0) // => 1.100000023841858 (leaving it to the reader to find out why this is the correct result)
 * ```
 *
 * @param {Encoder} encoder
 * @param {number} len
 * @return {DataView}
 */
const writeOnDataView = (encoder, len) => {
  // Ensure `len` contiguous bytes are available in the working buffer.
  verifyLen(encoder, len);
  const dview = new DataView(encoder.cbuf.buffer, encoder.cpos, len);
  encoder.cpos += len;
  return dview
};

/**
 * Write a 32-bit big-endian float (littleEndian=false).
 * @param {Encoder} encoder
 * @param {number} num
 */
const writeFloat32 = (encoder, num) => writeOnDataView(encoder, 4).setFloat32(0, num, false);

/**
 * Write a 64-bit big-endian float.
 * @param {Encoder} encoder
 * @param {number} num
 */
const writeFloat64 = (encoder, num) => writeOnDataView(encoder, 8).setFloat64(0, num, false);

/**
 * Write a 64-bit big-endian signed bigint.
 * @param {Encoder} encoder
 * @param {bigint} num
 */
const writeBigInt64 = (encoder, num) => /** @type {any} */ (writeOnDataView(encoder, 8)).setBigInt64(0, num, false);

/**
 * Write a 64-bit big-endian unsigned bigint.
 * @param {Encoder} encoder
 * @param {bigint} num
 */
const writeBigUint64 = (encoder, num) => /** @type {any} */ (writeOnDataView(encoder, 8)).setBigUint64(0, num, false);
2643
+
2644
// Scratch DataView used to round-trip a number through 32-bit float storage.
const floatTestBed = new DataView(new ArrayBuffer(4));
/**
 * Check if a number can be encoded as a 32 bit float.
 *
 * @param {number} num
 * @return {boolean}
 */
const isFloat32 = num => {
  floatTestBed.setFloat32(0, num);
  const roundTripped = floatTestBed.getFloat32(0);
  return roundTripped === num;
};
2655
+
2656
/**
 * Encode data with efficient binary format.
 *
 * Differences to JSON:
 * • Transforms data to a binary format (not to a string)
 * • Encodes undefined, NaN, and ArrayBuffer (these can't be represented in JSON)
 * • Numbers are efficiently encoded either as a variable length integer, as a
 *   32 bit float, as a 64 bit float, or as a 64 bit bigint.
 *
 * Encoding table:
 *
 * | Data Type           | Prefix   | Encoding Method    | Comment |
 * | ------------------- | -------- | ------------------ | ------- |
 * | undefined           | 127      |                    | Functions, symbol, and everything that cannot be identified is encoded as undefined |
 * | null                | 126      |                    | |
 * | integer             | 125      | writeVarInt        | Only encodes 32 bit signed integers |
 * | float32             | 124      | writeFloat32       | |
 * | float64             | 123      | writeFloat64       | |
 * | bigint              | 122      | writeBigInt64      | |
 * | boolean (false)     | 121      |                    | True and false are different data types so we save the following byte |
 * | boolean (true)      | 120      |                    | - 0b01111000 so the last bit determines whether true or false |
 * | string              | 119      | writeVarString     | |
 * | object<string,any>  | 118      | custom             | Writes {length} then {length} key-value pairs |
 * | array<any>          | 117      | custom             | Writes {length} then {length} json values |
 * | Uint8Array          | 116      | writeVarUint8Array | We use Uint8Array for any kind of binary data |
 *
 * Reasons for the decreasing prefix:
 * We need the first bit for extendability (later we may want to encode the
 * prefix with writeVarUint). The remaining 7 bits are divided as follows:
 * [0-30]   the beginning of the data range is used for custom purposes
 *          (defined by the function that uses this library)
 * [31-127] the end of the data range is used for data encoding by
 *          lib0/encoding.js
 *
 * @param {Encoder} encoder
 * @param {undefined|null|number|bigint|boolean|string|Object<string,any>|Array<any>|Uint8Array} data
 */
const writeAny = (encoder, data) => {
  switch (typeof data) {
    case 'string':
      // TYPE 119: STRING
      write(encoder, 119);
      writeVarString(encoder, data);
      break
    case 'number':
      if (isInteger(data) && abs(data) <= BITS31) {
        // TYPE 125: INTEGER
        write(encoder, 125);
        writeVarInt(encoder, data);
      } else if (isFloat32(data)) {
        // TYPE 124: FLOAT32 (value survives a 32-bit round trip)
        write(encoder, 124);
        writeFloat32(encoder, data);
      } else {
        // TYPE 123: FLOAT64
        write(encoder, 123);
        writeFloat64(encoder, data);
      }
      break
    case 'bigint':
      // TYPE 122: BigInt
      write(encoder, 122);
      writeBigInt64(encoder, data);
      break
    case 'object':
      if (data === null) {
        // TYPE 126: null
        write(encoder, 126);
      } else if (isArray(data)) {
        // TYPE 117: Array - length prefix, then each element recursively
        write(encoder, 117);
        writeVarUint(encoder, data.length);
        for (let i = 0; i < data.length; i++) {
          writeAny(encoder, data[i]);
        }
      } else if (data instanceof Uint8Array) {
        // TYPE 116: ArrayBuffer
        write(encoder, 116);
        writeVarUint8Array(encoder, data);
      } else {
        // TYPE 118: Object - key count, then (key, value) pairs recursively
        write(encoder, 118);
        const keys = Object.keys(data);
        writeVarUint(encoder, keys.length);
        for (let i = 0; i < keys.length; i++) {
          const key = keys[i];
          writeVarString(encoder, key);
          writeAny(encoder, data[key]);
        }
      }
      break
    case 'boolean':
      // TYPE 120/121: boolean (true/false)
      write(encoder, data ? 120 : 121);
      break
    default:
      // TYPE 127: undefined
      write(encoder, 127);
  }
};
2756
+
2757
/**
 * Encode a Y sync-protocol message into its compact binary wire format.
 *
 * Every message starts with the (type, table, prop) strings; the remaining
 * layout depends on `msg.type` and is mirrored exactly by `decodeYMessage`.
 *
 * @param {Object} msg Message with `type`, `table`, `prop` plus type-specific fields.
 * @return {Uint8Array} The encoded message bytes.
 */
function encodeYMessage(msg) {
  const encoder = new Encoder();
  writeVarString(encoder, msg.type);
  writeVarString(encoder, msg.table);
  writeVarString(encoder, msg.prop);
  switch (msg.type) {
    case 'u-ack':
    case 'u-reject':
      // Acks/rejects carry only a 64-bit update id.
      writeBigUint64(encoder, BigInt(msg.i));
      break;
    default:
      // All other message types carry the document key first.
      writeAny(encoder, msg.k);
      switch (msg.type) {
        case 'aware':
          writeVarUint8Array(encoder, msg.u);
          break;
        case 'doc-open':
          writeAny(encoder, msg.serverRev);
          writeAny(encoder, msg.sv);
          break;
        case 'doc-close':
          // No payload beyond (type, table, prop, k).
          break;
        case 'sv':
          writeVarUint8Array(encoder, msg.sv);
          break;
        case 'u-c':
          // Client update: payload bytes followed by the 64-bit update id.
          writeVarUint8Array(encoder, msg.u);
          writeBigUint64(encoder, BigInt(msg.i));
          break;
        case 'u-s':
          writeVarUint8Array(encoder, msg.u);
          break;
      }
  }
  return toUint8Array(encoder);
}
2793
+
2794
/**
 * Error helpers.
 *
 * @module error
 */

/**
 * Build a new Error carrying the given message.
 *
 * @param {string} s
 * @return {Error}
 */
/* c8 ignore next */
const create = message => new Error(message);
2806
+
2807
/**
 * Efficient schema-less binary decoding with support for variable length encoding.
 *
 * Use [lib0/decoding] with [lib0/encoding]. Every encoding function has a corresponding decoding function.
 *
 * Encodes numbers in little-endian order (least to most significant byte order)
 * and is compatible with Golang's binary encoding (https://golang.org/pkg/encoding/binary/)
 * which is also used in Protocol Buffers.
 *
 * ```js
 * // encoding step
 * const encoder = encoding.createEncoder()
 * encoding.writeVarUint(encoder, 256)
 * encoding.writeVarString(encoder, 'Hello world!')
 * const buf = encoding.toUint8Array(encoder)
 * ```
 *
 * ```js
 * // decoding step
 * const decoder = decoding.createDecoder(buf)
 * decoding.readVarUint(decoder) // => 256
 * decoding.readVarString(decoder) // => 'Hello world!'
 * decoding.hasContent(decoder) // => false - all data is read
 * ```
 *
 * @module decoding
 */


// Shared error singletons - cheap to throw repeatedly from the read loops.
const errorUnexpectedEndOfArray = create('Unexpected end of array');
const errorIntegerOutOfRange = create('Integer out of Range');

/**
 * A Decoder handles the decoding of an Uint8Array.
 */
class Decoder {
  /**
   * @param {Uint8Array} uint8Array Binary data to decode
   */
  constructor (uint8Array) {
    /**
     * Decoding target.
     *
     * @type {Uint8Array}
     */
    this.arr = uint8Array;
    /**
     * Current decoding position.
     *
     * @type {number}
     */
    this.pos = 0;
  }
}
2861
+
2862
/**
 * @function
 * @param {Decoder} decoder
 * @return {boolean} True while unread bytes remain in the decoder.
 */
const hasContent = decoder => decoder.arr.length !== decoder.pos;
2868
+
2869
/**
 * Create an Uint8Array view of the next `len` bytes and advance the position by `len`.
 *
 * Important: The Uint8Array still points to the underlying ArrayBuffer. Make sure to discard the result as soon as possible to prevent any memory leaks.
 * Use `buffer.copyUint8Array` to copy the result into a new Uint8Array.
 *
 * @function
 * @param {Decoder} decoder The decoder instance
 * @param {number} len The length of bytes to read
 * @return {Uint8Array}
 */
const readUint8Array = (decoder, len) => {
  // byteOffset accounts for decoder.arr itself being a view into a larger buffer.
  const start = decoder.arr.byteOffset + decoder.pos;
  decoder.pos += len;
  return new Uint8Array(decoder.arr.buffer, start, len);
};
2885
+
2886
/**
 * Read variable length Uint8Array.
 *
 * Important: The Uint8Array still points to the underlying ArrayBuffer. Make sure to discard the result as soon as possible to prevent any memory leaks.
 * Use `buffer.copyUint8Array` to copy the result into a new Uint8Array.
 *
 * @function
 * @param {Decoder} decoder
 * @return {Uint8Array}
 */
const readVarUint8Array = decoder => readUint8Array(decoder, readVarUint(decoder));

/**
 * Read one byte as unsigned integer.
 * @function
 * @param {Decoder} decoder The decoder instance
 * @return {number} Unsigned 8-bit integer
 */
const readUint8 = decoder => decoder.arr[decoder.pos++];
2905
+
2906
/**
 * Read unsigned integer (32bit) with variable length.
 * 1/8th of the storage is used as encoding overhead.
 * * numbers < 2^7 are stored in one byte
 * * numbers < 2^14 are stored in two bytes
 *
 * @function
 * @param {Decoder} decoder
 * @return {number} An unsigned integer.
 */
const readVarUint = decoder => {
  let num = 0;
  let mult = 1;
  const len = decoder.arr.length;
  while (decoder.pos < len) {
    const r = decoder.arr[decoder.pos++];
    // num = num | ((r & binary.BITS7) << len)
    num = num + (r & BITS7) * mult; // shift $r << (7*#iterations) and add it to num
    mult *= 128; // next iteration, shift 7 "more" to the left
    if (r < BIT8) {
      // Continuation bit unset: this was the final byte.
      return num
    }
    /* c8 ignore start */
    if (num > MAX_SAFE_INTEGER) {
      throw errorIntegerOutOfRange
    }
    /* c8 ignore stop */
  }
  throw errorUnexpectedEndOfArray
};

/**
 * Read signed integer (32bit) with variable length.
 * 1/8th of the storage is used as encoding overhead.
 * * numbers < 2^7 are stored in one byte
 * * numbers < 2^14 are stored in two bytes
 * @todo This should probably create the inverse ~num if number is negative - but this would be a breaking change.
 *
 * @function
 * @param {Decoder} decoder
 * @return {number} A signed integer.
 */
const readVarInt = decoder => {
  let r = decoder.arr[decoder.pos++];
  let num = r & BITS6;
  let mult = 64;
  // Bit 7 of the first byte carries the sign (see writeVarInt).
  const sign = (r & BIT7) > 0 ? -1 : 1;
  if ((r & BIT8) === 0) {
    // don't continue reading
    return sign * num
  }
  const len = decoder.arr.length;
  while (decoder.pos < len) {
    r = decoder.arr[decoder.pos++];
    // num = num | ((r & binary.BITS7) << len)
    num = num + (r & BITS7) * mult;
    mult *= 128;
    if (r < BIT8) {
      return sign * num
    }
    /* c8 ignore start */
    if (num > MAX_SAFE_INTEGER) {
      throw errorIntegerOutOfRange
    }
    /* c8 ignore stop */
  }
  throw errorUnexpectedEndOfArray
};
2974
+
2975
/**
 * We don't test this function anymore as we use native decoding/encoding by default now.
 * Better not modify this anymore..
 *
 * Transforming utf8 to a string is pretty expensive. The code performs 10x better
 * when String.fromCodePoint is fed with all characters as arguments.
 * But most environments have a maximum number of arguments per functions.
 * For efficiency reasons we apply a maximum of 10000 characters at once.
 *
 * @function
 * @param {Decoder} decoder
 * @return {String} The read String.
 */
/* c8 ignore start */
const _readVarStringPolyfill = decoder => {
  let remainingLen = readVarUint(decoder);
  if (remainingLen === 0) {
    return ''
  } else {
    let encodedString = String.fromCodePoint(readUint8(decoder)); // remember to decrease remainingLen
    if (--remainingLen < 100) { // do not create a Uint8Array for small strings
      while (remainingLen--) {
        encodedString += String.fromCodePoint(readUint8(decoder));
      }
    } else {
      while (remainingLen > 0) {
        const nextLen = remainingLen < 10000 ? remainingLen : 10000;
        // this is dangerous, we create a fresh array view from the existing buffer
        const bytes = decoder.arr.subarray(decoder.pos, decoder.pos + nextLen);
        decoder.pos += nextLen;
        // Starting with ES5.1 we can supply a generic array-like object as arguments
        encodedString += String.fromCodePoint.apply(null, /** @type {any} */ (bytes));
        remainingLen -= nextLen;
      }
    }
    // escape/decodeURIComponent converts the byte string back to UTF-16.
    return decodeURIComponent(escape(encodedString))
  }
};
/* c8 ignore stop */

/**
 * @function
 * @param {Decoder} decoder
 * @return {String} The read String
 */
const _readVarStringNative = decoder =>
  /** @type any */ (utf8TextDecoder).decode(readVarUint8Array(decoder));

/**
 * Read string of variable length
 * * varUint is used to store the length of the string
 *
 * @function
 * @param {Decoder} decoder
 * @return {String} The read String
 *
 */
/* c8 ignore next */
const readVarString = utf8TextDecoder ? _readVarStringNative : _readVarStringPolyfill;
3034
+
3035
/**
 * Create a DataView over the next `len` bytes and advance the position by `len`.
 *
 * @param {Decoder} decoder
 * @param {number} len
 * @return {DataView}
 */
const readFromDataView = (decoder, len) => {
  // byteOffset accounts for decoder.arr being a view into a larger buffer.
  const start = decoder.arr.byteOffset + decoder.pos;
  decoder.pos += len;
  return new DataView(decoder.arr.buffer, start, len);
};
3045
+
3046
/**
 * Read a 32-bit big-endian float (littleEndian=false) and advance 4 bytes.
 * @param {Decoder} decoder
 */
const readFloat32 = decoder => readFromDataView(decoder, 4).getFloat32(0, false);

/**
 * Read a 64-bit big-endian float and advance 8 bytes.
 * @param {Decoder} decoder
 */
const readFloat64 = decoder => readFromDataView(decoder, 8).getFloat64(0, false);

/**
 * Read a 64-bit big-endian signed bigint and advance 8 bytes.
 * @param {Decoder} decoder
 */
const readBigInt64 = decoder => /** @type {any} */ (readFromDataView(decoder, 8)).getBigInt64(0, false);

/**
 * Read a 64-bit big-endian unsigned bigint and advance 8 bytes.
 * @param {Decoder} decoder
 */
const readBigUint64 = decoder => /** @type {any} */ (readFromDataView(decoder, 8)).getBigUint64(0, false);
3065
+
3066
/**
 * Dispatch table for `readAny`, indexed by (127 - prefix byte).
 * Mirrors the encoding table documented at `writeAny`.
 *
 * @type {Array<function(Decoder):any>}
 */
const readAnyLookupTable = [
  decoder => undefined, // CASE 127: undefined
  decoder => null, // CASE 126: null
  readVarInt, // CASE 125: integer
  readFloat32, // CASE 124: float32
  readFloat64, // CASE 123: float64
  readBigInt64, // CASE 122: bigint
  decoder => false, // CASE 121: boolean (false)
  decoder => true, // CASE 120: boolean (true)
  readVarString, // CASE 119: string
  decoder => { // CASE 118: object<string,any>
    const len = readVarUint(decoder);
    /**
     * @type {Object<string,any>}
     */
    const obj = {};
    for (let i = 0; i < len; i++) {
      const key = readVarString(decoder);
      obj[key] = readAny(decoder);
    }
    return obj
  },
  decoder => { // CASE 117: array<any>
    const len = readVarUint(decoder);
    const arr = [];
    for (let i = 0; i < len; i++) {
      arr.push(readAny(decoder));
    }
    return arr
  },
  readVarUint8Array // CASE 116: Uint8Array
];

/**
 * Decode one value previously written with `writeAny`.
 * Prefix bytes count down from 127, hence the (127 - prefix) index.
 *
 * @param {Decoder} decoder
 */
const readAny = decoder => readAnyLookupTable[127 - readUint8(decoder)](decoder);
3106
+
3107
+ function decodeYMessage(a) {
3108
+ const decoder = new Decoder(a);
3109
+ const type = readVarString(decoder);
3110
+ const table = readVarString(decoder);
3111
+ const prop = readVarString(decoder);
3112
+ switch (type) {
3113
+ case 'u-ack':
3114
+ case 'u-reject':
3115
+ return {
3116
+ type,
3117
+ table,
3118
+ prop,
3119
+ i: Number(readBigUint64(decoder)),
3120
+ };
3121
+ default: {
3122
+ const k = readAny(decoder);
3123
+ switch (type) {
3124
+ case 'in-sync':
3125
+ return { type, table, prop, k };
3126
+ case 'aware':
3127
+ return {
3128
+ type,
3129
+ table,
3130
+ prop,
3131
+ k,
3132
+ u: readVarUint8Array(decoder),
3133
+ };
3134
+ case 'doc-open':
3135
+ return {
3136
+ type,
3137
+ table,
3138
+ prop,
3139
+ k,
3140
+ serverRev: readAny(decoder),
3141
+ sv: readAny(decoder),
3142
+ };
3143
+ case 'doc-close':
3144
+ return { type, table, prop, k };
3145
+ case 'sv':
3146
+ return {
3147
+ type,
3148
+ table,
3149
+ prop,
3150
+ k,
3151
+ sv: readVarUint8Array(decoder),
3152
+ };
3153
+ case 'u-c':
3154
+ return {
3155
+ type,
3156
+ table,
3157
+ prop,
3158
+ k,
3159
+ u: readVarUint8Array(decoder),
3160
+ i: Number(readBigUint64(decoder)),
3161
+ };
3162
+ case 'u-s':
3163
+ return {
3164
+ type,
3165
+ table,
3166
+ prop,
3167
+ k,
3168
+ u: readVarUint8Array(decoder)
3169
+ };
3170
+ default:
3171
+ throw new TypeError(`Unknown message type: ${type}`);
3172
+ }
3173
+ }
3174
+ }
3175
+ }
3176
+
3177
+ async function asyncIterablePipeline(source, ...stages) {
3178
+ var _a, e_1, _b, _c;
3179
+ // Chain generators by sending outdata from one to another
3180
+ let result = source(); // Start with the source generator
3181
+ for (let i = 0; i < stages.length; i++) {
3182
+ result = stages[i](result); // Pass on the result to next generator
3183
+ }
3184
+ try {
3185
+ // Start running the machine. If the last stage is a sink, it will consume the data and never emit anything
3186
+ // to us here...
3187
+ for (var _d = true, result_1 = __asyncValues(result), result_1_1; result_1_1 = await result_1.next(), _a = result_1_1.done, !_a; _d = true) {
3188
+ _c = result_1_1.value;
3189
+ _d = false;
3190
+ const chunk = _c;
3191
+ }
3192
+ }
3193
+ catch (e_1_1) { e_1 = { error: e_1_1 }; }
3194
+ finally {
3195
+ try {
3196
+ if (!_d && !_a && (_b = result_1.return)) await _b.call(result_1);
3197
+ }
3198
+ finally { if (e_1) throw e_1.error; }
3199
+ }
3200
+ }
3201
+
3202
+ function consumeChunkedBinaryStream(source) {
3203
+ return __asyncGenerator(this, arguments, function* consumeChunkedBinaryStream_1() {
3204
+ var _a, e_1, _b, _c;
3205
+ let state = 0;
3206
+ let sizeBuf = new Uint8Array(4);
3207
+ let sizeBufPos = 0;
3208
+ let bufs = [];
3209
+ let len = 0;
3210
+ try {
3211
+ for (var _d = true, source_1 = __asyncValues(source), source_1_1; source_1_1 = yield __await(source_1.next()), _a = source_1_1.done, !_a; _d = true) {
3212
+ _c = source_1_1.value;
3213
+ _d = false;
3214
+ const chunk = _c;
3215
+ const dw = new DataView(chunk.buffer, chunk.byteOffset, chunk.byteLength);
3216
+ let pos = 0;
3217
+ while (pos < chunk.byteLength) {
3218
+ switch (state) {
3219
+ case 0:
3220
+ // Beginning of a size header
3221
+ if (pos + 4 > chunk.byteLength) {
3222
+ for (const b of chunk.slice(pos)) {
3223
+ if (sizeBufPos === 4)
3224
+ break;
3225
+ sizeBuf[sizeBufPos++] = b;
3226
+ ++pos;
3227
+ }
3228
+ if (sizeBufPos < 4) {
3229
+ // Need more bytes in order to read length.
3230
+ // Will go out from while loop as well because pos is defenitely = chunk.byteLength here.
3231
+ break;
3232
+ }
3233
+ }
3234
+ else if (sizeBufPos > 0 && sizeBufPos < 4) {
3235
+ for (const b of chunk.slice(pos, pos + 4 - sizeBufPos)) {
3236
+ sizeBuf[sizeBufPos++] = b;
3237
+ ++pos;
3238
+ }
3239
+ }
3240
+ // Intentional fall-through...
3241
+ case 1:
3242
+ len =
3243
+ sizeBufPos === 4
3244
+ ? new DataView(sizeBuf.buffer, 0, 4).getUint32(0, false)
3245
+ : dw.getUint32(pos, false);
3246
+ if (sizeBufPos)
3247
+ sizeBufPos = 0; // in this case pos is already forwarded
3248
+ else
3249
+ pos += 4; // else pos is not yet forwarded - that's why we do it now
3250
+ // Intentional fall-through...
3251
+ case 2:
3252
+ // Eat the chunk
3253
+ if (pos >= chunk.byteLength) {
3254
+ state = 2;
3255
+ break;
3256
+ }
3257
+ if (pos + len > chunk.byteLength) {
3258
+ bufs.push(chunk.slice(pos));
3259
+ len -= (chunk.byteLength - pos);
3260
+ state = 2;
3261
+ pos = chunk.byteLength; // will break while loop.
3262
+ }
3263
+ else {
3264
+ if (bufs.length > 0) {
3265
+ const concats = new Uint8Array(bufs.reduce((p, c) => p + c.byteLength, len));
3266
+ let p = 0;
3267
+ for (const buf of bufs) {
3268
+ concats.set(buf, p);
3269
+ p += buf.byteLength;
3270
+ }
3271
+ concats.set(chunk.slice(pos, pos + len), p);
3272
+ bufs = [];
3273
+ yield yield __await(concats);
3274
+ }
3275
+ else {
3276
+ yield yield __await(chunk.slice(pos, pos + len));
3277
+ }
3278
+ pos += len;
3279
+ state = 0;
3280
+ }
3281
+ break;
3282
+ }
3283
+ }
3284
+ }
3285
+ }
3286
+ catch (e_1_1) { e_1 = { error: e_1_1 }; }
3287
+ finally {
3288
+ try {
3289
+ if (!_d && !_a && (_b = source_1.return)) yield __await(_b.call(source_1));
3290
+ }
3291
+ finally { if (e_1) throw e_1.error; }
3292
+ }
3293
+ });
3294
+ }
3295
+
3296
+ function getFetchResponseBodyGenerator(res) {
3297
+ return function () {
3298
+ return __asyncGenerator(this, arguments, function* () {
3299
+ if (!res.body)
3300
+ throw new Error("Response body is not readable");
3301
+ const reader = res.body.getReader();
3302
+ try {
3303
+ while (true) {
3304
+ const { done, value } = yield __await(reader.read());
3305
+ if (done)
3306
+ return yield __await(void 0);
3307
+ yield yield __await(value);
3308
+ }
3309
+ }
3310
+ finally {
3311
+ reader.releaseLock();
3312
+ }
3313
+ });
3314
+ };
3315
+ }
3316
+
2248
3317
  function listSyncifiedChanges(tablesToSyncify, currentUser, schema, alreadySyncedRealms) {
2249
3318
  return __awaiter(this, void 0, void 0, function* () {
2250
3319
  const txid = `upload-${randomString$1(8)}`;
@@ -3311,7 +4380,7 @@
3311
4380
  }
3312
4381
 
3313
4382
  //import {BisonWebStreamReader} from "dreambase-library/dist/typeson-simplified/BisonWebStreamReader";
3314
- function syncWithServer(changes, syncState, baseRevs, db, databaseUrl, schema, clientIdentity, currentUser) {
4383
+ function syncWithServer(changes, y, syncState, baseRevs, db, databaseUrl, schema, clientIdentity, currentUser) {
3315
4384
  return __awaiter(this, void 0, void 0, function* () {
3316
4385
  //
3317
4386
  // Push changes to server using fetch
@@ -3349,6 +4418,7 @@
3349
4418
  : undefined,
3350
4419
  baseRevs,
3351
4420
  changes: encodeIdsForServer(db.dx.core.schema, currentUser, changes),
4421
+ y,
3352
4422
  };
3353
4423
  console.debug('Sync request', syncRequest);
3354
4424
  db.syncStateChangedEvent.next({
@@ -3562,6 +4632,357 @@
3562
4632
  });
3563
4633
  }
3564
4634
 
4635
+ const DEXIE_CLOUD_SYNCER_ID = 'dexie-cloud-syncer';
4636
+
4637
+ function listUpdatesSince(yTable, sinceIncluding) {
4638
+ return yTable
4639
+ .where('i')
4640
+ .between(sinceIncluding, Infinity, true)
4641
+ .toArray();
4642
+ }
4643
+
4644
+ function $Y(db) {
4645
+ const $Y = db.dx._options.Y;
4646
+ if (!$Y)
4647
+ throw new Error('Y library not supplied to Dexie constructor');
4648
+ return $Y;
4649
+ }
4650
+
4651
+ /** Queries the local database for YMessages to send to server.
4652
+ *
4653
+ * There are 2 messages that this function can provide:
4654
+ * YUpdateFromClientRequest ( for local updates )
4655
+ * YStateVector ( for state vector of foreign updates so that server can reduce the number of udpates to send back )
4656
+ *
4657
+ * Notice that we do not do a step 1 sync phase here to get a state vector from the server. Reason we can avoid
4658
+ * the 2-step sync is that we are client-server and not client-client here and we keep track of the client changes
4659
+ * sent to server by letting server acknowledge them. There is always a chance that some client update has already
4660
+ * been sent and that the client failed to receive the ack. However, if this happens it does not matter - the change
4661
+ * would be sent again and Yjs handles duplicate changes anyway. And it's rare so we earn the cost of roundtrips by
4662
+ * avoiding the step1 sync and instead keep track of this in the `unsentFrom` property of the SyncState.
4663
+ *
4664
+ * @param db
4665
+ * @returns
4666
+ */
4667
+ function listYClientMessagesAndStateVector(db, tablesToSync) {
4668
+ return __awaiter(this, void 0, void 0, function* () {
4669
+ const result = [];
4670
+ const lastUpdateIds = {};
4671
+ for (const table of tablesToSync) {
4672
+ if (table.schema.yProps) {
4673
+ for (const yProp of table.schema.yProps) {
4674
+ const Y = $Y(db); // This is how we retrieve the user-provided Y library
4675
+ const yTable = db.table(yProp.updatesTable); // the updates-table for this combo of table+propName
4676
+ const syncState = (yield yTable.get(DEXIE_CLOUD_SYNCER_ID));
4677
+ // unsentFrom = the `i` value of updates that aren't yet sent to server (or at least not acked by the server yet)
4678
+ const unsentFrom = (syncState === null || syncState === void 0 ? void 0 : syncState.unsentFrom) || 1;
4679
+ // receivedUntil = the `i` value of updates that both we and the server knows we already have (we know it by the outcome from last syncWithServer() because server keep track of its revision numbers
4680
+ const receivedUntil = (syncState === null || syncState === void 0 ? void 0 : syncState.receivedUntil) || 0;
4681
+ // Compute the least value of these two (but since receivedUntil is inclusive we need to add +1 to it)
4682
+ const unsyncedFrom = Math.min(unsentFrom, receivedUntil + 1);
4683
+ // Query all these updates for all docs of this table+prop combination
4684
+ const updates = yield listUpdatesSince(yTable, unsyncedFrom);
4685
+ if (updates.length > 0)
4686
+ lastUpdateIds[yTable.name] = updates[updates.length - 1].i;
4687
+ // Now sort them by document and whether they are local or not + ignore local updates already sent:
4688
+ const perDoc = {};
4689
+ for (const update of updates) {
4690
+ // Sort updates into buckets of the doc primary key + the flag (whether it's local or foreign)
4691
+ const isLocal = ((update.f || 0) & 0x01) === 0x01;
4692
+ if (isLocal && update.i < unsentFrom)
4693
+ continue; // This local update has already been sent and acked.
4694
+ const docKey = JSON.stringify(update.k) + '/' + isLocal;
4695
+ let entry = perDoc[docKey];
4696
+ if (!entry) {
4697
+ perDoc[docKey] = entry = {
4698
+ i: update.i,
4699
+ k: update.k,
4700
+ isLocal,
4701
+ u: [],
4702
+ };
4703
+ entry.u.push(update.u);
4704
+ }
4705
+ else {
4706
+ entry.u.push(update.u);
4707
+ entry.i = Math.max(update.i, entry.i);
4708
+ }
4709
+ }
4710
+ // Now, go through all these and:
4711
+ // * For local updates, compute a merged update per document.
4712
+ // * For foreign updates, compute a state vector to pass to server, so that server can
4713
+ // avoid re-sending updates that we already have (they might have been sent of websocket
4714
+ // and when that happens, we do not mark them in any way nor do we update receivedUntil -
4715
+ // we only update receivedUntil after a "full sync" (syncWithServer()))
4716
+ for (const { k, isLocal, u, i } of Object.values(perDoc)) {
4717
+ const mergedUpdate = u.length === 1 ? u[0] : Y.mergeUpdatesV2(u);
4718
+ if (isLocal) {
4719
+ result.push({
4720
+ type: 'u-c',
4721
+ table: table.name,
4722
+ prop: yProp.prop,
4723
+ k,
4724
+ u: mergedUpdate,
4725
+ i,
4726
+ });
4727
+ }
4728
+ else {
4729
+ const stateVector = Y.encodeStateVectorFromUpdateV2(mergedUpdate);
4730
+ result.push({
4731
+ type: 'sv',
4732
+ table: table.name,
4733
+ prop: yProp.prop,
4734
+ k,
4735
+ sv: stateVector,
4736
+ });
4737
+ }
4738
+ }
4739
+ }
4740
+ }
4741
+ }
4742
+ return {
4743
+ yMessages: result,
4744
+ lastUpdateIds
4745
+ };
4746
+ });
4747
+ }
4748
+
4749
+ function getUpdatesTable(db, table, ydocProp) {
4750
+ var _a, _b, _c;
4751
+ const utbl = (_c = (_b = (_a = db.table(table)) === null || _a === void 0 ? void 0 : _a.schema.yProps) === null || _b === void 0 ? void 0 : _b.find(p => p.prop === ydocProp)) === null || _c === void 0 ? void 0 : _c.updatesTable;
4752
+ if (!utbl)
4753
+ throw new Error(`No updatesTable found for ${table}.${ydocProp}`);
4754
+ return db.table(utbl);
4755
+ }
4756
+
4757
+ function applyYServerMessages(yMessages, db) {
4758
+ return __awaiter(this, void 0, void 0, function* () {
4759
+ const result = {};
4760
+ for (const m of yMessages) {
4761
+ switch (m.type) {
4762
+ case 'u-s': {
4763
+ const utbl = getUpdatesTable(db, m.table, m.prop);
4764
+ result[utbl.name] = yield utbl.add({
4765
+ k: m.k,
4766
+ u: m.u,
4767
+ });
4768
+ break;
4769
+ }
4770
+ case 'u-ack': {
4771
+ const utbl = getUpdatesTable(db, m.table, m.prop);
4772
+ yield db.transaction('rw', utbl, (tx) => __awaiter(this, void 0, void 0, function* () {
4773
+ let syncer = (yield tx
4774
+ .table(utbl.name)
4775
+ .get(DEXIE_CLOUD_SYNCER_ID));
4776
+ yield tx.table(utbl.name).put(Object.assign(Object.assign({}, (syncer || { i: DEXIE_CLOUD_SYNCER_ID })), { unsentFrom: Math.max((syncer === null || syncer === void 0 ? void 0 : syncer.unsentFrom) || 1, m.i + 1) }));
4777
+ }));
4778
+ break;
4779
+ }
4780
+ case 'u-reject': {
4781
+ // Acces control or constraint rejected the update.
4782
+ // We delete it. It's not going to be sent again.
4783
+ // What's missing is a way to notify consumers, such as Tiptap editor, that the update was rejected.
4784
+ // This is only an issue when the document is open. We could find the open document and
4785
+ // in a perfect world, we should send a reverse update to the open document to undo the change.
4786
+ // See my question in https://discuss.yjs.dev/t/generate-an-inverse-update/2765
4787
+ console.debug(`Y update rejected. Deleting it.`);
4788
+ const utbl = getUpdatesTable(db, m.table, m.prop);
4789
+ yield utbl.delete(m.i);
4790
+ break;
4791
+ }
4792
+ case 'in-sync': {
4793
+ const doc = Dexie.DexieYProvider.getDocCache(db.dx).find(m.table, m.k, m.prop);
4794
+ if (doc && !doc.isSynced) {
4795
+ doc.emit('sync', [true]);
4796
+ }
4797
+ break;
4798
+ }
4799
+ }
4800
+ }
4801
+ return result;
4802
+ });
4803
+ }
4804
+
4805
+ function updateYSyncStates(lastUpdateIdsBeforeSync, receivedUntilsAfterSync, db, serverRevision) {
4806
+ var _a, _b, _c, _d, _e;
4807
+ return __awaiter(this, void 0, void 0, function* () {
4808
+ // We want to update unsentFrom for each yTable to the value specified in first argument
4809
+ // because we got those values before we synced with server and here we are back from server
4810
+ // that has successfully received all those messages - no matter if the last update was a client or server update,
4811
+ // we can safely store unsentFrom to a value of the last update + 1 here.
4812
+ // We also want to update receivedUntil for each yTable to the value specified in the second argument,
4813
+ // because that contains the highest resulted id of each update from server after storing it.
4814
+ // We could do these two tasks separately, but that would require two update calls on the same YSyncState, so
4815
+ // to optimize the dexie calls, we merge these two maps into a single one so we can do a single update request
4816
+ // per yTable.
4817
+ const mergedSpec = {};
4818
+ for (const [yTable, lastUpdateId] of Object.entries(lastUpdateIdsBeforeSync)) {
4819
+ (_a = mergedSpec[yTable]) !== null && _a !== void 0 ? _a : (mergedSpec[yTable] = {});
4820
+ mergedSpec[yTable].unsentFrom = lastUpdateId + 1;
4821
+ }
4822
+ for (const [yTable, lastUpdateId] of Object.entries(receivedUntilsAfterSync)) {
4823
+ (_b = mergedSpec[yTable]) !== null && _b !== void 0 ? _b : (mergedSpec[yTable] = {});
4824
+ mergedSpec[yTable].receivedUntil = lastUpdateId;
4825
+ }
4826
+ // Now go through all yTables and update their YSyncStates:
4827
+ const allYTables = Object.values(db.dx._dbSchema)
4828
+ .filter((tblSchema) => tblSchema.yProps)
4829
+ .map((tblSchema) => tblSchema.yProps.map((yProp) => yProp.updatesTable))
4830
+ .flat();
4831
+ for (const yTable of allYTables) {
4832
+ const mergedEntry = mergedSpec[yTable];
4833
+ const unsentFrom = (_c = mergedEntry === null || mergedEntry === void 0 ? void 0 : mergedEntry.unsentFrom) !== null && _c !== void 0 ? _c : 1;
4834
+ const receivedUntil = (_e = (_d = mergedEntry === null || mergedEntry === void 0 ? void 0 : mergedEntry.receivedUntil) !== null && _d !== void 0 ? _d :
4835
+ // from local because we are in the same parent transaction (in sync.ts) that
4836
+ // applied all updates from the server
4837
+ (yield db
4838
+ .table(yTable)
4839
+ .where('i')
4840
+ .between(1, Infinity) // Because i might be string DEXIE_CLOUD_SYNCER_ID if not a number.
4841
+ .reverse()
4842
+ .limit(1)
4843
+ .primaryKeys())[0]) !== null && _e !== void 0 ? _e : 0;
4844
+ // We're already in a transaction, but for the sake of
4845
+ // code readability and correctness, let's launch an atomic sub transaction:
4846
+ yield db.transaction('rw', yTable, () => __awaiter(this, void 0, void 0, function* () {
4847
+ const state = yield db
4848
+ .table(yTable)
4849
+ .get(DEXIE_CLOUD_SYNCER_ID);
4850
+ if (!state) {
4851
+ yield db.table(yTable).add({
4852
+ i: DEXIE_CLOUD_SYNCER_ID,
4853
+ unsentFrom,
4854
+ receivedUntil,
4855
+ serverRev: serverRevision,
4856
+ });
4857
+ }
4858
+ else {
4859
+ state.unsentFrom = Math.max(unsentFrom, state.unsentFrom || 1);
4860
+ state.receivedUntil = Math.max(receivedUntil, state.receivedUntil || 0);
4861
+ state.serverRev = serverRevision;
4862
+ yield db.table(yTable).put(state);
4863
+ }
4864
+ }));
4865
+ }
4866
+ });
4867
+ }
4868
+
4869
+ const BINSTREAM_TYPE_REALMID = 1;
4870
+ const BINSTREAM_TYPE_TABLE_AND_PROP = 2;
4871
+ const BINSTREAM_TYPE_DOCUMENT = 3;
4872
+ function downloadYDocsFromServer(db, databaseUrl, { yDownloadedRealms, realms }) {
4873
+ return __awaiter(this, void 0, void 0, function* () {
4874
+ if (yDownloadedRealms &&
4875
+ realms &&
4876
+ realms.every((realmId) => yDownloadedRealms[realmId] === '*')) {
4877
+ return; // Already done!
4878
+ }
4879
+ console.debug('Downloading Y.Docs from added realms');
4880
+ const user = yield loadAccessToken(db);
4881
+ const headers = {
4882
+ 'Content-Type': 'application/json',
4883
+ Accept: 'application/octet-stream',
4884
+ };
4885
+ if (user) {
4886
+ headers.Authorization = `Bearer ${user.accessToken}`;
4887
+ }
4888
+ const res = yield fetch(`${databaseUrl}/y/download`, {
4889
+ body: TSON.stringify({ downloadedRealms: yDownloadedRealms || {} }),
4890
+ method: 'POST',
4891
+ headers,
4892
+ credentials: 'include',
4893
+ });
4894
+ if (!res.ok) {
4895
+ throw new Error(`Failed to download Yjs documents from server. Status: ${res.status}`);
4896
+ }
4897
+ yield asyncIterablePipeline(getFetchResponseBodyGenerator(res), consumeChunkedBinaryStream, consumeDownloadChunks);
4898
+ function consumeDownloadChunks(chunks) {
4899
+ return __asyncGenerator(this, arguments, function* consumeDownloadChunks_1() {
4900
+ var _a, e_1, _b, _c;
4901
+ let currentRealmId = null;
4902
+ let currentTable = null;
4903
+ let currentProp = null;
4904
+ let docsToInsert = [];
4905
+ function storeCollectedDocs(completedRealm) {
4906
+ return __awaiter(this, void 0, void 0, function* () {
4907
+ const lastDoc = docsToInsert[docsToInsert.length - 1];
4908
+ if (docsToInsert.length > 0) {
4909
+ if (!currentRealmId || !currentTable || !currentProp) {
4910
+ throw new Error(`Protocol error from ${databaseUrl}/y/download`);
4911
+ }
4912
+ const yTable = getUpdatesTable(db, currentTable, currentProp);
4913
+ yield yTable.bulkAdd(docsToInsert);
4914
+ docsToInsert = [];
4915
+ }
4916
+ if (currentRealmId &&
4917
+ ((currentTable && currentProp && lastDoc) || completedRealm)) {
4918
+ yield db.$syncState.update('syncState', (syncState) => {
4919
+ const yDownloadedRealms = syncState.yDownloadedRealms || {};
4920
+ yDownloadedRealms[currentRealmId] = completedRealm
4921
+ ? '*'
4922
+ : {
4923
+ tbl: currentTable,
4924
+ prop: currentProp,
4925
+ key: lastDoc.k,
4926
+ };
4927
+ syncState.yDownloadedRealms = yDownloadedRealms;
4928
+ });
4929
+ }
4930
+ });
4931
+ }
4932
+ try {
4933
+ try {
4934
+ for (var _d = true, chunks_1 = __asyncValues(chunks), chunks_1_1; chunks_1_1 = yield __await(chunks_1.next()), _a = chunks_1_1.done, !_a; _d = true) {
4935
+ _c = chunks_1_1.value;
4936
+ _d = false;
4937
+ const chunk = _c;
4938
+ const decoder = new Decoder(chunk);
4939
+ while (hasContent(decoder)) {
4940
+ switch (readUint8(decoder)) {
4941
+ case BINSTREAM_TYPE_REALMID:
4942
+ yield __await(storeCollectedDocs(true));
4943
+ currentRealmId = readVarString(decoder);
4944
+ break;
4945
+ case BINSTREAM_TYPE_TABLE_AND_PROP:
4946
+ yield __await(storeCollectedDocs(false)); // still on same realm
4947
+ currentTable = readVarString(decoder);
4948
+ currentProp = readVarString(decoder);
4949
+ break;
4950
+ case BINSTREAM_TYPE_DOCUMENT: {
4951
+ const k = readAny(decoder);
4952
+ const u = readVarUint8Array(decoder);
4953
+ docsToInsert.push({
4954
+ k,
4955
+ u,
4956
+ });
4957
+ break;
4958
+ }
4959
+ }
4960
+ }
4961
+ yield __await(storeCollectedDocs(false)); // Chunk full - migth still be on same realm
4962
+ }
4963
+ }
4964
+ catch (e_1_1) { e_1 = { error: e_1_1 }; }
4965
+ finally {
4966
+ try {
4967
+ if (!_d && !_a && (_b = chunks_1.return)) yield __await(_b.call(chunks_1));
4968
+ }
4969
+ finally { if (e_1) throw e_1.error; }
4970
+ }
4971
+ yield __await(storeCollectedDocs(true)); // Everything downloaded - finalize last downloaded realm to "*"
4972
+ }
4973
+ catch (error) {
4974
+ if (!(error instanceof Dexie.DexieError)) {
4975
+ // Network error might have happened.
4976
+ // Store what we've collected so far:
4977
+ yield __await(storeCollectedDocs(false));
4978
+ }
4979
+ throw error;
4980
+ }
4981
+ });
4982
+ }
4983
+ });
4984
+ }
4985
+
3565
4986
  const CURRENT_SYNC_WORKER = 'currentSyncWorker';
3566
4987
  function sync(db, options, schema, syncOptions) {
3567
4988
  return _sync
@@ -3650,10 +5071,11 @@
3650
5071
  //
3651
5072
  // List changes to sync
3652
5073
  //
3653
- const [clientChangeSet, syncState, baseRevs] = yield db.transaction('r', db.tables, () => __awaiter(this, void 0, void 0, function* () {
5074
+ const [clientChangeSet, syncState, baseRevs, { yMessages, lastUpdateIds }] = yield db.transaction('r', db.tables, () => __awaiter(this, void 0, void 0, function* () {
3654
5075
  const syncState = yield db.getPersistedSyncState();
3655
5076
  const baseRevs = yield db.$baseRevs.toArray();
3656
5077
  let clientChanges = yield listClientChanges(mutationTables);
5078
+ const yResults = yield listYClientMessagesAndStateVector(db, tablesToSync);
3657
5079
  throwIfCancelled(cancelToken);
3658
5080
  if (doSyncify) {
3659
5081
  const alreadySyncedRealms = [
@@ -3663,11 +5085,11 @@
3663
5085
  const syncificationInserts = yield listSyncifiedChanges(tablesToSyncify, currentUser, schema, alreadySyncedRealms);
3664
5086
  throwIfCancelled(cancelToken);
3665
5087
  clientChanges = clientChanges.concat(syncificationInserts);
3666
- return [clientChanges, syncState, baseRevs];
5088
+ return [clientChanges, syncState, baseRevs, yResults];
3667
5089
  }
3668
- return [clientChanges, syncState, baseRevs];
5090
+ return [clientChanges, syncState, baseRevs, yResults];
3669
5091
  }));
3670
- const pushSyncIsNeeded = clientChangeSet.some((set) => set.muts.some((mut) => mut.keys.length > 0));
5092
+ const pushSyncIsNeeded = clientChangeSet.some((set) => set.muts.some((mut) => mut.keys.length > 0)) || yMessages.some(m => m.type === 'u-c');
3671
5093
  if (justCheckIfNeeded) {
3672
5094
  console.debug('Sync is needed:', pushSyncIsNeeded);
3673
5095
  return pushSyncIsNeeded;
@@ -3682,12 +5104,12 @@
3682
5104
  // Push changes to server
3683
5105
  //
3684
5106
  throwIfCancelled(cancelToken);
3685
- const res = yield syncWithServer(clientChangeSet, syncState, baseRevs, db, databaseUrl, schema, clientIdentity, currentUser);
5107
+ const res = yield syncWithServer(clientChangeSet, yMessages, syncState, baseRevs, db, databaseUrl, schema, clientIdentity, currentUser);
3686
5108
  console.debug('Sync response', res);
3687
5109
  //
3688
5110
  // Apply changes locally and clear old change entries:
3689
5111
  //
3690
- const done = yield db.transaction('rw', db.tables, (tx) => __awaiter(this, void 0, void 0, function* () {
5112
+ const { done, newSyncState } = yield db.transaction('rw', db.tables, (tx) => __awaiter(this, void 0, void 0, function* () {
3691
5113
  // @ts-ignore
3692
5114
  tx.idbtrans.disableChangeTracking = true;
3693
5115
  // @ts-ignore
@@ -3779,17 +5201,35 @@
3779
5201
  // apply server changes
3780
5202
  //
3781
5203
  yield applyServerChanges(filteredChanges, db);
5204
+ if (res.yMessages) {
5205
+ //
5206
+ // apply yMessages
5207
+ //
5208
+ const receivedUntils = yield applyYServerMessages(res.yMessages, db);
5209
+ //
5210
+ // update Y SyncStates
5211
+ //
5212
+ yield updateYSyncStates(lastUpdateIds, receivedUntils, db, res.serverRevision);
5213
+ }
3782
5214
  //
3783
- // Update syncState
5215
+ // Update regular syncState
3784
5216
  //
3785
5217
  db.$syncState.put(newSyncState, 'syncState');
3786
- return addedClientChanges.length === 0;
5218
+ return {
5219
+ done: addedClientChanges.length === 0,
5220
+ newSyncState
5221
+ };
3787
5222
  }));
3788
5223
  if (!done) {
3789
5224
  console.debug('MORE SYNC NEEDED. Go for it again!');
3790
5225
  yield checkSyncRateLimitDelay(db);
3791
5226
  return yield _sync(db, options, schema, { isInitialSync, cancelToken });
3792
5227
  }
5228
+ const usingYProps = Object.values(schema).some(tbl => { var _a; return (_a = tbl.yProps) === null || _a === void 0 ? void 0 : _a.length; });
5229
+ const serverSupportsYprops = !!res.yMessages;
5230
+ if (usingYProps && serverSupportsYprops) {
5231
+ yield downloadYDocsFromServer(db, databaseUrl, newSyncState);
5232
+ }
3793
5233
  console.debug('SYNC DONE', { isInitialSync });
3794
5234
  db.syncCompleteEvent.next();
3795
5235
  return false; // Not needed anymore
@@ -3842,6 +5282,18 @@
3842
5282
  }
3843
5283
  }
3844
5284
  }
5285
+ if (rejectedRealms.size > 0) {
5286
+ // Remove rejected/deleted realms from yDownloadedRealms because of the following use case:
5287
+ // 1. User becomes added to the realm
5288
+ // 2. User syncs and all documents of the realm is downloaded (downloadYDocsFromServer.ts)
5289
+ // 3. User leaves the realm and all docs are deleted locally (built-in-trigger of deleting their rows in this file)
5290
+ // 4. User is yet again added to the realm. At this point, we must make sure the docs are not considered already downloaded.
5291
+ const updateSpec = {};
5292
+ for (const realmId of rejectedRealms) {
5293
+ updateSpec[`yDownloadedRealms.${realmId}`] = undefined; // Setting to undefined will delete the property
5294
+ }
5295
+ yield db.$syncState.update('syncState', updateSpec);
5296
+ }
3845
5297
  });
3846
5298
  }
3847
5299
  function filterServerChangesThroughAddedClientChanges(serverChanges, addedClientChanges) {
@@ -3853,13 +5305,15 @@
3853
5305
  return toDBOperationSet(changes);
3854
5306
  }
3855
5307
 
5308
+ const LIMIT_NUM_MESSAGES_PER_TIME = 10; // Allow a maximum of 10 messages per...
5309
+ const TIME_WINDOW = 10000; // ...10 seconds.
5310
+ const PAUSE_PERIOD = 1000; // Pause for 1 second if reached
3856
5311
  function MessagesFromServerConsumer(db) {
3857
5312
  const queue = [];
3858
5313
  const readyToServe = new rxjs.BehaviorSubject(true);
3859
5314
  const event = new rxjs.BehaviorSubject(null);
3860
5315
  let isWorking = false;
3861
- let loopWarning = 0;
3862
- let loopDetection = [0, 0, 0, 0, 0, 0, 0, 0, 0, Date.now()];
5316
+ let loopDetection = new Array(LIMIT_NUM_MESSAGES_PER_TIME).fill(0);
3863
5317
  event.subscribe(() => __awaiter(this, void 0, void 0, function* () {
3864
5318
  if (isWorking)
3865
5319
  return;
@@ -3873,20 +5327,11 @@
3873
5327
  }
3874
5328
  finally {
3875
5329
  if (loopDetection[loopDetection.length - 1] - loopDetection[0] <
3876
- 10000) {
5330
+ TIME_WINDOW) {
3877
5331
  // Ten loops within 10 seconds. Slow down!
3878
- if (Date.now() - loopWarning < 5000) {
3879
- // Last time we did this, we ended up here too. Wait for a minute.
3880
- console.warn(`Slowing down websocket loop for one minute`);
3881
- loopWarning = Date.now() + 60000;
3882
- yield new Promise((resolve) => setTimeout(resolve, 60000));
3883
- }
3884
- else {
3885
- // This is a one-time event. Just pause 10 seconds.
3886
- console.warn(`Slowing down websocket loop for 10 seconds`);
3887
- loopWarning = Date.now() + 10000;
3888
- yield new Promise((resolve) => setTimeout(resolve, 10000));
3889
- }
5332
+ // This is a one-time event. Just pause 10 seconds.
5333
+ console.warn(`Slowing down websocket loop for ${PAUSE_PERIOD} milliseconds`);
5334
+ yield new Promise((resolve) => setTimeout(resolve, PAUSE_PERIOD));
3890
5335
  }
3891
5336
  isWorking = false;
3892
5337
  readyToServe.next(true);
@@ -4158,6 +5603,7 @@
4158
5603
  };
4159
5604
  Object.assign(db, helperMethods);
4160
5605
  db.messageConsumer = MessagesFromServerConsumer(db);
5606
+ db.messageProducer = new rxjs.Subject();
4161
5607
  wm$1.set(dx.cloud, db);
4162
5608
  }
4163
5609
  return db;
@@ -4493,24 +5939,6 @@
4493
5939
  const DISABLE_SERVICEWORKER_STRATEGY = (isSafari && safariVersion <= 605) || // Disable for Safari for now.
4494
5940
  isFirefox; // Disable for Firefox for now. Seems to have a bug in reading CryptoKeys from IDB from service workers
4495
5941
 
4496
- /* Helper function to subscribe to database close no matter if it was unexpectedly closed or manually using db.close()
4497
- */
4498
- function dbOnClosed(db, handler) {
4499
- db.on.close.subscribe(handler);
4500
- // @ts-ignore
4501
- const origClose = db._close;
4502
- // @ts-ignore
4503
- db._close = function () {
4504
- origClose.call(this);
4505
- handler();
4506
- };
4507
- return () => {
4508
- db.on.close.unsubscribe(handler);
4509
- // @ts-ignore
4510
- db._close = origClose;
4511
- };
4512
- }
4513
-
4514
5942
  const IS_SERVICE_WORKER = typeof self !== "undefined" && "clients" in self && !self.document;
4515
5943
 
4516
5944
  function throwVersionIncrementNeeded() {
@@ -4976,13 +6404,18 @@
4976
6404
  values = values.filter((_, idx) => !failures[idx]);
4977
6405
  }
4978
6406
  const ts = Date.now();
6407
+ // Canonicalize req.criteria.index to null if it's on the primary key.
6408
+ const criteria = 'criteria' in req && req.criteria
6409
+ ? Object.assign(Object.assign({}, req.criteria), { index: req.criteria.index === schema.primaryKey.keyPath // Use null to inform server that criteria is on primary key
6410
+ ? null // This will disable the server from trying to log consistent operations where it shouldnt.
6411
+ : req.criteria.index }) : undefined;
4979
6412
  const mut = req.type === 'delete'
4980
6413
  ? {
4981
6414
  type: 'delete',
4982
6415
  ts,
4983
6416
  opNo,
4984
6417
  keys,
4985
- criteria: req.criteria,
6418
+ criteria,
4986
6419
  txid,
4987
6420
  userId,
4988
6421
  }
@@ -4996,14 +6429,14 @@
4996
6429
  userId,
4997
6430
  values,
4998
6431
  }
4999
- : req.criteria && req.changeSpec
6432
+ : criteria && req.changeSpec
5000
6433
  ? {
5001
6434
  // Common changeSpec for all keys
5002
6435
  type: 'modify',
5003
6436
  ts,
5004
6437
  opNo,
5005
6438
  keys,
5006
- criteria: req.criteria,
6439
+ criteria,
5007
6440
  changeSpec: req.changeSpec,
5008
6441
  txid,
5009
6442
  userId,
@@ -5031,7 +6464,7 @@
5031
6464
  if ('isAdditionalChunk' in req && req.isAdditionalChunk) {
5032
6465
  mut.isAdditionalChunk = true;
5033
6466
  }
5034
- return keys.length > 0 || ('criteria' in req && req.criteria)
6467
+ return keys.length > 0 || criteria
5035
6468
  ? mutsTable
5036
6469
  .mutate({ type: 'add', trans, values: [mut] }) // Log entry
5037
6470
  .then(() => res) // Return original response
@@ -5045,6 +6478,7 @@
5045
6478
 
5046
6479
  function overrideParseStoresSpec(origFunc, dexie) {
5047
6480
  return function (stores, dbSchema) {
6481
+ var _a;
5048
6482
  const storesClone = Object.assign(Object.assign({}, DEXIE_CLOUD_SCHEMA), stores);
5049
6483
  // Merge indexes of DEXIE_CLOUD_SCHEMA with stores
5050
6484
  Object.keys(DEXIE_CLOUD_SCHEMA).forEach((tableName) => {
@@ -5105,6 +6539,14 @@
5105
6539
  }
5106
6540
  });
5107
6541
  const rv = origFunc.call(this, storesClone, dbSchema);
6542
+ for (const [tableName, spec] of Object.entries(dbSchema)) {
6543
+ if ((_a = spec.yProps) === null || _a === void 0 ? void 0 : _a.length) {
6544
+ const cloudTableSchema = cloudSchema[tableName];
6545
+ if (cloudTableSchema) {
6546
+ cloudTableSchema.yProps = spec.yProps.map((yProp) => yProp.prop);
6547
+ }
6548
+ }
6549
+ }
5108
6550
  return rv;
5109
6551
  };
5110
6552
  }
@@ -5190,31 +6632,90 @@
5190
6632
  }
5191
6633
  }
5192
6634
 
6635
+ function createYClientUpdateObservable(db) {
6636
+ const yTableRecords = flatten(db.tables
6637
+ .filter((table) => { var _a, _b; return ((_b = (_a = db.cloud.schema) === null || _a === void 0 ? void 0 : _a[table.name]) === null || _b === void 0 ? void 0 : _b.markedForSync) && table.schema.yProps; })
6638
+ .map((table) => table.schema.yProps.map((p) => ({
6639
+ table: table.name,
6640
+ ydocProp: p.prop,
6641
+ updatesTable: p.updatesTable,
6642
+ }))));
6643
+ return rxjs.merge(...yTableRecords.map(({ table, ydocProp, updatesTable }) => {
6644
+ // Per updates table (table+prop combo), we first read syncer.unsentFrom,
6645
+ // and then start listening for updates since that number.
6646
+ const yTbl = db.table(updatesTable);
6647
+ return rxjs.from(yTbl.get(DEXIE_CLOUD_SYNCER_ID)).pipe(rxjs.switchMap((syncer) => {
6648
+ let currentUnsentFrom = (syncer === null || syncer === void 0 ? void 0 : syncer.unsentFrom) || 1;
6649
+ return rxjs.from(Dexie.liveQuery(() => __awaiter(this, void 0, void 0, function* () {
6650
+ const addedUpdates = yield listUpdatesSince(yTbl, currentUnsentFrom);
6651
+ return addedUpdates
6652
+ .filter((update) => update.f && update.f & 1) // Only include local updates
6653
+ .map((update) => {
6654
+ return {
6655
+ type: 'u-c',
6656
+ table,
6657
+ prop: ydocProp,
6658
+ k: update.k,
6659
+ u: update.u,
6660
+ i: update.i,
6661
+ };
6662
+ });
6663
+ }))).pipe(rxjs.tap((addedUpdates) => {
6664
+ // Update currentUnsentFrom to only listen for updates that will be newer than the ones we emitted.
6665
+ // (Before, we did this within the liveQuery, but that caused a bug because
6666
+ // a cancelled emittion of a liveQuery would update the currentUnsentFrom without
6667
+ // emitting anything, leading to that we jumped over some updates. Here we update it
6668
+ // after the liveQuery has emitted its updates)
6669
+ if (addedUpdates.length > 0) {
6670
+ currentUnsentFrom = addedUpdates.at(-1).i + 1;
6671
+ }
6672
+ }));
6673
+ }));
6674
+ })).pipe(
6675
+ // Flatten the array of messages.
6676
+ // If messageProducer emits empty array, nothing is emitted
6677
+ // but if messageProducer emits array of messages, they are
6678
+ // emitted one by one.
6679
+ rxjs.mergeMap((messages) => messages), rxjs.tap((message) => {
6680
+ console.debug('dexie-cloud emitting y-c', message);
6681
+ }));
6682
+ }
6683
+
6684
+ function getAwarenessLibrary(db) {
6685
+ var _a, _b;
6686
+ if (!((_a = db.cloud.options) === null || _a === void 0 ? void 0 : _a.awarenessProtocol)) {
6687
+ throw new Dexie.MissingAPIError('awarenessProtocol was not provided to db.cloud.configure(). Please import * as awarenessProtocol from "y-protocols/awareness".');
6688
+ }
6689
+ return (_b = db.cloud.options) === null || _b === void 0 ? void 0 : _b.awarenessProtocol;
6690
+ }
6691
+ const awarenessWeakMap = new WeakMap();
6692
+ const getDocAwareness = (doc) => awarenessWeakMap.get(doc);
6693
+
5193
6694
  const SERVER_PING_TIMEOUT = 20000;
5194
6695
  const CLIENT_PING_INTERVAL = 30000;
5195
6696
  const FAIL_RETRY_WAIT_TIME = 60000;
5196
6697
  class WSObservable extends rxjs.Observable {
5197
- constructor(databaseUrl, rev, realmSetHash, clientIdentity, messageProducer, webSocketStatus, token, tokenExpiration) {
5198
- super((subscriber) => new WSConnection(databaseUrl, rev, realmSetHash, clientIdentity, token, tokenExpiration, subscriber, messageProducer, webSocketStatus));
6698
+ constructor(db, rev, realmSetHash, clientIdentity, messageProducer, webSocketStatus, user) {
6699
+ super((subscriber) => new WSConnection(db, rev, realmSetHash, clientIdentity, user, subscriber, messageProducer, webSocketStatus));
5199
6700
  }
5200
6701
  }
5201
6702
  let counter = 0;
5202
6703
  class WSConnection extends rxjs.Subscription {
5203
- constructor(databaseUrl, rev, realmSetHash, clientIdentity, token, tokenExpiration, subscriber, messageProducer, webSocketStatus) {
6704
+ constructor(db, rev, realmSetHash, clientIdentity, user, subscriber, messageProducer, webSocketStatus) {
5204
6705
  super(() => this.teardown());
5205
6706
  this.id = ++counter;
6707
+ this.subscriptions = new Set();
5206
6708
  this.reconnecting = false;
5207
- console.debug('New WebSocket Connection', this.id, token ? 'authorized' : 'unauthorized');
5208
- this.databaseUrl = databaseUrl;
6709
+ console.debug('New WebSocket Connection', this.id, user.accessToken ? 'authorized' : 'unauthorized');
6710
+ this.db = db;
6711
+ this.databaseUrl = db.cloud.options.databaseUrl;
5209
6712
  this.rev = rev;
5210
6713
  this.realmSetHash = realmSetHash;
5211
6714
  this.clientIdentity = clientIdentity;
5212
- this.token = token;
5213
- this.tokenExpiration = tokenExpiration;
6715
+ this.user = user;
5214
6716
  this.subscriber = subscriber;
5215
6717
  this.lastUserActivity = new Date();
5216
6718
  this.messageProducer = messageProducer;
5217
- this.messageProducerSubscription = null;
5218
6719
  this.webSocketStatus = webSocketStatus;
5219
6720
  this.connect();
5220
6721
  }
@@ -5235,10 +6736,10 @@
5235
6736
  catch (_a) { }
5236
6737
  }
5237
6738
  this.ws = null;
5238
- if (this.messageProducerSubscription) {
5239
- this.messageProducerSubscription.unsubscribe();
5240
- this.messageProducerSubscription = null;
6739
+ for (const sub of this.subscriptions) {
6740
+ sub.unsubscribe();
5241
6741
  }
6742
+ this.subscriptions.clear();
5242
6743
  }
5243
6744
  reconnect() {
5244
6745
  if (this.reconnecting)
@@ -5271,7 +6772,8 @@
5271
6772
  //console.debug('SyncStatus: DUBB: Ooops it was closed!');
5272
6773
  return;
5273
6774
  }
5274
- if (this.tokenExpiration && this.tokenExpiration < new Date()) {
6775
+ const tokenExpiration = this.user.accessTokenExpiration;
6776
+ if (tokenExpiration && tokenExpiration < new Date()) {
5275
6777
  this.subscriber.error(new TokenExpiredError()); // Will be handled in connectWebSocket.ts.
5276
6778
  return;
5277
6779
  }
@@ -5326,13 +6828,13 @@
5326
6828
  searchParams.set('rev', this.rev);
5327
6829
  searchParams.set('realmsHash', this.realmSetHash);
5328
6830
  searchParams.set('clientId', this.clientIdentity);
5329
- if (this.token) {
5330
- searchParams.set('token', this.token);
6831
+ if (this.user.accessToken) {
6832
+ searchParams.set('token', this.user.accessToken);
5331
6833
  }
5332
6834
  // Connect the WebSocket to given url:
5333
6835
  console.debug('dexie-cloud WebSocket create');
5334
6836
  const ws = (this.ws = new WebSocket(`${wsUrl}/changes?${searchParams}`));
5335
- //ws.binaryType = "arraybuffer"; // For future when subscribing to actual changes.
6837
+ ws.binaryType = "arraybuffer";
5336
6838
  ws.onclose = (event) => {
5337
6839
  if (!this.pinger)
5338
6840
  return;
@@ -5342,17 +6844,33 @@
5342
6844
  ws.onmessage = (event) => {
5343
6845
  if (!this.pinger)
5344
6846
  return;
5345
- console.debug('dexie-cloud WebSocket onmessage', event.data);
5346
6847
  this.lastServerActivity = new Date();
5347
6848
  try {
5348
- const msg = TSON.parse(event.data);
6849
+ const msg = typeof event.data === 'string'
6850
+ ? TSON.parse(event.data)
6851
+ : decodeYMessage(new Uint8Array(event.data));
6852
+ console.debug('dexie-cloud WebSocket onmessage', msg.type, msg);
5349
6853
  if (msg.type === 'error') {
5350
6854
  throw new Error(`Error message from dexie-cloud: ${msg.error}`);
5351
6855
  }
5352
- if (msg.type === 'rev') {
6856
+ else if (msg.type === 'rev') {
5353
6857
  this.rev = msg.rev; // No meaning but seems reasonable.
5354
6858
  }
5355
- if (msg.type !== 'pong') {
6859
+ else if (msg.type === 'aware') {
6860
+ const docCache = Dexie.DexieYProvider.getDocCache(this.db.dx);
6861
+ const doc = docCache.find(msg.table, msg.k, msg.prop);
6862
+ if (doc) {
6863
+ const awareness = getDocAwareness(doc);
6864
+ if (awareness) {
6865
+ const awap = getAwarenessLibrary(this.db);
6866
+ awap.applyAwarenessUpdate(awareness, msg.u, 'server');
6867
+ }
6868
+ }
6869
+ }
6870
+ else if (msg.type === 'u-ack' || msg.type === 'u-reject' || msg.type === 'u-s' || msg.type === 'in-sync') {
6871
+ applyYServerMessages([msg], this.db);
6872
+ }
6873
+ else if (msg.type !== 'pong') {
5356
6874
  this.subscriber.next(msg);
5357
6875
  }
5358
6876
  }
@@ -5380,16 +6898,27 @@
5380
6898
  }
5381
6899
  };
5382
6900
  });
5383
- this.messageProducerSubscription = this.messageProducer.subscribe((msg) => {
5384
- var _a;
6901
+ this.subscriptions.add(this.messageProducer.subscribe((msg) => {
6902
+ var _a, _b;
5385
6903
  if (!this.closed) {
5386
6904
  if (msg.type === 'ready' &&
5387
6905
  this.webSocketStatus.value !== 'connected') {
5388
6906
  this.webSocketStatus.next('connected');
5389
6907
  }
5390
- (_a = this.ws) === null || _a === void 0 ? void 0 : _a.send(TSON.stringify(msg));
6908
+ console.debug('dexie-cloud WebSocket send', msg.type, msg);
6909
+ if (msg.type === 'ready') {
6910
+ (_a = this.ws) === null || _a === void 0 ? void 0 : _a.send(TSON.stringify(msg));
6911
+ }
6912
+ else {
6913
+ // If it's not a "ready" message, it's an YMessage.
6914
+ // YMessages can be sent binary encoded.
6915
+ (_b = this.ws) === null || _b === void 0 ? void 0 : _b.send(encodeYMessage(msg));
6916
+ }
5391
6917
  }
5392
- });
6918
+ }));
6919
+ if (this.user.isLoggedIn && !isEagerSyncDisabled(this.db)) {
6920
+ this.subscriptions.add(createYClientUpdateObservable(this.db).subscribe(this.db.messageProducer));
6921
+ }
5393
6922
  }
5394
6923
  catch (error) {
5395
6924
  this.pauseUntil = new Date(Date.now() + FAIL_RETRY_WAIT_TIME);
@@ -5431,7 +6960,7 @@
5431
6960
  if (!((_a = db.cloud.options) === null || _a === void 0 ? void 0 : _a.databaseUrl)) {
5432
6961
  throw new Error(`No database URL to connect WebSocket to`);
5433
6962
  }
5434
- const messageProducer = db.messageConsumer.readyToServe.pipe(filter((isReady) => isReady), // When consumer is ready for new messages, produce such a message to inform server about it
6963
+ const readyForChangesMessage = db.messageConsumer.readyToServe.pipe(filter((isReady) => isReady), // When consumer is ready for new messages, produce such a message to inform server about it
5435
6964
  switchMap(() => db.getPersistedSyncState()), // We need the info on which server revision we are at:
5436
6965
  filter((syncState) => syncState && syncState.serverRevision), // We wont send anything to server before inital sync has taken place
5437
6966
  switchMap((syncState) => __awaiter(this, void 0, void 0, function* () {
@@ -5442,6 +6971,7 @@
5442
6971
  realmSetHash: yield computeRealmSetHash(syncState)
5443
6972
  });
5444
6973
  })));
6974
+ const messageProducer = rxjs.merge(readyForChangesMessage, db.messageProducer);
5445
6975
  function createObservable() {
5446
6976
  return db.cloud.persistedSyncState.pipe(filter((syncState) => syncState === null || syncState === void 0 ? void 0 : syncState.serverRevision), // Don't connect before there's no initial sync performed.
5447
6977
  take(1), // Don't continue waking up whenever syncState change
@@ -5468,7 +6998,7 @@
5468
6998
  // If no new entries, server won't bother the client. If new entries, server sends only those
5469
6999
  // and the baseRev of the last from same client-ID.
5470
7000
  if (userLogin) {
5471
- return new WSObservable(db.cloud.options.databaseUrl, db.cloud.persistedSyncState.value.serverRevision, realmSetHash, db.cloud.persistedSyncState.value.clientIdentity, messageProducer, db.cloud.webSocketStatus, userLogin.accessToken, userLogin.accessTokenExpiration);
7001
+ return new WSObservable(db, db.cloud.persistedSyncState.value.serverRevision, realmSetHash, db.cloud.persistedSyncState.value.clientIdentity, messageProducer, db.cloud.webSocketStatus, userLogin);
5472
7002
  }
5473
7003
  else {
5474
7004
  return rxjs.from([]);
@@ -6274,6 +7804,130 @@
6274
7804
  })), []);
6275
7805
  });
6276
7806
 
7807
+ function createYHandler(db) {
7808
+ return (provider) => {
7809
+ var _a;
7810
+ const awap = getAwarenessLibrary(db);
7811
+ const doc = provider.doc;
7812
+ const { parentTable, parentId, parentProp, updatesTable } = doc.meta;
7813
+ if (!((_a = db.cloud.schema) === null || _a === void 0 ? void 0 : _a[parentTable].markedForSync)) {
7814
+ return; // The table that holds the doc is not marked for sync - leave it to dexie. No syncing, no awareness.
7815
+ }
7816
+ let awareness = new awap.Awareness(doc);
7817
+ awarenessWeakMap.set(doc, awareness);
7818
+ provider.awareness = awareness;
7819
+ awareness.on('update', ({ added, updated, removed }, origin) => {
7820
+ // Send the update
7821
+ const changedClients = added.concat(updated).concat(removed);
7822
+ const user = db.cloud.currentUser.value;
7823
+ if (origin !== 'server' && user.isLoggedIn && !isEagerSyncDisabled(db)) {
7824
+ const update = awap.encodeAwarenessUpdate(awareness, changedClients);
7825
+ db.messageProducer.next({
7826
+ type: 'aware',
7827
+ table: parentTable,
7828
+ prop: parentProp,
7829
+ k: doc.meta.parentId,
7830
+ u: update,
7831
+ });
7832
+ if (provider.destroyed) {
7833
+ // We're called from awareness.on('destroy') that did
7834
+ // removeAwarenessStates.
7835
+ // It's time to also send the doc-close message that dexie-cloud understands
7836
+ // and uses to stop subscribing for updates and awareness updates and brings
7837
+ // down the cached information in memory on the WS connection for this.
7838
+ db.messageProducer.next({
7839
+ type: 'doc-close',
7840
+ table: parentTable,
7841
+ prop: parentProp,
7842
+ k: doc.meta.parentId
7843
+ });
7844
+ }
7845
+ }
7846
+ });
7847
+ awareness.on('destroy', () => {
7848
+ // Signal to server that this provider is destroyed (the update event will be triggered, which
7849
+ // in turn will trigger db.messageProducer that will send the message to the server if WS is connected)
7850
+ awap.removeAwarenessStates(awareness, [doc.clientID], 'provider destroyed');
7851
+ });
7852
+ // Now wait til document is loaded and then open the document on the server
7853
+ provider.on('load', () => __awaiter(this, void 0, void 0, function* () {
7854
+ if (provider.destroyed)
7855
+ return;
7856
+ let connected = false;
7857
+ let currentFlowId = 1;
7858
+ const subscription = db.cloud.webSocketStatus.subscribe((wsStatus) => {
7859
+ if (provider.destroyed)
7860
+ return;
7861
+ // Keep "connected" state in a variable so we can check it after async operations
7862
+ connected = wsStatus === 'connected';
7863
+ // We are or got connected. Open the document on the server.
7864
+ const user = db.cloud.currentUser.value;
7865
+ if (wsStatus === "connected" && user.isLoggedIn && !isEagerSyncDisabled(db)) {
7866
+ ++currentFlowId;
7867
+ openDocumentOnServer().catch(error => {
7868
+ console.warn(`Error catched in createYHandler.ts: ${error}`);
7869
+ });
7870
+ }
7871
+ });
7872
+ // Wait until WebSocket is connected
7873
+ provider.addCleanupHandler(subscription);
7874
+ /** Sends an 'doc-open' message to server whenever websocket becomes
7875
+ * connected, or if it is already connected.
7876
+ * The flow is aborted in case websocket is disconnected while querying
7877
+ * information required to compute the state vector. Flow is also
7878
+ * aborted in case document or provider has been destroyed during
7879
+ * the async parts of the task.
7880
+ *
7881
+ * The state vector is only computed from the updates that have occured
7882
+ * after the last full sync - which could very often be zero - in which
7883
+ * case no state vector is sent (then the server already knows us by
7884
+ * revision)
7885
+ *
7886
+ * When server gets the doc-open message, it will authorized us for
7887
+ * whether we are allowed to read / write to this document, and then
7888
+ * keep the cached information in memory on the WS connection for this
7889
+ * particular document, as well as subscribe to updates and awareness updates
7890
+ * from other clients on the document.
7891
+ */
7892
+ function openDocumentOnServer(wsStatus) {
7893
+ return __awaiter(this, void 0, void 0, function* () {
7894
+ const myFlow = currentFlowId; // So we can abort when a new flow is started
7895
+ const yTbl = db.table(updatesTable);
7896
+ const syncState = yield yTbl.get(DEXIE_CLOUD_SYNCER_ID);
7897
+ // After every await, check if we still should be working on this task.
7898
+ if (provider.destroyed || currentFlowId !== myFlow || !connected)
7899
+ return;
7900
+ const receivedUntil = (syncState === null || syncState === void 0 ? void 0 : syncState.receivedUntil) || 0;
7901
+ const docOpenMsg = {
7902
+ type: 'doc-open',
7903
+ table: parentTable,
7904
+ prop: parentProp,
7905
+ k: parentId,
7906
+ serverRev: syncState === null || syncState === void 0 ? void 0 : syncState.serverRev,
7907
+ };
7908
+ const serverUpdatesSinceLastSync = yield yTbl
7909
+ .where('i')
7910
+ .between(receivedUntil, Infinity, false)
7911
+ .filter((update) => Dexie.cmp(update.k, parentId) === 0 && // Only updates for this document
7912
+ ((update.f || 0) & 1) === 0 // Don't include local changes
7913
+ )
7914
+ .toArray();
7915
+ // After every await, check if we still should be working on this task.
7916
+ if (provider.destroyed || currentFlowId !== myFlow || !connected)
7917
+ return;
7918
+ if (serverUpdatesSinceLastSync.length > 0) {
7919
+ const Y = $Y(db); // Get the Yjs library from Dexie constructor options
7920
+ const mergedUpdate = Y.mergeUpdatesV2(serverUpdatesSinceLastSync.map((update) => update.u));
7921
+ const stateVector = Y.encodeStateVectorFromUpdateV2(mergedUpdate);
7922
+ docOpenMsg.sv = stateVector;
7923
+ }
7924
+ db.messageProducer.next(docOpenMsg);
7925
+ });
7926
+ }
7927
+ }));
7928
+ };
7929
+ }
7930
+
6277
7931
  const DEFAULT_OPTIONS = {
6278
7932
  nameSuffix: true,
6279
7933
  };
@@ -6303,8 +7957,9 @@
6303
7957
  if (closed)
6304
7958
  throw new Dexie.DatabaseClosedError();
6305
7959
  }
6306
- dbOnClosed(dexie, () => {
7960
+ dexie.once('close', () => {
6307
7961
  subscriptions.forEach((subscription) => subscription.unsubscribe());
7962
+ subscriptions.splice(0, subscriptions.length);
6308
7963
  closed = true;
6309
7964
  localSyncWorker && localSyncWorker.stop();
6310
7965
  localSyncWorker = null;
@@ -6313,7 +7968,7 @@
6313
7968
  const syncComplete = new rxjs.Subject();
6314
7969
  dexie.cloud = {
6315
7970
  // @ts-ignore
6316
- version: "4.0.7",
7971
+ version: "4.1.0-alpha.10",
6317
7972
  options: Object.assign({}, DEFAULT_OPTIONS),
6318
7973
  schema: null,
6319
7974
  get currentUserId() {
@@ -6459,6 +8114,7 @@
6459
8114
  throw new Error(`Internal error`); // options cannot be null if configuredProgramatically is set.
6460
8115
  const newPersistedOptions = Object.assign({}, options);
6461
8116
  delete newPersistedOptions.fetchTokens;
8117
+ delete newPersistedOptions.awarenessProtocol;
6462
8118
  yield db.$syncState.put(newPersistedOptions, 'options');
6463
8119
  }
6464
8120
  if (((_h = db.cloud.options) === null || _h === void 0 ? void 0 : _h.tryUseServiceWorker) &&
@@ -6536,12 +8192,29 @@
6536
8192
  currentUserEmitter.pipe(skip(1), take(1)),
6537
8193
  db.cloud.persistedSyncState.pipe(skip(1), take(1)),
6538
8194
  ]));
8195
+ const yHandler = createYHandler(db);
8196
+ db.dx.on('y', yHandler);
8197
+ db.dx.once('close', () => {
8198
+ var _a;
8199
+ (_a = db.dx.on.y) === null || _a === void 0 ? void 0 : _a.unsubscribe(yHandler);
8200
+ });
6539
8201
  }
6540
8202
  // HERE: If requireAuth, do athentication now.
6541
8203
  let changedUser = false;
6542
8204
  const user = yield db.getCurrentUser();
6543
- if ((_c = db.cloud.options) === null || _c === void 0 ? void 0 : _c.requireAuth) {
6544
- if (!user.isLoggedIn) {
8205
+ const requireAuth = (_c = db.cloud.options) === null || _c === void 0 ? void 0 : _c.requireAuth;
8206
+ if (requireAuth) {
8207
+ if (typeof requireAuth === 'object') {
8208
+ // requireAuth contains login hints. Check if we already fulfil it:
8209
+ if (!user.isLoggedIn ||
8210
+ (requireAuth.userId && user.userId !== requireAuth.userId) ||
8211
+ (requireAuth.email && user.email !== requireAuth.email)) {
8212
+ // If not, login the configured user:
8213
+ changedUser = yield login(db, requireAuth);
8214
+ }
8215
+ }
8216
+ else if (!user.isLoggedIn) {
8217
+ // requireAuth is true and user is not logged in
6545
8218
  changedUser = yield login(db);
6546
8219
  }
6547
8220
  }
@@ -6597,7 +8270,7 @@
6597
8270
  }
6598
8271
  }
6599
8272
  // @ts-ignore
6600
- dexieCloud.version = "4.0.7";
8273
+ dexieCloud.version = "4.1.0-alpha.10";
6601
8274
  Dexie.Cloud = dexieCloud;
6602
8275
 
6603
8276
  // In case the SW lives for a while, let it reuse already opened connections: