dexie-cloud-addon 4.0.8 → 4.1.0-alpha.12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. package/dist/modern/DexieCloudOptions.d.ts +1 -0
  2. package/dist/modern/WSObservable.d.ts +9 -6
  3. package/dist/modern/db/DexieCloudDB.d.ts +2 -0
  4. package/dist/modern/db/entities/PersistedSyncState.d.ts +7 -0
  5. package/dist/modern/define-ydoc-trigger.d.ts +2 -0
  6. package/dist/modern/dexie-cloud-addon.d.ts +1 -0
  7. package/dist/modern/dexie-cloud-addon.js +1914 -63
  8. package/dist/modern/dexie-cloud-addon.js.map +1 -1
  9. package/dist/modern/dexie-cloud-addon.min.js +1 -1
  10. package/dist/modern/dexie-cloud-addon.min.js.map +1 -1
  11. package/dist/modern/service-worker.js +1761 -62
  12. package/dist/modern/service-worker.js.map +1 -1
  13. package/dist/modern/service-worker.min.js +1 -1
  14. package/dist/modern/service-worker.min.js.map +1 -1
  15. package/dist/modern/sync/DEXIE_CLOUD_SYNCER_ID.d.ts +1 -0
  16. package/dist/modern/sync/syncWithServer.d.ts +2 -2
  17. package/dist/modern/yjs/Y.d.ts +3 -0
  18. package/dist/modern/yjs/YDexieCloudSyncState.d.ts +4 -0
  19. package/dist/modern/yjs/YTable.d.ts +2 -0
  20. package/dist/modern/yjs/applyYMessages.d.ts +5 -0
  21. package/dist/modern/yjs/awareness.d.ts +4 -0
  22. package/dist/modern/yjs/createYClientUpdateObservable.d.ts +4 -0
  23. package/dist/modern/yjs/createYHandler.d.ts +5 -0
  24. package/dist/modern/yjs/downloadYDocsFromServer.d.ts +3 -0
  25. package/dist/modern/yjs/getUpdatesTable.d.ts +3 -0
  26. package/dist/modern/yjs/listUpdatesSince.d.ts +2 -0
  27. package/dist/modern/yjs/listYClientMessagesAndStateVector.d.ts +26 -0
  28. package/dist/modern/yjs/updateYSyncStates.d.ts +6 -0
  29. package/dist/umd/DexieCloudOptions.d.ts +1 -0
  30. package/dist/umd/WSObservable.d.ts +9 -6
  31. package/dist/umd/db/DexieCloudDB.d.ts +2 -0
  32. package/dist/umd/db/entities/PersistedSyncState.d.ts +7 -0
  33. package/dist/umd/define-ydoc-trigger.d.ts +2 -0
  34. package/dist/umd/dexie-cloud-addon.d.ts +1 -0
  35. package/dist/umd/dexie-cloud-addon.js +1912 -60
  36. package/dist/umd/dexie-cloud-addon.js.map +1 -1
  37. package/dist/umd/dexie-cloud-addon.min.js +1 -1
  38. package/dist/umd/dexie-cloud-addon.min.js.map +1 -1
  39. package/dist/umd/service-worker.js +1759 -60
  40. package/dist/umd/service-worker.js.map +1 -1
  41. package/dist/umd/service-worker.min.js +1 -1
  42. package/dist/umd/service-worker.min.js.map +1 -1
  43. package/dist/umd/sync/DEXIE_CLOUD_SYNCER_ID.d.ts +1 -0
  44. package/dist/umd/sync/syncWithServer.d.ts +2 -2
  45. package/dist/umd/yjs/Y.d.ts +3 -0
  46. package/dist/umd/yjs/YDexieCloudSyncState.d.ts +4 -0
  47. package/dist/umd/yjs/YTable.d.ts +2 -0
  48. package/dist/umd/yjs/applyYMessages.d.ts +5 -0
  49. package/dist/umd/yjs/awareness.d.ts +4 -0
  50. package/dist/umd/yjs/createYClientUpdateObservable.d.ts +4 -0
  51. package/dist/umd/yjs/createYHandler.d.ts +5 -0
  52. package/dist/umd/yjs/downloadYDocsFromServer.d.ts +3 -0
  53. package/dist/umd/yjs/getUpdatesTable.d.ts +3 -0
  54. package/dist/umd/yjs/listUpdatesSince.d.ts +2 -0
  55. package/dist/umd/yjs/listYClientMessagesAndStateVector.d.ts +26 -0
  56. package/dist/umd/yjs/updateYSyncStates.d.ts +6 -0
  57. package/package.json +5 -4
  58. package/dist/modern/helpers/dbOnClosed.d.ts +0 -2
  59. package/dist/umd/helpers/dbOnClosed.d.ts +0 -2
@@ -8,7 +8,7 @@
8
8
  *
9
9
  * ==========================================================================
10
10
  *
11
- * Version 4.0.8, Tue Jun 04 2024
11
+ * Version 4.1.0-alpha.12, Wed Oct 16 2024
12
12
  *
13
13
  * https://dexie.org
14
14
  *
@@ -16,8 +16,8 @@
16
16
  *
17
17
  */
18
18
 
19
- import Dexie, { PropModification, cmp, liveQuery } from 'dexie';
20
- import { Observable as Observable$1, BehaviorSubject, firstValueFrom, Subject, from as from$1, filter as filter$1, fromEvent, of, merge, Subscription as Subscription$1, throwError, combineLatest, map as map$1, share, timer as timer$1 } from 'rxjs';
19
+ import Dexie, { PropModification, cmp, DexieYProvider, liveQuery } from 'dexie';
20
+ import { Observable as Observable$1, BehaviorSubject, firstValueFrom, Subject, from as from$1, filter as filter$1, fromEvent, of, merge, switchMap as switchMap$1, tap as tap$1, mergeMap as mergeMap$1, Subscription as Subscription$1, throwError, combineLatest, map as map$1, share, timer as timer$1 } from 'rxjs';
21
21
 
22
22
  /******************************************************************************
23
23
  Copyright (c) Microsoft Corporation.
@@ -2242,6 +2242,1075 @@ function getDbNameFromDbUrl(dbUrl) {
2242
2242
  : url.pathname.split('/')[1];
2243
2243
  }
2244
2244
 
2245
+ /**
2246
+ * Common Math expressions.
2247
+ *
2248
+ * @module math
2249
+ */
2250
+
2251
+ const floor = Math.floor;
2252
+ const abs = Math.abs;
2253
+
2254
+ /**
2255
+ * @function
2256
+ * @param {number} a
2257
+ * @param {number} b
2258
+ * @return {number} The smaller element of a and b
2259
+ */
2260
+ const min = (a, b) => a < b ? a : b;
2261
+
2262
+ /**
2263
+ * @function
2264
+ * @param {number} a
2265
+ * @param {number} b
2266
+ * @return {number} The bigger element of a and b
2267
+ */
2268
+ const max = (a, b) => a > b ? a : b;
2269
+
2270
+ /**
2271
+ * @param {number} n
2272
+ * @return {boolean} Whether n is negative. This function also differentiates between -0 and +0
2273
+ */
2274
+ const isNegativeZero = n => n !== 0 ? n < 0 : 1 / n < 0;
2275
+
2276
+ /* eslint-env browser */
2277
+
2278
+ const BIT7 = 64;
2279
+ const BIT8 = 128;
2280
+ const BITS6 = 63;
2281
+ const BITS7 = 127;
2282
+ /**
2283
+ * @type {number}
2284
+ */
2285
+ const BITS31 = 0x7FFFFFFF;
2286
+
2287
+ /**
2288
+ * Utility helpers for working with numbers.
2289
+ *
2290
+ * @module number
2291
+ */
2292
+
2293
+
2294
+ const MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER;
2295
+
2296
+ /* c8 ignore next */
2297
+ const isInteger = Number.isInteger || (num => typeof num === 'number' && isFinite(num) && floor(num) === num);
2298
+
2299
+ /**
2300
+ * Utility module to work with Arrays.
2301
+ *
2302
+ * @module array
2303
+ */
2304
+
2305
+
2306
+ const isArray = Array.isArray;
2307
+
2308
+ /**
2309
+ * @param {string} str
2310
+ * @return {Uint8Array}
2311
+ */
2312
+ const _encodeUtf8Polyfill = str => {
2313
+ const encodedString = unescape(encodeURIComponent(str));
2314
+ const len = encodedString.length;
2315
+ const buf = new Uint8Array(len);
2316
+ for (let i = 0; i < len; i++) {
2317
+ buf[i] = /** @type {number} */ (encodedString.codePointAt(i));
2318
+ }
2319
+ return buf
2320
+ };
2321
+
2322
+ /* c8 ignore next */
2323
+ const utf8TextEncoder = /** @type {TextEncoder} */ (typeof TextEncoder !== 'undefined' ? new TextEncoder() : null);
2324
+
2325
+ /**
2326
+ * @param {string} str
2327
+ * @return {Uint8Array}
2328
+ */
2329
+ const _encodeUtf8Native = str => utf8TextEncoder.encode(str);
2330
+
2331
+ /**
2332
+ * @param {string} str
2333
+ * @return {Uint8Array}
2334
+ */
2335
+ /* c8 ignore next */
2336
+ const encodeUtf8 = utf8TextEncoder ? _encodeUtf8Native : _encodeUtf8Polyfill;
2337
+
2338
+ /* c8 ignore next */
2339
+ let utf8TextDecoder = typeof TextDecoder === 'undefined' ? null : new TextDecoder('utf-8', { fatal: true, ignoreBOM: true });
2340
+
2341
+ /* c8 ignore start */
2342
+ if (utf8TextDecoder && utf8TextDecoder.decode(new Uint8Array()).length === 1) {
2343
+ // Safari doesn't handle BOM correctly.
2344
+ // This fixes a bug in Safari 13.0.5 where it produces a BOM the first time it is called.
2345
+ // utf8TextDecoder.decode(new Uint8Array()).length === 1 on the first call and
2346
+ // utf8TextDecoder.decode(new Uint8Array()).length === 1 on the second call
2347
+ // Another issue is that from then on no BOM chars are recognized anymore
2348
+ /* c8 ignore next */
2349
+ utf8TextDecoder = null;
2350
+ }
2351
+
2352
+ /**
2353
+ * Efficient schema-less binary encoding with support for variable length encoding.
2354
+ *
2355
+ * Use [lib0/encoding] with [lib0/decoding]. Every encoding function has a corresponding decoding function.
2356
+ *
2357
+ * Encodes numbers in little-endian order (least to most significant byte order)
2358
+ * and is compatible with Golang's binary encoding (https://golang.org/pkg/encoding/binary/)
2359
+ * which is also used in Protocol Buffers.
2360
+ *
2361
+ * ```js
2362
+ * // encoding step
2363
+ * const encoder = encoding.createEncoder()
2364
+ * encoding.writeVarUint(encoder, 256)
2365
+ * encoding.writeVarString(encoder, 'Hello world!')
2366
+ * const buf = encoding.toUint8Array(encoder)
2367
+ * ```
2368
+ *
2369
+ * ```js
2370
+ * // decoding step
2371
+ * const decoder = decoding.createDecoder(buf)
2372
+ * decoding.readVarUint(decoder) // => 256
2373
+ * decoding.readVarString(decoder) // => 'Hello world!'
2374
+ * decoding.hasContent(decoder) // => false - all data is read
2375
+ * ```
2376
+ *
2377
+ * @module encoding
2378
+ */
2379
+
2380
+
2381
+ /**
2382
+ * A BinaryEncoder handles the encoding to an Uint8Array.
2383
+ */
2384
+ class Encoder {
2385
+ constructor () {
2386
+ this.cpos = 0;
2387
+ this.cbuf = new Uint8Array(100);
2388
+ /**
2389
+ * @type {Array<Uint8Array>}
2390
+ */
2391
+ this.bufs = [];
2392
+ }
2393
+ }
2394
+
2395
+ /**
2396
+ * The current length of the encoded data.
2397
+ *
2398
+ * @function
2399
+ * @param {Encoder} encoder
2400
+ * @return {number}
2401
+ */
2402
+ const length = encoder => {
2403
+ let len = encoder.cpos;
2404
+ for (let i = 0; i < encoder.bufs.length; i++) {
2405
+ len += encoder.bufs[i].length;
2406
+ }
2407
+ return len
2408
+ };
2409
+
2410
+ /**
2411
+ * Transform to Uint8Array.
2412
+ *
2413
+ * @function
2414
+ * @param {Encoder} encoder
2415
+ * @return {Uint8Array} The created ArrayBuffer.
2416
+ */
2417
+ const toUint8Array = encoder => {
2418
+ const uint8arr = new Uint8Array(length(encoder));
2419
+ let curPos = 0;
2420
+ for (let i = 0; i < encoder.bufs.length; i++) {
2421
+ const d = encoder.bufs[i];
2422
+ uint8arr.set(d, curPos);
2423
+ curPos += d.length;
2424
+ }
2425
+ uint8arr.set(new Uint8Array(encoder.cbuf.buffer, 0, encoder.cpos), curPos);
2426
+ return uint8arr
2427
+ };
2428
+
2429
+ /**
2430
+ * Verify that it is possible to write `len` bytes without checking. If
2431
+ * necessary, a new Buffer with the required length is attached.
2432
+ *
2433
+ * @param {Encoder} encoder
2434
+ * @param {number} len
2435
+ */
2436
+ const verifyLen = (encoder, len) => {
2437
+ const bufferLen = encoder.cbuf.length;
2438
+ if (bufferLen - encoder.cpos < len) {
2439
+ encoder.bufs.push(new Uint8Array(encoder.cbuf.buffer, 0, encoder.cpos));
2440
+ encoder.cbuf = new Uint8Array(max(bufferLen, len) * 2);
2441
+ encoder.cpos = 0;
2442
+ }
2443
+ };
2444
+
2445
+ /**
2446
+ * Write one byte to the encoder.
2447
+ *
2448
+ * @function
2449
+ * @param {Encoder} encoder
2450
+ * @param {number} num The byte that is to be encoded.
2451
+ */
2452
+ const write = (encoder, num) => {
2453
+ const bufferLen = encoder.cbuf.length;
2454
+ if (encoder.cpos === bufferLen) {
2455
+ encoder.bufs.push(encoder.cbuf);
2456
+ encoder.cbuf = new Uint8Array(bufferLen * 2);
2457
+ encoder.cpos = 0;
2458
+ }
2459
+ encoder.cbuf[encoder.cpos++] = num;
2460
+ };
2461
+
2462
+ /**
2463
+ * Write a variable length unsigned integer. Max encodable integer is 2^53.
2464
+ *
2465
+ * @function
2466
+ * @param {Encoder} encoder
2467
+ * @param {number} num The number that is to be encoded.
2468
+ */
2469
+ const writeVarUint = (encoder, num) => {
2470
+ while (num > BITS7) {
2471
+ write(encoder, BIT8 | (BITS7 & num));
2472
+ num = floor(num / 128); // shift >>> 7
2473
+ }
2474
+ write(encoder, BITS7 & num);
2475
+ };
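For orientation, a minimal worked example of this var-uint scheme (an editorial illustration, not part of the bundled code; `Decoder`/`readVarUint` appear further down in the bundle): the value 256 takes two bytes.

```js
// Editorial illustration: round-tripping 256 through the codec above.
const enc = new Encoder();
writeVarUint(enc, 256);            // 256 > 127 -> emits 0x80 (continuation bit set), then 0x02
const bytes = toUint8Array(enc);   // Uint8Array [128, 2]
readVarUint(new Decoder(bytes));   // => 0*1 + 2*128 = 256
```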
2476
+
2477
+ /**
2478
+ * Write a variable length integer.
2479
+ *
2480
+ * We use the 7th bit instead for signaling that this is a negative number.
2481
+ *
2482
+ * @function
2483
+ * @param {Encoder} encoder
2484
+ * @param {number} num The number that is to be encoded.
2485
+ */
2486
+ const writeVarInt = (encoder, num) => {
2487
+ const isNegative = isNegativeZero(num);
2488
+ if (isNegative) {
2489
+ num = -num;
2490
+ }
2491
+ // |- whether to continue reading |- whether is negative |- number
2492
+ write(encoder, (num > BITS6 ? BIT8 : 0) | (isNegative ? BIT7 : 0) | (BITS6 & num));
2493
+ num = floor(num / 64); // shift >>> 6
2494
+ // We don't need to consider the case of num === 0 so we can use a different
2495
+ // pattern here than above.
2496
+ while (num > 0) {
2497
+ write(encoder, (num > BITS7 ? BIT8 : 0) | (BITS7 & num));
2498
+ num = floor(num / 128); // shift >>> 7
2499
+ }
2500
+ };
2501
+
2502
+ /**
2503
+ * A cache to store strings temporarily
2504
+ */
2505
+ const _strBuffer = new Uint8Array(30000);
2506
+ const _maxStrBSize = _strBuffer.length / 3;
2507
+
2508
+ /**
2509
+ * Write a variable length string.
2510
+ *
2511
+ * @function
2512
+ * @param {Encoder} encoder
2513
+ * @param {String} str The string that is to be encoded.
2514
+ */
2515
+ const _writeVarStringNative = (encoder, str) => {
2516
+ if (str.length < _maxStrBSize) {
2517
+ // We can encode the string into the existing buffer
2518
+ /* c8 ignore next */
2519
+ const written = utf8TextEncoder.encodeInto(str, _strBuffer).written || 0;
2520
+ writeVarUint(encoder, written);
2521
+ for (let i = 0; i < written; i++) {
2522
+ write(encoder, _strBuffer[i]);
2523
+ }
2524
+ } else {
2525
+ writeVarUint8Array(encoder, encodeUtf8(str));
2526
+ }
2527
+ };
2528
+
2529
+ /**
2530
+ * Write a variable length string.
2531
+ *
2532
+ * @function
2533
+ * @param {Encoder} encoder
2534
+ * @param {String} str The string that is to be encoded.
2535
+ */
2536
+ const _writeVarStringPolyfill = (encoder, str) => {
2537
+ const encodedString = unescape(encodeURIComponent(str));
2538
+ const len = encodedString.length;
2539
+ writeVarUint(encoder, len);
2540
+ for (let i = 0; i < len; i++) {
2541
+ write(encoder, /** @type {number} */ (encodedString.codePointAt(i)));
2542
+ }
2543
+ };
2544
+
2545
+ /**
2546
+ * Write a variable length string.
2547
+ *
2548
+ * @function
2549
+ * @param {Encoder} encoder
2550
+ * @param {String} str The string that is to be encoded.
2551
+ */
2552
+ /* c8 ignore next */
2553
+ const writeVarString = (utf8TextEncoder && /** @type {any} */ (utf8TextEncoder).encodeInto) ? _writeVarStringNative : _writeVarStringPolyfill;
2554
+
2555
+ /**
2556
+ * Append fixed-length Uint8Array to the encoder.
2557
+ *
2558
+ * @function
2559
+ * @param {Encoder} encoder
2560
+ * @param {Uint8Array} uint8Array
2561
+ */
2562
+ const writeUint8Array = (encoder, uint8Array) => {
2563
+ const bufferLen = encoder.cbuf.length;
2564
+ const cpos = encoder.cpos;
2565
+ const leftCopyLen = min(bufferLen - cpos, uint8Array.length);
2566
+ const rightCopyLen = uint8Array.length - leftCopyLen;
2567
+ encoder.cbuf.set(uint8Array.subarray(0, leftCopyLen), cpos);
2568
+ encoder.cpos += leftCopyLen;
2569
+ if (rightCopyLen > 0) {
2570
+ // Still something to write, write right half..
2571
+ // Append new buffer
2572
+ encoder.bufs.push(encoder.cbuf);
2573
+ // must have at least size of remaining buffer
2574
+ encoder.cbuf = new Uint8Array(max(bufferLen * 2, rightCopyLen));
2575
+ // copy array
2576
+ encoder.cbuf.set(uint8Array.subarray(leftCopyLen));
2577
+ encoder.cpos = rightCopyLen;
2578
+ }
2579
+ };
2580
+
2581
+ /**
2582
+ * Append an Uint8Array to Encoder.
2583
+ *
2584
+ * @function
2585
+ * @param {Encoder} encoder
2586
+ * @param {Uint8Array} uint8Array
2587
+ */
2588
+ const writeVarUint8Array = (encoder, uint8Array) => {
2589
+ writeVarUint(encoder, uint8Array.byteLength);
2590
+ writeUint8Array(encoder, uint8Array);
2591
+ };
2592
+
2593
+ /**
2594
+ * Create an DataView of the next `len` bytes. Use it to write data after
2595
+ * calling this function.
2596
+ *
2597
+ * ```js
2598
+ * // write float32 using DataView
2599
+ * const dv = writeOnDataView(encoder, 4)
2600
+ * dv.setFloat32(0, 1.1)
2601
+ * // read float32 using DataView
2602
+ * const dv = readFromDataView(encoder, 4)
2603
+ * dv.getFloat32(0) // => 1.100000023841858 (leaving it to the reader to find out why this is the correct result)
2604
+ * ```
2605
+ *
2606
+ * @param {Encoder} encoder
2607
+ * @param {number} len
2608
+ * @return {DataView}
2609
+ */
2610
+ const writeOnDataView = (encoder, len) => {
2611
+ verifyLen(encoder, len);
2612
+ const dview = new DataView(encoder.cbuf.buffer, encoder.cpos, len);
2613
+ encoder.cpos += len;
2614
+ return dview
2615
+ };
2616
+
2617
+ /**
2618
+ * @param {Encoder} encoder
2619
+ * @param {number} num
2620
+ */
2621
+ const writeFloat32 = (encoder, num) => writeOnDataView(encoder, 4).setFloat32(0, num, false);
2622
+
2623
+ /**
2624
+ * @param {Encoder} encoder
2625
+ * @param {number} num
2626
+ */
2627
+ const writeFloat64 = (encoder, num) => writeOnDataView(encoder, 8).setFloat64(0, num, false);
2628
+
2629
+ /**
2630
+ * @param {Encoder} encoder
2631
+ * @param {bigint} num
2632
+ */
2633
+ const writeBigInt64 = (encoder, num) => /** @type {any} */ (writeOnDataView(encoder, 8)).setBigInt64(0, num, false);
2634
+
2635
+ /**
2636
+ * @param {Encoder} encoder
2637
+ * @param {bigint} num
2638
+ */
2639
+ const writeBigUint64 = (encoder, num) => /** @type {any} */ (writeOnDataView(encoder, 8)).setBigUint64(0, num, false);
2640
+
2641
+ const floatTestBed = new DataView(new ArrayBuffer(4));
2642
+ /**
2643
+ * Check if a number can be encoded as a 32 bit float.
2644
+ *
2645
+ * @param {number} num
2646
+ * @return {boolean}
2647
+ */
2648
+ const isFloat32 = num => {
2649
+ floatTestBed.setFloat32(0, num);
2650
+ return floatTestBed.getFloat32(0) === num
2651
+ };
2652
+
2653
+ /**
2654
+ * Encode data with efficient binary format.
2655
+ *
2656
+ * Differences to JSON:
2657
+ * • Transforms data to a binary format (not to a string)
2658
+ * • Encodes undefined, NaN, and ArrayBuffer (these can't be represented in JSON)
2659
+ * • Numbers are efficiently encoded either as a variable length integer, as a
2660
+ * 32 bit float, as a 64 bit float, or as a 64 bit bigint.
2661
+ *
2662
+ * Encoding table:
2663
+ *
2664
+ * | Data Type | Prefix | Encoding Method | Comment |
2665
+ * | ------------------- | -------- | ------------------ | ------- |
2666
+ * | undefined | 127 | | Functions, symbol, and everything that cannot be identified is encoded as undefined |
2667
+ * | null | 126 | | |
2668
+ * | integer | 125 | writeVarInt | Only encodes 32 bit signed integers |
2669
+ * | float32 | 124 | writeFloat32 | |
2670
+ * | float64 | 123 | writeFloat64 | |
2671
+ * | bigint | 122 | writeBigInt64 | |
2672
+ * | boolean (false) | 121 | | True and false are different data types so we save the following byte |
2673
+ * | boolean (true) | 120 | | - 0b01111000 so the last bit determines whether true or false |
2674
+ * | string | 119 | writeVarString | |
2675
+ * | object<string,any> | 118 | custom | Writes {length} then {length} key-value pairs |
2676
+ * | array<any> | 117 | custom | Writes {length} then {length} json values |
2677
+ * | Uint8Array | 116 | writeVarUint8Array | We use Uint8Array for any kind of binary data |
2678
+ *
2679
+ * Reasons for the decreasing prefix:
2680
+ * We need the first bit for extendability (later we may want to encode the
2681
+ * prefix with writeVarUint). The remaining 7 bits are divided as follows:
2682
+ * [0-30] the beginning of the data range is used for custom purposes
2683
+ * (defined by the function that uses this library)
2684
+ * [31-127] the end of the data range is used for data encoding by
2685
+ * lib0/encoding.js
2686
+ *
2687
+ * @param {Encoder} encoder
2688
+ * @param {undefined|null|number|bigint|boolean|string|Object<string,any>|Array<any>|Uint8Array} data
2689
+ */
2690
+ const writeAny = (encoder, data) => {
2691
+ switch (typeof data) {
2692
+ case 'string':
2693
+ // TYPE 119: STRING
2694
+ write(encoder, 119);
2695
+ writeVarString(encoder, data);
2696
+ break
2697
+ case 'number':
2698
+ if (isInteger(data) && abs(data) <= BITS31) {
2699
+ // TYPE 125: INTEGER
2700
+ write(encoder, 125);
2701
+ writeVarInt(encoder, data);
2702
+ } else if (isFloat32(data)) {
2703
+ // TYPE 124: FLOAT32
2704
+ write(encoder, 124);
2705
+ writeFloat32(encoder, data);
2706
+ } else {
2707
+ // TYPE 123: FLOAT64
2708
+ write(encoder, 123);
2709
+ writeFloat64(encoder, data);
2710
+ }
2711
+ break
2712
+ case 'bigint':
2713
+ // TYPE 122: BigInt
2714
+ write(encoder, 122);
2715
+ writeBigInt64(encoder, data);
2716
+ break
2717
+ case 'object':
2718
+ if (data === null) {
2719
+ // TYPE 126: null
2720
+ write(encoder, 126);
2721
+ } else if (isArray(data)) {
2722
+ // TYPE 117: Array
2723
+ write(encoder, 117);
2724
+ writeVarUint(encoder, data.length);
2725
+ for (let i = 0; i < data.length; i++) {
2726
+ writeAny(encoder, data[i]);
2727
+ }
2728
+ } else if (data instanceof Uint8Array) {
2729
+ // TYPE 116: ArrayBuffer
2730
+ write(encoder, 116);
2731
+ writeVarUint8Array(encoder, data);
2732
+ } else {
2733
+ // TYPE 118: Object
2734
+ write(encoder, 118);
2735
+ const keys = Object.keys(data);
2736
+ writeVarUint(encoder, keys.length);
2737
+ for (let i = 0; i < keys.length; i++) {
2738
+ const key = keys[i];
2739
+ writeVarString(encoder, key);
2740
+ writeAny(encoder, data[key]);
2741
+ }
2742
+ }
2743
+ break
2744
+ case 'boolean':
2745
+ // TYPE 120/121: boolean (true/false)
2746
+ write(encoder, data ? 120 : 121);
2747
+ break
2748
+ default:
2749
+ // TYPE 127: undefined
2750
+ write(encoder, 127);
2751
+ }
2752
+ };
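A short round-trip example of the prefix-tagged encoding in the table above (editorial illustration, not part of the bundled code):

```js
// Editorial illustration of the tagged "any" encoding described above.
const e = new Encoder();
writeAny(e, { id: 42, tags: ['a', 'b'], bin: new Uint8Array([1, 2]) });
// 118 (object) + key count 3, then per key a var-string key and a tagged value:
//   'id'   -> 125 (integer)    + varint 42
//   'tags' -> 117 (array)      + count 2 + two 119-tagged strings
//   'bin'  -> 116 (Uint8Array) + var-length byte array
readAny(new Decoder(toUint8Array(e))); // => a structurally equal object
```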
2753
+
2754
+ function encodeYMessage(msg) {
2755
+ const encoder = new Encoder();
2756
+ writeVarString(encoder, msg.type);
2757
+ writeVarString(encoder, msg.table);
2758
+ writeVarString(encoder, msg.prop);
2759
+ switch (msg.type) {
2760
+ case 'u-ack':
2761
+ case 'u-reject':
2762
+ writeBigUint64(encoder, BigInt(msg.i));
2763
+ break;
2764
+ default:
2765
+ writeAny(encoder, msg.k);
2766
+ switch (msg.type) {
2767
+ case 'aware':
2768
+ writeVarUint8Array(encoder, msg.u);
2769
+ break;
2770
+ case 'doc-open':
2771
+ writeAny(encoder, msg.serverRev);
2772
+ writeAny(encoder, msg.sv);
2773
+ break;
2774
+ case 'doc-close':
2775
+ break;
2776
+ case 'sv':
2777
+ writeVarUint8Array(encoder, msg.sv);
2778
+ break;
2779
+ case 'u-c':
2780
+ writeVarUint8Array(encoder, msg.u);
2781
+ writeBigUint64(encoder, BigInt(msg.i));
2782
+ break;
2783
+ case 'u-s':
2784
+ writeVarUint8Array(encoder, msg.u);
2785
+ break;
2786
+ }
2787
+ }
2788
+ return toUint8Array(encoder);
2789
+ }
2790
+
2791
+ /**
2792
+ * Error helpers.
2793
+ *
2794
+ * @module error
2795
+ */
2796
+
2797
+ /**
2798
+ * @param {string} s
2799
+ * @return {Error}
2800
+ */
2801
+ /* c8 ignore next */
2802
+ const create = s => new Error(s);
2803
+
2804
+ /**
2805
+ * Efficient schema-less binary decoding with support for variable length encoding.
2806
+ *
2807
+ * Use [lib0/decoding] with [lib0/encoding]. Every encoding function has a corresponding decoding function.
2808
+ *
2809
+ * Encodes numbers in little-endian order (least to most significant byte order)
2810
+ * and is compatible with Golang's binary encoding (https://golang.org/pkg/encoding/binary/)
2811
+ * which is also used in Protocol Buffers.
2812
+ *
2813
+ * ```js
2814
+ * // encoding step
2815
+ * const encoder = encoding.createEncoder()
2816
+ * encoding.writeVarUint(encoder, 256)
2817
+ * encoding.writeVarString(encoder, 'Hello world!')
2818
+ * const buf = encoding.toUint8Array(encoder)
2819
+ * ```
2820
+ *
2821
+ * ```js
2822
+ * // decoding step
2823
+ * const decoder = decoding.createDecoder(buf)
2824
+ * decoding.readVarUint(decoder) // => 256
2825
+ * decoding.readVarString(decoder) // => 'Hello world!'
2826
+ * decoding.hasContent(decoder) // => false - all data is read
2827
+ * ```
2828
+ *
2829
+ * @module decoding
2830
+ */
2831
+
2832
+
2833
+ const errorUnexpectedEndOfArray = create('Unexpected end of array');
2834
+ const errorIntegerOutOfRange = create('Integer out of Range');
2835
+
2836
+ /**
2837
+ * A Decoder handles the decoding of an Uint8Array.
2838
+ */
2839
+ class Decoder {
2840
+ /**
2841
+ * @param {Uint8Array} uint8Array Binary data to decode
2842
+ */
2843
+ constructor (uint8Array) {
2844
+ /**
2845
+ * Decoding target.
2846
+ *
2847
+ * @type {Uint8Array}
2848
+ */
2849
+ this.arr = uint8Array;
2850
+ /**
2851
+ * Current decoding position.
2852
+ *
2853
+ * @type {number}
2854
+ */
2855
+ this.pos = 0;
2856
+ }
2857
+ }
2858
+
2859
+ /**
2860
+ * @function
2861
+ * @param {Decoder} decoder
2862
+ * @return {boolean}
2863
+ */
2864
+ const hasContent = decoder => decoder.pos !== decoder.arr.length;
2865
+
2866
+ /**
2867
+ * Create an Uint8Array view of the next `len` bytes and advance the position by `len`.
2868
+ *
2869
+ * Important: The Uint8Array still points to the underlying ArrayBuffer. Make sure to discard the result as soon as possible to prevent any memory leaks.
2870
+ * Use `buffer.copyUint8Array` to copy the result into a new Uint8Array.
2871
+ *
2872
+ * @function
2873
+ * @param {Decoder} decoder The decoder instance
2874
+ * @param {number} len The length of bytes to read
2875
+ * @return {Uint8Array}
2876
+ */
2877
+ const readUint8Array = (decoder, len) => {
2878
+ const view = new Uint8Array(decoder.arr.buffer, decoder.pos + decoder.arr.byteOffset, len);
2879
+ decoder.pos += len;
2880
+ return view
2881
+ };
2882
+
2883
+ /**
2884
+ * Read variable length Uint8Array.
2885
+ *
2886
+ * Important: The Uint8Array still points to the underlying ArrayBuffer. Make sure to discard the result as soon as possible to prevent any memory leaks.
2887
+ * Use `buffer.copyUint8Array` to copy the result into a new Uint8Array.
2888
+ *
2889
+ * @function
2890
+ * @param {Decoder} decoder
2891
+ * @return {Uint8Array}
2892
+ */
2893
+ const readVarUint8Array = decoder => readUint8Array(decoder, readVarUint(decoder));
2894
+
2895
+ /**
2896
+ * Read one byte as unsigned integer.
2897
+ * @function
2898
+ * @param {Decoder} decoder The decoder instance
2899
+ * @return {number} Unsigned 8-bit integer
2900
+ */
2901
+ const readUint8 = decoder => decoder.arr[decoder.pos++];
2902
+
2903
+ /**
2904
+ * Read unsigned integer (32bit) with variable length.
2905
+ * 1/8th of the storage is used as encoding overhead.
2906
+ * * numbers < 2^7 are stored in one byte
2907
+ * * numbers < 2^14 are stored in two bytes
2908
+ *
2909
+ * @function
2910
+ * @param {Decoder} decoder
2911
+ * @return {number} An unsigned integer
2912
+ */
2913
+ const readVarUint = decoder => {
2914
+ let num = 0;
2915
+ let mult = 1;
2916
+ const len = decoder.arr.length;
2917
+ while (decoder.pos < len) {
2918
+ const r = decoder.arr[decoder.pos++];
2919
+ // num = num | ((r & binary.BITS7) << len)
2920
+ num = num + (r & BITS7) * mult; // shift $r << (7*#iterations) and add it to num
2921
+ mult *= 128; // next iteration, shift 7 "more" to the left
2922
+ if (r < BIT8) {
2923
+ return num
2924
+ }
2925
+ /* c8 ignore start */
2926
+ if (num > MAX_SAFE_INTEGER) {
2927
+ throw errorIntegerOutOfRange
2928
+ }
2929
+ /* c8 ignore stop */
2930
+ }
2931
+ throw errorUnexpectedEndOfArray
2932
+ };
2933
+
2934
+ /**
2935
+ * Read signed integer (32bit) with variable length.
2936
+ * 1/8th of the storage is used as encoding overhead.
2937
+ * * numbers < 2^7 are stored in one byte
2938
+ * * numbers < 2^14 are stored in two bytes
2939
+ * @todo This should probably create the inverse ~num if number is negative - but this would be a breaking change.
2940
+ *
2941
+ * @function
2942
+ * @param {Decoder} decoder
2943
+ * @return {number} A signed integer
2944
+ */
2945
+ const readVarInt = decoder => {
2946
+ let r = decoder.arr[decoder.pos++];
2947
+ let num = r & BITS6;
2948
+ let mult = 64;
2949
+ const sign = (r & BIT7) > 0 ? -1 : 1;
2950
+ if ((r & BIT8) === 0) {
2951
+ // don't continue reading
2952
+ return sign * num
2953
+ }
2954
+ const len = decoder.arr.length;
2955
+ while (decoder.pos < len) {
2956
+ r = decoder.arr[decoder.pos++];
2957
+ // num = num | ((r & binary.BITS7) << len)
2958
+ num = num + (r & BITS7) * mult;
2959
+ mult *= 128;
2960
+ if (r < BIT8) {
2961
+ return sign * num
2962
+ }
2963
+ /* c8 ignore start */
2964
+ if (num > MAX_SAFE_INTEGER) {
2965
+ throw errorIntegerOutOfRange
2966
+ }
2967
+ /* c8 ignore stop */
2968
+ }
2969
+ throw errorUnexpectedEndOfArray
2970
+ };
2971
+
2972
+ /**
2973
+ * We don't test this function anymore as we use native decoding/encoding by default now.
2974
+ * Better not modify this anymore..
2975
+ *
2976
+ * Transforming utf8 to a string is pretty expensive. The code performs 10x better
2977
+ * when String.fromCodePoint is fed with all characters as arguments.
2978
+ * But most environments have a maximum number of arguments per function.
2979
+ * For efficiency reasons we apply a maximum of 10000 characters at once.
2980
+ *
2981
+ * @function
2982
+ * @param {Decoder} decoder
2983
+ * @return {String} The read String.
2984
+ */
2985
+ /* c8 ignore start */
2986
+ const _readVarStringPolyfill = decoder => {
2987
+ let remainingLen = readVarUint(decoder);
2988
+ if (remainingLen === 0) {
2989
+ return ''
2990
+ } else {
2991
+ let encodedString = String.fromCodePoint(readUint8(decoder)); // remember to decrease remainingLen
2992
+ if (--remainingLen < 100) { // do not create a Uint8Array for small strings
2993
+ while (remainingLen--) {
2994
+ encodedString += String.fromCodePoint(readUint8(decoder));
2995
+ }
2996
+ } else {
2997
+ while (remainingLen > 0) {
2998
+ const nextLen = remainingLen < 10000 ? remainingLen : 10000;
2999
+ // this is dangerous, we create a fresh array view from the existing buffer
3000
+ const bytes = decoder.arr.subarray(decoder.pos, decoder.pos + nextLen);
3001
+ decoder.pos += nextLen;
3002
+ // Starting with ES5.1 we can supply a generic array-like object as arguments
3003
+ encodedString += String.fromCodePoint.apply(null, /** @type {any} */ (bytes));
3004
+ remainingLen -= nextLen;
3005
+ }
3006
+ }
3007
+ return decodeURIComponent(escape(encodedString))
3008
+ }
3009
+ };
3010
+ /* c8 ignore stop */
3011
+
3012
+ /**
3013
+ * @function
3014
+ * @param {Decoder} decoder
3015
+ * @return {String} The read String
3016
+ */
3017
+ const _readVarStringNative = decoder =>
3018
+ /** @type any */ (utf8TextDecoder).decode(readVarUint8Array(decoder));
3019
+
3020
+ /**
3021
+ * Read string of variable length
3022
+ * * varUint is used to store the length of the string
3023
+ *
3024
+ * @function
3025
+ * @param {Decoder} decoder
3026
+ * @return {String} The read String
3027
+ *
3028
+ */
3029
+ /* c8 ignore next */
3030
+ const readVarString = utf8TextDecoder ? _readVarStringNative : _readVarStringPolyfill;
3031
+
3032
+ /**
3033
+ * @param {Decoder} decoder
3034
+ * @param {number} len
3035
+ * @return {DataView}
3036
+ */
3037
+ const readFromDataView = (decoder, len) => {
3038
+ const dv = new DataView(decoder.arr.buffer, decoder.arr.byteOffset + decoder.pos, len);
3039
+ decoder.pos += len;
3040
+ return dv
3041
+ };
3042
+
3043
+ /**
3044
+ * @param {Decoder} decoder
3045
+ */
3046
+ const readFloat32 = decoder => readFromDataView(decoder, 4).getFloat32(0, false);
3047
+
3048
+ /**
3049
+ * @param {Decoder} decoder
3050
+ */
3051
+ const readFloat64 = decoder => readFromDataView(decoder, 8).getFloat64(0, false);
3052
+
3053
+ /**
3054
+ * @param {Decoder} decoder
3055
+ */
3056
+ const readBigInt64 = decoder => /** @type {any} */ (readFromDataView(decoder, 8)).getBigInt64(0, false);
3057
+
3058
+ /**
3059
+ * @param {Decoder} decoder
3060
+ */
3061
+ const readBigUint64 = decoder => /** @type {any} */ (readFromDataView(decoder, 8)).getBigUint64(0, false);
3062
+
3063
+ /**
3064
+ * @type {Array<function(Decoder):any>}
3065
+ */
3066
+ const readAnyLookupTable = [
3067
+ decoder => undefined, // CASE 127: undefined
3068
+ decoder => null, // CASE 126: null
3069
+ readVarInt, // CASE 125: integer
3070
+ readFloat32, // CASE 124: float32
3071
+ readFloat64, // CASE 123: float64
3072
+ readBigInt64, // CASE 122: bigint
3073
+ decoder => false, // CASE 121: boolean (false)
3074
+ decoder => true, // CASE 120: boolean (true)
3075
+ readVarString, // CASE 119: string
3076
+ decoder => { // CASE 118: object<string,any>
3077
+ const len = readVarUint(decoder);
3078
+ /**
3079
+ * @type {Object<string,any>}
3080
+ */
3081
+ const obj = {};
3082
+ for (let i = 0; i < len; i++) {
3083
+ const key = readVarString(decoder);
3084
+ obj[key] = readAny(decoder);
3085
+ }
3086
+ return obj
3087
+ },
3088
+ decoder => { // CASE 117: array<any>
3089
+ const len = readVarUint(decoder);
3090
+ const arr = [];
3091
+ for (let i = 0; i < len; i++) {
3092
+ arr.push(readAny(decoder));
3093
+ }
3094
+ return arr
3095
+ },
3096
+ readVarUint8Array // CASE 116: Uint8Array
3097
+ ];
3098
+
3099
+ /**
3100
+ * @param {Decoder} decoder
3101
+ */
3102
+ const readAny = decoder => readAnyLookupTable[127 - readUint8(decoder)](decoder);
3103
+
3104
+ function decodeYMessage(a) {
3105
+ const decoder = new Decoder(a);
3106
+ const type = readVarString(decoder);
3107
+ const table = readVarString(decoder);
3108
+ const prop = readVarString(decoder);
3109
+ switch (type) {
3110
+ case 'u-ack':
3111
+ case 'u-reject':
3112
+ return {
3113
+ type,
3114
+ table,
3115
+ prop,
3116
+ i: Number(readBigUint64(decoder)),
3117
+ };
3118
+ default: {
3119
+ const k = readAny(decoder);
3120
+ switch (type) {
3121
+ case 'in-sync':
3122
+ return { type, table, prop, k };
3123
+ case 'aware':
3124
+ return {
3125
+ type,
3126
+ table,
3127
+ prop,
3128
+ k,
3129
+ u: readVarUint8Array(decoder),
3130
+ };
3131
+ case 'doc-open':
3132
+ return {
3133
+ type,
3134
+ table,
3135
+ prop,
3136
+ k,
3137
+ serverRev: readAny(decoder),
3138
+ sv: readAny(decoder),
3139
+ };
3140
+ case 'doc-close':
3141
+ return { type, table, prop, k };
3142
+ case 'sv':
3143
+ return {
3144
+ type,
3145
+ table,
3146
+ prop,
3147
+ k,
3148
+ sv: readVarUint8Array(decoder),
3149
+ };
3150
+ case 'u-c':
3151
+ return {
3152
+ type,
3153
+ table,
3154
+ prop,
3155
+ k,
3156
+ u: readVarUint8Array(decoder),
3157
+ i: Number(readBigUint64(decoder)),
3158
+ };
3159
+ case 'u-s':
3160
+ return {
3161
+ type,
3162
+ table,
3163
+ prop,
3164
+ k,
3165
+ u: readVarUint8Array(decoder)
3166
+ };
3167
+ default:
3168
+ throw new TypeError(`Unknown message type: ${type}`);
3169
+ }
3170
+ }
3171
+ }
3172
+ }
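The encoder and decoder above mirror each other per message type; a hedged round-trip example for a client-update message follows (all values are made up for illustration):

```js
// Editorial illustration with made-up values: a local update for docs.content, key 'doc1', update id 17.
const msg = { type: 'u-c', table: 'docs', prop: 'content', k: 'doc1', u: new Uint8Array([1, 2, 3]), i: 17 };
const wire = encodeYMessage(msg);  // type/table/prop as var-strings, k via writeAny, u as a byte array, i as BigUint64
decodeYMessage(wire);              // => { type: 'u-c', table: 'docs', prop: 'content', k: 'doc1', u: Uint8Array(3), i: 17 }
```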
3173
+
3174
+ async function asyncIterablePipeline(source, ...stages) {
3175
+ var _a, e_1, _b, _c;
3176
+ // Chain generators by sending outdata from one to another
3177
+ let result = source(); // Start with the source generator
3178
+ for (let i = 0; i < stages.length; i++) {
3179
+ result = stages[i](result); // Pass on the result to next generator
3180
+ }
3181
+ try {
3182
+ // Start running the machine. If the last stage is a sink, it will consume the data and never emit anything
3183
+ // to us here...
3184
+ for (var _d = true, result_1 = __asyncValues(result), result_1_1; result_1_1 = await result_1.next(), _a = result_1_1.done, !_a; _d = true) {
3185
+ _c = result_1_1.value;
3186
+ _d = false;
3187
+ const chunk = _c;
3188
+ }
3189
+ }
3190
+ catch (e_1_1) { e_1 = { error: e_1_1 }; }
3191
+ finally {
3192
+ try {
3193
+ if (!_d && !_a && (_b = result_1.return)) await _b.call(result_1);
3194
+ }
3195
+ finally { if (e_1) throw e_1.error; }
3196
+ }
3197
+ }
3198
+
3199
+ function consumeChunkedBinaryStream(source) {
3200
+ return __asyncGenerator(this, arguments, function* consumeChunkedBinaryStream_1() {
3201
+ var _a, e_1, _b, _c;
3202
+ let state = 0;
3203
+ let sizeBuf = new Uint8Array(4);
3204
+ let sizeBufPos = 0;
3205
+ let bufs = [];
3206
+ let len = 0;
3207
+ try {
3208
+ for (var _d = true, source_1 = __asyncValues(source), source_1_1; source_1_1 = yield __await(source_1.next()), _a = source_1_1.done, !_a; _d = true) {
3209
+ _c = source_1_1.value;
3210
+ _d = false;
3211
+ const chunk = _c;
3212
+ const dw = new DataView(chunk.buffer, chunk.byteOffset, chunk.byteLength);
3213
+ let pos = 0;
3214
+ while (pos < chunk.byteLength) {
3215
+ switch (state) {
3216
+ case 0:
3217
+ // Beginning of a size header
3218
+ if (pos + 4 > chunk.byteLength) {
3219
+ for (const b of chunk.slice(pos)) {
3220
+ if (sizeBufPos === 4)
3221
+ break;
3222
+ sizeBuf[sizeBufPos++] = b;
3223
+ ++pos;
3224
+ }
3225
+ if (sizeBufPos < 4) {
3226
+ // Need more bytes in order to read length.
3227
+ // Will also exit the while loop, because pos is definitely = chunk.byteLength here.
3228
+ break;
3229
+ }
3230
+ }
3231
+ else if (sizeBufPos > 0 && sizeBufPos < 4) {
3232
+ for (const b of chunk.slice(pos, pos + 4 - sizeBufPos)) {
3233
+ sizeBuf[sizeBufPos++] = b;
3234
+ ++pos;
3235
+ }
3236
+ }
3237
+ // Intentional fall-through...
3238
+ case 1:
3239
+ len =
3240
+ sizeBufPos === 4
3241
+ ? new DataView(sizeBuf.buffer, 0, 4).getUint32(0, false)
3242
+ : dw.getUint32(pos, false);
3243
+ if (sizeBufPos)
3244
+ sizeBufPos = 0; // in this case pos is already forwarded
3245
+ else
3246
+ pos += 4; // else pos is not yet forwarded - that's why we do it now
3247
+ // Intentional fall-through...
3248
+ case 2:
3249
+ // Eat the chunk
3250
+ if (pos >= chunk.byteLength) {
3251
+ state = 2;
3252
+ break;
3253
+ }
3254
+ if (pos + len > chunk.byteLength) {
3255
+ bufs.push(chunk.slice(pos));
3256
+ len -= (chunk.byteLength - pos);
3257
+ state = 2;
3258
+ pos = chunk.byteLength; // will break while loop.
3259
+ }
3260
+ else {
3261
+ if (bufs.length > 0) {
3262
+ const concats = new Uint8Array(bufs.reduce((p, c) => p + c.byteLength, len));
3263
+ let p = 0;
3264
+ for (const buf of bufs) {
3265
+ concats.set(buf, p);
3266
+ p += buf.byteLength;
3267
+ }
3268
+ concats.set(chunk.slice(pos, pos + len), p);
3269
+ bufs = [];
3270
+ yield yield __await(concats);
3271
+ }
3272
+ else {
3273
+ yield yield __await(chunk.slice(pos, pos + len));
3274
+ }
3275
+ pos += len;
3276
+ state = 0;
3277
+ }
3278
+ break;
3279
+ }
3280
+ }
3281
+ }
3282
+ }
3283
+ catch (e_1_1) { e_1 = { error: e_1_1 }; }
3284
+ finally {
3285
+ try {
3286
+ if (!_d && !_a && (_b = source_1.return)) yield __await(_b.call(source_1));
3287
+ }
3288
+ finally { if (e_1) throw e_1.error; }
3289
+ }
3290
+ });
3291
+ }
3292
+
3293
+ function getFetchResponseBodyGenerator(res) {
3294
+ return function () {
3295
+ return __asyncGenerator(this, arguments, function* () {
3296
+ if (!res.body)
3297
+ throw new Error("Response body is not readable");
3298
+ const reader = res.body.getReader();
3299
+ try {
3300
+ while (true) {
3301
+ const { done, value } = yield __await(reader.read());
3302
+ if (done)
3303
+ return yield __await(void 0);
3304
+ yield yield __await(value);
3305
+ }
3306
+ }
3307
+ finally {
3308
+ reader.releaseLock();
3309
+ }
3310
+ });
3311
+ };
3312
+ }
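The consumer above expects every message to be framed with a 4-byte big-endian length prefix, and frames may be split across or packed within fetch() chunks. A hedged sketch of the corresponding producer-side framing (an assumption about the server side, not code from this package):

```js
// Editorial sketch (assumption): how a server would frame one message for consumeChunkedBinaryStream().
function frameMessage(payload /* Uint8Array */) {
  const framed = new Uint8Array(4 + payload.byteLength);
  new DataView(framed.buffer).setUint32(0, payload.byteLength, false); // big-endian length prefix
  framed.set(payload, 4);
  return framed;
}
```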
3313
+
2245
3314
  function listSyncifiedChanges(tablesToSyncify, currentUser, schema, alreadySyncedRealms) {
2246
3315
  return __awaiter(this, void 0, void 0, function* () {
2247
3316
  const txid = `upload-${randomString$1(8)}`;
@@ -3308,7 +4377,7 @@ function updateSyncRateLimitDelays(db, res) {
3308
4377
  }
3309
4378
 
3310
4379
  //import {BisonWebStreamReader} from "dreambase-library/dist/typeson-simplified/BisonWebStreamReader";
3311
- function syncWithServer(changes, syncState, baseRevs, db, databaseUrl, schema, clientIdentity, currentUser) {
4380
+ function syncWithServer(changes, y, syncState, baseRevs, db, databaseUrl, schema, clientIdentity, currentUser) {
3312
4381
  return __awaiter(this, void 0, void 0, function* () {
3313
4382
  //
3314
4383
  // Push changes to server using fetch
@@ -3346,6 +4415,7 @@ function syncWithServer(changes, syncState, baseRevs, db, databaseUrl, schema, c
3346
4415
  : undefined,
3347
4416
  baseRevs,
3348
4417
  changes: encodeIdsForServer(db.dx.core.schema, currentUser, changes),
4418
+ y,
3349
4419
  };
3350
4420
  console.debug('Sync request', syncRequest);
3351
4421
  db.syncStateChangedEvent.next({
@@ -3559,6 +4629,375 @@ function applyServerChanges(changes, db) {
3559
4629
  });
3560
4630
  }
3561
4631
 
4632
+ const DEXIE_CLOUD_SYNCER_ID = 'dexie-cloud-syncer';
4633
+
4634
+ function listUpdatesSince(yTable, sinceIncluding) {
4635
+ return yTable
4636
+ .where('i')
4637
+ .between(sinceIncluding, Infinity, true)
4638
+ .toArray();
4639
+ }
4640
+
4641
+ function $Y(db) {
4642
+ const $Y = db.dx._options.Y;
4643
+ if (!$Y)
4644
+ throw new Error('Y library not supplied to Dexie constructor');
4645
+ return $Y;
4646
+ }
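$Y() reads the Yjs module from the options given to the Dexie constructor. A minimal sketch of that wiring (editorial; it assumes the standard dexie-cloud setup plus the new Y option implied by the error message above):

```js
// Editorial sketch of the wiring $Y() relies on (it reads db.dx._options.Y above).
import Dexie from 'dexie';
import dexieCloud from 'dexie-cloud-addon';
import * as Y from 'yjs';

// Without the Y option, $Y(db) throws 'Y library not supplied to Dexie constructor'.
const db = new Dexie('mydb', { addons: [dexieCloud], Y });
```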
4647
+
4648
+ /** Queries the local database for YMessages to send to server.
4649
+ *
4650
+ * There are 2 messages that this function can provide:
4651
+ * YUpdateFromClientRequest ( for local updates )
4652
+ * YStateVector ( for state vector of foreign updates so that server can reduce the number of updates to send back )
4653
+ *
4654
+ * Notice that we do not do a step 1 sync phase here to get a state vector from the server. The reason we can avoid
4655
+ * the 2-step sync is that we are client-server and not client-client here and we keep track of the client changes
4656
+ * sent to server by letting server acknowledge them. There is always a chance that some client update has already
4657
+ * been sent and that the client failed to receive the ack. However, if this happens it does not matter - the change
4658
+ * would be sent again and Yjs handles duplicate changes anyway. And since it is rare, we save the cost of roundtrips by
4659
+ * avoiding the step1 sync, instead keeping track of this in the `unsentFrom` property of the SyncState.
4660
+ *
4661
+ * @param db
4662
+ * @returns
4663
+ */
4664
+ function listYClientMessagesAndStateVector(db, tablesToSync) {
4665
+ return __awaiter(this, void 0, void 0, function* () {
4666
+ const result = [];
4667
+ const lastUpdateIds = {};
4668
+ for (const table of tablesToSync) {
4669
+ if (table.schema.yProps) {
4670
+ for (const yProp of table.schema.yProps) {
4671
+ const Y = $Y(db); // This is how we retrieve the user-provided Y library
4672
+ const yTable = db.table(yProp.updatesTable); // the updates-table for this combo of table+propName
4673
+ const syncState = (yield yTable.get(DEXIE_CLOUD_SYNCER_ID));
4674
+ // unsentFrom = the `i` value of updates that aren't yet sent to server (or at least not acked by the server yet)
4675
+ const unsentFrom = (syncState === null || syncState === void 0 ? void 0 : syncState.unsentFrom) || 1;
4676
+ // receivedUntil = the `i` value of updates that both we and the server know we already have (we know it from the outcome of the last syncWithServer(), because the server keeps track of its revision numbers)
4677
+ const receivedUntil = (syncState === null || syncState === void 0 ? void 0 : syncState.receivedUntil) || 0;
4678
+ // Compute the least value of these two (but since receivedUntil is inclusive we need to add +1 to it)
4679
+ const unsyncedFrom = Math.min(unsentFrom, receivedUntil + 1);
4680
+ // Query all these updates for all docs of this table+prop combination
4681
+ const updates = yield listUpdatesSince(yTable, unsyncedFrom);
4682
+ if (updates.length > 0)
4683
+ lastUpdateIds[yTable.name] = updates[updates.length - 1].i;
4684
+ // Now sort them by document and whether they are local or not + ignore local updates already sent:
4685
+ const perDoc = {};
4686
+ for (const update of updates) {
4687
+ // Sort updates into buckets of the doc primary key + the flag (whether it's local or foreign)
4688
+ const isLocal = ((update.f || 0) & 0x01) === 0x01;
4689
+ if (isLocal && update.i < unsentFrom)
4690
+ continue; // This local update has already been sent and acked.
4691
+ const docKey = JSON.stringify(update.k) + '/' + isLocal;
4692
+ let entry = perDoc[docKey];
4693
+ if (!entry) {
4694
+ perDoc[docKey] = entry = {
4695
+ i: update.i,
4696
+ k: update.k,
4697
+ isLocal,
4698
+ u: [],
4699
+ };
4700
+ entry.u.push(update.u);
4701
+ }
4702
+ else {
4703
+ entry.u.push(update.u);
4704
+ entry.i = Math.max(update.i, entry.i);
4705
+ }
4706
+ }
4707
+ // Now, go through all these and:
4708
+ // * For local updates, compute a merged update per document.
4709
+ // * For foreign updates, compute a state vector to pass to server, so that server can
4710
+ // avoid re-sending updates that we already have (they might have been sent over websocket
4711
+ // and when that happens, we do not mark them in any way nor do we update receivedUntil -
4712
+ // we only update receivedUntil after a "full sync" (syncWithServer()))
4713
+ for (const { k, isLocal, u, i } of Object.values(perDoc)) {
4714
+ const mergedUpdate = u.length === 1 ? u[0] : Y.mergeUpdatesV2(u);
4715
+ if (isLocal) {
4716
+ result.push({
4717
+ type: 'u-c',
4718
+ table: table.name,
4719
+ prop: yProp.prop,
4720
+ k,
4721
+ u: mergedUpdate,
4722
+ i,
4723
+ });
4724
+ }
4725
+ else {
4726
+ const stateVector = Y.encodeStateVectorFromUpdateV2(mergedUpdate);
4727
+ result.push({
4728
+ type: 'sv',
4729
+ table: table.name,
4730
+ prop: yProp.prop,
4731
+ k,
4732
+ sv: stateVector,
4733
+ });
4734
+ }
4735
+ }
4736
+ }
4737
+ }
4738
+ }
4739
+ return {
4740
+ yMessages: result,
4741
+ lastUpdateIds
4742
+ };
4743
+ });
4744
+ }
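A small worked illustration of the bookkeeping described in the comment above (numbers are made up):

```js
// Editorial illustration (made-up numbers) of how the query range is derived:
// unsentFrom    = 5  -> local updates with i >= 5 have not been acked by the server yet
// receivedUntil = 7  -> the server already knows we hold foreign updates up to i = 7
// unsyncedFrom  = Math.min(5, 7 + 1) = 5
// -> list updates with i >= 5; local ones below unsentFrom are skipped, foreign ones are only
//    summarized as an 'sv' (state vector) message so the server can avoid re-sending them.
```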
4745
+
4746
+ function getUpdatesTable(db, table, ydocProp) {
4747
+ var _a, _b, _c;
4748
+ const utbl = (_c = (_b = (_a = db.table(table)) === null || _a === void 0 ? void 0 : _a.schema.yProps) === null || _b === void 0 ? void 0 : _b.find(p => p.prop === ydocProp)) === null || _c === void 0 ? void 0 : _c.updatesTable;
4749
+ if (!utbl)
4750
+ throw new Error(`No updatesTable found for ${table}.${ydocProp}`);
4751
+ return db.table(utbl);
4752
+ }
4753
+
4754
+ function applyYServerMessages(yMessages, db) {
4755
+ var _a;
4756
+ return __awaiter(this, void 0, void 0, function* () {
4757
+ const result = {};
4758
+ for (const m of yMessages) {
4759
+ switch (m.type) {
4760
+ case 'u-s': {
4761
+ const utbl = getUpdatesTable(db, m.table, m.prop);
4762
+ result[utbl.name] = yield utbl.add({
4763
+ k: m.k,
4764
+ u: m.u,
4765
+ });
4766
+ break;
4767
+ }
4768
+ case 'u-ack': {
4769
+ const utbl = getUpdatesTable(db, m.table, m.prop);
4770
+ yield db.transaction('rw', utbl, (tx) => __awaiter(this, void 0, void 0, function* () {
4771
+ let syncer = (yield tx
4772
+ .table(utbl.name)
4773
+ .get(DEXIE_CLOUD_SYNCER_ID));
4774
+ yield tx.table(utbl.name).put(Object.assign(Object.assign({}, (syncer || { i: DEXIE_CLOUD_SYNCER_ID })), { unsentFrom: Math.max((syncer === null || syncer === void 0 ? void 0 : syncer.unsentFrom) || 1, m.i + 1) }));
4775
+ }));
4776
+ break;
4777
+ }
4778
+ case 'u-reject': {
4779
+ // Access control or a constraint rejected the update.
4780
+ // We delete it. It's not going to be sent again.
4781
+ // What's missing is a way to notify consumers, such as Tiptap editor, that the update was rejected.
4782
+ // This is only an issue when the document is open. We could find the open document and
4783
+ // in a perfect world, we should send a reverse update to the open document to undo the change.
4784
+ // See my question in https://discuss.yjs.dev/t/generate-an-inverse-update/2765
4785
+ console.debug(`Y update rejected. Deleting it.`);
4786
+ const utbl = getUpdatesTable(db, m.table, m.prop);
4787
+ // Delete the rejected update and all local updates since (avoid holes in the CRDT)
4788
+ // and destroy its open document if there is one.
4789
+ const primaryKey = (_a = (yield utbl.get(m.i))) === null || _a === void 0 ? void 0 : _a.k;
4790
+ if (primaryKey != null) {
4791
+ yield db.transaction('rw', utbl, tx => {
4792
+ // @ts-ignore
4793
+ tx.idbtrans._rejecting_y_ypdate = true; // Inform ydoc triggers that we delete because of a rejection and not GC
4794
+ return utbl
4795
+ .where('i')
4796
+ .aboveOrEqual(m.i)
4797
+ .filter(u => cmp(u.k, primaryKey) === 0 && ((u.f || 0) & 1) === 1)
4798
+ .delete();
4799
+ });
4800
+ // Destroy active doc
4801
+ const activeDoc = DexieYProvider.getDocCache(db.dx).find(m.table, primaryKey, m.prop);
4802
+ if (activeDoc)
4803
+ activeDoc.destroy(); // Destroy the document so that editors don't continue to work on it
4804
+ }
4805
+ break;
4806
+ }
4807
+ case 'in-sync': {
4808
+ const doc = DexieYProvider.getDocCache(db.dx).find(m.table, m.k, m.prop);
4809
+ if (doc && !doc.isSynced) {
4810
+ doc.emit('sync', [true]);
4811
+ }
4812
+ break;
4813
+ }
4814
+ }
4815
+ }
4816
+ return result;
4817
+ });
4818
+ }
4819
+
4820
+ function updateYSyncStates(lastUpdateIdsBeforeSync, receivedUntilsAfterSync, db, serverRevision) {
4821
+ var _a, _b, _c, _d, _e;
4822
+ return __awaiter(this, void 0, void 0, function* () {
4823
+ // We want to update unsentFrom for each yTable to the value specified in first argument
4824
+ // because we got those values before we synced with server and here we are back from server
4825
+ // that has successfully received all those messages - no matter if the last update was a client or server update,
4826
+ // we can safely store unsentFrom to a value of the last update + 1 here.
4827
+ // We also want to update receivedUntil for each yTable to the value specified in the second argument,
4828
+ // because that contains the highest resulting id of the updates from the server after storing them.
4829
+ // We could do these two tasks separately, but that would require two update calls on the same YSyncState, so
4830
+ // to optimize the dexie calls, we merge these two maps into a single one so we can do a single update request
4831
+ // per yTable.
4832
+ const mergedSpec = {};
4833
+ for (const [yTable, lastUpdateId] of Object.entries(lastUpdateIdsBeforeSync)) {
4834
+ (_a = mergedSpec[yTable]) !== null && _a !== void 0 ? _a : (mergedSpec[yTable] = {});
4835
+ mergedSpec[yTable].unsentFrom = lastUpdateId + 1;
4836
+ }
4837
+ for (const [yTable, lastUpdateId] of Object.entries(receivedUntilsAfterSync)) {
4838
+ (_b = mergedSpec[yTable]) !== null && _b !== void 0 ? _b : (mergedSpec[yTable] = {});
4839
+ mergedSpec[yTable].receivedUntil = lastUpdateId;
4840
+ }
4841
+ // Now go through all yTables and update their YSyncStates:
4842
+ const allYTables = Object.values(db.dx._dbSchema)
4843
+ .filter((tblSchema) => tblSchema.yProps)
4844
+ .map((tblSchema) => tblSchema.yProps.map((yProp) => yProp.updatesTable))
4845
+ .flat();
4846
+ for (const yTable of allYTables) {
4847
+ const mergedEntry = mergedSpec[yTable];
4848
+ const unsentFrom = (_c = mergedEntry === null || mergedEntry === void 0 ? void 0 : mergedEntry.unsentFrom) !== null && _c !== void 0 ? _c : 1;
4849
+ const receivedUntil = (_e = (_d = mergedEntry === null || mergedEntry === void 0 ? void 0 : mergedEntry.receivedUntil) !== null && _d !== void 0 ? _d :
4850
+ // from local because we are in the same parent transaction (in sync.ts) that
4851
+ // applied all updates from the server
4852
+ (yield db
4853
+ .table(yTable)
4854
+ .where('i')
4855
+ .between(1, Infinity) // Because i might be string DEXIE_CLOUD_SYNCER_ID if not a number.
4856
+ .reverse()
4857
+ .limit(1)
4858
+ .primaryKeys())[0]) !== null && _e !== void 0 ? _e : 0;
4859
+ // We're already in a transaction, but for the sake of
4860
+ // code readability and correctness, let's launch an atomic sub transaction:
4861
+ yield db.transaction('rw', yTable, () => __awaiter(this, void 0, void 0, function* () {
4862
+ const state = yield db
4863
+ .table(yTable)
4864
+ .get(DEXIE_CLOUD_SYNCER_ID);
4865
+ if (!state) {
4866
+ yield db.table(yTable).add({
4867
+ i: DEXIE_CLOUD_SYNCER_ID,
4868
+ unsentFrom,
4869
+ receivedUntil,
4870
+ serverRev: serverRevision,
4871
+ });
4872
+ }
4873
+ else {
4874
+ state.unsentFrom = Math.max(unsentFrom, state.unsentFrom || 1);
4875
+ state.receivedUntil = Math.max(receivedUntil, state.receivedUntil || 0);
4876
+ state.serverRev = serverRevision;
4877
+ yield db.table(yTable).put(state);
4878
+ }
4879
+ }));
4880
+ }
4881
+ });
4882
+ }
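A small illustration of the merge this function performs before touching the database (the table name and ids are made up):

```js
// Editorial illustration (made-up table name and ids):
// lastUpdateIdsBeforeSync = { docs_content_updates: 12 }  -> mergedSpec[...].unsentFrom    = 13
// receivedUntilsAfterSync = { docs_content_updates: 45 }  -> mergedSpec[...].receivedUntil = 45
// mergedSpec = { docs_content_updates: { unsentFrom: 13, receivedUntil: 45 } }
// so each updates-table needs only one write to its DEXIE_CLOUD_SYNCER_ID row.
```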
4883
+
4884
+ const BINSTREAM_TYPE_REALMID = 1;
4885
+ const BINSTREAM_TYPE_TABLE_AND_PROP = 2;
4886
+ const BINSTREAM_TYPE_DOCUMENT = 3;
4887
+ function downloadYDocsFromServer(db, databaseUrl, { yDownloadedRealms, realms }) {
4888
+ return __awaiter(this, void 0, void 0, function* () {
4889
+ if (yDownloadedRealms &&
4890
+ realms &&
4891
+ realms.every((realmId) => yDownloadedRealms[realmId] === '*')) {
4892
+ return; // Already done!
4893
+ }
4894
+ console.debug('Downloading Y.Docs from added realms');
4895
+ const user = yield loadAccessToken(db);
4896
+ const headers = {
4897
+ 'Content-Type': 'application/json',
4898
+ Accept: 'application/octet-stream',
4899
+ };
4900
+ if (user) {
4901
+ headers.Authorization = `Bearer ${user.accessToken}`;
4902
+ }
4903
+ const res = yield fetch(`${databaseUrl}/y/download`, {
4904
+ body: TSON.stringify({ downloadedRealms: yDownloadedRealms || {} }),
4905
+ method: 'POST',
4906
+ headers,
4907
+ credentials: 'include',
4908
+ });
4909
+ if (!res.ok) {
4910
+ throw new Error(`Failed to download Yjs documents from server. Status: ${res.status}`);
4911
+ }
4912
+ yield asyncIterablePipeline(getFetchResponseBodyGenerator(res), consumeChunkedBinaryStream, consumeDownloadChunks);
4913
+ function consumeDownloadChunks(chunks) {
4914
+ return __asyncGenerator(this, arguments, function* consumeDownloadChunks_1() {
4915
+ var _a, e_1, _b, _c;
4916
+ let currentRealmId = null;
4917
+ let currentTable = null;
4918
+ let currentProp = null;
4919
+ let docsToInsert = [];
4920
+ function storeCollectedDocs(completedRealm) {
4921
+ return __awaiter(this, void 0, void 0, function* () {
4922
+ const lastDoc = docsToInsert[docsToInsert.length - 1];
4923
+ if (docsToInsert.length > 0) {
4924
+ if (!currentRealmId || !currentTable || !currentProp) {
4925
+ throw new Error(`Protocol error from ${databaseUrl}/y/download`);
4926
+ }
4927
+ const yTable = getUpdatesTable(db, currentTable, currentProp);
4928
+ yield yTable.bulkAdd(docsToInsert);
4929
+ docsToInsert = [];
4930
+ }
4931
+ if (currentRealmId &&
4932
+ ((currentTable && currentProp && lastDoc) || completedRealm)) {
4933
+ yield db.$syncState.update('syncState', (syncState) => {
4934
+ const yDownloadedRealms = syncState.yDownloadedRealms || {};
4935
+ yDownloadedRealms[currentRealmId] = completedRealm
4936
+ ? '*'
4937
+ : {
4938
+ tbl: currentTable,
4939
+ prop: currentProp,
4940
+ key: lastDoc.k,
4941
+ };
4942
+ syncState.yDownloadedRealms = yDownloadedRealms;
4943
+ });
4944
+ }
4945
+ });
4946
+ }
4947
+ try {
4948
+ try {
4949
+ for (var _d = true, chunks_1 = __asyncValues(chunks), chunks_1_1; chunks_1_1 = yield __await(chunks_1.next()), _a = chunks_1_1.done, !_a; _d = true) {
4950
+ _c = chunks_1_1.value;
4951
+ _d = false;
4952
+ const chunk = _c;
4953
+ const decoder = new Decoder(chunk);
4954
+ while (hasContent(decoder)) {
4955
+ switch (readUint8(decoder)) {
4956
+ case BINSTREAM_TYPE_REALMID:
4957
+ yield __await(storeCollectedDocs(true));
4958
+ currentRealmId = readVarString(decoder);
4959
+ break;
4960
+ case BINSTREAM_TYPE_TABLE_AND_PROP:
4961
+ yield __await(storeCollectedDocs(false)); // still on same realm
4962
+ currentTable = readVarString(decoder);
4963
+ currentProp = readVarString(decoder);
4964
+ break;
4965
+ case BINSTREAM_TYPE_DOCUMENT: {
4966
+ const k = readAny(decoder);
4967
+ const u = readVarUint8Array(decoder);
4968
+ docsToInsert.push({
4969
+ k,
4970
+ u,
4971
+ });
4972
+ break;
4973
+ }
4974
+ }
4975
+ }
4976
+ yield __await(storeCollectedDocs(false)); // Chunk full - might still be on same realm
4977
+ }
4978
+ }
4979
+ catch (e_1_1) { e_1 = { error: e_1_1 }; }
4980
+ finally {
4981
+ try {
4982
+ if (!_d && !_a && (_b = chunks_1.return)) yield __await(_b.call(chunks_1));
4983
+ }
4984
+ finally { if (e_1) throw e_1.error; }
4985
+ }
4986
+ yield __await(storeCollectedDocs(true)); // Everything downloaded - finalize last downloaded realm to "*"
4987
+ }
4988
+ catch (error) {
4989
+ if (!(error instanceof Dexie.DexieError)) {
4990
+ // Network error might have happened.
4991
+ // Store what we've collected so far:
4992
+ yield __await(storeCollectedDocs(false));
4993
+ }
4994
+ throw error;
4995
+ }
4996
+ });
4997
+ }
4998
+ });
4999
+ }
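For orientation, a sketch of the record layout that consumeDownloadChunks() parses, inferred from the reader above (each record sits inside a length-prefixed chunk):

```js
// Editorial sketch, inferred from the parser above (not a spec shipped with the package):
//   0x01 BINSTREAM_TYPE_REALMID        -> varString realmId
//   0x02 BINSTREAM_TYPE_TABLE_AND_PROP -> varString table, varString prop
//   0x03 BINSTREAM_TYPE_DOCUMENT       -> readAny() primary key, varUint8Array Yjs update (repeated per doc)
// Progress is persisted in syncState.yDownloadedRealms: '*' for a completed realm,
// or { tbl, prop, key } for partial progress that a later download can resume from.
```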
5000
+
3562
5001
  const CURRENT_SYNC_WORKER = 'currentSyncWorker';
3563
5002
  function sync(db, options, schema, syncOptions) {
3564
5003
  return _sync
@@ -3647,10 +5086,11 @@ function _sync(db, options, schema, { isInitialSync, cancelToken, justCheckIfNee
3647
5086
  //
3648
5087
  // List changes to sync
3649
5088
  //
3650
- const [clientChangeSet, syncState, baseRevs] = yield db.transaction('r', db.tables, () => __awaiter(this, void 0, void 0, function* () {
5089
+ const [clientChangeSet, syncState, baseRevs, { yMessages, lastUpdateIds }] = yield db.transaction('r', db.tables, () => __awaiter(this, void 0, void 0, function* () {
3651
5090
  const syncState = yield db.getPersistedSyncState();
3652
5091
  const baseRevs = yield db.$baseRevs.toArray();
3653
5092
  let clientChanges = yield listClientChanges(mutationTables);
5093
+ const yResults = yield listYClientMessagesAndStateVector(db, tablesToSync);
3654
5094
  throwIfCancelled(cancelToken);
3655
5095
  if (doSyncify) {
3656
5096
  const alreadySyncedRealms = [
@@ -3660,11 +5100,11 @@ function _sync(db, options, schema, { isInitialSync, cancelToken, justCheckIfNee
3660
5100
  const syncificationInserts = yield listSyncifiedChanges(tablesToSyncify, currentUser, schema, alreadySyncedRealms);
3661
5101
  throwIfCancelled(cancelToken);
3662
5102
  clientChanges = clientChanges.concat(syncificationInserts);
3663
- return [clientChanges, syncState, baseRevs];
5103
+ return [clientChanges, syncState, baseRevs, yResults];
3664
5104
  }
3665
- return [clientChanges, syncState, baseRevs];
5105
+ return [clientChanges, syncState, baseRevs, yResults];
3666
5106
  }));
3667
- const pushSyncIsNeeded = clientChangeSet.some((set) => set.muts.some((mut) => mut.keys.length > 0));
5107
+ const pushSyncIsNeeded = clientChangeSet.some((set) => set.muts.some((mut) => mut.keys.length > 0)) || yMessages.some(m => m.type === 'u-c');
3668
5108
  if (justCheckIfNeeded) {
3669
5109
  console.debug('Sync is needed:', pushSyncIsNeeded);
3670
5110
  return pushSyncIsNeeded;
@@ -3679,12 +5119,12 @@ function _sync(db, options, schema, { isInitialSync, cancelToken, justCheckIfNee
3679
5119
  // Push changes to server
3680
5120
  //
3681
5121
  throwIfCancelled(cancelToken);
3682
- const res = yield syncWithServer(clientChangeSet, syncState, baseRevs, db, databaseUrl, schema, clientIdentity, currentUser);
5122
+ const res = yield syncWithServer(clientChangeSet, yMessages, syncState, baseRevs, db, databaseUrl, schema, clientIdentity, currentUser);
3683
5123
  console.debug('Sync response', res);
3684
5124
  //
3685
5125
  // Apply changes locally and clear old change entries:
3686
5126
  //
3687
- const done = yield db.transaction('rw', db.tables, (tx) => __awaiter(this, void 0, void 0, function* () {
5127
+ const { done, newSyncState } = yield db.transaction('rw', db.tables, (tx) => __awaiter(this, void 0, void 0, function* () {
3688
5128
  // @ts-ignore
3689
5129
  tx.idbtrans.disableChangeTracking = true;
3690
5130
  // @ts-ignore
@@ -3776,17 +5216,35 @@ function _sync(db, options, schema, { isInitialSync, cancelToken, justCheckIfNee
3776
5216
  // apply server changes
3777
5217
  //
3778
5218
  yield applyServerChanges(filteredChanges, db);
5219
+ if (res.yMessages) {
5220
+ //
5221
+ // apply yMessages
5222
+ //
5223
+ const receivedUntils = yield applyYServerMessages(res.yMessages, db);
5224
+ //
5225
+ // update Y SyncStates
5226
+ //
5227
+ yield updateYSyncStates(lastUpdateIds, receivedUntils, db, res.serverRevision);
5228
+ }
3779
5229
  //
3780
- // Update syncState
5230
+ // Update regular syncState
3781
5231
  //
3782
5232
  db.$syncState.put(newSyncState, 'syncState');
3783
- return addedClientChanges.length === 0;
5233
+ return {
5234
+ done: addedClientChanges.length === 0,
5235
+ newSyncState
5236
+ };
3784
5237
  }));
3785
5238
  if (!done) {
3786
5239
  console.debug('MORE SYNC NEEDED. Go for it again!');
3787
5240
  yield checkSyncRateLimitDelay(db);
3788
5241
  return yield _sync(db, options, schema, { isInitialSync, cancelToken });
3789
5242
  }
5243
+ const usingYProps = Object.values(schema).some(tbl => { var _a; return (_a = tbl.yProps) === null || _a === void 0 ? void 0 : _a.length; });
5244
+ const serverSupportsYprops = !!res.yMessages;
5245
+ if (usingYProps && serverSupportsYprops) {
5246
+ yield downloadYDocsFromServer(db, databaseUrl, newSyncState);
5247
+ }
3790
5248
  console.debug('SYNC DONE', { isInitialSync });
3791
5249
  db.syncCompleteEvent.next();
3792
5250
  return false; // Not needed anymore
@@ -3839,6 +5297,18 @@ function deleteObjectsFromRemovedRealms(db, res, prevState) {
3839
5297
  }
3840
5298
  }
3841
5299
  }
5300
+ if (rejectedRealms.size > 0) {
5301
+ // Remove rejected/deleted realms from yDownloadedRealms because of the following use case:
5302
+ // 1. User is added to the realm
5303
+ // 2. User syncs and all documents of the realm are downloaded (downloadYDocsFromServer.ts)
5304
+ // 3. User leaves the realm and all of its docs are deleted locally (via the built-in trigger in this file that deletes their rows)
5305
+ // 4. User is yet again added to the realm. At this point, we must make sure the docs are not considered already downloaded.
5306
+ const updateSpec = {};
5307
+ for (const realmId of rejectedRealms) {
5308
+ updateSpec[`yDownloadedRealms.${realmId}`] = undefined; // Setting to undefined will delete the property
5309
+ }
5310
+ yield db.$syncState.update('syncState', updateSpec);
5311
+ }
3842
5312
  });
3843
5313
  }
3844
5314
  function filterServerChangesThroughAddedClientChanges(serverChanges, addedClientChanges) {
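
Both the download routine earlier and this realm-rejection handling revolve around the yDownloadedRealms bookmark kept in the 'syncState' row. A rough shape, inferred only from the code in this diff (the package's own PersistedSyncState typing is authoritative):

    // Inferred sketch, not copied from the package:
    type YDownloadedRealms = {
      [realmId: string]:
        | '*'                                        // every Y document of the realm is fully downloaded
        | { tbl: string; prop: string; key: any };   // resume point: last document stored for this table/prop
    };

Removing a realm's entry (by setting the keypath yDownloadedRealms.<realmId> to undefined, which makes Dexie delete that property) forces a full re-download the next time the user regains access to that realm.
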
@@ -4148,6 +5618,7 @@ function DexieCloudDB(dx) {
4148
5618
  };
4149
5619
  Object.assign(db, helperMethods);
4150
5620
  db.messageConsumer = MessagesFromServerConsumer(db);
5621
+ db.messageProducer = new Subject();
4151
5622
  wm$1.set(dx.cloud, db);
4152
5623
  }
4153
5624
  return db;
@@ -4483,24 +5954,6 @@ const safariVersion = isSafari
4483
5954
  const DISABLE_SERVICEWORKER_STRATEGY = (isSafari && safariVersion <= 605) || // Disable for Safari for now.
4484
5955
  isFirefox; // Disable for Firefox for now. Seems to have a bug in reading CryptoKeys from IDB from service workers
4485
5956
 
4486
- /* Helper function to subscribe to database close no matter if it was unexpectedly closed or manually using db.close()
4487
- */
4488
- function dbOnClosed(db, handler) {
4489
- db.on.close.subscribe(handler);
4490
- // @ts-ignore
4491
- const origClose = db._close;
4492
- // @ts-ignore
4493
- db._close = function () {
4494
- origClose.call(this);
4495
- handler();
4496
- };
4497
- return () => {
4498
- db.on.close.unsubscribe(handler);
4499
- // @ts-ignore
4500
- db._close = origClose;
4501
- };
4502
- }
4503
-
4504
5957
  const IS_SERVICE_WORKER = typeof self !== "undefined" && "clients" in self && !self.document;
4505
5958
 
4506
5959
  function throwVersionIncrementNeeded() {
@@ -4966,13 +6419,18 @@ function createMutationTrackingMiddleware({ currentUserObservable, db, }) {
4966
6419
  values = values.filter((_, idx) => !failures[idx]);
4967
6420
  }
4968
6421
  const ts = Date.now();
6422
+ // Canonicalize req.criteria.index to null if it's on the primary key.
6423
+ const criteria = 'criteria' in req && req.criteria
6424
+ ? Object.assign(Object.assign({}, req.criteria), { index: req.criteria.index === schema.primaryKey.keyPath // Use null to inform server that criteria is on primary key
6425
+ ? null // This will disable the server from trying to log consistent operations where it shouldn't.
6426
+ : req.criteria.index }) : undefined;
4969
6427
  const mut = req.type === 'delete'
4970
6428
  ? {
4971
6429
  type: 'delete',
4972
6430
  ts,
4973
6431
  opNo,
4974
6432
  keys,
4975
- criteria: req.criteria,
6433
+ criteria,
4976
6434
  txid,
4977
6435
  userId,
4978
6436
  }
@@ -4986,14 +6444,14 @@ function createMutationTrackingMiddleware({ currentUserObservable, db, }) {
4986
6444
  userId,
4987
6445
  values,
4988
6446
  }
4989
- : req.criteria && req.changeSpec
6447
+ : criteria && req.changeSpec
4990
6448
  ? {
4991
6449
  // Common changeSpec for all keys
4992
6450
  type: 'modify',
4993
6451
  ts,
4994
6452
  opNo,
4995
6453
  keys,
4996
- criteria: req.criteria,
6454
+ criteria,
4997
6455
  changeSpec: req.changeSpec,
4998
6456
  txid,
4999
6457
  userId,
@@ -5021,7 +6479,7 @@ function createMutationTrackingMiddleware({ currentUserObservable, db, }) {
5021
6479
  if ('isAdditionalChunk' in req && req.isAdditionalChunk) {
5022
6480
  mut.isAdditionalChunk = true;
5023
6481
  }
5024
- return keys.length > 0 || ('criteria' in req && req.criteria)
6482
+ return keys.length > 0 || criteria
5025
6483
  ? mutsTable
5026
6484
  .mutate({ type: 'add', trans, values: [mut] }) // Log entry
5027
6485
  .then(() => res) // Return original response
@@ -5035,6 +6493,7 @@ function createMutationTrackingMiddleware({ currentUserObservable, db, }) {
5035
6493
 
5036
6494
  function overrideParseStoresSpec(origFunc, dexie) {
5037
6495
  return function (stores, dbSchema) {
6496
+ var _a;
5038
6497
  const storesClone = Object.assign(Object.assign({}, DEXIE_CLOUD_SCHEMA), stores);
5039
6498
  // Merge indexes of DEXIE_CLOUD_SCHEMA with stores
5040
6499
  Object.keys(DEXIE_CLOUD_SCHEMA).forEach((tableName) => {
@@ -5095,6 +6554,14 @@ function overrideParseStoresSpec(origFunc, dexie) {
5095
6554
  }
5096
6555
  });
5097
6556
  const rv = origFunc.call(this, storesClone, dbSchema);
6557
+ for (const [tableName, spec] of Object.entries(dbSchema)) {
6558
+ if ((_a = spec.yProps) === null || _a === void 0 ? void 0 : _a.length) {
6559
+ const cloudTableSchema = cloudSchema[tableName];
6560
+ if (cloudTableSchema) {
6561
+ cloudTableSchema.yProps = spec.yProps.map((yProp) => yProp.prop);
6562
+ }
6563
+ }
6564
+ }
5098
6565
  return rv;
5099
6566
  };
5100
6567
  }
@@ -5180,31 +6647,90 @@ class TokenExpiredError extends Error {
5180
6647
  }
5181
6648
  }
5182
6649
 
6650
+ function createYClientUpdateObservable(db) {
6651
+ const yTableRecords = flatten(db.tables
6652
+ .filter((table) => { var _a, _b; return ((_b = (_a = db.cloud.schema) === null || _a === void 0 ? void 0 : _a[table.name]) === null || _b === void 0 ? void 0 : _b.markedForSync) && table.schema.yProps; })
6653
+ .map((table) => table.schema.yProps.map((p) => ({
6654
+ table: table.name,
6655
+ ydocProp: p.prop,
6656
+ updatesTable: p.updatesTable,
6657
+ }))));
6658
+ return merge(...yTableRecords.map(({ table, ydocProp, updatesTable }) => {
6659
+ // Per updates table (table+prop combo), we first read syncer.unsentFrom,
6660
+ // and then start listening for updates since that number.
6661
+ const yTbl = db.table(updatesTable);
6662
+ return from$1(yTbl.get(DEXIE_CLOUD_SYNCER_ID)).pipe(switchMap$1((syncer) => {
6663
+ let currentUnsentFrom = (syncer === null || syncer === void 0 ? void 0 : syncer.unsentFrom) || 1;
6664
+ return from$1(liveQuery(() => __awaiter(this, void 0, void 0, function* () {
6665
+ const addedUpdates = yield listUpdatesSince(yTbl, currentUnsentFrom);
6666
+ return addedUpdates
6667
+ .filter((update) => update.f && update.f & 1) // Only include local updates
6668
+ .map((update) => {
6669
+ return {
6670
+ type: 'u-c',
6671
+ table,
6672
+ prop: ydocProp,
6673
+ k: update.k,
6674
+ u: update.u,
6675
+ i: update.i,
6676
+ };
6677
+ });
6678
+ }))).pipe(tap$1((addedUpdates) => {
6679
+ // Update currentUnsentFrom to only listen for updates that will be newer than the ones we emitted.
6680
+ // (Before, we did this within the liveQuery, but that caused a bug because
6681
+ // a cancelled emission of a liveQuery would update the currentUnsentFrom without
6682
+ // emitting anything, causing us to skip over some updates. Here we update it
6683
+ // after the liveQuery has emitted its updates)
6684
+ if (addedUpdates.length > 0) {
6685
+ currentUnsentFrom = addedUpdates.at(-1).i + 1;
6686
+ }
6687
+ }));
6688
+ }));
6689
+ })).pipe(
6690
+ // Flatten the array of messages.
6691
+ // If messageProducer emits an empty array, nothing is emitted,
6692
+ // but if it emits an array of messages, they are
6693
+ // emitted one by one.
6694
+ mergeMap$1((messages) => messages), tap$1((message) => {
6695
+ console.debug('dexie-cloud emitting y-c', message);
6696
+ }));
6697
+ }
6698
+
6699
+ function getAwarenessLibrary(db) {
6700
+ var _a, _b;
6701
+ if (!((_a = db.cloud.options) === null || _a === void 0 ? void 0 : _a.awarenessProtocol)) {
6702
+ throw new Dexie.MissingAPIError('awarenessProtocol was not provided to db.cloud.configure(). Please import * as awarenessProtocol from "y-protocols/awareness".');
6703
+ }
6704
+ return (_b = db.cloud.options) === null || _b === void 0 ? void 0 : _b.awarenessProtocol;
6705
+ }
6706
+ const awarenessWeakMap = new WeakMap();
6707
+ const getDocAwareness = (doc) => awarenessWeakMap.get(doc);
6708
+
5183
6709
  const SERVER_PING_TIMEOUT = 20000;
5184
6710
  const CLIENT_PING_INTERVAL = 30000;
5185
6711
  const FAIL_RETRY_WAIT_TIME = 60000;
5186
6712
  class WSObservable extends Observable$1 {
5187
- constructor(databaseUrl, rev, realmSetHash, clientIdentity, messageProducer, webSocketStatus, token, tokenExpiration) {
5188
- super((subscriber) => new WSConnection(databaseUrl, rev, realmSetHash, clientIdentity, token, tokenExpiration, subscriber, messageProducer, webSocketStatus));
6713
+ constructor(db, rev, realmSetHash, clientIdentity, messageProducer, webSocketStatus, user) {
6714
+ super((subscriber) => new WSConnection(db, rev, realmSetHash, clientIdentity, user, subscriber, messageProducer, webSocketStatus));
5189
6715
  }
5190
6716
  }
5191
6717
  let counter = 0;
5192
6718
  class WSConnection extends Subscription$1 {
5193
- constructor(databaseUrl, rev, realmSetHash, clientIdentity, token, tokenExpiration, subscriber, messageProducer, webSocketStatus) {
6719
+ constructor(db, rev, realmSetHash, clientIdentity, user, subscriber, messageProducer, webSocketStatus) {
5194
6720
  super(() => this.teardown());
5195
6721
  this.id = ++counter;
6722
+ this.subscriptions = new Set();
5196
6723
  this.reconnecting = false;
5197
- console.debug('New WebSocket Connection', this.id, token ? 'authorized' : 'unauthorized');
5198
- this.databaseUrl = databaseUrl;
6724
+ console.debug('New WebSocket Connection', this.id, user.accessToken ? 'authorized' : 'unauthorized');
6725
+ this.db = db;
6726
+ this.databaseUrl = db.cloud.options.databaseUrl;
5199
6727
  this.rev = rev;
5200
6728
  this.realmSetHash = realmSetHash;
5201
6729
  this.clientIdentity = clientIdentity;
5202
- this.token = token;
5203
- this.tokenExpiration = tokenExpiration;
6730
+ this.user = user;
5204
6731
  this.subscriber = subscriber;
5205
6732
  this.lastUserActivity = new Date();
5206
6733
  this.messageProducer = messageProducer;
5207
- this.messageProducerSubscription = null;
5208
6734
  this.webSocketStatus = webSocketStatus;
5209
6735
  this.connect();
5210
6736
  }
@@ -5225,10 +6751,10 @@ class WSConnection extends Subscription$1 {
5225
6751
  catch (_a) { }
5226
6752
  }
5227
6753
  this.ws = null;
5228
- if (this.messageProducerSubscription) {
5229
- this.messageProducerSubscription.unsubscribe();
5230
- this.messageProducerSubscription = null;
6754
+ for (const sub of this.subscriptions) {
6755
+ sub.unsubscribe();
5231
6756
  }
6757
+ this.subscriptions.clear();
5232
6758
  }
5233
6759
  reconnect() {
5234
6760
  if (this.reconnecting)
@@ -5261,7 +6787,8 @@ class WSConnection extends Subscription$1 {
5261
6787
  //console.debug('SyncStatus: DUBB: Ooops it was closed!');
5262
6788
  return;
5263
6789
  }
5264
- if (this.tokenExpiration && this.tokenExpiration < new Date()) {
6790
+ const tokenExpiration = this.user.accessTokenExpiration;
6791
+ if (tokenExpiration && tokenExpiration < new Date()) {
5265
6792
  this.subscriber.error(new TokenExpiredError()); // Will be handled in connectWebSocket.ts.
5266
6793
  return;
5267
6794
  }
@@ -5316,13 +6843,13 @@ class WSConnection extends Subscription$1 {
5316
6843
  searchParams.set('rev', this.rev);
5317
6844
  searchParams.set('realmsHash', this.realmSetHash);
5318
6845
  searchParams.set('clientId', this.clientIdentity);
5319
- if (this.token) {
5320
- searchParams.set('token', this.token);
6846
+ if (this.user.accessToken) {
6847
+ searchParams.set('token', this.user.accessToken);
5321
6848
  }
5322
6849
  // Connect the WebSocket to given url:
5323
6850
  console.debug('dexie-cloud WebSocket create');
5324
6851
  const ws = (this.ws = new WebSocket(`${wsUrl}/changes?${searchParams}`));
5325
- //ws.binaryType = "arraybuffer"; // For future when subscribing to actual changes.
6852
+ ws.binaryType = "arraybuffer";
5326
6853
  ws.onclose = (event) => {
5327
6854
  if (!this.pinger)
5328
6855
  return;
@@ -5332,17 +6859,33 @@ class WSConnection extends Subscription$1 {
5332
6859
  ws.onmessage = (event) => {
5333
6860
  if (!this.pinger)
5334
6861
  return;
5335
- console.debug('dexie-cloud WebSocket onmessage', event.data);
5336
6862
  this.lastServerActivity = new Date();
5337
6863
  try {
5338
- const msg = TSON.parse(event.data);
6864
+ const msg = typeof event.data === 'string'
6865
+ ? TSON.parse(event.data)
6866
+ : decodeYMessage(new Uint8Array(event.data));
6867
+ console.debug('dexie-cloud WebSocket onmessage', msg.type, msg);
5339
6868
  if (msg.type === 'error') {
5340
6869
  throw new Error(`Error message from dexie-cloud: ${msg.error}`);
5341
6870
  }
5342
- if (msg.type === 'rev') {
6871
+ else if (msg.type === 'rev') {
5343
6872
  this.rev = msg.rev; // No meaning but seems reasonable.
5344
6873
  }
5345
- if (msg.type !== 'pong') {
6874
+ else if (msg.type === 'aware') {
6875
+ const docCache = DexieYProvider.getDocCache(this.db.dx);
6876
+ const doc = docCache.find(msg.table, msg.k, msg.prop);
6877
+ if (doc) {
6878
+ const awareness = getDocAwareness(doc);
6879
+ if (awareness) {
6880
+ const awap = getAwarenessLibrary(this.db);
6881
+ awap.applyAwarenessUpdate(awareness, msg.u, 'server');
6882
+ }
6883
+ }
6884
+ }
6885
+ else if (msg.type === 'u-ack' || msg.type === 'u-reject' || msg.type === 'u-s' || msg.type === 'in-sync') {
6886
+ applyYServerMessages([msg], this.db);
6887
+ }
6888
+ else if (msg.type !== 'pong') {
5346
6889
  this.subscriber.next(msg);
5347
6890
  }
5348
6891
  }
@@ -5370,16 +6913,27 @@ class WSConnection extends Subscription$1 {
5370
6913
  }
5371
6914
  };
5372
6915
  });
5373
- this.messageProducerSubscription = this.messageProducer.subscribe((msg) => {
5374
- var _a;
6916
+ this.subscriptions.add(this.messageProducer.subscribe((msg) => {
6917
+ var _a, _b;
5375
6918
  if (!this.closed) {
5376
6919
  if (msg.type === 'ready' &&
5377
6920
  this.webSocketStatus.value !== 'connected') {
5378
6921
  this.webSocketStatus.next('connected');
5379
6922
  }
5380
- (_a = this.ws) === null || _a === void 0 ? void 0 : _a.send(TSON.stringify(msg));
6923
+ console.debug('dexie-cloud WebSocket send', msg.type, msg);
6924
+ if (msg.type === 'ready') {
6925
+ (_a = this.ws) === null || _a === void 0 ? void 0 : _a.send(TSON.stringify(msg));
6926
+ }
6927
+ else {
6928
+ // If it's not a "ready" message, it's a YMessage.
6929
+ // YMessages can be sent binary encoded.
6930
+ (_b = this.ws) === null || _b === void 0 ? void 0 : _b.send(encodeYMessage(msg));
6931
+ }
5381
6932
  }
5382
- });
6933
+ }));
6934
+ if (this.user.isLoggedIn && !isEagerSyncDisabled(this.db)) {
6935
+ this.subscriptions.add(createYClientUpdateObservable(this.db).subscribe(this.db.messageProducer));
6936
+ }
5383
6937
  }
5384
6938
  catch (error) {
5385
6939
  this.pauseUntil = new Date(Date.now() + FAIL_RETRY_WAIT_TIME);
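
Taken together, the onmessage and send handlers above speak a small message vocabulary over the /changes WebSocket: JSON (TSON) for the 'ready'/'rev'/'pong' traffic and binary-encoded Y messages for document sync and awareness. A rough sketch of the shapes visible in this diff (field lists reflect only what the code here touches; the authoritative definitions are not part of this file):

    // Reconstructed for orientation only — not the package's own type definitions.
    type YClientMessage =
      | { type: 'u-c'; table: string; prop: string; k: any; u: Uint8Array; i: number }  // local Yjs update (see createYClientUpdateObservable)
      | { type: 'aware'; table: string; prop: string; k: any; u: Uint8Array }           // awareness update
      | { type: 'doc-open'; table: string; prop: string; k: any; serverRev?: unknown; sv?: Uint8Array }
      | { type: 'doc-close'; table: string; prop: string; k: any };

    type YServerMessage =
      | { type: 'aware'; table: string; prop: string; k: any; u: Uint8Array }           // routed to the doc's awareness instance
      | { type: 'u-ack' | 'u-reject' | 'u-s' | 'in-sync'; [field: string]: unknown };   // handed to applyYServerMessages()
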
@@ -5421,7 +6975,7 @@ function connectWebSocket(db) {
5421
6975
  if (!((_a = db.cloud.options) === null || _a === void 0 ? void 0 : _a.databaseUrl)) {
5422
6976
  throw new Error(`No database URL to connect WebSocket to`);
5423
6977
  }
5424
- const messageProducer = db.messageConsumer.readyToServe.pipe(filter((isReady) => isReady), // When consumer is ready for new messages, produce such a message to inform server about it
6978
+ const readyForChangesMessage = db.messageConsumer.readyToServe.pipe(filter((isReady) => isReady), // When consumer is ready for new messages, produce such a message to inform server about it
5425
6979
  switchMap(() => db.getPersistedSyncState()), // We need the info on which server revision we are at:
5426
6980
  filter((syncState) => syncState && syncState.serverRevision), // We won't send anything to the server before the initial sync has taken place
5427
6981
  switchMap((syncState) => __awaiter(this, void 0, void 0, function* () {
@@ -5432,6 +6986,7 @@ function connectWebSocket(db) {
5432
6986
  realmSetHash: yield computeRealmSetHash(syncState)
5433
6987
  });
5434
6988
  })));
6989
+ const messageProducer = merge(readyForChangesMessage, db.messageProducer);
5435
6990
  function createObservable() {
5436
6991
  return db.cloud.persistedSyncState.pipe(filter((syncState) => syncState === null || syncState === void 0 ? void 0 : syncState.serverRevision), // Don't connect before an initial sync has been performed.
5437
6992
  take(1), // Don't continue waking up whenever syncState changes
@@ -5458,7 +7013,7 @@ function connectWebSocket(db) {
5458
7013
  // If no new entries, server won't bother the client. If new entries, server sends only those
5459
7014
  // and the baseRev of the last from same client-ID.
5460
7015
  if (userLogin) {
5461
- return new WSObservable(db.cloud.options.databaseUrl, db.cloud.persistedSyncState.value.serverRevision, realmSetHash, db.cloud.persistedSyncState.value.clientIdentity, messageProducer, db.cloud.webSocketStatus, userLogin.accessToken, userLogin.accessTokenExpiration);
7016
+ return new WSObservable(db, db.cloud.persistedSyncState.value.serverRevision, realmSetHash, db.cloud.persistedSyncState.value.clientIdentity, messageProducer, db.cloud.webSocketStatus, userLogin);
5462
7017
  }
5463
7018
  else {
5464
7019
  return from$1([]);
@@ -6264,6 +7819,142 @@ const getInvitesObservable = associate((db) => {
6264
7819
  })), []);
6265
7820
  });
6266
7821
 
7822
+ function createYHandler(db) {
7823
+ return (provider) => {
7824
+ var _a;
7825
+ const doc = provider.doc;
7826
+ const { parentTable } = doc.meta || {};
7827
+ if (!((_a = db.cloud.schema) === null || _a === void 0 ? void 0 : _a[parentTable].markedForSync)) {
7828
+ return; // The table that holds the doc is not marked for sync - leave it to dexie. No syncing, no awareness.
7829
+ }
7830
+ let awareness;
7831
+ Object.defineProperty(provider, "awareness", {
7832
+ get() {
7833
+ if (awareness)
7834
+ return awareness;
7835
+ awarenessWeakMap.set(doc, awareness);
7836
+ awareness = createAwareness(db, doc, provider);
7837
+ return awareness;
7838
+ }
7839
+ });
7840
+ };
7841
+ }
7842
+ function createAwareness(db, doc, provider) {
7843
+ const { parentTable, parentId, parentProp, updatesTable } = doc.meta;
7844
+ const awap = getAwarenessLibrary(db);
7845
+ const awareness = new awap.Awareness(doc);
7846
+ awareness.on('update', ({ added, updated, removed }, origin) => {
7847
+ // Send the update
7848
+ const changedClients = added.concat(updated).concat(removed);
7849
+ const user = db.cloud.currentUser.value;
7850
+ if (origin !== 'server' && user.isLoggedIn && !isEagerSyncDisabled(db)) {
7851
+ const update = awap.encodeAwarenessUpdate(awareness, changedClients);
7852
+ db.messageProducer.next({
7853
+ type: 'aware',
7854
+ table: parentTable,
7855
+ prop: parentProp,
7856
+ k: doc.meta.parentId,
7857
+ u: update,
7858
+ });
7859
+ if (provider.destroyed) {
7860
+ // We're called from awareness.on('destroy') that did
7861
+ // removeAwarenessStates.
7862
+ // It's time to also send the doc-close message that dexie-cloud understands
7863
+ // and uses to stop subscribing to updates and awareness updates, and to drop
7864
+ // the cached information held in memory on the WS connection for this document.
7865
+ db.messageProducer.next({
7866
+ type: 'doc-close',
7867
+ table: parentTable,
7868
+ prop: parentProp,
7869
+ k: doc.meta.parentId
7870
+ });
7871
+ }
7872
+ }
7873
+ });
7874
+ awareness.on('destroy', () => {
7875
+ // Signal to server that this provider is destroyed (the update event will be triggered, which
7876
+ // in turn will trigger db.messageProducer that will send the message to the server if WS is connected)
7877
+ awap.removeAwarenessStates(awareness, [doc.clientID], 'provider destroyed');
7878
+ });
7879
+ // Open the document on the server
7880
+ (() => __awaiter(this, void 0, void 0, function* () {
7881
+ if (provider.destroyed)
7882
+ return;
7883
+ let connected = false;
7884
+ let currentFlowId = 1;
7885
+ const subscription = db.cloud.webSocketStatus.subscribe((wsStatus) => {
7886
+ if (provider.destroyed)
7887
+ return;
7888
+ // Keep "connected" state in a variable so we can check it after async operations
7889
+ connected = wsStatus === 'connected';
7890
+ // We are or got connected. Open the document on the server.
7891
+ const user = db.cloud.currentUser.value;
7892
+ if (wsStatus === "connected" && user.isLoggedIn && !isEagerSyncDisabled(db)) {
7893
+ ++currentFlowId;
7894
+ openDocumentOnServer().catch(error => {
7895
+ console.warn(`Error caught in createYHandler.ts: ${error}`);
7896
+ });
7897
+ }
7898
+ });
7899
+ // Wait until WebSocket is connected
7900
+ provider.addCleanupHandler(subscription);
7901
+ /** Sends a 'doc-open' message to the server whenever the websocket becomes
7902
+ * connected, or if it is already connected.
7903
+ * The flow is aborted in case websocket is disconnected while querying
7904
+ * information required to compute the state vector. Flow is also
7905
+ * aborted in case document or provider has been destroyed during
7906
+ * the async parts of the task.
7907
+ *
7908
+ * The state vector is only computed from the updates that have occurred
7909
+ * after the last full sync - which could very often be zero - in which
7910
+ * case no state vector is sent (then the server already knows us by
7911
+ * revision)
7912
+ *
7913
+ * When the server gets the doc-open message, it will check
7914
+ * whether we are allowed to read / write to this document, and then
7915
+ * keep the cached information in memory on the WS connection for this
7916
+ * particular document, as well as subscribe to updates and awareness updates
7917
+ * from other clients on the document.
7918
+ */
7919
+ function openDocumentOnServer(wsStatus) {
7920
+ return __awaiter(this, void 0, void 0, function* () {
7921
+ const myFlow = currentFlowId; // So we can abort when a new flow is started
7922
+ const yTbl = db.table(updatesTable);
7923
+ const syncState = yield yTbl.get(DEXIE_CLOUD_SYNCER_ID);
7924
+ // After every await, check if we still should be working on this task.
7925
+ if (provider.destroyed || currentFlowId !== myFlow || !connected)
7926
+ return;
7927
+ const receivedUntil = (syncState === null || syncState === void 0 ? void 0 : syncState.receivedUntil) || 0;
7928
+ const docOpenMsg = {
7929
+ type: 'doc-open',
7930
+ table: parentTable,
7931
+ prop: parentProp,
7932
+ k: parentId,
7933
+ serverRev: syncState === null || syncState === void 0 ? void 0 : syncState.serverRev,
7934
+ };
7935
+ const serverUpdatesSinceLastSync = yield yTbl
7936
+ .where('i')
7937
+ .between(receivedUntil, Infinity, false)
7938
+ .filter((update) => cmp(update.k, parentId) === 0 && // Only updates for this document
7939
+ ((update.f || 0) & 1) === 0 // Don't include local changes
7940
+ )
7941
+ .toArray();
7942
+ // After every await, check if we still should be working on this task.
7943
+ if (provider.destroyed || currentFlowId !== myFlow || !connected)
7944
+ return;
7945
+ if (serverUpdatesSinceLastSync.length > 0) {
7946
+ const Y = $Y(db); // Get the Yjs library from Dexie constructor options
7947
+ const mergedUpdate = Y.mergeUpdatesV2(serverUpdatesSinceLastSync.map((update) => update.u));
7948
+ const stateVector = Y.encodeStateVectorFromUpdateV2(mergedUpdate);
7949
+ docOpenMsg.sv = stateVector;
7950
+ }
7951
+ db.messageProducer.next(docOpenMsg);
7952
+ });
7953
+ }
7954
+ }))();
7955
+ return awareness;
7956
+ }
7957
+
6267
7958
  const DEFAULT_OPTIONS = {
6268
7959
  nameSuffix: true,
6269
7960
  };
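
The awareness support added in createAwareness() depends on the application passing the y-protocols awareness module to db.cloud.configure(); getAwarenessLibrary() throws a MissingAPIError otherwise. A minimal sketch of the wiring (database URL is a placeholder, not taken from this diff):

    import Dexie from 'dexie';
    import dexieCloud from 'dexie-cloud-addon';
    import * as awarenessProtocol from 'y-protocols/awareness';

    const db = new Dexie('mydb', { addons: [dexieCloud] });
    db.cloud.configure({
      databaseUrl: 'https://YOUR-DB.dexie.cloud', // placeholder
      awarenessProtocol,                          // live module reference (deleted from persisted options in a later hunk)
    });

Judging from the lazy getter above, document sync itself does not require the option; it is only needed once provider.awareness is accessed.
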
@@ -6293,8 +7984,9 @@ function dexieCloud(dexie) {
6293
7984
  if (closed)
6294
7985
  throw new Dexie.DatabaseClosedError();
6295
7986
  }
6296
- dbOnClosed(dexie, () => {
7987
+ dexie.once('close', () => {
6297
7988
  subscriptions.forEach((subscription) => subscription.unsubscribe());
7989
+ subscriptions.splice(0, subscriptions.length);
6298
7990
  closed = true;
6299
7991
  localSyncWorker && localSyncWorker.stop();
6300
7992
  localSyncWorker = null;
@@ -6303,7 +7995,7 @@ function dexieCloud(dexie) {
6303
7995
  const syncComplete = new Subject();
6304
7996
  dexie.cloud = {
6305
7997
  // @ts-ignore
6306
- version: "4.0.8",
7998
+ version: "4.1.0-alpha.12",
6307
7999
  options: Object.assign({}, DEFAULT_OPTIONS),
6308
8000
  schema: null,
6309
8001
  get currentUserId() {
@@ -6449,6 +8141,7 @@ function dexieCloud(dexie) {
6449
8141
  throw new Error(`Internal error`); // options cannot be null if configuredProgramatically is set.
6450
8142
  const newPersistedOptions = Object.assign({}, options);
6451
8143
  delete newPersistedOptions.fetchTokens;
8144
+ delete newPersistedOptions.awarenessProtocol;
6452
8145
  yield db.$syncState.put(newPersistedOptions, 'options');
6453
8146
  }
6454
8147
  if (((_h = db.cloud.options) === null || _h === void 0 ? void 0 : _h.tryUseServiceWorker) &&
@@ -6526,6 +8219,12 @@ function dexieCloud(dexie) {
6526
8219
  currentUserEmitter.pipe(skip(1), take(1)),
6527
8220
  db.cloud.persistedSyncState.pipe(skip(1), take(1)),
6528
8221
  ]));
8222
+ const yHandler = createYHandler(db);
8223
+ db.dx.on('y', yHandler);
8224
+ db.dx.once('close', () => {
8225
+ var _a;
8226
+ (_a = db.dx.on.y) === null || _a === void 0 ? void 0 : _a.unsubscribe(yHandler);
8227
+ });
6529
8228
  }
6530
8229
  // HERE: If requireAuth, do authentication now.
6531
8230
  let changedUser = false;
@@ -6598,7 +8297,7 @@ function dexieCloud(dexie) {
6598
8297
  }
6599
8298
  }
6600
8299
  // @ts-ignore
6601
- dexieCloud.version = "4.0.8";
8300
+ dexieCloud.version = "4.1.0-alpha.12";
6602
8301
  Dexie.Cloud = dexieCloud;
6603
8302
 
6604
8303
  // In case the SW lives for a while, let it reuse already opened connections: