dexie-cloud-addon 4.0.8 → 4.1.0-alpha.12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59) hide show
  1. package/dist/modern/DexieCloudOptions.d.ts +1 -0
  2. package/dist/modern/WSObservable.d.ts +9 -6
  3. package/dist/modern/db/DexieCloudDB.d.ts +2 -0
  4. package/dist/modern/db/entities/PersistedSyncState.d.ts +7 -0
  5. package/dist/modern/define-ydoc-trigger.d.ts +2 -0
  6. package/dist/modern/dexie-cloud-addon.d.ts +1 -0
  7. package/dist/modern/dexie-cloud-addon.js +1914 -63
  8. package/dist/modern/dexie-cloud-addon.js.map +1 -1
  9. package/dist/modern/dexie-cloud-addon.min.js +1 -1
  10. package/dist/modern/dexie-cloud-addon.min.js.map +1 -1
  11. package/dist/modern/service-worker.js +1761 -62
  12. package/dist/modern/service-worker.js.map +1 -1
  13. package/dist/modern/service-worker.min.js +1 -1
  14. package/dist/modern/service-worker.min.js.map +1 -1
  15. package/dist/modern/sync/DEXIE_CLOUD_SYNCER_ID.d.ts +1 -0
  16. package/dist/modern/sync/syncWithServer.d.ts +2 -2
  17. package/dist/modern/yjs/Y.d.ts +3 -0
  18. package/dist/modern/yjs/YDexieCloudSyncState.d.ts +4 -0
  19. package/dist/modern/yjs/YTable.d.ts +2 -0
  20. package/dist/modern/yjs/applyYMessages.d.ts +5 -0
  21. package/dist/modern/yjs/awareness.d.ts +4 -0
  22. package/dist/modern/yjs/createYClientUpdateObservable.d.ts +4 -0
  23. package/dist/modern/yjs/createYHandler.d.ts +5 -0
  24. package/dist/modern/yjs/downloadYDocsFromServer.d.ts +3 -0
  25. package/dist/modern/yjs/getUpdatesTable.d.ts +3 -0
  26. package/dist/modern/yjs/listUpdatesSince.d.ts +2 -0
  27. package/dist/modern/yjs/listYClientMessagesAndStateVector.d.ts +26 -0
  28. package/dist/modern/yjs/updateYSyncStates.d.ts +6 -0
  29. package/dist/umd/DexieCloudOptions.d.ts +1 -0
  30. package/dist/umd/WSObservable.d.ts +9 -6
  31. package/dist/umd/db/DexieCloudDB.d.ts +2 -0
  32. package/dist/umd/db/entities/PersistedSyncState.d.ts +7 -0
  33. package/dist/umd/define-ydoc-trigger.d.ts +2 -0
  34. package/dist/umd/dexie-cloud-addon.d.ts +1 -0
  35. package/dist/umd/dexie-cloud-addon.js +1912 -60
  36. package/dist/umd/dexie-cloud-addon.js.map +1 -1
  37. package/dist/umd/dexie-cloud-addon.min.js +1 -1
  38. package/dist/umd/dexie-cloud-addon.min.js.map +1 -1
  39. package/dist/umd/service-worker.js +1759 -60
  40. package/dist/umd/service-worker.js.map +1 -1
  41. package/dist/umd/service-worker.min.js +1 -1
  42. package/dist/umd/service-worker.min.js.map +1 -1
  43. package/dist/umd/sync/DEXIE_CLOUD_SYNCER_ID.d.ts +1 -0
  44. package/dist/umd/sync/syncWithServer.d.ts +2 -2
  45. package/dist/umd/yjs/Y.d.ts +3 -0
  46. package/dist/umd/yjs/YDexieCloudSyncState.d.ts +4 -0
  47. package/dist/umd/yjs/YTable.d.ts +2 -0
  48. package/dist/umd/yjs/applyYMessages.d.ts +5 -0
  49. package/dist/umd/yjs/awareness.d.ts +4 -0
  50. package/dist/umd/yjs/createYClientUpdateObservable.d.ts +4 -0
  51. package/dist/umd/yjs/createYHandler.d.ts +5 -0
  52. package/dist/umd/yjs/downloadYDocsFromServer.d.ts +3 -0
  53. package/dist/umd/yjs/getUpdatesTable.d.ts +3 -0
  54. package/dist/umd/yjs/listUpdatesSince.d.ts +2 -0
  55. package/dist/umd/yjs/listYClientMessagesAndStateVector.d.ts +26 -0
  56. package/dist/umd/yjs/updateYSyncStates.d.ts +6 -0
  57. package/package.json +5 -4
  58. package/dist/modern/helpers/dbOnClosed.d.ts +0 -2
  59. package/dist/umd/helpers/dbOnClosed.d.ts +0 -2
@@ -8,7 +8,7 @@
8
8
  *
9
9
  * ==========================================================================
10
10
  *
11
- * Version 4.0.8, Tue Jun 04 2024
11
+ * Version 4.1.0-alpha.12, Wed Oct 16 2024
12
12
  *
13
13
  * https://dexie.org
14
14
  *
@@ -470,6 +470,1075 @@
470
470
  : url.pathname.split('/')[1];
471
471
  }
472
472
 
473
+ /**
474
+ * Common Math expressions.
475
+ *
476
+ * @module math
477
+ */
478
+
479
+ const floor = Math.floor;
480
+ const abs = Math.abs;
481
+
482
+ /**
483
+ * @function
484
+ * @param {number} a
485
+ * @param {number} b
486
+ * @return {number} The smaller element of a and b
487
+ */
488
+ const min = (a, b) => a < b ? a : b;
489
+
490
+ /**
491
+ * @function
492
+ * @param {number} a
493
+ * @param {number} b
494
+ * @return {number} The bigger element of a and b
495
+ */
496
+ const max = (a, b) => a > b ? a : b;
497
+
498
+ /**
499
+ * @param {number} n
500
+ * @return {boolean} Wether n is negative. This function also differentiates between -0 and +0
501
+ */
502
+ const isNegativeZero = n => n !== 0 ? n < 0 : 1 / n < 0;
503
+
504
+ /* eslint-env browser */
505
+
506
+ const BIT7 = 64;
507
+ const BIT8 = 128;
508
+ const BITS6 = 63;
509
+ const BITS7 = 127;
510
+ /**
511
+ * @type {number}
512
+ */
513
+ const BITS31 = 0x7FFFFFFF;
514
+
515
+ /**
516
+ * Utility helpers for working with numbers.
517
+ *
518
+ * @module number
519
+ */
520
+
521
+
522
+ const MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER;
523
+
524
+ /* c8 ignore next */
525
+ const isInteger = Number.isInteger || (num => typeof num === 'number' && isFinite(num) && floor(num) === num);
526
+
527
+ /**
528
+ * Utility module to work with Arrays.
529
+ *
530
+ * @module array
531
+ */
532
+
533
+
534
+ const isArray = Array.isArray;
535
+
536
+ /**
537
+ * @param {string} str
538
+ * @return {Uint8Array}
539
+ */
540
+ const _encodeUtf8Polyfill = str => {
541
+ const encodedString = unescape(encodeURIComponent(str));
542
+ const len = encodedString.length;
543
+ const buf = new Uint8Array(len);
544
+ for (let i = 0; i < len; i++) {
545
+ buf[i] = /** @type {number} */ (encodedString.codePointAt(i));
546
+ }
547
+ return buf
548
+ };
549
+
550
+ /* c8 ignore next */
551
+ const utf8TextEncoder = /** @type {TextEncoder} */ (typeof TextEncoder !== 'undefined' ? new TextEncoder() : null);
552
+
553
+ /**
554
+ * @param {string} str
555
+ * @return {Uint8Array}
556
+ */
557
+ const _encodeUtf8Native = str => utf8TextEncoder.encode(str);
558
+
559
+ /**
560
+ * @param {string} str
561
+ * @return {Uint8Array}
562
+ */
563
+ /* c8 ignore next */
564
+ const encodeUtf8 = utf8TextEncoder ? _encodeUtf8Native : _encodeUtf8Polyfill;
565
+
566
+ /* c8 ignore next */
567
+ let utf8TextDecoder = typeof TextDecoder === 'undefined' ? null : new TextDecoder('utf-8', { fatal: true, ignoreBOM: true });
568
+
569
+ /* c8 ignore start */
570
+ if (utf8TextDecoder && utf8TextDecoder.decode(new Uint8Array()).length === 1) {
571
+ // Safari doesn't handle BOM correctly.
572
+ // This fixes a bug in Safari 13.0.5 where it produces a BOM the first time it is called.
573
+ // utf8TextDecoder.decode(new Uint8Array()).length === 1 on the first call and
574
+ // utf8TextDecoder.decode(new Uint8Array()).length === 1 on the second call
575
+ // Another issue is that from then on no BOM chars are recognized anymore
576
+ /* c8 ignore next */
577
+ utf8TextDecoder = null;
578
+ }
579
+
580
+ /**
581
+ * Efficient schema-less binary encoding with support for variable length encoding.
582
+ *
583
+ * Use [lib0/encoding] with [lib0/decoding]. Every encoding function has a corresponding decoding function.
584
+ *
585
+ * Encodes numbers in little-endian order (least to most significant byte order)
586
+ * and is compatible with Golang's binary encoding (https://golang.org/pkg/encoding/binary/)
587
+ * which is also used in Protocol Buffers.
588
+ *
589
+ * ```js
590
+ * // encoding step
591
+ * const encoder = encoding.createEncoder()
592
+ * encoding.writeVarUint(encoder, 256)
593
+ * encoding.writeVarString(encoder, 'Hello world!')
594
+ * const buf = encoding.toUint8Array(encoder)
595
+ * ```
596
+ *
597
+ * ```js
598
+ * // decoding step
599
+ * const decoder = decoding.createDecoder(buf)
600
+ * decoding.readVarUint(decoder) // => 256
601
+ * decoding.readVarString(decoder) // => 'Hello world!'
602
+ * decoding.hasContent(decoder) // => false - all data is read
603
+ * ```
604
+ *
605
+ * @module encoding
606
+ */
607
+
608
+
609
+ /**
610
+ * A BinaryEncoder handles the encoding to an Uint8Array.
611
+ */
612
+ class Encoder {
613
+ constructor () {
614
+ this.cpos = 0;
615
+ this.cbuf = new Uint8Array(100);
616
+ /**
617
+ * @type {Array<Uint8Array>}
618
+ */
619
+ this.bufs = [];
620
+ }
621
+ }
622
+
623
+ /**
624
+ * The current length of the encoded data.
625
+ *
626
+ * @function
627
+ * @param {Encoder} encoder
628
+ * @return {number}
629
+ */
630
+ const length = encoder => {
631
+ let len = encoder.cpos;
632
+ for (let i = 0; i < encoder.bufs.length; i++) {
633
+ len += encoder.bufs[i].length;
634
+ }
635
+ return len
636
+ };
637
+
638
+ /**
639
+ * Transform to Uint8Array.
640
+ *
641
+ * @function
642
+ * @param {Encoder} encoder
643
+ * @return {Uint8Array} The created ArrayBuffer.
644
+ */
645
+ const toUint8Array = encoder => {
646
+ const uint8arr = new Uint8Array(length(encoder));
647
+ let curPos = 0;
648
+ for (let i = 0; i < encoder.bufs.length; i++) {
649
+ const d = encoder.bufs[i];
650
+ uint8arr.set(d, curPos);
651
+ curPos += d.length;
652
+ }
653
+ uint8arr.set(new Uint8Array(encoder.cbuf.buffer, 0, encoder.cpos), curPos);
654
+ return uint8arr
655
+ };
656
+
657
+ /**
658
+ * Verify that it is possible to write `len` bytes wtihout checking. If
659
+ * necessary, a new Buffer with the required length is attached.
660
+ *
661
+ * @param {Encoder} encoder
662
+ * @param {number} len
663
+ */
664
+ const verifyLen = (encoder, len) => {
665
+ const bufferLen = encoder.cbuf.length;
666
+ if (bufferLen - encoder.cpos < len) {
667
+ encoder.bufs.push(new Uint8Array(encoder.cbuf.buffer, 0, encoder.cpos));
668
+ encoder.cbuf = new Uint8Array(max(bufferLen, len) * 2);
669
+ encoder.cpos = 0;
670
+ }
671
+ };
672
+
673
+ /**
674
+ * Write one byte to the encoder.
675
+ *
676
+ * @function
677
+ * @param {Encoder} encoder
678
+ * @param {number} num The byte that is to be encoded.
679
+ */
680
+ const write = (encoder, num) => {
681
+ const bufferLen = encoder.cbuf.length;
682
+ if (encoder.cpos === bufferLen) {
683
+ encoder.bufs.push(encoder.cbuf);
684
+ encoder.cbuf = new Uint8Array(bufferLen * 2);
685
+ encoder.cpos = 0;
686
+ }
687
+ encoder.cbuf[encoder.cpos++] = num;
688
+ };
689
+
690
+ /**
691
+ * Write a variable length unsigned integer. Max encodable integer is 2^53.
692
+ *
693
+ * @function
694
+ * @param {Encoder} encoder
695
+ * @param {number} num The number that is to be encoded.
696
+ */
697
+ const writeVarUint = (encoder, num) => {
698
+ while (num > BITS7) {
699
+ write(encoder, BIT8 | (BITS7 & num));
700
+ num = floor(num / 128); // shift >>> 7
701
+ }
702
+ write(encoder, BITS7 & num);
703
+ };
704
+
705
+ /**
706
+ * Write a variable length integer.
707
+ *
708
+ * We use the 7th bit instead for signaling that this is a negative number.
709
+ *
710
+ * @function
711
+ * @param {Encoder} encoder
712
+ * @param {number} num The number that is to be encoded.
713
+ */
714
+ const writeVarInt = (encoder, num) => {
715
+ const isNegative = isNegativeZero(num);
716
+ if (isNegative) {
717
+ num = -num;
718
+ }
719
+ // |- whether to continue reading |- whether is negative |- number
720
+ write(encoder, (num > BITS6 ? BIT8 : 0) | (isNegative ? BIT7 : 0) | (BITS6 & num));
721
+ num = floor(num / 64); // shift >>> 6
722
+ // We don't need to consider the case of num === 0 so we can use a different
723
+ // pattern here than above.
724
+ while (num > 0) {
725
+ write(encoder, (num > BITS7 ? BIT8 : 0) | (BITS7 & num));
726
+ num = floor(num / 128); // shift >>> 7
727
+ }
728
+ };
729
+
730
+ /**
731
+ * A cache to store strings temporarily
732
+ */
733
+ const _strBuffer = new Uint8Array(30000);
734
+ const _maxStrBSize = _strBuffer.length / 3;
735
+
736
+ /**
737
+ * Write a variable length string.
738
+ *
739
+ * @function
740
+ * @param {Encoder} encoder
741
+ * @param {String} str The string that is to be encoded.
742
+ */
743
+ const _writeVarStringNative = (encoder, str) => {
744
+ if (str.length < _maxStrBSize) {
745
+ // We can encode the string into the existing buffer
746
+ /* c8 ignore next */
747
+ const written = utf8TextEncoder.encodeInto(str, _strBuffer).written || 0;
748
+ writeVarUint(encoder, written);
749
+ for (let i = 0; i < written; i++) {
750
+ write(encoder, _strBuffer[i]);
751
+ }
752
+ } else {
753
+ writeVarUint8Array(encoder, encodeUtf8(str));
754
+ }
755
+ };
756
+
757
+ /**
758
+ * Write a variable length string.
759
+ *
760
+ * @function
761
+ * @param {Encoder} encoder
762
+ * @param {String} str The string that is to be encoded.
763
+ */
764
+ const _writeVarStringPolyfill = (encoder, str) => {
765
+ const encodedString = unescape(encodeURIComponent(str));
766
+ const len = encodedString.length;
767
+ writeVarUint(encoder, len);
768
+ for (let i = 0; i < len; i++) {
769
+ write(encoder, /** @type {number} */ (encodedString.codePointAt(i)));
770
+ }
771
+ };
772
+
773
+ /**
774
+ * Write a variable length string.
775
+ *
776
+ * @function
777
+ * @param {Encoder} encoder
778
+ * @param {String} str The string that is to be encoded.
779
+ */
780
+ /* c8 ignore next */
781
+ const writeVarString = (utf8TextEncoder && /** @type {any} */ (utf8TextEncoder).encodeInto) ? _writeVarStringNative : _writeVarStringPolyfill;
782
+
783
+ /**
784
+ * Append fixed-length Uint8Array to the encoder.
785
+ *
786
+ * @function
787
+ * @param {Encoder} encoder
788
+ * @param {Uint8Array} uint8Array
789
+ */
790
+ const writeUint8Array = (encoder, uint8Array) => {
791
+ const bufferLen = encoder.cbuf.length;
792
+ const cpos = encoder.cpos;
793
+ const leftCopyLen = min(bufferLen - cpos, uint8Array.length);
794
+ const rightCopyLen = uint8Array.length - leftCopyLen;
795
+ encoder.cbuf.set(uint8Array.subarray(0, leftCopyLen), cpos);
796
+ encoder.cpos += leftCopyLen;
797
+ if (rightCopyLen > 0) {
798
+ // Still something to write, write right half..
799
+ // Append new buffer
800
+ encoder.bufs.push(encoder.cbuf);
801
+ // must have at least size of remaining buffer
802
+ encoder.cbuf = new Uint8Array(max(bufferLen * 2, rightCopyLen));
803
+ // copy array
804
+ encoder.cbuf.set(uint8Array.subarray(leftCopyLen));
805
+ encoder.cpos = rightCopyLen;
806
+ }
807
+ };
808
+
809
+ /**
810
+ * Append an Uint8Array to Encoder.
811
+ *
812
+ * @function
813
+ * @param {Encoder} encoder
814
+ * @param {Uint8Array} uint8Array
815
+ */
816
+ const writeVarUint8Array = (encoder, uint8Array) => {
817
+ writeVarUint(encoder, uint8Array.byteLength);
818
+ writeUint8Array(encoder, uint8Array);
819
+ };
820
+
821
+ /**
822
+ * Create an DataView of the next `len` bytes. Use it to write data after
823
+ * calling this function.
824
+ *
825
+ * ```js
826
+ * // write float32 using DataView
827
+ * const dv = writeOnDataView(encoder, 4)
828
+ * dv.setFloat32(0, 1.1)
829
+ * // read float32 using DataView
830
+ * const dv = readFromDataView(encoder, 4)
831
+ * dv.getFloat32(0) // => 1.100000023841858 (leaving it to the reader to find out why this is the correct result)
832
+ * ```
833
+ *
834
+ * @param {Encoder} encoder
835
+ * @param {number} len
836
+ * @return {DataView}
837
+ */
838
+ const writeOnDataView = (encoder, len) => {
839
+ verifyLen(encoder, len);
840
+ const dview = new DataView(encoder.cbuf.buffer, encoder.cpos, len);
841
+ encoder.cpos += len;
842
+ return dview
843
+ };
844
+
845
+ /**
846
+ * @param {Encoder} encoder
847
+ * @param {number} num
848
+ */
849
+ const writeFloat32 = (encoder, num) => writeOnDataView(encoder, 4).setFloat32(0, num, false);
850
+
851
+ /**
852
+ * @param {Encoder} encoder
853
+ * @param {number} num
854
+ */
855
+ const writeFloat64 = (encoder, num) => writeOnDataView(encoder, 8).setFloat64(0, num, false);
856
+
857
+ /**
858
+ * @param {Encoder} encoder
859
+ * @param {bigint} num
860
+ */
861
+ const writeBigInt64 = (encoder, num) => /** @type {any} */ (writeOnDataView(encoder, 8)).setBigInt64(0, num, false);
862
+
863
+ /**
864
+ * @param {Encoder} encoder
865
+ * @param {bigint} num
866
+ */
867
+ const writeBigUint64 = (encoder, num) => /** @type {any} */ (writeOnDataView(encoder, 8)).setBigUint64(0, num, false);
868
+
869
+ const floatTestBed = new DataView(new ArrayBuffer(4));
870
+ /**
871
+ * Check if a number can be encoded as a 32 bit float.
872
+ *
873
+ * @param {number} num
874
+ * @return {boolean}
875
+ */
876
+ const isFloat32 = num => {
877
+ floatTestBed.setFloat32(0, num);
878
+ return floatTestBed.getFloat32(0) === num
879
+ };
880
+
881
+ /**
882
+ * Encode data with efficient binary format.
883
+ *
884
+ * Differences to JSON:
885
+ * • Transforms data to a binary format (not to a string)
886
+ * • Encodes undefined, NaN, and ArrayBuffer (these can't be represented in JSON)
887
+ * • Numbers are efficiently encoded either as a variable length integer, as a
888
+ * 32 bit float, as a 64 bit float, or as a 64 bit bigint.
889
+ *
890
+ * Encoding table:
891
+ *
892
+ * | Data Type | Prefix | Encoding Method | Comment |
893
+ * | ------------------- | -------- | ------------------ | ------- |
894
+ * | undefined | 127 | | Functions, symbol, and everything that cannot be identified is encoded as undefined |
895
+ * | null | 126 | | |
896
+ * | integer | 125 | writeVarInt | Only encodes 32 bit signed integers |
897
+ * | float32 | 124 | writeFloat32 | |
898
+ * | float64 | 123 | writeFloat64 | |
899
+ * | bigint | 122 | writeBigInt64 | |
900
+ * | boolean (false) | 121 | | True and false are different data types so we save the following byte |
901
+ * | boolean (true) | 120 | | - 0b01111000 so the last bit determines whether true or false |
902
+ * | string | 119 | writeVarString | |
903
+ * | object<string,any> | 118 | custom | Writes {length} then {length} key-value pairs |
904
+ * | array<any> | 117 | custom | Writes {length} then {length} json values |
905
+ * | Uint8Array | 116 | writeVarUint8Array | We use Uint8Array for any kind of binary data |
906
+ *
907
+ * Reasons for the decreasing prefix:
908
+ * We need the first bit for extendability (later we may want to encode the
909
+ * prefix with writeVarUint). The remaining 7 bits are divided as follows:
910
+ * [0-30] the beginning of the data range is used for custom purposes
911
+ * (defined by the function that uses this library)
912
+ * [31-127] the end of the data range is used for data encoding by
913
+ * lib0/encoding.js
914
+ *
915
+ * @param {Encoder} encoder
916
+ * @param {undefined|null|number|bigint|boolean|string|Object<string,any>|Array<any>|Uint8Array} data
917
+ */
918
+ const writeAny = (encoder, data) => {
919
+ switch (typeof data) {
920
+ case 'string':
921
+ // TYPE 119: STRING
922
+ write(encoder, 119);
923
+ writeVarString(encoder, data);
924
+ break
925
+ case 'number':
926
+ if (isInteger(data) && abs(data) <= BITS31) {
927
+ // TYPE 125: INTEGER
928
+ write(encoder, 125);
929
+ writeVarInt(encoder, data);
930
+ } else if (isFloat32(data)) {
931
+ // TYPE 124: FLOAT32
932
+ write(encoder, 124);
933
+ writeFloat32(encoder, data);
934
+ } else {
935
+ // TYPE 123: FLOAT64
936
+ write(encoder, 123);
937
+ writeFloat64(encoder, data);
938
+ }
939
+ break
940
+ case 'bigint':
941
+ // TYPE 122: BigInt
942
+ write(encoder, 122);
943
+ writeBigInt64(encoder, data);
944
+ break
945
+ case 'object':
946
+ if (data === null) {
947
+ // TYPE 126: null
948
+ write(encoder, 126);
949
+ } else if (isArray(data)) {
950
+ // TYPE 117: Array
951
+ write(encoder, 117);
952
+ writeVarUint(encoder, data.length);
953
+ for (let i = 0; i < data.length; i++) {
954
+ writeAny(encoder, data[i]);
955
+ }
956
+ } else if (data instanceof Uint8Array) {
957
+ // TYPE 116: ArrayBuffer
958
+ write(encoder, 116);
959
+ writeVarUint8Array(encoder, data);
960
+ } else {
961
+ // TYPE 118: Object
962
+ write(encoder, 118);
963
+ const keys = Object.keys(data);
964
+ writeVarUint(encoder, keys.length);
965
+ for (let i = 0; i < keys.length; i++) {
966
+ const key = keys[i];
967
+ writeVarString(encoder, key);
968
+ writeAny(encoder, data[key]);
969
+ }
970
+ }
971
+ break
972
+ case 'boolean':
973
+ // TYPE 120/121: boolean (true/false)
974
+ write(encoder, data ? 120 : 121);
975
+ break
976
+ default:
977
+ // TYPE 127: undefined
978
+ write(encoder, 127);
979
+ }
980
+ };
981
+
982
+ function encodeYMessage(msg) {
983
+ const encoder = new Encoder();
984
+ writeVarString(encoder, msg.type);
985
+ writeVarString(encoder, msg.table);
986
+ writeVarString(encoder, msg.prop);
987
+ switch (msg.type) {
988
+ case 'u-ack':
989
+ case 'u-reject':
990
+ writeBigUint64(encoder, BigInt(msg.i));
991
+ break;
992
+ default:
993
+ writeAny(encoder, msg.k);
994
+ switch (msg.type) {
995
+ case 'aware':
996
+ writeVarUint8Array(encoder, msg.u);
997
+ break;
998
+ case 'doc-open':
999
+ writeAny(encoder, msg.serverRev);
1000
+ writeAny(encoder, msg.sv);
1001
+ break;
1002
+ case 'doc-close':
1003
+ break;
1004
+ case 'sv':
1005
+ writeVarUint8Array(encoder, msg.sv);
1006
+ break;
1007
+ case 'u-c':
1008
+ writeVarUint8Array(encoder, msg.u);
1009
+ writeBigUint64(encoder, BigInt(msg.i));
1010
+ break;
1011
+ case 'u-s':
1012
+ writeVarUint8Array(encoder, msg.u);
1013
+ break;
1014
+ }
1015
+ }
1016
+ return toUint8Array(encoder);
1017
+ }
1018
+
1019
+ /**
1020
+ * Error helpers.
1021
+ *
1022
+ * @module error
1023
+ */
1024
+
1025
+ /**
1026
+ * @param {string} s
1027
+ * @return {Error}
1028
+ */
1029
+ /* c8 ignore next */
1030
+ const create = s => new Error(s);
1031
+
1032
+ /**
1033
+ * Efficient schema-less binary decoding with support for variable length encoding.
1034
+ *
1035
+ * Use [lib0/decoding] with [lib0/encoding]. Every encoding function has a corresponding decoding function.
1036
+ *
1037
+ * Encodes numbers in little-endian order (least to most significant byte order)
1038
+ * and is compatible with Golang's binary encoding (https://golang.org/pkg/encoding/binary/)
1039
+ * which is also used in Protocol Buffers.
1040
+ *
1041
+ * ```js
1042
+ * // encoding step
1043
+ * const encoder = encoding.createEncoder()
1044
+ * encoding.writeVarUint(encoder, 256)
1045
+ * encoding.writeVarString(encoder, 'Hello world!')
1046
+ * const buf = encoding.toUint8Array(encoder)
1047
+ * ```
1048
+ *
1049
+ * ```js
1050
+ * // decoding step
1051
+ * const decoder = decoding.createDecoder(buf)
1052
+ * decoding.readVarUint(decoder) // => 256
1053
+ * decoding.readVarString(decoder) // => 'Hello world!'
1054
+ * decoding.hasContent(decoder) // => false - all data is read
1055
+ * ```
1056
+ *
1057
+ * @module decoding
1058
+ */
1059
+
1060
+
1061
+ const errorUnexpectedEndOfArray = create('Unexpected end of array');
1062
+ const errorIntegerOutOfRange = create('Integer out of Range');
1063
+
1064
+ /**
1065
+ * A Decoder handles the decoding of an Uint8Array.
1066
+ */
1067
+ class Decoder {
1068
+ /**
1069
+ * @param {Uint8Array} uint8Array Binary data to decode
1070
+ */
1071
+ constructor (uint8Array) {
1072
+ /**
1073
+ * Decoding target.
1074
+ *
1075
+ * @type {Uint8Array}
1076
+ */
1077
+ this.arr = uint8Array;
1078
+ /**
1079
+ * Current decoding position.
1080
+ *
1081
+ * @type {number}
1082
+ */
1083
+ this.pos = 0;
1084
+ }
1085
+ }
1086
+
1087
+ /**
1088
+ * @function
1089
+ * @param {Decoder} decoder
1090
+ * @return {boolean}
1091
+ */
1092
+ const hasContent = decoder => decoder.pos !== decoder.arr.length;
1093
+
1094
+ /**
1095
+ * Create an Uint8Array view of the next `len` bytes and advance the position by `len`.
1096
+ *
1097
+ * Important: The Uint8Array still points to the underlying ArrayBuffer. Make sure to discard the result as soon as possible to prevent any memory leaks.
1098
+ * Use `buffer.copyUint8Array` to copy the result into a new Uint8Array.
1099
+ *
1100
+ * @function
1101
+ * @param {Decoder} decoder The decoder instance
1102
+ * @param {number} len The length of bytes to read
1103
+ * @return {Uint8Array}
1104
+ */
1105
+ const readUint8Array = (decoder, len) => {
1106
+ const view = new Uint8Array(decoder.arr.buffer, decoder.pos + decoder.arr.byteOffset, len);
1107
+ decoder.pos += len;
1108
+ return view
1109
+ };
1110
+
1111
+ /**
1112
+ * Read variable length Uint8Array.
1113
+ *
1114
+ * Important: The Uint8Array still points to the underlying ArrayBuffer. Make sure to discard the result as soon as possible to prevent any memory leaks.
1115
+ * Use `buffer.copyUint8Array` to copy the result into a new Uint8Array.
1116
+ *
1117
+ * @function
1118
+ * @param {Decoder} decoder
1119
+ * @return {Uint8Array}
1120
+ */
1121
+ const readVarUint8Array = decoder => readUint8Array(decoder, readVarUint(decoder));
1122
+
1123
+ /**
1124
+ * Read one byte as unsigned integer.
1125
+ * @function
1126
+ * @param {Decoder} decoder The decoder instance
1127
+ * @return {number} Unsigned 8-bit integer
1128
+ */
1129
+ const readUint8 = decoder => decoder.arr[decoder.pos++];
1130
+
1131
+ /**
1132
+ * Read unsigned integer (32bit) with variable length.
1133
+ * 1/8th of the storage is used as encoding overhead.
1134
+ * * numbers < 2^7 is stored in one bytlength
1135
+ * * numbers < 2^14 is stored in two bylength
1136
+ *
1137
+ * @function
1138
+ * @param {Decoder} decoder
1139
+ * @return {number} An unsigned integer.length
1140
+ */
1141
+ const readVarUint = decoder => {
1142
+ let num = 0;
1143
+ let mult = 1;
1144
+ const len = decoder.arr.length;
1145
+ while (decoder.pos < len) {
1146
+ const r = decoder.arr[decoder.pos++];
1147
+ // num = num | ((r & binary.BITS7) << len)
1148
+ num = num + (r & BITS7) * mult; // shift $r << (7*#iterations) and add it to num
1149
+ mult *= 128; // next iteration, shift 7 "more" to the left
1150
+ if (r < BIT8) {
1151
+ return num
1152
+ }
1153
+ /* c8 ignore start */
1154
+ if (num > MAX_SAFE_INTEGER) {
1155
+ throw errorIntegerOutOfRange
1156
+ }
1157
+ /* c8 ignore stop */
1158
+ }
1159
+ throw errorUnexpectedEndOfArray
1160
+ };
1161
+
1162
+ /**
1163
+ * Read signed integer (32bit) with variable length.
1164
+ * 1/8th of the storage is used as encoding overhead.
1165
+ * * numbers < 2^7 is stored in one bytlength
1166
+ * * numbers < 2^14 is stored in two bylength
1167
+ * @todo This should probably create the inverse ~num if number is negative - but this would be a breaking change.
1168
+ *
1169
+ * @function
1170
+ * @param {Decoder} decoder
1171
+ * @return {number} An unsigned integer.length
1172
+ */
1173
+ const readVarInt = decoder => {
1174
+ let r = decoder.arr[decoder.pos++];
1175
+ let num = r & BITS6;
1176
+ let mult = 64;
1177
+ const sign = (r & BIT7) > 0 ? -1 : 1;
1178
+ if ((r & BIT8) === 0) {
1179
+ // don't continue reading
1180
+ return sign * num
1181
+ }
1182
+ const len = decoder.arr.length;
1183
+ while (decoder.pos < len) {
1184
+ r = decoder.arr[decoder.pos++];
1185
+ // num = num | ((r & binary.BITS7) << len)
1186
+ num = num + (r & BITS7) * mult;
1187
+ mult *= 128;
1188
+ if (r < BIT8) {
1189
+ return sign * num
1190
+ }
1191
+ /* c8 ignore start */
1192
+ if (num > MAX_SAFE_INTEGER) {
1193
+ throw errorIntegerOutOfRange
1194
+ }
1195
+ /* c8 ignore stop */
1196
+ }
1197
+ throw errorUnexpectedEndOfArray
1198
+ };
1199
+
1200
+ /**
1201
+ * We don't test this function anymore as we use native decoding/encoding by default now.
1202
+ * Better not modify this anymore..
1203
+ *
1204
+ * Transforming utf8 to a string is pretty expensive. The code performs 10x better
1205
+ * when String.fromCodePoint is fed with all characters as arguments.
1206
+ * But most environments have a maximum number of arguments per functions.
1207
+ * For effiency reasons we apply a maximum of 10000 characters at once.
1208
+ *
1209
+ * @function
1210
+ * @param {Decoder} decoder
1211
+ * @return {String} The read String.
1212
+ */
1213
+ /* c8 ignore start */
1214
+ const _readVarStringPolyfill = decoder => {
1215
+ let remainingLen = readVarUint(decoder);
1216
+ if (remainingLen === 0) {
1217
+ return ''
1218
+ } else {
1219
+ let encodedString = String.fromCodePoint(readUint8(decoder)); // remember to decrease remainingLen
1220
+ if (--remainingLen < 100) { // do not create a Uint8Array for small strings
1221
+ while (remainingLen--) {
1222
+ encodedString += String.fromCodePoint(readUint8(decoder));
1223
+ }
1224
+ } else {
1225
+ while (remainingLen > 0) {
1226
+ const nextLen = remainingLen < 10000 ? remainingLen : 10000;
1227
+ // this is dangerous, we create a fresh array view from the existing buffer
1228
+ const bytes = decoder.arr.subarray(decoder.pos, decoder.pos + nextLen);
1229
+ decoder.pos += nextLen;
1230
+ // Starting with ES5.1 we can supply a generic array-like object as arguments
1231
+ encodedString += String.fromCodePoint.apply(null, /** @type {any} */ (bytes));
1232
+ remainingLen -= nextLen;
1233
+ }
1234
+ }
1235
+ return decodeURIComponent(escape(encodedString))
1236
+ }
1237
+ };
1238
+ /* c8 ignore stop */
1239
+
1240
+ /**
1241
+ * @function
1242
+ * @param {Decoder} decoder
1243
+ * @return {String} The read String
1244
+ */
1245
+ const _readVarStringNative = decoder =>
1246
+ /** @type any */ (utf8TextDecoder).decode(readVarUint8Array(decoder));
1247
+
1248
+ /**
1249
+ * Read string of variable length
1250
+ * * varUint is used to store the length of the string
1251
+ *
1252
+ * @function
1253
+ * @param {Decoder} decoder
1254
+ * @return {String} The read String
1255
+ *
1256
+ */
1257
+ /* c8 ignore next */
1258
+ const readVarString = utf8TextDecoder ? _readVarStringNative : _readVarStringPolyfill;
1259
+
1260
+ /**
1261
+ * @param {Decoder} decoder
1262
+ * @param {number} len
1263
+ * @return {DataView}
1264
+ */
1265
+ const readFromDataView = (decoder, len) => {
1266
+ const dv = new DataView(decoder.arr.buffer, decoder.arr.byteOffset + decoder.pos, len);
1267
+ decoder.pos += len;
1268
+ return dv
1269
+ };
1270
+
1271
+ /**
1272
+ * @param {Decoder} decoder
1273
+ */
1274
+ const readFloat32 = decoder => readFromDataView(decoder, 4).getFloat32(0, false);
1275
+
1276
+ /**
1277
+ * @param {Decoder} decoder
1278
+ */
1279
+ const readFloat64 = decoder => readFromDataView(decoder, 8).getFloat64(0, false);
1280
+
1281
+ /**
1282
+ * @param {Decoder} decoder
1283
+ */
1284
+ const readBigInt64 = decoder => /** @type {any} */ (readFromDataView(decoder, 8)).getBigInt64(0, false);
1285
+
1286
+ /**
1287
+ * @param {Decoder} decoder
1288
+ */
1289
+ const readBigUint64 = decoder => /** @type {any} */ (readFromDataView(decoder, 8)).getBigUint64(0, false);
1290
+
1291
+ /**
1292
+ * @type {Array<function(Decoder):any>}
1293
+ */
1294
+ const readAnyLookupTable = [
1295
+ decoder => undefined, // CASE 127: undefined
1296
+ decoder => null, // CASE 126: null
1297
+ readVarInt, // CASE 125: integer
1298
+ readFloat32, // CASE 124: float32
1299
+ readFloat64, // CASE 123: float64
1300
+ readBigInt64, // CASE 122: bigint
1301
+ decoder => false, // CASE 121: boolean (false)
1302
+ decoder => true, // CASE 120: boolean (true)
1303
+ readVarString, // CASE 119: string
1304
+ decoder => { // CASE 118: object<string,any>
1305
+ const len = readVarUint(decoder);
1306
+ /**
1307
+ * @type {Object<string,any>}
1308
+ */
1309
+ const obj = {};
1310
+ for (let i = 0; i < len; i++) {
1311
+ const key = readVarString(decoder);
1312
+ obj[key] = readAny(decoder);
1313
+ }
1314
+ return obj
1315
+ },
1316
+ decoder => { // CASE 117: array<any>
1317
+ const len = readVarUint(decoder);
1318
+ const arr = [];
1319
+ for (let i = 0; i < len; i++) {
1320
+ arr.push(readAny(decoder));
1321
+ }
1322
+ return arr
1323
+ },
1324
+ readVarUint8Array // CASE 116: Uint8Array
1325
+ ];
1326
+
1327
+ /**
1328
+ * @param {Decoder} decoder
1329
+ */
1330
+ const readAny = decoder => readAnyLookupTable[127 - readUint8(decoder)](decoder);
1331
+
1332
/**
 * Decode a binary Y protocol message into its structured form.
 * Every message starts with a common header (type, table, prop); the
 * remainder of the payload depends on the message type.
 * @throws {TypeError} when the message type is unknown.
 */
function decodeYMessage(a) {
  const decoder = new Decoder(a);
  const type = readVarString(decoder);
  const table = readVarString(decoder);
  const prop = readVarString(decoder);
  // Acks/rejections carry only the update id - no document key follows.
  if (type === 'u-ack' || type === 'u-reject') {
    return {
      type,
      table,
      prop,
      i: Number(readBigUint64(decoder)),
    };
  }
  // All remaining message types carry the document's primary key next.
  const k = readAny(decoder);
  switch (type) {
    case 'in-sync':
      return { type, table, prop, k };
    case 'aware':
      return {
        type,
        table,
        prop,
        k,
        u: readVarUint8Array(decoder),
      };
    case 'doc-open':
      return {
        type,
        table,
        prop,
        k,
        serverRev: readAny(decoder),
        sv: readAny(decoder),
      };
    case 'doc-close':
      return { type, table, prop, k };
    case 'sv':
      return {
        type,
        table,
        prop,
        k,
        sv: readVarUint8Array(decoder),
      };
    case 'u-c':
      return {
        type,
        table,
        prop,
        k,
        u: readVarUint8Array(decoder),
        i: Number(readBigUint64(decoder)),
      };
    case 'u-s':
      return {
        type,
        table,
        prop,
        k,
        u: readVarUint8Array(decoder),
      };
    default:
      throw new TypeError(`Unknown message type: ${type}`);
  }
}
1401
+
1402
/**
 * Compose a source async-generator with a series of transforming stages and
 * drive the resulting pipeline to completion.
 *
 * Each stage receives the previous stage's async iterable and returns a new
 * one. If the final stage is a sink it consumes everything itself; whatever
 * it does emit is simply drained here and discarded.
 *
 * @param source zero-arg function producing the initial async iterable
 * @param stages functions mapping an async iterable to another async iterable
 */
async function asyncIterablePipeline(source, ...stages) {
  // Wire the stages together: output of one feeds the next.
  let pipeline = source();
  for (const stage of stages) {
    pipeline = stage(pipeline);
  }
  // Start running the machine by iterating the last stage to the end.
  // for-await also ensures the iterator's return() is called on early exit.
  for await (const _chunk of pipeline) {
    // Intentionally empty - draining only.
  }
}
1426
+
1427
/**
 * Async-generator stage that re-assembles length-prefixed binary messages
 * from an arbitrarily chunked byte stream.
 *
 * Wire format: each message is preceded by a 4-byte big-endian length.
 * Both the length header and the payload may be split across incoming
 * chunks; partial headers are accumulated in `sizeBuf` and partial payloads
 * in `bufs` until a complete message can be yielded as one Uint8Array.
 *
 * @param source async iterable of Uint8Array chunks
 */
async function* consumeChunkedBinaryStream(source) {
  let state = 0; // 0 = expecting size header, 2 = consuming payload bytes
  const sizeBuf = new Uint8Array(4); // holds a size header split across chunks
  let sizeBufPos = 0;
  let bufs = []; // payload fragments collected across chunk boundaries
  let len = 0; // remaining payload length of the current message
  for await (const chunk of source) {
    const dw = new DataView(chunk.buffer, chunk.byteOffset, chunk.byteLength);
    let pos = 0;
    while (pos < chunk.byteLength) {
      switch (state) {
        case 0:
          // Beginning of a size header
          if (pos + 4 > chunk.byteLength) {
            // Header may be split over the chunk boundary: stash the bytes we have.
            for (const b of chunk.slice(pos)) {
              if (sizeBufPos === 4) break;
              sizeBuf[sizeBufPos++] = b;
              ++pos;
            }
            if (sizeBufPos < 4) {
              // Need more bytes in order to read length.
              // The while loop exits too, because pos is definitely === chunk.byteLength here.
              break;
            }
          } else if (sizeBufPos > 0 && sizeBufPos < 4) {
            // Complete a header that was started in a previous chunk.
            for (const b of chunk.slice(pos, pos + 4 - sizeBufPos)) {
              sizeBuf[sizeBufPos++] = b;
              ++pos;
            }
          }
        // Intentional fall-through...
        case 1:
          len =
            sizeBufPos === 4
              ? new DataView(sizeBuf.buffer, 0, 4).getUint32(0, false)
              : dw.getUint32(pos, false);
          if (sizeBufPos) sizeBufPos = 0; // in this case pos is already forwarded
          else pos += 4; // else pos is not yet forwarded - that's why we do it now
        // Intentional fall-through...
        case 2:
          // Eat the chunk
          if (pos >= chunk.byteLength) {
            state = 2;
            break;
          }
          if (pos + len > chunk.byteLength) {
            // Payload continues in the next chunk: buffer this fragment.
            bufs.push(chunk.slice(pos));
            len -= chunk.byteLength - pos;
            state = 2;
            pos = chunk.byteLength; // will break while loop.
          } else {
            if (bufs.length > 0) {
              // Concatenate all buffered fragments with the final piece.
              const assembled = new Uint8Array(bufs.reduce((total, b) => total + b.byteLength, len));
              let offset = 0;
              for (const buf of bufs) {
                assembled.set(buf, offset);
                offset += buf.byteLength;
              }
              assembled.set(chunk.slice(pos, pos + len), offset);
              bufs = [];
              yield assembled;
            } else {
              yield chunk.slice(pos, pos + len);
            }
            pos += len;
            state = 0;
          }
          break;
      }
    }
  }
}
1520
+
1521
/**
 * Wrap a fetch Response body in a zero-argument async-generator factory,
 * suitable as the `source` of asyncIterablePipeline.
 *
 * The reader lock is always released, whether the stream ends normally or
 * iteration is abandoned early.
 *
 * @param res fetch Response whose body will be streamed as Uint8Array chunks
 */
function getFetchResponseBodyGenerator(res) {
  return async function* () {
    if (!res.body) throw new Error("Response body is not readable");
    const reader = res.body.getReader();
    try {
      for (;;) {
        const { done, value } = await reader.read();
        if (done) return;
        yield value;
      }
    } finally {
      reader.releaseLock();
    }
  };
}
1541
+
473
1542
/**
 * Tell whether the given value is callable.
 * @returns {boolean} true when `value` is a function.
 */
function isFunction(value) {
  const valueType = typeof value;
  return valueType === 'function';
}
@@ -3617,7 +4686,7 @@
3617
4686
  }
3618
4687
 
3619
4688
  //import {BisonWebStreamReader} from "dreambase-library/dist/typeson-simplified/BisonWebStreamReader";
3620
- function syncWithServer(changes, syncState, baseRevs, db, databaseUrl, schema, clientIdentity, currentUser) {
4689
+ function syncWithServer(changes, y, syncState, baseRevs, db, databaseUrl, schema, clientIdentity, currentUser) {
3621
4690
  return __awaiter(this, void 0, void 0, function* () {
3622
4691
  //
3623
4692
  // Push changes to server using fetch
@@ -3655,6 +4724,7 @@
3655
4724
  : undefined,
3656
4725
  baseRevs,
3657
4726
  changes: encodeIdsForServer(db.dx.core.schema, currentUser, changes),
4727
+ y,
3658
4728
  };
3659
4729
  console.debug('Sync request', syncRequest);
3660
4730
  db.syncStateChangedEvent.next({
@@ -3868,6 +4938,375 @@
3868
4938
  });
3869
4939
  }
3870
4940
 
4941
// Primary key of the per-table sync-state row stored alongside Y updates.
const DEXIE_CLOUD_SYNCER_ID = 'dexie-cloud-syncer';

/**
 * List all Y update rows whose id `i` is >= sinceIncluding.
 *
 * Uses a numeric `between(..., Infinity)` range rather than `aboveOrEqual`
 * so that the string-keyed syncer row (DEXIE_CLOUD_SYNCER_ID) is excluded
 * (strings sort above numbers in IndexedDB key order).
 *
 * @param yTable Dexie updates-table for one table+prop combination
 * @param sinceIncluding lowest update id to include (inclusive)
 * @returns promise of the matching update rows
 */
function listUpdatesSince(yTable, sinceIncluding) {
  const range = yTable.where('i').between(sinceIncluding, Infinity, true);
  return range.toArray();
}
4949
+
4950
/**
 * Retrieve the user-provided Y (Yjs) library from the Dexie constructor
 * options.
 * @throws {Error} when no Y library was supplied.
 */
function $Y$1(db) {
  const Y = db.dx._options.Y;
  if (!Y) {
    throw new Error('Y library not supplied to Dexie constructor');
  }
  return Y;
}
4956
+
4957
/** Queries the local database for YMessages to send to server.
 *
 * There are 2 messages that this function can provide:
 * YUpdateFromClientRequest ( for local updates )
 * YStateVector ( for state vector of foreign updates so that server can reduce the number of updates to send back )
 *
 * Notice that we do not do a step 1 sync phase here to get a state vector from the server. Reason we can avoid
 * the 2-step sync is that we are client-server and not client-client here and we keep track of the client changes
 * sent to server by letting server acknowledge them. There is always a chance that some client update has already
 * been sent and that the client failed to receive the ack. However, if this happens it does not matter - the change
 * would be sent again and Yjs handles duplicate changes anyway. And it's rare, so we earn the cost of roundtrips by
 * avoiding the step1 sync and instead keep track of this in the `unsentFrom` property of the SyncState.
 *
 * @param db
 * @param tablesToSync tables whose yProps should be examined
 * @returns the YMessages to send plus, per updates-table, the highest update id seen
 */
async function listYClientMessagesAndStateVector(db, tablesToSync) {
  const yMessages = [];
  const lastUpdateIds = {};
  for (const table of tablesToSync) {
    if (!table.schema.yProps) continue;
    for (const yProp of table.schema.yProps) {
      const Y = $Y$1(db); // This is how we retrieve the user-provided Y library
      const yTable = db.table(yProp.updatesTable); // the updates-table for this combo of table+propName
      const syncState = await yTable.get(DEXIE_CLOUD_SYNCER_ID);
      // unsentFrom = the `i` value of updates that aren't yet sent to server (or at least not acked by the server yet)
      const unsentFrom = syncState?.unsentFrom || 1;
      // receivedUntil = the `i` value of updates that both we and the server know we already have
      // (known from the outcome of the last syncWithServer(), because the server keeps track of its revision numbers)
      const receivedUntil = syncState?.receivedUntil || 0;
      // Compute the least value of these two (but since receivedUntil is inclusive we need to add +1 to it)
      const unsyncedFrom = Math.min(unsentFrom, receivedUntil + 1);
      // Query all these updates for all docs of this table+prop combination
      const updates = await listUpdatesSince(yTable, unsyncedFrom);
      if (updates.length > 0) {
        lastUpdateIds[yTable.name] = updates[updates.length - 1].i;
      }
      // Bucket updates per (document key, local/foreign flag), skipping local
      // updates that the server has already acked:
      const perDoc = {};
      for (const update of updates) {
        const isLocal = ((update.f || 0) & 0x01) === 0x01;
        if (isLocal && update.i < unsentFrom) continue; // This local update has already been sent and acked.
        const docKey = JSON.stringify(update.k) + '/' + isLocal;
        const entry = perDoc[docKey];
        if (entry) {
          entry.u.push(update.u);
          entry.i = Math.max(update.i, entry.i);
        } else {
          perDoc[docKey] = {
            i: update.i,
            k: update.k,
            isLocal,
            u: [update.u],
          };
        }
      }
      // Now, go through all these and:
      // * For local updates, compute a merged update per document ('u-c').
      // * For foreign updates, compute a state vector to pass to server ('sv'), so that server can
      //   avoid re-sending updates that we already have (they might have been sent over websocket
      //   and when that happens, we do not mark them in any way nor do we update receivedUntil -
      //   we only update receivedUntil after a "full sync" (syncWithServer()))
      for (const { k, isLocal, u, i } of Object.values(perDoc)) {
        const mergedUpdate = u.length === 1 ? u[0] : Y.mergeUpdatesV2(u);
        if (isLocal) {
          yMessages.push({
            type: 'u-c',
            table: table.name,
            prop: yProp.prop,
            k,
            u: mergedUpdate,
            i,
          });
        } else {
          const stateVector = Y.encodeStateVectorFromUpdateV2(mergedUpdate);
          yMessages.push({
            type: 'sv',
            table: table.name,
            prop: yProp.prop,
            k,
            sv: stateVector,
          });
        }
      }
    }
  }
  return {
    yMessages,
    lastUpdateIds,
  };
}
5054
+
5055
/**
 * Resolve the Dexie updates-table that stores Y updates for a given
 * table + ydoc property combination.
 * @throws {Error} when the table/prop combination has no updates table.
 */
function getUpdatesTable(db, table, ydocProp) {
  const yPropSpec = db.table(table)?.schema.yProps?.find((p) => p.prop === ydocProp);
  const utbl = yPropSpec?.updatesTable;
  if (!utbl) {
    throw new Error(`No updatesTable found for ${table}.${ydocProp}`);
  }
  return db.table(utbl);
}
5062
+
5063
/**
 * Apply YMessages received from the server to the local database.
 *
 * Returns a map from updates-table name to the id of the last stored server
 * update ('u-s'), used by the caller to advance `receivedUntil`.
 */
async function applyYServerMessages(yMessages, db) {
  const result = {};
  for (const m of yMessages) {
    switch (m.type) {
      case 'u-s': {
        // Server update: store it and remember the resulting update id.
        const utbl = getUpdatesTable(db, m.table, m.prop);
        result[utbl.name] = await utbl.add({
          k: m.k,
          u: m.u,
        });
        break;
      }
      case 'u-ack': {
        // Server acked a client update: advance unsentFrom past it.
        const utbl = getUpdatesTable(db, m.table, m.prop);
        await db.transaction('rw', utbl, async (tx) => {
          const syncer = await tx.table(utbl.name).get(DEXIE_CLOUD_SYNCER_ID);
          await tx.table(utbl.name).put({
            ...(syncer || { i: DEXIE_CLOUD_SYNCER_ID }),
            unsentFrom: Math.max(syncer?.unsentFrom || 1, m.i + 1),
          });
        });
        break;
      }
      case 'u-reject': {
        // Access control or constraint rejected the update.
        // We delete it. It's not going to be sent again.
        // What's missing is a way to notify consumers, such as Tiptap editor, that the update was rejected.
        // This is only an issue when the document is open. We could find the open document and
        // in a perfect world, we should send a reverse update to the open document to undo the change.
        // See my question in https://discuss.yjs.dev/t/generate-an-inverse-update/2765
        console.debug(`Y update rejected. Deleting it.`);
        const utbl = getUpdatesTable(db, m.table, m.prop);
        // Delete the rejected update and all local updates since (avoid holes in the CRDT)
        // and destroy its open document if there is one.
        const primaryKey = (await utbl.get(m.i))?.k;
        if (primaryKey != null) {
          await db.transaction('rw', utbl, (tx) => {
            // @ts-ignore
            tx.idbtrans._rejecting_y_ypdate = true; // Inform ydoc triggers that we delete because of a rejection and not GC
            return utbl
              .where('i')
              .aboveOrEqual(m.i)
              .filter((u) => Dexie.cmp(u.k, primaryKey) === 0 && ((u.f || 0) & 1) === 1)
              .delete();
          });
          // Destroy active doc so that editors don't continue to work on it.
          const activeDoc = Dexie.DexieYProvider.getDocCache(db.dx).find(m.table, primaryKey, m.prop);
          if (activeDoc) activeDoc.destroy();
        }
        break;
      }
      case 'in-sync': {
        // Server signalled that an open doc is now fully synced.
        const doc = Dexie.DexieYProvider.getDocCache(db.dx).find(m.table, m.k, m.prop);
        if (doc && !doc.isSynced) {
          doc.emit('sync', [true]);
        }
        break;
      }
    }
  }
  return result;
}
5128
+
5129
/**
 * Persist per-updates-table YSyncStates after a successful server roundtrip.
 *
 * We want to update unsentFrom for each yTable to the value given in the first argument,
 * because we got those values before we synced with the server and we are now back from a server
 * that has successfully received all those messages - no matter if the last update was a client or server update,
 * we can safely store unsentFrom as the last update id + 1 here.
 * We also want to update receivedUntil for each yTable to the value given in the second argument,
 * because that contains the highest resulting id of each update from the server after storing it.
 * We could do these two tasks separately, but that would require two update calls on the same YSyncState, so
 * to optimize the dexie calls, we merge these two maps into a single one and do a single update request
 * per yTable.
 */
async function updateYSyncStates(lastUpdateIdsBeforeSync, receivedUntilsAfterSync, db, serverRevision) {
  const mergedSpec = {};
  for (const [yTable, lastUpdateId] of Object.entries(lastUpdateIdsBeforeSync)) {
    const entry = mergedSpec[yTable] || (mergedSpec[yTable] = {});
    entry.unsentFrom = lastUpdateId + 1;
  }
  for (const [yTable, lastUpdateId] of Object.entries(receivedUntilsAfterSync)) {
    const entry = mergedSpec[yTable] || (mergedSpec[yTable] = {});
    entry.receivedUntil = lastUpdateId;
  }
  // Now go through all yTables and update their YSyncStates:
  const allYTables = Object.values(db.dx._dbSchema)
    .filter((tblSchema) => tblSchema.yProps)
    .map((tblSchema) => tblSchema.yProps.map((yProp) => yProp.updatesTable))
    .flat();
  for (const yTable of allYTables) {
    const mergedEntry = mergedSpec[yTable];
    const unsentFrom = mergedEntry?.unsentFrom ?? 1;
    // When no receivedUntil was reported, fall back to the highest local update id.
    // Reading locally is safe because we are in the same parent transaction
    // (in sync.ts) that applied all updates from the server.
    const receivedUntil =
      mergedEntry?.receivedUntil ??
      (
        await db
          .table(yTable)
          .where('i')
          .between(1, Infinity) // Because i might be string DEXIE_CLOUD_SYNCER_ID if not a number.
          .reverse()
          .limit(1)
          .primaryKeys()
      )[0] ??
      0;
    // We're already in a transaction, but for the sake of
    // code readability and correctness, let's launch an atomic sub transaction:
    await db.transaction('rw', yTable, async () => {
      const state = await db.table(yTable).get(DEXIE_CLOUD_SYNCER_ID);
      if (!state) {
        await db.table(yTable).add({
          i: DEXIE_CLOUD_SYNCER_ID,
          unsentFrom,
          receivedUntil,
          serverRev: serverRevision,
        });
      } else {
        state.unsentFrom = Math.max(unsentFrom, state.unsentFrom || 1);
        state.receivedUntil = Math.max(receivedUntil, state.receivedUntil || 0);
        state.serverRev = serverRevision;
        await db.table(yTable).put(state);
      }
    });
  }
}
5192
+
5193
// Record-type tags of the binary download stream from <databaseUrl>/y/download:
const BINSTREAM_TYPE_REALMID = 1;
const BINSTREAM_TYPE_TABLE_AND_PROP = 2;
const BINSTREAM_TYPE_DOCUMENT = 3;

/**
 * Download all Y documents of realms not yet marked as fully downloaded
 * (`yDownloadedRealms[realmId] === '*'`) and store them locally.
 *
 * Progress is checkpointed per realm/table/prop/key in
 * syncState.yDownloadedRealms so an interrupted download resumes where it
 * left off.
 */
async function downloadYDocsFromServer(db, databaseUrl, { yDownloadedRealms, realms }) {
  if (
    yDownloadedRealms &&
    realms &&
    realms.every((realmId) => yDownloadedRealms[realmId] === '*')
  ) {
    return; // Already done!
  }
  console.debug('Downloading Y.Docs from added realms');
  const user = await loadAccessToken(db);
  const headers = {
    'Content-Type': 'application/json',
    Accept: 'application/octet-stream',
  };
  if (user) {
    headers.Authorization = `Bearer ${user.accessToken}`;
  }
  const res = await fetch(`${databaseUrl}/y/download`, {
    body: TSON.stringify({ downloadedRealms: yDownloadedRealms || {} }),
    method: 'POST',
    headers,
    credentials: 'include',
  });
  if (!res.ok) {
    throw new Error(`Failed to download Yjs documents from server. Status: ${res.status}`);
  }
  await asyncIterablePipeline(
    getFetchResponseBodyGenerator(res),
    consumeChunkedBinaryStream,
    consumeDownloadChunks
  );

  /** Sink stage: decode download records and persist them batch-wise. */
  async function* consumeDownloadChunks(chunks) {
    let currentRealmId = null;
    let currentTable = null;
    let currentProp = null;
    let docsToInsert = [];

    /** Flush collected docs and checkpoint download progress in syncState. */
    async function storeCollectedDocs(completedRealm) {
      const lastDoc = docsToInsert[docsToInsert.length - 1];
      if (docsToInsert.length > 0) {
        if (!currentRealmId || !currentTable || !currentProp) {
          throw new Error(`Protocol error from ${databaseUrl}/y/download`);
        }
        const yTable = getUpdatesTable(db, currentTable, currentProp);
        await yTable.bulkAdd(docsToInsert);
        docsToInsert = [];
      }
      if (
        currentRealmId &&
        ((currentTable && currentProp && lastDoc) || completedRealm)
      ) {
        await db.$syncState.update('syncState', (syncState) => {
          const yDownloadedRealms = syncState.yDownloadedRealms || {};
          // '*' marks a fully downloaded realm; otherwise remember the
          // exact resume point (table, prop, last stored key).
          yDownloadedRealms[currentRealmId] = completedRealm
            ? '*'
            : {
                tbl: currentTable,
                prop: currentProp,
                key: lastDoc.k,
              };
          syncState.yDownloadedRealms = yDownloadedRealms;
        });
      }
    }

    try {
      for await (const chunk of chunks) {
        const decoder = new Decoder(chunk);
        while (hasContent(decoder)) {
          switch (readUint8(decoder)) {
            case BINSTREAM_TYPE_REALMID:
              await storeCollectedDocs(true); // previous realm is complete
              currentRealmId = readVarString(decoder);
              break;
            case BINSTREAM_TYPE_TABLE_AND_PROP:
              await storeCollectedDocs(false); // still on same realm
              currentTable = readVarString(decoder);
              currentProp = readVarString(decoder);
              break;
            case BINSTREAM_TYPE_DOCUMENT: {
              const k = readAny(decoder);
              const u = readVarUint8Array(decoder);
              docsToInsert.push({
                k,
                u,
              });
              break;
            }
          }
        }
        await storeCollectedDocs(false); // Chunk full - might still be on same realm
      }
      await storeCollectedDocs(true); // Everything downloaded - finalize last downloaded realm to "*"
    } catch (error) {
      if (!(error instanceof Dexie.DexieError)) {
        // Network error might have happened.
        // Store what we've collected so far:
        await storeCollectedDocs(false);
      }
      throw error;
    }
  }
}
5309
+
3871
5310
  const CURRENT_SYNC_WORKER = 'currentSyncWorker';
3872
5311
  function sync(db, options, schema, syncOptions) {
3873
5312
  return _sync
@@ -3956,10 +5395,11 @@
3956
5395
  //
3957
5396
  // List changes to sync
3958
5397
  //
3959
- const [clientChangeSet, syncState, baseRevs] = yield db.transaction('r', db.tables, () => __awaiter(this, void 0, void 0, function* () {
5398
+ const [clientChangeSet, syncState, baseRevs, { yMessages, lastUpdateIds }] = yield db.transaction('r', db.tables, () => __awaiter(this, void 0, void 0, function* () {
3960
5399
  const syncState = yield db.getPersistedSyncState();
3961
5400
  const baseRevs = yield db.$baseRevs.toArray();
3962
5401
  let clientChanges = yield listClientChanges(mutationTables);
5402
+ const yResults = yield listYClientMessagesAndStateVector(db, tablesToSync);
3963
5403
  throwIfCancelled(cancelToken);
3964
5404
  if (doSyncify) {
3965
5405
  const alreadySyncedRealms = [
@@ -3969,11 +5409,11 @@
3969
5409
  const syncificationInserts = yield listSyncifiedChanges(tablesToSyncify, currentUser, schema, alreadySyncedRealms);
3970
5410
  throwIfCancelled(cancelToken);
3971
5411
  clientChanges = clientChanges.concat(syncificationInserts);
3972
- return [clientChanges, syncState, baseRevs];
5412
+ return [clientChanges, syncState, baseRevs, yResults];
3973
5413
  }
3974
- return [clientChanges, syncState, baseRevs];
5414
+ return [clientChanges, syncState, baseRevs, yResults];
3975
5415
  }));
3976
- const pushSyncIsNeeded = clientChangeSet.some((set) => set.muts.some((mut) => mut.keys.length > 0));
5416
+ const pushSyncIsNeeded = clientChangeSet.some((set) => set.muts.some((mut) => mut.keys.length > 0)) || yMessages.some(m => m.type === 'u-c');
3977
5417
  if (justCheckIfNeeded) {
3978
5418
  console.debug('Sync is needed:', pushSyncIsNeeded);
3979
5419
  return pushSyncIsNeeded;
@@ -3988,12 +5428,12 @@
3988
5428
  // Push changes to server
3989
5429
  //
3990
5430
  throwIfCancelled(cancelToken);
3991
- const res = yield syncWithServer(clientChangeSet, syncState, baseRevs, db, databaseUrl, schema, clientIdentity, currentUser);
5431
+ const res = yield syncWithServer(clientChangeSet, yMessages, syncState, baseRevs, db, databaseUrl, schema, clientIdentity, currentUser);
3992
5432
  console.debug('Sync response', res);
3993
5433
  //
3994
5434
  // Apply changes locally and clear old change entries:
3995
5435
  //
3996
- const done = yield db.transaction('rw', db.tables, (tx) => __awaiter(this, void 0, void 0, function* () {
5436
+ const { done, newSyncState } = yield db.transaction('rw', db.tables, (tx) => __awaiter(this, void 0, void 0, function* () {
3997
5437
  // @ts-ignore
3998
5438
  tx.idbtrans.disableChangeTracking = true;
3999
5439
  // @ts-ignore
@@ -4085,17 +5525,35 @@
4085
5525
  // apply server changes
4086
5526
  //
4087
5527
  yield applyServerChanges(filteredChanges, db);
5528
+ if (res.yMessages) {
5529
+ //
5530
+ // apply yMessages
5531
+ //
5532
+ const receivedUntils = yield applyYServerMessages(res.yMessages, db);
5533
+ //
5534
+ // update Y SyncStates
5535
+ //
5536
+ yield updateYSyncStates(lastUpdateIds, receivedUntils, db, res.serverRevision);
5537
+ }
4088
5538
  //
4089
- // Update syncState
5539
+ // Update regular syncState
4090
5540
  //
4091
5541
  db.$syncState.put(newSyncState, 'syncState');
4092
- return addedClientChanges.length === 0;
5542
+ return {
5543
+ done: addedClientChanges.length === 0,
5544
+ newSyncState
5545
+ };
4093
5546
  }));
4094
5547
  if (!done) {
4095
5548
  console.debug('MORE SYNC NEEDED. Go for it again!');
4096
5549
  yield checkSyncRateLimitDelay(db);
4097
5550
  return yield _sync(db, options, schema, { isInitialSync, cancelToken });
4098
5551
  }
5552
+ const usingYProps = Object.values(schema).some(tbl => { var _a; return (_a = tbl.yProps) === null || _a === void 0 ? void 0 : _a.length; });
5553
+ const serverSupportsYprops = !!res.yMessages;
5554
+ if (usingYProps && serverSupportsYprops) {
5555
+ yield downloadYDocsFromServer(db, databaseUrl, newSyncState);
5556
+ }
4099
5557
  console.debug('SYNC DONE', { isInitialSync });
4100
5558
  db.syncCompleteEvent.next();
4101
5559
  return false; // Not needed anymore
@@ -4148,6 +5606,18 @@
4148
5606
  }
4149
5607
  }
4150
5608
  }
5609
+ if (rejectedRealms.size > 0) {
5610
+ // Remove rejected/deleted realms from yDownloadedRealms because of the following use case:
5611
+ // 1. User becomes added to the realm
5612
+ // 2. User syncs and all documents of the realm is downloaded (downloadYDocsFromServer.ts)
5613
+ // 3. User leaves the realm and all docs are deleted locally (built-in-trigger of deleting their rows in this file)
5614
+ // 4. User is yet again added to the realm. At this point, we must make sure the docs are not considered already downloaded.
5615
+ const updateSpec = {};
5616
+ for (const realmId of rejectedRealms) {
5617
+ updateSpec[`yDownloadedRealms.${realmId}`] = undefined; // Setting to undefined will delete the property
5618
+ }
5619
+ yield db.$syncState.update('syncState', updateSpec);
5620
+ }
4151
5621
  });
4152
5622
  }
4153
5623
  function filterServerChangesThroughAddedClientChanges(serverChanges, addedClientChanges) {
@@ -4457,6 +5927,7 @@
4457
5927
  };
4458
5928
  Object.assign(db, helperMethods);
4459
5929
  db.messageConsumer = MessagesFromServerConsumer(db);
5930
+ db.messageProducer = new rxjs.Subject();
4460
5931
  wm.set(dx.cloud, db);
4461
5932
  }
4462
5933
  return db;
@@ -4486,24 +5957,6 @@
4486
5957
  const DISABLE_SERVICEWORKER_STRATEGY = (isSafari && safariVersion <= 605) || // Disable for Safari for now.
4487
5958
  isFirefox; // Disable for Firefox for now. Seems to have a bug in reading CryptoKeys from IDB from service workers
4488
5959
 
4489
- /* Helper function to subscribe to database close no matter if it was unexpectedly closed or manually using db.close()
4490
- */
4491
- function dbOnClosed(db, handler) {
4492
- db.on.close.subscribe(handler);
4493
- // @ts-ignore
4494
- const origClose = db._close;
4495
- // @ts-ignore
4496
- db._close = function () {
4497
- origClose.call(this);
4498
- handler();
4499
- };
4500
- return () => {
4501
- db.on.close.unsubscribe(handler);
4502
- // @ts-ignore
4503
- db._close = origClose;
4504
- };
4505
- }
4506
-
4507
5960
  const IS_SERVICE_WORKER = typeof self !== "undefined" && "clients" in self && !self.document;
4508
5961
 
4509
5962
  function throwVersionIncrementNeeded() {
@@ -4969,13 +6422,18 @@
4969
6422
  values = values.filter((_, idx) => !failures[idx]);
4970
6423
  }
4971
6424
  const ts = Date.now();
6425
+ // Canonicalize req.criteria.index to null if it's on the primary key.
6426
+ const criteria = 'criteria' in req && req.criteria
6427
+ ? Object.assign(Object.assign({}, req.criteria), { index: req.criteria.index === schema.primaryKey.keyPath // Use null to inform server that criteria is on primary key
6428
+ ? null // This will disable the server from trying to log consistent operations where it shouldnt.
6429
+ : req.criteria.index }) : undefined;
4972
6430
  const mut = req.type === 'delete'
4973
6431
  ? {
4974
6432
  type: 'delete',
4975
6433
  ts,
4976
6434
  opNo,
4977
6435
  keys,
4978
- criteria: req.criteria,
6436
+ criteria,
4979
6437
  txid,
4980
6438
  userId,
4981
6439
  }
@@ -4989,14 +6447,14 @@
4989
6447
  userId,
4990
6448
  values,
4991
6449
  }
4992
- : req.criteria && req.changeSpec
6450
+ : criteria && req.changeSpec
4993
6451
  ? {
4994
6452
  // Common changeSpec for all keys
4995
6453
  type: 'modify',
4996
6454
  ts,
4997
6455
  opNo,
4998
6456
  keys,
4999
- criteria: req.criteria,
6457
+ criteria,
5000
6458
  changeSpec: req.changeSpec,
5001
6459
  txid,
5002
6460
  userId,
@@ -5024,7 +6482,7 @@
5024
6482
  if ('isAdditionalChunk' in req && req.isAdditionalChunk) {
5025
6483
  mut.isAdditionalChunk = true;
5026
6484
  }
5027
- return keys.length > 0 || ('criteria' in req && req.criteria)
6485
+ return keys.length > 0 || criteria
5028
6486
  ? mutsTable
5029
6487
  .mutate({ type: 'add', trans, values: [mut] }) // Log entry
5030
6488
  .then(() => res) // Return original response
@@ -5038,6 +6496,7 @@
5038
6496
 
5039
6497
  function overrideParseStoresSpec(origFunc, dexie) {
5040
6498
  return function (stores, dbSchema) {
6499
+ var _a;
5041
6500
  const storesClone = Object.assign(Object.assign({}, DEXIE_CLOUD_SCHEMA), stores);
5042
6501
  // Merge indexes of DEXIE_CLOUD_SCHEMA with stores
5043
6502
  Object.keys(DEXIE_CLOUD_SCHEMA).forEach((tableName) => {
@@ -5098,6 +6557,14 @@
5098
6557
  }
5099
6558
  });
5100
6559
  const rv = origFunc.call(this, storesClone, dbSchema);
6560
+ for (const [tableName, spec] of Object.entries(dbSchema)) {
6561
+ if ((_a = spec.yProps) === null || _a === void 0 ? void 0 : _a.length) {
6562
+ const cloudTableSchema = cloudSchema[tableName];
6563
+ if (cloudTableSchema) {
6564
+ cloudTableSchema.yProps = spec.yProps.map((yProp) => yProp.prop);
6565
+ }
6566
+ }
6567
+ }
5101
6568
  return rv;
5102
6569
  };
5103
6570
  }
@@ -5183,31 +6650,90 @@
5183
6650
  }
5184
6651
  }
5185
6652
 
6653
+ function createYClientUpdateObservable(db) {
6654
+ const yTableRecords = flatten(db.tables
6655
+ .filter((table) => { var _a, _b; return ((_b = (_a = db.cloud.schema) === null || _a === void 0 ? void 0 : _a[table.name]) === null || _b === void 0 ? void 0 : _b.markedForSync) && table.schema.yProps; })
6656
+ .map((table) => table.schema.yProps.map((p) => ({
6657
+ table: table.name,
6658
+ ydocProp: p.prop,
6659
+ updatesTable: p.updatesTable,
6660
+ }))));
6661
+ return rxjs.merge(...yTableRecords.map(({ table, ydocProp, updatesTable }) => {
6662
+ // Per updates table (table+prop combo), we first read syncer.unsentFrom,
6663
+ // and then start listening for updates since that number.
6664
+ const yTbl = db.table(updatesTable);
6665
+ return rxjs.from(yTbl.get(DEXIE_CLOUD_SYNCER_ID)).pipe(rxjs.switchMap((syncer) => {
6666
+ let currentUnsentFrom = (syncer === null || syncer === void 0 ? void 0 : syncer.unsentFrom) || 1;
6667
+ return rxjs.from(Dexie.liveQuery(() => __awaiter(this, void 0, void 0, function* () {
6668
+ const addedUpdates = yield listUpdatesSince(yTbl, currentUnsentFrom);
6669
+ return addedUpdates
6670
+ .filter((update) => update.f && update.f & 1) // Only include local updates
6671
+ .map((update) => {
6672
+ return {
6673
+ type: 'u-c',
6674
+ table,
6675
+ prop: ydocProp,
6676
+ k: update.k,
6677
+ u: update.u,
6678
+ i: update.i,
6679
+ };
6680
+ });
6681
+ }))).pipe(rxjs.tap((addedUpdates) => {
6682
+ // Update currentUnsentFrom to only listen for updates that will be newer than the ones we emitted.
6683
+ // (Before, we did this within the liveQuery, but that caused a bug because
6684
+ // a cancelled emittion of a liveQuery would update the currentUnsentFrom without
6685
+ // emitting anything, leading to that we jumped over some updates. Here we update it
6686
+ // after the liveQuery has emitted its updates)
6687
+ if (addedUpdates.length > 0) {
6688
+ currentUnsentFrom = addedUpdates.at(-1).i + 1;
6689
+ }
6690
+ }));
6691
+ }));
6692
+ })).pipe(
6693
+ // Flatten the array of messages.
6694
+ // If messageProducer emits empty array, nothing is emitted
6695
+ // but if messageProducer emits array of messages, they are
6696
+ // emitted one by one.
6697
+ rxjs.mergeMap((messages) => messages), rxjs.tap((message) => {
6698
+ console.debug('dexie-cloud emitting y-c', message);
6699
+ }));
6700
+ }
6701
+
6702
+ function getAwarenessLibrary(db) {
6703
+ var _a, _b;
6704
+ if (!((_a = db.cloud.options) === null || _a === void 0 ? void 0 : _a.awarenessProtocol)) {
6705
+ throw new Dexie.MissingAPIError('awarenessProtocol was not provided to db.cloud.configure(). Please import * as awarenessProtocol from "y-protocols/awareness".');
6706
+ }
6707
+ return (_b = db.cloud.options) === null || _b === void 0 ? void 0 : _b.awarenessProtocol;
6708
+ }
6709
+ const awarenessWeakMap = new WeakMap();
6710
+ const getDocAwareness = (doc) => awarenessWeakMap.get(doc);
6711
+
5186
6712
  const SERVER_PING_TIMEOUT = 20000;
5187
6713
  const CLIENT_PING_INTERVAL = 30000;
5188
6714
  const FAIL_RETRY_WAIT_TIME = 60000;
5189
6715
  class WSObservable extends rxjs.Observable {
5190
- constructor(databaseUrl, rev, realmSetHash, clientIdentity, messageProducer, webSocketStatus, token, tokenExpiration) {
5191
- super((subscriber) => new WSConnection(databaseUrl, rev, realmSetHash, clientIdentity, token, tokenExpiration, subscriber, messageProducer, webSocketStatus));
6716
+ constructor(db, rev, realmSetHash, clientIdentity, messageProducer, webSocketStatus, user) {
6717
+ super((subscriber) => new WSConnection(db, rev, realmSetHash, clientIdentity, user, subscriber, messageProducer, webSocketStatus));
5192
6718
  }
5193
6719
  }
5194
6720
  let counter = 0;
5195
6721
  class WSConnection extends rxjs.Subscription {
5196
- constructor(databaseUrl, rev, realmSetHash, clientIdentity, token, tokenExpiration, subscriber, messageProducer, webSocketStatus) {
6722
+ constructor(db, rev, realmSetHash, clientIdentity, user, subscriber, messageProducer, webSocketStatus) {
5197
6723
  super(() => this.teardown());
5198
6724
  this.id = ++counter;
6725
+ this.subscriptions = new Set();
5199
6726
  this.reconnecting = false;
5200
- console.debug('New WebSocket Connection', this.id, token ? 'authorized' : 'unauthorized');
5201
- this.databaseUrl = databaseUrl;
6727
+ console.debug('New WebSocket Connection', this.id, user.accessToken ? 'authorized' : 'unauthorized');
6728
+ this.db = db;
6729
+ this.databaseUrl = db.cloud.options.databaseUrl;
5202
6730
  this.rev = rev;
5203
6731
  this.realmSetHash = realmSetHash;
5204
6732
  this.clientIdentity = clientIdentity;
5205
- this.token = token;
5206
- this.tokenExpiration = tokenExpiration;
6733
+ this.user = user;
5207
6734
  this.subscriber = subscriber;
5208
6735
  this.lastUserActivity = new Date();
5209
6736
  this.messageProducer = messageProducer;
5210
- this.messageProducerSubscription = null;
5211
6737
  this.webSocketStatus = webSocketStatus;
5212
6738
  this.connect();
5213
6739
  }
@@ -5228,10 +6754,10 @@
5228
6754
  catch (_a) { }
5229
6755
  }
5230
6756
  this.ws = null;
5231
- if (this.messageProducerSubscription) {
5232
- this.messageProducerSubscription.unsubscribe();
5233
- this.messageProducerSubscription = null;
6757
+ for (const sub of this.subscriptions) {
6758
+ sub.unsubscribe();
5234
6759
  }
6760
+ this.subscriptions.clear();
5235
6761
  }
5236
6762
  reconnect() {
5237
6763
  if (this.reconnecting)
@@ -5264,7 +6790,8 @@
5264
6790
  //console.debug('SyncStatus: DUBB: Ooops it was closed!');
5265
6791
  return;
5266
6792
  }
5267
- if (this.tokenExpiration && this.tokenExpiration < new Date()) {
6793
+ const tokenExpiration = this.user.accessTokenExpiration;
6794
+ if (tokenExpiration && tokenExpiration < new Date()) {
5268
6795
  this.subscriber.error(new TokenExpiredError()); // Will be handled in connectWebSocket.ts.
5269
6796
  return;
5270
6797
  }
@@ -5319,13 +6846,13 @@
5319
6846
  searchParams.set('rev', this.rev);
5320
6847
  searchParams.set('realmsHash', this.realmSetHash);
5321
6848
  searchParams.set('clientId', this.clientIdentity);
5322
- if (this.token) {
5323
- searchParams.set('token', this.token);
6849
+ if (this.user.accessToken) {
6850
+ searchParams.set('token', this.user.accessToken);
5324
6851
  }
5325
6852
  // Connect the WebSocket to given url:
5326
6853
  console.debug('dexie-cloud WebSocket create');
5327
6854
  const ws = (this.ws = new WebSocket(`${wsUrl}/changes?${searchParams}`));
5328
- //ws.binaryType = "arraybuffer"; // For future when subscribing to actual changes.
6855
+ ws.binaryType = "arraybuffer";
5329
6856
  ws.onclose = (event) => {
5330
6857
  if (!this.pinger)
5331
6858
  return;
@@ -5335,17 +6862,33 @@
5335
6862
  ws.onmessage = (event) => {
5336
6863
  if (!this.pinger)
5337
6864
  return;
5338
- console.debug('dexie-cloud WebSocket onmessage', event.data);
5339
6865
  this.lastServerActivity = new Date();
5340
6866
  try {
5341
- const msg = TSON.parse(event.data);
6867
+ const msg = typeof event.data === 'string'
6868
+ ? TSON.parse(event.data)
6869
+ : decodeYMessage(new Uint8Array(event.data));
6870
+ console.debug('dexie-cloud WebSocket onmessage', msg.type, msg);
5342
6871
  if (msg.type === 'error') {
5343
6872
  throw new Error(`Error message from dexie-cloud: ${msg.error}`);
5344
6873
  }
5345
- if (msg.type === 'rev') {
6874
+ else if (msg.type === 'rev') {
5346
6875
  this.rev = msg.rev; // No meaning but seems reasonable.
5347
6876
  }
5348
- if (msg.type !== 'pong') {
6877
+ else if (msg.type === 'aware') {
6878
+ const docCache = Dexie.DexieYProvider.getDocCache(this.db.dx);
6879
+ const doc = docCache.find(msg.table, msg.k, msg.prop);
6880
+ if (doc) {
6881
+ const awareness = getDocAwareness(doc);
6882
+ if (awareness) {
6883
+ const awap = getAwarenessLibrary(this.db);
6884
+ awap.applyAwarenessUpdate(awareness, msg.u, 'server');
6885
+ }
6886
+ }
6887
+ }
6888
+ else if (msg.type === 'u-ack' || msg.type === 'u-reject' || msg.type === 'u-s' || msg.type === 'in-sync') {
6889
+ applyYServerMessages([msg], this.db);
6890
+ }
6891
+ else if (msg.type !== 'pong') {
5349
6892
  this.subscriber.next(msg);
5350
6893
  }
5351
6894
  }
@@ -5373,16 +6916,27 @@
5373
6916
  }
5374
6917
  };
5375
6918
  });
5376
- this.messageProducerSubscription = this.messageProducer.subscribe((msg) => {
5377
- var _a;
6919
+ this.subscriptions.add(this.messageProducer.subscribe((msg) => {
6920
+ var _a, _b;
5378
6921
  if (!this.closed) {
5379
6922
  if (msg.type === 'ready' &&
5380
6923
  this.webSocketStatus.value !== 'connected') {
5381
6924
  this.webSocketStatus.next('connected');
5382
6925
  }
5383
- (_a = this.ws) === null || _a === void 0 ? void 0 : _a.send(TSON.stringify(msg));
6926
+ console.debug('dexie-cloud WebSocket send', msg.type, msg);
6927
+ if (msg.type === 'ready') {
6928
+ (_a = this.ws) === null || _a === void 0 ? void 0 : _a.send(TSON.stringify(msg));
6929
+ }
6930
+ else {
6931
+ // If it's not a "ready" message, it's an YMessage.
6932
+ // YMessages can be sent binary encoded.
6933
+ (_b = this.ws) === null || _b === void 0 ? void 0 : _b.send(encodeYMessage(msg));
6934
+ }
5384
6935
  }
5385
- });
6936
+ }));
6937
+ if (this.user.isLoggedIn && !isEagerSyncDisabled(this.db)) {
6938
+ this.subscriptions.add(createYClientUpdateObservable(this.db).subscribe(this.db.messageProducer));
6939
+ }
5386
6940
  }
5387
6941
  catch (error) {
5388
6942
  this.pauseUntil = new Date(Date.now() + FAIL_RETRY_WAIT_TIME);
@@ -5424,7 +6978,7 @@
5424
6978
  if (!((_a = db.cloud.options) === null || _a === void 0 ? void 0 : _a.databaseUrl)) {
5425
6979
  throw new Error(`No database URL to connect WebSocket to`);
5426
6980
  }
5427
- const messageProducer = db.messageConsumer.readyToServe.pipe(filter((isReady) => isReady), // When consumer is ready for new messages, produce such a message to inform server about it
6981
+ const readyForChangesMessage = db.messageConsumer.readyToServe.pipe(filter((isReady) => isReady), // When consumer is ready for new messages, produce such a message to inform server about it
5428
6982
  switchMap(() => db.getPersistedSyncState()), // We need the info on which server revision we are at:
5429
6983
  filter((syncState) => syncState && syncState.serverRevision), // We wont send anything to server before inital sync has taken place
5430
6984
  switchMap((syncState) => __awaiter(this, void 0, void 0, function* () {
@@ -5435,6 +6989,7 @@
5435
6989
  realmSetHash: yield computeRealmSetHash(syncState)
5436
6990
  });
5437
6991
  })));
6992
+ const messageProducer = rxjs.merge(readyForChangesMessage, db.messageProducer);
5438
6993
  function createObservable() {
5439
6994
  return db.cloud.persistedSyncState.pipe(filter((syncState) => syncState === null || syncState === void 0 ? void 0 : syncState.serverRevision), // Don't connect before there's no initial sync performed.
5440
6995
  take(1), // Don't continue waking up whenever syncState change
@@ -5461,7 +7016,7 @@
5461
7016
  // If no new entries, server won't bother the client. If new entries, server sends only those
5462
7017
  // and the baseRev of the last from same client-ID.
5463
7018
  if (userLogin) {
5464
- return new WSObservable(db.cloud.options.databaseUrl, db.cloud.persistedSyncState.value.serverRevision, realmSetHash, db.cloud.persistedSyncState.value.clientIdentity, messageProducer, db.cloud.webSocketStatus, userLogin.accessToken, userLogin.accessTokenExpiration);
7019
+ return new WSObservable(db, db.cloud.persistedSyncState.value.serverRevision, realmSetHash, db.cloud.persistedSyncState.value.clientIdentity, messageProducer, db.cloud.webSocketStatus, userLogin);
5465
7020
  }
5466
7021
  else {
5467
7022
  return rxjs.from([]);
@@ -6267,6 +7822,142 @@
6267
7822
  })), []);
6268
7823
  });
6269
7824
 
7825
+ function createYHandler(db) {
7826
+ return (provider) => {
7827
+ var _a;
7828
+ const doc = provider.doc;
7829
+ const { parentTable } = doc.meta || {};
7830
+ if (!((_a = db.cloud.schema) === null || _a === void 0 ? void 0 : _a[parentTable].markedForSync)) {
7831
+ return; // The table that holds the doc is not marked for sync - leave it to dexie. No syncing, no awareness.
7832
+ }
7833
+ let awareness;
7834
+ Object.defineProperty(provider, "awareness", {
7835
+ get() {
7836
+ if (awareness)
7837
+ return awareness;
7838
+ awarenessWeakMap.set(doc, awareness);
7839
+ awareness = createAwareness(db, doc, provider);
7840
+ return awareness;
7841
+ }
7842
+ });
7843
+ };
7844
+ }
7845
+ function createAwareness(db, doc, provider) {
7846
+ const { parentTable, parentId, parentProp, updatesTable } = doc.meta;
7847
+ const awap = getAwarenessLibrary(db);
7848
+ const awareness = new awap.Awareness(doc);
7849
+ awareness.on('update', ({ added, updated, removed }, origin) => {
7850
+ // Send the update
7851
+ const changedClients = added.concat(updated).concat(removed);
7852
+ const user = db.cloud.currentUser.value;
7853
+ if (origin !== 'server' && user.isLoggedIn && !isEagerSyncDisabled(db)) {
7854
+ const update = awap.encodeAwarenessUpdate(awareness, changedClients);
7855
+ db.messageProducer.next({
7856
+ type: 'aware',
7857
+ table: parentTable,
7858
+ prop: parentProp,
7859
+ k: doc.meta.parentId,
7860
+ u: update,
7861
+ });
7862
+ if (provider.destroyed) {
7863
+ // We're called from awareness.on('destroy') that did
7864
+ // removeAwarenessStates.
7865
+ // It's time to also send the doc-close message that dexie-cloud understands
7866
+ // and uses to stop subscribing for updates and awareness updates and brings
7867
+ // down the cached information in memory on the WS connection for this.
7868
+ db.messageProducer.next({
7869
+ type: 'doc-close',
7870
+ table: parentTable,
7871
+ prop: parentProp,
7872
+ k: doc.meta.parentId
7873
+ });
7874
+ }
7875
+ }
7876
+ });
7877
+ awareness.on('destroy', () => {
7878
+ // Signal to server that this provider is destroyed (the update event will be triggered, which
7879
+ // in turn will trigger db.messageProducer that will send the message to the server if WS is connected)
7880
+ awap.removeAwarenessStates(awareness, [doc.clientID], 'provider destroyed');
7881
+ });
7882
+ // Open the document on the server
7883
+ (() => __awaiter(this, void 0, void 0, function* () {
7884
+ if (provider.destroyed)
7885
+ return;
7886
+ let connected = false;
7887
+ let currentFlowId = 1;
7888
+ const subscription = db.cloud.webSocketStatus.subscribe((wsStatus) => {
7889
+ if (provider.destroyed)
7890
+ return;
7891
+ // Keep "connected" state in a variable so we can check it after async operations
7892
+ connected = wsStatus === 'connected';
7893
+ // We are or got connected. Open the document on the server.
7894
+ const user = db.cloud.currentUser.value;
7895
+ if (wsStatus === "connected" && user.isLoggedIn && !isEagerSyncDisabled(db)) {
7896
+ ++currentFlowId;
7897
+ openDocumentOnServer().catch(error => {
7898
+ console.warn(`Error catched in createYHandler.ts: ${error}`);
7899
+ });
7900
+ }
7901
+ });
7902
+ // Wait until WebSocket is connected
7903
+ provider.addCleanupHandler(subscription);
7904
+ /** Sends an 'doc-open' message to server whenever websocket becomes
7905
+ * connected, or if it is already connected.
7906
+ * The flow is aborted in case websocket is disconnected while querying
7907
+ * information required to compute the state vector. Flow is also
7908
+ * aborted in case document or provider has been destroyed during
7909
+ * the async parts of the task.
7910
+ *
7911
+ * The state vector is only computed from the updates that have occured
7912
+ * after the last full sync - which could very often be zero - in which
7913
+ * case no state vector is sent (then the server already knows us by
7914
+ * revision)
7915
+ *
7916
+ * When server gets the doc-open message, it will authorized us for
7917
+ * whether we are allowed to read / write to this document, and then
7918
+ * keep the cached information in memory on the WS connection for this
7919
+ * particular document, as well as subscribe to updates and awareness updates
7920
+ * from other clients on the document.
7921
+ */
7922
+ function openDocumentOnServer(wsStatus) {
7923
+ return __awaiter(this, void 0, void 0, function* () {
7924
+ const myFlow = currentFlowId; // So we can abort when a new flow is started
7925
+ const yTbl = db.table(updatesTable);
7926
+ const syncState = yield yTbl.get(DEXIE_CLOUD_SYNCER_ID);
7927
+ // After every await, check if we still should be working on this task.
7928
+ if (provider.destroyed || currentFlowId !== myFlow || !connected)
7929
+ return;
7930
+ const receivedUntil = (syncState === null || syncState === void 0 ? void 0 : syncState.receivedUntil) || 0;
7931
+ const docOpenMsg = {
7932
+ type: 'doc-open',
7933
+ table: parentTable,
7934
+ prop: parentProp,
7935
+ k: parentId,
7936
+ serverRev: syncState === null || syncState === void 0 ? void 0 : syncState.serverRev,
7937
+ };
7938
+ const serverUpdatesSinceLastSync = yield yTbl
7939
+ .where('i')
7940
+ .between(receivedUntil, Infinity, false)
7941
+ .filter((update) => Dexie.cmp(update.k, parentId) === 0 && // Only updates for this document
7942
+ ((update.f || 0) & 1) === 0 // Don't include local changes
7943
+ )
7944
+ .toArray();
7945
+ // After every await, check if we still should be working on this task.
7946
+ if (provider.destroyed || currentFlowId !== myFlow || !connected)
7947
+ return;
7948
+ if (serverUpdatesSinceLastSync.length > 0) {
7949
+ const Y = $Y$1(db); // Get the Yjs library from Dexie constructor options
7950
+ const mergedUpdate = Y.mergeUpdatesV2(serverUpdatesSinceLastSync.map((update) => update.u));
7951
+ const stateVector = Y.encodeStateVectorFromUpdateV2(mergedUpdate);
7952
+ docOpenMsg.sv = stateVector;
7953
+ }
7954
+ db.messageProducer.next(docOpenMsg);
7955
+ });
7956
+ }
7957
+ }))();
7958
+ return awareness;
7959
+ }
7960
+
6270
7961
  function getTiedRealmId(objectId) {
6271
7962
  return 'rlm~' + objectId;
6272
7963
  }
@@ -6303,8 +7994,9 @@
6303
7994
  if (closed)
6304
7995
  throw new Dexie.DatabaseClosedError();
6305
7996
  }
6306
- dbOnClosed(dexie, () => {
7997
+ dexie.once('close', () => {
6307
7998
  subscriptions.forEach((subscription) => subscription.unsubscribe());
7999
+ subscriptions.splice(0, subscriptions.length);
6308
8000
  closed = true;
6309
8001
  localSyncWorker && localSyncWorker.stop();
6310
8002
  localSyncWorker = null;
@@ -6313,7 +8005,7 @@
6313
8005
  const syncComplete = new rxjs.Subject();
6314
8006
  dexie.cloud = {
6315
8007
  // @ts-ignore
6316
- version: "4.0.8",
8008
+ version: "4.1.0-alpha.12",
6317
8009
  options: Object.assign({}, DEFAULT_OPTIONS),
6318
8010
  schema: null,
6319
8011
  get currentUserId() {
@@ -6459,6 +8151,7 @@
6459
8151
  throw new Error(`Internal error`); // options cannot be null if configuredProgramatically is set.
6460
8152
  const newPersistedOptions = Object.assign({}, options);
6461
8153
  delete newPersistedOptions.fetchTokens;
8154
+ delete newPersistedOptions.awarenessProtocol;
6462
8155
  yield db.$syncState.put(newPersistedOptions, 'options');
6463
8156
  }
6464
8157
  if (((_h = db.cloud.options) === null || _h === void 0 ? void 0 : _h.tryUseServiceWorker) &&
@@ -6536,6 +8229,12 @@
6536
8229
  currentUserEmitter.pipe(skip(1), take(1)),
6537
8230
  db.cloud.persistedSyncState.pipe(skip(1), take(1)),
6538
8231
  ]));
8232
+ const yHandler = createYHandler(db);
8233
+ db.dx.on('y', yHandler);
8234
+ db.dx.once('close', () => {
8235
+ var _a;
8236
+ (_a = db.dx.on.y) === null || _a === void 0 ? void 0 : _a.unsubscribe(yHandler);
8237
+ });
6539
8238
  }
6540
8239
  // HERE: If requireAuth, do athentication now.
6541
8240
  let changedUser = false;
@@ -6608,10 +8307,163 @@
6608
8307
  }
6609
8308
  }
6610
8309
  // @ts-ignore
6611
- dexieCloud.version = "4.0.8";
8310
+ dexieCloud.version = "4.1.0-alpha.12";
6612
8311
  Dexie.Cloud = dexieCloud;
6613
8312
 
8313
+ const ydocTriggers = {};
8314
+ const docIsAlreadyHooked = new WeakSet();
8315
+ const middlewares = new WeakMap();
8316
+ const createMiddleware = (db) => ({
8317
+ stack: 'dbcore',
8318
+ level: 10,
8319
+ name: 'yTriggerMiddleware',
8320
+ create: (down) => {
8321
+ return Object.assign(Object.assign({}, down), { transaction: (stores, mode, options) => {
8322
+ const idbtrans = down.transaction(stores, mode, options);
8323
+ idbtrans.addEventListener('complete', onTransactionCommitted);
8324
+ return idbtrans;
8325
+ }, table: (tblName) => {
8326
+ const coreTable = down.table(tblName);
8327
+ const triggerSpec = ydocTriggers[tblName];
8328
+ if (!triggerSpec)
8329
+ return coreTable;
8330
+ const { trigger, parentTable, prop } = triggerSpec;
8331
+ return Object.assign(Object.assign({}, coreTable), { mutate(req) {
8332
+ switch (req.type) {
8333
+ case 'add': {
8334
+ for (const obj of req.values) {
8335
+ const primaryKey = coreTable.schema.primaryKey.extractKey(obj);
8336
+ const doc = Dexie.DexieYProvider.getDocCache(db).find(parentTable, primaryKey, prop);
8337
+ if (doc) {
8338
+ if (!docIsAlreadyHooked.has(doc)) {
8339
+ hookToDoc(doc, primaryKey, trigger);
8340
+ docIsAlreadyHooked.add(doc);
8341
+ }
8342
+ }
8343
+ else {
8344
+ enqueueTrigger(tblName, primaryKey, trigger);
8345
+ }
8346
+ }
8347
+ break;
8348
+ }
8349
+ case 'delete':
8350
+ // @ts-ignore
8351
+ if (req.trans._rejecting_y_ypdate) {
8352
+ // The deletion came from a rejection, not garbage collection.
8353
+ // When that happens, let the triggers run to compute new values
8354
+ // based on the deleted updates.
8355
+ coreTable
8356
+ .getMany({
8357
+ keys: req.keys,
8358
+ trans: req.trans,
8359
+ cache: 'immutable',
8360
+ })
8361
+ .then((updates) => {
8362
+ const keySet = new Dexie.RangeSet();
8363
+ for (const { k } of updates) {
8364
+ keySet.addKey(k);
8365
+ }
8366
+ for (const key of keySet) {
8367
+ enqueueTrigger(tblName, key, trigger);
8368
+ }
8369
+ });
8370
+ }
8371
+ break;
8372
+ }
8373
+ return coreTable.mutate(req);
8374
+ } });
8375
+ } });
8376
+ },
8377
+ });
8378
+ let triggerExecPromise = null;
8379
+ let triggerScheduled = false;
8380
+ let scheduledTriggers = [];
8381
+ function $Y(db) {
8382
+ const $Y = db._options.Y;
8383
+ if (!$Y)
8384
+ throw new Error('Y library not supplied to Dexie constructor');
8385
+ return $Y;
8386
+ }
8387
+ function executeTriggers(triggersToRun) {
8388
+ return __awaiter(this, void 0, void 0, function* () {
8389
+ for (const { db, parentId, trigger, updatesTable } of triggersToRun) {
8390
+ // Load entire document into an Y.Doc instance:
8391
+ const updates = yield db
8392
+ .table(updatesTable)
8393
+ .where({ k: parentId })
8394
+ .toArray();
8395
+ const Y = $Y(db);
8396
+ const yDoc = new Y.Doc();
8397
+ for (const update of updates) {
8398
+ Y.applyUpdateV2(yDoc, update);
8399
+ }
8400
+ try {
8401
+ yield trigger(yDoc, parentId);
8402
+ }
8403
+ catch (error) {
8404
+ console.error(`Error in YDocTrigger ${error}`);
8405
+ }
8406
+ }
8407
+ });
8408
+ }
8409
+ function enqueueTrigger(updatesTable, parentId, trigger) {
8410
+ var _a;
8411
+ ((_a = scheduledTriggers[updatesTable]) !== null && _a !== void 0 ? _a : (scheduledTriggers[updatesTable] = [])).push({
8412
+ parentId,
8413
+ trigger,
8414
+ });
8415
+ }
8416
+ function onTransactionCommitted() {
8417
+ return __awaiter(this, void 0, void 0, function* () {
8418
+ if (!triggerScheduled && scheduledTriggers.length > 0) {
8419
+ triggerScheduled = true;
8420
+ if (triggerExecPromise)
8421
+ yield triggerExecPromise.catch(() => { });
8422
+ setTimeout(() => {
8423
+ // setTimeout() is to escape from Promise.PSD zones and never run within liveQueries or transaction scopes
8424
+ triggerScheduled = false;
8425
+ const triggersToRun = scheduledTriggers;
8426
+ scheduledTriggers = [];
8427
+ triggerExecPromise = executeTriggers(triggersToRun).finally(() => (triggerExecPromise = null));
8428
+ }, 0);
8429
+ }
8430
+ });
8431
+ }
8432
+ function hookToDoc(doc, parentId, trigger) {
8433
+ // From now on, keep listening to doc updates and execute the trigger when it happens there instead
8434
+ doc.on('updateV2', (update, origin) => {
8435
+ //Dexie.ignoreTransaction(()=>{
8436
+ trigger(doc, parentId);
8437
+ //});
8438
+ });
8439
+ /*
8440
+ NOT NEEDED because DexieYProvider's docCache will also listen to destroy and remove it from its cache:
8441
+ doc.on('destroy', ()=>{
8442
+ docIsAlreadyHooked.delete(doc);
8443
+ })
8444
+ */
8445
+ }
8446
+ function defineYDocTrigger(table, prop, trigger) {
8447
+ var _a, _b;
8448
+ const updatesTable = (_b = (_a = table.schema.yProps) === null || _a === void 0 ? void 0 : _a.find((p) => p.prop === prop)) === null || _b === void 0 ? void 0 : _b.updatesTable;
8449
+ if (!updatesTable)
8450
+ throw new Error(`Table ${table.name} does not have a Yjs property named ${prop}`);
8451
+ ydocTriggers[updatesTable] = {
8452
+ trigger,
8453
+ parentTable: table.name,
8454
+ prop,
8455
+ };
8456
+ const db = table.db._novip;
8457
+ let mw = middlewares.get(db);
8458
+ if (!mw) {
8459
+ mw = createMiddleware(db);
8460
+ middlewares.set(db, mw);
8461
+ }
8462
+ db.use(mw);
8463
+ }
8464
+
6614
8465
  exports.default = dexieCloud;
8466
+ exports.defineYDocTrigger = defineYDocTrigger;
6615
8467
  exports.dexieCloud = dexieCloud;
6616
8468
  exports.getTiedObjectId = getTiedObjectId;
6617
8469
  exports.getTiedRealmId = getTiedRealmId;