@automerge/automerge-repo 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (99)
  1. package/.eslintrc +28 -0
  2. package/.mocharc.json +5 -0
  3. package/README.md +298 -0
  4. package/TODO.md +54 -0
  5. package/dist/DocCollection.d.ts +44 -0
  6. package/dist/DocCollection.d.ts.map +1 -0
  7. package/dist/DocCollection.js +85 -0
  8. package/dist/DocHandle.d.ts +78 -0
  9. package/dist/DocHandle.d.ts.map +1 -0
  10. package/dist/DocHandle.js +227 -0
  11. package/dist/EphemeralData.d.ts +27 -0
  12. package/dist/EphemeralData.d.ts.map +1 -0
  13. package/dist/EphemeralData.js +28 -0
  14. package/dist/Repo.d.ts +30 -0
  15. package/dist/Repo.d.ts.map +1 -0
  16. package/dist/Repo.js +97 -0
  17. package/dist/helpers/arraysAreEqual.d.ts +2 -0
  18. package/dist/helpers/arraysAreEqual.d.ts.map +1 -0
  19. package/dist/helpers/arraysAreEqual.js +1 -0
  20. package/dist/helpers/eventPromise.d.ts +5 -0
  21. package/dist/helpers/eventPromise.d.ts.map +1 -0
  22. package/dist/helpers/eventPromise.js +6 -0
  23. package/dist/helpers/headsAreSame.d.ts +3 -0
  24. package/dist/helpers/headsAreSame.d.ts.map +1 -0
  25. package/dist/helpers/headsAreSame.js +7 -0
  26. package/dist/helpers/mergeArrays.d.ts +2 -0
  27. package/dist/helpers/mergeArrays.d.ts.map +1 -0
  28. package/dist/helpers/mergeArrays.js +15 -0
  29. package/dist/helpers/pause.d.ts +3 -0
  30. package/dist/helpers/pause.d.ts.map +1 -0
  31. package/dist/helpers/pause.js +7 -0
  32. package/dist/helpers/withTimeout.d.ts +9 -0
  33. package/dist/helpers/withTimeout.d.ts.map +1 -0
  34. package/dist/helpers/withTimeout.js +22 -0
  35. package/dist/index.d.ts +13 -0
  36. package/dist/index.d.ts.map +1 -0
  37. package/dist/index.js +10 -0
  38. package/dist/network/NetworkAdapter.d.ts +37 -0
  39. package/dist/network/NetworkAdapter.d.ts.map +1 -0
  40. package/dist/network/NetworkAdapter.js +4 -0
  41. package/dist/network/NetworkSubsystem.d.ts +23 -0
  42. package/dist/network/NetworkSubsystem.d.ts.map +1 -0
  43. package/dist/network/NetworkSubsystem.js +89 -0
  44. package/dist/storage/StorageAdapter.d.ts +6 -0
  45. package/dist/storage/StorageAdapter.d.ts.map +1 -0
  46. package/dist/storage/StorageAdapter.js +2 -0
  47. package/dist/storage/StorageSubsystem.d.ts +12 -0
  48. package/dist/storage/StorageSubsystem.d.ts.map +1 -0
  49. package/dist/storage/StorageSubsystem.js +65 -0
  50. package/dist/synchronizer/CollectionSynchronizer.d.ts +24 -0
  51. package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -0
  52. package/dist/synchronizer/CollectionSynchronizer.js +92 -0
  53. package/dist/synchronizer/DocSynchronizer.d.ts +18 -0
  54. package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -0
  55. package/dist/synchronizer/DocSynchronizer.js +136 -0
  56. package/dist/synchronizer/Synchronizer.d.ts +10 -0
  57. package/dist/synchronizer/Synchronizer.d.ts.map +1 -0
  58. package/dist/synchronizer/Synchronizer.js +3 -0
  59. package/dist/test-utilities/adapter-tests.d.ts +21 -0
  60. package/dist/test-utilities/adapter-tests.d.ts.map +1 -0
  61. package/dist/test-utilities/adapter-tests.js +117 -0
  62. package/dist/types.d.ts +10 -0
  63. package/dist/types.d.ts.map +1 -0
  64. package/dist/types.js +1 -0
  65. package/fuzz/fuzz.ts +129 -0
  66. package/package.json +65 -0
  67. package/src/DocCollection.ts +123 -0
  68. package/src/DocHandle.ts +386 -0
  69. package/src/EphemeralData.ts +46 -0
  70. package/src/Repo.ts +155 -0
  71. package/src/helpers/arraysAreEqual.ts +2 -0
  72. package/src/helpers/eventPromise.ts +10 -0
  73. package/src/helpers/headsAreSame.ts +8 -0
  74. package/src/helpers/mergeArrays.ts +17 -0
  75. package/src/helpers/pause.ts +9 -0
  76. package/src/helpers/withTimeout.ts +28 -0
  77. package/src/index.ts +22 -0
  78. package/src/network/NetworkAdapter.ts +54 -0
  79. package/src/network/NetworkSubsystem.ts +130 -0
  80. package/src/storage/StorageAdapter.ts +5 -0
  81. package/src/storage/StorageSubsystem.ts +91 -0
  82. package/src/synchronizer/CollectionSynchronizer.ts +112 -0
  83. package/src/synchronizer/DocSynchronizer.ts +182 -0
  84. package/src/synchronizer/Synchronizer.ts +15 -0
  85. package/src/test-utilities/adapter-tests.ts +163 -0
  86. package/src/types.ts +3 -0
  87. package/test/CollectionSynchronizer.test.ts +73 -0
  88. package/test/DocCollection.test.ts +19 -0
  89. package/test/DocHandle.test.ts +281 -0
  90. package/test/DocSynchronizer.test.ts +68 -0
  91. package/test/EphemeralData.test.ts +44 -0
  92. package/test/Network.test.ts +13 -0
  93. package/test/Repo.test.ts +367 -0
  94. package/test/StorageSubsystem.test.ts +78 -0
  95. package/test/helpers/DummyNetworkAdapter.ts +8 -0
  96. package/test/helpers/DummyStorageAdapter.ts +23 -0
  97. package/test/helpers/getRandomItem.ts +4 -0
  98. package/test/types.ts +3 -0
  99. package/tsconfig.json +16 -0
@@ -0,0 +1,23 @@
1
import EventEmitter from "eventemitter3";
import { InboundMessagePayload, NetworkAdapter, PeerDisconnectedPayload } from "./NetworkAdapter.js";
import { ChannelId, PeerId } from "../types.js";
/**
 * Fans a single logical network out over one or more NetworkAdapters,
 * tracking which adapter reaches which peer and re-emitting adapter
 * events ("peer", "peer-disconnected", "message") to the repo.
 */
export declare class NetworkSubsystem extends EventEmitter<NetworkSubsystemEvents> {
    #private;
    private adapters;
    /** Our own peer id; a random one is generated when not supplied to the constructor. */
    peerId: PeerId;
    constructor(adapters: NetworkAdapter[], peerId?: PeerId);
    /** Wires up an additional adapter's events and joins it to all channels joined so far. */
    addNetworkAdapter(networkAdapter: NetworkAdapter): void;
    /** Sends to one peer, or to every known peer when `broadcast` is true. */
    sendMessage(peerId: PeerId, channelId: ChannelId, message: Uint8Array, broadcast: boolean): void;
    /** Joins the channel on every adapter and remembers it for adapters added later. */
    join(channelId: ChannelId): void;
    /** Leaves the channel on every adapter and forgets it. */
    leave(channelId: ChannelId): void;
}
export interface NetworkSubsystemEvents {
    peer: (payload: PeerPayload) => void;
    "peer-disconnected": (payload: PeerDisconnectedPayload) => void;
    message: (payload: InboundMessagePayload) => void;
}
/** Payload for the "peer" event: which peer appeared, on which channel. */
export interface PeerPayload {
    peerId: PeerId;
    channelId: ChannelId;
}
//# sourceMappingURL=NetworkSubsystem.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"NetworkSubsystem.d.ts","sourceRoot":"","sources":["../../src/network/NetworkSubsystem.ts"],"names":[],"mappings":"AAAA,OAAO,YAAY,MAAM,eAAe,CAAA;AACxC,OAAO,EACL,qBAAqB,EACrB,cAAc,EACd,uBAAuB,EACxB,MAAM,qBAAqB,CAAA;AAC5B,OAAO,EAAE,SAAS,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;AAI/C,qBAAa,gBAAiB,SAAQ,YAAY,CAAC,sBAAsB,CAAC;;IAMtE,OAAO,CAAC,QAAQ;IACT,MAAM;gBADL,QAAQ,EAAE,cAAc,EAAE,EAC3B,MAAM,SAAiB;IAQhC,iBAAiB,CAAC,cAAc,EAAE,cAAc;IAsDhD,WAAW,CACT,MAAM,EAAE,MAAM,EACd,SAAS,EAAE,SAAS,EACpB,OAAO,EAAE,UAAU,EACnB,SAAS,EAAE,OAAO;IAkBpB,IAAI,CAAC,SAAS,EAAE,SAAS;IAMzB,KAAK,CAAC,SAAS,EAAE,SAAS;CAK3B;AAQD,MAAM,WAAW,sBAAsB;IACrC,IAAI,EAAE,CAAC,OAAO,EAAE,WAAW,KAAK,IAAI,CAAA;IACpC,mBAAmB,EAAE,CAAC,OAAO,EAAE,uBAAuB,KAAK,IAAI,CAAA;IAC/D,OAAO,EAAE,CAAC,OAAO,EAAE,qBAAqB,KAAK,IAAI,CAAA;CAClD;AAED,MAAM,WAAW,WAAW;IAC1B,MAAM,EAAE,MAAM,CAAA;IACd,SAAS,EAAE,SAAS,CAAA;CACrB"}
@@ -0,0 +1,89 @@
1
import EventEmitter from "eventemitter3";
import debug from "debug";
/**
 * Multiplexes one or more NetworkAdapters into a single event stream.
 * Remembers which adapter first reached each peer (for routing outbound
 * messages), re-emits adapter events upward, and replays joined channels
 * onto adapters that are added later.
 */
export class NetworkSubsystem extends EventEmitter {
    adapters;
    peerId;
    #log;
    /** peerId -> the adapter we first saw that peer on; used to route sends. */
    #adaptersByPeer = {};
    /** Channels we've joined so far; replayed in addNetworkAdapter. */
    #channels;
    constructor(adapters, peerId = randomPeerId()) {
        super();
        this.adapters = adapters;
        this.peerId = peerId;
        this.#log = debug(`automerge-repo:network:${this.peerId}`);
        this.#channels = [];
        this.adapters.forEach(a => this.addNetworkAdapter(a));
    }
    /** Connects the adapter, wires its events into this subsystem, and joins it to all known channels. */
    addNetworkAdapter(networkAdapter) {
        networkAdapter.connect(this.peerId);
        networkAdapter.on("peer-candidate", ({ peerId, channelId }) => {
            this.#log(`peer candidate: ${peerId} `);
            // TODO: This is where authentication would happen
            // First adapter to see a peer wins; later sightings don't replace it.
            if (!this.#adaptersByPeer[peerId]) {
                // TODO: handle losing a server here
                this.#adaptersByPeer[peerId] = networkAdapter;
            }
            this.emit("peer", { peerId, channelId });
        });
        networkAdapter.on("peer-disconnected", ({ peerId }) => {
            this.#log(`peer disconnected: ${peerId} `);
            delete this.#adaptersByPeer[peerId];
            this.emit("peer-disconnected", { peerId });
        });
        networkAdapter.on("message", msg => {
            const { senderId, channelId, broadcast, message } = msg;
            this.#log(`message from ${senderId}`);
            // If we receive a broadcast message from a network adapter we need to re-broadcast it to all
            // our other peers. This is the world's worst gossip protocol.
            // TODO: This relies on the network forming a tree! If there are cycles, this approach will
            // loop messages around forever.
            if (broadcast) {
                Object.entries(this.#adaptersByPeer)
                    .filter(([id]) => id !== senderId)
                    .forEach(([id, peer]) => {
                    peer.sendMessage(id, channelId, message, broadcast);
                });
            }
            this.emit("message", msg);
        });
        networkAdapter.on("close", () => {
            this.#log("adapter closed");
            // Forget every peer that was routed through the closed adapter.
            Object.entries(this.#adaptersByPeer).forEach(([peerId, other]) => {
                if (other === networkAdapter) {
                    delete this.#adaptersByPeer[peerId];
                }
            });
        });
        // Late-added adapters catch up on channels joined before they arrived.
        this.#channels.forEach(c => networkAdapter.join(c));
    }
    /** Sends to one peer, or (broadcast) to every peer we currently have a route to. */
    sendMessage(peerId, channelId, message, broadcast) {
        if (broadcast) {
            Object.entries(this.#adaptersByPeer).forEach(([id, peer]) => {
                this.#log(`sending broadcast to ${id}`);
                peer.sendMessage(id, channelId, message, true);
            });
        }
        else {
            const peer = this.#adaptersByPeer[peerId];
            if (!peer) {
                // Best-effort: unknown peers are logged and skipped, not an error.
                this.#log(`Tried to send message but peer not found: ${peerId}`);
                return;
            }
            this.#log(`Sending message to ${peerId}`);
            peer.sendMessage(peerId, channelId, message, false);
        }
    }
    /** Joins the channel on all adapters and remembers it for adapters added later. */
    join(channelId) {
        this.#log(`Joining channel ${channelId}`);
        this.#channels.push(channelId);
        this.adapters.forEach(a => a.join(channelId));
    }
    /** Leaves the channel on all adapters and forgets it. */
    leave(channelId) {
        this.#log(`Leaving channel ${channelId}`);
        this.#channels = this.#channels.filter(c => c !== channelId);
        this.adapters.forEach(a => a.leave(channelId));
    }
}
87
/** Generates a throwaway peer id of the form `user-<n>` with n in [0, 100000]. */
function randomPeerId() {
    const suffix = Math.round(Math.random() * 100000);
    return "user-" + suffix;
}
@@ -0,0 +1,6 @@
1
/**
 * Minimal persistence interface the storage subsystem writes through.
 * Keys are plain strings; in practice StorageSubsystem uses
 * `<docId>.snapshot` and `<docId>.incremental.<n>`.
 */
export declare abstract class StorageAdapter {
    /** Resolves the bytes stored under `docId`, or null when nothing is stored. */
    abstract load(docId: string): Promise<Uint8Array | null>;
    /** Persists `data` under `docId`; fire-and-forget (no completion signal). */
    abstract save(docId: string, data: Uint8Array): void;
    /** Deletes whatever is stored under `docId`. */
    abstract remove(docId: string): void;
}
//# sourceMappingURL=StorageAdapter.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"StorageAdapter.d.ts","sourceRoot":"","sources":["../../src/storage/StorageAdapter.ts"],"names":[],"mappings":"AAAA,8BAAsB,cAAc;IAClC,QAAQ,CAAC,IAAI,CAAC,KAAK,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,GAAG,IAAI,CAAC;IACxD,QAAQ,CAAC,IAAI,CAAC,KAAK,EAAE,MAAM,EAAE,IAAI,EAAE,UAAU,GAAG,IAAI;IACpD,QAAQ,CAAC,MAAM,CAAC,KAAK,EAAE,MAAM,GAAG,IAAI;CACrC"}
@@ -0,0 +1,2 @@
1
// Runtime stub for the abstract storage interface: concrete adapters supply
// load/save/remove. Abstractness exists only in the .d.ts declaration — at
// runtime this is an empty base class.
export class StorageAdapter {
}
@@ -0,0 +1,12 @@
1
import * as A from "@automerge/automerge";
import { DocumentId } from "../types.js";
import { StorageAdapter } from "./StorageAdapter.js";
/**
 * Persists Automerge documents through a StorageAdapter using a snapshot
 * plus incremental-changes scheme; compacts periodically.
 */
export declare class StorageSubsystem {
    #private;
    constructor(storageAdapter: StorageAdapter);
    /** Loads snapshot + incrementals for the document as one concatenated byte array. */
    loadBinary(documentId: DocumentId): Promise<Uint8Array>;
    /** Loads the stored document, applied on top of `prevDoc` (defaults to an empty doc). */
    load<T>(documentId: DocumentId, prevDoc?: A.Doc<T>): Promise<A.Doc<T>>;
    /** Saves incrementally, compacting to a full snapshot periodically. */
    save(documentId: DocumentId, doc: A.Doc<unknown>): void;
    /** Deletes the document's stored records. */
    remove(documentId: DocumentId): void;
}
//# sourceMappingURL=StorageSubsystem.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"StorageSubsystem.d.ts","sourceRoot":"","sources":["../../src/storage/StorageSubsystem.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,CAAC,MAAM,sBAAsB,CAAA;AACzC,OAAO,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AAExC,OAAO,EAAE,cAAc,EAAE,MAAM,qBAAqB,CAAA;AAEpD,qBAAa,gBAAgB;;gBAIf,cAAc,EAAE,cAAc;IA+BpC,UAAU,CAAC,UAAU,EAAE,UAAU,GAAG,OAAO,CAAC,UAAU,CAAC;IAqBvD,IAAI,CAAC,CAAC,EACV,UAAU,EAAE,UAAU,EACtB,OAAO,GAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAe,GAC9B,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;IAMpB,IAAI,CAAC,UAAU,EAAE,UAAU,EAAE,GAAG,EAAE,CAAC,CAAC,GAAG,CAAC,OAAO,CAAC;IAQhD,MAAM,CAAC,UAAU,EAAE,UAAU;CAY9B"}
@@ -0,0 +1,65 @@
1
import * as A from "@automerge/automerge";
import { mergeArrays } from "../helpers/mergeArrays.js";
/**
 * Persists documents through a StorageAdapter using a snapshot +
 * incremental-changes scheme: each save appends `<docId>.incremental.<n>`
 * until #shouldCompact fires, at which point a full `<docId>.snapshot` is
 * written and the superseded incrementals are deleted.
 */
export class StorageSubsystem {
    #storageAdapter;
    /** Per-document count of incremental records since the last compaction. */
    #changeCount = {};
    constructor(storageAdapter) {
        this.#storageAdapter = storageAdapter;
    }
    /** Appends the changes made since the last save as a new incremental record. */
    #saveIncremental(documentId, doc) {
        const binary = A.saveIncremental(doc);
        if (binary && binary.length > 0) {
            if (!this.#changeCount[documentId]) {
                this.#changeCount[documentId] = 0;
            }
            this.#storageAdapter.save(`${documentId}.incremental.${this.#changeCount[documentId]}`, binary);
            this.#changeCount[documentId]++;
        }
    }
    /** Writes a full snapshot and deletes the incremental records it supersedes. */
    #saveTotal(documentId, doc) {
        const binary = A.save(doc);
        this.#storageAdapter.save(`${documentId}.snapshot`, binary);
        for (let i = 0; i < this.#changeCount[documentId]; i++) {
            this.#storageAdapter.remove(`${documentId}.incremental.${i}`);
        }
        this.#changeCount[documentId] = 0;
    }
    /** Loads snapshot + all incrementals and concatenates them into one byte array. */
    async loadBinary(documentId) {
        const result = [];
        let binary = await this.#storageAdapter.load(`${documentId}.snapshot`);
        if (binary && binary.length > 0) {
            result.push(binary);
        }
        let index = 0;
        // Read incrementals until the first missing index; this also rebuilds
        // #changeCount so a later compaction knows how many records to delete.
        while ((binary = await this.#storageAdapter.load(`${documentId}.incremental.${index}`))) {
            this.#changeCount[documentId] = index + 1;
            if (binary && binary.length > 0)
                result.push(binary);
            index += 1;
        }
        return mergeArrays(result);
    }
    /** Loads a document by applying the stored bytes on top of `prevDoc` (default: empty doc). */
    async load(documentId, prevDoc = A.init()) {
        const doc = A.loadIncremental(prevDoc, await this.loadBinary(documentId));
        // Result is discarded — presumably this resets Automerge's "unsaved
        // changes" marker so the next #saveIncremental only captures changes
        // made after this load. TODO(review): confirm against the Automerge API.
        A.saveIncremental(doc);
        return doc;
    }
    /** Saves the document, compacting to a snapshot once enough incrementals pile up. */
    save(documentId, doc) {
        if (this.#shouldCompact(documentId)) {
            this.#saveTotal(documentId, doc);
        }
        else {
            this.#saveIncremental(documentId, doc);
        }
    }
    /**
     * Removes the document's stored records.
     * NOTE(review): only deletes incrementals counted in #changeCount; if
     * remove() runs before load()/loadBinary() populated that count, on-disk
     * incremental records are left behind. Confirm callers load first.
     */
    remove(documentId) {
        this.#storageAdapter.remove(`${documentId}.snapshot`);
        for (let i = 0; i < this.#changeCount[documentId]; i++) {
            this.#storageAdapter.remove(`${documentId}.incremental.${i}`);
        }
    }
    // TODO: make this, you know, good.
    #shouldCompact(documentId) {
        return this.#changeCount[documentId] >= 20;
    }
}
@@ -0,0 +1,24 @@
1
import { DocCollection } from "../DocCollection.js";
import { ChannelId, DocumentId, PeerId } from "../types.js";
import { Synchronizer } from "./Synchronizer.js";
/** A CollectionSynchronizer is responsible for synchronizing a DocCollection with peers. */
export declare class CollectionSynchronizer extends Synchronizer {
    #private;
    private repo;
    constructor(repo: DocCollection);
    /**
     * When we receive a sync message for a document we haven't got in memory, we
     * register it with the repo and start synchronizing
     */
    receiveSyncMessage(peerId: PeerId, channelId: ChannelId, message: Uint8Array): Promise<void>;
    /**
     * Starts synchronizing the given document with all peers that we share it generously with.
     */
    addDocument(documentId: DocumentId): void;
    /** Not yet implemented — throws at runtime (see the implementation's TODO). */
    removeDocument(documentId: DocumentId): void;
    /** Adds a peer and maybe starts synchronizing with them */
    addPeer(peerId: PeerId): void;
    /** Removes a peer and stops synchronizing with them */
    removePeer(peerId: PeerId): void;
}
//# sourceMappingURL=CollectionSynchronizer.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"CollectionSynchronizer.d.ts","sourceRoot":"","sources":["../../src/synchronizer/CollectionSynchronizer.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,aAAa,EAAE,MAAM,qBAAqB,CAAA;AAEnD,OAAO,EAAE,SAAS,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;AAE3D,OAAO,EAAE,YAAY,EAAE,MAAM,mBAAmB,CAAA;AAKhD,4FAA4F;AAC5F,qBAAa,sBAAuB,SAAQ,YAAY;;IAO1C,OAAO,CAAC,IAAI;gBAAJ,IAAI,EAAE,aAAa;IAiCvC;;;OAGG;IACG,kBAAkB,CACtB,MAAM,EAAE,MAAM,EACd,SAAS,EAAE,SAAS,EACpB,OAAO,EAAE,UAAU;IAgBrB;;OAEG;IACH,WAAW,CAAC,UAAU,EAAE,UAAU;IAUlC,cAAc,CAAC,UAAU,EAAE,UAAU;IAIrC,2DAA2D;IAC3D,OAAO,CAAC,MAAM,EAAE,MAAM;IAWtB,uDAAuD;IACvD,UAAU,CAAC,MAAM,EAAE,MAAM;CAQ1B"}
@@ -0,0 +1,92 @@
1
import { DocSynchronizer } from "./DocSynchronizer.js";
import { Synchronizer } from "./Synchronizer.js";
import debug from "debug";
const log = debug("automerge-repo:collectionsync");
/** A CollectionSynchronizer is responsible for synchronizing a DocCollection with peers. */
export class CollectionSynchronizer extends Synchronizer {
    repo;
    /** The set of peers we are connected with */
    #peers = new Set();
    /** A map of documentIds to their synchronizers */
    #docSynchronizers = {};
    constructor(repo) {
        super();
        this.repo = repo;
    }
    /** Returns a synchronizer for the given document, creating one if it doesn't already exist. */
    #fetchDocSynchronizer(documentId) {
        if (!this.#docSynchronizers[documentId]) {
            const handle = this.repo.find(documentId);
            this.#docSynchronizers[documentId] = this.#initDocSynchronizer(handle);
        }
        return this.#docSynchronizers[documentId];
    }
    /** Creates a new docSynchronizer and sets it up to propagate messages */
    #initDocSynchronizer(handle) {
        const docSynchronizer = new DocSynchronizer(handle);
        // Outbound messages from every doc synchronizer funnel through this emitter.
        docSynchronizer.on("message", event => this.emit("message", event));
        return docSynchronizer;
    }
    /** returns an array of peerIds that we share this document generously with */
    async #documentGenerousPeers(documentId) {
        const peers = Array.from(this.#peers);
        const generousPeers = [];
        // The repo's sharePolicy is consulted per peer, sequentially.
        for (const peerId of peers) {
            const okToShare = await this.repo.sharePolicy(peerId, documentId);
            if (okToShare)
                generousPeers.push(peerId);
        }
        return generousPeers;
    }
    // PUBLIC
    /**
     * When we receive a sync message for a document we haven't got in memory, we
     * register it with the repo and start synchronizing
     */
    async receiveSyncMessage(peerId, channelId, message) {
        log(`onSyncMessage: ${peerId}, ${channelId}, ${message.byteLength}bytes`);
        // A document's sync channel is named by its documentId.
        const documentId = channelId;
        const docSynchronizer = await this.#fetchDocSynchronizer(documentId);
        await docSynchronizer.receiveSyncMessage(peerId, channelId, message);
        // Initiate sync with any new peers
        const peers = await this.#documentGenerousPeers(documentId);
        peers
            .filter(peerId => !docSynchronizer.hasPeer(peerId))
            .forEach(peerId => docSynchronizer.beginSync(peerId));
    }
    /**
     * Starts synchronizing the given document with all peers that we share it generously with.
     */
    addDocument(documentId) {
        const docSynchronizer = this.#fetchDocSynchronizer(documentId);
        // Fire-and-forget: peer selection is async but callers don't wait on it.
        void this.#documentGenerousPeers(documentId).then(peers => {
            peers.forEach(peerId => {
                docSynchronizer.beginSync(peerId);
            });
        });
    }
    // TODO: implement this
    removeDocument(documentId) {
        throw new Error("not implemented");
    }
    /** Adds a peer and maybe starts synchronizing with them */
    addPeer(peerId) {
        log(`adding ${peerId} & synchronizing with them`);
        this.#peers.add(peerId);
        for (const docSynchronizer of Object.values(this.#docSynchronizers)) {
            const { documentId } = docSynchronizer;
            void this.repo.sharePolicy(peerId, documentId).then(okToShare => {
                if (okToShare)
                    docSynchronizer.beginSync(peerId);
            });
        }
    }
    /** Removes a peer and stops synchronizing with them */
    removePeer(peerId) {
        log(`removing peer ${peerId}`);
        this.#peers.delete(peerId);
        for (const docSynchronizer of Object.values(this.#docSynchronizers)) {
            docSynchronizer.endSync(peerId);
        }
    }
}
@@ -0,0 +1,18 @@
1
import { DocHandle } from "../DocHandle.js";
import { ChannelId, PeerId } from "../types.js";
import { Synchronizer } from "./Synchronizer.js";
/**
 * DocSynchronizer takes a handle to an Automerge document, and receives & dispatches sync messages
 * to bring it inline with all other peers' versions.
 */
export declare class DocSynchronizer extends Synchronizer {
    #private;
    private handle;
    constructor(handle: DocHandle<any>);
    /** The id of the document this synchronizer manages. */
    get documentId(): import("../types.js").DocumentId;
    /** True if we are actively syncing with this peer. */
    hasPeer(peerId: PeerId): boolean;
    /** Starts syncing with a peer once the handle's load attempt completes. */
    beginSync(peerId: PeerId): void;
    /** Stops syncing with the peer (its stored sync state is kept). */
    endSync(peerId: PeerId): void;
    /** Applies an inbound sync message; queued if the handle isn't ready yet. */
    receiveSyncMessage(peerId: PeerId, channelId: ChannelId, message: Uint8Array): void;
}
//# sourceMappingURL=DocSynchronizer.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"DocSynchronizer.d.ts","sourceRoot":"","sources":["../../src/synchronizer/DocSynchronizer.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAA;AAC3C,OAAO,EAAE,SAAS,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;AAC/C,OAAO,EAAE,YAAY,EAAE,MAAM,mBAAmB,CAAA;AAIhD;;;GAGG;AACH,qBAAa,eAAgB,SAAQ,YAAY;;IAanC,OAAO,CAAC,MAAM;gBAAN,MAAM,EAAE,SAAS,CAAC,GAAG,CAAC;IAgB1C,IAAI,UAAU,qCAEb;IAuED,OAAO,CAAC,MAAM,EAAE,MAAM;IAItB,SAAS,CAAC,MAAM,EAAE,MAAM;IAkBxB,OAAO,CAAC,MAAM,EAAE,MAAM;IAKtB,kBAAkB,CAChB,MAAM,EAAE,MAAM,EACd,SAAS,EAAE,SAAS,EACpB,OAAO,EAAE,UAAU;CAsCtB"}
@@ -0,0 +1,136 @@
1
import * as A from "@automerge/automerge";
import { Synchronizer } from "./Synchronizer.js";
import debug from "debug";
/**
 * DocSynchronizer takes a handle to an Automerge document, and receives & dispatches sync messages
 * to bring it inline with all other peers' versions.
 */
export class DocSynchronizer extends Synchronizer {
    handle;
    #log;
    #conciseLog;
    #opsLog;
    /** Active peers */
    #peers = [];
    /** Sync state for each peer we've communicated with (including inactive peers) */
    #syncStates = {};
    /** Messages that arrived before the handle finished loading; replayed once ready. */
    #pendingSyncMessages = [];
    constructor(handle) {
        super();
        this.handle = handle;
        const docId = handle.documentId.slice(0, 5);
        this.#conciseLog = debug(`automerge-repo:concise:docsync:${docId}`); // Only logs one line per receive/send
        this.#log = debug(`automerge-repo:docsync:${docId}`);
        this.#opsLog = debug(`automerge-repo:ops:docsync:${docId}`); // Log list of ops of each message
        handle.on("change", () => this.#syncWithPeers());
        // Process pending sync messages immediately after the handle becomes ready.
        void (async () => {
            await handle.loadAttemptedValue();
            this.#processAllPendingSyncMessages();
        })();
    }
    get documentId() {
        return this.handle.documentId;
    }
    /// PRIVATE
    // Generates (and sends) a sync message to every active peer after a local change.
    async #syncWithPeers() {
        this.#log(`syncWithPeers`);
        const doc = await this.handle.value();
        this.#peers.forEach(peerId => this.#sendSyncMessage(peerId, doc));
    }
    // Returns the stored sync state for the peer (or a fresh one), marking the peer active.
    #getSyncState(peerId) {
        if (!this.#peers.includes(peerId)) {
            this.#log("adding a new peer", peerId);
            this.#peers.push(peerId);
        }
        return this.#syncStates[peerId] ?? A.initSyncState();
    }
    #setSyncState(peerId, syncState) {
        // TODO: we maybe should be persisting sync states. But we want to be careful about how often we
        // do that, because it can generate a lot of disk activity.
        // TODO: we only need to do this on reconnect
        this.#syncStates[peerId] = syncState;
    }
    // Generates the next sync message for the peer and emits it, if one is needed.
    #sendSyncMessage(peerId, doc) {
        this.#log(`sendSyncMessage ->${peerId}`);
        const syncState = this.#getSyncState(peerId);
        const [newSyncState, message] = A.generateSyncMessage(doc, syncState);
        this.#setSyncState(peerId, newSyncState);
        if (message) {
            this.#logMessage(`sendSyncMessage 🡒 ${peerId}`, message);
            // The channel for a document's sync traffic is its documentId.
            const channelId = this.handle.documentId;
            this.emit("message", {
                targetId: peerId,
                channelId,
                message,
                broadcast: false,
            });
        }
        else {
            this.#log(`sendSyncMessage ->${peerId} [no message generated]`);
        }
    }
    #logMessage = (label, message) => {
        // This is real expensive...
        // NOTE(review): the early return below deliberately disables all of the
        // logging code that follows; everything after it is currently dead code.
        return;
        const size = message.byteLength;
        const logText = `${label} ${size}b`;
        const decoded = A.decodeSyncMessage(message);
        this.#conciseLog(logText);
        this.#log(logText, decoded);
        // expanding is expensive, so only do it if we're logging at this level
        const expanded = this.#opsLog.enabled
            ? decoded.changes.flatMap(change => A.decodeChange(change).ops.map(op => JSON.stringify(op)))
            : null;
        this.#opsLog(logText, expanded);
    };
    /// PUBLIC
    /** True if we are actively syncing with this peer. */
    hasPeer(peerId) {
        return this.#peers.includes(peerId);
    }
    /** Kicks off sync with a peer once the handle's load attempt has finished. */
    beginSync(peerId) {
        this.#log(`beginSync: ${peerId}`);
        // At this point if we don't have anything in our storage, we need to use an empty doc to sync
        // with; but we don't want to surface that state to the front end
        void this.handle.loadAttemptedValue().then(doc => {
            // HACK: if we have a sync state already, we round-trip it through the encoding system to make
            // sure state is preserved. This prevents an infinite loop caused by failed attempts to send
            // messages during disconnection.
            // TODO: cover that case with a test and remove this hack
            const syncStateRaw = this.#getSyncState(peerId);
            const syncState = A.decodeSyncState(A.encodeSyncState(syncStateRaw));
            this.#setSyncState(peerId, syncState);
            this.#sendSyncMessage(peerId, doc);
        });
    }
    /** Stops syncing with the peer (its stored sync state is kept for reconnects). */
    endSync(peerId) {
        this.#log(`removing peer ${peerId}`);
        this.#peers = this.#peers.filter(p => p !== peerId);
    }
    /** Applies an inbound sync message, queueing it if the handle isn't ready yet. */
    receiveSyncMessage(peerId, channelId, message) {
        if (channelId !== this.documentId)
            throw new Error(`channelId doesn't match documentId`);
        // We need to block receiving the syncMessages until we've checked local storage
        if (!this.handle.isReadyOrRequesting()) {
            this.#pendingSyncMessages.push({ peerId, message });
            return;
        }
        this.#processAllPendingSyncMessages();
        this.#processSyncMessage(peerId, message);
    }
    // Applies one sync message to the document and replies to that peer if needed.
    #processSyncMessage(peerId, message) {
        this.handle.update(doc => {
            const [newDoc, newSyncState] = A.receiveSyncMessage(doc, this.#getSyncState(peerId), message);
            this.#setSyncState(peerId, newSyncState);
            // respond to just this peer (as required)
            this.#sendSyncMessage(peerId, doc);
            return newDoc;
        });
    }
    // Replays messages that were queued while the handle was still loading.
    #processAllPendingSyncMessages() {
        for (const { peerId, message } of this.#pendingSyncMessages) {
            this.#processSyncMessage(peerId, message);
        }
        this.#pendingSyncMessages = [];
    }
}
@@ -0,0 +1,10 @@
1
import EventEmitter from "eventemitter3";
import { ChannelId, PeerId } from "../types.js";
import { MessagePayload } from "../network/NetworkAdapter.js";
/**
 * Base class for sync drivers (DocSynchronizer, CollectionSynchronizer):
 * consumes inbound sync messages and emits outbound ones as "message" events.
 */
export declare abstract class Synchronizer extends EventEmitter<SynchronizerEvents> {
    /** Handles one inbound sync message addressed to this synchronizer. */
    abstract receiveSyncMessage(peerId: PeerId, channelId: ChannelId, message: Uint8Array): void;
}
export interface SynchronizerEvents {
    message: (arg: MessagePayload) => void;
}
//# sourceMappingURL=Synchronizer.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"Synchronizer.d.ts","sourceRoot":"","sources":["../../src/synchronizer/Synchronizer.ts"],"names":[],"mappings":"AAAA,OAAO,YAAY,MAAM,eAAe,CAAA;AACxC,OAAO,EAAE,SAAS,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;AAC/C,OAAO,EAAE,cAAc,EAAE,MAAM,8BAA8B,CAAA;AAE7D,8BAAsB,YAAa,SAAQ,YAAY,CAAC,kBAAkB,CAAC;IACzE,QAAQ,CAAC,kBAAkB,CACzB,MAAM,EAAE,MAAM,EACd,SAAS,EAAE,SAAS,EACpB,OAAO,EAAE,UAAU,GAClB,IAAI;CACR;AAED,MAAM,WAAW,kBAAkB;IACjC,OAAO,EAAE,CAAC,GAAG,EAAE,cAAc,KAAK,IAAI,CAAA;CACvC"}
@@ -0,0 +1,3 @@
1
import EventEmitter from "eventemitter3";
// Runtime base for synchronizers: just an event emitter. The abstract
// receiveSyncMessage contract exists only in the .d.ts declaration.
export class Synchronizer extends EventEmitter {
}
@@ -0,0 +1,21 @@
1
import { type NetworkAdapter } from "../index.js";
/**
 * Runs a series of tests against a set of three peers, each represented by one or more instantiated
 * network adapters.
 *
 * The adapter `setup` function should return an object with the following properties:
 *
 * - `adapters`: A tuple representing three peers' network configuration. Each element can be either
 *   a single adapter or an array of adapters. Each will be used to instantiate a Repo for that
 *   peer.
 * - `teardown`: An optional function that will be called after the tests have run. This can be used
 *   to clean up any resources that were created during the test.
 */
export declare function runAdapterTests(_setup: SetupFn, title?: string): void;
/** One peer's network configuration: a single adapter or several. */
type Network = NetworkAdapter | NetworkAdapter[];
/** Async factory producing the three peers' adapters plus an optional cleanup hook. */
export type SetupFn = () => Promise<{
    adapters: [Network, Network, Network];
    teardown?: () => void;
}>;
export {};
//# sourceMappingURL=adapter-tests.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"adapter-tests.d.ts","sourceRoot":"","sources":["../../src/test-utilities/adapter-tests.ts"],"names":[],"mappings":"AAAA,OAAO,EAAgB,KAAK,cAAc,EAAa,MAAM,aAAa,CAAA;AAQ1E;;;;;;;;;;;GAWG;AACH,wBAAgB,eAAe,CAAC,MAAM,EAAE,OAAO,EAAE,KAAK,CAAC,EAAE,MAAM,GAAG,IAAI,CA2HrE;AAID,KAAK,OAAO,GAAG,cAAc,GAAG,cAAc,EAAE,CAAA;AAEhD,MAAM,MAAM,OAAO,GAAG,MAAM,OAAO,CAAC;IAClC,QAAQ,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,OAAO,CAAC,CAAA;IACrC,QAAQ,CAAC,EAAE,MAAM,IAAI,CAAA;CACtB,CAAC,CAAA"}
@@ -0,0 +1,117 @@
1
import { Repo } from "../index.js";
import { eventPromise, eventPromises, } from "../helpers/eventPromise.js";
import { assert } from "chai";
import { describe, it } from "mocha";
/**
 * Runs a series of tests against a set of three peers, each represented by one or more instantiated
 * network adapters.
 *
 * The adapter `setup` function should return an object with the following properties:
 *
 * - `adapters`: A tuple representing three peers' network configuration. Each element can be either
 *   a single adapter or an array of adapters. Each will be used to instantiate a Repo for that
 *   peer.
 * - `teardown`: An optional function that will be called after the tests have run. This can be used
 *   to clean up any resources that were created during the test.
 */
export function runAdapterTests(_setup, title) {
    // Wrap the provided setup function
    const setup = async () => {
        const { adapters, teardown = NO_OP } = await _setup();
        // these might be individual adapters or arrays of adapters; normalize them to arrays
        const [a, b, c] = adapters.map(toArray);
        return { adapters: [a, b, c], teardown };
    };
    describe(`Adapter acceptance tests ${title ? `(${title})` : ""}`, () => {
        it("can sync 2 repos", async () => {
            const doTest = async (a, b) => {
                const aliceRepo = new Repo({ network: a, peerId: alice });
                const bobRepo = new Repo({ network: b, peerId: bob });
                // Alice creates a document
                const aliceHandle = aliceRepo.create();
                // Bob receives the document
                await eventPromise(bobRepo, "document");
                const bobHandle = bobRepo.find(aliceHandle.documentId);
                // Alice changes the document
                aliceHandle.change(d => {
                    d.foo = "bar";
                });
                // Bob receives the change
                await eventPromise(bobHandle, "change");
                assert.equal((await bobHandle.value()).foo, "bar");
                // Bob changes the document
                bobHandle.change(d => {
                    d.foo = "baz";
                });
                // Alice receives the change
                await eventPromise(aliceHandle, "change");
                assert.equal((await aliceHandle.value()).foo, "baz");
            };
            // Run the test in both directions, in case they're different types of adapters
            {
                const { adapters, teardown } = await setup();
                const [x, y] = adapters;
                await doTest(x, y); // x is Alice
                teardown();
            }
            {
                const { adapters, teardown } = await setup();
                const [x, y] = adapters;
                await doTest(y, x); // y is Alice
                teardown();
            }
        });
        it("can sync 3 repos", async () => {
            const { adapters, teardown } = await setup();
            const [a, b, c] = adapters;
            const aliceRepo = new Repo({ network: a, peerId: alice });
            const bobRepo = new Repo({ network: b, peerId: bob });
            const charlieRepo = new Repo({ network: c, peerId: charlie });
            // Alice creates a document
            const aliceHandle = aliceRepo.create();
            const documentId = aliceHandle.documentId;
            // Bob and Charlie receive the document
            await eventPromises([bobRepo, charlieRepo], "document");
            const bobHandle = bobRepo.find(documentId);
            const charlieHandle = charlieRepo.find(documentId);
            // Alice changes the document
            aliceHandle.change(d => {
                d.foo = "bar";
            });
            // Bob and Charlie receive the change
            await eventPromises([bobHandle, charlieHandle], "change");
            assert.equal((await bobHandle.value()).foo, "bar");
            assert.equal((await charlieHandle.value()).foo, "bar");
            // Charlie changes the document
            charlieHandle.change(d => {
                d.foo = "baz";
            });
            // Alice and Bob receive the change
            await eventPromises([aliceHandle, bobHandle], "change");
            assert.equal((await bobHandle.value()).foo, "baz");
            assert.equal((await charlieHandle.value()).foo, "baz");
            teardown();
        });
        // TODO: with BroadcastChannel, this test never ends, because it goes into an infinite loop,
        // because the network has cycles (see #92)
        it.skip("can broadcast a message", async () => {
            const { adapters, teardown } = await setup();
            const [a, b, c] = adapters;
            const aliceRepo = new Repo({ network: a, peerId: alice });
            const bobRepo = new Repo({ network: b, peerId: bob });
            const charlieRepo = new Repo({ network: c, peerId: charlie });
            // Wait until every repo has seen at least one peer before broadcasting.
            await eventPromises([aliceRepo, bobRepo, charlieRepo].map(r => r.networkSubsystem), "peer");
            const channelId = "broadcast";
            const alicePresenceData = { presence: "alice" };
            aliceRepo.ephemeralData.broadcast(channelId, alicePresenceData);
            const { data } = await eventPromise(charlieRepo.ephemeralData, "data");
            assert.deepStrictEqual(data, alicePresenceData);
            teardown();
        });
    });
}
// Default teardown: nothing to clean up.
const NO_OP = () => { };
// Normalizes a single adapter or an array of adapters to an array.
const toArray = (x) => (Array.isArray(x) ? x : [x]);
const alice = "alice";
const bob = "bob";
const charlie = "charlie";