@automerge/automerge-repo 2.0.0-alpha.7 → 2.0.0-collectionsync-alpha.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/CollectionHandle.d.ts +14 -0
- package/dist/CollectionHandle.d.ts.map +1 -0
- package/dist/CollectionHandle.js +37 -0
- package/dist/DocHandle.d.ts +37 -6
- package/dist/DocHandle.d.ts.map +1 -1
- package/dist/DocHandle.js +64 -6
- package/dist/DocUrl.d.ts +47 -0
- package/dist/DocUrl.d.ts.map +1 -0
- package/dist/DocUrl.js +72 -0
- package/dist/EphemeralData.d.ts +20 -0
- package/dist/EphemeralData.d.ts.map +1 -0
- package/dist/EphemeralData.js +1 -0
- package/dist/Repo.d.ts +28 -7
- package/dist/Repo.d.ts.map +1 -1
- package/dist/Repo.js +142 -143
- package/dist/ferigan.d.ts +51 -0
- package/dist/ferigan.d.ts.map +1 -0
- package/dist/ferigan.js +98 -0
- package/dist/helpers/tests/storage-adapter-tests.d.ts +2 -2
- package/dist/helpers/tests/storage-adapter-tests.d.ts.map +1 -1
- package/dist/helpers/tests/storage-adapter-tests.js +19 -39
- package/dist/index.d.ts +2 -0
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +1 -0
- package/dist/network/NetworkSubsystem.d.ts +1 -0
- package/dist/network/NetworkSubsystem.d.ts.map +1 -1
- package/dist/network/NetworkSubsystem.js +3 -0
- package/dist/network/messages.d.ts +7 -1
- package/dist/network/messages.d.ts.map +1 -1
- package/dist/network/messages.js +2 -1
- package/dist/src/DocHandle.d.ts +182 -0
- package/dist/src/DocHandle.d.ts.map +1 -0
- package/dist/src/DocHandle.js +405 -0
- package/dist/src/DocUrl.d.ts +49 -0
- package/dist/src/DocUrl.d.ts.map +1 -0
- package/dist/src/DocUrl.js +72 -0
- package/dist/src/EphemeralData.d.ts +19 -0
- package/dist/src/EphemeralData.d.ts.map +1 -0
- package/dist/src/EphemeralData.js +1 -0
- package/dist/src/Repo.d.ts +74 -0
- package/dist/src/Repo.d.ts.map +1 -0
- package/dist/src/Repo.js +208 -0
- package/dist/src/helpers/arraysAreEqual.d.ts +2 -0
- package/dist/src/helpers/arraysAreEqual.d.ts.map +1 -0
- package/dist/src/helpers/arraysAreEqual.js +2 -0
- package/dist/src/helpers/cbor.d.ts +4 -0
- package/dist/src/helpers/cbor.d.ts.map +1 -0
- package/dist/src/helpers/cbor.js +8 -0
- package/dist/src/helpers/eventPromise.d.ts +11 -0
- package/dist/src/helpers/eventPromise.d.ts.map +1 -0
- package/dist/src/helpers/eventPromise.js +7 -0
- package/dist/src/helpers/headsAreSame.d.ts +2 -0
- package/dist/src/helpers/headsAreSame.d.ts.map +1 -0
- package/dist/src/helpers/headsAreSame.js +4 -0
- package/dist/src/helpers/mergeArrays.d.ts +2 -0
- package/dist/src/helpers/mergeArrays.d.ts.map +1 -0
- package/dist/src/helpers/mergeArrays.js +15 -0
- package/dist/src/helpers/pause.d.ts +6 -0
- package/dist/src/helpers/pause.d.ts.map +1 -0
- package/dist/src/helpers/pause.js +10 -0
- package/dist/src/helpers/tests/network-adapter-tests.d.ts +21 -0
- package/dist/src/helpers/tests/network-adapter-tests.d.ts.map +1 -0
- package/dist/src/helpers/tests/network-adapter-tests.js +122 -0
- package/dist/src/helpers/withTimeout.d.ts +12 -0
- package/dist/src/helpers/withTimeout.d.ts.map +1 -0
- package/dist/src/helpers/withTimeout.js +24 -0
- package/dist/src/index.d.ts +53 -0
- package/dist/src/index.d.ts.map +1 -0
- package/dist/src/index.js +40 -0
- package/dist/src/network/NetworkAdapter.d.ts +26 -0
- package/dist/src/network/NetworkAdapter.d.ts.map +1 -0
- package/dist/src/network/NetworkAdapter.js +4 -0
- package/dist/src/network/NetworkSubsystem.d.ts +23 -0
- package/dist/src/network/NetworkSubsystem.d.ts.map +1 -0
- package/dist/src/network/NetworkSubsystem.js +120 -0
- package/dist/src/network/messages.d.ts +85 -0
- package/dist/src/network/messages.d.ts.map +1 -0
- package/dist/src/network/messages.js +23 -0
- package/dist/src/storage/StorageAdapter.d.ts +14 -0
- package/dist/src/storage/StorageAdapter.d.ts.map +1 -0
- package/dist/src/storage/StorageAdapter.js +1 -0
- package/dist/src/storage/StorageSubsystem.d.ts +12 -0
- package/dist/src/storage/StorageSubsystem.d.ts.map +1 -0
- package/dist/src/storage/StorageSubsystem.js +145 -0
- package/dist/src/synchronizer/CollectionSynchronizer.d.ts +25 -0
- package/dist/src/synchronizer/CollectionSynchronizer.d.ts.map +1 -0
- package/dist/src/synchronizer/CollectionSynchronizer.js +106 -0
- package/dist/src/synchronizer/DocSynchronizer.d.ts +29 -0
- package/dist/src/synchronizer/DocSynchronizer.d.ts.map +1 -0
- package/dist/src/synchronizer/DocSynchronizer.js +263 -0
- package/dist/src/synchronizer/Synchronizer.d.ts +9 -0
- package/dist/src/synchronizer/Synchronizer.d.ts.map +1 -0
- package/dist/src/synchronizer/Synchronizer.js +2 -0
- package/dist/src/types.d.ts +16 -0
- package/dist/src/types.d.ts.map +1 -0
- package/dist/src/types.js +1 -0
- package/dist/storage/StorageAdapter.d.ts +9 -0
- package/dist/storage/StorageAdapter.d.ts.map +1 -1
- package/dist/storage/StorageAdapter.js +33 -0
- package/dist/storage/StorageSubsystem.d.ts +12 -2
- package/dist/storage/StorageSubsystem.d.ts.map +1 -1
- package/dist/storage/StorageSubsystem.js +42 -100
- package/dist/synchronizer/CollectionSynchronizer.d.ts +4 -2
- package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
- package/dist/synchronizer/CollectionSynchronizer.js +28 -15
- package/dist/synchronizer/DocSynchronizer.d.ts +6 -5
- package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -1
- package/dist/synchronizer/DocSynchronizer.js +76 -178
- package/dist/synchronizer/Synchronizer.d.ts +11 -0
- package/dist/synchronizer/Synchronizer.d.ts.map +1 -1
- package/dist/test/CollectionSynchronizer.test.d.ts +2 -0
- package/dist/test/CollectionSynchronizer.test.d.ts.map +1 -0
- package/dist/test/CollectionSynchronizer.test.js +57 -0
- package/dist/test/DocHandle.test.d.ts +2 -0
- package/dist/test/DocHandle.test.d.ts.map +1 -0
- package/dist/test/DocHandle.test.js +238 -0
- package/dist/test/DocSynchronizer.test.d.ts +2 -0
- package/dist/test/DocSynchronizer.test.d.ts.map +1 -0
- package/dist/test/DocSynchronizer.test.js +111 -0
- package/dist/test/Network.test.d.ts +2 -0
- package/dist/test/Network.test.d.ts.map +1 -0
- package/dist/test/Network.test.js +11 -0
- package/dist/test/Repo.test.d.ts +2 -0
- package/dist/test/Repo.test.d.ts.map +1 -0
- package/dist/test/Repo.test.js +568 -0
- package/dist/test/StorageSubsystem.test.d.ts +2 -0
- package/dist/test/StorageSubsystem.test.d.ts.map +1 -0
- package/dist/test/StorageSubsystem.test.js +56 -0
- package/dist/test/helpers/DummyNetworkAdapter.d.ts +9 -0
- package/dist/test/helpers/DummyNetworkAdapter.d.ts.map +1 -0
- package/dist/test/helpers/DummyNetworkAdapter.js +15 -0
- package/dist/test/helpers/DummyStorageAdapter.d.ts +16 -0
- package/dist/test/helpers/DummyStorageAdapter.d.ts.map +1 -0
- package/dist/test/helpers/DummyStorageAdapter.js +33 -0
- package/dist/test/helpers/generate-large-object.d.ts +5 -0
- package/dist/test/helpers/generate-large-object.d.ts.map +1 -0
- package/dist/test/helpers/generate-large-object.js +9 -0
- package/dist/test/helpers/getRandomItem.d.ts +2 -0
- package/dist/test/helpers/getRandomItem.d.ts.map +1 -0
- package/dist/test/helpers/getRandomItem.js +4 -0
- package/dist/test/types.d.ts +4 -0
- package/dist/test/types.d.ts.map +1 -0
- package/dist/test/types.js +1 -0
- package/package.json +3 -3
- package/src/CollectionHandle.ts +54 -0
- package/src/DocHandle.ts +80 -8
- package/src/Repo.ts +192 -183
- package/src/ferigan.ts +184 -0
- package/src/helpers/tests/storage-adapter-tests.ts +31 -62
- package/src/index.ts +2 -0
- package/src/network/NetworkSubsystem.ts +4 -0
- package/src/network/messages.ts +11 -2
- package/src/storage/StorageAdapter.ts +42 -0
- package/src/storage/StorageSubsystem.ts +59 -119
- package/src/synchronizer/CollectionSynchronizer.ts +34 -26
- package/src/synchronizer/DocSynchronizer.ts +84 -231
- package/src/synchronizer/Synchronizer.ts +14 -0
- package/test/CollectionSynchronizer.test.ts +4 -2
- package/test/DocHandle.test.ts +72 -13
- package/test/DocSynchronizer.test.ts +6 -1
- package/test/RemoteHeadsSubscriptions.test.ts +1 -1
- package/test/Repo.test.ts +225 -117
- package/test/StorageSubsystem.test.ts +20 -16
- package/test/remoteHeads.test.ts +1 -1
package/dist/synchronizer/CollectionSynchronizer.js

@@ -1,10 +1,11 @@
 import debug from "debug";
-import { stringifyAutomergeUrl } from "../AutomergeUrl.js";
+import { parseAutomergeUrl, stringifyAutomergeUrl } from "../AutomergeUrl.js";
 import { DocSynchronizer } from "./DocSynchronizer.js";
 import { Synchronizer } from "./Synchronizer.js";
 const log = debug("automerge-repo:collectionsync");
 /** A CollectionSynchronizer is responsible for synchronizing a DocCollection with peers. */
 export class CollectionSynchronizer extends Synchronizer {
+    beelay;
     repo;
     /** The set of peers we are connected with */
     #peers = new Set();
@@ -13,9 +14,12 @@ export class CollectionSynchronizer extends Synchronizer {
     docSynchronizers = {};
     /** Used to determine if the document is know to the Collection and a synchronizer exists or is being set up */
     #docSetUp = {};
-    constructor(repo) {
+    #denylist;
+    constructor(beelay, repo, denylist = []) {
         super();
+        this.beelay = beelay;
         this.repo = repo;
+        this.#denylist = denylist.map(url => parseAutomergeUrl(url).documentId);
     }
     /** Returns a synchronizer for the given document, creating one if it doesn't already exist. */
     #fetchDocSynchronizer(documentId) {
@@ -28,21 +32,13 @@ export class CollectionSynchronizer extends Synchronizer {
     /** Creates a new docSynchronizer and sets it up to propagate messages */
     #initDocSynchronizer(handle) {
         const docSynchronizer = new DocSynchronizer({
+            beelay: this.beelay,
             handle,
-            onLoadSyncState: async (peerId) => {
-                if (!this.repo.storageSubsystem) {
-                    return;
-                }
-                const { storageId, isEphemeral } = this.repo.peerMetadataByPeerId[peerId] || {};
-                if (!storageId || isEphemeral) {
-                    return;
-                }
-                return this.repo.storageSubsystem.loadSyncState(handle.documentId, storageId);
-            },
         });
         docSynchronizer.on("message", event => this.emit("message", event));
         docSynchronizer.on("open-doc", event => this.emit("open-doc", event));
         docSynchronizer.on("sync-state", event => this.emit("sync-state", event));
+        docSynchronizer.on("metrics", event => this.emit("metrics", event));
         return docSynchronizer;
     }
     /** returns an array of peerIds that we share this document generously with */
@@ -67,6 +63,18 @@ export class CollectionSynchronizer extends Synchronizer {
         if (!documentId) {
             throw new Error("received a message with an invalid documentId");
         }
+        if (this.#denylist.includes(documentId)) {
+            this.emit("metrics", {
+                type: "doc-denied",
+                documentId,
+            });
+            this.emit("message", {
+                type: "doc-unavailable",
+                documentId,
+                targetId: message.senderId,
+            });
+            return;
+        }
         this.#docSetUp[documentId] = true;
         const docSynchronizer = this.#fetchDocSynchronizer(documentId);
         docSynchronizer.receiveMessage(message);
@@ -120,8 +128,13 @@ export class CollectionSynchronizer extends Synchronizer {
         return Array.from(this.#peers);
     }
     metrics() {
-        return
-
-
+        return {};
+        // return Object.fromEntries(
+        //   Object.entries(this.docSynchronizers).map(
+        //     ([documentId, synchronizer]) => {
+        //       return [documentId, synchronizer.metrics()]
+        //     }
+        //   )
+        // )
     }
 }

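The reworked CollectionSynchronizer takes a Beelay instance plus an optional denylist of document URLs: the URLs are resolved to documentIds once in the constructor, and any incoming sync message for one of those documents is answered with "doc-unavailable" and reported as a "doc-denied" metrics event instead of being synchronized. The sketch below shows how construction appears to work, inferred only from the `constructor(beelay, repo, denylist = [])` signature above and the `A.beelay.Beelay` type in DocSynchronizer.d.ts further down; where the Beelay instance actually comes from is not shown in this diff, and `deniedUrl` is a hypothetical placeholder.

// Sketch only, not taken from the package. `beelay` is assumed to be created
// elsewhere (presumably by the Repo); `deniedUrl` is a hypothetical URL.
import * as A from "@automerge/automerge/slim/next"
import { Repo } from "../src/index.js"
import type { AutomergeUrl } from "../src/index.js"
import { CollectionSynchronizer } from "../src/synchronizer/CollectionSynchronizer.js"

declare const beelay: A.beelay.Beelay
declare const deniedUrl: AutomergeUrl

const repo = new Repo({ network: [] })
// Documents in the denylist are never synced: requests for them get a
// "doc-unavailable" reply and a "doc-denied" metrics event.
const synchronizer = new CollectionSynchronizer(beelay, repo, [deniedUrl])
synchronizer.on("metrics", event => console.log("sync metrics", event))

Passing URLs rather than raw documentIds keeps the option consistent with the rest of the repo API, which addresses documents by AutomergeUrl.
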
package/dist/synchronizer/DocSynchronizer.d.ts

@@ -1,12 +1,12 @@
 import * as A from "@automerge/automerge/slim/next";
 import { DocHandle } from "../DocHandle.js";
 import { EphemeralMessage, RepoMessage, RequestMessage, SyncMessage } from "../network/messages.js";
-import { PeerId } from "../types.js";
+import { DocumentId, PeerId } from "../types.js";
 import { Synchronizer } from "./Synchronizer.js";
 type PeerDocumentStatus = "unknown" | "has" | "unavailable" | "wants";
 interface DocSynchronizerConfig {
     handle: DocHandle<unknown>;
-
+    beelay: A.beelay.Beelay;
 }
 /**
  * DocSynchronizer takes a handle to an Automerge document, and receives & dispatches sync messages
@@ -15,11 +15,12 @@ interface DocSynchronizerConfig {
 export declare class DocSynchronizer extends Synchronizer {
     #private;
     syncDebounceRate: number;
-    constructor({ handle,
+    constructor({ handle, beelay }: DocSynchronizerConfig);
     get peerStates(): Record<PeerId, PeerDocumentStatus>;
-    get documentId():
+    get documentId(): DocumentId;
     hasPeer(peerId: PeerId): boolean;
     beginSync(peerIds: PeerId[]): void;
+    peerWantsDocument(peerId: PeerId): void;
     endSync(peerId: PeerId): void;
     receiveMessage(message: RepoMessage): void;
     receiveEphemeralMessage(message: EphemeralMessage): void;
@@ -29,7 +30,7 @@ export declare class DocSynchronizer extends Synchronizer {
         size: {
             numOps: number;
             numChanges: number;
-        };
+        } | undefined;
     };
 }
 export {};

package/dist/synchronizer/DocSynchronizer.d.ts.map

@@ -1 +1 @@
-{"version":3,"file":"DocSynchronizer.d.ts","sourceRoot":"","sources":["../../src/synchronizer/DocSynchronizer.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,CAAC,MAAM,gCAAgC,CAAA;AAGnD,OAAO,EACL,SAAS,EAKV,MAAM,iBAAiB,CAAA;AACxB,OAAO,EAEL,gBAAgB,EAEhB,WAAW,EACX,cAAc,EACd,WAAW,EAEZ,MAAM,wBAAwB,CAAA;AAC/B,OAAO,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;
+{"version":3,"file":"DocSynchronizer.d.ts","sourceRoot":"","sources":["../../src/synchronizer/DocSynchronizer.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,CAAC,MAAM,gCAAgC,CAAA;AAGnD,OAAO,EACL,SAAS,EAKV,MAAM,iBAAiB,CAAA;AACxB,OAAO,EAEL,gBAAgB,EAEhB,WAAW,EACX,cAAc,EACd,WAAW,EAEZ,MAAM,wBAAwB,CAAA;AAC/B,OAAO,EAAgB,UAAU,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;AAC9D,OAAO,EAAE,YAAY,EAAE,MAAM,mBAAmB,CAAA;AAIhD,KAAK,kBAAkB,GAAG,SAAS,GAAG,KAAK,GAAG,aAAa,GAAG,OAAO,CAAA;AAOrE,UAAU,qBAAqB;IAC7B,MAAM,EAAE,SAAS,CAAC,OAAO,CAAC,CAAA;IAC1B,MAAM,EAAE,CAAC,CAAC,MAAM,CAAC,MAAM,CAAA;CACxB;AAED;;;GAGG;AACH,qBAAa,eAAgB,SAAQ,YAAY;;IAE/C,gBAAgB,SAAM;gBAaV,EAAE,MAAM,EAAE,MAAM,EAAE,EAAE,qBAAqB;IAgCrD,IAAI,UAAU,uCAEb;IAED,IAAI,UAAU,IAAI,UAAU,CAE3B;IAyBD,OAAO,CAAC,MAAM,EAAE,MAAM;IAItB,SAAS,CAAC,OAAO,EAAE,MAAM,EAAE;IAkD3B,iBAAiB,CAAC,MAAM,EAAE,MAAM;IAOhC,OAAO,CAAC,MAAM,EAAE,MAAM;IAMtB,cAAc,CAAC,OAAO,EAAE,WAAW;IAiBnC,uBAAuB,CAAC,OAAO,EAAE,gBAAgB;IAsBjD,kBAAkB,CAAC,OAAO,EAAE,WAAW,GAAG,cAAc;IA8BxD,OAAO,IAAI;QACT,KAAK,EAAE,MAAM,EAAE,CAAA;QACf,IAAI,EAAE;YAAE,MAAM,EAAE,MAAM,CAAC;YAAC,UAAU,EAAE,MAAM,CAAA;SAAE,GAAG,SAAS,CAAA;KACzD;CAMF"}

package/dist/synchronizer/DocSynchronizer.js

@@ -2,9 +2,8 @@ import * as A from "@automerge/automerge/slim/next";
 import { decode } from "cbor-x";
 import debug from "debug";
 import { READY, REQUESTING, UNAVAILABLE, } from "../DocHandle.js";
-import { isRequestMessage, } from "../network/messages.js";
 import { Synchronizer } from "./Synchronizer.js";
-import {
+import { parseAutomergeUrl } from "../AutomergeUrl.js";
 /**
  * DocSynchronizer takes a handle to an Automerge document, and receives & dispatches sync messages
  * to bring it inline with all other peers' versions.
@@ -14,43 +13,43 @@ export class DocSynchronizer extends Synchronizer {
     syncDebounceRate = 100;
     /** Active peers */
     #peers = [];
-    #pendingSyncStateCallbacks = {};
     #peerDocumentStatuses = {};
-
-    #syncStates = {};
-    #pendingSyncMessages = [];
+    #lastSaveOffset = null;
     #syncStarted = false;
+    #beelay;
     #handle;
-    #
-    constructor({ handle,
+    #docId;
+    constructor({ handle, beelay }) {
         super();
         this.#handle = handle;
-        this.#
-
-
-        this.#log = debug(`automerge-repo:docsync:${docId}`);
-        handle.on("change", throttle(() => this.#syncWithPeers(), this.syncDebounceRate));
+        this.#beelay = beelay;
+        this.#docId = this.#handle.documentId;
+        this.#log = debug(`automerge-repo:docsync:${this.#handle.documentId}`);
         handle.on("ephemeral-message-outbound", payload => this.#broadcastToPeers(payload));
-
-
-
-
-
+        handle.on("change", changeInfo => {
+            const newLinks = changeInfo.patches
+                .map(patch => {
+                    if (patch.action === "put") {
+                        if (patch.value instanceof A.Link) {
+                            return patch.value;
+                        }
+                    }
+                    return null;
+                })
+                .filter(v => v != null);
+            for (const link of newLinks) {
+                const { documentId: target } = parseAutomergeUrl(link.target);
+                this.#beelay.addLink({ from: this.#handle.documentId, to: target });
+            }
+        });
     }
     get peerStates() {
         return this.#peerDocumentStatuses;
     }
     get documentId() {
-        return this.#
+        return this.#docId;
     }
     /// PRIVATE
-    async #syncWithPeers() {
-        this.#log(`syncWithPeers`);
-        const doc = await this.#handle.doc();
-        if (doc === undefined)
-            return;
-        this.#peers.forEach(peerId => this.#sendSyncMessage(peerId, doc));
-    }
     async #broadcastToPeers({ data, }) {
         this.#log(`broadcastToPeers`, this.#peers);
         this.#peers.forEach(peerId => this.#sendEphemeralMessage(peerId, data));
@@ -60,145 +59,78 @@ export class DocSynchronizer extends Synchronizer {
         const message = {
             type: "ephemeral",
             targetId: peerId,
-            documentId: this
+            documentId: this.documentId,
             data,
         };
         this.emit("message", message);
     }
-    #withSyncState(peerId, callback) {
-        this.#addPeer(peerId);
-        if (!(peerId in this.#peerDocumentStatuses)) {
-            this.#peerDocumentStatuses[peerId] = "unknown";
-        }
-        const syncState = this.#syncStates[peerId];
-        if (syncState) {
-            callback(syncState);
-            return;
-        }
-        let pendingCallbacks = this.#pendingSyncStateCallbacks[peerId];
-        if (!pendingCallbacks) {
-            this.#onLoadSyncState(peerId)
-                .then(syncState => {
-                    this.#initSyncState(peerId, syncState ?? A.initSyncState());
-                })
-                .catch(err => {
-                    this.#log(`Error loading sync state for ${peerId}: ${err}`);
-                });
-            pendingCallbacks = this.#pendingSyncStateCallbacks[peerId] = [];
-        }
-        pendingCallbacks.push(callback);
-    }
-    #addPeer(peerId) {
-        if (!this.#peers.includes(peerId)) {
-            this.#peers.push(peerId);
-            this.emit("open-doc", { documentId: this.documentId, peerId });
-        }
-    }
-    #initSyncState(peerId, syncState) {
-        const pendingCallbacks = this.#pendingSyncStateCallbacks[peerId];
-        if (pendingCallbacks) {
-            for (const callback of pendingCallbacks) {
-                callback(syncState);
-            }
-        }
-        delete this.#pendingSyncStateCallbacks[peerId];
-        this.#syncStates[peerId] = syncState;
-    }
-    #setSyncState(peerId, syncState) {
-        this.#syncStates[peerId] = syncState;
-        this.emit("sync-state", {
-            peerId,
-            syncState,
-            documentId: this.#handle.documentId,
-        });
-    }
-    #sendSyncMessage(peerId, doc) {
-        this.#log(`sendSyncMessage ->${peerId}`);
-        this.#withSyncState(peerId, syncState => {
-            const [newSyncState, message] = A.generateSyncMessage(doc, syncState);
-            if (message) {
-                this.#setSyncState(peerId, newSyncState);
-                const isNew = A.getHeads(doc).length === 0;
-                if (!this.#handle.isReady() &&
-                    isNew &&
-                    newSyncState.sharedHeads.length === 0 &&
-                    !Object.values(this.#peerDocumentStatuses).includes("has") &&
-                    this.#peerDocumentStatuses[peerId] === "unknown") {
-                    // we don't have the document (or access to it), so we request it
-                    this.emit("message", {
-                        type: "request",
-                        targetId: peerId,
-                        documentId: this.#handle.documentId,
-                        data: message,
-                    });
-                }
-                else {
-                    this.emit("message", {
-                        type: "sync",
-                        targetId: peerId,
-                        data: message,
-                        documentId: this.#handle.documentId,
-                    });
-                }
-                // if we have sent heads, then the peer now has or will have the document
-                if (!isNew) {
-                    this.#peerDocumentStatuses[peerId] = "has";
-                }
-            }
-        });
-    }
     /// PUBLIC
     hasPeer(peerId) {
         return this.#peers.includes(peerId);
     }
     beginSync(peerIds) {
-
-        // At this point if we don't have anything in our storage, we need to use an empty doc to sync
-        // with; but we don't want to surface that state to the front end
+        this.#log(`beginSync: ${peerIds.join(", ")}`);
         const docPromise = this.#handle
-            .
+            .whenReady([READY, REQUESTING, UNAVAILABLE])
             .then(doc => {
-                // we register out peers first, then say that sync has started
                 this.#syncStarted = true;
                 this.#checkDocUnavailable();
-
-
+            })
+            // TODO: handle this error
+            .catch(() => { });
+        peerIds.forEach(peerId => {
+            if (!this.#peers.includes(peerId)) {
+                this.#peers.push(peerId);
+            }
+            else {
                 return;
             }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            this.#peerDocumentStatuses[peerId] = "unknown";
+            docPromise.then(() => {
+                this.#syncStarted = true;
+                this.#log(`beginning sync with ${peerId} for doc: ${this.documentId}`);
+                this.#beelay
+                    .syncDoc(this.documentId, peerId)
+                    .then(({ snapshot, found }) => {
+                        this.#peerDocumentStatuses[peerId] = found ? "has" : "unavailable";
+                        // this.#log("synced snapshot: ", snapshot)
+                        if (found) {
+                            this.#beelay.loadDocument(this.#docId).then(commitOrBundles => {
+                                if (commitOrBundles != null) {
+                                    this.#handle?.update(d => {
+                                        let doc = d;
+                                        for (const commitOrBundle of commitOrBundles) {
+                                            doc = A.loadIncremental(doc, commitOrBundle.contents);
+                                        }
+                                        return doc;
+                                    });
+                                    this.#checkDocUnavailable();
+                                }
+                            });
+                        }
+                        else {
+                            this.#checkDocUnavailable();
                         }
-
-            .catch(err => {
-                this.#log(`Error loading doc for ${peerId}: ${err}`);
+                        this.#beelay.listen(peerId, snapshot);
                     });
             });
         });
     }
+    peerWantsDocument(peerId) {
+        this.#peerDocumentStatuses[peerId] = "wants";
+        if (!this.#peers.includes(peerId)) {
+            this.beginSync([peerId]);
+        }
+    }
     endSync(peerId) {
         this.#log(`removing peer ${peerId}`);
         this.#peers = this.#peers.filter(p => p !== peerId);
+        this.#beelay.cancelListens(peerId);
     }
     receiveMessage(message) {
         switch (message.type) {
             case "sync":
             case "request":
-                this.receiveSyncMessage(message);
                 break;
             case "ephemeral":
                 this.receiveEphemeralMessage(message);
@@ -212,7 +144,7 @@ export class DocSynchronizer extends Synchronizer {
         }
     }
     receiveEphemeralMessage(message) {
-        if (message.documentId !== this
+        if (message.documentId !== this.documentId)
             throw new Error(`channelId doesn't match documentId`);
         const { senderId, data } = message;
         const contents = decode(new Uint8Array(data));
@@ -230,37 +162,7 @@ export class DocSynchronizer extends Synchronizer {
             });
         });
     }
-    receiveSyncMessage(message) {
-        if (message.documentId !== this.#handle.documentId)
-            throw new Error(`channelId doesn't match documentId`);
-        // We need to block receiving the syncMessages until we've checked local storage
-        if (!this.#handle.inState([READY, REQUESTING, UNAVAILABLE])) {
-            this.#pendingSyncMessages.push({ message, received: new Date() });
-            return;
-        }
-        this.#processAllPendingSyncMessages();
-        this.#processSyncMessage(message);
-    }
-    #processSyncMessage(message) {
-        if (isRequestMessage(message)) {
-            this.#peerDocumentStatuses[message.senderId] = "wants";
-        }
-        this.#checkDocUnavailable();
-        // if the message has heads, then the peer has the document
-        if (A.decodeSyncMessage(message.data).heads.length > 0) {
-            this.#peerDocumentStatuses[message.senderId] = "has";
-        }
-        this.#withSyncState(message.senderId, syncState => {
-            this.#handle.update(doc => {
-                const [newDoc, newSyncState] = A.receiveSyncMessage(doc, syncState, message.data);
-                this.#setSyncState(message.senderId, newSyncState);
-                // respond to just this peer (as required)
-                this.#sendSyncMessage(message.senderId, doc);
-                return newDoc;
-            });
-            this.#checkDocUnavailable();
-        });
-    }
+    receiveSyncMessage(message) { }
     #checkDocUnavailable() {
         // if we know none of the peers have the document, tell all our peers that we don't either
         if (this.#syncStarted &&
@@ -272,24 +174,20 @@ export class DocSynchronizer extends Synchronizer {
             .forEach(peerId => {
                 const message = {
                     type: "doc-unavailable",
-                    documentId: this
+                    documentId: this.documentId,
                     targetId: peerId,
                 };
                 this.emit("message", message);
             });
-            this.#handle
-
-
-    #processAllPendingSyncMessages() {
-        for (const message of this.#pendingSyncMessages) {
-            this.#processSyncMessage(message.message);
+            if (this.#handle) {
+                this.#handle.unavailable();
+            }
         }
-        this.#pendingSyncMessages = [];
     }
     metrics() {
         return {
             peers: this.#peers,
-            size: this.#handle
+            size: this.#handle?.metrics(),
         };
     }
 }

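DocSynchronizer no longer drives A.generateSyncMessage / A.receiveSyncMessage per peer; beginSync now delegates to the beelay object and only folds the returned commits/bundles into the handle. The standalone sketch below restates that per-peer flow for readability. The BeelayLike interface is an assumption reconstructed purely from the calls that appear in this diff (syncDoc, loadDocument, listen); the real type is A.beelay.Beelay and is not a published API.

// Sketch of the per-peer flow in beginSync() above, under assumed signatures.
interface BeelayLike {
  syncDoc(documentId: string, peerId: string): Promise<{ snapshot: unknown; found: boolean }>
  loadDocument(documentId: string): Promise<Array<{ contents: Uint8Array }> | null>
  listen(peerId: string, snapshot: unknown): void
}

async function syncDocWithPeer(
  beelay: BeelayLike,
  documentId: string,
  peerId: string,
  applyChunks: (chunks: Uint8Array[]) => void // e.g. A.loadIncremental inside handle.update
): Promise<"has" | "unavailable"> {
  // 1. Exchange sync state for this document with the peer and learn whether it has it.
  const { snapshot, found } = await beelay.syncDoc(documentId, peerId)
  if (found) {
    // 2. Load the commits/bundles beelay now holds and fold them into the local doc.
    const chunks = await beelay.loadDocument(documentId)
    if (chunks != null) applyChunks(chunks.map(c => c.contents))
  }
  // 3. Keep listening for further changes from this peer; endSync() tears this
  //    down with beelay.cancelListens(peerId).
  beelay.listen(peerId, snapshot)
  return found ? "has" : "unavailable"
}
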
package/dist/synchronizer/Synchronizer.d.ts

@@ -9,6 +9,7 @@ export interface SynchronizerEvents {
     message: (payload: MessageContents) => void;
     "sync-state": (payload: SyncStatePayload) => void;
     "open-doc": (arg: OpenDocMessage) => void;
+    metrics: (arg: DocSyncMetrics) => void;
 }
 /** Notify the repo that the sync state has changed */
 export interface SyncStatePayload {
@@ -16,4 +17,14 @@ export interface SyncStatePayload {
     documentId: DocumentId;
     syncState: SyncState;
 }
+export type DocSyncMetrics = {
+    type: "receive-sync-message";
+    documentId: DocumentId;
+    durationMillis: number;
+    numOps: number;
+    numChanges: number;
+} | {
+    type: "doc-denied";
+    documentId: DocumentId;
+};
 //# sourceMappingURL=Synchronizer.d.ts.map

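The new "metrics" event in SynchronizerEvents gives embedders a hook for instrumentation. A small handler sketch over the DocSyncMetrics union declared above; the relative import path mirrors the layout used in this package's own tests and would differ in an external consumer.

import type { DocSyncMetrics } from "../src/synchronizer/Synchronizer.js"

// Handles both metric shapes declared above; wire it up with
// synchronizer.on("metrics", onDocSyncMetrics).
function onDocSyncMetrics(event: DocSyncMetrics) {
  switch (event.type) {
    case "receive-sync-message":
      console.log(
        `${event.documentId}: ${event.numChanges} changes / ${event.numOps} ops in ${event.durationMillis}ms`
      )
      break
    case "doc-denied":
      console.warn(`refused to sync denylisted document ${event.documentId}`)
      break
  }
}
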
package/dist/synchronizer/Synchronizer.d.ts.map

@@ -1 +1 @@
-{"version":3,"file":"Synchronizer.d.ts","sourceRoot":"","sources":["../../src/synchronizer/Synchronizer.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,eAAe,CAAA;AAC5C,OAAO,EACL,eAAe,EACf,cAAc,EACd,WAAW,EACZ,MAAM,wBAAwB,CAAA;AAC/B,OAAO,EAAE,SAAS,EAAE,MAAM,2BAA2B,CAAA;AACrD,OAAO,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AAEhD,8BAAsB,YAAa,SAAQ,YAAY,CAAC,kBAAkB,CAAC;IACzE,QAAQ,CAAC,cAAc,CAAC,OAAO,EAAE,WAAW,GAAG,IAAI;CACpD;AAED,MAAM,WAAW,kBAAkB;IACjC,OAAO,EAAE,CAAC,OAAO,EAAE,eAAe,KAAK,IAAI,CAAA;IAC3C,YAAY,EAAE,CAAC,OAAO,EAAE,gBAAgB,KAAK,IAAI,CAAA;IACjD,UAAU,EAAE,CAAC,GAAG,EAAE,cAAc,KAAK,IAAI,CAAA;
+{"version":3,"file":"Synchronizer.d.ts","sourceRoot":"","sources":["../../src/synchronizer/Synchronizer.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,eAAe,CAAA;AAC5C,OAAO,EACL,eAAe,EACf,cAAc,EACd,WAAW,EACZ,MAAM,wBAAwB,CAAA;AAC/B,OAAO,EAAE,SAAS,EAAE,MAAM,2BAA2B,CAAA;AACrD,OAAO,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AAEhD,8BAAsB,YAAa,SAAQ,YAAY,CAAC,kBAAkB,CAAC;IACzE,QAAQ,CAAC,cAAc,CAAC,OAAO,EAAE,WAAW,GAAG,IAAI;CACpD;AAED,MAAM,WAAW,kBAAkB;IACjC,OAAO,EAAE,CAAC,OAAO,EAAE,eAAe,KAAK,IAAI,CAAA;IAC3C,YAAY,EAAE,CAAC,OAAO,EAAE,gBAAgB,KAAK,IAAI,CAAA;IACjD,UAAU,EAAE,CAAC,GAAG,EAAE,cAAc,KAAK,IAAI,CAAA;IACzC,OAAO,EAAE,CAAC,GAAG,EAAE,cAAc,KAAK,IAAI,CAAA;CACvC;AAED,uDAAuD;AACvD,MAAM,WAAW,gBAAgB;IAC/B,MAAM,EAAE,MAAM,CAAA;IACd,UAAU,EAAE,UAAU,CAAA;IACtB,SAAS,EAAE,SAAS,CAAA;CACrB;AAED,MAAM,MAAM,cAAc,GACtB;IACE,IAAI,EAAE,sBAAsB,CAAA;IAC5B,UAAU,EAAE,UAAU,CAAA;IACtB,cAAc,EAAE,MAAM,CAAA;IACtB,MAAM,EAAE,MAAM,CAAA;IACd,UAAU,EAAE,MAAM,CAAA;CACnB,GACD;IACE,IAAI,EAAE,YAAY,CAAA;IAClB,UAAU,EAAE,UAAU,CAAA;CACvB,CAAA"}

package/dist/test/CollectionSynchronizer.test.d.ts.map

@@ -0,0 +1 @@
+{"version":3,"file":"CollectionSynchronizer.test.d.ts","sourceRoot":"","sources":["../../test/CollectionSynchronizer.test.ts"],"names":[],"mappings":""}

package/dist/test/CollectionSynchronizer.test.js

@@ -0,0 +1,57 @@
+import assert from "assert"
+import { beforeEach } from "mocha"
+import { Repo } from "../src/index.js"
+import { CollectionSynchronizer } from "../src/synchronizer/CollectionSynchronizer.js"
+describe("CollectionSynchronizer", () => {
+  let repo
+  let synchronizer
+  beforeEach(() => {
+    repo = new Repo({
+      network: [],
+    })
+    synchronizer = new CollectionSynchronizer(repo)
+  })
+  it("is not null", async () => {
+    assert(synchronizer !== null)
+  })
+  it("starts synchronizing a document to peers when added", done => {
+    const handle = repo.create()
+    synchronizer.addPeer("peer1")
+    synchronizer.once("message", event => {
+      assert(event.targetId === "peer1")
+      assert(event.documentId === handle.documentId)
+      done()
+    })
+    synchronizer.addDocument(handle.documentId)
+  })
+  it("starts synchronizing existing documents when a peer is added", done => {
+    const handle = repo.create()
+    synchronizer.addDocument(handle.documentId)
+    synchronizer.once("message", event => {
+      assert(event.targetId === "peer1")
+      assert(event.documentId === handle.documentId)
+      done()
+    })
+    synchronizer.addPeer("peer1")
+  })
+  it("should not synchronize to a peer which is excluded from the share policy", done => {
+    const handle = repo.create()
+    repo.sharePolicy = async peerId => peerId !== "peer1"
+    synchronizer.addDocument(handle.documentId)
+    synchronizer.once("message", () => {
+      done(new Error("Should not have sent a message"))
+    })
+    synchronizer.addPeer("peer1")
+    setTimeout(done)
+  })
+  it("should not synchronize a document which is excluded from the share policy", done => {
+    const handle = repo.create()
+    repo.sharePolicy = async (_, documentId) => documentId !== handle.documentId
+    synchronizer.addPeer("peer2")
+    synchronizer.once("message", () => {
+      done(new Error("Should not have sent a message"))
+    })
+    synchronizer.addDocument(handle.documentId)
+    setTimeout(done)
+  })
+})

package/dist/test/DocHandle.test.d.ts.map

@@ -0,0 +1 @@
+{"version":3,"file":"DocHandle.test.d.ts","sourceRoot":"","sources":["../../test/DocHandle.test.ts"],"names":[],"mappings":""}