@automerge/automerge-repo 2.0.0-alpha.7 → 2.0.0-collectionsync-alpha.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/CollectionHandle.d.ts +14 -0
- package/dist/CollectionHandle.d.ts.map +1 -0
- package/dist/CollectionHandle.js +37 -0
- package/dist/DocHandle.d.ts +37 -6
- package/dist/DocHandle.d.ts.map +1 -1
- package/dist/DocHandle.js +64 -6
- package/dist/DocUrl.d.ts +47 -0
- package/dist/DocUrl.d.ts.map +1 -0
- package/dist/DocUrl.js +72 -0
- package/dist/EphemeralData.d.ts +20 -0
- package/dist/EphemeralData.d.ts.map +1 -0
- package/dist/EphemeralData.js +1 -0
- package/dist/Repo.d.ts +28 -7
- package/dist/Repo.d.ts.map +1 -1
- package/dist/Repo.js +142 -143
- package/dist/ferigan.d.ts +51 -0
- package/dist/ferigan.d.ts.map +1 -0
- package/dist/ferigan.js +98 -0
- package/dist/helpers/tests/storage-adapter-tests.d.ts +2 -2
- package/dist/helpers/tests/storage-adapter-tests.d.ts.map +1 -1
- package/dist/helpers/tests/storage-adapter-tests.js +19 -39
- package/dist/index.d.ts +2 -0
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +1 -0
- package/dist/network/NetworkSubsystem.d.ts +1 -0
- package/dist/network/NetworkSubsystem.d.ts.map +1 -1
- package/dist/network/NetworkSubsystem.js +3 -0
- package/dist/network/messages.d.ts +7 -1
- package/dist/network/messages.d.ts.map +1 -1
- package/dist/network/messages.js +2 -1
- package/dist/src/DocHandle.d.ts +182 -0
- package/dist/src/DocHandle.d.ts.map +1 -0
- package/dist/src/DocHandle.js +405 -0
- package/dist/src/DocUrl.d.ts +49 -0
- package/dist/src/DocUrl.d.ts.map +1 -0
- package/dist/src/DocUrl.js +72 -0
- package/dist/src/EphemeralData.d.ts +19 -0
- package/dist/src/EphemeralData.d.ts.map +1 -0
- package/dist/src/EphemeralData.js +1 -0
- package/dist/src/Repo.d.ts +74 -0
- package/dist/src/Repo.d.ts.map +1 -0
- package/dist/src/Repo.js +208 -0
- package/dist/src/helpers/arraysAreEqual.d.ts +2 -0
- package/dist/src/helpers/arraysAreEqual.d.ts.map +1 -0
- package/dist/src/helpers/arraysAreEqual.js +2 -0
- package/dist/src/helpers/cbor.d.ts +4 -0
- package/dist/src/helpers/cbor.d.ts.map +1 -0
- package/dist/src/helpers/cbor.js +8 -0
- package/dist/src/helpers/eventPromise.d.ts +11 -0
- package/dist/src/helpers/eventPromise.d.ts.map +1 -0
- package/dist/src/helpers/eventPromise.js +7 -0
- package/dist/src/helpers/headsAreSame.d.ts +2 -0
- package/dist/src/helpers/headsAreSame.d.ts.map +1 -0
- package/dist/src/helpers/headsAreSame.js +4 -0
- package/dist/src/helpers/mergeArrays.d.ts +2 -0
- package/dist/src/helpers/mergeArrays.d.ts.map +1 -0
- package/dist/src/helpers/mergeArrays.js +15 -0
- package/dist/src/helpers/pause.d.ts +6 -0
- package/dist/src/helpers/pause.d.ts.map +1 -0
- package/dist/src/helpers/pause.js +10 -0
- package/dist/src/helpers/tests/network-adapter-tests.d.ts +21 -0
- package/dist/src/helpers/tests/network-adapter-tests.d.ts.map +1 -0
- package/dist/src/helpers/tests/network-adapter-tests.js +122 -0
- package/dist/src/helpers/withTimeout.d.ts +12 -0
- package/dist/src/helpers/withTimeout.d.ts.map +1 -0
- package/dist/src/helpers/withTimeout.js +24 -0
- package/dist/src/index.d.ts +53 -0
- package/dist/src/index.d.ts.map +1 -0
- package/dist/src/index.js +40 -0
- package/dist/src/network/NetworkAdapter.d.ts +26 -0
- package/dist/src/network/NetworkAdapter.d.ts.map +1 -0
- package/dist/src/network/NetworkAdapter.js +4 -0
- package/dist/src/network/NetworkSubsystem.d.ts +23 -0
- package/dist/src/network/NetworkSubsystem.d.ts.map +1 -0
- package/dist/src/network/NetworkSubsystem.js +120 -0
- package/dist/src/network/messages.d.ts +85 -0
- package/dist/src/network/messages.d.ts.map +1 -0
- package/dist/src/network/messages.js +23 -0
- package/dist/src/storage/StorageAdapter.d.ts +14 -0
- package/dist/src/storage/StorageAdapter.d.ts.map +1 -0
- package/dist/src/storage/StorageAdapter.js +1 -0
- package/dist/src/storage/StorageSubsystem.d.ts +12 -0
- package/dist/src/storage/StorageSubsystem.d.ts.map +1 -0
- package/dist/src/storage/StorageSubsystem.js +145 -0
- package/dist/src/synchronizer/CollectionSynchronizer.d.ts +25 -0
- package/dist/src/synchronizer/CollectionSynchronizer.d.ts.map +1 -0
- package/dist/src/synchronizer/CollectionSynchronizer.js +106 -0
- package/dist/src/synchronizer/DocSynchronizer.d.ts +29 -0
- package/dist/src/synchronizer/DocSynchronizer.d.ts.map +1 -0
- package/dist/src/synchronizer/DocSynchronizer.js +263 -0
- package/dist/src/synchronizer/Synchronizer.d.ts +9 -0
- package/dist/src/synchronizer/Synchronizer.d.ts.map +1 -0
- package/dist/src/synchronizer/Synchronizer.js +2 -0
- package/dist/src/types.d.ts +16 -0
- package/dist/src/types.d.ts.map +1 -0
- package/dist/src/types.js +1 -0
- package/dist/storage/StorageAdapter.d.ts +9 -0
- package/dist/storage/StorageAdapter.d.ts.map +1 -1
- package/dist/storage/StorageAdapter.js +33 -0
- package/dist/storage/StorageSubsystem.d.ts +12 -2
- package/dist/storage/StorageSubsystem.d.ts.map +1 -1
- package/dist/storage/StorageSubsystem.js +42 -100
- package/dist/synchronizer/CollectionSynchronizer.d.ts +4 -2
- package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
- package/dist/synchronizer/CollectionSynchronizer.js +28 -15
- package/dist/synchronizer/DocSynchronizer.d.ts +6 -5
- package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -1
- package/dist/synchronizer/DocSynchronizer.js +76 -178
- package/dist/synchronizer/Synchronizer.d.ts +11 -0
- package/dist/synchronizer/Synchronizer.d.ts.map +1 -1
- package/dist/test/CollectionSynchronizer.test.d.ts +2 -0
- package/dist/test/CollectionSynchronizer.test.d.ts.map +1 -0
- package/dist/test/CollectionSynchronizer.test.js +57 -0
- package/dist/test/DocHandle.test.d.ts +2 -0
- package/dist/test/DocHandle.test.d.ts.map +1 -0
- package/dist/test/DocHandle.test.js +238 -0
- package/dist/test/DocSynchronizer.test.d.ts +2 -0
- package/dist/test/DocSynchronizer.test.d.ts.map +1 -0
- package/dist/test/DocSynchronizer.test.js +111 -0
- package/dist/test/Network.test.d.ts +2 -0
- package/dist/test/Network.test.d.ts.map +1 -0
- package/dist/test/Network.test.js +11 -0
- package/dist/test/Repo.test.d.ts +2 -0
- package/dist/test/Repo.test.d.ts.map +1 -0
- package/dist/test/Repo.test.js +568 -0
- package/dist/test/StorageSubsystem.test.d.ts +2 -0
- package/dist/test/StorageSubsystem.test.d.ts.map +1 -0
- package/dist/test/StorageSubsystem.test.js +56 -0
- package/dist/test/helpers/DummyNetworkAdapter.d.ts +9 -0
- package/dist/test/helpers/DummyNetworkAdapter.d.ts.map +1 -0
- package/dist/test/helpers/DummyNetworkAdapter.js +15 -0
- package/dist/test/helpers/DummyStorageAdapter.d.ts +16 -0
- package/dist/test/helpers/DummyStorageAdapter.d.ts.map +1 -0
- package/dist/test/helpers/DummyStorageAdapter.js +33 -0
- package/dist/test/helpers/generate-large-object.d.ts +5 -0
- package/dist/test/helpers/generate-large-object.d.ts.map +1 -0
- package/dist/test/helpers/generate-large-object.js +9 -0
- package/dist/test/helpers/getRandomItem.d.ts +2 -0
- package/dist/test/helpers/getRandomItem.d.ts.map +1 -0
- package/dist/test/helpers/getRandomItem.js +4 -0
- package/dist/test/types.d.ts +4 -0
- package/dist/test/types.d.ts.map +1 -0
- package/dist/test/types.js +1 -0
- package/package.json +3 -3
- package/src/CollectionHandle.ts +54 -0
- package/src/DocHandle.ts +80 -8
- package/src/Repo.ts +192 -183
- package/src/ferigan.ts +184 -0
- package/src/helpers/tests/storage-adapter-tests.ts +31 -62
- package/src/index.ts +2 -0
- package/src/network/NetworkSubsystem.ts +4 -0
- package/src/network/messages.ts +11 -2
- package/src/storage/StorageAdapter.ts +42 -0
- package/src/storage/StorageSubsystem.ts +59 -119
- package/src/synchronizer/CollectionSynchronizer.ts +34 -26
- package/src/synchronizer/DocSynchronizer.ts +84 -231
- package/src/synchronizer/Synchronizer.ts +14 -0
- package/test/CollectionSynchronizer.test.ts +4 -2
- package/test/DocHandle.test.ts +72 -13
- package/test/DocSynchronizer.test.ts +6 -1
- package/test/RemoteHeadsSubscriptions.test.ts +1 -1
- package/test/Repo.test.ts +225 -117
- package/test/StorageSubsystem.test.ts +20 -16
- package/test/remoteHeads.test.ts +1 -1
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
import { EventEmitter } from "eventemitter3"
|
|
2
|
+
import { PeerId } from "../types.js"
|
|
3
|
+
import { NetworkAdapter, PeerDisconnectedPayload } from "./NetworkAdapter.js"
|
|
4
|
+
import { Message, MessageContents } from "./messages.js"
|
|
5
|
+
export declare class NetworkSubsystem extends EventEmitter<NetworkSubsystemEvents> {
|
|
6
|
+
#private
|
|
7
|
+
peerId: PeerId
|
|
8
|
+
constructor(adapters: NetworkAdapter[], peerId?: PeerId)
|
|
9
|
+
addNetworkAdapter(networkAdapter: NetworkAdapter): void
|
|
10
|
+
send(message: MessageContents): void
|
|
11
|
+
isReady: () => boolean
|
|
12
|
+
whenReady: () => Promise<void>
|
|
13
|
+
}
|
|
14
|
+
export interface NetworkSubsystemEvents {
|
|
15
|
+
peer: (payload: PeerPayload) => void
|
|
16
|
+
"peer-disconnected": (payload: PeerDisconnectedPayload) => void
|
|
17
|
+
message: (payload: Message) => void
|
|
18
|
+
ready: () => void
|
|
19
|
+
}
|
|
20
|
+
export interface PeerPayload {
|
|
21
|
+
peerId: PeerId
|
|
22
|
+
}
|
|
23
|
+
//# sourceMappingURL=NetworkSubsystem.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"NetworkSubsystem.d.ts","sourceRoot":"","sources":["../../../src/network/NetworkSubsystem.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,eAAe,CAAA;AAC5C,OAAO,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;AACpC,OAAO,EAAE,cAAc,EAAE,uBAAuB,EAAE,MAAM,qBAAqB,CAAA;AAE7E,OAAO,EAIL,OAAO,EACP,eAAe,EAChB,MAAM,eAAe,CAAA;AAUtB,qBAAa,gBAAiB,SAAQ,YAAY,CAAC,sBAAsB,CAAC;;IAUzB,MAAM;gBAAzC,QAAQ,EAAE,cAAc,EAAE,EAAS,MAAM,SAAiB;IAMtE,iBAAiB,CAAC,cAAc,EAAE,cAAc;IAsEhD,IAAI,CAAC,OAAO,EAAE,eAAe;IA2B7B,OAAO,gBAEN;IAED,SAAS,sBAUR;CACF;AAQD,MAAM,WAAW,sBAAsB;IACrC,IAAI,EAAE,CAAC,OAAO,EAAE,WAAW,KAAK,IAAI,CAAA;IACpC,mBAAmB,EAAE,CAAC,OAAO,EAAE,uBAAuB,KAAK,IAAI,CAAA;IAC/D,OAAO,EAAE,CAAC,OAAO,EAAE,OAAO,KAAK,IAAI,CAAA;IACnC,KAAK,EAAE,MAAM,IAAI,CAAA;CAClB;AAED,MAAM,WAAW,WAAW;IAC1B,MAAM,EAAE,MAAM,CAAA;CACf"}
|
|
@@ -0,0 +1,120 @@
|
|
|
1
|
+
import { EventEmitter } from "eventemitter3"
|
|
2
|
+
import { isEphemeralMessage, isValidMessage } from "./messages.js"
|
|
3
|
+
import debug from "debug"
|
|
4
|
+
const getEphemeralMessageSource = message =>
|
|
5
|
+
`${message.senderId}:${message.sessionId}`
|
|
6
|
+
export class NetworkSubsystem extends EventEmitter {
|
|
7
|
+
peerId
|
|
8
|
+
#log
|
|
9
|
+
#adaptersByPeer = {}
|
|
10
|
+
#count = 0
|
|
11
|
+
#sessionId = Math.random().toString(36).slice(2)
|
|
12
|
+
#ephemeralSessionCounts = {}
|
|
13
|
+
#readyAdapterCount = 0
|
|
14
|
+
#adapters = []
|
|
15
|
+
constructor(adapters, peerId = randomPeerId()) {
|
|
16
|
+
super()
|
|
17
|
+
this.peerId = peerId
|
|
18
|
+
this.#log = debug(`automerge-repo:network:${this.peerId}`)
|
|
19
|
+
adapters.forEach(a => this.addNetworkAdapter(a))
|
|
20
|
+
}
|
|
21
|
+
addNetworkAdapter(networkAdapter) {
|
|
22
|
+
this.#adapters.push(networkAdapter)
|
|
23
|
+
networkAdapter.once("ready", () => {
|
|
24
|
+
this.#readyAdapterCount++
|
|
25
|
+
this.#log(
|
|
26
|
+
"Adapters ready: ",
|
|
27
|
+
this.#readyAdapterCount,
|
|
28
|
+
"/",
|
|
29
|
+
this.#adapters.length
|
|
30
|
+
)
|
|
31
|
+
if (this.#readyAdapterCount === this.#adapters.length) {
|
|
32
|
+
this.emit("ready")
|
|
33
|
+
}
|
|
34
|
+
})
|
|
35
|
+
networkAdapter.on("peer-candidate", ({ peerId }) => {
|
|
36
|
+
this.#log(`peer candidate: ${peerId} `)
|
|
37
|
+
// TODO: This is where authentication would happen
|
|
38
|
+
if (!this.#adaptersByPeer[peerId]) {
|
|
39
|
+
// TODO: handle losing a server here
|
|
40
|
+
this.#adaptersByPeer[peerId] = networkAdapter
|
|
41
|
+
}
|
|
42
|
+
this.emit("peer", { peerId })
|
|
43
|
+
})
|
|
44
|
+
networkAdapter.on("peer-disconnected", ({ peerId }) => {
|
|
45
|
+
this.#log(`peer disconnected: ${peerId} `)
|
|
46
|
+
delete this.#adaptersByPeer[peerId]
|
|
47
|
+
this.emit("peer-disconnected", { peerId })
|
|
48
|
+
})
|
|
49
|
+
networkAdapter.on("message", msg => {
|
|
50
|
+
if (!isValidMessage(msg)) {
|
|
51
|
+
this.#log(`invalid message: ${JSON.stringify(msg)}`)
|
|
52
|
+
return
|
|
53
|
+
}
|
|
54
|
+
this.#log(`message from ${msg.senderId}`)
|
|
55
|
+
if (isEphemeralMessage(msg)) {
|
|
56
|
+
const source = getEphemeralMessageSource(msg)
|
|
57
|
+
if (
|
|
58
|
+
this.#ephemeralSessionCounts[source] === undefined ||
|
|
59
|
+
msg.count > this.#ephemeralSessionCounts[source]
|
|
60
|
+
) {
|
|
61
|
+
this.#ephemeralSessionCounts[source] = msg.count
|
|
62
|
+
this.emit("message", msg)
|
|
63
|
+
}
|
|
64
|
+
return
|
|
65
|
+
}
|
|
66
|
+
this.emit("message", msg)
|
|
67
|
+
})
|
|
68
|
+
networkAdapter.on("close", () => {
|
|
69
|
+
this.#log("adapter closed")
|
|
70
|
+
Object.entries(this.#adaptersByPeer).forEach(([peerId, other]) => {
|
|
71
|
+
if (other === networkAdapter) {
|
|
72
|
+
delete this.#adaptersByPeer[peerId]
|
|
73
|
+
}
|
|
74
|
+
})
|
|
75
|
+
})
|
|
76
|
+
networkAdapter.connect(this.peerId)
|
|
77
|
+
}
|
|
78
|
+
send(message) {
|
|
79
|
+
const peer = this.#adaptersByPeer[message.targetId]
|
|
80
|
+
if (!peer) {
|
|
81
|
+
this.#log(`Tried to send message but peer not found: ${message.targetId}`)
|
|
82
|
+
return
|
|
83
|
+
}
|
|
84
|
+
this.#log(`Sending message to ${message.targetId}`)
|
|
85
|
+
if (isEphemeralMessage(message)) {
|
|
86
|
+
const outbound =
|
|
87
|
+
"count" in message
|
|
88
|
+
? message
|
|
89
|
+
: {
|
|
90
|
+
...message,
|
|
91
|
+
count: ++this.#count,
|
|
92
|
+
sessionId: this.#sessionId,
|
|
93
|
+
senderId: this.peerId,
|
|
94
|
+
}
|
|
95
|
+
this.#log("Ephemeral message", outbound)
|
|
96
|
+
peer.send(outbound)
|
|
97
|
+
} else {
|
|
98
|
+
const outbound = { ...message, senderId: this.peerId }
|
|
99
|
+
this.#log("Sync message", outbound)
|
|
100
|
+
peer.send(outbound)
|
|
101
|
+
}
|
|
102
|
+
}
|
|
103
|
+
isReady = () => {
|
|
104
|
+
return this.#readyAdapterCount === this.#adapters.length
|
|
105
|
+
}
|
|
106
|
+
whenReady = async () => {
|
|
107
|
+
if (this.isReady()) {
|
|
108
|
+
return
|
|
109
|
+
} else {
|
|
110
|
+
return new Promise(resolve => {
|
|
111
|
+
this.once("ready", () => {
|
|
112
|
+
resolve()
|
|
113
|
+
})
|
|
114
|
+
})
|
|
115
|
+
}
|
|
116
|
+
}
|
|
117
|
+
}
|
|
118
|
+
function randomPeerId() {
|
|
119
|
+
return `user-${Math.round(Math.random() * 100000)}`
|
|
120
|
+
}
|
|
@@ -0,0 +1,85 @@
|
|
|
1
|
+
import { SessionId } from "../EphemeralData.js"
|
|
2
|
+
import { DocumentId, PeerId } from "../types.js"
|
|
3
|
+
export declare function isValidMessage(
|
|
4
|
+
message: NetworkAdapterMessage
|
|
5
|
+
): message is
|
|
6
|
+
| SyncMessage
|
|
7
|
+
| EphemeralMessage
|
|
8
|
+
| RequestMessage
|
|
9
|
+
| DocumentUnavailableMessage
|
|
10
|
+
export declare function isDocumentUnavailableMessage(
|
|
11
|
+
message: NetworkAdapterMessage
|
|
12
|
+
): message is DocumentUnavailableMessage
|
|
13
|
+
export declare function isRequestMessage(
|
|
14
|
+
message: NetworkAdapterMessage
|
|
15
|
+
): message is RequestMessage
|
|
16
|
+
export declare function isSyncMessage(
|
|
17
|
+
message: NetworkAdapterMessage
|
|
18
|
+
): message is SyncMessage
|
|
19
|
+
export declare function isEphemeralMessage(
|
|
20
|
+
message: NetworkAdapterMessage | MessageContents
|
|
21
|
+
): message is EphemeralMessage | EphemeralMessageContents
|
|
22
|
+
export interface SyncMessageEnvelope {
|
|
23
|
+
senderId: PeerId
|
|
24
|
+
}
|
|
25
|
+
export interface SyncMessageContents {
|
|
26
|
+
type: "sync"
|
|
27
|
+
data: Uint8Array
|
|
28
|
+
targetId: PeerId
|
|
29
|
+
documentId: DocumentId
|
|
30
|
+
}
|
|
31
|
+
export type SyncMessage = SyncMessageEnvelope & SyncMessageContents
|
|
32
|
+
export interface EphemeralMessageEnvelope {
|
|
33
|
+
senderId: PeerId
|
|
34
|
+
count: number
|
|
35
|
+
sessionId: SessionId
|
|
36
|
+
}
|
|
37
|
+
export interface EphemeralMessageContents {
|
|
38
|
+
type: "ephemeral"
|
|
39
|
+
targetId: PeerId
|
|
40
|
+
documentId: DocumentId
|
|
41
|
+
data: Uint8Array
|
|
42
|
+
}
|
|
43
|
+
export type EphemeralMessage = EphemeralMessageEnvelope &
|
|
44
|
+
EphemeralMessageContents
|
|
45
|
+
export interface DocumentUnavailableMessageContents {
|
|
46
|
+
type: "doc-unavailable"
|
|
47
|
+
documentId: DocumentId
|
|
48
|
+
targetId: PeerId
|
|
49
|
+
}
|
|
50
|
+
export type DocumentUnavailableMessage = SyncMessageEnvelope &
|
|
51
|
+
DocumentUnavailableMessageContents
|
|
52
|
+
export interface RequestMessageContents {
|
|
53
|
+
type: "request"
|
|
54
|
+
data: Uint8Array
|
|
55
|
+
targetId: PeerId
|
|
56
|
+
documentId: DocumentId
|
|
57
|
+
}
|
|
58
|
+
export type RequestMessage = SyncMessageEnvelope & RequestMessageContents
|
|
59
|
+
export type MessageContents =
|
|
60
|
+
| SyncMessageContents
|
|
61
|
+
| EphemeralMessageContents
|
|
62
|
+
| RequestMessageContents
|
|
63
|
+
| DocumentUnavailableMessageContents
|
|
64
|
+
export type Message =
|
|
65
|
+
| SyncMessage
|
|
66
|
+
| EphemeralMessage
|
|
67
|
+
| RequestMessage
|
|
68
|
+
| DocumentUnavailableMessage
|
|
69
|
+
export type SynchronizerMessage =
|
|
70
|
+
| SyncMessage
|
|
71
|
+
| RequestMessage
|
|
72
|
+
| DocumentUnavailableMessage
|
|
73
|
+
| EphemeralMessage
|
|
74
|
+
type ArriveMessage = {
|
|
75
|
+
senderId: PeerId
|
|
76
|
+
type: "arrive"
|
|
77
|
+
}
|
|
78
|
+
type WelcomeMessage = {
|
|
79
|
+
senderId: PeerId
|
|
80
|
+
targetId: PeerId
|
|
81
|
+
type: "welcome"
|
|
82
|
+
}
|
|
83
|
+
export type NetworkAdapterMessage = ArriveMessage | WelcomeMessage | Message
|
|
84
|
+
export {}
|
|
85
|
+
//# sourceMappingURL=messages.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"messages.d.ts","sourceRoot":"","sources":["../../../src/network/messages.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,SAAS,EAAE,MAAM,qBAAqB,CAAA;AAC/C,OAAO,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;AAEhD,wBAAgB,cAAc,CAC5B,OAAO,EAAE,qBAAqB,GAC7B,OAAO,IACN,WAAW,GACX,gBAAgB,GAChB,cAAc,GACd,0BAA0B,CAU7B;AAED,wBAAgB,4BAA4B,CAC1C,OAAO,EAAE,qBAAqB,GAC7B,OAAO,IAAI,0BAA0B,CAEvC;AAED,wBAAgB,gBAAgB,CAC9B,OAAO,EAAE,qBAAqB,GAC7B,OAAO,IAAI,cAAc,CAE3B;AAED,wBAAgB,aAAa,CAC3B,OAAO,EAAE,qBAAqB,GAC7B,OAAO,IAAI,WAAW,CAExB;AAED,wBAAgB,kBAAkB,CAChC,OAAO,EAAE,qBAAqB,GAAG,eAAe,GAC/C,OAAO,IAAI,gBAAgB,GAAG,wBAAwB,CAExD;AAED,MAAM,WAAW,mBAAmB;IAClC,QAAQ,EAAE,MAAM,CAAA;CACjB;AAED,MAAM,WAAW,mBAAmB;IAClC,IAAI,EAAE,MAAM,CAAA;IACZ,IAAI,EAAE,UAAU,CAAA;IAChB,QAAQ,EAAE,MAAM,CAAA;IAChB,UAAU,EAAE,UAAU,CAAA;CACvB;AAED,MAAM,MAAM,WAAW,GAAG,mBAAmB,GAAG,mBAAmB,CAAA;AAEnE,MAAM,WAAW,wBAAwB;IACvC,QAAQ,EAAE,MAAM,CAAA;IAChB,KAAK,EAAE,MAAM,CAAA;IACb,SAAS,EAAE,SAAS,CAAA;CACrB;AAED,MAAM,WAAW,wBAAwB;IACvC,IAAI,EAAE,WAAW,CAAA;IACjB,QAAQ,EAAE,MAAM,CAAA;IAChB,UAAU,EAAE,UAAU,CAAA;IACtB,IAAI,EAAE,UAAU,CAAA;CACjB;AAED,MAAM,MAAM,gBAAgB,GAAG,wBAAwB,GACrD,wBAAwB,CAAA;AAE1B,MAAM,WAAW,kCAAkC;IACjD,IAAI,EAAE,iBAAiB,CAAA;IACvB,UAAU,EAAE,UAAU,CAAA;IACtB,QAAQ,EAAE,MAAM,CAAA;CACjB;AAED,MAAM,MAAM,0BAA0B,GAAG,mBAAmB,GAC1D,kCAAkC,CAAA;AAEpC,MAAM,WAAW,sBAAsB;IACrC,IAAI,EAAE,SAAS,CAAA;IACf,IAAI,EAAE,UAAU,CAAA;IAChB,QAAQ,EAAE,MAAM,CAAA;IAChB,UAAU,EAAE,UAAU,CAAA;CACvB;AAED,MAAM,MAAM,cAAc,GAAG,mBAAmB,GAAG,sBAAsB,CAAA;AAEzE,MAAM,MAAM,eAAe,GACvB,mBAAmB,GACnB,wBAAwB,GACxB,sBAAsB,GACtB,kCAAkC,CAAA;AAEtC,MAAM,MAAM,OAAO,GACf,WAAW,GACX,gBAAgB,GAChB,cAAc,GACd,0BAA0B,CAAA;AAE9B,MAAM,MAAM,mBAAmB,GAC3B,WAAW,GACX,cAAc,GACd,0BAA0B,GAC1B,gBAAgB,CAAA;AAEpB,KAAK,aAAa,GAAG;IACnB,QAAQ,EAAE,MAAM,CAAA;IAChB,IAAI,EAAE,QAAQ,CAAA;CACf,CAAA;AAED,KAAK,cAAc,GAAG;IACpB,QAAQ,EAAE,MAAM,CAAA;IAChB,QAAQ,EAAE,MAAM,CAAA;IAChB,IAAI,EAAE,SAAS,CAAA;CAChB,CAAA;AAED,MAAM,MAAM,qBAAqB,GAAG,aAAa,GAAG,cAAc,GAAG,OAAO,CAAA"}
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
export function isValidMessage(message) {
|
|
2
|
+
return (
|
|
3
|
+
typeof message === "object" &&
|
|
4
|
+
typeof message.type === "string" &&
|
|
5
|
+
typeof message.senderId === "string" &&
|
|
6
|
+
(isSyncMessage(message) ||
|
|
7
|
+
isEphemeralMessage(message) ||
|
|
8
|
+
isRequestMessage(message) ||
|
|
9
|
+
isDocumentUnavailableMessage(message))
|
|
10
|
+
)
|
|
11
|
+
}
|
|
12
|
+
export function isDocumentUnavailableMessage(message) {
|
|
13
|
+
return message.type === "doc-unavailable"
|
|
14
|
+
}
|
|
15
|
+
export function isRequestMessage(message) {
|
|
16
|
+
return message.type === "request"
|
|
17
|
+
}
|
|
18
|
+
export function isSyncMessage(message) {
|
|
19
|
+
return message.type === "sync"
|
|
20
|
+
}
|
|
21
|
+
export function isEphemeralMessage(message) {
|
|
22
|
+
return message.type === "ephemeral"
|
|
23
|
+
}
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
export declare abstract class StorageAdapter {
|
|
2
|
+
abstract load(key: StorageKey): Promise<Uint8Array | undefined>
|
|
3
|
+
abstract save(key: StorageKey, data: Uint8Array): Promise<void>
|
|
4
|
+
abstract remove(key: StorageKey): Promise<void>
|
|
5
|
+
abstract loadRange(keyPrefix: StorageKey): Promise<
|
|
6
|
+
{
|
|
7
|
+
key: StorageKey
|
|
8
|
+
data: Uint8Array
|
|
9
|
+
}[]
|
|
10
|
+
>
|
|
11
|
+
abstract removeRange(keyPrefix: StorageKey): Promise<void>
|
|
12
|
+
}
|
|
13
|
+
export type StorageKey = string[]
|
|
14
|
+
//# sourceMappingURL=StorageAdapter.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"StorageAdapter.d.ts","sourceRoot":"","sources":["../../../src/storage/StorageAdapter.ts"],"names":[],"mappings":"AAAA,8BAAsB,cAAc;IAMlC,QAAQ,CAAC,IAAI,CAAC,GAAG,EAAE,UAAU,GAAG,OAAO,CAAC,UAAU,GAAG,SAAS,CAAC;IAC/D,QAAQ,CAAC,IAAI,CAAC,GAAG,EAAE,UAAU,EAAE,IAAI,EAAE,UAAU,GAAG,OAAO,CAAC,IAAI,CAAC;IAC/D,QAAQ,CAAC,MAAM,CAAC,GAAG,EAAE,UAAU,GAAG,OAAO,CAAC,IAAI,CAAC;IAO/C,QAAQ,CAAC,SAAS,CAAC,SAAS,EAAE,UAAU,GAAG,OAAO,CAAC;QAAC,GAAG,EAAE,UAAU,CAAC;QAAC,IAAI,EAAE,UAAU,CAAA;KAAC,EAAE,CAAC;IACzF,QAAQ,CAAC,WAAW,CAAC,SAAS,EAAE,UAAU,GAAG,OAAO,CAAC,IAAI,CAAC;CAC3D;AAED,MAAM,MAAO,UAAU,GAAG,MAAM,EAAE,CAAA"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export class StorageAdapter {}
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
import * as A from "@automerge/automerge/next"
|
|
2
|
+
import { StorageAdapter } from "./StorageAdapter.js"
|
|
3
|
+
import { type DocumentId } from "../types.js"
|
|
4
|
+
export type ChunkType = "snapshot" | "incremental"
|
|
5
|
+
export declare class StorageSubsystem {
|
|
6
|
+
#private
|
|
7
|
+
constructor(storageAdapter: StorageAdapter)
|
|
8
|
+
loadDoc(documentId: DocumentId): Promise<A.Doc<unknown> | null>
|
|
9
|
+
saveDoc(documentId: DocumentId, doc: A.Doc<unknown>): Promise<void>
|
|
10
|
+
remove(documentId: DocumentId): Promise<void>
|
|
11
|
+
}
|
|
12
|
+
//# sourceMappingURL=StorageSubsystem.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"StorageSubsystem.d.ts","sourceRoot":"","sources":["../../../src/storage/StorageSubsystem.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,CAAC,MAAM,2BAA2B,CAAA;AAC9C,OAAO,EAAE,cAAc,EAAc,MAAM,qBAAqB,CAAA;AAEhE,OAAO,EAAE,KAAK,UAAU,EAAE,MAAM,aAAa,CAAA;AAa7C,MAAM,MAAM,SAAS,GAAG,UAAU,GAAG,aAAa,CAAA;AAelD,qBAAa,gBAAgB;;gBAQf,cAAc,EAAE,cAAc;IAuDpC,OAAO,CAAC,UAAU,EAAE,UAAU,GAAG,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,IAAI,CAAC;IA0B/D,OAAO,CAAC,UAAU,EAAE,UAAU,EAAE,GAAG,EAAE,CAAC,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC;IAanE,MAAM,CAAC,UAAU,EAAE,UAAU;CAmCpC"}
|
|
@@ -0,0 +1,145 @@
|
|
|
1
|
+
import * as A from "@automerge/automerge/next"
|
|
2
|
+
import * as sha256 from "fast-sha256"
|
|
3
|
+
import { mergeArrays } from "../helpers/mergeArrays.js"
|
|
4
|
+
import debug from "debug"
|
|
5
|
+
import { headsAreSame } from "../helpers/headsAreSame.js"
|
|
6
|
+
function keyHash(binary) {
|
|
7
|
+
const hash = sha256.hash(binary)
|
|
8
|
+
const hashArray = Array.from(new Uint8Array(hash)) // convert buffer to byte array
|
|
9
|
+
const hashHex = hashArray.map(b => ("00" + b.toString(16)).slice(-2)).join("") // convert bytes to hex string
|
|
10
|
+
return hashHex
|
|
11
|
+
}
|
|
12
|
+
function headsHash(heads) {
|
|
13
|
+
let encoder = new TextEncoder()
|
|
14
|
+
let headsbinary = mergeArrays(heads.map(h => encoder.encode(h)))
|
|
15
|
+
return keyHash(headsbinary)
|
|
16
|
+
}
|
|
17
|
+
export class StorageSubsystem {
|
|
18
|
+
#storageAdapter
|
|
19
|
+
#chunkInfos = new Map()
|
|
20
|
+
#storedHeads = new Map()
|
|
21
|
+
#log = debug(`automerge-repo:storage-subsystem`)
|
|
22
|
+
#snapshotting = false
|
|
23
|
+
constructor(storageAdapter) {
|
|
24
|
+
this.#storageAdapter = storageAdapter
|
|
25
|
+
}
|
|
26
|
+
async #saveIncremental(documentId, doc) {
|
|
27
|
+
const binary = A.saveSince(doc, this.#storedHeads.get(documentId) ?? [])
|
|
28
|
+
if (binary && binary.length > 0) {
|
|
29
|
+
const key = [documentId, "incremental", keyHash(binary)]
|
|
30
|
+
this.#log(`Saving incremental ${key} for document ${documentId}`)
|
|
31
|
+
await this.#storageAdapter.save(key, binary)
|
|
32
|
+
if (!this.#chunkInfos.has(documentId)) {
|
|
33
|
+
this.#chunkInfos.set(documentId, [])
|
|
34
|
+
}
|
|
35
|
+
this.#chunkInfos.get(documentId).push({
|
|
36
|
+
key,
|
|
37
|
+
type: "incremental",
|
|
38
|
+
size: binary.length,
|
|
39
|
+
})
|
|
40
|
+
this.#storedHeads.set(documentId, A.getHeads(doc))
|
|
41
|
+
} else {
|
|
42
|
+
return Promise.resolve()
|
|
43
|
+
}
|
|
44
|
+
}
|
|
45
|
+
async #saveTotal(documentId, doc, sourceChunks) {
|
|
46
|
+
this.#snapshotting = true
|
|
47
|
+
const binary = A.save(doc)
|
|
48
|
+
const snapshotHash = headsHash(A.getHeads(doc))
|
|
49
|
+
const key = [documentId, "snapshot", snapshotHash]
|
|
50
|
+
const oldKeys = new Set(
|
|
51
|
+
sourceChunks.map(c => c.key).filter(k => k[2] !== snapshotHash)
|
|
52
|
+
)
|
|
53
|
+
this.#log(`Saving snapshot ${key} for document ${documentId}`)
|
|
54
|
+
this.#log(`deleting old chunks ${Array.from(oldKeys)}`)
|
|
55
|
+
await this.#storageAdapter.save(key, binary)
|
|
56
|
+
for (const key of oldKeys) {
|
|
57
|
+
await this.#storageAdapter.remove(key)
|
|
58
|
+
}
|
|
59
|
+
const newChunkInfos =
|
|
60
|
+
this.#chunkInfos.get(documentId)?.filter(c => !oldKeys.has(c.key)) ?? []
|
|
61
|
+
newChunkInfos.push({ key, type: "snapshot", size: binary.length })
|
|
62
|
+
this.#chunkInfos.set(documentId, newChunkInfos)
|
|
63
|
+
this.#snapshotting = false
|
|
64
|
+
}
|
|
65
|
+
async loadDoc(documentId) {
|
|
66
|
+
const loaded = await this.#storageAdapter.loadRange([documentId])
|
|
67
|
+
const binaries = []
|
|
68
|
+
const chunkInfos = []
|
|
69
|
+
for (const chunk of loaded) {
|
|
70
|
+
const chunkType = chunkTypeFromKey(chunk.key)
|
|
71
|
+
if (chunkType == null) {
|
|
72
|
+
continue
|
|
73
|
+
}
|
|
74
|
+
chunkInfos.push({
|
|
75
|
+
key: chunk.key,
|
|
76
|
+
type: chunkType,
|
|
77
|
+
size: chunk.data.length,
|
|
78
|
+
})
|
|
79
|
+
binaries.push(chunk.data)
|
|
80
|
+
}
|
|
81
|
+
this.#chunkInfos.set(documentId, chunkInfos)
|
|
82
|
+
const binary = mergeArrays(binaries)
|
|
83
|
+
if (binary.length === 0) {
|
|
84
|
+
return null
|
|
85
|
+
}
|
|
86
|
+
const newDoc = A.loadIncremental(A.init(), binary)
|
|
87
|
+
this.#storedHeads.set(documentId, A.getHeads(newDoc))
|
|
88
|
+
return newDoc
|
|
89
|
+
}
|
|
90
|
+
async saveDoc(documentId, doc) {
|
|
91
|
+
if (!this.#shouldSave(documentId, doc)) {
|
|
92
|
+
return
|
|
93
|
+
}
|
|
94
|
+
let sourceChunks = this.#chunkInfos.get(documentId) ?? []
|
|
95
|
+
if (this.#shouldCompact(sourceChunks)) {
|
|
96
|
+
this.#saveTotal(documentId, doc, sourceChunks)
|
|
97
|
+
} else {
|
|
98
|
+
this.#saveIncremental(documentId, doc)
|
|
99
|
+
}
|
|
100
|
+
this.#storedHeads.set(documentId, A.getHeads(doc))
|
|
101
|
+
}
|
|
102
|
+
async remove(documentId) {
|
|
103
|
+
this.#storageAdapter.removeRange([documentId, "snapshot"])
|
|
104
|
+
this.#storageAdapter.removeRange([documentId, "incremental"])
|
|
105
|
+
}
|
|
106
|
+
#shouldSave(documentId, doc) {
|
|
107
|
+
const oldHeads = this.#storedHeads.get(documentId)
|
|
108
|
+
if (!oldHeads) {
|
|
109
|
+
return true
|
|
110
|
+
}
|
|
111
|
+
const newHeads = A.getHeads(doc)
|
|
112
|
+
if (headsAreSame(newHeads, oldHeads)) {
|
|
113
|
+
return false
|
|
114
|
+
}
|
|
115
|
+
return true
|
|
116
|
+
}
|
|
117
|
+
#shouldCompact(sourceChunks) {
|
|
118
|
+
if (this.#snapshotting) {
|
|
119
|
+
return false
|
|
120
|
+
}
|
|
121
|
+
// compact if the incremental size is greater than the snapshot size
|
|
122
|
+
let snapshotSize = 0
|
|
123
|
+
let incrementalSize = 0
|
|
124
|
+
for (const chunk of sourceChunks) {
|
|
125
|
+
if (chunk.type === "snapshot") {
|
|
126
|
+
snapshotSize += chunk.size
|
|
127
|
+
} else {
|
|
128
|
+
incrementalSize += chunk.size
|
|
129
|
+
}
|
|
130
|
+
}
|
|
131
|
+
return incrementalSize >= snapshotSize
|
|
132
|
+
}
|
|
133
|
+
}
|
|
134
|
+
function chunkTypeFromKey(key) {
|
|
135
|
+
if (key.length < 2) {
|
|
136
|
+
return null
|
|
137
|
+
}
|
|
138
|
+
const chunkTypeStr = key[key.length - 2]
|
|
139
|
+
if (chunkTypeStr === "snapshot" || chunkTypeStr === "incremental") {
|
|
140
|
+
const chunkType = chunkTypeStr
|
|
141
|
+
return chunkType
|
|
142
|
+
} else {
|
|
143
|
+
return null
|
|
144
|
+
}
|
|
145
|
+
}
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
import { Repo } from "../Repo.js"
|
|
2
|
+
import { PeerId, DocumentId } from "../types.js"
|
|
3
|
+
import { Synchronizer } from "./Synchronizer.js"
|
|
4
|
+
import { SynchronizerMessage } from "../network/messages.js"
|
|
5
|
+
/** A CollectionSynchronizer is responsible for synchronizing a DocCollection with peers. */
|
|
6
|
+
export declare class CollectionSynchronizer extends Synchronizer {
|
|
7
|
+
#private
|
|
8
|
+
private repo
|
|
9
|
+
constructor(repo: Repo)
|
|
10
|
+
/**
|
|
11
|
+
* When we receive a sync message for a document we haven't got in memory, we
|
|
12
|
+
* register it with the repo and start synchronizing
|
|
13
|
+
*/
|
|
14
|
+
receiveMessage(message: SynchronizerMessage): Promise<void>
|
|
15
|
+
/**
|
|
16
|
+
* Starts synchronizing the given document with all peers that we share it generously with.
|
|
17
|
+
*/
|
|
18
|
+
addDocument(documentId: DocumentId): void
|
|
19
|
+
removeDocument(documentId: DocumentId): void
|
|
20
|
+
/** Adds a peer and maybe starts synchronizing with them */
|
|
21
|
+
addPeer(peerId: PeerId): void
|
|
22
|
+
/** Removes a peer and stops synchronizing with them */
|
|
23
|
+
removePeer(peerId: PeerId): void
|
|
24
|
+
}
|
|
25
|
+
//# sourceMappingURL=CollectionSynchronizer.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"CollectionSynchronizer.d.ts","sourceRoot":"","sources":["../../../src/synchronizer/CollectionSynchronizer.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,EAAE,MAAM,YAAY,CAAA;AAOjC,OAAO,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AAEhD,OAAO,EAAE,YAAY,EAAE,MAAM,mBAAmB,CAAA;AAGhD,OAAO,EAGL,mBAAmB,EAEpB,MAAM,wBAAwB,CAAA;AAG/B,4FAA4F;AAC5F,qBAAa,sBAAuB,SAAQ,YAAY;;IAU1C,OAAO,CAAC,IAAI;gBAAJ,IAAI,EAAE,IAAI;IAiC9B;;;OAGG;IACG,cAAc,CAAC,OAAO,EAAE,mBAAmB;IAyBjD;;OAEG;IACH,WAAW,CAAC,UAAU,EAAE,UAAU;IAYlC,cAAc,CAAC,UAAU,EAAE,UAAU;IAIrC,2DAA2D;IAC3D,OAAO,CAAC,MAAM,EAAE,MAAM;IAgBtB,uDAAuD;IACvD,UAAU,CAAC,MAAM,EAAE,MAAM;CAQ1B"}
|
|
@@ -0,0 +1,106 @@
|
|
|
1
|
+
import { stringifyAutomergeUrl } from "../DocUrl.js"
|
|
2
|
+
import { DocSynchronizer } from "./DocSynchronizer.js"
|
|
3
|
+
import { Synchronizer } from "./Synchronizer.js"
|
|
4
|
+
import debug from "debug"
|
|
5
|
+
const log = debug("automerge-repo:collectionsync")
|
|
6
|
+
/** A CollectionSynchronizer is responsible for synchronizing a DocCollection with peers. */
export class CollectionSynchronizer extends Synchronizer {
    repo
    /** The set of peers we are connected with */
    #peers = new Set()
    /** A map of documentIds to their synchronizers */
    #docSynchronizers = {}
    /** Used to determine if the document is known to the Collection and a synchronizer exists or is being set up */
    #docSetUp = {}
    constructor(repo) {
        super()
        this.repo = repo
    }
    /** Returns a synchronizer for the given document, creating one if it doesn't already exist. */
    #fetchDocSynchronizer(documentId) {
        let synchronizer = this.#docSynchronizers[documentId]
        if (synchronizer === undefined) {
            // Look the document up via the repo so the handle is shared with other consumers
            const handle = this.repo.find(stringifyAutomergeUrl({ documentId }))
            synchronizer = this.#initDocSynchronizer(handle)
            this.#docSynchronizers[documentId] = synchronizer
        }
        return synchronizer
    }
    /** Creates a new docSynchronizer and sets it up to propagate messages */
    #initDocSynchronizer(handle) {
        const synchronizer = new DocSynchronizer(handle)
        // Bubble outbound messages up so the repo's network layer can deliver them
        synchronizer.on("message", event => this.emit("message", event))
        return synchronizer
    }
    /** returns an array of peerIds that we share this document generously with */
    async #documentGenerousPeers(documentId) {
        const generousPeers = []
        // NOTE: sharePolicy is consulted one peer at a time, in insertion order;
        // the policy may be async, so each decision is awaited before the next.
        for (const peerId of this.#peers) {
            const okToShare = await this.repo.sharePolicy(peerId, documentId)
            if (okToShare) {
                generousPeers.push(peerId)
            }
        }
        return generousPeers
    }
    // PUBLIC
    /**
     * When we receive a sync message for a document we haven't got in memory, we
     * register it with the repo and start synchronizing
     */
    async receiveMessage(message) {
        log(
            `onSyncMessage: ${message.senderId}, ${message.documentId}, ${
                "data" in message ? message.data.byteLength + "bytes" : ""
            }`
        )
        const { documentId } = message
        if (!documentId) {
            throw new Error("received a message with an invalid documentId")
        }
        // Mark the document as set up before dispatching, so a later addDocument is a no-op
        this.#docSetUp[documentId] = true
        const synchronizer = this.#fetchDocSynchronizer(documentId)
        synchronizer.receiveMessage(message)
        // Initiate sync with any new peers
        const generousPeers = await this.#documentGenerousPeers(documentId)
        const newPeers = generousPeers.filter(
            peerId => !synchronizer.hasPeer(peerId)
        )
        synchronizer.beginSync(newPeers)
    }
    /**
     * Starts synchronizing the given document with all peers that we share it generously with.
     */
    addDocument(documentId) {
        // HACK: this is a hack to prevent us from adding the same document twice
        if (this.#docSetUp[documentId]) {
            return
        }
        const synchronizer = this.#fetchDocSynchronizer(documentId)
        // Fire-and-forget: peers are gathered asynchronously and sync begins when ready
        void this.#documentGenerousPeers(documentId).then(peers => {
            synchronizer.beginSync(peers)
        })
    }
    // TODO: implement this
    removeDocument(documentId) {
        throw new Error("not implemented")
    }
    /** Adds a peer and maybe starts synchronizing with them */
    addPeer(peerId) {
        log(`adding ${peerId} & synchronizing with them`)
        if (this.#peers.has(peerId)) {
            return
        }
        this.#peers.add(peerId)
        // Consult the share policy for every known document; begin sync where allowed
        Object.values(this.#docSynchronizers).forEach(synchronizer => {
            const { documentId } = synchronizer
            this.repo.sharePolicy(peerId, documentId).then(okToShare => {
                if (okToShare) synchronizer.beginSync([peerId])
            })
        })
    }
    /** Removes a peer and stops synchronizing with them */
    removePeer(peerId) {
        log(`removing peer ${peerId}`)
        this.#peers.delete(peerId)
        Object.values(this.#docSynchronizers).forEach(synchronizer =>
            synchronizer.endSync(peerId)
        )
    }
}
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
import { DocHandle } from "../DocHandle.js"
|
|
2
|
+
import { PeerId } from "../types.js"
|
|
3
|
+
import { Synchronizer } from "./Synchronizer.js"
|
|
4
|
+
import {
|
|
5
|
+
EphemeralMessage,
|
|
6
|
+
RequestMessage,
|
|
7
|
+
SynchronizerMessage,
|
|
8
|
+
SyncMessage,
|
|
9
|
+
} from "../network/messages.js"
|
|
10
|
+
/**
 * What we currently believe about a remote peer's relationship to this document.
 * NOTE(review): exact transition rules live in DocSynchronizer.ts — confirm there.
 */
type PeerDocumentStatus = "unknown" | "has" | "unavailable" | "wants"
/**
 * DocSynchronizer takes a handle to an Automerge document, and receives & dispatches sync messages
 * to bring it inline with all other peers' versions.
 */
export declare class DocSynchronizer extends Synchronizer {
    #private
    private handle
    constructor(handle: DocHandle<any>)
    /** Current per-peer document status, keyed by peer id. */
    get peerStates(): Record<PeerId, PeerDocumentStatus>
    /** The id of the document this synchronizer is responsible for. */
    get documentId(): import("../types.js").DocumentId
    /** Whether this synchronizer is already tracking the given peer. */
    hasPeer(peerId: PeerId): boolean
    /** Starts (or restarts) synchronization with the given peers. */
    beginSync(peerIds: PeerId[]): void
    /** Stops synchronizing with the given peer. */
    endSync(peerId: PeerId): void
    /** Entry point for any synchronizer message; presumably dispatches to the two receive* methods below — confirm in implementation. */
    receiveMessage(message: SynchronizerMessage): void
    /** Handles a transient (non-persisted) broadcast message for this document. */
    receiveEphemeralMessage(message: EphemeralMessage): void
    /** Handles an Automerge sync or document-request message. */
    receiveSyncMessage(message: SyncMessage | RequestMessage): void
}
export {}
//# sourceMappingURL=DocSynchronizer.d.ts.map
|
|
29
|
+
//# sourceMappingURL=DocSynchronizer.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"DocSynchronizer.d.ts","sourceRoot":"","sources":["../../../src/synchronizer/DocSynchronizer.ts"],"names":[],"mappings":"AACA,OAAO,EAEL,SAAS,EAKV,MAAM,iBAAiB,CAAA;AACxB,OAAO,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;AACpC,OAAO,EAAE,YAAY,EAAE,MAAM,mBAAmB,CAAA;AAGhD,OAAO,EACL,gBAAgB,EAIhB,cAAc,EACd,mBAAmB,EACnB,WAAW,EACZ,MAAM,wBAAwB,CAAA;AAE/B,KAAK,kBAAkB,GAAG,SAAS,GAAG,KAAK,GAAG,aAAa,GAAG,OAAO,CAAA;AAGrE;;;GAGG;AACH,qBAAa,eAAgB,SAAQ,YAAY;;IAiBnC,OAAO,CAAC,MAAM;gBAAN,MAAM,EAAE,SAAS,CAAC,GAAG,CAAC;IAoB1C,IAAI,UAAU,uCAEb;IAED,IAAI,UAAU,qCAEb;IAiHD,OAAO,CAAC,MAAM,EAAE,MAAM;IAItB,SAAS,CAAC,OAAO,EAAE,MAAM,EAAE;IA6B3B,OAAO,CAAC,MAAM,EAAE,MAAM;IAKtB,cAAc,CAAC,OAAO,EAAE,mBAAmB;IAkB3C,uBAAuB,CAAC,OAAO,EAAE,gBAAgB;IAuBjD,kBAAkB,CAAC,OAAO,EAAE,WAAW,GAAG,cAAc;CA2EzD"}
|