@automerge/automerge-repo 1.0.17 → 1.0.18
This diff shows the changes between publicly released versions of this package, as they appear in its public registry. It is provided for informational purposes only.
- package/dist/DocHandle.d.ts +15 -0
- package/dist/DocHandle.d.ts.map +1 -1
- package/dist/DocHandle.js +12 -0
- package/dist/Repo.d.ts +2 -0
- package/dist/Repo.d.ts.map +1 -1
- package/dist/Repo.js +17 -6
- package/dist/index.d.ts +1 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/network/messages.d.ts +7 -0
- package/dist/network/messages.d.ts.map +1 -1
- package/dist/storage/StorageSubsystem.d.ts +3 -1
- package/dist/storage/StorageSubsystem.d.ts.map +1 -1
- package/dist/storage/StorageSubsystem.js +10 -0
- package/dist/storage/chunkTypeFromKey.d.ts +1 -2
- package/dist/storage/chunkTypeFromKey.d.ts.map +1 -1
- package/dist/synchronizer/CollectionSynchronizer.d.ts +2 -0
- package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
- package/dist/synchronizer/CollectionSynchronizer.js +14 -1
- package/dist/synchronizer/DocSynchronizer.d.ts +6 -2
- package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -1
- package/dist/synchronizer/DocSynchronizer.js +119 -76
- package/dist/synchronizer/Synchronizer.d.ts +2 -1
- package/dist/synchronizer/Synchronizer.d.ts.map +1 -1
- package/package.json +3 -5
- package/src/DocHandle.ts +25 -0
- package/src/Repo.ts +23 -6
- package/src/index.ts +1 -0
- package/src/network/messages.ts +8 -0
- package/src/storage/StorageSubsystem.ts +20 -2
- package/src/storage/chunkTypeFromKey.ts +1 -2
- package/src/synchronizer/CollectionSynchronizer.ts +19 -1
- package/src/synchronizer/DocSynchronizer.ts +168 -94
- package/src/synchronizer/Synchronizer.ts +6 -1
- package/test/DocHandle.test.ts +18 -1
- package/test/DocSynchronizer.test.ts +47 -16
- package/test/Repo.test.ts +159 -4
- package/test/StorageSubsystem.test.ts +30 -2
package/src/synchronizer/DocSynchronizer.ts
CHANGED

@@ -20,9 +20,20 @@ import {
 import { PeerId } from "../types.js"
 import { Synchronizer } from "./Synchronizer.js"
 import { throttle } from "../helpers/throttle.js"
+import { headsAreSame } from "../helpers/headsAreSame.js"
 
 type PeerDocumentStatus = "unknown" | "has" | "unavailable" | "wants"
 
+type PendingMessage = {
+  message: RequestMessage | SyncMessage
+  received: Date
+}
+
+interface DocSynchronizerConfig {
+  handle: DocHandle<unknown>
+  onLoadSyncState?: (peerId: PeerId) => A.SyncState | undefined
+}
+
 /**
  * DocSynchronizer takes a handle to an Automerge document, and receives & dispatches sync messages
  * to bring it inline with all other peers' versions.
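For orientation, here is what the new `DocSynchronizerConfig` means from the caller's side. This is a sketch in the style of the package's own tests (shown further down in this diff); the `TestDoc` shape and the relative import paths are assumptions, not part of the published API surface.

```ts
import { DocHandle } from "../src/DocHandle.js"
import { DocSynchronizer } from "../src/synchronizer/DocSynchronizer.js"
import { generateAutomergeUrl, parseAutomergeUrl } from "../src/AutomergeUrl.js"

type TestDoc = { foo?: string } // illustrative document shape

// a handle for a brand-new local document, as in the package's tests
const documentId = parseAutomergeUrl(generateAutomergeUrl()).documentId
const handle = new DocHandle<TestDoc>(documentId, { isNew: true })

// 1.0.17 appears to have passed the handle positionally;
// 1.0.18 takes a single config object instead
const docSynchronizer = new DocSynchronizer({
  handle: handle as DocHandle<unknown>,
})

console.log(docSynchronizer.documentId)
```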
@@ -34,17 +45,29 @@ export class DocSynchronizer extends Synchronizer {
   /** Active peers */
   #peers: PeerId[] = []
 
+  #pendingSyncStateCallbacks: Record<
+    PeerId,
+    ((syncState: A.SyncState) => void)[]
+  > = {}
+
   #peerDocumentStatuses: Record<PeerId, PeerDocumentStatus> = {}
 
   /** Sync state for each peer we've communicated with (including inactive peers) */
   #syncStates: Record<PeerId, A.SyncState> = {}
 
-  #pendingSyncMessages: Array<…
+  #pendingSyncMessages: Array<PendingMessage> = []
 
   #syncStarted = false
 
- …
+  #handle: DocHandle<unknown>
+  #onLoadSyncState: (peerId: PeerId) => Promise<A.SyncState | undefined>
+
+  constructor({ handle, onLoadSyncState }: DocSynchronizerConfig) {
     super()
+    this.#handle = handle
+    this.#onLoadSyncState =
+      onLoadSyncState ?? (() => Promise.resolve(undefined))
+
     const docId = handle.documentId.slice(0, 5)
     this.#log = debug(`automerge-repo:docsync:${docId}`)
 
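The new `onLoadSyncState` hook lets the caller seed a peer's sync state from persistence instead of always starting from scratch; in the published package this is wired up by `Repo` and the `StorageSubsystem` (both touched in this release, per the file list above). The sketch below substitutes a hypothetical in-memory map so it stays self-contained, and uses an async loader because the constructor and `#withSyncState` consume the hook with `.then(...)`.

```ts
import * as A from "@automerge/automerge"
import { DocHandle } from "../src/DocHandle.js"
import { PeerId } from "../src/index.js"
import { DocSynchronizer } from "../src/synchronizer/DocSynchronizer.js"

declare const handle: DocHandle<unknown> // assumed to exist elsewhere

// hypothetical store of sync states persisted from a previous session
const savedSyncStates = new Map<PeerId, Uint8Array>()

const docSynchronizer = new DocSynchronizer({
  handle,
  // called lazily the first time the synchronizer needs this peer's sync
  // state; returning undefined falls back to A.initSyncState()
  onLoadSyncState: async (peerId: PeerId) => {
    const stored = savedSyncStates.get(peerId)
    return stored ? A.decodeSyncState(stored) : undefined
  },
})
```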
@@ -69,14 +92,14 @@ export class DocSynchronizer extends Synchronizer {
   }
 
   get documentId() {
-    return this…
+    return this.#handle.documentId
   }
 
   /// PRIVATE
 
   async #syncWithPeers() {
     this.#log(`syncWithPeers`)
-    const doc = await this…
+    const doc = await this.#handle.doc()
     if (doc === undefined) return
     this.#peers.forEach(peerId => this.#sendSyncMessage(peerId, doc))
   }
@@ -94,72 +117,111 @@ export class DocSynchronizer extends Synchronizer {
     const message: MessageContents<EphemeralMessage> = {
       type: "ephemeral",
       targetId: peerId,
-      documentId: this…
+      documentId: this.#handle.documentId,
       data,
     }
     this.emit("message", message)
   }
 
-  #…
+  #withSyncState(peerId: PeerId, callback: (syncState: A.SyncState) => void) {
     if (!this.#peers.includes(peerId)) {
-      this.#log("adding a new peer", peerId)
       this.#peers.push(peerId)
     }
 
-    // when a peer is added, we don't know if it has the document or not
     if (!(peerId in this.#peerDocumentStatuses)) {
       this.#peerDocumentStatuses[peerId] = "unknown"
     }
 
-    …
+    const syncState = this.#syncStates[peerId]
+    if (syncState) {
+      callback(syncState)
+      return
+    }
+
+    let pendingCallbacks = this.#pendingSyncStateCallbacks[peerId]
+    if (!pendingCallbacks) {
+      this.#onLoadSyncState(peerId).then(syncState => {
+        this.#initSyncState(peerId, syncState ?? A.initSyncState())
+      })
+      pendingCallbacks = this.#pendingSyncStateCallbacks[peerId] = []
+    }
+
+    pendingCallbacks.push(callback)
   }
 
-  #…
- … (2 lines)
+  #initSyncState(peerId: PeerId, syncState: A.SyncState) {
+    const pendingCallbacks = this.#pendingSyncStateCallbacks[peerId]
+    if (pendingCallbacks) {
+      for (const callback of pendingCallbacks) {
+        callback(syncState)
+      }
+    }
 
-    …
+    delete this.#pendingSyncStateCallbacks[peerId]
 
     this.#syncStates[peerId] = syncState
   }
 
+  #setSyncState(peerId: PeerId, syncState: A.SyncState) {
+    const previousSyncState = this.#syncStates[peerId]
+
+    this.#syncStates[peerId] = syncState
+
+    const haveTheirSyncedHeadsChanged =
+      syncState.theirHeads &&
+      (!previousSyncState ||
+        !previousSyncState.theirHeads ||
+        !headsAreSame(previousSyncState.theirHeads, syncState.theirHeads))
+
+    if (haveTheirSyncedHeadsChanged) {
+      this.#handle.setRemoteHeads(peerId, syncState.theirHeads)
+    }
+
+    this.emit("sync-state", {
+      peerId,
+      syncState,
+      documentId: this.#handle.documentId,
+    })
+  }
+
   #sendSyncMessage(peerId: PeerId, doc: A.Doc<unknown>) {
     this.#log(`sendSyncMessage ->${peerId}`)
 
- … (32 lines)
+    this.#withSyncState(peerId, syncState => {
+      const [newSyncState, message] = A.generateSyncMessage(doc, syncState)
+      if (message) {
+        this.#setSyncState(peerId, newSyncState)
+        const isNew = A.getHeads(doc).length === 0
+
+        if (
+          !this.#handle.isReady() &&
+          isNew &&
+          newSyncState.sharedHeads.length === 0 &&
+          !Object.values(this.#peerDocumentStatuses).includes("has") &&
+          this.#peerDocumentStatuses[peerId] === "unknown"
+        ) {
+          // we don't have the document (or access to it), so we request it
+          this.emit("message", {
+            type: "request",
+            targetId: peerId,
+            documentId: this.#handle.documentId,
+            data: message,
+          } as RequestMessage)
+        } else {
+          this.emit("message", {
+            type: "sync",
+            targetId: peerId,
+            data: message,
+            documentId: this.#handle.documentId,
+          } as SyncMessage)
+        }
+
+        // if we have sent heads, then the peer now has or will have the document
+        if (!isNew) {
+          this.#peerDocumentStatuses[peerId] = "has"
+        }
       }
-    }
+    })
   }
 
   /// PUBLIC
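The `#withSyncState` / `#initSyncState` pair is a small "defer until the async value arrives" queue: the first caller for a peer triggers `onLoadSyncState`, later callers queue up, and everything queued runs once the state is initialised. A generic standalone sketch of the same pattern follows; all names here are illustrative and not part of the package.

```ts
type Callback<T> = (value: T) => void

/** Runs callbacks against a value that is loaded asynchronously on first use. */
class DeferredValue<T> {
  #value: T | undefined
  #pending: Callback<T>[] | undefined

  constructor(private load: () => Promise<T>) {}

  with(callback: Callback<T>) {
    if (this.#value !== undefined) {
      callback(this.#value) // already loaded: run immediately
      return
    }

    let pending = this.#pending
    if (!pending) {
      // first caller kicks off the load; everyone queues until it resolves
      pending = this.#pending = []
      this.load().then(value => this.#init(value))
    }
    pending.push(callback)
  }

  #init(value: T) {
    this.#value = value
    for (const callback of this.#pending ?? []) callback(value)
    this.#pending = undefined
  }
}

// usage: the loader runs once, and both callbacks fire with the same value
const syncStateFor = new DeferredValue(async () => "loaded sync state")
syncStateFor.with(value => console.log("first caller", value))
syncStateFor.with(value => console.log("second caller", value))
```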
@@ -172,35 +234,45 @@ export class DocSynchronizer extends Synchronizer {
     const newPeers = new Set(
       peerIds.filter(peerId => !this.#peers.includes(peerId))
     )
-    this.#log(`beginSync: ${peerIds.join(", ")}`)
-
-    // HACK: if we have a sync state already, we round-trip it through the encoding system to make
-    // sure state is preserved. This prevents an infinite loop caused by failed attempts to send
-    // messages during disconnection.
-    // TODO: cover that case with a test and remove this hack
-    peerIds.forEach(peerId => {
-      const syncStateRaw = this.#getSyncState(peerId)
-      const syncState = A.decodeSyncState(A.encodeSyncState(syncStateRaw))
-      this.#setSyncState(peerId, syncState)
-    })
 
     // At this point if we don't have anything in our storage, we need to use an empty doc to sync
     // with; but we don't want to surface that state to the front end
-    void this.handle.doc([READY, REQUESTING, UNAVAILABLE]).then(doc => {
-      // we register out peers first, then say that sync has started
-      this.#syncStarted = true
-      this.#checkDocUnavailable()
 
- … (7 lines)
+    const docPromise = this.#handle
+      .doc([READY, REQUESTING, UNAVAILABLE])
+      .then(doc => {
+        // we register out peers first, then say that sync has started
+        this.#syncStarted = true
+        this.#checkDocUnavailable()
+
+        const wasUnavailable = doc === undefined
+        if (wasUnavailable && newPeers.size == 0) {
+          return
+        }
 
- … (2 lines)
+        // If the doc is unavailable we still need a blank document to generate
+        // the sync message from
+        return doc ?? A.init<unknown>()
+      })
+
+    this.#log(`beginSync: ${peerIds.join(", ")}`)
+
+    peerIds.forEach(peerId => {
+      this.#withSyncState(peerId, syncState => {
+        // HACK: if we have a sync state already, we round-trip it through the encoding system to make
+        // sure state is preserved. This prevents an infinite loop caused by failed attempts to send
+        // messages during disconnection.
+        // TODO: cover that case with a test and remove this hack
+        const reparsedSyncState = A.decodeSyncState(
+          A.encodeSyncState(syncState)
+        )
+        this.#setSyncState(peerId, reparsedSyncState)
+
+        docPromise.then(doc => {
+          if (doc) {
+            this.#sendSyncMessage(peerId, doc)
+          }
+        })
       })
     })
   }
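When the document isn't available locally, `beginSync` now falls back to `A.init<unknown>()` purely so that a sync message can still be produced and sent as a request. The behaviour this relies on, shown with plain Automerge calls (a sketch; the variable names are illustrative):

```ts
import * as A from "@automerge/automerge"

// A blank doc plus a fresh sync state still yields a first sync message,
// which is what a peer that lacks the document sends as its "request".
const blankDoc = A.init<unknown>()
const [syncState, message] = A.generateSyncMessage(blankDoc, A.initSyncState())

// the blank doc has no heads, and nothing is shared with the peer yet
console.log(A.getHeads(blankDoc).length === 0, syncState.sharedHeads.length === 0)
console.log(message) // bytes to put on the wire (null only if there is nothing to send)
```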
@@ -229,15 +301,15 @@ export class DocSynchronizer extends Synchronizer {
   }
 
   receiveEphemeralMessage(message: EphemeralMessage) {
-    if (message.documentId !== this…
+    if (message.documentId !== this.#handle.documentId)
       throw new Error(`channelId doesn't match documentId`)
 
     const { senderId, data } = message
 
     const contents = decode(new Uint8Array(data))
 
-    this…
-      handle: this…
+    this.#handle.emit("ephemeral-message", {
+      handle: this.#handle,
       senderId,
       message: contents,
     })
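Because the synchronizer re-emits the decoded payload on the handle, application code can stay on the `DocHandle` API. A usage sketch (the document type is illustrative; the event name and payload shape come from the emit above):

```ts
import { DocHandle } from "../src/DocHandle.js"

declare const handle: DocHandle<{ count?: number }> // assumed to exist elsewhere

// payload shape matches what DocSynchronizer emits: { handle, senderId, message }
handle.on("ephemeral-message", ({ senderId, message }) => {
  console.log(`ephemeral message from ${senderId}`, message)
})
```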
@@ -252,20 +324,20 @@ export class DocSynchronizer extends Synchronizer {
   }
 
   receiveSyncMessage(message: SyncMessage | RequestMessage) {
-    if (message.documentId !== this…
+    if (message.documentId !== this.#handle.documentId)
       throw new Error(`channelId doesn't match documentId`)
 
     // We need to block receiving the syncMessages until we've checked local storage
-    if (!this…
-      this.#pendingSyncMessages.push(message)
+    if (!this.#handle.inState([READY, REQUESTING, UNAVAILABLE])) {
+      this.#pendingSyncMessages.push({ message, received: new Date() })
       return
     }
 
     this.#processAllPendingSyncMessages()
-    this.#processSyncMessage(message)
+    this.#processSyncMessage(message, new Date())
   }
 
-  #processSyncMessage(message: SyncMessage | RequestMessage) {
+  #processSyncMessage(message: SyncMessage | RequestMessage, received: Date) {
     if (isRequestMessage(message)) {
       this.#peerDocumentStatuses[message.senderId] = "wants"
     }
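Pending messages are now buffered together with the time they arrived and replayed in arrival order once the handle has finished checking local storage. The buffering flow, reduced to a self-contained sketch (the readiness check and processing step are stand-ins, not package APIs):

```ts
type Message = { senderId: string; data: Uint8Array }
type PendingMessage = { message: Message; received: Date }

const pendingSyncMessages: PendingMessage[] = []

declare function handleHasCheckedStorage(): boolean // stand-in for handle.inState(...)
declare function processSyncMessage(message: Message, received: Date): void

function receiveSyncMessage(message: Message) {
  if (!handleHasCheckedStorage()) {
    // not ready yet: remember the message and when it arrived
    pendingSyncMessages.push({ message, received: new Date() })
    return
  }

  // ready: drain the backlog in arrival order, then handle the new message
  for (const pending of pendingSyncMessages.splice(0)) {
    processSyncMessage(pending.message, pending.received)
  }
  processSyncMessage(message, new Date())
}
```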
@@ -277,28 +349,30 @@ export class DocSynchronizer extends Synchronizer {
       this.#peerDocumentStatuses[message.senderId] = "has"
     }
 
-    this.…
- … (5 lines)
+    this.#withSyncState(message.senderId, syncState => {
+      this.#handle.update(doc => {
+        const [newDoc, newSyncState] = A.receiveSyncMessage(
+          doc,
+          syncState,
+          message.data
+        )
 
-    …
+        this.#setSyncState(message.senderId, newSyncState)
 
- … (4 lines)
+        // respond to just this peer (as required)
+        this.#sendSyncMessage(message.senderId, doc)
+        return newDoc
+      })
 
-    …
+      this.#checkDocUnavailable()
+    })
   }
 
   #checkDocUnavailable() {
     // if we know none of the peers have the document, tell all our peers that we don't either
     if (
       this.#syncStarted &&
-      this…
+      this.#handle.inState([REQUESTING]) &&
       this.#peers.every(
         peerId =>
          this.#peerDocumentStatuses[peerId] === "unavailable" ||
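`#processSyncMessage` applies `A.receiveSyncMessage` inside `handle.update` and immediately answers with `#sendSyncMessage`, which is one half of the standard Automerge sync loop. The whole loop, reduced to two in-memory documents (a sketch using plain Automerge calls, no repo involved; names are illustrative):

```ts
import * as A from "@automerge/automerge"

type Doc = { foo?: string }

let left = A.from<Doc>({ foo: "bar" })
let right = A.init<Doc>()

let leftState = A.initSyncState()
let rightState = A.initSyncState()

// keep exchanging messages until neither side has anything left to send
for (;;) {
  const [nextLeftState, leftMsg] = A.generateSyncMessage(left, leftState)
  leftState = nextLeftState
  if (leftMsg) {
    ;[right, rightState] = A.receiveSyncMessage(right, rightState, leftMsg)
  }

  const [nextRightState, rightMsg] = A.generateSyncMessage(right, rightState)
  rightState = nextRightState
  if (rightMsg) {
    ;[left, leftState] = A.receiveSyncMessage(left, leftState, rightMsg)
  }

  if (!leftMsg && !rightMsg) break
}

console.log(right.foo) // "bar" once the two documents have converged
```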
@@ -310,19 +384,19 @@ export class DocSynchronizer extends Synchronizer {
         .forEach(peerId => {
           const message: MessageContents<DocumentUnavailableMessage> = {
             type: "doc-unavailable",
-            documentId: this…
+            documentId: this.#handle.documentId,
             targetId: peerId,
           }
           this.emit("message", message)
         })
 
-      this…
+      this.#handle.unavailable()
     }
   }
 
   #processAllPendingSyncMessages() {
     for (const message of this.#pendingSyncMessages) {
-      this.#processSyncMessage(message)
+      this.#processSyncMessage(message.message, message.received)
     }
 
     this.#pendingSyncMessages = []
package/src/synchronizer/Synchronizer.ts
CHANGED

@@ -1,5 +1,9 @@
 import { EventEmitter } from "eventemitter3"
-import {…
+import {
+  MessageContents,
+  RepoMessage,
+  SyncStateMessage,
+} from "../network/messages.js"
 
 export abstract class Synchronizer extends EventEmitter<SynchronizerEvents> {
   abstract receiveMessage(message: RepoMessage): void

@@ -7,4 +11,5 @@ export abstract class Synchronizer extends EventEmitter<SynchronizerEvents> {
 
 export interface SynchronizerEvents {
   message: (arg: MessageContents) => void
+  "sync-state": (arg: SyncStateMessage) => void
 }
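The new `"sync-state"` event is the hook the rest of the repo (and the tests below) use to observe per-peer sync progress. A listener sketch, with the payload fields taken from the `SyncStateMessage` emit in `DocSynchronizer` above (the handle is assumed to exist elsewhere):

```ts
import { DocHandle } from "../src/DocHandle.js"
import { DocSynchronizer } from "../src/synchronizer/DocSynchronizer.js"

declare const handle: DocHandle<unknown>

const docSynchronizer = new DocSynchronizer({ handle })

// payload: which peer, which document, and the full Automerge sync state
docSynchronizer.on("sync-state", ({ peerId, documentId, syncState }) => {
  console.log(
    `sync state updated for ${documentId} with ${peerId}`,
    syncState.lastSentHeads
  )
})
```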
package/test/DocHandle.test.ts
CHANGED
@@ -5,7 +5,7 @@ import { describe, it } from "vitest"
 import { generateAutomergeUrl, parseAutomergeUrl } from "../src/AutomergeUrl.js"
 import { eventPromise } from "../src/helpers/eventPromise.js"
 import { pause } from "../src/helpers/pause.js"
-import { DocHandle, DocHandleChangePayload } from "../src/index.js"
+import { DocHandle, DocHandleChangePayload, PeerId } from "../src/index.js"
 import { TestDoc } from "./types.js"
 
 describe("DocHandle", () => {

@@ -303,6 +303,23 @@ describe("DocHandle", () => {
     assert(wasBar, "foo should have been bar as we changed at the old heads")
   })
 
+  it("should allow to listen for remote head changes and manually read remote heads", async () => {
+    const handle = new DocHandle<TestDoc>(TEST_ID, { isNew: true })
+    const bob = "bob" as PeerId
+
+    const remoteHeadsMessagePromise = eventPromise(handle, "remote-heads")
+
+    handle.setRemoteHeads(bob, [])
+
+    const remoteHeadsMessage = await remoteHeadsMessagePromise
+
+    assert.strictEqual(remoteHeadsMessage.peerId, bob)
+    assert.deepStrictEqual(remoteHeadsMessage.heads, [])
+
+    // read remote heads manually
+    assert.deepStrictEqual(handle.getRemoteHeads(bob), [])
+  })
+
   describe("ephemeral messaging", () => {
     it("can broadcast a message for the network to send out", async () => {
       const handle = new DocHandle<TestDoc>(TEST_ID, { isNew: true })
package/test/DocSynchronizer.test.ts
CHANGED

@@ -1,7 +1,8 @@
 import assert from "assert"
+import * as A from "@automerge/automerge"
 import { describe, it } from "vitest"
-import { DocHandle } from "../src/DocHandle.js"
 import { generateAutomergeUrl, parseAutomergeUrl } from "../src/AutomergeUrl.js"
+import { DocHandle } from "../src/DocHandle.js"
 import { eventPromise } from "../src/helpers/eventPromise.js"
 import {
   DocumentUnavailableMessage,

@@ -22,7 +23,9 @@ describe("DocSynchronizer", () => {
   const setup = () => {
     const docId = parseAutomergeUrl(generateAutomergeUrl()).documentId
     handle = new DocHandle<TestDoc>(docId, { isNew: true })
-    docSynchronizer = new DocSynchronizer(…
+    docSynchronizer = new DocSynchronizer({
+      handle: handle as DocHandle<unknown>,
+    })
     return { handle, docSynchronizer }
   }
 

@@ -50,6 +53,27 @@ describe("DocSynchronizer", () => {
     assert.equal(type, "sync")
   })
 
+  it("emits a syncState message when the sync state is updated", async () => {
+    const { handle, docSynchronizer } = setup()
+    docSynchronizer.beginSync([alice])
+    handle.change(doc => {
+      doc.foo = "bar"
+    })
+    const message1 = await eventPromise(docSynchronizer, "sync-state")
+    const message2 = await eventPromise(docSynchronizer, "sync-state")
+
+    assert.equal(message1.peerId, "alice")
+    assert.equal(message1.documentId, handle.documentId)
+    assert.deepEqual(message1.syncState.lastSentHeads, [])
+
+    assert.equal(message2.peerId, "alice")
+    assert.equal(message2.documentId, handle.documentId)
+    assert.deepEqual(
+      message2.syncState.lastSentHeads,
+      A.getHeads(handle.docSync())
+    )
+  })
+
   it("still syncs with a peer after it disconnects and reconnects", async () => {
     const { handle, docSynchronizer } = setup()
 

@@ -81,7 +105,9 @@ describe("DocSynchronizer", () => {
     const docId = parseAutomergeUrl(generateAutomergeUrl()).documentId
 
     const handle = new DocHandle<TestDoc>(docId, { isNew: false })
-    docSynchronizer = new DocSynchronizer(…
+    docSynchronizer = new DocSynchronizer({
+      handle: handle as DocHandle<unknown>,
+    })
     docSynchronizer.beginSync([alice])
     handle.request()
     const message = await eventPromise(docSynchronizer, "message")

@@ -93,29 +119,34 @@ describe("DocSynchronizer", () => {
     const docId = parseAutomergeUrl(generateAutomergeUrl()).documentId
 
     const bobHandle = new DocHandle<TestDoc>(docId, { isNew: false })
-    const bobDocSynchronizer = new DocSynchronizer(…
+    const bobDocSynchronizer = new DocSynchronizer({
+      handle: bobHandle as DocHandle<unknown>,
+    })
     bobDocSynchronizer.beginSync([alice])
     bobHandle.request()
     const message = await eventPromise(bobDocSynchronizer, "message")
 
     const aliceHandle = new DocHandle<TestDoc>(docId, { isNew: false })
-    const aliceDocSynchronizer = new DocSynchronizer(…
+    const aliceDocSynchronizer = new DocSynchronizer({
+      handle: aliceHandle as DocHandle<unknown>,
+    })
     aliceHandle.request()
 
     aliceDocSynchronizer.receiveSyncMessage({ ...message, senderId: bob })
     aliceDocSynchronizer.beginSync([charlie, bob])
 
-    const…
- … (9 lines)
-    )
+    const messages = await new Promise<MessageContents[]>(resolve => {
+      const messages: MessageContents[] = []
+      aliceDocSynchronizer.on("message", message => {
+        messages.push(message)
+        if (messages.length === 2) {
+          resolve(messages)
+        }
+      })
+    })
+
+    const bobMessage = messages.find(m => m.targetId === bob)
+    const charlieMessage = messages.find(m => m.targetId === charlie)
 
     // the response should be a sync message, not a request message
     assert.equal(charlieMessage.targetId, "charlie")