@automerge/automerge-repo 1.0.19 → 1.1.0-alpha.2

This diff shows the contents of publicly available package versions as they were published to their respective public registries. It is provided for informational purposes only.
Files changed (47)
  1. package/dist/DocHandle.d.ts +6 -5
  2. package/dist/DocHandle.d.ts.map +1 -1
  3. package/dist/DocHandle.js +7 -7
  4. package/dist/RemoteHeadsSubscriptions.d.ts +41 -0
  5. package/dist/RemoteHeadsSubscriptions.d.ts.map +1 -0
  6. package/dist/RemoteHeadsSubscriptions.js +224 -0
  7. package/dist/Repo.d.ts +11 -2
  8. package/dist/Repo.d.ts.map +1 -1
  9. package/dist/Repo.js +117 -8
  10. package/dist/index.d.ts +2 -2
  11. package/dist/index.d.ts.map +1 -1
  12. package/dist/network/NetworkAdapter.d.ts +15 -1
  13. package/dist/network/NetworkAdapter.d.ts.map +1 -1
  14. package/dist/network/NetworkAdapter.js +1 -0
  15. package/dist/network/NetworkSubsystem.d.ts +4 -2
  16. package/dist/network/NetworkSubsystem.d.ts.map +1 -1
  17. package/dist/network/NetworkSubsystem.js +8 -4
  18. package/dist/network/messages.d.ts +24 -1
  19. package/dist/network/messages.d.ts.map +1 -1
  20. package/dist/network/messages.js +5 -1
  21. package/dist/storage/StorageSubsystem.d.ts +5 -3
  22. package/dist/storage/StorageSubsystem.d.ts.map +1 -1
  23. package/dist/storage/StorageSubsystem.js +23 -5
  24. package/dist/storage/types.d.ts +4 -0
  25. package/dist/storage/types.d.ts.map +1 -1
  26. package/dist/synchronizer/CollectionSynchronizer.d.ts +2 -2
  27. package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
  28. package/dist/synchronizer/CollectionSynchronizer.js +7 -3
  29. package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -1
  30. package/dist/synchronizer/DocSynchronizer.js +0 -9
  31. package/package.json +3 -3
  32. package/src/DocHandle.ts +10 -9
  33. package/src/RemoteHeadsSubscriptions.ts +306 -0
  34. package/src/Repo.ts +172 -12
  35. package/src/index.ts +2 -0
  36. package/src/network/NetworkAdapter.ts +19 -1
  37. package/src/network/NetworkSubsystem.ts +17 -6
  38. package/src/network/messages.ts +30 -1
  39. package/src/storage/StorageSubsystem.ts +30 -7
  40. package/src/storage/types.ts +3 -0
  41. package/src/synchronizer/CollectionSynchronizer.ts +11 -5
  42. package/src/synchronizer/DocSynchronizer.ts +0 -12
  43. package/test/DocHandle.test.ts +0 -17
  44. package/test/RemoteHeadsSubscriptions.test.ts +343 -0
  45. package/test/Repo.test.ts +51 -15
  46. package/test/StorageSubsystem.test.ts +28 -6
  47. package/test/remoteHeads.test.ts +135 -0

package/src/network/messages.ts

@@ -1,5 +1,6 @@
 import { SyncState } from "@automerge/automerge"
 import { DocumentId, PeerId, SessionId } from "../types.js"
+import { StorageId } from "../storage/types.js"
 
 /**
  * A sync message for a particular document
@@ -102,12 +103,32 @@ export type AuthMessage<TPayload = any> = {
   payload: TPayload
 }
 
+export type RemoteSubscriptionControlMessage = {
+  type: "remote-subscription-change",
+  senderId: PeerId,
+  targetId: PeerId,
+  add?: StorageId[],
+  remove?: StorageId[],
+}
+
+export type RemoteHeadsChanged = {
+  type: "remote-heads-changed",
+  senderId: PeerId,
+  targetId: PeerId,
+  documentId: DocumentId,
+  newHeads: {[key: StorageId]: {heads: string[], timestamp: number}},
+}
+
 /** These are message types that a {@link NetworkAdapter} surfaces to a {@link Repo}. */
 export type RepoMessage =
   | SyncMessage
   | EphemeralMessage
   | RequestMessage
   | DocumentUnavailableMessage
+  | RemoteSubscriptionControlMessage
+  | RemoteHeadsChanged
+
+export type DocMessage = SyncMessage | EphemeralMessage | RequestMessage | DocumentUnavailableMessage
 
 /** These are all the message types that a {@link NetworkAdapter} might see. */
 export type Message = RepoMessage | AuthMessage
@@ -136,7 +157,9 @@ export const isValidRepoMessage = (message: Message): message is RepoMessage =>
   (isSyncMessage(message) ||
     isEphemeralMessage(message) ||
     isRequestMessage(message) ||
-    isDocumentUnavailableMessage(message))
+    isDocumentUnavailableMessage(message) ||
+    isRemoteSubscriptionControlMessage(message) ||
+    isRemoteHeadsChanged(message))
 
 // prettier-ignore
 export const isDocumentUnavailableMessage = (msg: Message): msg is DocumentUnavailableMessage =>
@@ -150,3 +173,9 @@ export const isSyncMessage = (msg: Message): msg is SyncMessage =>
 
 export const isEphemeralMessage = (msg: Message): msg is EphemeralMessage =>
   msg.type === "ephemeral"
+
+export const isRemoteSubscriptionControlMessage = (msg: Message): msg is RemoteSubscriptionControlMessage =>
+  msg.type === "remote-subscription-change"
+
+export const isRemoteHeadsChanged = (msg: Message): msg is RemoteHeadsChanged =>
+  msg.type === "remote-heads-changed"
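
The two message types added above carry remote-heads gossip between peers: a RemoteSubscriptionControlMessage tells a peer which storage IDs the sender wants to follow, and a RemoteHeadsChanged message reports new heads observed at one of those storages. The following sketch (not part of the diff) shows how these messages might be constructed and narrowed with the new type guards; the import path and all identifiers are assumptions for illustration, since the diff only shows that the types live in src/network/messages.ts.

import type { DocumentId, PeerId, StorageId } from "@automerge/automerge-repo"
// Assumed import path: the diff declares these names in src/network/messages.ts.
import {
  Message,
  RemoteHeadsChanged,
  RemoteSubscriptionControlMessage,
  isRemoteHeadsChanged,
} from "@automerge/automerge-repo/dist/network/messages.js"

// Hypothetical identifiers, for illustration only.
const laptop = "peer-laptop" as PeerId
const syncServer = "peer-sync-server" as PeerId
const serverStorage = "3760df37-a4c6-4f66-9ecd-732039a9385d" as StorageId
const documentId = "someDocumentId" as DocumentId

// The laptop asks the sync server to forward head changes seen at serverStorage.
const subscribe: RemoteSubscriptionControlMessage = {
  type: "remote-subscription-change",
  senderId: laptop,
  targetId: syncServer,
  add: [serverStorage],
}

// The sync server later reports that the document's heads moved at that storage.
const changed: RemoteHeadsChanged = {
  type: "remote-heads-changed",
  senderId: syncServer,
  targetId: laptop,
  documentId,
  newHeads: {
    [serverStorage]: { heads: ["someChangeHash"], timestamp: Date.now() },
  },
}

const handle = (msg: Message) => {
  if (isRemoteHeadsChanged(msg)) {
    // Narrowed to RemoteHeadsChanged by the new type guard.
    console.log(msg.documentId, msg.newHeads)
  }
}

handle(subscribe)
handle(changed)
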
package/src/storage/StorageSubsystem.ts

@@ -2,11 +2,12 @@ import * as A from "@automerge/automerge/next"
 import debug from "debug"
 import { headsAreSame } from "../helpers/headsAreSame.js"
 import { mergeArrays } from "../helpers/mergeArrays.js"
-import { PeerId, type DocumentId } from "../types.js"
+import { type DocumentId } from "../types.js"
 import { StorageAdapter } from "./StorageAdapter.js"
-import { ChunkInfo, StorageKey } from "./types.js"
+import { ChunkInfo, StorageKey, StorageId } from "./types.js"
 import { keyHash, headsHash } from "./keyHash.js"
 import { chunkTypeFromKey } from "./chunkTypeFromKey.js"
+import * as Uuid from "uuid"
 
 /**
  * The storage subsystem is responsible for saving and loading Automerge documents to and from
@@ -31,6 +32,23 @@ export class StorageSubsystem {
     this.#storageAdapter = storageAdapter
   }
 
+  async id(): Promise<StorageId> {
+    let storedId = await this.#storageAdapter.load(["storage-adapter-id"])
+
+    let id: StorageId
+    if (storedId) {
+      id = new TextDecoder().decode(storedId) as StorageId
+    } else {
+      id = Uuid.v4() as StorageId
+      await this.#storageAdapter.save(
+        ["storage-adapter-id"],
+        new TextEncoder().encode(id)
+      )
+    }
+
+    return id
+  }
+
   // ARBITRARY KEY/VALUE STORAGE
 
   // The `load`, `save`, and `remove` methods are for generic key/value storage, as opposed to
@@ -211,19 +229,19 @@ export class StorageSubsystem {
 
   async loadSyncState(
     documentId: DocumentId,
-    peerId: PeerId
+    storageId: StorageId
   ): Promise<A.SyncState | undefined> {
-    const key = [documentId, "sync-state", peerId]
+    const key = [documentId, "sync-state", storageId]
     const loaded = await this.#storageAdapter.load(key)
     return loaded ? A.decodeSyncState(loaded) : undefined
   }
 
   async saveSyncState(
     documentId: DocumentId,
-    peerId: PeerId,
+    storageId: StorageId,
     syncState: A.SyncState
   ): Promise<void> {
-    const key = [documentId, "sync-state", peerId]
+    const key = [documentId, "sync-state", storageId]
     await this.#storageAdapter.save(key, A.encodeSyncState(syncState))
   }
 
@@ -261,6 +279,11 @@ export class StorageSubsystem {
         incrementalSize += chunk.size
       }
     }
-    return incrementalSize >= snapshotSize
+    // if the file is currently small, don't worry, just compact
+    // this might seem a bit arbitrary (1k is arbitrary) but is designed to ensure compaction
+    // for documents with only a single large change on top of an empty (or nearly empty) document
+    // for example: imported NPM modules, images, etc.
+    // if we have even more incrementals (so far) than the snapshot, compact
+    return snapshotSize < 1024 || incrementalSize >= snapshotSize
   }
 }
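
The new return expression above changes the compaction policy: a snapshot under 1 KiB is always compacted, and otherwise compaction happens once the accumulated incremental chunks are at least as large as the snapshot. Here is a standalone restatement of that decision with a few worked inputs, for illustration only (the real check is a private method of StorageSubsystem):

// Illustration only: the same rule as the diff above, lifted out as a pure function.
const shouldCompact = (snapshotSize: number, incrementalSize: number): boolean =>
  // Small snapshots (< 1 KiB) always compact, so one big change on top of a
  // near-empty document gets folded into a snapshot right away.
  snapshotSize < 1024 || incrementalSize >= snapshotSize

console.log(shouldCompact(200, 80_000))    // true: tiny snapshot, compact regardless
console.log(shouldCompact(50_000, 10_000)) // false: incrementals still smaller than the snapshot
console.log(shouldCompact(50_000, 60_000)) // true: incrementals have outgrown the snapshot
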
package/src/storage/types.ts

@@ -37,3 +37,6 @@ export type ChunkType = "snapshot" | "incremental"
  * should not assume any particular structure.
  **/
 export type StorageKey = string[]
+
+/** A branded type for storage IDs */
+export type StorageId = string & { __storageId: true }
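
StorageId is a branded string: at runtime it is an ordinary string, but the __storageId brand means arbitrary strings (or PeerIds) cannot be passed where a StorageId is expected without an explicit cast. A short usage sketch, not part of the diff; it assumes StorageId is importable from the package root, as the new tests import it from src/index.js:

import type { StorageId } from "@automerge/automerge-repo"

// A plain string must be cast explicitly to become a StorageId.
const id = "3760df37-a4c6-4f66-9ecd-732039a9385d" as StorageId

// Hypothetical consumer, to show what the brand buys you.
const loadSyncStateFor = (storageId: StorageId): void => {
  console.log("loading sync state for", storageId)
}

loadSyncStateFor(id) // ok
// loadSyncStateFor("peer-123") // compile error: a plain string is not a StorageId
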
package/src/synchronizer/CollectionSynchronizer.ts

@@ -2,7 +2,7 @@ import debug from "debug"
 import { DocHandle } from "../DocHandle.js"
 import { stringifyAutomergeUrl } from "../AutomergeUrl.js"
 import { Repo } from "../Repo.js"
-import { RepoMessage } from "../network/messages.js"
+import { DocMessage, RepoMessage } from "../network/messages.js"
 import { DocumentId, PeerId } from "../types.js"
 import { DocSynchronizer } from "./DocSynchronizer.js"
 import { Synchronizer } from "./Synchronizer.js"
@@ -37,14 +37,20 @@ export class CollectionSynchronizer extends Synchronizer {
   #initDocSynchronizer(handle: DocHandle<unknown>): DocSynchronizer {
     const docSynchronizer = new DocSynchronizer({
       handle,
-      onLoadSyncState: peerId => {
+      onLoadSyncState: async peerId => {
         if (!this.repo.storageSubsystem) {
-          return Promise.resolve(undefined)
+          return
+        }
+
+        const { storageId, isEphemeral } =
+          this.repo.peerMetadataByPeerId[peerId] || {}
+        if (!storageId || isEphemeral) {
+          return
         }
 
         return this.repo.storageSubsystem.loadSyncState(
           handle.documentId,
-          peerId
+          storageId
         )
       },
     })
@@ -70,7 +76,7 @@ export class CollectionSynchronizer extends Synchronizer {
    * When we receive a sync message for a document we haven't got in memory, we
    * register it with the repo and start synchronizing
    */
-  async receiveMessage(message: RepoMessage) {
+  async receiveMessage(message: DocMessage) {
     log(
       `onSyncMessage: ${message.senderId}, ${message.documentId}, ${
         "data" in message ? message.data.byteLength + "bytes" : ""
package/src/synchronizer/DocSynchronizer.ts

@@ -163,20 +163,8 @@ export class DocSynchronizer extends Synchronizer {
   }
 
   #setSyncState(peerId: PeerId, syncState: A.SyncState) {
-    const previousSyncState = this.#syncStates[peerId]
-
     this.#syncStates[peerId] = syncState
 
-    const haveTheirSyncedHeadsChanged =
-      syncState.theirHeads &&
-      (!previousSyncState ||
-        !previousSyncState.theirHeads ||
-        !headsAreSame(previousSyncState.theirHeads, syncState.theirHeads))
-
-    if (haveTheirSyncedHeadsChanged) {
-      this.#handle.setRemoteHeads(peerId, syncState.theirHeads)
-    }
-
     this.emit("sync-state", {
       peerId,
       syncState,
package/test/DocHandle.test.ts

@@ -303,23 +303,6 @@ describe("DocHandle", () => {
     assert(wasBar, "foo should have been bar as we changed at the old heads")
   })
 
-  it("should allow to listen for remote head changes and manually read remote heads", async () => {
-    const handle = new DocHandle<TestDoc>(TEST_ID, { isNew: true })
-    const bob = "bob" as PeerId
-
-    const remoteHeadsMessagePromise = eventPromise(handle, "remote-heads")
-
-    handle.setRemoteHeads(bob, [])
-
-    const remoteHeadsMessage = await remoteHeadsMessagePromise
-
-    assert.strictEqual(remoteHeadsMessage.peerId, bob)
-    assert.deepStrictEqual(remoteHeadsMessage.heads, [])
-
-    // read remote heads manually
-    assert.deepStrictEqual(handle.getRemoteHeads(bob), [])
-  })
-
   describe("ephemeral messaging", () => {
     it("can broadcast a message for the network to send out", async () => {
       const handle = new DocHandle<TestDoc>(TEST_ID, { isNew: true })
package/test/RemoteHeadsSubscriptions.test.ts

@@ -0,0 +1,343 @@
+import * as A from "@automerge/automerge"
+import assert from "assert"
+import { describe, it } from "vitest"
+import { RemoteHeadsSubscriptions } from "../src/RemoteHeadsSubscriptions.js"
+import { PeerId, StorageId } from "../src/index.js"
+import { generateAutomergeUrl, parseAutomergeUrl } from "../src/AutomergeUrl.js"
+import { pause } from "../src/helpers/pause.js"
+import { EventEmitter } from "eventemitter3"
+import {
+  RemoteHeadsChanged,
+  RemoteSubscriptionControlMessage,
+} from "../src/network/messages.js"
+
+describe("RepoHeadsSubscriptions", () => {
+  const storageA = "remote-a" as StorageId
+  const storageB = "remote-b" as StorageId
+  const storageC = "remote-c" as StorageId
+  const storageD = "remote-d" as StorageId
+  const peerA = "peer-a" as PeerId
+  const peerB = "peer-b" as PeerId
+  const peerC = "peer-c" as PeerId
+  const peerD = "peer-d" as PeerId
+
+  const { documentId: docA } = parseAutomergeUrl(generateAutomergeUrl())
+  const { documentId: docB } = parseAutomergeUrl(generateAutomergeUrl())
+  const { documentId: docC } = parseAutomergeUrl(generateAutomergeUrl())
+
+  const docAHeadsChangedForStorageB: RemoteHeadsChanged = {
+    type: "remote-heads-changed",
+    senderId: peerD,
+    targetId: peerA,
+    documentId: docA,
+    newHeads: {
+      [storageB]: {
+        heads: [],
+        timestamp: Date.now(),
+      },
+    },
+  }
+
+  const docBHeadsChangedForStorageB: RemoteHeadsChanged = {
+    type: "remote-heads-changed",
+    senderId: peerD,
+    targetId: peerA,
+    documentId: docB,
+    newHeads: {
+      [storageB]: {
+        heads: [],
+        timestamp: Date.now(),
+      },
+    },
+  }
+
+  const docBHeads = A.getHeads(
+    A.change(A.init(), doc => {
+      ;(doc as any).foo = "123"
+    })
+  )
+
+  const docBHeadsChangedForStorageB2: RemoteHeadsChanged = {
+    type: "remote-heads-changed",
+    senderId: peerD,
+    targetId: peerA,
+    documentId: docB,
+    newHeads: {
+      [storageB]: {
+        heads: docBHeads,
+        timestamp: Date.now() + 1,
+      },
+    },
+  }
+
+  const subscribePeerCToStorageB: RemoteSubscriptionControlMessage = {
+    type: "remote-subscription-change",
+    senderId: peerC,
+    targetId: peerA,
+    add: [storageB],
+  }
+
+  const unsubscribePeerCFromStorageB: RemoteSubscriptionControlMessage = {
+    type: "remote-subscription-change",
+    senderId: peerC,
+    targetId: peerA,
+    remove: [storageB],
+  }
+
+  it("should allow to subscribe and unsubscribe to storage ids", async () => {
+    const remoteHeadsSubscriptions = new RemoteHeadsSubscriptions()
+
+    const remoteHeadsMessages = waitForMessages(
+      remoteHeadsSubscriptions,
+      "remote-heads-changed"
+    )
+
+    const changeRemoteSubsAfterSubscribe = waitForMessages(
+      remoteHeadsSubscriptions,
+      "change-remote-subs"
+    )
+
+    // subscribe to storageB and change storageB heads
+    remoteHeadsSubscriptions.subscribeToRemotes([storageB])
+    remoteHeadsSubscriptions.handleRemoteHeads(docAHeadsChangedForStorageB)
+
+    // receive event for new heads of storageB
+    let messages = await remoteHeadsMessages
+    assert.strictEqual(messages.length, 1)
+    assert.strictEqual(messages[0].storageId, storageB)
+    assert.strictEqual(messages[0].documentId, docA)
+    assert.deepStrictEqual(messages[0].remoteHeads, [])
+
+    // receive event for add sub to storageB
+    messages = await changeRemoteSubsAfterSubscribe
+    assert.strictEqual(messages.length, 1)
+    assert.deepStrictEqual(messages[0].add, [storageB])
+    assert.deepStrictEqual(messages[0].remove, undefined)
+    assert.deepStrictEqual(messages[0].peers, [])
+
+    const remoteHeadsMessagesAfterUnsub = waitForMessages(
+      remoteHeadsSubscriptions,
+      "change-remote-subs"
+    )
+
+    // unsubscribe from storageB
+    remoteHeadsSubscriptions.unsubscribeFromRemotes([storageB])
+
+    // receive event for remove sub from storageB
+    messages = await remoteHeadsMessagesAfterUnsub
+    assert.strictEqual(messages.length, 1)
+    assert.deepStrictEqual(messages[0].add, undefined)
+    assert.deepStrictEqual(messages[0].remove, [storageB])
+    assert.deepStrictEqual(messages[0].peers, [])
+  })
+
+  it("should forward all changes to generous peers", async () => {
+    const remoteHeadsSubscriptions = new RemoteHeadsSubscriptions()
+
+    const notifyRemoteHeadsMessagesPromise = waitForMessages(
+      remoteHeadsSubscriptions,
+      "notify-remote-heads"
+    )
+
+    const changeRemoteSubsMessagesPromise = waitForMessages(
+      remoteHeadsSubscriptions,
+      "change-remote-subs"
+    )
+
+    remoteHeadsSubscriptions.addGenerousPeer(peerC)
+    remoteHeadsSubscriptions.subscribeToRemotes([storageB])
+
+    // change message for docA in storageB
+    remoteHeadsSubscriptions.handleRemoteHeads(docAHeadsChangedForStorageB)
+
+    // change heads directly, are not forwarded
+    remoteHeadsSubscriptions.handleImmediateRemoteHeadsChanged(
+      docC,
+      storageB,
+      []
+    )
+
+    // should forward remote-heads events
+    let messages = await notifyRemoteHeadsMessagesPromise
+    assert.strictEqual(messages.length, 1)
+    assert.strictEqual(messages[0].documentId, docA)
+    assert.strictEqual(messages[0].storageId, storageB)
+    assert.deepStrictEqual(messages[0].heads, [])
+
+    // should forward subscriptions to generous peer
+    messages = await changeRemoteSubsMessagesPromise
+    assert.strictEqual(messages.length, 1)
+    assert.deepStrictEqual(messages[0].add, [storageB])
+    assert.deepStrictEqual(messages[0].remove, undefined)
+    assert.deepStrictEqual(messages[0].peers, [peerC])
+
+    const changeRemoteSubsMessagesAfterUnsubPromise = waitForMessages(
+      remoteHeadsSubscriptions,
+      "change-remote-subs"
+    )
+
+    // unsubsscribe from storage B
+    remoteHeadsSubscriptions.unsubscribeFromRemotes([storageB])
+
+    // should forward unsubscribe to generous peer
+    messages = await changeRemoteSubsMessagesAfterUnsubPromise
+    assert.strictEqual(messages.length, 1)
+    assert.deepStrictEqual(messages[0].add, undefined)
+    assert.deepStrictEqual(messages[0].remove, [storageB])
+    assert.deepStrictEqual(messages[0].peers, [peerC])
+  })
+
+  it("should not notify generous peers of changed remote heads, if they send the heads originally", async () => {
+    const remoteHeadsSubscriptions = new RemoteHeadsSubscriptions()
+
+    const messagesPromise = waitForMessages(
+      remoteHeadsSubscriptions,
+      "notify-remote-heads"
+    )
+
+    remoteHeadsSubscriptions.addGenerousPeer(peerC)
+    remoteHeadsSubscriptions.subscribeToRemotes([storageB])
+    remoteHeadsSubscriptions.handleRemoteHeads({
+      type: "remote-heads-changed",
+      senderId: peerC,
+      targetId: peerA,
+      documentId: docA,
+      newHeads: {
+        [storageB]: {
+          heads: [],
+          timestamp: Date.now(),
+        },
+      },
+    })
+
+    const messages = await messagesPromise
+    assert.strictEqual(messages.length, 0)
+  })
+
+  it("should allow peers to subscribe and unsubscribe to storageIds", async () => {
+    const remoteHeadsSubscriptions = new RemoteHeadsSubscriptions()
+    remoteHeadsSubscriptions.subscribeToRemotes([storageB])
+
+    // subscribe peer c to storage b
+    remoteHeadsSubscriptions.handleControlMessage(subscribePeerCToStorageB)
+    const messagesAfterSubscribePromise = waitForMessages(
+      remoteHeadsSubscriptions,
+      "notify-remote-heads"
+    )
+
+    // change message for docA in storageB
+    remoteHeadsSubscriptions.handleRemoteHeads(docAHeadsChangedForStorageB)
+
+    // change heads directly
+    remoteHeadsSubscriptions.handleImmediateRemoteHeadsChanged(
+      docC,
+      storageB,
+      []
+    )
+
+    // expect peer c to be notified both changes
+    let messages = await messagesAfterSubscribePromise
+    assert.strictEqual(messages.length, 2)
+    assert.strictEqual(messages[0].documentId, docA)
+    assert.strictEqual(messages[0].storageId, storageB)
+    assert.deepStrictEqual(messages[0].heads, [])
+    assert.strictEqual(messages[1].documentId, docC)
+    assert.strictEqual(messages[1].storageId, storageB)
+    assert.deepStrictEqual(messages[1].heads, [])
+
+    // unsubscribe peer C
+    remoteHeadsSubscriptions.handleControlMessage(unsubscribePeerCFromStorageB)
+    const messagesAfteUnsubscribePromise = waitForMessages(
+      remoteHeadsSubscriptions,
+      "notify-remote-heads"
+    )
+
+    // heads of docB for storageB change
+    remoteHeadsSubscriptions.handleRemoteHeads(docBHeadsChangedForStorageB)
+
+    // expect not to be be notified
+    messages = await messagesAfteUnsubscribePromise
+    assert.strictEqual(messages.length, 0)
+  })
+
+  it("should ignore sync states with an older timestamp", async () => {
+    const remoteHeadsSubscription = new RemoteHeadsSubscriptions()
+
+    const messagesPromise = waitForMessages(
+      remoteHeadsSubscription,
+      "remote-heads-changed"
+    )
+
+    remoteHeadsSubscription.subscribeToRemotes([storageB])
+    remoteHeadsSubscription.handleRemoteHeads(docBHeadsChangedForStorageB2)
+
+    // send message with old heads
+    remoteHeadsSubscription.handleRemoteHeads(docBHeadsChangedForStorageB)
+
+    const messages = await messagesPromise
+    assert.strictEqual(messages.length, 1)
+    assert.strictEqual(messages[0].storageId, storageB)
+    assert.strictEqual(messages[0].documentId, docB)
+    assert.deepStrictEqual(messages[0].remoteHeads, docBHeads)
+  })
+
+  it("should remove subs of disconnected peers", async () => {
+    const remoteHeadsSubscriptions = new RemoteHeadsSubscriptions()
+
+    const messagesPromise = waitForMessages(
+      remoteHeadsSubscriptions,
+      "change-remote-subs"
+    )
+
+    remoteHeadsSubscriptions.handleControlMessage({
+      type: "remote-subscription-change",
+      senderId: peerB,
+      targetId: peerA,
+      add: [storageA, storageC],
+    })
+
+    remoteHeadsSubscriptions.handleControlMessage({
+      type: "remote-subscription-change",
+      senderId: peerC,
+      targetId: peerA,
+      add: [storageA, storageD],
+    })
+
+    remoteHeadsSubscriptions.removePeer(peerB)
+
+    const messages = await messagesPromise
+    assert.deepStrictEqual(messages.length, 3)
+
+    assert.deepStrictEqual(messages[0].add, [storageA, storageC])
+    assert.deepStrictEqual(messages[0].remove, [])
+    assert.deepStrictEqual(messages[0].peers, [])
+
+    assert.deepStrictEqual(messages[1].add, [storageD])
+    assert.deepStrictEqual(messages[1].remove, [])
+    assert.deepStrictEqual(messages[1].peers, [])
+
+    assert.deepStrictEqual(messages[2].add, undefined)
+    assert.deepStrictEqual(messages[2].remove, [storageC])
+    assert.deepStrictEqual(messages[2].peers, [])
+  })
+})
+
+async function waitForMessages(
+  emitter: EventEmitter,
+  event: string,
+  timeout: number = 100
+): Promise<any[]> {
+  const messages = []
+
+  const onEvent = message => {
+    messages.push(message)
+  }
+
+  emitter.on(event, onEvent)
+
+  await pause(timeout)
+
+  emitter.off(event)
+
+  return messages
+}
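
The tests above drive RemoteHeadsSubscriptions directly; in normal use the Repo owns this object and feeds it incoming network messages. A rough sketch of the local flow the tests exercise, not part of the diff; the import path is an assumption, and the event payload fields follow the assertions in the tests:

// Assumed import path: the class is defined in src/RemoteHeadsSubscriptions.ts in this release.
import { RemoteHeadsSubscriptions } from "@automerge/automerge-repo/dist/RemoteHeadsSubscriptions.js"
import type { StorageId } from "@automerge/automerge-repo"

const subs = new RemoteHeadsSubscriptions()
const syncServerStorage = "storage-id-of-the-sync-server" as StorageId // hypothetical

// Local listeners hear about head changes for subscribed storage IDs...
subs.on("remote-heads-changed", ({ documentId, storageId, remoteHeads }) => {
  console.log(`${documentId} is at ${remoteHeads.join(",")} on ${storageId}`)
})

// ...after we subscribe; the Repo then forwards incoming
// "remote-heads-changed" network messages into handleRemoteHeads().
subs.subscribeToRemotes([syncServerStorage])
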