@automerge/automerge-repo 1.0.0-alpha.2 → 1.0.0-alpha.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60)
  1. package/dist/DocCollection.d.ts +2 -1
  2. package/dist/DocCollection.d.ts.map +1 -1
  3. package/dist/DocCollection.js +17 -8
  4. package/dist/DocHandle.d.ts +27 -4
  5. package/dist/DocHandle.d.ts.map +1 -1
  6. package/dist/DocHandle.js +44 -6
  7. package/dist/DocUrl.d.ts +3 -3
  8. package/dist/DocUrl.js +9 -9
  9. package/dist/EphemeralData.d.ts +8 -16
  10. package/dist/EphemeralData.d.ts.map +1 -1
  11. package/dist/EphemeralData.js +1 -28
  12. package/dist/Repo.d.ts +0 -2
  13. package/dist/Repo.d.ts.map +1 -1
  14. package/dist/Repo.js +13 -33
  15. package/dist/helpers/tests/network-adapter-tests.d.ts.map +1 -1
  16. package/dist/helpers/tests/network-adapter-tests.js +15 -13
  17. package/dist/index.d.ts +2 -1
  18. package/dist/index.d.ts.map +1 -1
  19. package/dist/network/NetworkAdapter.d.ts +4 -13
  20. package/dist/network/NetworkAdapter.d.ts.map +1 -1
  21. package/dist/network/NetworkSubsystem.d.ts +5 -4
  22. package/dist/network/NetworkSubsystem.d.ts.map +1 -1
  23. package/dist/network/NetworkSubsystem.js +39 -25
  24. package/dist/network/messages.d.ts +57 -0
  25. package/dist/network/messages.d.ts.map +1 -0
  26. package/dist/network/messages.js +21 -0
  27. package/dist/synchronizer/CollectionSynchronizer.d.ts +3 -2
  28. package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
  29. package/dist/synchronizer/CollectionSynchronizer.js +19 -13
  30. package/dist/synchronizer/DocSynchronizer.d.ts +9 -3
  31. package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -1
  32. package/dist/synchronizer/DocSynchronizer.js +145 -29
  33. package/dist/synchronizer/Synchronizer.d.ts +3 -4
  34. package/dist/synchronizer/Synchronizer.d.ts.map +1 -1
  35. package/dist/types.d.ts +1 -3
  36. package/dist/types.d.ts.map +1 -1
  37. package/fuzz/fuzz.ts +4 -4
  38. package/package.json +2 -2
  39. package/src/DocCollection.ts +19 -9
  40. package/src/DocHandle.ts +87 -10
  41. package/src/DocUrl.ts +9 -9
  42. package/src/EphemeralData.ts +6 -36
  43. package/src/Repo.ts +15 -49
  44. package/src/helpers/tests/network-adapter-tests.ts +18 -14
  45. package/src/index.ts +12 -2
  46. package/src/network/NetworkAdapter.ts +4 -20
  47. package/src/network/NetworkSubsystem.ts +61 -38
  48. package/src/network/messages.ts +123 -0
  49. package/src/synchronizer/CollectionSynchronizer.ts +38 -19
  50. package/src/synchronizer/DocSynchronizer.ts +196 -38
  51. package/src/synchronizer/Synchronizer.ts +3 -8
  52. package/src/types.ts +4 -1
  53. package/test/CollectionSynchronizer.test.ts +6 -7
  54. package/test/DocHandle.test.ts +28 -13
  55. package/test/DocSynchronizer.test.ts +85 -9
  56. package/test/Repo.test.ts +221 -59
  57. package/test/StorageSubsystem.test.ts +2 -2
  58. package/test/helpers/DummyNetworkAdapter.ts +1 -1
  59. package/tsconfig.json +2 -1
  60. package/test/EphemeralData.test.ts +0 -44
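The hunks below cover only a handful of the files listed above (the two synchronizers, their shared base class, types.ts, and two test suites). The headline change they show is the replacement of untyped (peerId, channelId, Uint8Array) sync calls with a discriminated union of typed messages defined in the new package/src/network/messages.ts. That file itself is not reproduced here, so the following is a rough sketch reconstructed from how the synchronizers use the types; anything not visible in the hunks below (field order, doc comments, exact member names beyond type, senderId, targetId, documentId, and data) is an assumption.

// Hypothetical reconstruction of package/src/network/messages.ts (not shown in
// this diff); shapes are inferred from the synchronizer hunks below.
import { PeerId, DocumentId } from "../types.js"

/** An Automerge sync message for a document both peers (may) have. */
export type SyncMessage = {
  type: "sync"
  senderId: PeerId
  targetId: PeerId
  documentId: DocumentId
  data: Uint8Array // Automerge sync message bytes
}

/** Like "sync", but sent when the sender doesn't have the document and wants it. */
export type RequestMessage = {
  type: "request"
  senderId: PeerId
  targetId: PeerId
  documentId: DocumentId
  data: Uint8Array
}

/** Transient, CBOR-encoded application data (cursors, presence, and so on). */
export type EphemeralMessage = {
  type: "ephemeral"
  senderId: PeerId
  targetId: PeerId
  documentId: DocumentId
  data: Uint8Array
}

/** Sent in reply to a request the recipient cannot satisfy. */
export type DocumentUnavailableMessage = {
  type: "doc-unavailable"
  senderId: PeerId
  targetId: PeerId
  documentId: DocumentId
}

export type SynchronizerMessage =
  | SyncMessage
  | RequestMessage
  | EphemeralMessage
  | DocumentUnavailableMessage

The Synchronizer's "message" event is typed as MessageContents rather than Message; judging by the DistributiveOmit helper added to types.ts, that is presumably the same union minus the senderId that the network layer stamps on delivery.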
package/src/synchronizer/CollectionSynchronizer.ts CHANGED
@@ -5,11 +5,17 @@ import {
   binaryToDocumentId,
   stringifyAutomergeUrl,
 } from "../DocUrl.js"
-import { ChannelId, BinaryDocumentId, PeerId, DocumentId } from "../types.js"
+import { PeerId, DocumentId } from "../types.js"
 import { DocSynchronizer } from "./DocSynchronizer.js"
 import { Synchronizer } from "./Synchronizer.js"
 
 import debug from "debug"
+import {
+  DocumentUnavailableMessage,
+  RequestMessage,
+  SynchronizerMessage,
+  SyncMessage,
+} from "../network/messages.js"
 const log = debug("automerge-repo:collectionsync")
 
 /** A CollectionSynchronizer is responsible for synchronizing a DocCollection with peers. */
@@ -20,6 +26,9 @@ export class CollectionSynchronizer extends Synchronizer {
   /** A map of documentIds to their synchronizers */
   #docSynchronizers: Record<DocumentId, DocSynchronizer> = {}
 
+  /** Used to determine if the document is know to the Collection and a synchronizer exists or is being set up */
+  #docSetUp: Record<DocumentId, boolean> = {}
+
   constructor(private repo: DocCollection) {
     super()
   }
@@ -57,37 +66,42 @@ export class CollectionSynchronizer extends Synchronizer {
    * When we receive a sync message for a document we haven't got in memory, we
    * register it with the repo and start synchronizing
    */
-  async receiveSyncMessage(
-    peerId: PeerId,
-    channelId: ChannelId,
-    message: Uint8Array
-  ) {
-    log(`onSyncMessage: ${peerId}, ${channelId}, ${message.byteLength}bytes`)
-
-    const documentId = channelId as unknown as DocumentId
+  async receiveMessage(message: SynchronizerMessage) {
+    log(
+      `onSyncMessage: ${message.senderId}, ${message.documentId}, ${
+        "data" in message ? message.data.byteLength + "bytes" : ""
+      }`
+    )
+
+    const documentId = message.documentId
     if (!documentId) {
       throw new Error("received a message with an invalid documentId")
     }
-    const docSynchronizer = await this.#fetchDocSynchronizer(documentId)
 
-    await docSynchronizer.receiveSyncMessage(peerId, channelId, message)
+    this.#docSetUp[documentId] = true
+
+    const docSynchronizer = this.#fetchDocSynchronizer(documentId)
+
+    docSynchronizer.receiveMessage(message)
 
     // Initiate sync with any new peers
     const peers = await this.#documentGenerousPeers(documentId)
-    peers
-      .filter(peerId => !docSynchronizer.hasPeer(peerId))
-      .forEach(peerId => docSynchronizer.beginSync(peerId))
+    docSynchronizer.beginSync(
+      peers.filter(peerId => !docSynchronizer.hasPeer(peerId))
+    )
   }
 
   /**
    * Starts synchronizing the given document with all peers that we share it generously with.
    */
   addDocument(documentId: DocumentId) {
+    // HACK: this is a hack to prevent us from adding the same document twice
+    if (this.#docSetUp[documentId]) {
+      return
+    }
     const docSynchronizer = this.#fetchDocSynchronizer(documentId)
     void this.#documentGenerousPeers(documentId).then(peers => {
-      peers.forEach(peerId => {
-        docSynchronizer.beginSync(peerId)
-      })
+      docSynchronizer.beginSync(peers)
    })
  }
 
@@ -99,11 +113,16 @@ export class CollectionSynchronizer extends Synchronizer {
   /** Adds a peer and maybe starts synchronizing with them */
   addPeer(peerId: PeerId) {
     log(`adding ${peerId} & synchronizing with them`)
+
+    if (this.#peers.has(peerId)) {
+      return
+    }
+
     this.#peers.add(peerId)
     for (const docSynchronizer of Object.values(this.#docSynchronizers)) {
       const { documentId } = docSynchronizer
-      void this.repo.sharePolicy(peerId, documentId).then(okToShare => {
-        if (okToShare) docSynchronizer.beginSync(peerId)
+      this.repo.sharePolicy(peerId, documentId).then(okToShare => {
+        if (okToShare) docSynchronizer.beginSync([peerId])
       })
     }
   }
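Both addPeer and addDocument consult repo.sharePolicy before calling beginSync, so whether a newly connected peer is synced generously is a policy decision rather than a protocol one. A minimal configuration sketch, assuming the Repo constructor accepts a sharePolicy option as it does in later 1.0 releases; the adapter variable and the "sync-server" peer id are illustrative:

import { Repo, NetworkAdapter, PeerId, DocumentId } from "@automerge/automerge-repo"

declare const networkAdapter: NetworkAdapter // any concrete adapter implementation

const repo = new Repo({
  network: [networkAdapter],
  // Volunteer documents only to a designated sync server; other peers will
  // have to ask for documents explicitly via the new "request" message type.
  sharePolicy: async (peerId: PeerId, _documentId: DocumentId) =>
    peerId === ("sync-server" as PeerId),
})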
package/src/synchronizer/DocSynchronizer.ts CHANGED
@@ -1,9 +1,27 @@
 import * as A from "@automerge/automerge"
-import { DocHandle, READY, REQUESTING } from "../DocHandle.js"
-import { ChannelId, PeerId } from "../types.js"
+import {
+  DocHandle,
+  DocHandleOutboundEphemeralMessagePayload,
+  READY,
+  REQUESTING,
+  UNAVAILABLE,
+} from "../DocHandle.js"
+import { PeerId } from "../types.js"
 import { Synchronizer } from "./Synchronizer.js"
 
 import debug from "debug"
+import {
+  EphemeralMessage,
+  isDocumentUnavailableMessage,
+  isRequestMessage,
+  Message,
+  RequestMessage,
+  SynchronizerMessage,
+  SyncMessage,
+} from "../network/messages.js"
+
+type PeerDocumentStatus = "unknown" | "has" | "unavailable" | "wants"
+import { decode } from "cbor-x"
 
 /**
  * DocSynchronizer takes a handle to an Automerge document, and receives & dispatches sync messages
@@ -17,10 +35,14 @@ export class DocSynchronizer extends Synchronizer {
   /** Active peers */
   #peers: PeerId[] = []
 
+  #peerDocumentStatuses: Record<PeerId, PeerDocumentStatus> = {}
+
   /** Sync state for each peer we've communicated with (including inactive peers) */
   #syncStates: Record<PeerId, A.SyncState> = {}
 
-  #pendingSyncMessages: Array<{ peerId: PeerId; message: Uint8Array }> = []
+  #pendingSyncMessages: Array<SyncMessage | RequestMessage> = []
+
+  #syncStarted = false
 
   constructor(private handle: DocHandle<any>) {
     super()
@@ -31,6 +53,10 @@ export class DocSynchronizer extends Synchronizer {
 
     handle.on("change", () => this.#syncWithPeers())
 
+    handle.on("ephemeral-message-outbound", payload =>
+      this.#broadcastToPeers(payload)
+    )
+
     // Process pending sync messages immediately after the handle becomes ready.
     void (async () => {
       await handle.doc([READY, REQUESTING])
@@ -38,6 +64,10 @@ export class DocSynchronizer extends Synchronizer {
     })()
   }
 
+  get peerStates() {
+    return this.#peerDocumentStatuses
+  }
+
   get documentId() {
     return this.handle.documentId
   }
@@ -47,15 +77,37 @@ export class DocSynchronizer extends Synchronizer {
   async #syncWithPeers() {
     this.#log(`syncWithPeers`)
     const doc = await this.handle.doc()
+    if (doc === undefined) return
     this.#peers.forEach(peerId => this.#sendSyncMessage(peerId, doc))
   }
 
+  async #broadcastToPeers({ data }: DocHandleOutboundEphemeralMessagePayload) {
+    this.#log(`broadcastToPeers`, this.#peers)
+    this.#peers.forEach(peerId => this.#sendEphemeralMessage(peerId, data))
+  }
+
+  #sendEphemeralMessage(peerId: PeerId, data: Uint8Array) {
+    this.#log(`sendEphemeralMessage ->${peerId}`)
+
+    this.emit("message", {
+      type: "ephemeral",
+      targetId: peerId,
+      documentId: this.handle.documentId,
+      data,
+    })
+  }
+
   #getSyncState(peerId: PeerId) {
     if (!this.#peers.includes(peerId)) {
       this.#log("adding a new peer", peerId)
       this.#peers.push(peerId)
     }
 
+    // when a peer is added, we don't know if it has the document or not
+    if (!(peerId in this.#peerDocumentStatuses)) {
+      this.#peerDocumentStatuses[peerId] = "unknown"
+    }
+
     return this.#syncStates[peerId] ?? A.initSyncState()
   }
 
@@ -77,16 +129,35 @@ export class DocSynchronizer extends Synchronizer {
     if (message) {
       this.#logMessage(`sendSyncMessage 🡒 ${peerId}`, message)
 
-      const channelId = this.handle.documentId as string as ChannelId
-
-      this.emit("message", {
-        targetId: peerId,
-        channelId,
-        message,
-        broadcast: false,
-      })
-    } else {
-      this.#log(`sendSyncMessage ->${peerId} [no message generated]`)
+      const decoded = A.decodeSyncMessage(message)
+
+      if (
+        !this.handle.isReady() &&
+        decoded.heads.length === 0 &&
+        newSyncState.sharedHeads.length === 0 &&
+        !Object.values(this.#peerDocumentStatuses).includes("has") &&
+        this.#peerDocumentStatuses[peerId] === "unknown"
+      ) {
+        // we don't have the document (or access to it), so we request it
+        this.emit("message", {
+          type: "request",
+          targetId: peerId,
+          documentId: this.handle.documentId,
+          data: message,
+        })
+      } else {
+        this.emit("message", {
+          type: "sync",
+          targetId: peerId,
+          data: message,
+          documentId: this.handle.documentId,
+        })
+      }
+
+      // if we have sent heads, then the peer now has or will have the document
+      if (decoded.heads.length > 0) {
+        this.#peerDocumentStatuses[peerId] = "has"
+      }
     }
   }
 
@@ -104,8 +175,8 @@ export class DocSynchronizer extends Synchronizer {
     // expanding is expensive, so only do it if we're logging at this level
     const expanded = this.#opsLog.enabled
       ? decoded.changes.flatMap(change =>
-          A.decodeChange(change).ops.map(op => JSON.stringify(op))
-        )
+        A.decodeChange(change).ops.map(op => JSON.stringify(op))
+      )
       : null
     this.#opsLog(logText, expanded)
   }
@@ -116,21 +187,33 @@ export class DocSynchronizer extends Synchronizer {
     return this.#peers.includes(peerId)
   }
 
-  beginSync(peerId: PeerId) {
-    this.#log(`beginSync: ${peerId}`)
+  beginSync(peerIds: PeerId[]) {
+    this.#log(`beginSync: ${peerIds.join(", ")}`)
 
     // At this point if we don't have anything in our storage, we need to use an empty doc to sync
     // with; but we don't want to surface that state to the front end
-    void this.handle.doc([READY, REQUESTING]).then(doc => {
+    void this.handle.doc([READY, REQUESTING, UNAVAILABLE]).then(doc => {
+      // if we don't have any peers, then we can say the document is unavailable
+
       // HACK: if we have a sync state already, we round-trip it through the encoding system to make
       // sure state is preserved. This prevents an infinite loop caused by failed attempts to send
       // messages during disconnection.
       // TODO: cover that case with a test and remove this hack
-      const syncStateRaw = this.#getSyncState(peerId)
-      const syncState = A.decodeSyncState(A.encodeSyncState(syncStateRaw))
-      this.#setSyncState(peerId, syncState)
+      peerIds.forEach(peerId => {
+        const syncStateRaw = this.#getSyncState(peerId)
+        const syncState = A.decodeSyncState(A.encodeSyncState(syncStateRaw))
+        this.#setSyncState(peerId, syncState)
+      })
+
+      // we register out peers first, then say that sync has started
+      this.#syncStarted = true
+      this.#checkDocUnavailable()
 
-      this.#sendSyncMessage(peerId, doc)
+      if (doc === undefined) return
+
+      peerIds.forEach(peerId => {
+        this.#sendSyncMessage(peerId, doc)
+      })
     })
   }
 
@@ -139,43 +222,118 @@ export class DocSynchronizer extends Synchronizer {
     this.#peers = this.#peers.filter(p => p !== peerId)
   }
 
-  receiveSyncMessage(
-    peerId: PeerId,
-    channelId: ChannelId,
-    message: Uint8Array
-  ) {
-    if ((channelId as string) !== (this.handle.documentId as string))
+  receiveMessage(message: SynchronizerMessage) {
+    switch (message.type) {
+      case "sync":
+      case "request":
+        this.receiveSyncMessage(message)
+        break
+      case "ephemeral":
+        this.receiveEphemeralMessage(message)
+        break
+      case "doc-unavailable":
+        this.#peerDocumentStatuses[message.senderId] = "unavailable"
+        this.#checkDocUnavailable()
+        break
+      default:
+        throw new Error(`unknown message type: ${message}`)
+    }
+  }
+
+  receiveEphemeralMessage(message: EphemeralMessage) {
+    if (message.documentId !== this.handle.documentId)
+      throw new Error(`channelId doesn't match documentId`)
+
+    const { senderId, data } = message
+
+    const contents = decode(data)
+
+    this.handle.emit("ephemeral-message", {
+      handle: this.handle,
+      senderId,
+      message: contents,
+    })
+
+    this.#peers.forEach(peerId => {
+      if (peerId === senderId) return
+      this.emit("message", {
+        ...message,
+        targetId: peerId,
+      })
+    })
+  }
+
+  receiveSyncMessage(message: SyncMessage | RequestMessage) {
+    if (message.documentId !== this.handle.documentId)
       throw new Error(`channelId doesn't match documentId`)
 
     // We need to block receiving the syncMessages until we've checked local storage
-    if (!this.handle.inState([READY, REQUESTING])) {
-      this.#pendingSyncMessages.push({ peerId, message })
+    if (!this.handle.inState([READY, REQUESTING, UNAVAILABLE])) {
+      this.#pendingSyncMessages.push(message)
       return
     }
 
     this.#processAllPendingSyncMessages()
-    this.#processSyncMessage(peerId, message)
+    this.#processSyncMessage(message)
   }
 
-  #processSyncMessage(peerId: PeerId, message: Uint8Array) {
+  #processSyncMessage(message: SyncMessage | RequestMessage) {
+    if (isRequestMessage(message)) {
+      this.#peerDocumentStatuses[message.senderId] = "wants"
+    }
+
+    this.#checkDocUnavailable()
+
+    // if the message has heads, then the peer has the document
+    if (A.decodeSyncMessage(message.data).heads.length > 0) {
+      this.#peerDocumentStatuses[message.senderId] = "has"
+    }
+
     this.handle.update(doc => {
       const [newDoc, newSyncState] = A.receiveSyncMessage(
         doc,
-        this.#getSyncState(peerId),
-        message
+        this.#getSyncState(message.senderId),
+        message.data
       )
 
-      this.#setSyncState(peerId, newSyncState)
+      this.#setSyncState(message.senderId, newSyncState)
 
       // respond to just this peer (as required)
-      this.#sendSyncMessage(peerId, doc)
+      this.#sendSyncMessage(message.senderId, doc)
       return newDoc
     })
+
+    this.#checkDocUnavailable()
+  }
+
+  #checkDocUnavailable() {
+    // if we know none of the peers have the document, tell all our peers that we don't either
+    if (
+      this.#syncStarted &&
+      this.handle.inState([REQUESTING]) &&
+      this.#peers.every(
+        peerId =>
+          this.#peerDocumentStatuses[peerId] === "unavailable" ||
+          this.#peerDocumentStatuses[peerId] === "wants"
+      )
+    ) {
+      this.#peers
+        .filter(peerId => this.#peerDocumentStatuses[peerId] === "wants")
+        .forEach(peerId => {
+          this.emit("message", {
+            type: "doc-unavailable",
+            documentId: this.handle.documentId,
+            targetId: peerId,
+          })
+        })
+
+      this.handle.unavailable()
+    }
   }
 
   #processAllPendingSyncMessages() {
-    for (const { peerId, message } of this.#pendingSyncMessages) {
-      this.#processSyncMessage(peerId, message)
+    for (const message of this.#pendingSyncMessages) {
+      this.#processSyncMessage(message)
    }
 
    this.#pendingSyncMessages = []
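The ephemeral-message path added above wires DocHandle.broadcast() to every peer currently syncing the document. A usage sketch from the application side, based on the handlers in this hunk and the DocHandle test at the end of this diff (the payload shape { handle, senderId, message } comes from receiveEphemeralMessage; the document type and payload are illustrative):

import { Repo } from "@automerge/automerge-repo"

declare const repo: Repo // a Repo wired up with a network adapter

const handle = repo.create<{ title: string }>()

// Inbound: DocSynchronizer CBOR-decodes the payload and re-emits it on the handle.
handle.on("ephemeral-message", ({ senderId, message }) => {
  console.log(`ephemeral payload from ${senderId}:`, message)
})

// Outbound: broadcast() emits "ephemeral-message-outbound", which
// #broadcastToPeers fans out to every peer syncing this document.
handle.broadcast({ cursorAt: 42 })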
package/src/synchronizer/Synchronizer.ts CHANGED
@@ -1,15 +1,10 @@
 import EventEmitter from "eventemitter3"
-import { ChannelId, PeerId } from "../types.js"
-import { MessagePayload } from "../network/NetworkAdapter.js"
+import { Message, MessageContents } from "../network/messages.js"
 
 export abstract class Synchronizer extends EventEmitter<SynchronizerEvents> {
-  abstract receiveSyncMessage(
-    peerId: PeerId,
-    channelId: ChannelId,
-    message: Uint8Array
-  ): void
+  abstract receiveMessage(message: Message): void
 }
 
 export interface SynchronizerEvents {
-  message: (arg: MessagePayload) => void
+  message: (arg: MessageContents) => void
 }
package/src/types.ts CHANGED
@@ -3,4 +3,7 @@ export type AutomergeUrl = string & { __documentUrl: true } // for opening / lin
 export type BinaryDocumentId = Uint8Array & { __binaryDocumentId: true } // for storing / syncing
 
 export type PeerId = string & { __peerId: false }
-export type ChannelId = string & { __channelId: false }
+
+export type DistributiveOmit<T, K extends keyof any> = T extends any
+  ? Omit<T, K>
+  : never
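DistributiveOmit is needed because the built-in Omit does not distribute over the members of a union, so applying it to a discriminated union like the new message types would collapse them. A minimal illustration; the Ping/Pong types here are hypothetical and only restate the helper defined above:

type DistributiveOmit<T, K extends keyof any> = T extends any ? Omit<T, K> : never

type Ping = { type: "ping"; senderId: string; data: Uint8Array }
type Pong = { type: "pong"; senderId: string }

// Plain Omit keys off the union's *common* properties, so `data` is lost and
// the result collapses to { type: "ping" | "pong" }.
type Collapsed = Omit<Ping | Pong, "senderId">

// DistributiveOmit maps over each member, preserving the discriminated union:
//   { type: "ping"; data: Uint8Array } | { type: "pong" }
type Preserved = DistributiveOmit<Ping | Pong, "senderId">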
package/test/CollectionSynchronizer.test.ts CHANGED
@@ -1,8 +1,7 @@
-import { CollectionSynchronizer } from "../src/synchronizer/CollectionSynchronizer.js"
-import { ChannelId, DocCollection, BinaryDocumentId, PeerId } from "../src"
 import assert from "assert"
 import { beforeEach } from "mocha"
-import { MessagePayload } from "../src/network/NetworkAdapter.js"
+import { DocCollection, PeerId } from "../src"
+import { CollectionSynchronizer } from "../src/synchronizer/CollectionSynchronizer.js"
 
 describe("CollectionSynchronizer", () => {
   let collection: DocCollection
@@ -21,9 +20,9 @@ describe("CollectionSynchronizer", () => {
21
20
  const handle = collection.create()
22
21
  synchronizer.addPeer("peer1" as PeerId)
23
22
 
24
- synchronizer.once("message", (event: MessagePayload) => {
23
+ synchronizer.once("message", event => {
25
24
  assert(event.targetId === "peer1")
26
- assert(event.channelId === (handle.documentId as unknown as ChannelId))
25
+ assert(event.documentId === handle.documentId)
27
26
  done()
28
27
  })
29
28
 
@@ -33,9 +32,9 @@ describe("CollectionSynchronizer", () => {
33
32
  it("starts synchronizing existing documents when a peer is added", done => {
34
33
  const handle = collection.create()
35
34
  synchronizer.addDocument(handle.documentId)
36
- synchronizer.once("message", (event: MessagePayload) => {
35
+ synchronizer.once("message", event => {
37
36
  assert(event.targetId === "peer1")
38
- assert(event.channelId === (handle.documentId as unknown as ChannelId))
37
+ assert(event.documentId === handle.documentId)
39
38
  done()
40
39
  })
41
40
  synchronizer.addPeer("peer1" as PeerId)
package/test/DocHandle.test.ts CHANGED
@@ -5,10 +5,12 @@ import { DocHandle, DocHandleChangePayload } from "../src"
 import { pause } from "../src/helpers/pause"
 import { TestDoc } from "./types.js"
 import { generateAutomergeUrl, parseAutomergeUrl } from "../src/DocUrl"
+import { eventPromise } from "../src/helpers/eventPromise"
+import { decode } from "cbor-x"
 
 describe("DocHandle", () => {
-  const TEST_ID = parseAutomergeUrl(generateAutomergeUrl()).encodedDocumentId
-  const BOGUS_ID = parseAutomergeUrl(generateAutomergeUrl()).encodedDocumentId
+  const TEST_ID = parseAutomergeUrl(generateAutomergeUrl()).documentId
+  const BOGUS_ID = parseAutomergeUrl(generateAutomergeUrl()).documentId
 
   const docFromMockStorage = (doc: A.Doc<{ foo: string }>) => {
     return A.change<{ foo: string }>(doc, d => (d.foo = "bar"))
@@ -28,8 +30,7 @@ describe("DocHandle", () => {
28
30
 
29
31
  assert.equal(handle.isReady(), true)
30
32
  const doc = await handle.doc()
31
- console.log("DOC", JSON.stringify(doc))
32
- assert.equal(doc.foo, "bar")
33
+ assert.equal(doc?.foo, "bar")
33
34
  })
34
35
 
35
36
  it("should allow sync access to the doc", async () => {
@@ -60,7 +61,7 @@ describe("DocHandle", () => {
60
61
  const doc = await handle.doc()
61
62
 
62
63
  assert.equal(handle.isReady(), true)
63
- assert.equal(doc.foo, "bar")
64
+ assert.equal(doc?.foo, "bar")
64
65
  })
65
66
 
66
67
  it("should block changes until ready()", async () => {
@@ -78,7 +79,7 @@ describe("DocHandle", () => {
78
79
  handle.change(d => (d.foo = "pizza"))
79
80
 
80
81
  const doc = await handle.doc()
81
- assert.equal(doc.foo, "pizza")
82
+ assert.equal(doc?.foo, "pizza")
82
83
  })
83
84
 
84
85
  it("should not be ready while requesting from the network", async () => {
@@ -89,7 +90,7 @@ describe("DocHandle", () => {
89
90
 
90
91
  assert.equal(handle.docSync(), undefined)
91
92
  assert.equal(handle.isReady(), false)
92
- assert.throws(() => handle.change(h => {}))
93
+ assert.throws(() => handle.change(_ => { }))
93
94
  })
94
95
 
95
96
  it("should become ready if the document is updated by the network", async () => {
@@ -105,7 +106,7 @@ describe("DocHandle", () => {
105
106
 
106
107
  const doc = await handle.doc()
107
108
  assert.equal(handle.isReady(), true)
108
- assert.equal(doc.foo, "bar")
109
+ assert.equal(doc?.foo, "bar")
109
110
  })
110
111
 
111
112
  it("should emit a change message when changes happen", async () => {
@@ -120,7 +121,7 @@ describe("DocHandle", () => {
120
121
  })
121
122
 
122
123
  const doc = await handle.doc()
123
- assert.equal(doc.foo, "bar")
124
+ assert.equal(doc?.foo, "bar")
124
125
 
125
126
  const changePayload = await p
126
127
  assert.deepStrictEqual(changePayload.doc, doc)
@@ -181,7 +182,7 @@ describe("DocHandle", () => {
181
182
  })
182
183
 
183
184
  const doc = await handle.doc()
184
- assert.equal(doc.foo, "baz")
185
+ assert.equal(doc?.foo, "baz")
185
186
 
186
187
  return p
187
188
  })
@@ -196,7 +197,7 @@ describe("DocHandle", () => {
196
197
 
197
198
  await p
198
199
  const doc = await handle.doc()
199
- assert.equal(doc.foo, "bar")
200
+ assert.equal(doc?.foo, "bar")
200
201
  })
201
202
 
202
203
  it("should not emit a patch message if no change happens", done => {
@@ -232,7 +233,7 @@ describe("DocHandle", () => {
232
233
 
233
234
  // now it should not time out
234
235
  const doc = await handle.doc()
235
- assert.equal(doc.foo, "bar")
236
+ assert.equal(doc?.foo, "bar")
236
237
  })
237
238
 
238
239
  it("should time out if the document is not updated from the network", async () => {
@@ -265,7 +266,7 @@ describe("DocHandle", () => {
265
266
  await pause(5)
266
267
 
267
268
  const doc = await handle.doc()
268
- assert.equal(doc.foo, "bar")
269
+ assert.equal(doc?.foo, "bar")
269
270
  })
270
271
 
271
272
  it("should emit a delete event when deleted", async () => {
@@ -301,4 +302,18 @@ describe("DocHandle", () => {
301
302
 
302
303
  assert(wasBar, "foo should have been bar as we changed at the old heads")
303
304
  })
305
+
306
+ describe("ephemeral messaging", () => {
307
+ it("can broadcast a message for the network to send out", async () => {
308
+ const handle = new DocHandle<TestDoc>(TEST_ID, { isNew: true })
309
+ const message = { foo: "bar" }
310
+
311
+ const promise = eventPromise(handle, "ephemeral-message-outbound")
312
+
313
+ handle.broadcast(message)
314
+
315
+ const { data } = await promise
316
+ assert.deepStrictEqual(decode(data), message)
317
+ })
318
+ })
304
319
  })