@automerge/automerge-repo 1.0.0-alpha.2 → 1.0.0-alpha.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60)
  1. package/dist/DocCollection.d.ts +2 -1
  2. package/dist/DocCollection.d.ts.map +1 -1
  3. package/dist/DocCollection.js +17 -8
  4. package/dist/DocHandle.d.ts +27 -4
  5. package/dist/DocHandle.d.ts.map +1 -1
  6. package/dist/DocHandle.js +44 -6
  7. package/dist/DocUrl.d.ts +3 -3
  8. package/dist/DocUrl.js +9 -9
  9. package/dist/EphemeralData.d.ts +8 -16
  10. package/dist/EphemeralData.d.ts.map +1 -1
  11. package/dist/EphemeralData.js +1 -28
  12. package/dist/Repo.d.ts +0 -2
  13. package/dist/Repo.d.ts.map +1 -1
  14. package/dist/Repo.js +13 -33
  15. package/dist/helpers/tests/network-adapter-tests.d.ts.map +1 -1
  16. package/dist/helpers/tests/network-adapter-tests.js +15 -13
  17. package/dist/index.d.ts +2 -1
  18. package/dist/index.d.ts.map +1 -1
  19. package/dist/network/NetworkAdapter.d.ts +4 -13
  20. package/dist/network/NetworkAdapter.d.ts.map +1 -1
  21. package/dist/network/NetworkSubsystem.d.ts +5 -4
  22. package/dist/network/NetworkSubsystem.d.ts.map +1 -1
  23. package/dist/network/NetworkSubsystem.js +39 -25
  24. package/dist/network/messages.d.ts +57 -0
  25. package/dist/network/messages.d.ts.map +1 -0
  26. package/dist/network/messages.js +21 -0
  27. package/dist/synchronizer/CollectionSynchronizer.d.ts +3 -2
  28. package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
  29. package/dist/synchronizer/CollectionSynchronizer.js +19 -13
  30. package/dist/synchronizer/DocSynchronizer.d.ts +9 -3
  31. package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -1
  32. package/dist/synchronizer/DocSynchronizer.js +145 -29
  33. package/dist/synchronizer/Synchronizer.d.ts +3 -4
  34. package/dist/synchronizer/Synchronizer.d.ts.map +1 -1
  35. package/dist/types.d.ts +1 -3
  36. package/dist/types.d.ts.map +1 -1
  37. package/fuzz/fuzz.ts +4 -4
  38. package/package.json +2 -2
  39. package/src/DocCollection.ts +19 -9
  40. package/src/DocHandle.ts +87 -10
  41. package/src/DocUrl.ts +9 -9
  42. package/src/EphemeralData.ts +6 -36
  43. package/src/Repo.ts +15 -49
  44. package/src/helpers/tests/network-adapter-tests.ts +18 -14
  45. package/src/index.ts +12 -2
  46. package/src/network/NetworkAdapter.ts +4 -20
  47. package/src/network/NetworkSubsystem.ts +61 -38
  48. package/src/network/messages.ts +123 -0
  49. package/src/synchronizer/CollectionSynchronizer.ts +38 -19
  50. package/src/synchronizer/DocSynchronizer.ts +196 -38
  51. package/src/synchronizer/Synchronizer.ts +3 -8
  52. package/src/types.ts +4 -1
  53. package/test/CollectionSynchronizer.test.ts +6 -7
  54. package/test/DocHandle.test.ts +28 -13
  55. package/test/DocSynchronizer.test.ts +85 -9
  56. package/test/Repo.test.ts +221 -59
  57. package/test/StorageSubsystem.test.ts +2 -2
  58. package/test/helpers/DummyNetworkAdapter.ts +1 -1
  59. package/tsconfig.json +2 -1
  60. package/test/EphemeralData.test.ts +0 -44
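The test diffs below exercise two user-facing changes in this release: ephemeral ("presence") broadcast moves from repo.ephemeralData onto the DocHandle, and a document that no connected peer can supply now resolves doc() to undefined and emits an "unavailable" event instead of hanging. The following is a minimal sketch of that new surface, derived from the test changes below and not from the package itself; it assumes the package index re-exports Repo, PeerId, and AutomergeUrl, and the event payload shapes are taken from the tests.

// Sketch only — names and exports outside the diff are assumptions.
import { Repo, PeerId, AutomergeUrl } from "@automerge/automerge-repo"
import { BroadcastChannelNetworkAdapter } from "@automerge/automerge-repo-network-broadcastchannel"

type TestDoc = { foo: string }

const alice = new Repo({
  network: [new BroadcastChannelNetworkAdapter()],
  peerId: "alice" as PeerId,
})
const bob = new Repo({
  network: [new BroadcastChannelNetworkAdapter()],
  peerId: "bob" as PeerId,
})

const aliceHandle = alice.create<TestDoc>()
aliceHandle.change(d => {
  d.foo = "bar"
})

// Ephemeral messages are now sent and received per document handle
// (previously repo.ephemeralData.broadcast(channelId, data)).
const bobHandle = bob.find<TestDoc>(aliceHandle.url)
bobHandle.on("ephemeral-message", ({ message }) => {
  console.log("got presence:", message)
})
aliceHandle.broadcast({ presence: "alice" })

// A document nobody on the network has no longer hangs forever:
// doc() resolves to undefined and the handle emits "unavailable".
declare const unknownUrl: AutomergeUrl // placeholder for a URL no peer holds
const missing = bob.find<TestDoc>(unknownUrl)
missing.on("unavailable", () => console.log("document not available"))
missing.doc().then(doc => console.log(doc)) // undefined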
package/test/DocSynchronizer.test.ts CHANGED
@@ -5,16 +5,25 @@ import { DocSynchronizer } from "../src/synchronizer/DocSynchronizer.js"
  import { eventPromise } from "../src/helpers/eventPromise.js"
  import { TestDoc } from "./types.js"
  import { parseAutomergeUrl, generateAutomergeUrl } from "../src/DocUrl.js"
+ import { SyncMessage } from "../src/index.js"
+ import {
+ DocumentUnavailableMessage,
+ DocumentUnavailableMessageContents,
+ MessageContents,
+ RequestMessageContents,
+ SyncMessageContents,
+ } from "../src/network/messages.js"

  const alice = "alice" as PeerId
  const bob = "bob" as PeerId
+ const charlie = "charlie" as PeerId

  describe("DocSynchronizer", () => {
  let handle: DocHandle<TestDoc>
  let docSynchronizer: DocSynchronizer

  const setup = () => {
- const docId = parseAutomergeUrl(generateAutomergeUrl()).encodedDocumentId
+ const docId = parseAutomergeUrl(generateAutomergeUrl()).documentId
  handle = new DocHandle<TestDoc>(docId, { isNew: true })
  docSynchronizer = new DocSynchronizer(handle)
  return { handle, docSynchronizer }
@@ -27,19 +36,21 @@ describe("DocSynchronizer", () => {

  it("emits a syncMessage when beginSync is called", async () => {
  const { docSynchronizer } = setup()
- docSynchronizer.beginSync(alice)
- const { targetId } = await eventPromise(docSynchronizer, "message")
+ docSynchronizer.beginSync([alice])
+ const { targetId, type } = await eventPromise(docSynchronizer, "message")
+ assert.equal(type, "sync")
  assert.equal(targetId, "alice")
  })

  it("emits a syncMessage to peers when the handle is updated", async () => {
  const { handle, docSynchronizer } = setup()
- docSynchronizer.beginSync(alice)
+ docSynchronizer.beginSync([alice])
  handle.change(doc => {
  doc.foo = "bar"
  })
- const { targetId } = await eventPromise(docSynchronizer, "message")
+ const { targetId, type } = await eventPromise(docSynchronizer, "message")
  assert.equal(targetId, "alice")
+ assert.equal(type, "sync")
  })

  it("still syncs with a peer after it disconnects and reconnects", async () => {
@@ -47,23 +58,88 @@ describe("DocSynchronizer", () => {

  // first connection
  {
- await docSynchronizer.beginSync(bob)
+ docSynchronizer.beginSync([bob])
  handle.change(doc => {
  doc.foo = "a change"
  })
- const { targetId } = await eventPromise(docSynchronizer, "message")
+ const { targetId, type } = await eventPromise(docSynchronizer, "message")
  assert.equal(targetId, "bob")
+ assert.equal(type, "sync")
  docSynchronizer.endSync(bob)
  }

  // second connection
  {
- await docSynchronizer.beginSync(bob)
+ docSynchronizer.beginSync([bob])
  handle.change(doc => {
  doc.foo = "another change"
  })
- const { targetId } = await eventPromise(docSynchronizer, "message")
+ const { targetId, type } = await eventPromise(docSynchronizer, "message")
  assert.equal(targetId, "bob")
+ assert.equal(type, "sync")
  }
  })
+
+ it("emits a requestMessage if the local handle is being requested", async () => {
+ const docId = parseAutomergeUrl(generateAutomergeUrl()).documentId
+
+ const handle = new DocHandle<TestDoc>(docId, { isNew: false })
+ docSynchronizer = new DocSynchronizer(handle)
+ docSynchronizer.beginSync([alice])
+ handle.request()
+ const message = await eventPromise(docSynchronizer, "message")
+ assert.equal(message.targetId, "alice")
+ assert.equal(message.type, "request")
+ })
+
+ it("emits the correct sequence of messages when a document is not found then not available", async () => {
+ const docId = parseAutomergeUrl(generateAutomergeUrl()).documentId
+
+ const bobHandle = new DocHandle<TestDoc>(docId, { isNew: false })
+ const bobDocSynchronizer = new DocSynchronizer(bobHandle)
+ bobDocSynchronizer.beginSync([alice])
+ bobHandle.request()
+ const message = await eventPromise(bobDocSynchronizer, "message")
+
+ const aliceHandle = new DocHandle<TestDoc>(docId, { isNew: false })
+ const aliceDocSynchronizer = new DocSynchronizer(aliceHandle)
+ aliceHandle.request()
+
+ aliceDocSynchronizer.receiveSyncMessage({ ...message, senderId: bob })
+ aliceDocSynchronizer.beginSync([charlie, bob])
+
+ const [charlieMessage, bobMessage] = await new Promise<MessageContents[]>(
+ resolve => {
+ const messages: MessageContents[] = []
+ aliceDocSynchronizer.on("message", message => {
+ messages.push(message)
+ if (messages.length === 2) {
+ resolve(messages)
+ }
+ })
+ }
+ )
+
+ // the response should be a sync message, not a request message
+ assert.equal(charlieMessage.targetId, "charlie")
+ assert.equal(charlieMessage.type, "request")
+ assert.equal(bobMessage.targetId, "bob")
+ assert.equal(bobMessage.type, "sync")
+
+ const docUnavailableMessage = {
+ type: "doc-unavailable",
+ targetId: alice,
+ senderId: charlie,
+ documentId: docId,
+ } satisfies DocumentUnavailableMessage
+
+ const p = eventPromise(aliceDocSynchronizer, "message")
+
+ aliceDocSynchronizer.receiveMessage(docUnavailableMessage)
+
+ const message2 = await p
+
+ assert.equal(message2.targetId, "bob")
+ assert.equal(message2.type, "doc-unavailable")
+ })
  })
package/test/Repo.test.ts CHANGED
@@ -1,10 +1,9 @@
  import assert from "assert"
  import { MessageChannelNetworkAdapter } from "@automerge/automerge-repo-network-messagechannel"
- import * as A from "@automerge/automerge"
+ import { BroadcastChannelNetworkAdapter } from "@automerge/automerge-repo-network-broadcastchannel"

  import {
  AutomergeUrl,
- ChannelId,
  DocHandle,
  DocumentId,
  PeerId,
@@ -17,11 +16,8 @@ import { DummyNetworkAdapter } from "./helpers/DummyNetworkAdapter.js"
  import { DummyStorageAdapter } from "./helpers/DummyStorageAdapter.js"
  import { getRandomItem } from "./helpers/getRandomItem.js"
  import { TestDoc } from "./types.js"
- import {
- binaryToDocumentId,
- generateAutomergeUrl,
- stringifyAutomergeUrl,
- } from "../src/DocUrl"
+ import { generateAutomergeUrl, stringifyAutomergeUrl } from "../src/DocUrl"
+ import { READY } from "../src/DocHandle"

  describe("Repo", () => {
  describe("single repo", () => {
@@ -55,10 +51,9 @@ describe("Repo", () => {
  d.foo = "bar"
  })
  const v = await handle.doc()
- console.log("V is ", v)
  assert.equal(handle.isReady(), true)

- assert.equal(v.foo, "bar")
+ assert.equal(v?.foo, "bar")
  })

  it("throws an error if we try to find a handle with an invalid AutomergeUrl", async () => {
@@ -75,10 +70,17 @@ describe("Repo", () => {
  const handle = repo.find<TestDoc>(generateAutomergeUrl())
  assert.equal(handle.isReady(), false)

- return assert.rejects(
- rejectOnTimeout(handle.doc(), 10),
- "This document should not exist"
- )
+ const doc = await handle.doc()
+ assert.equal(doc, undefined)
+ })
+
+ it("fires an 'unavailable' event when you don't have the document locally and network to connect to", async () => {
+ const { repo } = setup()
+ const url = generateAutomergeUrl()
+ const handle = repo.find<TestDoc>(url)
+ assert.equal(handle.isReady(), false)
+
+ await eventPromise(handle, "unavailable")
  })

  it("can find a created document", async () => {
@@ -95,7 +97,7 @@ describe("Repo", () => {
  assert.equal(handle.isReady(), true)

  const v = await bobHandle.doc()
- assert.equal(v.foo, "bar")
+ assert.equal(v?.foo, "bar")
  })

  it("saves the document when changed and can find it again", async () => {
@@ -118,7 +120,7 @@ describe("Repo", () => {
  const bobHandle = repo2.find<TestDoc>(handle.url)

  const v = await bobHandle.doc()
- assert.equal(v.foo, "bar")
+ assert.equal(v?.foo, "bar")
  })

  it("can delete an existing document", async () => {
@@ -173,8 +175,8 @@ describe("Repo", () => {
  })
  assert.equal(handle.isReady(), true)

- repo.on("delete-document", ({ encodedDocumentId }) => {
- assert.equal(encodedDocumentId, handle.documentId)
+ repo.on("delete-document", ({ documentId }) => {
+ assert.equal(documentId, handle.documentId)

  done()
  })
@@ -242,7 +244,24 @@ describe("Repo", () => {
  })

  describe("sync", async () => {
- const setup = async () => {
+ const charlieExcludedDocuments: DocumentId[] = []
+ const bobExcludedDocuments: DocumentId[] = []
+
+ const sharePolicy: SharePolicy = async (peerId, documentId) => {
+ if (documentId === undefined) return false
+
+ // make sure that charlie never gets excluded documents
+ if (charlieExcludedDocuments.includes(documentId) && peerId === "charlie")
+ return false
+
+ // make sure that bob never gets excluded documents
+ if (bobExcludedDocuments.includes(documentId) && peerId === "bob")
+ return false
+
+ return true
+ }
+
+ const setupRepos = (connectAlice = true) => {
  // Set up three repos; connect Alice to Bob, and Bob to Charlie

  const aliceBobChannel = new MessageChannel()
@@ -251,28 +270,10 @@ describe("Repo", () => {
  const { port1: aliceToBob, port2: bobToAlice } = aliceBobChannel
  const { port1: bobToCharlie, port2: charlieToBob } = bobCharlieChannel

- const charlieExcludedDocuments: DocumentId[] = []
- const bobExcludedDocuments: DocumentId[] = []
-
- const sharePolicy: SharePolicy = async (peerId, documentId) => {
- if (documentId === undefined) return false
-
- // make sure that charlie never gets excluded documents
- if (
- charlieExcludedDocuments.includes(documentId) &&
- peerId === "charlie"
- )
- return false
-
- // make sure that charlie never gets excluded documents
- if (bobExcludedDocuments.includes(documentId) && peerId === "bob")
- return false
-
- return true
- }
+ const aliceNetworkAdapter = new MessageChannelNetworkAdapter(aliceToBob)

  const aliceRepo = new Repo({
- network: [new MessageChannelNetworkAdapter(aliceToBob)],
+ network: connectAlice ? [aliceNetworkAdapter] : [],
  peerId: "alice" as PeerId,
  sharePolicy,
  })
@@ -291,6 +292,24 @@ describe("Repo", () => {
  peerId: "charlie" as PeerId,
  })

+ const teardown = () => {
+ aliceBobChannel.port1.close()
+ bobCharlieChannel.port1.close()
+ }
+
+ return {
+ teardown,
+ aliceRepo,
+ bobRepo,
+ charlieRepo,
+ aliceNetworkAdapter,
+ }
+ }
+
+ const setup = async (connectAlice = true) => {
+ const { teardown, aliceRepo, bobRepo, charlieRepo, aliceNetworkAdapter } =
+ setupRepos(connectAlice)
+
  const aliceHandle = aliceRepo.create<TestDoc>()
  aliceHandle.change(d => {
  d.foo = "bar"
@@ -311,16 +330,13 @@ describe("Repo", () => {
  })

  await Promise.all([
- eventPromise(aliceRepo.networkSubsystem, "peer"),
+ ...(connectAlice
+ ? [eventPromise(aliceRepo.networkSubsystem, "peer")]
+ : []),
  eventPromise(bobRepo.networkSubsystem, "peer"),
  eventPromise(charlieRepo.networkSubsystem, "peer"),
  ])

- const teardown = () => {
- aliceBobChannel.port1.close()
- bobCharlieChannel.port1.close()
- }
-
  return {
  aliceRepo,
  bobRepo,
@@ -329,6 +345,7 @@ describe("Repo", () => {
  notForCharlie,
  notForBob,
  teardown,
+ aliceNetworkAdapter,
  }
  }

@@ -395,13 +412,59 @@ describe("Repo", () => {

  it("doesn't find a document which doesn't exist anywhere on the network", async () => {
  const { charlieRepo } = await setup()
- const handle = charlieRepo.find<TestDoc>(generateAutomergeUrl())
+ const url = generateAutomergeUrl()
+ const handle = charlieRepo.find<TestDoc>(url)
  assert.equal(handle.isReady(), false)

- return assert.rejects(
- rejectOnTimeout(handle.doc(), 10),
- "This document should not exist"
- )
+ const doc = await handle.doc()
+ assert.equal(doc, undefined)
+ })
+
+ it("fires an 'unavailable' event when a document is not available on the network", async () => {
+ const { charlieRepo } = await setup()
+ const url = generateAutomergeUrl()
+ const handle = charlieRepo.find<TestDoc>(url)
+ assert.equal(handle.isReady(), false)
+
+ await Promise.all([
+ eventPromise(handle, "unavailable"),
+ eventPromise(charlieRepo, "unavailable-document"),
+ ])
+
+ // make sure it fires a second time if the doc is still unavailable
+ const handle2 = charlieRepo.find<TestDoc>(url)
+ assert.equal(handle2.isReady(), false)
+ await eventPromise(handle2, "unavailable")
+ })
+
+ it("a previously unavailable document syncs over the network if a peer with it connects", async () => {
+ const {
+ charlieRepo,
+ notForCharlie,
+ aliceRepo,
+ teardown,
+ aliceNetworkAdapter,
+ } = await setup(false)
+
+ const url = stringifyAutomergeUrl({ documentId: notForCharlie })
+ const handle = charlieRepo.find<TestDoc>(url)
+ assert.equal(handle.isReady(), false)
+
+ await eventPromise(handle, "unavailable")
+
+ aliceRepo.networkSubsystem.addNetworkAdapter(aliceNetworkAdapter)
+
+ await eventPromise(aliceRepo.networkSubsystem, "peer")
+
+ const doc = await handle.doc([READY])
+ assert.deepStrictEqual(doc, { foo: "baz" })
+
+ // an additional find should also return the correct resolved document
+ const handle2 = charlieRepo.find<TestDoc>(url)
+ const doc2 = await handle2.doc()
+ assert.deepStrictEqual(doc2, { foo: "baz" })
+
+ teardown()
  })

  it("a deleted document from charlieRepo can be refetched", async () => {
@@ -426,17 +489,40 @@ describe("Repo", () => {
  teardown()
  })

- it("can broadcast a message", async () => {
- const { aliceRepo, bobRepo, teardown } = await setup()
+ const setupMeshNetwork = async () => {
+ const aliceRepo = new Repo({
+ network: [new BroadcastChannelNetworkAdapter()],
+ peerId: "alice" as PeerId,
+ })
+
+ const bobRepo = new Repo({
+ network: [new BroadcastChannelNetworkAdapter()],
+ peerId: "bob" as PeerId,
+ })

- const channelId = "broadcast" as ChannelId
- const data = { presence: "bob" }
+ const charlieRepo = new Repo({
+ network: [new BroadcastChannelNetworkAdapter()],
+ peerId: "charlie" as PeerId,
+ })

- bobRepo.ephemeralData.broadcast(channelId, data)
- const d = await eventPromise(aliceRepo.ephemeralData, "data")
+ // pause to let the network set up
+ await pause(50)

- assert.deepStrictEqual(d.data, data)
- teardown()
+ return {
+ aliceRepo,
+ bobRepo,
+ charlieRepo,
+ }
+ }
+
+ it("can emit an 'unavailable' event when it's not found on the network", async () => {
+ const { charlieRepo } = await setupMeshNetwork()
+
+ const url = generateAutomergeUrl()
+ const handle = charlieRepo.find<TestDoc>(url)
+ assert.equal(handle.isReady(), false)
+
+ await eventPromise(handle, "unavailable")
  })

  it("syncs a bunch of changes", async () => {
@@ -452,9 +538,9 @@ describe("Repo", () => {
  const doc =
  Math.random() < 0.5
  ? // heads, create a new doc
- repo.create<TestDoc>()
+ repo.create<TestDoc>()
  : // tails, pick a random doc
- (getRandomItem(docs) as DocHandle<TestDoc>)
+ (getRandomItem(docs) as DocHandle<TestDoc>)

  // make sure the doc is ready
  if (!doc.isReady()) {
@@ -470,5 +556,81 @@ describe("Repo", () => {

  teardown()
  })
+
+ it("can broadcast a message to peers with the correct document only", async () => {
+ const { aliceRepo, bobRepo, charlieRepo, notForCharlie, teardown } =
+ await setup()
+
+ const data = { presence: "alice" }
+
+ const aliceHandle = aliceRepo.find<TestDoc>(
+ stringifyAutomergeUrl({ documentId: notForCharlie })
+ )
+ const bobHandle = bobRepo.find<TestDoc>(
+ stringifyAutomergeUrl({ documentId: notForCharlie })
+ )
+
+ await pause(50)
+
+ const charliePromise = new Promise<void>((resolve, reject) => {
+ charlieRepo.networkSubsystem.on("message", message => {
+ if (
+ message.type === "ephemeral" &&
+ message.documentId === notForCharlie
+ ) {
+ reject(new Error("Charlie should not receive this message"))
+ }
+ })
+ setTimeout(resolve, 100)
+ })
+
+ aliceHandle.broadcast(data)
+ const { message } = await eventPromise(bobHandle, "ephemeral-message")
+
+ assert.deepStrictEqual(message, data)
+ assert.equal(charlieRepo.handles[notForCharlie], undefined, "charlie no")
+
+ await charliePromise
+ teardown()
+ })
+
+ it("can broadcast a message without entering into an infinite loop", async () => {
+ const { aliceRepo, bobRepo, charlieRepo } = await setupMeshNetwork()
+
+ // pause to let the network set up
+ await pause(50)
+ const message = { presence: "alex" }
+
+ const aliceHandle = aliceRepo.create<TestDoc>()
+
+ const bobHandle = bobRepo.find(aliceHandle.url)
+ const charlieHandle = charlieRepo.find(aliceHandle.url)
+
+ const aliceDoesntGetIt = new Promise<void>((resolve, reject) => {
+ setTimeout(() => {
+ resolve()
+ }, 100)
+
+ aliceHandle.on("ephemeral-message", () => {
+ reject("alice got the message")
+ })
+ })
+
+ const bobGotIt = eventPromise(bobHandle, "ephemeral-message")
+ const charlieGotIt = eventPromise(charlieHandle, "ephemeral-message")
+
+ // let things get in sync and peers meet one another
+ await pause(50)
+ aliceHandle.broadcast(message)
+
+ const [bob, charlie] = await Promise.all([
+ bobGotIt,
+ charlieGotIt,
+ aliceDoesntGetIt,
+ ])
+
+ assert.deepStrictEqual(bob.message, message)
+ assert.deepStrictEqual(charlie.message, message)
+ })
  })
  })
package/test/StorageSubsystem.test.ts CHANGED
@@ -31,7 +31,7 @@ describe("StorageSubsystem", () => {
  })

  // save it to storage
- const key = parseAutomergeUrl(generateAutomergeUrl()).encodedDocumentId
+ const key = parseAutomergeUrl(generateAutomergeUrl()).documentId
  await storage.saveDoc(key, doc)

  // reload it from storage
@@ -52,7 +52,7 @@ describe("StorageSubsystem", () => {
  })

  // save it to storage
- const key = parseAutomergeUrl(generateAutomergeUrl()).encodedDocumentId
+ const key = parseAutomergeUrl(generateAutomergeUrl()).documentId
  storage.saveDoc(key, doc)

  // create new storage subsystem to simulate a new process
package/test/helpers/DummyNetworkAdapter.ts CHANGED
@@ -1,7 +1,7 @@
  import { NetworkAdapter } from "../../src"

  export class DummyNetworkAdapter extends NetworkAdapter {
- sendMessage() {}
+ send() {}
  connect(_: string) {}
  join() {}
  leave() {}
package/tsconfig.json CHANGED
@@ -12,5 +12,6 @@
  "strict": true,
  "skipLibCheck": true
  },
- "include": ["src/**/*.ts"]
+ "include": ["src/**/*.ts"],
+ "exclude": ["dist"]
  }
@@ -1,44 +0,0 @@
- import assert from "assert"
- import * as CBOR from "cbor-x"
- import { EphemeralData } from "../src/EphemeralData.js"
- import { ChannelId, PeerId } from "../src/types.js"
-
- describe("EphemeralData", () => {
- const ephemeral = new EphemeralData()
- const otherPeerId = "other_peer" as PeerId
- const destinationChannelId = "channel_id" as ChannelId
- const messageData = { foo: "bar" }
-
- it("should emit a network message on broadcast()", done => {
- ephemeral.on("message", event => {
- try {
- const { targetId, channelId, message, broadcast } = event
- assert.deepStrictEqual(CBOR.decode(message), messageData)
- assert.strictEqual(broadcast, true)
- assert.strictEqual(channelId, channelId)
- done()
- } catch (e) {
- done(e)
- }
- })
- ephemeral.broadcast(destinationChannelId, messageData)
- })
-
- it("should emit a data event on receive()", done => {
- ephemeral.on("data", ({ peerId, channelId, data }) => {
- try {
- assert.deepStrictEqual(peerId, otherPeerId)
- assert.deepStrictEqual(channelId, destinationChannelId)
- assert.deepStrictEqual(data, messageData)
- done()
- } catch (e) {
- done(e)
- }
- })
- ephemeral.receive(
- otherPeerId,
- ("m/" + destinationChannelId) as ChannelId, // TODO: this is nonsense
- CBOR.encode(messageData)
- )
- })
- })