@automerge/automerge-repo 1.0.0-alpha.2 → 1.0.0-alpha.4

This diff represents the content of publicly available package versions as released to their public registries. It is provided for informational purposes only.
Files changed (80)
  1. package/dist/DocCollection.d.ts +4 -2
  2. package/dist/DocCollection.d.ts.map +1 -1
  3. package/dist/DocCollection.js +20 -11
  4. package/dist/DocHandle.d.ts +34 -6
  5. package/dist/DocHandle.d.ts.map +1 -1
  6. package/dist/DocHandle.js +69 -9
  7. package/dist/DocUrl.d.ts +4 -4
  8. package/dist/DocUrl.d.ts.map +1 -1
  9. package/dist/DocUrl.js +9 -9
  10. package/dist/EphemeralData.d.ts +8 -16
  11. package/dist/EphemeralData.d.ts.map +1 -1
  12. package/dist/EphemeralData.js +1 -28
  13. package/dist/Repo.d.ts +0 -2
  14. package/dist/Repo.d.ts.map +1 -1
  15. package/dist/Repo.js +37 -39
  16. package/dist/helpers/cbor.d.ts +4 -0
  17. package/dist/helpers/cbor.d.ts.map +1 -0
  18. package/dist/helpers/cbor.js +8 -0
  19. package/dist/helpers/eventPromise.d.ts +1 -1
  20. package/dist/helpers/eventPromise.d.ts.map +1 -1
  21. package/dist/helpers/headsAreSame.d.ts +0 -1
  22. package/dist/helpers/headsAreSame.d.ts.map +1 -1
  23. package/dist/helpers/tests/network-adapter-tests.d.ts.map +1 -1
  24. package/dist/helpers/tests/network-adapter-tests.js +15 -13
  25. package/dist/index.d.ts +3 -1
  26. package/dist/index.d.ts.map +1 -1
  27. package/dist/index.js +1 -0
  28. package/dist/network/NetworkAdapter.d.ts +6 -15
  29. package/dist/network/NetworkAdapter.d.ts.map +1 -1
  30. package/dist/network/NetworkAdapter.js +1 -1
  31. package/dist/network/NetworkSubsystem.d.ts +9 -6
  32. package/dist/network/NetworkSubsystem.d.ts.map +1 -1
  33. package/dist/network/NetworkSubsystem.js +69 -32
  34. package/dist/network/messages.d.ts +57 -0
  35. package/dist/network/messages.d.ts.map +1 -0
  36. package/dist/network/messages.js +21 -0
  37. package/dist/storage/StorageSubsystem.d.ts +1 -1
  38. package/dist/storage/StorageSubsystem.d.ts.map +1 -1
  39. package/dist/storage/StorageSubsystem.js +2 -2
  40. package/dist/synchronizer/CollectionSynchronizer.d.ts +3 -2
  41. package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
  42. package/dist/synchronizer/CollectionSynchronizer.js +19 -13
  43. package/dist/synchronizer/DocSynchronizer.d.ts +9 -3
  44. package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -1
  45. package/dist/synchronizer/DocSynchronizer.js +149 -34
  46. package/dist/synchronizer/Synchronizer.d.ts +4 -5
  47. package/dist/synchronizer/Synchronizer.d.ts.map +1 -1
  48. package/dist/synchronizer/Synchronizer.js +1 -1
  49. package/dist/types.d.ts +1 -3
  50. package/dist/types.d.ts.map +1 -1
  51. package/fuzz/fuzz.ts +5 -5
  52. package/package.json +3 -3
  53. package/src/DocCollection.ts +23 -12
  54. package/src/DocHandle.ts +120 -13
  55. package/src/DocUrl.ts +10 -10
  56. package/src/EphemeralData.ts +6 -36
  57. package/src/Repo.ts +37 -55
  58. package/src/helpers/cbor.ts +10 -0
  59. package/src/helpers/eventPromise.ts +1 -1
  60. package/src/helpers/headsAreSame.ts +1 -1
  61. package/src/helpers/tests/network-adapter-tests.ts +18 -14
  62. package/src/index.ts +14 -2
  63. package/src/network/NetworkAdapter.ts +6 -22
  64. package/src/network/NetworkSubsystem.ts +94 -44
  65. package/src/network/messages.ts +123 -0
  66. package/src/storage/StorageSubsystem.ts +2 -2
  67. package/src/synchronizer/CollectionSynchronizer.ts +38 -19
  68. package/src/synchronizer/DocSynchronizer.ts +201 -43
  69. package/src/synchronizer/Synchronizer.ts +4 -9
  70. package/src/types.ts +4 -1
  71. package/test/CollectionSynchronizer.test.ts +6 -7
  72. package/test/DocCollection.test.ts +2 -2
  73. package/test/DocHandle.test.ts +32 -17
  74. package/test/DocSynchronizer.test.ts +85 -9
  75. package/test/Repo.test.ts +267 -63
  76. package/test/StorageSubsystem.test.ts +4 -5
  77. package/test/helpers/DummyNetworkAdapter.ts +12 -3
  78. package/test/helpers/DummyStorageAdapter.ts +1 -1
  79. package/tsconfig.json +4 -3
  80. package/test/EphemeralData.test.ts +0 -44
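
Taken together, the test changes below show the main API shifts in this release range: DocSynchronizer.beginSync now takes an array of peer IDs and emits typed messages ("sync", "request", "doc-unavailable"), a DocHandle whose document no peer can supply now emits an "unavailable" event and resolves doc() to undefined instead of the old reject-on-timeout pattern, and ephemeral data is broadcast from a DocHandle rather than from repo.ephemeralData. A minimal sketch of the newer usage, inferred from these tests; the document shape and the MessageChannel wiring are illustrative, not part of the package:

import { Repo, AutomergeUrl, PeerId } from "@automerge/automerge-repo"
import { MessageChannelNetworkAdapter } from "@automerge/automerge-repo-network-messagechannel"

// Illustrative document shape, standing in for the tests' TestDoc type.
interface TestDoc {
  foo: string
}

// Any network adapter works here; a MessageChannel port keeps the sketch self-contained.
const { port1 } = new MessageChannel()
const repo = new Repo({
  network: [new MessageChannelNetworkAdapter(port1)],
  peerId: "alice" as PeerId,
})

const handle = repo.create<TestDoc>()
handle.change(d => {
  d.foo = "bar"
})

// Finding a document that neither local storage nor any connected peer can
// supply now fires "unavailable" (and doc() resolves to undefined) rather
// than leaving the promise pending.
declare const urlFromAnotherPeer: AutomergeUrl // placeholder, not a real URL
const missing = repo.find<TestDoc>(urlFromAnotherPeer)
missing.on("unavailable", () => {
  console.log("document not found locally or on the network")
})

// Ephemeral messages are now sent per handle instead of via repo.ephemeralData.
handle.broadcast({ presence: "alice" })
handle.on("ephemeral-message", ({ message }) => {
  console.log("received", message)
})
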
package/test/DocSynchronizer.test.ts CHANGED
@@ -5,16 +5,25 @@ import { DocSynchronizer } from "../src/synchronizer/DocSynchronizer.js"
  import { eventPromise } from "../src/helpers/eventPromise.js"
  import { TestDoc } from "./types.js"
  import { parseAutomergeUrl, generateAutomergeUrl } from "../src/DocUrl.js"
+ import { SyncMessage } from "../src/index.js"
+ import {
+ DocumentUnavailableMessage,
+ DocumentUnavailableMessageContents,
+ MessageContents,
+ RequestMessageContents,
+ SyncMessageContents,
+ } from "../src/network/messages.js"

  const alice = "alice" as PeerId
  const bob = "bob" as PeerId
+ const charlie = "charlie" as PeerId

  describe("DocSynchronizer", () => {
  let handle: DocHandle<TestDoc>
  let docSynchronizer: DocSynchronizer

  const setup = () => {
- const docId = parseAutomergeUrl(generateAutomergeUrl()).encodedDocumentId
+ const docId = parseAutomergeUrl(generateAutomergeUrl()).documentId
  handle = new DocHandle<TestDoc>(docId, { isNew: true })
  docSynchronizer = new DocSynchronizer(handle)
  return { handle, docSynchronizer }
@@ -27,19 +36,21 @@ describe("DocSynchronizer", () => {

  it("emits a syncMessage when beginSync is called", async () => {
  const { docSynchronizer } = setup()
- docSynchronizer.beginSync(alice)
- const { targetId } = await eventPromise(docSynchronizer, "message")
+ docSynchronizer.beginSync([alice])
+ const { targetId, type } = await eventPromise(docSynchronizer, "message")
+ assert.equal(type, "sync")
  assert.equal(targetId, "alice")
  })

  it("emits a syncMessage to peers when the handle is updated", async () => {
  const { handle, docSynchronizer } = setup()
- docSynchronizer.beginSync(alice)
+ docSynchronizer.beginSync([alice])
  handle.change(doc => {
  doc.foo = "bar"
  })
- const { targetId } = await eventPromise(docSynchronizer, "message")
+ const { targetId, type } = await eventPromise(docSynchronizer, "message")
  assert.equal(targetId, "alice")
+ assert.equal(type, "sync")
  })

  it("still syncs with a peer after it disconnects and reconnects", async () => {
@@ -47,23 +58,88 @@ describe("DocSynchronizer", () => {

  // first connection
  {
- await docSynchronizer.beginSync(bob)
+ docSynchronizer.beginSync([bob])
  handle.change(doc => {
  doc.foo = "a change"
  })
- const { targetId } = await eventPromise(docSynchronizer, "message")
+ const { targetId, type } = await eventPromise(docSynchronizer, "message")
  assert.equal(targetId, "bob")
+ assert.equal(type, "sync")
  docSynchronizer.endSync(bob)
  }

  // second connection
  {
- await docSynchronizer.beginSync(bob)
+ docSynchronizer.beginSync([bob])
  handle.change(doc => {
  doc.foo = "another change"
  })
- const { targetId } = await eventPromise(docSynchronizer, "message")
+ const { targetId, type } = await eventPromise(docSynchronizer, "message")
  assert.equal(targetId, "bob")
+ assert.equal(type, "sync")
  }
  })
+
+ it("emits a requestMessage if the local handle is being requested", async () => {
+ const docId = parseAutomergeUrl(generateAutomergeUrl()).documentId
+
+ const handle = new DocHandle<TestDoc>(docId, { isNew: false })
+ docSynchronizer = new DocSynchronizer(handle)
+ docSynchronizer.beginSync([alice])
+ handle.request()
+ const message = await eventPromise(docSynchronizer, "message")
+ assert.equal(message.targetId, "alice")
+ assert.equal(message.type, "request")
+ })
+
+ it("emits the correct sequence of messages when a document is not found then not available", async () => {
+ const docId = parseAutomergeUrl(generateAutomergeUrl()).documentId
+
+ const bobHandle = new DocHandle<TestDoc>(docId, { isNew: false })
+ const bobDocSynchronizer = new DocSynchronizer(bobHandle)
+ bobDocSynchronizer.beginSync([alice])
+ bobHandle.request()
+ const message = await eventPromise(bobDocSynchronizer, "message")
+
+ const aliceHandle = new DocHandle<TestDoc>(docId, { isNew: false })
+ const aliceDocSynchronizer = new DocSynchronizer(aliceHandle)
+ aliceHandle.request()
+
+ aliceDocSynchronizer.receiveSyncMessage({ ...message, senderId: bob })
+ aliceDocSynchronizer.beginSync([charlie, bob])
+
+ const [charlieMessage, bobMessage] = await new Promise<MessageContents[]>(
+ resolve => {
+ const messages: MessageContents[] = []
+ aliceDocSynchronizer.on("message", message => {
+ messages.push(message)
+ if (messages.length === 2) {
+ resolve(messages)
+ }
+ })
+ }
+ )
+
+ // the response should be a sync message, not a request message
+ assert.equal(charlieMessage.targetId, "charlie")
+ assert.equal(charlieMessage.type, "request")
+ assert.equal(bobMessage.targetId, "bob")
+ assert.equal(bobMessage.type, "sync")
+
+ const docUnavailableMessage = {
+ type: "doc-unavailable",
+ targetId: alice,
+ senderId: charlie,
+ documentId: docId,
+ } satisfies DocumentUnavailableMessage
+
+ const p = eventPromise(aliceDocSynchronizer, "message")
+
+ aliceDocSynchronizer.receiveMessage(docUnavailableMessage)
+
+ const message2 = await p
+
+ assert.equal(message2.targetId, "bob")
+ assert.equal(message2.type, "doc-unavailable")
+ })
  })
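
The new assertions above treat every synchronizer message as carrying an explicit type discriminator alongside senderId, targetId, and documentId. A rough sketch of the doc-unavailable shape those assertions rely on, with field names taken from the docUnavailableMessage literal; the authoritative definitions are in the new src/network/messages.ts, which this diff adds but does not show here:

// Sketch only: branded string types approximate the package's PeerId/DocumentId.
type PeerId = string & { __peerId: true }
type DocumentId = string & { __documentId: true }

// Message types observed in these tests: "sync", "request", "doc-unavailable"
// (plus "ephemeral" in Repo.test.ts below).
interface DocumentUnavailableMessage {
  type: "doc-unavailable"
  senderId: PeerId
  targetId: PeerId
  documentId: DocumentId
}
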
package/test/Repo.test.ts CHANGED
@@ -1,15 +1,14 @@
  import assert from "assert"
  import { MessageChannelNetworkAdapter } from "@automerge/automerge-repo-network-messagechannel"
- import * as A from "@automerge/automerge"
+ import { BroadcastChannelNetworkAdapter } from "@automerge/automerge-repo-network-broadcastchannel"

  import {
  AutomergeUrl,
- ChannelId,
  DocHandle,
  DocumentId,
  PeerId,
  SharePolicy,
- } from "../src"
+ } from "../src/index.js"
  import { eventPromise } from "../src/helpers/eventPromise.js"
  import { pause, rejectOnTimeout } from "../src/helpers/pause.js"
  import { Repo } from "../src/Repo.js"
@@ -17,22 +16,20 @@ import { DummyNetworkAdapter } from "./helpers/DummyNetworkAdapter.js"
  import { DummyStorageAdapter } from "./helpers/DummyStorageAdapter.js"
  import { getRandomItem } from "./helpers/getRandomItem.js"
  import { TestDoc } from "./types.js"
- import {
- binaryToDocumentId,
- generateAutomergeUrl,
- stringifyAutomergeUrl,
- } from "../src/DocUrl"
+ import { generateAutomergeUrl, stringifyAutomergeUrl } from "../src/DocUrl.js"
+ import { READY, AWAITING_NETWORK } from "../src/DocHandle.js"

  describe("Repo", () => {
  describe("single repo", () => {
- const setup = () => {
+ const setup = (networkReady = true) => {
  const storageAdapter = new DummyStorageAdapter()
+ const networkAdapter = new DummyNetworkAdapter(networkReady)

  const repo = new Repo({
  storage: storageAdapter,
- network: [new DummyNetworkAdapter()],
+ network: [networkAdapter],
  })
- return { repo, storageAdapter }
+ return { repo, storageAdapter, networkAdapter }
  }

  it("can instantiate a Repo", () => {
@@ -55,10 +52,9 @@ describe("Repo", () => {
  d.foo = "bar"
  })
  const v = await handle.doc()
- console.log("V is ", v)
  assert.equal(handle.isReady(), true)

- assert.equal(v.foo, "bar")
+ assert.equal(v?.foo, "bar")
  })

  it("throws an error if we try to find a handle with an invalid AutomergeUrl", async () => {
@@ -75,10 +71,34 @@ describe("Repo", () => {
  const handle = repo.find<TestDoc>(generateAutomergeUrl())
  assert.equal(handle.isReady(), false)

- return assert.rejects(
- rejectOnTimeout(handle.doc(), 10),
- "This document should not exist"
- )
+ const doc = await handle.doc()
+ assert.equal(doc, undefined)
+ })
+
+ it("fires an 'unavailable' event when you don't have the document locally and network to connect to", async () => {
+ const { repo } = setup()
+ const url = generateAutomergeUrl()
+ const handle = repo.find<TestDoc>(url)
+ assert.equal(handle.isReady(), false)
+
+ await eventPromise(handle, "unavailable")
+ })
+
+ it("doesn't mark a document as unavailable until network adapters are ready", async () => {
+ const { repo, networkAdapter } = setup(false)
+ const url = generateAutomergeUrl()
+ const handle = repo.find<TestDoc>(url)
+
+ let wasUnavailable = false
+ handle.on("unavailable", () => {
+ wasUnavailable = true
+ })
+ await pause(50)
+ assert.equal(wasUnavailable, false)
+
+ networkAdapter.emit("ready", { network: networkAdapter })
+ await eventPromise(handle, "unavailable")
+
  })

  it("can find a created document", async () => {
@@ -95,7 +115,22 @@ describe("Repo", () => {
  assert.equal(handle.isReady(), true)

  const v = await bobHandle.doc()
- assert.equal(v.foo, "bar")
+ assert.equal(v?.foo, "bar")
+ })
+
+ it("saves the document when creating it", async () => {
+ const { repo, storageAdapter } = setup()
+ const handle = repo.create<TestDoc>()
+
+ const repo2 = new Repo({
+ storage: storageAdapter,
+ network: [],
+ })
+
+ const bobHandle = repo2.find<TestDoc>(handle.url)
+ await bobHandle.whenReady()
+ assert.equal(bobHandle.isReady(), true)
+
  })

  it("saves the document when changed and can find it again", async () => {
@@ -118,7 +153,7 @@ describe("Repo", () => {
  const bobHandle = repo2.find<TestDoc>(handle.url)

  const v = await bobHandle.doc()
- assert.equal(v.foo, "bar")
+ assert.equal(v?.foo, "bar")
  })

  it("can delete an existing document", async () => {
@@ -173,8 +208,8 @@ describe("Repo", () => {
  })
  assert.equal(handle.isReady(), true)

- repo.on("delete-document", ({ encodedDocumentId }) => {
- assert.equal(encodedDocumentId, handle.documentId)
+ repo.on("delete-document", ({ documentId }) => {
+ assert.equal(documentId, handle.documentId)

  done()
  })
@@ -242,7 +277,24 @@ describe("Repo", () => {
  })

  describe("sync", async () => {
- const setup = async () => {
+ const charlieExcludedDocuments: DocumentId[] = []
+ const bobExcludedDocuments: DocumentId[] = []
+
+ const sharePolicy: SharePolicy = async (peerId, documentId) => {
+ if (documentId === undefined) return false
+
+ // make sure that charlie never gets excluded documents
+ if (charlieExcludedDocuments.includes(documentId) && peerId === "charlie")
+ return false
+
+ // make sure that bob never gets excluded documents
+ if (bobExcludedDocuments.includes(documentId) && peerId === "bob")
+ return false
+
+ return true
+ }
+
+ const setupRepos = (connectAlice = true) => {
  // Set up three repos; connect Alice to Bob, and Bob to Charlie

  const aliceBobChannel = new MessageChannel()
@@ -251,28 +303,10 @@ describe("Repo", () => {
  const { port1: aliceToBob, port2: bobToAlice } = aliceBobChannel
  const { port1: bobToCharlie, port2: charlieToBob } = bobCharlieChannel

- const charlieExcludedDocuments: DocumentId[] = []
- const bobExcludedDocuments: DocumentId[] = []
-
- const sharePolicy: SharePolicy = async (peerId, documentId) => {
- if (documentId === undefined) return false
-
- // make sure that charlie never gets excluded documents
- if (
- charlieExcludedDocuments.includes(documentId) &&
- peerId === "charlie"
- )
- return false
-
- // make sure that charlie never gets excluded documents
- if (bobExcludedDocuments.includes(documentId) && peerId === "bob")
- return false
-
- return true
- }
+ const aliceNetworkAdapter = new MessageChannelNetworkAdapter(aliceToBob)

  const aliceRepo = new Repo({
- network: [new MessageChannelNetworkAdapter(aliceToBob)],
+ network: connectAlice ? [aliceNetworkAdapter] : [],
  peerId: "alice" as PeerId,
  sharePolicy,
  })
@@ -291,6 +325,33 @@ describe("Repo", () => {
  peerId: "charlie" as PeerId,
  })

+ const teardown = () => {
+ aliceBobChannel.port1.close()
+ bobCharlieChannel.port1.close()
+ }
+
+ function doConnectAlice() {
+ aliceRepo.networkSubsystem.addNetworkAdapter(new MessageChannelNetworkAdapter(aliceToBob))
+ //bobRepo.networkSubsystem.addNetworkAdapter(new MessageChannelNetworkAdapter(bobToAlice))
+ }
+
+ if (connectAlice) {
+ doConnectAlice()
+ }
+
+ return {
+ teardown,
+ aliceRepo,
+ bobRepo,
+ charlieRepo,
+ connectAliceToBob: doConnectAlice,
+ }
+ }
+
+ const setup = async (connectAlice = true) => {
+ const { teardown, aliceRepo, bobRepo, charlieRepo, connectAliceToBob } =
+ setupRepos(connectAlice)
+
  const aliceHandle = aliceRepo.create<TestDoc>()
  aliceHandle.change(d => {
  d.foo = "bar"
@@ -311,16 +372,13 @@ describe("Repo", () => {
  })

  await Promise.all([
- eventPromise(aliceRepo.networkSubsystem, "peer"),
+ ...(connectAlice
+ ? [eventPromise(aliceRepo.networkSubsystem, "peer")]
+ : []),
  eventPromise(bobRepo.networkSubsystem, "peer"),
  eventPromise(charlieRepo.networkSubsystem, "peer"),
  ])

- const teardown = () => {
- aliceBobChannel.port1.close()
- bobCharlieChannel.port1.close()
- }
-
  return {
  aliceRepo,
  bobRepo,
@@ -329,6 +387,7 @@ describe("Repo", () => {
  notForCharlie,
  notForBob,
  teardown,
+ connectAliceToBob,
  }
  }

@@ -395,13 +454,59 @@ describe("Repo", () => {

  it("doesn't find a document which doesn't exist anywhere on the network", async () => {
  const { charlieRepo } = await setup()
- const handle = charlieRepo.find<TestDoc>(generateAutomergeUrl())
+ const url = generateAutomergeUrl()
+ const handle = charlieRepo.find<TestDoc>(url)
  assert.equal(handle.isReady(), false)

- return assert.rejects(
- rejectOnTimeout(handle.doc(), 10),
- "This document should not exist"
- )
+ const doc = await handle.doc()
+ assert.equal(doc, undefined)
+ })
+
+ it("fires an 'unavailable' event when a document is not available on the network", async () => {
+ const { charlieRepo } = await setup()
+ const url = generateAutomergeUrl()
+ const handle = charlieRepo.find<TestDoc>(url)
+ assert.equal(handle.isReady(), false)
+
+ await Promise.all([
+ eventPromise(handle, "unavailable"),
+ eventPromise(charlieRepo, "unavailable-document"),
+ ])
+
+ // make sure it fires a second time if the doc is still unavailable
+ const handle2 = charlieRepo.find<TestDoc>(url)
+ assert.equal(handle2.isReady(), false)
+ await eventPromise(handle2, "unavailable")
+ })
+
+ it("a previously unavailable document syncs over the network if a peer with it connects", async () => {
+ const {
+ charlieRepo,
+ notForCharlie,
+ aliceRepo,
+ teardown,
+ connectAliceToBob,
+ } = await setup(false)
+
+ const url = stringifyAutomergeUrl({ documentId: notForCharlie })
+ const handle = charlieRepo.find<TestDoc>(url)
+ assert.equal(handle.isReady(), false)
+
+ await eventPromise(handle, "unavailable")
+
+ connectAliceToBob()
+
+ await eventPromise(aliceRepo.networkSubsystem, "peer")
+
+ const doc = await handle.doc([READY])
+ assert.deepStrictEqual(doc, { foo: "baz" })
+
+ // an additional find should also return the correct resolved document
+ const handle2 = charlieRepo.find<TestDoc>(url)
+ const doc2 = await handle2.doc()
+ assert.deepStrictEqual(doc2, { foo: "baz" })
+
+ teardown()
  })

  it("a deleted document from charlieRepo can be refetched", async () => {
@@ -426,17 +531,40 @@ describe("Repo", () => {
  teardown()
  })

- it("can broadcast a message", async () => {
- const { aliceRepo, bobRepo, teardown } = await setup()
+ const setupMeshNetwork = async () => {
+ const aliceRepo = new Repo({
+ network: [new BroadcastChannelNetworkAdapter()],
+ peerId: "alice" as PeerId,
+ })

- const channelId = "broadcast" as ChannelId
- const data = { presence: "bob" }
+ const bobRepo = new Repo({
+ network: [new BroadcastChannelNetworkAdapter()],
+ peerId: "bob" as PeerId,
+ })

- bobRepo.ephemeralData.broadcast(channelId, data)
- const d = await eventPromise(aliceRepo.ephemeralData, "data")
+ const charlieRepo = new Repo({
+ network: [new BroadcastChannelNetworkAdapter()],
+ peerId: "charlie" as PeerId,
+ })

- assert.deepStrictEqual(d.data, data)
- teardown()
+ // pause to let the network set up
+ await pause(50)
+
+ return {
+ aliceRepo,
+ bobRepo,
+ charlieRepo,
+ }
+ }
+
+ it("can emit an 'unavailable' event when it's not found on the network", async () => {
+ const { charlieRepo } = await setupMeshNetwork()
+
+ const url = generateAutomergeUrl()
+ const handle = charlieRepo.find<TestDoc>(url)
+ assert.equal(handle.isReady(), false)
+
+ await eventPromise(handle, "unavailable")
  })

  it("syncs a bunch of changes", async () => {
@@ -452,9 +580,9 @@ describe("Repo", () => {
  const doc =
  Math.random() < 0.5
  ? // heads, create a new doc
- repo.create<TestDoc>()
+ repo.create<TestDoc>()
  : // tails, pick a random doc
- (getRandomItem(docs) as DocHandle<TestDoc>)
+ (getRandomItem(docs) as DocHandle<TestDoc>)

  // make sure the doc is ready
  if (!doc.isReady()) {
@@ -470,5 +598,81 @@ describe("Repo", () => {

  teardown()
  })
+
+ it("can broadcast a message to peers with the correct document only", async () => {
+ const { aliceRepo, bobRepo, charlieRepo, notForCharlie, teardown } =
+ await setup()
+
+ const data = { presence: "alice" }
+
+ const aliceHandle = aliceRepo.find<TestDoc>(
+ stringifyAutomergeUrl({ documentId: notForCharlie })
+ )
+ const bobHandle = bobRepo.find<TestDoc>(
+ stringifyAutomergeUrl({ documentId: notForCharlie })
+ )
+
+ await pause(50)
+
+ const charliePromise = new Promise<void>((resolve, reject) => {
+ charlieRepo.networkSubsystem.on("message", message => {
+ if (
+ message.type === "ephemeral" &&
+ message.documentId === notForCharlie
+ ) {
+ reject(new Error("Charlie should not receive this message"))
+ }
+ })
+ setTimeout(resolve, 100)
+ })
+
+ aliceHandle.broadcast(data)
+ const { message } = await eventPromise(bobHandle, "ephemeral-message")
+
+ assert.deepStrictEqual(message, data)
+ assert.equal(charlieRepo.handles[notForCharlie], undefined, "charlie no")
+
+ await charliePromise
+ teardown()
+ })
+
+ it("can broadcast a message without entering into an infinite loop", async () => {
+ const { aliceRepo, bobRepo, charlieRepo } = await setupMeshNetwork()
+
+ // pause to let the network set up
+ await pause(50)
+ const message = { presence: "alex" }
+
+ const aliceHandle = aliceRepo.create<TestDoc>()
+
+ const bobHandle = bobRepo.find(aliceHandle.url)
+ const charlieHandle = charlieRepo.find(aliceHandle.url)
+
+ const aliceDoesntGetIt = new Promise<void>((resolve, reject) => {
+ setTimeout(() => {
+ resolve()
+ }, 100)
+
+ aliceHandle.on("ephemeral-message", () => {
+ reject("alice got the message")
+ })
+ })
+
+ const bobGotIt = eventPromise(bobHandle, "ephemeral-message")
+ const charlieGotIt = eventPromise(charlieHandle, "ephemeral-message")
+
+ // let things get in sync and peers meet one another
+ await pause(50)
+ aliceHandle.broadcast(message)
+
+ const [bob, charlie] = await Promise.all([
+ bobGotIt,
+ charlieGotIt,
+ aliceDoesntGetIt,
+ ])
+
+ assert.deepStrictEqual(bob.message, message)
+ assert.deepStrictEqual(charlie.message, message)
+ })
  })
  })
package/test/StorageSubsystem.test.ts CHANGED
@@ -4,13 +4,12 @@ import path from "path"

  import assert from "assert"

- import A from "@automerge/automerge"
+ import * as A from "@automerge/automerge/next"

  import { DummyStorageAdapter } from "./helpers/DummyStorageAdapter.js"
  import { NodeFSStorageAdapter } from "@automerge/automerge-repo-storage-nodefs"

- import { StorageSubsystem } from "../src"
- import { TestDoc } from "./types.js"
+ import { StorageSubsystem } from "../src/index.js"
  import { generateAutomergeUrl, parseAutomergeUrl } from "../src/DocUrl.js"

  const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "automerge-repo-tests"))
@@ -31,7 +30,7 @@ describe("StorageSubsystem", () => {
  })

  // save it to storage
- const key = parseAutomergeUrl(generateAutomergeUrl()).encodedDocumentId
+ const key = parseAutomergeUrl(generateAutomergeUrl()).documentId
  await storage.saveDoc(key, doc)

  // reload it from storage
@@ -52,7 +51,7 @@ describe("StorageSubsystem", () => {
  })

  // save it to storage
- const key = parseAutomergeUrl(generateAutomergeUrl()).encodedDocumentId
+ const key = parseAutomergeUrl(generateAutomergeUrl()).documentId
  storage.saveDoc(key, doc)

  // create new storage subsystem to simulate a new process
package/test/helpers/DummyNetworkAdapter.ts CHANGED
@@ -1,8 +1,17 @@
- import { NetworkAdapter } from "../../src"
+ import { NetworkAdapter } from "../../src/index.js"

  export class DummyNetworkAdapter extends NetworkAdapter {
- sendMessage() {}
- connect(_: string) {}
+ #startReady = true
+ constructor(startReady: boolean) {
+ super()
+ this.#startReady = startReady
+ }
+ send() {}
+ connect(_: string) {
+ if (this.#startReady) {
+ this.emit("ready", { network: this })
+ }
+ }
  join() {}
  leave() {}
  }
package/test/helpers/DummyStorageAdapter.ts CHANGED
@@ -1,4 +1,4 @@
- import { StorageAdapter, type StorageKey } from "../../src"
+ import { StorageAdapter, type StorageKey } from "../../src/index.js"

  export class DummyStorageAdapter implements StorageAdapter {
  #data: Record<string, Uint8Array> = {}
package/tsconfig.json CHANGED
@@ -2,8 +2,8 @@
  "compilerOptions": {
  "target": "ESNext",
  "jsx": "react",
- "module": "ESNext",
- "moduleResolution": "node",
+ "module": "NodeNext",
+ "moduleResolution": "Node16",
  "declaration": true,
  "declarationMap": true,
  "outDir": "./dist",
@@ -12,5 +12,6 @@
  "strict": true,
  "skipLibCheck": true
  },
- "include": ["src/**/*.ts"]
+ "include": ["src/**/*.ts"],
+ "exclude": ["dist"]
  }
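
The move to NodeNext/Node16 module resolution is what drives the import churn in the tests above: with these settings, relative ESM imports must name a concrete file with its .js extension instead of a bare directory. The pattern repeated throughout this diff, shown in isolation:

// Before, under "moduleResolution": "node":
// import { StorageSubsystem } from "../src"

// After, under "module": "NodeNext" / "moduleResolution": "Node16":
import { StorageSubsystem } from "../src/index.js"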