@automerge/automerge-repo 1.0.7 → 1.0.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54)
  1. package/dist/DocHandle.d.ts +1 -1
  2. package/dist/DocHandle.d.ts.map +1 -1
  3. package/dist/DocHandle.js +2 -2
  4. package/dist/DocUrl.d.ts.map +1 -1
  5. package/dist/Repo.d.ts +6 -3
  6. package/dist/Repo.d.ts.map +1 -1
  7. package/dist/Repo.js +13 -8
  8. package/dist/helpers/debounce.d.ts +14 -0
  9. package/dist/helpers/debounce.d.ts.map +1 -0
  10. package/dist/helpers/debounce.js +21 -0
  11. package/dist/helpers/throttle.d.ts +28 -0
  12. package/dist/helpers/throttle.d.ts.map +1 -0
  13. package/dist/helpers/throttle.js +39 -0
  14. package/dist/index.d.ts +3 -3
  15. package/dist/index.d.ts.map +1 -1
  16. package/dist/index.js +3 -3
  17. package/dist/network/NetworkSubsystem.d.ts +1 -1
  18. package/dist/network/NetworkSubsystem.d.ts.map +1 -1
  19. package/dist/storage/StorageSubsystem.d.ts +1 -1
  20. package/dist/storage/StorageSubsystem.d.ts.map +1 -1
  21. package/dist/storage/StorageSubsystem.js +2 -2
  22. package/dist/synchronizer/CollectionSynchronizer.d.ts +1 -1
  23. package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
  24. package/dist/synchronizer/CollectionSynchronizer.js +1 -1
  25. package/dist/synchronizer/DocSynchronizer.d.ts +1 -0
  26. package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -1
  27. package/dist/synchronizer/DocSynchronizer.js +3 -5
  28. package/dist/synchronizer/Synchronizer.d.ts +1 -1
  29. package/dist/synchronizer/Synchronizer.d.ts.map +1 -1
  30. package/fuzz/fuzz.ts +8 -6
  31. package/fuzz/tsconfig.json +8 -0
  32. package/package.json +2 -2
  33. package/src/DocHandle.ts +3 -4
  34. package/src/DocUrl.ts +3 -1
  35. package/src/Repo.ts +27 -16
  36. package/src/helpers/debounce.ts +25 -0
  37. package/src/helpers/headsAreSame.ts +1 -1
  38. package/src/helpers/throttle.ts +43 -0
  39. package/src/index.ts +3 -3
  40. package/src/network/NetworkSubsystem.ts +1 -1
  41. package/src/storage/StorageSubsystem.ts +4 -4
  42. package/src/synchronizer/CollectionSynchronizer.ts +2 -2
  43. package/src/synchronizer/DocSynchronizer.ts +10 -7
  44. package/src/synchronizer/Synchronizer.ts +1 -1
  45. package/test/CollectionSynchronizer.test.ts +1 -1
  46. package/test/DocHandle.test.ts +1 -1
  47. package/test/DocSynchronizer.test.ts +0 -4
  48. package/test/Repo.test.ts +115 -124
  49. package/test/helpers/DummyNetworkAdapter.ts +9 -4
  50. package/test/helpers/DummyStorageAdapter.ts +4 -2
  51. package/dist/EphemeralData.d.ts +0 -20
  52. package/dist/EphemeralData.d.ts.map +0 -1
  53. package/dist/EphemeralData.js +0 -1
  54. package/src/EphemeralData.ts +0 -17
package/src/helpers/throttle.ts ADDED
@@ -0,0 +1,43 @@
+ /** Throttle
+  * Returns a function with a built in throttle timer that runs after `delay` ms.
+  *
+  * This function differs from a conventional `throttle` in that it ensures the final
+  * call will also execute and delays sending the first one until `delay` ms to allow
+  * additional work to accumulate.
+  *
+  * Here's a diagram:
+  *
+  * calls +----++++++-----++----
+  * dlay  ^--v ^--v^--v   ^--v
+  * execs ---+----+---+------+--
+  *
+  * The goal in this design is to create batches of changes without flooding
+  * communication or storage systems while still feeling responsive.
+  * (By default we communicate at 10hz / every 100ms.)
+  *
+  * Note that the args go inside the parameter and you should be careful not to
+  * recreate the function on each usage. (In React, see useMemo().)
+  *
+  *
+  * Example usage:
+  * const callback = debounce((ev) => { doSomethingExpensiveOrOccasional() }, 100)
+  * target.addEventListener('frequent-event', callback);
+  *
+  */
+
+ export const throttle = <F extends (...args: Parameters<F>) => ReturnType<F>>(
+   fn: F,
+   delay: number
+ ) => {
+   let lastCall = Date.now()
+   let wait
+   let timeout: ReturnType<typeof setTimeout>
+   return function (...args: Parameters<F>) {
+     wait = lastCall + delay - Date.now()
+     clearTimeout(timeout)
+     timeout = setTimeout(() => {
+       fn.apply(null, args)
+       lastCall = Date.now()
+     }, wait)
+   }
+ }
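For orientation, here is a minimal usage sketch of the helper added above. The import path and the persist callback are illustrative only; throttle lives under src/helpers and is not among the exports shown in the index.ts diff below.

  // Sketch only: assumes a local copy of the throttle helper shown in the diff above.
  import { throttle } from "./helpers/throttle.js"

  // Coalesce bursts of calls into at most one execution per 100 ms; the trailing
  // call always runs, carrying the arguments of the most recent invocation.
  const persist = throttle((doc: unknown) => {
    console.log("persisting", doc)
  }, 100)

  persist({ foo: 1 })
  persist({ foo: 2 })
  persist({ foo: 3 }) // only this call reaches the wrapped function, ~100 ms later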
package/src/index.ts CHANGED
@@ -26,16 +26,16 @@
   * ```
   */

- export { Repo } from "./Repo.js"
  export { DocHandle } from "./DocHandle.js"
- export { NetworkAdapter } from "./network/NetworkAdapter.js"
- export { StorageAdapter } from "./storage/StorageAdapter.js"
  export {
    isValidAutomergeUrl,
    parseAutomergeUrl,
    stringifyAutomergeUrl,
  } from "./DocUrl.js"
+ export { Repo } from "./Repo.js"
+ export { NetworkAdapter } from "./network/NetworkAdapter.js"
  export { isValidRepoMessage } from "./network/messages.js"
+ export { StorageAdapter } from "./storage/StorageAdapter.js"

  /** @hidden **/
  export * as cbor from "./helpers/cbor.js"
package/src/network/NetworkSubsystem.ts CHANGED
@@ -4,8 +4,8 @@ import { PeerId, SessionId } from "../types.js"
  import { NetworkAdapter, PeerDisconnectedPayload } from "./NetworkAdapter.js"
  import {
    EphemeralMessage,
-   RepoMessage,
    MessageContents,
+   RepoMessage,
    isEphemeralMessage,
    isValidRepoMessage,
  } from "./messages.js"
package/src/storage/StorageSubsystem.ts CHANGED
@@ -1,10 +1,10 @@
  import * as A from "@automerge/automerge/next"
- import { StorageAdapter, StorageKey } from "./StorageAdapter.js"
- import * as sha256 from "fast-sha256"
- import { type DocumentId } from "../types.js"
- import { mergeArrays } from "../helpers/mergeArrays.js"
  import debug from "debug"
+ import * as sha256 from "fast-sha256"
  import { headsAreSame } from "../helpers/headsAreSame.js"
+ import { mergeArrays } from "../helpers/mergeArrays.js"
+ import { type DocumentId } from "../types.js"
+ import { StorageAdapter, StorageKey } from "./StorageAdapter.js"

  // Metadata about a chunk of data loaded from storage. This is stored on the
  // StorageSubsystem so when we are compacting we know what chunks we can safely delete
package/src/synchronizer/CollectionSynchronizer.ts CHANGED
@@ -1,12 +1,12 @@
+ import debug from "debug"
  import { DocHandle } from "../DocHandle.js"
  import { stringifyAutomergeUrl } from "../DocUrl.js"
  import { Repo } from "../Repo.js"
+ import { RepoMessage } from "../network/messages.js"
  import { DocumentId, PeerId } from "../types.js"
  import { DocSynchronizer } from "./DocSynchronizer.js"
  import { Synchronizer } from "./Synchronizer.js"

- import debug from "debug"
- import { RepoMessage } from "../network/messages.js"
  const log = debug("automerge-repo:collectionsync")

  /** A CollectionSynchronizer is responsible for synchronizing a DocCollection with peers. */
package/src/synchronizer/DocSynchronizer.ts CHANGED
@@ -11,14 +11,15 @@ import {
  import {
    DocumentUnavailableMessage,
    EphemeralMessage,
-   RepoMessage,
    MessageContents,
+   RepoMessage,
    RequestMessage,
    SyncMessage,
    isRequestMessage,
  } from "../network/messages.js"
  import { PeerId } from "../types.js"
  import { Synchronizer } from "./Synchronizer.js"
+ import { throttle } from "../helpers/throttle.js"

  type PeerDocumentStatus = "unknown" | "has" | "unavailable" | "wants"

@@ -28,8 +29,7 @@ type PeerDocumentStatus = "unknown" | "has" | "unavailable" | "wants"
   */
  export class DocSynchronizer extends Synchronizer {
    #log: debug.Debugger
-   #conciseLog: debug.Debugger
-   #opsLog: debug.Debugger
+   syncDebounceRate = 100

    /** Active peers */
    #peers: PeerId[] = []
@@ -46,11 +46,12 @@ export class DocSynchronizer extends Synchronizer {
    constructor(private handle: DocHandle<unknown>) {
      super()
      const docId = handle.documentId.slice(0, 5)
-     this.#conciseLog = debug(`automerge-repo:concise:docsync:${docId}`) // Only logs one line per receive/send
      this.#log = debug(`automerge-repo:docsync:${docId}`)
-     this.#opsLog = debug(`automerge-repo:ops:docsync:${docId}`) // Log list of ops of each message

-     handle.on("change", () => this.#syncWithPeers())
+     handle.on(
+       "change",
+       throttle(() => this.#syncWithPeers(), this.syncDebounceRate)
+     )

      handle.on("ephemeral-message-outbound", payload =>
        this.#broadcastToPeers(payload)
@@ -168,7 +169,9 @@ export class DocSynchronizer extends Synchronizer {
    }

    beginSync(peerIds: PeerId[]) {
-     const newPeers = new Set(peerIds.filter(peerId => !this.#peers.includes(peerId)))
+     const newPeers = new Set(
+       peerIds.filter(peerId => !this.#peers.includes(peerId))
+     )
      this.#log(`beginSync: ${peerIds.join(", ")}`)

      // HACK: if we have a sync state already, we round-trip it through the encoding system to make
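The constructor change above is the consumer of the new throttle helper: a burst of local "change" events now produces at most one sync pass per syncDebounceRate ms (100 by default) instead of one per change. A rough sketch of the pattern in isolation, using an EventEmitter stand-in rather than the real DocHandle:

  import { EventEmitter } from "eventemitter3"
  import { throttle } from "./helpers/throttle.js" // path as in the diff above

  // Stand-in for a DocHandle that emits "change" on every local edit.
  const handle = new EventEmitter<{ change: () => void }>()

  let syncPasses = 0
  const syncWithPeers = () => {
    syncPasses++ // the real DocSynchronizer generates and sends sync messages here
  }

  // As in the DocSynchronizer constructor: the listener itself is throttled, so
  // rapid changes collapse into a single trailing sync pass ~100 ms later.
  handle.on("change", throttle(syncWithPeers, 100))

  for (let i = 0; i < 50; i++) handle.emit("change")
  setTimeout(() => console.log(syncPasses), 200) // logs 1, not 50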
package/src/synchronizer/Synchronizer.ts CHANGED
@@ -1,5 +1,5 @@
  import { EventEmitter } from "eventemitter3"
- import { RepoMessage, MessageContents } from "../network/messages.js"
+ import { MessageContents, RepoMessage } from "../network/messages.js"

  export abstract class Synchronizer extends EventEmitter<SynchronizerEvents> {
    abstract receiveMessage(message: RepoMessage): void
package/test/CollectionSynchronizer.test.ts CHANGED
@@ -1,5 +1,5 @@
  import assert from "assert"
- import { describe, it, beforeEach } from "vitest"
+ import { beforeEach, describe, it } from "vitest"
  import { PeerId, Repo } from "../src/index.js"
  import { CollectionSynchronizer } from "../src/synchronizer/CollectionSynchronizer.js"

package/test/DocHandle.test.ts CHANGED
@@ -1,6 +1,6 @@
  import * as A from "@automerge/automerge/next"
- import { decode } from "cbor-x"
  import assert from "assert"
+ import { decode } from "cbor-x"
  import { describe, it } from "vitest"
  import { generateAutomergeUrl, parseAutomergeUrl } from "../src/DocUrl.js"
  import { eventPromise } from "../src/helpers/eventPromise.js"
package/test/DocSynchronizer.test.ts CHANGED
@@ -2,7 +2,6 @@ import assert from "assert"
  import { describe, it } from "vitest"
  import { DocHandle } from "../src/DocHandle.js"
  import { generateAutomergeUrl, parseAutomergeUrl } from "../src/DocUrl.js"
- import { generateAutomergeUrl, parseAutomergeUrl } from "../src/DocUrl.js"
  import { eventPromise } from "../src/helpers/eventPromise.js"
  import {
    DocumentUnavailableMessage,
@@ -11,9 +10,6 @@ import {
  import { DocSynchronizer } from "../src/synchronizer/DocSynchronizer.js"
  import { PeerId } from "../src/types.js"
  import { TestDoc } from "./types.js"
- import { DocSynchronizer } from "../src/synchronizer/DocSynchronizer.js"
- import { PeerId } from "../src/types.js"
- import { TestDoc } from "./types.js"

  const alice = "alice" as PeerId
  const bob = "bob" as PeerId
package/test/Repo.test.ts CHANGED
@@ -25,10 +25,10 @@ import { getRandomItem } from "./helpers/getRandomItem.js"
  import { TestDoc } from "./types.js"

  describe("Repo", () => {
-   describe("single repo", () => {
-     const setup = (networkReady = true) => {
+   describe("local only", () => {
+     const setup = ({ startReady = true } = {}) => {
        const storageAdapter = new DummyStorageAdapter()
-       const networkAdapter = new DummyNetworkAdapter(networkReady)
+       const networkAdapter = new DummyNetworkAdapter({ startReady })

        const repo = new Repo({
          storage: storageAdapter,
@@ -182,7 +182,7 @@ describe("Repo", () => {
      })

      it("doesn't mark a document as unavailable until network adapters are ready", async () => {
-       const { repo, networkAdapter } = setup(false)
+       const { repo, networkAdapter } = setup({ startReady: false })
        const url = generateAutomergeUrl()
        const handle = repo.find<TestDoc>(url)

@@ -238,7 +238,7 @@ describe("Repo", () => {

        assert.equal(handle.isReady(), true)

-       await pause()
+       await pause(150)

        const repo2 = new Repo({
          storage: storageAdapter,
@@ -377,25 +377,28 @@ describe("Repo", () => {
      })
    })

-   describe("sync", async () => {
-     const charlieExcludedDocuments: DocumentId[] = []
-     const bobExcludedDocuments: DocumentId[] = []
+   describe("with peers (linear network)", async () => {
+     const setup = async ({ connectAlice = true } = {}) => {
+       const charlieExcludedDocuments: DocumentId[] = []
+       const bobExcludedDocuments: DocumentId[] = []

-     const sharePolicy: SharePolicy = async (peerId, documentId) => {
-       if (documentId === undefined) return false
+       const sharePolicy: SharePolicy = async (peerId, documentId) => {
+         if (documentId === undefined) return false

-       // make sure that charlie never gets excluded documents
-       if (charlieExcludedDocuments.includes(documentId) && peerId === "charlie")
-         return false
+         // make sure that charlie never gets excluded documents
+         if (
+           charlieExcludedDocuments.includes(documentId) &&
+           peerId === "charlie"
+         )
+           return false

-       // make sure that bob never gets excluded documents
-       if (bobExcludedDocuments.includes(documentId) && peerId === "bob")
-         return false
+         // make sure that bob never gets excluded documents
+         if (bobExcludedDocuments.includes(documentId) && peerId === "bob")
+           return false

-       return true
-     }
+         return true
+       }

-     const setupLinearNetwork = (connectAlice = true) => {
        // Set up three repos; connect Alice to Bob, and Bob to Charlie

        const abChannel = new MessageChannel()
@@ -431,81 +434,16 @@ describe("Repo", () => {
          bcChannel.port1.close()
        }

-       function doConnectAlice() {
+       function connectAliceToBob() {
          aliceRepo.networkSubsystem.addNetworkAdapter(
            new MessageChannelNetworkAdapter(ab)
          )
-         //bobRepo.networkSubsystem.addNetworkAdapter(new MessageChannelNetworkAdapter(bobToAlice))
        }

        if (connectAlice) {
-         doConnectAlice()
-       }
-
-       return {
-         teardown,
-         aliceRepo,
-         bobRepo,
-         charlieRepo,
-         connectAliceToBob: doConnectAlice,
-       }
-     }
-
-     const setupMeshNetwork = () => {
-       // Set up three repos; connect Alice to Bob, Bob to Charlie, and Alice to Charlie
-
-       const abChannel = new MessageChannel()
-       const bcChannel = new MessageChannel()
-       const acChannel = new MessageChannel()
-
-       const { port1: ab, port2: ba } = abChannel
-       const { port1: bc, port2: cb } = bcChannel
-       const { port1: ac, port2: ca } = acChannel
-
-       const aliceRepo = new Repo({
-         network: [
-           new MessageChannelNetworkAdapter(ab),
-           new MessageChannelNetworkAdapter(ac),
-         ],
-         peerId: "alice" as PeerId,
-         sharePolicy,
-       })
-
-       const bobRepo = new Repo({
-         network: [
-           new MessageChannelNetworkAdapter(ba),
-           new MessageChannelNetworkAdapter(bc),
-         ],
-         peerId: "bob" as PeerId,
-         sharePolicy,
-       })
-
-       const charlieRepo = new Repo({
-         network: [
-           new MessageChannelNetworkAdapter(ca),
-           new MessageChannelNetworkAdapter(cb),
-         ],
-         peerId: "charlie" as PeerId,
-       })
-
-       const teardown = () => {
-         abChannel.port1.close()
-         bcChannel.port1.close()
-         acChannel.port1.close()
+         connectAliceToBob()
        }

-       return {
-         teardown,
-         aliceRepo,
-         bobRepo,
-         charlieRepo,
-       }
-     }
-
-     const setup = async (connectAlice = true) => {
-       const { teardown, aliceRepo, bobRepo, charlieRepo, connectAliceToBob } =
-         setupLinearNetwork(connectAlice)
-
        const aliceHandle = aliceRepo.create<TestDoc>()
        aliceHandle.change(d => {
          d.foo = "bar"
@@ -607,17 +545,19 @@ describe("Repo", () => {
      })

      it("doesn't find a document which doesn't exist anywhere on the network", async () => {
-       const { charlieRepo } = await setup()
+       const { charlieRepo, teardown } = await setup()
        const url = generateAutomergeUrl()
        const handle = charlieRepo.find<TestDoc>(url)
        assert.equal(handle.isReady(), false)

        const doc = await handle.doc()
        assert.equal(doc, undefined)
+
+       teardown()
      })

      it("fires an 'unavailable' event when a document is not available on the network", async () => {
-       const { charlieRepo } = await setup()
+       const { charlieRepo, teardown } = await setup()
        const url = generateAutomergeUrl()
        const handle = charlieRepo.find<TestDoc>(url)
        assert.equal(handle.isReady(), false)
@@ -631,6 +571,8 @@ describe("Repo", () => {
        const handle2 = charlieRepo.find<TestDoc>(url)
        assert.equal(handle2.isReady(), false)
        await eventPromise(handle2, "unavailable")
+
+       teardown()
      })

      it("a previously unavailable document syncs over the network if a peer with it connects", async () => {
@@ -640,7 +582,7 @@ describe("Repo", () => {
          aliceRepo,
          teardown,
          connectAliceToBob,
-       } = await setup(false)
+       } = await setup({ connectAlice: false })

        const url = stringifyAutomergeUrl({ documentId: notForCharlie })
        const handle = charlieRepo.find<TestDoc>(url)
@@ -690,7 +632,7 @@ describe("Repo", () => {
        const a = new Repo({
          network: [new MessageChannelNetworkAdapter(ab)],
          peerId: "a" as PeerId,
-         sharePolicy: async () => true
+         sharePolicy: async () => true,
        })

        const handle = a.find(url)
@@ -710,7 +652,6 @@ describe("Repo", () => {
        // The empty repo should be notified of the new peer, send it a request
        // and eventually resolve the handle to "READY"
        await handle.whenReady()
-
      })

      it("a deleted document from charlieRepo can be refetched", async () => {
@@ -736,13 +677,14 @@ describe("Repo", () => {
      })

      it("can emit an 'unavailable' event when it's not found on the network", async () => {
-       const { charlieRepo } = setupMeshNetwork()
+       const { charlieRepo, teardown } = await setup()

        const url = generateAutomergeUrl()
        const handle = charlieRepo.find<TestDoc>(url)
        assert.equal(handle.isReady(), false)

        await eventPromise(handle, "unavailable")
+       teardown()
      })

      it("syncs a bunch of changes", async () => {
@@ -772,7 +714,6 @@ describe("Repo", () => {
            d.foo = Math.random().toString()
          })
        }
-       await pause(500)

        teardown()
      })
@@ -790,8 +731,6 @@ describe("Repo", () => {
          stringifyAutomergeUrl({ documentId: notForCharlie })
        )

-       await pause(50)
-
        const charliePromise = new Promise<void>((resolve, reject) => {
          charlieRepo.networkSubsystem.on("message", message => {
            if (
@@ -813,49 +752,98 @@ describe("Repo", () => {
        await charliePromise
        teardown()
      })
+   })

-     it("can broadcast a message without entering into an infinite loop", async () => {
-       const { aliceRepo, bobRepo, charlieRepo } = setupMeshNetwork()
+   describe("with peers (mesh network)", () => {
+     const setup = async () => {
+       // Set up three repos; connect Alice to Bob, Bob to Charlie, and Alice to Charlie
+
+       const abChannel = new MessageChannel()
+       const bcChannel = new MessageChannel()
+       const acChannel = new MessageChannel()
+
+       const { port1: ab, port2: ba } = abChannel
+       const { port1: bc, port2: cb } = bcChannel
+       const { port1: ac, port2: ca } = acChannel
+
+       const aliceRepo = new Repo({
+         network: [
+           new MessageChannelNetworkAdapter(ab),
+           new MessageChannelNetworkAdapter(ac),
+         ],
+         peerId: "alice" as PeerId,
+       })

-       const message = { presence: "alex" }
+       const bobRepo = new Repo({
+         network: [
+           new MessageChannelNetworkAdapter(ba),
+           new MessageChannelNetworkAdapter(bc),
+         ],
+         peerId: "bob" as PeerId,
+       })
+
+       const charlieRepo = new Repo({
+         network: [
+           new MessageChannelNetworkAdapter(ca),
+           new MessageChannelNetworkAdapter(cb),
+         ],
+         peerId: "charlie" as PeerId,
+       })
+
+       const teardown = () => {
+         abChannel.port1.close()
+         bcChannel.port1.close()
+         acChannel.port1.close()
+       }
+
+       await Promise.all([
+         eventPromise(aliceRepo.networkSubsystem, "peer"),
+         eventPromise(bobRepo.networkSubsystem, "peer"),
+         eventPromise(charlieRepo.networkSubsystem, "peer"),
+       ])
+
+       return {
+         teardown,
+         aliceRepo,
+         bobRepo,
+         charlieRepo,
+       }
+     }
+
+     it("can broadcast a message without entering into an infinite loop", async () => {
+       const { aliceRepo, bobRepo, charlieRepo, teardown } = await setup()

        const aliceHandle = aliceRepo.create<TestDoc>()

        const bobHandle = bobRepo.find(aliceHandle.url)
        const charlieHandle = charlieRepo.find(aliceHandle.url)

-       const aliceDoesntGetIt = new Promise<void>((resolve, reject) => {
-         setTimeout(() => {
-           resolve()
-         }, 100)
-
-         aliceHandle.on("ephemeral-message", () => {
-           reject(new Error("alice got the message"))
-         })
+       // Alice should not receive her own ephemeral message
+       aliceHandle.on("ephemeral-message", () => {
+         throw new Error("Alice should not receive her own ephemeral message")
        })

+       // Bob and Charlie should receive Alice's ephemeral message
        const bobGotIt = eventPromise(bobHandle, "ephemeral-message")
        const charlieGotIt = eventPromise(charlieHandle, "ephemeral-message")

-       // let things get in sync and peers meet one another
+       // let peers meet and sync up
        await pause(50)
+
+       // Alice sends an ephemeral message
+       const message = { foo: "bar" }
        aliceHandle.broadcast(message)

-       const [bob, charlie] = await Promise.all([
-         bobGotIt,
-         charlieGotIt,
-         aliceDoesntGetIt,
-       ])
+       const [bob, charlie] = await Promise.all([bobGotIt, charlieGotIt])

        assert.deepStrictEqual(bob.message, message)
        assert.deepStrictEqual(charlie.message, message)
+
+       teardown()
      })

      it("notifies peers when a document is cloned", async () => {
-       const { bobRepo, charlieRepo } = setupMeshNetwork()
-
-       // pause to let the network set up
-       await pause(50)
+       const { bobRepo, charlieRepo, teardown } = await setup()

        const handle = bobRepo.create<TestDoc>()
        handle.change(d => {
@@ -869,13 +857,12 @@ describe("Repo", () => {
        const charlieHandle = charlieRepo.find(handle2.url)
        await charlieHandle.doc()
        assert.deepStrictEqual(charlieHandle.docSync(), { foo: "bar" })
+
+       teardown()
      })

      it("notifies peers when a document is merged", async () => {
-       const { bobRepo, charlieRepo } = setupMeshNetwork()
-
-       // pause to let the network set up
-       await pause(50)
+       const { bobRepo, charlieRepo, teardown } = await setup()

        const handle = bobRepo.create<TestDoc>()
        handle.change(d => {
@@ -897,19 +884,23 @@ describe("Repo", () => {
        handle.merge(handle2)

        // wait for the network to do it's thang
-       await pause(50)
+       await pause(350)

        await charlieHandle.doc()
        assert.deepStrictEqual(charlieHandle.docSync(), { foo: "baz" })
+
+       teardown()
      })
    })
  })

+ const warn = console.warn
+ const NO_OP = () => {}
+
  const disableConsoleWarn = () => {
-   console["_warn"] = console.warn
-   console.warn = () => {}
+   console.warn = NO_OP
  }

  const reenableConsoleWarn = () => {
-   console.warn = console["_warn"]
+   console.warn = warn
  }
package/test/helpers/DummyNetworkAdapter.ts CHANGED
@@ -1,16 +1,21 @@
  import { NetworkAdapter } from "../../src/index.js"

  export class DummyNetworkAdapter extends NetworkAdapter {
-   #startReady = true
-   constructor(startReady: boolean) {
+   #startReady: boolean
+
+   constructor({ startReady = true }: Options = {}) {
      super()
      this.#startReady = startReady
    }
-   send() { }
+   send() {}
    connect(_: string) {
      if (this.#startReady) {
        this.emit("ready", { network: this })
      }
    }
-   disconnect() { }
+   disconnect() {}
+ }
+
+ type Options = {
+   startReady?: boolean
  }
package/test/helpers/DummyStorageAdapter.ts CHANGED
@@ -11,10 +11,12 @@ export class DummyStorageAdapter implements StorageAdapter
      return key.split(".")
    }

-   async loadRange(keyPrefix: StorageKey): Promise<{data: Uint8Array, key: StorageKey}[]> {
+   async loadRange(
+     keyPrefix: StorageKey
+   ): Promise<{ data: Uint8Array; key: StorageKey }[]> {
      const range = Object.entries(this.#data)
        .filter(([key, _]) => key.startsWith(this.#keyToString(keyPrefix)))
-       .map(([key, data]) => ({key: this.#stringToKey(key), data}))
+       .map(([key, data]) => ({ key: this.#stringToKey(key), data }))
      return Promise.resolve(range)
    }

package/dist/EphemeralData.d.ts DELETED
@@ -1,20 +0,0 @@
- import { DocumentId, PeerId } from "./index.js";
- import { EphemeralMessage, MessageContents } from "./network/messages.js";
- /** A randomly generated string created when the {@link Repo} starts up */
- export type SessionId = string & {
-     __SessionId: false;
- };
- export interface EphemeralDataPayload {
-     documentId: DocumentId;
-     peerId: PeerId;
-     data: {
-         peerId: PeerId;
-         documentId: DocumentId;
-         data: unknown;
-     };
- }
- export type EphemeralDataMessageEvents = {
-     message: (event: MessageContents<EphemeralMessage>) => void;
-     data: (event: EphemeralDataPayload) => void;
- };
- //# sourceMappingURL=EphemeralData.d.ts.map
package/dist/EphemeralData.d.ts.map DELETED
@@ -1 +0,0 @@
- {"version":3,"file":"EphemeralData.d.ts","sourceRoot":"","sources":["../src/EphemeralData.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,YAAY,CAAA;AAC/C,OAAO,EAAE,gBAAgB,EAAE,eAAe,EAAE,MAAM,uBAAuB,CAAA;AAGzE,0EAA0E;AAC1E,MAAM,MAAM,SAAS,GAAG,MAAM,GAAG;IAAE,WAAW,EAAE,KAAK,CAAA;CAAE,CAAA;AAEvD,MAAM,WAAW,oBAAoB;IACnC,UAAU,EAAE,UAAU,CAAA;IACtB,MAAM,EAAE,MAAM,CAAA;IACd,IAAI,EAAE;QAAE,MAAM,EAAE,MAAM,CAAC;QAAC,UAAU,EAAE,UAAU,CAAC;QAAC,IAAI,EAAE,OAAO,CAAA;KAAE,CAAA;CAChE;AAED,MAAM,MAAM,0BAA0B,GAAG;IACvC,OAAO,EAAE,CAAC,KAAK,EAAE,eAAe,CAAC,gBAAgB,CAAC,KAAK,IAAI,CAAA;IAC3D,IAAI,EAAE,CAAC,KAAK,EAAE,oBAAoB,KAAK,IAAI,CAAA;CAC5C,CAAA"}