@automerge/automerge-repo 1.0.6 → 1.0.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (63)
  1. package/.eslintrc +1 -1
  2. package/dist/DocHandle.d.ts +7 -7
  3. package/dist/DocHandle.d.ts.map +1 -1
  4. package/dist/DocHandle.js +3 -7
  5. package/dist/EphemeralData.d.ts +2 -2
  6. package/dist/EphemeralData.d.ts.map +1 -1
  7. package/dist/Repo.d.ts.map +1 -1
  8. package/dist/Repo.js +7 -11
  9. package/dist/helpers/cbor.d.ts +2 -2
  10. package/dist/helpers/cbor.d.ts.map +1 -1
  11. package/dist/helpers/cbor.js +1 -1
  12. package/dist/helpers/pause.d.ts.map +1 -1
  13. package/dist/helpers/pause.js +3 -1
  14. package/dist/helpers/tests/network-adapter-tests.d.ts.map +1 -1
  15. package/dist/helpers/tests/network-adapter-tests.js +2 -2
  16. package/dist/index.d.ts +11 -9
  17. package/dist/index.d.ts.map +1 -1
  18. package/dist/index.js +4 -4
  19. package/dist/network/NetworkAdapter.d.ts +3 -3
  20. package/dist/network/NetworkAdapter.d.ts.map +1 -1
  21. package/dist/network/NetworkSubsystem.d.ts +2 -2
  22. package/dist/network/NetworkSubsystem.d.ts.map +1 -1
  23. package/dist/network/NetworkSubsystem.js +30 -18
  24. package/dist/network/messages.d.ts +38 -68
  25. package/dist/network/messages.d.ts.map +1 -1
  26. package/dist/network/messages.js +13 -21
  27. package/dist/storage/StorageSubsystem.js +7 -7
  28. package/dist/synchronizer/CollectionSynchronizer.d.ts +3 -3
  29. package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
  30. package/dist/synchronizer/CollectionSynchronizer.js +2 -2
  31. package/dist/synchronizer/DocSynchronizer.d.ts +3 -3
  32. package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -1
  33. package/dist/synchronizer/DocSynchronizer.js +22 -29
  34. package/dist/synchronizer/Synchronizer.d.ts +2 -2
  35. package/dist/synchronizer/Synchronizer.d.ts.map +1 -1
  36. package/dist/types.d.ts +5 -1
  37. package/dist/types.d.ts.map +1 -1
  38. package/package.json +5 -13
  39. package/src/DocHandle.ts +9 -11
  40. package/src/EphemeralData.ts +2 -2
  41. package/src/Repo.ts +10 -14
  42. package/src/helpers/cbor.ts +4 -4
  43. package/src/helpers/pause.ts +7 -2
  44. package/src/helpers/tests/network-adapter-tests.ts +3 -3
  45. package/src/helpers/withTimeout.ts +2 -2
  46. package/src/index.ts +36 -29
  47. package/src/network/NetworkAdapter.ts +7 -3
  48. package/src/network/NetworkSubsystem.ts +31 -23
  49. package/src/network/messages.ts +88 -151
  50. package/src/storage/StorageSubsystem.ts +8 -8
  51. package/src/synchronizer/CollectionSynchronizer.ts +6 -15
  52. package/src/synchronizer/DocSynchronizer.ts +34 -48
  53. package/src/synchronizer/Synchronizer.ts +2 -2
  54. package/src/types.ts +8 -3
  55. package/test/CollectionSynchronizer.test.ts +58 -53
  56. package/test/DocHandle.test.ts +35 -36
  57. package/test/DocSynchronizer.test.ts +9 -8
  58. package/test/Network.test.ts +1 -0
  59. package/test/Repo.test.ts +177 -97
  60. package/test/StorageSubsystem.test.ts +6 -9
  61. package/test/tsconfig.json +8 -0
  62. package/typedoc.json +3 -3
  63. package/.mocharc.json +0 -5
package/src/synchronizer/DocSynchronizer.ts CHANGED
@@ -1,28 +1,26 @@
  import * as A from "@automerge/automerge/next"
+ import { decode } from "cbor-x"
+ import debug from "debug"
  import {
-   AWAITING_NETWORK,
    DocHandle,
    DocHandleOutboundEphemeralMessagePayload,
    READY,
    REQUESTING,
    UNAVAILABLE,
  } from "../DocHandle.js"
- import { PeerId } from "../types.js"
- import { Synchronizer } from "./Synchronizer.js"
-
- import debug from "debug"
  import {
+   DocumentUnavailableMessage,
    EphemeralMessage,
-   isDocumentUnavailableMessage,
-   isRequestMessage,
-   Message,
+   RepoMessage,
+   MessageContents,
    RequestMessage,
-   SynchronizerMessage,
    SyncMessage,
+   isRequestMessage,
  } from "../network/messages.js"
+ import { PeerId } from "../types.js"
+ import { Synchronizer } from "./Synchronizer.js"

  type PeerDocumentStatus = "unknown" | "has" | "unavailable" | "wants"
- import { decode } from "cbor-x"

  /**
   * DocSynchronizer takes a handle to an Automerge document, and receives & dispatches sync messages
@@ -45,7 +43,7 @@ export class DocSynchronizer extends Synchronizer {

    #syncStarted = false

-   constructor(private handle: DocHandle<any>) {
+   constructor(private handle: DocHandle<unknown>) {
      super()
      const docId = handle.documentId.slice(0, 5)
      this.#conciseLog = debug(`automerge-repo:concise:docsync:${docId}`) // Only logs one line per receive/send
@@ -82,7 +80,9 @@ export class DocSynchronizer extends Synchronizer {
      this.#peers.forEach(peerId => this.#sendSyncMessage(peerId, doc))
    }

-   async #broadcastToPeers({ data }: DocHandleOutboundEphemeralMessagePayload) {
+   async #broadcastToPeers({
+     data,
+   }: DocHandleOutboundEphemeralMessagePayload<unknown>) {
      this.#log(`broadcastToPeers`, this.#peers)
      this.#peers.forEach(peerId => this.#sendEphemeralMessage(peerId, data))
    }
@@ -90,12 +90,13 @@ export class DocSynchronizer extends Synchronizer {
    #sendEphemeralMessage(peerId: PeerId, data: Uint8Array) {
      this.#log(`sendEphemeralMessage ->${peerId}`)

-     this.emit("message", {
+     const message: MessageContents<EphemeralMessage> = {
        type: "ephemeral",
        targetId: peerId,
        documentId: this.handle.documentId,
        data,
-     })
+     }
+     this.emit("message", message)
    }

    #getSyncState(peerId: PeerId) {
@@ -128,13 +129,11 @@ export class DocSynchronizer extends Synchronizer {
      const [newSyncState, message] = A.generateSyncMessage(doc, syncState)
      this.#setSyncState(peerId, newSyncState)
      if (message) {
-       this.#logMessage(`sendSyncMessage 🡒 ${peerId}`, message)
-
-       const decoded = A.decodeSyncMessage(message)
+       const isNew = A.getHeads(doc).length === 0

        if (
          !this.handle.isReady() &&
-         decoded.heads.length === 0 &&
+         isNew &&
          newSyncState.sharedHeads.length === 0 &&
          !Object.values(this.#peerDocumentStatuses).includes("has") &&
          this.#peerDocumentStatuses[peerId] === "unknown"
@@ -145,43 +144,23 @@ export class DocSynchronizer extends Synchronizer {
            targetId: peerId,
            documentId: this.handle.documentId,
            data: message,
-         })
+         } as RequestMessage)
        } else {
          this.emit("message", {
            type: "sync",
            targetId: peerId,
            data: message,
            documentId: this.handle.documentId,
-         })
+         } as SyncMessage)
        }

        // if we have sent heads, then the peer now has or will have the document
-       if (decoded.heads.length > 0) {
+       if (!isNew) {
          this.#peerDocumentStatuses[peerId] = "has"
        }
      }
    }

-   #logMessage = (label: string, message: Uint8Array) => {
-     // This is real expensive...
-     return
-
-     const size = message.byteLength
-     const logText = `${label} ${size}b`
-     const decoded = A.decodeSyncMessage(message)
-
-     this.#conciseLog(logText)
-     this.#log(logText, decoded)
-
-     // expanding is expensive, so only do it if we're logging at this level
-     const expanded = this.#opsLog.enabled
-       ? decoded.changes.flatMap((change: A.Change) =>
-           A.decodeChange(change).ops.map((op: any) => JSON.stringify(op))
-         )
-       : null
-     this.#opsLog(logText, expanded)
-   }
-
    /// PUBLIC

    hasPeer(peerId: PeerId) {
@@ -189,6 +168,7 @@ export class DocSynchronizer extends Synchronizer {
    }

    beginSync(peerIds: PeerId[]) {
+     const newPeers = new Set(peerIds.filter(peerId => !this.#peers.includes(peerId)))
      this.#log(`beginSync: ${peerIds.join(", ")}`)

      // HACK: if we have a sync state already, we round-trip it through the encoding system to make
@@ -204,15 +184,20 @@ export class DocSynchronizer extends Synchronizer {
      // At this point if we don't have anything in our storage, we need to use an empty doc to sync
      // with; but we don't want to surface that state to the front end
      void this.handle.doc([READY, REQUESTING, UNAVAILABLE]).then(doc => {
-
        // we register out peers first, then say that sync has started
        this.#syncStarted = true
        this.#checkDocUnavailable()

-       if (doc === undefined) return
+       const wasUnavailable = doc === undefined
+       if (wasUnavailable && newPeers.size == 0) {
+         return
+       }
+       // If the doc is unavailable we still need a blank document to generate
+       // the sync message from
+       const theDoc = doc ?? A.init<unknown>()

        peerIds.forEach(peerId => {
-         this.#sendSyncMessage(peerId, doc)
+         this.#sendSyncMessage(peerId, theDoc)
        })
      })
    }
@@ -222,7 +207,7 @@ export class DocSynchronizer extends Synchronizer {
      this.#peers = this.#peers.filter(p => p !== peerId)
    }

-   receiveMessage(message: SynchronizerMessage) {
+   receiveMessage(message: RepoMessage) {
      switch (message.type) {
        case "sync":
        case "request":
@@ -246,7 +231,7 @@ export class DocSynchronizer extends Synchronizer {

      const { senderId, data } = message

-     const contents = decode(data)
+     const contents = decode(new Uint8Array(data))

      this.handle.emit("ephemeral-message", {
        handle: this.handle,
@@ -320,11 +305,12 @@ export class DocSynchronizer extends Synchronizer {
      this.#peers
        .filter(peerId => this.#peerDocumentStatuses[peerId] === "wants")
        .forEach(peerId => {
-         this.emit("message", {
+         const message: MessageContents<DocumentUnavailableMessage> = {
            type: "doc-unavailable",
            documentId: this.handle.documentId,
            targetId: peerId,
-         })
+         }
+         this.emit("message", message)
        })

      this.handle.unavailable()
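
The recurring pattern in this file is that outbound payloads are now bound to a MessageContents<...> type before being emitted, instead of passing an untyped object literal straight to emit. A minimal sketch of why that helps, using simplified stand-in types (the real MessageContents, EphemeralMessage, and branded IDs live in src/network/messages.ts and src/types.ts; the Omit<"senderId"> shape here is an assumption for illustration only):

// Simplified stand-ins; not the library's actual definitions.
type PeerId = string & { __peerId: true }
type DocumentId = string & { __documentId: true }

interface EphemeralMessage {
  type: "ephemeral"
  senderId: PeerId // assumed to be filled in later by the network layer
  targetId: PeerId
  documentId: DocumentId
  data: Uint8Array
}

// Assumption: the sender omits senderId, so MessageContents drops it.
type MessageContents<T extends { senderId: PeerId }> = Omit<T, "senderId">

// Binding the literal to a named type catches misspelled or missing fields
// at compile time, which an untyped emit("message", {...}) call would not.
const message: MessageContents<EphemeralMessage> = {
  type: "ephemeral",
  targetId: "peer1" as PeerId,
  documentId: "3xyz" as DocumentId,
  data: new Uint8Array([1, 2, 3]),
}

console.log(message.type)
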
package/src/synchronizer/Synchronizer.ts CHANGED
@@ -1,8 +1,8 @@
  import { EventEmitter } from "eventemitter3"
- import { Message, MessageContents } from "../network/messages.js"
+ import { RepoMessage, MessageContents } from "../network/messages.js"

  export abstract class Synchronizer extends EventEmitter<SynchronizerEvents> {
-   abstract receiveMessage(message: Message): void
+   abstract receiveMessage(message: RepoMessage): void
  }

  export interface SynchronizerEvents {
package/src/types.ts CHANGED
@@ -1,17 +1,22 @@
  /** The ID of a document. Typically you should use a {@link AutomergeUrl} instead.
   */
  export type DocumentId = string & { __documentId: true } // for logging
- /** A branded string representing a URL for a document
+
+ /** A branded string representing a URL for a document
   *
   * @remarks
-  * An automerge URL has the form `automerge:<base58 encoded string>`. This
+  * An automerge URL has the form `automerge:<base58 encoded string>`. This
   * type is returned from various routines which validate a url.
   *
   */
  export type AutomergeUrl = string & { __documentUrl: true } // for opening / linking
+
  /** A document ID as a Uint8Array instead of a bas58 encoded string. Typically you should use a {@link AutomergeUrl} instead.
   */
  export type BinaryDocumentId = Uint8Array & { __binaryDocumentId: true } // for storing / syncing

  /** A branded type for peer IDs */
- export type PeerId = string & { __peerId: false }
+ export type PeerId = string & { __peerId: true }
+
+ /** A randomly generated string created when the {@link Repo} starts up */
+ export type SessionId = string & { __SessionId: true }
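
The __peerId brand flipping from false to true has no runtime effect; these are purely compile-time brands. A short sketch of how callers use the branded strings (local type copies matching the shapes in src/types.ts, shown only to illustrate the pattern):

// Local copies of the branded types from src/types.ts.
type PeerId = string & { __peerId: true }
type SessionId = string & { __SessionId: true }

function logPeer(peerId: PeerId) {
  console.log(`peer: ${peerId}`)
}

// Callers opt in with an explicit cast, exactly as the tests do ("peer1" as PeerId).
const peer = "peer1" as PeerId
logPeer(peer)

// logPeer("peer1")              // compile error: a plain string is not a PeerId
// logPeer("abc" as SessionId)   // compile error: the brands keep the ID types apart
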
package/test/CollectionSynchronizer.test.ts CHANGED
@@ -1,5 +1,5 @@
  import assert from "assert"
- import { beforeEach } from "mocha"
+ import { describe, it, beforeEach } from "vitest"
  import { PeerId, Repo } from "../src/index.js"
  import { CollectionSynchronizer } from "../src/synchronizer/CollectionSynchronizer.js"

@@ -18,56 +18,61 @@ describe("CollectionSynchronizer", () => {
      assert(synchronizer !== null)
    })

-   it("starts synchronizing a document to peers when added", done => {
-     const handle = repo.create()
-     synchronizer.addPeer("peer1" as PeerId)
-
-     synchronizer.once("message", event => {
-       assert(event.targetId === "peer1")
-       assert(event.documentId === handle.documentId)
-       done()
-     })
-
-     synchronizer.addDocument(handle.documentId)
-   })
-
-   it("starts synchronizing existing documents when a peer is added", done => {
-     const handle = repo.create()
-     synchronizer.addDocument(handle.documentId)
-     synchronizer.once("message", event => {
-       assert(event.targetId === "peer1")
-       assert(event.documentId === handle.documentId)
-       done()
-     })
-     synchronizer.addPeer("peer1" as PeerId)
-   })
-
-   it("should not synchronize to a peer which is excluded from the share policy", done => {
-     const handle = repo.create()
-
-     repo.sharePolicy = async (peerId: PeerId) => peerId !== "peer1"
-
-     synchronizer.addDocument(handle.documentId)
-     synchronizer.once("message", () => {
-       done(new Error("Should not have sent a message"))
-     })
-     synchronizer.addPeer("peer1" as PeerId)
-
-     setTimeout(done)
-   })
-
-   it("should not synchronize a document which is excluded from the share policy", done => {
-     const handle = repo.create()
-     repo.sharePolicy = async (_, documentId) => documentId !== handle.documentId
-
-     synchronizer.addPeer("peer2" as PeerId)
-
-     synchronizer.once("message", () => {
-       done(new Error("Should not have sent a message"))
-     })
-
-     synchronizer.addDocument(handle.documentId)
-
-     setTimeout(done)
-   })
+   it("starts synchronizing a document to peers when added", () =>
+     new Promise<void>(done => {
+       const handle = repo.create()
+       synchronizer.addPeer("peer1" as PeerId)
+
+       synchronizer.once("message", event => {
+         assert(event.targetId === "peer1")
+         assert(event.documentId === handle.documentId)
+         done()
+       })
+
+       synchronizer.addDocument(handle.documentId)
+     }))
+
+   it("starts synchronizing existing documents when a peer is added", () =>
+     new Promise<void>(done => {
+       const handle = repo.create()
+       synchronizer.addDocument(handle.documentId)
+       synchronizer.once("message", event => {
+         assert(event.targetId === "peer1")
+         assert(event.documentId === handle.documentId)
+         done()
+       })
+       synchronizer.addPeer("peer1" as PeerId)
+     }))
+
+   it("should not synchronize to a peer which is excluded from the share policy", () =>
+     new Promise<void>((done, reject) => {
+       const handle = repo.create()
+
+       repo.sharePolicy = async (peerId: PeerId) => peerId !== "peer1"
+
+       synchronizer.addDocument(handle.documentId)
+       synchronizer.once("message", () => {
+         reject(new Error("Should not have sent a message"))
+       })
+       synchronizer.addPeer("peer1" as PeerId)
+
+       setTimeout(done)
+     }))
+
+   it("should not synchronize a document which is excluded from the share policy", () =>
+     new Promise<void>((done, reject) => {
+       const handle = repo.create()
+       repo.sharePolicy = async (_, documentId) =>
+         documentId !== handle.documentId
+
+       synchronizer.addPeer("peer2" as PeerId)
+
+       synchronizer.once("message", () => {
+         reject(new Error("Should not have sent a message"))
+       })
+
+       synchronizer.addDocument(handle.documentId)
+
+       setTimeout(done)
+     }))
  })
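
Every rewritten test above follows the same mechanical conversion: mocha's done callback becomes an explicitly returned Promise, with reject used where the old code passed an error to done. A generic sketch of the pattern under vitest (the test body here is a placeholder, not from the package):

import assert from "assert"
import { describe, it } from "vitest"

describe("callback-style async test", () => {
  // Before (mocha): it("…", done => { …; done() })
  // After (vitest): return a Promise and resolve/reject it explicitly.
  it("resolves once the async work has run", () =>
    new Promise<void>((done, reject) => {
      setTimeout(() => {
        try {
          assert.equal(1 + 1, 2) // placeholder assertion
          done()
        } catch (err) {
          reject(err)
        }
      }, 0)
    }))
})
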
package/test/DocHandle.test.ts CHANGED
@@ -1,16 +1,15 @@
  import * as A from "@automerge/automerge/next"
+ import { decode } from "cbor-x"
  import assert from "assert"
- import { it } from "mocha"
- import { DocHandle, DocHandleChangePayload } from "../src/index.js"
- import { pause } from "../src/helpers/pause.js"
- import { TestDoc } from "./types.js"
+ import { describe, it } from "vitest"
  import { generateAutomergeUrl, parseAutomergeUrl } from "../src/DocUrl.js"
  import { eventPromise } from "../src/helpers/eventPromise.js"
- import { decode } from "cbor-x"
+ import { pause } from "../src/helpers/pause.js"
+ import { DocHandle, DocHandleChangePayload } from "../src/index.js"
+ import { TestDoc } from "./types.js"

  describe("DocHandle", () => {
    const TEST_ID = parseAutomergeUrl(generateAutomergeUrl()).documentId
-   const BOGUS_ID = parseAutomergeUrl(generateAutomergeUrl()).documentId

    const docFromMockStorage = (doc: A.Doc<{ foo: string }>) => {
      return A.change<{ foo: string }>(doc, d => (d.foo = "bar"))
@@ -90,7 +89,7 @@ describe("DocHandle", () => {

      assert.equal(handle.docSync(), undefined)
      assert.equal(handle.isReady(), false)
-     assert.throws(() => handle.change(_ => { }))
+     assert.throws(() => handle.change(_ => {}))
    })

    it("should become ready if the document is updated by the network", async () => {
@@ -128,16 +127,17 @@ describe("DocHandle", () => {
      assert.deepStrictEqual(changePayload.handle, handle)
    })

-   it("should not emit a change message if no change happens via update", done => {
-     const handle = new DocHandle<TestDoc>(TEST_ID, { isNew: true })
-     handle.once("change", () => {
-       done(new Error("shouldn't have changed"))
-     })
-     handle.update(d => {
-       setTimeout(done, 0)
-       return d
-     })
-   })
+   it("should not emit a change message if no change happens via update", () =>
+     new Promise<void>((done, reject) => {
+       const handle = new DocHandle<TestDoc>(TEST_ID, { isNew: true })
+       handle.once("change", () => {
+         reject(new Error("shouldn't have changed"))
+       })
+       handle.update(d => {
+         setTimeout(done, 0)
+         return d
+       })
+     }))

    it("should update the internal doc prior to emitting the change message", async () => {
      const handle = new DocHandle<TestDoc>(TEST_ID, { isNew: true })
@@ -200,28 +200,27 @@ describe("DocHandle", () => {
      assert.equal(doc?.foo, "bar")
    })

-   it("should not emit a patch message if no change happens", done => {
-     const handle = new DocHandle<TestDoc>(TEST_ID, { isNew: true })
-     handle.on("change", () => {
-       done(new Error("shouldn't have changed"))
-     })
-     handle.change(_doc => {
-       // do nothing
-       setTimeout(done, 0)
-     })
-   })
+   it("should not emit a patch message if no change happens", () =>
+     new Promise<void>((done, reject) => {
+       const handle = new DocHandle<TestDoc>(TEST_ID, { isNew: true })
+       handle.on("change", () => {
+         reject(new Error("shouldn't have changed"))
+       })
+       handle.change(_doc => {
+         // do nothing
+         setTimeout(done, 0)
+       })
+     }))

-   it("should time out if the document is not loaded", async () => {
+   it("should be undefined if loading the document times out", async () => {
      // set docHandle time out after 5 ms
      const handle = new DocHandle<TestDoc>(TEST_ID, { timeoutDelay: 5 })

-     // we're not going to load
-     await pause(10)
+     const doc = await handle.doc()

-     assert.equal(handle.state, "failed")
+     assert.equal(doc, undefined)

-     // so it should time out
-     return assert.rejects(handle.doc, "DocHandle timed out")
+     assert.equal(handle.state, "failed")
    })

    it("should not time out if the document is loaded in time", async () => {
@@ -236,7 +235,7 @@ describe("DocHandle", () => {
      assert.equal(doc?.foo, "bar")
    })

-   it("should time out if the document is not updated from the network", async () => {
+   it("should be undefined if loading from the network times out", async () => {
      // set docHandle time out after 5 ms
      const handle = new DocHandle<TestDoc>(TEST_ID, { timeoutDelay: 5 })

@@ -246,8 +245,8 @@ describe("DocHandle", () => {
      // there's no update
      await pause(10)

-     // so it should time out
-     return assert.rejects(handle.doc, "DocHandle timed out")
+     const doc = await handle.doc()
+     assert.equal(doc, undefined)
    })

    it("should not time out if the document is updated in time", async () => {
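
The renamed timeout tests capture a behaviour change worth calling out: when a load times out, handle.doc() now resolves to undefined (and the handle ends up in the "failed" state) rather than rejecting. A hedged sketch of what that means for calling code; MyDoc and the fallback value are illustrative, not from the package:

import { DocHandle } from "@automerge/automerge-repo"

interface MyDoc {
  foo: string
}

// Previously a timed-out load surfaced as a rejected promise; now the caller
// checks for undefined instead of wrapping the await in try/catch.
async function loadOrFallback(handle: DocHandle<MyDoc>): Promise<MyDoc> {
  const doc = await handle.doc()
  if (doc === undefined) {
    return { foo: "default" } // document never loaded in time
  }
  return doc
}
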
package/test/DocSynchronizer.test.ts CHANGED
@@ -1,18 +1,19 @@
  import assert from "assert"
- import { BinaryDocumentId, PeerId } from "../src/types.js"
+ import { describe, it } from "vitest"
  import { DocHandle } from "../src/DocHandle.js"
- import { DocSynchronizer } from "../src/synchronizer/DocSynchronizer.js"
+ import { generateAutomergeUrl, parseAutomergeUrl } from "../src/DocUrl.js"
+ import { generateAutomergeUrl, parseAutomergeUrl } from "../src/DocUrl.js"
  import { eventPromise } from "../src/helpers/eventPromise.js"
- import { TestDoc } from "./types.js"
- import { parseAutomergeUrl, generateAutomergeUrl } from "../src/DocUrl.js"
- import { SyncMessage } from "../src/index.js"
  import {
    DocumentUnavailableMessage,
-   DocumentUnavailableMessageContents,
    MessageContents,
-   RequestMessageContents,
-   SyncMessageContents,
  } from "../src/network/messages.js"
+ import { DocSynchronizer } from "../src/synchronizer/DocSynchronizer.js"
+ import { PeerId } from "../src/types.js"
+ import { TestDoc } from "./types.js"
+ import { DocSynchronizer } from "../src/synchronizer/DocSynchronizer.js"
+ import { PeerId } from "../src/types.js"
+ import { TestDoc } from "./types.js"

  const alice = "alice" as PeerId
  const bob = "bob" as PeerId
package/test/Network.test.ts CHANGED
@@ -1,4 +1,5 @@
  import assert from "assert"
+ import { describe, it } from "vitest"
  import { NetworkSubsystem } from "../src/network/NetworkSubsystem.js"

  // Note: The sync tests in `Repo.test.ts` exercise the network subsystem, and the suite in