@automerge/automerge-repo 1.1.0-alpha.6 → 1.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. package/README.md +12 -7
  2. package/dist/AutomergeUrl.js +2 -2
  3. package/dist/DocHandle.d.ts +10 -4
  4. package/dist/DocHandle.d.ts.map +1 -1
  5. package/dist/DocHandle.js +17 -8
  6. package/dist/RemoteHeadsSubscriptions.js +3 -3
  7. package/dist/Repo.d.ts +23 -6
  8. package/dist/Repo.d.ts.map +1 -1
  9. package/dist/Repo.js +104 -71
  10. package/dist/helpers/debounce.js +1 -1
  11. package/dist/helpers/pause.d.ts +0 -1
  12. package/dist/helpers/pause.d.ts.map +1 -1
  13. package/dist/helpers/pause.js +2 -8
  14. package/dist/helpers/throttle.js +1 -1
  15. package/dist/helpers/withTimeout.d.ts.map +1 -1
  16. package/dist/helpers/withTimeout.js +2 -0
  17. package/dist/index.d.ts +1 -1
  18. package/dist/index.d.ts.map +1 -1
  19. package/dist/index.js +1 -1
  20. package/dist/network/NetworkAdapter.d.ts.map +1 -1
  21. package/dist/network/NetworkAdapter.js +2 -1
  22. package/dist/network/NetworkSubsystem.d.ts.map +1 -1
  23. package/dist/network/NetworkSubsystem.js +5 -3
  24. package/dist/network/messages.d.ts +43 -38
  25. package/dist/network/messages.d.ts.map +1 -1
  26. package/dist/network/messages.js +7 -9
  27. package/dist/storage/StorageSubsystem.js +1 -1
  28. package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
  29. package/dist/synchronizer/CollectionSynchronizer.js +1 -0
  30. package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -1
  31. package/dist/synchronizer/DocSynchronizer.js +13 -5
  32. package/dist/synchronizer/Synchronizer.d.ts +11 -3
  33. package/dist/synchronizer/Synchronizer.d.ts.map +1 -1
  34. package/package.json +4 -4
  35. package/src/AutomergeUrl.ts +2 -2
  36. package/src/DocHandle.ts +34 -12
  37. package/src/RemoteHeadsSubscriptions.ts +3 -3
  38. package/src/Repo.ts +130 -81
  39. package/src/helpers/debounce.ts +1 -1
  40. package/src/helpers/pause.ts +3 -11
  41. package/src/helpers/throttle.ts +1 -1
  42. package/src/helpers/withTimeout.ts +2 -0
  43. package/src/index.ts +1 -1
  44. package/src/network/NetworkAdapter.ts +5 -3
  45. package/src/network/NetworkSubsystem.ts +5 -4
  46. package/src/network/messages.ts +60 -63
  47. package/src/storage/StorageSubsystem.ts +1 -1
  48. package/src/synchronizer/CollectionSynchronizer.ts +2 -1
  49. package/src/synchronizer/DocSynchronizer.ts +19 -11
  50. package/src/synchronizer/Synchronizer.ts +11 -3
  51. package/test/CollectionSynchronizer.test.ts +7 -5
  52. package/test/DocHandle.test.ts +11 -2
  53. package/test/RemoteHeadsSubscriptions.test.ts +53 -50
  54. package/test/Repo.test.ts +64 -2
  55. package/test/StorageSubsystem.test.ts +1 -1
  56. package/test/helpers/collectMessages.ts +19 -0
  57. package/test/remoteHeads.test.ts +141 -120
  58. package/.eslintrc +0 -28
  59. package/test/helpers/waitForMessages.ts +0 -22
package/src/Repo.ts CHANGED
@@ -7,17 +7,18 @@ import {
  parseAutomergeUrl,
  } from "./AutomergeUrl.js"
  import { DocHandle, DocHandleEncodedChangePayload } from "./DocHandle.js"
+ import { RemoteHeadsSubscriptions } from "./RemoteHeadsSubscriptions.js"
+ import { headsAreSame } from "./helpers/headsAreSame.js"
  import { throttle } from "./helpers/throttle.js"
  import { NetworkAdapter, type PeerMetadata } from "./network/NetworkAdapter.js"
  import { NetworkSubsystem } from "./network/NetworkSubsystem.js"
+ import { RepoMessage } from "./network/messages.js"
  import { StorageAdapter } from "./storage/StorageAdapter.js"
  import { StorageSubsystem } from "./storage/StorageSubsystem.js"
+ import { StorageId } from "./storage/types.js"
  import { CollectionSynchronizer } from "./synchronizer/CollectionSynchronizer.js"
+ import { SyncStatePayload } from "./synchronizer/Synchronizer.js"
  import type { AnyDocumentId, DocumentId, PeerId } from "./types.js"
- import { RepoMessage, SyncStateMessage } from "./network/messages.js"
- import { StorageId } from "./storage/types.js"
- import { RemoteHeadsSubscriptions } from "./RemoteHeadsSubscriptions.js"
- import { headsAreSame } from "./helpers/headsAreSame.js"

  /** A Repo is a collection of documents with networking, syncing, and storage capabilities. */
  /** The `Repo` is the main entry point of this library
@@ -52,6 +53,7 @@ export class Repo extends EventEmitter<RepoEvents> {
  peerMetadataByPeerId: Record<PeerId, PeerMetadata> = {}

  #remoteHeadsSubscriptions = new RemoteHeadsSubscriptions()
+ #remoteHeadsGossipingEnabled = false

  constructor({
  storage,
@@ -59,8 +61,10 @@ export class Repo extends EventEmitter<RepoEvents> {
  peerId,
  sharePolicy,
  isEphemeral = storage === undefined,
+ enableRemoteHeadsGossiping = false,
  }: RepoConfig) {
  super()
+ this.#remoteHeadsGossipingEnabled = enableRemoteHeadsGossiping
  this.#log = debug(`automerge-repo:repo`)
  this.sharePolicy = sharePolicy ?? this.sharePolicy

@@ -77,10 +81,7 @@ export class Repo extends EventEmitter<RepoEvents> {
  }: DocHandleEncodedChangePayload<any>) => {
  void storageSubsystem.saveDoc(handle.documentId, doc)
  }
- const debouncedSaveFn = handle.on(
- "heads-changed",
- throttle(saveFn, this.saveDebounceRate)
- )
+ handle.on("heads-changed", throttle(saveFn, this.saveDebounceRate))

  if (isNew) {
  // this is a new document, immediately save it
@@ -140,9 +141,11 @@ export class Repo extends EventEmitter<RepoEvents> {
  networkSubsystem.send(message)
  })

- this.#synchronizer.on("open-doc", ({ peerId, documentId }) => {
- this.#remoteHeadsSubscriptions.subscribePeerToDoc(peerId, documentId)
- })
+ if (this.#remoteHeadsGossipingEnabled) {
+ this.#synchronizer.on("open-doc", ({ peerId, documentId }) => {
+ this.#remoteHeadsSubscriptions.subscribePeerToDoc(peerId, documentId)
+ })
+ }

  // STORAGE
  // The storage subsystem has access to some form of persistence, and deals with save and loading documents.
@@ -153,7 +156,8 @@ export class Repo extends EventEmitter<RepoEvents> {
  // The network subsystem deals with sending and receiving messages to and from peers.

  const myPeerMetadata: Promise<PeerMetadata> = new Promise(
- async (resolve, reject) =>
+ // eslint-disable-next-line no-async-promise-executor -- TODO: fix
+ async resolve =>
  resolve({
  storageId: await storageSubsystem?.id(),
  isEphemeral,
@@ -177,7 +181,7 @@ export class Repo extends EventEmitter<RepoEvents> {

  this.sharePolicy(peerId)
  .then(shouldShare => {
- if (shouldShare) {
+ if (shouldShare && this.#remoteHeadsGossipingEnabled) {
  this.#remoteHeadsSubscriptions.addGenerousPeer(peerId)
  }
  })
@@ -217,7 +221,7 @@ export class Repo extends EventEmitter<RepoEvents> {
  if (haveHeadsChanged) {
  handle.setRemoteHeads(storageId, message.syncState.theirHeads)

- if (storageId) {
+ if (storageId && this.#remoteHeadsGossipingEnabled) {
  this.#remoteHeadsSubscriptions.handleImmediateRemoteHeadsChanged(
  message.documentId,
  storageId,
@@ -227,45 +231,51 @@ export class Repo extends EventEmitter<RepoEvents> {
  }
  })

- this.#remoteHeadsSubscriptions.on("notify-remote-heads", message => {
- this.networkSubsystem.send({
- type: "remote-heads-changed",
- targetId: message.targetId,
- documentId: message.documentId,
- newHeads: {
- [message.storageId]: {
- heads: message.heads,
- timestamp: message.timestamp,
+ if (this.#remoteHeadsGossipingEnabled) {
+ this.#remoteHeadsSubscriptions.on("notify-remote-heads", message => {
+ this.networkSubsystem.send({
+ type: "remote-heads-changed",
+ targetId: message.targetId,
+ documentId: message.documentId,
+ newHeads: {
+ [message.storageId]: {
+ heads: message.heads,
+ timestamp: message.timestamp,
+ },
  },
- },
+ })
  })
- })

- this.#remoteHeadsSubscriptions.on("change-remote-subs", message => {
- this.#log("change-remote-subs", message)
- for (const peer of message.peers) {
- this.networkSubsystem.send({
- type: "remote-subscription-change",
- targetId: peer,
- add: message.add,
- remove: message.remove,
- })
- }
- })
+ this.#remoteHeadsSubscriptions.on("change-remote-subs", message => {
+ this.#log("change-remote-subs", message)
+ for (const peer of message.peers) {
+ this.networkSubsystem.send({
+ type: "remote-subscription-change",
+ targetId: peer,
+ add: message.add,
+ remove: message.remove,
+ })
+ }
+ })

- this.#remoteHeadsSubscriptions.on("remote-heads-changed", message => {
- const handle = this.#handleCache[message.documentId]
- handle.setRemoteHeads(message.storageId, message.remoteHeads)
- })
+ this.#remoteHeadsSubscriptions.on("remote-heads-changed", message => {
+ const handle = this.#handleCache[message.documentId]
+ handle.setRemoteHeads(message.storageId, message.remoteHeads)
+ })
+ }
  }

  #receiveMessage(message: RepoMessage) {
  switch (message.type) {
  case "remote-subscription-change":
- this.#remoteHeadsSubscriptions.handleControlMessage(message)
+ if (this.#remoteHeadsGossipingEnabled) {
+ this.#remoteHeadsSubscriptions.handleControlMessage(message)
+ }
  break
  case "remote-heads-changed":
- this.#remoteHeadsSubscriptions.handleRemoteHeads(message)
+ if (this.#remoteHeadsGossipingEnabled) {
+ this.#remoteHeadsSubscriptions.handleRemoteHeads(message)
+ }
  break
  case "sync":
  case "request":
@@ -279,17 +289,17 @@ export class Repo extends EventEmitter<RepoEvents> {

  #throttledSaveSyncStateHandlers: Record<
  StorageId,
- (message: SyncStateMessage) => void
+ (payload: SyncStatePayload) => void
  > = {}

  /** saves sync state throttled per storage id, if a peer doesn't have a storage id it's sync state is not persisted */
- #saveSyncState(message: SyncStateMessage) {
+ #saveSyncState(payload: SyncStatePayload) {
  if (!this.storageSubsystem) {
  return
  }

  const { storageId, isEphemeral } =
- this.peerMetadataByPeerId[message.peerId] || {}
+ this.peerMetadataByPeerId[payload.peerId] || {}

  if (!storageId || isEphemeral) {
  return
@@ -298,30 +308,37 @@ export class Repo extends EventEmitter<RepoEvents> {
  let handler = this.#throttledSaveSyncStateHandlers[storageId]
  if (!handler) {
  handler = this.#throttledSaveSyncStateHandlers[storageId] = throttle(
- ({ documentId, syncState }: SyncStateMessage) => {
- this.storageSubsystem!.saveSyncState(documentId, storageId, syncState)
+ ({ documentId, syncState }: SyncStatePayload) => {
+ void this.storageSubsystem!.saveSyncState(
+ documentId,
+ storageId,
+ syncState
+ )
  },
  this.saveDebounceRate
  )
  }

- handler(message)
+ handler(payload)
  }

  /** Returns an existing handle if we have it; creates one otherwise. */
- #getHandle<T>(
+ #getHandle<T>({
+ documentId,
+ isNew,
+ initialValue,
+ }: {
  /** The documentId of the handle to look up or create */
- documentId: DocumentId,
-
- /** If we know we're creating a new document, specify this so we can have access to it immediately */
+ documentId: DocumentId /** If we know we're creating a new document, specify this so we can have access to it immediately */
  isNew: boolean
- ) {
+ initialValue?: T
+ }) {
  // If we have the handle cached, return it
  if (this.#handleCache[documentId]) return this.#handleCache[documentId]

  // If not, create a new handle, cache it, and return it
  if (!documentId) throw new Error(`Invalid documentId ${documentId}`)
- const handle = new DocHandle<T>(documentId, { isNew })
+ const handle = new DocHandle<T>(documentId, { isNew, initialValue })
  this.#handleCache[documentId] = handle
  return handle
  }
@@ -341,32 +358,18 @@ export class Repo extends EventEmitter<RepoEvents> {
  }

  /**
- * Creates a new document and returns a handle to it. The initial value of the document is
- * an empty object `{}`. Its documentId is generated by the system. we emit a `document` event
- * to advertise interest in the document.
+ * Creates a new document and returns a handle to it. The initial value of the document is an
+ * empty object `{}` unless an initial value is provided. Its documentId is generated by the
+ * system. we emit a `document` event to advertise interest in the document.
  */
- create<T>(): DocHandle<T> {
- // TODO:
- // either
- // - pass an initial value and do something like this to ensure that you get a valid initial value
-
- // const myInitialValue = {
- // tasks: [],
- // filter: "all",
- //
- // const guaranteeInitialValue = (doc: any) => {
- // if (!doc.tasks) doc.tasks = []
- // if (!doc.filter) doc.filter = "all"
-
- // return { ...myInitialValue, ...doc }
- // }
-
- // or
- // - pass a "reify" function that takes a `<any>` and returns `<T>`
-
+ create<T>(initialValue?: T): DocHandle<T> {
  // Generate a new UUID and store it in the buffer
  const { documentId } = parseAutomergeUrl(generateAutomergeUrl())
- const handle = this.#getHandle<T>(documentId, true) as DocHandle<T>
+ const handle = this.#getHandle<T>({
+ documentId,
+ isNew: true,
+ initialValue,
+ }) as DocHandle<T>
  this.emit("document", { handle, isNew: true })
  return handle
  }
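The hunk above replaces the old TODO notes with a real optional `initialValue` parameter on `Repo.create`. A minimal usage sketch against the new signature (the `TaskList` shape and the empty network array are illustrative, not part of this package):

```ts
import { Repo } from "@automerge/automerge-repo"

// Illustrative document shape; any plain object works as an initial value
interface TaskList {
  tasks: string[]
  filter: "all" | "done"
}

const repo = new Repo({ network: [] })

// 1.1.0: seed the document at creation time instead of patching it afterwards
const handle = repo.create<TaskList>({ tasks: [], filter: "all" })

// Later edits work exactly as before
handle.change(doc => {
  doc.tasks.push("read the 1.1.0 changelog")
})
```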
@@ -432,7 +435,10 @@ export class Repo extends EventEmitter<RepoEvents> {
  return this.#handleCache[documentId]
  }

- const handle = this.#getHandle<T>(documentId, false) as DocHandle<T>
+ const handle = this.#getHandle<T>({
+ documentId,
+ isNew: false,
+ }) as DocHandle<T>
  this.emit("document", { handle, isNew: false })
  return handle
  }
@@ -443,16 +449,54 @@ export class Repo extends EventEmitter<RepoEvents> {
  ) {
  const documentId = interpretAsDocumentId(id)

- const handle = this.#getHandle(documentId, false)
+ const handle = this.#getHandle({ documentId, isNew: false })
  handle.delete()

  delete this.#handleCache[documentId]
  this.emit("delete-document", { documentId })
  }

+ /**
+ * Exports a document to a binary format.
+ * @param id - The url or documentId of the handle to export
+ *
+ * @returns Promise<Uint8Array | undefined> - A Promise containing the binary document,
+ * or undefined if the document is unavailable.
+ */
+ async export(id: AnyDocumentId): Promise<Uint8Array | undefined> {
+ const documentId = interpretAsDocumentId(id)
+
+ const handle = this.#getHandle({ documentId, isNew: false })
+ const doc = await handle.doc()
+ if (!doc) return undefined
+ return Automerge.save(doc)
+ }
+
+ /**
+ * Imports document binary into the repo.
+ * @param binary - The binary to import
+ */
+ import<T>(binary: Uint8Array) {
+ const doc = Automerge.load<T>(binary)
+
+ const handle = this.create<T>()
+
+ handle.update(() => {
+ return Automerge.clone(doc)
+ })
+
+ return handle
+ }
+
  subscribeToRemotes = (remotes: StorageId[]) => {
- this.#log("subscribeToRemotes", { remotes })
- this.#remoteHeadsSubscriptions.subscribeToRemotes(remotes)
+ if (this.#remoteHeadsGossipingEnabled) {
+ this.#log("subscribeToRemotes", { remotes })
+ this.#remoteHeadsSubscriptions.subscribeToRemotes(remotes)
+ } else {
+ this.#log(
+ "WARN: subscribeToRemotes called but remote heads gossiping is not enabled"
+ )
+ }
  }

  storageId = async (): Promise<StorageId | undefined> => {
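The new `export` and `import` methods added above round-trip a document through Automerge's binary format: `export` saves the current document, and `import` loads a binary into a freshly created handle. A rough sketch of that round trip (the document shape and repo setup are illustrative):

```ts
import { Repo } from "@automerge/automerge-repo"

const repo = new Repo({ network: [] })

async function roundTrip() {
  const handle = repo.create<{ title: string }>({ title: "hello" })

  // Serialize the current document to a standalone Automerge binary
  const binary = await repo.export(handle.url)
  if (!binary) return // the document was unavailable

  // import() creates a brand-new document (with its own documentId) from that binary
  const copy = repo.import<{ title: string }>(binary)
  console.log(await copy.doc()) // { title: "hello" }
}
```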
@@ -483,6 +527,11 @@ export interface RepoConfig {
  * all peers). A server only syncs documents that a peer explicitly requests by ID.
  */
  sharePolicy?: SharePolicy
+
+ /**
+ * Whether to enable the experimental remote heads gossiping feature
+ */
+ enableRemoteHeadsGossiping?: boolean
  }

  /** A function that determines whether we should share a document with a peer
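Since `enableRemoteHeadsGossiping` defaults to `false`, all of the remote-heads wiring above stays inert unless it is switched on at construction time. A configuration sketch (the websocket adapter and sync-server URL are placeholders, not prescribed by this diff):

```ts
import { Repo } from "@automerge/automerge-repo"
import { BrowserWebSocketClientAdapter } from "@automerge/automerge-repo-network-websocket"

// Opt in to the experimental remote-heads gossiping. Without this flag the repo
// ignores incoming remote-subscription-change / remote-heads-changed messages.
const repo = new Repo({
  network: [new BrowserWebSocketClientAdapter("wss://sync.example.com")],
  enableRemoteHeadsGossiping: true,
})
```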
package/src/helpers/debounce.ts CHANGED
@@ -19,7 +19,7 @@ export const throttle = <F extends (...args: Parameters<F>) => ReturnType<F>>(
  return function (...args: Parameters<F>) {
  clearTimeout(timeout)
  timeout = setTimeout(() => {
- fn.apply(null, args)
+ fn(...args)
  }, rate)
  }
  }
package/src/helpers/pause.ts CHANGED
@@ -1,14 +1,6 @@
+ /* c8 ignore start */
+
  export const pause = (t = 0) =>
  new Promise<void>(resolve => setTimeout(() => resolve(), t))

- export function rejectOnTimeout<T>(
- promise: Promise<T>,
- millis: number
- ): Promise<T> {
- return Promise.race([
- promise,
- pause(millis).then(() => {
- throw new Error("timeout exceeded")
- }),
- ])
- }
+ /* c8 ignore end */
package/src/helpers/throttle.ts CHANGED
@@ -36,7 +36,7 @@ export const throttle = <F extends (...args: Parameters<F>) => ReturnType<F>>(
  wait = lastCall + delay - Date.now()
  clearTimeout(timeout)
  timeout = setTimeout(() => {
- fn.apply(null, args)
+ fn(...args)
  lastCall = Date.now()
  }, wait)
  }
package/src/helpers/withTimeout.ts CHANGED
@@ -1,3 +1,4 @@
+ /* c8 ignore start */
  /**
  * If `promise` is resolved before `t` ms elapse, the timeout is cleared and the result of the
  * promise is returned. If the timeout ends first, a `TimeoutError` is thrown.
@@ -26,3 +27,4 @@ export class TimeoutError extends Error {
  this.name = "TimeoutError"
  }
  }
+ /* c8 ignore end */
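These helpers stay internal (they are not re-exported from the package root), but the doc comment above states the contract: the wrapped promise wins if it settles within `t` ms, otherwise a `TimeoutError` is thrown. A sketch of that behaviour, assuming the `withTimeout(promise, t)` call shape implied by the comment and importing the helpers by their in-package paths:

```ts
import { withTimeout, TimeoutError } from "./helpers/withTimeout.js"
import { pause } from "./helpers/pause.js"

async function demo() {
  // Resolves: the inner promise settles well inside the 1000 ms budget
  const ok = await withTimeout(pause(10).then(() => "done"), 1000)

  try {
    // Rejects with TimeoutError: the inner promise needs 5000 ms but only 100 ms are allowed
    await withTimeout(pause(5000), 100)
  } catch (err) {
    if (err instanceof TimeoutError) {
      // timed out
    }
  }
  return ok
}
```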
package/src/index.ts CHANGED
@@ -34,7 +34,7 @@
  } from "./AutomergeUrl.js"
  export { Repo } from "./Repo.js"
  export { NetworkAdapter } from "./network/NetworkAdapter.js"
- export { isValidRepoMessage } from "./network/messages.js"
+ export { isRepoMessage } from "./network/messages.js"
  export { StorageAdapter } from "./storage/StorageAdapter.js"

  /** @hidden **/
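The only public API change in index.ts is the rename of the exported message guard from `isValidRepoMessage` to `isRepoMessage`. A small sketch of a caller updated for the new name (the surrounding handler is illustrative):

```ts
import { isRepoMessage } from "@automerge/automerge-repo"

// Renamed in 1.1.0: previously imported as isValidRepoMessage
function handleIncoming(msg: any) {
  if (!isRepoMessage(msg)) {
    console.warn("dropping non-repo message", msg)
    return
  }
  // Narrowed to a repo message; safe to route on its type
  console.log(msg.type, "from", msg.senderId)
}
```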
package/src/network/NetworkAdapter.ts CHANGED
@@ -1,13 +1,15 @@
+ /* c8 ignore start */
+
  import { EventEmitter } from "eventemitter3"
  import { PeerId } from "../types.js"
  import { Message } from "./messages.js"
  import { StorageId } from "../storage/types.js"

- /**
+ /**
  * Describes a peer intent to the system
  * storageId: the key for syncState to decide what the other peer already has
  * isEphemeral: to decide if we bother recording this peer's sync state
- *
+ *
  */
  export interface PeerMetadata {
  storageId?: StorageId
@@ -22,7 +24,7 @@ export interface PeerMetadata {
  * until the adapter emits a `ready` event before it starts trying to use it
  */
  export abstract class NetworkAdapter extends EventEmitter<NetworkAdapterEvents> {
- peerId?: PeerId // hmmm, maybe not
+ peerId?: PeerId
  peerMetadata?: PeerMetadata

  /** Called by the {@link Repo} to start the connection process
package/src/network/NetworkSubsystem.ts CHANGED
@@ -11,9 +11,8 @@ import {
  MessageContents,
  RepoMessage,
  isEphemeralMessage,
- isValidRepoMessage,
+ isRepoMessage,
  } from "./messages.js"
- import { StorageId } from "../storage/types.js"

  type EphemeralMessageSource = `${PeerId}:${SessionId}`

@@ -74,7 +73,7 @@ export class NetworkSubsystem extends EventEmitter<NetworkSubsystemEvents> {
  })

  networkAdapter.on("message", msg => {
- if (!isValidRepoMessage(msg)) {
+ if (!isRepoMessage(msg)) {
  this.#log(`invalid message: ${JSON.stringify(msg)}`)
  return
  }
@@ -108,6 +107,8 @@ export class NetworkSubsystem extends EventEmitter<NetworkSubsystemEvents> {

  this.peerMetadata.then(peerMetadata => {
  networkAdapter.connect(this.peerId, peerMetadata)
+ }).catch(err => {
+ this.#log("error connecting to network", err)
  })
  }

@@ -145,7 +146,7 @@ export class NetworkSubsystem extends EventEmitter<NetworkSubsystemEvents> {
  }

  const outbound = prepareMessage(message)
- this.#log("sending message", outbound)
+ this.#log("sending message %o", outbound)
  peer.send(outbound as RepoMessage)
  }