@automerge/automerge-repo 1.1.0-alpha.7 → 1.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73)
  1. package/README.md +5 -3
  2. package/dist/AutomergeUrl.js +1 -1
  3. package/dist/DocHandle.d.ts +10 -4
  4. package/dist/DocHandle.d.ts.map +1 -1
  5. package/dist/DocHandle.js +21 -13
  6. package/dist/Repo.d.ts +22 -10
  7. package/dist/Repo.d.ts.map +1 -1
  8. package/dist/Repo.js +90 -76
  9. package/dist/helpers/pause.d.ts +0 -1
  10. package/dist/helpers/pause.d.ts.map +1 -1
  11. package/dist/helpers/pause.js +2 -8
  12. package/dist/helpers/tests/network-adapter-tests.d.ts +2 -2
  13. package/dist/helpers/tests/network-adapter-tests.d.ts.map +1 -1
  14. package/dist/helpers/tests/network-adapter-tests.js +16 -1
  15. package/dist/helpers/withTimeout.d.ts.map +1 -1
  16. package/dist/helpers/withTimeout.js +2 -0
  17. package/dist/index.d.ts +4 -2
  18. package/dist/index.d.ts.map +1 -1
  19. package/dist/index.js +1 -1
  20. package/dist/network/NetworkAdapter.d.ts +4 -34
  21. package/dist/network/NetworkAdapter.d.ts.map +1 -1
  22. package/dist/network/NetworkAdapter.js +2 -0
  23. package/dist/network/NetworkAdapterInterface.d.ts +61 -0
  24. package/dist/network/NetworkAdapterInterface.d.ts.map +1 -0
  25. package/dist/network/NetworkAdapterInterface.js +2 -0
  26. package/dist/network/NetworkSubsystem.d.ts +3 -3
  27. package/dist/network/NetworkSubsystem.d.ts.map +1 -1
  28. package/dist/network/NetworkSubsystem.js +7 -5
  29. package/dist/network/messages.d.ts +43 -38
  30. package/dist/network/messages.d.ts.map +1 -1
  31. package/dist/network/messages.js +7 -9
  32. package/dist/storage/StorageAdapter.d.ts +3 -1
  33. package/dist/storage/StorageAdapter.d.ts.map +1 -1
  34. package/dist/storage/StorageAdapter.js +1 -0
  35. package/dist/storage/StorageAdapterInterface.d.ts +30 -0
  36. package/dist/storage/StorageAdapterInterface.d.ts.map +1 -0
  37. package/dist/storage/StorageAdapterInterface.js +1 -0
  38. package/dist/storage/StorageSubsystem.d.ts +2 -2
  39. package/dist/storage/StorageSubsystem.d.ts.map +1 -1
  40. package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
  41. package/dist/synchronizer/CollectionSynchronizer.js +1 -0
  42. package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -1
  43. package/dist/synchronizer/DocSynchronizer.js +13 -9
  44. package/dist/synchronizer/Synchronizer.d.ts +11 -3
  45. package/dist/synchronizer/Synchronizer.d.ts.map +1 -1
  46. package/package.json +3 -4
  47. package/src/AutomergeUrl.ts +1 -1
  48. package/src/DocHandle.ts +40 -19
  49. package/src/Repo.ts +123 -98
  50. package/src/helpers/pause.ts +3 -11
  51. package/src/helpers/tests/network-adapter-tests.ts +30 -4
  52. package/src/helpers/withTimeout.ts +2 -0
  53. package/src/index.ts +4 -2
  54. package/src/network/NetworkAdapter.ts +9 -45
  55. package/src/network/NetworkAdapterInterface.ts +77 -0
  56. package/src/network/NetworkSubsystem.ts +16 -14
  57. package/src/network/messages.ts +60 -63
  58. package/src/storage/StorageAdapter.ts +3 -1
  59. package/src/storage/StorageAdapterInterface.ts +34 -0
  60. package/src/storage/StorageSubsystem.ts +3 -3
  61. package/src/synchronizer/CollectionSynchronizer.ts +1 -0
  62. package/src/synchronizer/DocSynchronizer.ts +22 -18
  63. package/src/synchronizer/Synchronizer.ts +11 -3
  64. package/test/CollectionSynchronizer.test.ts +7 -5
  65. package/test/DocHandle.test.ts +35 -3
  66. package/test/RemoteHeadsSubscriptions.test.ts +49 -49
  67. package/test/Repo.test.ts +71 -2
  68. package/test/StorageSubsystem.test.ts +1 -1
  69. package/test/helpers/DummyNetworkAdapter.ts +37 -5
  70. package/test/helpers/collectMessages.ts +19 -0
  71. package/test/remoteHeads.test.ts +142 -119
  72. package/.eslintrc +0 -28
  73. package/test/helpers/waitForMessages.ts +0 -22
@@ -1,11 +1,19 @@
1
1
  import { EventEmitter } from "eventemitter3";
2
- import { MessageContents, OpenDocMessage, RepoMessage, SyncStateMessage } from "../network/messages.js";
2
+ import { MessageContents, OpenDocMessage, RepoMessage } from "../network/messages.js";
3
+ import { SyncState } from "@automerge/automerge";
4
+ import { PeerId, DocumentId } from "../types.js";
3
5
  export declare abstract class Synchronizer extends EventEmitter<SynchronizerEvents> {
4
6
  abstract receiveMessage(message: RepoMessage): void;
5
7
  }
6
8
  export interface SynchronizerEvents {
7
- message: (arg: MessageContents) => void;
8
- "sync-state": (arg: SyncStateMessage) => void;
9
+ message: (payload: MessageContents) => void;
10
+ "sync-state": (payload: SyncStatePayload) => void;
9
11
  "open-doc": (arg: OpenDocMessage) => void;
10
12
  }
13
+ /** Notify the repo that the sync state has changed */
14
+ export interface SyncStatePayload {
15
+ peerId: PeerId;
16
+ documentId: DocumentId;
17
+ syncState: SyncState;
18
+ }
11
19
  //# sourceMappingURL=Synchronizer.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"Synchronizer.d.ts","sourceRoot":"","sources":["../../src/synchronizer/Synchronizer.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,eAAe,CAAA;AAC5C,OAAO,EACL,eAAe,EACf,cAAc,EACd,WAAW,EACX,gBAAgB,EACjB,MAAM,wBAAwB,CAAA;AAE/B,8BAAsB,YAAa,SAAQ,YAAY,CAAC,kBAAkB,CAAC;IACzE,QAAQ,CAAC,cAAc,CAAC,OAAO,EAAE,WAAW,GAAG,IAAI;CACpD;AAED,MAAM,WAAW,kBAAkB;IACjC,OAAO,EAAE,CAAC,GAAG,EAAE,eAAe,KAAK,IAAI,CAAA;IACvC,YAAY,EAAE,CAAC,GAAG,EAAE,gBAAgB,KAAK,IAAI,CAAA;IAC7C,UAAU,EAAE,CAAC,GAAG,EAAE,cAAc,KAAK,IAAI,CAAA;CAC1C"}
1
+ {"version":3,"file":"Synchronizer.d.ts","sourceRoot":"","sources":["../../src/synchronizer/Synchronizer.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,eAAe,CAAA;AAC5C,OAAO,EACL,eAAe,EACf,cAAc,EACd,WAAW,EACZ,MAAM,wBAAwB,CAAA;AAC/B,OAAO,EAAE,SAAS,EAAE,MAAM,sBAAsB,CAAA;AAChD,OAAO,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AAEhD,8BAAsB,YAAa,SAAQ,YAAY,CAAC,kBAAkB,CAAC;IACzE,QAAQ,CAAC,cAAc,CAAC,OAAO,EAAE,WAAW,GAAG,IAAI;CACpD;AAED,MAAM,WAAW,kBAAkB;IACjC,OAAO,EAAE,CAAC,OAAO,EAAE,eAAe,KAAK,IAAI,CAAA;IAC3C,YAAY,EAAE,CAAC,OAAO,EAAE,gBAAgB,KAAK,IAAI,CAAA;IACjD,UAAU,EAAE,CAAC,GAAG,EAAE,cAAc,KAAK,IAAI,CAAA;CAC1C;AAED,uDAAuD;AACvD,MAAM,WAAW,gBAAgB;IAC/B,MAAM,EAAE,MAAM,CAAA;IACd,UAAU,EAAE,UAAU,CAAA;IACtB,SAAS,EAAE,SAAS,CAAA;CACrB"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@automerge/automerge-repo",
3
- "version": "1.1.0-alpha.7",
3
+ "version": "1.1.1",
4
4
  "description": "A repository object to manage a collection of automerge documents",
5
5
  "repository": "https://github.com/automerge/automerge-repo/tree/master/packages/automerge-repo",
6
6
  "author": "Peter van Hardenberg <pvh@pvh.ca>",
@@ -9,9 +9,8 @@
9
9
  "main": "dist/index.js",
10
10
  "scripts": {
11
11
  "build": "tsc",
12
- "lint": "eslint --ext .ts src",
13
12
  "watch": "npm-watch build",
14
- "test:coverage": "c8 --reporter=lcov --reporter=html --reporter=text yarn test",
13
+ "test:coverage": "c8 --reporter=lcov --reporter=html --reporter=text pnpm test",
15
14
  "test": "vitest",
16
15
  "test:watch": "npm-watch test",
17
16
  "fuzz": "ts-node --esm --experimentalSpecifierResolution=node fuzz/fuzz.ts"
@@ -56,5 +55,5 @@
56
55
  "publishConfig": {
57
56
  "access": "public"
58
57
  },
59
- "gitHead": "9a4711e39c93273d992c5686257246ddfaaafddd"
58
+ "gitHead": "7e0681014b8c5f672e2abc2a653a954ccb6d7aba"
60
59
  }
@@ -13,7 +13,7 @@ export const urlPrefix = "automerge:"
13
13
  /** Given an Automerge URL, returns the DocumentId in both base58check-encoded form and binary form */
14
14
  export const parseAutomergeUrl = (url: AutomergeUrl) => {
15
15
  const regex = new RegExp(`^${urlPrefix}(\\w+)$`)
16
- const [_, docMatch] = url.match(regex) || []
16
+ const [, docMatch] = url.match(regex) || []
17
17
  const documentId = docMatch as DocumentId
18
18
  const binaryDocumentId = documentIdToBinary(documentId)
19
19
 
package/src/DocHandle.ts CHANGED
@@ -39,7 +39,7 @@ export class DocHandle<T> //
39
39
  #log: debug.Debugger
40
40
 
41
41
  #machine: DocHandleXstateMachine<T>
42
- #timeoutDelay: number
42
+ #timeoutDelay = 60_000
43
43
  #remoteHeads: Record<StorageId, A.Heads> = {}
44
44
 
45
45
  /** The URL of this document
@@ -54,20 +54,30 @@ export class DocHandle<T> //
54
54
  /** @hidden */
55
55
  constructor(
56
56
  public documentId: DocumentId,
57
- { isNew = false, timeoutDelay = 60_000 }: DocHandleOptions = {}
57
+ options: DocHandleOptions<T> = {}
58
58
  ) {
59
59
  super()
60
- this.#timeoutDelay = timeoutDelay
61
- this.#log = debug(`automerge-repo:dochandle:${this.documentId.slice(0, 5)}`)
62
60
 
63
- // initial doc
64
- let doc = A.init<T>()
61
+ this.documentId = documentId
62
+
63
+ if ("timeoutDelay" in options && options.timeoutDelay) {
64
+ this.#timeoutDelay = options.timeoutDelay
65
+ }
65
66
 
66
- // Make an empty change so that we have something to save to disk
67
+ let doc: T
68
+ const isNew = "isNew" in options && options.isNew
67
69
  if (isNew) {
68
- doc = A.emptyChange(doc, {})
70
+ // T should really be constrained to extend `Record<string, unknown>` (an automerge doc can't be
71
+ // e.g. a primitive, an array, etc. - it must be an object). But adding that constraint creates
72
+ // a bunch of other problems elsewhere so for now we'll just cast it here to make Automerge happy.
73
+ doc = A.from(options.initialValue as Record<string, unknown>) as T
74
+ doc = A.emptyChange<T>(doc)
75
+ } else {
76
+ doc = A.init<T>()
69
77
  }
70
78
 
79
+ this.#log = debug(`automerge-repo:dochandle:${this.documentId.slice(0, 5)}`)
80
+
71
81
  /**
72
82
  * Internally we use a state machine to orchestrate document loading and/or syncing, in order to
73
83
  * avoid requesting data we already have, or surfacing intermediate values to the consumer.
@@ -234,13 +244,12 @@ export class DocHandle<T> //
234
244
 
235
245
  /** Returns a promise that resolves when the docHandle is in one of the given states */
236
246
  #statePromise(awaitStates: HandleState | HandleState[]) {
237
- if (!Array.isArray(awaitStates)) awaitStates = [awaitStates]
238
- return Promise.any(
239
- awaitStates.map(state =>
240
- waitFor(this.#machine, s => s.matches(state), {
241
- timeout: this.#timeoutDelay * 2, // use a longer delay here so as not to race with other delays
242
- })
243
- )
247
+ const awaitStatesArray = Array.isArray(awaitStates) ? awaitStates : [awaitStates]
248
+ return waitFor(
249
+ this.#machine,
250
+ s => awaitStatesArray.some((state) => s.matches(state)),
251
+ // use a longer delay here so as not to race with other delays
252
+ {timeout: this.#timeoutDelay * 2}
244
253
  )
245
254
  }
246
255
 
@@ -451,10 +460,22 @@ export class DocHandle<T> //
451
460
  // WRAPPER CLASS TYPES
452
461
 
453
462
  /** @hidden */
454
- export interface DocHandleOptions {
455
- isNew?: boolean
456
- timeoutDelay?: number
457
- }
463
+ export type DocHandleOptions<T> =
464
+ // NEW DOCUMENTS
465
+ | {
466
+ /** If we know this is a new document (because we're creating it) this should be set to true. */
467
+ isNew: true
468
+
469
+ /** The initial value of the document. */
470
+ initialValue?: T
471
+ }
472
+ // EXISTING DOCUMENTS
473
+ | {
474
+ isNew?: false
475
+
476
+ /** The number of milliseconds before we mark this document as unavailable if we don't have it and nobody shares it with us. */
477
+ timeoutDelay?: number
478
+ }
458
479
 
459
480
  export interface DocHandleMessagePayload {
460
481
  destinationId: PeerId
package/src/Repo.ts CHANGED
@@ -2,22 +2,23 @@ import { next as Automerge } from "@automerge/automerge"
2
2
  import debug from "debug"
3
3
  import { EventEmitter } from "eventemitter3"
4
4
  import {
5
- generateAutomergeUrl,
6
- interpretAsDocumentId,
7
- parseAutomergeUrl,
5
+ generateAutomergeUrl,
6
+ interpretAsDocumentId,
7
+ parseAutomergeUrl,
8
8
  } from "./AutomergeUrl.js"
9
9
  import { DocHandle, DocHandleEncodedChangePayload } from "./DocHandle.js"
10
+ import { RemoteHeadsSubscriptions } from "./RemoteHeadsSubscriptions.js"
11
+ import { headsAreSame } from "./helpers/headsAreSame.js"
10
12
  import { throttle } from "./helpers/throttle.js"
11
- import { NetworkAdapter, type PeerMetadata } from "./network/NetworkAdapter.js"
13
+ import { NetworkAdapterInterface, type PeerMetadata } from "./network/NetworkAdapterInterface.js"
12
14
  import { NetworkSubsystem } from "./network/NetworkSubsystem.js"
13
- import { StorageAdapter } from "./storage/StorageAdapter.js"
15
+ import { RepoMessage } from "./network/messages.js"
16
+ import { StorageAdapterInterface } from "./storage/StorageAdapterInterface.js"
14
17
  import { StorageSubsystem } from "./storage/StorageSubsystem.js"
18
+ import { StorageId } from "./storage/types.js"
15
19
  import { CollectionSynchronizer } from "./synchronizer/CollectionSynchronizer.js"
20
+ import { SyncStatePayload } from "./synchronizer/Synchronizer.js"
16
21
  import type { AnyDocumentId, DocumentId, PeerId } from "./types.js"
17
- import { RepoMessage, SyncStateMessage } from "./network/messages.js"
18
- import { StorageId } from "./storage/types.js"
19
- import { RemoteHeadsSubscriptions } from "./RemoteHeadsSubscriptions.js"
20
- import { headsAreSame } from "./helpers/headsAreSame.js"
21
22
 
22
23
  /** A Repo is a collection of documents with networking, syncing, and storage capabilities. */
23
24
  /** The `Repo` is the main entry point of this library
@@ -52,6 +53,7 @@ export class Repo extends EventEmitter<RepoEvents> {
52
53
  peerMetadataByPeerId: Record<PeerId, PeerMetadata> = {}
53
54
 
54
55
  #remoteHeadsSubscriptions = new RemoteHeadsSubscriptions()
56
+ #remoteHeadsGossipingEnabled = false
55
57
 
56
58
  constructor({
57
59
  storage,
@@ -59,8 +61,10 @@ export class Repo extends EventEmitter<RepoEvents> {
59
61
  peerId,
60
62
  sharePolicy,
61
63
  isEphemeral = storage === undefined,
64
+ enableRemoteHeadsGossiping = false,
62
65
  }: RepoConfig) {
63
66
  super()
67
+ this.#remoteHeadsGossipingEnabled = enableRemoteHeadsGossiping
64
68
  this.#log = debug(`automerge-repo:repo`)
65
69
  this.sharePolicy = sharePolicy ?? this.sharePolicy
66
70
 
@@ -77,10 +81,7 @@ export class Repo extends EventEmitter<RepoEvents> {
77
81
  }: DocHandleEncodedChangePayload<any>) => {
78
82
  void storageSubsystem.saveDoc(handle.documentId, doc)
79
83
  }
80
- handle.on(
81
- "heads-changed",
82
- throttle(saveFn, this.saveDebounceRate)
83
- )
84
+ handle.on("heads-changed", throttle(saveFn, this.saveDebounceRate))
84
85
 
85
86
  if (isNew) {
86
87
  // this is a new document, immediately save it
@@ -140,9 +141,11 @@ export class Repo extends EventEmitter<RepoEvents> {
140
141
  networkSubsystem.send(message)
141
142
  })
142
143
 
143
- this.#synchronizer.on("open-doc", ({ peerId, documentId }) => {
144
- this.#remoteHeadsSubscriptions.subscribePeerToDoc(peerId, documentId)
145
- })
144
+ if (this.#remoteHeadsGossipingEnabled) {
145
+ this.#synchronizer.on("open-doc", ({ peerId, documentId }) => {
146
+ this.#remoteHeadsSubscriptions.subscribePeerToDoc(peerId, documentId)
147
+ })
148
+ }
146
149
 
147
150
  // STORAGE
148
151
  // The storage subsystem has access to some form of persistence, and deals with save and loading documents.
@@ -152,14 +155,10 @@ export class Repo extends EventEmitter<RepoEvents> {
152
155
  // NETWORK
153
156
  // The network subsystem deals with sending and receiving messages to and from peers.
154
157
 
155
- const myPeerMetadata: Promise<PeerMetadata> = new Promise(
156
- // eslint-disable-next-line no-async-promise-executor -- TODO: fix
157
- async (resolve) =>
158
- resolve({
159
- storageId: await storageSubsystem?.id(),
160
- isEphemeral,
161
- } as PeerMetadata)
162
- )
158
+ const myPeerMetadata: Promise<PeerMetadata> = (async () => ({
159
+ storageId: await storageSubsystem?.id(),
160
+ isEphemeral,
161
+ }))()
163
162
 
164
163
  const networkSubsystem = new NetworkSubsystem(
165
164
  network,
@@ -178,7 +177,7 @@ export class Repo extends EventEmitter<RepoEvents> {
178
177
 
179
178
  this.sharePolicy(peerId)
180
179
  .then(shouldShare => {
181
- if (shouldShare) {
180
+ if (shouldShare && this.#remoteHeadsGossipingEnabled) {
182
181
  this.#remoteHeadsSubscriptions.addGenerousPeer(peerId)
183
182
  }
184
183
  })
@@ -218,7 +217,7 @@ export class Repo extends EventEmitter<RepoEvents> {
218
217
  if (haveHeadsChanged) {
219
218
  handle.setRemoteHeads(storageId, message.syncState.theirHeads)
220
219
 
221
- if (storageId) {
220
+ if (storageId && this.#remoteHeadsGossipingEnabled) {
222
221
  this.#remoteHeadsSubscriptions.handleImmediateRemoteHeadsChanged(
223
222
  message.documentId,
224
223
  storageId,
@@ -228,45 +227,51 @@ export class Repo extends EventEmitter<RepoEvents> {
228
227
  }
229
228
  })
230
229
 
231
- this.#remoteHeadsSubscriptions.on("notify-remote-heads", message => {
232
- this.networkSubsystem.send({
233
- type: "remote-heads-changed",
234
- targetId: message.targetId,
235
- documentId: message.documentId,
236
- newHeads: {
237
- [message.storageId]: {
238
- heads: message.heads,
239
- timestamp: message.timestamp,
230
+ if (this.#remoteHeadsGossipingEnabled) {
231
+ this.#remoteHeadsSubscriptions.on("notify-remote-heads", message => {
232
+ this.networkSubsystem.send({
233
+ type: "remote-heads-changed",
234
+ targetId: message.targetId,
235
+ documentId: message.documentId,
236
+ newHeads: {
237
+ [message.storageId]: {
238
+ heads: message.heads,
239
+ timestamp: message.timestamp,
240
+ },
240
241
  },
241
- },
242
+ })
242
243
  })
243
- })
244
244
 
245
- this.#remoteHeadsSubscriptions.on("change-remote-subs", message => {
246
- this.#log("change-remote-subs", message)
247
- for (const peer of message.peers) {
248
- this.networkSubsystem.send({
249
- type: "remote-subscription-change",
250
- targetId: peer,
251
- add: message.add,
252
- remove: message.remove,
253
- })
254
- }
255
- })
245
+ this.#remoteHeadsSubscriptions.on("change-remote-subs", message => {
246
+ this.#log("change-remote-subs", message)
247
+ for (const peer of message.peers) {
248
+ this.networkSubsystem.send({
249
+ type: "remote-subscription-change",
250
+ targetId: peer,
251
+ add: message.add,
252
+ remove: message.remove,
253
+ })
254
+ }
255
+ })
256
256
 
257
- this.#remoteHeadsSubscriptions.on("remote-heads-changed", message => {
258
- const handle = this.#handleCache[message.documentId]
259
- handle.setRemoteHeads(message.storageId, message.remoteHeads)
260
- })
257
+ this.#remoteHeadsSubscriptions.on("remote-heads-changed", message => {
258
+ const handle = this.#handleCache[message.documentId]
259
+ handle.setRemoteHeads(message.storageId, message.remoteHeads)
260
+ })
261
+ }
261
262
  }
262
263
 
263
264
  #receiveMessage(message: RepoMessage) {
264
265
  switch (message.type) {
265
266
  case "remote-subscription-change":
266
- this.#remoteHeadsSubscriptions.handleControlMessage(message)
267
+ if (this.#remoteHeadsGossipingEnabled) {
268
+ this.#remoteHeadsSubscriptions.handleControlMessage(message)
269
+ }
267
270
  break
268
271
  case "remote-heads-changed":
269
- this.#remoteHeadsSubscriptions.handleRemoteHeads(message)
272
+ if (this.#remoteHeadsGossipingEnabled) {
273
+ this.#remoteHeadsSubscriptions.handleRemoteHeads(message)
274
+ }
270
275
  break
271
276
  case "sync":
272
277
  case "request":
@@ -280,17 +285,17 @@ export class Repo extends EventEmitter<RepoEvents> {
280
285
 
281
286
  #throttledSaveSyncStateHandlers: Record<
282
287
  StorageId,
283
- (message: SyncStateMessage) => void
288
+ (payload: SyncStatePayload) => void
284
289
  > = {}
285
290
 
286
291
  /** saves sync state throttled per storage id, if a peer doesn't have a storage id it's sync state is not persisted */
287
- #saveSyncState(message: SyncStateMessage) {
292
+ #saveSyncState(payload: SyncStatePayload) {
288
293
  if (!this.storageSubsystem) {
289
294
  return
290
295
  }
291
296
 
292
297
  const { storageId, isEphemeral } =
293
- this.peerMetadataByPeerId[message.peerId] || {}
298
+ this.peerMetadataByPeerId[payload.peerId] || {}
294
299
 
295
300
  if (!storageId || isEphemeral) {
296
301
  return
@@ -299,33 +304,37 @@ export class Repo extends EventEmitter<RepoEvents> {
299
304
  let handler = this.#throttledSaveSyncStateHandlers[storageId]
300
305
  if (!handler) {
301
306
  handler = this.#throttledSaveSyncStateHandlers[storageId] = throttle(
302
- ({ documentId, syncState }: SyncStateMessage) => {
303
- this.storageSubsystem!.saveSyncState(documentId, storageId, syncState)
304
- .catch(err => {
305
- this.#log("error saving sync state", { err })
306
- })
307
+ ({ documentId, syncState }: SyncStatePayload) => {
308
+ void this.storageSubsystem!.saveSyncState(
309
+ documentId,
310
+ storageId,
311
+ syncState
312
+ )
307
313
  },
308
314
  this.saveDebounceRate
309
315
  )
310
316
  }
311
317
 
312
- handler(message)
318
+ handler(payload)
313
319
  }
314
320
 
315
321
  /** Returns an existing handle if we have it; creates one otherwise. */
316
- #getHandle<T>(
322
+ #getHandle<T>({
323
+ documentId,
324
+ isNew,
325
+ initialValue,
326
+ }: {
317
327
  /** The documentId of the handle to look up or create */
318
- documentId: DocumentId,
319
-
320
- /** If we know we're creating a new document, specify this so we can have access to it immediately */
328
+ documentId: DocumentId /** If we know we're creating a new document, specify this so we can have access to it immediately */
321
329
  isNew: boolean
322
- ) {
330
+ initialValue?: T
331
+ }) {
323
332
  // If we have the handle cached, return it
324
333
  if (this.#handleCache[documentId]) return this.#handleCache[documentId]
325
334
 
326
335
  // If not, create a new handle, cache it, and return it
327
336
  if (!documentId) throw new Error(`Invalid documentId ${documentId}`)
328
- const handle = new DocHandle<T>(documentId, { isNew })
337
+ const handle = new DocHandle<T>(documentId, { isNew, initialValue })
329
338
  this.#handleCache[documentId] = handle
330
339
  return handle
331
340
  }
@@ -345,32 +354,18 @@ export class Repo extends EventEmitter<RepoEvents> {
345
354
  }
346
355
 
347
356
  /**
348
- * Creates a new document and returns a handle to it. The initial value of the document is
349
- * an empty object `{}`. Its documentId is generated by the system. we emit a `document` event
350
- * to advertise interest in the document.
357
+ * Creates a new document and returns a handle to it. The initial value of the document is an
358
+ * empty object `{}` unless an initial value is provided. Its documentId is generated by the
359
+ * system. we emit a `document` event to advertise interest in the document.
351
360
  */
352
- create<T>(): DocHandle<T> {
353
- // TODO:
354
- // either
355
- // - pass an initial value and do something like this to ensure that you get a valid initial value
356
-
357
- // const myInitialValue = {
358
- // tasks: [],
359
- // filter: "all",
360
- //
361
- // const guaranteeInitialValue = (doc: any) => {
362
- // if (!doc.tasks) doc.tasks = []
363
- // if (!doc.filter) doc.filter = "all"
364
-
365
- // return { ...myInitialValue, ...doc }
366
- // }
367
-
368
- // or
369
- // - pass a "reify" function that takes a `<any>` and returns `<T>`
370
-
361
+ create<T>(initialValue?: T): DocHandle<T> {
371
362
  // Generate a new UUID and store it in the buffer
372
363
  const { documentId } = parseAutomergeUrl(generateAutomergeUrl())
373
- const handle = this.#getHandle<T>(documentId, true) as DocHandle<T>
364
+ const handle = this.#getHandle<T>({
365
+ documentId,
366
+ isNew: true,
367
+ initialValue,
368
+ }) as DocHandle<T>
374
369
  this.emit("document", { handle, isNew: true })
375
370
  return handle
376
371
  }
@@ -436,7 +431,10 @@ export class Repo extends EventEmitter<RepoEvents> {
436
431
  return this.#handleCache[documentId]
437
432
  }
438
433
 
439
- const handle = this.#getHandle<T>(documentId, false) as DocHandle<T>
434
+ const handle = this.#getHandle<T>({
435
+ documentId,
436
+ isNew: false,
437
+ }) as DocHandle<T>
440
438
  this.emit("document", { handle, isNew: false })
441
439
  return handle
442
440
  }
@@ -447,13 +445,29 @@ export class Repo extends EventEmitter<RepoEvents> {
447
445
  ) {
448
446
  const documentId = interpretAsDocumentId(id)
449
447
 
450
- const handle = this.#getHandle(documentId, false)
448
+ const handle = this.#getHandle({ documentId, isNew: false })
451
449
  handle.delete()
452
450
 
453
451
  delete this.#handleCache[documentId]
454
452
  this.emit("delete-document", { documentId })
455
453
  }
456
454
 
455
+ /**
456
+ * Exports a document to a binary format.
457
+ * @param id - The url or documentId of the handle to export
458
+ *
459
+ * @returns Promise<Uint8Array | undefined> - A Promise containing the binary document,
460
+ * or undefined if the document is unavailable.
461
+ */
462
+ async export(id: AnyDocumentId): Promise<Uint8Array | undefined> {
463
+ const documentId = interpretAsDocumentId(id)
464
+
465
+ const handle = this.#getHandle({ documentId, isNew: false })
466
+ const doc = await handle.doc()
467
+ if (!doc) return undefined
468
+ return Automerge.save(doc)
469
+ }
470
+
457
471
  /**
458
472
  * Imports document binary into the repo.
459
473
  * @param binary - The binary to import
@@ -471,8 +485,14 @@ export class Repo extends EventEmitter<RepoEvents> {
471
485
  }
472
486
 
473
487
  subscribeToRemotes = (remotes: StorageId[]) => {
474
- this.#log("subscribeToRemotes", { remotes })
475
- this.#remoteHeadsSubscriptions.subscribeToRemotes(remotes)
488
+ if (this.#remoteHeadsGossipingEnabled) {
489
+ this.#log("subscribeToRemotes", { remotes })
490
+ this.#remoteHeadsSubscriptions.subscribeToRemotes(remotes)
491
+ } else {
492
+ this.#log(
493
+ "WARN: subscribeToRemotes called but remote heads gossiping is not enabled"
494
+ )
495
+ }
476
496
  }
477
497
 
478
498
  storageId = async (): Promise<StorageId | undefined> => {
@@ -493,16 +513,21 @@ export interface RepoConfig {
493
513
  isEphemeral?: boolean
494
514
 
495
515
  /** A storage adapter can be provided, or not */
496
- storage?: StorageAdapter
516
+ storage?: StorageAdapterInterface
497
517
 
498
518
  /** One or more network adapters must be provided */
499
- network: NetworkAdapter[]
519
+ network: NetworkAdapterInterface[]
500
520
 
501
521
  /**
502
522
  * Normal peers typically share generously with everyone (meaning we sync all our documents with
503
523
  * all peers). A server only syncs documents that a peer explicitly requests by ID.
504
524
  */
505
525
  sharePolicy?: SharePolicy
526
+
527
+ /**
528
+ * Whether to enable the experimental remote heads gossiping feature
529
+ */
530
+ enableRemoteHeadsGossiping?: boolean
506
531
  }
507
532
 
508
533
  /** A function that determines whether we should share a document with a peer
@@ -1,14 +1,6 @@
1
+ /* c8 ignore start */
2
+
1
3
  export const pause = (t = 0) =>
2
4
  new Promise<void>(resolve => setTimeout(() => resolve(), t))
3
5
 
4
- export function rejectOnTimeout<T>(
5
- promise: Promise<T>,
6
- millis: number
7
- ): Promise<T> {
8
- return Promise.race([
9
- promise,
10
- pause(millis).then(() => {
11
- throw new Error("timeout exceeded")
12
- }),
13
- ])
14
- }
6
+ /* c8 ignore end */
@@ -1,6 +1,7 @@
1
1
  import assert from "assert"
2
- import { describe, it } from "vitest"
3
- import { PeerId, Repo, type NetworkAdapter } from "../../index.js"
2
+ import { describe, expect, it } from "vitest"
3
+ import { PeerId, PeerMetadata, Repo, StorageId } from "../../index.js"
4
+ import type { NetworkAdapterInterface } from "../../network/NetworkAdapterInterface.js"
4
5
  import { eventPromise, eventPromises } from "../eventPromise.js"
5
6
  import { pause } from "../pause.js"
6
7
 
@@ -29,7 +30,10 @@ export function runAdapterTests(_setup: SetupFn, title?: string): void {
29
30
 
30
31
  describe(`Adapter acceptance tests ${title ? `(${title})` : ""}`, () => {
31
32
  it("can sync 2 repos", async () => {
32
- const doTest = async (a: NetworkAdapter[], b: NetworkAdapter[]) => {
33
+ const doTest = async (
34
+ a: NetworkAdapterInterface[],
35
+ b: NetworkAdapterInterface[]
36
+ ) => {
33
37
  const aliceRepo = new Repo({ network: a, peerId: alice })
34
38
  const bobRepo = new Repo({ network: b, peerId: bob })
35
39
 
@@ -141,12 +145,34 @@ export function runAdapterTests(_setup: SetupFn, title?: string): void {
141
145
  assert.deepStrictEqual(message, alicePresenceData)
142
146
  teardown()
143
147
  })
148
+
149
+ it("emits a peer-candidate event with proper peer metadata when a peer connects", async () => {
150
+ const { adapters, teardown } = await setup()
151
+ const a = adapters[0][0]
152
+ const b = adapters[1][0]
153
+
154
+ const bPromise = eventPromise(b, "peer-candidate")
155
+
156
+ const aPeerMetadata: PeerMetadata = { storageId: "a" as StorageId }
157
+
158
+ b.connect("b" as PeerId, { storageId: "b" as StorageId })
159
+ a.connect("a" as PeerId, aPeerMetadata)
160
+
161
+ const peerCandidate = await bPromise
162
+
163
+ expect(peerCandidate).toMatchObject({
164
+ peerId: "a",
165
+ peerMetadata: aPeerMetadata,
166
+ })
167
+
168
+ teardown()
169
+ })
144
170
  })
145
171
  }
146
172
 
147
173
  const NO_OP = () => {}
148
174
 
149
- type Network = NetworkAdapter | NetworkAdapter[]
175
+ type Network = NetworkAdapterInterface | NetworkAdapterInterface[]
150
176
 
151
177
  export type SetupFn = () => Promise<{
152
178
  adapters: [Network, Network, Network]
@@ -1,3 +1,4 @@
1
+ /* c8 ignore start */
1
2
  /**
2
3
  * If `promise` is resolved before `t` ms elapse, the timeout is cleared and the result of the
3
4
  * promise is returned. If the timeout ends first, a `TimeoutError` is thrown.
@@ -26,3 +27,4 @@ export class TimeoutError extends Error {
26
27
  this.name = "TimeoutError"
27
28
  }
28
29
  }
30
+ /* c8 ignore end */
package/src/index.ts CHANGED
@@ -34,8 +34,10 @@ export {
34
34
  } from "./AutomergeUrl.js"
35
35
  export { Repo } from "./Repo.js"
36
36
  export { NetworkAdapter } from "./network/NetworkAdapter.js"
37
- export { isValidRepoMessage } from "./network/messages.js"
37
+ export type { NetworkAdapterInterface } from "./network/NetworkAdapterInterface.js"
38
+ export { isRepoMessage } from "./network/messages.js"
38
39
  export { StorageAdapter } from "./storage/StorageAdapter.js"
40
+ export type { StorageAdapterInterface } from "./storage/StorageAdapterInterface.js"
39
41
 
40
42
  /** @hidden **/
41
43
  export * as cbor from "./helpers/cbor.js"
@@ -68,7 +70,7 @@ export type {
68
70
  PeerCandidatePayload,
69
71
  PeerDisconnectedPayload,
70
72
  PeerMetadata,
71
- } from "./network/NetworkAdapter.js"
73
+ } from "./network/NetworkAdapterInterface.js"
72
74
 
73
75
  export type {
74
76
  DocumentUnavailableMessage,