@automerge/automerge-repo 1.1.0-alpha.1 → 1.1.0-alpha.13

This diff shows the contents of the two publicly released package versions as they appear in their public registry, and is provided for informational purposes only.
Files changed (58)
  1. package/README.md +12 -7
  2. package/dist/AutomergeUrl.js +2 -2
  3. package/dist/RemoteHeadsSubscriptions.d.ts +1 -0
  4. package/dist/RemoteHeadsSubscriptions.d.ts.map +1 -1
  5. package/dist/RemoteHeadsSubscriptions.js +76 -16
  6. package/dist/Repo.d.ts +23 -10
  7. package/dist/Repo.d.ts.map +1 -1
  8. package/dist/Repo.js +103 -54
  9. package/dist/helpers/debounce.js +1 -1
  10. package/dist/helpers/pause.d.ts.map +1 -1
  11. package/dist/helpers/pause.js +2 -0
  12. package/dist/helpers/throttle.js +1 -1
  13. package/dist/helpers/withTimeout.d.ts.map +1 -1
  14. package/dist/helpers/withTimeout.js +2 -0
  15. package/dist/index.d.ts +2 -2
  16. package/dist/index.d.ts.map +1 -1
  17. package/dist/index.js +1 -1
  18. package/dist/network/NetworkAdapter.d.ts +14 -7
  19. package/dist/network/NetworkAdapter.d.ts.map +1 -1
  20. package/dist/network/NetworkAdapter.js +3 -3
  21. package/dist/network/NetworkSubsystem.d.ts +4 -8
  22. package/dist/network/NetworkSubsystem.d.ts.map +1 -1
  23. package/dist/network/NetworkSubsystem.js +12 -13
  24. package/dist/network/messages.d.ts +48 -38
  25. package/dist/network/messages.d.ts.map +1 -1
  26. package/dist/network/messages.js +7 -9
  27. package/dist/storage/StorageSubsystem.d.ts.map +1 -1
  28. package/dist/storage/StorageSubsystem.js +7 -2
  29. package/dist/storage/keyHash.d.ts.map +1 -1
  30. package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
  31. package/dist/synchronizer/CollectionSynchronizer.js +5 -3
  32. package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -1
  33. package/dist/synchronizer/DocSynchronizer.js +20 -8
  34. package/dist/synchronizer/Synchronizer.d.ts +12 -3
  35. package/dist/synchronizer/Synchronizer.d.ts.map +1 -1
  36. package/package.json +6 -6
  37. package/src/AutomergeUrl.ts +2 -2
  38. package/src/RemoteHeadsSubscriptions.ts +85 -16
  39. package/src/Repo.ts +131 -68
  40. package/src/helpers/debounce.ts +1 -1
  41. package/src/helpers/pause.ts +4 -0
  42. package/src/helpers/throttle.ts +1 -1
  43. package/src/helpers/withTimeout.ts +2 -0
  44. package/src/index.ts +2 -1
  45. package/src/network/NetworkAdapter.ts +18 -12
  46. package/src/network/NetworkSubsystem.ts +23 -24
  47. package/src/network/messages.ts +77 -68
  48. package/src/storage/StorageSubsystem.ts +7 -2
  49. package/src/storage/keyHash.ts +2 -0
  50. package/src/synchronizer/CollectionSynchronizer.ts +7 -4
  51. package/src/synchronizer/DocSynchronizer.ts +27 -15
  52. package/src/synchronizer/Synchronizer.ts +13 -3
  53. package/test/RemoteHeadsSubscriptions.test.ts +34 -24
  54. package/test/Repo.test.ts +57 -2
  55. package/test/StorageSubsystem.test.ts +1 -1
  56. package/test/helpers/waitForMessages.ts +22 -0
  57. package/test/remoteHeads.test.ts +197 -72
  58. package/.eslintrc +0 -28
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@automerge/automerge-repo",
-   "version": "1.1.0-alpha.1",
+   "version": "1.1.0-alpha.13",
    "description": "A repository object to manage a collection of automerge documents",
    "repository": "https://github.com/automerge/automerge-repo/tree/master/packages/automerge-repo",
    "author": "Peter van Hardenberg <pvh@pvh.ca>",
@@ -10,20 +10,20 @@
    "scripts": {
      "build": "tsc",
      "watch": "npm-watch build",
-     "test:coverage": "c8 --reporter=lcov --reporter=html --reporter=text yarn test",
+     "test:coverage": "c8 --reporter=lcov --reporter=html --reporter=text pnpm test",
      "test": "vitest",
      "test:watch": "npm-watch test",
-     "test:log": "cross-env DEBUG='automerge-repo:*' yarn test",
      "fuzz": "ts-node --esm --experimentalSpecifierResolution=node fuzz/fuzz.ts"
    },
    "browser": {
      "crypto": false
    },
    "devDependencies": {
-     "http-server": "^14.1.0"
+     "http-server": "^14.1.0",
+     "vite": "^5.0.8"
    },
    "dependencies": {
-     "@automerge/automerge": "^2.1.8-alpha.1",
+     "@automerge/automerge": "^2.1.9",
      "bs58check": "^3.0.1",
      "cbor-x": "^1.3.0",
      "debug": "^4.3.4",
@@ -55,5 +55,5 @@
    "publishConfig": {
      "access": "public"
    },
-   "gitHead": "11805d698f860bd6ffb3ca028d3b57e718690b5a"
+   "gitHead": "f4ce1376d900ad98f00a638626be9611077460b5"
  }
package/src/AutomergeUrl.ts CHANGED
@@ -13,7 +13,7 @@ export const urlPrefix = "automerge:"
  /** Given an Automerge URL, returns the DocumentId in both base58check-encoded form and binary form */
  export const parseAutomergeUrl = (url: AutomergeUrl) => {
    const regex = new RegExp(`^${urlPrefix}(\\w+)$`)
-   const [_, docMatch] = url.match(regex) || []
+   const [, docMatch] = url.match(regex) || []
    const documentId = docMatch as DocumentId
    const binaryDocumentId = documentIdToBinary(documentId)

@@ -33,7 +33,7 @@ export const parseAutomergeUrl = (url: AutomergeUrl) => {
  export const stringifyAutomergeUrl = (
    arg: UrlOptions | DocumentId | BinaryDocumentId
  ) => {
-   let documentId =
+   const documentId =
      arg instanceof Uint8Array || typeof arg === "string"
        ? arg
        : "documentId" in arg
package/src/RemoteHeadsSubscriptions.ts CHANGED
@@ -44,6 +44,9 @@ export class RemoteHeadsSubscriptions extends EventEmitter<RemoteHeadsSubscripti
    #theirSubscriptions: Map<StorageId, Set<PeerId>> = new Map()
    // Peers we will always share remote heads with even if they are not subscribed
    #generousPeers: Set<PeerId> = new Set()
+   // Documents each peer has open, we need this information so we only send remote heads of documents that the peer knows
+   #subscribedDocsByPeer: Map<PeerId, Set<DocumentId>> = new Map()
+
    #log = debug("automerge-repo:remote-heads-subscriptions")

    subscribeToRemotes(remotes: StorageId[]) {
@@ -89,11 +92,17 @@ export class RemoteHeadsSubscriptions extends EventEmitter<RemoteHeadsSubscripti
  handleControlMessage(control: RemoteSubscriptionControlMessage) {
    const remotesToAdd: StorageId[] = []
    const remotesToRemove: StorageId[] = []
+   const addedRemotesWeKnow: StorageId[] = []

    this.#log("handleControlMessage", control)
    if (control.add) {
      for (const remote of control.add) {
        let theirSubs = this.#theirSubscriptions.get(remote)
+
+       if (this.#ourSubscriptions.has(remote) || theirSubs) {
+         addedRemotesWeKnow.push(remote)
+       }
+
        if (!theirSubs) {
          theirSubs = new Set()
          this.#theirSubscriptions.set(remote, theirSubs)
@@ -128,6 +137,30 @@ export class RemoteHeadsSubscriptions extends EventEmitter<RemoteHeadsSubscripti
        remove: remotesToRemove,
      })
    }
+
+   // send all our stored heads of documents the peer knows for the remotes they've added
+   for (const remote of addedRemotesWeKnow) {
+     const subscribedDocs = this.#subscribedDocsByPeer.get(control.senderId)
+     if (subscribedDocs) {
+       for (const documentId of subscribedDocs) {
+         const knownHeads = this.#knownHeads.get(documentId)
+         if (!knownHeads) {
+           continue
+         }
+
+         const lastHeads = knownHeads.get(remote)
+         if (lastHeads) {
+           this.emit("notify-remote-heads", {
+             targetId: control.senderId,
+             documentId,
+             heads: lastHeads.heads,
+             timestamp: lastHeads.timestamp,
+             storageId: remote,
+           })
+         }
+       }
+     }
+   }
  }

  /** A peer we are not directly connected to has changed their heads */
@@ -165,13 +198,15 @@ export class RemoteHeadsSubscriptions extends EventEmitter<RemoteHeadsSubscripti
    const theirSubs = this.#theirSubscriptions.get(event.storageId)
    if (theirSubs) {
      for (const peerId of theirSubs) {
-       this.emit("notify-remote-heads", {
-         targetId: peerId,
-         documentId: event.documentId,
-         heads: event.remoteHeads,
-         timestamp: event.timestamp,
-         storageId: event.storageId,
-       })
+       if (this.#isPeerSubscribedToDoc(peerId, event.documentId)) {
+         this.emit("notify-remote-heads", {
+           targetId: peerId,
+           documentId: event.documentId,
+           heads: event.remoteHeads,
+           timestamp: event.timestamp,
+           storageId: event.storageId,
+         })
+       }
      }
    }
  }
@@ -200,13 +235,15 @@ export class RemoteHeadsSubscriptions extends EventEmitter<RemoteHeadsSubscripti
    const theirSubs = this.#theirSubscriptions.get(storageId)
    if (theirSubs) {
      for (const peerId of theirSubs) {
-       this.emit("notify-remote-heads", {
-         targetId: peerId,
-         documentId: documentId,
-         heads: heads,
-         timestamp: timestamp,
-         storageId: storageId,
-       })
+       if (this.#isPeerSubscribedToDoc(peerId, documentId)) {
+         this.emit("notify-remote-heads", {
+           targetId: peerId,
+           documentId: documentId,
+           heads: heads,
+           timestamp: timestamp,
+           storageId: storageId,
+         })
+       }
      }
    }
  }
@@ -241,6 +278,7 @@ export class RemoteHeadsSubscriptions extends EventEmitter<RemoteHeadsSubscripti
    const remotesToRemove = []

    this.#generousPeers.delete(peerId)
+   this.#subscribedDocsByPeer.delete(peerId)

    for (const [storageId, peerIds] of this.#theirSubscriptions) {
      if (peerIds.has(peerId)) {
@@ -261,6 +299,37 @@ export class RemoteHeadsSubscriptions extends EventEmitter<RemoteHeadsSubscripti
      }
    }

+   subscribePeerToDoc(peerId: PeerId, documentId: DocumentId) {
+     let subscribedDocs = this.#subscribedDocsByPeer.get(peerId)
+     if (!subscribedDocs) {
+       subscribedDocs = new Set()
+       this.#subscribedDocsByPeer.set(peerId, subscribedDocs)
+     }
+
+     subscribedDocs.add(documentId)
+
+     const remoteHeads = this.#knownHeads.get(documentId)
+     if (remoteHeads) {
+       for (const [storageId, lastHeads] of remoteHeads) {
+         const subscribedPeers = this.#theirSubscriptions.get(storageId)
+         if (subscribedPeers && subscribedPeers.has(peerId)) {
+           this.emit("notify-remote-heads", {
+             targetId: peerId,
+             documentId,
+             heads: lastHeads.heads,
+             timestamp: lastHeads.timestamp,
+             storageId,
+           })
+         }
+       }
+     }
+   }
+
+   #isPeerSubscribedToDoc(peerId: PeerId, documentId: DocumentId) {
+     const subscribedDocs = this.#subscribedDocsByPeer.get(peerId)
+     return subscribedDocs && subscribedDocs.has(documentId)
+   }
+
    /** Returns the (document, storageId) pairs which have changed after processing msg */
    #changedHeads(msg: RemoteHeadsChanged): {
      documentId: DocumentId
@@ -279,12 +348,12 @@ export class RemoteHeadsSubscriptions extends EventEmitter<RemoteHeadsSubscripti
      }
      let remote = this.#knownHeads.get(documentId)
      if (!remote) {
-       remote = new Map([[storageId as StorageId, { heads, timestamp }]])
+       remote = new Map()
        this.#knownHeads.set(documentId, remote)
      }

      const docRemote = remote.get(storageId as StorageId)
-     if (docRemote && docRemote.timestamp > timestamp) {
+     if (docRemote && docRemote.timestamp >= timestamp) {
        continue
      } else {
        remote.set(storageId as StorageId, { timestamp, heads })
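
The net effect of these hunks: the subscription bookkeeping now also tracks which documents each peer has open, and remote-heads notifications are only forwarded for those documents. A standalone sketch of that bookkeeping (plain string aliases stand in for the library's branded PeerId/DocumentId types; this is not the internal class itself):

    type PeerId = string
    type DocumentId = string

    // Documents each peer currently has open.
    const subscribedDocsByPeer = new Map<PeerId, Set<DocumentId>>()

    function subscribePeerToDoc(peerId: PeerId, documentId: DocumentId): void {
      let docs = subscribedDocsByPeer.get(peerId)
      if (!docs) {
        docs = new Set()
        subscribedDocsByPeer.set(peerId, docs)
      }
      docs.add(documentId)
    }

    function isPeerSubscribedToDoc(peerId: PeerId, documentId: DocumentId): boolean {
      return subscribedDocsByPeer.get(peerId)?.has(documentId) ?? false
    }

    // Consulted before emitting "notify-remote-heads", so a peer never hears
    // about heads for a document it has not opened.
    function notifyIfSubscribed(peerId: PeerId, documentId: DocumentId, notify: () => void) {
      if (isPeerSubscribedToDoc(peerId, documentId)) notify()
    }
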
package/src/Repo.ts CHANGED
@@ -7,17 +7,18 @@ import {
    parseAutomergeUrl,
  } from "./AutomergeUrl.js"
  import { DocHandle, DocHandleEncodedChangePayload } from "./DocHandle.js"
+ import { RemoteHeadsSubscriptions } from "./RemoteHeadsSubscriptions.js"
+ import { headsAreSame } from "./helpers/headsAreSame.js"
  import { throttle } from "./helpers/throttle.js"
- import { NetworkAdapter } from "./network/NetworkAdapter.js"
+ import { NetworkAdapter, type PeerMetadata } from "./network/NetworkAdapter.js"
  import { NetworkSubsystem } from "./network/NetworkSubsystem.js"
+ import { RepoMessage } from "./network/messages.js"
  import { StorageAdapter } from "./storage/StorageAdapter.js"
  import { StorageSubsystem } from "./storage/StorageSubsystem.js"
+ import { StorageId } from "./storage/types.js"
  import { CollectionSynchronizer } from "./synchronizer/CollectionSynchronizer.js"
+ import { SyncStatePayload } from "./synchronizer/Synchronizer.js"
  import type { AnyDocumentId, DocumentId, PeerId } from "./types.js"
- import { RepoMessage, SyncStateMessage } from "./network/messages.js"
- import { StorageId } from "./storage/types.js"
- import { RemoteHeadsSubscriptions } from "./RemoteHeadsSubscriptions.js"
- import { headsAreSame } from "./helpers/headsAreSame.js"

  /** A Repo is a collection of documents with networking, syncing, and storage capabilities. */
  /** The `Repo` is the main entry point of this library
@@ -47,11 +48,12 @@ export class Repo extends EventEmitter<RepoEvents> {
    /** @hidden */
    sharePolicy: SharePolicy = async () => true

-   /** maps peer id to to persistance information (storageId, isEphemeral), access by collection synchronizer */
+   /** maps peer id to to persistence information (storageId, isEphemeral), access by collection synchronizer */
    /** @hidden */
-   persistanceInfoByPeerId: Record<PeerId, PersistanceInfo> = {}
+   peerMetadataByPeerId: Record<PeerId, PeerMetadata> = {}

    #remoteHeadsSubscriptions = new RemoteHeadsSubscriptions()
+   #remoteHeadsGossipingEnabled = false

    constructor({
      storage,
@@ -59,8 +61,10 @@ export class Repo extends EventEmitter<RepoEvents> {
      peerId,
      sharePolicy,
      isEphemeral = storage === undefined,
+     enableRemoteHeadsGossiping = false,
    }: RepoConfig) {
      super()
+     this.#remoteHeadsGossipingEnabled = enableRemoteHeadsGossiping
      this.#log = debug(`automerge-repo:repo`)
      this.sharePolicy = sharePolicy ?? this.sharePolicy

@@ -77,10 +81,7 @@ export class Repo extends EventEmitter<RepoEvents> {
      }: DocHandleEncodedChangePayload<any>) => {
        void storageSubsystem.saveDoc(handle.documentId, doc)
      }
-     const debouncedSaveFn = handle.on(
-       "heads-changed",
-       throttle(saveFn, this.saveDebounceRate)
-     )
+     handle.on("heads-changed", throttle(saveFn, this.saveDebounceRate))

      if (isNew) {
        // this is a new document, immediately save it
@@ -140,6 +141,12 @@ export class Repo extends EventEmitter<RepoEvents> {
        networkSubsystem.send(message)
      })

+     if (this.#remoteHeadsGossipingEnabled) {
+       this.#synchronizer.on("open-doc", ({ peerId, documentId }) => {
+         this.#remoteHeadsSubscriptions.subscribePeerToDoc(peerId, documentId)
+       })
+     }
+
      // STORAGE
      // The storage subsystem has access to some form of persistence, and deals with save and loading documents.
      const storageSubsystem = storage ? new StorageSubsystem(storage) : undefined
@@ -147,28 +154,34 @@ export class Repo extends EventEmitter<RepoEvents> {

      // NETWORK
      // The network subsystem deals with sending and receiving messages to and from peers.
+
+     const myPeerMetadata: Promise<PeerMetadata> = new Promise(
+       // eslint-disable-next-line no-async-promise-executor -- TODO: fix
+       async resolve =>
+         resolve({
+           storageId: await storageSubsystem?.id(),
+           isEphemeral,
+         } as PeerMetadata)
+     )
+
      const networkSubsystem = new NetworkSubsystem(
        network,
        peerId,
-       storageSubsystem?.id() ?? Promise.resolve(undefined),
-       isEphemeral
+       myPeerMetadata
      )
      this.networkSubsystem = networkSubsystem

      // When we get a new peer, register it with the synchronizer
-     networkSubsystem.on("peer", async ({ peerId, storageId, isEphemeral }) => {
+     networkSubsystem.on("peer", async ({ peerId, peerMetadata }) => {
        this.#log("peer connected", { peerId })

-       if (storageId) {
-         this.persistanceInfoByPeerId[peerId] = {
-           storageId,
-           isEphemeral,
-         }
+       if (peerMetadata) {
+         this.peerMetadataByPeerId[peerId] = { ...peerMetadata }
        }

        this.sharePolicy(peerId)
          .then(shouldShare => {
-           if (shouldShare) {
+           if (shouldShare && this.#remoteHeadsGossipingEnabled) {
              this.#remoteHeadsSubscriptions.addGenerousPeer(peerId)
            }
          })
@@ -195,12 +208,11 @@ export class Repo extends EventEmitter<RepoEvents> {

        const handle = this.#handleCache[message.documentId]

-       const info = this.persistanceInfoByPeerId[message.peerId]
-       if (!info) {
+       const { storageId } = this.peerMetadataByPeerId[message.peerId] || {}
+       if (!storageId) {
          return
        }

-       const { storageId } = info
        const heads = handle.getRemoteHeads(storageId)
        const haveHeadsChanged =
          message.syncState.theirHeads &&
@@ -209,7 +221,7 @@ export class Repo extends EventEmitter<RepoEvents> {
        if (haveHeadsChanged) {
          handle.setRemoteHeads(storageId, message.syncState.theirHeads)

-         if (storageId) {
+         if (storageId && this.#remoteHeadsGossipingEnabled) {
            this.#remoteHeadsSubscriptions.handleImmediateRemoteHeadsChanged(
              message.documentId,
              storageId,
@@ -219,45 +231,51 @@ export class Repo extends EventEmitter<RepoEvents> {
        }
      })

-     this.#remoteHeadsSubscriptions.on("notify-remote-heads", message => {
-       this.networkSubsystem.send({
-         type: "remote-heads-changed",
-         targetId: message.targetId,
-         documentId: message.documentId,
-         newHeads: {
-           [message.storageId]: {
-             heads: message.heads,
-             timestamp: message.timestamp,
+     if (this.#remoteHeadsGossipingEnabled) {
+       this.#remoteHeadsSubscriptions.on("notify-remote-heads", message => {
+         this.networkSubsystem.send({
+           type: "remote-heads-changed",
+           targetId: message.targetId,
+           documentId: message.documentId,
+           newHeads: {
+             [message.storageId]: {
+               heads: message.heads,
+               timestamp: message.timestamp,
+             },
            },
-         },
+         })
        })
-     })

-     this.#remoteHeadsSubscriptions.on("change-remote-subs", message => {
-       this.#log("change-remote-subs", message)
-       for (const peer of message.peers) {
-         this.networkSubsystem.send({
-           type: "remote-subscription-change",
-           targetId: peer,
-           add: message.add,
-           remove: message.remove,
-         })
-       }
-     })
+       this.#remoteHeadsSubscriptions.on("change-remote-subs", message => {
+         this.#log("change-remote-subs", message)
+         for (const peer of message.peers) {
+           this.networkSubsystem.send({
+             type: "remote-subscription-change",
+             targetId: peer,
+             add: message.add,
+             remove: message.remove,
+           })
+         }
+       })

-     this.#remoteHeadsSubscriptions.on("remote-heads-changed", message => {
-       const handle = this.#handleCache[message.documentId]
-       handle.setRemoteHeads(message.storageId, message.remoteHeads)
-     })
+       this.#remoteHeadsSubscriptions.on("remote-heads-changed", message => {
+         const handle = this.#handleCache[message.documentId]
+         handle.setRemoteHeads(message.storageId, message.remoteHeads)
+       })
+     }
    }

    #receiveMessage(message: RepoMessage) {
      switch (message.type) {
        case "remote-subscription-change":
-         this.#remoteHeadsSubscriptions.handleControlMessage(message)
+         if (this.#remoteHeadsGossipingEnabled) {
+           this.#remoteHeadsSubscriptions.handleControlMessage(message)
+         }
          break
        case "remote-heads-changed":
-         this.#remoteHeadsSubscriptions.handleRemoteHeads(message)
+         if (this.#remoteHeadsGossipingEnabled) {
+           this.#remoteHeadsSubscriptions.handleRemoteHeads(message)
+         }
          break
        case "sync":
        case "request":
@@ -271,34 +289,37 @@ export class Repo extends EventEmitter<RepoEvents> {

    #throttledSaveSyncStateHandlers: Record<
      StorageId,
-     (message: SyncStateMessage) => void
+     (payload: SyncStatePayload) => void
    > = {}

    /** saves sync state throttled per storage id, if a peer doesn't have a storage id it's sync state is not persisted */
-   #saveSyncState(message: SyncStateMessage) {
+   #saveSyncState(payload: SyncStatePayload) {
      if (!this.storageSubsystem) {
        return
      }

-     const persistanceInfo = this.persistanceInfoByPeerId[message.peerId]
+     const { storageId, isEphemeral } =
+       this.peerMetadataByPeerId[payload.peerId] || {}

-     if (!persistanceInfo || persistanceInfo.isEphemeral) {
+     if (!storageId || isEphemeral) {
        return
      }

-     const { storageId } = persistanceInfo
-
      let handler = this.#throttledSaveSyncStateHandlers[storageId]
      if (!handler) {
        handler = this.#throttledSaveSyncStateHandlers[storageId] = throttle(
-         ({ documentId, syncState }: SyncStateMessage) => {
-           this.storageSubsystem!.saveSyncState(documentId, storageId, syncState)
+         ({ documentId, syncState }: SyncStatePayload) => {
+           void this.storageSubsystem!.saveSyncState(
+             documentId,
+             storageId,
+             syncState
+           )
          },
          this.saveDebounceRate
        )
      }

-     handler(message)
+     handler(payload)
    }

    /** Returns an existing handle if we have it; creates one otherwise. */
@@ -329,6 +350,10 @@ export class Repo extends EventEmitter<RepoEvents> {
      return this.#synchronizer.peers
    }

+   getStorageIdOfPeer(peerId: PeerId): StorageId | undefined {
+     return this.peerMetadataByPeerId[peerId]?.storageId
+   }
+
    /**
     * Creates a new document and returns a handle to it. The initial value of the document is
     * an empty object `{}`. Its documentId is generated by the system. we emit a `document` event
@@ -439,9 +464,47 @@ export class Repo extends EventEmitter<RepoEvents> {
      this.emit("delete-document", { documentId })
    }

+   /**
+    * Exports a document to a binary format.
+    * @param id - The url or documentId of the handle to export
+    *
+    * @returns Promise<Uint8Array | undefined> - A Promise containing the binary document,
+    * or undefined if the document is unavailable.
+    */
+   async export(id: AnyDocumentId): Promise<Uint8Array | undefined> {
+     const documentId = interpretAsDocumentId(id)
+
+     const handle = this.#getHandle(documentId, false)
+     const doc = await handle.doc()
+     if (!doc) return undefined
+     return Automerge.save(doc)
+   }
+
+   /**
+    * Imports document binary into the repo.
+    * @param binary - The binary to import
+    */
+   import<T>(binary: Uint8Array) {
+     const doc = Automerge.load<T>(binary)
+
+     const handle = this.create<T>()
+
+     handle.update(() => {
+       return Automerge.clone(doc)
+     })
+
+     return handle
+   }
+
    subscribeToRemotes = (remotes: StorageId[]) => {
-     this.#log("subscribeToRemotes", { remotes })
-     this.#remoteHeadsSubscriptions.subscribeToRemotes(remotes)
+     if (this.#remoteHeadsGossipingEnabled) {
+       this.#log("subscribeToRemotes", { remotes })
+       this.#remoteHeadsSubscriptions.subscribeToRemotes(remotes)
+     } else {
+       this.#log(
+         "WARN: subscribeToRemotes called but remote heads gossiping is not enabled"
+       )
+     }
    }

    storageId = async (): Promise<StorageId | undefined> => {
@@ -453,11 +516,6 @@ export class Repo extends EventEmitter<RepoEvents> {
    }
  }

- interface PersistanceInfo {
-   storageId: StorageId
-   isEphemeral: boolean
- }
-
  export interface RepoConfig {
    /** Our unique identifier */
    peerId?: PeerId
@@ -477,6 +535,11 @@ export interface RepoConfig {
     * all peers). A server only syncs documents that a peer explicitly requests by ID.
     */
    sharePolicy?: SharePolicy
+
+   /**
+    * Whether to enable the experimental remote heads gossiping feature
+    */
+   enableRemoteHeadsGossiping?: boolean
  }

  /** A function that determines whether we should share a document with a peer
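
Taken together, the Repo changes add an opt-in gossiping flag plus a binary export/import round trip and a peer-metadata lookup. A hedged usage sketch of those additions as they appear in this diff (no adapters are configured, the document shape is invented, and the snippet assumes an async/ESM context):

    import { Repo } from "@automerge/automerge-repo"

    const repo = new Repo({
      network: [], // real network/storage adapters go here
      enableRemoteHeadsGossiping: true, // new flag; defaults to false
    })

    // Round-trip a document through the new export()/import() methods.
    const handle = repo.create<{ count: number }>()
    handle.change(d => {
      d.count = 1
    })

    const bytes = await repo.export(handle.url) // Uint8Array | undefined
    if (bytes) {
      const copy = repo.import<{ count: number }>(bytes)
      console.log(copy.url !== handle.url) // import() always creates a new document
    }

    // Peer metadata lookups replace the old persistance-info map.
    for (const peerId of repo.peers) {
      console.log(peerId, repo.getStorageIdOfPeer(peerId)) // StorageId | undefined
    }
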
package/src/helpers/debounce.ts CHANGED
@@ -19,7 +19,7 @@ export const throttle = <F extends (...args: Parameters<F>) => ReturnType<F>>(
    return function (...args: Parameters<F>) {
      clearTimeout(timeout)
      timeout = setTimeout(() => {
-       fn.apply(null, args)
+       fn(...args)
      }, rate)
    }
  }
package/src/helpers/pause.ts CHANGED
@@ -1,3 +1,5 @@
+ /* c8 ignore start */
+
  export const pause = (t = 0) =>
    new Promise<void>(resolve => setTimeout(() => resolve(), t))

@@ -12,3 +14,5 @@ export function rejectOnTimeout<T>(
      }),
    ])
  }
+
+ /* c8 ignore end */
package/src/helpers/throttle.ts CHANGED
@@ -36,7 +36,7 @@ export const throttle = <F extends (...args: Parameters<F>) => ReturnType<F>>(
      wait = lastCall + delay - Date.now()
      clearTimeout(timeout)
      timeout = setTimeout(() => {
-       fn.apply(null, args)
+       fn(...args)
        lastCall = Date.now()
      }, wait)
    }
package/src/helpers/withTimeout.ts CHANGED
@@ -1,3 +1,4 @@
+ /* c8 ignore start */
  /**
   * If `promise` is resolved before `t` ms elapse, the timeout is cleared and the result of the
   * promise is returned. If the timeout ends first, a `TimeoutError` is thrown.
@@ -26,3 +27,4 @@ export class TimeoutError extends Error {
      this.name = "TimeoutError"
    }
  }
+ /* c8 ignore end */
package/src/index.ts CHANGED
@@ -34,7 +34,7 @@ export {
  } from "./AutomergeUrl.js"
  export { Repo } from "./Repo.js"
  export { NetworkAdapter } from "./network/NetworkAdapter.js"
- export { isValidRepoMessage } from "./network/messages.js"
+ export { isRepoMessage } from "./network/messages.js"
  export { StorageAdapter } from "./storage/StorageAdapter.js"

  /** @hidden **/
@@ -67,6 +67,7 @@ export type {
    OpenPayload,
    PeerCandidatePayload,
    PeerDisconnectedPayload,
+   PeerMetadata,
  } from "./network/NetworkAdapter.js"

  export type {
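
As seen from a consumer, the rename and the new type export look like this (the incoming message is left abstract; only names visible in these hunks are assumed):

    import { isRepoMessage, type PeerMetadata } from "@automerge/automerge-repo"

    // PeerMetadata bundles the storageId/isEphemeral pair that used to travel separately.
    const metadata: PeerMetadata = { isEphemeral: true }

    // isRepoMessage is the renamed guard (formerly isValidRepoMessage).
    declare const incoming: Parameters<typeof isRepoMessage>[0]
    if (isRepoMessage(incoming)) {
      // narrowed to a repo message (sync, request, remote-heads, ...) here
    }
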