@automerge/automerge-repo 2.0.0-alpha.7 → 2.0.0-beta.2

This diff shows the changes between publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
Files changed (79)
  1. package/README.md +8 -8
  2. package/dist/AutomergeUrl.d.ts +17 -5
  3. package/dist/AutomergeUrl.d.ts.map +1 -1
  4. package/dist/AutomergeUrl.js +71 -24
  5. package/dist/DocHandle.d.ts +68 -45
  6. package/dist/DocHandle.d.ts.map +1 -1
  7. package/dist/DocHandle.js +166 -69
  8. package/dist/FindProgress.d.ts +30 -0
  9. package/dist/FindProgress.d.ts.map +1 -0
  10. package/dist/FindProgress.js +1 -0
  11. package/dist/RemoteHeadsSubscriptions.d.ts +4 -5
  12. package/dist/RemoteHeadsSubscriptions.d.ts.map +1 -1
  13. package/dist/RemoteHeadsSubscriptions.js +4 -1
  14. package/dist/Repo.d.ts +46 -6
  15. package/dist/Repo.d.ts.map +1 -1
  16. package/dist/Repo.js +252 -67
  17. package/dist/helpers/abortable.d.ts +36 -0
  18. package/dist/helpers/abortable.d.ts.map +1 -0
  19. package/dist/helpers/abortable.js +47 -0
  20. package/dist/helpers/arraysAreEqual.d.ts.map +1 -1
  21. package/dist/helpers/bufferFromHex.d.ts +3 -0
  22. package/dist/helpers/bufferFromHex.d.ts.map +1 -0
  23. package/dist/helpers/bufferFromHex.js +13 -0
  24. package/dist/helpers/debounce.d.ts.map +1 -1
  25. package/dist/helpers/eventPromise.d.ts.map +1 -1
  26. package/dist/helpers/headsAreSame.d.ts +2 -2
  27. package/dist/helpers/headsAreSame.d.ts.map +1 -1
  28. package/dist/helpers/mergeArrays.d.ts +1 -1
  29. package/dist/helpers/mergeArrays.d.ts.map +1 -1
  30. package/dist/helpers/pause.d.ts.map +1 -1
  31. package/dist/helpers/tests/network-adapter-tests.d.ts.map +1 -1
  32. package/dist/helpers/tests/network-adapter-tests.js +13 -13
  33. package/dist/helpers/tests/storage-adapter-tests.d.ts +2 -2
  34. package/dist/helpers/tests/storage-adapter-tests.d.ts.map +1 -1
  35. package/dist/helpers/tests/storage-adapter-tests.js +25 -48
  36. package/dist/helpers/throttle.d.ts.map +1 -1
  37. package/dist/helpers/withTimeout.d.ts.map +1 -1
  38. package/dist/index.d.ts +2 -1
  39. package/dist/index.d.ts.map +1 -1
  40. package/dist/index.js +1 -1
  41. package/dist/network/messages.d.ts.map +1 -1
  42. package/dist/storage/StorageSubsystem.d.ts +15 -1
  43. package/dist/storage/StorageSubsystem.d.ts.map +1 -1
  44. package/dist/storage/StorageSubsystem.js +50 -14
  45. package/dist/synchronizer/CollectionSynchronizer.d.ts +4 -3
  46. package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
  47. package/dist/synchronizer/CollectionSynchronizer.js +34 -15
  48. package/dist/synchronizer/DocSynchronizer.d.ts +3 -2
  49. package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -1
  50. package/dist/synchronizer/DocSynchronizer.js +51 -27
  51. package/dist/synchronizer/Synchronizer.d.ts +11 -0
  52. package/dist/synchronizer/Synchronizer.d.ts.map +1 -1
  53. package/dist/types.d.ts +4 -1
  54. package/dist/types.d.ts.map +1 -1
  55. package/fuzz/fuzz.ts +3 -3
  56. package/package.json +3 -4
  57. package/src/AutomergeUrl.ts +101 -26
  58. package/src/DocHandle.ts +235 -82
  59. package/src/FindProgress.ts +48 -0
  60. package/src/RemoteHeadsSubscriptions.ts +11 -9
  61. package/src/Repo.ts +368 -74
  62. package/src/helpers/abortable.ts +62 -0
  63. package/src/helpers/bufferFromHex.ts +14 -0
  64. package/src/helpers/headsAreSame.ts +2 -2
  65. package/src/helpers/tests/network-adapter-tests.ts +14 -13
  66. package/src/helpers/tests/storage-adapter-tests.ts +44 -86
  67. package/src/index.ts +7 -0
  68. package/src/storage/StorageSubsystem.ts +66 -16
  69. package/src/synchronizer/CollectionSynchronizer.ts +37 -16
  70. package/src/synchronizer/DocSynchronizer.ts +59 -32
  71. package/src/synchronizer/Synchronizer.ts +14 -0
  72. package/src/types.ts +4 -1
  73. package/test/AutomergeUrl.test.ts +130 -0
  74. package/test/CollectionSynchronizer.test.ts +4 -4
  75. package/test/DocHandle.test.ts +181 -38
  76. package/test/DocSynchronizer.test.ts +10 -3
  77. package/test/Repo.test.ts +376 -203
  78. package/test/StorageSubsystem.test.ts +80 -1
  79. package/test/remoteHeads.test.ts +27 -12
package/src/synchronizer/DocSynchronizer.ts CHANGED
@@ -30,6 +30,7 @@ type PendingMessage = {
 
 interface DocSynchronizerConfig {
   handle: DocHandle<unknown>
+  peerId: PeerId
   onLoadSyncState?: (peerId: PeerId) => Promise<A.SyncState | undefined>
 }
 
@@ -56,13 +57,17 @@ export class DocSynchronizer extends Synchronizer {
 
   #pendingSyncMessages: Array<PendingMessage> = []
 
+  // We keep this around at least in part for debugging.
+  // eslint-disable-next-line no-unused-private-class-members
+  #peerId: PeerId
   #syncStarted = false
 
   #handle: DocHandle<unknown>
   #onLoadSyncState: (peerId: PeerId) => Promise<A.SyncState | undefined>
 
-  constructor({ handle, onLoadSyncState }: DocSynchronizerConfig) {
+  constructor({ handle, peerId, onLoadSyncState }: DocSynchronizerConfig) {
     super()
+    this.#peerId = peerId
     this.#handle = handle
     this.#onLoadSyncState =
       onLoadSyncState ?? (() => Promise.resolve(undefined))
@@ -81,7 +86,6 @@ export class DocSynchronizer extends Synchronizer {
 
     // Process pending sync messages immediately after the handle becomes ready.
     void (async () => {
-      await handle.doc([READY, REQUESTING])
       this.#processAllPendingSyncMessages()
     })()
   }
@@ -97,10 +101,13 @@
   /// PRIVATE
 
   async #syncWithPeers() {
-    this.#log(`syncWithPeers`)
-    const doc = await this.#handle.doc()
-    if (doc === undefined) return
-    this.#peers.forEach(peerId => this.#sendSyncMessage(peerId, doc))
+    try {
+      await this.#handle.whenReady()
+      const doc = this.#handle.doc() // XXX THIS ONE IS WEIRD
+      this.#peers.forEach(peerId => this.#sendSyncMessage(peerId, doc))
+    } catch (e) {
+      console.log("sync with peers threw an exception")
+    }
   }
 
   async #broadcastToPeers({
@@ -226,32 +233,26 @@ export class DocSynchronizer extends Synchronizer {
     return this.#peers.includes(peerId)
   }
 
-  beginSync(peerIds: PeerId[]) {
-    const noPeersWithDocument = peerIds.every(
-      peerId => this.#peerDocumentStatuses[peerId] in ["unavailable", "wants"]
-    )
-
-    // At this point if we don't have anything in our storage, we need to use an empty doc to sync
-    // with; but we don't want to surface that state to the front end
-
-    const docPromise = this.#handle
-      .doc([READY, REQUESTING, UNAVAILABLE])
-      .then(doc => {
-        // we register out peers first, then say that sync has started
+  async beginSync(peerIds: PeerId[]) {
+    void this.#handle
+      .whenReady([READY, REQUESTING, UNAVAILABLE])
+      .then(() => {
+        this.#syncStarted = true
+        this.#checkDocUnavailable()
+      })
+      .catch(e => {
+        console.log("caught whenready", e)
         this.#syncStarted = true
         this.#checkDocUnavailable()
-
-        const wasUnavailable = doc === undefined
-        if (wasUnavailable && noPeersWithDocument) {
-          return
-        }
-
-        // If the doc is unavailable we still need a blank document to generate
-        // the sync message from
-        return doc ?? A.init<unknown>()
       })
 
-    this.#log(`beginSync: ${peerIds.join(", ")}`)
+    const peersWithDocument = this.#peers.some(peerId => {
+      return this.#peerDocumentStatuses[peerId] == "has"
+    })
+
+    if (peersWithDocument) {
+      await this.#handle.whenReady()
+    }
 
     peerIds.forEach(peerId => {
       this.#withSyncState(peerId, syncState => {
@@ -264,11 +265,28 @@
         )
         this.#setSyncState(peerId, reparsedSyncState)
 
-        docPromise
-          .then(doc => {
-            if (doc) {
-              this.#sendSyncMessage(peerId, doc)
+        // At this point if we don't have anything in our storage, we need to use an empty doc to sync
+        // with; but we don't want to surface that state to the front end
+        this.#handle
+          .whenReady([READY, REQUESTING, UNAVAILABLE])
+          .then(() => {
+            const doc = this.#handle.isReady()
+              ? this.#handle.doc()
+              : A.init<unknown>()
+
+            const noPeersWithDocument = peerIds.every(
+              peerId =>
+                this.#peerDocumentStatuses[peerId] in ["unavailable", "wants"]
+            )
+
+            const wasUnavailable = doc === undefined
+            if (wasUnavailable && noPeersWithDocument) {
+              return
             }
+
+            // If the doc is unavailable we still need a blank document to generate
+            // the sync message from
+            this.#sendSyncMessage(peerId, doc ?? A.init<unknown>())
           })
           .catch(err => {
             this.#log(`Error loading doc for ${peerId}: ${err}`)
@@ -351,11 +369,20 @@
 
     this.#withSyncState(message.senderId, syncState => {
       this.#handle.update(doc => {
+        const start = performance.now()
+
        const [newDoc, newSyncState] = A.receiveSyncMessage(
          doc,
          syncState,
          message.data
        )
+        const end = performance.now()
+        this.emit("metrics", {
+          type: "receive-sync-message",
+          documentId: this.#handle.documentId,
+          durationMillis: end - start,
+          ...A.stats(doc),
+        })
 
         this.#setSyncState(message.senderId, newSyncState)
 
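The through-line in these DocSynchronizer hunks is an API shift on DocHandle: the alpha's state-gated async read `handle.doc([READY, REQUESTING])` gives way to awaiting `handle.whenReady([...])` and then reading `handle.doc()` synchronously. A minimal sketch of the new calling convention; `readWhenAvailable` is a hypothetical helper, not part of the diff, and the root export of `DocHandle` is assumed:

import { DocHandle } from "@automerge/automerge-repo"

// Hypothetical helper showing the beta calling convention.
async function readWhenAvailable<T>(
  handle: DocHandle<T>
): Promise<T | undefined> {
  // whenReady() resolves once the handle reaches one of the given states;
  // called with no argument it waits for the ready state, as in the
  // #syncWithPeers hunk above.
  await handle.whenReady(["ready", "requesting", "unavailable"])

  // doc() is now a synchronous read; guard with isReady(), as the
  // beginSync hunk above does, rather than relying on doc() to throw.
  return handle.isReady() ? handle.doc() : undefined
}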
package/src/synchronizer/Synchronizer.ts CHANGED
@@ -15,6 +15,7 @@ export interface SynchronizerEvents {
   message: (payload: MessageContents) => void
   "sync-state": (payload: SyncStatePayload) => void
   "open-doc": (arg: OpenDocMessage) => void
+  metrics: (arg: DocSyncMetrics) => void
 }
 
 /** Notify the repo that the sync state has changed */
@@ -23,3 +24,16 @@ export interface SyncStatePayload {
   documentId: DocumentId
   syncState: SyncState
 }
+
+export type DocSyncMetrics =
+  | {
+      type: "receive-sync-message"
+      documentId: DocumentId
+      durationMillis: number
+      numOps: number
+      numChanges: number
+    }
+  | {
+      type: "doc-denied"
+      documentId: DocumentId
+    }
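With `metrics` added to `SynchronizerEvents`, a host can observe per-document sync cost. A sketch of a listener, assuming access to a `Synchronizer` instance and that `DocSyncMetrics` is re-exported from the package root (the diff only shows its definition in Synchronizer.ts):

import type { DocSyncMetrics } from "@automerge/automerge-repo"

// `synchronizer` is any Synchronizer subclass instance (e.g. the
// CollectionSynchronizer a Repo owns). The payload is the discriminated
// union defined above.
synchronizer.on("metrics", (event: DocSyncMetrics) => {
  switch (event.type) {
    case "receive-sync-message":
      // durationMillis times A.receiveSyncMessage; numOps and numChanges
      // arrive via the A.stats(doc) spread in the emitting hunk.
      console.log(
        `${event.documentId}: ${event.durationMillis.toFixed(1)} ms, ` +
          `${event.numOps} ops / ${event.numChanges} changes`
      )
      break
    case "doc-denied":
      console.warn(`${event.documentId}: denied by peer`)
      break
  }
})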
package/src/types.ts CHANGED
@@ -27,8 +27,11 @@ export type AnyDocumentId =
   | BinaryDocumentId
   | LegacyDocumentId
 
+// We need to define our own version of heads because the AutomergeHeads type is not bs58check encoded
+export type UrlHeads = string[] & { __automergeUrlHeads: unknown }
+
 /** A branded type for peer IDs */
 export type PeerId = string & { __peerId: true }
 
 /** A randomly generated string created when the {@link Repo} starts up */
-export type SessionId = string & { __SessionId: true }
+export type SessionId = string & { __sessionId: true }
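Because `UrlHeads` is a branded type, a plain `string[]` will not typecheck where `UrlHeads` is expected; callers validate and then assert. A sketch of that boundary, assuming the type is importable from the package root (`toUrlHeads` is a hypothetical helper):

import bs58check from "bs58check"
import type { UrlHeads } from "@automerge/automerge-repo"

// Validate that every head is bs58check-encoded, then apply the brand.
// The brand itself is compile-time only; the decode is the runtime check.
function toUrlHeads(heads: string[]): UrlHeads {
  for (const head of heads) {
    bs58check.decode(head) // throws on invalid base58check input
  }
  return heads as UrlHeads
}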
package/test/AutomergeUrl.test.ts CHANGED
@@ -3,9 +3,11 @@ import bs58check from "bs58check"
 import { describe, it } from "vitest"
 import {
   generateAutomergeUrl,
+  getHeadsFromUrl,
   isValidAutomergeUrl,
   parseAutomergeUrl,
   stringifyAutomergeUrl,
+  UrlHeads,
 } from "../src/AutomergeUrl.js"
 import type {
   AutomergeUrl,
@@ -102,3 +104,131 @@
     })
   })
 })
+
+describe("AutomergeUrl with heads", () => {
+  // Create some sample encoded heads for testing
+  const head1 = bs58check.encode(new Uint8Array([1, 2, 3, 4])) as string
+  const head2 = bs58check.encode(new Uint8Array([5, 6, 7, 8])) as string
+  const goodHeads = [head1, head2] as UrlHeads
+  const urlWithHeads = `${goodUrl}#${head1}|${head2}` as AutomergeUrl
+  const invalidHead = "not-base58-encoded"
+  const invalidHeads = [invalidHead] as UrlHeads
+
+  describe("stringifyAutomergeUrl", () => {
+    it("should stringify a url with heads", () => {
+      const url = stringifyAutomergeUrl({
+        documentId: goodDocumentId,
+        heads: goodHeads,
+      })
+      assert.strictEqual(url, urlWithHeads)
+    })
+
+    it("should throw if heads are not valid base58check", () => {
+      assert.throws(() =>
+        stringifyAutomergeUrl({
+          documentId: goodDocumentId,
+          heads: invalidHeads,
+        })
+      )
+    })
+  })
+
+  describe("parseAutomergeUrl", () => {
+    it("should parse a url with heads", () => {
+      const { documentId, heads } = parseAutomergeUrl(urlWithHeads)
+      assert.equal(documentId, goodDocumentId)
+      assert.deepEqual(heads, [head1, head2])
+    })
+
+    it("should parse a url without heads", () => {
+      const { documentId, heads } = parseAutomergeUrl(goodUrl)
+      assert.equal(documentId, goodDocumentId)
+      assert.equal(heads, undefined)
+    })
+
+    it("should throw on url with invalid heads encoding", () => {
+      const badUrl = `${goodUrl}#${invalidHead}` as AutomergeUrl
+      assert.throws(() => parseAutomergeUrl(badUrl))
+    })
+  })
+
+  describe("isValidAutomergeUrl", () => {
+    it("should return true for a valid url with heads", () => {
+      assert(isValidAutomergeUrl(urlWithHeads) === true)
+    })
+
+    it("should return false for a url with invalid heads", () => {
+      const badUrl = `${goodUrl}#${invalidHead}` as AutomergeUrl
+      assert(isValidAutomergeUrl(badUrl) === false)
+    })
+  })
+
+  describe("getHeadsFromUrl", () => {
+    it("should return heads from a valid url", () => {
+      const heads = getHeadsFromUrl(urlWithHeads)
+      assert.deepEqual(heads, [head1, head2])
+    })
+
+    it("should return undefined for url without heads", () => {
+      const heads = getHeadsFromUrl(goodUrl)
+      assert.equal(heads, undefined)
+    })
+  })
+  it("should handle a single head correctly", () => {
+    const urlWithOneHead = `${goodUrl}#${head1}` as AutomergeUrl
+    const { heads } = parseAutomergeUrl(urlWithOneHead)
+    assert.deepEqual(heads, [head1])
+  })
+
+  it("should round-trip urls with heads", () => {
+    const originalUrl = urlWithHeads
+    const parsed = parseAutomergeUrl(originalUrl)
+    const roundTripped = stringifyAutomergeUrl({
+      documentId: parsed.documentId,
+      heads: parsed.heads,
+    })
+    assert.equal(roundTripped, originalUrl)
+  })
+
+  describe("should reject malformed urls", () => {
+    it("should reject urls with trailing delimiter", () => {
+      assert(!isValidAutomergeUrl(`${goodUrl}#${head1}:` as AutomergeUrl))
+    })
+
+    it("should reject urls with empty head", () => {
+      assert(!isValidAutomergeUrl(`${goodUrl}#|${head1}` as AutomergeUrl))
+    })
+
+    it("should reject urls with multiple hash characters", () => {
+      assert(
+        !isValidAutomergeUrl(`${goodUrl}#${head1}#${head2}` as AutomergeUrl)
+      )
+    })
+  })
+})
+
+describe("empty heads section", () => {
+  it("should treat bare # as empty heads array", () => {
+    const urlWithEmptyHeads = `${goodUrl}#` as AutomergeUrl
+    const { heads } = parseAutomergeUrl(urlWithEmptyHeads)
+    assert.deepEqual(heads, [])
+  })
+
+  it("should round-trip empty heads array", () => {
+    const original = `${goodUrl}#` as AutomergeUrl
+    const parsed = parseAutomergeUrl(original)
+    const roundTripped = stringifyAutomergeUrl({
+      documentId: parsed.documentId,
+      heads: parsed.heads,
+    })
+    assert.equal(roundTripped, original)
+  })
+
+  it("should distinguish between no heads and empty heads", () => {
+    const noHeads = parseAutomergeUrl(goodUrl)
+    const emptyHeads = parseAutomergeUrl(`${goodUrl}#` as AutomergeUrl)
+
+    assert.equal(noHeads.heads, undefined)
+    assert.deepEqual(emptyHeads.heads, [])
+  })
+})
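These tests pin down the URL grammar: heads are appended after `#`, separated by `|`, each bs58check-encoded. For orientation, a round-trip sketch using the same imports as the suite above (the document id is whatever `generateAutomergeUrl` yields):

import bs58check from "bs58check"
import {
  generateAutomergeUrl,
  parseAutomergeUrl,
  stringifyAutomergeUrl,
  UrlHeads,
} from "../src/AutomergeUrl.js"

// Encode a sample head the same way the tests do.
const head = bs58check.encode(new Uint8Array([1, 2, 3, 4])) as string

// Pin a fresh document url to that head: automerge:<documentId>#<head>
const { documentId } = parseAutomergeUrl(generateAutomergeUrl())
const pinned = stringifyAutomergeUrl({
  documentId,
  heads: [head] as UrlHeads,
})

// Parsing recovers the same heads array: deep-equals [head]
const { heads } = parseAutomergeUrl(pinned)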
package/test/CollectionSynchronizer.test.ts CHANGED
@@ -28,13 +28,13 @@ describe("CollectionSynchronizer", () => {
       done()
     })
 
-    synchronizer.addDocument(handle.documentId)
+    synchronizer.addDocument(handle)
   }))
 
  it("starts synchronizing existing documents when a peer is added", () =>
    new Promise<void>(done => {
      const handle = repo.create()
-      synchronizer.addDocument(handle.documentId)
+      synchronizer.addDocument(handle)
      synchronizer.once("message", event => {
        const { targetId, documentId } = event as SyncMessage
        assert(targetId === "peer1")
@@ -50,7 +50,7 @@
 
     repo.sharePolicy = async (peerId: PeerId) => peerId !== "peer1"
 
-    synchronizer.addDocument(handle.documentId)
+    synchronizer.addDocument(handle)
     synchronizer.once("message", () => {
       reject(new Error("Should not have sent a message"))
     })
@@ -71,7 +71,7 @@
       reject(new Error("Should not have sent a message"))
     })
 
-    synchronizer.addDocument(handle.documentId)
+    synchronizer.addDocument(handle)
 
     setTimeout(done)
   }))
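The only change in this suite is the `addDocument` signature: it now takes the `DocHandle` itself rather than its `documentId`. A sketch of the new call site, assuming the suite's setup of constructing the synchronizer from a repo:

import { Repo } from "@automerge/automerge-repo"
import { CollectionSynchronizer } from "../src/synchronizer/CollectionSynchronizer.js"

const repo = new Repo({ network: [] })
const synchronizer = new CollectionSynchronizer(repo)

// 2.0: pass the whole handle; the synchronizer no longer looks the
// document up by id.
const handle = repo.create()
synchronizer.addDocument(handle)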