@automerge/automerge-repo 2.0.0-alpha.2 → 2.0.0-alpha.22

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (76):
  1. package/README.md +5 -6
  2. package/dist/AutomergeUrl.d.ts +17 -5
  3. package/dist/AutomergeUrl.d.ts.map +1 -1
  4. package/dist/AutomergeUrl.js +71 -24
  5. package/dist/DocHandle.d.ts +89 -20
  6. package/dist/DocHandle.d.ts.map +1 -1
  7. package/dist/DocHandle.js +189 -28
  8. package/dist/FindProgress.d.ts +30 -0
  9. package/dist/FindProgress.d.ts.map +1 -0
  10. package/dist/FindProgress.js +1 -0
  11. package/dist/RemoteHeadsSubscriptions.d.ts +4 -5
  12. package/dist/RemoteHeadsSubscriptions.d.ts.map +1 -1
  13. package/dist/RemoteHeadsSubscriptions.js +4 -1
  14. package/dist/Repo.d.ts +44 -6
  15. package/dist/Repo.d.ts.map +1 -1
  16. package/dist/Repo.js +226 -87
  17. package/dist/entrypoints/fullfat.d.ts +1 -0
  18. package/dist/entrypoints/fullfat.d.ts.map +1 -1
  19. package/dist/entrypoints/fullfat.js +1 -2
  20. package/dist/helpers/abortable.d.ts +39 -0
  21. package/dist/helpers/abortable.d.ts.map +1 -0
  22. package/dist/helpers/abortable.js +45 -0
  23. package/dist/helpers/bufferFromHex.d.ts +3 -0
  24. package/dist/helpers/bufferFromHex.d.ts.map +1 -0
  25. package/dist/helpers/bufferFromHex.js +13 -0
  26. package/dist/helpers/headsAreSame.d.ts +2 -2
  27. package/dist/helpers/headsAreSame.d.ts.map +1 -1
  28. package/dist/helpers/mergeArrays.d.ts +1 -1
  29. package/dist/helpers/mergeArrays.d.ts.map +1 -1
  30. package/dist/helpers/tests/network-adapter-tests.d.ts.map +1 -1
  31. package/dist/helpers/tests/network-adapter-tests.js +13 -13
  32. package/dist/helpers/tests/storage-adapter-tests.d.ts +2 -2
  33. package/dist/helpers/tests/storage-adapter-tests.d.ts.map +1 -1
  34. package/dist/helpers/tests/storage-adapter-tests.js +25 -48
  35. package/dist/index.d.ts +1 -1
  36. package/dist/index.d.ts.map +1 -1
  37. package/dist/index.js +1 -1
  38. package/dist/storage/StorageSubsystem.d.ts +11 -1
  39. package/dist/storage/StorageSubsystem.d.ts.map +1 -1
  40. package/dist/storage/StorageSubsystem.js +20 -4
  41. package/dist/synchronizer/CollectionSynchronizer.d.ts +17 -3
  42. package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
  43. package/dist/synchronizer/CollectionSynchronizer.js +43 -18
  44. package/dist/synchronizer/DocSynchronizer.d.ts +10 -2
  45. package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -1
  46. package/dist/synchronizer/DocSynchronizer.js +30 -8
  47. package/dist/synchronizer/Synchronizer.d.ts +11 -0
  48. package/dist/synchronizer/Synchronizer.d.ts.map +1 -1
  49. package/dist/types.d.ts +4 -1
  50. package/dist/types.d.ts.map +1 -1
  51. package/fuzz/fuzz.ts +3 -3
  52. package/package.json +3 -3
  53. package/src/AutomergeUrl.ts +101 -26
  54. package/src/DocHandle.ts +256 -38
  55. package/src/FindProgress.ts +48 -0
  56. package/src/RemoteHeadsSubscriptions.ts +11 -9
  57. package/src/Repo.ts +310 -95
  58. package/src/entrypoints/fullfat.ts +1 -2
  59. package/src/helpers/abortable.ts +61 -0
  60. package/src/helpers/bufferFromHex.ts +14 -0
  61. package/src/helpers/headsAreSame.ts +2 -2
  62. package/src/helpers/tests/network-adapter-tests.ts +14 -13
  63. package/src/helpers/tests/storage-adapter-tests.ts +44 -86
  64. package/src/index.ts +2 -0
  65. package/src/storage/StorageSubsystem.ts +29 -4
  66. package/src/synchronizer/CollectionSynchronizer.ts +56 -19
  67. package/src/synchronizer/DocSynchronizer.ts +34 -9
  68. package/src/synchronizer/Synchronizer.ts +14 -0
  69. package/src/types.ts +4 -1
  70. package/test/AutomergeUrl.test.ts +130 -0
  71. package/test/CollectionSynchronizer.test.ts +4 -4
  72. package/test/DocHandle.test.ts +189 -29
  73. package/test/DocSynchronizer.test.ts +10 -3
  74. package/test/Repo.test.ts +377 -191
  75. package/test/StorageSubsystem.test.ts +17 -0
  76. package/test/remoteHeads.test.ts +27 -12
package/test/Repo.test.ts CHANGED
@@ -3,8 +3,11 @@ import { MessageChannelNetworkAdapter } from "../../automerge-repo-network-messa
3
3
  import assert from "assert"
4
4
  import * as Uuid from "uuid"
5
5
  import { describe, expect, it } from "vitest"
6
- import { parseAutomergeUrl } from "../src/AutomergeUrl.js"
7
6
  import {
7
+ encodeHeads,
8
+ getHeadsFromUrl,
9
+ isValidAutomergeUrl,
10
+ parseAutomergeUrl,
8
11
  generateAutomergeUrl,
9
12
  stringifyAutomergeUrl,
10
13
  } from "../src/AutomergeUrl.js"
@@ -13,6 +16,7 @@ import { eventPromise } from "../src/helpers/eventPromise.js"
13
16
  import { pause } from "../src/helpers/pause.js"
14
17
  import {
15
18
  AnyDocumentId,
19
+ UrlHeads,
16
20
  AutomergeUrl,
17
21
  DocHandle,
18
22
  DocumentId,
@@ -72,35 +76,34 @@ describe("Repo", () => {
72
76
  it("can create a document with an initial value", async () => {
73
77
  const { repo } = setup()
74
78
  const handle = repo.create({ foo: "bar" })
75
- await handle.doc()
76
- assert.equal(handle.docSync().foo, "bar")
79
+ assert.equal(handle.doc().foo, "bar")
77
80
  })
78
81
 
79
- it("can find a document by url", () => {
82
+ it("can find a document by url", async () => {
80
83
  const { repo } = setup()
81
84
  const handle = repo.create<TestDoc>()
82
85
  handle.change((d: TestDoc) => {
83
86
  d.foo = "bar"
84
87
  })
85
88
 
86
- const handle2 = repo.find(handle.url)
89
+ const handle2 = await repo.find(handle.url)
87
90
  assert.equal(handle, handle2)
88
- assert.deepEqual(handle2.docSync(), { foo: "bar" })
91
+ assert.deepEqual(handle2.doc(), { foo: "bar" })
89
92
  })
90
93
 
91
- it("can find a document by its unprefixed document ID", () => {
94
+ it("can find a document by its unprefixed document ID", async () => {
92
95
  const { repo } = setup()
93
96
  const handle = repo.create<TestDoc>()
94
97
  handle.change((d: TestDoc) => {
95
98
  d.foo = "bar"
96
99
  })
97
100
 
98
- const handle2 = repo.find(handle.documentId)
101
+ const handle2 = await repo.find(handle.documentId)
99
102
  assert.equal(handle, handle2)
100
- assert.deepEqual(handle2.docSync(), { foo: "bar" })
103
+ assert.deepEqual(handle2.doc(), { foo: "bar" })
101
104
  })
102
105
 
103
- it("can find a document by legacy UUID (for now)", () => {
106
+ it("can find a document by legacy UUID (for now)", async () => {
104
107
  disableConsoleWarn()
105
108
 
106
109
  const { repo } = setup()
@@ -113,9 +116,9 @@ describe("Repo", () => {
113
116
  const { binaryDocumentId } = parseAutomergeUrl(url)
114
117
  const legacyDocId = Uuid.stringify(binaryDocumentId) as LegacyDocumentId
115
118
 
116
- const handle2 = repo.find(legacyDocId)
119
+ const handle2 = await repo.find(legacyDocId)
117
120
  assert.equal(handle, handle2)
118
- assert.deepEqual(handle2.docSync(), { foo: "bar" })
121
+ assert.deepEqual(handle2.doc(), { foo: "bar" })
119
122
 
120
123
  reenableConsoleWarn()
121
124
  })
@@ -126,7 +129,7 @@ describe("Repo", () => {
126
129
  handle.change(d => {
127
130
  d.foo = "bar"
128
131
  })
129
- const v = await handle.doc()
132
+ const v = handle.doc()
130
133
  assert.equal(handle.isReady(), true)
131
134
  assert.equal(v.foo, "bar")
132
135
  })
@@ -140,8 +143,8 @@ describe("Repo", () => {
140
143
  const handle2 = repo.clone(handle)
141
144
  assert.equal(handle2.isReady(), true)
142
145
  assert.notEqual(handle.documentId, handle2.documentId)
143
- assert.deepStrictEqual(handle.docSync(), handle2.docSync())
144
- assert.deepStrictEqual(handle2.docSync(), { foo: "bar" })
146
+ assert.deepStrictEqual(handle.doc(), handle2.doc())
147
+ assert.deepStrictEqual(handle2.doc(), { foo: "bar" })
145
148
  })
146
149
 
147
150
  it("the cloned documents are distinct", () => {
@@ -159,9 +162,9 @@ describe("Repo", () => {
159
162
  d.baz = "baz"
160
163
  })
161
164
 
162
- assert.notDeepStrictEqual(handle.docSync(), handle2.docSync())
163
- assert.deepStrictEqual(handle.docSync(), { foo: "bar", bar: "bif" })
164
- assert.deepStrictEqual(handle2.docSync(), { foo: "bar", baz: "baz" })
165
+ assert.notDeepStrictEqual(handle.doc(), handle2.doc())
166
+ assert.deepStrictEqual(handle.doc(), { foo: "bar", bar: "bif" })
167
+ assert.deepStrictEqual(handle2.doc(), { foo: "bar", baz: "baz" })
165
168
  })
166
169
 
167
170
  it("the cloned documents can merge", () => {
@@ -181,59 +184,47 @@ describe("Repo", () => {
181
184
 
182
185
  handle.merge(handle2)
183
186
 
184
- assert.deepStrictEqual(handle.docSync(), {
187
+ assert.deepStrictEqual(handle.doc(), {
185
188
  foo: "bar",
186
189
  bar: "bif",
187
190
  baz: "baz",
188
191
  })
189
192
  // only the one handle should be changed
190
- assert.deepStrictEqual(handle2.docSync(), { foo: "bar", baz: "baz" })
193
+ assert.deepStrictEqual(handle2.doc(), { foo: "bar", baz: "baz" })
191
194
  })
192
195
 
193
196
  it("throws an error if we try to find a handle with an invalid AutomergeUrl", async () => {
194
197
  const { repo } = setup()
195
- try {
196
- repo.find<TestDoc>("invalid-url" as unknown as AutomergeUrl)
197
- } catch (e: any) {
198
- assert.equal(e.message, "Invalid AutomergeUrl: 'invalid-url'")
199
- }
198
+ await expect(async () => {
199
+ await repo.find<TestDoc>("invalid-url" as unknown as AutomergeUrl)
200
+ }).rejects.toThrow("Invalid AutomergeUrl: 'invalid-url'")
200
201
  })
201
202
 
202
203
  it("doesn't find a document that doesn't exist", async () => {
203
204
  const { repo } = setup()
204
- const handle = repo.find<TestDoc>(generateAutomergeUrl())
205
-
206
- await handle.whenReady(["ready", "unavailable"])
207
-
208
- assert.equal(handle.isReady(), false)
209
- assert.equal(handle.state, "unavailable")
210
- const doc = await handle.doc()
211
- assert.equal(doc, undefined)
212
- })
213
-
214
- it("emits an unavailable event when you don't have the document locally and are not connected to anyone", async () => {
215
- const { repo } = setup()
216
- const url = generateAutomergeUrl()
217
- const handle = repo.find<TestDoc>(url)
218
- assert.equal(handle.isReady(), false)
219
- await eventPromise(handle, "unavailable")
205
+ await expect(async () => {
206
+ await repo.find<TestDoc>(generateAutomergeUrl())
207
+ }).rejects.toThrow(/Document (.*) is unavailable/)
220
208
  })
221
209
 
222
210
  it("doesn't mark a document as unavailable until network adapters are ready", async () => {
223
211
  const { repo, networkAdapter } = setup({ startReady: false })
224
212
  const url = generateAutomergeUrl()
225
- const handle = repo.find<TestDoc>(url)
226
213
 
227
- let wasUnavailable = false
228
- handle.on("unavailable", () => {
229
- wasUnavailable = true
230
- })
214
+ const attemptedFind = repo.find<TestDoc>(url)
231
215
 
232
- await pause(50)
233
- assert.equal(wasUnavailable, false)
216
+ // First verify it stays pending for 50ms
217
+ await expect(
218
+ Promise.race([attemptedFind, pause(50)])
219
+ ).resolves.toBeUndefined()
234
220
 
221
+ // Trigger the rejection
235
222
  networkAdapter.forceReady()
236
- await eventPromise(handle, "unavailable")
223
+
224
+ // Now verify it rejects
225
+ await expect(attemptedFind).rejects.toThrow(
226
+ /Document (.*) is unavailable/
227
+ )
237
228
  })
238
229
 
239
230
  it("can find a created document", async () => {
@@ -244,18 +235,18 @@ describe("Repo", () => {
244
235
  })
245
236
  assert.equal(handle.isReady(), true)
246
237
 
247
- const bobHandle = repo.find<TestDoc>(handle.url)
238
+ const bobHandle = await repo.find<TestDoc>(handle.url)
248
239
 
249
240
  assert.equal(handle, bobHandle)
250
241
  assert.equal(handle.isReady(), true)
251
242
 
252
- const v = await bobHandle.doc()
243
+ const v = bobHandle.doc()
253
244
  assert.equal(v?.foo, "bar")
254
245
  })
255
246
 
256
247
  it("saves the document when creating it", async () => {
257
248
  const { repo, storageAdapter } = setup()
258
- const handle = repo.create<TestDoc>()
249
+ const handle = repo.create<TestDoc>({ foo: "saved" })
259
250
 
260
251
  const repo2 = new Repo({
261
252
  storage: storageAdapter,
@@ -263,9 +254,9 @@ describe("Repo", () => {
263
254
 
264
255
  await repo.flush()
265
256
 
266
- const bobHandle = repo2.find<TestDoc>(handle.url)
257
+ const bobHandle = await repo2.find<TestDoc>(handle.url)
267
258
  await bobHandle.whenReady()
268
- assert.equal(bobHandle.isReady(), true)
259
+ assert.deepEqual(bobHandle.doc(), { foo: "saved" })
269
260
  })
270
261
 
271
262
  it("saves the document when changed and can find it again", async () => {
@@ -284,9 +275,9 @@ describe("Repo", () => {
284
275
  storage: storageAdapter,
285
276
  })
286
277
 
287
- const bobHandle = repo2.find<TestDoc>(handle.url)
278
+ const bobHandle = await repo2.find<TestDoc>(handle.url)
288
279
 
289
- const v = await bobHandle.doc()
280
+ const v = bobHandle.doc()
290
281
  assert.equal(v?.foo, "bar")
291
282
  })
292
283
 
@@ -298,7 +289,7 @@ describe("Repo", () => {
298
289
  })
299
290
  // we now have a snapshot and an incremental change in storage
300
291
  assert.equal(handle.isReady(), true)
301
- const foo = await handle.doc()
292
+ const foo = handle.doc()
302
293
  assert.equal(foo?.foo, "bar")
303
294
 
304
295
  await pause()
@@ -315,7 +306,7 @@ describe("Repo", () => {
315
306
  d.foo = "bar"
316
307
  })
317
308
  assert.equal(handle.isReady(), true)
318
- await handle.doc()
309
+ await handle.whenReady()
319
310
 
320
311
  await pause()
321
312
  repo.delete(handle.url)
@@ -352,7 +343,7 @@ describe("Repo", () => {
352
343
 
353
344
  const exported = await repo.export(handle.documentId)
354
345
  const loaded = A.load(exported)
355
- const doc = await handle.doc()
346
+ const doc = handle.doc()
356
347
  assert.deepEqual(doc, loaded)
357
348
  })
358
349
 
@@ -386,9 +377,7 @@ describe("Repo", () => {
386
377
  const repo2 = new Repo({
387
378
  storage,
388
379
  })
389
- const handle2 = repo2.find(handle.url)
390
- await handle2.doc()
391
-
380
+ const handle2 = await repo2.find(handle.url)
392
381
  assert.deepEqual(storage.keys(), initialKeys)
393
382
  })
394
383
 
@@ -414,9 +403,7 @@ describe("Repo", () => {
414
403
  const repo2 = new Repo({
415
404
  storage,
416
405
  })
417
- const handle2 = repo2.find(handle.url)
418
- await handle2.doc()
419
-
406
+ const handle2 = await repo2.find(handle.url)
420
407
  assert(storage.keys().length !== 0)
421
408
  }
422
409
  })
@@ -456,7 +443,7 @@ describe("Repo", () => {
456
443
 
457
444
  const handle = repo.import<TestDoc>(saved)
458
445
  assert.equal(handle.isReady(), true)
459
- const v = await handle.doc()
446
+ const v = handle.doc()
460
447
  assert.equal(v?.foo, "bar")
461
448
 
462
449
  expect(A.getHistory(v)).toEqual(A.getHistory(updatedDoc))
@@ -475,7 +462,7 @@ describe("Repo", () => {
475
462
  const { repo } = setup()
476
463
  // @ts-ignore - passing something other than UInt8Array
477
464
  const handle = repo.import<TestDoc>(A.from({ foo: 123 }))
478
- const doc = await handle.doc()
465
+ const doc = handle.doc()
479
466
  expect(doc).toEqual({})
480
467
  })
481
468
 
@@ -483,9 +470,39 @@ describe("Repo", () => {
483
470
  const { repo } = setup()
484
471
  // @ts-ignore - passing something other than UInt8Array
485
472
  const handle = repo.import<TestDoc>({ foo: 123 })
486
- const doc = await handle.doc()
473
+ const doc = handle.doc()
487
474
  expect(doc).toEqual({})
488
475
  })
476
+
477
+ describe("handle cache", () => {
478
+ it("contains doc handle", async () => {
479
+ const { repo } = setup()
480
+ const handle = repo.create({ foo: "bar" })
481
+ assert(repo.handles[handle.documentId])
482
+ })
483
+
484
+ it("delete removes doc handle", async () => {
485
+ const { repo } = setup()
486
+ const handle = repo.create({ foo: "bar" })
487
+ await repo.delete(handle.documentId)
488
+ assert(repo.handles[handle.documentId] === undefined)
489
+ })
490
+
491
+ it("removeFromCache removes doc handle", async () => {
492
+ const { repo } = setup()
493
+ const handle = repo.create({ foo: "bar" })
494
+ await repo.removeFromCache(handle.documentId)
495
+ assert(repo.handles[handle.documentId] === undefined)
496
+ })
497
+
498
+ it("removeFromCache for documentId not found", async () => {
499
+ const { repo } = setup()
500
+ const badDocumentId = "badbadbad" as DocumentId
501
+ const handleCacheSize = Object.keys(repo.handles).length
502
+ await repo.removeFromCache(badDocumentId)
503
+ assert(Object.keys(repo.handles).length === handleCacheSize)
504
+ })
505
+ })
489
506
  })
490
507
 
491
508
  describe("flush behaviour", () => {
@@ -532,8 +549,8 @@ describe("Repo", () => {
532
549
 
533
550
  it("should not be in a new repo yet because the storage is slow", async () => {
534
551
  const { pausedStorage, repo, handle, handle2 } = setup()
535
- expect((await handle.doc()).foo).toEqual("first")
536
- expect((await handle2.doc()).foo).toEqual("second")
552
+ expect((await handle).doc().foo).toEqual("first")
553
+ expect((await handle2).doc().foo).toEqual("second")
537
554
 
538
555
  // Reload repo
539
556
  const repo2 = new Repo({
@@ -541,9 +558,10 @@ describe("Repo", () => {
541
558
  })
542
559
 
543
560
  // Could not find the document that is not yet saved because of slow storage.
544
- const reloadedHandle = repo2.find<{ foo: string }>(handle.url)
561
+ await expect(async () => {
562
+ const reloadedHandle = await repo2.find<{ foo: string }>(handle.url)
563
+ }).rejects.toThrow(/Document (.*) is unavailable/)
545
564
  expect(pausedStorage.keys()).to.deep.equal([])
546
- expect(await reloadedHandle.doc()).toEqual(undefined)
547
565
  })
548
566
 
549
567
  it("should be visible to a new repo after flush()", async () => {
@@ -563,10 +581,10 @@ describe("Repo", () => {
563
581
  })
564
582
 
565
583
  expect(
566
- (await repo.find<{ foo: string }>(handle.documentId).doc()).foo
584
+ (await repo.find<{ foo: string }>(handle.documentId)).doc().foo
567
585
  ).toEqual("first")
568
586
  expect(
569
- (await repo.find<{ foo: string }>(handle2.documentId).doc()).foo
587
+ (await repo.find<{ foo: string }>(handle2.documentId)).doc().foo
570
588
  ).toEqual("second")
571
589
  }
572
590
  })
@@ -588,13 +606,13 @@ describe("Repo", () => {
588
606
  })
589
607
 
590
608
  expect(
591
- (await repo.find<{ foo: string }>(handle.documentId).doc()).foo
609
+ (await repo.find<{ foo: string }>(handle.documentId)).doc().foo
592
610
  ).toEqual("first")
593
611
  // Really, it's okay if the second one is also flushed but I'm forcing the issue
594
612
  // in the test storage engine above to make sure the behaviour is as documented
595
- expect(
596
- await repo.find<{ foo: string }>(handle2.documentId).doc()
597
- ).toEqual(undefined)
613
+ await expect(async () => {
614
+ ;(await repo.find<{ foo: string }>(handle2.documentId)).doc()
615
+ }).rejects.toThrow(/Document (.*) is unavailable/)
598
616
  }
599
617
  })
600
618
 
@@ -642,7 +660,7 @@ describe("Repo", () => {
642
660
 
643
661
  if (idx < numberOfPeers - 1) {
644
662
  network.push(pair[0])
645
- pair[0].whenReady()
663
+ networkReady.push(pair[0].whenReady())
646
664
  }
647
665
 
648
666
  const repo = new Repo({
@@ -673,7 +691,6 @@ describe("Repo", () => {
673
691
  }
674
692
 
675
693
  await connectedPromise
676
-
677
694
  return { repos }
678
695
  }
679
696
 
@@ -685,10 +702,14 @@ describe("Repo", () => {
685
702
  d.foo = "bar"
686
703
  })
687
704
 
688
- const handleN = repos[numberOfPeers - 1].find<TestDoc>(handle0.url)
705
+ const handleN = await repos[numberOfPeers - 1].find<TestDoc>(handle0.url)
706
+ assert.deepStrictEqual(handleN.doc(), { foo: "bar" })
689
707
 
690
- await handleN.whenReady()
691
- assert.deepStrictEqual(handleN.docSync(), { foo: "bar" })
708
+ const handleNBack = repos[numberOfPeers - 1].create({
709
+ foo: "reverse-trip",
710
+ })
711
+ const handle0Back = await repos[0].find<TestDoc>(handleNBack.url)
712
+ assert.deepStrictEqual(handle0Back.doc(), { foo: "reverse-trip" })
692
713
  })
693
714
 
694
715
  const setup = async ({
@@ -815,9 +836,8 @@ describe("Repo", () => {
815
836
  it("changes are replicated from aliceRepo to bobRepo", async () => {
816
837
  const { bobRepo, aliceHandle, teardown } = await setup()
817
838
 
818
- const bobHandle = bobRepo.find<TestDoc>(aliceHandle.url)
819
- await eventPromise(bobHandle, "change")
820
- const bobDoc = await bobHandle.doc()
839
+ const bobHandle = await bobRepo.find<TestDoc>(aliceHandle.url)
840
+ const bobDoc = bobHandle.doc()
821
841
  assert.deepStrictEqual(bobDoc, { foo: "bar" })
822
842
  teardown()
823
843
  })
@@ -825,13 +845,51 @@ describe("Repo", () => {
825
845
  it("can load a document from aliceRepo on charlieRepo", async () => {
826
846
  const { charlieRepo, aliceHandle, teardown } = await setup()
827
847
 
828
- const handle3 = charlieRepo.find<TestDoc>(aliceHandle.url)
829
- await eventPromise(handle3, "change")
830
- const doc3 = await handle3.doc()
848
+ const handle3 = await charlieRepo.find<TestDoc>(aliceHandle.url)
849
+ const doc3 = handle3.doc()
831
850
  assert.deepStrictEqual(doc3, { foo: "bar" })
832
851
  teardown()
833
852
  })
834
853
 
854
+ it("synchronizes changes from bobRepo to charlieRepo when loading from storage", async () => {
855
+ const { bobRepo, bobStorage, teardown } = await setup()
856
+
857
+ // We create a repo that uses bobStorage to put a document into its imaginary disk
858
+ // without it knowing about it
859
+ const bobRepo2 = new Repo({
860
+ storage: bobStorage,
861
+ })
862
+ const inStorageHandle = bobRepo2.create<TestDoc>({
863
+ foo: "foundOnFakeDisk",
864
+ })
865
+ await bobRepo2.flush()
866
+
867
+ // Now, let's load it on the original bob repo (which shares a "disk")
868
+ const bobFoundIt = await bobRepo.find<TestDoc>(inStorageHandle.url)
869
+
870
+ // Before checking if it syncs, make sure we have it!
871
+ // (This behaviour is mostly test-validation, we are already testing load/save elsewhere.)
872
+ assert.deepStrictEqual(bobFoundIt.doc(), { foo: "foundOnFakeDisk" })
873
+
874
+ await pause(10)
875
+
876
+ // We should have a docSynchronizer and its peers should be alice and charlie
877
+ assert.strictEqual(
878
+ bobRepo.synchronizer.docSynchronizers[bobFoundIt.documentId]?.hasPeer(
879
+ "alice" as PeerId
880
+ ),
881
+ true
882
+ )
883
+ assert.strictEqual(
884
+ bobRepo.synchronizer.docSynchronizers[bobFoundIt.documentId]?.hasPeer(
885
+ "charlie" as PeerId
886
+ ),
887
+ true
888
+ )
889
+
890
+ teardown()
891
+ })
892
+
835
893
  it("charlieRepo doesn't have a document it's not supposed to have", async () => {
836
894
  const { aliceRepo, bobRepo, charlieRepo, notForCharlie, teardown } =
837
895
  await setup()
@@ -851,11 +909,8 @@ describe("Repo", () => {
851
909
  it("charlieRepo can request a document not initially shared with it", async () => {
852
910
  const { charlieRepo, notForCharlie, teardown } = await setup()
853
911
 
854
- const handle = charlieRepo.find<TestDoc>(notForCharlie)
855
-
856
- await pause(50)
857
-
858
- const doc = await handle.doc()
912
+ const handle = await charlieRepo.find<TestDoc>(notForCharlie)
913
+ const doc = handle.doc()
859
914
 
860
915
  assert.deepStrictEqual(doc, { foo: "baz" })
861
916
 
@@ -865,11 +920,11 @@ describe("Repo", () => {
865
920
  it("charlieRepo can request a document across a network of multiple peers", async () => {
866
921
  const { charlieRepo, notForBob, teardown } = await setup()
867
922
 
868
- const handle = charlieRepo.find<TestDoc>(notForBob)
923
+ const handle = await charlieRepo.find<TestDoc>(notForBob)
869
924
 
870
925
  await pause(50)
871
926
 
872
- const doc = await handle.doc()
927
+ const doc = handle.doc()
873
928
  assert.deepStrictEqual(doc, { foo: "bap" })
874
929
 
875
930
  teardown()
@@ -878,42 +933,10 @@ describe("Repo", () => {
878
933
  it("doesn't find a document which doesn't exist anywhere on the network", async () => {
879
934
  const { charlieRepo, teardown } = await setup()
880
935
  const url = generateAutomergeUrl()
881
- const handle = charlieRepo.find<TestDoc>(url)
882
- assert.equal(handle.isReady(), false)
883
-
884
- const doc = await handle.doc()
885
- assert.equal(doc, undefined)
886
936
 
887
- teardown()
888
- })
889
-
890
- it("emits an unavailable event when it's not found on the network", async () => {
891
- const { aliceRepo, teardown } = await setup()
892
- const url = generateAutomergeUrl()
893
- const handle = aliceRepo.find(url)
894
- assert.equal(handle.isReady(), false)
895
- await eventPromise(handle, "unavailable")
896
- teardown()
897
- })
898
-
899
- it("emits an unavailable event every time an unavailable doc is requested", async () => {
900
- const { charlieRepo, teardown } = await setup()
901
- const url = generateAutomergeUrl()
902
- const handle = charlieRepo.find<TestDoc>(url)
903
- assert.equal(handle.isReady(), false)
904
-
905
- await Promise.all([
906
- eventPromise(handle, "unavailable"),
907
- eventPromise(charlieRepo, "unavailable-document"),
908
- ])
909
-
910
- // make sure it emits a second time if the doc is still unavailable
911
- const handle2 = charlieRepo.find<TestDoc>(url)
912
- assert.equal(handle2.isReady(), false)
913
- await Promise.all([
914
- eventPromise(handle, "unavailable"),
915
- eventPromise(charlieRepo, "unavailable-document"),
916
- ])
937
+ await expect(charlieRepo.find<TestDoc>(url)).rejects.toThrow(
938
+ /Document (.*) is unavailable/
939
+ )
917
940
 
918
941
  teardown()
919
942
  })
@@ -928,21 +951,23 @@ describe("Repo", () => {
928
951
  } = await setup({ connectAlice: false })
929
952
 
930
953
  const url = stringifyAutomergeUrl({ documentId: notForCharlie })
931
- const handle = charlieRepo.find<TestDoc>(url)
932
- assert.equal(handle.isReady(), false)
933
-
934
- await eventPromise(handle, "unavailable")
954
+ await expect(charlieRepo.find<TestDoc>(url)).rejects.toThrow(
955
+ /Document (.*) is unavailable/
956
+ )
935
957
 
936
958
  connectAliceToBob()
937
959
 
938
960
  await eventPromise(aliceRepo.networkSubsystem, "peer")
939
961
 
940
- const doc = await handle.doc(["ready"])
962
+ // Not sure why we need this pause here, but... we do.
963
+ await pause(150)
964
+ const handle = await charlieRepo.find<TestDoc>(url)
965
+ const doc = handle.doc()
941
966
  assert.deepStrictEqual(doc, { foo: "baz" })
942
967
 
943
968
  // an additional find should also return the correct resolved document
944
- const handle2 = charlieRepo.find<TestDoc>(url)
945
- const doc2 = await handle2.doc()
969
+ const handle2 = await charlieRepo.find<TestDoc>(url)
970
+ const doc2 = handle2.doc()
946
971
  assert.deepStrictEqual(doc2, { foo: "baz" })
947
972
 
948
973
  teardown()
@@ -978,11 +1003,9 @@ describe("Repo", () => {
978
1003
  sharePolicy: async () => true,
979
1004
  })
980
1005
 
981
- const handle = a.find(url)
982
-
983
- // We expect this to be unavailable as there is no connected peer and
984
- // the repo has no storage.
985
- await eventPromise(handle, "unavailable")
1006
+ await expect(a.find<TestDoc>(url)).rejects.toThrow(
1007
+ /Document (.*) is unavailable/
1008
+ )
986
1009
 
987
1010
  // Now create a repo pointing at the storage containing the document and
988
1011
  // connect it to the other end of the MessageChannel
@@ -992,9 +1015,14 @@ describe("Repo", () => {
992
1015
  network: [new MessageChannelNetworkAdapter(ba)],
993
1016
  })
994
1017
 
1018
+ // We need a proper peer status API so we can tell when the
1019
+ // peer is connected. For now we just wait a bit.
1020
+ await pause(50)
1021
+
995
1022
  // The empty repo should be notified of the new peer, send it a request
996
1023
  // and eventually resolve the handle to "READY"
997
- await handle.whenReady()
1024
+ const handle = await a.find<TestDoc>(url)
1025
+ expect(handle.state).toBe("ready")
998
1026
  })
999
1027
 
1000
1028
  it("a deleted document from charlieRepo can be refetched", async () => {
@@ -1010,9 +1038,8 @@ describe("Repo", () => {
1010
1038
  })
1011
1039
  await changePromise
1012
1040
 
1013
- const handle3 = charlieRepo.find<TestDoc>(aliceHandle.url)
1014
- await eventPromise(handle3, "change")
1015
- const doc3 = await handle3.doc()
1041
+ const handle3 = await charlieRepo.find<TestDoc>(aliceHandle.url)
1042
+ const doc3 = handle3.doc()
1016
1043
 
1017
1044
  assert.deepStrictEqual(doc3, { foo: "baz" })
1018
1045
 
@@ -1038,7 +1065,7 @@ describe("Repo", () => {
1038
1065
 
1039
1066
  // make sure the doc is ready
1040
1067
  if (!doc.isReady()) {
1041
- await doc.doc()
1068
+ await doc.whenReady()
1042
1069
  }
1043
1070
 
1044
1071
  // make a random change to it
@@ -1056,10 +1083,10 @@ describe("Repo", () => {
1056
1083
 
1057
1084
  const data = { presence: "alice" }
1058
1085
 
1059
- const aliceHandle = aliceRepo.find<TestDoc>(
1086
+ const aliceHandle = await aliceRepo.find<TestDoc>(
1060
1087
  stringifyAutomergeUrl({ documentId: notForCharlie })
1061
1088
  )
1062
- const bobHandle = bobRepo.find<TestDoc>(
1089
+ const bobHandle = await bobRepo.find<TestDoc>(
1063
1090
  stringifyAutomergeUrl({ documentId: notForCharlie })
1064
1091
  )
1065
1092
 
@@ -1102,7 +1129,10 @@ describe("Repo", () => {
1102
1129
  bobHandle.documentId,
1103
1130
  await charlieRepo!.storageSubsystem.id()
1104
1131
  )
1105
- assert.deepStrictEqual(storedSyncState.sharedHeads, bobHandle.heads())
1132
+ assert.deepStrictEqual(
1133
+ encodeHeads(storedSyncState.sharedHeads),
1134
+ bobHandle.heads()
1135
+ )
1106
1136
 
1107
1137
  teardown()
1108
1138
  })
@@ -1202,14 +1232,14 @@ describe("Repo", () => {
1202
1232
 
1203
1233
  const nextRemoteHeadsPromise = new Promise<{
1204
1234
  storageId: StorageId
1205
- heads: A.Heads
1235
+ heads: UrlHeads
1206
1236
  }>(resolve => {
1207
1237
  handle.on("remote-heads", ({ storageId, heads }) => {
1208
1238
  resolve({ storageId, heads })
1209
1239
  })
1210
1240
  })
1211
1241
 
1212
- const charlieHandle = charlieRepo.find<TestDoc>(handle.url)
1242
+ const charlieHandle = await charlieRepo.find<TestDoc>(handle.url)
1213
1243
  await charlieHandle.whenReady()
1214
1244
 
1215
1245
  // make a change on charlie
@@ -1247,34 +1277,6 @@ describe("Repo", () => {
1247
1277
  })
1248
1278
  })
1249
1279
 
1250
- it("peer receives a document when connection is recovered", async () => {
1251
- const alice = "alice" as PeerId
1252
- const bob = "bob" as PeerId
1253
- const [aliceAdapter, bobAdapter] = DummyNetworkAdapter.createConnectedPair()
1254
- const aliceRepo = new Repo({
1255
- network: [aliceAdapter],
1256
- peerId: alice,
1257
- })
1258
- const bobRepo = new Repo({
1259
- network: [bobAdapter],
1260
- peerId: bob,
1261
- })
1262
- const aliceDoc = aliceRepo.create()
1263
- aliceDoc.change((doc: any) => (doc.text = "Hello world"))
1264
-
1265
- const bobDoc = bobRepo.find(aliceDoc.url)
1266
- await eventPromise(bobDoc, "unavailable")
1267
-
1268
- aliceAdapter.peerCandidate(bob)
1269
- // Bob isn't yet connected to Alice and can't respond to her sync message
1270
- await pause(100)
1271
- bobAdapter.peerCandidate(alice)
1272
-
1273
- await bobDoc.whenReady()
1274
-
1275
- assert.equal(bobDoc.isReady(), true)
1276
- })
1277
-
1278
1280
  describe("with peers (mesh network)", () => {
1279
1281
  const setup = async () => {
1280
1282
  // Set up three repos; connect Alice to Bob, Bob to Charlie, and Alice to Charlie
@@ -1336,8 +1338,8 @@ describe("Repo", () => {
1336
1338
 
1337
1339
  const aliceHandle = aliceRepo.create<TestDoc>()
1338
1340
 
1339
- const bobHandle = bobRepo.find(aliceHandle.url)
1340
- const charlieHandle = charlieRepo.find(aliceHandle.url)
1341
+ const bobHandle = await bobRepo.find(aliceHandle.url)
1342
+ const charlieHandle = await charlieRepo.find(aliceHandle.url)
1341
1343
 
1342
1344
  // Alice should not receive her own ephemeral message
1343
1345
  aliceHandle.on("ephemeral-message", () => {
@@ -1375,9 +1377,8 @@ describe("Repo", () => {
1375
1377
  // pause to let the sync happen
1376
1378
  await pause(50)
1377
1379
 
1378
- const charlieHandle = charlieRepo.find(handle2.url)
1379
- await charlieHandle.doc()
1380
- assert.deepStrictEqual(charlieHandle.docSync(), { foo: "bar" })
1380
+ const charlieHandle = await charlieRepo.find(handle2.url)
1381
+ assert.deepStrictEqual(charlieHandle.doc(), { foo: "bar" })
1381
1382
 
1382
1383
  teardown()
1383
1384
  })
@@ -1394,9 +1395,8 @@ describe("Repo", () => {
1394
1395
  // pause to let the sync happen
1395
1396
  await pause(50)
1396
1397
 
1397
- const charlieHandle = charlieRepo.find(handle2.url)
1398
- await charlieHandle.doc()
1399
- assert.deepStrictEqual(charlieHandle.docSync(), { foo: "bar" })
1398
+ const charlieHandle = await charlieRepo.find(handle2.url)
1399
+ assert.deepStrictEqual(charlieHandle.doc(), { foo: "bar" })
1400
1400
 
1401
1401
  // now make a change to doc2 on bobs side and merge it into doc1
1402
1402
  handle2.change(d => {
@@ -1407,12 +1407,198 @@ describe("Repo", () => {
1407
1407
  // wait for the network to do it's thang
1408
1408
  await pause(350)
1409
1409
 
1410
- await charlieHandle.doc()
1411
- assert.deepStrictEqual(charlieHandle.docSync(), { foo: "baz" })
1410
+ assert.deepStrictEqual(charlieHandle.doc(), { foo: "baz" })
1412
1411
 
1413
1412
  teardown()
1414
1413
  })
1415
1414
  })
1415
+
1416
+ describe("the denylist", () => {
1417
+ it("should immediately return an unavailable message in response to a request for a denylisted document", async () => {
1418
+ const storage = new DummyStorageAdapter()
1419
+
1420
+ // first create the document in storage
1421
+ const dummyRepo = new Repo({ network: [], storage })
1422
+ const doc = dummyRepo.create({ foo: "bar" })
1423
+ await dummyRepo.flush()
1424
+
1425
+ // Check that the document actually is in storage
1426
+ let docId = doc.documentId
1427
+ assert(storage.keys().some((k: string) => k.includes(docId)))
1428
+
1429
+ const channel = new MessageChannel()
1430
+ const { port1: clientToServer, port2: serverToClient } = channel
1431
+ const server = new Repo({
1432
+ network: [new MessageChannelNetworkAdapter(serverToClient)],
1433
+ storage,
1434
+ denylist: [doc.url],
1435
+ })
1436
+ const client = new Repo({
1437
+ network: [new MessageChannelNetworkAdapter(clientToServer)],
1438
+ })
1439
+
1440
+ await Promise.all([
1441
+ eventPromise(server.networkSubsystem, "peer"),
1442
+ eventPromise(client.networkSubsystem, "peer"),
1443
+ ])
1444
+
1445
+ await expect(async () => {
1446
+ const clientDoc = await client.find(doc.url)
1447
+ }).rejects.toThrow(/Document (.*) is unavailable/)
1448
+
1449
+ const openDocs = Object.keys(server.metrics().documents).length
1450
+ assert.deepEqual(openDocs, 0)
1451
+ })
1452
+ })
1453
+ })
1454
+
1455
+ describe("Repo heads-in-URLs functionality", () => {
1456
+ const setup = () => {
1457
+ const repo = new Repo({})
1458
+ const handle = repo.create()
1459
+ handle.change((doc: any) => (doc.title = "Hello World"))
1460
+ return { repo, handle }
1461
+ }
1462
+
1463
+ it("finds a document view by URL with heads", async () => {
1464
+ const { repo, handle } = setup()
1465
+ const heads = handle.heads()!
1466
+ const url = stringifyAutomergeUrl({ documentId: handle.documentId, heads })
1467
+ const view = await repo.find(url)
1468
+ expect(view.doc()).toEqual({ title: "Hello World" })
1469
+ })
1470
+
1471
+ it("returns a view, not the actual handle, when finding by URL with heads", async () => {
1472
+ const { repo, handle } = setup()
1473
+ const heads = handle.heads()!
1474
+ await handle.change((doc: any) => (doc.title = "Changed"))
1475
+ const url = stringifyAutomergeUrl({ documentId: handle.documentId, heads })
1476
+ const view = await repo.find(url)
1477
+ expect(view.doc()).toEqual({ title: "Hello World" })
1478
+ expect(handle.doc()).toEqual({ title: "Changed" })
1479
+ })
1480
+
1481
+ it("changes to a document view do not affect the original", async () => {
1482
+ const { repo, handle } = setup()
1483
+ const heads = handle.heads()!
1484
+ const url = stringifyAutomergeUrl({ documentId: handle.documentId, heads })
1485
+ const view = await repo.find(url)
1486
+ expect(() =>
1487
+ view.change((doc: any) => (doc.title = "Changed in View"))
1488
+ ).toThrow()
1489
+ expect(handle.doc()).toEqual({ title: "Hello World" })
1490
+ })
1491
+
1492
+ it("document views are read-only", async () => {
1493
+ const { repo, handle } = setup()
1494
+ const heads = handle.heads()!
1495
+ const url = stringifyAutomergeUrl({ documentId: handle.documentId, heads })
1496
+ const view = await repo.find(url)
1497
+ expect(() => view.change((doc: any) => (doc.title = "Changed"))).toThrow()
1498
+ })
1499
+
1500
+ it("finds the latest document when given a URL without heads", async () => {
1501
+ const { repo, handle } = setup()
1502
+ await handle.change((doc: any) => (doc.title = "Changed"))
1503
+ const found = await repo.find(handle.url)
1504
+ expect(found.doc()).toEqual({ title: "Changed" })
1505
+ })
1506
+
1507
+ it("getHeadsFromUrl returns heads array if present or undefined", () => {
1508
+ const { repo, handle } = setup()
1509
+ const heads = handle.heads()!
1510
+ const url = stringifyAutomergeUrl({ documentId: handle.documentId, heads })
1511
+ expect(getHeadsFromUrl(url)).toEqual(heads)
1512
+
1513
+ const urlWithoutHeads = generateAutomergeUrl()
1514
+ expect(getHeadsFromUrl(urlWithoutHeads)).toBeUndefined()
1515
+ })
1516
+
1517
+ it("isValidAutomergeUrl returns true for valid URLs", () => {
1518
+ const { repo, handle } = setup()
1519
+ const url = generateAutomergeUrl()
1520
+ expect(isValidAutomergeUrl(url)).toBe(true)
1521
+
1522
+ const urlWithHeads = stringifyAutomergeUrl({
1523
+ documentId: handle.documentId,
1524
+ heads: handle.heads()!,
1525
+ })
1526
+ expect(isValidAutomergeUrl(urlWithHeads)).toBe(true)
1527
+ })
1528
+
1529
+ it("isValidAutomergeUrl returns false for invalid URLs", () => {
1530
+ const { repo, handle } = setup()
1531
+ expect(isValidAutomergeUrl("not a url")).toBe(false)
1532
+ expect(isValidAutomergeUrl("automerge:invalidid")).toBe(false)
1533
+ expect(isValidAutomergeUrl("automerge:validid#invalidhead")).toBe(false)
1534
+ })
1535
+
1536
+ it("parseAutomergeUrl extracts documentId and heads", () => {
1537
+ const { repo, handle } = setup()
1538
+ const url = stringifyAutomergeUrl({
1539
+ documentId: handle.documentId,
1540
+ heads: handle.heads()!,
1541
+ })
1542
+ const parsed = parseAutomergeUrl(url)
1543
+ expect(parsed.documentId).toBe(handle.documentId)
1544
+ expect(parsed.heads).toEqual(handle.heads())
1545
+ })
1546
+
1547
+ it("stringifyAutomergeUrl creates valid URL", () => {
1548
+ const { repo, handle } = setup()
1549
+ const url = stringifyAutomergeUrl({
1550
+ documentId: handle.documentId,
1551
+ heads: handle.heads()!,
1552
+ })
1553
+ expect(isValidAutomergeUrl(url)).toBe(true)
1554
+ const parsed = parseAutomergeUrl(url)
1555
+ expect(parsed.documentId).toBe(handle.documentId)
1556
+ expect(parsed.heads).toEqual(handle.heads())
1557
+ })
1558
+ })
1559
+
1560
+ describe("Repo.find() abort behavior", () => {
1561
+ it("aborts immediately if signal is already aborted", async () => {
1562
+ const repo = new Repo()
1563
+ const controller = new AbortController()
1564
+ controller.abort()
1565
+
1566
+ await expect(
1567
+ repo.find(generateAutomergeUrl(), { signal: controller.signal })
1568
+ ).rejects.toThrow("Operation aborted")
1569
+ })
1570
+
1571
+ it("can abort while waiting for ready state", async () => {
1572
+ // Create a repo with no network adapters so document can't become ready
1573
+ const repo = new Repo()
1574
+ const url = generateAutomergeUrl()
1575
+
1576
+ const controller = new AbortController()
1577
+
1578
+ // Start find and abort after a moment
1579
+ const findPromise = repo.find(url, { signal: controller.signal })
1580
+ controller.abort()
1581
+
1582
+ await expect(findPromise).rejects.toThrow("Operation aborted")
1583
+ await expect(findPromise).rejects.not.toThrow("unavailable")
1584
+ })
1585
+
1586
+ it("returns handle immediately when allow unavailable is true, even with abort signal", async () => {
1587
+ const repo = new Repo()
1588
+ const controller = new AbortController()
1589
+ const url = generateAutomergeUrl()
1590
+
1591
+ const handle = await repo.find(url, {
1592
+ allowableStates: ["unavailable"],
1593
+ signal: controller.signal,
1594
+ })
1595
+
1596
+ expect(handle).toBeDefined()
1597
+
1598
+ // Abort shouldn't affect the result since we skipped ready
1599
+ controller.abort()
1600
+ expect(handle.url).toBe(url)
1601
+ })
1416
1602
  })
1417
1603
 
1418
1604
  const warn = console.warn