@automerge/automerge-repo 2.0.0-alpha.20 → 2.0.0-alpha.22

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
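The change these tests exercise most heavily is the new asynchronous find API: Repo.find() now returns a promise that rejects when a document is unavailable, DocHandle.doc() is synchronous and replaces docSync(), and find() accepts options such as an AbortSignal. A minimal TypeScript sketch of the resulting calling pattern, inferred from the test changes below rather than from official migration notes:

import { Repo } from "@automerge/automerge-repo"

interface TestDoc {
  foo: string
}

// In-memory repo with no storage or network adapters, as in the tests below.
const repo = new Repo()

// create() still returns a ready handle synchronously.
const handle = repo.create<TestDoc>({ foo: "bar" })

// alpha.20: const found = repo.find<TestDoc>(handle.url); found.docSync().foo
// alpha.22: find() is awaited and rejects if the document is unavailable.
const found = await repo.find<TestDoc>(handle.url)
console.log(found.doc().foo) // "bar" (assumes an ES module with top-level await)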
package/test/Repo.test.ts CHANGED
@@ -8,9 +8,6 @@ import {
  getHeadsFromUrl,
  isValidAutomergeUrl,
  parseAutomergeUrl,
- UrlHeads,
- } from "../src/AutomergeUrl.js"
- import {
  generateAutomergeUrl,
  stringifyAutomergeUrl,
  } from "../src/AutomergeUrl.js"
@@ -19,6 +16,7 @@ import { eventPromise } from "../src/helpers/eventPromise.js"
  import { pause } from "../src/helpers/pause.js"
  import {
  AnyDocumentId,
+ UrlHeads,
  AutomergeUrl,
  DocHandle,
  DocumentId,
@@ -78,35 +76,34 @@ describe("Repo", () => {
  it("can create a document with an initial value", async () => {
  const { repo } = setup()
  const handle = repo.create({ foo: "bar" })
- await handle.doc()
- assert.equal(handle.docSync().foo, "bar")
+ assert.equal(handle.doc().foo, "bar")
  })

- it("can find a document by url", () => {
+ it("can find a document by url", async () => {
  const { repo } = setup()
  const handle = repo.create<TestDoc>()
  handle.change((d: TestDoc) => {
  d.foo = "bar"
  })

- const handle2 = repo.find(handle.url)
+ const handle2 = await repo.find(handle.url)
  assert.equal(handle, handle2)
- assert.deepEqual(handle2.docSync(), { foo: "bar" })
+ assert.deepEqual(handle2.doc(), { foo: "bar" })
  })

- it("can find a document by its unprefixed document ID", () => {
+ it("can find a document by its unprefixed document ID", async () => {
  const { repo } = setup()
  const handle = repo.create<TestDoc>()
  handle.change((d: TestDoc) => {
  d.foo = "bar"
  })

- const handle2 = repo.find(handle.documentId)
+ const handle2 = await repo.find(handle.documentId)
  assert.equal(handle, handle2)
- assert.deepEqual(handle2.docSync(), { foo: "bar" })
+ assert.deepEqual(handle2.doc(), { foo: "bar" })
  })

- it("can find a document by legacy UUID (for now)", () => {
+ it("can find a document by legacy UUID (for now)", async () => {
  disableConsoleWarn()

  const { repo } = setup()
@@ -119,9 +116,9 @@ describe("Repo", () => {
  const { binaryDocumentId } = parseAutomergeUrl(url)
  const legacyDocId = Uuid.stringify(binaryDocumentId) as LegacyDocumentId

- const handle2 = repo.find(legacyDocId)
+ const handle2 = await repo.find(legacyDocId)
  assert.equal(handle, handle2)
- assert.deepEqual(handle2.docSync(), { foo: "bar" })
+ assert.deepEqual(handle2.doc(), { foo: "bar" })

  reenableConsoleWarn()
  })
@@ -132,7 +129,7 @@ describe("Repo", () => {
  handle.change(d => {
  d.foo = "bar"
  })
- const v = await handle.doc()
+ const v = handle.doc()
  assert.equal(handle.isReady(), true)
  assert.equal(v.foo, "bar")
  })
@@ -146,8 +143,8 @@ describe("Repo", () => {
  const handle2 = repo.clone(handle)
  assert.equal(handle2.isReady(), true)
  assert.notEqual(handle.documentId, handle2.documentId)
- assert.deepStrictEqual(handle.docSync(), handle2.docSync())
- assert.deepStrictEqual(handle2.docSync(), { foo: "bar" })
+ assert.deepStrictEqual(handle.doc(), handle2.doc())
+ assert.deepStrictEqual(handle2.doc(), { foo: "bar" })
  })

  it("the cloned documents are distinct", () => {
@@ -165,9 +162,9 @@ describe("Repo", () => {
  d.baz = "baz"
  })

- assert.notDeepStrictEqual(handle.docSync(), handle2.docSync())
- assert.deepStrictEqual(handle.docSync(), { foo: "bar", bar: "bif" })
- assert.deepStrictEqual(handle2.docSync(), { foo: "bar", baz: "baz" })
+ assert.notDeepStrictEqual(handle.doc(), handle2.doc())
+ assert.deepStrictEqual(handle.doc(), { foo: "bar", bar: "bif" })
+ assert.deepStrictEqual(handle2.doc(), { foo: "bar", baz: "baz" })
  })

  it("the cloned documents can merge", () => {
@@ -187,59 +184,47 @@ describe("Repo", () => {

  handle.merge(handle2)

- assert.deepStrictEqual(handle.docSync(), {
+ assert.deepStrictEqual(handle.doc(), {
  foo: "bar",
  bar: "bif",
  baz: "baz",
  })
  // only the one handle should be changed
- assert.deepStrictEqual(handle2.docSync(), { foo: "bar", baz: "baz" })
+ assert.deepStrictEqual(handle2.doc(), { foo: "bar", baz: "baz" })
  })

  it("throws an error if we try to find a handle with an invalid AutomergeUrl", async () => {
  const { repo } = setup()
- try {
- repo.find<TestDoc>("invalid-url" as unknown as AutomergeUrl)
- } catch (e: any) {
- assert.equal(e.message, "Invalid AutomergeUrl: 'invalid-url'")
- }
+ await expect(async () => {
+ await repo.find<TestDoc>("invalid-url" as unknown as AutomergeUrl)
+ }).rejects.toThrow("Invalid AutomergeUrl: 'invalid-url'")
  })

  it("doesn't find a document that doesn't exist", async () => {
  const { repo } = setup()
- const handle = repo.find<TestDoc>(generateAutomergeUrl())
-
- await handle.whenReady(["ready", "unavailable"])
-
- assert.equal(handle.isReady(), false)
- assert.equal(handle.state, "unavailable")
- const doc = await handle.doc()
- assert.equal(doc, undefined)
- })
-
- it("emits an unavailable event when you don't have the document locally and are not connected to anyone", async () => {
- const { repo } = setup()
- const url = generateAutomergeUrl()
- const handle = repo.find<TestDoc>(url)
- assert.equal(handle.isReady(), false)
- await eventPromise(handle, "unavailable")
+ await expect(async () => {
+ await repo.find<TestDoc>(generateAutomergeUrl())
+ }).rejects.toThrow(/Document (.*) is unavailable/)
  })

  it("doesn't mark a document as unavailable until network adapters are ready", async () => {
  const { repo, networkAdapter } = setup({ startReady: false })
  const url = generateAutomergeUrl()
- const handle = repo.find<TestDoc>(url)

- let wasUnavailable = false
- handle.on("unavailable", () => {
- wasUnavailable = true
- })
+ const attemptedFind = repo.find<TestDoc>(url)

- await pause(50)
- assert.equal(wasUnavailable, false)
+ // First verify it stays pending for 50ms
+ await expect(
+ Promise.race([attemptedFind, pause(50)])
+ ).resolves.toBeUndefined()

+ // Trigger the rejection
  networkAdapter.forceReady()
- await eventPromise(handle, "unavailable")
+
+ // Now verify it rejects
+ await expect(attemptedFind).rejects.toThrow(
+ /Document (.*) is unavailable/
+ )
  })

  it("can find a created document", async () => {
@@ -250,18 +235,18 @@ describe("Repo", () => {
  })
  assert.equal(handle.isReady(), true)

- const bobHandle = repo.find<TestDoc>(handle.url)
+ const bobHandle = await repo.find<TestDoc>(handle.url)

  assert.equal(handle, bobHandle)
  assert.equal(handle.isReady(), true)

- const v = await bobHandle.doc()
+ const v = bobHandle.doc()
  assert.equal(v?.foo, "bar")
  })

  it("saves the document when creating it", async () => {
  const { repo, storageAdapter } = setup()
- const handle = repo.create<TestDoc>()
+ const handle = repo.create<TestDoc>({ foo: "saved" })

  const repo2 = new Repo({
  storage: storageAdapter,
@@ -269,9 +254,9 @@ describe("Repo", () => {

  await repo.flush()

- const bobHandle = repo2.find<TestDoc>(handle.url)
+ const bobHandle = await repo2.find<TestDoc>(handle.url)
  await bobHandle.whenReady()
- assert.equal(bobHandle.isReady(), true)
+ assert.deepEqual(bobHandle.doc(), { foo: "saved" })
  })

  it("saves the document when changed and can find it again", async () => {
@@ -290,9 +275,9 @@ describe("Repo", () => {
  storage: storageAdapter,
  })

- const bobHandle = repo2.find<TestDoc>(handle.url)
+ const bobHandle = await repo2.find<TestDoc>(handle.url)

- const v = await bobHandle.doc()
+ const v = bobHandle.doc()
  assert.equal(v?.foo, "bar")
  })

@@ -304,7 +289,7 @@ describe("Repo", () => {
  })
  // we now have a snapshot and an incremental change in storage
  assert.equal(handle.isReady(), true)
- const foo = await handle.doc()
+ const foo = handle.doc()
  assert.equal(foo?.foo, "bar")

  await pause()
@@ -321,7 +306,7 @@ describe("Repo", () => {
  d.foo = "bar"
  })
  assert.equal(handle.isReady(), true)
- await handle.doc()
+ await handle.whenReady()

  await pause()
  repo.delete(handle.url)
@@ -358,7 +343,7 @@ describe("Repo", () => {

  const exported = await repo.export(handle.documentId)
  const loaded = A.load(exported)
- const doc = await handle.doc()
+ const doc = handle.doc()
  assert.deepEqual(doc, loaded)
  })

@@ -392,9 +377,7 @@ describe("Repo", () => {
  const repo2 = new Repo({
  storage,
  })
- const handle2 = repo2.find(handle.url)
- await handle2.doc()
-
+ const handle2 = await repo2.find(handle.url)
  assert.deepEqual(storage.keys(), initialKeys)
  })

@@ -420,9 +403,7 @@ describe("Repo", () => {
  const repo2 = new Repo({
  storage,
  })
- const handle2 = repo2.find(handle.url)
- await handle2.doc()
-
+ const handle2 = await repo2.find(handle.url)
  assert(storage.keys().length !== 0)
  }
  })
@@ -462,7 +443,7 @@ describe("Repo", () => {

  const handle = repo.import<TestDoc>(saved)
  assert.equal(handle.isReady(), true)
- const v = await handle.doc()
+ const v = handle.doc()
  assert.equal(v?.foo, "bar")

  expect(A.getHistory(v)).toEqual(A.getHistory(updatedDoc))
@@ -481,7 +462,7 @@ describe("Repo", () => {
  const { repo } = setup()
  // @ts-ignore - passing something other than UInt8Array
  const handle = repo.import<TestDoc>(A.from({ foo: 123 }))
- const doc = await handle.doc()
+ const doc = handle.doc()
  expect(doc).toEqual({})
  })

@@ -489,7 +470,7 @@ describe("Repo", () => {
  const { repo } = setup()
  // @ts-ignore - passing something other than UInt8Array
  const handle = repo.import<TestDoc>({ foo: 123 })
- const doc = await handle.doc()
+ const doc = handle.doc()
  expect(doc).toEqual({})
  })

@@ -497,14 +478,12 @@ describe("Repo", () => {
  it("contains doc handle", async () => {
  const { repo } = setup()
  const handle = repo.create({ foo: "bar" })
- await handle.doc()
  assert(repo.handles[handle.documentId])
  })

  it("delete removes doc handle", async () => {
  const { repo } = setup()
  const handle = repo.create({ foo: "bar" })
- await handle.doc()
  await repo.delete(handle.documentId)
  assert(repo.handles[handle.documentId] === undefined)
  })
@@ -512,7 +491,6 @@ describe("Repo", () => {
  it("removeFromCache removes doc handle", async () => {
  const { repo } = setup()
  const handle = repo.create({ foo: "bar" })
- await handle.doc()
  await repo.removeFromCache(handle.documentId)
  assert(repo.handles[handle.documentId] === undefined)
  })
@@ -571,8 +549,8 @@ describe("Repo", () => {

  it("should not be in a new repo yet because the storage is slow", async () => {
  const { pausedStorage, repo, handle, handle2 } = setup()
- expect((await handle.doc()).foo).toEqual("first")
- expect((await handle2.doc()).foo).toEqual("second")
+ expect((await handle).doc().foo).toEqual("first")
+ expect((await handle2).doc().foo).toEqual("second")

  // Reload repo
  const repo2 = new Repo({
@@ -580,9 +558,10 @@ describe("Repo", () => {
  })

  // Could not find the document that is not yet saved because of slow storage.
- const reloadedHandle = repo2.find<{ foo: string }>(handle.url)
+ await expect(async () => {
+ const reloadedHandle = await repo2.find<{ foo: string }>(handle.url)
+ }).rejects.toThrow(/Document (.*) is unavailable/)
  expect(pausedStorage.keys()).to.deep.equal([])
- expect(await reloadedHandle.doc()).toEqual(undefined)
  })

  it("should be visible to a new repo after flush()", async () => {
@@ -602,10 +581,10 @@ describe("Repo", () => {
  })

  expect(
- (await repo.find<{ foo: string }>(handle.documentId).doc()).foo
+ (await repo.find<{ foo: string }>(handle.documentId)).doc().foo
  ).toEqual("first")
  expect(
- (await repo.find<{ foo: string }>(handle2.documentId).doc()).foo
+ (await repo.find<{ foo: string }>(handle2.documentId)).doc().foo
  ).toEqual("second")
  }
  })
@@ -627,13 +606,13 @@ describe("Repo", () => {
  })

  expect(
- (await repo.find<{ foo: string }>(handle.documentId).doc()).foo
+ (await repo.find<{ foo: string }>(handle.documentId)).doc().foo
  ).toEqual("first")
  // Really, it's okay if the second one is also flushed but I'm forcing the issue
  // in the test storage engine above to make sure the behaviour is as documented
- expect(
- await repo.find<{ foo: string }>(handle2.documentId).doc()
- ).toEqual(undefined)
+ await expect(async () => {
+ ;(await repo.find<{ foo: string }>(handle2.documentId)).doc()
+ }).rejects.toThrow(/Document (.*) is unavailable/)
  }
  })

@@ -681,7 +660,7 @@ describe("Repo", () => {

  if (idx < numberOfPeers - 1) {
  network.push(pair[0])
- pair[0].whenReady()
+ networkReady.push(pair[0].whenReady())
  }

  const repo = new Repo({
@@ -712,7 +691,6 @@ describe("Repo", () => {
  }

  await connectedPromise
-
  return { repos }
  }

@@ -724,10 +702,14 @@ describe("Repo", () => {
  d.foo = "bar"
  })

- const handleN = repos[numberOfPeers - 1].find<TestDoc>(handle0.url)
+ const handleN = await repos[numberOfPeers - 1].find<TestDoc>(handle0.url)
+ assert.deepStrictEqual(handleN.doc(), { foo: "bar" })

- await handleN.whenReady()
- assert.deepStrictEqual(handleN.docSync(), { foo: "bar" })
+ const handleNBack = repos[numberOfPeers - 1].create({
+ foo: "reverse-trip",
+ })
+ const handle0Back = await repos[0].find<TestDoc>(handleNBack.url)
+ assert.deepStrictEqual(handle0Back.doc(), { foo: "reverse-trip" })
  })

  const setup = async ({
@@ -854,9 +836,8 @@ describe("Repo", () => {
  it("changes are replicated from aliceRepo to bobRepo", async () => {
  const { bobRepo, aliceHandle, teardown } = await setup()

- const bobHandle = bobRepo.find<TestDoc>(aliceHandle.url)
- await eventPromise(bobHandle, "change")
- const bobDoc = await bobHandle.doc()
+ const bobHandle = await bobRepo.find<TestDoc>(aliceHandle.url)
+ const bobDoc = bobHandle.doc()
  assert.deepStrictEqual(bobDoc, { foo: "bar" })
  teardown()
  })
@@ -864,9 +845,8 @@ describe("Repo", () => {
  it("can load a document from aliceRepo on charlieRepo", async () => {
  const { charlieRepo, aliceHandle, teardown } = await setup()

- const handle3 = charlieRepo.find<TestDoc>(aliceHandle.url)
- await eventPromise(handle3, "change")
- const doc3 = await handle3.doc()
+ const handle3 = await charlieRepo.find<TestDoc>(aliceHandle.url)
+ const doc3 = handle3.doc()
  assert.deepStrictEqual(doc3, { foo: "bar" })
  teardown()
  })
@@ -885,12 +865,11 @@ describe("Repo", () => {
  await bobRepo2.flush()

  // Now, let's load it on the original bob repo (which shares a "disk")
- const bobFoundIt = bobRepo.find<TestDoc>(inStorageHandle.url)
- await bobFoundIt.whenReady()
+ const bobFoundIt = await bobRepo.find<TestDoc>(inStorageHandle.url)

  // Before checking if it syncs, make sure we have it!
  // (This behaviour is mostly test-validation, we are already testing load/save elsewhere.)
- assert.deepStrictEqual(await bobFoundIt.doc(), { foo: "foundOnFakeDisk" })
+ assert.deepStrictEqual(bobFoundIt.doc(), { foo: "foundOnFakeDisk" })

  await pause(10)

@@ -930,11 +909,8 @@ describe("Repo", () => {
  it("charlieRepo can request a document not initially shared with it", async () => {
  const { charlieRepo, notForCharlie, teardown } = await setup()

- const handle = charlieRepo.find<TestDoc>(notForCharlie)
-
- await pause(50)
-
- const doc = await handle.doc()
+ const handle = await charlieRepo.find<TestDoc>(notForCharlie)
+ const doc = handle.doc()

  assert.deepStrictEqual(doc, { foo: "baz" })

@@ -944,11 +920,11 @@ describe("Repo", () => {
  it("charlieRepo can request a document across a network of multiple peers", async () => {
  const { charlieRepo, notForBob, teardown } = await setup()

- const handle = charlieRepo.find<TestDoc>(notForBob)
+ const handle = await charlieRepo.find<TestDoc>(notForBob)

  await pause(50)

- const doc = await handle.doc()
+ const doc = handle.doc()
  assert.deepStrictEqual(doc, { foo: "bap" })

  teardown()
@@ -957,42 +933,10 @@ describe("Repo", () => {
  it("doesn't find a document which doesn't exist anywhere on the network", async () => {
  const { charlieRepo, teardown } = await setup()
  const url = generateAutomergeUrl()
- const handle = charlieRepo.find<TestDoc>(url)
- assert.equal(handle.isReady(), false)
-
- const doc = await handle.doc()
- assert.equal(doc, undefined)
-
- teardown()
- })

- it("emits an unavailable event when it's not found on the network", async () => {
- const { aliceRepo, teardown } = await setup()
- const url = generateAutomergeUrl()
- const handle = aliceRepo.find(url)
- assert.equal(handle.isReady(), false)
- await eventPromise(handle, "unavailable")
- teardown()
- })
-
- it("emits an unavailable event every time an unavailable doc is requested", async () => {
- const { charlieRepo, teardown } = await setup()
- const url = generateAutomergeUrl()
- const handle = charlieRepo.find<TestDoc>(url)
- assert.equal(handle.isReady(), false)
-
- await Promise.all([
- eventPromise(handle, "unavailable"),
- eventPromise(charlieRepo, "unavailable-document"),
- ])
-
- // make sure it emits a second time if the doc is still unavailable
- const handle2 = charlieRepo.find<TestDoc>(url)
- assert.equal(handle2.isReady(), false)
- await Promise.all([
- eventPromise(handle, "unavailable"),
- eventPromise(charlieRepo, "unavailable-document"),
- ])
+ await expect(charlieRepo.find<TestDoc>(url)).rejects.toThrow(
+ /Document (.*) is unavailable/
+ )

  teardown()
  })
@@ -1007,21 +951,23 @@ describe("Repo", () => {
  } = await setup({ connectAlice: false })

  const url = stringifyAutomergeUrl({ documentId: notForCharlie })
- const handle = charlieRepo.find<TestDoc>(url)
- assert.equal(handle.isReady(), false)
-
- await eventPromise(handle, "unavailable")
+ await expect(charlieRepo.find<TestDoc>(url)).rejects.toThrow(
+ /Document (.*) is unavailable/
+ )

  connectAliceToBob()

  await eventPromise(aliceRepo.networkSubsystem, "peer")

- const doc = await handle.doc(["ready"])
+ // Not sure why we need this pause here, but... we do.
+ await pause(150)
+ const handle = await charlieRepo.find<TestDoc>(url)
+ const doc = handle.doc()
  assert.deepStrictEqual(doc, { foo: "baz" })

  // an additional find should also return the correct resolved document
- const handle2 = charlieRepo.find<TestDoc>(url)
- const doc2 = await handle2.doc()
+ const handle2 = await charlieRepo.find<TestDoc>(url)
+ const doc2 = handle2.doc()
  assert.deepStrictEqual(doc2, { foo: "baz" })

  teardown()
@@ -1057,11 +1003,9 @@ describe("Repo", () => {
  sharePolicy: async () => true,
  })

- const handle = a.find(url)
-
- // We expect this to be unavailable as there is no connected peer and
- // the repo has no storage.
- await eventPromise(handle, "unavailable")
+ await expect(a.find<TestDoc>(url)).rejects.toThrow(
+ /Document (.*) is unavailable/
+ )

  // Now create a repo pointing at the storage containing the document and
  // connect it to the other end of the MessageChannel
@@ -1071,9 +1015,14 @@ describe("Repo", () => {
  network: [new MessageChannelNetworkAdapter(ba)],
  })

+ // We need a proper peer status API so we can tell when the
+ // peer is connected. For now we just wait a bit.
+ await pause(50)
+
  // The empty repo should be notified of the new peer, send it a request
  // and eventually resolve the handle to "READY"
- await handle.whenReady()
+ const handle = await a.find<TestDoc>(url)
+ expect(handle.state).toBe("ready")
  })

  it("a deleted document from charlieRepo can be refetched", async () => {
@@ -1089,9 +1038,8 @@ describe("Repo", () => {
  })
  await changePromise

- const handle3 = charlieRepo.find<TestDoc>(aliceHandle.url)
- await eventPromise(handle3, "change")
- const doc3 = await handle3.doc()
+ const handle3 = await charlieRepo.find<TestDoc>(aliceHandle.url)
+ const doc3 = handle3.doc()

  assert.deepStrictEqual(doc3, { foo: "baz" })

@@ -1117,7 +1065,7 @@ describe("Repo", () => {

  // make sure the doc is ready
  if (!doc.isReady()) {
- await doc.doc()
+ await doc.whenReady()
  }

  // make a random change to it
@@ -1135,10 +1083,10 @@ describe("Repo", () => {

  const data = { presence: "alice" }

- const aliceHandle = aliceRepo.find<TestDoc>(
+ const aliceHandle = await aliceRepo.find<TestDoc>(
  stringifyAutomergeUrl({ documentId: notForCharlie })
  )
- const bobHandle = bobRepo.find<TestDoc>(
+ const bobHandle = await bobRepo.find<TestDoc>(
  stringifyAutomergeUrl({ documentId: notForCharlie })
  )

@@ -1291,7 +1239,7 @@ describe("Repo", () => {
  })
  })

- const charlieHandle = charlieRepo.find<TestDoc>(handle.url)
+ const charlieHandle = await charlieRepo.find<TestDoc>(handle.url)
  await charlieHandle.whenReady()

  // make a change on charlie
@@ -1329,34 +1277,6 @@ describe("Repo", () => {
  })
  })

- it("peer receives a document when connection is recovered", async () => {
- const alice = "alice" as PeerId
- const bob = "bob" as PeerId
- const [aliceAdapter, bobAdapter] = DummyNetworkAdapter.createConnectedPair()
- const aliceRepo = new Repo({
- network: [aliceAdapter],
- peerId: alice,
- })
- const bobRepo = new Repo({
- network: [bobAdapter],
- peerId: bob,
- })
- const aliceDoc = aliceRepo.create()
- aliceDoc.change((doc: any) => (doc.text = "Hello world"))
-
- const bobDoc = bobRepo.find(aliceDoc.url)
- await eventPromise(bobDoc, "unavailable")
-
- aliceAdapter.peerCandidate(bob)
- // Bob isn't yet connected to Alice and can't respond to her sync message
- await pause(100)
- bobAdapter.peerCandidate(alice)
-
- await bobDoc.whenReady()
-
- assert.equal(bobDoc.isReady(), true)
- })
-
  describe("with peers (mesh network)", () => {
  const setup = async () => {
  // Set up three repos; connect Alice to Bob, Bob to Charlie, and Alice to Charlie
@@ -1418,8 +1338,8 @@ describe("Repo", () => {

  const aliceHandle = aliceRepo.create<TestDoc>()

- const bobHandle = bobRepo.find(aliceHandle.url)
- const charlieHandle = charlieRepo.find(aliceHandle.url)
+ const bobHandle = await bobRepo.find(aliceHandle.url)
+ const charlieHandle = await charlieRepo.find(aliceHandle.url)

  // Alice should not receive her own ephemeral message
  aliceHandle.on("ephemeral-message", () => {
@@ -1457,9 +1377,8 @@ describe("Repo", () => {
  // pause to let the sync happen
  await pause(50)

- const charlieHandle = charlieRepo.find(handle2.url)
- await charlieHandle.doc()
- assert.deepStrictEqual(charlieHandle.docSync(), { foo: "bar" })
+ const charlieHandle = await charlieRepo.find(handle2.url)
+ assert.deepStrictEqual(charlieHandle.doc(), { foo: "bar" })

  teardown()
  })
@@ -1476,9 +1395,8 @@ describe("Repo", () => {
  // pause to let the sync happen
  await pause(50)

- const charlieHandle = charlieRepo.find(handle2.url)
- await charlieHandle.doc()
- assert.deepStrictEqual(charlieHandle.docSync(), { foo: "bar" })
+ const charlieHandle = await charlieRepo.find(handle2.url)
+ assert.deepStrictEqual(charlieHandle.doc(), { foo: "bar" })

  // now make a change to doc2 on bobs side and merge it into doc1
  handle2.change(d => {
@@ -1489,8 +1407,7 @@ describe("Repo", () => {
  // wait for the network to do it's thang
  await pause(350)

- await charlieHandle.doc()
- assert.deepStrictEqual(charlieHandle.docSync(), { foo: "baz" })
+ assert.deepStrictEqual(charlieHandle.doc(), { foo: "baz" })

  teardown()
  })
@@ -1525,9 +1442,9 @@ describe("Repo", () => {
  eventPromise(client.networkSubsystem, "peer"),
  ])

- const clientDoc = client.find(doc.url)
- await pause(100)
- assert.strictEqual(clientDoc.docSync(), undefined)
+ await expect(async () => {
+ const clientDoc = await client.find(doc.url)
+ }).rejects.toThrow(/Document (.*) is unavailable/)

  const openDocs = Object.keys(server.metrics().documents).length
  assert.deepEqual(openDocs, 0)
@@ -1547,8 +1464,8 @@ describe("Repo heads-in-URLs functionality", () => {
  const { repo, handle } = setup()
  const heads = handle.heads()!
  const url = stringifyAutomergeUrl({ documentId: handle.documentId, heads })
- const view = repo.find(url)
- expect(view.docSync()).toEqual({ title: "Hello World" })
+ const view = await repo.find(url)
+ expect(view.doc()).toEqual({ title: "Hello World" })
  })

  it("returns a view, not the actual handle, when finding by URL with heads", async () => {
@@ -1556,35 +1473,35 @@ describe("Repo heads-in-URLs functionality", () => {
  const heads = handle.heads()!
  await handle.change((doc: any) => (doc.title = "Changed"))
  const url = stringifyAutomergeUrl({ documentId: handle.documentId, heads })
- const view = repo.find(url)
- expect(view.docSync()).toEqual({ title: "Hello World" })
- expect(handle.docSync()).toEqual({ title: "Changed" })
+ const view = await repo.find(url)
+ expect(view.doc()).toEqual({ title: "Hello World" })
+ expect(handle.doc()).toEqual({ title: "Changed" })
  })

  it("changes to a document view do not affect the original", async () => {
  const { repo, handle } = setup()
  const heads = handle.heads()!
  const url = stringifyAutomergeUrl({ documentId: handle.documentId, heads })
- const view = repo.find(url)
+ const view = await repo.find(url)
  expect(() =>
  view.change((doc: any) => (doc.title = "Changed in View"))
  ).toThrow()
- expect(handle.docSync()).toEqual({ title: "Hello World" })
+ expect(handle.doc()).toEqual({ title: "Hello World" })
  })

  it("document views are read-only", async () => {
  const { repo, handle } = setup()
  const heads = handle.heads()!
  const url = stringifyAutomergeUrl({ documentId: handle.documentId, heads })
- const view = repo.find(url)
+ const view = await repo.find(url)
  expect(() => view.change((doc: any) => (doc.title = "Changed"))).toThrow()
  })

  it("finds the latest document when given a URL without heads", async () => {
  const { repo, handle } = setup()
  await handle.change((doc: any) => (doc.title = "Changed"))
- const found = repo.find(handle.url)
- expect(found.docSync()).toEqual({ title: "Changed" })
+ const found = await repo.find(handle.url)
+ expect(found.doc()).toEqual({ title: "Changed" })
  })

  it("getHeadsFromUrl returns heads array if present or undefined", () => {
@@ -1640,6 +1557,50 @@ describe("Repo heads-in-URLs functionality", () => {
  })
  })

+ describe("Repo.find() abort behavior", () => {
+ it("aborts immediately if signal is already aborted", async () => {
+ const repo = new Repo()
+ const controller = new AbortController()
+ controller.abort()
+
+ await expect(
+ repo.find(generateAutomergeUrl(), { signal: controller.signal })
+ ).rejects.toThrow("Operation aborted")
+ })
+
+ it("can abort while waiting for ready state", async () => {
+ // Create a repo with no network adapters so document can't become ready
+ const repo = new Repo()
+ const url = generateAutomergeUrl()
+
+ const controller = new AbortController()
+
+ // Start find and abort after a moment
+ const findPromise = repo.find(url, { signal: controller.signal })
+ controller.abort()
+
+ await expect(findPromise).rejects.toThrow("Operation aborted")
+ await expect(findPromise).rejects.not.toThrow("unavailable")
+ })
+
+ it("returns handle immediately when allow unavailable is true, even with abort signal", async () => {
+ const repo = new Repo()
+ const controller = new AbortController()
+ const url = generateAutomergeUrl()
+
+ const handle = await repo.find(url, {
+ allowableStates: ["unavailable"],
+ signal: controller.signal,
+ })
+
+ expect(handle).toBeDefined()
+
+ // Abort shouldn't affect the result since we skipped ready
+ controller.abort()
+ expect(handle.url).toBe(url)
+ })
+ })
+
  const warn = console.warn
  const NO_OP = () => {}