@automerge/automerge-repo 2.0.0-alpha.20 → 2.0.0-alpha.22

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/Repo.ts CHANGED
@@ -39,6 +39,8 @@ import type {
   DocumentId,
   PeerId,
 } from "./types.js"
+import { abortable, AbortOptions } from "./helpers/abortable.js"
+import { FindProgress, FindProgressWithMethods } from "./FindProgress.js"
 
 function randomPeerId() {
   return ("peer-" + Math.random().toString(36).slice(4)) as PeerId
@@ -260,18 +262,8 @@ export class Repo extends EventEmitter<RepoEvents> {
       handle.on("heads-changed", throttle(saveFn, this.saveDebounceRate))
     }
 
-    handle.on("unavailable", () => {
-      this.#log("document unavailable", { documentId: handle.documentId })
-      this.emit("unavailable-document", {
-        documentId: handle.documentId,
-      })
-    })
-
     // Register the document with the synchronizer. This advertises our interest in the document.
-    this.synchronizer.addDocument(handle.documentId)
-
-    // Preserve the old event in case anyone was using it.
-    this.emit("document", { handle })
+    this.synchronizer.addDocument(handle)
   }
 
   #receiveMessage(message: RepoMessage) {
@@ -402,8 +394,6 @@ export class Repo extends EventEmitter<RepoEvents> {
    * Any peers this `Repo` is connected to for whom `sharePolicy` returns `true` will
    * be notified of the newly created DocHandle.
    *
-   * @throws if the cloned handle is not yet ready or if
-   * `clonedHandle.docSync()` returns `undefined` (i.e. the handle is unavailable).
    */
   clone<T>(clonedHandle: DocHandle<T>) {
     if (!clonedHandle.isReady()) {
@@ -413,11 +403,7 @@ export class Repo extends EventEmitter<RepoEvents> {
       )
     }
 
-    const sourceDoc = clonedHandle.docSync()
-    if (!sourceDoc) {
-      throw new Error("Cloned handle doesn't have a document.")
-    }
-
+    const sourceDoc = clonedHandle.doc()
     const handle = this.create<T>()
 
     handle.update(() => {
@@ -428,63 +414,196 @@ export class Repo extends EventEmitter<RepoEvents> {
     return handle
   }
 
-  /**
-   * Retrieves a document by id. It gets data from the local system, but also emits a `document`
-   * event to advertise interest in the document.
-   */
-  find<T>(
-    /** The url or documentId of the handle to retrieve */
-    id: AnyDocumentId
-  ): DocHandle<T> {
+  findWithProgress<T>(
+    id: AnyDocumentId,
+    options: AbortOptions = {}
+  ): FindProgressWithMethods<T> | FindProgress<T> {
+    const { signal } = options
+    const abortPromise = abortable(signal)
+
     const { documentId, heads } = isValidAutomergeUrl(id)
       ? parseAutomergeUrl(id)
       : { documentId: interpretAsDocumentId(id), heads: undefined }
 
-    const cachedHandle = this.#handleCache[documentId]
-    if (cachedHandle) {
-      if (cachedHandle.isUnavailable()) {
-        // this ensures that the event fires after the handle has been returned
-        setTimeout(() => {
-          cachedHandle.emit("unavailable", {
-            handle: cachedHandle,
-          })
-        })
+    // Check cache first - return plain FindStep for terminal states
+    if (this.#handleCache[documentId]) {
+      const handle = this.#handleCache[documentId]
+      if (handle.state === UNAVAILABLE) {
+        const result = {
+          state: "unavailable" as const,
+          error: new Error(`Document ${id} is unavailable`),
+          handle,
+        }
+        return result
+      }
+      if (handle.state === DELETED) {
+        return {
+          state: "failed",
+          error: new Error(`Document ${id} was deleted`),
+          handle,
+        }
+      }
+      if (handle.state === READY) {
+        // If we already have the handle, return it immediately (or a view of the handle if heads are specified)
+        return {
+          state: "ready",
+          // TODO: this handle needs to be cached (or at least avoid running clone)
+          handle: heads ? handle.view(heads) : handle,
+        }
       }
-      // If we already have the handle, return it immediately (or a view of the handle if heads are specified)
-      return heads ? cachedHandle.view(heads) : cachedHandle
     }
 
-    // If we don't already have the handle, make an empty one and try loading it
-    const handle = this.#getHandle<T>({
-      documentId,
-    }) as DocHandle<T>
+    // the generator takes over `this`, so we need an alias to the repo this
+    // eslint-disable-next-line @typescript-eslint/no-this-alias
+    const that = this
+    async function* progressGenerator(): AsyncGenerator<FindProgress<T>> {
+      try {
+        const handle = that.#getHandle<T>({ documentId })
+        yield { state: "loading", progress: 25, handle }
 
-    // Loading & network is going to be asynchronous no matter what,
-    // but we want to return the handle immediately.
-    const attemptLoad = this.storageSubsystem
-      ? this.storageSubsystem.loadDoc(handle.documentId)
-      : Promise.resolve(null)
+        const loadingPromise = await (that.storageSubsystem
+          ? that.storageSubsystem.loadDoc(handle.documentId)
+          : Promise.resolve(null))
+
+        const loadedDoc = await Promise.race([loadingPromise, abortPromise])
 
-    attemptLoad
-      .then(async loadedDoc => {
         if (loadedDoc) {
-          // uhhhh, sorry if you're reading this because we were lying to the type system
           handle.update(() => loadedDoc as Automerge.Doc<T>)
           handle.doneLoading()
+          yield { state: "loading", progress: 50, handle }
         } else {
-          // we want to wait for the network subsystem to be ready before
-          // we request the document. this prevents entering unavailable during initialization.
-          await this.networkSubsystem.whenReady()
+          await Promise.race([that.networkSubsystem.whenReady(), abortPromise])
           handle.request()
+          yield { state: "loading", progress: 75, handle }
         }
-        this.#registerHandleWithSubsystems(handle)
-      })
-      .catch(err => {
-        this.#log("error waiting for network", { err })
-      })
 
-    // If we already have the handle, return it immediately (or a view of the handle if heads are specified)
-    return heads ? handle.view(heads) : handle
+        that.#registerHandleWithSubsystems(handle)
+
+        await Promise.race([
+          handle.whenReady([READY, UNAVAILABLE]),
+          abortPromise,
+        ])
+
+        if (handle.state === UNAVAILABLE) {
+          yield { state: "unavailable", handle }
+        }
+        if (handle.state === DELETED) {
+          throw new Error(`Document ${id} was deleted`)
+        }
+
+        yield { state: "ready", handle }
+      } catch (error) {
+        yield {
+          state: "failed",
+          error: error instanceof Error ? error : new Error(String(error)),
+          handle,
+        }
+      }
+    }
+
+    const iterator = progressGenerator()
+
+    const next = async () => {
+      const result = await iterator.next()
+      return { ...result.value, next }
+    }
+
+    const untilReady = async (allowableStates: string[]) => {
+      for await (const state of iterator) {
+        if (allowableStates.includes(state.handle.state)) {
+          return state.handle
+        }
+        if (state.state === "unavailable") {
+          throw new Error(`Document ${id} is unavailable`)
+        }
+        if (state.state === "ready") return state.handle
+        if (state.state === "failed") throw state.error
+      }
+      throw new Error("Iterator completed without reaching ready state")
+    }
+
+    const handle = this.#getHandle<T>({ documentId })
+    const initial = { state: "loading" as const, progress: 0, handle }
+    return { ...initial, next, untilReady }
+  }
+
+  async find<T>(
+    id: AnyDocumentId,
+    options: RepoFindOptions & AbortOptions = {}
+  ): Promise<DocHandle<T>> {
+    const { allowableStates = ["ready"], signal } = options
+    const progress = this.findWithProgress<T>(id, { signal })
+
+    /*if (allowableStates.includes(progress.state)) {
+      console.log("returning early")
+      return progress.handle
+    }*/
+
+    if ("untilReady" in progress) {
+      this.#registerHandleWithSubsystems(progress.handle)
+      return progress.untilReady(allowableStates)
+    } else {
+      return progress.handle
+    }
+  }
+
+  /**
+   * Loads a document without waiting for ready state
+   */
+  async #loadDocument<T>(documentId: DocumentId): Promise<DocHandle<T>> {
+    // If we have the handle cached, return it
+    if (this.#handleCache[documentId]) {
+      return this.#handleCache[documentId]
+    }
+
+    // If we don't already have the handle, make an empty one and try loading it
+    const handle = this.#getHandle<T>({ documentId })
+    const loadedDoc = await (this.storageSubsystem
+      ? this.storageSubsystem.loadDoc(handle.documentId)
+      : Promise.resolve(null))
+
+    if (loadedDoc) {
+      // We need to cast this to <T> because loadDoc operates in <unknowns>.
+      // This is really where we ought to be validating the input matches <T>.
+      handle.update(() => loadedDoc as Automerge.Doc<T>)
+      handle.doneLoading()
+    } else {
+      // Because the network subsystem might still be booting up, we wait
+      // here so that we don't immediately give up loading because we're still
+      // making our initial connection to a sync server.
+      await this.networkSubsystem.whenReady()
+      handle.request()
+    }
+
+    this.#registerHandleWithSubsystems(handle)
+    return handle
+  }
+
+  /**
+   * Retrieves a document by id. It gets data from the local system, but also emits a `document`
+   * event to advertise interest in the document.
+   */
+  async findClassic<T>(
+    /** The url or documentId of the handle to retrieve */
+    id: AnyDocumentId,
+    options: RepoFindOptions & AbortOptions = {}
+  ): Promise<DocHandle<T>> {
+    const documentId = interpretAsDocumentId(id)
+    const { allowableStates, signal } = options
+
+    return Promise.race([
+      (async () => {
+        const handle = await this.#loadDocument<T>(documentId)
+        if (!allowableStates) {
+          await handle.whenReady([READY, UNAVAILABLE])
+          if (handle.state === UNAVAILABLE && !signal?.aborted) {
+            throw new Error(`Document ${id} is unavailable`)
+          }
+        }
+        return handle
+      })(),
+      abortable(signal),
+    ])
   }
 
   delete(
@@ -511,8 +630,7 @@ export class Repo extends EventEmitter<RepoEvents> {
     const documentId = interpretAsDocumentId(id)
 
     const handle = this.#getHandle({ documentId })
-    const doc = await handle.doc()
-    if (!doc) return undefined
+    const doc = handle.doc()
     return Automerge.save(doc)
   }
 
@@ -566,11 +684,7 @@ export class Repo extends EventEmitter<RepoEvents> {
       : Object.values(this.#handleCache)
     await Promise.all(
       handles.map(async handle => {
-        const doc = handle.docSync()
-        if (!doc) {
-          return
-        }
-        return this.storageSubsystem!.saveDoc(handle.documentId, doc)
+        return this.storageSubsystem!.saveDoc(handle.documentId, handle.doc())
       })
     )
   }
@@ -589,7 +703,9 @@ export class Repo extends EventEmitter<RepoEvents> {
       return
     }
     const handle = this.#getHandle({ documentId })
-    const doc = await handle.doc([READY, UNLOADED, DELETED, UNAVAILABLE])
+    await handle.whenReady([READY, UNLOADED, DELETED, UNAVAILABLE])
+    const doc = handle.doc()
+    // because this is an internal-ish function, we'll be extra careful about undefined docs here
     if (doc) {
      if (handle.isReady()) {
        handle.unload()
@@ -677,6 +793,10 @@ export interface RepoEvents {
   "doc-metrics": (arg: DocMetrics) => void
 }
 
+export interface RepoFindOptions {
+  allowableStates?: string[]
+}
+
 export interface DocumentPayload {
   handle: DocHandle<any>
 }
1
+ /**
2
+ * Creates a promise that rejects when the signal is aborted.
3
+ *
4
+ * @remarks
5
+ * This utility creates a promise that rejects when the provided AbortSignal is aborted.
6
+ * It's designed to be used with Promise.race() to make operations abortable.
7
+ *
8
+ * @example
9
+ * ```typescript
10
+ * const controller = new AbortController();
11
+ *
12
+ * try {
13
+ * const result = await Promise.race([
14
+ * fetch('https://api.example.com/data'),
15
+ * abortable(controller.signal)
16
+ * ]);
17
+ * } catch (err) {
18
+ * if (err.name === 'AbortError') {
19
+ * console.log('The operation was aborted');
20
+ * }
21
+ * }
22
+ *
23
+ * // Later, to abort:
24
+ * controller.abort();
25
+ * ```
26
+ *
27
+ * @param signal - An AbortSignal that can be used to abort the operation
28
+ * @param cleanup - Optional cleanup function that will be called if aborted
29
+ * @returns A promise that rejects with AbortError when the signal is aborted
30
+ * @throws {DOMException} With name "AbortError" when aborted
31
+ */
32
+ export function abortable(
33
+ signal?: AbortSignal,
34
+ cleanup?: () => void
35
+ ): Promise<never> {
36
+ if (signal?.aborted) {
37
+ throw new DOMException("Operation aborted", "AbortError")
38
+ }
39
+
40
+ if (!signal) {
41
+ return new Promise(() => {}) // Never resolves
42
+ }
43
+
44
+ return new Promise((_, reject) => {
45
+ signal.addEventListener(
46
+ "abort",
47
+ () => {
48
+ cleanup?.()
49
+ reject(new DOMException("Operation aborted", "AbortError"))
50
+ },
51
+ { once: true }
52
+ )
53
+ })
54
+ }
55
+
56
+ /**
57
+ * Include this type in an options object to pass an AbortSignal to a function.
58
+ */
59
+ export interface AbortOptions {
60
+ signal?: AbortSignal
61
+ }
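
The new `abortable()` helper is what backs the `signal` option in `find()`. A hedged sketch of the optional `cleanup` callback, which the JSDoc above mentions but does not demonstrate (the timer and import path are illustrative):

```typescript
// Sketch only: racing a slow operation against an abortable() rejection.
// The cleanup callback runs when the signal fires, releasing the timer.
import { abortable } from "./helpers/abortable.js" // path assumed from this diff

const controller = new AbortController()

let timer: ReturnType<typeof setTimeout> | undefined
const slowOperation = new Promise<string>(resolve => {
  timer = setTimeout(() => resolve("done"), 10_000)
})

try {
  const result = await Promise.race([
    slowOperation,
    abortable(controller.signal, () => clearTimeout(timer)),
  ])
  console.log(result)
} catch (err) {
  if (err instanceof DOMException && err.name === "AbortError") {
    console.log("operation aborted")
  } else {
    throw err
  }
}

// Elsewhere, to cancel: controller.abort()
```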
@@ -49,9 +49,10 @@ export function runNetworkAdapterTests(_setup: SetupFn, title?: string): void {
       // Alice creates a document
       const aliceHandle = aliceRepo.create<TestDoc>()
 
-      // Bob receives the document
-      await eventPromise(bobRepo, "document")
-      const bobHandle = bobRepo.find<TestDoc>(aliceHandle.url)
+      // TODO: ... let connections complete. this shouldn't be necessary.
+      await pause(50)
+
+      const bobHandle = await bobRepo.find<TestDoc>(aliceHandle.url)
 
       // Alice changes the document
       aliceHandle.change(d => {
@@ -60,7 +61,7 @@ export function runNetworkAdapterTests(_setup: SetupFn, title?: string): void {
 
       // Bob receives the change
       await eventPromise(bobHandle, "change")
-      assert.equal((await bobHandle.doc())?.foo, "bar")
+      assert.equal((await bobHandle).doc()?.foo, "bar")
 
       // Bob changes the document
       bobHandle.change(d => {
@@ -69,7 +70,7 @@ export function runNetworkAdapterTests(_setup: SetupFn, title?: string): void {
 
       // Alice receives the change
       await eventPromise(aliceHandle, "change")
-      assert.equal((await aliceHandle.doc())?.foo, "baz")
+      assert.equal(aliceHandle.doc().foo, "baz")
     }
 
     // Run the test in both directions, in case they're different types of adapters
@@ -100,9 +101,9 @@ export function runNetworkAdapterTests(_setup: SetupFn, title?: string): void {
       const docUrl = aliceHandle.url
 
       // Bob and Charlie receive the document
-      await eventPromises([bobRepo, charlieRepo], "document")
-      const bobHandle = bobRepo.find<TestDoc>(docUrl)
-      const charlieHandle = charlieRepo.find<TestDoc>(docUrl)
+      await pause(50)
+      const bobHandle = await bobRepo.find<TestDoc>(docUrl)
+      const charlieHandle = await charlieRepo.find<TestDoc>(docUrl)
 
       // Alice changes the document
       aliceHandle.change(d => {
@@ -111,8 +112,8 @@ export function runNetworkAdapterTests(_setup: SetupFn, title?: string): void {
 
       // Bob and Charlie receive the change
       await eventPromises([bobHandle, charlieHandle], "change")
-      assert.equal((await bobHandle.doc())?.foo, "bar")
-      assert.equal((await charlieHandle.doc())?.foo, "bar")
+      assert.equal(bobHandle.doc().foo, "bar")
+      assert.equal(charlieHandle.doc().foo, "bar")
 
       // Charlie changes the document
       charlieHandle.change(d => {
@@ -121,8 +122,8 @@ export function runNetworkAdapterTests(_setup: SetupFn, title?: string): void {
 
       // Alice and Bob receive the change
       await eventPromises([aliceHandle, bobHandle], "change")
-      assert.equal((await bobHandle.doc())?.foo, "baz")
-      assert.equal((await charlieHandle.doc())?.foo, "baz")
+      assert.equal(bobHandle.doc().foo, "baz")
+      assert.equal(charlieHandle.doc().foo, "baz")
 
       teardown()
     })
@@ -141,7 +142,7 @@ export function runNetworkAdapterTests(_setup: SetupFn, title?: string): void {
       )
 
       const aliceHandle = aliceRepo.create<TestDoc>()
-      const charlieHandle = charlieRepo.find(aliceHandle.url)
+      const charlieHandle = await charlieRepo.find(aliceHandle.url)
 
      // pause to give charlie a chance to let alice know it wants the doc
      await pause(100)
@@ -1,6 +1,6 @@
 import debug from "debug"
 import { DocHandle } from "../DocHandle.js"
-import { parseAutomergeUrl, stringifyAutomergeUrl } from "../AutomergeUrl.js"
+import { parseAutomergeUrl } from "../AutomergeUrl.js"
 import { Repo } from "../Repo.js"
 import { DocMessage } from "../network/messages.js"
 import { AutomergeUrl, DocumentId, PeerId } from "../types.js"
@@ -29,18 +29,19 @@ export class CollectionSynchronizer extends Synchronizer {
   }
 
   /** Returns a synchronizer for the given document, creating one if it doesn't already exist. */
-  #fetchDocSynchronizer(documentId: DocumentId) {
-    if (!this.docSynchronizers[documentId]) {
-      const handle = this.repo.find(stringifyAutomergeUrl({ documentId }))
-      this.docSynchronizers[documentId] = this.#initDocSynchronizer(handle)
+  #fetchDocSynchronizer(handle: DocHandle<unknown>) {
+    if (!this.docSynchronizers[handle.documentId]) {
+      this.docSynchronizers[handle.documentId] =
+        this.#initDocSynchronizer(handle)
     }
-    return this.docSynchronizers[documentId]
+    return this.docSynchronizers[handle.documentId]
   }
 
   /** Creates a new docSynchronizer and sets it up to propagate messages */
   #initDocSynchronizer(handle: DocHandle<unknown>): DocSynchronizer {
     const docSynchronizer = new DocSynchronizer({
       handle,
+      peerId: this.repo.networkSubsystem.peerId,
       onLoadSyncState: async peerId => {
         if (!this.repo.storageSubsystem) {
           return
@@ -109,13 +110,16 @@ export class CollectionSynchronizer extends Synchronizer {
 
     this.#docSetUp[documentId] = true
 
-    const docSynchronizer = this.#fetchDocSynchronizer(documentId)
+    const handle = await this.repo.find(documentId, {
+      allowableStates: ["ready", "unavailable", "requesting"],
+    })
+    const docSynchronizer = this.#fetchDocSynchronizer(handle)
 
     docSynchronizer.receiveMessage(message)
 
     // Initiate sync with any new peers
     const peers = await this.#documentGenerousPeers(documentId)
-    docSynchronizer.beginSync(
+    void docSynchronizer.beginSync(
       peers.filter(peerId => !docSynchronizer.hasPeer(peerId))
     )
   }
@@ -123,14 +127,14 @@
   /**
   * Starts synchronizing the given document with all peers that we share it generously with.
   */
-  addDocument(documentId: DocumentId) {
+  addDocument(handle: DocHandle<unknown>) {
     // HACK: this is a hack to prevent us from adding the same document twice
-    if (this.#docSetUp[documentId]) {
+    if (this.#docSetUp[handle.documentId]) {
       return
     }
-    const docSynchronizer = this.#fetchDocSynchronizer(documentId)
-    void this.#documentGenerousPeers(documentId).then(peers => {
-      docSynchronizer.beginSync(peers)
+    const docSynchronizer = this.#fetchDocSynchronizer(handle)
+    void this.#documentGenerousPeers(handle.documentId).then(peers => {
+      void docSynchronizer.beginSync(peers)
     })
   }
 
@@ -152,7 +156,7 @@
     for (const docSynchronizer of Object.values(this.docSynchronizers)) {
       const { documentId } = docSynchronizer
       void this.repo.sharePolicy(peerId, documentId).then(okToShare => {
-        if (okToShare) docSynchronizer.beginSync([peerId])
+        if (okToShare) void docSynchronizer.beginSync([peerId])
       })
     }
   }
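
`CollectionSynchronizer.addDocument()` now takes the `DocHandle` itself rather than a `DocumentId`, and `beginSync()` returns a promise that callers explicitly discard with `void`. A brief sketch of the new registration path, assuming a `repo` instance (the `synchronizer` property is internal API and the document shape is illustrative):

```typescript
// Sketch only: registering a freshly created document with the synchronizer.
const handle = repo.create<{ title?: string }>()

// Previously: repo.synchronizer.addDocument(handle.documentId)
repo.synchronizer.addDocument(handle)
```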
@@ -30,6 +30,7 @@ type PendingMessage = {
 
 interface DocSynchronizerConfig {
   handle: DocHandle<unknown>
+  peerId: PeerId
   onLoadSyncState?: (peerId: PeerId) => Promise<A.SyncState | undefined>
 }
 
@@ -56,13 +57,17 @@ export class DocSynchronizer extends Synchronizer {
 
   #pendingSyncMessages: Array<PendingMessage> = []
 
+  // We keep this around at least in part for debugging.
+  // eslint-disable-next-line no-unused-private-class-members
+  #peerId: PeerId
   #syncStarted = false
 
   #handle: DocHandle<unknown>
   #onLoadSyncState: (peerId: PeerId) => Promise<A.SyncState | undefined>
 
-  constructor({ handle, onLoadSyncState }: DocSynchronizerConfig) {
+  constructor({ handle, peerId, onLoadSyncState }: DocSynchronizerConfig) {
     super()
+    this.#peerId = peerId
     this.#handle = handle
     this.#onLoadSyncState =
       onLoadSyncState ?? (() => Promise.resolve(undefined))
@@ -81,7 +86,7 @@ export class DocSynchronizer extends Synchronizer {
 
     // Process pending sync messages immediately after the handle becomes ready.
     void (async () => {
-      await handle.doc([READY, REQUESTING])
+      await handle.whenReady([READY, REQUESTING])
       this.#processAllPendingSyncMessages()
     })()
   }
@@ -97,8 +102,7 @@
   /// PRIVATE
 
   async #syncWithPeers() {
-    this.#log(`syncWithPeers`)
-    const doc = await this.#handle.doc()
+    const doc = await this.#handle.legacyAsyncDoc() // XXX THIS ONE IS WEIRD
     if (doc === undefined) return
     this.#peers.forEach(peerId => this.#sendSyncMessage(peerId, doc))
   }
@@ -226,16 +230,15 @@
     return this.#peers.includes(peerId)
   }
 
-  beginSync(peerIds: PeerId[]) {
+  async beginSync(peerIds: PeerId[]) {
     const noPeersWithDocument = peerIds.every(
       peerId => this.#peerDocumentStatuses[peerId] in ["unavailable", "wants"]
     )
 
     // At this point if we don't have anything in our storage, we need to use an empty doc to sync
     // with; but we don't want to surface that state to the front end
-
-    const docPromise = this.#handle
-      .doc([READY, REQUESTING, UNAVAILABLE])
+    const docPromise = this.#handle // TODO THIS IS ALSO WEIRD
+      .legacyAsyncDoc([READY, REQUESTING, UNAVAILABLE])
       .then(doc => {
         // we register out peers first, then say that sync has started
         this.#syncStarted = true
@@ -251,7 +254,13 @@
         return doc ?? A.init<unknown>()
       })
 
-    this.#log(`beginSync: ${peerIds.join(", ")}`)
+    const peersWithDocument = this.#peers.some(peerId => {
+      return this.#peerDocumentStatuses[peerId] == "has"
+    })
+
+    if (peersWithDocument) {
+      await this.#handle.whenReady()
+    }
 
     peerIds.forEach(peerId => {
       this.#withSyncState(peerId, syncState => {
@@ -352,6 +361,7 @@
     this.#withSyncState(message.senderId, syncState => {
       this.#handle.update(doc => {
         const start = performance.now()
+
         const [newDoc, newSyncState] = A.receiveSyncMessage(
           doc,
           syncState,
@@ -28,13 +28,13 @@ describe("CollectionSynchronizer", () => {
         done()
       })
 
-      synchronizer.addDocument(handle.documentId)
+      synchronizer.addDocument(handle)
     }))
 
   it("starts synchronizing existing documents when a peer is added", () =>
     new Promise<void>(done => {
       const handle = repo.create()
-      synchronizer.addDocument(handle.documentId)
+      synchronizer.addDocument(handle)
       synchronizer.once("message", event => {
         const { targetId, documentId } = event as SyncMessage
         assert(targetId === "peer1")
@@ -50,7 +50,7 @@ describe("CollectionSynchronizer", () => {
 
       repo.sharePolicy = async (peerId: PeerId) => peerId !== "peer1"
 
-      synchronizer.addDocument(handle.documentId)
+      synchronizer.addDocument(handle)
       synchronizer.once("message", () => {
         reject(new Error("Should not have sent a message"))
       })
@@ -71,7 +71,7 @@ describe("CollectionSynchronizer", () => {
         reject(new Error("Should not have sent a message"))
       })
 
-      synchronizer.addDocument(handle.documentId)
+      synchronizer.addDocument(handle)
 
       setTimeout(done)
     }))