@automerge/automerge-repo 2.0.0-alpha.7 → 2.0.0-beta.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (79)
  1. package/README.md +8 -8
  2. package/dist/AutomergeUrl.d.ts +17 -5
  3. package/dist/AutomergeUrl.d.ts.map +1 -1
  4. package/dist/AutomergeUrl.js +71 -24
  5. package/dist/DocHandle.d.ts +68 -45
  6. package/dist/DocHandle.d.ts.map +1 -1
  7. package/dist/DocHandle.js +166 -69
  8. package/dist/FindProgress.d.ts +30 -0
  9. package/dist/FindProgress.d.ts.map +1 -0
  10. package/dist/FindProgress.js +1 -0
  11. package/dist/RemoteHeadsSubscriptions.d.ts +4 -5
  12. package/dist/RemoteHeadsSubscriptions.d.ts.map +1 -1
  13. package/dist/RemoteHeadsSubscriptions.js +4 -1
  14. package/dist/Repo.d.ts +46 -6
  15. package/dist/Repo.d.ts.map +1 -1
  16. package/dist/Repo.js +252 -67
  17. package/dist/helpers/abortable.d.ts +39 -0
  18. package/dist/helpers/abortable.d.ts.map +1 -0
  19. package/dist/helpers/abortable.js +45 -0
  20. package/dist/helpers/arraysAreEqual.d.ts.map +1 -1
  21. package/dist/helpers/bufferFromHex.d.ts +3 -0
  22. package/dist/helpers/bufferFromHex.d.ts.map +1 -0
  23. package/dist/helpers/bufferFromHex.js +13 -0
  24. package/dist/helpers/debounce.d.ts.map +1 -1
  25. package/dist/helpers/eventPromise.d.ts.map +1 -1
  26. package/dist/helpers/headsAreSame.d.ts +2 -2
  27. package/dist/helpers/headsAreSame.d.ts.map +1 -1
  28. package/dist/helpers/mergeArrays.d.ts +1 -1
  29. package/dist/helpers/mergeArrays.d.ts.map +1 -1
  30. package/dist/helpers/pause.d.ts.map +1 -1
  31. package/dist/helpers/tests/network-adapter-tests.d.ts.map +1 -1
  32. package/dist/helpers/tests/network-adapter-tests.js +13 -13
  33. package/dist/helpers/tests/storage-adapter-tests.d.ts +2 -2
  34. package/dist/helpers/tests/storage-adapter-tests.d.ts.map +1 -1
  35. package/dist/helpers/tests/storage-adapter-tests.js +25 -48
  36. package/dist/helpers/throttle.d.ts.map +1 -1
  37. package/dist/helpers/withTimeout.d.ts.map +1 -1
  38. package/dist/index.d.ts +2 -1
  39. package/dist/index.d.ts.map +1 -1
  40. package/dist/index.js +1 -1
  41. package/dist/network/messages.d.ts.map +1 -1
  42. package/dist/storage/StorageSubsystem.d.ts +15 -1
  43. package/dist/storage/StorageSubsystem.d.ts.map +1 -1
  44. package/dist/storage/StorageSubsystem.js +50 -14
  45. package/dist/synchronizer/CollectionSynchronizer.d.ts +4 -3
  46. package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
  47. package/dist/synchronizer/CollectionSynchronizer.js +34 -15
  48. package/dist/synchronizer/DocSynchronizer.d.ts +3 -2
  49. package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -1
  50. package/dist/synchronizer/DocSynchronizer.js +51 -27
  51. package/dist/synchronizer/Synchronizer.d.ts +11 -0
  52. package/dist/synchronizer/Synchronizer.d.ts.map +1 -1
  53. package/dist/types.d.ts +4 -1
  54. package/dist/types.d.ts.map +1 -1
  55. package/fuzz/fuzz.ts +3 -3
  56. package/package.json +3 -4
  57. package/src/AutomergeUrl.ts +101 -26
  58. package/src/DocHandle.ts +235 -82
  59. package/src/FindProgress.ts +48 -0
  60. package/src/RemoteHeadsSubscriptions.ts +11 -9
  61. package/src/Repo.ts +364 -74
  62. package/src/helpers/abortable.ts +61 -0
  63. package/src/helpers/bufferFromHex.ts +14 -0
  64. package/src/helpers/headsAreSame.ts +2 -2
  65. package/src/helpers/tests/network-adapter-tests.ts +14 -13
  66. package/src/helpers/tests/storage-adapter-tests.ts +44 -86
  67. package/src/index.ts +7 -0
  68. package/src/storage/StorageSubsystem.ts +66 -16
  69. package/src/synchronizer/CollectionSynchronizer.ts +37 -16
  70. package/src/synchronizer/DocSynchronizer.ts +59 -32
  71. package/src/synchronizer/Synchronizer.ts +14 -0
  72. package/src/types.ts +4 -1
  73. package/test/AutomergeUrl.test.ts +130 -0
  74. package/test/CollectionSynchronizer.test.ts +4 -4
  75. package/test/DocHandle.test.ts +181 -38
  76. package/test/DocSynchronizer.test.ts +10 -3
  77. package/test/Repo.test.ts +376 -203
  78. package/test/StorageSubsystem.test.ts +80 -1
  79. package/test/remoteHeads.test.ts +27 -12
package/src/Repo.ts CHANGED
@@ -2,11 +2,20 @@ import { next as Automerge } from "@automerge/automerge/slim"
 import debug from "debug"
 import { EventEmitter } from "eventemitter3"
 import {
+  encodeHeads,
   generateAutomergeUrl,
   interpretAsDocumentId,
+  isValidAutomergeUrl,
   parseAutomergeUrl,
 } from "./AutomergeUrl.js"
-import { DocHandle, DocHandleEncodedChangePayload } from "./DocHandle.js"
+import {
+  DELETED,
+  DocHandle,
+  DocHandleEncodedChangePayload,
+  READY,
+  UNAVAILABLE,
+  UNLOADED,
+} from "./DocHandle.js"
 import { RemoteHeadsSubscriptions } from "./RemoteHeadsSubscriptions.js"
 import { headsAreSame } from "./helpers/headsAreSame.js"
 import { throttle } from "./helpers/throttle.js"
@@ -20,8 +29,30 @@ import { StorageAdapterInterface } from "./storage/StorageAdapterInterface.js"
 import { StorageSubsystem } from "./storage/StorageSubsystem.js"
 import { StorageId } from "./storage/types.js"
 import { CollectionSynchronizer } from "./synchronizer/CollectionSynchronizer.js"
-import { SyncStatePayload } from "./synchronizer/Synchronizer.js"
-import type { AnyDocumentId, DocumentId, PeerId } from "./types.js"
+import {
+  DocSyncMetrics,
+  SyncStatePayload,
+} from "./synchronizer/Synchronizer.js"
+import type {
+  AnyDocumentId,
+  AutomergeUrl,
+  DocumentId,
+  PeerId,
+} from "./types.js"
+import { abortable, AbortOptions } from "./helpers/abortable.js"
+import { FindProgress } from "./FindProgress.js"
+
+export type FindProgressWithMethods<T> = FindProgress<T> & {
+  untilReady: (allowableStates: string[]) => Promise<DocHandle<T>>
+  peek: () => FindProgress<T>
+  subscribe: (callback: (progress: FindProgress<T>) => void) => () => void
+}
+
+export type ProgressSignal<T> = {
+  peek: () => FindProgress<T>
+  subscribe: (callback: (progress: FindProgress<T>) => void) => () => void
+  untilReady: (allowableStates: string[]) => Promise<DocHandle<T>>
+}

 function randomPeerId() {
   return ("peer-" + Math.random().toString(36).slice(4)) as PeerId
@@ -62,6 +93,7 @@ export class Repo extends EventEmitter<RepoEvents> {

   #remoteHeadsSubscriptions = new RemoteHeadsSubscriptions()
   #remoteHeadsGossipingEnabled = false
+  #progressCache: Record<DocumentId, FindProgress<any>> = {}

   constructor({
     storage,
@@ -70,6 +102,7 @@ export class Repo extends EventEmitter<RepoEvents> {
     sharePolicy,
     isEphemeral = storage === undefined,
     enableRemoteHeadsGossiping = false,
+    denylist = [],
   }: RepoConfig = {}) {
     super()
     this.#remoteHeadsGossipingEnabled = enableRemoteHeadsGossiping
@@ -89,7 +122,7 @@ export class Repo extends EventEmitter<RepoEvents> {

     // SYNCHRONIZER
     // The synchronizer uses the network subsystem to keep documents in sync with peers.
-    this.synchronizer = new CollectionSynchronizer(this)
+    this.synchronizer = new CollectionSynchronizer(this, denylist)

     // When the synchronizer emits messages, send them to peers
     this.synchronizer.on("message", message => {
@@ -97,6 +130,9 @@ export class Repo extends EventEmitter<RepoEvents> {
       networkSubsystem.send(message)
     })

+    // Forward metrics from doc synchronizers
+    this.synchronizer.on("metrics", event => this.emit("doc-metrics", event))
+
     if (this.#remoteHeadsGossipingEnabled) {
       this.synchronizer.on("open-doc", ({ peerId, documentId }) => {
         this.#remoteHeadsSubscriptions.subscribePeerToDoc(peerId, documentId)
@@ -106,6 +142,12 @@ export class Repo extends EventEmitter<RepoEvents> {
     // STORAGE
     // The storage subsystem has access to some form of persistence, and deals with save and loading documents.
     const storageSubsystem = storage ? new StorageSubsystem(storage) : undefined
+    if (storageSubsystem) {
+      storageSubsystem.on("document-loaded", event =>
+        this.emit("doc-metrics", { type: "doc-loaded", ...event })
+      )
+    }
+
     this.storageSubsystem = storageSubsystem

     // NETWORK
@@ -168,16 +210,20 @@ export class Repo extends EventEmitter<RepoEvents> {
         const heads = handle.getRemoteHeads(storageId)
         const haveHeadsChanged =
           message.syncState.theirHeads &&
-          (!heads || !headsAreSame(heads, message.syncState.theirHeads))
+          (!heads ||
+            !headsAreSame(heads, encodeHeads(message.syncState.theirHeads)))

         if (haveHeadsChanged && message.syncState.theirHeads) {
-          handle.setRemoteHeads(storageId, message.syncState.theirHeads)
+          handle.setRemoteHeads(
+            storageId,
+            encodeHeads(message.syncState.theirHeads)
+          )

           if (storageId && this.#remoteHeadsGossipingEnabled) {
             this.#remoteHeadsSubscriptions.handleImmediateRemoteHeadsChanged(
               message.documentId,
               storageId,
-              message.syncState.theirHeads
+              encodeHeads(message.syncState.theirHeads)
             )
           }
         }
@@ -229,18 +275,8 @@ export class Repo extends EventEmitter<RepoEvents> {
       handle.on("heads-changed", throttle(saveFn, this.saveDebounceRate))
     }

-    handle.on("unavailable", () => {
-      this.#log("document unavailable", { documentId: handle.documentId })
-      this.emit("unavailable-document", {
-        documentId: handle.documentId,
-      })
-    })
-
     // Register the document with the synchronizer. This advertises our interest in the document.
-    this.synchronizer.addDocument(handle.documentId)
-
-    // Preserve the old event in case anyone was using it.
-    this.emit("document", { handle })
+    this.synchronizer.addDocument(handle)
   }

   #receiveMessage(message: RepoMessage) {
@@ -371,22 +407,16 @@ export class Repo extends EventEmitter<RepoEvents> {
    * Any peers this `Repo` is connected to for whom `sharePolicy` returns `true` will
    * be notified of the newly created DocHandle.
    *
-   * @throws if the cloned handle is not yet ready or if
-   * `clonedHandle.docSync()` returns `undefined` (i.e. the handle is unavailable).
    */
   clone<T>(clonedHandle: DocHandle<T>) {
     if (!clonedHandle.isReady()) {
       throw new Error(
         `Cloned handle is not yet in ready state.
-        (Try await handle.waitForReady() first.)`
+        (Try await handle.whenReady() first.)`
       )
     }

-    const sourceDoc = clonedHandle.docSync()
-    if (!sourceDoc) {
-      throw new Error("Cloned handle doesn't have a document.")
-    }
-
+    const sourceDoc = clonedHandle.doc()
     const handle = this.create<T>()

     handle.update(() => {
@@ -397,60 +427,263 @@ export class Repo extends EventEmitter<RepoEvents> {
     return handle
   }

-  /**
-   * Retrieves a document by id. It gets data from the local system, but also emits a `document`
-   * event to advertise interest in the document.
-   */
-  find<T>(
-    /** The url or documentId of the handle to retrieve */
-    id: AnyDocumentId
-  ): DocHandle<T> {
-    const documentId = interpretAsDocumentId(id)
+  findWithProgress<T>(
+    id: AnyDocumentId,
+    options: AbortOptions = {}
+  ): FindProgressWithMethods<T> | FindProgress<T> {
+    const { signal } = options
+    const abortPromise = abortable(signal)

-    // If we have the handle cached, return it
+    const { documentId, heads } = isValidAutomergeUrl(id)
+      ? parseAutomergeUrl(id)
+      : { documentId: interpretAsDocumentId(id), heads: undefined }
+
+    // Check handle cache first - return plain FindStep for terminal states
     if (this.#handleCache[documentId]) {
-      if (this.#handleCache[documentId].isUnavailable()) {
-        // this ensures that the event fires after the handle has been returned
-        setTimeout(() => {
-          this.#handleCache[documentId].emit("unavailable", {
-            handle: this.#handleCache[documentId],
-          })
-        })
+      const handle = this.#handleCache[documentId]
+      if (handle.state === UNAVAILABLE) {
+        const result = {
+          state: "unavailable" as const,
+          error: new Error(`Document ${id} is unavailable`),
+          handle,
+        }
+        return result
+      }
+      if (handle.state === DELETED) {
+        const result = {
+          state: "failed" as const,
+          error: new Error(`Document ${id} was deleted`),
+          handle,
+        }
+        return result
+      }
+      if (handle.state === READY) {
+        const result = {
+          state: "ready" as const,
+          handle: heads ? handle.view(heads) : handle,
+        }
+        return result
       }
-      return this.#handleCache[documentId]
     }

-    // If we don't already have the handle, make an empty one and try loading it
-    const handle = this.#getHandle<T>({
+    // Check progress cache for any existing signal
+    const cachedProgress = this.#progressCache[documentId]
+    if (cachedProgress) {
+      const handle = this.#handleCache[documentId]
+      // Return cached progress if we have a handle and it's either in a terminal state or loading
+      if (
+        handle &&
+        (handle.state === READY ||
+          handle.state === UNAVAILABLE ||
+          handle.state === DELETED ||
+          handle.state === "loading")
+      ) {
+        return cachedProgress as FindProgressWithMethods<T>
+      }
+    }
+
+    const handle = this.#getHandle<T>({ documentId })
+    const initial = {
+      state: "loading" as const,
+      progress: 0,
+      handle,
+    }
+
+    // Create a new progress signal
+    const progressSignal = {
+      subscribers: new Set<(progress: FindProgress<T>) => void>(),
+      currentProgress: undefined as FindProgress<T> | undefined,
+      notify: (progress: FindProgress<T>) => {
+        progressSignal.currentProgress = progress
+        progressSignal.subscribers.forEach(callback => callback(progress))
+        // Cache all states, not just terminal ones
+        this.#progressCache[documentId] = progress
+      },
+      peek: () => progressSignal.currentProgress || initial,
+      subscribe: (callback: (progress: FindProgress<T>) => void) => {
+        progressSignal.subscribers.add(callback)
+        return () => progressSignal.subscribers.delete(callback)
+      },
+    }
+
+    progressSignal.notify(initial)
+
+    // Start the loading process
+    void this.#loadDocumentWithProgress(
+      id,
       documentId,
-    }) as DocHandle<T>
+      handle,
+      progressSignal,
+      abortPromise
+    )

-    // Loading & network is going to be asynchronous no matter what,
-    // but we want to return the handle immediately.
-    const attemptLoad = this.storageSubsystem
-      ? this.storageSubsystem.loadDoc(handle.documentId)
-      : Promise.resolve(null)
-
-    attemptLoad
-      .then(async loadedDoc => {
-        if (loadedDoc) {
-          // uhhhh, sorry if you're reading this because we were lying to the type system
-          handle.update(() => loadedDoc as Automerge.Doc<T>)
-          handle.doneLoading()
-        } else {
-          // we want to wait for the network subsystem to be ready before
-          // we request the document. this prevents entering unavailable during initialization.
-          await this.networkSubsystem.whenReady()
-          handle.request()
+    const result = {
+      ...initial,
+      peek: progressSignal.peek,
+      subscribe: progressSignal.subscribe,
+    }
+    this.#progressCache[documentId] = result
+    return result
+  }
+
+  async #loadDocumentWithProgress<T>(
+    id: AnyDocumentId,
+    documentId: DocumentId,
+    handle: DocHandle<T>,
+    progressSignal: {
+      notify: (progress: FindProgress<T>) => void
+    },
+    abortPromise: Promise<never>
+  ) {
+    try {
+      progressSignal.notify({
+        state: "loading" as const,
+        progress: 25,
+        handle,
+      })
+
+      const loadingPromise = await (this.storageSubsystem
+        ? this.storageSubsystem.loadDoc(handle.documentId)
+        : Promise.resolve(null))
+
+      const loadedDoc = await Promise.race([loadingPromise, abortPromise])
+
+      if (loadedDoc) {
+        handle.update(() => loadedDoc as Automerge.Doc<T>)
+        handle.doneLoading()
+        progressSignal.notify({
+          state: "loading" as const,
+          progress: 50,
+          handle,
+        })
+      } else {
+        await Promise.race([this.networkSubsystem.whenReady(), abortPromise])
+        handle.request()
+        progressSignal.notify({
+          state: "loading" as const,
+          progress: 75,
+          handle,
+        })
+      }
+
+      this.#registerHandleWithSubsystems(handle)
+
+      await Promise.race([handle.whenReady([READY, UNAVAILABLE]), abortPromise])
+
+      if (handle.state === UNAVAILABLE) {
+        const unavailableProgress = {
+          state: "unavailable" as const,
+          handle,
         }
-        this.#registerHandleWithSubsystems(handle)
+        progressSignal.notify(unavailableProgress)
+        return
+      }
+      if (handle.state === DELETED) {
+        throw new Error(`Document ${id} was deleted`)
+      }
+
+      progressSignal.notify({ state: "ready" as const, handle })
+    } catch (error) {
+      progressSignal.notify({
+        state: "failed" as const,
+        error: error instanceof Error ? error : new Error(String(error)),
+        handle: this.#getHandle<T>({ documentId }),
       })
-      .catch(err => {
-        this.#log("error waiting for network", { err })
+    }
+  }
+
+  async find<T>(
+    id: AnyDocumentId,
+    options: RepoFindOptions & AbortOptions = {}
+  ): Promise<DocHandle<T>> {
+    const { allowableStates = ["ready"], signal } = options
+    const progress = this.findWithProgress<T>(id, { signal })
+
+    if ("subscribe" in progress) {
+      this.#registerHandleWithSubsystems(progress.handle)
+      return new Promise((resolve, reject) => {
+        const unsubscribe = progress.subscribe(state => {
+          if (allowableStates.includes(state.handle.state)) {
+            unsubscribe()
+            resolve(state.handle)
+          } else if (state.state === "unavailable") {
+            unsubscribe()
+            reject(new Error(`Document ${id} is unavailable`))
+          } else if (state.state === "failed") {
+            unsubscribe()
+            reject(state.error)
+          }
+        })
       })
+    } else {
+      if (progress.handle.state === READY) {
+        return progress.handle
+      }
+      // If the handle isn't ready, wait for it and then return it
+      await progress.handle.whenReady([READY, UNAVAILABLE])
+      return progress.handle
+    }
+  }
+
+  /**
+   * Loads a document without waiting for ready state
+   */
+  async #loadDocument<T>(documentId: DocumentId): Promise<DocHandle<T>> {
+    // If we have the handle cached, return it
+    if (this.#handleCache[documentId]) {
+      return this.#handleCache[documentId]
+    }
+
+    // If we don't already have the handle, make an empty one and try loading it
+    const handle = this.#getHandle<T>({ documentId })
+    const loadedDoc = await (this.storageSubsystem
+      ? this.storageSubsystem.loadDoc(handle.documentId)
+      : Promise.resolve(null))
+
+    if (loadedDoc) {
+      // We need to cast this to <T> because loadDoc operates in <unknowns>.
+      // This is really where we ought to be validating the input matches <T>.
+      handle.update(() => loadedDoc as Automerge.Doc<T>)
+      handle.doneLoading()
+    } else {
+      // Because the network subsystem might still be booting up, we wait
+      // here so that we don't immediately give up loading because we're still
+      // making our initial connection to a sync server.
+      await this.networkSubsystem.whenReady()
+      handle.request()
+    }
+
+    this.#registerHandleWithSubsystems(handle)
     return handle
   }

+  /**
+   * Retrieves a document by id. It gets data from the local system, but also emits a `document`
+   * event to advertise interest in the document.
+   */
+  async findClassic<T>(
+    /** The url or documentId of the handle to retrieve */
+    id: AnyDocumentId,
+    options: RepoFindOptions & AbortOptions = {}
+  ): Promise<DocHandle<T>> {
+    const documentId = interpretAsDocumentId(id)
+    const { allowableStates, signal } = options
+
+    return Promise.race([
+      (async () => {
+        const handle = await this.#loadDocument<T>(documentId)
+        if (!allowableStates) {
+          await handle.whenReady([READY, UNAVAILABLE])
+          if (handle.state === UNAVAILABLE && !signal?.aborted) {
+            throw new Error(`Document ${id} is unavailable`)
+          }
+        }
+        return handle
+      })(),
+      abortable(signal),
+    ])
+  }
+

   delete(
     id: AnyDocumentId
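The example below is an editor's sketch of how the reworked lookup API fits together, based only on the signatures in the hunk above; the `{ count: number }` document and surrounding setup are illustrative, not taken from the package's own docs.

```typescript
import { Repo } from "@automerge/automerge-repo"

const repo = new Repo()
const url = repo.create({ count: 0 }).url

// find() is now async: it resolves once the handle reaches an allowable
// state and can be cancelled via an AbortSignal.
const controller = new AbortController()
const handle = await repo.find<{ count: number }>(url, {
  allowableStates: ["ready"],
  signal: controller.signal,
})
console.log(handle.doc().count)

// findWithProgress() returns immediately with the handle plus loading state
// that can be inspected or subscribed to synchronously.
const progress = repo.findWithProgress<{ count: number }>(url)
if ("subscribe" in progress) {
  const unsubscribe = progress.subscribe(p => {
    console.log(p.state) // "loading", then "ready", "unavailable" or "failed"
    if (p.state !== "loading") unsubscribe()
  })
}
```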
@@ -461,6 +694,7 @@ export class Repo extends EventEmitter<RepoEvents> {
     handle.delete()

     delete this.#handleCache[documentId]
+    delete this.#progressCache[documentId]
     this.emit("delete-document", { documentId })
   }

@@ -475,8 +709,7 @@ export class Repo extends EventEmitter<RepoEvents> {
     const documentId = interpretAsDocumentId(id)

     const handle = this.#getHandle({ documentId })
-    const doc = await handle.doc()
-    if (!doc) return undefined
+    const doc = handle.doc()
     return Automerge.save(doc)
   }

@@ -530,15 +763,46 @@ export class Repo extends EventEmitter<RepoEvents> {
       : Object.values(this.#handleCache)
     await Promise.all(
       handles.map(async handle => {
-        const doc = handle.docSync()
-        if (!doc) {
-          return
-        }
-        return this.storageSubsystem!.saveDoc(handle.documentId, doc)
+        return this.storageSubsystem!.saveDoc(handle.documentId, handle.doc())
       })
     )
   }

+  /**
+   * Removes a DocHandle from the handleCache.
+   * @hidden this API is experimental and may change.
+   * @param documentId - documentId of the DocHandle to remove from handleCache, if present in cache.
+   * @returns Promise<void>
+   */
+  async removeFromCache(documentId: DocumentId) {
+    if (!this.#handleCache[documentId]) {
+      this.#log(
+        `WARN: removeFromCache called but handle not found in handleCache for documentId: ${documentId}`
+      )
+      return
+    }
+    const handle = this.#getHandle({ documentId })
+    await handle.whenReady([READY, UNLOADED, DELETED, UNAVAILABLE])
+    const doc = handle.doc()
+    // because this is an internal-ish function, we'll be extra careful about undefined docs here
+    if (doc) {
+      if (handle.isReady()) {
+        handle.unload()
+      } else {
+        this.#log(
+          `WARN: removeFromCache called but handle for documentId: ${documentId} in unexpected state: ${handle.state}`
+        )
+      }
+      delete this.#handleCache[documentId]
+      // TODO: remove document from synchronizer when removeDocument is implemented
+      // this.synchronizer.removeDocument(documentId)
+    } else {
+      this.#log(
+        `WARN: removeFromCache called but doc undefined for documentId: ${documentId}`
+      )
+    }
+  }
+
   shutdown(): Promise<void> {
     this.networkSubsystem.adapters.forEach(adapter => {
       adapter.disconnect()
@@ -575,6 +839,13 @@ export interface RepoConfig {
    * Whether to enable the experimental remote heads gossiping feature
    */
   enableRemoteHeadsGossiping?: boolean
+
+  /**
+   * A list of automerge URLs which should never be loaded regardless of what
+   * messages are received or what the share policy is. This is useful to avoid
+   * loading documents that are known to be too resource intensive.
+   */
+  denylist?: AutomergeUrl[]
 }

 /** A function that determines whether we should share a document with a peer
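A minimal sketch of the new `denylist` option; the URL is a made-up placeholder, not a real document.

```typescript
import { Repo } from "@automerge/automerge-repo"
import type { AutomergeUrl } from "@automerge/automerge-repo"

// A document this repo should refuse to load, e.g. one known to be too large.
const tooExpensive = "automerge:2j9knpCseyhnK8izDmLpGP5WMdZQ" as AutomergeUrl

const repo = new Repo({
  denylist: [tooExpensive],
})
```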
@@ -598,6 +869,11 @@ export interface RepoEvents {
   "delete-document": (arg: DeleteDocumentPayload) => void
   /** A document was marked as unavailable (we don't have it and none of our peers have it) */
   "unavailable-document": (arg: DeleteDocumentPayload) => void
+  "doc-metrics": (arg: DocMetrics) => void
+}
+
+export interface RepoFindOptions {
+  allowableStates?: string[]
 }

 export interface DocumentPayload {
@@ -607,3 +883,17 @@ export interface DocumentPayload {
 export interface DeleteDocumentPayload {
   documentId: DocumentId
 }
+
+export type DocMetrics =
+  | DocSyncMetrics
+  | {
+      type: "doc-loaded"
+      documentId: DocumentId
+      durationMillis: number
+      numOps: number
+      numChanges: number
+    }
+  | {
+      type: "doc-denied"
+      documentId: DocumentId
+    }
package/src/helpers/abortable.ts ADDED
@@ -0,0 +1,61 @@
+/**
+ * Creates a promise that rejects when the signal is aborted.
+ *
+ * @remarks
+ * This utility creates a promise that rejects when the provided AbortSignal is aborted.
+ * It's designed to be used with Promise.race() to make operations abortable.
+ *
+ * @example
+ * ```typescript
+ * const controller = new AbortController();
+ *
+ * try {
+ *   const result = await Promise.race([
+ *     fetch('https://api.example.com/data'),
+ *     abortable(controller.signal)
+ *   ]);
+ * } catch (err) {
+ *   if (err.name === 'AbortError') {
+ *     console.log('The operation was aborted');
+ *   }
+ * }
+ *
+ * // Later, to abort:
+ * controller.abort();
+ * ```
+ *
+ * @param signal - An AbortSignal that can be used to abort the operation
+ * @param cleanup - Optional cleanup function that will be called if aborted
+ * @returns A promise that rejects with AbortError when the signal is aborted
+ * @throws {DOMException} With name "AbortError" when aborted
+ */
+export function abortable(
+  signal?: AbortSignal,
+  cleanup?: () => void
+): Promise<never> {
+  if (signal?.aborted) {
+    throw new DOMException("Operation aborted", "AbortError")
+  }
+
+  if (!signal) {
+    return new Promise(() => {}) // Never resolves
+  }
+
+  return new Promise((_, reject) => {
+    signal.addEventListener(
+      "abort",
+      () => {
+        cleanup?.()
+        reject(new DOMException("Operation aborted", "AbortError"))
+      },
+      { once: true }
+    )
+  })
+}
+
+/**
+ * Include this type in an options object to pass an AbortSignal to a function.
+ */
+export interface AbortOptions {
+  signal?: AbortSignal
+}
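Beyond the fetch example in the docstring, the `cleanup` parameter and the `AbortOptions` shape can be combined as in this sketch; `delay` is a hypothetical helper, not part of the package.

```typescript
import { abortable, AbortOptions } from "./abortable.js"

// Hypothetical abortable delay: the cleanup callback clears the timer
// if the caller aborts before it fires.
function delay(ms: number, { signal }: AbortOptions = {}): Promise<void> {
  let timer: ReturnType<typeof setTimeout> | undefined
  return Promise.race([
    new Promise<void>(resolve => {
      timer = setTimeout(resolve, ms)
    }),
    abortable(signal, () => clearTimeout(timer)),
  ])
}
```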
package/src/helpers/bufferFromHex.ts ADDED
@@ -0,0 +1,14 @@
+export const uint8ArrayFromHexString = (hexString: string): Uint8Array => {
+  if (hexString.length % 2 !== 0) {
+    throw new Error("Hex string must have an even length")
+  }
+  const bytes = new Uint8Array(hexString.length / 2)
+  for (let i = 0; i < hexString.length; i += 2) {
+    bytes[i >> 1] = parseInt(hexString.slice(i, i + 2), 16)
+  }
+  return bytes
+}
+
+export const uint8ArrayToHexString = (data: Uint8Array): string => {
+  return Array.from(data, byte => byte.toString(16).padStart(2, "0")).join("")
+}
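A quick round-trip check of the two helpers (the import path assumes the same directory as the new file):

```typescript
import {
  uint8ArrayFromHexString,
  uint8ArrayToHexString,
} from "./bufferFromHex.js"

const bytes = uint8ArrayFromHexString("48656c6c6f") // "Hello": 72, 101, 108, 108, 111
console.log(uint8ArrayToHexString(bytes)) // "48656c6c6f"
```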
package/src/helpers/headsAreSame.ts CHANGED
@@ -1,6 +1,6 @@
-import { Heads } from "@automerge/automerge/slim/next"
 import { arraysAreEqual } from "./arraysAreEqual.js"
+import type { UrlHeads } from "../types.js"

-export const headsAreSame = (a: Heads, b: Heads) => {
+export const headsAreSame = (a: UrlHeads, b: UrlHeads) => {
   return arraysAreEqual(a, b)
 }