@automerge/automerge-repo 2.0.0-alpha.7 → 2.0.0-beta.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (79)
  1. package/README.md +8 -8
  2. package/dist/AutomergeUrl.d.ts +17 -5
  3. package/dist/AutomergeUrl.d.ts.map +1 -1
  4. package/dist/AutomergeUrl.js +71 -24
  5. package/dist/DocHandle.d.ts +68 -45
  6. package/dist/DocHandle.d.ts.map +1 -1
  7. package/dist/DocHandle.js +166 -69
  8. package/dist/FindProgress.d.ts +30 -0
  9. package/dist/FindProgress.d.ts.map +1 -0
  10. package/dist/FindProgress.js +1 -0
  11. package/dist/RemoteHeadsSubscriptions.d.ts +4 -5
  12. package/dist/RemoteHeadsSubscriptions.d.ts.map +1 -1
  13. package/dist/RemoteHeadsSubscriptions.js +4 -1
  14. package/dist/Repo.d.ts +46 -6
  15. package/dist/Repo.d.ts.map +1 -1
  16. package/dist/Repo.js +252 -67
  17. package/dist/helpers/abortable.d.ts +36 -0
  18. package/dist/helpers/abortable.d.ts.map +1 -0
  19. package/dist/helpers/abortable.js +47 -0
  20. package/dist/helpers/arraysAreEqual.d.ts.map +1 -1
  21. package/dist/helpers/bufferFromHex.d.ts +3 -0
  22. package/dist/helpers/bufferFromHex.d.ts.map +1 -0
  23. package/dist/helpers/bufferFromHex.js +13 -0
  24. package/dist/helpers/debounce.d.ts.map +1 -1
  25. package/dist/helpers/eventPromise.d.ts.map +1 -1
  26. package/dist/helpers/headsAreSame.d.ts +2 -2
  27. package/dist/helpers/headsAreSame.d.ts.map +1 -1
  28. package/dist/helpers/mergeArrays.d.ts +1 -1
  29. package/dist/helpers/mergeArrays.d.ts.map +1 -1
  30. package/dist/helpers/pause.d.ts.map +1 -1
  31. package/dist/helpers/tests/network-adapter-tests.d.ts.map +1 -1
  32. package/dist/helpers/tests/network-adapter-tests.js +13 -13
  33. package/dist/helpers/tests/storage-adapter-tests.d.ts +2 -2
  34. package/dist/helpers/tests/storage-adapter-tests.d.ts.map +1 -1
  35. package/dist/helpers/tests/storage-adapter-tests.js +25 -48
  36. package/dist/helpers/throttle.d.ts.map +1 -1
  37. package/dist/helpers/withTimeout.d.ts.map +1 -1
  38. package/dist/index.d.ts +2 -1
  39. package/dist/index.d.ts.map +1 -1
  40. package/dist/index.js +1 -1
  41. package/dist/network/messages.d.ts.map +1 -1
  42. package/dist/storage/StorageSubsystem.d.ts +15 -1
  43. package/dist/storage/StorageSubsystem.d.ts.map +1 -1
  44. package/dist/storage/StorageSubsystem.js +50 -14
  45. package/dist/synchronizer/CollectionSynchronizer.d.ts +4 -3
  46. package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
  47. package/dist/synchronizer/CollectionSynchronizer.js +34 -15
  48. package/dist/synchronizer/DocSynchronizer.d.ts +3 -2
  49. package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -1
  50. package/dist/synchronizer/DocSynchronizer.js +51 -27
  51. package/dist/synchronizer/Synchronizer.d.ts +11 -0
  52. package/dist/synchronizer/Synchronizer.d.ts.map +1 -1
  53. package/dist/types.d.ts +4 -1
  54. package/dist/types.d.ts.map +1 -1
  55. package/fuzz/fuzz.ts +3 -3
  56. package/package.json +3 -4
  57. package/src/AutomergeUrl.ts +101 -26
  58. package/src/DocHandle.ts +235 -82
  59. package/src/FindProgress.ts +48 -0
  60. package/src/RemoteHeadsSubscriptions.ts +11 -9
  61. package/src/Repo.ts +368 -74
  62. package/src/helpers/abortable.ts +62 -0
  63. package/src/helpers/bufferFromHex.ts +14 -0
  64. package/src/helpers/headsAreSame.ts +2 -2
  65. package/src/helpers/tests/network-adapter-tests.ts +14 -13
  66. package/src/helpers/tests/storage-adapter-tests.ts +44 -86
  67. package/src/index.ts +7 -0
  68. package/src/storage/StorageSubsystem.ts +66 -16
  69. package/src/synchronizer/CollectionSynchronizer.ts +37 -16
  70. package/src/synchronizer/DocSynchronizer.ts +59 -32
  71. package/src/synchronizer/Synchronizer.ts +14 -0
  72. package/src/types.ts +4 -1
  73. package/test/AutomergeUrl.test.ts +130 -0
  74. package/test/CollectionSynchronizer.test.ts +4 -4
  75. package/test/DocHandle.test.ts +181 -38
  76. package/test/DocSynchronizer.test.ts +10 -3
  77. package/test/Repo.test.ts +376 -203
  78. package/test/StorageSubsystem.test.ts +80 -1
  79. package/test/remoteHeads.test.ts +27 -12
package/src/Repo.ts CHANGED
@@ -2,11 +2,20 @@ import { next as Automerge } from "@automerge/automerge/slim"
  import debug from "debug"
  import { EventEmitter } from "eventemitter3"
  import {
+ encodeHeads,
  generateAutomergeUrl,
  interpretAsDocumentId,
+ isValidAutomergeUrl,
  parseAutomergeUrl,
  } from "./AutomergeUrl.js"
- import { DocHandle, DocHandleEncodedChangePayload } from "./DocHandle.js"
+ import {
+ DELETED,
+ DocHandle,
+ DocHandleEncodedChangePayload,
+ READY,
+ UNAVAILABLE,
+ UNLOADED,
+ } from "./DocHandle.js"
  import { RemoteHeadsSubscriptions } from "./RemoteHeadsSubscriptions.js"
  import { headsAreSame } from "./helpers/headsAreSame.js"
  import { throttle } from "./helpers/throttle.js"
@@ -20,8 +29,30 @@ import { StorageAdapterInterface } from "./storage/StorageAdapterInterface.js"
  import { StorageSubsystem } from "./storage/StorageSubsystem.js"
  import { StorageId } from "./storage/types.js"
  import { CollectionSynchronizer } from "./synchronizer/CollectionSynchronizer.js"
- import { SyncStatePayload } from "./synchronizer/Synchronizer.js"
- import type { AnyDocumentId, DocumentId, PeerId } from "./types.js"
+ import {
+ DocSyncMetrics,
+ SyncStatePayload,
+ } from "./synchronizer/Synchronizer.js"
+ import type {
+ AnyDocumentId,
+ AutomergeUrl,
+ DocumentId,
+ PeerId,
+ } from "./types.js"
+ import { abortable, AbortOptions } from "./helpers/abortable.js"
+ import { FindProgress } from "./FindProgress.js"
+
+ export type FindProgressWithMethods<T> = FindProgress<T> & {
+ untilReady: (allowableStates: string[]) => Promise<DocHandle<T>>
+ peek: () => FindProgress<T>
+ subscribe: (callback: (progress: FindProgress<T>) => void) => () => void
+ }
+
+ export type ProgressSignal<T> = {
+ peek: () => FindProgress<T>
+ subscribe: (callback: (progress: FindProgress<T>) => void) => () => void
+ untilReady: (allowableStates: string[]) => Promise<DocHandle<T>>
+ }
 
  function randomPeerId() {
  return ("peer-" + Math.random().toString(36).slice(4)) as PeerId
@@ -62,6 +93,7 @@ export class Repo extends EventEmitter<RepoEvents> {
 
  #remoteHeadsSubscriptions = new RemoteHeadsSubscriptions()
  #remoteHeadsGossipingEnabled = false
+ #progressCache: Record<DocumentId, FindProgress<any>> = {}
 
  constructor({
  storage,
@@ -70,6 +102,7 @@ export class Repo extends EventEmitter<RepoEvents> {
  sharePolicy,
  isEphemeral = storage === undefined,
  enableRemoteHeadsGossiping = false,
+ denylist = [],
  }: RepoConfig = {}) {
  super()
  this.#remoteHeadsGossipingEnabled = enableRemoteHeadsGossiping
@@ -89,7 +122,7 @@ export class Repo extends EventEmitter<RepoEvents> {
 
  // SYNCHRONIZER
  // The synchronizer uses the network subsystem to keep documents in sync with peers.
- this.synchronizer = new CollectionSynchronizer(this)
+ this.synchronizer = new CollectionSynchronizer(this, denylist)
 
  // When the synchronizer emits messages, send them to peers
  this.synchronizer.on("message", message => {
@@ -97,6 +130,9 @@ export class Repo extends EventEmitter<RepoEvents> {
  networkSubsystem.send(message)
  })
 
+ // Forward metrics from doc synchronizers
+ this.synchronizer.on("metrics", event => this.emit("doc-metrics", event))
+
  if (this.#remoteHeadsGossipingEnabled) {
  this.synchronizer.on("open-doc", ({ peerId, documentId }) => {
  this.#remoteHeadsSubscriptions.subscribePeerToDoc(peerId, documentId)
@@ -106,6 +142,12 @@ export class Repo extends EventEmitter<RepoEvents> {
  // STORAGE
  // The storage subsystem has access to some form of persistence, and deals with save and loading documents.
  const storageSubsystem = storage ? new StorageSubsystem(storage) : undefined
+ if (storageSubsystem) {
+ storageSubsystem.on("document-loaded", event =>
+ this.emit("doc-metrics", { type: "doc-loaded", ...event })
+ )
+ }
+
  this.storageSubsystem = storageSubsystem
 
  // NETWORK
@@ -168,16 +210,20 @@ export class Repo extends EventEmitter<RepoEvents> {
  const heads = handle.getRemoteHeads(storageId)
  const haveHeadsChanged =
  message.syncState.theirHeads &&
- (!heads || !headsAreSame(heads, message.syncState.theirHeads))
+ (!heads ||
+ !headsAreSame(heads, encodeHeads(message.syncState.theirHeads)))
 
  if (haveHeadsChanged && message.syncState.theirHeads) {
- handle.setRemoteHeads(storageId, message.syncState.theirHeads)
+ handle.setRemoteHeads(
+ storageId,
+ encodeHeads(message.syncState.theirHeads)
+ )
 
  if (storageId && this.#remoteHeadsGossipingEnabled) {
  this.#remoteHeadsSubscriptions.handleImmediateRemoteHeadsChanged(
  message.documentId,
  storageId,
- message.syncState.theirHeads
+ encodeHeads(message.syncState.theirHeads)
  )
  }
  }
@@ -229,18 +275,8 @@ export class Repo extends EventEmitter<RepoEvents> {
  handle.on("heads-changed", throttle(saveFn, this.saveDebounceRate))
  }
 
- handle.on("unavailable", () => {
- this.#log("document unavailable", { documentId: handle.documentId })
- this.emit("unavailable-document", {
- documentId: handle.documentId,
- })
- })
-
  // Register the document with the synchronizer. This advertises our interest in the document.
- this.synchronizer.addDocument(handle.documentId)
-
- // Preserve the old event in case anyone was using it.
- this.emit("document", { handle })
+ this.synchronizer.addDocument(handle)
  }
 
  #receiveMessage(message: RepoMessage) {
@@ -371,22 +407,16 @@ export class Repo extends EventEmitter<RepoEvents> {
  * Any peers this `Repo` is connected to for whom `sharePolicy` returns `true` will
  * be notified of the newly created DocHandle.
  *
- * @throws if the cloned handle is not yet ready or if
- * `clonedHandle.docSync()` returns `undefined` (i.e. the handle is unavailable).
  */
  clone<T>(clonedHandle: DocHandle<T>) {
  if (!clonedHandle.isReady()) {
  throw new Error(
  `Cloned handle is not yet in ready state.
- (Try await handle.waitForReady() first.)`
+ (Try await handle.whenReady() first.)`
  )
  }
 
- const sourceDoc = clonedHandle.docSync()
- if (!sourceDoc) {
- throw new Error("Cloned handle doesn't have a document.")
- }
-
+ const sourceDoc = clonedHandle.doc()
  const handle = this.create<T>()
 
  handle.update(() => {
@@ -397,60 +427,267 @@ export class Repo extends EventEmitter<RepoEvents> {
  return handle
  }
 
- /**
- * Retrieves a document by id. It gets data from the local system, but also emits a `document`
- * event to advertise interest in the document.
- */
- find<T>(
- /** The url or documentId of the handle to retrieve */
- id: AnyDocumentId
- ): DocHandle<T> {
- const documentId = interpretAsDocumentId(id)
+ findWithProgress<T>(
+ id: AnyDocumentId,
+ options: AbortOptions = {}
+ ): FindProgressWithMethods<T> | FindProgress<T> {
+ const { signal } = options
+ const { documentId, heads } = isValidAutomergeUrl(id)
+ ? parseAutomergeUrl(id)
+ : { documentId: interpretAsDocumentId(id), heads: undefined }
 
- // If we have the handle cached, return it
+ // Check handle cache first - return plain FindStep for terminal states
  if (this.#handleCache[documentId]) {
- if (this.#handleCache[documentId].isUnavailable()) {
- // this ensures that the event fires after the handle has been returned
- setTimeout(() => {
- this.#handleCache[documentId].emit("unavailable", {
- handle: this.#handleCache[documentId],
- })
- })
+ const handle = this.#handleCache[documentId]
+ if (handle.state === UNAVAILABLE) {
+ const result = {
+ state: "unavailable" as const,
+ error: new Error(`Document ${id} is unavailable`),
+ handle,
+ }
+ return result
+ }
+ if (handle.state === DELETED) {
+ const result = {
+ state: "failed" as const,
+ error: new Error(`Document ${id} was deleted`),
+ handle,
+ }
+ return result
+ }
+ if (handle.state === READY) {
+ const result = {
+ state: "ready" as const,
+ handle: heads ? handle.view(heads) : handle,
+ }
+ return result
  }
- return this.#handleCache[documentId]
  }
 
- // If we don't already have the handle, make an empty one and try loading it
- const handle = this.#getHandle<T>({
+ // Check progress cache for any existing signal
+ const cachedProgress = this.#progressCache[documentId]
+ if (cachedProgress) {
+ const handle = this.#handleCache[documentId]
+ // Return cached progress if we have a handle and it's either in a terminal state or loading
+ if (
+ handle &&
+ (handle.state === READY ||
+ handle.state === UNAVAILABLE ||
+ handle.state === DELETED ||
+ handle.state === "loading")
+ ) {
+ return cachedProgress as FindProgressWithMethods<T>
+ }
+ }
+
+ const handle = this.#getHandle<T>({ documentId })
+ const initial = {
+ state: "loading" as const,
+ progress: 0,
+ handle,
+ }
+
+ // Create a new progress signal
+ const progressSignal = {
+ subscribers: new Set<(progress: FindProgress<T>) => void>(),
+ currentProgress: undefined as FindProgress<T> | undefined,
+ notify: (progress: FindProgress<T>) => {
+ progressSignal.currentProgress = progress
+ progressSignal.subscribers.forEach(callback => callback(progress))
+ // Cache all states, not just terminal ones
+ this.#progressCache[documentId] = progress
+ },
+ peek: () => progressSignal.currentProgress || initial,
+ subscribe: (callback: (progress: FindProgress<T>) => void) => {
+ progressSignal.subscribers.add(callback)
+ return () => progressSignal.subscribers.delete(callback)
+ },
+ }
+
+ progressSignal.notify(initial)
+
+ // Start the loading process
+ void this.#loadDocumentWithProgress(
+ id,
  documentId,
- }) as DocHandle<T>
+ handle,
+ progressSignal,
+ signal ? abortable(new Promise(() => {}), signal) : new Promise(() => {})
+ )
 
- // Loading & network is going to be asynchronous no matter what,
- // but we want to return the handle immediately.
- const attemptLoad = this.storageSubsystem
- ? this.storageSubsystem.loadDoc(handle.documentId)
- : Promise.resolve(null)
-
- attemptLoad
- .then(async loadedDoc => {
- if (loadedDoc) {
- // uhhhh, sorry if you're reading this because we were lying to the type system
- handle.update(() => loadedDoc as Automerge.Doc<T>)
- handle.doneLoading()
- } else {
- // we want to wait for the network subsystem to be ready before
- // we request the document. this prevents entering unavailable during initialization.
- await this.networkSubsystem.whenReady()
- handle.request()
+ const result = {
+ ...initial,
+ peek: progressSignal.peek,
+ subscribe: progressSignal.subscribe,
+ }
+ this.#progressCache[documentId] = result
+ return result
+ }
+
+ async #loadDocumentWithProgress<T>(
+ id: AnyDocumentId,
+ documentId: DocumentId,
+ handle: DocHandle<T>,
+ progressSignal: {
+ notify: (progress: FindProgress<T>) => void
+ },
+ abortPromise: Promise<never>
+ ) {
+ try {
+ progressSignal.notify({
+ state: "loading" as const,
+ progress: 25,
+ handle,
+ })
+
+ const loadingPromise = await (this.storageSubsystem
+ ? this.storageSubsystem.loadDoc(handle.documentId)
+ : Promise.resolve(null))
+
+ const loadedDoc = await Promise.race([loadingPromise, abortPromise])
+
+ if (loadedDoc) {
+ handle.update(() => loadedDoc as Automerge.Doc<T>)
+ handle.doneLoading()
+ progressSignal.notify({
+ state: "loading" as const,
+ progress: 50,
+ handle,
+ })
+ } else {
+ await Promise.race([this.networkSubsystem.whenReady(), abortPromise])
+ handle.request()
+ progressSignal.notify({
+ state: "loading" as const,
+ progress: 75,
+ handle,
+ })
+ }
+
+ this.#registerHandleWithSubsystems(handle)
+
+ await Promise.race([handle.whenReady([READY, UNAVAILABLE]), abortPromise])
+
+ if (handle.state === UNAVAILABLE) {
+ const unavailableProgress = {
+ state: "unavailable" as const,
+ handle,
  }
- this.#registerHandleWithSubsystems(handle)
+ progressSignal.notify(unavailableProgress)
+ return
+ }
+ if (handle.state === DELETED) {
+ throw new Error(`Document ${id} was deleted`)
+ }
+
+ progressSignal.notify({ state: "ready" as const, handle })
+ } catch (error) {
+ progressSignal.notify({
+ state: "failed" as const,
+ error: error instanceof Error ? error : new Error(String(error)),
+ handle: this.#getHandle<T>({ documentId }),
  })
- .catch(err => {
- this.#log("error waiting for network", { err })
+ }
+ }
+
+ async find<T>(
+ id: AnyDocumentId,
+ options: RepoFindOptions & AbortOptions = {}
+ ): Promise<DocHandle<T>> {
+ const { allowableStates = ["ready"], signal } = options
+
+ // Check if already aborted
+ if (signal?.aborted) {
+ throw new Error("Operation aborted")
+ }
+
+ const progress = this.findWithProgress<T>(id, { signal })
+
+ if ("subscribe" in progress) {
+ this.#registerHandleWithSubsystems(progress.handle)
+ return new Promise((resolve, reject) => {
+ const unsubscribe = progress.subscribe(state => {
+ if (allowableStates.includes(state.handle.state)) {
+ unsubscribe()
+ resolve(state.handle)
+ } else if (state.state === "unavailable") {
+ unsubscribe()
+ reject(new Error(`Document ${id} is unavailable`))
+ } else if (state.state === "failed") {
+ unsubscribe()
+ reject(state.error)
+ }
+ })
  })
+ } else {
+ if (progress.handle.state === READY) {
+ return progress.handle
+ }
+ // If the handle isn't ready, wait for it and then return it
+ await progress.handle.whenReady([READY, UNAVAILABLE])
+ return progress.handle
+ }
+ }
+
+ /**
+ * Loads a document without waiting for ready state
+ */
+ async #loadDocument<T>(documentId: DocumentId): Promise<DocHandle<T>> {
+ // If we have the handle cached, return it
+ if (this.#handleCache[documentId]) {
+ return this.#handleCache[documentId]
+ }
+
+ // If we don't already have the handle, make an empty one and try loading it
+ const handle = this.#getHandle<T>({ documentId })
+ const loadedDoc = await (this.storageSubsystem
+ ? this.storageSubsystem.loadDoc(handle.documentId)
+ : Promise.resolve(null))
+
+ if (loadedDoc) {
+ // We need to cast this to <T> because loadDoc operates in <unknowns>.
+ // This is really where we ought to be validating the input matches <T>.
+ handle.update(() => loadedDoc as Automerge.Doc<T>)
+ handle.doneLoading()
+ } else {
+ // Because the network subsystem might still be booting up, we wait
+ // here so that we don't immediately give up loading because we're still
+ // making our initial connection to a sync server.
+ await this.networkSubsystem.whenReady()
+ handle.request()
+ }
+
+ this.#registerHandleWithSubsystems(handle)
  return handle
  }
 
+ /**
+ * Retrieves a document by id. It gets data from the local system, but also emits a `document`
+ * event to advertise interest in the document.
+ */
+ async findClassic<T>(
+ /** The url or documentId of the handle to retrieve */
+ id: AnyDocumentId,
+ options: RepoFindOptions & AbortOptions = {}
+ ): Promise<DocHandle<T>> {
+ const documentId = interpretAsDocumentId(id)
+ const { allowableStates, signal } = options
+
+ return abortable(
+ (async () => {
+ const handle = await this.#loadDocument<T>(documentId)
+ if (!allowableStates) {
+ await handle.whenReady([READY, UNAVAILABLE])
+ if (handle.state === UNAVAILABLE && !signal?.aborted) {
+ throw new Error(`Document ${id} is unavailable`)
+ }
+ }
+ return handle
+ })(),
+ signal
+ )
+ }
+
  delete(
  /** The url or documentId of the handle to delete */
  id: AnyDocumentId
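The hunk above replaces the synchronous `find()` with an async, abortable lookup and adds `findWithProgress()` for observing intermediate loading states. A minimal usage sketch follows; the `TodoList` type, `repo`, and `url` values are placeholders, and the `FindProgress` variants are inferred from this hunk rather than from FindProgress.ts:

```typescript
import {
  Repo,
  type AutomergeUrl,
  type DocHandle,
} from "@automerge/automerge-repo"

type TodoList = { todos: string[] } // placeholder document shape
declare const repo: Repo // an already-configured Repo instance
declare const url: AutomergeUrl // a known document URL

async function loadTodos(): Promise<DocHandle<TodoList>> {
  // find() is now async: it resolves once the handle reaches an allowable
  // state (default ["ready"]) and rejects if the document is unavailable.
  const controller = new AbortController()
  const handle = await repo.find<TodoList>(url, { signal: controller.signal })
  console.log(handle.doc().todos) // doc() is synchronous on a ready handle

  // findWithProgress() returns immediately and exposes loading progress.
  const progress = repo.findWithProgress<TodoList>(url)
  if ("subscribe" in progress) {
    const unsubscribe = progress.subscribe(p => {
      if (p.state === "loading") console.log("loading", p.progress)
      else unsubscribe() // ready, unavailable, or failed
    })
  }
  return handle
}
```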
@@ -461,6 +698,7 @@ export class Repo extends EventEmitter<RepoEvents> {
  handle.delete()
 
  delete this.#handleCache[documentId]
+ delete this.#progressCache[documentId]
  this.emit("delete-document", { documentId })
  }
 
@@ -475,8 +713,7 @@ export class Repo extends EventEmitter<RepoEvents> {
  const documentId = interpretAsDocumentId(id)
 
  const handle = this.#getHandle({ documentId })
- const doc = await handle.doc()
- if (!doc) return undefined
+ const doc = handle.doc()
  return Automerge.save(doc)
  }
 
@@ -530,15 +767,46 @@ export class Repo extends EventEmitter<RepoEvents> {
  : Object.values(this.#handleCache)
  await Promise.all(
  handles.map(async handle => {
- const doc = handle.docSync()
- if (!doc) {
- return
- }
- return this.storageSubsystem!.saveDoc(handle.documentId, doc)
+ return this.storageSubsystem!.saveDoc(handle.documentId, handle.doc())
  })
  )
  }
 
+ /**
+ * Removes a DocHandle from the handleCache.
+ * @hidden this API is experimental and may change.
+ * @param documentId - documentId of the DocHandle to remove from handleCache, if present in cache.
+ * @returns Promise<void>
+ */
+ async removeFromCache(documentId: DocumentId) {
+ if (!this.#handleCache[documentId]) {
+ this.#log(
+ `WARN: removeFromCache called but handle not found in handleCache for documentId: ${documentId}`
+ )
+ return
+ }
+ const handle = this.#getHandle({ documentId })
+ await handle.whenReady([READY, UNLOADED, DELETED, UNAVAILABLE])
+ const doc = handle.doc()
+ // because this is an internal-ish function, we'll be extra careful about undefined docs here
+ if (doc) {
+ if (handle.isReady()) {
+ handle.unload()
+ } else {
+ this.#log(
+ `WARN: removeFromCache called but handle for documentId: ${documentId} in unexpected state: ${handle.state}`
+ )
+ }
+ delete this.#handleCache[documentId]
+ // TODO: remove document from synchronizer when removeDocument is implemented
+ // this.synchronizer.removeDocument(documentId)
+ } else {
+ this.#log(
+ `WARN: removeFromCache called but doc undefined for documentId: ${documentId}`
+ )
+ }
+ }
+
  shutdown(): Promise<void> {
  this.networkSubsystem.adapters.forEach(adapter => {
  adapter.disconnect()
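The experimental `removeFromCache()` added above evicts a loaded handle from memory. A sketch of a call site; the `repo` and `documentId` values are placeholders:

```typescript
import { Repo, type DocumentId } from "@automerge/automerge-repo"

declare const repo: Repo
declare const documentId: DocumentId

async function evictDocument() {
  // Unloads the handle (if it is ready) and removes it from the handle cache;
  // a later find() will recreate the handle from storage or the network.
  await repo.removeFromCache(documentId)
}
```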
@@ -575,6 +843,13 @@ export interface RepoConfig {
  * Whether to enable the experimental remote heads gossiping feature
  */
  enableRemoteHeadsGossiping?: boolean
+
+ /**
+ * A list of automerge URLs which should never be loaded regardless of what
+ * messages are received or what the share policy is. This is useful to avoid
+ * loading documents that are known to be too resource intensive.
+ */
+ denylist?: AutomergeUrl[]
  }
 
  /** A function that determines whether we should share a document with a peer
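A configuration sketch for the new `denylist` option; the adapter setup is elided and the blocked URL is a placeholder:

```typescript
import { Repo, type AutomergeUrl } from "@automerge/automerge-repo"

declare const blockedDoc: AutomergeUrl // a document known to be too expensive to load

const repo = new Repo({
  // ...storage and network adapters as usual...
  // Documents on the denylist are never loaded, regardless of incoming
  // messages or the share policy.
  denylist: [blockedDoc],
})
```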
@@ -598,6 +873,11 @@ export interface RepoEvents {
  "delete-document": (arg: DeleteDocumentPayload) => void
  /** A document was marked as unavailable (we don't have it and none of our peers have it) */
  "unavailable-document": (arg: DeleteDocumentPayload) => void
+ "doc-metrics": (arg: DocMetrics) => void
+ }
+
+ export interface RepoFindOptions {
+ allowableStates?: string[]
  }
 
  export interface DocumentPayload {
@@ -607,3 +887,17 @@ export interface DocumentPayload {
  export interface DeleteDocumentPayload {
  documentId: DocumentId
  }
+
+ export type DocMetrics =
+ | DocSyncMetrics
+ | {
+ type: "doc-loaded"
+ documentId: DocumentId
+ durationMillis: number
+ numOps: number
+ numChanges: number
+ }
+ | {
+ type: "doc-denied"
+ documentId: DocumentId
+ }
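A listener sketch for the new `doc-metrics` event; the `DocSyncMetrics` variants are defined in Synchronizer.ts (not shown in this hunk), so they are handled generically here:

```typescript
import { Repo } from "@automerge/automerge-repo"

declare const repo: Repo // placeholder for an existing Repo instance

repo.on("doc-metrics", metric => {
  if (metric.type === "doc-loaded") {
    // Emitted by the storage subsystem when a document is loaded from disk.
    console.log(
      `${metric.documentId}: ${metric.numChanges} changes / ${metric.numOps} ops ` +
        `in ${metric.durationMillis}ms`
    )
  } else if (metric.type === "doc-denied") {
    // A denylisted document was refused (see the denylist option above).
    console.warn(`denied document ${metric.documentId}`)
  } else {
    // One of the DocSyncMetrics variants forwarded from the synchronizer.
  }
})
```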
package/src/helpers/abortable.ts ADDED
@@ -0,0 +1,62 @@
+ /**
+ * Wraps a Promise and causes it to reject when the signal is aborted.
+ *
+ * @remarks
+ * This utility wraps a Promise and rejects when the provided AbortSignal is aborted.
+ * It's designed to make Promise awaits abortable.
+ *
+ * @example
+ * ```typescript
+ * const controller = new AbortController();
+ *
+ * try {
+ * const result = await abortable(fetch('https://api.example.com/data'), controller.signal);
+ * // Meanwhile, to abort in concurrent code before the above line returns: controller.abort();
+ * } catch (err) {
+ * if (err.name === 'AbortError') {
+ * console.log('The operation was aborted');
+ * }
+ * }
+ *
+ * ```
+ *
+ * @param p - A Promise to wrap
+ * @param signal - An AbortSignal that can be used to abort the operation
+ * @returns A wrapper Promise that rejects with AbortError if the signal is aborted
+ * before the promise p settles, and settles as p settles otherwise
+ * @throws {DOMException} With name "AbortError" if aborted before p settles
+ */
+
+ export function abortable<T>(
+ p: Promise<T>,
+ signal: AbortSignal | undefined
+ ): Promise<T> {
+ let settled = false
+ return new Promise((resolve, reject) => {
+ signal?.addEventListener(
+ "abort",
+ () => {
+ if (!settled) {
+ reject(new DOMException("Operation aborted", "AbortError"))
+ }
+ },
+ { once: true }
+ )
+ p.then(result => {
+ resolve(result)
+ })
+ .catch(error => {
+ reject(error)
+ })
+ .finally(() => {
+ settled = true
+ })
+ })
+ }
+
+ /**
+ * Include this type in an options object to pass an AbortSignal to a function.
+ */
+ export interface AbortOptions {
+ signal?: AbortSignal
+ }
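Besides the fetch example in the doc comment, `AbortOptions` is intended to be spread into option objects, as `Repo.find()` does above. A small sketch of that pattern; the relative import path assumes code living inside this package's src/ tree:

```typescript
import { abortable, AbortOptions } from "./helpers/abortable.js"

// Wrap any async step so callers can cancel it via an options object.
async function waitForReply(
  reply: Promise<string>,
  { signal }: AbortOptions = {}
): Promise<string> {
  // Rejects with a DOMException named "AbortError" if the signal fires
  // before the underlying promise settles.
  return abortable(reply, signal)
}

const controller = new AbortController()
const pending = waitForReply(new Promise(() => {}), {
  signal: controller.signal,
})
controller.abort()
pending.catch(err => console.log(err.name)) // logs "AbortError"
```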
package/src/helpers/bufferFromHex.ts ADDED
@@ -0,0 +1,14 @@
+ export const uint8ArrayFromHexString = (hexString: string): Uint8Array => {
+ if (hexString.length % 2 !== 0) {
+ throw new Error("Hex string must have an even length")
+ }
+ const bytes = new Uint8Array(hexString.length / 2)
+ for (let i = 0; i < hexString.length; i += 2) {
+ bytes[i >> 1] = parseInt(hexString.slice(i, i + 2), 16)
+ }
+ return bytes
+ }
+
+ export const uint8ArrayToHexString = (data: Uint8Array): string => {
+ return Array.from(data, byte => byte.toString(16).padStart(2, "0")).join("")
+ }
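A quick round trip through the new hex helpers; the relative import path assumes code sitting next to this helper in the package source:

```typescript
import {
  uint8ArrayFromHexString,
  uint8ArrayToHexString,
} from "./bufferFromHex.js"

const bytes = uint8ArrayFromHexString("48656c6c6f") // bytes of "Hello"
console.log(uint8ArrayToHexString(bytes)) // "48656c6c6f"
// Odd-length input throws: uint8ArrayFromHexString("abc") → Error
```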
package/src/helpers/headsAreSame.ts CHANGED
@@ -1,6 +1,6 @@
- import { Heads } from "@automerge/automerge/slim/next"
  import { arraysAreEqual } from "./arraysAreEqual.js"
+ import type { UrlHeads } from "../types.js"
 
- export const headsAreSame = (a: Heads, b: Heads) => {
+ export const headsAreSame = (a: UrlHeads, b: UrlHeads) => {
  return arraysAreEqual(a, b)
  }