@automerge/automerge-repo 2.0.0-alpha.13 → 2.0.0-alpha.16

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. package/dist/AutomergeUrl.d.ts +19 -4
  2. package/dist/AutomergeUrl.d.ts.map +1 -1
  3. package/dist/AutomergeUrl.js +71 -24
  4. package/dist/DocHandle.d.ts +21 -17
  5. package/dist/DocHandle.d.ts.map +1 -1
  6. package/dist/DocHandle.js +83 -26
  7. package/dist/RemoteHeadsSubscriptions.d.ts +4 -4
  8. package/dist/RemoteHeadsSubscriptions.d.ts.map +1 -1
  9. package/dist/RemoteHeadsSubscriptions.js +4 -1
  10. package/dist/Repo.d.ts +11 -2
  11. package/dist/Repo.d.ts.map +1 -1
  12. package/dist/Repo.js +19 -14
  13. package/dist/helpers/bufferFromHex.d.ts +3 -0
  14. package/dist/helpers/bufferFromHex.d.ts.map +1 -0
  15. package/dist/helpers/bufferFromHex.js +13 -0
  16. package/dist/helpers/headsAreSame.d.ts +2 -2
  17. package/dist/helpers/headsAreSame.d.ts.map +1 -1
  18. package/dist/helpers/mergeArrays.d.ts +1 -1
  19. package/dist/helpers/mergeArrays.d.ts.map +1 -1
  20. package/dist/helpers/tests/storage-adapter-tests.d.ts.map +1 -1
  21. package/dist/helpers/tests/storage-adapter-tests.js +6 -9
  22. package/dist/storage/StorageSubsystem.d.ts.map +1 -1
  23. package/dist/storage/StorageSubsystem.js +2 -1
  24. package/dist/synchronizer/CollectionSynchronizer.d.ts +2 -2
  25. package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
  26. package/dist/synchronizer/CollectionSynchronizer.js +16 -2
  27. package/dist/synchronizer/Synchronizer.d.ts +3 -0
  28. package/dist/synchronizer/Synchronizer.d.ts.map +1 -1
  29. package/package.json +2 -2
  30. package/src/AutomergeUrl.ts +103 -26
  31. package/src/DocHandle.ts +130 -37
  32. package/src/RemoteHeadsSubscriptions.ts +11 -8
  33. package/src/Repo.ts +41 -13
  34. package/src/helpers/bufferFromHex.ts +14 -0
  35. package/src/helpers/headsAreSame.ts +2 -2
  36. package/src/helpers/tests/storage-adapter-tests.ts +13 -24
  37. package/src/storage/StorageSubsystem.ts +3 -1
  38. package/src/synchronizer/CollectionSynchronizer.ts +19 -3
  39. package/src/synchronizer/Synchronizer.ts +12 -7
  40. package/test/AutomergeUrl.test.ts +130 -0
  41. package/test/DocHandle.test.ts +70 -4
  42. package/test/DocSynchronizer.test.ts +10 -3
  43. package/test/Repo.test.ts +155 -3
package/src/DocHandle.ts CHANGED
@@ -2,7 +2,12 @@ import * as A from "@automerge/automerge/slim/next"
2
2
  import debug from "debug"
3
3
  import { EventEmitter } from "eventemitter3"
4
4
  import { assertEvent, assign, createActor, setup, waitFor } from "xstate"
5
- import { stringifyAutomergeUrl } from "./AutomergeUrl.js"
5
+ import {
6
+ decodeHeads,
7
+ encodeHeads,
8
+ stringifyAutomergeUrl,
9
+ UrlHeads,
10
+ } from "./AutomergeUrl.js"
6
11
  import { encode } from "./helpers/cbor.js"
7
12
  import { headsAreSame } from "./helpers/headsAreSame.js"
8
13
  import { withTimeout } from "./helpers/withTimeout.js"
@@ -28,6 +33,9 @@ export class DocHandle<T> extends EventEmitter<DocHandleEvents<T>> {
28
33
  /** The XState actor running our state machine. */
29
34
  #machine
30
35
 
36
+ /** If set, this handle will only show the document at these heads */
37
+ #fixedHeads?: UrlHeads
38
+
31
39
  /** The last known state of our document. */
32
40
  #prevDocState: T = A.init<T>()
33
41
 
@@ -36,7 +44,7 @@ export class DocHandle<T> extends EventEmitter<DocHandleEvents<T>> {
36
44
  #timeoutDelay = 60_000
37
45
 
38
46
  /** A dictionary mapping each peer to the last heads we know they have. */
39
- #remoteHeads: Record<StorageId, A.Heads> = {}
47
+ #remoteHeads: Record<StorageId, UrlHeads> = {}
40
48
 
41
49
  /** @hidden */
42
50
  constructor(
@@ -49,6 +57,10 @@ export class DocHandle<T> extends EventEmitter<DocHandleEvents<T>> {
49
57
  this.#timeoutDelay = options.timeoutDelay
50
58
  }
51
59
 
60
+ if ("heads" in options) {
61
+ this.#fixedHeads = options.heads
62
+ }
63
+
52
64
  const doc = A.init<T>()
53
65
 
54
66
  this.#log = debug(`automerge-repo:dochandle:${this.documentId.slice(0, 5)}`)
@@ -176,7 +188,10 @@ export class DocHandle<T> extends EventEmitter<DocHandleEvents<T>> {
176
188
  #checkForChanges(before: A.Doc<T>, after: A.Doc<T>) {
177
189
  const beforeHeads = A.getHeads(before)
178
190
  const afterHeads = A.getHeads(after)
179
- const docChanged = !headsAreSame(afterHeads, beforeHeads)
191
+ const docChanged = !headsAreSame(
192
+ encodeHeads(afterHeads),
193
+ encodeHeads(beforeHeads)
194
+ )
180
195
  if (docChanged) {
181
196
  this.emit("heads-changed", { handle: this, doc: after })
182
197
 
@@ -202,7 +217,10 @@ export class DocHandle<T> extends EventEmitter<DocHandleEvents<T>> {
202
217
  /** Our documentId in Automerge URL form.
203
218
  */
204
219
  get url(): AutomergeUrl {
205
- return stringifyAutomergeUrl({ documentId: this.documentId })
220
+ return stringifyAutomergeUrl({
221
+ documentId: this.documentId,
222
+ heads: this.#fixedHeads,
223
+ })
206
224
  }
207
225
 
208
226
  /**
@@ -275,6 +293,12 @@ export class DocHandle<T> extends EventEmitter<DocHandleEvents<T>> {
275
293
  // if we timed out, return undefined
276
294
  return undefined
277
295
  }
296
+ // If we have fixed heads, return a view at those heads
297
+ if (this.#fixedHeads) {
298
+ const doc = this.#doc
299
+ if (!doc || this.isUnavailable()) return undefined
300
+ return A.view(doc, decodeHeads(this.#fixedHeads))
301
+ }
278
302
  // Return the document
279
303
  return !this.isUnavailable() ? this.#doc : undefined
280
304
  }
@@ -294,7 +318,11 @@ export class DocHandle<T> extends EventEmitter<DocHandleEvents<T>> {
294
318
  */
295
319
  docSync() {
296
320
  if (!this.isReady()) return undefined
297
- else return this.#doc
321
+ if (this.#fixedHeads) {
322
+ const doc = this.#doc
323
+ return doc ? A.view(doc, decodeHeads(this.#fixedHeads)) : undefined
324
+ }
325
+ return this.#doc
298
326
  }
299
327
 
300
328
  /**
@@ -302,11 +330,12 @@ export class DocHandle<T> extends EventEmitter<DocHandleEvents<T>> {
302
330
  * This precisely defines the state of a document.
303
331
  * @returns the current document's heads, or undefined if the document is not ready
304
332
  */
305
- heads(): A.Heads | undefined {
306
- if (!this.isReady()) {
307
- return undefined
333
+ heads(): UrlHeads | undefined {
334
+ if (!this.isReady()) return undefined
335
+ if (this.#fixedHeads) {
336
+ return this.#fixedHeads
308
337
  }
309
- return A.getHeads(this.#doc)
338
+ return encodeHeads(A.getHeads(this.#doc))
310
339
  }
311
340
 
312
341
  begin() {
@@ -314,9 +343,7 @@ export class DocHandle<T> extends EventEmitter<DocHandleEvents<T>> {
314
343
  }
315
344
 
316
345
  /**
317
- * Creates a fixed "view" of an automerge document at the given point in time represented
318
- * by the `heads` passed in. The return value is the same type as docSync() and will return
319
- * undefined if the object hasn't finished loading.
346
+ * Returns an array of all past "heads" for the document in topological order.
320
347
  *
321
348
  * @remarks
322
349
  * A point-in-time in an automerge document is an *array* of heads since there may be
@@ -325,19 +352,21 @@ export class DocHandle<T> extends EventEmitter<DocHandleEvents<T>> {
325
352
  * history views would be quite large under concurrency (every thing in each branch against each other).
326
353
  * There might be a clever way to think about this, but we haven't found it yet, so for now at least
327
354
  * we present a single traversable view which excludes concurrency.
328
- * @returns The individual heads for every change in the document.
355
+ * @returns UrlHeads[] - The individual heads for every change in the document. Each item is a tagged string[1].
329
356
  */
330
- history(): A.Heads[] | undefined {
357
+ history(): UrlHeads[] | undefined {
331
358
  if (!this.isReady()) {
332
359
  return undefined
333
360
  }
334
361
  // This just returns all the heads as individual strings.
335
362
 
336
- return A.topoHistoryTraversal(this.#doc).map(h => [h]) as A.Heads[]
363
+ return A.topoHistoryTraversal(this.#doc).map(h =>
364
+ encodeHeads([h])
365
+ ) as UrlHeads[]
337
366
  }
338
367
 
339
368
  /**
340
- * Creates a fixed "view" of an automerge document at the given point in time represented
369
+ * Creates a new DocHandle with a fixed "view" at the given point in time represented
341
370
  * by the `heads` passed in. The return value is the same type as docSync() and will return
342
371
  * undefined if the object hasn't finished loading.
343
372
  *
@@ -346,13 +375,24 @@ export class DocHandle<T> extends EventEmitter<DocHandleEvents<T>> {
346
375
  * of Automerge doesn't check types at runtime, so if you go back to an old set of heads
347
376
  * that doesn't match the heads here, Typescript will not save you.
348
377
  *
349
- * @returns An Automerge.Doc<T> at the point in time.
378
+ * @argument heads - The heads to view the document at. See history().
379
+ * @returns DocHandle<T> at the time of `heads`
350
380
  */
351
- view(heads: A.Heads): A.Doc<T> | undefined {
381
+ view(heads: UrlHeads): DocHandle<T> {
352
382
  if (!this.isReady()) {
353
- return undefined
383
+ throw new Error(
384
+ `DocHandle#${this.documentId} is not ready. Check \`handle.isReady()\` before calling view().`
385
+ )
354
386
  }
355
- return A.view(this.#doc, heads)
387
+ // Create a new handle with the same documentId but fixed heads
388
+ const handle = new DocHandle<T>(this.documentId, {
389
+ heads,
390
+ timeoutDelay: this.#timeoutDelay,
391
+ })
392
+ handle.update(() => A.clone(this.#doc))
393
+ handle.doneLoading()
394
+
395
+ return handle
356
396
  }
357
397
 
358
398
  /**
@@ -360,19 +400,46 @@ export class DocHandle<T> extends EventEmitter<DocHandleEvents<T>> {
360
400
  * if applied.
361
401
  *
362
402
  * @remarks
363
- * We allow specifying both a from/to heads or just a single comparison point, in which case
364
- * the base will be the current document heads.
403
+ * We allow specifying either:
404
+ * - Two sets of heads to compare directly
405
+ * - A single set of heads to compare against our current heads
406
+ * - Another DocHandle to compare against (which must share history with this document)
365
407
  *
366
- * @returns Automerge patches that go from one document state to the other. Use view() to get the full state.
408
+ * @throws Error if the documents don't share history or if either document is not ready
409
+ * @returns Automerge patches that go from one document state to the other
367
410
  */
368
- diff(first: A.Heads, second?: A.Heads): A.Patch[] | undefined {
411
+ diff(first: UrlHeads | DocHandle<T>, second?: UrlHeads): A.Patch[] {
369
412
  if (!this.isReady()) {
370
- return undefined
413
+ throw new Error(
414
+ `DocHandle#${this.documentId} is not ready. Check \`handle.isReady()\` before calling diff().`
415
+ )
371
416
  }
372
- // We allow only one set of heads to be specified, in which case we use the doc's heads
373
- const from = second ? first : this.heads() || [] // because we guard above this should always have useful data
417
+
418
+ const doc = this.#doc
419
+ if (!doc) throw new Error("Document not available")
420
+
421
+ // If first argument is a DocHandle
422
+ if (first instanceof DocHandle) {
423
+ if (!first.isReady()) {
424
+ throw new Error("Cannot diff against a handle that isn't ready")
425
+ }
426
+ const otherHeads = first.heads()
427
+ if (!otherHeads) throw new Error("Other document's heads not available")
428
+
429
+ // Create a temporary merged doc to verify shared history and compute diff
430
+ const mergedDoc = A.merge(A.clone(doc), first.docSync()!)
431
+ // Use the merged doc to compute the diff
432
+ return A.diff(
433
+ mergedDoc,
434
+ decodeHeads(this.heads()!),
435
+ decodeHeads(otherHeads)
436
+ )
437
+ }
438
+
439
+ // Otherwise treat as heads
440
+ const from = second ? first : ((this.heads() || []) as UrlHeads)
374
441
  const to = second ? second : first
375
- return A.diff(this.#doc, from, to)
442
+ return A.diff(doc, decodeHeads(from), decodeHeads(to))
376
443
  }
377
444
 
378
445
  /**
@@ -390,11 +457,15 @@ export class DocHandle<T> extends EventEmitter<DocHandleEvents<T>> {
390
457
  if (!this.isReady()) {
391
458
  return undefined
392
459
  }
460
+
393
461
  if (!change) {
394
462
  change = this.heads()![0]
395
463
  }
396
464
  // we return undefined instead of null by convention in this API
397
- return A.inspectChange(this.#doc, change) || undefined
465
+ return (
466
+ A.inspectChange(this.#doc, decodeHeads([change] as UrlHeads)[0]) ||
467
+ undefined
468
+ )
398
469
  }
399
470
 
400
471
  /**
@@ -420,13 +491,13 @@ export class DocHandle<T> extends EventEmitter<DocHandleEvents<T>> {
420
491
  * Called by the repo either when a doc handle changes or we receive new remote heads.
421
492
  * @hidden
422
493
  */
423
- setRemoteHeads(storageId: StorageId, heads: A.Heads) {
494
+ setRemoteHeads(storageId: StorageId, heads: UrlHeads) {
424
495
  this.#remoteHeads[storageId] = heads
425
496
  this.emit("remote-heads", { storageId, heads })
426
497
  }
427
498
 
428
499
  /** Returns the heads of the storageId. */
429
- getRemoteHeads(storageId: StorageId): A.Heads | undefined {
500
+ getRemoteHeads(storageId: StorageId): UrlHeads | undefined {
430
501
  return this.#remoteHeads[storageId]
431
502
  }
432
503
 
@@ -451,6 +522,13 @@ export class DocHandle<T> extends EventEmitter<DocHandleEvents<T>> {
451
522
  `DocHandle#${this.documentId} is in ${this.state} and not ready. Check \`handle.isReady()\` before accessing the document.`
452
523
  )
453
524
  }
525
+
526
+ if (this.#fixedHeads) {
527
+ throw new Error(
528
+ `DocHandle#${this.documentId} is in view-only mode at specific heads. Use clone() to create a new document from this state.`
529
+ )
530
+ }
531
+
454
532
  this.#machine.send({
455
533
  type: UPDATE,
456
534
  payload: { callback: doc => A.change(doc, options, callback) },
@@ -462,22 +540,29 @@ export class DocHandle<T> extends EventEmitter<DocHandleEvents<T>> {
462
540
  * @returns A set of heads representing the concurrent change that was made.
463
541
  */
464
542
  changeAt(
465
- heads: A.Heads,
543
+ heads: UrlHeads,
466
544
  callback: A.ChangeFn<T>,
467
545
  options: A.ChangeOptions<T> = {}
468
- ): string[] | undefined {
546
+ ): UrlHeads[] | undefined {
469
547
  if (!this.isReady()) {
470
548
  throw new Error(
471
549
  `DocHandle#${this.documentId} is not ready. Check \`handle.isReady()\` before accessing the document.`
472
550
  )
473
551
  }
474
- let resultHeads: string[] | undefined = undefined
552
+ if (this.#fixedHeads) {
553
+ throw new Error(
554
+ `DocHandle#${this.documentId} is in view-only mode at specific heads. Use clone() to create a new document from this state.`
555
+ )
556
+ }
557
+ let resultHeads: UrlHeads | undefined = undefined
475
558
  this.#machine.send({
476
559
  type: UPDATE,
477
560
  payload: {
478
561
  callback: doc => {
479
- const result = A.changeAt(doc, heads, options, callback)
480
- resultHeads = result.newHeads || undefined
562
+ const result = A.changeAt(doc, decodeHeads(heads), options, callback)
563
+ resultHeads = result.newHeads
564
+ ? encodeHeads(result.newHeads)
565
+ : undefined
481
566
  return result.newDoc
482
567
  },
483
568
  },
@@ -502,6 +587,11 @@ export class DocHandle<T> extends EventEmitter<DocHandleEvents<T>> {
502
587
  if (!this.isReady() || !otherHandle.isReady()) {
503
588
  throw new Error("Both handles must be ready to merge")
504
589
  }
590
+ if (this.#fixedHeads) {
591
+ throw new Error(
592
+ `DocHandle#${this.documentId} is in view-only mode at specific heads. Use clone() to create a new document from this state.`
593
+ )
594
+ }
505
595
  const mergingDoc = otherHandle.docSync()
506
596
  if (!mergingDoc) {
507
597
  throw new Error("The document to be merged in is falsy, aborting.")
@@ -577,6 +667,9 @@ export type DocHandleOptions<T> =
577
667
  | {
578
668
  isNew?: false
579
669
 
670
+ // An optional point in time to lock the document to.
671
+ heads?: UrlHeads
672
+
580
673
  /** The number of milliseconds before we mark this document as unavailable if we don't have it and nobody shares it with us. */
581
674
  timeoutDelay?: number
582
675
  }
@@ -640,7 +733,7 @@ export interface DocHandleOutboundEphemeralMessagePayload<T> {
640
733
  /** Emitted when we have new remote heads for this document */
641
734
  export interface DocHandleRemoteHeadsPayload {
642
735
  storageId: StorageId
643
- heads: A.Heads
736
+ heads: UrlHeads
644
737
  }
645
738
 
646
739
  // STATE MACHINE TYPES & CONSTANTS
@@ -1,4 +1,3 @@
1
- import { next as A } from "@automerge/automerge/slim"
2
1
  import { EventEmitter } from "eventemitter3"
3
2
  import { DocumentId, PeerId } from "./types.js"
4
3
  import {
@@ -7,12 +6,13 @@ import {
7
6
  } from "./network/messages.js"
8
7
  import { StorageId } from "./index.js"
9
8
  import debug from "debug"
9
+ import { UrlHeads } from "./AutomergeUrl.js"
10
10
 
11
11
  // Notify a DocHandle that remote heads have changed
12
12
  export type RemoteHeadsSubscriptionEventPayload = {
13
13
  documentId: DocumentId
14
14
  storageId: StorageId
15
- remoteHeads: A.Heads
15
+ remoteHeads: UrlHeads
16
16
  timestamp: number
17
17
  }
18
18
 
@@ -21,7 +21,7 @@ export type NotifyRemoteHeadsPayload = {
21
21
  targetId: PeerId
22
22
  documentId: DocumentId
23
23
  storageId: StorageId
24
- heads: A.Heads
24
+ heads: UrlHeads
25
25
  timestamp: number
26
26
  }
27
27
 
@@ -216,7 +216,7 @@ export class RemoteHeadsSubscriptions extends EventEmitter<RemoteHeadsSubscripti
216
216
  handleImmediateRemoteHeadsChanged(
217
217
  documentId: DocumentId,
218
218
  storageId: StorageId,
219
- heads: A.Heads
219
+ heads: UrlHeads
220
220
  ) {
221
221
  this.#log("handleLocalHeadsChanged", documentId, storageId, heads)
222
222
  const remote = this.#knownHeads.get(documentId)
@@ -334,7 +334,7 @@ export class RemoteHeadsSubscriptions extends EventEmitter<RemoteHeadsSubscripti
334
334
  #changedHeads(msg: RemoteHeadsChanged): {
335
335
  documentId: DocumentId
336
336
  storageId: StorageId
337
- remoteHeads: A.Heads
337
+ remoteHeads: UrlHeads
338
338
  timestamp: number
339
339
  }[] {
340
340
  const changedHeads = []
@@ -356,11 +356,14 @@ export class RemoteHeadsSubscriptions extends EventEmitter<RemoteHeadsSubscripti
356
356
  if (docRemote && docRemote.timestamp >= timestamp) {
357
357
  continue
358
358
  } else {
359
- remote.set(storageId as StorageId, { timestamp, heads })
359
+ remote.set(storageId as StorageId, {
360
+ timestamp,
361
+ heads: heads as UrlHeads,
362
+ })
360
363
  changedHeads.push({
361
364
  documentId,
362
365
  storageId: storageId as StorageId,
363
- remoteHeads: heads,
366
+ remoteHeads: heads as UrlHeads,
364
367
  timestamp,
365
368
  })
366
369
  }
@@ -371,5 +374,5 @@ export class RemoteHeadsSubscriptions extends EventEmitter<RemoteHeadsSubscripti
371
374
 
372
375
  type LastHeads = {
373
376
  timestamp: number
374
- heads: A.Heads
377
+ heads: UrlHeads
375
378
  }
package/src/Repo.ts CHANGED
@@ -2,8 +2,10 @@ import { next as Automerge } from "@automerge/automerge/slim"
2
2
  import debug from "debug"
3
3
  import { EventEmitter } from "eventemitter3"
4
4
  import {
5
+ encodeHeads,
5
6
  generateAutomergeUrl,
6
7
  interpretAsDocumentId,
8
+ isValidAutomergeUrl,
7
9
  parseAutomergeUrl,
8
10
  } from "./AutomergeUrl.js"
9
11
  import {
@@ -31,7 +33,12 @@ import {
31
33
  DocSyncMetrics,
32
34
  SyncStatePayload,
33
35
  } from "./synchronizer/Synchronizer.js"
34
- import type { AnyDocumentId, DocumentId, PeerId } from "./types.js"
36
+ import type {
37
+ AnyDocumentId,
38
+ AutomergeUrl,
39
+ DocumentId,
40
+ PeerId,
41
+ } from "./types.js"
35
42
 
36
43
  function randomPeerId() {
37
44
  return ("peer-" + Math.random().toString(36).slice(4)) as PeerId
@@ -80,6 +87,7 @@ export class Repo extends EventEmitter<RepoEvents> {
80
87
  sharePolicy,
81
88
  isEphemeral = storage === undefined,
82
89
  enableRemoteHeadsGossiping = false,
90
+ denylist = [],
83
91
  }: RepoConfig = {}) {
84
92
  super()
85
93
  this.#remoteHeadsGossipingEnabled = enableRemoteHeadsGossiping
@@ -99,7 +107,7 @@ export class Repo extends EventEmitter<RepoEvents> {
99
107
 
100
108
  // SYNCHRONIZER
101
109
  // The synchronizer uses the network subsystem to keep documents in sync with peers.
102
- this.synchronizer = new CollectionSynchronizer(this)
110
+ this.synchronizer = new CollectionSynchronizer(this, denylist)
103
111
 
104
112
  // When the synchronizer emits messages, send them to peers
105
113
  this.synchronizer.on("message", message => {
@@ -187,16 +195,20 @@ export class Repo extends EventEmitter<RepoEvents> {
187
195
  const heads = handle.getRemoteHeads(storageId)
188
196
  const haveHeadsChanged =
189
197
  message.syncState.theirHeads &&
190
- (!heads || !headsAreSame(heads, message.syncState.theirHeads))
198
+ (!heads ||
199
+ !headsAreSame(heads, encodeHeads(message.syncState.theirHeads)))
191
200
 
192
201
  if (haveHeadsChanged && message.syncState.theirHeads) {
193
- handle.setRemoteHeads(storageId, message.syncState.theirHeads)
202
+ handle.setRemoteHeads(
203
+ storageId,
204
+ encodeHeads(message.syncState.theirHeads)
205
+ )
194
206
 
195
207
  if (storageId && this.#remoteHeadsGossipingEnabled) {
196
208
  this.#remoteHeadsSubscriptions.handleImmediateRemoteHeadsChanged(
197
209
  message.documentId,
198
210
  storageId,
199
- message.syncState.theirHeads
211
+ encodeHeads(message.syncState.theirHeads)
200
212
  )
201
213
  }
202
214
  }
@@ -424,19 +436,22 @@ export class Repo extends EventEmitter<RepoEvents> {
424
436
  /** The url or documentId of the handle to retrieve */
425
437
  id: AnyDocumentId
426
438
  ): DocHandle<T> {
427
- const documentId = interpretAsDocumentId(id)
439
+ const { documentId, heads } = isValidAutomergeUrl(id)
440
+ ? parseAutomergeUrl(id)
441
+ : { documentId: interpretAsDocumentId(id), heads: undefined }
428
442
 
429
- // If we have the handle cached, return it
430
- if (this.#handleCache[documentId]) {
431
- if (this.#handleCache[documentId].isUnavailable()) {
443
+ const cachedHandle = this.#handleCache[documentId]
444
+ if (cachedHandle) {
445
+ if (cachedHandle.isUnavailable()) {
432
446
  // this ensures that the event fires after the handle has been returned
433
447
  setTimeout(() => {
434
- this.#handleCache[documentId].emit("unavailable", {
435
- handle: this.#handleCache[documentId],
448
+ cachedHandle.emit("unavailable", {
449
+ handle: cachedHandle,
436
450
  })
437
451
  })
438
452
  }
439
- return this.#handleCache[documentId]
453
+ // If we already have the handle, return it immediately (or a view of the handle if heads are specified)
454
+ return heads ? cachedHandle.view(heads) : cachedHandle
440
455
  }
441
456
 
442
457
  // If we don't already have the handle, make an empty one and try loading it
@@ -467,7 +482,9 @@ export class Repo extends EventEmitter<RepoEvents> {
467
482
  .catch(err => {
468
483
  this.#log("error waiting for network", { err })
469
484
  })
470
- return handle
485
+
486
+ // If we already have the handle, return it immediately (or a view of the handle if heads are specified)
487
+ return heads ? handle.view(heads) : handle
471
488
  }
472
489
 
473
490
  delete(
@@ -627,6 +644,13 @@ export interface RepoConfig {
627
644
  * Whether to enable the experimental remote heads gossiping feature
628
645
  */
629
646
  enableRemoteHeadsGossiping?: boolean
647
+
648
+ /**
649
+ * A list of automerge URLs which should never be loaded regardless of what
650
+ * messages are received or what the share policy is. This is useful to avoid
651
+ * loading documents that are known to be too resource intensive.
652
+ */
653
+ denylist?: AutomergeUrl[]
630
654
  }
631
655
 
632
656
  /** A function that determines whether we should share a document with a peer
@@ -670,3 +694,7 @@ export type DocMetrics =
670
694
  numOps: number
671
695
  numChanges: number
672
696
  }
697
+ | {
698
+ type: "doc-denied"
699
+ documentId: DocumentId
700
+ }
@@ -0,0 +1,14 @@
1
+ export const uint8ArrayFromHexString = (hexString: string): Uint8Array => {
2
+ if (hexString.length % 2 !== 0) {
3
+ throw new Error("Hex string must have an even length")
4
+ }
5
+ const bytes = new Uint8Array(hexString.length / 2)
6
+ for (let i = 0; i < hexString.length; i += 2) {
7
+ bytes[i >> 1] = parseInt(hexString.slice(i, i + 2), 16)
8
+ }
9
+ return bytes
10
+ }
11
+
12
+ export const uint8ArrayToHexString = (data: Uint8Array): string => {
13
+ return Array.from(data, byte => byte.toString(16).padStart(2, "0")).join("")
14
+ }
@@ -1,6 +1,6 @@
1
- import { Heads } from "@automerge/automerge/slim/next"
2
1
  import { arraysAreEqual } from "./arraysAreEqual.js"
2
+ import { UrlHeads } from "../AutomergeUrl.js"
3
3
 
4
- export const headsAreSame = (a: Heads, b: Heads) => {
4
+ export const headsAreSame = (a: UrlHeads, b: UrlHeads) => {
5
5
  return arraysAreEqual(a, b)
6
6
  }
@@ -67,20 +67,16 @@ export function runStorageAdapterTests(setup: SetupFn, title?: string): void {
67
67
  await adapter.save(["AAAAA", "snapshot", "yyyyy"], PAYLOAD_B())
68
68
  await adapter.save(["AAAAA", "sync-state", "zzzzz"], PAYLOAD_C())
69
69
 
70
- expect(await adapter.loadRange(["AAAAA"])).toStrictEqual(
71
- expect.arrayContaining([
72
- { key: ["AAAAA", "sync-state", "xxxxx"], data: PAYLOAD_A() },
73
- { key: ["AAAAA", "snapshot", "yyyyy"], data: PAYLOAD_B() },
74
- { key: ["AAAAA", "sync-state", "zzzzz"], data: PAYLOAD_C() },
75
- ])
76
- )
77
-
78
- expect(await adapter.loadRange(["AAAAA", "sync-state"])).toStrictEqual(
79
- expect.arrayContaining([
80
- { key: ["AAAAA", "sync-state", "xxxxx"], data: PAYLOAD_A() },
81
- { key: ["AAAAA", "sync-state", "zzzzz"], data: PAYLOAD_C() },
82
- ])
83
- )
70
+ expect(await adapter.loadRange(["AAAAA"])).toStrictEqual([
71
+ { key: ["AAAAA", "sync-state", "xxxxx"], data: PAYLOAD_A() },
72
+ { key: ["AAAAA", "snapshot", "yyyyy"], data: PAYLOAD_B() },
73
+ { key: ["AAAAA", "sync-state", "zzzzz"], data: PAYLOAD_C() },
74
+ ])
75
+
76
+ expect(await adapter.loadRange(["AAAAA", "sync-state"])).toStrictEqual([
77
+ { key: ["AAAAA", "sync-state", "xxxxx"], data: PAYLOAD_A() },
78
+ { key: ["AAAAA", "sync-state", "zzzzz"], data: PAYLOAD_C() },
79
+ ])
84
80
  })
85
81
 
86
82
  it("should only load values that match they key", async ({ adapter }) => {
@@ -88,16 +84,9 @@ export function runStorageAdapterTests(setup: SetupFn, title?: string): void {
88
84
  await adapter.save(["BBBBB", "sync-state", "zzzzz"], PAYLOAD_C())
89
85
 
90
86
  const actual = await adapter.loadRange(["AAAAA"])
91
- expect(actual).toStrictEqual(
92
- expect.arrayContaining([
93
- { key: ["AAAAA", "sync-state", "xxxxx"], data: PAYLOAD_A() },
94
- ])
95
- )
96
- expect(actual).toStrictEqual(
97
- expect.not.arrayContaining([
98
- { key: ["BBBBB", "sync-state", "zzzzz"], data: PAYLOAD_C() },
99
- ])
100
- )
87
+ expect(actual).toStrictEqual([
88
+ { key: ["AAAAA", "sync-state", "xxxxx"], data: PAYLOAD_A() },
89
+ ])
101
90
  })
102
91
  })
103
92
 
@@ -9,6 +9,7 @@ import { keyHash, headsHash } from "./keyHash.js"
9
9
  import { chunkTypeFromKey } from "./chunkTypeFromKey.js"
10
10
  import * as Uuid from "uuid"
11
11
  import { EventEmitter } from "eventemitter3"
12
+ import { encodeHeads } from "../AutomergeUrl.js"
12
13
 
13
14
  type StorageSubsystemEvents = {
14
15
  "document-loaded": (arg: {
@@ -173,6 +174,7 @@ export class StorageSubsystem extends EventEmitter<StorageSubsystemEvents> {
173
174
  } else {
174
175
  await this.#saveIncremental(documentId, doc)
175
176
  }
177
+
176
178
  this.#storedHeads.set(documentId, A.getHeads(doc))
177
179
  }
178
180
 
@@ -279,7 +281,7 @@ export class StorageSubsystem extends EventEmitter<StorageSubsystemEvents> {
279
281
  }
280
282
 
281
283
  const newHeads = A.getHeads(doc)
282
- if (headsAreSame(newHeads, oldHeads)) {
284
+ if (headsAreSame(encodeHeads(newHeads), encodeHeads(oldHeads))) {
283
285
  // the document hasn't changed
284
286
  return false
285
287
  }