@automerge/automerge-repo 2.0.0-alpha.2 → 2.0.0-alpha.20

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61)
  1. package/dist/AutomergeUrl.d.ts +17 -5
  2. package/dist/AutomergeUrl.d.ts.map +1 -1
  3. package/dist/AutomergeUrl.js +71 -24
  4. package/dist/DocHandle.d.ts +80 -8
  5. package/dist/DocHandle.d.ts.map +1 -1
  6. package/dist/DocHandle.js +181 -10
  7. package/dist/RemoteHeadsSubscriptions.d.ts +4 -5
  8. package/dist/RemoteHeadsSubscriptions.d.ts.map +1 -1
  9. package/dist/RemoteHeadsSubscriptions.js +4 -1
  10. package/dist/Repo.d.ts +35 -2
  11. package/dist/Repo.d.ts.map +1 -1
  12. package/dist/Repo.js +112 -70
  13. package/dist/entrypoints/fullfat.d.ts +1 -0
  14. package/dist/entrypoints/fullfat.d.ts.map +1 -1
  15. package/dist/entrypoints/fullfat.js +1 -2
  16. package/dist/helpers/bufferFromHex.d.ts +3 -0
  17. package/dist/helpers/bufferFromHex.d.ts.map +1 -0
  18. package/dist/helpers/bufferFromHex.js +13 -0
  19. package/dist/helpers/headsAreSame.d.ts +2 -2
  20. package/dist/helpers/headsAreSame.d.ts.map +1 -1
  21. package/dist/helpers/mergeArrays.d.ts +1 -1
  22. package/dist/helpers/mergeArrays.d.ts.map +1 -1
  23. package/dist/helpers/tests/storage-adapter-tests.d.ts +2 -2
  24. package/dist/helpers/tests/storage-adapter-tests.d.ts.map +1 -1
  25. package/dist/helpers/tests/storage-adapter-tests.js +25 -48
  26. package/dist/index.d.ts +1 -1
  27. package/dist/index.d.ts.map +1 -1
  28. package/dist/index.js +1 -1
  29. package/dist/storage/StorageSubsystem.d.ts +11 -1
  30. package/dist/storage/StorageSubsystem.d.ts.map +1 -1
  31. package/dist/storage/StorageSubsystem.js +20 -4
  32. package/dist/synchronizer/CollectionSynchronizer.d.ts +15 -2
  33. package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
  34. package/dist/synchronizer/CollectionSynchronizer.js +29 -8
  35. package/dist/synchronizer/DocSynchronizer.d.ts +7 -0
  36. package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -1
  37. package/dist/synchronizer/DocSynchronizer.js +14 -0
  38. package/dist/synchronizer/Synchronizer.d.ts +11 -0
  39. package/dist/synchronizer/Synchronizer.d.ts.map +1 -1
  40. package/dist/types.d.ts +4 -1
  41. package/dist/types.d.ts.map +1 -1
  42. package/package.json +3 -3
  43. package/src/AutomergeUrl.ts +101 -26
  44. package/src/DocHandle.ts +245 -20
  45. package/src/RemoteHeadsSubscriptions.ts +11 -9
  46. package/src/Repo.ts +163 -68
  47. package/src/entrypoints/fullfat.ts +1 -2
  48. package/src/helpers/bufferFromHex.ts +14 -0
  49. package/src/helpers/headsAreSame.ts +2 -2
  50. package/src/helpers/tests/storage-adapter-tests.ts +44 -86
  51. package/src/index.ts +2 -0
  52. package/src/storage/StorageSubsystem.ts +29 -4
  53. package/src/synchronizer/CollectionSynchronizer.ts +42 -9
  54. package/src/synchronizer/DocSynchronizer.ts +15 -0
  55. package/src/synchronizer/Synchronizer.ts +14 -0
  56. package/src/types.ts +4 -1
  57. package/test/AutomergeUrl.test.ts +130 -0
  58. package/test/DocHandle.test.ts +209 -2
  59. package/test/DocSynchronizer.test.ts +10 -3
  60. package/test/Repo.test.ts +228 -3
  61. package/test/StorageSubsystem.test.ts +17 -0
package/src/storage/StorageSubsystem.ts CHANGED
@@ -8,12 +8,23 @@ import { ChunkInfo, StorageKey, StorageId } from "./types.js"
 import { keyHash, headsHash } from "./keyHash.js"
 import { chunkTypeFromKey } from "./chunkTypeFromKey.js"
 import * as Uuid from "uuid"
+import { EventEmitter } from "eventemitter3"
+import { encodeHeads } from "../AutomergeUrl.js"
+
+type StorageSubsystemEvents = {
+  "document-loaded": (arg: {
+    documentId: DocumentId
+    durationMillis: number
+    numOps: number
+    numChanges: number
+  }) => void
+}
 
 /**
  * The storage subsystem is responsible for saving and loading Automerge documents to and from
  * storage adapter. It also provides a generic key/value storage interface for other uses.
  */
-export class StorageSubsystem {
+export class StorageSubsystem extends EventEmitter<StorageSubsystemEvents> {
   /** The storage adapter to use for saving and loading documents */
   #storageAdapter: StorageAdapterInterface
 
@@ -29,6 +40,7 @@ export class StorageSubsystem {
   #log = debug(`automerge-repo:storage-subsystem`)
 
   constructor(storageAdapter: StorageAdapterInterface) {
+    super()
     this.#storageAdapter = storageAdapter
   }
 
@@ -130,7 +142,14 @@ export class StorageSubsystem {
     if (binary.length === 0) return null
 
     // Load into an Automerge document
+    const start = performance.now()
     const newDoc = A.loadIncremental(A.init(), binary) as A.Doc<T>
+    const end = performance.now()
+    this.emit("document-loaded", {
+      documentId,
+      durationMillis: end - start,
+      ...A.stats(newDoc),
+    })
 
     // Record the latest heads for the document
     this.#storedHeads.set(documentId, A.getHeads(newDoc))
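The `document-loaded` event added above exposes load timing and document size (via `A.stats`). A minimal listener sketch; the structural stand-in below is an assumption, since `StorageSubsystem` is constructed internally by `Repo` and may not be part of the package's public exports:

```ts
import type { DocumentId } from "@automerge/automerge-repo"

// Stand-in for a StorageSubsystem instance (assumption: you have access to one,
// e.g. inside the repo itself or in code that constructs the subsystem directly)
declare const storage: {
  on(
    event: "document-loaded",
    listener: (arg: {
      documentId: DocumentId
      durationMillis: number
      numOps: number
      numChanges: number
    }) => void
  ): void
}

storage.on("document-loaded", ({ documentId, durationMillis, numOps, numChanges }) => {
  console.log(
    `loaded ${documentId} in ${durationMillis.toFixed(1)}ms ` +
      `(${numOps} ops across ${numChanges} changes)`
  )
})
```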
@@ -155,6 +174,7 @@
     } else {
       await this.#saveIncremental(documentId, doc)
     }
+
     this.#storedHeads.set(documentId, A.getHeads(doc))
   }
 
@@ -232,8 +252,13 @@
     storageId: StorageId
   ): Promise<A.SyncState | undefined> {
     const key = [documentId, "sync-state", storageId]
-    const loaded = await this.#storageAdapter.load(key)
-    return loaded ? A.decodeSyncState(loaded) : undefined
+    try {
+      const loaded = await this.#storageAdapter.load(key)
+      return loaded ? A.decodeSyncState(loaded) : undefined
+    } catch (e) {
+      this.#log(`Error loading sync state for ${documentId} from ${storageId}`)
+      return undefined
+    }
   }
 
   async saveSyncState(
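Sync-state loading is now best-effort: a record that fails to load or decode is logged and discarded rather than rejecting the whole call. A sketch of the failure mode this guards against, using a deliberately failing adapter (illustration only; only the `load` member of `StorageAdapterInterface` is stubbed):

```ts
import type { StorageAdapterInterface, StorageKey } from "@automerge/automerge-repo"

// An adapter whose load always rejects, e.g. simulating a corrupt record
const flaky: Pick<StorageAdapterInterface, "load"> = {
  load: async (_key: StorageKey) => {
    throw new Error("corrupt sync-state record")
  },
}

// Before this change, loadSyncState would reject with that error; now it
// logs and resolves to undefined, so syncing restarts from a fresh sync state.
```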
@@ -256,7 +281,7 @@
     }
 
     const newHeads = A.getHeads(doc)
-    if (headsAreSame(newHeads, oldHeads)) {
+    if (headsAreSame(encodeHeads(newHeads), encodeHeads(oldHeads))) {
       // the document hasn't changed
       return false
     }
package/src/synchronizer/CollectionSynchronizer.ts CHANGED
@@ -1,9 +1,9 @@
 import debug from "debug"
 import { DocHandle } from "../DocHandle.js"
-import { stringifyAutomergeUrl } from "../AutomergeUrl.js"
+import { parseAutomergeUrl, stringifyAutomergeUrl } from "../AutomergeUrl.js"
 import { Repo } from "../Repo.js"
 import { DocMessage } from "../network/messages.js"
-import { DocumentId, PeerId } from "../types.js"
+import { AutomergeUrl, DocumentId, PeerId } from "../types.js"
 import { DocSynchronizer } from "./DocSynchronizer.js"
 import { Synchronizer } from "./Synchronizer.js"
 
@@ -15,22 +15,26 @@ export class CollectionSynchronizer extends Synchronizer {
   #peers: Set<PeerId> = new Set()
 
   /** A map of documentIds to their synchronizers */
-  #docSynchronizers: Record<DocumentId, DocSynchronizer> = {}
+  /** @hidden */
+  docSynchronizers: Record<DocumentId, DocSynchronizer> = {}
 
   /** Used to determine if the document is know to the Collection and a synchronizer exists or is being set up */
   #docSetUp: Record<DocumentId, boolean> = {}
 
-  constructor(private repo: Repo) {
+  #denylist: DocumentId[]
+
+  constructor(private repo: Repo, denylist: AutomergeUrl[] = []) {
     super()
+    this.#denylist = denylist.map(url => parseAutomergeUrl(url).documentId)
   }
 
   /** Returns a synchronizer for the given document, creating one if it doesn't already exist. */
   #fetchDocSynchronizer(documentId: DocumentId) {
-    if (!this.#docSynchronizers[documentId]) {
+    if (!this.docSynchronizers[documentId]) {
       const handle = this.repo.find(stringifyAutomergeUrl({ documentId }))
-      this.#docSynchronizers[documentId] = this.#initDocSynchronizer(handle)
+      this.docSynchronizers[documentId] = this.#initDocSynchronizer(handle)
     }
-    return this.#docSynchronizers[documentId]
+    return this.docSynchronizers[documentId]
   }
 
   /** Creates a new docSynchronizer and sets it up to propagate messages */
@@ -57,6 +61,7 @@ export class CollectionSynchronizer extends Synchronizer {
     docSynchronizer.on("message", event => this.emit("message", event))
     docSynchronizer.on("open-doc", event => this.emit("open-doc", event))
     docSynchronizer.on("sync-state", event => this.emit("sync-state", event))
+    docSynchronizer.on("metrics", event => this.emit("metrics", event))
     return docSynchronizer
   }
 
@@ -89,6 +94,19 @@
       throw new Error("received a message with an invalid documentId")
     }
 
+    if (this.#denylist.includes(documentId)) {
+      this.emit("metrics", {
+        type: "doc-denied",
+        documentId,
+      })
+      this.emit("message", {
+        type: "doc-unavailable",
+        documentId,
+        targetId: message.senderId,
+      })
+      return
+    }
+
    this.#docSetUp[documentId] = true

    const docSynchronizer = this.#fetchDocSynchronizer(documentId)
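A denied document is answered with a `doc-unavailable` message (so the requesting peer gives up cleanly) and surfaced as a `doc-denied` metrics event. The denylist reaches the synchronizer through the `Repo` constructor; a sketch, assuming the `denylist` repo option introduced in this release and using a made-up document URL:

```ts
import { Repo } from "@automerge/automerge-repo"
import type { AutomergeUrl } from "@automerge/automerge-repo"

// Hypothetical document URL, for illustration only
const blockedDoc = "automerge:2j9knpCseyhnK8izDmLpGP5WMdZQ" as AutomergeUrl

const repo = new Repo({
  network: [],
  denylist: [blockedDoc], // assumption: forwarded to CollectionSynchronizer as above
})
```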
@@ -131,7 +149,7 @@
     }
 
     this.#peers.add(peerId)
-    for (const docSynchronizer of Object.values(this.#docSynchronizers)) {
+    for (const docSynchronizer of Object.values(this.docSynchronizers)) {
       const { documentId } = docSynchronizer
       void this.repo.sharePolicy(peerId, documentId).then(okToShare => {
         if (okToShare) docSynchronizer.beginSync([peerId])
@@ -144,7 +162,7 @@
     log(`removing peer ${peerId}`)
     this.#peers.delete(peerId)
 
-    for (const docSynchronizer of Object.values(this.#docSynchronizers)) {
+    for (const docSynchronizer of Object.values(this.docSynchronizers)) {
       docSynchronizer.endSync(peerId)
     }
   }
@@ -153,4 +171,19 @@
   get peers(): PeerId[] {
     return Array.from(this.#peers)
   }
+
+  metrics(): {
+    [key: string]: {
+      peers: PeerId[]
+      size: { numOps: number; numChanges: number }
+    }
+  } {
+    return Object.fromEntries(
+      Object.entries(this.docSynchronizers).map(
+        ([documentId, synchronizer]) => {
+          return [documentId, synchronizer.metrics()]
+        }
+      )
+    )
+  }
 }
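`metrics()` returns a per-document snapshot keyed by document ID, combining each synchronizer's peer list with the document size reported by its handle. A sketch of consuming it; the structural stand-in is an assumption, since `CollectionSynchronizer` is an internal component of `Repo`:

```ts
type SyncMetricsSnapshot = {
  [documentId: string]: {
    peers: string[]
    size: { numOps: number; numChanges: number }
  }
}

// Stand-in for the repo's internal CollectionSynchronizer
declare const synchronizer: { metrics(): SyncMetricsSnapshot }

for (const [documentId, { peers, size }] of Object.entries(synchronizer.metrics())) {
  console.log(
    `${documentId}: ${peers.length} peer(s), ${size.numOps} ops in ${size.numChanges} changes`
  )
}
```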
package/src/synchronizer/DocSynchronizer.ts CHANGED
@@ -351,11 +351,19 @@ export class DocSynchronizer extends Synchronizer {
 
     this.#withSyncState(message.senderId, syncState => {
       this.#handle.update(doc => {
+        const start = performance.now()
         const [newDoc, newSyncState] = A.receiveSyncMessage(
           doc,
           syncState,
           message.data
         )
+        const end = performance.now()
+        this.emit("metrics", {
+          type: "receive-sync-message",
+          documentId: this.#handle.documentId,
+          durationMillis: end - start,
+          ...A.stats(doc),
+        })
 
         this.#setSyncState(message.senderId, newSyncState)
 
@@ -401,4 +409,11 @@
 
     this.#pendingSyncMessages = []
   }
+
+  metrics(): { peers: PeerId[]; size: { numOps: number; numChanges: number } } {
+    return {
+      peers: this.#peers,
+      size: this.#handle.metrics(),
+    }
+  }
 }
package/src/synchronizer/Synchronizer.ts CHANGED
@@ -15,6 +15,7 @@ export interface SynchronizerEvents {
   message: (payload: MessageContents) => void
   "sync-state": (payload: SyncStatePayload) => void
   "open-doc": (arg: OpenDocMessage) => void
+  metrics: (arg: DocSyncMetrics) => void
 }
 
 /** Notify the repo that the sync state has changed */
@@ -23,3 +24,16 @@ export interface SyncStatePayload {
   documentId: DocumentId
   syncState: SyncState
 }
+
+export type DocSyncMetrics =
+  | {
+      type: "receive-sync-message"
+      documentId: DocumentId
+      durationMillis: number
+      numOps: number
+      numChanges: number
+    }
+  | {
+      type: "doc-denied"
+      documentId: DocumentId
+    }
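Because `DocSyncMetrics` is a discriminated union, listeners narrow on `type`, and the timing and size fields are only visible on the `receive-sync-message` branch. A self-contained sketch (the type is copied locally rather than imported, since its export path isn't shown in this diff):

```ts
// Local copy of the union declared above, to keep the sketch self-contained
type DocSyncMetrics =
  | {
      type: "receive-sync-message"
      documentId: string
      durationMillis: number
      numOps: number
      numChanges: number
    }
  | { type: "doc-denied"; documentId: string }

function onMetrics(event: DocSyncMetrics) {
  switch (event.type) {
    case "receive-sync-message":
      // durationMillis/numOps/numChanges exist only on this branch
      console.log(`${event.documentId}: sync message applied in ${event.durationMillis}ms`)
      break
    case "doc-denied":
      console.warn(`${event.documentId}: document is on the denylist`)
      break
  }
}
```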
package/src/types.ts CHANGED
@@ -27,8 +27,11 @@ export type AnyDocumentId =
   | BinaryDocumentId
   | LegacyDocumentId
 
+// We need to define our own version of heads because the AutomergeHeads type is not bs58check encoded
+export type UrlHeads = string[] & { __automergeUrlHeads: unknown }
+
 /** A branded type for peer IDs */
 export type PeerId = string & { __peerId: true }
 
 /** A randomly generated string created when the {@link Repo} starts up */
-export type SessionId = string & { __SessionId: true }
+export type SessionId = string & { __sessionId: true }
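The brand means a plain `string[]` (or raw `A.Heads` hex hashes) won't type-check where `UrlHeads` is expected; values are meant to pass through `encodeHeads`, the helper imported in the `StorageSubsystem` hunk above. A sketch of the conversion, with import paths written as they appear inside the source tree:

```ts
import * as A from "@automerge/automerge/next"
import { encodeHeads } from "./AutomergeUrl.js" // path as used within the package source
import type { UrlHeads } from "./types.js"

declare const doc: A.Doc<{ foo: string }>

const raw: A.Heads = A.getHeads(doc) // raw hex hashes from Automerge
const encoded: UrlHeads = encodeHeads(raw) // bs58check-encoded, branded for URLs
```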
package/test/AutomergeUrl.test.ts CHANGED
@@ -3,9 +3,11 @@ import bs58check from "bs58check"
 import { describe, it } from "vitest"
 import {
   generateAutomergeUrl,
+  getHeadsFromUrl,
   isValidAutomergeUrl,
   parseAutomergeUrl,
   stringifyAutomergeUrl,
+  UrlHeads,
 } from "../src/AutomergeUrl.js"
 import type {
   AutomergeUrl,
@@ -102,3 +104,131 @@ describe("AutomergeUrl", () => {
     })
   })
 })
+
+describe("AutomergeUrl with heads", () => {
+  // Create some sample encoded heads for testing
+  const head1 = bs58check.encode(new Uint8Array([1, 2, 3, 4])) as string
+  const head2 = bs58check.encode(new Uint8Array([5, 6, 7, 8])) as string
+  const goodHeads = [head1, head2] as UrlHeads
+  const urlWithHeads = `${goodUrl}#${head1}|${head2}` as AutomergeUrl
+  const invalidHead = "not-base58-encoded"
+  const invalidHeads = [invalidHead] as UrlHeads
+
+  describe("stringifyAutomergeUrl", () => {
+    it("should stringify a url with heads", () => {
+      const url = stringifyAutomergeUrl({
+        documentId: goodDocumentId,
+        heads: goodHeads,
+      })
+      assert.strictEqual(url, urlWithHeads)
+    })
+
+    it("should throw if heads are not valid base58check", () => {
+      assert.throws(() =>
+        stringifyAutomergeUrl({
+          documentId: goodDocumentId,
+          heads: invalidHeads,
+        })
+      )
+    })
+  })
+
+  describe("parseAutomergeUrl", () => {
+    it("should parse a url with heads", () => {
+      const { documentId, heads } = parseAutomergeUrl(urlWithHeads)
+      assert.equal(documentId, goodDocumentId)
+      assert.deepEqual(heads, [head1, head2])
+    })
+
+    it("should parse a url without heads", () => {
+      const { documentId, heads } = parseAutomergeUrl(goodUrl)
+      assert.equal(documentId, goodDocumentId)
+      assert.equal(heads, undefined)
+    })
+
+    it("should throw on url with invalid heads encoding", () => {
+      const badUrl = `${goodUrl}#${invalidHead}` as AutomergeUrl
+      assert.throws(() => parseAutomergeUrl(badUrl))
+    })
+  })
+
+  describe("isValidAutomergeUrl", () => {
+    it("should return true for a valid url with heads", () => {
+      assert(isValidAutomergeUrl(urlWithHeads) === true)
+    })
+
+    it("should return false for a url with invalid heads", () => {
+      const badUrl = `${goodUrl}#${invalidHead}` as AutomergeUrl
+      assert(isValidAutomergeUrl(badUrl) === false)
+    })
+  })
+
+  describe("getHeadsFromUrl", () => {
+    it("should return heads from a valid url", () => {
+      const heads = getHeadsFromUrl(urlWithHeads)
+      assert.deepEqual(heads, [head1, head2])
+    })
+
+    it("should return undefined for url without heads", () => {
+      const heads = getHeadsFromUrl(goodUrl)
+      assert.equal(heads, undefined)
+    })
+  })
+  it("should handle a single head correctly", () => {
+    const urlWithOneHead = `${goodUrl}#${head1}` as AutomergeUrl
+    const { heads } = parseAutomergeUrl(urlWithOneHead)
+    assert.deepEqual(heads, [head1])
+  })
+
+  it("should round-trip urls with heads", () => {
+    const originalUrl = urlWithHeads
+    const parsed = parseAutomergeUrl(originalUrl)
+    const roundTripped = stringifyAutomergeUrl({
+      documentId: parsed.documentId,
+      heads: parsed.heads,
+    })
+    assert.equal(roundTripped, originalUrl)
+  })
+
+  describe("should reject malformed urls", () => {
+    it("should reject urls with trailing delimiter", () => {
+      assert(!isValidAutomergeUrl(`${goodUrl}#${head1}:` as AutomergeUrl))
+    })
+
+    it("should reject urls with empty head", () => {
+      assert(!isValidAutomergeUrl(`${goodUrl}#|${head1}` as AutomergeUrl))
+    })
+
+    it("should reject urls with multiple hash characters", () => {
+      assert(
+        !isValidAutomergeUrl(`${goodUrl}#${head1}#${head2}` as AutomergeUrl)
+      )
+    })
+  })
+})
+
+describe("empty heads section", () => {
+  it("should treat bare # as empty heads array", () => {
+    const urlWithEmptyHeads = `${goodUrl}#` as AutomergeUrl
+    const { heads } = parseAutomergeUrl(urlWithEmptyHeads)
+    assert.deepEqual(heads, [])
+  })
+
+  it("should round-trip empty heads array", () => {
+    const original = `${goodUrl}#` as AutomergeUrl
+    const parsed = parseAutomergeUrl(original)
+    const roundTripped = stringifyAutomergeUrl({
+      documentId: parsed.documentId,
+      heads: parsed.heads,
+    })
+    assert.equal(roundTripped, original)
+  })
+
+  it("should distinguish between no heads and empty heads", () => {
+    const noHeads = parseAutomergeUrl(goodUrl)
+    const emptyHeads = parseAutomergeUrl(`${goodUrl}#` as AutomergeUrl)
+
+    assert.equal(noHeads.heads, undefined)
+    assert.deepEqual(emptyHeads.heads, [])
+  })
+})
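Taken together, these tests pin down the URL grammar: `automerge:<documentId>`, optionally followed by `#` and one or more bs58check-encoded heads separated by `|`, where a bare `#` means "empty heads" rather than "no heads". A round-trip sketch using the package's public URL helpers:

```ts
import {
  generateAutomergeUrl,
  isValidAutomergeUrl,
  parseAutomergeUrl,
} from "@automerge/automerge-repo"

const url = generateAutomergeUrl() // automerge:<documentId>
const { documentId, heads } = parseAutomergeUrl(url)
console.log(heads) // undefined -- there is no "#" section at all

// A url pinned to heads looks like automerge:<documentId>#<head1>|<head2>;
// stringifyAutomergeUrl({ documentId, heads }) reproduces it exactly, and
// isValidAutomergeUrl rejects malformed heads sections, per the tests above.
console.log(isValidAutomergeUrl(url)) // true
```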
package/test/DocHandle.test.ts CHANGED
@@ -2,11 +2,16 @@ import * as A from "@automerge/automerge/next"
 import assert from "assert"
 import { decode } from "cbor-x"
 import { describe, it, vi } from "vitest"
-import { generateAutomergeUrl, parseAutomergeUrl } from "../src/AutomergeUrl.js"
+import {
+  encodeHeads,
+  generateAutomergeUrl,
+  parseAutomergeUrl,
+} from "../src/AutomergeUrl.js"
 import { eventPromise } from "../src/helpers/eventPromise.js"
 import { pause } from "../src/helpers/pause.js"
 import { DocHandle, DocHandleChangePayload } from "../src/index.js"
 import { TestDoc } from "./types.js"
+import { UNLOADED } from "../src/DocHandle.js"
 
 describe("DocHandle", () => {
   const TEST_ID = parseAutomergeUrl(generateAutomergeUrl()).documentId
@@ -68,12 +73,21 @@ describe("DocHandle", () => {
     assert.equal(doc?.foo, "bar")
   })
 
+  /** HISTORY TRAVERSAL
+   * This API is relatively alpha-ish but we're already
+   * doing things in our own apps that are fairly ambitious
+   * by routing around to a lower-level API.
+   * This is an attempt to wrap up the existing practice
+   * in a slightly more supportable set of APIs but should be
+   * considered provisional: expect further improvements.
+   */
+
   it("should return the heads when requested", async () => {
     const handle = setup()
     handle.change(d => (d.foo = "bar"))
     assert.equal(handle.isReady(), true)
 
-    const heads = A.getHeads(handle.docSync())
+    const heads = encodeHeads(A.getHeads(handle.docSync()))
     assert.notDeepEqual(handle.heads(), [])
     assert.deepEqual(heads, handle.heads())
   })
@@ -84,6 +98,156 @@
     assert.deepEqual(handle.heads(), undefined)
   })
 
+  it("should return the history when requested", async () => {
+    const handle = setup()
+    handle.change(d => (d.foo = "bar"))
+    handle.change(d => (d.foo = "baz"))
+    assert.equal(handle.isReady(), true)
+
+    const history = handle.history()
+    assert.deepEqual(handle.history().length, 2)
+  })
+
+  it("should return a commit from the history", async () => {
+    const handle = setup()
+    handle.change(d => (d.foo = "zero"))
+    handle.change(d => (d.foo = "one"))
+    handle.change(d => (d.foo = "two"))
+    handle.change(d => (d.foo = "three"))
+    assert.equal(handle.isReady(), true)
+
+    const history = handle.history()
+    const viewHandle = handle.view(history[1])
+    assert.deepEqual(await viewHandle.doc(), { foo: "one" })
+  })
+
+  it("should support fixed heads from construction", async () => {
+    const handle = setup()
+    handle.change(d => (d.foo = "zero"))
+    handle.change(d => (d.foo = "one"))
+
+    const history = handle.history()
+    const viewHandle = new DocHandle<TestDoc>(TEST_ID, { heads: history[0] })
+    viewHandle.update(() => A.clone(handle.docSync()!))
+    viewHandle.doneLoading()
+
+    assert.deepEqual(await viewHandle.doc(), { foo: "zero" })
+  })
+
+  it("should prevent changes on fixed-heads handles", async () => {
+    const handle = setup()
+    handle.change(d => (d.foo = "zero"))
+    const viewHandle = handle.view(handle.heads()!)
+
+    assert.throws(() => viewHandle.change(d => (d.foo = "one")))
+    assert.throws(() =>
+      viewHandle.changeAt(handle.heads()!, d => (d.foo = "one"))
+    )
+    assert.throws(() => viewHandle.merge(handle))
+  })
+
+  it("should return fixed heads from heads()", async () => {
+    const handle = setup()
+    handle.change(d => (d.foo = "zero"))
+    const originalHeads = handle.heads()!
+
+    handle.change(d => (d.foo = "one"))
+    const viewHandle = handle.view(originalHeads)
+
+    assert.deepEqual(viewHandle.heads(), originalHeads)
+    assert.notDeepEqual(viewHandle.heads(), handle.heads())
+  })
+
+  it("should return diffs", async () => {
+    const handle = setup()
+    handle.change(d => (d.foo = "zero"))
+    handle.change(d => (d.foo = "one"))
+    handle.change(d => (d.foo = "two"))
+    handle.change(d => (d.foo = "three"))
+    assert.equal(handle.isReady(), true)
+
+    const history = handle.history()
+    const patches = handle.diff(history[1])
+    assert.deepEqual(patches, [
+      { action: "put", path: ["foo"], value: "" },
+      { action: "splice", path: ["foo", 0], value: "one" },
+    ])
+  })
+
+  it("should support arbitrary diffs too", async () => {
+    const handle = setup()
+    handle.change(d => (d.foo = "zero"))
+    handle.change(d => (d.foo = "one"))
+    handle.change(d => (d.foo = "two"))
+    handle.change(d => (d.foo = "three"))
+    assert.equal(handle.isReady(), true)
+
+    const history = handle.history()
+    const patches = handle.diff(history[1], history[3])
+    assert.deepEqual(patches, [
+      { action: "put", path: ["foo"], value: "" },
+      { action: "splice", path: ["foo", 0], value: "three" },
+    ])
+    const backPatches = handle.diff(history[3], history[1])
+    assert.deepEqual(backPatches, [
+      { action: "put", path: ["foo"], value: "" },
+      { action: "splice", path: ["foo", 0], value: "one" },
+    ])
+  })
+
+  it("should support diffing against another handle", async () => {
+    const handle = setup()
+    handle.change(d => (d.foo = "zero"))
+    const viewHandle = handle.view(handle.heads()!)
+
+    handle.change(d => (d.foo = "one"))
+
+    const patches = viewHandle.diff(handle)
+    assert.deepEqual(patches, [
+      { action: "put", path: ["foo"], value: "" },
+      { action: "splice", path: ["foo", 0], value: "one" },
+    ])
+  })
+
+  // TODO: alexg -- should i remove this test? should this fail or no?
+  it.skip("should fail diffing against unrelated handles", async () => {
+    const handle1 = setup()
+    const handle2 = setup()
+
+    handle1.change(d => (d.foo = "zero"))
+    handle2.change(d => (d.foo = "one"))
+
+    assert.throws(() => handle1.diff(handle2))
+  })
+
+  it("should allow direct access to decoded changes", async () => {
+    const handle = setup()
+    const time = Date.now()
+    handle.change(d => (d.foo = "foo"), { message: "commitMessage" })
+    assert.equal(handle.isReady(), true)
+
+    const metadata = handle.metadata()
+    assert.deepEqual(metadata.message, "commitMessage")
+    // NOTE: I'm not testing time because of https://github.com/automerge/automerge/issues/965
+    // but it does round-trip successfully!
+  })
+
+  it("should allow direct access to a specific decoded change", async () => {
+    const handle = setup()
+    const time = Date.now()
+    handle.change(d => (d.foo = "foo"), { message: "commitMessage" })
+    handle.change(d => (d.foo = "foo"), { message: "commitMessage2" })
+    handle.change(d => (d.foo = "foo"), { message: "commitMessage3" })
+    handle.change(d => (d.foo = "foo"), { message: "commitMessage4" })
+    assert.equal(handle.isReady(), true)
+
+    const history = handle.history()
+    const metadata = handle.metadata(history[0][0])
+    assert.deepEqual(metadata.message, "commitMessage")
+    // NOTE: I'm not testing time because of https://github.com/automerge/automerge/issues/965
+    // but it does round-trip successfully!
+  })
+
   /**
    * Once there's a Repo#stop API this case should be covered in accompanying
    * tests and the following test removed.
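Read together, these tests outline the provisional history-traversal surface on `DocHandle`: `history()` returns one heads entry per change, `view(heads)` yields a read-only handle pinned at those heads, `diff(from, to?)` produces patches between states, and `metadata(changeHash?)` exposes decoded change metadata. A usage sketch inferred from the tests (assuming an existing `Repo` instance):

```ts
import { Repo } from "@automerge/automerge-repo"

const repo = new Repo({ network: [] })
const handle = repo.create<{ foo: string }>()
handle.change(d => (d.foo = "one"), { message: "first" })
handle.change(d => (d.foo = "two"), { message: "second" })

const history = handle.history() // one heads entry per change, oldest first
const past = handle.view(history[0]) // read-only handle pinned at the first change
console.log(past.docSync()) // { foo: "one" }

const patches = handle.diff(history[0]) // patches from there to the current doc
const meta = handle.metadata(history[0][0]) // decoded metadata ({ message: "first", ... })
```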
@@ -325,6 +489,49 @@
     assert.equal(handle.isDeleted(), true)
   })
 
+  it("should clear document reference when unloaded", async () => {
+    const handle = setup()
+
+    handle.change(doc => {
+      doc.foo = "bar"
+    })
+    const doc = await handle.doc()
+    assert.equal(doc?.foo, "bar")
+
+    handle.unload()
+    assert.equal(handle.isUnloaded(), true)
+
+    const clearedDoc = await handle.doc([UNLOADED])
+    assert.notEqual(clearedDoc?.foo, "bar")
+  })
+
+  it("should allow reloading after unloading", async () => {
+    const handle = setup()
+
+    handle.change(doc => {
+      doc.foo = "bar"
+    })
+    const doc = await handle.doc()
+    assert.equal(doc?.foo, "bar")
+
+    handle.unload()
+
+    // reload to transition from unloaded to loading
+    handle.reload()
+
+    // simulate requesting from the network
+    handle.request()
+
+    // simulate updating from the network
+    handle.update(doc => {
+      return A.change(doc, d => (d.foo = "bar"))
+    })
+
+    const reloadedDoc = await handle.doc()
+    assert.equal(handle.isReady(), true)
+    assert.equal(reloadedDoc?.foo, "bar")
+  })
+
   it("should allow changing at old heads", async () => {
     const handle = setup()
 
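Unloading drops the in-memory document while keeping the handle alive; `reload()` puts the handle back into the loading state so storage or the network can repopulate it. A condensed walk through the lifecycle exercised above, reusing this test file's `A` and `assert` bindings:

```ts
// handle is a ready DocHandle<TestDoc> containing { foo: "bar" }
handle.unload() // drop the in-memory document
assert.equal(handle.isUnloaded(), true)

handle.reload() // transition back to "loading"
handle.request() // simulate asking the network for the document
handle.update(doc => A.change(doc, d => (d.foo = "bar"))) // simulate the reply

const doc = await handle.doc() // resolves once the handle is ready again
assert.equal(doc?.foo, "bar")
```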
package/test/DocSynchronizer.test.ts CHANGED
@@ -1,7 +1,11 @@
 import assert from "assert"
 import { describe, it } from "vitest"
 import { next as Automerge } from "@automerge/automerge"
-import { generateAutomergeUrl, parseAutomergeUrl } from "../src/AutomergeUrl.js"
+import {
+  encodeHeads,
+  generateAutomergeUrl,
+  parseAutomergeUrl,
+} from "../src/AutomergeUrl.js"
 import { DocHandle } from "../src/DocHandle.js"
 import { eventPromise } from "../src/helpers/eventPromise.js"
 import {
@@ -67,11 +71,14 @@
 
     assert.equal(message1.peerId, "alice")
     assert.equal(message1.documentId, handle.documentId)
-    assert.deepEqual(message1.syncState.lastSentHeads, [])
+    assert.deepStrictEqual(message1.syncState.lastSentHeads, [])
 
     assert.equal(message2.peerId, "alice")
     assert.equal(message2.documentId, handle.documentId)
-    assert.deepEqual(message2.syncState.lastSentHeads, handle.heads())
+    assert.deepStrictEqual(
+      encodeHeads(message2.syncState.lastSentHeads),
+      handle.heads()
+    )
   })
 
   it("still syncs with a peer after it disconnects and reconnects", async () => {