@automerge/automerge-repo 2.0.0-alpha.2 → 2.0.0-alpha.22

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (76)
  1. package/README.md +5 -6
  2. package/dist/AutomergeUrl.d.ts +17 -5
  3. package/dist/AutomergeUrl.d.ts.map +1 -1
  4. package/dist/AutomergeUrl.js +71 -24
  5. package/dist/DocHandle.d.ts +89 -20
  6. package/dist/DocHandle.d.ts.map +1 -1
  7. package/dist/DocHandle.js +189 -28
  8. package/dist/FindProgress.d.ts +30 -0
  9. package/dist/FindProgress.d.ts.map +1 -0
  10. package/dist/FindProgress.js +1 -0
  11. package/dist/RemoteHeadsSubscriptions.d.ts +4 -5
  12. package/dist/RemoteHeadsSubscriptions.d.ts.map +1 -1
  13. package/dist/RemoteHeadsSubscriptions.js +4 -1
  14. package/dist/Repo.d.ts +44 -6
  15. package/dist/Repo.d.ts.map +1 -1
  16. package/dist/Repo.js +226 -87
  17. package/dist/entrypoints/fullfat.d.ts +1 -0
  18. package/dist/entrypoints/fullfat.d.ts.map +1 -1
  19. package/dist/entrypoints/fullfat.js +1 -2
  20. package/dist/helpers/abortable.d.ts +39 -0
  21. package/dist/helpers/abortable.d.ts.map +1 -0
  22. package/dist/helpers/abortable.js +45 -0
  23. package/dist/helpers/bufferFromHex.d.ts +3 -0
  24. package/dist/helpers/bufferFromHex.d.ts.map +1 -0
  25. package/dist/helpers/bufferFromHex.js +13 -0
  26. package/dist/helpers/headsAreSame.d.ts +2 -2
  27. package/dist/helpers/headsAreSame.d.ts.map +1 -1
  28. package/dist/helpers/mergeArrays.d.ts +1 -1
  29. package/dist/helpers/mergeArrays.d.ts.map +1 -1
  30. package/dist/helpers/tests/network-adapter-tests.d.ts.map +1 -1
  31. package/dist/helpers/tests/network-adapter-tests.js +13 -13
  32. package/dist/helpers/tests/storage-adapter-tests.d.ts +2 -2
  33. package/dist/helpers/tests/storage-adapter-tests.d.ts.map +1 -1
  34. package/dist/helpers/tests/storage-adapter-tests.js +25 -48
  35. package/dist/index.d.ts +1 -1
  36. package/dist/index.d.ts.map +1 -1
  37. package/dist/index.js +1 -1
  38. package/dist/storage/StorageSubsystem.d.ts +11 -1
  39. package/dist/storage/StorageSubsystem.d.ts.map +1 -1
  40. package/dist/storage/StorageSubsystem.js +20 -4
  41. package/dist/synchronizer/CollectionSynchronizer.d.ts +17 -3
  42. package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
  43. package/dist/synchronizer/CollectionSynchronizer.js +43 -18
  44. package/dist/synchronizer/DocSynchronizer.d.ts +10 -2
  45. package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -1
  46. package/dist/synchronizer/DocSynchronizer.js +30 -8
  47. package/dist/synchronizer/Synchronizer.d.ts +11 -0
  48. package/dist/synchronizer/Synchronizer.d.ts.map +1 -1
  49. package/dist/types.d.ts +4 -1
  50. package/dist/types.d.ts.map +1 -1
  51. package/fuzz/fuzz.ts +3 -3
  52. package/package.json +3 -3
  53. package/src/AutomergeUrl.ts +101 -26
  54. package/src/DocHandle.ts +256 -38
  55. package/src/FindProgress.ts +48 -0
  56. package/src/RemoteHeadsSubscriptions.ts +11 -9
  57. package/src/Repo.ts +310 -95
  58. package/src/entrypoints/fullfat.ts +1 -2
  59. package/src/helpers/abortable.ts +61 -0
  60. package/src/helpers/bufferFromHex.ts +14 -0
  61. package/src/helpers/headsAreSame.ts +2 -2
  62. package/src/helpers/tests/network-adapter-tests.ts +14 -13
  63. package/src/helpers/tests/storage-adapter-tests.ts +44 -86
  64. package/src/index.ts +2 -0
  65. package/src/storage/StorageSubsystem.ts +29 -4
  66. package/src/synchronizer/CollectionSynchronizer.ts +56 -19
  67. package/src/synchronizer/DocSynchronizer.ts +34 -9
  68. package/src/synchronizer/Synchronizer.ts +14 -0
  69. package/src/types.ts +4 -1
  70. package/test/AutomergeUrl.test.ts +130 -0
  71. package/test/CollectionSynchronizer.test.ts +4 -4
  72. package/test/DocHandle.test.ts +189 -29
  73. package/test/DocSynchronizer.test.ts +10 -3
  74. package/test/Repo.test.ts +377 -191
  75. package/test/StorageSubsystem.test.ts +17 -0
  76. package/test/remoteHeads.test.ts +27 -12
@@ -15,6 +15,7 @@ export interface SynchronizerEvents {
15
15
  message: (payload: MessageContents) => void
16
16
  "sync-state": (payload: SyncStatePayload) => void
17
17
  "open-doc": (arg: OpenDocMessage) => void
18
+ metrics: (arg: DocSyncMetrics) => void
18
19
  }
19
20
 
20
21
  /** Notify the repo that the sync state has changed */
@@ -23,3 +24,16 @@ export interface SyncStatePayload {
23
24
  documentId: DocumentId
24
25
  syncState: SyncState
25
26
  }
27
+
28
+ export type DocSyncMetrics =
29
+ | {
30
+ type: "receive-sync-message"
31
+ documentId: DocumentId
32
+ durationMillis: number
33
+ numOps: number
34
+ numChanges: number
35
+ }
36
+ | {
37
+ type: "doc-denied"
38
+ documentId: DocumentId
39
+ }
package/src/types.ts CHANGED
@@ -27,8 +27,11 @@ export type AnyDocumentId =
27
27
  | BinaryDocumentId
28
28
  | LegacyDocumentId
29
29
 
30
+ // We need to define our own version of heads because the AutomergeHeads type is not bs58check encoded
31
+ export type UrlHeads = string[] & { __automergeUrlHeads: unknown }
32
+
30
33
  /** A branded type for peer IDs */
31
34
  export type PeerId = string & { __peerId: true }
32
35
 
33
36
  /** A randomly generated string created when the {@link Repo} starts up */
34
- export type SessionId = string & { __SessionId: true }
37
+ export type SessionId = string & { __sessionId: true }
@@ -3,9 +3,11 @@ import bs58check from "bs58check"
3
3
  import { describe, it } from "vitest"
4
4
  import {
5
5
  generateAutomergeUrl,
6
+ getHeadsFromUrl,
6
7
  isValidAutomergeUrl,
7
8
  parseAutomergeUrl,
8
9
  stringifyAutomergeUrl,
10
+ UrlHeads,
9
11
  } from "../src/AutomergeUrl.js"
10
12
  import type {
11
13
  AutomergeUrl,
@@ -102,3 +104,131 @@ describe("AutomergeUrl", () => {
102
104
  })
103
105
  })
104
106
  })
107
+
108
+ describe("AutomergeUrl with heads", () => {
109
+ // Create some sample encoded heads for testing
110
+ const head1 = bs58check.encode(new Uint8Array([1, 2, 3, 4])) as string
111
+ const head2 = bs58check.encode(new Uint8Array([5, 6, 7, 8])) as string
112
+ const goodHeads = [head1, head2] as UrlHeads
113
+ const urlWithHeads = `${goodUrl}#${head1}|${head2}` as AutomergeUrl
114
+ const invalidHead = "not-base58-encoded"
115
+ const invalidHeads = [invalidHead] as UrlHeads
116
+
117
+ describe("stringifyAutomergeUrl", () => {
118
+ it("should stringify a url with heads", () => {
119
+ const url = stringifyAutomergeUrl({
120
+ documentId: goodDocumentId,
121
+ heads: goodHeads,
122
+ })
123
+ assert.strictEqual(url, urlWithHeads)
124
+ })
125
+
126
+ it("should throw if heads are not valid base58check", () => {
127
+ assert.throws(() =>
128
+ stringifyAutomergeUrl({
129
+ documentId: goodDocumentId,
130
+ heads: invalidHeads,
131
+ })
132
+ )
133
+ })
134
+ })
135
+
136
+ describe("parseAutomergeUrl", () => {
137
+ it("should parse a url with heads", () => {
138
+ const { documentId, heads } = parseAutomergeUrl(urlWithHeads)
139
+ assert.equal(documentId, goodDocumentId)
140
+ assert.deepEqual(heads, [head1, head2])
141
+ })
142
+
143
+ it("should parse a url without heads", () => {
144
+ const { documentId, heads } = parseAutomergeUrl(goodUrl)
145
+ assert.equal(documentId, goodDocumentId)
146
+ assert.equal(heads, undefined)
147
+ })
148
+
149
+ it("should throw on url with invalid heads encoding", () => {
150
+ const badUrl = `${goodUrl}#${invalidHead}` as AutomergeUrl
151
+ assert.throws(() => parseAutomergeUrl(badUrl))
152
+ })
153
+ })
154
+
155
+ describe("isValidAutomergeUrl", () => {
156
+ it("should return true for a valid url with heads", () => {
157
+ assert(isValidAutomergeUrl(urlWithHeads) === true)
158
+ })
159
+
160
+ it("should return false for a url with invalid heads", () => {
161
+ const badUrl = `${goodUrl}#${invalidHead}` as AutomergeUrl
162
+ assert(isValidAutomergeUrl(badUrl) === false)
163
+ })
164
+ })
165
+
166
+ describe("getHeadsFromUrl", () => {
167
+ it("should return heads from a valid url", () => {
168
+ const heads = getHeadsFromUrl(urlWithHeads)
169
+ assert.deepEqual(heads, [head1, head2])
170
+ })
171
+
172
+ it("should return undefined for url without heads", () => {
173
+ const heads = getHeadsFromUrl(goodUrl)
174
+ assert.equal(heads, undefined)
175
+ })
176
+ })
177
+ it("should handle a single head correctly", () => {
178
+ const urlWithOneHead = `${goodUrl}#${head1}` as AutomergeUrl
179
+ const { heads } = parseAutomergeUrl(urlWithOneHead)
180
+ assert.deepEqual(heads, [head1])
181
+ })
182
+
183
+ it("should round-trip urls with heads", () => {
184
+ const originalUrl = urlWithHeads
185
+ const parsed = parseAutomergeUrl(originalUrl)
186
+ const roundTripped = stringifyAutomergeUrl({
187
+ documentId: parsed.documentId,
188
+ heads: parsed.heads,
189
+ })
190
+ assert.equal(roundTripped, originalUrl)
191
+ })
192
+
193
+ describe("should reject malformed urls", () => {
194
+ it("should reject urls with trailing delimiter", () => {
195
+ assert(!isValidAutomergeUrl(`${goodUrl}#${head1}:` as AutomergeUrl))
196
+ })
197
+
198
+ it("should reject urls with empty head", () => {
199
+ assert(!isValidAutomergeUrl(`${goodUrl}#|${head1}` as AutomergeUrl))
200
+ })
201
+
202
+ it("should reject urls with multiple hash characters", () => {
203
+ assert(
204
+ !isValidAutomergeUrl(`${goodUrl}#${head1}#${head2}` as AutomergeUrl)
205
+ )
206
+ })
207
+ })
208
+ })
209
+
210
+ describe("empty heads section", () => {
211
+ it("should treat bare # as empty heads array", () => {
212
+ const urlWithEmptyHeads = `${goodUrl}#` as AutomergeUrl
213
+ const { heads } = parseAutomergeUrl(urlWithEmptyHeads)
214
+ assert.deepEqual(heads, [])
215
+ })
216
+
217
+ it("should round-trip empty heads array", () => {
218
+ const original = `${goodUrl}#` as AutomergeUrl
219
+ const parsed = parseAutomergeUrl(original)
220
+ const roundTripped = stringifyAutomergeUrl({
221
+ documentId: parsed.documentId,
222
+ heads: parsed.heads,
223
+ })
224
+ assert.equal(roundTripped, original)
225
+ })
226
+
227
+ it("should distinguish between no heads and empty heads", () => {
228
+ const noHeads = parseAutomergeUrl(goodUrl)
229
+ const emptyHeads = parseAutomergeUrl(`${goodUrl}#` as AutomergeUrl)
230
+
231
+ assert.equal(noHeads.heads, undefined)
232
+ assert.deepEqual(emptyHeads.heads, [])
233
+ })
234
+ })
@@ -28,13 +28,13 @@ describe("CollectionSynchronizer", () => {
28
28
  done()
29
29
  })
30
30
 
31
- synchronizer.addDocument(handle.documentId)
31
+ synchronizer.addDocument(handle)
32
32
  }))
33
33
 
34
34
  it("starts synchronizing existing documents when a peer is added", () =>
35
35
  new Promise<void>(done => {
36
36
  const handle = repo.create()
37
- synchronizer.addDocument(handle.documentId)
37
+ synchronizer.addDocument(handle)
38
38
  synchronizer.once("message", event => {
39
39
  const { targetId, documentId } = event as SyncMessage
40
40
  assert(targetId === "peer1")
@@ -50,7 +50,7 @@ describe("CollectionSynchronizer", () => {
50
50
 
51
51
  repo.sharePolicy = async (peerId: PeerId) => peerId !== "peer1"
52
52
 
53
- synchronizer.addDocument(handle.documentId)
53
+ synchronizer.addDocument(handle)
54
54
  synchronizer.once("message", () => {
55
55
  reject(new Error("Should not have sent a message"))
56
56
  })
@@ -71,7 +71,7 @@ describe("CollectionSynchronizer", () => {
71
71
  reject(new Error("Should not have sent a message"))
72
72
  })
73
73
 
74
- synchronizer.addDocument(handle.documentId)
74
+ synchronizer.addDocument(handle)
75
75
 
76
76
  setTimeout(done)
77
77
  }))
@@ -1,8 +1,12 @@
1
1
  import * as A from "@automerge/automerge/next"
2
2
  import assert from "assert"
3
3
  import { decode } from "cbor-x"
4
- import { describe, it, vi } from "vitest"
5
- import { generateAutomergeUrl, parseAutomergeUrl } from "../src/AutomergeUrl.js"
4
+ import { describe, expect, it, vi } from "vitest"
5
+ import {
6
+ encodeHeads,
7
+ generateAutomergeUrl,
8
+ parseAutomergeUrl,
9
+ } from "../src/AutomergeUrl.js"
6
10
  import { eventPromise } from "../src/helpers/eventPromise.js"
7
11
  import { pause } from "../src/helpers/pause.js"
8
12
  import { DocHandle, DocHandleChangePayload } from "../src/index.js"
@@ -34,7 +38,7 @@ describe("DocHandle", () => {
34
38
  handle.update(doc => docFromMockStorage(doc))
35
39
 
36
40
  assert.equal(handle.isReady(), true)
37
- const doc = await handle.doc()
41
+ const doc = handle.doc()
38
42
  assert.equal(doc?.foo, "bar")
39
43
  })
40
44
 
@@ -46,13 +50,13 @@ describe("DocHandle", () => {
46
50
  handle.update(doc => docFromMockStorage(doc))
47
51
 
48
52
  assert.equal(handle.isReady(), true)
49
- const doc = await handle.doc()
50
- assert.deepEqual(doc, handle.docSync())
53
+ const doc = handle.doc()
54
+ assert.deepEqual(doc, handle.doc())
51
55
  })
52
56
 
53
- it("should return undefined if we access the doc before ready", async () => {
57
+ it("should throw an exception if we access the doc before ready", async () => {
54
58
  const handle = new DocHandle<TestDoc>(TEST_ID)
55
- assert.equal(handle.docSync(), undefined)
59
+ assert.throws(() => handle.doc())
56
60
  })
57
61
 
58
62
  it("should not return a doc until ready", async () => {
@@ -62,26 +66,185 @@ describe("DocHandle", () => {
62
66
  // simulate loading from storage
63
67
  handle.update(doc => docFromMockStorage(doc))
64
68
 
65
- const doc = await handle.doc()
69
+ const doc = handle.doc()
66
70
 
67
71
  assert.equal(handle.isReady(), true)
68
72
  assert.equal(doc?.foo, "bar")
69
73
  })
70
74
 
75
+ /** HISTORY TRAVERSAL
76
+ * This API is relatively alpha-ish but we're already
77
+ * doing things in our own apps that are fairly ambitious
78
+ * by routing around to a lower-level API.
79
+ * This is an attempt to wrap up the existing practice
80
+ * in a slightly more supportable set of APIs but should be
81
+ * considered provisional: expect further improvements.
82
+ */
83
+
71
84
  it("should return the heads when requested", async () => {
72
85
  const handle = setup()
73
86
  handle.change(d => (d.foo = "bar"))
74
87
  assert.equal(handle.isReady(), true)
75
88
 
76
- const heads = A.getHeads(handle.docSync())
89
+ const heads = encodeHeads(A.getHeads(handle.doc()))
77
90
  assert.notDeepEqual(handle.heads(), [])
78
91
  assert.deepEqual(heads, handle.heads())
79
92
  })
80
93
 
81
- it("should return undefined if the heads aren't loaded", async () => {
94
+ it("should throw an if the heads aren't loaded", async () => {
82
95
  const handle = new DocHandle<TestDoc>(TEST_ID)
83
96
  assert.equal(handle.isReady(), false)
84
- assert.deepEqual(handle.heads(), undefined)
97
+ expect(() => handle.heads()).toThrow("DocHandle is not ready")
98
+ })
99
+
100
+ it("should return the history when requested", async () => {
101
+ const handle = setup()
102
+ handle.change(d => (d.foo = "bar"))
103
+ handle.change(d => (d.foo = "baz"))
104
+ assert.equal(handle.isReady(), true)
105
+
106
+ const history = handle.history()
107
+ assert.deepEqual(handle.history().length, 2)
108
+ })
109
+
110
+ it("should return a commit from the history", async () => {
111
+ const handle = setup()
112
+ handle.change(d => (d.foo = "zero"))
113
+ handle.change(d => (d.foo = "one"))
114
+ handle.change(d => (d.foo = "two"))
115
+ handle.change(d => (d.foo = "three"))
116
+ assert.equal(handle.isReady(), true)
117
+
118
+ const history = handle.history()
119
+ const viewHandle = handle.view(history[1])
120
+ assert.deepEqual(await viewHandle.doc(), { foo: "one" })
121
+ })
122
+
123
+ it("should support fixed heads from construction", async () => {
124
+ const handle = setup()
125
+ handle.change(d => (d.foo = "zero"))
126
+ handle.change(d => (d.foo = "one"))
127
+
128
+ const history = handle.history()
129
+ const viewHandle = new DocHandle<TestDoc>(TEST_ID, { heads: history[0] })
130
+ viewHandle.update(() => A.clone(handle.doc()!))
131
+ viewHandle.doneLoading()
132
+
133
+ assert.deepEqual(await viewHandle.doc(), { foo: "zero" })
134
+ })
135
+
136
+ it("should prevent changes on fixed-heads handles", async () => {
137
+ const handle = setup()
138
+ handle.change(d => (d.foo = "zero"))
139
+ const viewHandle = handle.view(handle.heads()!)
140
+
141
+ assert.throws(() => viewHandle.change(d => (d.foo = "one")))
142
+ assert.throws(() =>
143
+ viewHandle.changeAt(handle.heads()!, d => (d.foo = "one"))
144
+ )
145
+ assert.throws(() => viewHandle.merge(handle))
146
+ })
147
+
148
+ it("should return fixed heads from heads()", async () => {
149
+ const handle = setup()
150
+ handle.change(d => (d.foo = "zero"))
151
+ const originalHeads = handle.heads()!
152
+
153
+ handle.change(d => (d.foo = "one"))
154
+ const viewHandle = handle.view(originalHeads)
155
+
156
+ assert.deepEqual(viewHandle.heads(), originalHeads)
157
+ assert.notDeepEqual(viewHandle.heads(), handle.heads())
158
+ })
159
+
160
+ it("should return diffs", async () => {
161
+ const handle = setup()
162
+ handle.change(d => (d.foo = "zero"))
163
+ handle.change(d => (d.foo = "one"))
164
+ handle.change(d => (d.foo = "two"))
165
+ handle.change(d => (d.foo = "three"))
166
+ assert.equal(handle.isReady(), true)
167
+
168
+ const history = handle.history()
169
+ const patches = handle.diff(history[1])
170
+ assert.deepEqual(patches, [
171
+ { action: "put", path: ["foo"], value: "" },
172
+ { action: "splice", path: ["foo", 0], value: "one" },
173
+ ])
174
+ })
175
+
176
+ it("should support arbitrary diffs too", async () => {
177
+ const handle = setup()
178
+ handle.change(d => (d.foo = "zero"))
179
+ handle.change(d => (d.foo = "one"))
180
+ handle.change(d => (d.foo = "two"))
181
+ handle.change(d => (d.foo = "three"))
182
+ assert.equal(handle.isReady(), true)
183
+
184
+ const history = handle.history()
185
+ const patches = handle.diff(history[1], history[3])
186
+ assert.deepEqual(patches, [
187
+ { action: "put", path: ["foo"], value: "" },
188
+ { action: "splice", path: ["foo", 0], value: "three" },
189
+ ])
190
+ const backPatches = handle.diff(history[3], history[1])
191
+ assert.deepEqual(backPatches, [
192
+ { action: "put", path: ["foo"], value: "" },
193
+ { action: "splice", path: ["foo", 0], value: "one" },
194
+ ])
195
+ })
196
+
197
+ it("should support diffing against another handle", async () => {
198
+ const handle = setup()
199
+ handle.change(d => (d.foo = "zero"))
200
+ const viewHandle = handle.view(handle.heads()!)
201
+
202
+ handle.change(d => (d.foo = "one"))
203
+
204
+ const patches = viewHandle.diff(handle)
205
+ assert.deepEqual(patches, [
206
+ { action: "put", path: ["foo"], value: "" },
207
+ { action: "splice", path: ["foo", 0], value: "one" },
208
+ ])
209
+ })
210
+
211
+ // TODO: alexg -- should i remove this test? should this fail or no?
212
+ it.skip("should fail diffing against unrelated handles", async () => {
213
+ const handle1 = setup()
214
+ const handle2 = setup()
215
+
216
+ handle1.change(d => (d.foo = "zero"))
217
+ handle2.change(d => (d.foo = "one"))
218
+
219
+ assert.throws(() => handle1.diff(handle2))
220
+ })
221
+
222
+ it("should allow direct access to decoded changes", async () => {
223
+ const handle = setup()
224
+ const time = Date.now()
225
+ handle.change(d => (d.foo = "foo"), { message: "commitMessage" })
226
+ assert.equal(handle.isReady(), true)
227
+
228
+ const metadata = handle.metadata()
229
+ assert.deepEqual(metadata.message, "commitMessage")
230
+ // NOTE: I'm not testing time because of https://github.com/automerge/automerge/issues/965
231
+ // but it does round-trip successfully!
232
+ })
233
+
234
+ it("should allow direct access to a specific decoded change", async () => {
235
+ const handle = setup()
236
+ const time = Date.now()
237
+ handle.change(d => (d.foo = "foo"), { message: "commitMessage" })
238
+ handle.change(d => (d.foo = "foo"), { message: "commitMessage2" })
239
+ handle.change(d => (d.foo = "foo"), { message: "commitMessage3" })
240
+ handle.change(d => (d.foo = "foo"), { message: "commitMessage4" })
241
+ assert.equal(handle.isReady(), true)
242
+
243
+ const history = handle.history()
244
+ const metadata = handle.metadata(history[0][0])
245
+ assert.deepEqual(metadata.message, "commitMessage")
246
+ // NOTE: I'm not testing time because of https://github.com/automerge/automerge/issues/965
247
+ // but it does round-trip successfully!
85
248
  })
86
249
 
87
250
  /**
@@ -96,7 +259,7 @@ describe("DocHandle", () => {
96
259
  const handle = new DocHandle<TestDoc>(TEST_ID)
97
260
  assert.equal(handle.isReady(), false)
98
261
 
99
- handle.doc()
262
+ handle.legacyAsyncDoc()
100
263
 
101
264
  assert(vi.getTimerCount() > timerCount)
102
265
 
@@ -122,7 +285,7 @@ describe("DocHandle", () => {
122
285
  assert.equal(handle.isReady(), true)
123
286
  handle.change(d => (d.foo = "pizza"))
124
287
 
125
- const doc = await handle.doc()
288
+ const doc = handle.doc()
126
289
  assert.equal(doc?.foo, "pizza")
127
290
  })
128
291
 
@@ -132,7 +295,9 @@ describe("DocHandle", () => {
132
295
  // we don't have it in storage, so we request it from the network
133
296
  handle.request()
134
297
 
135
- assert.equal(handle.docSync(), undefined)
298
+ await expect(() => {
299
+ handle.doc()
300
+ }).toThrowError("DocHandle is not ready")
136
301
  assert.equal(handle.isReady(), false)
137
302
  assert.throws(() => handle.change(_ => {}))
138
303
  })
@@ -148,7 +313,7 @@ describe("DocHandle", () => {
148
313
  return A.change(doc, d => (d.foo = "bar"))
149
314
  })
150
315
 
151
- const doc = await handle.doc()
316
+ const doc = handle.doc()
152
317
  assert.equal(handle.isReady(), true)
153
318
  assert.equal(doc?.foo, "bar")
154
319
  })
@@ -164,7 +329,7 @@ describe("DocHandle", () => {
164
329
  doc.foo = "bar"
165
330
  })
166
331
 
167
- const doc = await handle.doc()
332
+ const doc = handle.doc()
168
333
  assert.equal(doc?.foo, "bar")
169
334
 
170
335
  const changePayload = await p
@@ -189,7 +354,7 @@ describe("DocHandle", () => {
189
354
 
190
355
  const p = new Promise<void>(resolve =>
191
356
  handle.once("change", ({ handle, doc }) => {
192
- assert.equal(handle.docSync()?.foo, doc.foo)
357
+ assert.equal(handle.doc()?.foo, doc.foo)
193
358
 
194
359
  resolve()
195
360
  })
@@ -226,7 +391,7 @@ describe("DocHandle", () => {
226
391
  doc.foo = "baz"
227
392
  })
228
393
 
229
- const doc = await handle.doc()
394
+ const doc = handle.doc()
230
395
  assert.equal(doc?.foo, "baz")
231
396
 
232
397
  return p
@@ -241,7 +406,7 @@ describe("DocHandle", () => {
241
406
  })
242
407
 
243
408
  await p
244
- const doc = await handle.doc()
409
+ const doc = handle.doc()
245
410
  assert.equal(doc?.foo, "bar")
246
411
  })
247
412
 
@@ -261,11 +426,7 @@ describe("DocHandle", () => {
261
426
  // set docHandle time out after 5 ms
262
427
  const handle = new DocHandle<TestDoc>(TEST_ID, { timeoutDelay: 5 })
263
428
 
264
- const doc = await handle.doc()
265
-
266
- assert.equal(doc, undefined)
267
-
268
- assert.equal(handle.state, "unavailable")
429
+ expect(() => handle.doc()).toThrowError("DocHandle is not ready")
269
430
  })
270
431
 
271
432
  it("should not time out if the document is loaded in time", async () => {
@@ -276,11 +437,11 @@ describe("DocHandle", () => {
276
437
  handle.update(doc => docFromMockStorage(doc))
277
438
 
278
439
  // now it should not time out
279
- const doc = await handle.doc()
440
+ const doc = handle.doc()
280
441
  assert.equal(doc?.foo, "bar")
281
442
  })
282
443
 
283
- it("should be undefined if loading from the network times out", async () => {
444
+ it("should throw an exception if loading from the network times out", async () => {
284
445
  // set docHandle time out after 5 ms
285
446
  const handle = new DocHandle<TestDoc>(TEST_ID, { timeoutDelay: 5 })
286
447
 
@@ -290,8 +451,7 @@ describe("DocHandle", () => {
290
451
  // there's no update
291
452
  await pause(10)
292
453
 
293
- const doc = await handle.doc()
294
- assert.equal(doc, undefined)
454
+ expect(() => handle.doc()).toThrowError("DocHandle is not ready")
295
455
  })
296
456
 
297
457
  it("should not time out if the document is updated in time", async () => {
@@ -309,7 +469,7 @@ describe("DocHandle", () => {
309
469
  // now it should not time out
310
470
  await pause(5)
311
471
 
312
- const doc = await handle.doc()
472
+ const doc = handle.doc()
313
473
  assert.equal(doc?.foo, "bar")
314
474
  })
315
475
 
@@ -1,7 +1,11 @@
1
1
  import assert from "assert"
2
2
  import { describe, it } from "vitest"
3
3
  import { next as Automerge } from "@automerge/automerge"
4
- import { generateAutomergeUrl, parseAutomergeUrl } from "../src/AutomergeUrl.js"
4
+ import {
5
+ encodeHeads,
6
+ generateAutomergeUrl,
7
+ parseAutomergeUrl,
8
+ } from "../src/AutomergeUrl.js"
5
9
  import { DocHandle } from "../src/DocHandle.js"
6
10
  import { eventPromise } from "../src/helpers/eventPromise.js"
7
11
  import {
@@ -67,11 +71,14 @@ describe("DocSynchronizer", () => {
67
71
 
68
72
  assert.equal(message1.peerId, "alice")
69
73
  assert.equal(message1.documentId, handle.documentId)
70
- assert.deepEqual(message1.syncState.lastSentHeads, [])
74
+ assert.deepStrictEqual(message1.syncState.lastSentHeads, [])
71
75
 
72
76
  assert.equal(message2.peerId, "alice")
73
77
  assert.equal(message2.documentId, handle.documentId)
74
- assert.deepEqual(message2.syncState.lastSentHeads, handle.heads())
78
+ assert.deepStrictEqual(
79
+ encodeHeads(message2.syncState.lastSentHeads),
80
+ handle.heads()
81
+ )
75
82
  })
76
83
 
77
84
  it("still syncs with a peer after it disconnects and reconnects", async () => {