@automerge/automerge-repo 0.2.1 → 1.0.0-alpha.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. package/README.md +7 -24
  2. package/dist/DocCollection.d.ts +4 -4
  3. package/dist/DocCollection.d.ts.map +1 -1
  4. package/dist/DocCollection.js +25 -17
  5. package/dist/DocHandle.d.ts +46 -13
  6. package/dist/DocHandle.d.ts.map +1 -1
  7. package/dist/DocHandle.js +104 -53
  8. package/dist/DocUrl.d.ts +38 -18
  9. package/dist/DocUrl.d.ts.map +1 -1
  10. package/dist/DocUrl.js +63 -24
  11. package/dist/Repo.d.ts.map +1 -1
  12. package/dist/Repo.js +9 -9
  13. package/dist/helpers/headsAreSame.d.ts +2 -2
  14. package/dist/helpers/headsAreSame.d.ts.map +1 -1
  15. package/dist/helpers/headsAreSame.js +1 -4
  16. package/dist/helpers/tests/network-adapter-tests.js +10 -10
  17. package/dist/index.d.ts +3 -2
  18. package/dist/index.d.ts.map +1 -1
  19. package/dist/index.js +1 -0
  20. package/dist/network/NetworkAdapter.d.ts +2 -3
  21. package/dist/network/NetworkAdapter.d.ts.map +1 -1
  22. package/dist/network/NetworkSubsystem.d.ts +2 -3
  23. package/dist/network/NetworkSubsystem.d.ts.map +1 -1
  24. package/dist/network/NetworkSubsystem.js +9 -13
  25. package/dist/storage/StorageAdapter.d.ts +9 -5
  26. package/dist/storage/StorageAdapter.d.ts.map +1 -1
  27. package/dist/storage/StorageSubsystem.d.ts +4 -4
  28. package/dist/storage/StorageSubsystem.d.ts.map +1 -1
  29. package/dist/storage/StorageSubsystem.js +109 -31
  30. package/dist/synchronizer/CollectionSynchronizer.d.ts +1 -1
  31. package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
  32. package/dist/synchronizer/CollectionSynchronizer.js +5 -1
  33. package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -1
  34. package/dist/synchronizer/DocSynchronizer.js +6 -5
  35. package/dist/types.d.ts +6 -0
  36. package/dist/types.d.ts.map +1 -1
  37. package/package.json +8 -5
  38. package/src/DocCollection.ts +32 -22
  39. package/src/DocHandle.ts +119 -77
  40. package/src/DocUrl.ts +90 -0
  41. package/src/Repo.ts +10 -11
  42. package/src/helpers/headsAreSame.ts +3 -5
  43. package/src/helpers/tests/network-adapter-tests.ts +10 -10
  44. package/src/index.ts +7 -5
  45. package/src/network/NetworkAdapter.ts +2 -3
  46. package/src/network/NetworkSubsystem.ts +9 -14
  47. package/src/storage/StorageAdapter.ts +7 -5
  48. package/src/storage/StorageSubsystem.ts +133 -36
  49. package/src/synchronizer/CollectionSynchronizer.ts +10 -2
  50. package/src/synchronizer/DocSynchronizer.ts +7 -6
  51. package/src/types.ts +4 -1
  52. package/test/CollectionSynchronizer.test.ts +1 -1
  53. package/test/DocCollection.test.ts +3 -2
  54. package/test/DocHandle.test.ts +40 -35
  55. package/test/DocSynchronizer.test.ts +3 -2
  56. package/test/Repo.test.ts +134 -27
  57. package/test/StorageSubsystem.test.ts +13 -10
  58. package/test/helpers/DummyNetworkAdapter.ts +2 -2
  59. package/test/helpers/DummyStorageAdapter.ts +8 -4
@@ -1,5 +1,5 @@
1
1
  import { CollectionSynchronizer } from "../src/synchronizer/CollectionSynchronizer.js"
2
- import { ChannelId, DocCollection, DocumentId, PeerId } from "../src"
2
+ import { ChannelId, DocCollection, BinaryDocumentId, PeerId } from "../src"
3
3
  import assert from "assert"
4
4
  import { beforeEach } from "mocha"
5
5
  import { MessagePayload } from "../src/network/NetworkAdapter.js"
@@ -1,8 +1,9 @@
1
1
  import assert from "assert"
2
- import { DocCollection, DocumentId } from "../src"
2
+ import { DocCollection, BinaryDocumentId } from "../src"
3
3
  import { TestDoc } from "./types.js"
4
+ import { generateAutomergeUrl, stringifyAutomergeUrl } from "../src/DocUrl"
4
5
 
5
- const MISSING_DOCID = "non-existent-docID" as DocumentId
6
+ const MISSING_DOCID = generateAutomergeUrl()
6
7
 
7
8
  describe("DocCollection", () => {
8
9
  it("can create documents which are ready to go", async () => {
@@ -1,17 +1,17 @@
1
1
  import * as A from "@automerge/automerge"
2
2
  import assert from "assert"
3
3
  import { it } from "mocha"
4
- import { DocHandle, DocHandleChangePayload, DocumentId } from "../src"
4
+ import { DocHandle, DocHandleChangePayload } from "../src"
5
5
  import { pause } from "../src/helpers/pause"
6
6
  import { TestDoc } from "./types.js"
7
+ import { generateAutomergeUrl, parseAutomergeUrl } from "../src/DocUrl"
7
8
 
8
9
  describe("DocHandle", () => {
9
- const TEST_ID = "test-document-id" as DocumentId
10
+ const TEST_ID = parseAutomergeUrl(generateAutomergeUrl()).encodedDocumentId
11
+ const BOGUS_ID = parseAutomergeUrl(generateAutomergeUrl()).encodedDocumentId
10
12
 
11
- const binaryFromMockStorage = () => {
12
- const doc = A.change<{ foo: string }>(A.init(), d => (d.foo = "bar"))
13
- const binary = A.save(doc)
14
- return binary
13
+ const docFromMockStorage = (doc: A.Doc<{ foo: string }>) => {
14
+ return A.change<{ foo: string }>(doc, d => (d.foo = "bar"))
15
15
  }
16
16
 
17
17
  it("should take the UUID passed into it", () => {
@@ -24,10 +24,11 @@ describe("DocHandle", () => {
24
24
  assert.equal(handle.isReady(), false)
25
25
 
26
26
  // simulate loading from storage
27
- handle.load(binaryFromMockStorage())
27
+ handle.update(doc => docFromMockStorage(doc))
28
28
 
29
29
  assert.equal(handle.isReady(), true)
30
- const doc = await handle.value()
30
+ const doc = await handle.doc()
31
+ console.log("DOC", JSON.stringify(doc))
31
32
  assert.equal(doc.foo, "bar")
32
33
  })
33
34
 
@@ -36,27 +37,27 @@ describe("DocHandle", () => {
36
37
  assert.equal(handle.isReady(), false)
37
38
 
38
39
  // simulate loading from storage
39
- handle.load(binaryFromMockStorage())
40
+ handle.update(doc => docFromMockStorage(doc))
40
41
 
41
42
  assert.equal(handle.isReady(), true)
42
- const doc = await handle.value()
43
- assert.deepEqual(doc, handle.doc)
43
+ const doc = await handle.doc()
44
+ assert.deepEqual(doc, handle.docSync())
44
45
  })
45
46
 
46
- it("should throws an error if we accessing the doc before ready", async () => {
47
+ it("should return undefined if we accessing the doc before ready", async () => {
47
48
  const handle = new DocHandle<TestDoc>(TEST_ID)
48
49
 
49
- assert.throws(() => handle.doc)
50
+ assert.equal(handle.docSync(), undefined)
50
51
  })
51
52
 
52
- it("should not return a value until ready", async () => {
53
+ it("should not return a doc until ready", async () => {
53
54
  const handle = new DocHandle<TestDoc>(TEST_ID)
54
55
  assert.equal(handle.isReady(), false)
55
56
 
56
57
  // simulate loading from storage
57
- handle.load(binaryFromMockStorage())
58
+ handle.update(doc => docFromMockStorage(doc))
58
59
 
59
- const doc = await handle.value()
60
+ const doc = await handle.doc()
60
61
 
61
62
  assert.equal(handle.isReady(), true)
62
63
  assert.equal(doc.foo, "bar")
@@ -70,13 +71,13 @@ describe("DocHandle", () => {
70
71
  assert.throws(() => handle.change(d => (d.foo = "baz")))
71
72
 
72
73
  // simulate loading from storage
73
- handle.load(binaryFromMockStorage())
74
+ handle.update(doc => docFromMockStorage(doc))
74
75
 
75
76
  // now we're in READY state so we can make changes
76
77
  assert.equal(handle.isReady(), true)
77
78
  handle.change(d => (d.foo = "pizza"))
78
79
 
79
- const doc = await handle.value()
80
+ const doc = await handle.doc()
80
81
  assert.equal(doc.foo, "pizza")
81
82
  })
82
83
 
@@ -86,7 +87,7 @@ describe("DocHandle", () => {
86
87
  // we don't have it in storage, so we request it from the network
87
88
  handle.request()
88
89
 
89
- assert.throws(() => handle.doc)
90
+ assert.equal(handle.docSync(), undefined)
90
91
  assert.equal(handle.isReady(), false)
91
92
  assert.throws(() => handle.change(h => {}))
92
93
  })
@@ -102,7 +103,7 @@ describe("DocHandle", () => {
102
103
  return A.change(doc, d => (d.foo = "bar"))
103
104
  })
104
105
 
105
- const doc = await handle.value()
106
+ const doc = await handle.doc()
106
107
  assert.equal(handle.isReady(), true)
107
108
  assert.equal(doc.foo, "bar")
108
109
  })
@@ -118,7 +119,7 @@ describe("DocHandle", () => {
118
119
  doc.foo = "bar"
119
120
  })
120
121
 
121
- const doc = await handle.value()
122
+ const doc = await handle.doc()
122
123
  assert.equal(doc.foo, "bar")
123
124
 
124
125
  const changePayload = await p
@@ -142,7 +143,7 @@ describe("DocHandle", () => {
142
143
 
143
144
  const p = new Promise<void>(resolve =>
144
145
  handle.once("change", ({ handle, doc }) => {
145
- assert.equal(handle.doc.foo, doc.foo)
146
+ assert.equal(handle.docSync()?.foo, doc.foo)
146
147
 
147
148
  resolve()
148
149
  })
@@ -179,29 +180,29 @@ describe("DocHandle", () => {
179
180
  doc.foo = "baz"
180
181
  })
181
182
 
182
- const doc = await handle.value()
183
+ const doc = await handle.doc()
183
184
  assert.equal(doc.foo, "baz")
184
185
 
185
186
  return p
186
187
  })
187
188
 
188
- it("should emit a patch message when changes happen", async () => {
189
+ it("should emit a change message when changes happen", async () => {
189
190
  const handle = new DocHandle<TestDoc>(TEST_ID, { isNew: true })
190
- const p = new Promise(resolve => handle.once("patch", d => resolve(d)))
191
+ const p = new Promise(resolve => handle.once("change", d => resolve(d)))
191
192
 
192
193
  handle.change(doc => {
193
194
  doc.foo = "bar"
194
195
  })
195
196
 
196
197
  await p
197
- const doc = await handle.value()
198
+ const doc = await handle.doc()
198
199
  assert.equal(doc.foo, "bar")
199
200
  })
200
201
 
201
202
  it("should not emit a patch message if no change happens", done => {
202
203
  const handle = new DocHandle<TestDoc>(TEST_ID, { isNew: true })
203
- handle.on("patch", () => {
204
- done(new Error("shouldn't have patched"))
204
+ handle.on("change", () => {
205
+ done(new Error("shouldn't have changed"))
205
206
  })
206
207
  handle.change(_doc => {
207
208
  // do nothing
@@ -216,8 +217,10 @@ describe("DocHandle", () => {
216
217
  // we're not going to load
217
218
  await pause(10)
218
219
 
220
+ assert.equal(handle.state, "failed")
221
+
219
222
  // so it should time out
220
- return assert.rejects(handle.value, "DocHandle timed out")
223
+ return assert.rejects(handle.doc, "DocHandle timed out")
221
224
  })
222
225
 
223
226
  it("should not time out if the document is loaded in time", async () => {
@@ -225,10 +228,10 @@ describe("DocHandle", () => {
225
228
  const handle = new DocHandle<TestDoc>(TEST_ID, { timeoutDelay: 5 })
226
229
 
227
230
  // simulate loading from storage before the timeout expires
228
- handle.load(binaryFromMockStorage())
231
+ handle.update(doc => docFromMockStorage(doc))
229
232
 
230
233
  // now it should not time out
231
- const doc = await handle.value()
234
+ const doc = await handle.doc()
232
235
  assert.equal(doc.foo, "bar")
233
236
  })
234
237
 
@@ -243,12 +246,12 @@ describe("DocHandle", () => {
243
246
  await pause(10)
244
247
 
245
248
  // so it should time out
246
- return assert.rejects(handle.value, "DocHandle timed out")
249
+ return assert.rejects(handle.doc, "DocHandle timed out")
247
250
  })
248
251
 
249
252
  it("should not time out if the document is updated in time", async () => {
250
253
  // set docHandle time out after 5 ms
251
- const handle = new DocHandle<TestDoc>(TEST_ID, { timeoutDelay: 5 })
254
+ const handle = new DocHandle<TestDoc>(TEST_ID, { timeoutDelay: 1 })
252
255
 
253
256
  // simulate requesting from the network
254
257
  handle.request()
@@ -259,7 +262,9 @@ describe("DocHandle", () => {
259
262
  })
260
263
 
261
264
  // now it should not time out
262
- const doc = await handle.value()
265
+ await pause(5)
266
+
267
+ const doc = await handle.doc()
263
268
  assert.equal(doc.foo, "bar")
264
269
  })
265
270
 
@@ -282,7 +287,7 @@ describe("DocHandle", () => {
282
287
  doc.foo = "bar"
283
288
  })
284
289
 
285
- const headsBefore = A.getHeads(handle.doc)
290
+ const headsBefore = A.getHeads(handle.docSync()!)
286
291
 
287
292
  handle.change(doc => {
288
293
  doc.foo = "rab"
@@ -1,9 +1,10 @@
1
1
  import assert from "assert"
2
- import { DocumentId, PeerId } from "../src/types.js"
2
+ import { BinaryDocumentId, PeerId } from "../src/types.js"
3
3
  import { DocHandle } from "../src/DocHandle.js"
4
4
  import { DocSynchronizer } from "../src/synchronizer/DocSynchronizer.js"
5
5
  import { eventPromise } from "../src/helpers/eventPromise.js"
6
6
  import { TestDoc } from "./types.js"
7
+ import { parseAutomergeUrl, generateAutomergeUrl } from "../src/DocUrl.js"
7
8
 
8
9
  const alice = "alice" as PeerId
9
10
  const bob = "bob" as PeerId
@@ -13,7 +14,7 @@ describe("DocSynchronizer", () => {
13
14
  let docSynchronizer: DocSynchronizer
14
15
 
15
16
  const setup = () => {
16
- const docId = "synced-doc" as DocumentId
17
+ const docId = parseAutomergeUrl(generateAutomergeUrl()).encodedDocumentId
17
18
  handle = new DocHandle<TestDoc>(docId, { isNew: true })
18
19
  docSynchronizer = new DocSynchronizer(handle)
19
20
  return { handle, docSynchronizer }
package/test/Repo.test.ts CHANGED
@@ -1,7 +1,15 @@
1
1
  import assert from "assert"
2
2
  import { MessageChannelNetworkAdapter } from "@automerge/automerge-repo-network-messagechannel"
3
-
4
- import { ChannelId, DocHandle, DocumentId, PeerId, SharePolicy } from "../src"
3
+ import * as A from "@automerge/automerge"
4
+
5
+ import {
6
+ AutomergeUrl,
7
+ ChannelId,
8
+ DocHandle,
9
+ DocumentId,
10
+ PeerId,
11
+ SharePolicy,
12
+ } from "../src"
5
13
  import { eventPromise } from "../src/helpers/eventPromise.js"
6
14
  import { pause, rejectOnTimeout } from "../src/helpers/pause.js"
7
15
  import { Repo } from "../src/Repo.js"
@@ -9,6 +17,11 @@ import { DummyNetworkAdapter } from "./helpers/DummyNetworkAdapter.js"
9
17
  import { DummyStorageAdapter } from "./helpers/DummyStorageAdapter.js"
10
18
  import { getRandomItem } from "./helpers/getRandomItem.js"
11
19
  import { TestDoc } from "./types.js"
20
+ import {
21
+ binaryToDocumentId,
22
+ generateAutomergeUrl,
23
+ stringifyAutomergeUrl,
24
+ } from "../src/DocUrl"
12
25
 
13
26
  describe("Repo", () => {
14
27
  describe("single repo", () => {
@@ -41,19 +54,29 @@ describe("Repo", () => {
41
54
  handle.change(d => {
42
55
  d.foo = "bar"
43
56
  })
44
- const v = await handle.value()
57
+ const v = await handle.doc()
58
+ console.log("V is ", v)
45
59
  assert.equal(handle.isReady(), true)
46
60
 
47
61
  assert.equal(v.foo, "bar")
48
62
  })
49
63
 
64
+ it("throws an error if we try to find a handle with an invalid AutomergeUrl", async () => {
65
+ const { repo } = setup()
66
+ try {
67
+ repo.find<TestDoc>("invalid-url" as unknown as AutomergeUrl)
68
+ } catch (e: any) {
69
+ assert.equal(e.message, "Invalid AutomergeUrl: 'invalid-url'")
70
+ }
71
+ })
72
+
50
73
  it("doesn't find a document that doesn't exist", async () => {
51
74
  const { repo } = setup()
52
- const handle = repo.find<TestDoc>("does-not-exist" as DocumentId)
75
+ const handle = repo.find<TestDoc>(generateAutomergeUrl())
53
76
  assert.equal(handle.isReady(), false)
54
77
 
55
78
  return assert.rejects(
56
- rejectOnTimeout(handle.value(), 100),
79
+ rejectOnTimeout(handle.doc(), 10),
57
80
  "This document should not exist"
58
81
  )
59
82
  })
@@ -66,12 +89,12 @@ describe("Repo", () => {
66
89
  })
67
90
  assert.equal(handle.isReady(), true)
68
91
 
69
- const bobHandle = repo.find<TestDoc>(handle.documentId)
92
+ const bobHandle = repo.find<TestDoc>(handle.url)
70
93
 
71
94
  assert.equal(handle, bobHandle)
72
95
  assert.equal(handle.isReady(), true)
73
96
 
74
- const v = await bobHandle.value()
97
+ const v = await bobHandle.doc()
75
98
  assert.equal(v.foo, "bar")
76
99
  })
77
100
 
@@ -92,9 +115,9 @@ describe("Repo", () => {
92
115
  network: [],
93
116
  })
94
117
 
95
- const bobHandle = repo2.find<TestDoc>(handle.documentId)
118
+ const bobHandle = repo2.find<TestDoc>(handle.url)
96
119
 
97
- const v = await bobHandle.value()
120
+ const v = await bobHandle.doc()
98
121
  assert.equal(v.foo, "bar")
99
122
  })
100
123
 
@@ -105,15 +128,37 @@ describe("Repo", () => {
105
128
  d.foo = "bar"
106
129
  })
107
130
  assert.equal(handle.isReady(), true)
108
- await handle.value()
131
+ await handle.doc()
109
132
  repo.delete(handle.documentId)
110
133
 
111
134
  assert(handle.isDeleted())
112
135
  assert.equal(repo.handles[handle.documentId], undefined)
113
136
 
114
- const bobHandle = repo.find<TestDoc>(handle.documentId)
137
+ const bobHandle = repo.find<TestDoc>(handle.url)
115
138
  await assert.rejects(
116
- rejectOnTimeout(bobHandle.value(), 10),
139
+ rejectOnTimeout(bobHandle.doc(), 10),
140
+ "document should have been deleted"
141
+ )
142
+
143
+ assert(!bobHandle.isReady())
144
+ })
145
+
146
+ it("can delete an existing document by url", async () => {
147
+ const { repo } = setup()
148
+ const handle = repo.create<TestDoc>()
149
+ handle.change(d => {
150
+ d.foo = "bar"
151
+ })
152
+ assert.equal(handle.isReady(), true)
153
+ await handle.doc()
154
+ repo.delete(handle.url)
155
+
156
+ assert(handle.isDeleted())
157
+ assert.equal(repo.handles[handle.documentId], undefined)
158
+
159
+ const bobHandle = repo.find<TestDoc>(handle.url)
160
+ await assert.rejects(
161
+ rejectOnTimeout(bobHandle.doc(), 10),
117
162
  "document should have been deleted"
118
163
  )
119
164
 
@@ -128,14 +173,72 @@ describe("Repo", () => {
128
173
  })
129
174
  assert.equal(handle.isReady(), true)
130
175
 
131
- repo.on("delete-document", ({ documentId }) => {
132
- assert.equal(documentId, handle.documentId)
176
+ repo.on("delete-document", ({ encodedDocumentId }) => {
177
+ assert.equal(encodedDocumentId, handle.documentId)
133
178
 
134
179
  done()
135
180
  })
136
181
 
137
182
  repo.delete(handle.documentId)
138
183
  })
184
+
185
+ it("storage state doesn't change across reloads when the document hasn't changed", async () => {
186
+ const storage = new DummyStorageAdapter()
187
+
188
+ const repo = new Repo({
189
+ storage,
190
+ network: [],
191
+ })
192
+
193
+ const handle = repo.create<{ count: number }>()
194
+
195
+ handle.change(d => {
196
+ d.count = 0
197
+ })
198
+ handle.change(d => {
199
+ d.count = 1
200
+ })
201
+
202
+ const initialKeys = storage.keys()
203
+
204
+ const repo2 = new Repo({
205
+ storage,
206
+ network: [],
207
+ })
208
+ const handle2 = repo2.find(handle.url)
209
+ await handle2.doc()
210
+
211
+ assert.deepEqual(storage.keys(), initialKeys)
212
+ })
213
+
214
+ it("doesn't delete a document from storage when we refresh", async () => {
215
+ const storage = new DummyStorageAdapter()
216
+
217
+ const repo = new Repo({
218
+ storage,
219
+ network: [],
220
+ })
221
+
222
+ const handle = repo.create<{ count: number }>()
223
+
224
+ handle.change(d => {
225
+ d.count = 0
226
+ })
227
+ handle.change(d => {
228
+ d.count = 1
229
+ })
230
+
231
+ for (let i = 0; i < 3; i++) {
232
+ const repo2 = new Repo({
233
+ storage,
234
+ network: [],
235
+ })
236
+ const handle2 = repo2.find(handle.url)
237
+ await handle2.doc()
238
+
239
+ assert(storage.keys().length !== 0)
240
+ }
241
+ })
139
242
  })
140
243
 
141
244
  describe("sync", async () => {
@@ -232,9 +335,9 @@ describe("Repo", () => {
232
335
  it("changes are replicated from aliceRepo to bobRepo", async () => {
233
336
  const { bobRepo, aliceHandle, teardown } = await setup()
234
337
 
235
- const bobHandle = bobRepo.find<TestDoc>(aliceHandle.documentId)
338
+ const bobHandle = bobRepo.find<TestDoc>(aliceHandle.url)
236
339
  await eventPromise(bobHandle, "change")
237
- const bobDoc = await bobHandle.value()
340
+ const bobDoc = await bobHandle.doc()
238
341
  assert.deepStrictEqual(bobDoc, { foo: "bar" })
239
342
  teardown()
240
343
  })
@@ -242,9 +345,9 @@ describe("Repo", () => {
242
345
  it("can load a document from aliceRepo on charlieRepo", async () => {
243
346
  const { charlieRepo, aliceHandle, teardown } = await setup()
244
347
 
245
- const handle3 = charlieRepo.find<TestDoc>(aliceHandle.documentId)
348
+ const handle3 = charlieRepo.find<TestDoc>(aliceHandle.url)
246
349
  await eventPromise(handle3, "change")
247
- const doc3 = await handle3.value()
350
+ const doc3 = await handle3.doc()
248
351
  assert.deepStrictEqual(doc3, { foo: "bar" })
249
352
  teardown()
250
353
  })
@@ -268,8 +371,10 @@ describe("Repo", () => {
268
371
  it("charlieRepo can request a document not initially shared with it", async () => {
269
372
  const { charlieRepo, notForCharlie, teardown } = await setup()
270
373
 
271
- const handle = charlieRepo.find<TestDoc>(notForCharlie)
272
- const doc = await handle.value()
374
+ const handle = charlieRepo.find<TestDoc>(
375
+ stringifyAutomergeUrl({ documentId: notForCharlie })
376
+ )
377
+ const doc = await handle.doc()
273
378
 
274
379
  assert.deepStrictEqual(doc, { foo: "baz" })
275
380
 
@@ -279,8 +384,10 @@ describe("Repo", () => {
279
384
  it("charlieRepo can request a document across a network of multiple peers", async () => {
280
385
  const { charlieRepo, notForBob, teardown } = await setup()
281
386
 
282
- const handle = charlieRepo.find<TestDoc>(notForBob)
283
- const doc = await handle.value()
387
+ const handle = charlieRepo.find<TestDoc>(
388
+ stringifyAutomergeUrl({ documentId: notForBob })
389
+ )
390
+ const doc = await handle.doc()
284
391
  assert.deepStrictEqual(doc, { foo: "bap" })
285
392
 
286
393
  teardown()
@@ -288,11 +395,11 @@ describe("Repo", () => {
288
395
 
289
396
  it("doesn't find a document which doesn't exist anywhere on the network", async () => {
290
397
  const { charlieRepo } = await setup()
291
- const handle = charlieRepo.find<TestDoc>("does-not-exist" as DocumentId)
398
+ const handle = charlieRepo.find<TestDoc>(generateAutomergeUrl())
292
399
  assert.equal(handle.isReady(), false)
293
400
 
294
401
  return assert.rejects(
295
- rejectOnTimeout(handle.value(), 100),
402
+ rejectOnTimeout(handle.doc(), 10),
296
403
  "This document should not exist"
297
404
  )
298
405
  })
@@ -310,9 +417,9 @@ describe("Repo", () => {
310
417
  })
311
418
  await changePromise
312
419
 
313
- const handle3 = charlieRepo.find<TestDoc>(aliceHandle.documentId)
420
+ const handle3 = charlieRepo.find<TestDoc>(aliceHandle.url)
314
421
  await eventPromise(handle3, "change")
315
- const doc3 = await handle3.value()
422
+ const doc3 = await handle3.doc()
316
423
 
317
424
  assert.deepStrictEqual(doc3, { foo: "baz" })
318
425
 
@@ -351,7 +458,7 @@ describe("Repo", () => {
351
458
 
352
459
  // make sure the doc is ready
353
460
  if (!doc.isReady()) {
354
- await doc.value()
461
+ await doc.doc()
355
462
  }
356
463
 
357
464
  // make a random change to it
@@ -9,8 +9,9 @@ import A from "@automerge/automerge"
9
9
  import { DummyStorageAdapter } from "./helpers/DummyStorageAdapter.js"
10
10
  import { NodeFSStorageAdapter } from "@automerge/automerge-repo-storage-nodefs"
11
11
 
12
- import { DocumentId, StorageSubsystem } from "../src"
12
+ import { StorageSubsystem } from "../src"
13
13
  import { TestDoc } from "./types.js"
14
+ import { generateAutomergeUrl, parseAutomergeUrl } from "../src/DocUrl.js"
14
15
 
15
16
  const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "automerge-repo-tests"))
16
17
 
@@ -30,11 +31,11 @@ describe("StorageSubsystem", () => {
30
31
  })
31
32
 
32
33
  // save it to storage
33
- const key = "test-key" as DocumentId
34
- await storage.save(key, doc)
34
+ const key = parseAutomergeUrl(generateAutomergeUrl()).encodedDocumentId
35
+ await storage.saveDoc(key, doc)
35
36
 
36
37
  // reload it from storage
37
- const reloadedDoc = await storage.load<TestDoc>(key)
38
+ const reloadedDoc = await storage.loadDoc(key)
38
39
 
39
40
  // check that it's the same doc
40
41
  assert.deepStrictEqual(reloadedDoc, doc)
@@ -51,24 +52,26 @@ describe("StorageSubsystem", () => {
51
52
  })
52
53
 
53
54
  // save it to storage
54
- const key = "test-key" as DocumentId
55
- storage.save(key, doc)
55
+ const key = parseAutomergeUrl(generateAutomergeUrl()).encodedDocumentId
56
+ storage.saveDoc(key, doc)
56
57
 
57
58
  // create new storage subsystem to simulate a new process
58
59
  const storage2 = new StorageSubsystem(adapter)
59
60
 
60
61
  // reload it from storage
61
- const reloadedDoc = await storage2.load<TestDoc>(key)
62
+ const reloadedDoc = await storage2.loadDoc(key)
63
+
64
+ assert(reloadedDoc, "doc should be loaded")
62
65
 
63
66
  // make a change
64
- const changedDoc = A.change(reloadedDoc, "test 2", d => {
67
+ const changedDoc = A.change<any>(reloadedDoc, "test 2", d => {
65
68
  d.foo = "baz"
66
69
  })
67
70
 
68
71
  // save it to storage
69
- storage2.save(key, changedDoc)
72
+ storage2.saveDoc(key, changedDoc)
70
73
 
71
74
  // check that the storage adapter contains the correct keys
72
- assert(adapter.keys().some(k => k.startsWith("test-key.incremental.")))
75
+ assert(adapter.keys().some(k => k.startsWith(`${key}.incremental.`)))
73
76
  })
74
77
  })
@@ -3,6 +3,6 @@ import { NetworkAdapter } from "../../src"
3
3
  export class DummyNetworkAdapter extends NetworkAdapter {
4
4
  sendMessage() {}
5
5
  connect(_: string) {}
6
- join(_: string) {}
7
- leave(_: string) {}
6
+ join() {}
7
+ leave() {}
8
8
  }
@@ -1,16 +1,20 @@
1
- import { StorageAdapter } from "../../src"
1
+ import { StorageAdapter, type StorageKey } from "../../src"
2
2
 
3
3
  export class DummyStorageAdapter implements StorageAdapter {
4
4
  #data: Record<string, Uint8Array> = {}
5
5
 
6
- #keyToString(key: string[]) {
6
+ #keyToString(key: string[]): string {
7
7
  return key.join(".")
8
8
  }
9
9
 
10
- async loadRange(keyPrefix: string[]): Promise<Uint8Array[]> {
10
+ #stringToKey(key: string): string[] {
11
+ return key.split(".")
12
+ }
13
+
14
+ async loadRange(keyPrefix: StorageKey): Promise<{data: Uint8Array, key: StorageKey}[]> {
11
15
  const range = Object.entries(this.#data)
12
16
  .filter(([key, _]) => key.startsWith(this.#keyToString(keyPrefix)))
13
- .map(([_, value]) => value)
17
+ .map(([key, data]) => ({key: this.#stringToKey(key), data}))
14
18
  return Promise.resolve(range)
15
19
  }
16
20