@automerge/automerge-repo 0.2.1 → 1.0.0-alpha.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. package/README.md +7 -24
  2. package/dist/DocCollection.d.ts +4 -4
  3. package/dist/DocCollection.d.ts.map +1 -1
  4. package/dist/DocCollection.js +25 -17
  5. package/dist/DocHandle.d.ts +46 -10
  6. package/dist/DocHandle.d.ts.map +1 -1
  7. package/dist/DocHandle.js +101 -36
  8. package/dist/DocUrl.d.ts +38 -18
  9. package/dist/DocUrl.d.ts.map +1 -1
  10. package/dist/DocUrl.js +63 -24
  11. package/dist/Repo.d.ts.map +1 -1
  12. package/dist/Repo.js +4 -6
  13. package/dist/helpers/headsAreSame.d.ts +1 -1
  14. package/dist/helpers/headsAreSame.d.ts.map +1 -1
  15. package/dist/helpers/tests/network-adapter-tests.js +10 -10
  16. package/dist/index.d.ts +3 -2
  17. package/dist/index.d.ts.map +1 -1
  18. package/dist/index.js +1 -0
  19. package/dist/network/NetworkAdapter.d.ts +2 -3
  20. package/dist/network/NetworkAdapter.d.ts.map +1 -1
  21. package/dist/network/NetworkSubsystem.d.ts +2 -3
  22. package/dist/network/NetworkSubsystem.d.ts.map +1 -1
  23. package/dist/network/NetworkSubsystem.js +9 -13
  24. package/dist/storage/StorageAdapter.d.ts +9 -5
  25. package/dist/storage/StorageAdapter.d.ts.map +1 -1
  26. package/dist/storage/StorageSubsystem.d.ts +2 -2
  27. package/dist/storage/StorageSubsystem.d.ts.map +1 -1
  28. package/dist/storage/StorageSubsystem.js +73 -25
  29. package/dist/synchronizer/CollectionSynchronizer.d.ts +1 -1
  30. package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
  31. package/dist/synchronizer/CollectionSynchronizer.js +5 -1
  32. package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -1
  33. package/dist/synchronizer/DocSynchronizer.js +6 -5
  34. package/dist/types.d.ts +6 -0
  35. package/dist/types.d.ts.map +1 -1
  36. package/package.json +8 -5
  37. package/src/DocCollection.ts +32 -22
  38. package/src/DocHandle.ts +121 -47
  39. package/src/DocUrl.ts +90 -0
  40. package/src/Repo.ts +5 -8
  41. package/src/helpers/tests/network-adapter-tests.ts +10 -10
  42. package/src/index.ts +7 -5
  43. package/src/network/NetworkAdapter.ts +2 -3
  44. package/src/network/NetworkSubsystem.ts +9 -14
  45. package/src/storage/StorageAdapter.ts +7 -5
  46. package/src/storage/StorageSubsystem.ts +95 -34
  47. package/src/synchronizer/CollectionSynchronizer.ts +10 -2
  48. package/src/synchronizer/DocSynchronizer.ts +7 -6
  49. package/src/types.ts +4 -1
  50. package/test/CollectionSynchronizer.test.ts +1 -1
  51. package/test/DocCollection.test.ts +3 -2
  52. package/test/DocHandle.test.ts +32 -26
  53. package/test/DocSynchronizer.test.ts +3 -2
  54. package/test/Repo.test.ts +76 -27
  55. package/test/StorageSubsystem.test.ts +10 -7
  56. package/test/helpers/DummyNetworkAdapter.ts +2 -2
  57. package/test/helpers/DummyStorageAdapter.ts +8 -4
@@ -1,12 +1,14 @@
1
1
  import * as A from "@automerge/automerge"
2
2
  import assert from "assert"
3
3
  import { it } from "mocha"
4
- import { DocHandle, DocHandleChangePayload, DocumentId } from "../src"
4
+ import { DocHandle, DocHandleChangePayload, BinaryDocumentId } from "../src"
5
5
  import { pause } from "../src/helpers/pause"
6
6
  import { TestDoc } from "./types.js"
7
+ import { generateAutomergeUrl, parseAutomergeUrl } from "../src/DocUrl"
7
8
 
8
9
  describe("DocHandle", () => {
9
- const TEST_ID = "test-document-id" as DocumentId
10
+ const TEST_ID = parseAutomergeUrl(generateAutomergeUrl()).encodedDocumentId
11
+ const BOGUS_ID = parseAutomergeUrl(generateAutomergeUrl()).encodedDocumentId
10
12
 
11
13
  const binaryFromMockStorage = () => {
12
14
  const doc = A.change<{ foo: string }>(A.init(), d => (d.foo = "bar"))
@@ -27,7 +29,7 @@ describe("DocHandle", () => {
27
29
  handle.load(binaryFromMockStorage())
28
30
 
29
31
  assert.equal(handle.isReady(), true)
30
- const doc = await handle.value()
32
+ const doc = await handle.doc()
31
33
  assert.equal(doc.foo, "bar")
32
34
  })
33
35
 
@@ -39,24 +41,24 @@ describe("DocHandle", () => {
39
41
  handle.load(binaryFromMockStorage())
40
42
 
41
43
  assert.equal(handle.isReady(), true)
42
- const doc = await handle.value()
43
- assert.deepEqual(doc, handle.doc)
44
+ const doc = await handle.doc()
45
+ assert.deepEqual(doc, handle.docSync())
44
46
  })
45
47
 
46
- it("should throws an error if we accessing the doc before ready", async () => {
48
+ it("should return undefined if we accessing the doc before ready", async () => {
47
49
  const handle = new DocHandle<TestDoc>(TEST_ID)
48
50
 
49
- assert.throws(() => handle.doc)
51
+ assert.equal(handle.docSync(), undefined)
50
52
  })
51
53
 
52
- it("should not return a value until ready", async () => {
54
+ it("should not return a doc until ready", async () => {
53
55
  const handle = new DocHandle<TestDoc>(TEST_ID)
54
56
  assert.equal(handle.isReady(), false)
55
57
 
56
58
  // simulate loading from storage
57
59
  handle.load(binaryFromMockStorage())
58
60
 
59
- const doc = await handle.value()
61
+ const doc = await handle.doc()
60
62
 
61
63
  assert.equal(handle.isReady(), true)
62
64
  assert.equal(doc.foo, "bar")
@@ -76,7 +78,7 @@ describe("DocHandle", () => {
76
78
  assert.equal(handle.isReady(), true)
77
79
  handle.change(d => (d.foo = "pizza"))
78
80
 
79
- const doc = await handle.value()
81
+ const doc = await handle.doc()
80
82
  assert.equal(doc.foo, "pizza")
81
83
  })
82
84
 
@@ -86,7 +88,7 @@ describe("DocHandle", () => {
86
88
  // we don't have it in storage, so we request it from the network
87
89
  handle.request()
88
90
 
89
- assert.throws(() => handle.doc)
91
+ assert.equal(handle.docSync(), undefined)
90
92
  assert.equal(handle.isReady(), false)
91
93
  assert.throws(() => handle.change(h => {}))
92
94
  })
@@ -102,7 +104,7 @@ describe("DocHandle", () => {
102
104
  return A.change(doc, d => (d.foo = "bar"))
103
105
  })
104
106
 
105
- const doc = await handle.value()
107
+ const doc = await handle.doc()
106
108
  assert.equal(handle.isReady(), true)
107
109
  assert.equal(doc.foo, "bar")
108
110
  })
@@ -118,7 +120,7 @@ describe("DocHandle", () => {
118
120
  doc.foo = "bar"
119
121
  })
120
122
 
121
- const doc = await handle.value()
123
+ const doc = await handle.doc()
122
124
  assert.equal(doc.foo, "bar")
123
125
 
124
126
  const changePayload = await p
@@ -142,7 +144,7 @@ describe("DocHandle", () => {
142
144
 
143
145
  const p = new Promise<void>(resolve =>
144
146
  handle.once("change", ({ handle, doc }) => {
145
- assert.equal(handle.doc.foo, doc.foo)
147
+ assert.equal(handle.docSync()?.foo, doc.foo)
146
148
 
147
149
  resolve()
148
150
  })
@@ -179,29 +181,29 @@ describe("DocHandle", () => {
179
181
  doc.foo = "baz"
180
182
  })
181
183
 
182
- const doc = await handle.value()
184
+ const doc = await handle.doc()
183
185
  assert.equal(doc.foo, "baz")
184
186
 
185
187
  return p
186
188
  })
187
189
 
188
- it("should emit a patch message when changes happen", async () => {
190
+ it("should emit a change message when changes happen", async () => {
189
191
  const handle = new DocHandle<TestDoc>(TEST_ID, { isNew: true })
190
- const p = new Promise(resolve => handle.once("patch", d => resolve(d)))
192
+ const p = new Promise(resolve => handle.once("change", d => resolve(d)))
191
193
 
192
194
  handle.change(doc => {
193
195
  doc.foo = "bar"
194
196
  })
195
197
 
196
198
  await p
197
- const doc = await handle.value()
199
+ const doc = await handle.doc()
198
200
  assert.equal(doc.foo, "bar")
199
201
  })
200
202
 
201
203
  it("should not emit a patch message if no change happens", done => {
202
204
  const handle = new DocHandle<TestDoc>(TEST_ID, { isNew: true })
203
- handle.on("patch", () => {
204
- done(new Error("shouldn't have patched"))
205
+ handle.on("change", () => {
206
+ done(new Error("shouldn't have changed"))
205
207
  })
206
208
  handle.change(_doc => {
207
209
  // do nothing
@@ -216,8 +218,10 @@ describe("DocHandle", () => {
216
218
  // we're not going to load
217
219
  await pause(10)
218
220
 
221
+ assert.equal(handle.state, "failed")
222
+
219
223
  // so it should time out
220
- return assert.rejects(handle.value, "DocHandle timed out")
224
+ return assert.rejects(handle.doc, "DocHandle timed out")
221
225
  })
222
226
 
223
227
  it("should not time out if the document is loaded in time", async () => {
@@ -228,7 +232,7 @@ describe("DocHandle", () => {
228
232
  handle.load(binaryFromMockStorage())
229
233
 
230
234
  // now it should not time out
231
- const doc = await handle.value()
235
+ const doc = await handle.doc()
232
236
  assert.equal(doc.foo, "bar")
233
237
  })
234
238
 
@@ -243,12 +247,12 @@ describe("DocHandle", () => {
243
247
  await pause(10)
244
248
 
245
249
  // so it should time out
246
- return assert.rejects(handle.value, "DocHandle timed out")
250
+ return assert.rejects(handle.doc, "DocHandle timed out")
247
251
  })
248
252
 
249
253
  it("should not time out if the document is updated in time", async () => {
250
254
  // set docHandle time out after 5 ms
251
- const handle = new DocHandle<TestDoc>(TEST_ID, { timeoutDelay: 5 })
255
+ const handle = new DocHandle<TestDoc>(TEST_ID, { timeoutDelay: 1 })
252
256
 
253
257
  // simulate requesting from the network
254
258
  handle.request()
@@ -259,7 +263,9 @@ describe("DocHandle", () => {
259
263
  })
260
264
 
261
265
  // now it should not time out
262
- const doc = await handle.value()
266
+ await pause(5)
267
+
268
+ const doc = await handle.doc()
263
269
  assert.equal(doc.foo, "bar")
264
270
  })
265
271
 
@@ -282,7 +288,7 @@ describe("DocHandle", () => {
282
288
  doc.foo = "bar"
283
289
  })
284
290
 
285
- const headsBefore = A.getHeads(handle.doc)
291
+ const headsBefore = A.getHeads(handle.docSync()!)
286
292
 
287
293
  handle.change(doc => {
288
294
  doc.foo = "rab"
@@ -1,9 +1,10 @@
1
1
  import assert from "assert"
2
- import { DocumentId, PeerId } from "../src/types.js"
2
+ import { BinaryDocumentId, PeerId } from "../src/types.js"
3
3
  import { DocHandle } from "../src/DocHandle.js"
4
4
  import { DocSynchronizer } from "../src/synchronizer/DocSynchronizer.js"
5
5
  import { eventPromise } from "../src/helpers/eventPromise.js"
6
6
  import { TestDoc } from "./types.js"
7
+ import { parseAutomergeUrl, generateAutomergeUrl } from "../src/DocUrl.js"
7
8
 
8
9
  const alice = "alice" as PeerId
9
10
  const bob = "bob" as PeerId
@@ -13,7 +14,7 @@ describe("DocSynchronizer", () => {
13
14
  let docSynchronizer: DocSynchronizer
14
15
 
15
16
  const setup = () => {
16
- const docId = "synced-doc" as DocumentId
17
+ const docId = parseAutomergeUrl(generateAutomergeUrl()).encodedDocumentId
17
18
  handle = new DocHandle<TestDoc>(docId, { isNew: true })
18
19
  docSynchronizer = new DocSynchronizer(handle)
19
20
  return { handle, docSynchronizer }
package/test/Repo.test.ts CHANGED
@@ -1,7 +1,15 @@
1
1
  import assert from "assert"
2
2
  import { MessageChannelNetworkAdapter } from "@automerge/automerge-repo-network-messagechannel"
3
-
4
- import { ChannelId, DocHandle, DocumentId, PeerId, SharePolicy } from "../src"
3
+ import * as A from "@automerge/automerge"
4
+
5
+ import {
6
+ AutomergeUrl,
7
+ ChannelId,
8
+ DocHandle,
9
+ DocumentId,
10
+ PeerId,
11
+ SharePolicy,
12
+ } from "../src"
5
13
  import { eventPromise } from "../src/helpers/eventPromise.js"
6
14
  import { pause, rejectOnTimeout } from "../src/helpers/pause.js"
7
15
  import { Repo } from "../src/Repo.js"
@@ -9,6 +17,11 @@ import { DummyNetworkAdapter } from "./helpers/DummyNetworkAdapter.js"
9
17
  import { DummyStorageAdapter } from "./helpers/DummyStorageAdapter.js"
10
18
  import { getRandomItem } from "./helpers/getRandomItem.js"
11
19
  import { TestDoc } from "./types.js"
20
+ import {
21
+ binaryToDocumentId,
22
+ generateAutomergeUrl,
23
+ stringifyAutomergeUrl,
24
+ } from "../src/DocUrl"
12
25
 
13
26
  describe("Repo", () => {
14
27
  describe("single repo", () => {
@@ -41,19 +54,28 @@ describe("Repo", () => {
41
54
  handle.change(d => {
42
55
  d.foo = "bar"
43
56
  })
44
- const v = await handle.value()
57
+ const v = await handle.doc()
45
58
  assert.equal(handle.isReady(), true)
46
59
 
47
60
  assert.equal(v.foo, "bar")
48
61
  })
49
62
 
63
+ it("throws an error if we try to find a handle with an invalid AutomergeUrl", async () => {
64
+ const { repo } = setup()
65
+ try {
66
+ repo.find<TestDoc>("invalid-url" as unknown as AutomergeUrl)
67
+ } catch (e: any) {
68
+ assert.equal(e.message, "Invalid AutomergeUrl: 'invalid-url'")
69
+ }
70
+ })
71
+
50
72
  it("doesn't find a document that doesn't exist", async () => {
51
73
  const { repo } = setup()
52
- const handle = repo.find<TestDoc>("does-not-exist" as DocumentId)
74
+ const handle = repo.find<TestDoc>(generateAutomergeUrl())
53
75
  assert.equal(handle.isReady(), false)
54
76
 
55
77
  return assert.rejects(
56
- rejectOnTimeout(handle.value(), 100),
78
+ rejectOnTimeout(handle.doc(), 10),
57
79
  "This document should not exist"
58
80
  )
59
81
  })
@@ -66,12 +88,12 @@ describe("Repo", () => {
66
88
  })
67
89
  assert.equal(handle.isReady(), true)
68
90
 
69
- const bobHandle = repo.find<TestDoc>(handle.documentId)
91
+ const bobHandle = repo.find<TestDoc>(handle.url)
70
92
 
71
93
  assert.equal(handle, bobHandle)
72
94
  assert.equal(handle.isReady(), true)
73
95
 
74
- const v = await bobHandle.value()
96
+ const v = await bobHandle.doc()
75
97
  assert.equal(v.foo, "bar")
76
98
  })
77
99
 
@@ -92,9 +114,9 @@ describe("Repo", () => {
92
114
  network: [],
93
115
  })
94
116
 
95
- const bobHandle = repo2.find<TestDoc>(handle.documentId)
117
+ const bobHandle = repo2.find<TestDoc>(handle.url)
96
118
 
97
- const v = await bobHandle.value()
119
+ const v = await bobHandle.doc()
98
120
  assert.equal(v.foo, "bar")
99
121
  })
100
122
 
@@ -105,15 +127,37 @@ describe("Repo", () => {
105
127
  d.foo = "bar"
106
128
  })
107
129
  assert.equal(handle.isReady(), true)
108
- await handle.value()
130
+ await handle.doc()
109
131
  repo.delete(handle.documentId)
110
132
 
111
133
  assert(handle.isDeleted())
112
134
  assert.equal(repo.handles[handle.documentId], undefined)
113
135
 
114
- const bobHandle = repo.find<TestDoc>(handle.documentId)
136
+ const bobHandle = repo.find<TestDoc>(handle.url)
115
137
  await assert.rejects(
116
- rejectOnTimeout(bobHandle.value(), 10),
138
+ rejectOnTimeout(bobHandle.doc(), 10),
139
+ "document should have been deleted"
140
+ )
141
+
142
+ assert(!bobHandle.isReady())
143
+ })
144
+
145
+ it("can delete an existing document by url", async () => {
146
+ const { repo } = setup()
147
+ const handle = repo.create<TestDoc>()
148
+ handle.change(d => {
149
+ d.foo = "bar"
150
+ })
151
+ assert.equal(handle.isReady(), true)
152
+ await handle.doc()
153
+ repo.delete(handle.url)
154
+
155
+ assert(handle.isDeleted())
156
+ assert.equal(repo.handles[handle.documentId], undefined)
157
+
158
+ const bobHandle = repo.find<TestDoc>(handle.url)
159
+ await assert.rejects(
160
+ rejectOnTimeout(bobHandle.doc(), 10),
117
161
  "document should have been deleted"
118
162
  )
119
163
 
@@ -128,8 +172,8 @@ describe("Repo", () => {
128
172
  })
129
173
  assert.equal(handle.isReady(), true)
130
174
 
131
- repo.on("delete-document", ({ documentId }) => {
132
- assert.equal(documentId, handle.documentId)
175
+ repo.on("delete-document", ({ encodedDocumentId }) => {
176
+ assert.equal(encodedDocumentId, handle.documentId)
133
177
 
134
178
  done()
135
179
  })
@@ -232,9 +276,9 @@ describe("Repo", () => {
232
276
  it("changes are replicated from aliceRepo to bobRepo", async () => {
233
277
  const { bobRepo, aliceHandle, teardown } = await setup()
234
278
 
235
- const bobHandle = bobRepo.find<TestDoc>(aliceHandle.documentId)
279
+ const bobHandle = bobRepo.find<TestDoc>(aliceHandle.url)
236
280
  await eventPromise(bobHandle, "change")
237
- const bobDoc = await bobHandle.value()
281
+ const bobDoc = await bobHandle.doc()
238
282
  assert.deepStrictEqual(bobDoc, { foo: "bar" })
239
283
  teardown()
240
284
  })
@@ -242,9 +286,9 @@ describe("Repo", () => {
242
286
  it("can load a document from aliceRepo on charlieRepo", async () => {
243
287
  const { charlieRepo, aliceHandle, teardown } = await setup()
244
288
 
245
- const handle3 = charlieRepo.find<TestDoc>(aliceHandle.documentId)
289
+ const handle3 = charlieRepo.find<TestDoc>(aliceHandle.url)
246
290
  await eventPromise(handle3, "change")
247
- const doc3 = await handle3.value()
291
+ const doc3 = await handle3.doc()
248
292
  assert.deepStrictEqual(doc3, { foo: "bar" })
249
293
  teardown()
250
294
  })
@@ -268,8 +312,10 @@ describe("Repo", () => {
268
312
  it("charlieRepo can request a document not initially shared with it", async () => {
269
313
  const { charlieRepo, notForCharlie, teardown } = await setup()
270
314
 
271
- const handle = charlieRepo.find<TestDoc>(notForCharlie)
272
- const doc = await handle.value()
315
+ const handle = charlieRepo.find<TestDoc>(
316
+ stringifyAutomergeUrl({ documentId: notForCharlie })
317
+ )
318
+ const doc = await handle.doc()
273
319
 
274
320
  assert.deepStrictEqual(doc, { foo: "baz" })
275
321
 
@@ -279,8 +325,10 @@ describe("Repo", () => {
279
325
  it("charlieRepo can request a document across a network of multiple peers", async () => {
280
326
  const { charlieRepo, notForBob, teardown } = await setup()
281
327
 
282
- const handle = charlieRepo.find<TestDoc>(notForBob)
283
- const doc = await handle.value()
328
+ const handle = charlieRepo.find<TestDoc>(
329
+ stringifyAutomergeUrl({ documentId: notForBob })
330
+ )
331
+ const doc = await handle.doc()
284
332
  assert.deepStrictEqual(doc, { foo: "bap" })
285
333
 
286
334
  teardown()
@@ -288,11 +336,11 @@ describe("Repo", () => {
288
336
 
289
337
  it("doesn't find a document which doesn't exist anywhere on the network", async () => {
290
338
  const { charlieRepo } = await setup()
291
- const handle = charlieRepo.find<TestDoc>("does-not-exist" as DocumentId)
339
+ const handle = charlieRepo.find<TestDoc>(generateAutomergeUrl())
292
340
  assert.equal(handle.isReady(), false)
293
341
 
294
342
  return assert.rejects(
295
- rejectOnTimeout(handle.value(), 100),
343
+ rejectOnTimeout(handle.doc(), 10),
296
344
  "This document should not exist"
297
345
  )
298
346
  })
@@ -310,9 +358,9 @@ describe("Repo", () => {
310
358
  })
311
359
  await changePromise
312
360
 
313
- const handle3 = charlieRepo.find<TestDoc>(aliceHandle.documentId)
361
+ const handle3 = charlieRepo.find<TestDoc>(aliceHandle.url)
314
362
  await eventPromise(handle3, "change")
315
- const doc3 = await handle3.value()
363
+ const doc3 = await handle3.doc()
316
364
 
317
365
  assert.deepStrictEqual(doc3, { foo: "baz" })
318
366
 
@@ -351,7 +399,7 @@ describe("Repo", () => {
351
399
 
352
400
  // make sure the doc is ready
353
401
  if (!doc.isReady()) {
354
- await doc.value()
402
+ await doc.doc()
355
403
  }
356
404
 
357
405
  // make a random change to it
@@ -364,4 +412,5 @@ describe("Repo", () => {
364
412
  teardown()
365
413
  })
366
414
  })
415
+
367
416
  })
@@ -9,8 +9,9 @@ import A from "@automerge/automerge"
9
9
  import { DummyStorageAdapter } from "./helpers/DummyStorageAdapter.js"
10
10
  import { NodeFSStorageAdapter } from "@automerge/automerge-repo-storage-nodefs"
11
11
 
12
- import { DocumentId, StorageSubsystem } from "../src"
12
+ import { StorageSubsystem } from "../src"
13
13
  import { TestDoc } from "./types.js"
14
+ import { generateAutomergeUrl, parseAutomergeUrl } from "../src/DocUrl.js"
14
15
 
15
16
  const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "automerge-repo-tests"))
16
17
 
@@ -30,11 +31,12 @@ describe("StorageSubsystem", () => {
30
31
  })
31
32
 
32
33
  // save it to storage
33
- const key = "test-key" as DocumentId
34
+ const key = parseAutomergeUrl(generateAutomergeUrl()).encodedDocumentId
34
35
  await storage.save(key, doc)
35
36
 
36
37
  // reload it from storage
37
- const reloadedDoc = await storage.load<TestDoc>(key)
38
+ const reloadedDocBinary = await storage.loadBinary(key)
39
+ const reloadedDoc = A.load<TestDoc>(reloadedDocBinary)
38
40
 
39
41
  // check that it's the same doc
40
42
  assert.deepStrictEqual(reloadedDoc, doc)
@@ -51,17 +53,18 @@ describe("StorageSubsystem", () => {
51
53
  })
52
54
 
53
55
  // save it to storage
54
- const key = "test-key" as DocumentId
56
+ const key = parseAutomergeUrl(generateAutomergeUrl()).encodedDocumentId
55
57
  storage.save(key, doc)
56
58
 
57
59
  // create new storage subsystem to simulate a new process
58
60
  const storage2 = new StorageSubsystem(adapter)
59
61
 
60
62
  // reload it from storage
61
- const reloadedDoc = await storage2.load<TestDoc>(key)
63
+ const reloadedDocBinary = await storage2.loadBinary(key)
64
+ const reloadedDoc = A.load<TestDoc>(reloadedDocBinary)
62
65
 
63
66
  // make a change
64
- const changedDoc = A.change(reloadedDoc, "test 2", d => {
67
+ const changedDoc = A.change<any>(reloadedDoc, "test 2", d => {
65
68
  d.foo = "baz"
66
69
  })
67
70
 
@@ -69,6 +72,6 @@ describe("StorageSubsystem", () => {
69
72
  storage2.save(key, changedDoc)
70
73
 
71
74
  // check that the storage adapter contains the correct keys
72
- assert(adapter.keys().some(k => k.startsWith("test-key.incremental.")))
75
+ assert(adapter.keys().some(k => k.startsWith(`${key}.incremental.`)))
73
76
  })
74
77
  })
@@ -3,6 +3,6 @@ import { NetworkAdapter } from "../../src"
3
3
  export class DummyNetworkAdapter extends NetworkAdapter {
4
4
  sendMessage() {}
5
5
  connect(_: string) {}
6
- join(_: string) {}
7
- leave(_: string) {}
6
+ join() {}
7
+ leave() {}
8
8
  }
@@ -1,16 +1,20 @@
1
- import { StorageAdapter } from "../../src"
1
+ import { StorageAdapter, type StorageKey } from "../../src"
2
2
 
3
3
  export class DummyStorageAdapter implements StorageAdapter {
4
4
  #data: Record<string, Uint8Array> = {}
5
5
 
6
- #keyToString(key: string[]) {
6
+ #keyToString(key: string[]): string {
7
7
  return key.join(".")
8
8
  }
9
9
 
10
- async loadRange(keyPrefix: string[]): Promise<Uint8Array[]> {
10
+ #stringToKey(key: string): string[] {
11
+ return key.split(".")
12
+ }
13
+
14
+ async loadRange(keyPrefix: StorageKey): Promise<{data: Uint8Array, key: StorageKey}[]> {
11
15
  const range = Object.entries(this.#data)
12
16
  .filter(([key, _]) => key.startsWith(this.#keyToString(keyPrefix)))
13
- .map(([_, value]) => value)
17
+ .map(([key, data]) => ({key: this.#stringToKey(key), data}))
14
18
  return Promise.resolve(range)
15
19
  }
16
20