@automerge/automerge-repo 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.eslintrc +28 -0
- package/.mocharc.json +5 -0
- package/README.md +298 -0
- package/TODO.md +54 -0
- package/dist/DocCollection.d.ts +44 -0
- package/dist/DocCollection.d.ts.map +1 -0
- package/dist/DocCollection.js +85 -0
- package/dist/DocHandle.d.ts +78 -0
- package/dist/DocHandle.d.ts.map +1 -0
- package/dist/DocHandle.js +227 -0
- package/dist/EphemeralData.d.ts +27 -0
- package/dist/EphemeralData.d.ts.map +1 -0
- package/dist/EphemeralData.js +28 -0
- package/dist/Repo.d.ts +30 -0
- package/dist/Repo.d.ts.map +1 -0
- package/dist/Repo.js +97 -0
- package/dist/helpers/arraysAreEqual.d.ts +2 -0
- package/dist/helpers/arraysAreEqual.d.ts.map +1 -0
- package/dist/helpers/arraysAreEqual.js +1 -0
- package/dist/helpers/eventPromise.d.ts +5 -0
- package/dist/helpers/eventPromise.d.ts.map +1 -0
- package/dist/helpers/eventPromise.js +6 -0
- package/dist/helpers/headsAreSame.d.ts +3 -0
- package/dist/helpers/headsAreSame.d.ts.map +1 -0
- package/dist/helpers/headsAreSame.js +7 -0
- package/dist/helpers/mergeArrays.d.ts +2 -0
- package/dist/helpers/mergeArrays.d.ts.map +1 -0
- package/dist/helpers/mergeArrays.js +15 -0
- package/dist/helpers/pause.d.ts +3 -0
- package/dist/helpers/pause.d.ts.map +1 -0
- package/dist/helpers/pause.js +7 -0
- package/dist/helpers/withTimeout.d.ts +9 -0
- package/dist/helpers/withTimeout.d.ts.map +1 -0
- package/dist/helpers/withTimeout.js +22 -0
- package/dist/index.d.ts +13 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +10 -0
- package/dist/network/NetworkAdapter.d.ts +37 -0
- package/dist/network/NetworkAdapter.d.ts.map +1 -0
- package/dist/network/NetworkAdapter.js +4 -0
- package/dist/network/NetworkSubsystem.d.ts +23 -0
- package/dist/network/NetworkSubsystem.d.ts.map +1 -0
- package/dist/network/NetworkSubsystem.js +89 -0
- package/dist/storage/StorageAdapter.d.ts +6 -0
- package/dist/storage/StorageAdapter.d.ts.map +1 -0
- package/dist/storage/StorageAdapter.js +2 -0
- package/dist/storage/StorageSubsystem.d.ts +12 -0
- package/dist/storage/StorageSubsystem.d.ts.map +1 -0
- package/dist/storage/StorageSubsystem.js +65 -0
- package/dist/synchronizer/CollectionSynchronizer.d.ts +24 -0
- package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -0
- package/dist/synchronizer/CollectionSynchronizer.js +92 -0
- package/dist/synchronizer/DocSynchronizer.d.ts +18 -0
- package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -0
- package/dist/synchronizer/DocSynchronizer.js +136 -0
- package/dist/synchronizer/Synchronizer.d.ts +10 -0
- package/dist/synchronizer/Synchronizer.d.ts.map +1 -0
- package/dist/synchronizer/Synchronizer.js +3 -0
- package/dist/test-utilities/adapter-tests.d.ts +21 -0
- package/dist/test-utilities/adapter-tests.d.ts.map +1 -0
- package/dist/test-utilities/adapter-tests.js +117 -0
- package/dist/types.d.ts +10 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js +1 -0
- package/fuzz/fuzz.ts +129 -0
- package/package.json +65 -0
- package/src/DocCollection.ts +123 -0
- package/src/DocHandle.ts +386 -0
- package/src/EphemeralData.ts +46 -0
- package/src/Repo.ts +155 -0
- package/src/helpers/arraysAreEqual.ts +2 -0
- package/src/helpers/eventPromise.ts +10 -0
- package/src/helpers/headsAreSame.ts +8 -0
- package/src/helpers/mergeArrays.ts +17 -0
- package/src/helpers/pause.ts +9 -0
- package/src/helpers/withTimeout.ts +28 -0
- package/src/index.ts +22 -0
- package/src/network/NetworkAdapter.ts +54 -0
- package/src/network/NetworkSubsystem.ts +130 -0
- package/src/storage/StorageAdapter.ts +5 -0
- package/src/storage/StorageSubsystem.ts +91 -0
- package/src/synchronizer/CollectionSynchronizer.ts +112 -0
- package/src/synchronizer/DocSynchronizer.ts +182 -0
- package/src/synchronizer/Synchronizer.ts +15 -0
- package/src/test-utilities/adapter-tests.ts +163 -0
- package/src/types.ts +3 -0
- package/test/CollectionSynchronizer.test.ts +73 -0
- package/test/DocCollection.test.ts +19 -0
- package/test/DocHandle.test.ts +281 -0
- package/test/DocSynchronizer.test.ts +68 -0
- package/test/EphemeralData.test.ts +44 -0
- package/test/Network.test.ts +13 -0
- package/test/Repo.test.ts +367 -0
- package/test/StorageSubsystem.test.ts +78 -0
- package/test/helpers/DummyNetworkAdapter.ts +8 -0
- package/test/helpers/DummyStorageAdapter.ts +23 -0
- package/test/helpers/getRandomItem.ts +4 -0
- package/test/types.ts +3 -0
- package/tsconfig.json +16 -0
package/test/Repo.test.ts ADDED
@@ -0,0 +1,367 @@
+import assert from "assert"
+import { MessageChannelNetworkAdapter } from "@automerge/automerge-repo-network-messagechannel"
+
+import { ChannelId, DocHandle, DocumentId, PeerId, SharePolicy } from "../src"
+import { eventPromise } from "../src/helpers/eventPromise.js"
+import { pause, rejectOnTimeout } from "../src/helpers/pause.js"
+import { Repo } from "../src/Repo.js"
+import { DummyNetworkAdapter } from "./helpers/DummyNetworkAdapter.js"
+import { DummyStorageAdapter } from "./helpers/DummyStorageAdapter.js"
+import { getRandomItem } from "./helpers/getRandomItem.js"
+import { TestDoc } from "./types.js"
+
+describe("Repo", () => {
+  describe("single repo", () => {
+    const setup = () => {
+      const storageAdapter = new DummyStorageAdapter()
+
+      const repo = new Repo({
+        storage: storageAdapter,
+        network: [new DummyNetworkAdapter()],
+      })
+      return { repo, storageAdapter }
+    }
+
+    it("can instantiate a Repo", () => {
+      const { repo } = setup()
+      assert.notEqual(repo, null)
+      assert(repo.networkSubsystem)
+      assert(repo.storageSubsystem)
+    })
+
+    it("can create a document", () => {
+      const { repo } = setup()
+      const handle = repo.create()
+      assert.notEqual(handle.documentId, null)
+    })
+
+    it("can change a document", async () => {
+      const { repo } = setup()
+      const handle = repo.create<TestDoc>()
+      handle.change(d => {
+        d.foo = "bar"
+      })
+      const v = await handle.value()
+      assert.equal(handle.isReady(), true)
+
+      assert.equal(v.foo, "bar")
+    })
+
+    it("doesn't find a document that doesn't exist", async () => {
+      const { repo } = setup()
+      const handle = repo.find<TestDoc>("does-not-exist" as DocumentId)
+      assert.equal(handle.isReady(), false)
+
+      return assert.rejects(
+        rejectOnTimeout(handle.value(), 100),
+        "This document should not exist"
+      )
+    })
+
+    it("can find a created document", async () => {
+      const { repo } = setup()
+      const handle = repo.create<TestDoc>()
+      handle.change(d => {
+        d.foo = "bar"
+      })
+      assert.equal(handle.isReady(), true)
+
+      const bobHandle = repo.find<TestDoc>(handle.documentId)
+
+      assert.equal(handle, bobHandle)
+      assert.equal(handle.isReady(), true)
+
+      const v = await bobHandle.value()
+      assert.equal(v.foo, "bar")
+    })
+
+    it("saves the document when changed and can find it again", async () => {
+      const { repo, storageAdapter } = setup()
+      const handle = repo.create<TestDoc>()
+
+      handle.change(d => {
+        d.foo = "bar"
+      })
+
+      assert.equal(handle.isReady(), true)
+
+      await pause()
+
+      const repo2 = new Repo({
+        storage: storageAdapter,
+        network: [],
+      })
+
+      const bobHandle = repo2.find<TestDoc>(handle.documentId)
+
+      const v = await bobHandle.value()
+      assert.equal(v.foo, "bar")
+    })
+
+    it("can delete an existing document", async () => {
+      const { repo } = setup()
+      const handle = repo.create<TestDoc>()
+      handle.change(d => {
+        d.foo = "bar"
+      })
+      assert.equal(handle.isReady(), true)
+      await handle.value()
+      repo.delete(handle.documentId)
+
+      assert(handle.isDeleted())
+      assert.equal(repo.handles[handle.documentId], undefined)
+
+      const bobHandle = repo.find<TestDoc>(handle.documentId)
+      await assert.rejects(
+        rejectOnTimeout(bobHandle.value(), 10),
+        "document should have been deleted"
+      )
+
+      assert(!bobHandle.isReady())
+    })
+
+    it("deleting a document emits an event", async done => {
+      const { repo } = setup()
+      const handle = repo.create<TestDoc>()
+      handle.change(d => {
+        d.foo = "bar"
+      })
+      assert.equal(handle.isReady(), true)
+
+      repo.on("delete-document", ({ documentId }) => {
+        assert.equal(documentId, handle.documentId)
+
+        done()
+      })
+
+      repo.delete(handle.documentId)
+    })
+  })
+
+  describe("sync", async () => {
+    const setup = async () => {
+      // Set up three repos; connect Alice to Bob, and Bob to Charlie
+
+      const aliceBobChannel = new MessageChannel()
+      const bobCharlieChannel = new MessageChannel()
+
+      const { port1: aliceToBob, port2: bobToAlice } = aliceBobChannel
+      const { port1: bobToCharlie, port2: charlieToBob } = bobCharlieChannel
+
+      const charlieExcludedDocuments: DocumentId[] = []
+      const bobExcludedDocuments: DocumentId[] = []
+
+      const sharePolicy: SharePolicy = async (peerId, documentId) => {
+        if (documentId === undefined) return false
+
+        // make sure that charlie never gets excluded documents
+        if (
+          charlieExcludedDocuments.includes(documentId) &&
+          peerId === "charlie"
+        )
+          return false
+
+        // make sure that bob never gets excluded documents
+        if (bobExcludedDocuments.includes(documentId) && peerId === "bob")
+          return false
+
+        return true
+      }
+
+      const aliceRepo = new Repo({
+        network: [new MessageChannelNetworkAdapter(aliceToBob)],
+        peerId: "alice" as PeerId,
+        sharePolicy,
+      })
+
+      const bobRepo = new Repo({
+        network: [
+          new MessageChannelNetworkAdapter(bobToAlice),
+          new MessageChannelNetworkAdapter(bobToCharlie),
+        ],
+        peerId: "bob" as PeerId,
+        sharePolicy,
+      })
+
+      const charlieRepo = new Repo({
+        network: [new MessageChannelNetworkAdapter(charlieToBob)],
+        peerId: "charlie" as PeerId,
+      })
+
+      const aliceHandle = aliceRepo.create<TestDoc>()
+      aliceHandle.change(d => {
+        d.foo = "bar"
+      })
+
+      const notForCharlieHandle = aliceRepo.create<TestDoc>()
+      const notForCharlie = notForCharlieHandle.documentId
+      charlieExcludedDocuments.push(notForCharlie)
+      notForCharlieHandle.change(d => {
+        d.foo = "baz"
+      })
+
+      const notForBobHandle = aliceRepo.create<TestDoc>()
+      const notForBob = notForBobHandle.documentId
+      bobExcludedDocuments.push(notForBob)
+      notForBobHandle.change(d => {
+        d.foo = "bap"
+      })
+
+      await Promise.all([
+        eventPromise(aliceRepo.networkSubsystem, "peer"),
+        eventPromise(bobRepo.networkSubsystem, "peer"),
+        eventPromise(charlieRepo.networkSubsystem, "peer"),
+      ])
+
+      const teardown = () => {
+        aliceBobChannel.port1.close()
+        bobCharlieChannel.port1.close()
+      }
+
+      return {
+        aliceRepo,
+        bobRepo,
+        charlieRepo,
+        aliceHandle,
+        notForCharlie,
+        notForBob,
+        teardown,
+      }
+    }
+
+    it("changes are replicated from aliceRepo to bobRepo", async () => {
+      const { bobRepo, aliceHandle, teardown } = await setup()
+
+      const bobHandle = bobRepo.find<TestDoc>(aliceHandle.documentId)
+      await eventPromise(bobHandle, "change")
+      const bobDoc = await bobHandle.value()
+      assert.deepStrictEqual(bobDoc, { foo: "bar" })
+      teardown()
+    })
+
+    it("can load a document from aliceRepo on charlieRepo", async () => {
+      const { charlieRepo, aliceHandle, teardown } = await setup()
+
+      const handle3 = charlieRepo.find<TestDoc>(aliceHandle.documentId)
+      await eventPromise(handle3, "change")
+      const doc3 = await handle3.value()
+      assert.deepStrictEqual(doc3, { foo: "bar" })
+      teardown()
+    })
+
+    it("charlieRepo doesn't have a document it's not supposed to have", async () => {
+      const { aliceRepo, bobRepo, charlieRepo, notForCharlie, teardown } =
+        await setup()
+
+      await Promise.all([
+        eventPromise(bobRepo.networkSubsystem, "message"),
+        eventPromise(charlieRepo.networkSubsystem, "message"),
+      ])
+
+      assert.notEqual(aliceRepo.handles[notForCharlie], undefined, "alice yes")
+      assert.notEqual(bobRepo.handles[notForCharlie], undefined, "bob yes")
+      assert.equal(charlieRepo.handles[notForCharlie], undefined, "charlie no")
+
+      teardown()
+    })
+
+    it("charlieRepo can request a document not initially shared with it", async () => {
+      const { charlieRepo, notForCharlie, teardown } = await setup()
+
+      const handle = charlieRepo.find<TestDoc>(notForCharlie)
+      const doc = await handle.value()
+
+      assert.deepStrictEqual(doc, { foo: "baz" })
+
+      teardown()
+    })
+
+    it("charlieRepo can request a document across a network of multiple peers", async () => {
+      const { charlieRepo, notForBob, teardown } = await setup()
+
+      const handle = charlieRepo.find<TestDoc>(notForBob)
+      const doc = await handle.value()
+      assert.deepStrictEqual(doc, { foo: "bap" })
+
+      teardown()
+    })
+
+    it("doesn't find a document which doesn't exist anywhere on the network", async () => {
+      const { charlieRepo } = await setup()
+      const handle = charlieRepo.find<TestDoc>("does-not-exist" as DocumentId)
+      assert.equal(handle.isReady(), false)
+
+      return assert.rejects(
+        rejectOnTimeout(handle.value(), 100),
+        "This document should not exist"
+      )
+    })
+
+    it("a deleted document from charlieRepo can be refetched", async () => {
+      const { charlieRepo, aliceHandle, teardown } = await setup()
+
+      const deletePromise = eventPromise(charlieRepo, "delete-document")
+      charlieRepo.delete(aliceHandle.documentId)
+      await deletePromise
+
+      const changePromise = eventPromise(aliceHandle, "change")
+      aliceHandle.change(d => {
+        d.foo = "baz"
+      })
+      await changePromise
+
+      const handle3 = charlieRepo.find<TestDoc>(aliceHandle.documentId)
+      await eventPromise(handle3, "change")
+      const doc3 = await handle3.value()
+
+      assert.deepStrictEqual(doc3, { foo: "baz" })
+
+      teardown()
+    })
+
+    it("can broadcast a message", async () => {
+      const { aliceRepo, bobRepo, teardown } = await setup()
+
+      const channelId = "broadcast" as ChannelId
+      const data = { presence: "bob" }
+
+      bobRepo.ephemeralData.broadcast(channelId, data)
+      const d = await eventPromise(aliceRepo.ephemeralData, "data")
+
+      assert.deepStrictEqual(d.data, data)
+      teardown()
+    })
+
+    it("syncs a bunch of changes", async () => {
+      const { aliceRepo, bobRepo, charlieRepo, teardown } = await setup()
+
+      // HACK: yield to give repos time to get the one doc that aliceRepo created
+      await pause(50)
+
+      for (let i = 0; i < 100; i++) {
+        // pick a repo
+        const repo = getRandomItem([aliceRepo, bobRepo, charlieRepo])
+        const docs = Object.values(repo.handles)
+        const doc =
+          Math.random() < 0.5
+            ? // heads, create a new doc
+              repo.create<TestDoc>()
+            : // tails, pick a random doc
+              (getRandomItem(docs) as DocHandle<TestDoc>)
+
+        // make sure the doc is ready
+        if (!doc.isReady()) {
+          await doc.value()
+        }
+
+        // make a random change to it
+        doc.change(d => {
+          d.foo = Math.random().toString()
+        })
+      }
+      await pause(500)
+
+      teardown()
+    })
+  })
+})
package/test/StorageSubsystem.test.ts ADDED
@@ -0,0 +1,78 @@
+import fs from "fs"
+import os from "os"
+import path from "path"
+
+import assert from "assert"
+
+import A from "@automerge/automerge"
+
+import { DummyStorageAdapter } from "./helpers/DummyStorageAdapter.js"
+import { NodeFSStorageAdapter } from "@automerge/automerge-repo-storage-nodefs"
+
+import { DocumentId, StorageSubsystem } from "../src"
+import { TestDoc } from "./types.js"
+
+const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "automerge-repo-tests"))
+
+describe("StorageSubsystem", () => {
+  const adaptersToTest = {
+    dummyStorageAdapter: new DummyStorageAdapter(),
+    nodeFSStorageAdapter: new NodeFSStorageAdapter(tempDir),
+  }
+
+  Object.entries(adaptersToTest).forEach(([adapterName, adapter]) => {
+    describe(adapterName, () => {
+      it("can store and retrieve an Automerge document", async () => {
+        const storage = new StorageSubsystem(adapter)
+
+        const doc = A.change(A.init<any>(), "test", d => {
+          d.foo = "bar"
+        })
+
+        // save it to storage
+        const key = "test-key" as DocumentId
+        storage.save(key, doc)
+
+        // reload it from storage
+        const reloadedDoc = await storage.load<TestDoc>(key)
+
+        // check that it's the same doc
+        assert.deepStrictEqual(reloadedDoc, doc)
+      })
+    })
+  })
+
+  it("correctly stores incremental changes following a load", async () => {
+    const adapter = new DummyStorageAdapter()
+    const storage = new StorageSubsystem(adapter)
+
+    const doc = A.change(A.init<any>(), "test", d => {
+      d.foo = "bar"
+    })
+
+    // save it to storage
+    const key = "test-key" as DocumentId
+    storage.save(key, doc)
+
+    // create new storage subsystem to simulate a new process
+    const storage2 = new StorageSubsystem(adapter)
+
+    // reload it from storage
+    const reloadedDoc = await storage2.load<TestDoc>(key)
+
+    // make a change
+    const changedDoc = A.change(reloadedDoc, "test 2", d => {
+      d.foo = "baz"
+    })
+
+    // save it to storage
+    storage2.save(key, changedDoc)
+
+    // check that the storage adapter contains the correct keys
+    assert(adapter.keys().some(k => k.endsWith("1")))
+
+    // check that the last incrementalSave is not a full save
+    const bin = await adapter.load((key + ".incremental.1") as DocumentId)
+    assert.throws(() => A.load(bin!))
+  })
+})
package/test/helpers/DummyStorageAdapter.ts ADDED
@@ -0,0 +1,23 @@
+import { DocumentId, StorageAdapter } from "../../src"
+
+export class DummyStorageAdapter implements StorageAdapter {
+  #data: Record<DocumentId, Uint8Array> = {}
+
+  load(docId: DocumentId) {
+    return new Promise<Uint8Array | null>(resolve =>
+      resolve(this.#data[docId] || null)
+    )
+  }
+
+  save(docId: DocumentId, binary: Uint8Array) {
+    this.#data[docId] = binary
+  }
+
+  remove(docId: DocumentId) {
+    delete this.#data[docId]
+  }
+
+  keys() {
+    return Object.keys(this.#data)
+  }
+}
package/test/types.ts ADDED
package/tsconfig.json ADDED
@@ -0,0 +1,16 @@
+{
+  "compilerOptions": {
+    "target": "ESNext",
+    "jsx": "react",
+    "module": "ESNext",
+    "moduleResolution": "node",
+    "declaration": true,
+    "declarationMap": true,
+    "outDir": "./dist",
+    "esModuleInterop": true,
+    "forceConsistentCasingInFileNames": true,
+    "strict": true,
+    "skipLibCheck": true
+  },
+  "include": ["src/**/*.ts"]
+}