@automerge/automerge-repo 2.0.0-collectionsync-alpha.1 → 2.0.0
- package/README.md +8 -8
- package/dist/AutomergeUrl.d.ts +17 -5
- package/dist/AutomergeUrl.d.ts.map +1 -1
- package/dist/AutomergeUrl.js +71 -24
- package/dist/DocHandle.d.ts +33 -41
- package/dist/DocHandle.d.ts.map +1 -1
- package/dist/DocHandle.js +105 -66
- package/dist/FindProgress.d.ts +30 -0
- package/dist/FindProgress.d.ts.map +1 -0
- package/dist/FindProgress.js +1 -0
- package/dist/RemoteHeadsSubscriptions.d.ts +4 -5
- package/dist/RemoteHeadsSubscriptions.d.ts.map +1 -1
- package/dist/RemoteHeadsSubscriptions.js +4 -1
- package/dist/Repo.d.ts +24 -5
- package/dist/Repo.d.ts.map +1 -1
- package/dist/Repo.js +355 -169
- package/dist/helpers/abortable.d.ts +36 -0
- package/dist/helpers/abortable.d.ts.map +1 -0
- package/dist/helpers/abortable.js +47 -0
- package/dist/helpers/arraysAreEqual.d.ts.map +1 -1
- package/dist/helpers/bufferFromHex.d.ts +3 -0
- package/dist/helpers/bufferFromHex.d.ts.map +1 -0
- package/dist/helpers/bufferFromHex.js +13 -0
- package/dist/helpers/debounce.d.ts.map +1 -1
- package/dist/helpers/eventPromise.d.ts.map +1 -1
- package/dist/helpers/headsAreSame.d.ts +2 -2
- package/dist/helpers/headsAreSame.d.ts.map +1 -1
- package/dist/helpers/mergeArrays.d.ts +1 -1
- package/dist/helpers/mergeArrays.d.ts.map +1 -1
- package/dist/helpers/pause.d.ts.map +1 -1
- package/dist/helpers/tests/network-adapter-tests.d.ts.map +1 -1
- package/dist/helpers/tests/network-adapter-tests.js +13 -13
- package/dist/helpers/tests/storage-adapter-tests.d.ts.map +1 -1
- package/dist/helpers/tests/storage-adapter-tests.js +6 -9
- package/dist/helpers/throttle.d.ts.map +1 -1
- package/dist/helpers/withTimeout.d.ts.map +1 -1
- package/dist/index.d.ts +35 -7
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +37 -6
- package/dist/network/NetworkSubsystem.d.ts +0 -1
- package/dist/network/NetworkSubsystem.d.ts.map +1 -1
- package/dist/network/NetworkSubsystem.js +0 -3
- package/dist/network/messages.d.ts +1 -7
- package/dist/network/messages.d.ts.map +1 -1
- package/dist/network/messages.js +1 -2
- package/dist/storage/StorageAdapter.d.ts +0 -9
- package/dist/storage/StorageAdapter.d.ts.map +1 -1
- package/dist/storage/StorageAdapter.js +0 -33
- package/dist/storage/StorageSubsystem.d.ts +6 -2
- package/dist/storage/StorageSubsystem.d.ts.map +1 -1
- package/dist/storage/StorageSubsystem.js +131 -37
- package/dist/storage/keyHash.d.ts +1 -1
- package/dist/storage/keyHash.d.ts.map +1 -1
- package/dist/synchronizer/CollectionSynchronizer.d.ts +3 -4
- package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
- package/dist/synchronizer/CollectionSynchronizer.js +32 -26
- package/dist/synchronizer/DocSynchronizer.d.ts +8 -8
- package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -1
- package/dist/synchronizer/DocSynchronizer.js +205 -79
- package/dist/types.d.ts +4 -1
- package/dist/types.d.ts.map +1 -1
- package/fuzz/fuzz.ts +3 -3
- package/package.json +4 -5
- package/src/AutomergeUrl.ts +101 -26
- package/src/DocHandle.ts +158 -77
- package/src/FindProgress.ts +48 -0
- package/src/RemoteHeadsSubscriptions.ts +11 -9
- package/src/Repo.ts +465 -180
- package/src/helpers/abortable.ts +62 -0
- package/src/helpers/bufferFromHex.ts +14 -0
- package/src/helpers/headsAreSame.ts +2 -2
- package/src/helpers/tests/network-adapter-tests.ts +14 -13
- package/src/helpers/tests/storage-adapter-tests.ts +13 -24
- package/src/index.ts +57 -38
- package/src/network/NetworkSubsystem.ts +0 -4
- package/src/network/messages.ts +2 -11
- package/src/storage/StorageAdapter.ts +0 -42
- package/src/storage/StorageSubsystem.ts +155 -45
- package/src/storage/keyHash.ts +1 -1
- package/src/synchronizer/CollectionSynchronizer.ts +42 -29
- package/src/synchronizer/DocSynchronizer.ts +263 -89
- package/src/types.ts +4 -1
- package/test/AutomergeUrl.test.ts +130 -0
- package/test/CollectionSynchronizer.test.ts +6 -8
- package/test/DocHandle.test.ts +161 -77
- package/test/DocSynchronizer.test.ts +11 -9
- package/test/RemoteHeadsSubscriptions.test.ts +1 -1
- package/test/Repo.test.ts +406 -341
- package/test/StorageSubsystem.test.ts +95 -20
- package/test/remoteHeads.test.ts +28 -13
- package/dist/CollectionHandle.d.ts +0 -14
- package/dist/CollectionHandle.d.ts.map +0 -1
- package/dist/CollectionHandle.js +0 -37
- package/dist/DocUrl.d.ts +0 -47
- package/dist/DocUrl.d.ts.map +0 -1
- package/dist/DocUrl.js +0 -72
- package/dist/EphemeralData.d.ts +0 -20
- package/dist/EphemeralData.d.ts.map +0 -1
- package/dist/EphemeralData.js +0 -1
- package/dist/ferigan.d.ts +0 -51
- package/dist/ferigan.d.ts.map +0 -1
- package/dist/ferigan.js +0 -98
- package/dist/src/DocHandle.d.ts +0 -182
- package/dist/src/DocHandle.d.ts.map +0 -1
- package/dist/src/DocHandle.js +0 -405
- package/dist/src/DocUrl.d.ts +0 -49
- package/dist/src/DocUrl.d.ts.map +0 -1
- package/dist/src/DocUrl.js +0 -72
- package/dist/src/EphemeralData.d.ts +0 -19
- package/dist/src/EphemeralData.d.ts.map +0 -1
- package/dist/src/EphemeralData.js +0 -1
- package/dist/src/Repo.d.ts +0 -74
- package/dist/src/Repo.d.ts.map +0 -1
- package/dist/src/Repo.js +0 -208
- package/dist/src/helpers/arraysAreEqual.d.ts +0 -2
- package/dist/src/helpers/arraysAreEqual.d.ts.map +0 -1
- package/dist/src/helpers/arraysAreEqual.js +0 -2
- package/dist/src/helpers/cbor.d.ts +0 -4
- package/dist/src/helpers/cbor.d.ts.map +0 -1
- package/dist/src/helpers/cbor.js +0 -8
- package/dist/src/helpers/eventPromise.d.ts +0 -11
- package/dist/src/helpers/eventPromise.d.ts.map +0 -1
- package/dist/src/helpers/eventPromise.js +0 -7
- package/dist/src/helpers/headsAreSame.d.ts +0 -2
- package/dist/src/helpers/headsAreSame.d.ts.map +0 -1
- package/dist/src/helpers/headsAreSame.js +0 -4
- package/dist/src/helpers/mergeArrays.d.ts +0 -2
- package/dist/src/helpers/mergeArrays.d.ts.map +0 -1
- package/dist/src/helpers/mergeArrays.js +0 -15
- package/dist/src/helpers/pause.d.ts +0 -6
- package/dist/src/helpers/pause.d.ts.map +0 -1
- package/dist/src/helpers/pause.js +0 -10
- package/dist/src/helpers/tests/network-adapter-tests.d.ts +0 -21
- package/dist/src/helpers/tests/network-adapter-tests.d.ts.map +0 -1
- package/dist/src/helpers/tests/network-adapter-tests.js +0 -122
- package/dist/src/helpers/withTimeout.d.ts +0 -12
- package/dist/src/helpers/withTimeout.d.ts.map +0 -1
- package/dist/src/helpers/withTimeout.js +0 -24
- package/dist/src/index.d.ts +0 -53
- package/dist/src/index.d.ts.map +0 -1
- package/dist/src/index.js +0 -40
- package/dist/src/network/NetworkAdapter.d.ts +0 -26
- package/dist/src/network/NetworkAdapter.d.ts.map +0 -1
- package/dist/src/network/NetworkAdapter.js +0 -4
- package/dist/src/network/NetworkSubsystem.d.ts +0 -23
- package/dist/src/network/NetworkSubsystem.d.ts.map +0 -1
- package/dist/src/network/NetworkSubsystem.js +0 -120
- package/dist/src/network/messages.d.ts +0 -85
- package/dist/src/network/messages.d.ts.map +0 -1
- package/dist/src/network/messages.js +0 -23
- package/dist/src/storage/StorageAdapter.d.ts +0 -14
- package/dist/src/storage/StorageAdapter.d.ts.map +0 -1
- package/dist/src/storage/StorageAdapter.js +0 -1
- package/dist/src/storage/StorageSubsystem.d.ts +0 -12
- package/dist/src/storage/StorageSubsystem.d.ts.map +0 -1
- package/dist/src/storage/StorageSubsystem.js +0 -145
- package/dist/src/synchronizer/CollectionSynchronizer.d.ts +0 -25
- package/dist/src/synchronizer/CollectionSynchronizer.d.ts.map +0 -1
- package/dist/src/synchronizer/CollectionSynchronizer.js +0 -106
- package/dist/src/synchronizer/DocSynchronizer.d.ts +0 -29
- package/dist/src/synchronizer/DocSynchronizer.d.ts.map +0 -1
- package/dist/src/synchronizer/DocSynchronizer.js +0 -263
- package/dist/src/synchronizer/Synchronizer.d.ts +0 -9
- package/dist/src/synchronizer/Synchronizer.d.ts.map +0 -1
- package/dist/src/synchronizer/Synchronizer.js +0 -2
- package/dist/src/types.d.ts +0 -16
- package/dist/src/types.d.ts.map +0 -1
- package/dist/src/types.js +0 -1
- package/dist/test/CollectionSynchronizer.test.d.ts +0 -2
- package/dist/test/CollectionSynchronizer.test.d.ts.map +0 -1
- package/dist/test/CollectionSynchronizer.test.js +0 -57
- package/dist/test/DocHandle.test.d.ts +0 -2
- package/dist/test/DocHandle.test.d.ts.map +0 -1
- package/dist/test/DocHandle.test.js +0 -238
- package/dist/test/DocSynchronizer.test.d.ts +0 -2
- package/dist/test/DocSynchronizer.test.d.ts.map +0 -1
- package/dist/test/DocSynchronizer.test.js +0 -111
- package/dist/test/Network.test.d.ts +0 -2
- package/dist/test/Network.test.d.ts.map +0 -1
- package/dist/test/Network.test.js +0 -11
- package/dist/test/Repo.test.d.ts +0 -2
- package/dist/test/Repo.test.d.ts.map +0 -1
- package/dist/test/Repo.test.js +0 -568
- package/dist/test/StorageSubsystem.test.d.ts +0 -2
- package/dist/test/StorageSubsystem.test.d.ts.map +0 -1
- package/dist/test/StorageSubsystem.test.js +0 -56
- package/dist/test/helpers/DummyNetworkAdapter.d.ts +0 -9
- package/dist/test/helpers/DummyNetworkAdapter.d.ts.map +0 -1
- package/dist/test/helpers/DummyNetworkAdapter.js +0 -15
- package/dist/test/helpers/DummyStorageAdapter.d.ts +0 -16
- package/dist/test/helpers/DummyStorageAdapter.d.ts.map +0 -1
- package/dist/test/helpers/DummyStorageAdapter.js +0 -33
- package/dist/test/helpers/generate-large-object.d.ts +0 -5
- package/dist/test/helpers/generate-large-object.d.ts.map +0 -1
- package/dist/test/helpers/generate-large-object.js +0 -9
- package/dist/test/helpers/getRandomItem.d.ts +0 -2
- package/dist/test/helpers/getRandomItem.d.ts.map +0 -1
- package/dist/test/helpers/getRandomItem.js +0 -4
- package/dist/test/types.d.ts +0 -4
- package/dist/test/types.d.ts.map +0 -1
- package/dist/test/types.js +0 -1
- package/src/CollectionHandle.ts +0 -54
- package/src/ferigan.ts +0 -184
package/dist/test/Repo.test.js
DELETED
@@ -1,568 +0,0 @@
-import assert from "assert"
-import { MessageChannelNetworkAdapter } from "@automerge/automerge-repo-network-messagechannel"
-import { BroadcastChannelNetworkAdapter } from "@automerge/automerge-repo-network-broadcastchannel"
-import { eventPromise } from "../src/helpers/eventPromise.js"
-import { pause, rejectOnTimeout } from "../src/helpers/pause.js"
-import { Repo } from "../src/Repo.js"
-import { DummyNetworkAdapter } from "./helpers/DummyNetworkAdapter.js"
-import { DummyStorageAdapter } from "./helpers/DummyStorageAdapter.js"
-import { getRandomItem } from "./helpers/getRandomItem.js"
-import { generateAutomergeUrl, stringifyAutomergeUrl } from "../src/DocUrl.js"
-import { READY } from "../src/DocHandle.js"
-import { generateLargeObject } from "./helpers/generate-large-object.js"
-import { parseAutomergeUrl } from "../dist/DocUrl.js"
-import * as Uuid from "uuid"
-describe("Repo", () => {
-  describe("single repo", () => {
-    const setup = (networkReady = true) => {
-      const storageAdapter = new DummyStorageAdapter()
-      const networkAdapter = new DummyNetworkAdapter(networkReady)
-      const repo = new Repo({
-        storage: storageAdapter,
-        network: [networkAdapter],
-      })
-      return { repo, storageAdapter, networkAdapter }
-    }
-    it("can instantiate a Repo", () => {
-      const { repo } = setup()
-      assert.notEqual(repo, null)
-      assert(repo.networkSubsystem)
-      assert(repo.storageSubsystem)
-    })
-    it("can create a document", () => {
-      const { repo } = setup()
-      const handle = repo.create()
-      assert.notEqual(handle.documentId, null)
-      assert.equal(handle.isReady(), true)
-    })
-    it("can find a document once it's created", () => {
-      const { repo } = setup()
-      const handle = repo.create()
-      handle.change(d => {
-        d.foo = "bar"
-      })
-      const handle2 = repo.find(handle.url)
-      assert.equal(handle, handle2)
-      assert.deepEqual(handle2.docSync(), { foo: "bar" })
-    })
-    it("can find a document using a legacy UUID (for now)", () => {
-      const { repo } = setup()
-      const handle = repo.create()
-      handle.change(d => {
-        d.foo = "bar"
-      })
-      const url = handle.url
-      const { binaryDocumentId } = parseAutomergeUrl(url)
-      const legacyDocumentId = Uuid.stringify(binaryDocumentId) // a white lie
-      const handle2 = repo.find(legacyDocumentId)
-      assert.equal(handle, handle2)
-      assert.deepEqual(handle2.docSync(), { foo: "bar" })
-    })
-    it("can change a document", async () => {
-      const { repo } = setup()
-      const handle = repo.create()
-      handle.change(d => {
-        d.foo = "bar"
-      })
-      const v = await handle.doc()
-      assert.equal(handle.isReady(), true)
-      assert.equal(v?.foo, "bar")
-    })
-    it("throws an error if we try to find a handle with an invalid AutomergeUrl", async () => {
-      const { repo } = setup()
-      try {
-        repo.find("invalid-url")
-      } catch (e) {
-        assert.equal(e.message, "Invalid AutomergeUrl: 'invalid-url'")
-      }
-    })
-    it("doesn't find a document that doesn't exist", async () => {
-      const { repo } = setup()
-      const handle = repo.find(generateAutomergeUrl())
-      assert.equal(handle.isReady(), false)
-      const doc = await handle.doc()
-      assert.equal(doc, undefined)
-    })
-    it("fires an 'unavailable' event when you don't have the document locally and network to connect to", async () => {
-      const { repo } = setup()
-      const url = generateAutomergeUrl()
-      const handle = repo.find(url)
-      assert.equal(handle.isReady(), false)
-      await eventPromise(handle, "unavailable")
-    })
-    it("doesn't mark a document as unavailable until network adapters are ready", async () => {
-      const { repo, networkAdapter } = setup(false)
-      const url = generateAutomergeUrl()
-      const handle = repo.find(url)
-      let wasUnavailable = false
-      handle.on("unavailable", () => {
-        wasUnavailable = true
-      })
-      await pause(50)
-      assert.equal(wasUnavailable, false)
-      networkAdapter.emit("ready", { network: networkAdapter })
-      await eventPromise(handle, "unavailable")
-    })
-    it("can find a created document", async () => {
-      const { repo } = setup()
-      const handle = repo.create()
-      handle.change(d => {
-        d.foo = "bar"
-      })
-      assert.equal(handle.isReady(), true)
-      const bobHandle = repo.find(handle.url)
-      assert.equal(handle, bobHandle)
-      assert.equal(handle.isReady(), true)
-      const v = await bobHandle.doc()
-      assert.equal(v?.foo, "bar")
-    })
-    it("saves the document when creating it", async () => {
-      const { repo, storageAdapter } = setup()
-      const handle = repo.create()
-      const repo2 = new Repo({
-        storage: storageAdapter,
-        network: [],
-      })
-      const bobHandle = repo2.find(handle.url)
-      await bobHandle.whenReady()
-      assert.equal(bobHandle.isReady(), true)
-    })
-    it("saves the document when changed and can find it again", async () => {
-      const { repo, storageAdapter } = setup()
-      const handle = repo.create()
-      handle.change(d => {
-        d.foo = "bar"
-      })
-      assert.equal(handle.isReady(), true)
-      await pause()
-      const repo2 = new Repo({
-        storage: storageAdapter,
-        network: [],
-      })
-      const bobHandle = repo2.find(handle.url)
-      const v = await bobHandle.doc()
-      assert.equal(v?.foo, "bar")
-    })
-    it("can delete an existing document", async () => {
-      const { repo } = setup()
-      const handle = repo.create()
-      handle.change(d => {
-        d.foo = "bar"
-      })
-      // we now have a snapshot and an incremental change in storage
-      assert.equal(handle.isReady(), true)
-      await handle.doc()
-      repo.delete(handle.documentId)
-      assert(handle.isDeleted())
-      assert.equal(repo.handles[handle.documentId], undefined)
-      const bobHandle = repo.find(handle.url)
-      await assert.rejects(
-        rejectOnTimeout(bobHandle.doc(), 10),
-        "document should have been deleted"
-      )
-      assert(!bobHandle.isReady())
-    })
-    it("can delete an existing document by url", async () => {
-      const { repo } = setup()
-      const handle = repo.create()
-      handle.change(d => {
-        d.foo = "bar"
-      })
-      assert.equal(handle.isReady(), true)
-      await handle.doc()
-      repo.delete(handle.url)
-      assert(handle.isDeleted())
-      assert.equal(repo.handles[handle.documentId], undefined)
-      const bobHandle = repo.find(handle.url)
-      await assert.rejects(
-        rejectOnTimeout(bobHandle.doc(), 10),
-        "document should have been deleted"
-      )
-      assert(!bobHandle.isReady())
-    })
-    it("deleting a document emits an event", async done => {
-      const { repo } = setup()
-      const handle = repo.create()
-      handle.change(d => {
-        d.foo = "bar"
-      })
-      assert.equal(handle.isReady(), true)
-      repo.on("delete-document", ({ documentId }) => {
-        assert.equal(documentId, handle.documentId)
-        done()
-      })
-      repo.delete(handle.documentId)
-    })
-    it("storage state doesn't change across reloads when the document hasn't changed", async () => {
-      const storage = new DummyStorageAdapter()
-      const repo = new Repo({
-        storage,
-        network: [],
-      })
-      const handle = repo.create()
-      handle.change(d => {
-        d.count = 0
-      })
-      handle.change(d => {
-        d.count = 1
-      })
-      const initialKeys = storage.keys()
-      const repo2 = new Repo({
-        storage,
-        network: [],
-      })
-      const handle2 = repo2.find(handle.url)
-      await handle2.doc()
-      assert.deepEqual(storage.keys(), initialKeys)
-    })
-    it("doesn't delete a document from storage when we refresh", async () => {
-      const storage = new DummyStorageAdapter()
-      const repo = new Repo({
-        storage,
-        network: [],
-      })
-      const handle = repo.create()
-      handle.change(d => {
-        d.count = 0
-      })
-      handle.change(d => {
-        d.count = 1
-      })
-      for (let i = 0; i < 3; i++) {
-        const repo2 = new Repo({
-          storage,
-          network: [],
-        })
-        const handle2 = repo2.find(handle.url)
-        await handle2.doc()
-        assert(storage.keys().length !== 0)
-      }
-    })
-    it("doesn't create multiple snapshots in storage when a series of large changes are made in succession", async () => {
-      const { repo, storageAdapter } = setup()
-      const handle = repo.create()
-      for (let i = 0; i < 5; i++) {
-        handle.change(d => {
-          d.objects = []
-          d.objects.push(generateLargeObject(100))
-        })
-      }
-      const storageKeyTypes = storageAdapter.keys().map(k => k.split(".")[1])
-      assert(storageKeyTypes.filter(k => k === "snapshot").length === 1)
-    })
-  })
-  describe("sync", async () => {
-    const charlieExcludedDocuments = []
-    const bobExcludedDocuments = []
-    const sharePolicy = async (peerId, documentId) => {
-      if (documentId === undefined) return false
-      // make sure that charlie never gets excluded documents
-      if (charlieExcludedDocuments.includes(documentId) && peerId === "charlie")
-        return false
-      // make sure that bob never gets excluded documents
-      if (bobExcludedDocuments.includes(documentId) && peerId === "bob")
-        return false
-      return true
-    }
-    const setupRepos = (connectAlice = true) => {
-      // Set up three repos; connect Alice to Bob, and Bob to Charlie
-      const aliceBobChannel = new MessageChannel()
-      const bobCharlieChannel = new MessageChannel()
-      const { port1: aliceToBob, port2: bobToAlice } = aliceBobChannel
-      const { port1: bobToCharlie, port2: charlieToBob } = bobCharlieChannel
-      const aliceNetworkAdapter = new MessageChannelNetworkAdapter(aliceToBob)
-      const aliceRepo = new Repo({
-        network: connectAlice ? [aliceNetworkAdapter] : [],
-        peerId: "alice",
-        sharePolicy,
-      })
-      const bobRepo = new Repo({
-        network: [
-          new MessageChannelNetworkAdapter(bobToAlice),
-          new MessageChannelNetworkAdapter(bobToCharlie),
-        ],
-        peerId: "bob",
-        sharePolicy,
-      })
-      const charlieRepo = new Repo({
-        network: [new MessageChannelNetworkAdapter(charlieToBob)],
-        peerId: "charlie",
-      })
-      const teardown = () => {
-        aliceBobChannel.port1.close()
-        bobCharlieChannel.port1.close()
-      }
-      function doConnectAlice() {
-        aliceRepo.networkSubsystem.addNetworkAdapter(
-          new MessageChannelNetworkAdapter(aliceToBob)
-        )
-        //bobRepo.networkSubsystem.addNetworkAdapter(new MessageChannelNetworkAdapter(bobToAlice))
-      }
-      if (connectAlice) {
-        doConnectAlice()
-      }
-      return {
-        teardown,
-        aliceRepo,
-        bobRepo,
-        charlieRepo,
-        connectAliceToBob: doConnectAlice,
-      }
-    }
-    const setup = async (connectAlice = true) => {
-      const { teardown, aliceRepo, bobRepo, charlieRepo, connectAliceToBob } =
-        setupRepos(connectAlice)
-      const aliceHandle = aliceRepo.create()
-      aliceHandle.change(d => {
-        d.foo = "bar"
-      })
-      const notForCharlieHandle = aliceRepo.create()
-      const notForCharlie = notForCharlieHandle.documentId
-      charlieExcludedDocuments.push(notForCharlie)
-      notForCharlieHandle.change(d => {
-        d.foo = "baz"
-      })
-      const notForBobHandle = aliceRepo.create()
-      const notForBob = notForBobHandle.documentId
-      bobExcludedDocuments.push(notForBob)
-      notForBobHandle.change(d => {
-        d.foo = "bap"
-      })
-      await Promise.all([
-        ...(connectAlice
-          ? [eventPromise(aliceRepo.networkSubsystem, "peer")]
-          : []),
-        eventPromise(bobRepo.networkSubsystem, "peer"),
-        eventPromise(charlieRepo.networkSubsystem, "peer"),
-      ])
-      return {
-        aliceRepo,
-        bobRepo,
-        charlieRepo,
-        aliceHandle,
-        notForCharlie,
-        notForBob,
-        teardown,
-        connectAliceToBob,
-      }
-    }
-    it("changes are replicated from aliceRepo to bobRepo", async () => {
-      const { bobRepo, aliceHandle, teardown } = await setup()
-      const bobHandle = bobRepo.find(aliceHandle.url)
-      await eventPromise(bobHandle, "change")
-      const bobDoc = await bobHandle.doc()
-      assert.deepStrictEqual(bobDoc, { foo: "bar" })
-      teardown()
-    })
-    it("can load a document from aliceRepo on charlieRepo", async () => {
-      const { charlieRepo, aliceHandle, teardown } = await setup()
-      const handle3 = charlieRepo.find(aliceHandle.url)
-      await eventPromise(handle3, "change")
-      const doc3 = await handle3.doc()
-      assert.deepStrictEqual(doc3, { foo: "bar" })
-      teardown()
-    })
-    it("charlieRepo doesn't have a document it's not supposed to have", async () => {
-      const { aliceRepo, bobRepo, charlieRepo, notForCharlie, teardown } =
-        await setup()
-      await Promise.all([
-        eventPromise(bobRepo.networkSubsystem, "message"),
-        eventPromise(charlieRepo.networkSubsystem, "message"),
-      ])
-      assert.notEqual(aliceRepo.handles[notForCharlie], undefined, "alice yes")
-      assert.notEqual(bobRepo.handles[notForCharlie], undefined, "bob yes")
-      assert.equal(charlieRepo.handles[notForCharlie], undefined, "charlie no")
-      teardown()
-    })
-    it("charlieRepo can request a document not initially shared with it", async () => {
-      const { charlieRepo, notForCharlie, teardown } = await setup()
-      const handle = charlieRepo.find(
-        stringifyAutomergeUrl({ documentId: notForCharlie })
-      )
-      const doc = await handle.doc()
-      assert.deepStrictEqual(doc, { foo: "baz" })
-      teardown()
-    })
-    it("charlieRepo can request a document across a network of multiple peers", async () => {
-      const { charlieRepo, notForBob, teardown } = await setup()
-      const handle = charlieRepo.find(
-        stringifyAutomergeUrl({ documentId: notForBob })
-      )
-      const doc = await handle.doc()
-      assert.deepStrictEqual(doc, { foo: "bap" })
-      teardown()
-    })
-    it("doesn't find a document which doesn't exist anywhere on the network", async () => {
-      const { charlieRepo } = await setup()
-      const url = generateAutomergeUrl()
-      const handle = charlieRepo.find(url)
-      assert.equal(handle.isReady(), false)
-      const doc = await handle.doc()
-      assert.equal(doc, undefined)
-    })
-    it("fires an 'unavailable' event when a document is not available on the network", async () => {
-      const { charlieRepo } = await setup()
-      const url = generateAutomergeUrl()
-      const handle = charlieRepo.find(url)
-      assert.equal(handle.isReady(), false)
-      await Promise.all([
-        eventPromise(handle, "unavailable"),
-        eventPromise(charlieRepo, "unavailable-document"),
-      ])
-      // make sure it fires a second time if the doc is still unavailable
-      const handle2 = charlieRepo.find(url)
-      assert.equal(handle2.isReady(), false)
-      await eventPromise(handle2, "unavailable")
-    })
-    it("a previously unavailable document syncs over the network if a peer with it connects", async () => {
-      const {
-        charlieRepo,
-        notForCharlie,
-        aliceRepo,
-        teardown,
-        connectAliceToBob,
-      } = await setup(false)
-      const url = stringifyAutomergeUrl({ documentId: notForCharlie })
-      const handle = charlieRepo.find(url)
-      assert.equal(handle.isReady(), false)
-      await eventPromise(handle, "unavailable")
-      connectAliceToBob()
-      await eventPromise(aliceRepo.networkSubsystem, "peer")
-      const doc = await handle.doc([READY])
-      assert.deepStrictEqual(doc, { foo: "baz" })
-      // an additional find should also return the correct resolved document
-      const handle2 = charlieRepo.find(url)
-      const doc2 = await handle2.doc()
-      assert.deepStrictEqual(doc2, { foo: "baz" })
-      teardown()
-    })
-    it("a deleted document from charlieRepo can be refetched", async () => {
-      const { charlieRepo, aliceHandle, teardown } = await setup()
-      const deletePromise = eventPromise(charlieRepo, "delete-document")
-      charlieRepo.delete(aliceHandle.documentId)
-      await deletePromise
-      const changePromise = eventPromise(aliceHandle, "change")
-      aliceHandle.change(d => {
-        d.foo = "baz"
-      })
-      await changePromise
-      const handle3 = charlieRepo.find(aliceHandle.url)
-      await eventPromise(handle3, "change")
-      const doc3 = await handle3.doc()
-      assert.deepStrictEqual(doc3, { foo: "baz" })
-      teardown()
-    })
-    const setupMeshNetwork = async () => {
-      const aliceRepo = new Repo({
-        network: [new BroadcastChannelNetworkAdapter()],
-        peerId: "alice",
-      })
-      const bobRepo = new Repo({
-        network: [new BroadcastChannelNetworkAdapter()],
-        peerId: "bob",
-      })
-      const charlieRepo = new Repo({
-        network: [new BroadcastChannelNetworkAdapter()],
-        peerId: "charlie",
-      })
-      // pause to let the network set up
-      await pause(50)
-      return {
-        aliceRepo,
-        bobRepo,
-        charlieRepo,
-      }
-    }
-    it("can emit an 'unavailable' event when it's not found on the network", async () => {
-      const { charlieRepo } = await setupMeshNetwork()
-      const url = generateAutomergeUrl()
-      const handle = charlieRepo.find(url)
-      assert.equal(handle.isReady(), false)
-      await eventPromise(handle, "unavailable")
-    })
-    it("syncs a bunch of changes", async () => {
-      const { aliceRepo, bobRepo, charlieRepo, teardown } = await setup()
-      // HACK: yield to give repos time to get the one doc that aliceRepo created
-      await pause(50)
-      for (let i = 0; i < 100; i++) {
-        // pick a repo
-        const repo = getRandomItem([aliceRepo, bobRepo, charlieRepo])
-        const docs = Object.values(repo.handles)
-        const doc =
-          Math.random() < 0.5
-            ? // heads, create a new doc
-              repo.create()
-            : // tails, pick a random doc
-              getRandomItem(docs)
-        // make sure the doc is ready
-        if (!doc.isReady()) {
-          await doc.doc()
-        }
-        // make a random change to it
-        doc.change(d => {
-          d.foo = Math.random().toString()
-        })
-      }
-      await pause(500)
-      teardown()
-    })
-    it("can broadcast a message to peers with the correct document only", async () => {
-      const { aliceRepo, bobRepo, charlieRepo, notForCharlie, teardown } =
-        await setup()
-      const data = { presence: "alice" }
-      const aliceHandle = aliceRepo.find(
-        stringifyAutomergeUrl({ documentId: notForCharlie })
-      )
-      const bobHandle = bobRepo.find(
-        stringifyAutomergeUrl({ documentId: notForCharlie })
-      )
-      await pause(50)
-      const charliePromise = new Promise((resolve, reject) => {
-        charlieRepo.networkSubsystem.on("message", message => {
-          if (
-            message.type === "ephemeral" &&
-            message.documentId === notForCharlie
-          ) {
-            reject(new Error("Charlie should not receive this message"))
-          }
-        })
-        setTimeout(resolve, 100)
-      })
-      aliceHandle.broadcast(data)
-      const { message } = await eventPromise(bobHandle, "ephemeral-message")
-      assert.deepStrictEqual(message, data)
-      assert.equal(charlieRepo.handles[notForCharlie], undefined, "charlie no")
-      await charliePromise
-      teardown()
-    })
-    it("can broadcast a message without entering into an infinite loop", async () => {
-      const { aliceRepo, bobRepo, charlieRepo } = await setupMeshNetwork()
-      // pause to let the network set up
-      await pause(50)
-      const message = { presence: "alex" }
-      const aliceHandle = aliceRepo.create()
-      const bobHandle = bobRepo.find(aliceHandle.url)
-      const charlieHandle = charlieRepo.find(aliceHandle.url)
-      const aliceDoesntGetIt = new Promise((resolve, reject) => {
-        setTimeout(() => {
-          resolve()
-        }, 100)
-        aliceHandle.on("ephemeral-message", () => {
-          reject("alice got the message")
-        })
-      })
-      const bobGotIt = eventPromise(bobHandle, "ephemeral-message")
-      const charlieGotIt = eventPromise(charlieHandle, "ephemeral-message")
-      // let things get in sync and peers meet one another
-      await pause(50)
-      aliceHandle.broadcast(message)
-      const [bob, charlie] = await Promise.all([
-        bobGotIt,
-        charlieGotIt,
-        aliceDoesntGetIt,
-      ])
-      assert.deepStrictEqual(bob.message, message)
-      assert.deepStrictEqual(charlie.message, message)
-    })
-  })
-})
package/dist/test/StorageSubsystem.test.d.ts.map
DELETED
@@ -1 +0,0 @@
-{"version":3,"file":"StorageSubsystem.test.d.ts","sourceRoot":"","sources":["../../test/StorageSubsystem.test.ts"],"names":[],"mappings":""}
package/dist/test/StorageSubsystem.test.js
DELETED
@@ -1,56 +0,0 @@
-import fs from "fs"
-import os from "os"
-import path from "path"
-import assert from "assert"
-import * as A from "@automerge/automerge/next"
-import { DummyStorageAdapter } from "./helpers/DummyStorageAdapter.js"
-import { NodeFSStorageAdapter } from "@automerge/automerge-repo-storage-nodefs"
-import { StorageSubsystem } from "../src/storage/StorageSubsystem.js"
-import { generateAutomergeUrl, parseAutomergeUrl } from "../src/DocUrl.js"
-const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "automerge-repo-tests"))
-describe("StorageSubsystem", () => {
-  const adaptersToTest = {
-    dummyStorageAdapter: new DummyStorageAdapter(),
-    nodeFSStorageAdapter: new NodeFSStorageAdapter(tempDir),
-  }
-  Object.entries(adaptersToTest).forEach(([adapterName, adapter]) => {
-    describe(adapterName, () => {
-      it("can store and retrieve an Automerge document", async () => {
-        const storage = new StorageSubsystem(adapter)
-        const doc = A.change(A.init(), "test", d => {
-          d.foo = "bar"
-        })
-        // save it to storage
-        const key = parseAutomergeUrl(generateAutomergeUrl()).documentId
-        await storage.saveDoc(key, doc)
-        // reload it from storage
-        const reloadedDoc = await storage.loadDoc(key)
-        // check that it's the same doc
-        assert.deepStrictEqual(reloadedDoc, doc)
-      })
-    })
-  })
-  it("correctly stores incremental changes following a load", async () => {
-    const adapter = new DummyStorageAdapter()
-    const storage = new StorageSubsystem(adapter)
-    const doc = A.change(A.init(), "test", d => {
-      d.foo = "bar"
-    })
-    // save it to storage
-    const key = parseAutomergeUrl(generateAutomergeUrl()).documentId
-    storage.saveDoc(key, doc)
-    // create new storage subsystem to simulate a new process
-    const storage2 = new StorageSubsystem(adapter)
-    // reload it from storage
-    const reloadedDoc = await storage2.loadDoc(key)
-    assert(reloadedDoc, "doc should be loaded")
-    // make a change
-    const changedDoc = A.change(reloadedDoc, "test 2", d => {
-      d.foo = "baz"
-    })
-    // save it to storage
-    storage2.saveDoc(key, changedDoc)
-    // check that the storage adapter contains the correct keys
-    assert(adapter.keys().some(k => k.startsWith(`${key}.incremental.`)))
-  })
-})
package/dist/test/helpers/DummyNetworkAdapter.d.ts
DELETED
@@ -1,9 +0,0 @@
-import { NetworkAdapter } from "../../src/index.js"
-export declare class DummyNetworkAdapter extends NetworkAdapter {
-  #private
-  constructor(startReady: boolean)
-  send(): void
-  connect(_: string): void
-  disconnect(): void
-}
-//# sourceMappingURL=DummyNetworkAdapter.d.ts.map
package/dist/test/helpers/DummyNetworkAdapter.d.ts.map
DELETED
@@ -1 +0,0 @@
-{"version":3,"file":"DummyNetworkAdapter.d.ts","sourceRoot":"","sources":["../../../test/helpers/DummyNetworkAdapter.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,cAAc,EAAE,MAAM,oBAAoB,CAAA;AAEnD,qBAAa,mBAAoB,SAAQ,cAAc;;gBAEzC,UAAU,EAAE,OAAO;IAI/B,IAAI;IACJ,OAAO,CAAC,CAAC,EAAE,MAAM;IAKjB,UAAU;CACX"}
package/dist/test/helpers/DummyNetworkAdapter.js
DELETED
@@ -1,15 +0,0 @@
-import { NetworkAdapter } from "../../src/index.js"
-export class DummyNetworkAdapter extends NetworkAdapter {
-  #startReady = true
-  constructor(startReady) {
-    super()
-    this.#startReady = startReady
-  }
-  send() {}
-  connect(_) {
-    if (this.#startReady) {
-      this.emit("ready", { network: this })
-    }
-  }
-  disconnect() {}
-}
package/dist/test/helpers/DummyStorageAdapter.d.ts
DELETED
@@ -1,16 +0,0 @@
-import { StorageAdapter, type StorageKey } from "../../src/index.js"
-export declare class DummyStorageAdapter implements StorageAdapter {
-  #private
-  loadRange(keyPrefix: StorageKey): Promise<
-    {
-      data: Uint8Array
-      key: StorageKey
-    }[]
-  >
-  removeRange(keyPrefix: string[]): Promise<void>
-  load(key: string[]): Promise<Uint8Array | undefined>
-  save(key: string[], binary: Uint8Array): Promise<void>
-  remove(key: string[]): Promise<void>
-  keys(): string[]
-}
-//# sourceMappingURL=DummyStorageAdapter.d.ts.map
package/dist/test/helpers/DummyStorageAdapter.d.ts.map
DELETED
@@ -1 +0,0 @@
-{"version":3,"file":"DummyStorageAdapter.d.ts","sourceRoot":"","sources":["../../../test/helpers/DummyStorageAdapter.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,cAAc,EAAE,KAAK,UAAU,EAAE,MAAM,oBAAoB,CAAA;AAEpE,qBAAa,mBAAoB,YAAW,cAAc;;IAWlD,SAAS,CAAC,SAAS,EAAE,UAAU,GAAG,OAAO,CAAC;QAAC,IAAI,EAAE,UAAU,CAAC;QAAC,GAAG,EAAE,UAAU,CAAA;KAAC,EAAE,CAAC;IAOhF,WAAW,CAAC,SAAS,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;IAM/C,IAAI,CAAC,GAAG,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,UAAU,GAAG,SAAS,CAAC;IAIpD,IAAI,CAAC,GAAG,EAAE,MAAM,EAAE,EAAE,MAAM,EAAE,UAAU;IAKtC,MAAM,CAAC,GAAG,EAAE,MAAM,EAAE;IAI1B,IAAI;CAGL"}