@automerge/automerge-repo 2.0.0-collectionsync-alpha.1 → 2.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +8 -8
- package/dist/AutomergeUrl.d.ts +17 -5
- package/dist/AutomergeUrl.d.ts.map +1 -1
- package/dist/AutomergeUrl.js +71 -24
- package/dist/DocHandle.d.ts +33 -41
- package/dist/DocHandle.d.ts.map +1 -1
- package/dist/DocHandle.js +105 -66
- package/dist/FindProgress.d.ts +30 -0
- package/dist/FindProgress.d.ts.map +1 -0
- package/dist/FindProgress.js +1 -0
- package/dist/RemoteHeadsSubscriptions.d.ts +4 -5
- package/dist/RemoteHeadsSubscriptions.d.ts.map +1 -1
- package/dist/RemoteHeadsSubscriptions.js +4 -1
- package/dist/Repo.d.ts +24 -5
- package/dist/Repo.d.ts.map +1 -1
- package/dist/Repo.js +355 -169
- package/dist/helpers/abortable.d.ts +36 -0
- package/dist/helpers/abortable.d.ts.map +1 -0
- package/dist/helpers/abortable.js +47 -0
- package/dist/helpers/arraysAreEqual.d.ts.map +1 -1
- package/dist/helpers/bufferFromHex.d.ts +3 -0
- package/dist/helpers/bufferFromHex.d.ts.map +1 -0
- package/dist/helpers/bufferFromHex.js +13 -0
- package/dist/helpers/debounce.d.ts.map +1 -1
- package/dist/helpers/eventPromise.d.ts.map +1 -1
- package/dist/helpers/headsAreSame.d.ts +2 -2
- package/dist/helpers/headsAreSame.d.ts.map +1 -1
- package/dist/helpers/mergeArrays.d.ts +1 -1
- package/dist/helpers/mergeArrays.d.ts.map +1 -1
- package/dist/helpers/pause.d.ts.map +1 -1
- package/dist/helpers/tests/network-adapter-tests.d.ts.map +1 -1
- package/dist/helpers/tests/network-adapter-tests.js +13 -13
- package/dist/helpers/tests/storage-adapter-tests.d.ts.map +1 -1
- package/dist/helpers/tests/storage-adapter-tests.js +6 -9
- package/dist/helpers/throttle.d.ts.map +1 -1
- package/dist/helpers/withTimeout.d.ts.map +1 -1
- package/dist/index.d.ts +35 -7
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +37 -6
- package/dist/network/NetworkSubsystem.d.ts +0 -1
- package/dist/network/NetworkSubsystem.d.ts.map +1 -1
- package/dist/network/NetworkSubsystem.js +0 -3
- package/dist/network/messages.d.ts +1 -7
- package/dist/network/messages.d.ts.map +1 -1
- package/dist/network/messages.js +1 -2
- package/dist/storage/StorageAdapter.d.ts +0 -9
- package/dist/storage/StorageAdapter.d.ts.map +1 -1
- package/dist/storage/StorageAdapter.js +0 -33
- package/dist/storage/StorageSubsystem.d.ts +6 -2
- package/dist/storage/StorageSubsystem.d.ts.map +1 -1
- package/dist/storage/StorageSubsystem.js +131 -37
- package/dist/storage/keyHash.d.ts +1 -1
- package/dist/storage/keyHash.d.ts.map +1 -1
- package/dist/synchronizer/CollectionSynchronizer.d.ts +3 -4
- package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
- package/dist/synchronizer/CollectionSynchronizer.js +32 -26
- package/dist/synchronizer/DocSynchronizer.d.ts +8 -8
- package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -1
- package/dist/synchronizer/DocSynchronizer.js +205 -79
- package/dist/types.d.ts +4 -1
- package/dist/types.d.ts.map +1 -1
- package/fuzz/fuzz.ts +3 -3
- package/package.json +4 -5
- package/src/AutomergeUrl.ts +101 -26
- package/src/DocHandle.ts +158 -77
- package/src/FindProgress.ts +48 -0
- package/src/RemoteHeadsSubscriptions.ts +11 -9
- package/src/Repo.ts +465 -180
- package/src/helpers/abortable.ts +62 -0
- package/src/helpers/bufferFromHex.ts +14 -0
- package/src/helpers/headsAreSame.ts +2 -2
- package/src/helpers/tests/network-adapter-tests.ts +14 -13
- package/src/helpers/tests/storage-adapter-tests.ts +13 -24
- package/src/index.ts +57 -38
- package/src/network/NetworkSubsystem.ts +0 -4
- package/src/network/messages.ts +2 -11
- package/src/storage/StorageAdapter.ts +0 -42
- package/src/storage/StorageSubsystem.ts +155 -45
- package/src/storage/keyHash.ts +1 -1
- package/src/synchronizer/CollectionSynchronizer.ts +42 -29
- package/src/synchronizer/DocSynchronizer.ts +263 -89
- package/src/types.ts +4 -1
- package/test/AutomergeUrl.test.ts +130 -0
- package/test/CollectionSynchronizer.test.ts +6 -8
- package/test/DocHandle.test.ts +161 -77
- package/test/DocSynchronizer.test.ts +11 -9
- package/test/RemoteHeadsSubscriptions.test.ts +1 -1
- package/test/Repo.test.ts +406 -341
- package/test/StorageSubsystem.test.ts +95 -20
- package/test/remoteHeads.test.ts +28 -13
- package/dist/CollectionHandle.d.ts +0 -14
- package/dist/CollectionHandle.d.ts.map +0 -1
- package/dist/CollectionHandle.js +0 -37
- package/dist/DocUrl.d.ts +0 -47
- package/dist/DocUrl.d.ts.map +0 -1
- package/dist/DocUrl.js +0 -72
- package/dist/EphemeralData.d.ts +0 -20
- package/dist/EphemeralData.d.ts.map +0 -1
- package/dist/EphemeralData.js +0 -1
- package/dist/ferigan.d.ts +0 -51
- package/dist/ferigan.d.ts.map +0 -1
- package/dist/ferigan.js +0 -98
- package/dist/src/DocHandle.d.ts +0 -182
- package/dist/src/DocHandle.d.ts.map +0 -1
- package/dist/src/DocHandle.js +0 -405
- package/dist/src/DocUrl.d.ts +0 -49
- package/dist/src/DocUrl.d.ts.map +0 -1
- package/dist/src/DocUrl.js +0 -72
- package/dist/src/EphemeralData.d.ts +0 -19
- package/dist/src/EphemeralData.d.ts.map +0 -1
- package/dist/src/EphemeralData.js +0 -1
- package/dist/src/Repo.d.ts +0 -74
- package/dist/src/Repo.d.ts.map +0 -1
- package/dist/src/Repo.js +0 -208
- package/dist/src/helpers/arraysAreEqual.d.ts +0 -2
- package/dist/src/helpers/arraysAreEqual.d.ts.map +0 -1
- package/dist/src/helpers/arraysAreEqual.js +0 -2
- package/dist/src/helpers/cbor.d.ts +0 -4
- package/dist/src/helpers/cbor.d.ts.map +0 -1
- package/dist/src/helpers/cbor.js +0 -8
- package/dist/src/helpers/eventPromise.d.ts +0 -11
- package/dist/src/helpers/eventPromise.d.ts.map +0 -1
- package/dist/src/helpers/eventPromise.js +0 -7
- package/dist/src/helpers/headsAreSame.d.ts +0 -2
- package/dist/src/helpers/headsAreSame.d.ts.map +0 -1
- package/dist/src/helpers/headsAreSame.js +0 -4
- package/dist/src/helpers/mergeArrays.d.ts +0 -2
- package/dist/src/helpers/mergeArrays.d.ts.map +0 -1
- package/dist/src/helpers/mergeArrays.js +0 -15
- package/dist/src/helpers/pause.d.ts +0 -6
- package/dist/src/helpers/pause.d.ts.map +0 -1
- package/dist/src/helpers/pause.js +0 -10
- package/dist/src/helpers/tests/network-adapter-tests.d.ts +0 -21
- package/dist/src/helpers/tests/network-adapter-tests.d.ts.map +0 -1
- package/dist/src/helpers/tests/network-adapter-tests.js +0 -122
- package/dist/src/helpers/withTimeout.d.ts +0 -12
- package/dist/src/helpers/withTimeout.d.ts.map +0 -1
- package/dist/src/helpers/withTimeout.js +0 -24
- package/dist/src/index.d.ts +0 -53
- package/dist/src/index.d.ts.map +0 -1
- package/dist/src/index.js +0 -40
- package/dist/src/network/NetworkAdapter.d.ts +0 -26
- package/dist/src/network/NetworkAdapter.d.ts.map +0 -1
- package/dist/src/network/NetworkAdapter.js +0 -4
- package/dist/src/network/NetworkSubsystem.d.ts +0 -23
- package/dist/src/network/NetworkSubsystem.d.ts.map +0 -1
- package/dist/src/network/NetworkSubsystem.js +0 -120
- package/dist/src/network/messages.d.ts +0 -85
- package/dist/src/network/messages.d.ts.map +0 -1
- package/dist/src/network/messages.js +0 -23
- package/dist/src/storage/StorageAdapter.d.ts +0 -14
- package/dist/src/storage/StorageAdapter.d.ts.map +0 -1
- package/dist/src/storage/StorageAdapter.js +0 -1
- package/dist/src/storage/StorageSubsystem.d.ts +0 -12
- package/dist/src/storage/StorageSubsystem.d.ts.map +0 -1
- package/dist/src/storage/StorageSubsystem.js +0 -145
- package/dist/src/synchronizer/CollectionSynchronizer.d.ts +0 -25
- package/dist/src/synchronizer/CollectionSynchronizer.d.ts.map +0 -1
- package/dist/src/synchronizer/CollectionSynchronizer.js +0 -106
- package/dist/src/synchronizer/DocSynchronizer.d.ts +0 -29
- package/dist/src/synchronizer/DocSynchronizer.d.ts.map +0 -1
- package/dist/src/synchronizer/DocSynchronizer.js +0 -263
- package/dist/src/synchronizer/Synchronizer.d.ts +0 -9
- package/dist/src/synchronizer/Synchronizer.d.ts.map +0 -1
- package/dist/src/synchronizer/Synchronizer.js +0 -2
- package/dist/src/types.d.ts +0 -16
- package/dist/src/types.d.ts.map +0 -1
- package/dist/src/types.js +0 -1
- package/dist/test/CollectionSynchronizer.test.d.ts +0 -2
- package/dist/test/CollectionSynchronizer.test.d.ts.map +0 -1
- package/dist/test/CollectionSynchronizer.test.js +0 -57
- package/dist/test/DocHandle.test.d.ts +0 -2
- package/dist/test/DocHandle.test.d.ts.map +0 -1
- package/dist/test/DocHandle.test.js +0 -238
- package/dist/test/DocSynchronizer.test.d.ts +0 -2
- package/dist/test/DocSynchronizer.test.d.ts.map +0 -1
- package/dist/test/DocSynchronizer.test.js +0 -111
- package/dist/test/Network.test.d.ts +0 -2
- package/dist/test/Network.test.d.ts.map +0 -1
- package/dist/test/Network.test.js +0 -11
- package/dist/test/Repo.test.d.ts +0 -2
- package/dist/test/Repo.test.d.ts.map +0 -1
- package/dist/test/Repo.test.js +0 -568
- package/dist/test/StorageSubsystem.test.d.ts +0 -2
- package/dist/test/StorageSubsystem.test.d.ts.map +0 -1
- package/dist/test/StorageSubsystem.test.js +0 -56
- package/dist/test/helpers/DummyNetworkAdapter.d.ts +0 -9
- package/dist/test/helpers/DummyNetworkAdapter.d.ts.map +0 -1
- package/dist/test/helpers/DummyNetworkAdapter.js +0 -15
- package/dist/test/helpers/DummyStorageAdapter.d.ts +0 -16
- package/dist/test/helpers/DummyStorageAdapter.d.ts.map +0 -1
- package/dist/test/helpers/DummyStorageAdapter.js +0 -33
- package/dist/test/helpers/generate-large-object.d.ts +0 -5
- package/dist/test/helpers/generate-large-object.d.ts.map +0 -1
- package/dist/test/helpers/generate-large-object.js +0 -9
- package/dist/test/helpers/getRandomItem.d.ts +0 -2
- package/dist/test/helpers/getRandomItem.d.ts.map +0 -1
- package/dist/test/helpers/getRandomItem.js +0 -4
- package/dist/test/types.d.ts +0 -4
- package/dist/test/types.d.ts.map +0 -1
- package/dist/test/types.js +0 -1
- package/src/CollectionHandle.ts +0 -54
- package/src/ferigan.ts +0 -184
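The most visible change in this release, judging from the test updates below, is that `Repo.find()` is now asynchronous and resolves to a ready handle (rejecting with `Document ... is unavailable` otherwise), while `DocHandle.doc()` is now synchronous, replacing the old `docSync()`. A minimal migration sketch, assuming only the API surface exercised by the tests (the `{ network: [] }` config and variable names are illustrative):

```ts
import { Repo } from "@automerge/automerge-repo"

const repo = new Repo({ network: [] })
const handle = repo.create<{ foo: string }>({ foo: "bar" })

// Before (2.0.0-collectionsync-alpha.1): find() was synchronous and the
// contents were read with `await handle.doc()` or `handle.docSync()`.
// const found = repo.find<{ foo: string }>(handle.url)
// const doc = await found.doc()

// After (2.0.1): find() resolves once the document is ready; doc() reads
// the current contents synchronously.
const found = await repo.find<{ foo: string }>(handle.url)
console.log(found.doc().foo) // "bar"
```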
package/test/Repo.test.ts
CHANGED
```diff
@@ -3,8 +3,11 @@ import { MessageChannelNetworkAdapter } from "../../automerge-repo-network-messa
 import assert from "assert"
 import * as Uuid from "uuid"
 import { describe, expect, it } from "vitest"
-import { parseAutomergeUrl } from "../src/AutomergeUrl.js"
 import {
+  encodeHeads,
+  getHeadsFromUrl,
+  isValidAutomergeUrl,
+  parseAutomergeUrl,
   generateAutomergeUrl,
   stringifyAutomergeUrl,
 } from "../src/AutomergeUrl.js"
@@ -13,6 +16,7 @@ import { eventPromise } from "../src/helpers/eventPromise.js"
 import { pause } from "../src/helpers/pause.js"
 import {
   AnyDocumentId,
+  UrlHeads,
   AutomergeUrl,
   DocHandle,
   DocumentId,
@@ -29,8 +33,7 @@ import {
 import { getRandomItem } from "./helpers/getRandomItem.js"
 import { TestDoc } from "./types.js"
 import { StorageId, StorageKey } from "../src/storage/types.js"
-
-const someDoc = A.init()
+import { chunkTypeFromKey } from "../src/storage/chunkTypeFromKey.js"
 
 describe("Repo", () => {
   describe("constructor", () => {
@@ -42,7 +45,6 @@ describe("Repo", () => {
 
   describe("local only", () => {
     const setup = ({ startReady = true } = {}) => {
-      const someDoc = A.init()
       const storageAdapter = new DummyStorageAdapter()
       const networkAdapter = new DummyNetworkAdapter({ startReady })
 
@@ -75,35 +77,34 @@ describe("Repo", () => {
     it("can create a document with an initial value", async () => {
       const { repo } = setup()
       const handle = repo.create({ foo: "bar" })
-
-      assert.equal(handle.docSync().foo, "bar")
+      assert.equal(handle.doc().foo, "bar")
     })
 
-    it("can find a document by url", () => {
+    it("can find a document by url", async () => {
       const { repo } = setup()
       const handle = repo.create<TestDoc>()
       handle.change((d: TestDoc) => {
         d.foo = "bar"
       })
 
-      const handle2 = repo.find(handle.url)
+      const handle2 = await repo.find(handle.url)
       assert.equal(handle, handle2)
-      assert.deepEqual(handle2.docSync(), { foo: "bar" })
+      assert.deepEqual(handle2.doc(), { foo: "bar" })
     })
 
-    it("can find a document by its unprefixed document ID", () => {
+    it("can find a document by its unprefixed document ID", async () => {
       const { repo } = setup()
       const handle = repo.create<TestDoc>()
       handle.change((d: TestDoc) => {
         d.foo = "bar"
       })
 
-      const handle2 = repo.find(handle.documentId)
+      const handle2 = await repo.find(handle.documentId)
       assert.equal(handle, handle2)
-      assert.deepEqual(handle2.docSync(), { foo: "bar" })
+      assert.deepEqual(handle2.doc(), { foo: "bar" })
     })
 
-    it("can find a document by legacy UUID (for now)", () => {
+    it("can find a document by legacy UUID (for now)", async () => {
       disableConsoleWarn()
 
       const { repo } = setup()
@@ -116,9 +117,9 @@ describe("Repo", () => {
       const { binaryDocumentId } = parseAutomergeUrl(url)
       const legacyDocId = Uuid.stringify(binaryDocumentId) as LegacyDocumentId
 
-      const handle2 = repo.find(legacyDocId)
+      const handle2 = await repo.find(legacyDocId)
       assert.equal(handle, handle2)
-      assert.deepEqual(handle2.docSync(), { foo: "bar" })
+      assert.deepEqual(handle2.doc(), { foo: "bar" })
 
       reenableConsoleWarn()
     })
@@ -129,7 +130,7 @@ describe("Repo", () => {
       handle.change(d => {
         d.foo = "bar"
       })
-      const v = await handle.doc()
+      const v = handle.doc()
       assert.equal(handle.isReady(), true)
       assert.equal(v.foo, "bar")
     })
@@ -143,8 +144,8 @@ describe("Repo", () => {
       const handle2 = repo.clone(handle)
       assert.equal(handle2.isReady(), true)
       assert.notEqual(handle.documentId, handle2.documentId)
-      assert.deepStrictEqual(handle.docSync(), handle2.docSync())
-      assert.deepStrictEqual(handle2.docSync(), { foo: "bar" })
+      assert.deepStrictEqual(handle.doc(), handle2.doc())
+      assert.deepStrictEqual(handle2.doc(), { foo: "bar" })
     })
 
     it("the cloned documents are distinct", () => {
@@ -162,9 +163,9 @@ describe("Repo", () => {
         d.baz = "baz"
       })
 
-      assert.notDeepStrictEqual(handle.docSync(), handle2.docSync())
-      assert.deepStrictEqual(handle.docSync(), { foo: "bar", bar: "bif" })
-      assert.deepStrictEqual(handle2.docSync(), { foo: "bar", baz: "baz" })
+      assert.notDeepStrictEqual(handle.doc(), handle2.doc())
+      assert.deepStrictEqual(handle.doc(), { foo: "bar", bar: "bif" })
+      assert.deepStrictEqual(handle2.doc(), { foo: "bar", baz: "baz" })
     })
 
     it("the cloned documents can merge", () => {
@@ -184,59 +185,47 @@ describe("Repo", () => {
 
       handle.merge(handle2)
 
-      assert.deepStrictEqual(handle.docSync(), {
+      assert.deepStrictEqual(handle.doc(), {
         foo: "bar",
         bar: "bif",
         baz: "baz",
       })
       // only the one handle should be changed
-      assert.deepStrictEqual(handle2.docSync(), { foo: "bar", baz: "baz" })
+      assert.deepStrictEqual(handle2.doc(), { foo: "bar", baz: "baz" })
     })
 
     it("throws an error if we try to find a handle with an invalid AutomergeUrl", async () => {
       const { repo } = setup()
-      try {
-        repo.find<TestDoc>("invalid-url" as unknown as AutomergeUrl)
-      } catch (e: any) {
-        assert.equal(e.message, "Invalid AutomergeUrl: 'invalid-url'")
-      }
+      await expect(async () => {
+        await repo.find<TestDoc>("invalid-url" as unknown as AutomergeUrl)
+      }).rejects.toThrow("Invalid AutomergeUrl: 'invalid-url'")
     })
 
     it("doesn't find a document that doesn't exist", async () => {
       const { repo } = setup()
-
-      const url = generateAutomergeUrl()
-      const handle = repo.find<TestDoc>(url)
-
-      assert.equal(handle.isReady(), false)
-      assert.equal(handle.state, "unavailable")
-      const doc = await handle.doc()
-      assert.equal(doc, undefined)
-    })
-
-    it("emits an unavailable event when you don't have the document locally and are not connected to anyone", async () => {
-      const { repo } = setup()
-      const url = generateAutomergeUrl()
-      const handle = repo.find<TestDoc>(url)
-      assert.equal(handle.isReady(), false)
-      await eventPromise(handle, "unavailable")
+      await expect(async () => {
+        await repo.find<TestDoc>(generateAutomergeUrl())
+      }).rejects.toThrow(/Document (.*) is unavailable/)
     })
 
     it("doesn't mark a document as unavailable until network adapters are ready", async () => {
       const { repo, networkAdapter } = setup({ startReady: false })
       const url = generateAutomergeUrl()
-      const handle = repo.find<TestDoc>(url)
 
-      let wasUnavailable = false
-      handle.on("unavailable", () => {
-        wasUnavailable = true
-      })
+      const attemptedFind = repo.find<TestDoc>(url)
 
-
-
+      // First verify it stays pending for 50ms
+      await expect(
+        Promise.race([attemptedFind, pause(50)])
+      ).resolves.toBeUndefined()
 
+      // Trigger the rejection
      networkAdapter.forceReady()
-
+
+      // Now verify it rejects
+      await expect(attemptedFind).rejects.toThrow(
+        /Document (.*) is unavailable/
+      )
     })
 
     it("can find a created document", async () => {
@@ -247,30 +236,27 @@ describe("Repo", () => {
       })
       assert.equal(handle.isReady(), true)
 
-      const bobHandle = repo.find<TestDoc>(handle.url)
+      const bobHandle = await repo.find<TestDoc>(handle.url)
 
       assert.equal(handle, bobHandle)
       assert.equal(handle.isReady(), true)
 
-      const v = await bobHandle.doc()
+      const v = bobHandle.doc()
       assert.equal(v?.foo, "bar")
     })
 
     it("saves the document when creating it", async () => {
       const { repo, storageAdapter } = setup()
-      const handle = repo.create<TestDoc>()
+      const handle = repo.create<TestDoc>({ foo: "saved" })
 
       const repo2 = new Repo({
         storage: storageAdapter,
       })
 
-      // TODO: remove this pause
-      await pause(100)
       await repo.flush()
 
-      const bobHandle = repo2.find<TestDoc>(handle.url)
-
-      assert.equal(bobHandle.isReady(), true)
+      const bobHandle = await repo2.find<TestDoc>(handle.url)
+      assert.deepEqual(bobHandle.doc(), { foo: "saved" })
     })
 
     it("saves the document when changed and can find it again", async () => {
@@ -283,17 +269,15 @@ describe("Repo", () => {
 
       assert.equal(handle.isReady(), true)
 
-      // TODO: remove this pause
-      await pause(100)
       await repo.flush()
 
       const repo2 = new Repo({
         storage: storageAdapter,
       })
 
-      const bobHandle = repo2.find<TestDoc>(handle.url)
+      const bobHandle = await repo2.find<TestDoc>(handle.url)
 
-      const v = await bobHandle.doc()
+      const v = bobHandle.doc()
       assert.equal(v?.foo, "bar")
     })
 
@@ -305,7 +289,7 @@ describe("Repo", () => {
       })
       // we now have a snapshot and an incremental change in storage
       assert.equal(handle.isReady(), true)
-      const foo = await handle.doc()
+      const foo = handle.doc()
       assert.equal(foo?.foo, "bar")
 
       await pause()
@@ -322,7 +306,6 @@ describe("Repo", () => {
         d.foo = "bar"
       })
       assert.equal(handle.isReady(), true)
-      await handle.doc()
 
       await pause()
       repo.delete(handle.url)
@@ -359,7 +342,7 @@ describe("Repo", () => {
 
       const exported = await repo.export(handle.documentId)
       const loaded = A.load(exported)
-      const doc = await handle.doc()
+      const doc = handle.doc()
       assert.deepEqual(doc, loaded)
     })
 
@@ -387,17 +370,13 @@ describe("Repo", () => {
       })
 
       await repo.flush()
-      // TODO: remove this pause
-      await pause(100)
 
       const initialKeys = storage.keys()
 
       const repo2 = new Repo({
         storage,
       })
-      const handle2 = repo2.find(handle.url)
-      await handle2.doc()
-
+      const handle2 = await repo2.find(handle.url)
       assert.deepEqual(storage.keys(), initialKeys)
     })
 
@@ -423,14 +402,12 @@ describe("Repo", () => {
       const repo2 = new Repo({
         storage,
       })
-      const handle2 = repo2.find(handle.url)
-      await handle2.doc()
-
+      const handle2 = await repo2.find(handle.url)
       assert(storage.keys().length !== 0)
     }
   })
 
-    it
+    it("doesn't create multiple snapshots in storage when a series of large changes are made in succession", async () => {
       const { repo, storageAdapter } = setup()
       const handle = repo.create<{ objects: LargeObject[] }>()
 
@@ -454,6 +431,40 @@ describe("Repo", () => {
       )
     })
 
+    it("should not call loadDoc multiple times when find() is called in quick succession", async () => {
+      const { repo, storageAdapter } = setup()
+      const handle = repo.create<TestDoc>()
+      handle.change(d => {
+        d.foo = "bar"
+      })
+      await repo.flush()
+
+      // Create a new repo instance that will use the same storage
+      const repo2 = new Repo({
+        storage: storageAdapter,
+      })
+
+      // Track how many times loadDoc is called
+      let loadDocCallCount = 0
+      const originalLoadDoc = repo2.storageSubsystem!.loadDoc.bind(
+        repo2.storageSubsystem
+      )
+      repo2.storageSubsystem!.loadDoc = async documentId => {
+        loadDocCallCount++
+        return originalLoadDoc(documentId)
+      }
+
+      // Call find() twice in quick succession
+      const find1 = repo2.find(handle.url)
+      const find2 = repo2.find(handle.url)
+
+      // Wait for both calls to complete
+      await Promise.all([find1, find2])
+
+      // Verify loadDoc was only called once
+      assert.equal(loadDocCallCount, 1, "loadDoc should only be called once")
+    })
+
     it("can import an existing document", async () => {
       const { repo } = setup()
       const doc = A.init<TestDoc>()
@@ -465,7 +476,7 @@ describe("Repo", () => {
 
       const handle = repo.import<TestDoc>(saved)
       assert.equal(handle.isReady(), true)
-      const v = await handle.doc()
+      const v = handle.doc()
       assert.equal(v?.foo, "bar")
 
       expect(A.getHistory(v)).toEqual(A.getHistory(updatedDoc))
@@ -484,7 +495,7 @@ describe("Repo", () => {
       const { repo } = setup()
       // @ts-ignore - passing something other than UInt8Array
       const handle = repo.import<TestDoc>(A.from({ foo: 123 }))
-      const doc = await handle.doc()
+      const doc = handle.doc()
       expect(doc).toEqual({})
     })
 
@@ -492,7 +503,7 @@ describe("Repo", () => {
       const { repo } = setup()
       // @ts-ignore - passing something other than UInt8Array
       const handle = repo.import<TestDoc>({ foo: 123 })
-      const doc = await handle.doc()
+      const doc = handle.doc()
       expect(doc).toEqual({})
     })
 
@@ -500,14 +511,12 @@ describe("Repo", () => {
     it("contains doc handle", async () => {
       const { repo } = setup()
       const handle = repo.create({ foo: "bar" })
-      await handle.doc()
       assert(repo.handles[handle.documentId])
     })
 
     it("delete removes doc handle", async () => {
       const { repo } = setup()
       const handle = repo.create({ foo: "bar" })
-      await handle.doc()
       await repo.delete(handle.documentId)
       assert(repo.handles[handle.documentId] === undefined)
     })
@@ -515,7 +524,6 @@ describe("Repo", () => {
     it("removeFromCache removes doc handle", async () => {
       const { repo } = setup()
       const handle = repo.create({ foo: "bar" })
-      await handle.doc()
       await repo.removeFromCache(handle.documentId)
       assert(repo.handles[handle.documentId] === undefined)
     })
@@ -530,7 +538,7 @@ describe("Repo", () => {
     })
   })
 
-  describe
+  describe("flush behaviour", () => {
     const setup = () => {
       let blockedSaves = new Set<{ path: StorageKey; resolve: () => void }>()
       let resume = (documentIds?: DocumentId[]) => {
@@ -574,8 +582,8 @@ describe("Repo", () => {
 
    it("should not be in a new repo yet because the storage is slow", async () => {
       const { pausedStorage, repo, handle, handle2 } = setup()
-      expect((await handle.doc()).foo).toEqual("first")
-      expect((await handle2.doc()).foo).toEqual("second")
+      expect((await handle).doc().foo).toEqual("first")
+      expect((await handle2).doc().foo).toEqual("second")
 
       // Reload repo
       const repo2 = new Repo({
@@ -583,9 +591,10 @@ describe("Repo", () => {
       })
 
       // Could not find the document that is not yet saved because of slow storage.
-      const reloadedHandle = repo2.find<{ foo: string }>(handle.url)
+      await expect(async () => {
+        const reloadedHandle = await repo2.find<{ foo: string }>(handle.url)
+      }).rejects.toThrow(/Document (.*) is unavailable/)
       expect(pausedStorage.keys()).to.deep.equal([])
-      expect(await reloadedHandle.doc()).toEqual(undefined)
     })
 
     it("should be visible to a new repo after flush()", async () => {
@@ -605,10 +614,10 @@ describe("Repo", () => {
       })
 
       expect(
-        (await repo.find<{ foo: string }>(handle.documentId).doc()).foo
+        (await repo.find<{ foo: string }>(handle.documentId)).doc().foo
       ).toEqual("first")
       expect(
-        (await repo.find<{ foo: string }>(handle2.documentId).doc()).foo
+        (await repo.find<{ foo: string }>(handle2.documentId)).doc().foo
       ).toEqual("second")
     }
   })
@@ -630,13 +639,13 @@ describe("Repo", () => {
       })
 
       expect(
-        (await repo.find<{ foo: string }>(handle.documentId).doc()).foo
+        (await repo.find<{ foo: string }>(handle.documentId)).doc().foo
       ).toEqual("first")
       // Really, it's okay if the second one is also flushed but I'm forcing the issue
       // in the test storage engine above to make sure the behaviour is as documented
-      expect(
-        await repo.find<{ foo: string }>(handle2.documentId).doc()
-      ).
+      await expect(async () => {
+        ;(await repo.find<{ foo: string }>(handle2.documentId)).doc()
+      }).rejects.toThrow(/Document (.*) is unavailable/)
     }
   })
 
@@ -659,40 +668,7 @@ describe("Repo", () => {
     })
   })
 
-
-    const pipe = DummyNetworkAdapter.createConnectedPair()
-    const repo1 = new Repo({
-      peerId: "peer1" as PeerId,
-      storage: new DummyStorageAdapter(),
-      sharePolicy: async () => false,
-      network: [pipe[0]],
-    })
-    const repo2 = new Repo({
-      peerId: "peer2" as PeerId,
-      storage: new DummyStorageAdapter(),
-      sharePolicy: async () => true,
-      network: [pipe[1]],
-    })
-
-    const handleOn1 = repo1.create()
-    await handleOn1.whenReady()
-    handleOn1.change((d: any) => {
-      d.foo = "bar"
-    })
-
-    pipe[0].peerCandidate("peer2" as PeerId)
-    pipe[1].peerCandidate("peer1" as PeerId)
-
-    await repo2.networkSubsystem.whenReady()
-    console.log("about to load handle on 2")
-    const handleOn2 = repo2.find(handleOn1.url)
-
-    await handleOn2.whenReady()
-    await pause(1)
-    assert.deepStrictEqual(handleOn2.docSync(), { foo: "bar" })
-  })
-
-  describe("with peers (linear network)", function () {
+  describe("with peers (linear network)", async () => {
    it("n-peers connected in a line", async () => {
       const createNConnectedRepos = async (
         numberOfPeers: number,
@@ -717,7 +693,7 @@ describe("Repo", () => {
 
         if (idx < numberOfPeers - 1) {
           network.push(pair[0])
-          pair[0].whenReady()
+          networkReady.push(pair[0].whenReady())
         }
 
         const repo = new Repo({
@@ -748,23 +724,25 @@ describe("Repo", () => {
       }
 
       await connectedPromise
-
       return { repos }
     }
 
-
-      const numberOfPeers = 2
+      const numberOfPeers = 10
       const { repos } = await createNConnectedRepos(numberOfPeers, 10)
 
       const handle0 = repos[0].create()
       handle0.change((d: any) => {
         d.foo = "bar"
       })
-      const handleN = repos[numberOfPeers - 1].find<TestDoc>(handle0.url)
 
-      await
-
-
+      const handleN = await repos[numberOfPeers - 1].find<TestDoc>(handle0.url)
+      assert.deepStrictEqual(handleN.doc(), { foo: "bar" })
+
+      const handleNBack = repos[numberOfPeers - 1].create({
+        foo: "reverse-trip",
+      })
+      const handle0Back = await repos[0].find<TestDoc>(handleNBack.url)
+      assert.deepStrictEqual(handle0Back.doc(), { foo: "reverse-trip" })
     })
 
     const setup = async ({
@@ -845,7 +823,6 @@ describe("Repo", () => {
       }
 
       const aliceHandle = aliceRepo.create<TestDoc>()
-      // await pause(10)
       aliceHandle.change(d => {
         d.foo = "bar"
       })
@@ -871,6 +848,7 @@ describe("Repo", () => {
        eventPromise(bobRepo.networkSubsystem, "peer"),
        eventPromise(charlieRepo.networkSubsystem, "peer"),
      ])
+
      return {
        alice,
        aliceRepo,
@@ -891,9 +869,8 @@ describe("Repo", () => {
    it("changes are replicated from aliceRepo to bobRepo", async () => {
      const { bobRepo, aliceHandle, teardown } = await setup()
 
-      const bobHandle = bobRepo.find<TestDoc>(aliceHandle.url)
-
-      const bobDoc = await bobHandle.doc()
+      const bobHandle = await bobRepo.find<TestDoc>(aliceHandle.url)
+      const bobDoc = bobHandle.doc()
      assert.deepStrictEqual(bobDoc, { foo: "bar" })
      teardown()
    })
@@ -901,9 +878,8 @@ describe("Repo", () => {
    it("can load a document from aliceRepo on charlieRepo", async () => {
      const { charlieRepo, aliceHandle, teardown } = await setup()
 
-      const handle3 = charlieRepo.find<TestDoc>(aliceHandle.url)
-
-      const doc3 = await handle3.doc()
+      const handle3 = await charlieRepo.find<TestDoc>(aliceHandle.url)
+      const doc3 = handle3.doc()
      assert.deepStrictEqual(doc3, { foo: "bar" })
      teardown()
    })
@@ -920,15 +896,13 @@ describe("Repo", () => {
        foo: "foundOnFakeDisk",
      })
      await bobRepo2.flush()
-      console.log("bob storage: ", bobStorage.keys())
 
      // Now, let's load it on the original bob repo (which shares a "disk")
-      const bobFoundIt = bobRepo.find<TestDoc>(inStorageHandle.url)
-      await bobFoundIt.whenReady()
+      const bobFoundIt = await bobRepo.find<TestDoc>(inStorageHandle.url)
 
      // Before checking if it syncs, make sure we have it!
      // (This behaviour is mostly test-validation, we are already testing load/save elsewhere.)
-      assert.deepStrictEqual(bobFoundIt.docSync(), { foo: "foundOnFakeDisk" })
+      assert.deepStrictEqual(bobFoundIt.doc(), { foo: "foundOnFakeDisk" })
 
      await pause(10)
 
@@ -949,7 +923,7 @@ describe("Repo", () => {
      teardown()
    })
 
-    it
+    it("charlieRepo doesn't have a document it's not supposed to have", async () => {
      const { aliceRepo, bobRepo, charlieRepo, notForCharlie, teardown } =
        await setup()
 
@@ -968,25 +942,22 @@ describe("Repo", () => {
    it("charlieRepo can request a document not initially shared with it", async () => {
      const { charlieRepo, notForCharlie, teardown } = await setup()
 
-      const handle = charlieRepo.find<TestDoc>(notForCharlie)
-
-      await pause(50)
-
-      const doc = await handle.doc()
+      const handle = await charlieRepo.find<TestDoc>(notForCharlie)
+      const doc = handle.doc()
 
      assert.deepStrictEqual(doc, { foo: "baz" })
 
      teardown()
    })
 
-    it
+    it("charlieRepo can request a document across a network of multiple peers", async () => {
      const { charlieRepo, notForBob, teardown } = await setup()
 
-      const handle = charlieRepo.find<TestDoc>(notForBob)
+      const handle = await charlieRepo.find<TestDoc>(notForBob)
 
      await pause(50)
 
-      const doc = await handle.doc()
+      const doc = handle.doc()
      assert.deepStrictEqual(doc, { foo: "bap" })
 
      teardown()
@@ -995,42 +966,10 @@ describe("Repo", () => {
    it("doesn't find a document which doesn't exist anywhere on the network", async () => {
      const { charlieRepo, teardown } = await setup()
      const url = generateAutomergeUrl()
-      const handle = charlieRepo.find<TestDoc>(url)
-      assert.equal(handle.isReady(), false)
-
-      const doc = await handle.doc()
-      assert.equal(doc, undefined)
-
-      teardown()
-    })
 
-
-
-
-      const handle = aliceRepo.find(url)
-      assert.equal(handle.isReady(), false)
-      await eventPromise(handle, "unavailable")
-      teardown()
-    })
-
-    it("emits an unavailable event every time an unavailable doc is requested", async () => {
-      const { charlieRepo, teardown } = await setup()
-      const url = generateAutomergeUrl()
-      const handle = charlieRepo.find<TestDoc>(url)
-      assert.equal(handle.isReady(), false)
-
-      await Promise.all([
-        eventPromise(handle, "unavailable"),
-        eventPromise(charlieRepo, "unavailable-document"),
-      ])
-
-      // make sure it emits a second time if the doc is still unavailable
-      const handle2 = charlieRepo.find<TestDoc>(url)
-      assert.equal(handle2.isReady(), false)
-      await Promise.all([
-        eventPromise(handle, "unavailable"),
-        eventPromise(charlieRepo, "unavailable-document"),
-      ])
+      await expect(charlieRepo.find<TestDoc>(url)).rejects.toThrow(
+        /Document (.*) is unavailable/
+      )
 
      teardown()
    })
@@ -1045,23 +984,23 @@ describe("Repo", () => {
    } = await setup({ connectAlice: false })
 
      const url = stringifyAutomergeUrl({ documentId: notForCharlie })
-
-      const handle = charlieRepo.find<TestDoc>(url)
-
-      await eventPromise(handle, "unavailable")
+      await expect(charlieRepo.find<TestDoc>(url)).rejects.toThrow(
+        /Document (.*) is unavailable/
+      )
 
      connectAliceToBob()
 
      await eventPromise(aliceRepo.networkSubsystem, "peer")
-      // TODO: remove this pause
-      await pause(100)
 
-
+      // Not sure why we need this pause here, but... we do.
+      await pause(150)
+      const handle = await charlieRepo.find<TestDoc>(url)
+      const doc = handle.doc()
      assert.deepStrictEqual(doc, { foo: "baz" })
 
      // an additional find should also return the correct resolved document
-      const handle2 = charlieRepo.find<TestDoc>(url)
-      const doc2 = await handle2.doc()
+      const handle2 = await charlieRepo.find<TestDoc>(url)
+      const doc2 = handle2.doc()
      assert.deepStrictEqual(doc2, { foo: "baz" })
 
      teardown()
@@ -1097,11 +1036,9 @@ describe("Repo", () => {
        sharePolicy: async () => true,
      })
 
-
-      const handle = a.find<TestDoc>(url)
-
-      // the repo has no storage.
-      await eventPromise(handle, "unavailable")
+      await expect(a.find<TestDoc>(url)).rejects.toThrow(
+        /Document (.*) is unavailable/
+      )
 
      // Now create a repo pointing at the storage containing the document and
      // connect it to the other end of the MessageChannel
@@ -1111,12 +1048,17 @@ describe("Repo", () => {
        network: [new MessageChannelNetworkAdapter(ba)],
      })
 
+      // We need a proper peer status API so we can tell when the
+      // peer is connected. For now we just wait a bit.
+      await pause(50)
+
      // The empty repo should be notified of the new peer, send it a request
      // and eventually resolve the handle to "READY"
-      await
+      const handle = await a.find<TestDoc>(url)
+      expect(handle.state).toBe("ready")
    })
 
-    it
+    it("a deleted document from charlieRepo can be refetched", async () => {
      const { charlieRepo, aliceHandle, teardown } = await setup()
 
      const deletePromise = eventPromise(charlieRepo, "delete-document")
@@ -1128,12 +1070,9 @@ describe("Repo", () => {
        d.foo = "baz"
      })
      await changePromise
-      // TODO: remove this pause
-      await pause(100)
 
-      const handle3 = charlieRepo.find<TestDoc>(aliceHandle.url)
-
-      const doc3 = await handle3.doc()
+      const handle3 = await charlieRepo.find<TestDoc>(aliceHandle.url)
+      const doc3 = handle3.doc()
 
      assert.deepStrictEqual(doc3, { foo: "baz" })
 
@@ -1151,17 +1090,12 @@ describe("Repo", () => {
        const repo = getRandomItem([aliceRepo, bobRepo, charlieRepo])
        const docs = Object.values(repo.handles)
        const doc =
-            Math.random() < 0.5
+          Math.random() < 0.5
            ? // heads, create a new doc
              repo.create<TestDoc>()
            : // tails, pick a random doc
              (getRandomItem(docs) as DocHandle<TestDoc>)
 
-        // make sure the doc is ready
-        if (!doc.isReady()) {
-          await doc.doc()
-        }
-
        // make a random change to it
        doc.change(d => {
          d.foo = Math.random().toString()
@@ -1177,10 +1111,10 @@ describe("Repo", () => {
 
      const data = { presence: "alice" }
 
-      const aliceHandle = aliceRepo.find<TestDoc>(
+      const aliceHandle = await aliceRepo.find<TestDoc>(
        stringifyAutomergeUrl({ documentId: notForCharlie })
      )
-      const bobHandle = bobRepo.find<TestDoc>(
+      const bobHandle = await bobRepo.find<TestDoc>(
        stringifyAutomergeUrl({ documentId: notForCharlie })
      )
 
@@ -1196,11 +1130,8 @@ describe("Repo", () => {
        setTimeout(resolve, 100)
      })
 
-      const messagePromise = eventPromise(bobHandle, "ephemeral-message")
      aliceHandle.broadcast(data)
-
-      // const { message } = await eventPromise(bobHandle, "ephemeral-message")
-      const { message } = await messagePromise
+      const { message } = await eventPromise(bobHandle, "ephemeral-message")
 
      assert.deepStrictEqual(message, data)
      assert.equal(charlieRepo.handles[notForCharlie], undefined, "charlie no")
@@ -1209,7 +1140,111 @@ describe("Repo", () => {
      teardown()
    })
 
-    it
+    it("should save sync state of other peers", async () => {
+      const { bobRepo, teardown, charlieRepo } = await setup({
+        connectAlice: false,
+      })
+
+      const bobHandle = bobRepo.create<TestDoc>()
+      bobHandle.change(d => {
+        d.foo = "bar"
+      })
+
+      await pause(200)
+
+      // bob should store the sync state of charlie
+      const storedSyncState = await bobRepo.storageSubsystem.loadSyncState(
+        bobHandle.documentId,
+        await charlieRepo!.storageSubsystem.id()
+      )
+      assert.deepStrictEqual(
+        encodeHeads(storedSyncState.sharedHeads),
+        bobHandle.heads()
+      )
+
+      teardown()
+    })
+
+    it("should not save sync state of ephemeral peers", async () => {
+      const { bobRepo, teardown, charlieRepo } = await setup({
+        connectAlice: false,
+        isCharlieEphemeral: true,
+      })
+
+      const bobHandle = bobRepo.create<TestDoc>()
+      bobHandle.change(d => {
+        d.foo = "bar"
+      })
+
+      await pause(200)
+
+      // bob should not store the sync state for charlie because charly is an ephemeral peer
+      const storedSyncState = await bobRepo.storageSubsystem.loadSyncState(
+        bobHandle.documentId,
+        await charlieRepo!.storageSubsystem.id()
+      )
+      assert.deepStrictEqual(storedSyncState, undefined)
+
+      teardown()
+    })
+
+    it("should load sync state from storage", async () => {
+      const { bobRepo, teardown, charlie, charlieRepo, bobStorage, bob } =
+        await setup({
+          connectAlice: false,
+        })
+
+      // create a new doc and count sync messages
+      const bobHandle = bobRepo.create<TestDoc>()
+      bobHandle.change(d => {
+        d.foo = "bar"
+      })
+      let bobSyncMessages = 0
+      bobRepo.networkSubsystem.on("message", message => {
+        if (message.type === "sync") {
+          bobSyncMessages++
+        }
+      })
+      await pause(500)
+
+      // repo has no stored sync state for charlie so we should see 2 sync messages
+      assert.strictEqual(bobSyncMessages, 2)
+
+      await bobRepo.flush()
+
+      // setup new repo which uses bob's storage
+      const bob2Repo = new Repo({
+        storage: bobStorage,
+        peerId: "bob-2" as PeerId,
+      })
+
+      // connnect it with charlie
+      const channel = new MessageChannel()
+      bob2Repo.networkSubsystem.addNetworkAdapter(
+        new MessageChannelNetworkAdapter(channel.port2)
+      )
+      charlieRepo.networkSubsystem.addNetworkAdapter(
+        new MessageChannelNetworkAdapter(channel.port1)
+      )
+
+      // lookup doc we've previously created and count the messages
+      bob2Repo.find(bobHandle.documentId)
+      let bob2SyncMessages = 0
+      bob2Repo.networkSubsystem.on("message", message => {
+        if (message.type === "sync") {
+          bob2SyncMessages++
+        }
+      })
+      await pause(100)
+
+      // repo has stored sync state for charlie so we should see one sync messages
+      assert.strictEqual(bob2SyncMessages, 1)
+
+      channel.port1.close()
+      teardown()
+    })
+
+    it("should report the remote heads when they change", async () => {
      const { bobRepo, charlieRepo, teardown } = await setup({
        connectAlice: false,
      })
@@ -1225,15 +1260,14 @@ describe("Repo", () => {
 
      const nextRemoteHeadsPromise = new Promise<{
        storageId: StorageId
-        heads:
+        heads: UrlHeads
      }>(resolve => {
        handle.on("remote-heads", ({ storageId, heads }) => {
          resolve({ storageId, heads })
        })
      })
 
-      const charlieHandle = charlieRepo.find<TestDoc>(handle.url)
-      await charlieHandle.whenReady()
+      const charlieHandle = await charlieRepo.find<TestDoc>(handle.url)
 
      // make a change on charlie
      charlieHandle.change(d => {
@@ -1270,35 +1304,7 @@ describe("Repo", () => {
    })
  })
 
-
-    const alice = "alice" as PeerId
-    const bob = "bob" as PeerId
-    const [aliceAdapter, bobAdapter] = DummyNetworkAdapter.createConnectedPair()
-    const aliceRepo = new Repo({
-      network: [aliceAdapter],
-      peerId: alice,
-    })
-    const bobRepo = new Repo({
-      network: [bobAdapter],
-      peerId: bob,
-    })
-    const aliceDoc = aliceRepo.create()
-    aliceDoc.change((doc: any) => (doc.text = "Hello world"))
-
-    const bobDoc = bobRepo.find(aliceDoc.url)
-    await eventPromise(bobDoc, "unavailable")
-
-    aliceAdapter.peerCandidate(bob)
-    // Bob isn't yet connected to Alice and can't respond to her sync message
-    await pause(100)
-    bobAdapter.peerCandidate(alice)
-
-    await bobDoc.whenReady()
-
-    assert.equal(bobDoc.isReady(), true)
-  })
-
-  describe.skip("with peers (mesh network)", () => {
+  describe("with peers (mesh network)", () => {
    const setup = async () => {
      // Set up three repos; connect Alice to Bob, Bob to Charlie, and Alice to Charlie
 
@@ -1359,8 +1365,8 @@ describe("Repo", () => {
 
      const aliceHandle = aliceRepo.create<TestDoc>()
 
-      const bobHandle = bobRepo.find(aliceHandle.url)
-      const charlieHandle = charlieRepo.find(aliceHandle.url)
+      const bobHandle = await bobRepo.find(aliceHandle.url)
+      const charlieHandle = await charlieRepo.find(aliceHandle.url)
 
      // Alice should not receive her own ephemeral message
      aliceHandle.on("ephemeral-message", () => {
@@ -1398,9 +1404,8 @@ describe("Repo", () => {
      // pause to let the sync happen
      await pause(50)
 
-      const charlieHandle = charlieRepo.find(handle2.url)
-
-      assert.deepStrictEqual(charlieHandle.docSync(), { foo: "bar" })
+      const charlieHandle = await charlieRepo.find(handle2.url)
+      assert.deepStrictEqual(charlieHandle.doc(), { foo: "bar" })
 
      teardown()
    })
@@ -1417,9 +1422,8 @@ describe("Repo", () => {
      // pause to let the sync happen
      await pause(50)
 
-      const charlieHandle = charlieRepo.find(handle2.url)
-
-      assert.deepStrictEqual(charlieHandle.docSync(), { foo: "bar" })
+      const charlieHandle = await charlieRepo.find(handle2.url)
+      assert.deepStrictEqual(charlieHandle.doc(), { foo: "bar" })
 
      // now make a change to doc2 on bobs side and merge it into doc1
      handle2.change(d => {
@@ -1430,14 +1434,13 @@ describe("Repo", () => {
      // wait for the network to do it's thang
      await pause(350)
 
-
-      assert.deepStrictEqual(charlieHandle.docSync(), { foo: "baz" })
+      assert.deepStrictEqual(charlieHandle.doc(), { foo: "baz" })
 
      teardown()
    })
  })
 
-  describe
+  describe("the denylist", () => {
    it("should immediately return an unavailable message in response to a request for a denylisted document", async () => {
      const storage = new DummyStorageAdapter()
 
@@ -1466,100 +1469,162 @@ describe("Repo", () => {
        eventPromise(client.networkSubsystem, "peer"),
      ])
 
-
-      const clientDoc = client.find(doc.url)
-
+      await expect(async () => {
+        const clientDoc = await client.find(doc.url)
+      }).rejects.toThrow(/Document (.*) is unavailable/)
 
      const openDocs = Object.keys(server.metrics().documents).length
      assert.deepEqual(openDocs, 0)
    })
  })
-
-  function endsWithTwoZerosInDecimal(hexString: string) {
-    // Validate input is 32 bytes (64 characters) of hex
-    if (!/^[0-9a-fA-F]+$/.test(hexString)) {
-      throw new Error("Input must be a hexadecimal string, got " + hexString)
-    }
-    // Convert hex to decimal string
-    const decimal = BigInt("0x" + hexString).toString()
-    // Check if the last two digits are zeros
-    return decimal.endsWith("00")
-  }
+})
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-  }
-  })
+describe("Repo heads-in-URLs functionality", () => {
+  const setup = () => {
+    const repo = new Repo({})
+    const handle = repo.create()
+    handle.change((doc: any) => (doc.title = "Hello World"))
+    return { repo, handle }
+  }
+
+  it("finds a document view by URL with heads", async () => {
+    const { repo, handle } = setup()
+    const heads = handle.heads()!
+    const url = stringifyAutomergeUrl({ documentId: handle.documentId, heads })
+    const view = await repo.find(url)
+    expect(view.doc()).toEqual({ title: "Hello World" })
  })
 
-
-
-
+  it("returns a view, not the actual handle, when finding by URL with heads", async () => {
+    const { repo, handle } = setup()
+    const heads = handle.heads()!
+    await handle.change((doc: any) => (doc.title = "Changed"))
+    const url = stringifyAutomergeUrl({ documentId: handle.documentId, heads })
+    const view = await repo.find(url)
+    expect(view.doc()).toEqual({ title: "Hello World" })
+    expect(handle.doc()).toEqual({ title: "Changed" })
+  })
 
-
+  it("changes to a document view do not affect the original", async () => {
+    const { repo, handle } = setup()
+    const heads = handle.heads()!
+    const url = stringifyAutomergeUrl({ documentId: handle.documentId, heads })
+    const view = await repo.find(url)
+    expect(() =>
+      view.change((doc: any) => (doc.title = "Changed in View"))
+    ).toThrow()
+    expect(handle.doc()).toEqual({ title: "Hello World" })
+  })
 
-
-
-
-
-
-
-
-      peerId: "bob" as PeerId,
-    })
-    return { alice, bob }
-  }
+  it("document views are read-only", async () => {
+    const { repo, handle } = setup()
+    const heads = handle.heads()!
+    const url = stringifyAutomergeUrl({ documentId: handle.documentId, heads })
+    const view = await repo.find(url)
+    expect(() => view.change((doc: any) => (doc.title = "Changed"))).toThrow()
+  })
 
-
-
-
-
-
-
-    })
+  it("finds the latest document when given a URL without heads", async () => {
+    const { repo, handle } = setup()
+    await handle.change((doc: any) => (doc.title = "Changed"))
+    const found = await repo.find(handle.url)
+    expect(found.doc()).toEqual({ title: "Changed" })
+  })
 
-
-
-
+  it("getHeadsFromUrl returns heads array if present or undefined", () => {
+    const { repo, handle } = setup()
+    const heads = handle.heads()!
+    const url = stringifyAutomergeUrl({ documentId: handle.documentId, heads })
+    expect(getHeadsFromUrl(url)).toEqual(heads)
+
+    const urlWithoutHeads = generateAutomergeUrl()
+    expect(getHeadsFromUrl(urlWithoutHeads)).toBeUndefined()
+  })
 
-
-
+  it("isValidAutomergeUrl returns true for valid URLs", () => {
+    const { repo, handle } = setup()
+    const url = generateAutomergeUrl()
+    expect(isValidAutomergeUrl(url)).toBe(true)
 
-
-
-
+    const urlWithHeads = stringifyAutomergeUrl({
+      documentId: handle.documentId,
+      heads: handle.heads()!,
    })
+    expect(isValidAutomergeUrl(urlWithHeads)).toBe(true)
+  })
 
-
-
-
+  it("isValidAutomergeUrl returns false for invalid URLs", () => {
+    const { repo, handle } = setup()
+    expect(isValidAutomergeUrl("not a url")).toBe(false)
+    expect(isValidAutomergeUrl("automerge:invalidid")).toBe(false)
+    expect(isValidAutomergeUrl("automerge:validid#invalidhead")).toBe(false)
+  })
 
-
-
+  it("parseAutomergeUrl extracts documentId and heads", () => {
+    const { repo, handle } = setup()
+    const url = stringifyAutomergeUrl({
+      documentId: handle.documentId,
+      heads: handle.heads()!,
+    })
+    const parsed = parseAutomergeUrl(url)
+    expect(parsed.documentId).toBe(handle.documentId)
+    expect(parsed.heads).toEqual(handle.heads())
+  })
 
-
-
+  it("stringifyAutomergeUrl creates valid URL", () => {
+    const { repo, handle } = setup()
+    const url = stringifyAutomergeUrl({
+      documentId: handle.documentId,
+      heads: handle.heads()!,
+    })
+    expect(isValidAutomergeUrl(url)).toBe(true)
+    const parsed = parseAutomergeUrl(url)
+    expect(parsed.documentId).toBe(handle.documentId)
+    expect(parsed.heads).toEqual(handle.heads())
+  })
+})
 
-
-
-
+describe("Repo.find() abort behavior", () => {
+  it("aborts immediately if signal is already aborted", async () => {
+    const repo = new Repo()
+    const controller = new AbortController()
+    controller.abort()
+
+    await expect(
+      repo.find(generateAutomergeUrl(), { signal: controller.signal })
+    ).rejects.toThrow("Operation aborted")
+  })
+
+  it("can abort while waiting for ready state", async () => {
+    // Create a repo with no network adapters so document can't become ready
+    const repo = new Repo()
+    const url = generateAutomergeUrl()
+
+    const controller = new AbortController()
+
+    // Start find and abort after a moment
+    const findPromise = repo.find(url, { signal: controller.signal })
+    controller.abort()
+
+    await expect(findPromise).rejects.toThrow("Operation aborted")
+    await expect(findPromise).rejects.not.toThrow("unavailable")
+  })
+
+  it("returns handle immediately when allow unavailable is true, even with abort signal", async () => {
+    const repo = new Repo()
+    const controller = new AbortController()
+    const url = generateAutomergeUrl()
+
+    const handle = await repo.find(url, {
+      allowableStates: ["unavailable"],
+      signal: controller.signal,
    })
+
+    expect(handle).toBeDefined()
+
+    // Abort shouldn't affect the result since we skipped ready
+    controller.abort()
+    expect(handle.url).toBe(url)
  })
})
```
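Two smaller additions are visible in the new tests: an Automerge URL can now carry document heads, in which case `find()` resolves to a read-only view of the document pinned at that point in history, and `find()` accepts an `AbortSignal`. A sketch based only on behavior the tests above assert (the variable names and the no-network `Repo` setup are illustrative):

```ts
import {
  Repo,
  generateAutomergeUrl,
  stringifyAutomergeUrl,
} from "@automerge/automerge-repo"

const repo = new Repo({ network: [] })
const handle = repo.create<{ title: string }>()
handle.change(d => (d.title = "Hello World"))

// Pin the current heads into the URL; find() then resolves to a read-only
// view of the document as it stood at those heads, even after later changes.
const url = stringifyAutomergeUrl({
  documentId: handle.documentId,
  heads: handle.heads()!,
})
const view = await repo.find<{ title: string }>(url)
console.log(view.doc()) // { title: "Hello World" }

// find() also takes an AbortSignal; per the abort-behavior tests, an aborted
// lookup rejects with "Operation aborted" rather than "unavailable".
const controller = new AbortController()
const pending = repo.find(generateAutomergeUrl(), {
  signal: controller.signal,
})
controller.abort()
await pending.catch(e => console.log(e.message)) // "Operation aborted"
```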