@automerge/automerge-repo 2.0.0-alpha.7 → 2.0.0-beta.2
This diff compares the contents of two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
- package/README.md +8 -8
- package/dist/AutomergeUrl.d.ts +17 -5
- package/dist/AutomergeUrl.d.ts.map +1 -1
- package/dist/AutomergeUrl.js +71 -24
- package/dist/DocHandle.d.ts +68 -45
- package/dist/DocHandle.d.ts.map +1 -1
- package/dist/DocHandle.js +166 -69
- package/dist/FindProgress.d.ts +30 -0
- package/dist/FindProgress.d.ts.map +1 -0
- package/dist/FindProgress.js +1 -0
- package/dist/RemoteHeadsSubscriptions.d.ts +4 -5
- package/dist/RemoteHeadsSubscriptions.d.ts.map +1 -1
- package/dist/RemoteHeadsSubscriptions.js +4 -1
- package/dist/Repo.d.ts +46 -6
- package/dist/Repo.d.ts.map +1 -1
- package/dist/Repo.js +252 -67
- package/dist/helpers/abortable.d.ts +36 -0
- package/dist/helpers/abortable.d.ts.map +1 -0
- package/dist/helpers/abortable.js +47 -0
- package/dist/helpers/arraysAreEqual.d.ts.map +1 -1
- package/dist/helpers/bufferFromHex.d.ts +3 -0
- package/dist/helpers/bufferFromHex.d.ts.map +1 -0
- package/dist/helpers/bufferFromHex.js +13 -0
- package/dist/helpers/debounce.d.ts.map +1 -1
- package/dist/helpers/eventPromise.d.ts.map +1 -1
- package/dist/helpers/headsAreSame.d.ts +2 -2
- package/dist/helpers/headsAreSame.d.ts.map +1 -1
- package/dist/helpers/mergeArrays.d.ts +1 -1
- package/dist/helpers/mergeArrays.d.ts.map +1 -1
- package/dist/helpers/pause.d.ts.map +1 -1
- package/dist/helpers/tests/network-adapter-tests.d.ts.map +1 -1
- package/dist/helpers/tests/network-adapter-tests.js +13 -13
- package/dist/helpers/tests/storage-adapter-tests.d.ts +2 -2
- package/dist/helpers/tests/storage-adapter-tests.d.ts.map +1 -1
- package/dist/helpers/tests/storage-adapter-tests.js +25 -48
- package/dist/helpers/throttle.d.ts.map +1 -1
- package/dist/helpers/withTimeout.d.ts.map +1 -1
- package/dist/index.d.ts +2 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +1 -1
- package/dist/network/messages.d.ts.map +1 -1
- package/dist/storage/StorageSubsystem.d.ts +15 -1
- package/dist/storage/StorageSubsystem.d.ts.map +1 -1
- package/dist/storage/StorageSubsystem.js +50 -14
- package/dist/synchronizer/CollectionSynchronizer.d.ts +4 -3
- package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
- package/dist/synchronizer/CollectionSynchronizer.js +34 -15
- package/dist/synchronizer/DocSynchronizer.d.ts +3 -2
- package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -1
- package/dist/synchronizer/DocSynchronizer.js +51 -27
- package/dist/synchronizer/Synchronizer.d.ts +11 -0
- package/dist/synchronizer/Synchronizer.d.ts.map +1 -1
- package/dist/types.d.ts +4 -1
- package/dist/types.d.ts.map +1 -1
- package/fuzz/fuzz.ts +3 -3
- package/package.json +3 -4
- package/src/AutomergeUrl.ts +101 -26
- package/src/DocHandle.ts +235 -82
- package/src/FindProgress.ts +48 -0
- package/src/RemoteHeadsSubscriptions.ts +11 -9
- package/src/Repo.ts +368 -74
- package/src/helpers/abortable.ts +62 -0
- package/src/helpers/bufferFromHex.ts +14 -0
- package/src/helpers/headsAreSame.ts +2 -2
- package/src/helpers/tests/network-adapter-tests.ts +14 -13
- package/src/helpers/tests/storage-adapter-tests.ts +44 -86
- package/src/index.ts +7 -0
- package/src/storage/StorageSubsystem.ts +66 -16
- package/src/synchronizer/CollectionSynchronizer.ts +37 -16
- package/src/synchronizer/DocSynchronizer.ts +59 -32
- package/src/synchronizer/Synchronizer.ts +14 -0
- package/src/types.ts +4 -1
- package/test/AutomergeUrl.test.ts +130 -0
- package/test/CollectionSynchronizer.test.ts +4 -4
- package/test/DocHandle.test.ts +181 -38
- package/test/DocSynchronizer.test.ts +10 -3
- package/test/Repo.test.ts +376 -203
- package/test/StorageSubsystem.test.ts +80 -1
- package/test/remoteHeads.test.ts +27 -12
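
The change that recurs throughout the test diffs below is that `Repo.find()` is now asynchronous: it resolves to a `DocHandle` rather than returning one synchronously, and `handle.doc()` becomes a plain synchronous accessor. A minimal migration sketch, assuming a repo with no adapters configured (the `TestDoc` type is a placeholder):

```ts
import { Repo } from "@automerge/automerge-repo"

type TestDoc = { foo: string }

const repo = new Repo({ network: [] })

const created = repo.create<TestDoc>()
created.change(d => {
  d.foo = "bar"
})

// 2.0.0-alpha.7: const handle = repo.find<TestDoc>(created.url)
// 2.0.0-beta.2: find() resolves once the document is available
const handle = await repo.find<TestDoc>(created.url)
console.log(handle.doc().foo) // "bar"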
package/src/helpers/tests/network-adapter-tests.ts
CHANGED

@@ -49,9 +49,10 @@ export function runNetworkAdapterTests(_setup: SetupFn, title?: string): void {
     // Alice creates a document
     const aliceHandle = aliceRepo.create<TestDoc>()
 
-    //
-    await
-
+    // TODO: ... let connections complete. this shouldn't be necessary.
+    await pause(50)
+
+    const bobHandle = await bobRepo.find<TestDoc>(aliceHandle.url)
 
     // Alice changes the document
     aliceHandle.change(d => {
@@ -60,7 +61,7 @@ export function runNetworkAdapterTests(_setup: SetupFn, title?: string): void {
 
     // Bob receives the change
     await eventPromise(bobHandle, "change")
-    assert.equal((await bobHandle.doc()
+    assert.equal((await bobHandle).doc()?.foo, "bar")
 
     // Bob changes the document
     bobHandle.change(d => {
@@ -69,7 +70,7 @@ export function runNetworkAdapterTests(_setup: SetupFn, title?: string): void {
 
     // Alice receives the change
    await eventPromise(aliceHandle, "change")
-    assert.equal(
+    assert.equal(aliceHandle.doc().foo, "baz")
   }
 
   // Run the test in both directions, in case they're different types of adapters
@@ -100,9 +101,9 @@ export function runNetworkAdapterTests(_setup: SetupFn, title?: string): void {
      const docUrl = aliceHandle.url
 
      // Bob and Charlie receive the document
-      await
-      const bobHandle = bobRepo.find<TestDoc>(docUrl)
-      const charlieHandle = charlieRepo.find<TestDoc>(docUrl)
+      await pause(50)
+      const bobHandle = await bobRepo.find<TestDoc>(docUrl)
+      const charlieHandle = await charlieRepo.find<TestDoc>(docUrl)
 
      // Alice changes the document
      aliceHandle.change(d => {
@@ -111,8 +112,8 @@ export function runNetworkAdapterTests(_setup: SetupFn, title?: string): void {
 
      // Bob and Charlie receive the change
      await eventPromises([bobHandle, charlieHandle], "change")
-      assert.equal(
-      assert.equal(
+      assert.equal(bobHandle.doc().foo, "bar")
+      assert.equal(charlieHandle.doc().foo, "bar")
 
      // Charlie changes the document
      charlieHandle.change(d => {
@@ -121,8 +122,8 @@ export function runNetworkAdapterTests(_setup: SetupFn, title?: string): void {
 
      // Alice and Bob receive the change
      await eventPromises([aliceHandle, bobHandle], "change")
-      assert.equal(
-      assert.equal(
+      assert.equal(bobHandle.doc().foo, "baz")
+      assert.equal(charlieHandle.doc().foo, "baz")
 
      teardown()
    })
@@ -141,7 +142,7 @@ export function runNetworkAdapterTests(_setup: SetupFn, title?: string): void {
      )
 
      const aliceHandle = aliceRepo.create<TestDoc>()
-      const charlieHandle = charlieRepo.find(aliceHandle.url)
+      const charlieHandle = await charlieRepo.find(aliceHandle.url)
 
      // pause to give charlie a chance to let alice know it wants the doc
      await pause(100)
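
The updated tests condense to a repeatable flow for any pair of connected adapters: create on one repo, pause briefly for connections, `await find()` on the other, then assert after a "change" event. A self-contained sketch of that flow; the message-channel adapter package and the inlined `pause`/`eventPromise` stand-ins for the suite's helpers are assumptions:

```ts
import assert from "node:assert"
import { Repo, type DocHandle } from "@automerge/automerge-repo"
import { MessageChannelNetworkAdapter } from "@automerge/automerge-repo-network-messagechannel"

type TestDoc = { foo: string }

// Inlined stand-ins for the suite's pause/eventPromise helpers
const pause = (ms: number) => new Promise(r => setTimeout(r, ms))
const eventPromise = (handle: DocHandle<TestDoc>, event: "change") =>
  new Promise(resolve => handle.once(event, resolve))

// Two repos joined by a MessageChannel, standing in for the adapters under test
const { port1, port2 } = new MessageChannel()
const aliceRepo = new Repo({ network: [new MessageChannelNetworkAdapter(port1)] })
const bobRepo = new Repo({ network: [new MessageChannelNetworkAdapter(port2)] })

const aliceHandle = aliceRepo.create<TestDoc>()
await pause(50) // let connections complete, as the TODO above notes

const bobHandle = await bobRepo.find<TestDoc>(aliceHandle.url)
aliceHandle.change(d => {
  d.foo = "bar"
})
await eventPromise(bobHandle, "change")
assert.equal(bobHandle.doc()?.foo, "bar")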
package/src/helpers/tests/storage-adapter-tests.ts
CHANGED

@@ -1,4 +1,4 @@
-import { describe, expect, it } from "vitest"
+import { describe, expect, beforeEach, it as _it } from "vitest"
 
 import type { StorageAdapterInterface } from "../../storage/StorageAdapterInterface.js"
 
@@ -8,120 +8,90 @@ const PAYLOAD_C = () => new Uint8Array([2, 111, 74, 131, 236, 96, 142, 193])
 
 const LARGE_PAYLOAD = new Uint8Array(100000).map(() => Math.random() * 256)
 
-
-
-
-
-
+type AdapterTestContext = {
+  adapter: StorageAdapterInterface
+}
+
+const it = _it<AdapterTestContext>
+
+export function runStorageAdapterTests(setup: SetupFn, title?: string): void {
+  beforeEach<AdapterTestContext>(async ctx => {
+    const { adapter, teardown = NO_OP } = await setup()
+    ctx.adapter = adapter
+    return teardown
+  })
 
  describe(`Storage adapter acceptance tests ${
    title ? `(${title})` : ""
  }`, () => {
    describe("load", () => {
-      it("should return undefined if there is no data", async () => {
-        const { adapter, teardown } = await setup()
-
+      it("should return undefined if there is no data", async ({ adapter }) => {
        const actual = await adapter.load(["AAAAA", "sync-state", "xxxxx"])
        expect(actual).toBeUndefined()
-
-        teardown()
      })
    })
 
    describe("save and load", () => {
-      it("should return data that was saved", async () => {
-        const { adapter, teardown } = await setup()
-
+      it("should return data that was saved", async ({ adapter }) => {
        await adapter.save(["storage-adapter-id"], PAYLOAD_A())
        const actual = await adapter.load(["storage-adapter-id"])
        expect(actual).toStrictEqual(PAYLOAD_A())
-
-        teardown()
      })
 
-      it("should work with composite keys", async () => {
-        const { adapter, teardown } = await setup()
-
+      it("should work with composite keys", async ({ adapter }) => {
        await adapter.save(["AAAAA", "sync-state", "xxxxx"], PAYLOAD_A())
        const actual = await adapter.load(["AAAAA", "sync-state", "xxxxx"])
        expect(actual).toStrictEqual(PAYLOAD_A())
-
-        teardown()
      })
 
-      it("should work with a large payload", async () => {
-        const { adapter, teardown } = await setup()
-
+      it("should work with a large payload", async ({ adapter }) => {
        await adapter.save(["AAAAA", "sync-state", "xxxxx"], LARGE_PAYLOAD)
        const actual = await adapter.load(["AAAAA", "sync-state", "xxxxx"])
        expect(actual).toStrictEqual(LARGE_PAYLOAD)
-
-        teardown()
      })
    })
 
    describe("loadRange", () => {
-      it("should return an empty array if there is no data", async (
-
-
+      it("should return an empty array if there is no data", async ({
+        adapter,
+      }) => {
        expect(await adapter.loadRange(["AAAAA"])).toStrictEqual([])
-
-        teardown()
      })
    })
 
    describe("save and loadRange", () => {
-      it("should return all the data that matches the key", async (
-
-
+      it("should return all the data that matches the key", async ({
+        adapter,
+      }) => {
        await adapter.save(["AAAAA", "sync-state", "xxxxx"], PAYLOAD_A())
        await adapter.save(["AAAAA", "snapshot", "yyyyy"], PAYLOAD_B())
        await adapter.save(["AAAAA", "sync-state", "zzzzz"], PAYLOAD_C())
 
-        expect(await adapter.loadRange(["AAAAA"])).toStrictEqual(
-
-
-
-
-        ])
-        )
-
-        expect(await adapter.loadRange(["AAAAA", "sync-state"])).toStrictEqual(
-          expect.arrayContaining([
-            { key: ["AAAAA", "sync-state", "xxxxx"], data: PAYLOAD_A() },
-            { key: ["AAAAA", "sync-state", "zzzzz"], data: PAYLOAD_C() },
-          ])
-        )
-
-        teardown()
-      })
+        expect(await adapter.loadRange(["AAAAA"])).toStrictEqual([
+          { key: ["AAAAA", "sync-state", "xxxxx"], data: PAYLOAD_A() },
+          { key: ["AAAAA", "snapshot", "yyyyy"], data: PAYLOAD_B() },
+          { key: ["AAAAA", "sync-state", "zzzzz"], data: PAYLOAD_C() },
+        ])
 
-
-
+        expect(await adapter.loadRange(["AAAAA", "sync-state"])).toStrictEqual([
+          { key: ["AAAAA", "sync-state", "xxxxx"], data: PAYLOAD_A() },
+          { key: ["AAAAA", "sync-state", "zzzzz"], data: PAYLOAD_C() },
+        ])
+      })
 
+      it("should only load values that match they key", async ({ adapter }) => {
        await adapter.save(["AAAAA", "sync-state", "xxxxx"], PAYLOAD_A())
        await adapter.save(["BBBBB", "sync-state", "zzzzz"], PAYLOAD_C())
 
        const actual = await adapter.loadRange(["AAAAA"])
-        expect(actual).toStrictEqual(
-
-
-        ])
-        )
-        expect(actual).toStrictEqual(
-          expect.not.arrayContaining([
-            { key: ["BBBBB", "sync-state", "zzzzz"], data: PAYLOAD_C() },
-          ])
-        )
-
-        teardown()
+        expect(actual).toStrictEqual([
+          { key: ["AAAAA", "sync-state", "xxxxx"], data: PAYLOAD_A() },
+        ])
      })
    })
 
    describe("save and remove", () => {
-      it("after removing, should be empty", async () => {
-        const { adapter, teardown } = await setup()
-
+      it("after removing, should be empty", async ({ adapter }) => {
        await adapter.save(["AAAAA", "snapshot", "xxxxx"], PAYLOAD_A())
        await adapter.remove(["AAAAA", "snapshot", "xxxxx"])
 
@@ -129,30 +99,24 @@ export function runStorageAdapterTests(_setup: SetupFn, title?: string): void {
        expect(
          await adapter.load(["AAAAA", "snapshot", "xxxxx"])
        ).toBeUndefined()
-
-        teardown()
      })
    })
 
    describe("save and save", () => {
-      it("should overwrite data saved with the same key", async (
-
-
+      it("should overwrite data saved with the same key", async ({
+        adapter,
+      }) => {
        await adapter.save(["AAAAA", "sync-state", "xxxxx"], PAYLOAD_A())
        await adapter.save(["AAAAA", "sync-state", "xxxxx"], PAYLOAD_B())
 
        expect(await adapter.loadRange(["AAAAA", "sync-state"])).toStrictEqual([
          { key: ["AAAAA", "sync-state", "xxxxx"], data: PAYLOAD_B() },
        ])
-
-        teardown()
      })
    })
 
    describe("removeRange", () => {
-      it("should remove a range of records", async () => {
-        const { adapter, teardown } = await setup()
-
+      it("should remove a range of records", async ({ adapter }) => {
        await adapter.save(["AAAAA", "sync-state", "xxxxx"], PAYLOAD_A())
        await adapter.save(["AAAAA", "snapshot", "yyyyy"], PAYLOAD_B())
        await adapter.save(["AAAAA", "sync-state", "zzzzz"], PAYLOAD_C())
 
@@ -162,13 +126,9 @@ export function runStorageAdapterTests(_setup: SetupFn, title?: string): void {
        expect(await adapter.loadRange(["AAAAA"])).toStrictEqual([
          { key: ["AAAAA", "snapshot", "yyyyy"], data: PAYLOAD_B() },
        ])
-
-        teardown()
      })
 
-      it("should not remove records that don't match", async () => {
-        const { adapter, teardown } = await setup()
-
+      it("should not remove records that don't match", async ({ adapter }) => {
        await adapter.save(["AAAAA", "sync-state", "xxxxx"], PAYLOAD_A())
        await adapter.save(["BBBBB", "sync-state", "zzzzz"], PAYLOAD_B())
 
@@ -178,8 +138,6 @@ export function runStorageAdapterTests(_setup: SetupFn, title?: string): void {
        expect(actual).toStrictEqual([
          { key: ["BBBBB", "sync-state", "zzzzz"], data: PAYLOAD_B() },
        ])
-
-        teardown()
      })
    })
  })
@@ -189,5 +147,5 @@ const NO_OP = () => {}
 
 export type SetupFn = () => Promise<{
  adapter: StorageAdapterInterface
-  teardown?: () => void
+  teardown?: () => void | Promise<void>
 }>
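
For adapter authors, the practical upshot is that `setup()` now runs once per test via `beforeEach`, test bodies receive the adapter through vitest's test context, and `teardown` may be asynchronous. A sketch of wiring a custom adapter into the suite; the in-memory adapter is hypothetical and the helper import path is an assumption based on the file layout above:

```ts
import type { StorageAdapterInterface } from "@automerge/automerge-repo"
// Assumed path; adjust to wherever your build exposes the test helpers
import { runStorageAdapterTests, type SetupFn } from "./helpers/tests/storage-adapter-tests.js"

// Hypothetical in-memory adapter, flattening composite keys with "/"
class InMemoryStorageAdapter implements StorageAdapterInterface {
  #data = new Map<string, Uint8Array>()

  async load(key: string[]) {
    return this.#data.get(key.join("/"))
  }
  async save(key: string[], data: Uint8Array) {
    this.#data.set(key.join("/"), data)
  }
  async remove(key: string[]) {
    this.#data.delete(key.join("/"))
  }
  async loadRange(keyPrefix: string[]) {
    const prefix = keyPrefix.join("/")
    return [...this.#data.entries()]
      .filter(([k]) => k === prefix || k.startsWith(prefix + "/"))
      .map(([k, data]) => ({ key: k.split("/"), data }))
  }
  async removeRange(keyPrefix: string[]) {
    const prefix = keyPrefix.join("/")
    for (const k of [...this.#data.keys()]) {
      if (k === prefix || k.startsWith(prefix + "/")) this.#data.delete(k)
    }
  }
}

// teardown may now return a Promise (() => void | Promise<void>)
const setup: SetupFn = async () => ({
  adapter: new InMemoryStorageAdapter(),
  teardown: async () => {
    // e.g. close database connections here
  },
})

runStorageAdapterTests(setup, "in-memory")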
package/src/index.ts
CHANGED
@@ -34,6 +34,8 @@ export {
  stringifyAutomergeUrl,
  interpretAsDocumentId,
  generateAutomergeUrl,
+  encodeHeads,
+  decodeHeads,
 } from "./AutomergeUrl.js"
 export { Repo } from "./Repo.js"
 export { NetworkAdapter } from "./network/NetworkAdapter.js"
@@ -75,6 +77,11 @@ export type {
  PeerMetadata,
 } from "./network/NetworkAdapterInterface.js"
 
+export type {
+  NetworkSubsystemEvents,
+  PeerPayload,
+} from "./network/NetworkSubsystem.js"
+
 export type {
  DocumentUnavailableMessage,
  EphemeralMessage,
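
The newly exported `encodeHeads` and `decodeHeads` convert between raw Automerge heads (as returned by `A.getHeads`) and the encoded representation the repo now uses internally (see the `headsAreSame(encodeHeads(...), ...)` call in the StorageSubsystem diff below). A hedged round-trip sketch, assuming the beta's synchronous `handle.doc()`; the encoded form itself should be treated as opaque:

```ts
import * as A from "@automerge/automerge"
import { Repo, encodeHeads, decodeHeads } from "@automerge/automerge-repo"

const repo = new Repo({ network: [] })
const handle = repo.create<{ foo: string }>()
handle.change(d => {
  d.foo = "bar"
})

const raw = A.getHeads(handle.doc()) // raw change-hash heads
const encoded = encodeHeads(raw) // encoded heads, as used by headsAreSame
console.log(decodeHeads(encoded)) // round-trips back to the raw heads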
package/src/storage/StorageSubsystem.ts
CHANGED

@@ -6,14 +6,24 @@ import { type DocumentId } from "../types.js"
 import { StorageAdapterInterface } from "./StorageAdapterInterface.js"
 import { ChunkInfo, StorageKey, StorageId } from "./types.js"
 import { keyHash, headsHash } from "./keyHash.js"
-import { chunkTypeFromKey } from "./chunkTypeFromKey.js"
 import * as Uuid from "uuid"
+import { EventEmitter } from "eventemitter3"
+import { encodeHeads } from "../AutomergeUrl.js"
+
+type StorageSubsystemEvents = {
+  "document-loaded": (arg: {
+    documentId: DocumentId
+    durationMillis: number
+    numOps: number
+    numChanges: number
+  }) => void
+}
 
 /**
  * The storage subsystem is responsible for saving and loading Automerge documents to and from
  * storage adapter. It also provides a generic key/value storage interface for other uses.
  */
-export class StorageSubsystem {
+export class StorageSubsystem extends EventEmitter<StorageSubsystemEvents> {
  /** The storage adapter to use for saving and loading documents */
  #storageAdapter: StorageAdapterInterface
 
@@ -29,6 +39,7 @@ export class StorageSubsystem {
  #log = debug(`automerge-repo:storage-subsystem`)
 
  constructor(storageAdapter: StorageAdapterInterface) {
+    super()
    this.#storageAdapter = storageAdapter
  }
 
@@ -101,36 +112,73 @@ export class StorageSubsystem {
  // AUTOMERGE DOCUMENT STORAGE
 
  /**
-   * Loads
+   * Loads and combines document chunks from storage, with snapshots first.
   */
-  async
-  // Load
-  const
-
+  async loadDocData(documentId: DocumentId): Promise<Uint8Array | null> {
+    // Load snapshots first
+    const snapshotChunks = await this.#storageAdapter.loadRange([
+      documentId,
+      "snapshot",
+    ])
+    const incrementalChunks = await this.#storageAdapter.loadRange([
+      documentId,
+      "incremental",
+    ])
+
+    const binaries: Uint8Array[] = []
    const chunkInfos: ChunkInfo[] = []
 
-
-
+    // Process snapshots first
+    for (const chunk of snapshotChunks) {
      if (chunk.data === undefined) continue
+      chunkInfos.push({
+        key: chunk.key,
+        type: "snapshot",
+        size: chunk.data.length,
+      })
+      binaries.push(chunk.data)
+    }
 
-
-
-
+    // Then process incrementals
+    for (const chunk of incrementalChunks) {
+      if (chunk.data === undefined) continue
      chunkInfos.push({
        key: chunk.key,
-        type:
+        type: "incremental",
        size: chunk.data.length,
      })
      binaries.push(chunk.data)
    }
+
+    // Store chunk infos for future reference
    this.#chunkInfos.set(documentId, chunkInfos)
 
+    // If no chunks were found, return null
+    if (binaries.length === 0) {
+      return null
+    }
+
    // Merge the chunks into a single binary
-
-
+    return mergeArrays(binaries)
+  }
+
+  /**
+   * Loads the Automerge document with the given ID from storage.
+   */
+  async loadDoc<T>(documentId: DocumentId): Promise<A.Doc<T> | null> {
+    // Load and combine chunks
+    const binary = await this.loadDocData(documentId)
+    if (!binary) return null
 
    // Load into an Automerge document
+    const start = performance.now()
    const newDoc = A.loadIncremental(A.init(), binary) as A.Doc<T>
+    const end = performance.now()
+    this.emit("document-loaded", {
+      documentId,
+      durationMillis: end - start,
+      ...A.stats(newDoc),
+    })
 
    // Record the latest heads for the document
    this.#storedHeads.set(documentId, A.getHeads(newDoc))
@@ -150,11 +198,13 @@ export class StorageSubsystem {
    if (!this.#shouldSave(documentId, doc)) return
 
    const sourceChunks = this.#chunkInfos.get(documentId) ?? []
+
    if (this.#shouldCompact(sourceChunks)) {
      await this.#saveTotal(documentId, doc, sourceChunks)
    } else {
      await this.#saveIncremental(documentId, doc)
    }
+
    this.#storedHeads.set(documentId, A.getHeads(doc))
  }
 
@@ -261,7 +311,7 @@ export class StorageSubsystem {
    }
 
    const newHeads = A.getHeads(doc)
-    if (headsAreSame(newHeads, oldHeads)) {
+    if (headsAreSame(encodeHeads(newHeads), encodeHeads(oldHeads))) {
      // the document hasn't changed
      return false
    }
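
The subsystem now extends `EventEmitter` and emits a "document-loaded" event carrying load timing plus the document's op and change counts (spread in from `A.stats`). A listener sketch; reaching the subsystem through `repo.storageSubsystem` is an assumption based on how `CollectionSynchronizer` accesses it below:

```ts
import { Repo } from "@automerge/automerge-repo"
import { NodeFSStorageAdapter } from "@automerge/automerge-repo-storage-nodefs"

const repo = new Repo({
  network: [],
  storage: new NodeFSStorageAdapter("./automerge-data"),
})

// Fires once per document loaded from storage, with the payload shape
// declared in StorageSubsystemEvents above
repo.storageSubsystem?.on(
  "document-loaded",
  ({ documentId, durationMillis, numOps, numChanges }) => {
    console.log(
      `loaded ${documentId} in ${durationMillis.toFixed(1)}ms ` +
        `(${numChanges} changes, ${numOps} ops)`
    )
  }
)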
package/src/synchronizer/CollectionSynchronizer.ts
CHANGED

@@ -1,9 +1,9 @@
 import debug from "debug"
 import { DocHandle } from "../DocHandle.js"
-import {
+import { parseAutomergeUrl } from "../AutomergeUrl.js"
 import { Repo } from "../Repo.js"
 import { DocMessage } from "../network/messages.js"
-import { DocumentId, PeerId } from "../types.js"
+import { AutomergeUrl, DocumentId, PeerId } from "../types.js"
 import { DocSynchronizer } from "./DocSynchronizer.js"
 import { Synchronizer } from "./Synchronizer.js"
 
@@ -21,23 +21,27 @@ export class CollectionSynchronizer extends Synchronizer {
  /** Used to determine if the document is know to the Collection and a synchronizer exists or is being set up */
  #docSetUp: Record<DocumentId, boolean> = {}
 
-
+  #denylist: DocumentId[]
+
+  constructor(private repo: Repo, denylist: AutomergeUrl[] = []) {
    super()
+    this.#denylist = denylist.map(url => parseAutomergeUrl(url).documentId)
  }
 
  /** Returns a synchronizer for the given document, creating one if it doesn't already exist. */
-  #fetchDocSynchronizer(
-  if (!this.docSynchronizers[documentId]) {
-
-
+  #fetchDocSynchronizer(handle: DocHandle<unknown>) {
+    if (!this.docSynchronizers[handle.documentId]) {
+      this.docSynchronizers[handle.documentId] =
+        this.#initDocSynchronizer(handle)
    }
-    return this.docSynchronizers[documentId]
+    return this.docSynchronizers[handle.documentId]
  }
 
  /** Creates a new docSynchronizer and sets it up to propagate messages */
  #initDocSynchronizer(handle: DocHandle<unknown>): DocSynchronizer {
    const docSynchronizer = new DocSynchronizer({
      handle,
+      peerId: this.repo.networkSubsystem.peerId,
      onLoadSyncState: async peerId => {
        if (!this.repo.storageSubsystem) {
          return
@@ -58,6 +62,7 @@ export class CollectionSynchronizer extends Synchronizer {
    docSynchronizer.on("message", event => this.emit("message", event))
    docSynchronizer.on("open-doc", event => this.emit("open-doc", event))
    docSynchronizer.on("sync-state", event => this.emit("sync-state", event))
+    docSynchronizer.on("metrics", event => this.emit("metrics", event))
    return docSynchronizer
  }
 
@@ -90,15 +95,31 @@ export class CollectionSynchronizer extends Synchronizer {
      throw new Error("received a message with an invalid documentId")
    }
 
+    if (this.#denylist.includes(documentId)) {
+      this.emit("metrics", {
+        type: "doc-denied",
+        documentId,
+      })
+      this.emit("message", {
+        type: "doc-unavailable",
+        documentId,
+        targetId: message.senderId,
+      })
+      return
+    }
+
    this.#docSetUp[documentId] = true
 
-    const
+    const handle = await this.repo.find(documentId, {
+      allowableStates: ["ready", "unavailable", "requesting"],
+    })
+    const docSynchronizer = this.#fetchDocSynchronizer(handle)
 
    docSynchronizer.receiveMessage(message)
 
    // Initiate sync with any new peers
    const peers = await this.#documentGenerousPeers(documentId)
-    docSynchronizer.beginSync(
+    void docSynchronizer.beginSync(
      peers.filter(peerId => !docSynchronizer.hasPeer(peerId))
    )
  }
@@ -106,14 +127,14 @@ export class CollectionSynchronizer extends Synchronizer {
  /**
   * Starts synchronizing the given document with all peers that we share it generously with.
   */
-  addDocument(
+  addDocument(handle: DocHandle<unknown>) {
    // HACK: this is a hack to prevent us from adding the same document twice
-    if (this.#docSetUp[documentId]) {
+    if (this.#docSetUp[handle.documentId]) {
      return
    }
-    const docSynchronizer = this.#fetchDocSynchronizer(
-    void this.#documentGenerousPeers(documentId).then(peers => {
-      docSynchronizer.beginSync(peers)
+    const docSynchronizer = this.#fetchDocSynchronizer(handle)
+    void this.#documentGenerousPeers(handle.documentId).then(peers => {
+      void docSynchronizer.beginSync(peers)
    })
  }
 
@@ -135,7 +156,7 @@ export class CollectionSynchronizer extends Synchronizer {
    for (const docSynchronizer of Object.values(this.docSynchronizers)) {
      const { documentId } = docSynchronizer
      void this.repo.sharePolicy(peerId, documentId).then(okToShare => {
-        if (okToShare) docSynchronizer.beginSync([peerId])
+        if (okToShare) void docSynchronizer.beginSync([peerId])
      })
    }
  }