@automerge/automerge-repo 2.0.0-collectionsync-alpha.1 → 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +8 -8
- package/dist/AutomergeUrl.d.ts +17 -5
- package/dist/AutomergeUrl.d.ts.map +1 -1
- package/dist/AutomergeUrl.js +71 -24
- package/dist/DocHandle.d.ts +33 -41
- package/dist/DocHandle.d.ts.map +1 -1
- package/dist/DocHandle.js +105 -66
- package/dist/FindProgress.d.ts +30 -0
- package/dist/FindProgress.d.ts.map +1 -0
- package/dist/FindProgress.js +1 -0
- package/dist/RemoteHeadsSubscriptions.d.ts +4 -5
- package/dist/RemoteHeadsSubscriptions.d.ts.map +1 -1
- package/dist/RemoteHeadsSubscriptions.js +4 -1
- package/dist/Repo.d.ts +24 -5
- package/dist/Repo.d.ts.map +1 -1
- package/dist/Repo.js +355 -169
- package/dist/helpers/abortable.d.ts +36 -0
- package/dist/helpers/abortable.d.ts.map +1 -0
- package/dist/helpers/abortable.js +47 -0
- package/dist/helpers/arraysAreEqual.d.ts.map +1 -1
- package/dist/helpers/bufferFromHex.d.ts +3 -0
- package/dist/helpers/bufferFromHex.d.ts.map +1 -0
- package/dist/helpers/bufferFromHex.js +13 -0
- package/dist/helpers/debounce.d.ts.map +1 -1
- package/dist/helpers/eventPromise.d.ts.map +1 -1
- package/dist/helpers/headsAreSame.d.ts +2 -2
- package/dist/helpers/headsAreSame.d.ts.map +1 -1
- package/dist/helpers/mergeArrays.d.ts +1 -1
- package/dist/helpers/mergeArrays.d.ts.map +1 -1
- package/dist/helpers/pause.d.ts.map +1 -1
- package/dist/helpers/tests/network-adapter-tests.d.ts.map +1 -1
- package/dist/helpers/tests/network-adapter-tests.js +13 -13
- package/dist/helpers/tests/storage-adapter-tests.d.ts.map +1 -1
- package/dist/helpers/tests/storage-adapter-tests.js +6 -9
- package/dist/helpers/throttle.d.ts.map +1 -1
- package/dist/helpers/withTimeout.d.ts.map +1 -1
- package/dist/index.d.ts +35 -7
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +37 -6
- package/dist/network/NetworkSubsystem.d.ts +0 -1
- package/dist/network/NetworkSubsystem.d.ts.map +1 -1
- package/dist/network/NetworkSubsystem.js +0 -3
- package/dist/network/messages.d.ts +1 -7
- package/dist/network/messages.d.ts.map +1 -1
- package/dist/network/messages.js +1 -2
- package/dist/storage/StorageAdapter.d.ts +0 -9
- package/dist/storage/StorageAdapter.d.ts.map +1 -1
- package/dist/storage/StorageAdapter.js +0 -33
- package/dist/storage/StorageSubsystem.d.ts +6 -2
- package/dist/storage/StorageSubsystem.d.ts.map +1 -1
- package/dist/storage/StorageSubsystem.js +131 -37
- package/dist/storage/keyHash.d.ts +1 -1
- package/dist/storage/keyHash.d.ts.map +1 -1
- package/dist/synchronizer/CollectionSynchronizer.d.ts +3 -4
- package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
- package/dist/synchronizer/CollectionSynchronizer.js +32 -26
- package/dist/synchronizer/DocSynchronizer.d.ts +8 -8
- package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -1
- package/dist/synchronizer/DocSynchronizer.js +205 -79
- package/dist/types.d.ts +4 -1
- package/dist/types.d.ts.map +1 -1
- package/fuzz/fuzz.ts +3 -3
- package/package.json +4 -5
- package/src/AutomergeUrl.ts +101 -26
- package/src/DocHandle.ts +158 -77
- package/src/FindProgress.ts +48 -0
- package/src/RemoteHeadsSubscriptions.ts +11 -9
- package/src/Repo.ts +465 -180
- package/src/helpers/abortable.ts +62 -0
- package/src/helpers/bufferFromHex.ts +14 -0
- package/src/helpers/headsAreSame.ts +2 -2
- package/src/helpers/tests/network-adapter-tests.ts +14 -13
- package/src/helpers/tests/storage-adapter-tests.ts +13 -24
- package/src/index.ts +57 -38
- package/src/network/NetworkSubsystem.ts +0 -4
- package/src/network/messages.ts +2 -11
- package/src/storage/StorageAdapter.ts +0 -42
- package/src/storage/StorageSubsystem.ts +155 -45
- package/src/storage/keyHash.ts +1 -1
- package/src/synchronizer/CollectionSynchronizer.ts +42 -29
- package/src/synchronizer/DocSynchronizer.ts +263 -89
- package/src/types.ts +4 -1
- package/test/AutomergeUrl.test.ts +130 -0
- package/test/CollectionSynchronizer.test.ts +6 -8
- package/test/DocHandle.test.ts +161 -77
- package/test/DocSynchronizer.test.ts +11 -9
- package/test/RemoteHeadsSubscriptions.test.ts +1 -1
- package/test/Repo.test.ts +406 -341
- package/test/StorageSubsystem.test.ts +95 -20
- package/test/remoteHeads.test.ts +28 -13
- package/dist/CollectionHandle.d.ts +0 -14
- package/dist/CollectionHandle.d.ts.map +0 -1
- package/dist/CollectionHandle.js +0 -37
- package/dist/DocUrl.d.ts +0 -47
- package/dist/DocUrl.d.ts.map +0 -1
- package/dist/DocUrl.js +0 -72
- package/dist/EphemeralData.d.ts +0 -20
- package/dist/EphemeralData.d.ts.map +0 -1
- package/dist/EphemeralData.js +0 -1
- package/dist/ferigan.d.ts +0 -51
- package/dist/ferigan.d.ts.map +0 -1
- package/dist/ferigan.js +0 -98
- package/dist/src/DocHandle.d.ts +0 -182
- package/dist/src/DocHandle.d.ts.map +0 -1
- package/dist/src/DocHandle.js +0 -405
- package/dist/src/DocUrl.d.ts +0 -49
- package/dist/src/DocUrl.d.ts.map +0 -1
- package/dist/src/DocUrl.js +0 -72
- package/dist/src/EphemeralData.d.ts +0 -19
- package/dist/src/EphemeralData.d.ts.map +0 -1
- package/dist/src/EphemeralData.js +0 -1
- package/dist/src/Repo.d.ts +0 -74
- package/dist/src/Repo.d.ts.map +0 -1
- package/dist/src/Repo.js +0 -208
- package/dist/src/helpers/arraysAreEqual.d.ts +0 -2
- package/dist/src/helpers/arraysAreEqual.d.ts.map +0 -1
- package/dist/src/helpers/arraysAreEqual.js +0 -2
- package/dist/src/helpers/cbor.d.ts +0 -4
- package/dist/src/helpers/cbor.d.ts.map +0 -1
- package/dist/src/helpers/cbor.js +0 -8
- package/dist/src/helpers/eventPromise.d.ts +0 -11
- package/dist/src/helpers/eventPromise.d.ts.map +0 -1
- package/dist/src/helpers/eventPromise.js +0 -7
- package/dist/src/helpers/headsAreSame.d.ts +0 -2
- package/dist/src/helpers/headsAreSame.d.ts.map +0 -1
- package/dist/src/helpers/headsAreSame.js +0 -4
- package/dist/src/helpers/mergeArrays.d.ts +0 -2
- package/dist/src/helpers/mergeArrays.d.ts.map +0 -1
- package/dist/src/helpers/mergeArrays.js +0 -15
- package/dist/src/helpers/pause.d.ts +0 -6
- package/dist/src/helpers/pause.d.ts.map +0 -1
- package/dist/src/helpers/pause.js +0 -10
- package/dist/src/helpers/tests/network-adapter-tests.d.ts +0 -21
- package/dist/src/helpers/tests/network-adapter-tests.d.ts.map +0 -1
- package/dist/src/helpers/tests/network-adapter-tests.js +0 -122
- package/dist/src/helpers/withTimeout.d.ts +0 -12
- package/dist/src/helpers/withTimeout.d.ts.map +0 -1
- package/dist/src/helpers/withTimeout.js +0 -24
- package/dist/src/index.d.ts +0 -53
- package/dist/src/index.d.ts.map +0 -1
- package/dist/src/index.js +0 -40
- package/dist/src/network/NetworkAdapter.d.ts +0 -26
- package/dist/src/network/NetworkAdapter.d.ts.map +0 -1
- package/dist/src/network/NetworkAdapter.js +0 -4
- package/dist/src/network/NetworkSubsystem.d.ts +0 -23
- package/dist/src/network/NetworkSubsystem.d.ts.map +0 -1
- package/dist/src/network/NetworkSubsystem.js +0 -120
- package/dist/src/network/messages.d.ts +0 -85
- package/dist/src/network/messages.d.ts.map +0 -1
- package/dist/src/network/messages.js +0 -23
- package/dist/src/storage/StorageAdapter.d.ts +0 -14
- package/dist/src/storage/StorageAdapter.d.ts.map +0 -1
- package/dist/src/storage/StorageAdapter.js +0 -1
- package/dist/src/storage/StorageSubsystem.d.ts +0 -12
- package/dist/src/storage/StorageSubsystem.d.ts.map +0 -1
- package/dist/src/storage/StorageSubsystem.js +0 -145
- package/dist/src/synchronizer/CollectionSynchronizer.d.ts +0 -25
- package/dist/src/synchronizer/CollectionSynchronizer.d.ts.map +0 -1
- package/dist/src/synchronizer/CollectionSynchronizer.js +0 -106
- package/dist/src/synchronizer/DocSynchronizer.d.ts +0 -29
- package/dist/src/synchronizer/DocSynchronizer.d.ts.map +0 -1
- package/dist/src/synchronizer/DocSynchronizer.js +0 -263
- package/dist/src/synchronizer/Synchronizer.d.ts +0 -9
- package/dist/src/synchronizer/Synchronizer.d.ts.map +0 -1
- package/dist/src/synchronizer/Synchronizer.js +0 -2
- package/dist/src/types.d.ts +0 -16
- package/dist/src/types.d.ts.map +0 -1
- package/dist/src/types.js +0 -1
- package/dist/test/CollectionSynchronizer.test.d.ts +0 -2
- package/dist/test/CollectionSynchronizer.test.d.ts.map +0 -1
- package/dist/test/CollectionSynchronizer.test.js +0 -57
- package/dist/test/DocHandle.test.d.ts +0 -2
- package/dist/test/DocHandle.test.d.ts.map +0 -1
- package/dist/test/DocHandle.test.js +0 -238
- package/dist/test/DocSynchronizer.test.d.ts +0 -2
- package/dist/test/DocSynchronizer.test.d.ts.map +0 -1
- package/dist/test/DocSynchronizer.test.js +0 -111
- package/dist/test/Network.test.d.ts +0 -2
- package/dist/test/Network.test.d.ts.map +0 -1
- package/dist/test/Network.test.js +0 -11
- package/dist/test/Repo.test.d.ts +0 -2
- package/dist/test/Repo.test.d.ts.map +0 -1
- package/dist/test/Repo.test.js +0 -568
- package/dist/test/StorageSubsystem.test.d.ts +0 -2
- package/dist/test/StorageSubsystem.test.d.ts.map +0 -1
- package/dist/test/StorageSubsystem.test.js +0 -56
- package/dist/test/helpers/DummyNetworkAdapter.d.ts +0 -9
- package/dist/test/helpers/DummyNetworkAdapter.d.ts.map +0 -1
- package/dist/test/helpers/DummyNetworkAdapter.js +0 -15
- package/dist/test/helpers/DummyStorageAdapter.d.ts +0 -16
- package/dist/test/helpers/DummyStorageAdapter.d.ts.map +0 -1
- package/dist/test/helpers/DummyStorageAdapter.js +0 -33
- package/dist/test/helpers/generate-large-object.d.ts +0 -5
- package/dist/test/helpers/generate-large-object.d.ts.map +0 -1
- package/dist/test/helpers/generate-large-object.js +0 -9
- package/dist/test/helpers/getRandomItem.d.ts +0 -2
- package/dist/test/helpers/getRandomItem.d.ts.map +0 -1
- package/dist/test/helpers/getRandomItem.js +0 -4
- package/dist/test/types.d.ts +0 -4
- package/dist/test/types.d.ts.map +0 -1
- package/dist/test/types.js +0 -1
- package/src/CollectionHandle.ts +0 -54
- package/src/ferigan.ts +0 -184
package/src/storage/StorageSubsystem.ts
CHANGED
@@ -1,4 +1,4 @@
-import …
+import { next as A } from "@automerge/automerge/slim"
 import debug from "debug"
 import { headsAreSame } from "../helpers/headsAreSame.js"
 import { mergeArrays } from "../helpers/mergeArrays.js"
@@ -6,9 +6,9 @@ import { type DocumentId } from "../types.js"
 import { StorageAdapterInterface } from "./StorageAdapterInterface.js"
 import { ChunkInfo, StorageKey, StorageId } from "./types.js"
 import { keyHash, headsHash } from "./keyHash.js"
-import { chunkTypeFromKey } from "./chunkTypeFromKey.js"
 import * as Uuid from "uuid"
 import { EventEmitter } from "eventemitter3"
+import { encodeHeads } from "../AutomergeUrl.js"
 
 type StorageSubsystemEvents = {
   "document-loaded": (arg: {
@@ -30,17 +30,17 @@ export class StorageSubsystem extends EventEmitter<StorageSubsystemEvents> {
   /** Record of the latest heads we've loaded or saved for each document */
   #storedHeads: Map<DocumentId, A.Heads> = new Map()
 
-
+  /** Metadata on the chunks we've already loaded for each document */
+  #chunkInfos: Map<DocumentId, ChunkInfo[]> = new Map()
+
+  /** Flag to avoid compacting when a compaction is already underway */
+  #compacting = false
 
-  # …
+  #log = debug(`automerge-repo:storage-subsystem`)
 
-  constructor(
-    beelay: A.beelay.Beelay,
-    storageAdapter: StorageAdapterInterface
-  ) {
+  constructor(storageAdapter: StorageAdapterInterface) {
     super()
     this.#storageAdapter = storageAdapter
-    this.#beelay = beelay
   }
 
   async id(): Promise<StorageId> {
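The subsystem now takes only a storage adapter; the Beelay handle is gone from the constructor. A minimal sketch of how this constructor is normally reached through the public API (the adapter package and directory are examples, not part of this diff):

```ts
import { Repo } from "@automerge/automerge-repo"
import { NodeFSStorageAdapter } from "@automerge/automerge-repo-storage-nodefs"

// Repo builds the StorageSubsystem internally from the adapter;
// callers no longer pass a Beelay instance anywhere.
const repo = new Repo({
  storage: new NodeFSStorageAdapter("./automerge-data"),
})
```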
@@ -112,16 +112,63 @@ export class StorageSubsystem extends EventEmitter<StorageSubsystemEvents> {
   // AUTOMERGE DOCUMENT STORAGE
 
   /**
-   * Loads …
+   * Loads and combines document chunks from storage, with snapshots first.
    */
-  async …
-
-
+  async loadDocData(documentId: DocumentId): Promise<Uint8Array | null> {
+    // Load snapshots first
+    const snapshotChunks = await this.#storageAdapter.loadRange([
+      documentId,
+      "snapshot",
+    ])
+    const incrementalChunks = await this.#storageAdapter.loadRange([
+      documentId,
+      "incremental",
+    ])
+
+    const binaries: Uint8Array[] = []
+    const chunkInfos: ChunkInfo[] = []
+
+    // Process snapshots first
+    for (const chunk of snapshotChunks) {
+      if (chunk.data === undefined) continue
+      chunkInfos.push({
+        key: chunk.key,
+        type: "snapshot",
+        size: chunk.data.length,
+      })
+      binaries.push(chunk.data)
+    }
+
+    // Then process incrementals
+    for (const chunk of incrementalChunks) {
+      if (chunk.data === undefined) continue
+      chunkInfos.push({
+        key: chunk.key,
+        type: "incremental",
+        size: chunk.data.length,
+      })
+      binaries.push(chunk.data)
+    }
+
+    // Store chunk infos for future reference
+    this.#chunkInfos.set(documentId, chunkInfos)
+
+    // If no chunks were found, return null
+    if (binaries.length === 0) {
       return null
     }
-
-
-
+
+    // Merge the chunks into a single binary
+    return mergeArrays(binaries)
+  }
+
+  /**
+   * Loads the Automerge document with the given ID from storage.
+   */
+  async loadDoc<T>(documentId: DocumentId): Promise<A.Doc<T> | null> {
+    // Load and combine chunks
+    const binary = await this.loadDocData(documentId)
+    if (!binary) return null
 
     // Load into an Automerge document
     const start = performance.now()
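Snapshot-first ordering matters because Automerge's binary format is chunked: a full snapshot followed by incremental change bundles can be handed to `A.load` as one concatenated byte array. A standalone sketch of that round trip (not the package's code):

```ts
import { next as A } from "@automerge/automerge"

let doc = A.from({ count: 0 })
const snapshot = A.save(doc) // what a "snapshot" chunk holds
const headsAtSnapshot = A.getHeads(doc)

doc = A.change(doc, d => {
  d.count = 1
})
const incremental = A.saveSince(doc, headsAtSnapshot) // an "incremental" chunk

// Concatenate snapshot-first, as loadDocData does via mergeArrays
const combined = new Uint8Array(snapshot.length + incremental.length)
combined.set(snapshot, 0)
combined.set(incremental, snapshot.length)

const restored = A.load<{ count: number }>(combined)
console.log(restored.count) // 1
```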
@@ -135,6 +182,7 @@ export class StorageSubsystem extends EventEmitter<StorageSubsystemEvents> {
 
     // Record the latest heads for the document
     this.#storedHeads.set(documentId, A.getHeads(newDoc))
+
     return newDoc
   }
 
@@ -149,36 +197,15 @@ export class StorageSubsystem extends EventEmitter<StorageSubsystemEvents> {
     // Don't bother saving if the document hasn't changed
     if (!this.#shouldSave(documentId, doc)) return
 
-    const …
-
-
-
+    const sourceChunks = this.#chunkInfos.get(documentId) ?? []
+
+    if (this.#shouldCompact(sourceChunks)) {
+      await this.#saveTotal(documentId, doc, sourceChunks)
+    } else {
+      await this.#saveIncremental(documentId, doc)
+    }
 
-    const commits = changes.map(c => {
-      const decoded = A.decodeChange(c)
-      return {
-        parents: decoded.deps,
-        hash: decoded.hash,
-        contents: c,
-      }
-    })
-    let done = this.#beelay
-      .addCommits({
-        docId: documentId,
-        commits: changes.map(c => {
-          const decoded = A.decodeChange(c)
-          return {
-            parents: decoded.deps,
-            hash: decoded.hash,
-            contents: c,
-          }
-        }),
-      })
-      .catch(e => {
-        console.error(`Error saving document ${documentId}: ${e}`)
-      })
     this.#storedHeads.set(documentId, A.getHeads(doc))
-    await done
   }
 
   /**
@@ -190,6 +217,66 @@ export class StorageSubsystem extends EventEmitter<StorageSubsystemEvents> {
     await this.#storageAdapter.removeRange([documentId, "sync-state"])
   }
 
+  /**
+   * Saves just the incremental changes since the last save.
+   */
+  async #saveIncremental(
+    documentId: DocumentId,
+    doc: A.Doc<unknown>
+  ): Promise<void> {
+    const binary = A.saveSince(doc, this.#storedHeads.get(documentId) ?? [])
+    if (binary && binary.length > 0) {
+      const key = [documentId, "incremental", keyHash(binary)]
+      this.#log(`Saving incremental ${key} for document ${documentId}`)
+      await this.#storageAdapter.save(key, binary)
+      if (!this.#chunkInfos.has(documentId)) {
+        this.#chunkInfos.set(documentId, [])
+      }
+      this.#chunkInfos.get(documentId)!.push({
+        key,
+        type: "incremental",
+        size: binary.length,
+      })
+      this.#storedHeads.set(documentId, A.getHeads(doc))
+    } else {
+      return Promise.resolve()
+    }
+  }
+
+  /**
+   * Compacts the document storage into a single snapshot.
+   */
+  async #saveTotal(
+    documentId: DocumentId,
+    doc: A.Doc<unknown>,
+    sourceChunks: ChunkInfo[]
+  ): Promise<void> {
+    this.#compacting = true
+
+    const binary = A.save(doc)
+    const snapshotHash = headsHash(A.getHeads(doc))
+    const key = [documentId, "snapshot", snapshotHash]
+    const oldKeys = new Set(
+      sourceChunks.map(c => c.key).filter(k => k[2] !== snapshotHash)
+    )
+
+    this.#log(`Saving snapshot ${key} for document ${documentId}`)
+    this.#log(`deleting old chunks ${Array.from(oldKeys)}`)
+
+    await this.#storageAdapter.save(key, binary)
+
+    for (const key of oldKeys) {
+      await this.#storageAdapter.remove(key)
+    }
+
+    const newChunkInfos =
+      this.#chunkInfos.get(documentId)?.filter(c => !oldKeys.has(c.key)) ?? []
+    newChunkInfos.push({ key, type: "snapshot", size: binary.length })
+
+    this.#chunkInfos.set(documentId, newChunkInfos)
+    this.#compacting = false
+  }
+
   async loadSyncState(
     documentId: DocumentId,
     storageId: StorageId
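One subtlety in `#saveTotal` above: snapshot keys are content-addressed by `headsHash`, so compacting twice at the same heads produces the same key, and the `filter(k => k[2] !== snapshotHash)` guard keeps the fresh snapshot off the deletion list. A toy illustration (values are made up):

```ts
type StorageKey = string[]

const snapshotHash = "abc123" // hypothetical headsHash output
const chunkKeys: StorageKey[] = [
  ["doc1", "snapshot", "abc123"], // same heads as the new snapshot
  ["doc1", "incremental", "def456"],
]

// Without the filter, re-writing the snapshot would then delete itself.
const oldKeys = chunkKeys.filter(k => k[2] !== snapshotHash)
console.log(oldKeys) // [["doc1", "incremental", "def456"]]
```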
@@ -224,11 +311,34 @@ export class StorageSubsystem extends EventEmitter<StorageSubsystemEvents> {
     }
 
     const newHeads = A.getHeads(doc)
-    if (headsAreSame(newHeads, oldHeads)) {
+    if (headsAreSame(encodeHeads(newHeads), encodeHeads(oldHeads))) {
       // the document hasn't changed
       return false
     }
 
     return true // the document has changed
   }
+
+  /**
+   * We only compact if the incremental size is greater than the snapshot size.
+   */
+  #shouldCompact(sourceChunks: ChunkInfo[]) {
+    if (this.#compacting) return false
+
+    let snapshotSize = 0
+    let incrementalSize = 0
+    for (const chunk of sourceChunks) {
+      if (chunk.type === "snapshot") {
+        snapshotSize += chunk.size
+      } else {
+        incrementalSize += chunk.size
+      }
+    }
+    // if the file is currently small, don't worry, just compact
+    // this might seem a bit arbitrary (1k is arbitrary) but is designed to ensure compaction
+    // for documents with only a single large change on top of an empty (or nearly empty) document
+    // for example: imported NPM modules, images, etc.
+    // if we have even more incrementals (so far) than the snapshot, compact
+    return snapshotSize < 1024 || incrementalSize >= snapshotSize
+  }
 }
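Read `#shouldCompact` as a two-part rule: tiny files always compact, and otherwise compaction waits until accumulated incrementals reach the snapshot's size. A worked example of the arithmetic (byte sizes chosen for illustration):

```ts
const shouldCompact = (snapshotSize: number, incrementalSize: number) =>
  snapshotSize < 1024 || incrementalSize >= snapshotSize

shouldCompact(512, 0) // true  — under the 1k floor, compact eagerly
shouldCompact(10_000, 4_000) // false — incrementals still cheaper than a rewrite
shouldCompact(10_000, 12_000) // true  — incrementals have overtaken the snapshot
```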
package/src/synchronizer/CollectionSynchronizer.ts
CHANGED
@@ -1,12 +1,11 @@
 import debug from "debug"
 import { DocHandle } from "../DocHandle.js"
-import { parseAutomergeUrl …
+import { parseAutomergeUrl } from "../AutomergeUrl.js"
 import { Repo } from "../Repo.js"
 import { DocMessage } from "../network/messages.js"
 import { AutomergeUrl, DocumentId, PeerId } from "../types.js"
 import { DocSynchronizer } from "./DocSynchronizer.js"
 import { Synchronizer } from "./Synchronizer.js"
-import { next as A } from "@automerge/automerge"
 
 const log = debug("automerge-repo:collectionsync")
 
@@ -24,29 +23,41 @@ export class CollectionSynchronizer extends Synchronizer {
 
   #denylist: DocumentId[]
 
-  constructor(
-    private beelay: A.beelay.Beelay,
-    private repo: Repo,
-    denylist: AutomergeUrl[] = []
-  ) {
+  constructor(private repo: Repo, denylist: AutomergeUrl[] = []) {
     super()
     this.#denylist = denylist.map(url => parseAutomergeUrl(url).documentId)
   }
 
   /** Returns a synchronizer for the given document, creating one if it doesn't already exist. */
-  #fetchDocSynchronizer( …
-    if (!this.docSynchronizers[documentId]) {
-
-
+  #fetchDocSynchronizer(handle: DocHandle<unknown>) {
+    if (!this.docSynchronizers[handle.documentId]) {
+      this.docSynchronizers[handle.documentId] =
+        this.#initDocSynchronizer(handle)
     }
-    return this.docSynchronizers[documentId]
+    return this.docSynchronizers[handle.documentId]
   }
 
   /** Creates a new docSynchronizer and sets it up to propagate messages */
   #initDocSynchronizer(handle: DocHandle<unknown>): DocSynchronizer {
     const docSynchronizer = new DocSynchronizer({
-      beelay: this.beelay,
       handle,
+      peerId: this.repo.networkSubsystem.peerId,
+      onLoadSyncState: async peerId => {
+        if (!this.repo.storageSubsystem) {
+          return
+        }
+
+        const { storageId, isEphemeral } =
+          this.repo.peerMetadataByPeerId[peerId] || {}
+        if (!storageId || isEphemeral) {
+          return
+        }
+
+        return this.repo.storageSubsystem.loadSyncState(
+          handle.documentId,
+          storageId
+        )
+      },
     })
     docSynchronizer.on("message", event => this.emit("message", event))
     docSynchronizer.on("open-doc", event => this.emit("open-doc", event))
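The `onLoadSyncState` callback above only consults storage for peers that advertise a stable `storageId` and are not ephemeral; persisted sync state is useless against a peer whose identity resets on every connection. The gate, isolated as a sketch (types simplified, values hypothetical):

```ts
type PeerMetadata = { storageId?: string; isEphemeral?: boolean }

function shouldLoadSyncState(meta: PeerMetadata | undefined): boolean {
  const { storageId, isEphemeral } = meta ?? {}
  return Boolean(storageId) && !isEphemeral
}

shouldLoadSyncState({ storageId: "3f9a…" }) // true — durable peer
shouldLoadSyncState({ storageId: "3f9a…", isEphemeral: true }) // false
shouldLoadSyncState(undefined) // false — no metadata yet
```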
@@ -99,13 +110,16 @@ export class CollectionSynchronizer extends Synchronizer {
 
     this.#docSetUp[documentId] = true
 
-    const …
+    const handle = await this.repo.find(documentId, {
+      allowableStates: ["ready", "unavailable", "requesting"],
+    })
+    const docSynchronizer = this.#fetchDocSynchronizer(handle)
 
     docSynchronizer.receiveMessage(message)
 
     // Initiate sync with any new peers
     const peers = await this.#documentGenerousPeers(documentId)
-    docSynchronizer.beginSync(
+    void docSynchronizer.beginSync(
       peers.filter(peerId => !docSynchronizer.hasPeer(peerId))
     )
   }
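The `repo.find()` call here reflects the headline 2.0 change: `find()` is now async and resolves a handle once the document reaches an acceptable state. The `allowableStates` option is internal plumbing from this diff (the synchronizer must handle messages even for documents still "requesting" or "unavailable"); ordinary application code just awaits the default, as in this sketch:

```ts
import { Repo, AutomergeUrl } from "@automerge/automerge-repo"

async function readTitle(repo: Repo, url: AutomergeUrl) {
  // Resolves once the document is ready; in 2.0, doc() is synchronous
  const handle = await repo.find<{ title: string }>(url)
  return handle.doc().title
}
```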
@@ -113,14 +127,14 @@ export class CollectionSynchronizer extends Synchronizer {
   /**
    * Starts synchronizing the given document with all peers that we share it generously with.
    */
-  addDocument( …
+  addDocument(handle: DocHandle<unknown>) {
     // HACK: this is a hack to prevent us from adding the same document twice
-    if (this.#docSetUp[documentId]) {
+    if (this.#docSetUp[handle.documentId]) {
       return
     }
-    const docSynchronizer = this.#fetchDocSynchronizer( …
-    void this.#documentGenerousPeers(documentId).then(peers => {
-      docSynchronizer.beginSync(peers)
+    const docSynchronizer = this.#fetchDocSynchronizer(handle)
+    void this.#documentGenerousPeers(handle.documentId).then(peers => {
+      void docSynchronizer.beginSync(peers)
     })
   }
 
@@ -142,7 +156,7 @@ export class CollectionSynchronizer extends Synchronizer {
     for (const docSynchronizer of Object.values(this.docSynchronizers)) {
       const { documentId } = docSynchronizer
       void this.repo.sharePolicy(peerId, documentId).then(okToShare => {
-        if (okToShare) docSynchronizer.beginSync([peerId])
+        if (okToShare) void docSynchronizer.beginSync([peerId])
       })
     }
   }
@@ -168,13 +182,12 @@ export class CollectionSynchronizer extends Synchronizer {
       size: { numOps: number; numChanges: number }
     }
   } {
-    return …
-
-
-
-
-
-
-    // )
+    return Object.fromEntries(
+      Object.entries(this.docSynchronizers).map(
+        ([documentId, synchronizer]) => {
+          return [documentId, synchronizer.metrics()]
+        }
+      )
+    )
   }
 }
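The reconstructed `metrics()` simply re-keys each active DocSynchronizer's metrics by document ID. A hypothetical consumer, standing in for whatever holds the synchronizer instance (the shape shown matches the `size` field in the signature above; anything beyond that is assumed):

```ts
// `sync` stands in for a CollectionSynchronizer instance (an internal class).
declare const sync: {
  metrics(): Record<string, { size: { numOps: number; numChanges: number } }>
}

for (const [documentId, m] of Object.entries(sync.metrics())) {
  console.log(documentId, m.size.numOps, m.size.numChanges)
}
```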