@automerge/automerge-repo 2.0.0-collectionsync-alpha.1 → 2.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +8 -8
- package/dist/AutomergeUrl.d.ts +17 -5
- package/dist/AutomergeUrl.d.ts.map +1 -1
- package/dist/AutomergeUrl.js +71 -24
- package/dist/DocHandle.d.ts +33 -41
- package/dist/DocHandle.d.ts.map +1 -1
- package/dist/DocHandle.js +105 -66
- package/dist/FindProgress.d.ts +30 -0
- package/dist/FindProgress.d.ts.map +1 -0
- package/dist/FindProgress.js +1 -0
- package/dist/RemoteHeadsSubscriptions.d.ts +4 -5
- package/dist/RemoteHeadsSubscriptions.d.ts.map +1 -1
- package/dist/RemoteHeadsSubscriptions.js +4 -1
- package/dist/Repo.d.ts +24 -5
- package/dist/Repo.d.ts.map +1 -1
- package/dist/Repo.js +355 -169
- package/dist/helpers/abortable.d.ts +36 -0
- package/dist/helpers/abortable.d.ts.map +1 -0
- package/dist/helpers/abortable.js +47 -0
- package/dist/helpers/arraysAreEqual.d.ts.map +1 -1
- package/dist/helpers/bufferFromHex.d.ts +3 -0
- package/dist/helpers/bufferFromHex.d.ts.map +1 -0
- package/dist/helpers/bufferFromHex.js +13 -0
- package/dist/helpers/debounce.d.ts.map +1 -1
- package/dist/helpers/eventPromise.d.ts.map +1 -1
- package/dist/helpers/headsAreSame.d.ts +2 -2
- package/dist/helpers/headsAreSame.d.ts.map +1 -1
- package/dist/helpers/mergeArrays.d.ts +1 -1
- package/dist/helpers/mergeArrays.d.ts.map +1 -1
- package/dist/helpers/pause.d.ts.map +1 -1
- package/dist/helpers/tests/network-adapter-tests.d.ts.map +1 -1
- package/dist/helpers/tests/network-adapter-tests.js +13 -13
- package/dist/helpers/tests/storage-adapter-tests.d.ts.map +1 -1
- package/dist/helpers/tests/storage-adapter-tests.js +6 -9
- package/dist/helpers/throttle.d.ts.map +1 -1
- package/dist/helpers/withTimeout.d.ts.map +1 -1
- package/dist/index.d.ts +35 -7
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +37 -6
- package/dist/network/NetworkSubsystem.d.ts +0 -1
- package/dist/network/NetworkSubsystem.d.ts.map +1 -1
- package/dist/network/NetworkSubsystem.js +0 -3
- package/dist/network/messages.d.ts +1 -7
- package/dist/network/messages.d.ts.map +1 -1
- package/dist/network/messages.js +1 -2
- package/dist/storage/StorageAdapter.d.ts +0 -9
- package/dist/storage/StorageAdapter.d.ts.map +1 -1
- package/dist/storage/StorageAdapter.js +0 -33
- package/dist/storage/StorageSubsystem.d.ts +6 -2
- package/dist/storage/StorageSubsystem.d.ts.map +1 -1
- package/dist/storage/StorageSubsystem.js +131 -37
- package/dist/storage/keyHash.d.ts +1 -1
- package/dist/storage/keyHash.d.ts.map +1 -1
- package/dist/synchronizer/CollectionSynchronizer.d.ts +3 -4
- package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
- package/dist/synchronizer/CollectionSynchronizer.js +32 -26
- package/dist/synchronizer/DocSynchronizer.d.ts +8 -8
- package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -1
- package/dist/synchronizer/DocSynchronizer.js +205 -79
- package/dist/types.d.ts +4 -1
- package/dist/types.d.ts.map +1 -1
- package/fuzz/fuzz.ts +3 -3
- package/package.json +4 -5
- package/src/AutomergeUrl.ts +101 -26
- package/src/DocHandle.ts +158 -77
- package/src/FindProgress.ts +48 -0
- package/src/RemoteHeadsSubscriptions.ts +11 -9
- package/src/Repo.ts +465 -180
- package/src/helpers/abortable.ts +62 -0
- package/src/helpers/bufferFromHex.ts +14 -0
- package/src/helpers/headsAreSame.ts +2 -2
- package/src/helpers/tests/network-adapter-tests.ts +14 -13
- package/src/helpers/tests/storage-adapter-tests.ts +13 -24
- package/src/index.ts +57 -38
- package/src/network/NetworkSubsystem.ts +0 -4
- package/src/network/messages.ts +2 -11
- package/src/storage/StorageAdapter.ts +0 -42
- package/src/storage/StorageSubsystem.ts +155 -45
- package/src/storage/keyHash.ts +1 -1
- package/src/synchronizer/CollectionSynchronizer.ts +42 -29
- package/src/synchronizer/DocSynchronizer.ts +263 -89
- package/src/types.ts +4 -1
- package/test/AutomergeUrl.test.ts +130 -0
- package/test/CollectionSynchronizer.test.ts +6 -8
- package/test/DocHandle.test.ts +161 -77
- package/test/DocSynchronizer.test.ts +11 -9
- package/test/RemoteHeadsSubscriptions.test.ts +1 -1
- package/test/Repo.test.ts +406 -341
- package/test/StorageSubsystem.test.ts +95 -20
- package/test/remoteHeads.test.ts +28 -13
- package/dist/CollectionHandle.d.ts +0 -14
- package/dist/CollectionHandle.d.ts.map +0 -1
- package/dist/CollectionHandle.js +0 -37
- package/dist/DocUrl.d.ts +0 -47
- package/dist/DocUrl.d.ts.map +0 -1
- package/dist/DocUrl.js +0 -72
- package/dist/EphemeralData.d.ts +0 -20
- package/dist/EphemeralData.d.ts.map +0 -1
- package/dist/EphemeralData.js +0 -1
- package/dist/ferigan.d.ts +0 -51
- package/dist/ferigan.d.ts.map +0 -1
- package/dist/ferigan.js +0 -98
- package/dist/src/DocHandle.d.ts +0 -182
- package/dist/src/DocHandle.d.ts.map +0 -1
- package/dist/src/DocHandle.js +0 -405
- package/dist/src/DocUrl.d.ts +0 -49
- package/dist/src/DocUrl.d.ts.map +0 -1
- package/dist/src/DocUrl.js +0 -72
- package/dist/src/EphemeralData.d.ts +0 -19
- package/dist/src/EphemeralData.d.ts.map +0 -1
- package/dist/src/EphemeralData.js +0 -1
- package/dist/src/Repo.d.ts +0 -74
- package/dist/src/Repo.d.ts.map +0 -1
- package/dist/src/Repo.js +0 -208
- package/dist/src/helpers/arraysAreEqual.d.ts +0 -2
- package/dist/src/helpers/arraysAreEqual.d.ts.map +0 -1
- package/dist/src/helpers/arraysAreEqual.js +0 -2
- package/dist/src/helpers/cbor.d.ts +0 -4
- package/dist/src/helpers/cbor.d.ts.map +0 -1
- package/dist/src/helpers/cbor.js +0 -8
- package/dist/src/helpers/eventPromise.d.ts +0 -11
- package/dist/src/helpers/eventPromise.d.ts.map +0 -1
- package/dist/src/helpers/eventPromise.js +0 -7
- package/dist/src/helpers/headsAreSame.d.ts +0 -2
- package/dist/src/helpers/headsAreSame.d.ts.map +0 -1
- package/dist/src/helpers/headsAreSame.js +0 -4
- package/dist/src/helpers/mergeArrays.d.ts +0 -2
- package/dist/src/helpers/mergeArrays.d.ts.map +0 -1
- package/dist/src/helpers/mergeArrays.js +0 -15
- package/dist/src/helpers/pause.d.ts +0 -6
- package/dist/src/helpers/pause.d.ts.map +0 -1
- package/dist/src/helpers/pause.js +0 -10
- package/dist/src/helpers/tests/network-adapter-tests.d.ts +0 -21
- package/dist/src/helpers/tests/network-adapter-tests.d.ts.map +0 -1
- package/dist/src/helpers/tests/network-adapter-tests.js +0 -122
- package/dist/src/helpers/withTimeout.d.ts +0 -12
- package/dist/src/helpers/withTimeout.d.ts.map +0 -1
- package/dist/src/helpers/withTimeout.js +0 -24
- package/dist/src/index.d.ts +0 -53
- package/dist/src/index.d.ts.map +0 -1
- package/dist/src/index.js +0 -40
- package/dist/src/network/NetworkAdapter.d.ts +0 -26
- package/dist/src/network/NetworkAdapter.d.ts.map +0 -1
- package/dist/src/network/NetworkAdapter.js +0 -4
- package/dist/src/network/NetworkSubsystem.d.ts +0 -23
- package/dist/src/network/NetworkSubsystem.d.ts.map +0 -1
- package/dist/src/network/NetworkSubsystem.js +0 -120
- package/dist/src/network/messages.d.ts +0 -85
- package/dist/src/network/messages.d.ts.map +0 -1
- package/dist/src/network/messages.js +0 -23
- package/dist/src/storage/StorageAdapter.d.ts +0 -14
- package/dist/src/storage/StorageAdapter.d.ts.map +0 -1
- package/dist/src/storage/StorageAdapter.js +0 -1
- package/dist/src/storage/StorageSubsystem.d.ts +0 -12
- package/dist/src/storage/StorageSubsystem.d.ts.map +0 -1
- package/dist/src/storage/StorageSubsystem.js +0 -145
- package/dist/src/synchronizer/CollectionSynchronizer.d.ts +0 -25
- package/dist/src/synchronizer/CollectionSynchronizer.d.ts.map +0 -1
- package/dist/src/synchronizer/CollectionSynchronizer.js +0 -106
- package/dist/src/synchronizer/DocSynchronizer.d.ts +0 -29
- package/dist/src/synchronizer/DocSynchronizer.d.ts.map +0 -1
- package/dist/src/synchronizer/DocSynchronizer.js +0 -263
- package/dist/src/synchronizer/Synchronizer.d.ts +0 -9
- package/dist/src/synchronizer/Synchronizer.d.ts.map +0 -1
- package/dist/src/synchronizer/Synchronizer.js +0 -2
- package/dist/src/types.d.ts +0 -16
- package/dist/src/types.d.ts.map +0 -1
- package/dist/src/types.js +0 -1
- package/dist/test/CollectionSynchronizer.test.d.ts +0 -2
- package/dist/test/CollectionSynchronizer.test.d.ts.map +0 -1
- package/dist/test/CollectionSynchronizer.test.js +0 -57
- package/dist/test/DocHandle.test.d.ts +0 -2
- package/dist/test/DocHandle.test.d.ts.map +0 -1
- package/dist/test/DocHandle.test.js +0 -238
- package/dist/test/DocSynchronizer.test.d.ts +0 -2
- package/dist/test/DocSynchronizer.test.d.ts.map +0 -1
- package/dist/test/DocSynchronizer.test.js +0 -111
- package/dist/test/Network.test.d.ts +0 -2
- package/dist/test/Network.test.d.ts.map +0 -1
- package/dist/test/Network.test.js +0 -11
- package/dist/test/Repo.test.d.ts +0 -2
- package/dist/test/Repo.test.d.ts.map +0 -1
- package/dist/test/Repo.test.js +0 -568
- package/dist/test/StorageSubsystem.test.d.ts +0 -2
- package/dist/test/StorageSubsystem.test.d.ts.map +0 -1
- package/dist/test/StorageSubsystem.test.js +0 -56
- package/dist/test/helpers/DummyNetworkAdapter.d.ts +0 -9
- package/dist/test/helpers/DummyNetworkAdapter.d.ts.map +0 -1
- package/dist/test/helpers/DummyNetworkAdapter.js +0 -15
- package/dist/test/helpers/DummyStorageAdapter.d.ts +0 -16
- package/dist/test/helpers/DummyStorageAdapter.d.ts.map +0 -1
- package/dist/test/helpers/DummyStorageAdapter.js +0 -33
- package/dist/test/helpers/generate-large-object.d.ts +0 -5
- package/dist/test/helpers/generate-large-object.d.ts.map +0 -1
- package/dist/test/helpers/generate-large-object.js +0 -9
- package/dist/test/helpers/getRandomItem.d.ts +0 -2
- package/dist/test/helpers/getRandomItem.d.ts.map +0 -1
- package/dist/test/helpers/getRandomItem.js +0 -4
- package/dist/test/types.d.ts +0 -4
- package/dist/test/types.d.ts.map +0 -1
- package/dist/test/types.js +0 -1
- package/src/CollectionHandle.ts +0 -54
- package/src/ferigan.ts +0 -184
package/dist/storage/StorageSubsystem.js:

```diff
@@ -1,9 +1,11 @@
-import
+import { next as A } from "@automerge/automerge/slim";
 import debug from "debug";
 import { headsAreSame } from "../helpers/headsAreSame.js";
 import { mergeArrays } from "../helpers/mergeArrays.js";
+import { keyHash, headsHash } from "./keyHash.js";
 import * as Uuid from "uuid";
 import { EventEmitter } from "eventemitter3";
+import { encodeHeads } from "../AutomergeUrl.js";
 /**
  * The storage subsystem is responsible for saving and loading Automerge documents to and from
  * storage adapter. It also provides a generic key/value storage interface for other uses.
@@ -13,12 +15,14 @@ export class StorageSubsystem extends EventEmitter {
     #storageAdapter;
     /** Record of the latest heads we've loaded or saved for each document */
     #storedHeads = new Map();
+    /** Metadata on the chunks we've already loaded for each document */
+    #chunkInfos = new Map();
+    /** Flag to avoid compacting when a compaction is already underway */
+    #compacting = false;
     #log = debug(`automerge-repo:storage-subsystem`);
-
-    constructor(beelay, storageAdapter) {
+    constructor(storageAdapter) {
         super();
         this.#storageAdapter = storageAdapter;
-        this.#beelay = beelay;
     }
     async id() {
         const storedId = await this.#storageAdapter.load(["storage-adapter-id"]);
@@ -71,16 +75,58 @@ export class StorageSubsystem extends EventEmitter {
     }
     // AUTOMERGE DOCUMENT STORAGE
     /**
-     * Loads
+     * Loads and combines document chunks from storage, with snapshots first.
      */
-    async
-
-
+    async loadDocData(documentId) {
+        // Load snapshots first
+        const snapshotChunks = await this.#storageAdapter.loadRange([
+            documentId,
+            "snapshot",
+        ]);
+        const incrementalChunks = await this.#storageAdapter.loadRange([
+            documentId,
+            "incremental",
+        ]);
+        const binaries = [];
+        const chunkInfos = [];
+        // Process snapshots first
+        for (const chunk of snapshotChunks) {
+            if (chunk.data === undefined)
+                continue;
+            chunkInfos.push({
+                key: chunk.key,
+                type: "snapshot",
+                size: chunk.data.length,
+            });
+            binaries.push(chunk.data);
+        }
+        // Then process incrementals
+        for (const chunk of incrementalChunks) {
+            if (chunk.data === undefined)
+                continue;
+            chunkInfos.push({
+                key: chunk.key,
+                type: "incremental",
+                size: chunk.data.length,
+            });
+            binaries.push(chunk.data);
+        }
+        // Store chunk infos for future reference
+        this.#chunkInfos.set(documentId, chunkInfos);
+        // If no chunks were found, return null
+        if (binaries.length === 0) {
             return null;
         }
-
-
-
+        // Merge the chunks into a single binary
+        return mergeArrays(binaries);
+    }
+    /**
+     * Loads the Automerge document with the given ID from storage.
+     */
+    async loadDoc(documentId) {
+        // Load and combine chunks
+        const binary = await this.loadDocData(documentId);
+        if (!binary)
             return null;
         // Load into an Automerge document
         const start = performance.now();
@@ -106,32 +152,14 @@ export class StorageSubsystem extends EventEmitter {
         // Don't bother saving if the document hasn't changed
         if (!this.#shouldSave(documentId, doc))
             return;
-        const
-
-
-
-
-
-        };
-        });
-        let done = this.#beelay
-            .addCommits({
-            docId: documentId,
-            commits: changes.map(c => {
-                const decoded = A.decodeChange(c);
-                return {
-                    parents: decoded.deps,
-                    hash: decoded.hash,
-                    contents: c,
-                };
-            }),
-        })
-            .catch(e => {
-            console.error(`Error saving document ${documentId}: ${e}`);
-        });
+        const sourceChunks = this.#chunkInfos.get(documentId) ?? [];
+        if (this.#shouldCompact(sourceChunks)) {
+            await this.#saveTotal(documentId, doc, sourceChunks);
+        }
+        else {
+            await this.#saveIncremental(documentId, doc);
+        }
         this.#storedHeads.set(documentId, A.getHeads(doc));
-        await done;
     }
     /**
      * Removes the Automerge document with the given ID from storage
@@ -141,6 +169,49 @@ export class StorageSubsystem extends EventEmitter {
         await this.#storageAdapter.removeRange([documentId, "incremental"]);
         await this.#storageAdapter.removeRange([documentId, "sync-state"]);
     }
+    /**
+     * Saves just the incremental changes since the last save.
+     */
+    async #saveIncremental(documentId, doc) {
+        const binary = A.saveSince(doc, this.#storedHeads.get(documentId) ?? []);
+        if (binary && binary.length > 0) {
+            const key = [documentId, "incremental", keyHash(binary)];
+            this.#log(`Saving incremental ${key} for document ${documentId}`);
+            await this.#storageAdapter.save(key, binary);
+            if (!this.#chunkInfos.has(documentId)) {
+                this.#chunkInfos.set(documentId, []);
+            }
+            this.#chunkInfos.get(documentId).push({
+                key,
+                type: "incremental",
+                size: binary.length,
+            });
+            this.#storedHeads.set(documentId, A.getHeads(doc));
+        }
+        else {
+            return Promise.resolve();
+        }
+    }
+    /**
+     * Compacts the document storage into a single shapshot.
+     */
+    async #saveTotal(documentId, doc, sourceChunks) {
+        this.#compacting = true;
+        const binary = A.save(doc);
+        const snapshotHash = headsHash(A.getHeads(doc));
+        const key = [documentId, "snapshot", snapshotHash];
+        const oldKeys = new Set(sourceChunks.map(c => c.key).filter(k => k[2] !== snapshotHash));
+        this.#log(`Saving snapshot ${key} for document ${documentId}`);
+        this.#log(`deleting old chunks ${Array.from(oldKeys)}`);
+        await this.#storageAdapter.save(key, binary);
+        for (const key of oldKeys) {
+            await this.#storageAdapter.remove(key);
+        }
+        const newChunkInfos = this.#chunkInfos.get(documentId)?.filter(c => !oldKeys.has(c.key)) ?? [];
+        newChunkInfos.push({ key, type: "snapshot", size: binary.length });
+        this.#chunkInfos.set(documentId, newChunkInfos);
+        this.#compacting = false;
+    }
     async loadSyncState(documentId, storageId) {
         const key = [documentId, "sync-state", storageId];
         try {
@@ -166,10 +237,33 @@ export class StorageSubsystem extends EventEmitter {
             return true;
         }
         const newHeads = A.getHeads(doc);
-        if (headsAreSame(newHeads, oldHeads)) {
+        if (headsAreSame(encodeHeads(newHeads), encodeHeads(oldHeads))) {
             // the document hasn't changed
             return false;
         }
         return true; // the document has changed
     }
+    /**
+     * We only compact if the incremental size is greater than the snapshot size.
+     */
+    #shouldCompact(sourceChunks) {
+        if (this.#compacting)
+            return false;
+        let snapshotSize = 0;
+        let incrementalSize = 0;
+        for (const chunk of sourceChunks) {
+            if (chunk.type === "snapshot") {
+                snapshotSize += chunk.size;
+            }
+            else {
+                incrementalSize += chunk.size;
+            }
+        }
+        // if the file is currently small, don't worry, just compact
+        // this might seem a bit arbitrary (1k is arbitrary) but is designed to ensure compaction
+        // for documents with only a single large change on top of an empty (or nearly empty) document
+        // for example: imported NPM modules, images, etc.
+        // if we have even more incrementals (so far) than the snapshot, compact
+        return snapshotSize < 1024 || incrementalSize >= snapshotSize;
+    }
 }
```
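The StorageSubsystem diff above replaces the old beelay-backed save path with chunked storage: loads read all "snapshot" chunks, then all "incremental" chunks, and merge them with `mergeArrays`, while saves either append an incremental chunk or compact everything into a single snapshot. A minimal TypeScript sketch of that compaction decision, restating `#shouldCompact` from the diff (the `ChunkInfo` shape mirrors the `{ key, type, size }` records above; the standalone names are illustrative, not exports of automerge-repo):

```typescript
// Restatement of the #shouldCompact heuristic shown in the diff above.
// ChunkInfo mirrors the per-document chunk records; shouldCompact is an
// illustrative standalone function, not part of the package's API.

type ChunkType = "snapshot" | "incremental"

interface ChunkInfo {
  key: string[]
  type: ChunkType
  size: number
}

function shouldCompact(chunks: ChunkInfo[], compacting = false): boolean {
  if (compacting) return false // a compaction is already underway

  let snapshotSize = 0
  let incrementalSize = 0
  for (const chunk of chunks) {
    if (chunk.type === "snapshot") snapshotSize += chunk.size
    else incrementalSize += chunk.size
  }

  // Always compact while the document is small (the 1 KB floor from the diff);
  // otherwise only once incremental chunks have caught up with the snapshot.
  return snapshotSize < 1024 || incrementalSize >= snapshotSize
}

// Example: a 10 KB snapshot with 12 KB of accumulated incrementals triggers compaction.
console.log(
  shouldCompact([
    { key: ["someDocId", "snapshot", "abc123"], type: "snapshot", size: 10_240 },
    { key: ["someDocId", "incremental", "def456"], type: "incremental", size: 12_288 },
  ])
) // -> true
```

Per the comment in the diff, the 1 KB floor is there so that a document consisting of one large initial change (imported modules, images, and the like) is compacted immediately rather than lingering as a single incremental chunk.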
package/dist/storage/keyHash.d.ts:

```diff
@@ -1,4 +1,4 @@
-import
+import { next as A } from "@automerge/automerge/slim";
 export declare function keyHash(binary: Uint8Array): string;
 export declare function headsHash(heads: A.Heads): string;
 //# sourceMappingURL=keyHash.d.ts.map
```
package/dist/storage/keyHash.d.ts.map:

```diff
@@ -1 +1 @@
-{"version":3,"file":"keyHash.d.ts","sourceRoot":"","sources":["../../src/storage/keyHash.ts"],"names":[],"mappings":"AAAA,OAAO,
+{"version":3,"file":"keyHash.d.ts","sourceRoot":"","sources":["../../src/storage/keyHash.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,IAAI,CAAC,EAAE,MAAM,2BAA2B,CAAA;AAIrD,wBAAgB,OAAO,CAAC,MAAM,EAAE,UAAU,UAIzC;AAED,wBAAgB,SAAS,CAAC,KAAK,EAAE,CAAC,CAAC,KAAK,GAAG,MAAM,CAIhD"}
```
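`keyHash.d.ts` now pulls `A.Heads` from the slim entry point. In the storage diff above, these two helpers are what make chunk keys content-addressed: incremental chunks are keyed by a hash of their bytes, snapshots by a hash derived from the document heads, and a freshly written snapshot whose key matches is excluded from the old-chunk cleanup. A sketch of that key layout, assuming hex hashes (the `hashHex` helper below is a stand-in, not the package's `keyHash`/`headsHash` implementation, which this diff does not show):

```typescript
// Illustration only: hashHex stands in for the hashing done in storage/keyHash.ts.
// The key shapes mirror the StorageSubsystem diff:
// [documentId, "incremental", keyHash(binary)] and [documentId, "snapshot", headsHash(heads)].
import { createHash } from "node:crypto"

type StorageKey = string[]

const hashHex = (data: Uint8Array | string): string =>
  createHash("sha256").update(data).digest("hex")

// Incremental chunks are keyed by a hash of the change bytes they contain...
const incrementalKey = (documentId: string, binary: Uint8Array): StorageKey => [
  documentId,
  "incremental",
  hashHex(binary),
]

// ...and snapshots by a hash derived from the document heads, which is what lets
// #saveTotal skip deleting a chunk whose key matches the snapshot it just wrote.
const snapshotKey = (documentId: string, heads: string[]): StorageKey => [
  documentId,
  "snapshot",
  hashHex(heads.join("")),
]

console.log(incrementalKey("someDocId", new Uint8Array([1, 2, 3])))
console.log(snapshotKey("someDocId", ["head-a", "head-b"]))
```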
package/dist/synchronizer/CollectionSynchronizer.d.ts:

```diff
@@ -1,18 +1,17 @@
+import { DocHandle } from "../DocHandle.js";
 import { Repo } from "../Repo.js";
 import { DocMessage } from "../network/messages.js";
 import { AutomergeUrl, DocumentId, PeerId } from "../types.js";
 import { DocSynchronizer } from "./DocSynchronizer.js";
 import { Synchronizer } from "./Synchronizer.js";
-import { next as A } from "@automerge/automerge";
 /** A CollectionSynchronizer is responsible for synchronizing a DocCollection with peers. */
 export declare class CollectionSynchronizer extends Synchronizer {
     #private;
-    private beelay;
     private repo;
     /** A map of documentIds to their synchronizers */
     /** @hidden */
     docSynchronizers: Record<DocumentId, DocSynchronizer>;
-    constructor(
+    constructor(repo: Repo, denylist?: AutomergeUrl[]);
     /**
      * When we receive a sync message for a document we haven't got in memory, we
      * register it with the repo and start synchronizing
@@ -21,7 +20,7 @@ export declare class CollectionSynchronizer extends Synchronizer {
     /**
      * Starts synchronizing the given document with all peers that we share it generously with.
      */
-    addDocument(
+    addDocument(handle: DocHandle<unknown>): void;
     removeDocument(documentId: DocumentId): void;
     /** Adds a peer and maybe starts synchronizing with them */
     addPeer(peerId: PeerId): void;
```
package/dist/synchronizer/CollectionSynchronizer.d.ts.map:

```diff
@@ -1 +1 @@
-{"version":3,"file":"CollectionSynchronizer.d.ts","sourceRoot":"","sources":["../../src/synchronizer/CollectionSynchronizer.ts"],"names":[],"mappings":"
+{"version":3,"file":"CollectionSynchronizer.d.ts","sourceRoot":"","sources":["../../src/synchronizer/CollectionSynchronizer.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAA;AAE3C,OAAO,EAAE,IAAI,EAAE,MAAM,YAAY,CAAA;AACjC,OAAO,EAAE,UAAU,EAAE,MAAM,wBAAwB,CAAA;AACnD,OAAO,EAAE,YAAY,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;AAC9D,OAAO,EAAE,eAAe,EAAE,MAAM,sBAAsB,CAAA;AACtD,OAAO,EAAE,YAAY,EAAE,MAAM,mBAAmB,CAAA;AAIhD,4FAA4F;AAC5F,qBAAa,sBAAuB,SAAQ,YAAY;;IAa1C,OAAO,CAAC,IAAI;IATxB,kDAAkD;IAClD,cAAc;IACd,gBAAgB,EAAE,MAAM,CAAC,UAAU,EAAE,eAAe,CAAC,CAAK;gBAOtC,IAAI,EAAE,IAAI,EAAE,QAAQ,GAAE,YAAY,EAAO;IAwD7D;;;OAGG;IACG,cAAc,CAAC,OAAO,EAAE,UAAU;IAyCxC;;OAEG;IACH,WAAW,CAAC,MAAM,EAAE,SAAS,CAAC,OAAO,CAAC;IAatC,cAAc,CAAC,UAAU,EAAE,UAAU;IAIrC,2DAA2D;IAC3D,OAAO,CAAC,MAAM,EAAE,MAAM;IAgBtB,uDAAuD;IACvD,UAAU,CAAC,MAAM,EAAE,MAAM;IASzB,+CAA+C;IAC/C,IAAI,KAAK,IAAI,MAAM,EAAE,CAEpB;IAED,OAAO,IAAI;QACT,CAAC,GAAG,EAAE,MAAM,GAAG;YACb,KAAK,EAAE,MAAM,EAAE,CAAA;YACf,IAAI,EAAE;gBAAE,MAAM,EAAE,MAAM,CAAC;gBAAC,UAAU,EAAE,MAAM,CAAA;aAAE,CAAA;SAC7C,CAAA;KACF;CASF"}
```
package/dist/synchronizer/CollectionSynchronizer.js:

```diff
@@ -1,11 +1,10 @@
 import debug from "debug";
-import { parseAutomergeUrl
+import { parseAutomergeUrl } from "../AutomergeUrl.js";
 import { DocSynchronizer } from "./DocSynchronizer.js";
 import { Synchronizer } from "./Synchronizer.js";
 const log = debug("automerge-repo:collectionsync");
 /** A CollectionSynchronizer is responsible for synchronizing a DocCollection with peers. */
 export class CollectionSynchronizer extends Synchronizer {
-    beelay;
     repo;
     /** The set of peers we are connected with */
     #peers = new Set();
@@ -15,25 +14,34 @@ export class CollectionSynchronizer extends Synchronizer {
     /** Used to determine if the document is know to the Collection and a synchronizer exists or is being set up */
     #docSetUp = {};
     #denylist;
-    constructor(
+    constructor(repo, denylist = []) {
         super();
-        this.beelay = beelay;
         this.repo = repo;
         this.#denylist = denylist.map(url => parseAutomergeUrl(url).documentId);
     }
     /** Returns a synchronizer for the given document, creating one if it doesn't already exist. */
-    #fetchDocSynchronizer(
-    if (!this.docSynchronizers[documentId]) {
-
-
+    #fetchDocSynchronizer(handle) {
+        if (!this.docSynchronizers[handle.documentId]) {
+            this.docSynchronizers[handle.documentId] =
+                this.#initDocSynchronizer(handle);
         }
-        return this.docSynchronizers[documentId];
+        return this.docSynchronizers[handle.documentId];
     }
     /** Creates a new docSynchronizer and sets it up to propagate messages */
     #initDocSynchronizer(handle) {
         const docSynchronizer = new DocSynchronizer({
-            beelay: this.beelay,
             handle,
+            peerId: this.repo.networkSubsystem.peerId,
+            onLoadSyncState: async (peerId) => {
+                if (!this.repo.storageSubsystem) {
+                    return;
+                }
+                const { storageId, isEphemeral } = this.repo.peerMetadataByPeerId[peerId] || {};
+                if (!storageId || isEphemeral) {
+                    return;
+                }
+                return this.repo.storageSubsystem.loadSyncState(handle.documentId, storageId);
+            },
         });
         docSynchronizer.on("message", event => this.emit("message", event));
         docSynchronizer.on("open-doc", event => this.emit("open-doc", event));
@@ -76,23 +84,26 @@ export class CollectionSynchronizer extends Synchronizer {
             return;
         }
         this.#docSetUp[documentId] = true;
-        const
+        const handle = await this.repo.find(documentId, {
+            allowableStates: ["ready", "unavailable", "requesting"],
+        });
+        const docSynchronizer = this.#fetchDocSynchronizer(handle);
         docSynchronizer.receiveMessage(message);
         // Initiate sync with any new peers
         const peers = await this.#documentGenerousPeers(documentId);
-        docSynchronizer.beginSync(peers.filter(peerId => !docSynchronizer.hasPeer(peerId)));
+        void docSynchronizer.beginSync(peers.filter(peerId => !docSynchronizer.hasPeer(peerId)));
     }
     /**
      * Starts synchronizing the given document with all peers that we share it generously with.
      */
-    addDocument(
+    addDocument(handle) {
         // HACK: this is a hack to prevent us from adding the same document twice
-        if (this.#docSetUp[documentId]) {
+        if (this.#docSetUp[handle.documentId]) {
             return;
         }
-        const docSynchronizer = this.#fetchDocSynchronizer(
-        void this.#documentGenerousPeers(documentId).then(peers => {
-            docSynchronizer.beginSync(peers);
+        const docSynchronizer = this.#fetchDocSynchronizer(handle);
+        void this.#documentGenerousPeers(handle.documentId).then(peers => {
+            void docSynchronizer.beginSync(peers);
         });
     }
     // TODO: implement this
@@ -111,7 +122,7 @@ export class CollectionSynchronizer extends Synchronizer {
         const { documentId } = docSynchronizer;
         void this.repo.sharePolicy(peerId, documentId).then(okToShare => {
             if (okToShare)
-                docSynchronizer.beginSync([peerId]);
+                void docSynchronizer.beginSync([peerId]);
         });
     }
 }
@@ -128,13 +139,8 @@ export class CollectionSynchronizer extends Synchronizer {
         return Array.from(this.#peers);
     }
     metrics() {
-        return {
-
-
-        // ([documentId, synchronizer]) => {
-        // return [documentId, synchronizer.metrics()]
-        // }
-        // )
-        // )
+        return Object.fromEntries(Object.entries(this.docSynchronizers).map(([documentId, synchronizer]) => {
+            return [documentId, synchronizer.metrics()];
+        }));
     }
 }
```
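In the compiled CollectionSynchronizer above, each DocSynchronizer is now constructed with the repo's `peerId` and an `onLoadSyncState` callback that restores persisted sync state for peers that have a durable `storageId`. A sketch of that callback in isolation, mirroring the diff (the `RepoLike`/`StorageSubsystemLike` shapes are trimmed-down stand-ins for illustration, not the package's types):

```typescript
// Sketch of the sync-state lookup that CollectionSynchronizer now injects into each
// DocSynchronizer (mirrors the onLoadSyncState callback in the diff above).
import type { next as A } from "@automerge/automerge/slim"

type PeerId = string
type DocumentId = string
type StorageId = string

interface StorageSubsystemLike {
  loadSyncState(documentId: DocumentId, storageId: StorageId): Promise<A.SyncState | undefined>
}

interface RepoLike {
  storageSubsystem?: StorageSubsystemLike
  peerMetadataByPeerId: Record<PeerId, { storageId?: StorageId; isEphemeral?: boolean }>
}

// Only peers with a durable storageId get persisted sync state; ephemeral peers
// (and repos without a storage subsystem) start the sync from scratch.
const makeOnLoadSyncState =
  (repo: RepoLike, documentId: DocumentId) =>
  async (peerId: PeerId): Promise<A.SyncState | undefined> => {
    if (!repo.storageSubsystem) return undefined
    const { storageId, isEphemeral } = repo.peerMetadataByPeerId[peerId] ?? {}
    if (!storageId || isEphemeral) return undefined
    return repo.storageSubsystem.loadSyncState(documentId, storageId)
  }
```

Note also that incoming messages for unknown documents are now resolved through `repo.find(documentId, { allowableStates: [...] })`, so the synchronizer works with a `DocHandle` rather than a bare document ID.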
package/dist/synchronizer/DocSynchronizer.d.ts:

```diff
@@ -1,12 +1,13 @@
-import
+import { next as A } from "@automerge/automerge/slim";
 import { DocHandle } from "../DocHandle.js";
 import { EphemeralMessage, RepoMessage, RequestMessage, SyncMessage } from "../network/messages.js";
-import {
+import { PeerId } from "../types.js";
 import { Synchronizer } from "./Synchronizer.js";
 type PeerDocumentStatus = "unknown" | "has" | "unavailable" | "wants";
 interface DocSynchronizerConfig {
     handle: DocHandle<unknown>;
-
+    peerId: PeerId;
+    onLoadSyncState?: (peerId: PeerId) => Promise<A.SyncState | undefined>;
 }
 /**
  * DocSynchronizer takes a handle to an Automerge document, and receives & dispatches sync messages
@@ -15,12 +16,11 @@ interface DocSynchronizerConfig {
 export declare class DocSynchronizer extends Synchronizer {
     #private;
     syncDebounceRate: number;
-    constructor({ handle,
+    constructor({ handle, peerId, onLoadSyncState }: DocSynchronizerConfig);
     get peerStates(): Record<PeerId, PeerDocumentStatus>;
-    get documentId(): DocumentId;
+    get documentId(): import("../types.js").DocumentId;
     hasPeer(peerId: PeerId): boolean;
-    beginSync(peerIds: PeerId[]): void
-    peerWantsDocument(peerId: PeerId): void;
+    beginSync(peerIds: PeerId[]): Promise<void>;
     endSync(peerId: PeerId): void;
     receiveMessage(message: RepoMessage): void;
     receiveEphemeralMessage(message: EphemeralMessage): void;
@@ -30,7 +30,7 @@ export declare class DocSynchronizer extends Synchronizer {
         size: {
             numOps: number;
             numChanges: number;
-        }
+        };
     };
 }
 export {};
```
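Per the declarations above, `beginSync` now returns `Promise<void>` and `peerWantsDocument` is gone, which is why the compiled CollectionSynchronizer prefixes its calls with `void`. A small sketch of the two calling styles this implies (`DocSynchronizerLike` is a stand-in interface, not the package's export):

```typescript
// beginSync is now async: callers either discard the promise explicitly, as the
// compiled CollectionSynchronizer does, or await it to sequence work after it.
type PeerId = string

interface DocSynchronizerLike {
  beginSync(peerIds: PeerId[]): Promise<void>
}

// Fire-and-forget, matching the `void docSynchronizer.beginSync(...)` call sites:
function kickOffSync(sync: DocSynchronizerLike, peers: PeerId[]): void {
  void sync.beginSync(peers)
}

// ...or awaited, when the caller wants to continue only after the sync has started:
async function syncAndThen(sync: DocSynchronizerLike, peers: PeerId[]): Promise<void> {
  await sync.beginSync(peers)
}
```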
package/dist/synchronizer/DocSynchronizer.d.ts.map:

```diff
@@ -1 +1 @@
-{"version":3,"file":"DocSynchronizer.d.ts","sourceRoot":"","sources":["../../src/synchronizer/DocSynchronizer.ts"],"names":[],"mappings":"AAAA,OAAO,
+{"version":3,"file":"DocSynchronizer.d.ts","sourceRoot":"","sources":["../../src/synchronizer/DocSynchronizer.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,IAAI,CAAC,EAAE,MAAM,2BAA2B,CAAA;AAGrD,OAAO,EACL,SAAS,EAKV,MAAM,iBAAiB,CAAA;AACxB,OAAO,EAEL,gBAAgB,EAEhB,WAAW,EACX,cAAc,EACd,WAAW,EAEZ,MAAM,wBAAwB,CAAA;AAC/B,OAAO,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;AACpC,OAAO,EAAE,YAAY,EAAE,MAAM,mBAAmB,CAAA;AAGhD,KAAK,kBAAkB,GAAG,SAAS,GAAG,KAAK,GAAG,aAAa,GAAG,OAAO,CAAA;AAOrE,UAAU,qBAAqB;IAC7B,MAAM,EAAE,SAAS,CAAC,OAAO,CAAC,CAAA;IAC1B,MAAM,EAAE,MAAM,CAAA;IACd,eAAe,CAAC,EAAE,CAAC,MAAM,EAAE,MAAM,KAAK,OAAO,CAAC,CAAC,CAAC,SAAS,GAAG,SAAS,CAAC,CAAA;CACvE;AAED;;;GAGG;AACH,qBAAa,eAAgB,SAAQ,YAAY;;IAE/C,gBAAgB,SAAM;gBAyBV,EAAE,MAAM,EAAE,MAAM,EAAE,eAAe,EAAE,EAAE,qBAAqB;IAyBtE,IAAI,UAAU,uCAEb;IAED,IAAI,UAAU,qCAEb;IAqID,OAAO,CAAC,MAAM,EAAE,MAAM;IAIhB,SAAS,CAAC,OAAO,EAAE,MAAM,EAAE;IA8DjC,OAAO,CAAC,MAAM,EAAE,MAAM;IAKtB,cAAc,CAAC,OAAO,EAAE,WAAW;IAkBnC,uBAAuB,CAAC,OAAO,EAAE,gBAAgB;IAuBjD,kBAAkB,CAAC,OAAO,EAAE,WAAW,GAAG,cAAc;IAwFxD,OAAO,IAAI;QAAE,KAAK,EAAE,MAAM,EAAE,CAAC;QAAC,IAAI,EAAE;YAAE,MAAM,EAAE,MAAM,CAAC;YAAC,UAAU,EAAE,MAAM,CAAA;SAAE,CAAA;KAAE;CAM7E"}
```