@automerge/automerge-repo 2.0.0-alpha.22 → 2.0.0-alpha.26
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +3 -2
- package/dist/DocHandle.d.ts +4 -12
- package/dist/DocHandle.d.ts.map +1 -1
- package/dist/DocHandle.js +16 -24
- package/dist/storage/StorageSubsystem.d.ts +4 -0
- package/dist/storage/StorageSubsystem.d.ts.map +1 -1
- package/dist/storage/StorageSubsystem.js +38 -12
- package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -1
- package/dist/synchronizer/DocSynchronizer.js +31 -23
- package/package.json +3 -4
- package/src/DocHandle.ts +21 -25
- package/src/storage/StorageSubsystem.ts +44 -14
- package/src/synchronizer/DocSynchronizer.ts +36 -27
- package/test/DocHandle.test.ts +67 -2
- package/test/Repo.test.ts +1 -8
- package/test/StorageSubsystem.test.ts +80 -1
package/README.md
CHANGED
@@ -232,14 +232,14 @@ Now import it and add it to your list of network adapters:
 
 ```ts
 // main.tsx
-import {
+import { WebSocketClientAdapter } from "@automerge/automerge-repo-network-websocket" // <-- add this line
 
 // ...
 
 const repo = new Repo({
   network: [
     new BroadcastChannelNetworkAdapter(),
-    new
+    new WebSocketClientAdapter("ws://localhost:3030"), // <-- add this line
   ],
   storage: new IndexedDBStorageAdapter(),
 })
@@ -261,3 +261,4 @@ With gratitude for contributions by:
 - Jeremy Rose
 - Alex Currie-Clark
 - Dylan Mackenzie
+- Maciek Sakrejda
package/dist/DocHandle.d.ts
CHANGED
@@ -64,15 +64,6 @@ export declare class DocHandle<T> extends EventEmitter<DocHandleEvents<T>> {
      * checking `inState()`.
      */
     whenReady(awaitStates?: HandleState[]): Promise<void>;
-    /**
-     * @returns the current state of this handle's Automerge document.
-     *
-     * This is the recommended way to access a handle's document. Note that this waits for the handle
-     * to be ready if necessary. If loading (or synchronization) fails, this will never resolve.
-     */
-    legacyAsyncDoc(
-    /** states to wait for, such as "LOADING". mostly for internal use. */
-    awaitStates?: HandleState[]): Promise<A.Doc<T> | undefined>;
     /**
      * Returns the current state of the Automerge document this handle manages.
      *
@@ -159,7 +150,7 @@ export declare class DocHandle<T> extends EventEmitter<DocHandleEvents<T>> {
      */
     doneLoading(): void;
     /**
-     * Called by the repo
+     * Called by the repo when a doc handle changes or we receive new remote heads.
     * @hidden
     */
     setRemoteHeads(storageId: StorageId, heads: UrlHeads): void;
@@ -199,11 +190,12 @@ export declare class DocHandle<T> extends EventEmitter<DocHandleEvents<T>> {
     /** the handle of the document to merge into this one */
     otherHandle: DocHandle<T>): void;
     /**
-     *
+     * Updates the internal state machine to mark the document unavailable.
     * @hidden
     */
     unavailable(): void;
-    /**
+    /**
+     * Called by the repo either when the document is not found in storage.
     * @hidden
     * */
     request(): void;
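The removal of `legacyAsyncDoc()` means callers now wait explicitly with `whenReady()` and then read the document synchronously with `doc()`. A minimal migration sketch (the document shape and the timeout handling here are illustrative, not part of the published API):

```ts
import type { DocHandle } from "@automerge/automerge-repo"

interface TodoDoc {
  items: string[]
}

// Before (removed between alpha.22 and alpha.26): const doc = await handle.legacyAsyncDoc()
// After: wait for readiness explicitly, then read synchronously.
async function readTodos(handle: DocHandle<TodoDoc>) {
  try {
    // whenReady() resolves once the handle reaches the "ready" state
    // (and rejects if that doesn't happen within the handle's timeout).
    await handle.whenReady()
  } catch {
    return undefined // loading or synchronization did not complete in time
  }
  // doc() returns the current document state synchronously on a ready handle.
  return handle.doc()
}
```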
package/dist/DocHandle.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"DocHandle.d.ts","sourceRoot":"","sources":["../src/DocHandle.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,CAAC,MAAM,gCAAgC,CAAA;AAEnD,OAAO,EAAE,YAAY,EAAE,MAAM,eAAe,CAAA;AAU5C,OAAO,KAAK,EAAE,YAAY,EAAE,UAAU,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,YAAY,CAAA;AAC5E,OAAO,EAAE,SAAS,EAAE,MAAM,oBAAoB,CAAA;AAE9C;;;;;;;;;;;;GAYG;AACH,qBAAa,SAAS,CAAC,CAAC,CAAE,SAAQ,YAAY,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC;;
+
{"version":3,"file":"DocHandle.d.ts","sourceRoot":"","sources":["../src/DocHandle.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,CAAC,MAAM,gCAAgC,CAAA;AAEnD,OAAO,EAAE,YAAY,EAAE,MAAM,eAAe,CAAA;AAU5C,OAAO,KAAK,EAAE,YAAY,EAAE,UAAU,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,YAAY,CAAA;AAC5E,OAAO,EAAE,SAAS,EAAE,MAAM,oBAAoB,CAAA;AAE9C;;;;;;;;;;;;GAYG;AACH,qBAAa,SAAS,CAAC,CAAC,CAAE,SAAQ,YAAY,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC;;IAwBvD,UAAU,EAAE,UAAU;IAF/B,cAAc;gBAEL,UAAU,EAAE,UAAU,EAC7B,OAAO,GAAE,gBAAgB,CAAC,CAAC,CAAM;IAqKnC;OACG;IACH,IAAI,GAAG,IAAI,YAAY,CAKtB;IAED;;;;;OAKG;IACH,OAAO,gBAAgC;IAEvC;;;;;OAKG;IACH,UAAU,gBAAmC;IAE7C;;;;;OAKG;IACH,SAAS,gBAAkC;IAE3C;;;;OAIG;IACH,aAAa,gBAAsC;IAEnD;;OAEG;IACH,OAAO,WAAY,WAAW,EAAE,aAC0B;IAE1D,cAAc;IACd,IAAI,KAAK,yFAER;IAED;;;;;;OAMG;IACG,SAAS,CAAC,WAAW,GAAE,WAAW,EAAc;IAItD;;;;;;OAMG;IACH,GAAG;IAQH;;qBAEiB;IACjB,OAAO;IAOP;;;;OAIG;IACH,KAAK,IAAI,QAAQ;IAQjB,KAAK;IAIL;;;;;;;;;;;OAWG;IACH,OAAO,IAAI,QAAQ,EAAE,GAAG,SAAS;IAWjC;;;;;;;;;;;;OAYG;IACH,IAAI,CAAC,KAAK,EAAE,QAAQ,GAAG,SAAS,CAAC,CAAC,CAAC;IA8BnC;;;;;;;;;;;;OAYG;IACH,IAAI,CAAC,KAAK,EAAE,QAAQ,GAAG,SAAS,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,EAAE,QAAQ,GAAG,CAAC,CAAC,KAAK,EAAE;IAkClE;;;;;;;;;;OAUG;IACH,QAAQ,CAAC,MAAM,CAAC,EAAE,MAAM,GAAG,CAAC,CAAC,aAAa,GAAG,SAAS;IAetD;;;;;OAKG;IACH,MAAM,CAAC,QAAQ,EAAE,CAAC,GAAG,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;IAI5C;;;;OAIG;IACH,WAAW;IAIX;;;OAGG;IACH,cAAc,CAAC,SAAS,EAAE,SAAS,EAAE,KAAK,EAAE,QAAQ;IAKpD,0CAA0C;IAC1C,cAAc,CAAC,SAAS,EAAE,SAAS,GAAG,QAAQ,GAAG,SAAS;IAI1D;;;;;;;;;;;;;;OAcG;IACH,MAAM,CAAC,QAAQ,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,OAAO,GAAE,CAAC,CAAC,aAAa,CAAC,CAAC,CAAM;IAkBhE;;;;OAIG;IACH,QAAQ,CACN,KAAK,EAAE,QAAQ,EACf,QAAQ,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EACvB,OAAO,GAAE,CAAC,CAAC,aAAa,CAAC,CAAC,CAAM,GAC/B,QAAQ,EAAE,GAAG,SAAS;IA6BzB;;;;;;;OAOG;IACH,KAAK;IACH,wDAAwD;IACxD,WAAW,EAAE,SAAS,CAAC,CAAC,CAAC;IAiB3B;;;OAGG;IACH,WAAW;IAIX;;;SAGK;IACL,OAAO;IAIP,8DAA8D;IAC9D,MAAM;IAIN,sDAAsD;IACtD,MAAM;IAIN,uDAAuD;IACvD,MAAM;IAIN;;;;;;OAMG;IACH,SAAS,CAAC,OAAO,EAAE,OAAO;IAO1B,OAAO,IAAI;QAAE,MAAM,EAAE,MAAM,CAAC;QAAC,UAAU,EAAE,MAAM,CAAA;KAAE;CAGlD;AAID,cAAc;AACd,MAAM,MAAM,gBAAgB,CAAC,CAAC,IAE1B;IACE,gGAAgG;IAChG,KAAK,EAAE,IAAI,CAAA;IAEX,yCAAyC;IACzC,YAAY,CAAC,EAAE,CAAC,CAAA;CACjB,GAED;IACE,KAAK,CAAC,EAAE,KAAK,CAAA;IAGb,KAAK,CAAC,EAAE,QAAQ,CAAA;IAEhB,+HAA+H;IAC/H,YAAY,CAAC,EAAE,MAAM,CAAA;CACtB,CAAA;AAIL,2EAA2E;AAC3E,MAAM,WAAW,eAAe,CAAC,CAAC;IAChC,eAAe,EAAE,CAAC,OAAO,EAAE,6BAA6B,CAAC,CAAC,CAAC,KAAK,IAAI,CAAA;IACpE,MAAM,EAAE,CAAC,OAAO,EAAE,sBAAsB,CAAC,CAAC,CAAC,KAAK,IAAI,CAAA;IACpD,MAAM,EAAE,CAAC,OAAO,EAAE,sBAAsB,CAAC,CAAC,CAAC,KAAK,IAAI,CAAA;IACpD,mBAAmB,EAAE,CAAC,OAAO,EAAE,gCAAgC,CAAC,CAAC,CAAC,KAAK,IAAI,CAAA;IAC3E,4BAA4B,EAAE,CAC5B,OAAO,EAAE,wCAAwC,CAAC,CAAC,CAAC,KACjD,IAAI,CAAA;IACT,cAAc,EAAE,CAAC,OAAO,EAAE,2BAA2B,KAAK,IAAI,CAAA;CAC/D;AAED,sDAAsD;AACtD,MAAM,WAAW,6BAA6B,CAAC,CAAC;IAC9C,MAAM,EAAE,SAAS,CAAC,CAAC,CAAC,CAAA;IACpB,GAAG,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;CACd;AAED,6CAA6C;AAC7C,MAAM,WAAW,sBAAsB,CAAC,CAAC;IACvC,8BAA8B;IAC9B,MAAM,EAAE,SAAS,CAAC,CAAC,CAAC,CAAA;IACpB,iDAAiD;IACjD,GAAG,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;IACb,wDAAwD;IACxD,OAAO,EAAE,CAAC,CAAC,KAAK,EAAE,CAAA;IAClB,mCAAmC;IACnC,SAAS,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAA;CAC1B;AAED,4CAA4C;AAC5C,MAAM,WAAW,sBAAsB,CAAC,CAAC;IACvC,MAAM,EAAE,SAAS,CAAC,CAAC,CAAC,CAAA;CACrB;AAED,6DAA6D;AAC7D,MAAM,WAAW,2BAA2B,CAAC,CAAC;IAC5C,MAAM,EAAE,SAAS,CAAC,CAAC,CAAC,CAAA;CACrB;AAED,qEAAqE;AACrE,MAAM,WAAW,gCAAgC,CAAC,CAAC;IACjD,MAAM,EAAE,SAAS,CAAC,CAAC,CAAC,CAAA;IACpB,QAAQ,EAAE,MAAM,CAAA;IAChB,OAAO,EAAE,OAAO,CAAA;CACjB;A
AED,kEAAkE;AAClE,MAAM,WAAW,wCAAwC,CAAC,CAAC;IACzD,MAAM,EAAE,SAAS,CAAC,CAAC,CAAC,CAAA;IACpB,IAAI,EAAE,UAAU,CAAA;CACjB;AAED,8DAA8D;AAC9D,MAAM,WAAW,2BAA2B;IAC1C,SAAS,EAAE,SAAS,CAAA;IACpB,KAAK,EAAE,QAAQ,CAAA;CAChB;AAMD;;GAEG;AACH,eAAO,MAAM,WAAW;IACtB,kEAAkE;;IAElE,mDAAmD;;IAEnD,6EAA6E;;IAE7E,gCAAgC;;IAEhC,2EAA2E;;IAE3E,kDAAkD;;IAElD,4EAA4E;;CAEpE,CAAA;AACV,MAAM,MAAM,WAAW,GAAG,CAAC,OAAO,WAAW,CAAC,CAAC,MAAM,OAAO,WAAW,CAAC,CAAA;AAExE,eAAO,MACL,IAAI,UACJ,OAAO,aACP,UAAU,gBACV,KAAK,WACL,QAAQ,cACR,OAAO,aACP,WAAW,eACE,CAAA"}
package/dist/DocHandle.js
CHANGED
@@ -33,6 +33,8 @@ export class DocHandle extends EventEmitter {
     #timeoutDelay = 60_000;
     /** A dictionary mapping each peer to the last heads we know they have. */
     #remoteHeads = {};
+    /** Cache for view handles, keyed by the stringified heads */
+    #viewCache = new Map();
     /** @hidden */
     constructor(documentId, options = {}) {
         super();
@@ -228,26 +230,6 @@ export class DocHandle extends EventEmitter {
     async whenReady(awaitStates = ["ready"]) {
         await withTimeout(this.#statePromise(awaitStates), this.#timeoutDelay);
     }
-    /**
-     * @returns the current state of this handle's Automerge document.
-     *
-     * This is the recommended way to access a handle's document. Note that this waits for the handle
-     * to be ready if necessary. If loading (or synchronization) fails, this will never resolve.
-     */
-    async legacyAsyncDoc(
-    /** states to wait for, such as "LOADING". mostly for internal use. */
-    awaitStates = ["ready", "unavailable"]) {
-        try {
-            // wait for the document to enter one of the desired states
-            await this.#statePromise(awaitStates);
-        }
-        catch (error) {
-            // if we timed out, return undefined
-            return undefined;
-        }
-        // Return the document
-        return !this.isUnavailable() ? this.#doc : undefined;
-    }
     /**
      * Returns the current state of the Automerge document this handle manages.
      *
@@ -322,6 +304,13 @@ export class DocHandle extends EventEmitter {
         if (!this.isReady()) {
             throw new Error(`DocHandle#${this.documentId} is not ready. Check \`handle.isReady()\` before calling view().`);
         }
+        // Create a cache key from the heads
+        const cacheKey = JSON.stringify(heads);
+        // Check if we have a cached handle for these heads
+        const cachedHandle = this.#viewCache.get(cacheKey);
+        if (cachedHandle) {
+            return cachedHandle;
+        }
         // Create a new handle with the same documentId but fixed heads
         const handle = new DocHandle(this.documentId, {
             heads,
@@ -329,6 +318,8 @@ export class DocHandle extends EventEmitter {
         });
         handle.update(() => A.clone(this.#doc));
         handle.doneLoading();
+        // Store in cache
+        this.#viewCache.set(cacheKey, handle);
         return handle;
     }
     /**
@@ -409,7 +400,7 @@ export class DocHandle extends EventEmitter {
         this.#machine.send({ type: DOC_READY });
     }
     /**
-     * Called by the repo
+     * Called by the repo when a doc handle changes or we receive new remote heads.
     * @hidden
     */
     setRemoteHeads(storageId, heads) {
@@ -498,13 +489,14 @@ export class DocHandle extends EventEmitter {
         });
     }
     /**
-     *
+     * Updates the internal state machine to mark the document unavailable.
     * @hidden
     */
     unavailable() {
         this.#machine.send({ type: DOC_UNAVAILABLE });
     }
-    /**
+    /**
+     * Called by the repo either when the document is not found in storage.
     * @hidden
     * */
     request() {
@@ -533,7 +525,7 @@ export class DocHandle extends EventEmitter {
     broadcast(message) {
         this.emit("ephemeral-message-outbound", {
             handle: this,
-            data: encode(message),
+            data: new Uint8Array(encode(message)),
         });
     }
     metrics() {
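For context, `broadcast()` is the sending half of the handle's ephemeral-message channel, and the change above wraps the encoded payload in a plain `Uint8Array` before it is emitted to the network. A rough usage sketch (the cursor payload is invented for illustration, and the receive-side payload field name is an assumption based on the published typings):

```ts
import { Repo } from "@automerge/automerge-repo"

const repo = new Repo({ network: [] })
const handle = repo.create<{ text: string }>()

// Receiving side: ephemeral messages are not persisted in the document history.
handle.on("ephemeral-message", payload => {
  console.log("peer says:", payload.message)
})

// Sending side: the message is encoded and (as of this change)
// normalized to a plain Uint8Array before being broadcast to peers.
handle.broadcast({ kind: "cursor", position: 42 })
```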
package/dist/storage/StorageSubsystem.d.ts
CHANGED
@@ -39,6 +39,10 @@ export declare class StorageSubsystem extends EventEmitter<StorageSubsystemEvent
     namespace: string,
     /** Key to remove. Typically a UUID or other unique identifier, but could be any string. */
     key: string): Promise<void>;
+    /**
+     * Loads and combines document chunks from storage, with snapshots first.
+     */
+    loadDocData(documentId: DocumentId): Promise<Uint8Array | null>;
     /**
      * Loads the Automerge document with the given ID from storage.
      */
package/dist/storage/StorageSubsystem.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"StorageSubsystem.d.ts","sourceRoot":"","sources":["../../src/storage/StorageSubsystem.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,CAAC,MAAM,gCAAgC,CAAA;AAInD,OAAO,EAAE,KAAK,UAAU,EAAE,MAAM,aAAa,CAAA;AAC7C,OAAO,EAAE,uBAAuB,EAAE,MAAM,8BAA8B,CAAA;AACtE,OAAO,EAAyB,SAAS,EAAE,MAAM,YAAY,CAAA;
+
{"version":3,"file":"StorageSubsystem.d.ts","sourceRoot":"","sources":["../../src/storage/StorageSubsystem.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,CAAC,MAAM,gCAAgC,CAAA;AAInD,OAAO,EAAE,KAAK,UAAU,EAAE,MAAM,aAAa,CAAA;AAC7C,OAAO,EAAE,uBAAuB,EAAE,MAAM,8BAA8B,CAAA;AACtE,OAAO,EAAyB,SAAS,EAAE,MAAM,YAAY,CAAA;AAG7D,OAAO,EAAE,YAAY,EAAE,MAAM,eAAe,CAAA;AAG5C,KAAK,sBAAsB,GAAG;IAC5B,iBAAiB,EAAE,CAAC,GAAG,EAAE;QACvB,UAAU,EAAE,UAAU,CAAA;QACtB,cAAc,EAAE,MAAM,CAAA;QACtB,MAAM,EAAE,MAAM,CAAA;QACd,UAAU,EAAE,MAAM,CAAA;KACnB,KAAK,IAAI,CAAA;CACX,CAAA;AAED;;;GAGG;AACH,qBAAa,gBAAiB,SAAQ,YAAY,CAAC,sBAAsB,CAAC;;gBAe5D,cAAc,EAAE,uBAAuB;IAK7C,EAAE,IAAI,OAAO,CAAC,SAAS,CAAC;IA2B9B,kCAAkC;IAC5B,IAAI;IACR,iFAAiF;IACjF,SAAS,EAAE,MAAM;IAEjB,yFAAyF;IACzF,GAAG,EAAE,MAAM,GACV,OAAO,CAAC,UAAU,GAAG,SAAS,CAAC;IAKlC,gCAAgC;IAC1B,IAAI;IACR,iFAAiF;IACjF,SAAS,EAAE,MAAM;IAEjB,yFAAyF;IACzF,GAAG,EAAE,MAAM;IAEX,sCAAsC;IACtC,IAAI,EAAE,UAAU,GACf,OAAO,CAAC,IAAI,CAAC;IAKhB,oCAAoC;IAC9B,MAAM;IACV,iFAAiF;IACjF,SAAS,EAAE,MAAM;IAEjB,2FAA2F;IAC3F,GAAG,EAAE,MAAM,GACV,OAAO,CAAC,IAAI,CAAC;IAOhB;;OAEG;IACG,WAAW,CAAC,UAAU,EAAE,UAAU,GAAG,OAAO,CAAC,UAAU,GAAG,IAAI,CAAC;IAgDrE;;OAEG;IACG,OAAO,CAAC,CAAC,EAAE,UAAU,EAAE,UAAU,GAAG,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC;IAqBlE;;;;;;OAMG;IACG,OAAO,CAAC,UAAU,EAAE,UAAU,EAAE,GAAG,EAAE,CAAC,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC;IAezE;;OAEG;IACG,SAAS,CAAC,UAAU,EAAE,UAAU;IAkEhC,aAAa,CACjB,UAAU,EAAE,UAAU,EACtB,SAAS,EAAE,SAAS,GACnB,OAAO,CAAC,CAAC,CAAC,SAAS,GAAG,SAAS,CAAC;IAW7B,aAAa,CACjB,UAAU,EAAE,UAAU,EACtB,SAAS,EAAE,SAAS,EACpB,SAAS,EAAE,CAAC,CAAC,SAAS,GACrB,OAAO,CAAC,IAAI,CAAC;CA8CjB"}
package/dist/storage/StorageSubsystem.js
CHANGED
@@ -3,7 +3,6 @@ import debug from "debug";
 import { headsAreSame } from "../helpers/headsAreSame.js";
 import { mergeArrays } from "../helpers/mergeArrays.js";
 import { keyHash, headsHash } from "./keyHash.js";
-import { chunkTypeFromKey } from "./chunkTypeFromKey.js";
 import * as Uuid from "uuid";
 import { EventEmitter } from "eventemitter3";
 import { encodeHeads } from "../AutomergeUrl.js";
@@ -76,31 +75,58 @@ export class StorageSubsystem extends EventEmitter {
     }
     // AUTOMERGE DOCUMENT STORAGE
     /**
-     * Loads
+     * Loads and combines document chunks from storage, with snapshots first.
     */
-    async
-    // Load
-    const
+    async loadDocData(documentId) {
+        // Load snapshots first
+        const snapshotChunks = await this.#storageAdapter.loadRange([
+            documentId,
+            "snapshot",
+        ]);
+        const incrementalChunks = await this.#storageAdapter.loadRange([
+            documentId,
+            "incremental",
+        ]);
         const binaries = [];
         const chunkInfos = [];
-
-
+        // Process snapshots first
+        for (const chunk of snapshotChunks) {
             if (chunk.data === undefined)
                 continue;
-
-
+            chunkInfos.push({
+                key: chunk.key,
+                type: "snapshot",
+                size: chunk.data.length,
+            });
+            binaries.push(chunk.data);
+        }
+        // Then process incrementals
+        for (const chunk of incrementalChunks) {
+            if (chunk.data === undefined)
                 continue;
             chunkInfos.push({
                 key: chunk.key,
-                type:
+                type: "incremental",
                 size: chunk.data.length,
             });
            binaries.push(chunk.data);
         }
+        // Store chunk infos for future reference
         this.#chunkInfos.set(documentId, chunkInfos);
+        // If no chunks were found, return null
+        if (binaries.length === 0) {
+            return null;
+        }
         // Merge the chunks into a single binary
-
-
+        return mergeArrays(binaries);
+    }
+    /**
+     * Loads the Automerge document with the given ID from storage.
+     */
+    async loadDoc(documentId) {
+        // Load and combine chunks
+        const binary = await this.loadDocData(documentId);
+        if (!binary)
             return null;
         // Load into an Automerge document
         const start = performance.now();
package/dist/synchronizer/DocSynchronizer.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"DocSynchronizer.d.ts","sourceRoot":"","sources":["../../src/synchronizer/DocSynchronizer.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,CAAC,MAAM,gCAAgC,CAAA;AAGnD,OAAO,EACL,SAAS,EAKV,MAAM,iBAAiB,CAAA;AACxB,OAAO,EAEL,gBAAgB,EAEhB,WAAW,EACX,cAAc,EACd,WAAW,EAEZ,MAAM,wBAAwB,CAAA;AAC/B,OAAO,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;AACpC,OAAO,EAAE,YAAY,EAAE,MAAM,mBAAmB,CAAA;AAGhD,KAAK,kBAAkB,GAAG,SAAS,GAAG,KAAK,GAAG,aAAa,GAAG,OAAO,CAAA;AAOrE,UAAU,qBAAqB;IAC7B,MAAM,EAAE,SAAS,CAAC,OAAO,CAAC,CAAA;IAC1B,MAAM,EAAE,MAAM,CAAA;IACd,eAAe,CAAC,EAAE,CAAC,MAAM,EAAE,MAAM,KAAK,OAAO,CAAC,CAAC,CAAC,SAAS,GAAG,SAAS,CAAC,CAAA;CACvE;AAED;;;GAGG;AACH,qBAAa,eAAgB,SAAQ,YAAY;;IAE/C,gBAAgB,SAAM;gBAyBV,EAAE,MAAM,EAAE,MAAM,EAAE,eAAe,EAAE,EAAE,qBAAqB;
+
{"version":3,"file":"DocSynchronizer.d.ts","sourceRoot":"","sources":["../../src/synchronizer/DocSynchronizer.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,CAAC,MAAM,gCAAgC,CAAA;AAGnD,OAAO,EACL,SAAS,EAKV,MAAM,iBAAiB,CAAA;AACxB,OAAO,EAEL,gBAAgB,EAEhB,WAAW,EACX,cAAc,EACd,WAAW,EAEZ,MAAM,wBAAwB,CAAA;AAC/B,OAAO,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;AACpC,OAAO,EAAE,YAAY,EAAE,MAAM,mBAAmB,CAAA;AAGhD,KAAK,kBAAkB,GAAG,SAAS,GAAG,KAAK,GAAG,aAAa,GAAG,OAAO,CAAA;AAOrE,UAAU,qBAAqB;IAC7B,MAAM,EAAE,SAAS,CAAC,OAAO,CAAC,CAAA;IAC1B,MAAM,EAAE,MAAM,CAAA;IACd,eAAe,CAAC,EAAE,CAAC,MAAM,EAAE,MAAM,KAAK,OAAO,CAAC,CAAC,CAAC,SAAS,GAAG,SAAS,CAAC,CAAA;CACvE;AAED;;;GAGG;AACH,qBAAa,eAAgB,SAAQ,YAAY;;IAE/C,gBAAgB,SAAM;gBAyBV,EAAE,MAAM,EAAE,MAAM,EAAE,eAAe,EAAE,EAAE,qBAAqB;IAyBtE,IAAI,UAAU,uCAEb;IAED,IAAI,UAAU,qCAEb;IAqID,OAAO,CAAC,MAAM,EAAE,MAAM;IAIhB,SAAS,CAAC,OAAO,EAAE,MAAM,EAAE;IA8DjC,OAAO,CAAC,MAAM,EAAE,MAAM;IAKtB,cAAc,CAAC,OAAO,EAAE,WAAW;IAkBnC,uBAAuB,CAAC,OAAO,EAAE,gBAAgB;IAuBjD,kBAAkB,CAAC,OAAO,EAAE,WAAW,GAAG,cAAc;IAwFxD,OAAO,IAAI;QAAE,KAAK,EAAE,MAAM,EAAE,CAAC;QAAC,IAAI,EAAE;YAAE,MAAM,EAAE,MAAM,CAAC;YAAC,UAAU,EAAE,MAAM,CAAA;SAAE,CAAA;KAAE;CAM7E"}
package/dist/synchronizer/DocSynchronizer.js
CHANGED
@@ -37,7 +37,6 @@ export class DocSynchronizer extends Synchronizer {
     handle.on("ephemeral-message-outbound", payload => this.#broadcastToPeers(payload));
     // Process pending sync messages immediately after the handle becomes ready.
     void (async () => {
-        await handle.whenReady([READY, REQUESTING]);
         this.#processAllPendingSyncMessages();
     })();
 }
@@ -49,10 +48,14 @@ export class DocSynchronizer extends Synchronizer {
 }
 /// PRIVATE
 async #syncWithPeers() {
-
-
-
-
+    try {
+        await this.#handle.whenReady();
+        const doc = this.#handle.doc(); // XXX THIS ONE IS WEIRD
+        this.#peers.forEach(peerId => this.#sendSyncMessage(peerId, doc));
+    }
+    catch (e) {
+        console.log("sync with peers threw an exception");
+    }
 }
 async #broadcastToPeers({ data, }) {
     this.#log(`broadcastToPeers`, this.#peers);
@@ -155,22 +158,16 @@ export class DocSynchronizer extends Synchronizer {
     return this.#peers.includes(peerId);
 }
 async beginSync(peerIds) {
-
-
-
-
-
-
-
+    void this.#handle
+        .whenReady([READY, REQUESTING, UNAVAILABLE])
+        .then(() => {
+        this.#syncStarted = true;
+        this.#checkDocUnavailable();
+    })
+        .catch(e => {
+        console.log("caught whenready", e);
         this.#syncStarted = true;
         this.#checkDocUnavailable();
-        const wasUnavailable = doc === undefined;
-        if (wasUnavailable && noPeersWithDocument) {
-            return;
-        }
-        // If the doc is unavailable we still need a blank document to generate
-        // the sync message from
-        return doc ?? A.init();
     });
     const peersWithDocument = this.#peers.some(peerId => {
         return this.#peerDocumentStatuses[peerId] == "has";
@@ -186,11 +183,22 @@ export class DocSynchronizer extends Synchronizer {
     // TODO: cover that case with a test and remove this hack
     const reparsedSyncState = A.decodeSyncState(A.encodeSyncState(syncState));
     this.#setSyncState(peerId, reparsedSyncState);
-
-
-
-
+    // At this point if we don't have anything in our storage, we need to use an empty doc to sync
+    // with; but we don't want to surface that state to the front end
+    this.#handle
+        .whenReady([READY, REQUESTING, UNAVAILABLE])
+        .then(() => {
+        const doc = this.#handle.isReady()
+            ? this.#handle.doc()
+            : A.init();
+        const noPeersWithDocument = peerIds.every(peerId => this.#peerDocumentStatuses[peerId] in ["unavailable", "wants"]);
+        const wasUnavailable = doc === undefined;
+        if (wasUnavailable && noPeersWithDocument) {
+            return;
         }
+        // If the doc is unavailable we still need a blank document to generate
+        // the sync message from
+        this.#sendSyncMessage(peerId, doc ?? A.init());
     })
         .catch(err => {
         this.#log(`Error loading doc for ${peerId}: ${err}`);
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@automerge/automerge-repo",
-  "version": "2.0.0-alpha.
+  "version": "2.0.0-alpha.26",
   "description": "A repository object to manage a collection of automerge documents",
   "repository": "https://github.com/automerge/automerge-repo/tree/master/packages/automerge-repo",
   "author": "Peter van Hardenberg <pvh@pvh.ca>",
@@ -20,6 +20,7 @@
   },
   "devDependencies": {
     "http-server": "^14.1.0",
+    "ts-node": "^10.9.2",
     "vite": "^5.0.8"
   },
   "dependencies": {
@@ -29,8 +30,6 @@
     "debug": "^4.3.4",
     "eventemitter3": "^5.0.1",
     "fast-sha256": "^1.3.0",
-    "tiny-typed-emitter": "^2.1.0",
-    "ts-node": "^10.9.1",
     "uuid": "^9.0.0",
     "xstate": "^5.9.1"
   },
@@ -60,5 +59,5 @@
   "publishConfig": {
     "access": "public"
   },
-  "gitHead": "
+  "gitHead": "3d3e0f6fb267080a5bd8d12315efb85b8a2ada70"
 }
package/src/DocHandle.ts
CHANGED
@@ -45,6 +45,9 @@ export class DocHandle<T> extends EventEmitter<DocHandleEvents<T>> {
   /** A dictionary mapping each peer to the last heads we know they have. */
   #remoteHeads: Record<StorageId, UrlHeads> = {}
 
+  /** Cache for view handles, keyed by the stringified heads */
+  #viewCache: Map<string, DocHandle<T>> = new Map()
+
   /** @hidden */
   constructor(
     public documentId: DocumentId,
@@ -275,27 +278,6 @@ export class DocHandle<T> extends EventEmitter<DocHandleEvents<T>> {
     await withTimeout(this.#statePromise(awaitStates), this.#timeoutDelay)
   }
 
-  /**
-   * @returns the current state of this handle's Automerge document.
-   *
-   * This is the recommended way to access a handle's document. Note that this waits for the handle
-   * to be ready if necessary. If loading (or synchronization) fails, this will never resolve.
-   */
-  async legacyAsyncDoc(
-    /** states to wait for, such as "LOADING". mostly for internal use. */
-    awaitStates: HandleState[] = ["ready", "unavailable"]
-  ) {
-    try {
-      // wait for the document to enter one of the desired states
-      await this.#statePromise(awaitStates)
-    } catch (error) {
-      // if we timed out, return undefined
-      return undefined
-    }
-    // Return the document
-    return !this.isUnavailable() ? this.#doc : undefined
-  }
-
   /**
    * Returns the current state of the Automerge document this handle manages.
    *
@@ -380,6 +362,16 @@ export class DocHandle<T> extends EventEmitter<DocHandleEvents<T>> {
         `DocHandle#${this.documentId} is not ready. Check \`handle.isReady()\` before calling view().`
       )
     }
+
+    // Create a cache key from the heads
+    const cacheKey = JSON.stringify(heads)
+
+    // Check if we have a cached handle for these heads
+    const cachedHandle = this.#viewCache.get(cacheKey)
+    if (cachedHandle) {
+      return cachedHandle
+    }
+
     // Create a new handle with the same documentId but fixed heads
     const handle = new DocHandle<T>(this.documentId, {
       heads,
@@ -388,6 +380,9 @@ export class DocHandle<T> extends EventEmitter<DocHandleEvents<T>> {
     handle.update(() => A.clone(this.#doc))
     handle.doneLoading()
 
+    // Store in cache
+    this.#viewCache.set(cacheKey, handle)
+
     return handle
   }
 
@@ -484,7 +479,7 @@ export class DocHandle<T> extends EventEmitter<DocHandleEvents<T>> {
   }
 
   /**
-   * Called by the repo
+   * Called by the repo when a doc handle changes or we receive new remote heads.
   * @hidden
   */
   setRemoteHeads(storageId: StorageId, heads: UrlHeads) {
@@ -596,14 +591,15 @@ export class DocHandle<T> extends EventEmitter<DocHandleEvents<T>> {
   }
 
   /**
-   *
+   * Updates the internal state machine to mark the document unavailable.
   * @hidden
   */
   unavailable() {
     this.#machine.send({ type: DOC_UNAVAILABLE })
   }
 
-  /**
+  /**
+   * Called by the repo either when the document is not found in storage.
  * @hidden
  * */
   request() {
@@ -635,7 +631,7 @@ export class DocHandle<T> extends EventEmitter<DocHandleEvents<T>> {
   broadcast(message: unknown) {
     this.emit("ephemeral-message-outbound", {
       handle: this,
-      data: encode(message),
+      data: new Uint8Array(encode(message)),
     })
   }
 
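Because views are now cached by their stringified heads, asking for the same historical view repeatedly returns the same handle instance instead of cloning the document each time — which is what the new tests in `DocHandle.test.ts` further down assert. A small usage sketch (field names are illustrative):

```ts
import { Repo } from "@automerge/automerge-repo"

const repo = new Repo({ network: [] })
const handle = repo.create<{ title: string }>()

handle.change(d => {
  d.title = "first draft"
})
const draftHeads = handle.heads() // heads at this point in history

handle.change(d => {
  d.title = "final"
})

// A view is a read-only handle pinned to the given heads.
const draft = handle.view(draftHeads)
console.log(draft.doc().title) // "first draft"

// With the cache added in this release, the same heads yield the same instance.
console.log(handle.view(draftHeads) === draft) // true
```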
package/src/storage/StorageSubsystem.ts
CHANGED
@@ -6,7 +6,6 @@ import { type DocumentId } from "../types.js"
 import { StorageAdapterInterface } from "./StorageAdapterInterface.js"
 import { ChunkInfo, StorageKey, StorageId } from "./types.js"
 import { keyHash, headsHash } from "./keyHash.js"
-import { chunkTypeFromKey } from "./chunkTypeFromKey.js"
 import * as Uuid from "uuid"
 import { EventEmitter } from "eventemitter3"
 import { encodeHeads } from "../AutomergeUrl.js"
@@ -113,33 +112,63 @@ export class StorageSubsystem extends EventEmitter<StorageSubsystemEvents> {
   // AUTOMERGE DOCUMENT STORAGE
 
   /**
-   * Loads
+   * Loads and combines document chunks from storage, with snapshots first.
   */
-  async
-  // Load
-  const
-
+  async loadDocData(documentId: DocumentId): Promise<Uint8Array | null> {
+    // Load snapshots first
+    const snapshotChunks = await this.#storageAdapter.loadRange([
+      documentId,
+      "snapshot",
+    ])
+    const incrementalChunks = await this.#storageAdapter.loadRange([
+      documentId,
+      "incremental",
+    ])
+
+    const binaries: Uint8Array[] = []
     const chunkInfos: ChunkInfo[] = []
 
-
-
+    // Process snapshots first
+    for (const chunk of snapshotChunks) {
       if (chunk.data === undefined) continue
+      chunkInfos.push({
+        key: chunk.key,
+        type: "snapshot",
+        size: chunk.data.length,
+      })
+      binaries.push(chunk.data)
+    }
 
-
-
-
+    // Then process incrementals
+    for (const chunk of incrementalChunks) {
+      if (chunk.data === undefined) continue
       chunkInfos.push({
         key: chunk.key,
-        type:
+        type: "incremental",
         size: chunk.data.length,
       })
       binaries.push(chunk.data)
     }
+
+    // Store chunk infos for future reference
     this.#chunkInfos.set(documentId, chunkInfos)
 
+    // If no chunks were found, return null
+    if (binaries.length === 0) {
+      return null
+    }
+
     // Merge the chunks into a single binary
-
-
+    return mergeArrays(binaries)
+  }
+
+  /**
+   * Loads the Automerge document with the given ID from storage.
+   */
+  async loadDoc<T>(documentId: DocumentId): Promise<A.Doc<T> | null> {
+    // Load and combine chunks
+    const binary = await this.loadDocData(documentId)
+    if (!binary) return null
 
     // Load into an Automerge document
     const start = performance.now()
@@ -169,6 +198,7 @@ export class StorageSubsystem extends EventEmitter<StorageSubsystemEvents> {
     if (!this.#shouldSave(documentId, doc)) return
 
     const sourceChunks = this.#chunkInfos.get(documentId) ?? []
+
     if (this.#shouldCompact(sourceChunks)) {
       await this.#saveTotal(documentId, doc, sourceChunks)
     } else {
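Taken together, `loadDocData()` concatenates whatever sits in the `[documentId, "snapshot"]` range ahead of the `[documentId, "incremental"]` range, and `loadDoc()` hydrates a document from that merged binary. A rough sketch of exercising the subsystem directly, using the same relative imports as the package's own tests (document contents are illustrative):

```ts
import * as A from "@automerge/automerge"
import { StorageSubsystem } from "../src/storage/StorageSubsystem.js"
import { DummyStorageAdapter } from "../src/helpers/DummyStorageAdapter.js"
import { DocumentId } from "../src/types.js"

async function demo() {
  const storage = new StorageSubsystem(new DummyStorageAdapter())
  const documentId = "example-doc" as DocumentId

  // Build a document with a couple of changes and persist it twice,
  // so storage ends up holding snapshot and/or incremental chunks.
  let doc = A.change(A.init<{ count: number }>(), d => {
    d.count = 1
  })
  await storage.saveDoc(documentId, doc)

  doc = A.change(doc, d => {
    d.count = 2
  })
  await storage.saveDoc(documentId, doc)

  // Raw chunks merged snapshot-first, or null if nothing is stored.
  const binary = await storage.loadDocData(documentId)

  // Hydrate an Automerge document from the merged binary.
  const loaded = await storage.loadDoc<{ count: number }>(documentId)
  console.log(binary?.byteLength, loaded?.count) // some byte length, then 2
}
```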
package/src/synchronizer/DocSynchronizer.ts
CHANGED
@@ -86,7 +86,6 @@ export class DocSynchronizer extends Synchronizer {
 
     // Process pending sync messages immediately after the handle becomes ready.
     void (async () => {
-      await handle.whenReady([READY, REQUESTING])
       this.#processAllPendingSyncMessages()
     })()
   }
@@ -102,9 +101,13 @@ export class DocSynchronizer extends Synchronizer {
   /// PRIVATE
 
   async #syncWithPeers() {
-
-
-
+    try {
+      await this.#handle.whenReady()
+      const doc = this.#handle.doc() // XXX THIS ONE IS WEIRD
+      this.#peers.forEach(peerId => this.#sendSyncMessage(peerId, doc))
+    } catch (e) {
+      console.log("sync with peers threw an exception")
+    }
   }
 
   async #broadcastToPeers({
@@ -231,27 +234,16 @@ export class DocSynchronizer extends Synchronizer {
   }
 
   async beginSync(peerIds: PeerId[]) {
-
-
-
-
-
-
-
-
-      .then(doc => {
-        // we register out peers first, then say that sync has started
+    void this.#handle
+      .whenReady([READY, REQUESTING, UNAVAILABLE])
+      .then(() => {
+        this.#syncStarted = true
+        this.#checkDocUnavailable()
+      })
+      .catch(e => {
+        console.log("caught whenready", e)
         this.#syncStarted = true
         this.#checkDocUnavailable()
-
-        const wasUnavailable = doc === undefined
-        if (wasUnavailable && noPeersWithDocument) {
-          return
-        }
-
-        // If the doc is unavailable we still need a blank document to generate
-        // the sync message from
-        return doc ?? A.init<unknown>()
       })
 
     const peersWithDocument = this.#peers.some(peerId => {
@@ -273,11 +265,28 @@ export class DocSynchronizer extends Synchronizer {
     )
     this.#setSyncState(peerId, reparsedSyncState)
 
-
-
-
-
+    // At this point if we don't have anything in our storage, we need to use an empty doc to sync
+    // with; but we don't want to surface that state to the front end
+    this.#handle
+      .whenReady([READY, REQUESTING, UNAVAILABLE])
+      .then(() => {
+        const doc = this.#handle.isReady()
+          ? this.#handle.doc()
+          : A.init<unknown>()
+
+        const noPeersWithDocument = peerIds.every(
+          peerId =>
+            this.#peerDocumentStatuses[peerId] in ["unavailable", "wants"]
+        )
+
+        const wasUnavailable = doc === undefined
+        if (wasUnavailable && noPeersWithDocument) {
+          return
         }
+
+        // If the doc is unavailable we still need a blank document to generate
+        // the sync message from
+        this.#sendSyncMessage(peerId, doc ?? A.init<unknown>())
       })
       .catch(err => {
         this.#log(`Error loading doc for ${peerId}: ${err}`)
package/test/DocHandle.test.ts
CHANGED
@@ -259,8 +259,6 @@ describe("DocHandle", () => {
     const handle = new DocHandle<TestDoc>(TEST_ID)
     assert.equal(handle.isReady(), false)
 
-    handle.legacyAsyncDoc()
-
     assert(vi.getTimerCount() > timerCount)
 
     // simulate loading from storage
@@ -522,4 +520,71 @@
     assert.deepStrictEqual(decode(data), message)
   })
 })
+
+  it("should cache view handles based on heads", async () => {
+    // Create and setup a document with some data
+    const handle = setup()
+    handle.change(doc => {
+      doc.foo = "Hello"
+    })
+    const heads1 = handle.heads()
+
+    // Make another change to get a different set of heads
+    handle.change(doc => {
+      doc.foo = "Hello, World!"
+    })
+
+    // Create a view at the first set of heads
+    const view1 = handle.view(heads1)
+
+    // Request the same view again
+    const view2 = handle.view(heads1)
+
+    // Verify we got the same handle instance back (cached version)
+    expect(view1).toBe(view2)
+
+    // Verify the contents are correct
+    expect(view1.doc().foo).toBe("Hello")
+
+    // Test with a different set of heads
+    const view3 = handle.view(handle.heads())
+    expect(view3).not.toBe(view1)
+    expect(view3.doc().foo).toBe("Hello, World!")
+  })
+
+  it("should improve performance when requesting the same view multiple times", () => {
+    // Create and setup a document with some data
+    const handle = setup()
+    handle.change(doc => {
+      doc.foo = "Hello"
+    })
+    const heads = handle.heads()
+
+    // First, measure time without cache (first access)
+    const startTimeNoCached = performance.now()
+    const firstView = handle.view(heads)
+    const endTimeNoCached = performance.now()
+
+    // Now measure with cache (subsequent accesses)
+    const startTimeCached = performance.now()
+    for (let i = 0; i < 100; i++) {
+      handle.view(heads)
+    }
+    const endTimeCached = performance.now()
+
+    // Assert that all views are the same instance
+    for (let i = 0; i < 10; i++) {
+      expect(handle.view(heads)).toBe(firstView)
+    }
+
+    // Calculate average times
+    const timeForFirstAccess = endTimeNoCached - startTimeNoCached
+    const timeForCachedAccesses = (endTimeCached - startTimeCached) / 100
+
+    console.log(`Time for first view (no cache): ${timeForFirstAccess}ms`)
+    console.log(`Average time per cached view: ${timeForCachedAccesses}ms`)
+
+    // Cached access should be significantly faster
+    expect(timeForCachedAccesses).toBeLessThan(timeForFirstAccess / 10)
+  })
 })
package/test/Repo.test.ts
CHANGED
@@ -33,6 +33,7 @@ import {
 import { getRandomItem } from "./helpers/getRandomItem.js"
 import { TestDoc } from "./types.js"
 import { StorageId, StorageKey } from "../src/storage/types.js"
+import { chunkTypeFromKey } from "../src/storage/chunkTypeFromKey.js"
 
 describe("Repo", () => {
   describe("constructor", () => {
@@ -255,7 +256,6 @@ describe("Repo", () => {
     await repo.flush()
 
     const bobHandle = await repo2.find<TestDoc>(handle.url)
-    await bobHandle.whenReady()
     assert.deepEqual(bobHandle.doc(), { foo: "saved" })
   })
 
@@ -306,7 +306,6 @@ describe("Repo", () => {
       d.foo = "bar"
     })
     assert.equal(handle.isReady(), true)
-    await handle.whenReady()
 
     await pause()
     repo.delete(handle.url)
@@ -1063,11 +1062,6 @@ describe("Repo", () => {
         : // tails, pick a random doc
           (getRandomItem(docs) as DocHandle<TestDoc>)
 
-      // make sure the doc is ready
-      if (!doc.isReady()) {
-        await doc.whenReady()
-      }
-
       // make a random change to it
       doc.change(d => {
         d.foo = Math.random().toString()
@@ -1240,7 +1234,6 @@ describe("Repo", () => {
     })
 
     const charlieHandle = await charlieRepo.find<TestDoc>(handle.url)
-    await charlieHandle.whenReady()
 
     // make a change on charlie
     charlieHandle.change(d => {
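The dropped `whenReady()` calls reflect that `repo.find()` in these alphas is asynchronous and resolves with a handle whose document is already loaded, so the extra wait adds nothing — the updated tests read `handle.doc()` immediately. A minimal sketch (URL and document shape are illustrative):

```ts
import { Repo, AutomergeUrl } from "@automerge/automerge-repo"

async function readDoc(repo: Repo, url: AutomergeUrl) {
  // find() resolves once the document is available, so no extra whenReady() is needed.
  const handle = await repo.find<{ foo: string }>(url)
  return handle.doc()
}
```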
package/test/StorageSubsystem.test.ts
CHANGED
@@ -4,13 +4,15 @@ import assert from "assert"
 import fs from "fs"
 import os from "os"
 import path from "path"
-import { describe, it } from "vitest"
+import { describe, it, expect } from "vitest"
 import { generateAutomergeUrl, parseAutomergeUrl } from "../src/AutomergeUrl.js"
 import { PeerId, cbor } from "../src/index.js"
 import { StorageSubsystem } from "../src/storage/StorageSubsystem.js"
 import { StorageId } from "../src/storage/types.js"
 import { DummyStorageAdapter } from "../src/helpers/DummyStorageAdapter.js"
 import * as Uuid from "uuid"
+import { chunkTypeFromKey } from "../src/storage/chunkTypeFromKey.js"
+import { DocumentId } from "../src/types.js"
 
 const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "automerge-repo-tests"))
 
@@ -243,6 +245,83 @@ describe("StorageSubsystem", () => {
       assert.strictEqual(id1, id2)
     })
   })
+
+  describe("loadDoc", () => {
+    it("maintains correct document state when loading chunks in order", async () => {
+      const storageAdapter = new DummyStorageAdapter()
+      const storage = new StorageSubsystem(storageAdapter)
+
+      // Create a document with multiple changes
+      const doc = A.init<{ foo: string }>()
+      const doc1 = A.change(doc, d => {
+        d.foo = "first"
+      })
+      const doc2 = A.change(doc1, d => {
+        d.foo = "second"
+      })
+      const doc3 = A.change(doc2, d => {
+        d.foo = "third"
+      })
+
+      // Save the document with multiple changes
+      const documentId = "test-doc" as DocumentId
+      await storage.saveDoc(documentId, doc3)
+
+      // Load the document
+      const loadedDoc = await storage.loadDoc<{ foo: string }>(documentId)
+
+      // Verify the document state is correct
+      expect(loadedDoc?.foo).toBe("third")
+    })
+
+    it("combines chunks with snapshot first", async () => {
+      const storageAdapter = new DummyStorageAdapter()
+      const storage = new StorageSubsystem(storageAdapter)
+
+      // Create a document with multiple changes
+      const doc = A.init<{ foo: string }>()
+      const doc1 = A.change(doc, d => {
+        d.foo = "first"
+      })
+      const doc2 = A.change(doc1, d => {
+        d.foo = Array(10000)
+          .fill(0)
+          .map(() =>
+            String.fromCharCode(Math.floor(Math.random() * 26) + 97)
+          )
+          .join("")
+      })
+
+      // Save the document with multiple changes
+      const documentId = "test-doc" as DocumentId
+      await storage.saveDoc(documentId, doc2)
+
+      const doc3 = A.change(doc2, d => {
+        d.foo = "third"
+      })
+      await storage.saveDoc(documentId, doc3)
+
+      // Load the document
+      const loadedDoc = await storage.loadDoc<{ foo: string }>(documentId)
+
+      // Verify the document state is correct
+      expect(loadedDoc?.foo).toBe(doc3.foo)
+
+      // Get the raw binary data from storage
+      const binary = await storage.loadDocData(documentId)
+      expect(binary).not.toBeNull()
+      if (!binary) return
+
+      // Verify the binary starts with the Automerge magic value
+      expect(binary[0]).toBe(0x85)
+      expect(binary[1]).toBe(0x6f)
+      expect(binary[2]).toBe(0x4a)
+      expect(binary[3]).toBe(0x83)
+
+      // Verify the chunk type is CHUNK_TYPE_DOCUMENT (0x00)
+      expect(binary[8]).toBe(0x00)
+    })
+  })
 })
 }
 })