@automerge/automerge-repo 2.0.0-alpha.23 → 2.0.0-alpha.26
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/DocHandle.d.ts +4 -3
- package/dist/DocHandle.d.ts.map +1 -1
- package/dist/DocHandle.js +15 -3
- package/dist/storage/StorageSubsystem.d.ts +4 -0
- package/dist/storage/StorageSubsystem.d.ts.map +1 -1
- package/dist/storage/StorageSubsystem.js +38 -12
- package/package.json +2 -2
- package/src/DocHandle.ts +20 -3
- package/src/storage/StorageSubsystem.ts +44 -14
- package/test/DocHandle.test.ts +67 -0
- package/test/Repo.test.ts +1 -0
- package/test/StorageSubsystem.test.ts +80 -1
package/dist/DocHandle.d.ts
CHANGED
@@ -150,7 +150,7 @@ export declare class DocHandle<T> extends EventEmitter<DocHandleEvents<T>> {
      */
     doneLoading(): void;
     /**
-     * Called by the repo
+     * Called by the repo when a doc handle changes or we receive new remote heads.
      * @hidden
      */
    setRemoteHeads(storageId: StorageId, heads: UrlHeads): void;
@@ -190,11 +190,12 @@ export declare class DocHandle<T> extends EventEmitter<DocHandleEvents<T>> {
    /** the handle of the document to merge into this one */
    otherHandle: DocHandle<T>): void;
    /**
-     *
+     * Updates the internal state machine to mark the document unavailable.
     * @hidden
     */
    unavailable(): void;
-    /**
+    /**
+     * Called by the repo either when the document is not found in storage.
     * @hidden
     * */
    request(): void;
package/dist/DocHandle.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"DocHandle.d.ts","sourceRoot":"","sources":["../src/DocHandle.ts"], ...}
+{"version":3,"file":"DocHandle.d.ts","sourceRoot":"","sources":["../src/DocHandle.ts"], ...}
(regenerated single-line sourcemap; the base64 VLQ "mappings" string is omitted here)
package/dist/DocHandle.js
CHANGED
@@ -33,6 +33,8 @@ export class DocHandle extends EventEmitter {
     #timeoutDelay = 60_000;
     /** A dictionary mapping each peer to the last heads we know they have. */
     #remoteHeads = {};
+    /** Cache for view handles, keyed by the stringified heads */
+    #viewCache = new Map();
     /** @hidden */
     constructor(documentId, options = {}) {
         super();
@@ -302,6 +304,13 @@ export class DocHandle extends EventEmitter {
         if (!this.isReady()) {
             throw new Error(`DocHandle#${this.documentId} is not ready. Check \`handle.isReady()\` before calling view().`);
         }
+        // Create a cache key from the heads
+        const cacheKey = JSON.stringify(heads);
+        // Check if we have a cached handle for these heads
+        const cachedHandle = this.#viewCache.get(cacheKey);
+        if (cachedHandle) {
+            return cachedHandle;
+        }
         // Create a new handle with the same documentId but fixed heads
         const handle = new DocHandle(this.documentId, {
             heads,
@@ -309,6 +318,8 @@ export class DocHandle extends EventEmitter {
         });
         handle.update(() => A.clone(this.#doc));
         handle.doneLoading();
+        // Store in cache
+        this.#viewCache.set(cacheKey, handle);
         return handle;
     }
     /**
@@ -389,7 +400,7 @@ export class DocHandle extends EventEmitter {
         this.#machine.send({ type: DOC_READY });
     }
     /**
-     * Called by the repo
+     * Called by the repo when a doc handle changes or we receive new remote heads.
      * @hidden
      */
     setRemoteHeads(storageId, heads) {
@@ -478,13 +489,14 @@ export class DocHandle extends EventEmitter {
         });
     }
     /**
-     *
+     * Updates the internal state machine to mark the document unavailable.
      * @hidden
      */
     unavailable() {
         this.#machine.send({ type: DOC_UNAVAILABLE });
     }
-    /**
+    /**
+     * Called by the repo either when the document is not found in storage.
      * @hidden
      * */
    request() {
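The upshot of the new #viewCache is that repeated calls to view() with the same heads return the same handle instance instead of cloning the document each time. A minimal sketch of that behavior, assuming an in-memory Repo with no network adapters; the { count: number } document shape is illustrative, not from the package:

// Sketch: view() now memoizes per-heads handles (behavior per this diff).
import { Repo } from "@automerge/automerge-repo"

const repo = new Repo({ network: [] })
const handle = repo.create<{ count: number }>()

handle.change(d => { d.count = 1 })
const heads = handle.heads()! // heads after the first change
handle.change(d => { d.count = 2 })

const v1 = handle.view(heads)
const v2 = handle.view(heads) // same serialized heads -> cache hit
console.log(v1 === v2)        // true: the cached DocHandle is returned
console.log(v1.doc().count)   // 1: the view stays pinned to the old heads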
package/dist/storage/StorageSubsystem.d.ts
CHANGED
@@ -39,6 +39,10 @@ export declare class StorageSubsystem extends EventEmitter<StorageSubsystemEvents> {
    namespace: string,
    /** Key to remove. Typically a UUID or other unique identifier, but could be any string. */
    key: string): Promise<void>;
+    /**
+     * Loads and combines document chunks from storage, with snapshots first.
+     */
+    loadDocData(documentId: DocumentId): Promise<Uint8Array | null>;
    /**
     * Loads the Automerge document with the given ID from storage.
     */
package/dist/storage/StorageSubsystem.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"StorageSubsystem.d.ts","sourceRoot":"","sources":["../../src/storage/StorageSubsystem.ts"], ...}
+{"version":3,"file":"StorageSubsystem.d.ts","sourceRoot":"","sources":["../../src/storage/StorageSubsystem.ts"], ...}
(regenerated single-line sourcemap; the base64 VLQ "mappings" string is omitted here)
package/dist/storage/StorageSubsystem.js
CHANGED
@@ -3,7 +3,6 @@ import debug from "debug";
 import { headsAreSame } from "../helpers/headsAreSame.js";
 import { mergeArrays } from "../helpers/mergeArrays.js";
 import { keyHash, headsHash } from "./keyHash.js";
-import { chunkTypeFromKey } from "./chunkTypeFromKey.js";
 import * as Uuid from "uuid";
 import { EventEmitter } from "eventemitter3";
 import { encodeHeads } from "../AutomergeUrl.js";
@@ -76,31 +75,58 @@ export class StorageSubsystem extends EventEmitter {
     }
     // AUTOMERGE DOCUMENT STORAGE
     /**
-     * Loads
+     * Loads and combines document chunks from storage, with snapshots first.
      */
-    async
-    // Load
-    const
+    async loadDocData(documentId) {
+        // Load snapshots first
+        const snapshotChunks = await this.#storageAdapter.loadRange([
+            documentId,
+            "snapshot",
+        ]);
+        const incrementalChunks = await this.#storageAdapter.loadRange([
+            documentId,
+            "incremental",
+        ]);
         const binaries = [];
         const chunkInfos = [];
-
-
+        // Process snapshots first
+        for (const chunk of snapshotChunks) {
             if (chunk.data === undefined)
                 continue;
-
-
+            chunkInfos.push({
+                key: chunk.key,
+                type: "snapshot",
+                size: chunk.data.length,
+            });
+            binaries.push(chunk.data);
+        }
+        // Then process incrementals
+        for (const chunk of incrementalChunks) {
+            if (chunk.data === undefined)
                 continue;
             chunkInfos.push({
                 key: chunk.key,
-                type:
+                type: "incremental",
                 size: chunk.data.length,
             });
             binaries.push(chunk.data);
         }
+        // Store chunk infos for future reference
         this.#chunkInfos.set(documentId, chunkInfos);
+        // If no chunks were found, return null
+        if (binaries.length === 0) {
+            return null;
+        }
         // Merge the chunks into a single binary
-
-
+        return mergeArrays(binaries);
+    }
+    /**
+     * Loads the Automerge document with the given ID from storage.
+     */
+    async loadDoc(documentId) {
+        // Load and combine chunks
+        const binary = await this.loadDocData(documentId);
+        if (!binary)
             return null;
         // Load into an Automerge document
         const start = performance.now();
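The substance of this change is the load order: snapshot chunks are now concatenated before incremental chunks, so Automerge sees the full-document chunk before the changes layered on top of it. A rough self-contained sketch of that ordering, assuming a loadRange-style adapter as in the diff above (the helper below is illustrative, not the package's mergeArrays):

// Illustrative sketch of snapshot-before-incremental chunk combination.
type Chunk = { key: string[]; data?: Uint8Array }
type LoadRange = (keyPrefix: string[]) => Promise<Chunk[]>

async function combineChunks(
  loadRange: LoadRange,
  documentId: string
): Promise<Uint8Array | null> {
  // Snapshots first, then incrementals -- the order the loader now guarantees.
  const chunks = [
    ...(await loadRange([documentId, "snapshot"])),
    ...(await loadRange([documentId, "incremental"])),
  ]
  const binaries = chunks.flatMap(c => (c.data ? [c.data] : []))
  if (binaries.length === 0) return null

  // Concatenate into a single binary (what mergeArrays does in the package).
  const out = new Uint8Array(binaries.reduce((n, b) => n + b.length, 0))
  let offset = 0
  for (const b of binaries) {
    out.set(b, offset)
    offset += b.length
  }
  return out
}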
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@automerge/automerge-repo",
-  "version": "2.0.0-alpha.23",
+  "version": "2.0.0-alpha.26",
   "description": "A repository object to manage a collection of automerge documents",
   "repository": "https://github.com/automerge/automerge-repo/tree/master/packages/automerge-repo",
   "author": "Peter van Hardenberg <pvh@pvh.ca>",
@@ -59,5 +59,5 @@
   "publishConfig": {
     "access": "public"
   },
-  "gitHead": "
+  "gitHead": "3d3e0f6fb267080a5bd8d12315efb85b8a2ada70"
 }
package/src/DocHandle.ts
CHANGED
@@ -45,6 +45,9 @@ export class DocHandle<T> extends EventEmitter<DocHandleEvents<T>> {
   /** A dictionary mapping each peer to the last heads we know they have. */
   #remoteHeads: Record<StorageId, UrlHeads> = {}

+  /** Cache for view handles, keyed by the stringified heads */
+  #viewCache: Map<string, DocHandle<T>> = new Map()
+
   /** @hidden */
   constructor(
     public documentId: DocumentId,
@@ -359,6 +362,16 @@ export class DocHandle<T> extends EventEmitter<DocHandleEvents<T>> {
         `DocHandle#${this.documentId} is not ready. Check \`handle.isReady()\` before calling view().`
       )
     }
+
+    // Create a cache key from the heads
+    const cacheKey = JSON.stringify(heads)
+
+    // Check if we have a cached handle for these heads
+    const cachedHandle = this.#viewCache.get(cacheKey)
+    if (cachedHandle) {
+      return cachedHandle
+    }
+
     // Create a new handle with the same documentId but fixed heads
     const handle = new DocHandle<T>(this.documentId, {
       heads,
@@ -367,6 +380,9 @@ export class DocHandle<T> extends EventEmitter<DocHandleEvents<T>> {
     handle.update(() => A.clone(this.#doc))
     handle.doneLoading()

+    // Store in cache
+    this.#viewCache.set(cacheKey, handle)
+
     return handle
   }

@@ -463,7 +479,7 @@ export class DocHandle<T> extends EventEmitter<DocHandleEvents<T>> {
   }

   /**
-   * Called by the repo
+   * Called by the repo when a doc handle changes or we receive new remote heads.
    * @hidden
    */
   setRemoteHeads(storageId: StorageId, heads: UrlHeads) {
@@ -575,14 +591,15 @@ export class DocHandle<T> extends EventEmitter<DocHandleEvents<T>> {
   }

   /**
-   *
+   * Updates the internal state machine to mark the document unavailable.
    * @hidden
    */
   unavailable() {
     this.#machine.send({ type: DOC_UNAVAILABLE })
   }

-  /**
+  /**
+   * Called by the repo either when the document is not found in storage.
    * @hidden
    * */
   request() {
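Note that the cache key is just JSON.stringify(heads), so a hit requires the same head hashes in the same order; there is no normalization. A small sketch of that consequence (the hash strings are made up):

// The view cache keys on the serialized heads array, so element order matters.
const headsA = ["hash-abc", "hash-def"]
const headsB = ["hash-abc", "hash-def"]
const headsC = ["hash-def", "hash-abc"]

console.log(JSON.stringify(headsA) === JSON.stringify(headsB)) // true  -> cache hit
console.log(JSON.stringify(headsA) === JSON.stringify(headsC)) // false -> cache miss, new clone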
package/src/storage/StorageSubsystem.ts
CHANGED
@@ -6,7 +6,6 @@ import { type DocumentId } from "../types.js"
 import { StorageAdapterInterface } from "./StorageAdapterInterface.js"
 import { ChunkInfo, StorageKey, StorageId } from "./types.js"
 import { keyHash, headsHash } from "./keyHash.js"
-import { chunkTypeFromKey } from "./chunkTypeFromKey.js"
 import * as Uuid from "uuid"
 import { EventEmitter } from "eventemitter3"
 import { encodeHeads } from "../AutomergeUrl.js"
@@ -113,33 +112,63 @@ export class StorageSubsystem extends EventEmitter<StorageSubsystemEvents> {
   // AUTOMERGE DOCUMENT STORAGE

   /**
-   * Loads
+   * Loads and combines document chunks from storage, with snapshots first.
    */
-  async
-  // Load
-  const
-
+  async loadDocData(documentId: DocumentId): Promise<Uint8Array | null> {
+    // Load snapshots first
+    const snapshotChunks = await this.#storageAdapter.loadRange([
+      documentId,
+      "snapshot",
+    ])
+    const incrementalChunks = await this.#storageAdapter.loadRange([
+      documentId,
+      "incremental",
+    ])
+
+    const binaries: Uint8Array[] = []
     const chunkInfos: ChunkInfo[] = []

-
-
+    // Process snapshots first
+    for (const chunk of snapshotChunks) {
       if (chunk.data === undefined) continue
+      chunkInfos.push({
+        key: chunk.key,
+        type: "snapshot",
+        size: chunk.data.length,
+      })
+      binaries.push(chunk.data)
+    }

-
-
-
+    // Then process incrementals
+    for (const chunk of incrementalChunks) {
+      if (chunk.data === undefined) continue
       chunkInfos.push({
         key: chunk.key,
-        type:
+        type: "incremental",
         size: chunk.data.length,
       })
       binaries.push(chunk.data)
     }
+
+    // Store chunk infos for future reference
     this.#chunkInfos.set(documentId, chunkInfos)

+    // If no chunks were found, return null
+    if (binaries.length === 0) {
+      return null
+    }
+
     // Merge the chunks into a single binary
-
-
+    return mergeArrays(binaries)
+  }
+
+  /**
+   * Loads the Automerge document with the given ID from storage.
+   */
+  async loadDoc<T>(documentId: DocumentId): Promise<A.Doc<T> | null> {
+    // Load and combine chunks
+    const binary = await this.loadDocData(documentId)
+    if (!binary) return null

     // Load into an Automerge document
     const start = performance.now()
@@ -169,6 +198,7 @@ export class StorageSubsystem extends EventEmitter<StorageSubsystemEvents> {
     if (!this.#shouldSave(documentId, doc)) return

     const sourceChunks = this.#chunkInfos.get(documentId) ?? []
+
     if (this.#shouldCompact(sourceChunks)) {
       await this.#saveTotal(documentId, doc, sourceChunks)
     } else {
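Since loadDocData is now part of the public surface (see the .d.ts change above), callers can inspect the combined binary without hydrating a document. A sketch using the same imports as the package's own test suite (paths as they appear in the tests; "example-doc" is an arbitrary ID, and the top-level await assumes a module context):

// Sketch: raw combined-binary access via the new loadDocData.
import * as A from "@automerge/automerge"
import { StorageSubsystem } from "../src/storage/StorageSubsystem.js"
import { DummyStorageAdapter } from "../src/helpers/DummyStorageAdapter.js"
import { DocumentId } from "../src/types.js"

const storage = new StorageSubsystem(new DummyStorageAdapter())
const documentId = "example-doc" as DocumentId

const doc = A.change(A.init<{ n: number }>(), d => { d.n = 1 })
await storage.saveDoc(documentId, doc)

// Combined binary, snapshot chunks first (null if nothing is stored)
const binary = await storage.loadDocData(documentId)
// Hydrated document built from that same binary
const loaded = await storage.loadDoc<{ n: number }>(documentId)
console.log(binary?.byteLength, loaded?.n)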
package/test/DocHandle.test.ts
CHANGED
@@ -520,4 +520,71 @@ describe("DocHandle", () => {
     assert.deepStrictEqual(decode(data), message)
   })
 })
+
+  it("should cache view handles based on heads", async () => {
+    // Create and setup a document with some data
+    const handle = setup()
+    handle.change(doc => {
+      doc.foo = "Hello"
+    })
+    const heads1 = handle.heads()
+
+    // Make another change to get a different set of heads
+    handle.change(doc => {
+      doc.foo = "Hello, World!"
+    })
+
+    // Create a view at the first set of heads
+    const view1 = handle.view(heads1)
+
+    // Request the same view again
+    const view2 = handle.view(heads1)
+
+    // Verify we got the same handle instance back (cached version)
+    expect(view1).toBe(view2)
+
+    // Verify the contents are correct
+    expect(view1.doc().foo).toBe("Hello")
+
+    // Test with a different set of heads
+    const view3 = handle.view(handle.heads())
+    expect(view3).not.toBe(view1)
+    expect(view3.doc().foo).toBe("Hello, World!")
+  })
+
+  it("should improve performance when requesting the same view multiple times", () => {
+    // Create and setup a document with some data
+    const handle = setup()
+    handle.change(doc => {
+      doc.foo = "Hello"
+    })
+    const heads = handle.heads()
+
+    // First, measure time without cache (first access)
+    const startTimeNoCached = performance.now()
+    const firstView = handle.view(heads)
+    const endTimeNoCached = performance.now()
+
+    // Now measure with cache (subsequent accesses)
+    const startTimeCached = performance.now()
+    for (let i = 0; i < 100; i++) {
+      handle.view(heads)
+    }
+    const endTimeCached = performance.now()
+
+    // Assert that all views are the same instance
+    for (let i = 0; i < 10; i++) {
+      expect(handle.view(heads)).toBe(firstView)
+    }
+
+    // Calculate average times
+    const timeForFirstAccess = endTimeNoCached - startTimeNoCached
+    const timeForCachedAccesses = (endTimeCached - startTimeCached) / 100
+
+    console.log(`Time for first view (no cache): ${timeForFirstAccess}ms`)
+    console.log(`Average time per cached view: ${timeForCachedAccesses}ms`)
+
+    // Cached access should be significantly faster
+    expect(timeForCachedAccesses).toBeLessThan(timeForFirstAccess / 10)
+  })
 })
package/test/Repo.test.ts
CHANGED
@@ -33,6 +33,7 @@ import {
 import { getRandomItem } from "./helpers/getRandomItem.js"
 import { TestDoc } from "./types.js"
 import { StorageId, StorageKey } from "../src/storage/types.js"
+import { chunkTypeFromKey } from "../src/storage/chunkTypeFromKey.js"

 describe("Repo", () => {
   describe("constructor", () => {
package/test/StorageSubsystem.test.ts
CHANGED
@@ -4,13 +4,15 @@ import assert from "assert"
 import fs from "fs"
 import os from "os"
 import path from "path"
-import { describe, it } from "vitest"
+import { describe, it, expect } from "vitest"
 import { generateAutomergeUrl, parseAutomergeUrl } from "../src/AutomergeUrl.js"
 import { PeerId, cbor } from "../src/index.js"
 import { StorageSubsystem } from "../src/storage/StorageSubsystem.js"
 import { StorageId } from "../src/storage/types.js"
 import { DummyStorageAdapter } from "../src/helpers/DummyStorageAdapter.js"
 import * as Uuid from "uuid"
+import { chunkTypeFromKey } from "../src/storage/chunkTypeFromKey.js"
+import { DocumentId } from "../src/types.js"

 const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "automerge-repo-tests"))

@@ -243,6 +245,83 @@ describe("StorageSubsystem", () => {
     assert.strictEqual(id1, id2)
   })
 })
+
+  describe("loadDoc", () => {
+    it("maintains correct document state when loading chunks in order", async () => {
+      const storageAdapter = new DummyStorageAdapter()
+      const storage = new StorageSubsystem(storageAdapter)
+
+      // Create a document with multiple changes
+      const doc = A.init<{ foo: string }>()
+      const doc1 = A.change(doc, d => {
+        d.foo = "first"
+      })
+      const doc2 = A.change(doc1, d => {
+        d.foo = "second"
+      })
+      const doc3 = A.change(doc2, d => {
+        d.foo = "third"
+      })
+
+      // Save the document with multiple changes
+      const documentId = "test-doc" as DocumentId
+      await storage.saveDoc(documentId, doc3)
+
+      // Load the document
+      const loadedDoc = await storage.loadDoc<{ foo: string }>(documentId)
+
+      // Verify the document state is correct
+      expect(loadedDoc?.foo).toBe("third")
+    })
+
+    it("combines chunks with snapshot first", async () => {
+      const storageAdapter = new DummyStorageAdapter()
+      const storage = new StorageSubsystem(storageAdapter)
+
+      // Create a document with multiple changes
+      const doc = A.init<{ foo: string }>()
+      const doc1 = A.change(doc, d => {
+        d.foo = "first"
+      })
+      const doc2 = A.change(doc1, d => {
+        d.foo = Array(10000)
+          .fill(0)
+          .map(() =>
+            String.fromCharCode(Math.floor(Math.random() * 26) + 97)
+          )
+          .join("")
+      })
+
+      // Save the document with multiple changes
+      const documentId = "test-doc" as DocumentId
+      await storage.saveDoc(documentId, doc2)
+
+      const doc3 = A.change(doc2, d => {
+        d.foo = "third"
+      })
+      await storage.saveDoc(documentId, doc3)
+
+      // Load the document
+      const loadedDoc = await storage.loadDoc<{ foo: string }>(documentId)
+
+      // Verify the document state is correct
+      expect(loadedDoc?.foo).toBe(doc3.foo)
+
+      // Get the raw binary data from storage
+      const binary = await storage.loadDocData(documentId)
+      expect(binary).not.toBeNull()
+      if (!binary) return
+
+      // Verify the binary starts with the Automerge magic value
+      expect(binary[0]).toBe(0x85)
+      expect(binary[1]).toBe(0x6f)
+      expect(binary[2]).toBe(0x4a)
+      expect(binary[3]).toBe(0x83)
+
+      // Verify the chunk type is CHUNK_TYPE_DOCUMENT (0x00)
+      expect(binary[8]).toBe(0x00)
+    })
+  })
 })
 }
})
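For context on those byte assertions: in the Automerge binary format, a chunk begins with the four magic bytes 0x85 0x6f 0x4a 0x83, followed by a 4-byte checksum and then a chunk-type byte, where 0x00 marks a full document chunk. A small helper sketch of what the test checks (the function name is illustrative, not from the package):

// Sketch: the shape of the test's magic-byte and chunk-type assertions.
function looksLikeAutomergeDocChunk(binary: Uint8Array): boolean {
  const magic = [0x85, 0x6f, 0x4a, 0x83]
  // 4 magic bytes + 4 checksum bytes put the chunk-type byte at index 8.
  return magic.every((b, i) => binary[i] === b) && binary[8] === 0x00
}

This is why the snapshot-first ordering matters: the combined binary returned by loadDocData should begin with a document (snapshot) chunk rather than an incremental change chunk.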