@automerge/automerge-repo 1.0.0-alpha.5 → 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.eslintrc +2 -2
- package/dist/DocHandle.d.ts +5 -1
- package/dist/DocHandle.d.ts.map +1 -1
- package/dist/DocHandle.js +9 -1
- package/dist/Repo.d.ts +38 -4
- package/dist/Repo.d.ts.map +1 -1
- package/dist/Repo.js +95 -3
- package/dist/index.d.ts +0 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +0 -1
- package/dist/synchronizer/CollectionSynchronizer.d.ts +2 -2
- package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
- package/package.json +3 -3
- package/src/DocHandle.ts +10 -2
- package/src/Repo.ts +130 -4
- package/src/index.ts +0 -1
- package/src/synchronizer/CollectionSynchronizer.ts +2 -2
- package/test/CollectionSynchronizer.test.ts +12 -11
- package/test/DocHandle.test.ts +2 -1
- package/test/Repo.test.ts +1 -0
- package/dist/DocCollection.d.ts +0 -46
- package/dist/DocCollection.d.ts.map +0 -1
- package/dist/DocCollection.js +0 -102
- package/src/DocCollection.ts +0 -144
- package/test/DocCollection.test.ts +0 -20
package/.eslintrc CHANGED
@@ -8,7 +8,7 @@
     "plugin:@typescript-eslint/eslint-recommended",
     "plugin:@typescript-eslint/recommended"
   ],
-  "ignorePatterns": ["dist/**"],
+  "ignorePatterns": ["dist/**", "test/**", "node_modules/**"],
   "parser": "@typescript-eslint/parser",
   "plugins": ["@typescript-eslint", "mocha"],
   "parserOptions": {
@@ -21,7 +21,7 @@
   "import/extensions": 0,
   "lines-between-class-members": 0,
   "@typescript-eslint/no-floating-promises": "error",
-  "@typescript-eslint/no-empty-function": ["warn", { "allow": ["methods"]}],
+  "@typescript-eslint/no-empty-function": ["warn", { "allow": ["methods"] }],
   "no-param-reassign": 0,
   "no-use-before-define": 0
 }

package/dist/DocHandle.d.ts CHANGED
@@ -55,7 +55,11 @@ export declare class DocHandle<T>
     update(callback: (doc: A.Doc<T>) => A.Doc<T>): void;
     /** `change` is called by the repo when the document is changed locally */
     change(callback: A.ChangeFn<T>, options?: A.ChangeOptions<T>): void;
-
+    /** Make a change as if the document were at `heads`
+     *
+     * @returns A set of heads representing the concurrent change that was made.
+     */
+    changeAt(heads: A.Heads, callback: A.ChangeFn<T>, options?: A.ChangeOptions<T>): string[] | undefined;
     unavailable(): void;
     /** `request` is called by the repo when the document is not found in storage */
     request(): void;

package/dist/DocHandle.d.ts.map CHANGED
@@ -1 +1 @@
(regenerated source map for DocHandle.d.ts; single-line mappings blob omitted)

package/dist/DocHandle.js CHANGED
@@ -269,17 +269,25 @@ export class DocHandle
         },
       });
     }
+    /** Make a change as if the document were at `heads`
+     *
+     * @returns A set of heads representing the concurrent change that was made.
+     */
     changeAt(heads, callback, options = {}) {
         if (!this.isReady()) {
             throw new Error(`DocHandle#${this.documentId} is not ready. Check \`handle.isReady()\` before accessing the document.`);
         }
+        let resultHeads = undefined;
         this.#machine.send(UPDATE, {
             payload: {
                 callback: (doc) => {
-
+                    const result = A.changeAt(doc, heads, options, callback);
+                    resultHeads = result.newHeads;
+                    return result.newDoc;
                 },
             },
         });
+        return resultHeads;
     }
     unavailable() {
         this.#machine.send(MARK_UNAVAILABLE);

package/dist/Repo.d.ts CHANGED
@@ -1,15 +1,36 @@
-import { DocCollection } from "./DocCollection.js";
 import { NetworkAdapter } from "./network/NetworkAdapter.js";
 import { NetworkSubsystem } from "./network/NetworkSubsystem.js";
 import { StorageAdapter } from "./storage/StorageAdapter.js";
 import { StorageSubsystem } from "./storage/StorageSubsystem.js";
-import { DocumentId, PeerId } from "./types.js";
-
-
+import { type AutomergeUrl, DocumentId, PeerId } from "./types.js";
+import { DocHandle } from "./DocHandle.js";
+import { EventEmitter } from "eventemitter3";
+/** A Repo is a collection of documents with networking, syncing, and storage capabilities. */
+export declare class Repo extends EventEmitter<DocCollectionEvents> {
     #private;
     networkSubsystem: NetworkSubsystem;
     storageSubsystem?: StorageSubsystem;
+    /** By default, we share generously with all peers. */
+    sharePolicy: SharePolicy;
     constructor({ storage, network, peerId, sharePolicy }: RepoConfig);
+    /** Returns all the handles we have cached. */
+    get handles(): Record<DocumentId, DocHandle<any>>;
+    /**
+     * Creates a new document and returns a handle to it. The initial value of the document is
+     * an empty object `{}`. Its documentId is generated by the system. we emit a `document` event
+     * to advertise interest in the document.
+     */
+    create<T>(): DocHandle<T>;
+    /**
+     * Retrieves a document by id. It gets data from the local system, but also emits a `document`
+     * event to advertise interest in the document.
+     */
+    find<T>(
+    /** The documentId of the handle to retrieve */
+    automergeUrl: AutomergeUrl): DocHandle<T>;
+    delete(
+    /** The documentId of the handle to delete */
+    id: DocumentId | AutomergeUrl): void;
 }
 export interface RepoConfig {
     /** Our unique identifier */
@@ -25,4 +46,17 @@ export interface RepoConfig {
     sharePolicy?: SharePolicy;
 }
 export type SharePolicy = (peerId: PeerId, documentId?: DocumentId) => Promise<boolean>;
+interface DocCollectionEvents {
+    document: (arg: DocumentPayload) => void;
+    "delete-document": (arg: DeleteDocumentPayload) => void;
+    "unavailable-document": (arg: DeleteDocumentPayload) => void;
+}
+interface DocumentPayload {
+    handle: DocHandle<any>;
+    isNew: boolean;
+}
+interface DeleteDocumentPayload {
+    documentId: DocumentId;
+}
+export {};
 //# sourceMappingURL=Repo.d.ts.map

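Because Repo now extends EventEmitter<DocCollectionEvents> itself, the collection-level events that previously came from DocCollection ("document", "delete-document", "unavailable-document") are emitted directly by the repo. A minimal sketch of subscribing to them, assuming an in-memory repo with no adapters wired up (the empty network array mirrors the test setup later in this diff):

import { Repo } from "@automerge/automerge-repo"

const repo = new Repo({ network: [] })

// "document" fires from both create() and find(); the payload is { handle, isNew }
repo.on("document", ({ handle, isNew }) => {
  console.log(`advertising interest in ${handle.documentId} (new: ${isNew})`)
})

// "delete-document" fires after delete() removes a handle from the cache
repo.on("delete-document", ({ documentId }) => {
  console.log(`deleted ${documentId}`)
})
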
package/dist/Repo.d.ts.map CHANGED
@@ -1 +1 @@
(regenerated source map for Repo.d.ts; single-line mappings blob omitted)

package/dist/Repo.js CHANGED
@@ -1,13 +1,18 @@
 import debug from "debug";
-import { DocCollection } from "./DocCollection.js";
 import { NetworkSubsystem } from "./network/NetworkSubsystem.js";
 import { StorageSubsystem } from "./storage/StorageSubsystem.js";
 import { CollectionSynchronizer } from "./synchronizer/CollectionSynchronizer.js";
-
-
+import { parseAutomergeUrl, generateAutomergeUrl, isValidAutomergeUrl } from "./DocUrl.js";
+import { DocHandle } from "./DocHandle.js";
+import { EventEmitter } from "eventemitter3";
+/** A Repo is a collection of documents with networking, syncing, and storage capabilities. */
+export class Repo extends EventEmitter {
     #log;
     networkSubsystem;
     storageSubsystem;
+    #handleCache = {};
+    /** By default, we share generously with all peers. */
+    sharePolicy = async () => true;
     constructor({ storage, network, peerId, sharePolicy }) {
         super();
         this.#log = debug(`automerge-repo:repo`);
@@ -92,4 +97,91 @@ export class Repo extends DocCollection {
             await synchronizer.receiveMessage(msg);
         });
     }
+    /** Returns an existing handle if we have it; creates one otherwise. */
+    #getHandle(
+    /** The documentId of the handle to look up or create */
+    documentId,
+    /** If we know we're creating a new document, specify this so we can have access to it immediately */
+    isNew) {
+        // If we have the handle cached, return it
+        if (this.#handleCache[documentId])
+            return this.#handleCache[documentId];
+        // If not, create a new handle, cache it, and return it
+        if (!documentId)
+            throw new Error(`Invalid documentId ${documentId}`);
+        const handle = new DocHandle(documentId, { isNew });
+        this.#handleCache[documentId] = handle;
+        return handle;
+    }
+    /** Returns all the handles we have cached. */
+    get handles() {
+        return this.#handleCache;
+    }
+    /**
+     * Creates a new document and returns a handle to it. The initial value of the document is
+     * an empty object `{}`. Its documentId is generated by the system. we emit a `document` event
+     * to advertise interest in the document.
+     */
+    create() {
+        // TODO:
+        // either
+        // - pass an initial value and do something like this to ensure that you get a valid initial value
+        // const myInitialValue = {
+        //   tasks: [],
+        //   filter: "all",
+        //
+        // const guaranteeInitialValue = (doc: any) => {
+        //   if (!doc.tasks) doc.tasks = []
+        //   if (!doc.filter) doc.filter = "all"
+        //   return { ...myInitialValue, ...doc }
+        // }
+        // or
+        // - pass a "reify" function that takes a `<any>` and returns `<T>`
+        // Generate a new UUID and store it in the buffer
+        const { documentId } = parseAutomergeUrl(generateAutomergeUrl());
+        const handle = this.#getHandle(documentId, true);
+        this.emit("document", { handle, isNew: true });
+        return handle;
+    }
+    /**
+     * Retrieves a document by id. It gets data from the local system, but also emits a `document`
+     * event to advertise interest in the document.
+     */
+    find(
+    /** The documentId of the handle to retrieve */
+    automergeUrl) {
+        if (!isValidAutomergeUrl(automergeUrl)) {
+            throw new Error(`Invalid AutomergeUrl: '${automergeUrl}'`);
+        }
+        const { documentId } = parseAutomergeUrl(automergeUrl);
+        // If we have the handle cached, return it
+        if (this.#handleCache[documentId]) {
+            if (this.#handleCache[documentId].isUnavailable()) {
+                // this ensures that the event fires after the handle has been returned
+                setTimeout(() => {
+                    this.#handleCache[documentId].emit("unavailable", {
+                        handle: this.#handleCache[documentId],
+                    });
+                });
+            }
+            return this.#handleCache[documentId];
+        }
+        const handle = this.#getHandle(documentId, false);
+        this.emit("document", { handle, isNew: false });
+        return handle;
+    }
+    delete(
+    /** The documentId of the handle to delete */
+    id) {
+        if (isValidAutomergeUrl(id)) {
+            ;
+            ({ documentId: id } = parseAutomergeUrl(id));
+        }
+        const handle = this.#getHandle(id, false);
+        handle.delete();
+        delete this.#handleCache[id];
+        this.emit("delete-document", {
+            documentId: id,
+        });
+    }
 }

package/dist/index.d.ts CHANGED
package/dist/index.d.ts.map CHANGED
@@ -1 +1 @@
(regenerated source map for index.d.ts; single-line mappings blob omitted)

package/dist/index.js CHANGED

package/dist/synchronizer/CollectionSynchronizer.d.ts CHANGED
@@ -1,4 +1,4 @@
-import {
+import { Repo } from "../Repo.js";
 import { PeerId, DocumentId } from "../types.js";
 import { Synchronizer } from "./Synchronizer.js";
 import { SynchronizerMessage } from "../network/messages.js";
@@ -6,7 +6,7 @@ import { SynchronizerMessage } from "../network/messages.js";
 export declare class CollectionSynchronizer extends Synchronizer {
     #private;
     private repo;
-    constructor(repo:
+    constructor(repo: Repo);
     /**
      * When we receive a sync message for a document we haven't got in memory, we
      * register it with the repo and start synchronizing

package/dist/synchronizer/CollectionSynchronizer.d.ts.map CHANGED
@@ -1 +1 @@
(regenerated source map for CollectionSynchronizer.d.ts; single-line mappings blob omitted)

package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@automerge/automerge-repo",
-  "version": "1.0.0
+  "version": "1.0.0",
   "description": "A repository object to manage a collection of automerge documents",
   "repository": "https://github.com/automerge/automerge-repo",
   "author": "Peter van Hardenberg <pvh@pvh.ca>",
@@ -31,7 +31,7 @@
     "typescript": "^5.1.6"
   },
   "peerDependencies": {
-    "@automerge/automerge": "^2.1.0
+    "@automerge/automerge": "^2.1.0"
   },
   "dependencies": {
     "bs58check": "^3.0.1",
@@ -65,5 +65,5 @@
   "publishConfig": {
     "access": "public"
   },
-  "gitHead": "
+  "gitHead": "89c6ee2714d43dbebd69c7a62f148d17869608d6"
 }

package/src/DocHandle.ts CHANGED
@@ -326,23 +326,31 @@ export class DocHandle<T> //
     })
   }

+  /** Make a change as if the document were at `heads`
+   *
+   * @returns A set of heads representing the concurrent change that was made.
+   */
   changeAt(
     heads: A.Heads,
     callback: A.ChangeFn<T>,
     options: A.ChangeOptions<T> = {}
-  ) {
+  ): string[] | undefined {
     if (!this.isReady()) {
       throw new Error(
         `DocHandle#${this.documentId} is not ready. Check \`handle.isReady()\` before accessing the document.`
       )
     }
+    let resultHeads: string[] | undefined = undefined
     this.#machine.send(UPDATE, {
       payload: {
         callback: (doc: A.Doc<T>) => {
-
+          const result = A.changeAt(doc, heads, options, callback)
+          resultHeads = result.newHeads
+          return result.newDoc
         },
       },
     })
+    return resultHeads
   }

   unavailable() {

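changeAt now reports the heads produced by the edit it applied. A minimal usage sketch, assuming a ready handle and that A.getHeads and DocHandle#docSync() behave as in the published @automerge/automerge 2.x and automerge-repo 1.0 APIs (the Note shape is purely illustrative):

import * as A from "@automerge/automerge"
import { Repo } from "@automerge/automerge-repo"

type Note = { text: string }

const repo = new Repo({ network: [] })
const handle = repo.create<Note>()

handle.change(d => {
  d.text = "first draft"
})

// Remember where the document was before a later edit
const headsBefore = A.getHeads(handle.docSync()!)

handle.change(d => {
  d.text = "second draft"
})

// Apply an edit as if the document were still at headsBefore; the returned
// heads identify the concurrent change that was just made
const newHeads = handle.changeAt(headsBefore, d => {
  d.text = "edit made against the older heads"
})
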
package/src/Repo.ts CHANGED
@@ -1,18 +1,26 @@
 import debug from "debug"
-import { DocCollection } from "./DocCollection.js"
 import { NetworkAdapter } from "./network/NetworkAdapter.js"
 import { NetworkSubsystem } from "./network/NetworkSubsystem.js"
 import { StorageAdapter } from "./storage/StorageAdapter.js"
 import { StorageSubsystem } from "./storage/StorageSubsystem.js"
 import { CollectionSynchronizer } from "./synchronizer/CollectionSynchronizer.js"
-import { DocumentId, PeerId } from "./types.js"
+import { type AutomergeUrl, DocumentId, PeerId } from "./types.js"
+import { v4 as uuid } from "uuid"
+import { parseAutomergeUrl, generateAutomergeUrl, isValidAutomergeUrl } from "./DocUrl.js"

-
-
+import { DocHandle } from "./DocHandle.js"
+import { EventEmitter } from "eventemitter3"
+
+/** A Repo is a collection of documents with networking, syncing, and storage capabilities. */
+export class Repo extends EventEmitter<DocCollectionEvents> {
   #log: debug.Debugger

   networkSubsystem: NetworkSubsystem
   storageSubsystem?: StorageSubsystem
+  #handleCache: Record<DocumentId, DocHandle<any>> = {}
+
+  /** By default, we share generously with all peers. */
+  sharePolicy: SharePolicy = async () => true

   constructor({ storage, network, peerId, sharePolicy }: RepoConfig) {
     super()
@@ -111,6 +119,108 @@ export class Repo extends DocCollection {
       await synchronizer.receiveMessage(msg)
     })
   }
+
+  /** Returns an existing handle if we have it; creates one otherwise. */
+  #getHandle<T>(
+    /** The documentId of the handle to look up or create */
+    documentId: DocumentId,
+
+    /** If we know we're creating a new document, specify this so we can have access to it immediately */
+    isNew: boolean
+  ) {
+    // If we have the handle cached, return it
+    if (this.#handleCache[documentId]) return this.#handleCache[documentId]
+
+    // If not, create a new handle, cache it, and return it
+    if (!documentId) throw new Error(`Invalid documentId ${documentId}`)
+    const handle = new DocHandle<T>(documentId, { isNew })
+    this.#handleCache[documentId] = handle
+    return handle
+  }
+
+  /** Returns all the handles we have cached. */
+  get handles() {
+    return this.#handleCache
+  }
+
+  /**
+   * Creates a new document and returns a handle to it. The initial value of the document is
+   * an empty object `{}`. Its documentId is generated by the system. we emit a `document` event
+   * to advertise interest in the document.
+   */
+  create<T>(): DocHandle<T> {
+    // TODO:
+    // either
+    // - pass an initial value and do something like this to ensure that you get a valid initial value
+
+    // const myInitialValue = {
+    //   tasks: [],
+    //   filter: "all",
+    //
+    // const guaranteeInitialValue = (doc: any) => {
+    //   if (!doc.tasks) doc.tasks = []
+    //   if (!doc.filter) doc.filter = "all"
+
+    //   return { ...myInitialValue, ...doc }
+    // }
+
+    // or
+    // - pass a "reify" function that takes a `<any>` and returns `<T>`
+
+    // Generate a new UUID and store it in the buffer
+    const { documentId } = parseAutomergeUrl(generateAutomergeUrl())
+    const handle = this.#getHandle<T>(documentId, true) as DocHandle<T>
+    this.emit("document", { handle, isNew: true })
+    return handle
+  }
+
+  /**
+   * Retrieves a document by id. It gets data from the local system, but also emits a `document`
+   * event to advertise interest in the document.
+   */
+  find<T>(
+    /** The documentId of the handle to retrieve */
+    automergeUrl: AutomergeUrl
+  ): DocHandle<T> {
+    if (!isValidAutomergeUrl(automergeUrl)) {
+      throw new Error(`Invalid AutomergeUrl: '${automergeUrl}'`)
+    }
+
+    const { documentId } = parseAutomergeUrl(automergeUrl)
+    // If we have the handle cached, return it
+    if (this.#handleCache[documentId]) {
+      if (this.#handleCache[documentId].isUnavailable()) {
+        // this ensures that the event fires after the handle has been returned
+        setTimeout(() => {
+          this.#handleCache[documentId].emit("unavailable", {
+            handle: this.#handleCache[documentId],
+          })
+        })
+      }
+      return this.#handleCache[documentId]
+    }
+
+    const handle = this.#getHandle<T>(documentId, false) as DocHandle<T>
+    this.emit("document", { handle, isNew: false })
+    return handle
+  }
+
+  delete(
+    /** The documentId of the handle to delete */
+    id: DocumentId | AutomergeUrl
+  ) {
+    if (isValidAutomergeUrl(id)) {
+      ;({ documentId: id } = parseAutomergeUrl(id))
+    }
+
+    const handle = this.#getHandle(id, false)
+    handle.delete()
+
+    delete this.#handleCache[id]
+    this.emit("delete-document", {
+      documentId: id,
+    })
+  }
 }

 export interface RepoConfig {
@@ -134,3 +244,19 @@ export type SharePolicy = (
   peerId: PeerId,
   documentId?: DocumentId
 ) => Promise<boolean>
+
+// events & payloads
+interface DocCollectionEvents {
+  document: (arg: DocumentPayload) => void
+  "delete-document": (arg: DeleteDocumentPayload) => void
+  "unavailable-document": (arg: DeleteDocumentPayload) => void
+}
+
+interface DocumentPayload {
+  handle: DocHandle<any>
+  isNew: boolean
+}
+
+interface DeleteDocumentPayload {
+  documentId: DocumentId
+}

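With DocCollection folded into Repo, the handle cache, create/find/delete, and sharePolicy are all reached through the repo object. A small sketch of the resulting surface, assuming an in-memory repo with no network or storage adapters and that a handle exposes its AutomergeUrl as handle.url as in the published 1.0 API (the TaskList shape is hypothetical):

import { Repo, PeerId } from "@automerge/automerge-repo"

type TaskList = { tasks: string[] }

const repo = new Repo({ network: [] })

// Restrict the generous default sharing to a single trusted peer
repo.sharePolicy = async (peerId: PeerId) => peerId === ("trusted-peer" as PeerId)

// create() returns a handle that is ready to use immediately
const handle = repo.create<TaskList>()
handle.change(d => {
  d.tasks = ["ship 1.0.0"]
})

// find() takes the AutomergeUrl form of the id and reuses the cached handle
const again = repo.find<TaskList>(handle.url)

// delete() accepts either a DocumentId or an AutomergeUrl and evicts the cached handle
repo.delete(handle.documentId)
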
package/src/index.ts CHANGED

package/src/synchronizer/CollectionSynchronizer.ts CHANGED
@@ -1,4 +1,4 @@
-import {
+import { Repo } from "../Repo.js"
 import { DocHandle } from "../DocHandle.js"
 import {
   documentIdToBinary,
@@ -29,7 +29,7 @@ export class CollectionSynchronizer extends Synchronizer {
   /** Used to determine if the document is know to the Collection and a synchronizer exists or is being set up */
   #docSetUp: Record<DocumentId, boolean> = {}

-  constructor(private repo:
+  constructor(private repo: Repo) {
     super()
   }

package/test/CollectionSynchronizer.test.ts CHANGED
@@ -1,15 +1,17 @@
 import assert from "assert"
 import { beforeEach } from "mocha"
-import {
+import { PeerId, Repo } from "../src/index.js"
 import { CollectionSynchronizer } from "../src/synchronizer/CollectionSynchronizer.js"

 describe("CollectionSynchronizer", () => {
-  let
+  let repo: Repo
   let synchronizer: CollectionSynchronizer

   beforeEach(() => {
-
-
+    repo = new Repo({
+      network: [],
+    })
+    synchronizer = new CollectionSynchronizer(repo)
   })

   it("is not null", async () => {
@@ -17,7 +19,7 @@ describe("CollectionSynchronizer", () => {
   })

   it("starts synchronizing a document to peers when added", done => {
-    const handle =
+    const handle = repo.create()
     synchronizer.addPeer("peer1" as PeerId)

     synchronizer.once("message", event => {
@@ -30,7 +32,7 @@ describe("CollectionSynchronizer", () => {
   })

   it("starts synchronizing existing documents when a peer is added", done => {
-    const handle =
+    const handle = repo.create()
     synchronizer.addDocument(handle.documentId)
     synchronizer.once("message", event => {
       assert(event.targetId === "peer1")
@@ -41,9 +43,9 @@ describe("CollectionSynchronizer", () => {
   })

   it("should not synchronize to a peer which is excluded from the share policy", done => {
-    const handle =
+    const handle = repo.create()

-
+    repo.sharePolicy = async (peerId: PeerId) => peerId !== "peer1"

     synchronizer.addDocument(handle.documentId)
     synchronizer.once("message", () => {
@@ -55,9 +57,8 @@ describe("CollectionSynchronizer", () => {
   })

   it("should not synchronize a document which is excluded from the share policy", done => {
-    const handle =
-
-      documentId !== handle.documentId
+    const handle = repo.create()
+    repo.sharePolicy = async (_, documentId) => documentId !== handle.documentId

     synchronizer.addPeer("peer2" as PeerId)

package/test/DocHandle.test.ts CHANGED
@@ -295,10 +295,11 @@ describe("DocHandle", () => {
     })

     let wasBar = false
-    handle.changeAt(headsBefore, doc => {
+    let newHeads = handle.changeAt(headsBefore, doc => {
       wasBar = doc.foo === "bar"
       doc.foo = "baz"
     })
+    assert(newHeads && newHeads.length > 0, "should have new heads")

     assert(wasBar, "foo should have been bar as we changed at the old heads")
   })

package/test/Repo.test.ts CHANGED
package/dist/DocCollection.d.ts DELETED
@@ -1,46 +0,0 @@
-import { EventEmitter } from "eventemitter3";
-import { DocHandle } from "./DocHandle.js";
-import { DocumentId, AutomergeUrl } from "./types.js";
-import { type SharePolicy } from "./Repo.js";
-/**
- * A DocCollection is a collection of DocHandles. It supports creating new documents and finding
- * documents by ID.
- * */
-export declare class DocCollection extends EventEmitter<DocCollectionEvents> {
-    #private;
-    /** By default, we share generously with all peers. */
-    sharePolicy: SharePolicy;
-    constructor();
-    /** Returns all the handles we have cached. */
-    get handles(): Record<DocumentId, DocHandle<any>>;
-    /**
-     * Creates a new document and returns a handle to it. The initial value of the document is
-     * an empty object `{}`. Its documentId is generated by the system. we emit a `document` event
-     * to advertise interest in the document.
-     */
-    create<T>(): DocHandle<T>;
-    /**
-     * Retrieves a document by id. It gets data from the local system, but also emits a `document`
-     * event to advertise interest in the document.
-     */
-    find<T>(
-    /** The documentId of the handle to retrieve */
-    automergeUrl: AutomergeUrl): DocHandle<T>;
-    delete(
-    /** The documentId of the handle to delete */
-    id: DocumentId | AutomergeUrl): void;
-}
-interface DocCollectionEvents {
-    document: (arg: DocumentPayload) => void;
-    "delete-document": (arg: DeleteDocumentPayload) => void;
-    "unavailable-document": (arg: DeleteDocumentPayload) => void;
-}
-interface DocumentPayload {
-    handle: DocHandle<any>;
-    isNew: boolean;
-}
-interface DeleteDocumentPayload {
-    documentId: DocumentId;
-}
-export {};
-//# sourceMappingURL=DocCollection.d.ts.map

package/dist/DocCollection.d.ts.map DELETED
@@ -1 +0,0 @@
-(deleted source map for DocCollection.d.ts; single-line mappings blob omitted)

package/dist/DocCollection.js DELETED
@@ -1,102 +0,0 @@
-import { EventEmitter } from "eventemitter3";
-import { DocHandle } from "./DocHandle.js";
-import { generateAutomergeUrl, isValidAutomergeUrl, parseAutomergeUrl, } from "./DocUrl.js";
-/**
- * A DocCollection is a collection of DocHandles. It supports creating new documents and finding
- * documents by ID.
- * */
-export class DocCollection extends EventEmitter {
-    #handleCache = {};
-    /** By default, we share generously with all peers. */
-    sharePolicy = async () => true;
-    constructor() {
-        super();
-    }
-    /** Returns an existing handle if we have it; creates one otherwise. */
-    #getHandle(
-    /** The documentId of the handle to look up or create */
-    documentId,
-    /** If we know we're creating a new document, specify this so we can have access to it immediately */
-    isNew) {
-        // If we have the handle cached, return it
-        if (this.#handleCache[documentId])
-            return this.#handleCache[documentId];
-        // If not, create a new handle, cache it, and return it
-        if (!documentId)
-            throw new Error(`Invalid documentId ${documentId}`);
-        const handle = new DocHandle(documentId, { isNew });
-        this.#handleCache[documentId] = handle;
-        return handle;
-    }
-    /** Returns all the handles we have cached. */
-    get handles() {
-        return this.#handleCache;
-    }
-    /**
-     * Creates a new document and returns a handle to it. The initial value of the document is
-     * an empty object `{}`. Its documentId is generated by the system. we emit a `document` event
-     * to advertise interest in the document.
-     */
-    create() {
-        // TODO:
-        // either
-        // - pass an initial value and do something like this to ensure that you get a valid initial value
-        // const myInitialValue = {
-        //   tasks: [],
-        //   filter: "all",
-        //
-        // const guaranteeInitialValue = (doc: any) => {
-        //   if (!doc.tasks) doc.tasks = []
-        //   if (!doc.filter) doc.filter = "all"
-        //   return { ...myInitialValue, ...doc }
-        // }
-        // or
-        // - pass a "reify" function that takes a `<any>` and returns `<T>`
-        // Generate a new UUID and store it in the buffer
-        const { documentId } = parseAutomergeUrl(generateAutomergeUrl());
-        const handle = this.#getHandle(documentId, true);
-        this.emit("document", { handle, isNew: true });
-        return handle;
-    }
-    /**
-     * Retrieves a document by id. It gets data from the local system, but also emits a `document`
-     * event to advertise interest in the document.
-     */
-    find(
-    /** The documentId of the handle to retrieve */
-    automergeUrl) {
-        if (!isValidAutomergeUrl(automergeUrl)) {
-            throw new Error(`Invalid AutomergeUrl: '${automergeUrl}'`);
-        }
-        const { documentId } = parseAutomergeUrl(automergeUrl);
-        // If we have the handle cached, return it
-        if (this.#handleCache[documentId]) {
-            if (this.#handleCache[documentId].isUnavailable()) {
-                // this ensures that the event fires after the handle has been returned
-                setTimeout(() => {
-                    this.#handleCache[documentId].emit("unavailable", {
-                        handle: this.#handleCache[documentId],
-                    });
-                });
-            }
-            return this.#handleCache[documentId];
-        }
-        const handle = this.#getHandle(documentId, false);
-        this.emit("document", { handle, isNew: false });
-        return handle;
-    }
-    delete(
-    /** The documentId of the handle to delete */
-    id) {
-        if (isValidAutomergeUrl(id)) {
-            ;
-            ({ documentId: id } = parseAutomergeUrl(id));
-        }
-        const handle = this.#getHandle(id, false);
-        handle.delete();
-        delete this.#handleCache[id];
-        this.emit("delete-document", {
-            documentId: id,
-        });
-    }
-}

package/src/DocCollection.ts DELETED
@@ -1,144 +0,0 @@
-import { EventEmitter } from "eventemitter3"
-import { DocHandle } from "./DocHandle.js"
-import { DocumentId, type BinaryDocumentId, AutomergeUrl } from "./types.js"
-import { type SharePolicy } from "./Repo.js"
-import {
-  documentIdToBinary,
-  binaryToDocumentId,
-  generateAutomergeUrl,
-  isValidAutomergeUrl,
-  parseAutomergeUrl,
-} from "./DocUrl.js"
-
-/**
- * A DocCollection is a collection of DocHandles. It supports creating new documents and finding
- * documents by ID.
- * */
-export class DocCollection extends EventEmitter<DocCollectionEvents> {
-  #handleCache: Record<DocumentId, DocHandle<any>> = {}
-
-  /** By default, we share generously with all peers. */
-  sharePolicy: SharePolicy = async () => true
-
-  constructor() {
-    super()
-  }
-
-  /** Returns an existing handle if we have it; creates one otherwise. */
-  #getHandle<T>(
-    /** The documentId of the handle to look up or create */
-    documentId: DocumentId,
-
-    /** If we know we're creating a new document, specify this so we can have access to it immediately */
-    isNew: boolean
-  ) {
-    // If we have the handle cached, return it
-    if (this.#handleCache[documentId]) return this.#handleCache[documentId]
-
-    // If not, create a new handle, cache it, and return it
-    if (!documentId) throw new Error(`Invalid documentId ${documentId}`)
-    const handle = new DocHandle<T>(documentId, { isNew })
-    this.#handleCache[documentId] = handle
-    return handle
-  }
-
-  /** Returns all the handles we have cached. */
-  get handles() {
-    return this.#handleCache
-  }
-
-  /**
-   * Creates a new document and returns a handle to it. The initial value of the document is
-   * an empty object `{}`. Its documentId is generated by the system. we emit a `document` event
-   * to advertise interest in the document.
-   */
-  create<T>(): DocHandle<T> {
-    // TODO:
-    // either
-    // - pass an initial value and do something like this to ensure that you get a valid initial value
-
-    // const myInitialValue = {
-    //   tasks: [],
-    //   filter: "all",
-    //
-    // const guaranteeInitialValue = (doc: any) => {
-    //   if (!doc.tasks) doc.tasks = []
-    //   if (!doc.filter) doc.filter = "all"
-
-    //   return { ...myInitialValue, ...doc }
-    // }
-
-    // or
-    // - pass a "reify" function that takes a `<any>` and returns `<T>`
-
-    // Generate a new UUID and store it in the buffer
-    const { documentId } = parseAutomergeUrl(generateAutomergeUrl())
-    const handle = this.#getHandle<T>(documentId, true) as DocHandle<T>
-    this.emit("document", { handle, isNew: true })
-    return handle
-  }
-
-  /**
-   * Retrieves a document by id. It gets data from the local system, but also emits a `document`
-   * event to advertise interest in the document.
-   */
-  find<T>(
-    /** The documentId of the handle to retrieve */
-    automergeUrl: AutomergeUrl
-  ): DocHandle<T> {
-    if (!isValidAutomergeUrl(automergeUrl)) {
-      throw new Error(`Invalid AutomergeUrl: '${automergeUrl}'`)
-    }
-
-    const { documentId } = parseAutomergeUrl(automergeUrl)
-    // If we have the handle cached, return it
-    if (this.#handleCache[documentId]) {
-      if (this.#handleCache[documentId].isUnavailable()) {
-        // this ensures that the event fires after the handle has been returned
-        setTimeout(() => {
-          this.#handleCache[documentId].emit("unavailable", {
-            handle: this.#handleCache[documentId],
-          })
-        })
-      }
-      return this.#handleCache[documentId]
-    }
-
-    const handle = this.#getHandle<T>(documentId, false) as DocHandle<T>
-    this.emit("document", { handle, isNew: false })
-    return handle
-  }
-
-  delete(
-    /** The documentId of the handle to delete */
-    id: DocumentId | AutomergeUrl
-  ) {
-    if (isValidAutomergeUrl(id)) {
-      ;({ documentId: id } = parseAutomergeUrl(id))
-    }
-
-    const handle = this.#getHandle(id, false)
-    handle.delete()
-
-    delete this.#handleCache[id]
-    this.emit("delete-document", {
-      documentId: id,
-    })
-  }
-}
-
-// events & payloads
-interface DocCollectionEvents {
-  document: (arg: DocumentPayload) => void
-  "delete-document": (arg: DeleteDocumentPayload) => void
-  "unavailable-document": (arg: DeleteDocumentPayload) => void
-}
-
-interface DocumentPayload {
-  handle: DocHandle<any>
-  isNew: boolean
-}
-
-interface DeleteDocumentPayload {
-  documentId: DocumentId
-}

package/test/DocCollection.test.ts DELETED
@@ -1,20 +0,0 @@
-import assert from "assert"
-import { DocCollection, BinaryDocumentId } from "../src/index.js"
-import { TestDoc } from "./types.js"
-import { generateAutomergeUrl, stringifyAutomergeUrl } from "../src/DocUrl.js"
-
-const MISSING_DOCID = generateAutomergeUrl()
-
-describe("DocCollection", () => {
-  it("can create documents which are ready to go", async () => {
-    const collection = new DocCollection()
-    const handle = collection.create<TestDoc>()
-    assert(handle.isReady() === true)
-  })
-
-  it("can start finding documents and they shouldn't be ready", () => {
-    const collection = new DocCollection()
-    const handle = collection.find<TestDoc>(MISSING_DOCID)
-    assert(handle.isReady() === false)
-  })
-})

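Because DocCollection is deleted outright, code that constructed one directly has to construct a Repo instead; the repo now provides the same create/find/delete methods, handle cache, and events that the deleted class did. A rough migration sketch under that assumption (adapters omitted; the TestDoc shape is illustrative):

import { Repo } from "@automerge/automerge-repo"

type TestDoc = { foo: string }

// Before (1.0.0-alpha.5):
//   const collection = new DocCollection()
//   const handle = collection.create<TestDoc>()

// After (1.0.0): the repo itself is the collection
const repo = new Repo({ network: [] })
const handle = repo.create<TestDoc>()
handle.change(d => {
  d.foo = "bar"
})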