@automerge/automerge-repo 1.0.0-alpha.0 → 1.0.0-alpha.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/DocCollection.d.ts +2 -1
- package/dist/DocCollection.d.ts.map +1 -1
- package/dist/DocCollection.js +17 -8
- package/dist/DocHandle.d.ts +27 -7
- package/dist/DocHandle.d.ts.map +1 -1
- package/dist/DocHandle.js +47 -23
- package/dist/DocUrl.d.ts +3 -3
- package/dist/DocUrl.js +9 -9
- package/dist/EphemeralData.d.ts +8 -16
- package/dist/EphemeralData.d.ts.map +1 -1
- package/dist/EphemeralData.js +1 -28
- package/dist/Repo.d.ts +0 -2
- package/dist/Repo.d.ts.map +1 -1
- package/dist/Repo.js +18 -36
- package/dist/helpers/headsAreSame.d.ts +2 -2
- package/dist/helpers/headsAreSame.d.ts.map +1 -1
- package/dist/helpers/headsAreSame.js +1 -4
- package/dist/helpers/tests/network-adapter-tests.d.ts.map +1 -1
- package/dist/helpers/tests/network-adapter-tests.js +15 -13
- package/dist/index.d.ts +2 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/network/NetworkAdapter.d.ts +4 -13
- package/dist/network/NetworkAdapter.d.ts.map +1 -1
- package/dist/network/NetworkSubsystem.d.ts +5 -4
- package/dist/network/NetworkSubsystem.d.ts.map +1 -1
- package/dist/network/NetworkSubsystem.js +39 -25
- package/dist/network/messages.d.ts +57 -0
- package/dist/network/messages.d.ts.map +1 -0
- package/dist/network/messages.js +21 -0
- package/dist/storage/StorageSubsystem.d.ts +2 -2
- package/dist/storage/StorageSubsystem.d.ts.map +1 -1
- package/dist/storage/StorageSubsystem.js +36 -6
- package/dist/synchronizer/CollectionSynchronizer.d.ts +3 -2
- package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
- package/dist/synchronizer/CollectionSynchronizer.js +19 -13
- package/dist/synchronizer/DocSynchronizer.d.ts +9 -3
- package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -1
- package/dist/synchronizer/DocSynchronizer.js +145 -29
- package/dist/synchronizer/Synchronizer.d.ts +3 -4
- package/dist/synchronizer/Synchronizer.d.ts.map +1 -1
- package/dist/types.d.ts +1 -3
- package/dist/types.d.ts.map +1 -1
- package/fuzz/fuzz.ts +4 -4
- package/package.json +3 -3
- package/src/DocCollection.ts +19 -9
- package/src/DocHandle.ts +82 -37
- package/src/DocUrl.ts +9 -9
- package/src/EphemeralData.ts +6 -36
- package/src/Repo.ts +20 -52
- package/src/helpers/headsAreSame.ts +3 -5
- package/src/helpers/tests/network-adapter-tests.ts +18 -14
- package/src/index.ts +12 -2
- package/src/network/NetworkAdapter.ts +4 -20
- package/src/network/NetworkSubsystem.ts +61 -38
- package/src/network/messages.ts +123 -0
- package/src/storage/StorageSubsystem.ts +42 -6
- package/src/synchronizer/CollectionSynchronizer.ts +38 -19
- package/src/synchronizer/DocSynchronizer.ts +196 -38
- package/src/synchronizer/Synchronizer.ts +3 -8
- package/src/types.ts +4 -1
- package/test/CollectionSynchronizer.test.ts +6 -7
- package/test/DocHandle.test.ts +36 -22
- package/test/DocSynchronizer.test.ts +85 -9
- package/test/Repo.test.ts +279 -59
- package/test/StorageSubsystem.test.ts +9 -9
- package/test/helpers/DummyNetworkAdapter.ts +1 -1
- package/tsconfig.json +2 -1
- package/test/EphemeralData.test.ts +0 -44
package/dist/DocCollection.d.ts
CHANGED
@@ -33,12 +33,13 @@ export declare class DocCollection extends EventEmitter<DocCollectionEvents> {
 interface DocCollectionEvents {
     document: (arg: DocumentPayload) => void;
     "delete-document": (arg: DeleteDocumentPayload) => void;
+    "unavailable-document": (arg: DeleteDocumentPayload) => void;
 }
 interface DocumentPayload {
     handle: DocHandle<any>;
 }
 interface DeleteDocumentPayload {
-
+    documentId: DocumentId;
 }
 export {};
 //# sourceMappingURL=DocCollection.d.ts.map
package/dist/DocCollection.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"DocCollection.d.ts","sourceRoot":"","sources":["../src/DocCollection.ts"],"names":[],"mappings":"AAAA,OAAO,YAAY,MAAM,eAAe,CAAA;AACxC,OAAO,EAAE,SAAS,EAAE,MAAM,gBAAgB,CAAA;AAC1C,OAAO,EAAE,UAAU,EAAyB,YAAY,EAAE,MAAM,YAAY,CAAA;AAC5E,OAAO,EAAE,KAAK,WAAW,EAAE,MAAM,WAAW,CAAA;AAS5C;;;KAGK;AACL,qBAAa,aAAc,SAAQ,YAAY,CAAC,mBAAmB,CAAC;;IAGlE,sDAAsD;IACtD,WAAW,EAAE,WAAW,CAAmB;;IAwB3C,8CAA8C;IAC9C,IAAI,OAAO,uCAEV;IAED;;;;OAIG;IACH,MAAM,CAAC,CAAC,KAAK,SAAS,CAAC,CAAC,CAAC;IA0BzB;;;OAGG;IACH,IAAI,CAAC,CAAC;IACJ,+CAA+C;IAC/C,YAAY,EAAE,YAAY,GACzB,SAAS,CAAC,CAAC,CAAC;
+
{"version":3,"file":"DocCollection.d.ts","sourceRoot":"","sources":["../src/DocCollection.ts"],"names":[],"mappings":"AAAA,OAAO,YAAY,MAAM,eAAe,CAAA;AACxC,OAAO,EAAE,SAAS,EAAE,MAAM,gBAAgB,CAAA;AAC1C,OAAO,EAAE,UAAU,EAAyB,YAAY,EAAE,MAAM,YAAY,CAAA;AAC5E,OAAO,EAAE,KAAK,WAAW,EAAE,MAAM,WAAW,CAAA;AAS5C;;;KAGK;AACL,qBAAa,aAAc,SAAQ,YAAY,CAAC,mBAAmB,CAAC;;IAGlE,sDAAsD;IACtD,WAAW,EAAE,WAAW,CAAmB;;IAwB3C,8CAA8C;IAC9C,IAAI,OAAO,uCAEV;IAED;;;;OAIG;IACH,MAAM,CAAC,CAAC,KAAK,SAAS,CAAC,CAAC,CAAC;IA0BzB;;;OAGG;IACH,IAAI,CAAC,CAAC;IACJ,+CAA+C;IAC/C,YAAY,EAAE,YAAY,GACzB,SAAS,CAAC,CAAC,CAAC;IAwBf,MAAM;IACJ,6CAA6C;IAC7C,EAAE,EAAE,UAAU,GAAG,YAAY;CAchC;AAGD,UAAU,mBAAmB;IAC3B,QAAQ,EAAE,CAAC,GAAG,EAAE,eAAe,KAAK,IAAI,CAAA;IACxC,iBAAiB,EAAE,CAAC,GAAG,EAAE,qBAAqB,KAAK,IAAI,CAAA;IACvD,sBAAsB,EAAE,CAAC,GAAG,EAAE,qBAAqB,KAAK,IAAI,CAAA;CAC7D;AAED,UAAU,eAAe;IACvB,MAAM,EAAE,SAAS,CAAC,GAAG,CAAC,CAAA;CACvB;AAED,UAAU,qBAAqB;IAC7B,UAAU,EAAE,UAAU,CAAA;CACvB"}
package/dist/DocCollection.js
CHANGED
@@ -53,8 +53,8 @@ export class DocCollection extends EventEmitter {
         // or
         // - pass a "reify" function that takes a `<any>` and returns `<T>`
         // Generate a new UUID and store it in the buffer
-        const {
-        const handle = this.#getHandle(
+        const { documentId } = parseAutomergeUrl(generateAutomergeUrl());
+        const handle = this.#getHandle(documentId, true);
         this.emit("document", { handle });
         return handle;
     }
@@ -68,11 +68,20 @@ export class DocCollection extends EventEmitter {
         if (!isValidAutomergeUrl(automergeUrl)) {
             throw new Error(`Invalid AutomergeUrl: '${automergeUrl}'`);
         }
-        const {
+        const { documentId } = parseAutomergeUrl(automergeUrl);
         // If we have the handle cached, return it
-        if (this.#handleCache[
-
-
+        if (this.#handleCache[documentId]) {
+            if (this.#handleCache[documentId].isUnavailable()) {
+                // this ensures that the event fires after the handle has been returned
+                setTimeout(() => {
+                    this.#handleCache[documentId].emit("unavailable", {
+                        handle: this.#handleCache[documentId],
+                    });
+                });
+            }
+            return this.#handleCache[documentId];
+        }
+        const handle = this.#getHandle(documentId, false);
         this.emit("document", { handle });
         return handle;
     }
@@ -81,13 +90,13 @@ export class DocCollection extends EventEmitter {
     id) {
         if (isValidAutomergeUrl(id)) {
             ;
-            ({
+            ({ documentId: id } = parseAutomergeUrl(id));
         }
         const handle = this.#getHandle(id, false);
         handle.delete();
         delete this.#handleCache[id];
         this.emit("delete-document", {
-
+            documentId: id,
         });
     }
 }
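
The DocCollection changes above add an "unavailable" path to find(): a cached-but-unavailable handle re-emits its event on the next tick, and the repo mirrors it as "unavailable-document". A minimal consumer-side sketch, not from the package source; it assumes an already constructed Repo, a valid url, and that Repo and AutomergeUrl are re-exported from the package entry point:

    import { AutomergeUrl, Repo } from "@automerge/automerge-repo"

    export const watchAvailability = (repo: Repo, url: AutomergeUrl) => {
      const handle = repo.find(url)
      // Fired by the handle (possibly on a later tick, per the setTimeout above)
      // when no storage copy exists and no connected peer can supply the document.
      handle.on("unavailable", () => {
        console.warn(`${handle.documentId} is unavailable`)
      })
      // The repo-level counterpart added in this release.
      repo.on("unavailable-document", ({ documentId }) => {
        console.warn(`no local or remote copy found for ${documentId}`)
      })
      return handle
    }
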
package/dist/DocHandle.d.ts
CHANGED
@@ -1,7 +1,7 @@
 import * as A from "@automerge/automerge";
 import EventEmitter from "eventemitter3";
 import { StateValue } from "xstate";
-import type {
+import type { DocumentId, PeerId, AutomergeUrl } from "./types.js";
 /** DocHandle is a wrapper around a single Automerge document that lets us listen for changes. */
 export declare class DocHandle<T>//
 extends EventEmitter<DocHandleEvents<T>> {
@@ -22,6 +22,7 @@ export declare class DocHandle<T>//
      * @returns true if the document has been marked as deleted
      */
     isDeleted: () => boolean;
+    isUnavailable: () => boolean;
     inState: (states: HandleState[]) => boolean;
     get state(): StateValue;
     /**
@@ -38,7 +39,7 @@ export declare class DocHandle<T>//
      *
      * @param {awaitStates=[READY]} optional states to wait for, such as "LOADING". mostly for internal use.
      */
-    doc(awaitStates?: HandleState[]): Promise<A.Doc<T
+    doc(awaitStates?: HandleState[]): Promise<A.Doc<T> | undefined>;
     /**
      * Returns the current state of the Automerge document this handle manages, or undefined.
      * Useful in a synchronous context. Consider using `await handle.doc()` instead, check `isReady()`,
@@ -50,17 +51,23 @@ export declare class DocHandle<T>//
      * @returns the current document, or undefined if the document is not ready
      */
     docSync(): A.Doc<T> | undefined;
-    /** `load` is called by the repo when the document is found in storage */
-    load(binary: Uint8Array): void;
     /** `update` is called by the repo when we receive changes from the network */
     update(callback: (doc: A.Doc<T>) => A.Doc<T>): void;
     /** `change` is called by the repo when the document is changed locally */
     change(callback: A.ChangeFn<T>, options?: A.ChangeOptions<T>): void;
     changeAt(heads: A.Heads, callback: A.ChangeFn<T>, options?: A.ChangeOptions<T>): void;
+    unavailable(): void;
     /** `request` is called by the repo when the document is not found in storage */
     request(): void;
     /** `delete` is called by the repo when the document is deleted */
     delete(): void;
+    /** `broadcast` sends an arbitrary ephemeral message out to all reachable peers who would receive sync messages from you
+     * it has no guarantee of delivery, and is not persisted to the underlying automerge doc in any way.
+     * messages will have a sending PeerId but this is *not* a useful user identifier.
+     * a user could have multiple tabs open and would appear as multiple PeerIds.
+     * every message source must have a unique PeerId.
+     */
+    broadcast(message: any): void;
 }
 interface DocHandleOptions {
     isNew?: boolean;
@@ -68,7 +75,7 @@ interface DocHandleOptions {
 }
 export interface DocHandleMessagePayload {
     destinationId: PeerId;
-
+    documentId: DocumentId;
     data: Uint8Array;
 }
 export interface DocHandleEncodedChangePayload<T> {
@@ -84,10 +91,22 @@ export interface DocHandleChangePayload<T> {
     patches: A.Patch[];
     patchInfo: A.PatchInfo<T>;
 }
+export interface DocHandleEphemeralMessagePayload {
+    handle: DocHandle<any>;
+    senderId: PeerId;
+    message: unknown;
+}
+export interface DocHandleOutboundEphemeralMessagePayload {
+    handle: DocHandle<any>;
+    data: Uint8Array;
+}
 export interface DocHandleEvents<T> {
     "heads-changed": (payload: DocHandleEncodedChangePayload<T>) => void;
     change: (payload: DocHandleChangePayload<T>) => void;
     delete: (payload: DocHandleDeletePayload<T>) => void;
+    unavailable: (payload: DocHandleDeletePayload<T>) => void;
+    "ephemeral-message": (payload: DocHandleEphemeralMessagePayload) => void;
+    "ephemeral-message-outbound": (payload: DocHandleOutboundEphemeralMessagePayload) => void;
 }
 export declare const HandleState: {
     readonly IDLE: "idle";
@@ -96,18 +115,19 @@ export declare const HandleState: {
     readonly READY: "ready";
     readonly FAILED: "failed";
     readonly DELETED: "deleted";
+    readonly UNAVAILABLE: "unavailable";
 };
 export type HandleState = (typeof HandleState)[keyof typeof HandleState];
 export declare const Event: {
     readonly CREATE: "CREATE";
-    readonly LOAD: "LOAD";
     readonly FIND: "FIND";
     readonly REQUEST: "REQUEST";
     readonly REQUEST_COMPLETE: "REQUEST_COMPLETE";
     readonly UPDATE: "UPDATE";
     readonly TIMEOUT: "TIMEOUT";
     readonly DELETE: "DELETE";
+    readonly MARK_UNAVAILABLE: "MARK_UNAVAILABLE";
 };
-export declare const IDLE: "idle", LOADING: "loading", REQUESTING: "requesting", READY: "ready", FAILED: "failed", DELETED: "deleted";
+export declare const IDLE: "idle", LOADING: "loading", REQUESTING: "requesting", READY: "ready", FAILED: "failed", DELETED: "deleted", UNAVAILABLE: "unavailable";
 export {};
 //# sourceMappingURL=DocHandle.d.ts.map
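
With the declaration above, doc() now resolves once the handle reaches READY or UNAVAILABLE, and its return type widens to A.Doc<T> | undefined, so callers need a guard. A hedged usage sketch, assuming DocHandle is exported from the package entry point and a handle obtained elsewhere:

    import { DocHandle } from "@automerge/automerge-repo"

    export const readTitle = async (handle: DocHandle<{ title: string }>) => {
      const doc = await handle.doc() // resolves in READY or UNAVAILABLE
      if (doc === undefined) {
        // handle.isUnavailable() is true here: nothing in storage, no peer had it
        return undefined
      }
      return doc.title
    }
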
package/dist/DocHandle.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"DocHandle.d.ts","sourceRoot":"","sources":["../src/DocHandle.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,CAAC,MAAM,sBAAsB,CAAA;AAEzC,OAAO,YAAY,MAAM,eAAe,CAAA;AACxC,OAAO,EASL,UAAU,EAEX,MAAM,QAAQ,CAAA;AAKf,OAAO,KAAK,
+
{"version":3,"file":"DocHandle.d.ts","sourceRoot":"","sources":["../src/DocHandle.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,CAAC,MAAM,sBAAsB,CAAA;AAEzC,OAAO,YAAY,MAAM,eAAe,CAAA;AACxC,OAAO,EASL,UAAU,EAEX,MAAM,QAAQ,CAAA;AAKf,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,EAAE,YAAY,EAAE,MAAM,YAAY,CAAA;AAIlE,iGAAiG;AACjG,qBAAa,SAAS,CAAC,CAAC,CAAE,EAAE;AAC1B,SAAQ,YAAY,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC;;IAY/B,UAAU,EAAE,UAAU;IAL/B,IAAI,GAAG,IAAI,YAAY,CAEtB;gBAGQ,UAAU,EAAE,UAAU,EAC7B,EAAE,KAAa,EAAE,YAAqB,EAAE,GAAE,gBAAqB;IAuLjE;;;;OAIG;IACH,OAAO,gBAA0C;IACjD;;;;;OAKG;IACH,SAAS,gBAA4C;IACrD,aAAa,gBAAgD;IAC7D,OAAO,WAAY,WAAW,EAAE,aACmB;IAEnD,IAAI,KAAK,eAER;IAED;;;;;OAKG;IACG,SAAS,CAAC,WAAW,GAAE,WAAW,EAAY,GAAG,OAAO,CAAC,IAAI,CAAC;IAIpE;;;;;;OAMG;IACG,GAAG,CACP,WAAW,GAAE,WAAW,EAAyB,GAChD,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC;IAchC;;;;;;;;;OASG;IACH,OAAO,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,SAAS;IAQ/B,8EAA8E;IAC9E,MAAM,CAAC,QAAQ,EAAE,CAAC,GAAG,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;IAM5C,2EAA2E;IAC3E,MAAM,CAAC,QAAQ,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,OAAO,GAAE,CAAC,CAAC,aAAa,CAAC,CAAC,CAAM;IAehE,QAAQ,CACN,KAAK,EAAE,CAAC,CAAC,KAAK,EACd,QAAQ,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EACvB,OAAO,GAAE,CAAC,CAAC,aAAa,CAAC,CAAC,CAAM;IAgBlC,WAAW;IAIX,gFAAgF;IAChF,OAAO;IAIP,kEAAkE;IAClE,MAAM;IAIN;;;;;OAKG;IACH,SAAS,CAAC,OAAO,EAAE,GAAG;CAMvB;AAID,UAAU,gBAAgB;IACxB,KAAK,CAAC,EAAE,OAAO,CAAA;IACf,YAAY,CAAC,EAAE,MAAM,CAAA;CACtB;AAED,MAAM,WAAW,uBAAuB;IACtC,aAAa,EAAE,MAAM,CAAA;IACrB,UAAU,EAAE,UAAU,CAAA;IACtB,IAAI,EAAE,UAAU,CAAA;CACjB;AAED,MAAM,WAAW,6BAA6B,CAAC,CAAC;IAC9C,MAAM,EAAE,SAAS,CAAC,CAAC,CAAC,CAAA;IACpB,GAAG,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;CACd;AAED,MAAM,WAAW,sBAAsB,CAAC,CAAC;IACvC,MAAM,EAAE,SAAS,CAAC,CAAC,CAAC,CAAA;CACrB;AAED,MAAM,WAAW,sBAAsB,CAAC,CAAC;IACvC,MAAM,EAAE,SAAS,CAAC,CAAC,CAAC,CAAA;IACpB,GAAG,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;IACb,OAAO,EAAE,CAAC,CAAC,KAAK,EAAE,CAAA;IAClB,SAAS,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAA;CAC1B;AAED,MAAM,WAAW,gCAAgC;IAC/C,MAAM,EAAE,SAAS,CAAC,GAAG,CAAC,CAAA;IACtB,QAAQ,EAAE,MAAM,CAAA;IAChB,OAAO,EAAE,OAAO,CAAA;CACjB;AAED,MAAM,WAAW,wCAAwC;IACvD,MAAM,EAAE,SAAS,CAAC,GAAG,CAAC,CAAA;IACtB,IAAI,EAAE,UAAU,CAAA;CACjB;AAED,MAAM,WAAW,eAAe,CAAC,CAAC;IAChC,eAAe,EAAE,CAAC,OAAO,EAAE,6BAA6B,CAAC,CAAC,CAAC,KAAK,IAAI,CAAA;IACpE,MAAM,EAAE,CAAC,OAAO,EAAE,sBAAsB,CAAC,CAAC,CAAC,KAAK,IAAI,CAAA;IACpD,MAAM,EAAE,CAAC,OAAO,EAAE,sBAAsB,CAAC,CAAC,CAAC,KAAK,IAAI,CAAA;IACpD,WAAW,EAAE,CAAC,OAAO,EAAE,sBAAsB,CAAC,CAAC,CAAC,KAAK,IAAI,CAAA;IACzD,mBAAmB,EAAE,CAAC,OAAO,EAAE,gCAAgC,KAAK,IAAI,CAAA;IACxE,4BAA4B,EAAE,CAC5B,OAAO,EAAE,wCAAwC,KAC9C,IAAI,CAAA;CACV;AAMD,eAAO,MAAM,WAAW;;;;;;;;CAQd,CAAA;AACV,MAAM,MAAM,WAAW,GAAG,CAAC,OAAO,WAAW,CAAC,CAAC,MAAM,OAAO,WAAW,CAAC,CAAA;AAkBxE,eAAO,MAAM,KAAK;;;;;;;;;CASR,CAAA;AA0CV,eAAO,MACL,IAAI,UACJ,OAAO,aACP,UAAU,gBACV,KAAK,WACL,MAAM,YACN,OAAO,aACP,WAAW,eACE,CAAA"}
package/dist/DocHandle.js
CHANGED
@@ -7,6 +7,7 @@ import { headsAreSame } from "./helpers/headsAreSame.js";
 import { pause } from "./helpers/pause.js";
 import { TimeoutError, withTimeout } from "./helpers/withTimeout.js";
 import { stringifyAutomergeUrl } from "./DocUrl.js";
+import { encode } from "cbor-x";
 /** DocHandle is a wrapper around a single Automerge document that lets us listen for changes. */
 export class DocHandle//
 extends EventEmitter {
@@ -54,8 +55,8 @@ export class DocHandle//
                 },
                 loading: {
                     on: {
-                        //
-
+                        // UPDATE is called by the Repo if the document is found in storage
+                        UPDATE: { actions: "onUpdate", target: READY },
                        // REQUEST is called by the Repo if the document is not found in storage
                         REQUEST: { target: REQUESTING },
                         DELETE: { actions: "onDelete", target: DELETED },
@@ -69,6 +70,10 @@ export class DocHandle//
                 },
                 requesting: {
                     on: {
+                        MARK_UNAVAILABLE: {
+                            target: UNAVAILABLE,
+                            actions: "onUnavailable",
+                        },
                         // UPDATE is called by the Repo when we receive changes from the network
                         UPDATE: { actions: "onUpdate" },
                         // REQUEST_COMPLETE is called from `onUpdate` when the doc has been fully loaded from the network
@@ -95,17 +100,18 @@ export class DocHandle//
                 deleted: {
                     type: "final",
                 },
+                unavailable: {
+                    on: {
+                        UPDATE: { actions: "onUpdate" },
+                        // REQUEST_COMPLETE is called from `onUpdate` when the doc has been fully loaded from the network
+                        REQUEST_COMPLETE: { target: READY },
+                        DELETE: { actions: "onDelete", target: DELETED },
+                    },
+                },
             },
         }, {
             actions: {
-                /**
-                onLoad: assign((context, { payload }) => {
-                    const { binary } = payload;
-                    const { doc } = context;
-                    const newDoc = A.loadIncremental(doc, binary);
-                    return { doc: newDoc };
-                }),
-                /** Put the updated doc on context; if it's different, emit a `change` event */
+                /** Put the updated doc on context */
                 onUpdate: assign((context, { payload }) => {
                     const { doc: oldDoc } = context;
                     const { callback } = payload;
@@ -116,13 +122,20 @@ export class DocHandle//
                     this.emit("delete", { handle: this });
                     return { doc: undefined };
                 }),
+                onUnavailable: assign(context => {
+                    const { doc } = context;
+                    this.emit("unavailable", { handle: this });
+                    return { doc };
+                }),
             },
         }))
             .onTransition(({ value: state, history, context }, event) => {
            const oldDoc = history?.context?.doc;
            const newDoc = context.doc;
-            this.#log(`${event} → ${state}`, newDoc);
-            const docChanged = newDoc &&
+            this.#log(`${history?.value}: ${event.type} → ${state}`, newDoc);
+            const docChanged = newDoc &&
+                oldDoc &&
+                !headsAreSame(A.getHeads(newDoc), A.getHeads(oldDoc));
             if (docChanged) {
                 this.emit("heads-changed", { handle: this, doc: newDoc });
                 const patches = A.diff(newDoc, A.getHeads(oldDoc), A.getHeads(newDoc));
@@ -174,6 +187,7 @@ export class DocHandle//
      * @returns true if the document has been marked as deleted
      */
     isDeleted = () => this.inState([HandleState.DELETED]);
+    isUnavailable = () => this.inState([HandleState.UNAVAILABLE]);
     inState = (states) => states.some(this.#machine?.getSnapshot().matches);
     get state() {
         return this.#machine?.getSnapshot().value;
@@ -194,7 +208,7 @@ export class DocHandle//
      *
      * @param {awaitStates=[READY]} optional states to wait for, such as "LOADING". mostly for internal use.
      */
-    async doc(awaitStates = [READY]) {
+    async doc(awaitStates = [READY, UNAVAILABLE]) {
         await pause(); // yield one tick because reasons
         try {
             // wait for the document to enter one of the desired states
@@ -207,7 +221,7 @@ export class DocHandle//
             throw error;
         }
         // Return the document
-        return this.#doc;
+        return !this.isUnavailable() ? this.#doc : undefined;
     }
     /**
      * Returns the current state of the Automerge document this handle manages, or undefined.
@@ -225,12 +239,6 @@ export class DocHandle//
         }
         return this.#doc;
     }
-    /** `load` is called by the repo when the document is found in storage */
-    load(binary) {
-        if (binary.length && binary.length > 0) {
-            this.#machine.send(LOAD, { payload: { binary } });
-        }
-    }
     /** `update` is called by the repo when we receive changes from the network */
     update(callback) {
         this.#machine.send(UPDATE, {
@@ -262,6 +270,9 @@ export class DocHandle//
             },
         });
     }
+    unavailable() {
+        this.#machine.send(MARK_UNAVAILABLE);
+    }
     /** `request` is called by the repo when the document is not found in storage */
     request() {
         if (this.#state === LOADING)
@@ -271,6 +282,18 @@ export class DocHandle//
     delete() {
         this.#machine.send(DELETE);
     }
+    /** `broadcast` sends an arbitrary ephemeral message out to all reachable peers who would receive sync messages from you
+     * it has no guarantee of delivery, and is not persisted to the underlying automerge doc in any way.
+     * messages will have a sending PeerId but this is *not* a useful user identifier.
+     * a user could have multiple tabs open and would appear as multiple PeerIds.
+     * every message source must have a unique PeerId.
+     */
+    broadcast(message) {
+        this.emit("ephemeral-message-outbound", {
+            handle: this,
+            data: encode(message),
+        });
+    }
 }
 // STATE MACHINE TYPES
 // state
@@ -281,18 +304,19 @@ export const HandleState = {
     READY: "ready",
     FAILED: "failed",
     DELETED: "deleted",
+    UNAVAILABLE: "unavailable",
 };
 // events
 export const Event = {
     CREATE: "CREATE",
-    LOAD: "LOAD",
     FIND: "FIND",
     REQUEST: "REQUEST",
     REQUEST_COMPLETE: "REQUEST_COMPLETE",
     UPDATE: "UPDATE",
     TIMEOUT: "TIMEOUT",
     DELETE: "DELETE",
+    MARK_UNAVAILABLE: "MARK_UNAVAILABLE",
 };
 // CONSTANTS
-export const { IDLE, LOADING, REQUESTING, READY, FAILED, DELETED } = HandleState;
-const { CREATE,
+export const { IDLE, LOADING, REQUESTING, READY, FAILED, DELETED, UNAVAILABLE, } = HandleState;
+const { CREATE, FIND, REQUEST, UPDATE, TIMEOUT, DELETE, REQUEST_COMPLETE, MARK_UNAVAILABLE, } = Event;
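
The compiled DocHandle above also carries the new ephemeral-message plumbing: broadcast() CBOR-encodes an arbitrary payload and emits it as "ephemeral-message-outbound" for the repo to fan out, while received messages surface as "ephemeral-message". A small sketch of the consumer-facing side (the cursor payload shape is illustrative only; delivery is best-effort and nothing is written to the document):

    import { DocHandle } from "@automerge/automerge-repo"

    export const wireUpPresence = (handle: DocHandle<unknown>) => {
      // Send: fire-and-forget to all reachable peers syncing this document.
      handle.broadcast({ cursor: { x: 10, y: 20 } })

      // Receive: senderId identifies a peer (e.g. one browser tab), not a user.
      handle.on("ephemeral-message", ({ senderId, message }) => {
        console.log(`presence from ${senderId}`, message)
      })
    }
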
package/dist/DocUrl.d.ts
CHANGED
@@ -4,11 +4,11 @@ export declare const urlPrefix = "automerge:";
  * given an Automerge URL, return a decoded DocumentId (and the encoded DocumentId)
  *
  * @param url
- * @returns {
+ * @returns { binaryDocumentId: BinaryDocumentId, documentId: DocumentId }
  */
 export declare const parseAutomergeUrl: (url: AutomergeUrl) => {
     binaryDocumentId: BinaryDocumentId;
-
+    documentId: DocumentId;
 };
 interface StringifyAutomergeUrlOptions {
     documentId: DocumentId | BinaryDocumentId;
@@ -17,7 +17,7 @@ interface StringifyAutomergeUrlOptions {
  * Given a documentId in either canonical form, return an Automerge URL
  * Throws on invalid input.
  * Note: this is an object because we anticipate adding fields in the future.
- * @param { documentId:
+ * @param { documentId: BinaryDocumentId | DocumentId }
  * @returns AutomergeUrl
  */
 export declare const stringifyAutomergeUrl: ({ documentId, }: StringifyAutomergeUrlOptions) => AutomergeUrl;
package/dist/DocUrl.js
CHANGED
@@ -5,19 +5,19 @@ export const urlPrefix = "automerge:";
  * given an Automerge URL, return a decoded DocumentId (and the encoded DocumentId)
  *
  * @param url
- * @returns {
+ * @returns { binaryDocumentId: BinaryDocumentId, documentId: DocumentId }
  */
 export const parseAutomergeUrl = (url) => {
-    const { binaryDocumentId
+    const { binaryDocumentId, documentId } = parts(url);
     if (!binaryDocumentId)
         throw new Error("Invalid document URL: " + url);
-    return { binaryDocumentId,
+    return { binaryDocumentId, documentId };
 };
 /**
  * Given a documentId in either canonical form, return an Automerge URL
  * Throws on invalid input.
  * Note: this is an object because we anticipate adding fields in the future.
- * @param { documentId:
+ * @param { documentId: BinaryDocumentId | DocumentId }
  * @returns AutomergeUrl
  */
 export const stringifyAutomergeUrl = ({ documentId, }) => {
@@ -56,12 +56,12 @@ export const binaryToDocumentId = (docId) => bs58check.encode(docId);
  * eventually this could include things like heads, so we use this structure
  * we return both a binary & string-encoded version of the document ID
  * @param str
- * @returns { binaryDocumentId,
+ * @returns { binaryDocumentId, documentId }
  */
 const parts = (str) => {
     const regex = new RegExp(`^${urlPrefix}(\\w+)$`);
-    const [
-    const
-    const binaryDocumentId = documentIdToBinary(
-    return { binaryDocumentId,
+    const [_, docMatch] = str.match(regex) || [];
+    const documentId = docMatch;
+    const binaryDocumentId = documentIdToBinary(documentId);
+    return { binaryDocumentId, documentId };
 };
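
The URL helpers above now round-trip the string documentId alongside the binary form. A usage sketch, assuming these functions are re-exported from the package entry point (they live in DocUrl.js either way):

    import {
      generateAutomergeUrl,
      isValidAutomergeUrl,
      parseAutomergeUrl,
      stringifyAutomergeUrl,
    } from "@automerge/automerge-repo"

    const url = generateAutomergeUrl() // "automerge:" + bs58check-encoded documentId
    if (isValidAutomergeUrl(url)) {
      const { documentId, binaryDocumentId } = parseAutomergeUrl(url)
      // Either encoding of the id can be turned back into a URL.
      console.assert(stringifyAutomergeUrl({ documentId }) === url)
      console.log(documentId, binaryDocumentId.byteLength)
    }
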
package/dist/EphemeralData.d.ts
CHANGED
@@ -1,27 +1,19 @@
-import
-import {
-
-
-
- * heartbeats, etc. — that is useful in the moment but not worth persisting.
- */
-export declare class EphemeralData extends EventEmitter<EphemeralDataMessageEvents> {
-    /** Broadcast an ephemeral message */
-    broadcast(channelId: ChannelId, message: unknown): void;
-    /** Receive an ephemeral message */
-    receive(senderId: PeerId, grossChannelId: ChannelId, message: Uint8Array): void;
-}
+import { DocumentId, PeerId } from "./index.js";
+import { EphemeralMessageContents } from "./network/messages.js";
+export type SessionId = string & {
+    __SessionId: false;
+};
 export interface EphemeralDataPayload {
-
+    documentId: DocumentId;
     peerId: PeerId;
     data: {
         peerId: PeerId;
-
+        documentId: DocumentId;
         data: unknown;
     };
 }
 export type EphemeralDataMessageEvents = {
-    message: (event:
+    message: (event: EphemeralMessageContents) => void;
     data: (event: EphemeralDataPayload) => void;
 };
 //# sourceMappingURL=EphemeralData.d.ts.map
package/dist/EphemeralData.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"EphemeralData.d.ts","sourceRoot":"","sources":["../src/EphemeralData.ts"],"names":[],"mappings":"
+
{"version":3,"file":"EphemeralData.d.ts","sourceRoot":"","sources":["../src/EphemeralData.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,YAAY,CAAA;AAC/C,OAAO,EAAE,wBAAwB,EAAE,MAAM,uBAAuB,CAAA;AAGhE,MAAM,MAAM,SAAS,GAAG,MAAM,GAAG;IAAE,WAAW,EAAE,KAAK,CAAA;CAAE,CAAA;AAEvD,MAAM,WAAW,oBAAoB;IACnC,UAAU,EAAE,UAAU,CAAA;IACtB,MAAM,EAAE,MAAM,CAAA;IACd,IAAI,EAAE;QAAE,MAAM,EAAE,MAAM,CAAC;QAAC,UAAU,EAAE,UAAU,CAAC;QAAC,IAAI,EAAE,OAAO,CAAA;KAAE,CAAA;CAChE;AAED,MAAM,MAAM,0BAA0B,GAAG;IACvC,OAAO,EAAE,CAAC,KAAK,EAAE,wBAAwB,KAAK,IAAI,CAAA;IAClD,IAAI,EAAE,CAAC,KAAK,EAAE,oBAAoB,KAAK,IAAI,CAAA;CAC5C,CAAA"}
package/dist/EphemeralData.js
CHANGED
@@ -1,28 +1 @@
-
-import EventEmitter from "eventemitter3";
-/**
- * EphemeralData provides a mechanism to broadcast short-lived data — cursor positions, presence,
- * heartbeats, etc. — that is useful in the moment but not worth persisting.
- */
-export class EphemeralData extends EventEmitter {
-    /** Broadcast an ephemeral message */
-    broadcast(channelId, message) {
-        const messageBytes = encode(message);
-        this.emit("message", {
-            targetId: "*",
-            channelId: ("m/" + channelId),
-            message: messageBytes,
-            broadcast: true,
-        });
-    }
-    /** Receive an ephemeral message */
-    receive(senderId, grossChannelId, message) {
-        const data = decode(message);
-        const channelId = grossChannelId.slice(2);
-        this.emit("data", {
-            peerId: senderId,
-            channelId,
-            data,
-        });
-    }
-}
+export {};
package/dist/Repo.d.ts
CHANGED
@@ -1,5 +1,4 @@
 import { DocCollection } from "./DocCollection.js";
-import { EphemeralData } from "./EphemeralData.js";
 import { NetworkAdapter } from "./network/NetworkAdapter.js";
 import { NetworkSubsystem } from "./network/NetworkSubsystem.js";
 import { StorageAdapter } from "./storage/StorageAdapter.js";
@@ -10,7 +9,6 @@ export declare class Repo extends DocCollection {
     #private;
     networkSubsystem: NetworkSubsystem;
     storageSubsystem?: StorageSubsystem;
-    ephemeralData: EphemeralData;
     constructor({ storage, network, peerId, sharePolicy }: RepoConfig);
 }
 export interface RepoConfig {
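
Repo no longer exposes an ephemeralData field; the constructor still destructures { storage, network, peerId, sharePolicy } from RepoConfig. A construction sketch under stated assumptions: the BroadcastChannel adapter import refers to a companion package and is an assumption (any NetworkAdapter/StorageAdapter implementation slots in the same way), and the sharePolicy shown simply shares every document with every peer:

    import { PeerId, Repo } from "@automerge/automerge-repo"
    // Assumption: companion network-adapter package.
    import { BroadcastChannelNetworkAdapter } from "@automerge/automerge-repo-network-broadcastchannel"

    const repo = new Repo({
      network: [new BroadcastChannelNetworkAdapter()],
      peerId: "tab-1" as PeerId, // branded string type
      // storage: new SomeStorageAdapter(), // hypothetical; omit for in-memory only
      sharePolicy: async () => true,
    })

    const handle = repo.create<{ title: string }>()
    handle.change(d => {
      d.title = "hello"
    })
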
package/dist/Repo.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"Repo.d.ts","sourceRoot":"","sources":["../src/Repo.ts"],"names":[],"mappings":"
+
{"version":3,"file":"Repo.d.ts","sourceRoot":"","sources":["../src/Repo.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,aAAa,EAAE,MAAM,oBAAoB,CAAA;AAClD,OAAO,EAAE,cAAc,EAAE,MAAM,6BAA6B,CAAA;AAC5D,OAAO,EAAE,gBAAgB,EAAE,MAAM,+BAA+B,CAAA;AAChE,OAAO,EAAE,cAAc,EAAE,MAAM,6BAA6B,CAAA;AAC5D,OAAO,EAAE,gBAAgB,EAAE,MAAM,+BAA+B,CAAA;AAEhE,OAAO,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,YAAY,CAAA;AAE/C,oFAAoF;AACpF,qBAAa,IAAK,SAAQ,aAAa;;IAGrC,gBAAgB,EAAE,gBAAgB,CAAA;IAClC,gBAAgB,CAAC,EAAE,gBAAgB,CAAA;gBAEvB,EAAE,OAAO,EAAE,OAAO,EAAE,MAAM,EAAE,WAAW,EAAE,EAAE,UAAU;CAiFlE;AAED,MAAM,WAAW,UAAU;IACzB,4BAA4B;IAC5B,MAAM,CAAC,EAAE,MAAM,CAAA;IAEf,gDAAgD;IAChD,OAAO,CAAC,EAAE,cAAc,CAAA;IAExB,oDAAoD;IACpD,OAAO,EAAE,cAAc,EAAE,CAAA;IAEzB;;;OAGG;IACH,WAAW,CAAC,EAAE,WAAW,CAAA;CAC1B;AAED,MAAM,MAAM,WAAW,GAAG,CACxB,MAAM,EAAE,MAAM,EACd,UAAU,CAAC,EAAE,UAAU,KACpB,OAAO,CAAC,OAAO,CAAC,CAAA"}
package/dist/Repo.js
CHANGED
@@ -1,15 +1,13 @@
+import debug from "debug";
 import { DocCollection } from "./DocCollection.js";
-import { EphemeralData } from "./EphemeralData.js";
 import { NetworkSubsystem } from "./network/NetworkSubsystem.js";
 import { StorageSubsystem } from "./storage/StorageSubsystem.js";
 import { CollectionSynchronizer } from "./synchronizer/CollectionSynchronizer.js";
-import debug from "debug";
 /** A Repo is a DocCollection with networking, syncing, and storage capabilities. */
 export class Repo extends DocCollection {
     #log;
     networkSubsystem;
     storageSubsystem;
-    ephemeralData;
     constructor({ storage, network, peerId, sharePolicy }) {
         super();
         this.#log = debug(`automerge-repo:repo`);
@@ -21,30 +19,38 @@ export class Repo extends DocCollection {
             if (storageSubsystem) {
                 // Save when the document changes
                 handle.on("heads-changed", async ({ handle, doc }) => {
-                    await storageSubsystem.
+                    await storageSubsystem.saveDoc(handle.documentId, doc);
                 });
                 // Try to load from disk
-                const
-
+                const loadedDoc = await storageSubsystem.loadDoc(handle.documentId);
+                if (loadedDoc) {
+                    handle.update(() => loadedDoc);
+                }
             }
+            handle.on("unavailable", () => {
+                this.#log("document unavailable", { documentId: handle.documentId });
+                this.emit("unavailable-document", {
+                    documentId: handle.documentId,
+                });
+            });
             handle.request();
             // Register the document with the synchronizer. This advertises our interest in the document.
             synchronizer.addDocument(handle.documentId);
         });
-        this.on("delete-document", ({
+        this.on("delete-document", ({ documentId }) => {
             // TODO Pass the delete on to the network
             // synchronizer.removeDocument(documentId)
             if (storageSubsystem) {
-                storageSubsystem.remove(
+                storageSubsystem.remove(documentId);
             }
         });
         // SYNCHRONIZER
         // The synchronizer uses the network subsystem to keep documents in sync with peers.
         const synchronizer = new CollectionSynchronizer(this);
         // When the synchronizer emits sync messages, send them to peers
-        synchronizer.on("message",
-            this.#log(`sending sync message to ${targetId}`);
-            networkSubsystem.
+        synchronizer.on("message", message => {
+            this.#log(`sending sync message to ${message.targetId}`);
+            networkSubsystem.send(message);
         });
         // STORAGE
         // The storage subsystem has access to some form of persistence, and deals with save and loading documents.
@@ -65,31 +71,7 @@ export class Repo extends DocCollection {
         });
         // Handle incoming messages
         networkSubsystem.on("message", async (msg) => {
-
-            // TODO: this demands a more principled way of associating channels with recipients
-            // Ephemeral channel ids start with "m/"
-            if (channelId.startsWith("m/")) {
-                // Ephemeral message
-                this.#log(`receiving ephemeral message from ${senderId}`);
-                ephemeralData.receive(senderId, channelId, message);
-            }
-            else {
-                // Sync message
-                this.#log(`receiving sync message from ${senderId}`);
-                await synchronizer.receiveSyncMessage(senderId, channelId, message);
-            }
-        });
-        // We establish a special channel for sync messages
-        networkSubsystem.join();
-        // EPHEMERAL DATA
-        // The ephemeral data subsystem uses the network to send and receive messages that are not
-        // persisted to storage, e.g. cursor position, presence, etc.
-        const ephemeralData = new EphemeralData();
-        this.ephemeralData = ephemeralData;
-        // Send ephemeral messages to peers
-        ephemeralData.on("message", ({ targetId, channelId, message, broadcast }) => {
-            this.#log(`sending ephemeral message to ${targetId}`);
-            networkSubsystem.sendMessage(targetId, channelId, message, broadcast);
+            await synchronizer.receiveMessage(msg);
         });
     }
 }
package/dist/helpers/headsAreSame.d.ts
CHANGED
@@ -1,3 +1,3 @@
-import
-export declare const headsAreSame:
+import { Heads } from "@automerge/automerge";
+export declare const headsAreSame: (a: Heads, b: Heads) => boolean;
 //# sourceMappingURL=headsAreSame.d.ts.map
package/dist/helpers/headsAreSame.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"headsAreSame.d.ts","sourceRoot":"","sources":["../../src/helpers/headsAreSame.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,
+
{"version":3,"file":"headsAreSame.d.ts","sourceRoot":"","sources":["../../src/helpers/headsAreSame.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,KAAK,EAAC,MAAM,sBAAsB,CAAA;AAG1C,eAAO,MAAM,YAAY,iCAExB,CAAA"}
package/dist/helpers/headsAreSame.js
CHANGED
@@ -1,7 +1,4 @@
-import * as A from "@automerge/automerge";
 import { arraysAreEqual } from "./arraysAreEqual.js";
 export const headsAreSame = (a, b) => {
-
-    const bHeads = A.getHeads(b);
-    return arraysAreEqual(aHeads, bHeads);
+    return arraysAreEqual(a, b);
 };
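
headsAreSame now compares two precomputed Heads arrays instead of extracting heads itself, matching the call site in DocHandle.js above (headsAreSame(A.getHeads(newDoc), A.getHeads(oldDoc))). A tiny sketch; the relative import only makes sense inside this package, since the helper is not part of the public API:

    import * as A from "@automerge/automerge"
    import { headsAreSame } from "./helpers/headsAreSame.js"

    export const docsAtSamePoint = <T>(a: A.Doc<T>, b: A.Doc<T>): boolean =>
      // Callers are now responsible for extracting heads themselves.
      headsAreSame(A.getHeads(a), A.getHeads(b))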