@automerge/automerge-repo 1.0.0-alpha.2 → 1.0.0-alpha.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/DocCollection.d.ts +2 -1
- package/dist/DocCollection.d.ts.map +1 -1
- package/dist/DocCollection.js +17 -8
- package/dist/DocHandle.d.ts +27 -4
- package/dist/DocHandle.d.ts.map +1 -1
- package/dist/DocHandle.js +44 -6
- package/dist/DocUrl.d.ts +3 -3
- package/dist/DocUrl.js +9 -9
- package/dist/EphemeralData.d.ts +8 -16
- package/dist/EphemeralData.d.ts.map +1 -1
- package/dist/EphemeralData.js +1 -28
- package/dist/Repo.d.ts +0 -2
- package/dist/Repo.d.ts.map +1 -1
- package/dist/Repo.js +13 -33
- package/dist/helpers/tests/network-adapter-tests.d.ts.map +1 -1
- package/dist/helpers/tests/network-adapter-tests.js +15 -13
- package/dist/index.d.ts +2 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/network/NetworkAdapter.d.ts +4 -13
- package/dist/network/NetworkAdapter.d.ts.map +1 -1
- package/dist/network/NetworkSubsystem.d.ts +5 -4
- package/dist/network/NetworkSubsystem.d.ts.map +1 -1
- package/dist/network/NetworkSubsystem.js +39 -25
- package/dist/network/messages.d.ts +57 -0
- package/dist/network/messages.d.ts.map +1 -0
- package/dist/network/messages.js +21 -0
- package/dist/synchronizer/CollectionSynchronizer.d.ts +3 -2
- package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
- package/dist/synchronizer/CollectionSynchronizer.js +19 -13
- package/dist/synchronizer/DocSynchronizer.d.ts +9 -3
- package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -1
- package/dist/synchronizer/DocSynchronizer.js +145 -29
- package/dist/synchronizer/Synchronizer.d.ts +3 -4
- package/dist/synchronizer/Synchronizer.d.ts.map +1 -1
- package/dist/types.d.ts +1 -3
- package/dist/types.d.ts.map +1 -1
- package/fuzz/fuzz.ts +4 -4
- package/package.json +2 -2
- package/src/DocCollection.ts +19 -9
- package/src/DocHandle.ts +87 -10
- package/src/DocUrl.ts +9 -9
- package/src/EphemeralData.ts +6 -36
- package/src/Repo.ts +15 -49
- package/src/helpers/tests/network-adapter-tests.ts +18 -14
- package/src/index.ts +12 -2
- package/src/network/NetworkAdapter.ts +4 -20
- package/src/network/NetworkSubsystem.ts +61 -38
- package/src/network/messages.ts +123 -0
- package/src/synchronizer/CollectionSynchronizer.ts +38 -19
- package/src/synchronizer/DocSynchronizer.ts +196 -38
- package/src/synchronizer/Synchronizer.ts +3 -8
- package/src/types.ts +4 -1
- package/test/CollectionSynchronizer.test.ts +6 -7
- package/test/DocHandle.test.ts +28 -13
- package/test/DocSynchronizer.test.ts +85 -9
- package/test/Repo.test.ts +221 -59
- package/test/StorageSubsystem.test.ts +2 -2
- package/test/helpers/DummyNetworkAdapter.ts +1 -1
- package/tsconfig.json +2 -1
- package/test/EphemeralData.test.ts +0 -44
package/dist/DocCollection.d.ts
CHANGED
@@ -33,12 +33,13 @@ export declare class DocCollection extends EventEmitter<DocCollectionEvents> {
 interface DocCollectionEvents {
     document: (arg: DocumentPayload) => void;
     "delete-document": (arg: DeleteDocumentPayload) => void;
+    "unavailable-document": (arg: DeleteDocumentPayload) => void;
 }
 interface DocumentPayload {
     handle: DocHandle<any>;
 }
 interface DeleteDocumentPayload {
-
+    documentId: DocumentId;
 }
 export {};
 //# sourceMappingURL=DocCollection.d.ts.map
package/dist/DocCollection.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"DocCollection.d.ts","sourceRoot":"","sources":["../src/DocCollection.ts"],"names":[],"mappings":"AAAA,OAAO,YAAY,MAAM,eAAe,CAAA;AACxC,OAAO,EAAE,SAAS,EAAE,MAAM,gBAAgB,CAAA;AAC1C,OAAO,EAAE,UAAU,EAAyB,YAAY,EAAE,MAAM,YAAY,CAAA;AAC5E,OAAO,EAAE,KAAK,WAAW,EAAE,MAAM,WAAW,CAAA;AAS5C;;;KAGK;AACL,qBAAa,aAAc,SAAQ,YAAY,CAAC,mBAAmB,CAAC;;IAGlE,sDAAsD;IACtD,WAAW,EAAE,WAAW,CAAmB;;IAwB3C,8CAA8C;IAC9C,IAAI,OAAO,uCAEV;IAED;;;;OAIG;IACH,MAAM,CAAC,CAAC,KAAK,SAAS,CAAC,CAAC,CAAC;IA0BzB;;;OAGG;IACH,IAAI,CAAC,CAAC;IACJ,+CAA+C;IAC/C,YAAY,EAAE,YAAY,GACzB,SAAS,CAAC,CAAC,CAAC;
+
{"version":3,"file":"DocCollection.d.ts","sourceRoot":"","sources":["../src/DocCollection.ts"],"names":[],"mappings":"AAAA,OAAO,YAAY,MAAM,eAAe,CAAA;AACxC,OAAO,EAAE,SAAS,EAAE,MAAM,gBAAgB,CAAA;AAC1C,OAAO,EAAE,UAAU,EAAyB,YAAY,EAAE,MAAM,YAAY,CAAA;AAC5E,OAAO,EAAE,KAAK,WAAW,EAAE,MAAM,WAAW,CAAA;AAS5C;;;KAGK;AACL,qBAAa,aAAc,SAAQ,YAAY,CAAC,mBAAmB,CAAC;;IAGlE,sDAAsD;IACtD,WAAW,EAAE,WAAW,CAAmB;;IAwB3C,8CAA8C;IAC9C,IAAI,OAAO,uCAEV;IAED;;;;OAIG;IACH,MAAM,CAAC,CAAC,KAAK,SAAS,CAAC,CAAC,CAAC;IA0BzB;;;OAGG;IACH,IAAI,CAAC,CAAC;IACJ,+CAA+C;IAC/C,YAAY,EAAE,YAAY,GACzB,SAAS,CAAC,CAAC,CAAC;IAwBf,MAAM;IACJ,6CAA6C;IAC7C,EAAE,EAAE,UAAU,GAAG,YAAY;CAchC;AAGD,UAAU,mBAAmB;IAC3B,QAAQ,EAAE,CAAC,GAAG,EAAE,eAAe,KAAK,IAAI,CAAA;IACxC,iBAAiB,EAAE,CAAC,GAAG,EAAE,qBAAqB,KAAK,IAAI,CAAA;IACvD,sBAAsB,EAAE,CAAC,GAAG,EAAE,qBAAqB,KAAK,IAAI,CAAA;CAC7D;AAED,UAAU,eAAe;IACvB,MAAM,EAAE,SAAS,CAAC,GAAG,CAAC,CAAA;CACvB;AAED,UAAU,qBAAqB;IAC7B,UAAU,EAAE,UAAU,CAAA;CACvB"}
package/dist/DocCollection.js
CHANGED
@@ -53,8 +53,8 @@ export class DocCollection extends EventEmitter {
     // or
     // - pass a "reify" function that takes a `<any>` and returns `<T>`
     // Generate a new UUID and store it in the buffer
-    const {
-    const handle = this.#getHandle(
+    const { documentId } = parseAutomergeUrl(generateAutomergeUrl());
+    const handle = this.#getHandle(documentId, true);
     this.emit("document", { handle });
     return handle;
 }
@@ -68,11 +68,20 @@ export class DocCollection extends EventEmitter {
     if (!isValidAutomergeUrl(automergeUrl)) {
         throw new Error(`Invalid AutomergeUrl: '${automergeUrl}'`);
     }
-    const {
+    const { documentId } = parseAutomergeUrl(automergeUrl);
     // If we have the handle cached, return it
-    if (this.#handleCache[
-
-
+    if (this.#handleCache[documentId]) {
+        if (this.#handleCache[documentId].isUnavailable()) {
+            // this ensures that the event fires after the handle has been returned
+            setTimeout(() => {
+                this.#handleCache[documentId].emit("unavailable", {
+                    handle: this.#handleCache[documentId],
+                });
+            });
+        }
+        return this.#handleCache[documentId];
+    }
+    const handle = this.#getHandle(documentId, false);
     this.emit("document", { handle });
     return handle;
 }
@@ -81,13 +90,13 @@ export class DocCollection extends EventEmitter {
     id) {
     if (isValidAutomergeUrl(id)) {
         ;
-        ({
+        ({ documentId: id } = parseAutomergeUrl(id));
     }
     const handle = this.#getHandle(id, false);
     handle.delete();
     delete this.#handleCache[id];
     this.emit("delete-document", {
-
+        documentId: id,
     });
 }
 }
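The cached-handle branch above re-emits "unavailable" on a timeout so that listeners attached right after find() returns still see the event. A minimal consuming sketch, assuming Repo and the AutomergeUrl type are imported from the package index; someUnreachableUrl is a placeholder for a well-formed URL that no storage or peer can supply:

    import { Repo } from "@automerge/automerge-repo"
    import type { AutomergeUrl } from "@automerge/automerge-repo"

    // placeholder: a well-formed automerge: URL nobody can supply (assumption for this sketch)
    declare const someUnreachableUrl: AutomergeUrl

    const repo = new Repo({ network: [] })
    const handle = repo.find(someUnreachableUrl)

    handle.on("unavailable", ({ handle }) => {
      // fires even when the handle was served from the cache
      console.log("could not load or fetch document", handle.documentId)
    })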
package/dist/DocHandle.d.ts
CHANGED
@@ -1,7 +1,7 @@
 import * as A from "@automerge/automerge";
 import EventEmitter from "eventemitter3";
 import { StateValue } from "xstate";
-import type {
+import type { DocumentId, PeerId, AutomergeUrl } from "./types.js";
 /** DocHandle is a wrapper around a single Automerge document that lets us listen for changes. */
 export declare class DocHandle<T>//
 extends EventEmitter<DocHandleEvents<T>> {
@@ -22,6 +22,7 @@ export declare class DocHandle<T>//
      * @returns true if the document has been marked as deleted
      */
     isDeleted: () => boolean;
+    isUnavailable: () => boolean;
     inState: (states: HandleState[]) => boolean;
     get state(): StateValue;
     /**
@@ -38,7 +39,7 @@ export declare class DocHandle<T>//
      *
      * @param {awaitStates=[READY]} optional states to wait for, such as "LOADING". mostly for internal use.
      */
-    doc(awaitStates?: HandleState[]): Promise<A.Doc<T
+    doc(awaitStates?: HandleState[]): Promise<A.Doc<T> | undefined>;
     /**
      * Returns the current state of the Automerge document this handle manages, or undefined.
      * Useful in a synchronous context. Consider using `await handle.doc()` instead, check `isReady()`,
@@ -55,10 +56,18 @@ export declare class DocHandle<T>//
     /** `change` is called by the repo when the document is changed locally */
     change(callback: A.ChangeFn<T>, options?: A.ChangeOptions<T>): void;
     changeAt(heads: A.Heads, callback: A.ChangeFn<T>, options?: A.ChangeOptions<T>): void;
+    unavailable(): void;
     /** `request` is called by the repo when the document is not found in storage */
     request(): void;
     /** `delete` is called by the repo when the document is deleted */
     delete(): void;
+    /** `broadcast` sends an arbitrary ephemeral message out to all reachable peers who would receive sync messages from you
+     * it has no guarantee of delivery, and is not persisted to the underlying automerge doc in any way.
+     * messages will have a sending PeerId but this is *not* a useful user identifier.
+     * a user could have multiple tabs open and would appear as multiple PeerIds.
+     * every message source must have a unique PeerId.
+     */
+    broadcast(message: any): void;
 }
 interface DocHandleOptions {
     isNew?: boolean;
@@ -66,7 +75,7 @@ interface DocHandleOptions {
 }
 export interface DocHandleMessagePayload {
     destinationId: PeerId;
-
+    documentId: DocumentId;
     data: Uint8Array;
 }
 export interface DocHandleEncodedChangePayload<T> {
@@ -82,10 +91,22 @@ export interface DocHandleChangePayload<T> {
     patches: A.Patch[];
     patchInfo: A.PatchInfo<T>;
 }
+export interface DocHandleEphemeralMessagePayload {
+    handle: DocHandle<any>;
+    senderId: PeerId;
+    message: unknown;
+}
+export interface DocHandleOutboundEphemeralMessagePayload {
+    handle: DocHandle<any>;
+    data: Uint8Array;
+}
 export interface DocHandleEvents<T> {
     "heads-changed": (payload: DocHandleEncodedChangePayload<T>) => void;
     change: (payload: DocHandleChangePayload<T>) => void;
     delete: (payload: DocHandleDeletePayload<T>) => void;
+    unavailable: (payload: DocHandleDeletePayload<T>) => void;
+    "ephemeral-message": (payload: DocHandleEphemeralMessagePayload) => void;
+    "ephemeral-message-outbound": (payload: DocHandleOutboundEphemeralMessagePayload) => void;
 }
 export declare const HandleState: {
     readonly IDLE: "idle";
@@ -94,6 +115,7 @@ export declare const HandleState: {
     readonly READY: "ready";
     readonly FAILED: "failed";
     readonly DELETED: "deleted";
+    readonly UNAVAILABLE: "unavailable";
 };
 export type HandleState = (typeof HandleState)[keyof typeof HandleState];
 export declare const Event: {
@@ -104,7 +126,8 @@ export declare const Event: {
     readonly UPDATE: "UPDATE";
     readonly TIMEOUT: "TIMEOUT";
     readonly DELETE: "DELETE";
+    readonly MARK_UNAVAILABLE: "MARK_UNAVAILABLE";
 };
-export declare const IDLE: "idle", LOADING: "loading", REQUESTING: "requesting", READY: "ready", FAILED: "failed", DELETED: "deleted";
+export declare const IDLE: "idle", LOADING: "loading", REQUESTING: "requesting", READY: "ready", FAILED: "failed", DELETED: "deleted", UNAVAILABLE: "unavailable";
 export {};
 //# sourceMappingURL=DocHandle.d.ts.map
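Together, broadcast() and the new "ephemeral-message" event replace channel-based ephemeral messaging with per-document messaging. A hedged usage sketch; the payload shape and the PresenceDoc type are illustrative, not part of the package:

    import type { DocHandle } from "@automerge/automerge-repo"

    type PresenceDoc = { title?: string } // illustrative document shape

    function wirePresence(handle: DocHandle<PresenceDoc>, user: string) {
      // fire-and-forget: not persisted to the document, no delivery guarantee
      handle.broadcast({ presence: user, at: Date.now() })

      // senderId is a PeerId (one per tab/process), not a stable user identity
      handle.on("ephemeral-message", ({ senderId, message }) => {
        console.log("ephemeral message from", senderId, message)
      })
    }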
package/dist/DocHandle.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"DocHandle.d.ts","sourceRoot":"","sources":["../src/DocHandle.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,CAAC,MAAM,sBAAsB,CAAA;AAEzC,OAAO,YAAY,MAAM,eAAe,CAAA;AACxC,OAAO,EASL,UAAU,EAEX,MAAM,QAAQ,CAAA;AAKf,OAAO,KAAK,EAAE,
+
{"version":3,"file":"DocHandle.d.ts","sourceRoot":"","sources":["../src/DocHandle.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,CAAC,MAAM,sBAAsB,CAAA;AAEzC,OAAO,YAAY,MAAM,eAAe,CAAA;AACxC,OAAO,EASL,UAAU,EAEX,MAAM,QAAQ,CAAA;AAKf,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,EAAE,YAAY,EAAE,MAAM,YAAY,CAAA;AAIlE,iGAAiG;AACjG,qBAAa,SAAS,CAAC,CAAC,CAAE,EAAE;AAC1B,SAAQ,YAAY,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC;;IAY/B,UAAU,EAAE,UAAU;IAL/B,IAAI,GAAG,IAAI,YAAY,CAEtB;gBAGQ,UAAU,EAAE,UAAU,EAC7B,EAAE,KAAa,EAAE,YAAqB,EAAE,GAAE,gBAAqB;IAuLjE;;;;OAIG;IACH,OAAO,gBAA0C;IACjD;;;;;OAKG;IACH,SAAS,gBAA4C;IACrD,aAAa,gBAAgD;IAC7D,OAAO,WAAY,WAAW,EAAE,aACmB;IAEnD,IAAI,KAAK,eAER;IAED;;;;;OAKG;IACG,SAAS,CAAC,WAAW,GAAE,WAAW,EAAY,GAAG,OAAO,CAAC,IAAI,CAAC;IAIpE;;;;;;OAMG;IACG,GAAG,CACP,WAAW,GAAE,WAAW,EAAyB,GAChD,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC;IAchC;;;;;;;;;OASG;IACH,OAAO,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,SAAS;IAQ/B,8EAA8E;IAC9E,MAAM,CAAC,QAAQ,EAAE,CAAC,GAAG,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;IAM5C,2EAA2E;IAC3E,MAAM,CAAC,QAAQ,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,OAAO,GAAE,CAAC,CAAC,aAAa,CAAC,CAAC,CAAM;IAehE,QAAQ,CACN,KAAK,EAAE,CAAC,CAAC,KAAK,EACd,QAAQ,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,EACvB,OAAO,GAAE,CAAC,CAAC,aAAa,CAAC,CAAC,CAAM;IAgBlC,WAAW;IAIX,gFAAgF;IAChF,OAAO;IAIP,kEAAkE;IAClE,MAAM;IAIN;;;;;OAKG;IACH,SAAS,CAAC,OAAO,EAAE,GAAG;CAMvB;AAID,UAAU,gBAAgB;IACxB,KAAK,CAAC,EAAE,OAAO,CAAA;IACf,YAAY,CAAC,EAAE,MAAM,CAAA;CACtB;AAED,MAAM,WAAW,uBAAuB;IACtC,aAAa,EAAE,MAAM,CAAA;IACrB,UAAU,EAAE,UAAU,CAAA;IACtB,IAAI,EAAE,UAAU,CAAA;CACjB;AAED,MAAM,WAAW,6BAA6B,CAAC,CAAC;IAC9C,MAAM,EAAE,SAAS,CAAC,CAAC,CAAC,CAAA;IACpB,GAAG,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;CACd;AAED,MAAM,WAAW,sBAAsB,CAAC,CAAC;IACvC,MAAM,EAAE,SAAS,CAAC,CAAC,CAAC,CAAA;CACrB;AAED,MAAM,WAAW,sBAAsB,CAAC,CAAC;IACvC,MAAM,EAAE,SAAS,CAAC,CAAC,CAAC,CAAA;IACpB,GAAG,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;IACb,OAAO,EAAE,CAAC,CAAC,KAAK,EAAE,CAAA;IAClB,SAAS,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAA;CAC1B;AAED,MAAM,WAAW,gCAAgC;IAC/C,MAAM,EAAE,SAAS,CAAC,GAAG,CAAC,CAAA;IACtB,QAAQ,EAAE,MAAM,CAAA;IAChB,OAAO,EAAE,OAAO,CAAA;CACjB;AAED,MAAM,WAAW,wCAAwC;IACvD,MAAM,EAAE,SAAS,CAAC,GAAG,CAAC,CAAA;IACtB,IAAI,EAAE,UAAU,CAAA;CACjB;AAED,MAAM,WAAW,eAAe,CAAC,CAAC;IAChC,eAAe,EAAE,CAAC,OAAO,EAAE,6BAA6B,CAAC,CAAC,CAAC,KAAK,IAAI,CAAA;IACpE,MAAM,EAAE,CAAC,OAAO,EAAE,sBAAsB,CAAC,CAAC,CAAC,KAAK,IAAI,CAAA;IACpD,MAAM,EAAE,CAAC,OAAO,EAAE,sBAAsB,CAAC,CAAC,CAAC,KAAK,IAAI,CAAA;IACpD,WAAW,EAAE,CAAC,OAAO,EAAE,sBAAsB,CAAC,CAAC,CAAC,KAAK,IAAI,CAAA;IACzD,mBAAmB,EAAE,CAAC,OAAO,EAAE,gCAAgC,KAAK,IAAI,CAAA;IACxE,4BAA4B,EAAE,CAC5B,OAAO,EAAE,wCAAwC,KAC9C,IAAI,CAAA;CACV;AAMD,eAAO,MAAM,WAAW;;;;;;;;CAQd,CAAA;AACV,MAAM,MAAM,WAAW,GAAG,CAAC,OAAO,WAAW,CAAC,CAAC,MAAM,OAAO,WAAW,CAAC,CAAA;AAkBxE,eAAO,MAAM,KAAK;;;;;;;;;CASR,CAAA;AA0CV,eAAO,MACL,IAAI,UACJ,OAAO,aACP,UAAU,gBACV,KAAK,WACL,MAAM,YACN,OAAO,aACP,WAAW,eACE,CAAA"}
package/dist/DocHandle.js
CHANGED
@@ -7,6 +7,7 @@ import { headsAreSame } from "./helpers/headsAreSame.js";
 import { pause } from "./helpers/pause.js";
 import { TimeoutError, withTimeout } from "./helpers/withTimeout.js";
 import { stringifyAutomergeUrl } from "./DocUrl.js";
+import { encode } from "cbor-x";
 /** DocHandle is a wrapper around a single Automerge document that lets us listen for changes. */
 export class DocHandle//
  extends EventEmitter {
@@ -69,6 +70,10 @@ export class DocHandle//
     },
     requesting: {
         on: {
+            MARK_UNAVAILABLE: {
+                target: UNAVAILABLE,
+                actions: "onUnavailable",
+            },
             // UPDATE is called by the Repo when we receive changes from the network
             UPDATE: { actions: "onUpdate" },
             // REQUEST_COMPLETE is called from `onUpdate` when the doc has been fully loaded from the network
@@ -95,6 +100,14 @@ export class DocHandle//
     deleted: {
         type: "final",
     },
+    unavailable: {
+        on: {
+            UPDATE: { actions: "onUpdate" },
+            // REQUEST_COMPLETE is called from `onUpdate` when the doc has been fully loaded from the network
+            REQUEST_COMPLETE: { target: READY },
+            DELETE: { actions: "onDelete", target: DELETED },
+        },
+    },
 },
 }, {
     actions: {
@@ -109,13 +122,20 @@ export class DocHandle//
         this.emit("delete", { handle: this });
         return { doc: undefined };
     }),
+    onUnavailable: assign(context => {
+        const { doc } = context;
+        this.emit("unavailable", { handle: this });
+        return { doc };
+    }),
     },
 }))
     .onTransition(({ value: state, history, context }, event) => {
     const oldDoc = history?.context?.doc;
     const newDoc = context.doc;
-
-    const docChanged = newDoc &&
+    this.#log(`${history?.value}: ${event.type} → ${state}`, newDoc);
+    const docChanged = newDoc &&
+        oldDoc &&
+        !headsAreSame(A.getHeads(newDoc), A.getHeads(oldDoc));
     if (docChanged) {
         this.emit("heads-changed", { handle: this, doc: newDoc });
         const patches = A.diff(newDoc, A.getHeads(oldDoc), A.getHeads(newDoc));
@@ -167,6 +187,7 @@ export class DocHandle//
      * @returns true if the document has been marked as deleted
      */
     isDeleted = () => this.inState([HandleState.DELETED]);
+    isUnavailable = () => this.inState([HandleState.UNAVAILABLE]);
     inState = (states) => states.some(this.#machine?.getSnapshot().matches);
     get state() {
         return this.#machine?.getSnapshot().value;
@@ -187,7 +208,7 @@ export class DocHandle//
      *
      * @param {awaitStates=[READY]} optional states to wait for, such as "LOADING". mostly for internal use.
      */
-    async doc(awaitStates = [READY]) {
+    async doc(awaitStates = [READY, UNAVAILABLE]) {
         await pause(); // yield one tick because reasons
         try {
             // wait for the document to enter one of the desired states
@@ -200,7 +221,7 @@ export class DocHandle//
             throw error;
         }
         // Return the document
-        return this.#doc;
+        return !this.isUnavailable() ? this.#doc : undefined;
     }
     /**
      * Returns the current state of the Automerge document this handle manages, or undefined.
@@ -249,6 +270,9 @@ export class DocHandle//
         },
     });
     }
+    unavailable() {
+        this.#machine.send(MARK_UNAVAILABLE);
+    }
     /** `request` is called by the repo when the document is not found in storage */
     request() {
         if (this.#state === LOADING)
@@ -258,6 +282,18 @@ export class DocHandle//
     delete() {
         this.#machine.send(DELETE);
     }
+    /** `broadcast` sends an arbitrary ephemeral message out to all reachable peers who would receive sync messages from you
+     * it has no guarantee of delivery, and is not persisted to the underlying automerge doc in any way.
+     * messages will have a sending PeerId but this is *not* a useful user identifier.
+     * a user could have multiple tabs open and would appear as multiple PeerIds.
+     * every message source must have a unique PeerId.
+     */
+    broadcast(message) {
+        this.emit("ephemeral-message-outbound", {
+            handle: this,
+            data: encode(message),
+        });
+    }
 }
 // STATE MACHINE TYPES
 // state
@@ -268,6 +304,7 @@ export const HandleState = {
     READY: "ready",
     FAILED: "failed",
     DELETED: "deleted",
+    UNAVAILABLE: "unavailable",
 };
 // events
 export const Event = {
@@ -278,7 +315,8 @@ export const Event = {
     UPDATE: "UPDATE",
     TIMEOUT: "TIMEOUT",
     DELETE: "DELETE",
+    MARK_UNAVAILABLE: "MARK_UNAVAILABLE",
 };
 // CONSTANTS
-export const { IDLE, LOADING, REQUESTING, READY, FAILED, DELETED } = HandleState;
-const { CREATE, FIND, REQUEST, UPDATE, TIMEOUT, DELETE, REQUEST_COMPLETE } = Event;
+export const { IDLE, LOADING, REQUESTING, READY, FAILED, DELETED, UNAVAILABLE, } = HandleState;
+const { CREATE, FIND, REQUEST, UPDATE, TIMEOUT, DELETE, REQUEST_COMPLETE, MARK_UNAVAILABLE, } = Event;
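Because doc() now waits for either READY or UNAVAILABLE and resolves to undefined in the unavailable case, callers should handle the undefined branch explicitly. A minimal sketch, with an illustrative document shape:

    import type { DocHandle } from "@automerge/automerge-repo"

    async function readTitle(handle: DocHandle<{ title?: string }>) {
      // resolves with the document once READY, or with undefined once UNAVAILABLE
      const doc = await handle.doc()
      if (doc === undefined) {
        // nothing in storage and no reachable peer could supply the document
        return undefined
      }
      return doc.title
    }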
package/dist/DocUrl.d.ts
CHANGED
@@ -4,11 +4,11 @@ export declare const urlPrefix = "automerge:";
  * given an Automerge URL, return a decoded DocumentId (and the encoded DocumentId)
  *
  * @param url
- * @returns {
+ * @returns { binaryDocumentId: BinaryDocumentId, documentId: DocumentId }
  */
 export declare const parseAutomergeUrl: (url: AutomergeUrl) => {
     binaryDocumentId: BinaryDocumentId;
-
+    documentId: DocumentId;
 };
 interface StringifyAutomergeUrlOptions {
     documentId: DocumentId | BinaryDocumentId;
@@ -17,7 +17,7 @@ interface StringifyAutomergeUrlOptions {
  * Given a documentId in either canonical form, return an Automerge URL
  * Throws on invalid input.
  * Note: this is an object because we anticipate adding fields in the future.
- * @param { documentId:
+ * @param { documentId: BinaryDocumentId | DocumentId }
  * @returns AutomergeUrl
  */
 export declare const stringifyAutomergeUrl: ({ documentId, }: StringifyAutomergeUrlOptions) => AutomergeUrl;
package/dist/DocUrl.js
CHANGED
@@ -5,19 +5,19 @@ export const urlPrefix = "automerge:";
  * given an Automerge URL, return a decoded DocumentId (and the encoded DocumentId)
  *
  * @param url
- * @returns {
+ * @returns { binaryDocumentId: BinaryDocumentId, documentId: DocumentId }
  */
 export const parseAutomergeUrl = (url) => {
-    const { binaryDocumentId
+    const { binaryDocumentId, documentId } = parts(url);
     if (!binaryDocumentId)
         throw new Error("Invalid document URL: " + url);
-    return { binaryDocumentId,
+    return { binaryDocumentId, documentId };
 };
 /**
  * Given a documentId in either canonical form, return an Automerge URL
  * Throws on invalid input.
  * Note: this is an object because we anticipate adding fields in the future.
- * @param { documentId:
+ * @param { documentId: BinaryDocumentId | DocumentId }
  * @returns AutomergeUrl
  */
 export const stringifyAutomergeUrl = ({ documentId, }) => {
@@ -56,12 +56,12 @@ export const binaryToDocumentId = (docId) => bs58check.encode(docId);
  * eventually this could include things like heads, so we use this structure
  * we return both a binary & string-encoded version of the document ID
  * @param str
- * @returns { binaryDocumentId,
+ * @returns { binaryDocumentId, documentId }
  */
 const parts = (str) => {
     const regex = new RegExp(`^${urlPrefix}(\\w+)$`);
-    const [
-    const
-    const binaryDocumentId = documentIdToBinary(
-    return { binaryDocumentId,
+    const [_, docMatch] = str.match(regex) || [];
+    const documentId = docMatch;
+    const binaryDocumentId = documentIdToBinary(documentId);
+    return { binaryDocumentId, documentId };
 };
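The URL helpers now round-trip the string documentId alongside its binary form. A sketch of the round trip, assuming parseAutomergeUrl and stringifyAutomergeUrl are re-exported from the package index (in this build they are defined in DocUrl.js):

    import { parseAutomergeUrl, stringifyAutomergeUrl } from "@automerge/automerge-repo" // assumed re-export
    import type { AutomergeUrl } from "@automerge/automerge-repo"

    // e.g. a value taken from handle.url
    declare const url: AutomergeUrl

    const { documentId, binaryDocumentId } = parseAutomergeUrl(url)

    // either representation is accepted when building the URL back up
    const roundTripped = stringifyAutomergeUrl({ documentId: binaryDocumentId })
    console.log(roundTripped === url, documentId)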
package/dist/EphemeralData.d.ts
CHANGED
@@ -1,27 +1,19 @@
-import
-import {
-
-
-
- * heartbeats, etc. — that is useful in the moment but not worth persisting.
- */
-export declare class EphemeralData extends EventEmitter<EphemeralDataMessageEvents> {
-    /** Broadcast an ephemeral message */
-    broadcast(channelId: ChannelId, message: unknown): void;
-    /** Receive an ephemeral message */
-    receive(senderId: PeerId, grossChannelId: ChannelId, message: Uint8Array): void;
-}
+import { DocumentId, PeerId } from "./index.js";
+import { EphemeralMessageContents } from "./network/messages.js";
+export type SessionId = string & {
+    __SessionId: false;
+};
 export interface EphemeralDataPayload {
-
+    documentId: DocumentId;
     peerId: PeerId;
     data: {
         peerId: PeerId;
-
+        documentId: DocumentId;
         data: unknown;
     };
 }
 export type EphemeralDataMessageEvents = {
-    message: (event:
+    message: (event: EphemeralMessageContents) => void;
     data: (event: EphemeralDataPayload) => void;
 };
 //# sourceMappingURL=EphemeralData.d.ts.map
package/dist/EphemeralData.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"EphemeralData.d.ts","sourceRoot":"","sources":["../src/EphemeralData.ts"],"names":[],"mappings":"
+
{"version":3,"file":"EphemeralData.d.ts","sourceRoot":"","sources":["../src/EphemeralData.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,YAAY,CAAA;AAC/C,OAAO,EAAE,wBAAwB,EAAE,MAAM,uBAAuB,CAAA;AAGhE,MAAM,MAAM,SAAS,GAAG,MAAM,GAAG;IAAE,WAAW,EAAE,KAAK,CAAA;CAAE,CAAA;AAEvD,MAAM,WAAW,oBAAoB;IACnC,UAAU,EAAE,UAAU,CAAA;IACtB,MAAM,EAAE,MAAM,CAAA;IACd,IAAI,EAAE;QAAE,MAAM,EAAE,MAAM,CAAC;QAAC,UAAU,EAAE,UAAU,CAAC;QAAC,IAAI,EAAE,OAAO,CAAA;KAAE,CAAA;CAChE;AAED,MAAM,MAAM,0BAA0B,GAAG;IACvC,OAAO,EAAE,CAAC,KAAK,EAAE,wBAAwB,KAAK,IAAI,CAAA;IAClD,IAAI,EAAE,CAAC,KAAK,EAAE,oBAAoB,KAAK,IAAI,CAAA;CAC5C,CAAA"}
package/dist/EphemeralData.js
CHANGED
@@ -1,28 +1 @@
-
-import EventEmitter from "eventemitter3";
-/**
- * EphemeralData provides a mechanism to broadcast short-lived data — cursor positions, presence,
- * heartbeats, etc. — that is useful in the moment but not worth persisting.
- */
-export class EphemeralData extends EventEmitter {
-    /** Broadcast an ephemeral message */
-    broadcast(channelId, message) {
-        const messageBytes = encode(message);
-        this.emit("message", {
-            targetId: "*",
-            channelId: ("m/" + channelId),
-            message: messageBytes,
-            broadcast: true,
-        });
-    }
-    /** Receive an ephemeral message */
-    receive(senderId, grossChannelId, message) {
-        const data = decode(message);
-        const channelId = grossChannelId.slice(2);
-        this.emit("data", {
-            peerId: senderId,
-            channelId,
-            data,
-        });
-    }
-}
+export {};
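With the channel-based EphemeralData subsystem reduced to an empty module, ephemeral traffic moves to document handles. A hedged before/after sketch; the cursor payload is illustrative:

    import type { DocHandle } from "@automerge/automerge-repo"

    declare const handle: DocHandle<unknown>

    // before (1.0.0-alpha.2): channel-based, via the repo
    //   repo.ephemeralData.broadcast(channelId, { cursor: 5 })
    //   repo.ephemeralData.on("data", ({ peerId, data }) => { /* ... */ })

    // after (1.0.0-alpha.3): per-document, via the handle
    handle.broadcast({ cursor: 5 })
    handle.on("ephemeral-message", ({ senderId, message }) => {
      console.log(senderId, message)
    })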
package/dist/Repo.d.ts
CHANGED
@@ -1,5 +1,4 @@
 import { DocCollection } from "./DocCollection.js";
-import { EphemeralData } from "./EphemeralData.js";
 import { NetworkAdapter } from "./network/NetworkAdapter.js";
 import { NetworkSubsystem } from "./network/NetworkSubsystem.js";
 import { StorageAdapter } from "./storage/StorageAdapter.js";
@@ -10,7 +9,6 @@ export declare class Repo extends DocCollection {
     #private;
     networkSubsystem: NetworkSubsystem;
     storageSubsystem?: StorageSubsystem;
-    ephemeralData: EphemeralData;
     constructor({ storage, network, peerId, sharePolicy }: RepoConfig);
 }
 export interface RepoConfig {
package/dist/Repo.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"Repo.d.ts","sourceRoot":"","sources":["../src/Repo.ts"],"names":[],"mappings":"
+
{"version":3,"file":"Repo.d.ts","sourceRoot":"","sources":["../src/Repo.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,aAAa,EAAE,MAAM,oBAAoB,CAAA;AAClD,OAAO,EAAE,cAAc,EAAE,MAAM,6BAA6B,CAAA;AAC5D,OAAO,EAAE,gBAAgB,EAAE,MAAM,+BAA+B,CAAA;AAChE,OAAO,EAAE,cAAc,EAAE,MAAM,6BAA6B,CAAA;AAC5D,OAAO,EAAE,gBAAgB,EAAE,MAAM,+BAA+B,CAAA;AAEhE,OAAO,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,YAAY,CAAA;AAE/C,oFAAoF;AACpF,qBAAa,IAAK,SAAQ,aAAa;;IAGrC,gBAAgB,EAAE,gBAAgB,CAAA;IAClC,gBAAgB,CAAC,EAAE,gBAAgB,CAAA;gBAEvB,EAAE,OAAO,EAAE,OAAO,EAAE,MAAM,EAAE,WAAW,EAAE,EAAE,UAAU;CAiFlE;AAED,MAAM,WAAW,UAAU;IACzB,4BAA4B;IAC5B,MAAM,CAAC,EAAE,MAAM,CAAA;IAEf,gDAAgD;IAChD,OAAO,CAAC,EAAE,cAAc,CAAA;IAExB,oDAAoD;IACpD,OAAO,EAAE,cAAc,EAAE,CAAA;IAEzB;;;OAGG;IACH,WAAW,CAAC,EAAE,WAAW,CAAA;CAC1B;AAED,MAAM,MAAM,WAAW,GAAG,CACxB,MAAM,EAAE,MAAM,EACd,UAAU,CAAC,EAAE,UAAU,KACpB,OAAO,CAAC,OAAO,CAAC,CAAA"}
package/dist/Repo.js
CHANGED
@@ -1,15 +1,13 @@
+import debug from "debug";
 import { DocCollection } from "./DocCollection.js";
-import { EphemeralData } from "./EphemeralData.js";
 import { NetworkSubsystem } from "./network/NetworkSubsystem.js";
 import { StorageSubsystem } from "./storage/StorageSubsystem.js";
 import { CollectionSynchronizer } from "./synchronizer/CollectionSynchronizer.js";
-import debug from "debug";
 /** A Repo is a DocCollection with networking, syncing, and storage capabilities. */
 export class Repo extends DocCollection {
     #log;
     networkSubsystem;
     storageSubsystem;
-    ephemeralData;
     constructor({ storage, network, peerId, sharePolicy }) {
         super();
         this.#log = debug(`automerge-repo:repo`);
@@ -29,24 +27,30 @@ export class Repo extends DocCollection {
                 handle.update(() => loadedDoc);
             }
         }
+        handle.on("unavailable", () => {
+            this.#log("document unavailable", { documentId: handle.documentId });
+            this.emit("unavailable-document", {
+                documentId: handle.documentId,
+            });
+        });
         handle.request();
         // Register the document with the synchronizer. This advertises our interest in the document.
         synchronizer.addDocument(handle.documentId);
     });
-    this.on("delete-document", ({
+    this.on("delete-document", ({ documentId }) => {
         // TODO Pass the delete on to the network
         // synchronizer.removeDocument(documentId)
         if (storageSubsystem) {
-            storageSubsystem.remove(
+            storageSubsystem.remove(documentId);
         }
     });
     // SYNCHRONIZER
     // The synchronizer uses the network subsystem to keep documents in sync with peers.
     const synchronizer = new CollectionSynchronizer(this);
     // When the synchronizer emits sync messages, send them to peers
-    synchronizer.on("message",
-        this.#log(`sending sync message to ${targetId}`);
-        networkSubsystem.
+    synchronizer.on("message", message => {
+        this.#log(`sending sync message to ${message.targetId}`);
+        networkSubsystem.send(message);
     });
     // STORAGE
     // The storage subsystem has access to some form of persistence, and deals with save and loading documents.
@@ -67,31 +71,7 @@ export class Repo extends DocCollection {
     });
     // Handle incoming messages
     networkSubsystem.on("message", async (msg) => {
-
-        // TODO: this demands a more principled way of associating channels with recipients
-        // Ephemeral channel ids start with "m/"
-        if (channelId.startsWith("m/")) {
-            // Ephemeral message
-            this.#log(`receiving ephemeral message from ${senderId}`);
-            ephemeralData.receive(senderId, channelId, message);
-        }
-        else {
-            // Sync message
-            this.#log(`receiving sync message from ${senderId}`);
-            await synchronizer.receiveSyncMessage(senderId, channelId, message);
-        }
-    });
-    // We establish a special channel for sync messages
-    networkSubsystem.join();
-    // EPHEMERAL DATA
-    // The ephemeral data subsystem uses the network to send and receive messages that are not
-    // persisted to storage, e.g. cursor position, presence, etc.
-    const ephemeralData = new EphemeralData();
-    this.ephemeralData = ephemeralData;
-    // Send ephemeral messages to peers
-    ephemeralData.on("message", ({ targetId, channelId, message, broadcast }) => {
-        this.#log(`sending ephemeral message to ${targetId}`);
-        networkSubsystem.sendMessage(targetId, channelId, message, broadcast);
+        await synchronizer.receiveMessage(msg);
     });
     }
 }
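The Repo now surfaces handle-level unavailability through the "unavailable-document" event declared on DocCollection above. A minimal subscription sketch:

    import { Repo } from "@automerge/automerge-repo"

    const repo = new Repo({ network: [] })

    repo.on("unavailable-document", ({ documentId }) => {
      // fired when a requested document is in neither storage nor obtainable from any peer
      console.warn("document unavailable:", documentId)
    })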
package/dist/helpers/tests/network-adapter-tests.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"network-adapter-tests.d.ts","sourceRoot":"","sources":["../../../src/helpers/tests/network-adapter-tests.ts"],"names":[],"mappings":"AAAA,OAAO,EAAgB,KAAK,cAAc,
+
{"version":3,"file":"network-adapter-tests.d.ts","sourceRoot":"","sources":["../../../src/helpers/tests/network-adapter-tests.ts"],"names":[],"mappings":"AAAA,OAAO,EAAgB,KAAK,cAAc,EAAc,MAAM,gBAAgB,CAAA;AAM9E;;;;;;;;;;;GAWG;AACH,wBAAgB,eAAe,CAAC,MAAM,EAAE,OAAO,EAAE,KAAK,CAAC,EAAE,MAAM,GAAG,IAAI,CA8HrE;AAID,KAAK,OAAO,GAAG,cAAc,GAAG,cAAc,EAAE,CAAA;AAEhD,MAAM,MAAM,OAAO,GAAG,MAAM,OAAO,CAAC;IAClC,QAAQ,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,OAAO,CAAC,CAAA;IACrC,QAAQ,CAAC,EAAE,MAAM,IAAI,CAAA;CACtB,CAAC,CAAA"}
package/dist/helpers/tests/network-adapter-tests.js
CHANGED
@@ -2,6 +2,7 @@ import { Repo } from "../../index.js";
 import { eventPromise, eventPromises } from "../eventPromise.js";
 import { assert } from "chai";
 import { describe, it } from "mocha";
+import { pause } from "../pause.js";
 /**
  * Runs a series of tests against a set of three peers, each represented by one or more instantiated
  * network adapters.
@@ -38,14 +39,14 @@ export function runAdapterTests(_setup, title) {
     });
     // Bob receives the change
     await eventPromise(bobHandle, "change");
-    assert.equal((await bobHandle.doc())
+    assert.equal((await bobHandle.doc())?.foo, "bar");
     // Bob changes the document
     bobHandle.change(d => {
         d.foo = "baz";
     });
     // Alice receives the change
     await eventPromise(aliceHandle, "change");
-    assert.equal((await aliceHandle.doc())
+    assert.equal((await aliceHandle.doc())?.foo, "baz");
 };
 // Run the test in both directions, in case they're different types of adapters
 {
@@ -80,32 +81,33 @@ export function runAdapterTests(_setup, title) {
     });
     // Bob and Charlie receive the change
     await eventPromises([bobHandle, charlieHandle], "change");
-    assert.equal((await bobHandle.doc())
-    assert.equal((await charlieHandle.doc())
+    assert.equal((await bobHandle.doc())?.foo, "bar");
+    assert.equal((await charlieHandle.doc())?.foo, "bar");
     // Charlie changes the document
     charlieHandle.change(d => {
         d.foo = "baz";
     });
     // Alice and Bob receive the change
     await eventPromises([aliceHandle, bobHandle], "change");
-    assert.equal((await bobHandle.doc())
-    assert.equal((await charlieHandle.doc())
+    assert.equal((await bobHandle.doc())?.foo, "baz");
+    assert.equal((await charlieHandle.doc())?.foo, "baz");
     teardown();
 });
-
-// because the network has cycles (see #92)
-it.skip("can broadcast a message", async () => {
+it("can broadcast a message", async () => {
     const { adapters, teardown } = await setup();
     const [a, b, c] = adapters;
     const aliceRepo = new Repo({ network: a, peerId: alice });
     const bobRepo = new Repo({ network: b, peerId: bob });
     const charlieRepo = new Repo({ network: c, peerId: charlie });
     await eventPromises([aliceRepo, bobRepo, charlieRepo].map(r => r.networkSubsystem), "peer");
-    const
+    const aliceHandle = aliceRepo.create();
+    const charlieHandle = charlieRepo.find(aliceHandle.url);
+    // pause to give charlie a chance to let alice know it wants the doc
+    await pause(100);
     const alicePresenceData = { presence: "alice" };
-
-    const {
-    assert.deepStrictEqual(
+    aliceHandle.broadcast(alicePresenceData);
+    const { message } = await eventPromise(charlieHandle, "ephemeral-message");
+    assert.deepStrictEqual(message, alicePresenceData);
     teardown();
 });
 });