@automerge/automerge-repo 1.0.0-alpha.2 → 1.0.0-alpha.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/DocCollection.d.ts +2 -1
- package/dist/DocCollection.d.ts.map +1 -1
- package/dist/DocCollection.js +17 -8
- package/dist/DocHandle.d.ts +27 -4
- package/dist/DocHandle.d.ts.map +1 -1
- package/dist/DocHandle.js +44 -6
- package/dist/DocUrl.d.ts +3 -3
- package/dist/DocUrl.js +9 -9
- package/dist/EphemeralData.d.ts +8 -16
- package/dist/EphemeralData.d.ts.map +1 -1
- package/dist/EphemeralData.js +1 -28
- package/dist/Repo.d.ts +0 -2
- package/dist/Repo.d.ts.map +1 -1
- package/dist/Repo.js +13 -33
- package/dist/helpers/tests/network-adapter-tests.d.ts.map +1 -1
- package/dist/helpers/tests/network-adapter-tests.js +15 -13
- package/dist/index.d.ts +2 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/network/NetworkAdapter.d.ts +4 -13
- package/dist/network/NetworkAdapter.d.ts.map +1 -1
- package/dist/network/NetworkSubsystem.d.ts +5 -4
- package/dist/network/NetworkSubsystem.d.ts.map +1 -1
- package/dist/network/NetworkSubsystem.js +39 -25
- package/dist/network/messages.d.ts +57 -0
- package/dist/network/messages.d.ts.map +1 -0
- package/dist/network/messages.js +21 -0
- package/dist/synchronizer/CollectionSynchronizer.d.ts +3 -2
- package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
- package/dist/synchronizer/CollectionSynchronizer.js +19 -13
- package/dist/synchronizer/DocSynchronizer.d.ts +9 -3
- package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -1
- package/dist/synchronizer/DocSynchronizer.js +145 -29
- package/dist/synchronizer/Synchronizer.d.ts +3 -4
- package/dist/synchronizer/Synchronizer.d.ts.map +1 -1
- package/dist/types.d.ts +1 -3
- package/dist/types.d.ts.map +1 -1
- package/fuzz/fuzz.ts +4 -4
- package/package.json +2 -2
- package/src/DocCollection.ts +19 -9
- package/src/DocHandle.ts +87 -10
- package/src/DocUrl.ts +9 -9
- package/src/EphemeralData.ts +6 -36
- package/src/Repo.ts +15 -49
- package/src/helpers/tests/network-adapter-tests.ts +18 -14
- package/src/index.ts +12 -2
- package/src/network/NetworkAdapter.ts +4 -20
- package/src/network/NetworkSubsystem.ts +61 -38
- package/src/network/messages.ts +123 -0
- package/src/synchronizer/CollectionSynchronizer.ts +38 -19
- package/src/synchronizer/DocSynchronizer.ts +196 -38
- package/src/synchronizer/Synchronizer.ts +3 -8
- package/src/types.ts +4 -1
- package/test/CollectionSynchronizer.test.ts +6 -7
- package/test/DocHandle.test.ts +28 -13
- package/test/DocSynchronizer.test.ts +85 -9
- package/test/Repo.test.ts +221 -59
- package/test/StorageSubsystem.test.ts +2 -2
- package/test/helpers/DummyNetworkAdapter.ts +1 -1
- package/tsconfig.json +2 -1
- package/test/EphemeralData.test.ts +0 -44
package/dist/synchronizer/DocSynchronizer.js
CHANGED
@@ -1,7 +1,9 @@
 import * as A from "@automerge/automerge";
-import { READY, REQUESTING } from "../DocHandle.js";
+import { READY, REQUESTING, UNAVAILABLE, } from "../DocHandle.js";
 import { Synchronizer } from "./Synchronizer.js";
 import debug from "debug";
+import { isRequestMessage, } from "../network/messages.js";
+import { decode } from "cbor-x";
 /**
  * DocSynchronizer takes a handle to an Automerge document, and receives & dispatches sync messages
  * to bring it inline with all other peers' versions.
@@ -13,9 +15,11 @@ export class DocSynchronizer extends Synchronizer {
     #opsLog;
     /** Active peers */
     #peers = [];
+    #peerDocumentStatuses = {};
     /** Sync state for each peer we've communicated with (including inactive peers) */
     #syncStates = {};
     #pendingSyncMessages = [];
+    #syncStarted = false;
     constructor(handle) {
         super();
         this.handle = handle;
@@ -24,12 +28,16 @@ export class DocSynchronizer extends Synchronizer {
         this.#log = debug(`automerge-repo:docsync:${docId}`);
         this.#opsLog = debug(`automerge-repo:ops:docsync:${docId}`); // Log list of ops of each message
         handle.on("change", () => this.#syncWithPeers());
+        handle.on("ephemeral-message-outbound", payload => this.#broadcastToPeers(payload));
         // Process pending sync messages immediately after the handle becomes ready.
         void (async () => {
             await handle.doc([READY, REQUESTING]);
             this.#processAllPendingSyncMessages();
         })();
     }
+    get peerStates() {
+        return this.#peerDocumentStatuses;
+    }
     get documentId() {
         return this.handle.documentId;
     }
@@ -37,13 +45,32 @@ export class DocSynchronizer extends Synchronizer {
     async #syncWithPeers() {
         this.#log(`syncWithPeers`);
         const doc = await this.handle.doc();
+        if (doc === undefined)
+            return;
         this.#peers.forEach(peerId => this.#sendSyncMessage(peerId, doc));
     }
+    async #broadcastToPeers({ data }) {
+        this.#log(`broadcastToPeers`, this.#peers);
+        this.#peers.forEach(peerId => this.#sendEphemeralMessage(peerId, data));
+    }
+    #sendEphemeralMessage(peerId, data) {
+        this.#log(`sendEphemeralMessage ->${peerId}`);
+        this.emit("message", {
+            type: "ephemeral",
+            targetId: peerId,
+            documentId: this.handle.documentId,
+            data,
+        });
+    }
     #getSyncState(peerId) {
         if (!this.#peers.includes(peerId)) {
             this.#log("adding a new peer", peerId);
             this.#peers.push(peerId);
         }
+        // when a peer is added, we don't know if it has the document or not
+        if (!(peerId in this.#peerDocumentStatuses)) {
+            this.#peerDocumentStatuses[peerId] = "unknown";
+        }
         return this.#syncStates[peerId] ?? A.initSyncState();
     }
     #setSyncState(peerId, syncState) {
@@ -59,16 +86,32 @@ export class DocSynchronizer extends Synchronizer {
         this.#setSyncState(peerId, newSyncState);
         if (message) {
             this.#logMessage(`sendSyncMessage 🡒 ${peerId}`, message);
-            const
-            this.
-
-
-
-
-
-
-
-
+            const decoded = A.decodeSyncMessage(message);
+            if (!this.handle.isReady() &&
+                decoded.heads.length === 0 &&
+                newSyncState.sharedHeads.length === 0 &&
+                !Object.values(this.#peerDocumentStatuses).includes("has") &&
+                this.#peerDocumentStatuses[peerId] === "unknown") {
+                // we don't have the document (or access to it), so we request it
+                this.emit("message", {
+                    type: "request",
+                    targetId: peerId,
+                    documentId: this.handle.documentId,
+                    data: message,
+                });
+            }
+            else {
+                this.emit("message", {
+                    type: "sync",
+                    targetId: peerId,
+                    data: message,
+                    documentId: this.handle.documentId,
+                });
+            }
+            // if we have sent heads, then the peer now has or will have the document
+            if (decoded.heads.length > 0) {
+                this.#peerDocumentStatuses[peerId] = "has";
+            }
         }
     }
     #logMessage = (label, message) => {
@@ -89,48 +132,121 @@ export class DocSynchronizer extends Synchronizer {
     hasPeer(peerId) {
         return this.#peers.includes(peerId);
     }
-    beginSync(
-        this.#log(`beginSync: ${
+    beginSync(peerIds) {
+        this.#log(`beginSync: ${peerIds.join(", ")}`);
         // At this point if we don't have anything in our storage, we need to use an empty doc to sync
         // with; but we don't want to surface that state to the front end
-        void this.handle.doc([READY, REQUESTING]).then(doc => {
+        void this.handle.doc([READY, REQUESTING, UNAVAILABLE]).then(doc => {
+            // if we don't have any peers, then we can say the document is unavailable
            // HACK: if we have a sync state already, we round-trip it through the encoding system to make
            // sure state is preserved. This prevents an infinite loop caused by failed attempts to send
            // messages during disconnection.
            // TODO: cover that case with a test and remove this hack
-
-
-
-
+            peerIds.forEach(peerId => {
+                const syncStateRaw = this.#getSyncState(peerId);
+                const syncState = A.decodeSyncState(A.encodeSyncState(syncStateRaw));
+                this.#setSyncState(peerId, syncState);
+            });
+            // we register out peers first, then say that sync has started
+            this.#syncStarted = true;
+            this.#checkDocUnavailable();
+            if (doc === undefined)
+                return;
+            peerIds.forEach(peerId => {
+                this.#sendSyncMessage(peerId, doc);
+            });
         });
     }
     endSync(peerId) {
         this.#log(`removing peer ${peerId}`);
         this.#peers = this.#peers.filter(p => p !== peerId);
     }
-
-
+    receiveMessage(message) {
+        switch (message.type) {
+            case "sync":
+            case "request":
+                this.receiveSyncMessage(message);
+                break;
+            case "ephemeral":
+                this.receiveEphemeralMessage(message);
+                break;
+            case "doc-unavailable":
+                this.#peerDocumentStatuses[message.senderId] = "unavailable";
+                this.#checkDocUnavailable();
+                break;
+            default:
+                throw new Error(`unknown message type: ${message}`);
+        }
+    }
+    receiveEphemeralMessage(message) {
+        if (message.documentId !== this.handle.documentId)
+            throw new Error(`channelId doesn't match documentId`);
+        const { senderId, data } = message;
+        const contents = decode(data);
+        this.handle.emit("ephemeral-message", {
+            handle: this.handle,
+            senderId,
+            message: contents,
+        });
+        this.#peers.forEach(peerId => {
+            if (peerId === senderId)
+                return;
+            this.emit("message", {
+                ...message,
+                targetId: peerId,
+            });
+        });
+    }
+    receiveSyncMessage(message) {
+        if (message.documentId !== this.handle.documentId)
             throw new Error(`channelId doesn't match documentId`);
         // We need to block receiving the syncMessages until we've checked local storage
-        if (!this.handle.inState([READY, REQUESTING])) {
-            this.#pendingSyncMessages.push(
+        if (!this.handle.inState([READY, REQUESTING, UNAVAILABLE])) {
+            this.#pendingSyncMessages.push(message);
             return;
         }
         this.#processAllPendingSyncMessages();
-        this.#processSyncMessage(
+        this.#processSyncMessage(message);
     }
-    #processSyncMessage(
+    #processSyncMessage(message) {
+        if (isRequestMessage(message)) {
+            this.#peerDocumentStatuses[message.senderId] = "wants";
+        }
+        this.#checkDocUnavailable();
+        // if the message has heads, then the peer has the document
+        if (A.decodeSyncMessage(message.data).heads.length > 0) {
+            this.#peerDocumentStatuses[message.senderId] = "has";
+        }
        this.handle.update(doc => {
-            const [newDoc, newSyncState] = A.receiveSyncMessage(doc, this.#getSyncState(
-            this.#setSyncState(
+            const [newDoc, newSyncState] = A.receiveSyncMessage(doc, this.#getSyncState(message.senderId), message.data);
+            this.#setSyncState(message.senderId, newSyncState);
            // respond to just this peer (as required)
-            this.#sendSyncMessage(
+            this.#sendSyncMessage(message.senderId, doc);
             return newDoc;
         });
+        this.#checkDocUnavailable();
+    }
+    #checkDocUnavailable() {
+        // if we know none of the peers have the document, tell all our peers that we don't either
+        if (this.#syncStarted &&
+            this.handle.inState([REQUESTING]) &&
+            this.#peers.every(peerId => this.#peerDocumentStatuses[peerId] === "unavailable" ||
+                this.#peerDocumentStatuses[peerId] === "wants")) {
+            this.#peers
+                .filter(peerId => this.#peerDocumentStatuses[peerId] === "wants")
+                .forEach(peerId => {
+                this.emit("message", {
+                    type: "doc-unavailable",
+                    documentId: this.handle.documentId,
+                    targetId: peerId,
+                });
+            });
+            this.handle.unavailable();
+        }
    }
    #processAllPendingSyncMessages() {
-        for (const
-        this.#processSyncMessage(
+        for (const message of this.#pendingSyncMessages) {
+            this.#processSyncMessage(message);
        }
        this.#pendingSyncMessages = [];
    }
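Note: the `#peerDocumentStatuses` map added above is what drives the new request/doc-unavailable protocol. A peer starts out "unknown", becomes "wants" when it sends us a request, "has" once heads show up in either direction, and "unavailable" when it reports doc-unavailable. A minimal TypeScript sketch of that bookkeeping, with illustrative names rather than package exports:

type PeerDocumentStatus = "unknown" | "has" | "unavailable" | "wants"

const statuses: Record<string, PeerDocumentStatus> = {}

// mirrors #getSyncState: a newly seen peer might still have the document
const addPeer = (peerId: string) => {
  if (!(peerId in statuses)) statuses[peerId] = "unknown"
}

// mirrors the predicate in #checkDocUnavailable: only give up once every
// known peer has either asked us for the doc or said it can't supply it
const allPeersLackDocument = (peers: string[]) =>
  peers.every(p => statuses[p] === "unavailable" || statuses[p] === "wants")

addPeer("peer-a")
addPeer("peer-b")
statuses["peer-a"] = "wants"
statuses["peer-b"] = "unavailable"
console.log(allPeersLackDocument(["peer-a", "peer-b"])) // true: report doc-unavailable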
package/dist/synchronizer/Synchronizer.d.ts
CHANGED
@@ -1,10 +1,9 @@
 import EventEmitter from "eventemitter3";
-import {
-import { MessagePayload } from "../network/NetworkAdapter.js";
+import { Message, MessageContents } from "../network/messages.js";
 export declare abstract class Synchronizer extends EventEmitter<SynchronizerEvents> {
-    abstract
+    abstract receiveMessage(message: Message): void;
 }
 export interface SynchronizerEvents {
-    message: (arg:
+    message: (arg: MessageContents) => void;
 }
 //# sourceMappingURL=Synchronizer.d.ts.map
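The slimmed-down `Synchronizer` contract is now: implement `receiveMessage` for inbound messages (which arrive stamped with a `senderId`), and emit "message" with a `MessageContents` payload, which presumably omits `senderId` because the sender doesn't know its own id yet. A subclass sketch under those assumptions, importing from the package sources as the diff does:

import { Synchronizer } from "./synchronizer/Synchronizer.js"
import type { Message } from "./network/messages.js"

class LoggingSynchronizer extends Synchronizer {
  receiveMessage(message: Message): void {
    // inbound messages arrive fully addressed
    console.log(`received ${message.type} from ${message.senderId}`)
  }
}

const sync = new LoggingSynchronizer()
// outbound payloads carry a targetId; the network subsystem fills in senderId
sync.on("message", msg => console.log(`sending ${msg.type} to ${msg.targetId}`))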
package/dist/synchronizer/Synchronizer.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"Synchronizer.d.ts","sourceRoot":"","sources":["../../src/synchronizer/Synchronizer.ts"],"names":[],"mappings":"AAAA,OAAO,YAAY,MAAM,eAAe,CAAA;AACxC,OAAO,EAAE,
+{"version":3,"file":"Synchronizer.d.ts","sourceRoot":"","sources":["../../src/synchronizer/Synchronizer.ts"],"names":[],"mappings":"AAAA,OAAO,YAAY,MAAM,eAAe,CAAA;AACxC,OAAO,EAAE,OAAO,EAAE,eAAe,EAAE,MAAM,wBAAwB,CAAA;AAEjE,8BAAsB,YAAa,SAAQ,YAAY,CAAC,kBAAkB,CAAC;IACzE,QAAQ,CAAC,cAAc,CAAC,OAAO,EAAE,OAAO,GAAG,IAAI;CAChD;AAED,MAAM,WAAW,kBAAkB;IACjC,OAAO,EAAE,CAAC,GAAG,EAAE,eAAe,KAAK,IAAI,CAAA;CACxC"}
package/dist/types.d.ts
CHANGED
@@ -10,7 +10,5 @@ export type BinaryDocumentId = Uint8Array & {
 export type PeerId = string & {
     __peerId: false;
 };
-export type ChannelId = string & {
-    __channelId: false;
-};
+export type DistributiveOmit<T, K extends keyof any> = T extends any ? Omit<T, K> : never;
 //# sourceMappingURL=types.d.ts.map
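`DistributiveOmit`, which replaces the removed `ChannelId` brand here, exists because a plain `Omit` over a discriminated union first collapses the union to its common keys. A reduced illustration, with two stand-ins for members of the new `Message` union:

type DistributiveOmit<T, K extends keyof any> = T extends any
  ? Omit<T, K>
  : never

type SyncMsg = { type: "sync"; senderId: string; data: Uint8Array }
type UnavailableMsg = { type: "doc-unavailable"; senderId: string }

// Omit<SyncMsg | UnavailableMsg, "senderId"> keeps only the keys common to
// both members, i.e. { type: "sync" | "doc-unavailable" }, losing `data`.
// DistributiveOmit applies Omit to each member separately:
type Outbound = DistributiveOmit<SyncMsg | UnavailableMsg, "senderId">
// = { type: "sync"; data: Uint8Array } | { type: "doc-unavailable" }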
package/dist/types.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AAAA,MAAM,MAAM,UAAU,GAAG,MAAM,GAAG;IAAE,YAAY,EAAE,IAAI,CAAA;CAAE,CAAA;AACxD,MAAM,MAAM,YAAY,GAAG,MAAM,GAAG;IAAE,aAAa,EAAE,IAAI,CAAA;CAAE,CAAA;AAC3D,MAAM,MAAM,gBAAgB,GAAG,UAAU,GAAG;IAAE,kBAAkB,EAAE,IAAI,CAAA;CAAE,CAAA;AAExE,MAAM,MAAM,MAAM,GAAG,MAAM,GAAG;IAAE,QAAQ,EAAE,KAAK,CAAA;CAAE,CAAA;
+{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AAAA,MAAM,MAAM,UAAU,GAAG,MAAM,GAAG;IAAE,YAAY,EAAE,IAAI,CAAA;CAAE,CAAA;AACxD,MAAM,MAAM,YAAY,GAAG,MAAM,GAAG;IAAE,aAAa,EAAE,IAAI,CAAA;CAAE,CAAA;AAC3D,MAAM,MAAM,gBAAgB,GAAG,UAAU,GAAG;IAAE,kBAAkB,EAAE,IAAI,CAAA;CAAE,CAAA;AAExE,MAAM,MAAM,MAAM,GAAG,MAAM,GAAG;IAAE,QAAQ,EAAE,KAAK,CAAA;CAAE,CAAA;AAEjD,MAAM,MAAM,gBAAgB,CAAC,CAAC,EAAE,CAAC,SAAS,MAAM,GAAG,IAAI,CAAC,SAAS,GAAG,GAChE,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,GACV,KAAK,CAAA"}
package/fuzz/fuzz.ts
CHANGED
@@ -2,7 +2,7 @@ import assert from "assert"
 import { MessageChannelNetworkAdapter } from "@automerge/automerge-repo-network-messagechannel"
 import * as Automerge from "@automerge/automerge"
 
-import {
+import { DocHandle, DocumentId, PeerId, SharePolicy } from "../src"
 import { eventPromise } from "../src/helpers/eventPromise.js"
 import { pause } from "../src/helpers/pause.js"
 import { Repo } from "../src/Repo.js"
@@ -105,9 +105,9 @@ for (let i = 0; i < 100000; i++) {
   })
 
   await pause(0)
-  const a = await aliceRepo.find(doc.
-  const b = await bobRepo.find(doc.
-  const c = await charlieRepo.find(doc.
+  const a = await aliceRepo.find(doc.url).doc()
+  const b = await bobRepo.find(doc.url).doc()
+  const c = await charlieRepo.find(doc.url).doc()
   assert.deepStrictEqual(a, b, "A and B should be equal")
   assert.deepStrictEqual(b, c, "B and C should be equal")
 
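The fuzz harness now addresses documents by `doc.url` rather than a bare id, matching the URL-based `find` on DocCollection. A minimal usage sketch, assuming `DocHandle` exposes the same `url` property the harness relies on and that an adapter-less in-memory `Repo` is acceptable locally:

import { Repo } from "@automerge/automerge-repo"

const main = async () => {
  const repo = new Repo({ network: [] }) // no network or storage adapters
  const handle = repo.create<{ count: number }>()
  handle.change(d => (d.count = 1))

  // handle.url is an "automerge:<encoded id>" URL, now the lookup key
  const doc = await repo.find<{ count: number }>(handle.url).doc()
  console.log(doc?.count) // 1
}
void main()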
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@automerge/automerge-repo",
-  "version": "1.0.0-alpha.2",
+  "version": "1.0.0-alpha.3",
   "description": "A repository object to manage a collection of automerge documents",
   "repository": "https://github.com/automerge/automerge-repo",
   "author": "Peter van Hardenberg <pvh@pvh.ca>",
@@ -65,5 +65,5 @@
   "publishConfig": {
     "access": "public"
   },
-  "gitHead": "
+  "gitHead": "0ed108273084319aeea64ceccb49c3d58709f107"
 }
package/src/DocCollection.ts
CHANGED
@@ -72,8 +72,8 @@ export class DocCollection extends EventEmitter<DocCollectionEvents> {
     // - pass a "reify" function that takes a `<any>` and returns `<T>`
 
     // Generate a new UUID and store it in the buffer
-    const {
-    const handle = this.#getHandle<T>(
+    const { documentId } = parseAutomergeUrl(generateAutomergeUrl())
+    const handle = this.#getHandle<T>(documentId, true) as DocHandle<T>
     this.emit("document", { handle })
     return handle
   }
@@ -90,12 +90,21 @@ export class DocCollection extends EventEmitter<DocCollectionEvents> {
       throw new Error(`Invalid AutomergeUrl: '${automergeUrl}'`)
     }
 
-    const {
+    const { documentId } = parseAutomergeUrl(automergeUrl)
     // If we have the handle cached, return it
-    if (this.#handleCache[
-
+    if (this.#handleCache[documentId]) {
+      if (this.#handleCache[documentId].isUnavailable()) {
+        // this ensures that the event fires after the handle has been returned
+        setTimeout(() => {
+          this.#handleCache[documentId].emit("unavailable", {
+            handle: this.#handleCache[documentId],
+          })
+        })
+      }
+      return this.#handleCache[documentId]
+    }
 
-    const handle = this.#getHandle<T>(
+    const handle = this.#getHandle<T>(documentId, false) as DocHandle<T>
     this.emit("document", { handle })
     return handle
   }
@@ -105,7 +114,7 @@ export class DocCollection extends EventEmitter<DocCollectionEvents> {
     id: DocumentId | AutomergeUrl
   ) {
     if (isValidAutomergeUrl(id)) {
-      ;({
+      ;({ documentId: id } = parseAutomergeUrl(id))
     }
 
     const handle = this.#getHandle(id, false)
@@ -113,7 +122,7 @@ export class DocCollection extends EventEmitter<DocCollectionEvents> {
 
     delete this.#handleCache[id]
     this.emit("delete-document", {
-
+      documentId: id,
     })
   }
 }
@@ -122,6 +131,7 @@ export class DocCollection extends EventEmitter<DocCollectionEvents> {
 interface DocCollectionEvents {
   document: (arg: DocumentPayload) => void
   "delete-document": (arg: DeleteDocumentPayload) => void
+  "unavailable-document": (arg: DeleteDocumentPayload) => void
 }
 
 interface DocumentPayload {
@@ -129,5 +139,5 @@ interface DocumentPayload {
 }
 
 interface DeleteDocumentPayload {
-
+  documentId: DocumentId
 }
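Note the deferred emit in the cached-handle branch of `find` above: re-emitting "unavailable" inside a `setTimeout` guarantees the event fires only after the handle has been returned to the caller, so a listener attached immediately afterwards still observes it. A sketch, with `repo` and `url` assumed to be set up elsewhere:

const handle = repo.find(url)
// safe to attach after find() returns, because the cached "unavailable"
// event is re-emitted on a fresh tick
handle.on("unavailable", () => {
  console.log("no reachable peer could supply this document")
})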
package/src/DocHandle.ts
CHANGED
@@ -17,8 +17,9 @@ import { waitFor } from "xstate/lib/waitFor.js"
 import { headsAreSame } from "./helpers/headsAreSame.js"
 import { pause } from "./helpers/pause.js"
 import { TimeoutError, withTimeout } from "./helpers/withTimeout.js"
-import type {
+import type { DocumentId, PeerId, AutomergeUrl } from "./types.js"
 import { stringifyAutomergeUrl } from "./DocUrl.js"
+import { encode } from "cbor-x"
 
 /** DocHandle is a wrapper around a single Automerge document that lets us listen for changes. */
 export class DocHandle<T> //
@@ -92,6 +93,10 @@ export class DocHandle<T> //
         },
         requesting: {
           on: {
+            MARK_UNAVAILABLE: {
+              target: UNAVAILABLE,
+              actions: "onUnavailable",
+            },
             // UPDATE is called by the Repo when we receive changes from the network
             UPDATE: { actions: "onUpdate" },
             // REQUEST_COMPLETE is called from `onUpdate` when the doc has been fully loaded from the network
@@ -118,6 +123,14 @@ export class DocHandle<T> //
         deleted: {
           type: "final",
         },
+        unavailable: {
+          on: {
+            UPDATE: { actions: "onUpdate" },
+            // REQUEST_COMPLETE is called from `onUpdate` when the doc has been fully loaded from the network
+            REQUEST_COMPLETE: { target: READY },
+            DELETE: { actions: "onDelete", target: DELETED },
+          },
+        },
       },
     },
 
@@ -136,6 +149,12 @@ export class DocHandle<T> //
           this.emit("delete", { handle: this })
           return { doc: undefined }
         }),
+        onUnavailable: assign(context => {
+          const { doc } = context
+
+          this.emit("unavailable", { handle: this })
+          return { doc }
+        }),
       },
     }
   )
@@ -144,9 +163,12 @@ export class DocHandle<T> //
     const oldDoc = history?.context?.doc
     const newDoc = context.doc
 
-
+    this.#log(`${history?.value}: ${event.type} → ${state}`, newDoc)
 
-    const docChanged =
+    const docChanged =
+      newDoc &&
+      oldDoc &&
+      !headsAreSame(A.getHeads(newDoc), A.getHeads(oldDoc))
     if (docChanged) {
       this.emit("heads-changed", { handle: this, doc: newDoc })
 
@@ -210,6 +232,7 @@ export class DocHandle<T> //
    * @returns true if the document has been marked as deleted
    */
   isDeleted = () => this.inState([HandleState.DELETED])
+  isUnavailable = () => this.inState([HandleState.UNAVAILABLE])
   inState = (states: HandleState[]) =>
     states.some(this.#machine?.getSnapshot().matches)
 
@@ -234,7 +257,9 @@ export class DocHandle<T> //
    *
    * @param {awaitStates=[READY]} optional states to wait for, such as "LOADING". mostly for internal use.
    */
-  async doc(
+  async doc(
+    awaitStates: HandleState[] = [READY, UNAVAILABLE]
+  ): Promise<A.Doc<T> | undefined> {
     await pause() // yield one tick because reasons
     try {
       // wait for the document to enter one of the desired states
@@ -245,7 +270,7 @@ export class DocHandle<T> //
       else throw error
     }
     // Return the document
-    return this.#doc
+    return !this.isUnavailable() ? this.#doc : undefined
   }
 
   /**
@@ -308,6 +333,10 @@ export class DocHandle<T> //
     })
   }
 
+  unavailable() {
+    this.#machine.send(MARK_UNAVAILABLE)
+  }
+
   /** `request` is called by the repo when the document is not found in storage */
   request() {
     if (this.#state === LOADING) this.#machine.send(REQUEST)
@@ -317,6 +346,19 @@ export class DocHandle<T> //
   delete() {
     this.#machine.send(DELETE)
   }
+
+  /** `broadcast` sends an arbitrary ephemeral message out to all reachable peers who would receive sync messages from you
+   * it has no guarantee of delivery, and is not persisted to the underlying automerge doc in any way.
+   * messages will have a sending PeerId but this is *not* a useful user identifier.
+   * a user could have multiple tabs open and would appear as multiple PeerIds.
+   * every message source must have a unique PeerId.
+   */
+  broadcast(message: any) {
+    this.emit("ephemeral-message-outbound", {
+      handle: this,
+      data: encode(message),
+    })
+  }
 }
 
 // WRAPPER CLASS TYPES
@@ -328,7 +370,7 @@ interface DocHandleOptions {
 
 export interface DocHandleMessagePayload {
   destinationId: PeerId
-
+  documentId: DocumentId
   data: Uint8Array
 }
 
@@ -348,10 +390,26 @@ export interface DocHandleChangePayload<T> {
   patchInfo: A.PatchInfo<T>
 }
 
+export interface DocHandleEphemeralMessagePayload {
+  handle: DocHandle<any>
+  senderId: PeerId
+  message: unknown
+}
+
+export interface DocHandleOutboundEphemeralMessagePayload {
+  handle: DocHandle<any>
+  data: Uint8Array
+}
+
 export interface DocHandleEvents<T> {
   "heads-changed": (payload: DocHandleEncodedChangePayload<T>) => void
   change: (payload: DocHandleChangePayload<T>) => void
   delete: (payload: DocHandleDeletePayload<T>) => void
+  unavailable: (payload: DocHandleDeletePayload<T>) => void
+  "ephemeral-message": (payload: DocHandleEphemeralMessagePayload) => void
+  "ephemeral-message-outbound": (
+    payload: DocHandleOutboundEphemeralMessagePayload
+  ) => void
 }
 
 // STATE MACHINE TYPES
@@ -365,6 +423,7 @@ export const HandleState = {
   READY: "ready",
   FAILED: "failed",
   DELETED: "deleted",
+  UNAVAILABLE: "unavailable",
 } as const
 export type HandleState = (typeof HandleState)[keyof typeof HandleState]
 
@@ -392,6 +451,7 @@ export const Event = {
   UPDATE: "UPDATE",
   TIMEOUT: "TIMEOUT",
   DELETE: "DELETE",
+  MARK_UNAVAILABLE: "MARK_UNAVAILABLE",
 } as const
 type Event = (typeof Event)[keyof typeof Event]
 
@@ -405,6 +465,7 @@ type UpdateEvent<T> = {
   payload: { callback: (doc: A.Doc<T>) => A.Doc<T> }
 }
 type TimeoutEvent = { type: typeof TIMEOUT }
+type MarkUnavailableEvent = { type: typeof MARK_UNAVAILABLE }
 
 type DocHandleEvent<T> =
   | CreateEvent
@@ -414,6 +475,7 @@ type DocHandleEvent<T> =
   | UpdateEvent<T>
   | TimeoutEvent
   | DeleteEvent
+  | MarkUnavailableEvent
 
 type DocHandleXstateMachine<T> = Interpreter<
   DocHandleContext<T>,
@@ -432,7 +494,22 @@ type DocHandleXstateMachine<T> = Interpreter<
 >
 
 // CONSTANTS
-
-
-
-
+export const {
+  IDLE,
+  LOADING,
+  REQUESTING,
+  READY,
+  FAILED,
+  DELETED,
+  UNAVAILABLE,
+} = HandleState
+const {
+  CREATE,
+  FIND,
+  REQUEST,
+  UPDATE,
+  TIMEOUT,
+  DELETE,
+  REQUEST_COMPLETE,
+  MARK_UNAVAILABLE,
+} = Event
package/src/DocUrl.ts
CHANGED
@@ -12,12 +12,12 @@ export const urlPrefix = "automerge:"
  * given an Automerge URL, return a decoded DocumentId (and the encoded DocumentId)
  *
  * @param url
- * @returns {
+ * @returns { binaryDocumentId: BinaryDocumentId, documentId: DocumentId }
  */
 export const parseAutomergeUrl = (url: AutomergeUrl) => {
-  const { binaryDocumentId
+  const { binaryDocumentId, documentId } = parts(url)
   if (!binaryDocumentId) throw new Error("Invalid document URL: " + url)
-  return { binaryDocumentId,
+  return { binaryDocumentId, documentId }
 }
 
 interface StringifyAutomergeUrlOptions {
@@ -28,7 +28,7 @@ interface StringifyAutomergeUrlOptions {
  * Given a documentId in either canonical form, return an Automerge URL
  * Throws on invalid input.
  * Note: this is an object because we anticipate adding fields in the future.
- * @param { documentId:
+ * @param { documentId: BinaryDocumentId | DocumentId }
  * @returns AutomergeUrl
  */
 export const stringifyAutomergeUrl = ({
@@ -79,12 +79,12 @@ export const binaryToDocumentId = (docId: BinaryDocumentId): DocumentId =>
  * eventually this could include things like heads, so we use this structure
  * we return both a binary & string-encoded version of the document ID
  * @param str
- * @returns { binaryDocumentId,
+ * @returns { binaryDocumentId, documentId }
  */
 const parts = (str: string) => {
   const regex = new RegExp(`^${urlPrefix}(\\w+)$`)
-  const [
-  const
-  const binaryDocumentId = documentIdToBinary(
-  return { binaryDocumentId,
+  const [_, docMatch] = str.match(regex) || []
+  const documentId = docMatch as DocumentId
+  const binaryDocumentId = documentIdToBinary(documentId)
+  return { binaryDocumentId, documentId }
 }