cojson 0.2.0 → 0.2.1
This diff shows the changes between publicly released versions of the package as they appear in the supported registries, and is provided for informational purposes only.
- package/dist/coValueCore.d.ts +6 -1
- package/dist/coValueCore.js +86 -42
- package/dist/coValueCore.js.map +1 -1
- package/dist/coValues/coStream.d.ts +1 -1
- package/dist/coValues/coStream.js +18 -9
- package/dist/coValues/coStream.js.map +1 -1
- package/dist/index.d.ts +2 -2
- package/dist/index.js +2 -2
- package/dist/index.js.map +1 -1
- package/dist/streamUtils.js +14 -5
- package/dist/streamUtils.js.map +1 -1
- package/dist/sync.js +35 -14
- package/dist/sync.js.map +1 -1
- package/package.json +2 -2
- package/src/coValue.test.ts +113 -4
- package/src/coValueCore.ts +156 -72
- package/src/coValues/coStream.ts +19 -15
- package/src/index.ts +2 -1
- package/src/streamUtils.ts +26 -6
- package/src/sync.test.ts +3 -2
- package/src/sync.ts +46 -24
package/src/streamUtils.ts
CHANGED
```diff
@@ -34,7 +34,14 @@ export function connectedPeers(
             trace &&
                 console.debug(
                     `${peer2id} -> ${peer1id}`,
-                    JSON.stringify(
+                    JSON.stringify(
+                        chunk,
+                        (k, v) =>
+                            (k === "changes" || k === "encryptedChanges")
+                                ? v.slice(0, 20) + "..."
+                                : v,
+                        2
+                    )
                 );
             controller.enqueue(chunk);
         },
```
```diff
@@ -52,7 +59,14 @@ export function connectedPeers(
             trace &&
                 console.debug(
                     `${peer1id} -> ${peer2id}`,
-                    JSON.stringify(
+                    JSON.stringify(
+                        chunk,
+                        (k, v) =>
+                            (k === "changes" || k === "encryptedChanges")
+                                ? v.slice(0, 20) + "..."
+                                : v,
+                        2
+                    )
                 );
             controller.enqueue(chunk);
         },
```
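Both trace hunks above use the same technique: a `JSON.stringify` replacer that shortens the potentially very long `changes` / `encryptedChanges` strings before they reach the debug log. A minimal standalone sketch of that replacer follows; the `traceStringify` name, the extra `typeof` guard, and the message shape in the usage line are illustrative additions, not part of cojson.

```ts
// Sketch of the replacer technique used in the trace logging above.
// Long "changes"/"encryptedChanges" values are truncated to 20 characters
// so debug output stays readable.
function traceStringify(chunk: unknown): string {
    return JSON.stringify(
        chunk,
        (k, v) =>
            (k === "changes" || k === "encryptedChanges") && typeof v === "string"
                ? v.slice(0, 20) + "..."
                : v,
        2
    );
}

// Illustrative usage with a made-up message shape:
console.log(traceStringify({ action: "content", encryptedChanges: "x".repeat(200) }));
```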
```diff
@@ -102,16 +116,22 @@ export function newStreamPair<T>(): [ReadableStream<T>, WritableStream<T>] {
         },
     });
 
+    let lastWritePromise = Promise.resolve();
+
     const writable = new WritableStream<T>({
         async write(chunk) {
             const enqueue = await enqueuePromise;
             if (readerClosed) {
                 throw new Error("Reader closed");
            } else {
-                // make sure write resolves before corresponding read
-
-
-
+                // make sure write resolves before corresponding read, but make sure writes are still in order
+                await lastWritePromise;
+                lastWritePromise = new Promise((resolve) => {
+                    setTimeout(() => {
+                        enqueue(chunk);
+                        resolve();
+                    });
+                });
             }
         },
         async abort(reason) {
```
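The `newStreamPair` change above chains each write onto a `lastWritePromise` so that `write()` can resolve before the corresponding read is served, while chunks are still enqueued in FIFO order. A self-contained sketch of the pattern; the `makeOrderedWriter` helper and the usage at the bottom are illustrative, not cojson's API.

```ts
// Sketch of the ordering pattern: each write awaits the previously recorded
// write promise, then schedules its own enqueue on a macrotask. The returned
// promise resolves before the chunk is actually enqueued, but delivery order
// is preserved.
function makeOrderedWriter<T>(enqueue: (chunk: T) => void) {
    let lastWritePromise: Promise<void> = Promise.resolve();

    return async function write(chunk: T): Promise<void> {
        await lastWritePromise;
        lastWritePromise = new Promise<void>((resolve) => {
            setTimeout(() => {
                enqueue(chunk);
                resolve();
            }, 0);
        });
    };
}

// Chunks come out in the order they were written: 1, 2, 3.
const write = makeOrderedWriter<number>((n) => console.log("enqueued", n));
void write(1);
void write(2);
void write(3);
```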
package/src/sync.test.ts
CHANGED
```diff
@@ -436,8 +436,9 @@ test("No matter the optimistic known state, node respects invalid known state me
         editable.set("goodbye", "world", "trusting");
     });
 
-    const
-
+    const _mapEditMsgs = await reader.read();
+
+    console.log("Sending correction");
 
     await writer.write({
         action: "known",
```
package/src/sync.ts
CHANGED
```diff
@@ -215,14 +215,32 @@ export class SyncManager {
             await this.sendNewContentIncludingDependencies(id, peer);
         }
 
-        const
+        const newContentPieces = coValue.newContentSince(
            peer.optimisticKnownStates[id]
        );
 
-        if (
-
+        if (newContentPieces) {
+            const optimisticKnownStateBefore =
+                peer.optimisticKnownStates[id] || emptyKnownState(id);
+
+            const sendPieces = async () => {
+                for (const [i, piece] of newContentPieces.entries()) {
+                    // console.log(
+                    //     `${id} -> ${peer.id}: Sending content piece ${i + 1}/${newContentPieces.length} header: ${!!piece.header}`,
+                    //     // Object.values(piece.new).map((s) => s.newTransactions)
+                    // );
+                    await this.trySendToPeer(peer, piece);
+                }
+            };
+
+            sendPieces().catch((e) => {
+                console.error("Error sending new content piece, retrying", e);
+                peer.optimisticKnownStates[id] = optimisticKnownStateBefore;
+                return this.sendNewContentIncludingDependencies(id, peer);
+            });
+
             peer.optimisticKnownStates[id] = combinedKnownStates(
-
+                optimisticKnownStateBefore,
                coValue.knownState()
            );
        }
```
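The `sendNewContentIncludingDependencies` change above sends new content in pieces without awaiting the whole send, captures the optimistic known state beforehand, and on failure rolls that state back and retries. A generic sketch of that capture/rollback/retry pattern; all names below are illustrative, not cojson's API.

```ts
// Capture-rollback-retry: kick off the send in the background, proceed
// optimistically, and on failure restore the captured state and retry.
function sendWithRollback<S>(
    pieces: readonly string[],
    getState: () => S,
    setState: (state: S) => void,
    trySend: (piece: string) => Promise<void>,
    retry: () => Promise<void>
): void {
    const stateBefore = getState();

    const sendAll = async () => {
        for (const piece of pieces) {
            await trySend(piece);
        }
    };

    // Deliberately not awaited, mirroring the hunk above: failures roll the
    // optimistic state back and schedule a full retry.
    sendAll().catch((e) => {
        console.error("Error sending new content piece, retrying", e);
        setState(stateBefore);
        return retry();
    });
}
```

Not awaiting the send keeps the sync loop responsive; restoring the previously captured optimistic state is what makes a clean retry possible after a failed piece.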
```diff
@@ -261,6 +279,9 @@ export class SyncManager {
         for await (const msg of peerState.incoming) {
             try {
                 await this.handleSyncMessage(msg, peerState);
+                await new Promise<void>((resolve) => {
+                    setTimeout(resolve, 0);
+                });
             } catch (e) {
                 console.error(
                     `Error reading from peer ${peer.id}, handling msg`,
```
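The loop above now yields to the event loop after each handled message via a zero-delay timeout, so a long burst of incoming messages cannot starve other tasks. A tiny sketch of that yield; the helper and the `drain` wrapper are illustrative names.

```ts
// Awaiting a zero-delay timeout pushes the continuation onto a macrotask,
// letting timers, I/O callbacks and other work run between handled messages.
function yieldToEventLoop(): Promise<void> {
    return new Promise<void>((resolve) => {
        setTimeout(resolve, 0);
    });
}

// Hypothetical message loop using it:
async function drain<T>(messages: AsyncIterable<T>, handle: (msg: T) => Promise<void>) {
    for await (const msg of messages) {
        await handle(msg);
        await yieldToEventLoop();
    }
}
```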
```diff
@@ -445,6 +466,10 @@ export class SyncManager {
             const newTransactions =
                 newContentForSession.newTransactions.slice(alreadyKnownOffset);
 
+            if (newTransactions.length === 0) {
+                continue;
+            }
+
             const before = performance.now();
             const success = await coValue.tryAddTransactionsAsync(
                 sessionID,
```
```diff
@@ -454,20 +479,26 @@ export class SyncManager {
             );
             const after = performance.now();
             if (after - before > 10) {
-                const totalTxLength = newTransactions
+                const totalTxLength = newTransactions
+                    .map((t) =>
+                        t.privacy === "private"
+                            ? t.encryptedChanges.length
+                            : t.changes.length
+                    )
+                    .reduce((a, b) => a + b, 0);
                 console.log(
-
-
-
-
-
-
-                    (
+                    `Adding incoming transactions took ${(
+                        after - before
+                    ).toFixed(2)}ms for ${totalTxLength} bytes = bandwidth: ${(
+                        (1000 * totalTxLength) /
+                        (after - before) /
+                        (1024 * 1024)
+                    ).toFixed(2)} MB/s`
                );
            }
 
            if (!success) {
-                console.error("Failed to add transactions", newTransactions);
+                console.error("Failed to add transactions", msg.id, newTransactions);
                continue;
            }
 
```
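The slow-path log above sums the payload sizes of the new transactions and reports throughput: bytes over elapsed milliseconds, scaled to MB/s. A minimal sketch of that arithmetic; the function name is illustrative.

```ts
// (1000 * bytes) / elapsedMs converts to bytes per second; dividing by
// 1024 * 1024 converts to MB/s (mebibytes, matching the log's label).
function bandwidthMBps(totalBytes: number, elapsedMs: number): number {
    return (1000 * totalBytes) / elapsedMs / (1024 * 1024);
}

// Example: 5 MiB of transaction payload processed in 250 ms ≈ 20.00 MB/s.
console.log(bandwidthMBps(5 * 1024 * 1024, 250).toFixed(2));
```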
```diff
@@ -492,18 +523,9 @@ export class SyncManager {
     }
 
     async handleCorrection(msg: KnownStateMessage, peer: PeerState) {
-
+        peer.optimisticKnownStates[msg.id] = msg;
 
-
-            msg,
-            coValue.knownState()
-        );
-
-        const newContent = coValue.newContentSince(msg);
-
-        if (newContent) {
-            await this.trySendToPeer(peer, newContent);
-        }
+        return this.sendNewContentIncludingDependencies(msg.id, peer);
     }
 
     handleUnsubscribe(_msg: DoneMessage) {
```
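`handleCorrection` is reduced above to two steps: the peer's correction becomes the new optimistic known state, and the regular `sendNewContentIncludingDependencies` path computes and sends whatever the peer is missing. A sketch of that shape; the standalone function signature and the fields shown on `KnownStateMessage` beyond `action` and `id` are assumptions, not cojson's exports.

```ts
// Sketch: overwrite the peer's optimistic known state with the correction,
// then reuse the normal content-sending path to fill the gap.
type CoID = string;

interface KnownStateMessage {
    action: "known";
    id: CoID;
    // other known-state fields (sessions, header) omitted in this sketch
}

async function handleCorrection(
    msg: KnownStateMessage,
    optimisticKnownStates: Record<CoID, KnownStateMessage>,
    sendNewContentIncludingDependencies: (id: CoID) => Promise<void>
): Promise<void> {
    optimisticKnownStates[msg.id] = msg;
    return sendNewContentIncludingDependencies(msg.id);
}
```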