cojson 0.2.0 → 0.2.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/coValueCore.d.ts +6 -1
- package/dist/coValueCore.js +86 -42
- package/dist/coValueCore.js.map +1 -1
- package/dist/coValues/coStream.d.ts +1 -1
- package/dist/coValues/coStream.js +18 -9
- package/dist/coValues/coStream.js.map +1 -1
- package/dist/index.d.ts +2 -2
- package/dist/index.js +2 -2
- package/dist/index.js.map +1 -1
- package/dist/streamUtils.js +14 -5
- package/dist/streamUtils.js.map +1 -1
- package/dist/sync.js +38 -15
- package/dist/sync.js.map +1 -1
- package/package.json +2 -2
- package/src/coValue.test.ts +113 -4
- package/src/coValueCore.ts +156 -72
- package/src/coValues/coStream.ts +19 -15
- package/src/index.ts +2 -1
- package/src/streamUtils.ts +26 -6
- package/src/sync.test.ts +3 -2
- package/src/sync.ts +55 -25
package/src/streamUtils.ts
CHANGED
@@ -34,7 +34,14 @@ export function connectedPeers(
                 trace &&
                     console.debug(
                         `${peer2id} -> ${peer1id}`,
-                        JSON.stringify(
+                        JSON.stringify(
+                            chunk,
+                            (k, v) =>
+                                (k === "changes" || k === "encryptedChanges")
+                                    ? v.slice(0, 20) + "..."
+                                    : v,
+                            2
+                        )
                     );
                 controller.enqueue(chunk);
             },
@@ -52,7 +59,14 @@ export function connectedPeers(
                 trace &&
                     console.debug(
                         `${peer1id} -> ${peer2id}`,
-                        JSON.stringify(
+                        JSON.stringify(
+                            chunk,
+                            (k, v) =>
+                                (k === "changes" || k === "encryptedChanges")
+                                    ? v.slice(0, 20) + "..."
+                                    : v,
+                            2
+                        )
                     );
                 controller.enqueue(chunk);
             },
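Both directions of the peer pipe now log sync messages through a JSON.stringify replacer that shortens the potentially large `changes` / `encryptedChanges` fields. A minimal standalone sketch of that replacer, assuming those fields hold string payloads (the helper name truncatedStringify and the sample message are illustrative, not part of cojson):

    // Sketch: truncate long payload fields when logging sync messages.
    // truncatedStringify is a hypothetical helper, not exported by cojson.
    function truncatedStringify(value: unknown): string {
        return JSON.stringify(
            value,
            (k, v) =>
                (k === "changes" || k === "encryptedChanges") && typeof v === "string"
                    ? v.slice(0, 20) + "..."
                    : v,
            2
        );
    }

    // Example: a long encryptedChanges string is cut to 20 characters in the output.
    console.debug(
        truncatedStringify({
            action: "content",
            encryptedChanges: "encrypted_payload_".repeat(20),
        })
    );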
@@ -102,16 +116,22 @@ export function newStreamPair<T>(): [ReadableStream<T>, WritableStream<T>] {
         },
     });
 
+    let lastWritePromise = Promise.resolve();
+
     const writable = new WritableStream<T>({
         async write(chunk) {
            const enqueue = await enqueuePromise;
            if (readerClosed) {
                throw new Error("Reader closed");
            } else {
-                // make sure write resolves before corresponding read
-
-
-
+                // make sure write resolves before corresponding read, but make sure writes are still in order
+                await lastWritePromise;
+                lastWritePromise = new Promise((resolve) => {
+                    setTimeout(() => {
+                        enqueue(chunk);
+                        resolve();
+                    });
+                });
            }
         },
         async abort(reason) {
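The newStreamPair change keeps deferred enqueues in order by making each write wait on the promise of the previous write before scheduling its own enqueue on a setTimeout. A standalone sketch of that chaining pattern, with illustrative names (makeOrderedDeferred is not a cojson export):

    // Sketch: keep deferred operations in submission order by chaining each one
    // onto the previous operation's promise, mirroring lastWritePromise above.
    function makeOrderedDeferred<T>(handle: (item: T) => void) {
        let last: Promise<void> = Promise.resolve();

        return async function push(item: T): Promise<void> {
            // wait until the previously scheduled item has been handled...
            await last;
            // ...then defer this item to a macrotask and remember its completion
            last = new Promise<void>((resolve) => {
                setTimeout(() => {
                    handle(item);
                    resolve();
                });
            });
        };
    }

    // Usage: each push resolves once its item is scheduled; items are handled
    // strictly in order 1, 2, 3.
    async function demo() {
        const push = makeOrderedDeferred<number>((n) => console.log("handled", n));
        await push(1);
        await push(2);
        await push(3);
    }
    void demo();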
package/src/sync.test.ts
CHANGED
@@ -436,8 +436,9 @@ test("No matter the optimistic known state, node respects invalid known state me
         editable.set("goodbye", "world", "trusting");
     });
 
-    const
-
+    const _mapEditMsgs = await reader.read();
+
+    console.log("Sending correction");
 
     await writer.write({
         action: "known",
package/src/sync.ts
CHANGED
@@ -215,14 +215,32 @@ export class SyncManager {
             await this.sendNewContentIncludingDependencies(id, peer);
         }
 
-        const
+        const newContentPieces = coValue.newContentSince(
             peer.optimisticKnownStates[id]
         );
 
-        if (
-
+        if (newContentPieces) {
+            const optimisticKnownStateBefore =
+                peer.optimisticKnownStates[id] || emptyKnownState(id);
+
+            const sendPieces = async () => {
+                for (const [i, piece] of newContentPieces.entries()) {
+                    // console.log(
+                    //     `${id} -> ${peer.id}: Sending content piece ${i + 1}/${newContentPieces.length} header: ${!!piece.header}`,
+                    //     // Object.values(piece.new).map((s) => s.newTransactions)
+                    // );
+                    await this.trySendToPeer(peer, piece);
+                }
+            };
+
+            sendPieces().catch((e) => {
+                console.error("Error sending new content piece, retrying", e);
+                peer.optimisticKnownStates[id] = optimisticKnownStateBefore;
+                return this.sendNewContentIncludingDependencies(id, peer);
+            });
+
             peer.optimisticKnownStates[id] = combinedKnownStates(
-
+                optimisticKnownStateBefore,
                 coValue.knownState()
             );
         }
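The send path above optimistically records the new known state, sends the content pieces asynchronously, and on failure rolls the peer's optimistic known state back to the snapshot taken before the attempt and retries. A simplified sketch of that rollback-and-retry pattern; the KnownState shape and helper names here are placeholders, not cojson's real types:

    // Sketch of "optimistic state with rollback on failed send".
    // All names below are illustrative; cojson's real types differ.
    type KnownState = { id: string; sentPieces: number };

    async function syncWithRollback(
        known: Map<string, KnownState>,
        id: string,
        pieces: string[],
        sendPiece: (piece: string) => Promise<void>,
        resync: (id: string) => Promise<void>
    ) {
        // remember the state we would have to fall back to
        const before: KnownState = known.get(id) ?? { id, sentPieces: 0 };

        const sendAll = async () => {
            for (const piece of pieces) {
                await sendPiece(piece);
            }
        };

        // fire-and-forget: optimistically mark everything as sent below,
        // but undo the optimism and resync if any piece fails
        sendAll().catch((e) => {
            console.error("Error sending piece, retrying", e);
            known.set(id, before);
            return resync(id);
        });

        known.set(id, { id, sentPieces: before.sentPieces + pieces.length });
    }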
@@ -261,10 +279,17 @@ export class SyncManager {
         for await (const msg of peerState.incoming) {
             try {
                 await this.handleSyncMessage(msg, peerState);
+                await new Promise<void>((resolve) => {
+                    setTimeout(resolve, 0);
+                });
             } catch (e) {
                 console.error(
                     `Error reading from peer ${peer.id}, handling msg`,
-                    JSON.stringify(msg)
+                    JSON.stringify(msg, (k, v) =>
+                        k === "changes" || k === "encryptedChanges"
+                            ? v.slice(0, 20) + "..."
+                            : v
+                    ),
                     e
                 );
             }
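After handling each incoming message, the loop now awaits a zero-delay timeout, yielding to the event loop so a long burst of messages from one peer cannot starve other work. A tiny sketch of that idea (yieldToEventLoop and drain are illustrative names, not cojson exports):

    // Sketch: give the event loop a chance to run other tasks between messages.
    function yieldToEventLoop(): Promise<void> {
        return new Promise<void>((resolve) => {
            setTimeout(resolve, 0);
        });
    }

    // Hypothetical usage inside a message loop:
    async function drain(
        messages: AsyncIterable<unknown>,
        handle: (msg: unknown) => Promise<void>
    ) {
        for await (const msg of messages) {
            await handle(msg);
            // a macrotask boundary between messages keeps the loop cooperative
            await yieldToEventLoop();
        }
    }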
@@ -445,6 +470,10 @@ export class SyncManager {
             const newTransactions =
                 newContentForSession.newTransactions.slice(alreadyKnownOffset);
 
+            if (newTransactions.length === 0) {
+                continue;
+            }
+
             const before = performance.now();
             const success = await coValue.tryAddTransactionsAsync(
                 sessionID,
@@ -454,20 +483,30 @@ export class SyncManager {
             );
             const after = performance.now();
             if (after - before > 10) {
-                const totalTxLength = newTransactions
+                const totalTxLength = newTransactions
+                    .map((t) =>
+                        t.privacy === "private"
+                            ? t.encryptedChanges.length
+                            : t.changes.length
+                    )
+                    .reduce((a, b) => a + b, 0);
                 console.log(
-
-
-
-
-
-
-                    (
+                    `Adding incoming transactions took ${(
+                        after - before
+                    ).toFixed(2)}ms for ${totalTxLength} bytes = bandwidth: ${(
+                        (1000 * totalTxLength) /
+                        (after - before) /
+                        (1024 * 1024)
+                    ).toFixed(2)} MB/s`
                 );
             }
 
             if (!success) {
-                console.error(
+                console.error(
+                    "Failed to add transactions",
+                    msg.id,
+                    newTransactions
+                );
                 continue;
             }
 
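The slow-transaction log converts bytes over elapsed milliseconds into MB/s: multiply by 1000 for bytes per second, then divide by 1024² for megabytes. The same arithmetic as a hypothetical helper, with a worked example (50 MiB in 500 ms comes out to 100 MB/s):

    // Sketch: bytes over elapsed milliseconds -> MB/s, matching the log's formula.
    function throughputMBps(bytes: number, elapsedMs: number): number {
        return (1000 * bytes) / elapsedMs / (1024 * 1024);
    }

    // 50 MiB transferred in 500 ms -> 100 MB/s
    console.log(throughputMBps(50 * 1024 * 1024, 500).toFixed(2)); // "100.00"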
@@ -492,18 +531,9 @@ export class SyncManager {
     }
 
     async handleCorrection(msg: KnownStateMessage, peer: PeerState) {
-
-
-        peer.optimisticKnownStates[msg.id] = combinedKnownStates(
-            msg,
-            coValue.knownState()
-        );
+        peer.optimisticKnownStates[msg.id] = msg;
 
-
-
-        if (newContent) {
-            await this.trySendToPeer(peer, newContent);
-        }
+        return this.sendNewContentIncludingDependencies(msg.id, peer);
     }
 
     handleUnsubscribe(_msg: DoneMessage) {
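handleCorrection now simply adopts the peer's reported known state and re-runs the normal send path, instead of merging states and sending a one-off content message. A schematic sketch of that flow; the types and helper names are placeholders, not cojson's API:

    // Sketch of the simplified correction flow: trust the peer's reported state
    // and let the regular content path compute what is still missing.
    type SessionCounts = Record<string, number>;
    type KnownStateSketch = { id: string; sessions: SessionCounts };

    async function handleCorrectionSketch(
        optimisticKnownStates: Record<string, KnownStateSketch>,
        correction: KnownStateSketch,
        sendNewContent: (id: string) => Promise<void>
    ) {
        // the peer told us exactly what it has, so drop our optimistic assumptions
        optimisticKnownStates[correction.id] = correction;
        // and re-send whatever the peer is still missing via the normal send path
        return sendNewContent(correction.id);
    }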