cojson 0.19.18 → 0.19.20
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +1 -1
- package/CHANGELOG.md +9 -0
- package/dist/PeerState.d.ts.map +1 -1
- package/dist/SyncStateManager.d.ts +5 -2
- package/dist/SyncStateManager.d.ts.map +1 -1
- package/dist/SyncStateManager.js +49 -12
- package/dist/SyncStateManager.js.map +1 -1
- package/dist/UnsyncedCoValuesTracker.d.ts +81 -0
- package/dist/UnsyncedCoValuesTracker.d.ts.map +1 -0
- package/dist/UnsyncedCoValuesTracker.js +209 -0
- package/dist/UnsyncedCoValuesTracker.js.map +1 -0
- package/dist/config.d.ts +6 -0
- package/dist/config.d.ts.map +1 -1
- package/dist/config.js +10 -0
- package/dist/config.js.map +1 -1
- package/dist/exports.d.ts +11 -3
- package/dist/exports.d.ts.map +1 -1
- package/dist/exports.js +6 -1
- package/dist/exports.js.map +1 -1
- package/dist/localNode.d.ts +9 -5
- package/dist/localNode.d.ts.map +1 -1
- package/dist/localNode.js +12 -8
- package/dist/localNode.js.map +1 -1
- package/dist/queue/IncomingMessagesQueue.d.ts +6 -7
- package/dist/queue/IncomingMessagesQueue.d.ts.map +1 -1
- package/dist/queue/IncomingMessagesQueue.js +7 -30
- package/dist/queue/IncomingMessagesQueue.js.map +1 -1
- package/dist/queue/LinkedList.d.ts +1 -1
- package/dist/queue/LinkedList.d.ts.map +1 -1
- package/dist/queue/LinkedList.js.map +1 -1
- package/dist/queue/StorageStreamingQueue.d.ts +43 -0
- package/dist/queue/StorageStreamingQueue.d.ts.map +1 -0
- package/dist/queue/StorageStreamingQueue.js +70 -0
- package/dist/queue/StorageStreamingQueue.js.map +1 -0
- package/dist/storage/knownState.d.ts +1 -1
- package/dist/storage/knownState.js +4 -4
- package/dist/storage/sqlite/client.d.ts +8 -0
- package/dist/storage/sqlite/client.d.ts.map +1 -1
- package/dist/storage/sqlite/client.js +17 -0
- package/dist/storage/sqlite/client.js.map +1 -1
- package/dist/storage/sqlite/sqliteMigrations.d.ts.map +1 -1
- package/dist/storage/sqlite/sqliteMigrations.js +9 -0
- package/dist/storage/sqlite/sqliteMigrations.js.map +1 -1
- package/dist/storage/sqliteAsync/client.d.ts +8 -0
- package/dist/storage/sqliteAsync/client.d.ts.map +1 -1
- package/dist/storage/sqliteAsync/client.js +19 -0
- package/dist/storage/sqliteAsync/client.js.map +1 -1
- package/dist/storage/storageAsync.d.ts +9 -2
- package/dist/storage/storageAsync.d.ts.map +1 -1
- package/dist/storage/storageAsync.js +9 -0
- package/dist/storage/storageAsync.js.map +1 -1
- package/dist/storage/storageSync.d.ts +17 -4
- package/dist/storage/storageSync.d.ts.map +1 -1
- package/dist/storage/storageSync.js +67 -44
- package/dist/storage/storageSync.js.map +1 -1
- package/dist/storage/types.d.ts +35 -0
- package/dist/storage/types.d.ts.map +1 -1
- package/dist/sync.d.ts +38 -1
- package/dist/sync.d.ts.map +1 -1
- package/dist/sync.js +181 -7
- package/dist/sync.js.map +1 -1
- package/dist/tests/IncomingMessagesQueue.test.js +4 -150
- package/dist/tests/IncomingMessagesQueue.test.js.map +1 -1
- package/dist/tests/StorageStreamingQueue.test.d.ts +2 -0
- package/dist/tests/StorageStreamingQueue.test.d.ts.map +1 -0
- package/dist/tests/StorageStreamingQueue.test.js +213 -0
- package/dist/tests/StorageStreamingQueue.test.js.map +1 -0
- package/dist/tests/SyncManager.processQueues.test.d.ts +2 -0
- package/dist/tests/SyncManager.processQueues.test.d.ts.map +1 -0
- package/dist/tests/SyncManager.processQueues.test.js +208 -0
- package/dist/tests/SyncManager.processQueues.test.js.map +1 -0
- package/dist/tests/SyncStateManager.test.js +3 -3
- package/dist/tests/SyncStateManager.test.js.map +1 -1
- package/dist/tests/coValueCore.loadFromStorage.test.js +3 -0
- package/dist/tests/coValueCore.loadFromStorage.test.js.map +1 -1
- package/dist/tests/setup.d.ts +2 -0
- package/dist/tests/setup.d.ts.map +1 -0
- package/dist/tests/setup.js +4 -0
- package/dist/tests/setup.js.map +1 -0
- package/dist/tests/sync.garbageCollection.test.js.map +1 -1
- package/dist/tests/sync.mesh.test.js +19 -19
- package/dist/tests/sync.storage.test.js +176 -20
- package/dist/tests/sync.storage.test.js.map +1 -1
- package/dist/tests/sync.test.js +1 -1
- package/dist/tests/sync.test.js.map +1 -1
- package/dist/tests/sync.tracking.test.d.ts +2 -0
- package/dist/tests/sync.tracking.test.d.ts.map +1 -0
- package/dist/tests/sync.tracking.test.js +261 -0
- package/dist/tests/sync.tracking.test.js.map +1 -0
- package/dist/tests/testUtils.d.ts +4 -3
- package/dist/tests/testUtils.d.ts.map +1 -1
- package/dist/tests/testUtils.js +4 -4
- package/dist/tests/testUtils.js.map +1 -1
- package/package.json +4 -4
- package/src/PeerState.ts +2 -2
- package/src/SyncStateManager.ts +63 -12
- package/src/UnsyncedCoValuesTracker.ts +272 -0
- package/src/config.ts +13 -0
- package/src/exports.ts +10 -1
- package/src/localNode.ts +15 -3
- package/src/queue/IncomingMessagesQueue.ts +7 -39
- package/src/queue/LinkedList.ts +1 -1
- package/src/queue/StorageStreamingQueue.ts +96 -0
- package/src/storage/knownState.ts +4 -4
- package/src/storage/sqlite/client.ts +31 -0
- package/src/storage/sqlite/sqliteMigrations.ts +9 -0
- package/src/storage/sqliteAsync/client.ts +35 -0
- package/src/storage/storageAsync.ts +18 -1
- package/src/storage/storageSync.ts +119 -56
- package/src/storage/types.ts +42 -0
- package/src/sync.ts +235 -8
- package/src/tests/IncomingMessagesQueue.test.ts +4 -206
- package/src/tests/StorageStreamingQueue.test.ts +276 -0
- package/src/tests/SyncManager.processQueues.test.ts +287 -0
- package/src/tests/SyncStateManager.test.ts +3 -0
- package/src/tests/coValueCore.loadFromStorage.test.ts +11 -0
- package/src/tests/setup.ts +4 -0
- package/src/tests/sync.garbageCollection.test.ts +1 -3
- package/src/tests/sync.mesh.test.ts +19 -19
- package/src/tests/sync.storage.test.ts +224 -32
- package/src/tests/sync.test.ts +1 -9
- package/src/tests/sync.tracking.test.ts +396 -0
- package/src/tests/testUtils.ts +11 -5
- package/vitest.config.ts +1 -0
package/src/sync.ts
CHANGED
@@ -2,6 +2,8 @@ import { md5 } from "@noble/hashes/legacy";
 import { Histogram, ValueType, metrics } from "@opentelemetry/api";
 import { PeerState } from "./PeerState.js";
 import { SyncStateManager } from "./SyncStateManager.js";
+import { UnsyncedCoValuesTracker } from "./UnsyncedCoValuesTracker.js";
+import { SYNC_SCHEDULER_CONFIG } from "./config.js";
 import {
   getContenDebugInfo,
   getNewTransactionsFromContentMessage,
@@ -18,11 +20,13 @@ import { logger } from "./logger.js";
 import { CoValuePriority } from "./priority.js";
 import { IncomingMessagesQueue } from "./queue/IncomingMessagesQueue.js";
 import { LocalTransactionsSyncQueue } from "./queue/LocalTransactionsSyncQueue.js";
+import type { StorageStreamingQueue } from "./queue/StorageStreamingQueue.js";
 import {
   CoValueKnownState,
   knownStateFrom,
   KnownStateSessions,
 } from "./knownState.js";
+import { StorageAPI } from "./storage/index.js";
 
 export type SyncMessage =
   | LoadMessage
@@ -63,6 +67,15 @@ export type DoneMessage = {
   id: RawCoID;
 };
 
+/**
+ * Determines when network sync is enabled.
+ * - "always": sync is enabled for both Anonymous Authentication and Authenticated Account
+ * - "signedUp": sync is enabled when the user is authenticated
+ * - "never": sync is disabled, content stays local
+ * Can be dynamically modified to control sync behavior at runtime.
+ */
+export type SyncWhen = "always" | "signedUp" | "never";
+
 export type PeerID = string;
 
 export type DisconnectedError = "Disconnected";
@@ -121,6 +134,7 @@ export class SyncManager {
   constructor(local: LocalNode) {
     this.local = local;
     this.syncState = new SyncStateManager(this);
+    this.unsyncedTracker = new UnsyncedCoValuesTracker();
 
     this.transactionsSizeHistogram = metrics
       .getMeter("cojson")
@@ -132,6 +146,7 @@ export class SyncManager {
   }
 
   syncState: SyncStateManager;
+  unsyncedTracker: UnsyncedCoValuesTracker;
 
   disableTransactionVerification() {
     this.skipVerify = true;
@@ -154,6 +169,10 @@
       : serverPeers;
   }
 
+  getPersistentServerPeers(id: RawCoID): PeerState[] {
+    return this.getServerPeers(id).filter((peer) => peer.persistent);
+  }
+
   handleSyncMessage(msg: SyncMessage, peer: PeerState) {
     if (!isRawCoID(msg.id)) {
       const errorType = msg.id ? "invalid" : "undefined";
@@ -259,7 +278,88 @@
     }
   }
 
+  async resumeUnsyncedCoValues(): Promise<void> {
+    if (!this.local.storage) {
+      // No storage available, skip resumption
+      return;
+    }
+
+    await new Promise<void>((resolve, reject) => {
+      // Load all persisted unsynced CoValues from storage
+      this.local.storage?.getUnsyncedCoValueIDs((unsyncedCoValueIDs) => {
+        const coValuesToLoad = unsyncedCoValueIDs.filter(
+          (coValueId) => !this.local.hasCoValue(coValueId),
+        );
+        if (coValuesToLoad.length === 0) {
+          resolve();
+          return;
+        }
+
+        const BATCH_SIZE = 10;
+        let processed = 0;
+
+        const processBatch = async () => {
+          const batch = coValuesToLoad.slice(processed, processed + BATCH_SIZE);
+
+          await Promise.all(
+            batch.map(
+              async (coValueId) =>
+                new Promise<void>((resolve) => {
+                  try {
+                    // Clear previous tracking (as it may include outdated peers)
+                    this.local.storage?.stopTrackingSyncState(coValueId);
+
+                    // Resume tracking sync state for this CoValue
+                    // This will add it back to the tracker and set up subscriptions
+                    this.trackSyncState(coValueId);
+
+                    // Load the CoValue from storage (this will trigger sync if peers are connected)
+                    const coValue = this.local.getCoValue(coValueId);
+                    coValue.loadFromStorage((found) => {
+                      if (!found) {
+                        // CoValue could not be loaded from storage, stop tracking
+                        this.unsyncedTracker.removeAll(coValueId);
+                      }
+                      resolve();
+                    });
+                  } catch (error) {
+                    // Handle errors gracefully - log but don't fail the entire resumption
+                    logger.warn(
+                      `Failed to resume sync for CoValue ${coValueId}:`,
+                      {
+                        err: error,
+                        coValueId,
+                      },
+                    );
+                    this.unsyncedTracker.removeAll(coValueId);
+                    resolve();
+                  }
+                }),
+            ),
+          );
+
+          processed += batch.length;
+
+          if (processed < coValuesToLoad.length) {
+            processBatch().catch(reject);
+          } else {
+            resolve();
+          }
+        };
+
+        processBatch().catch(reject);
+      });
+    });
+  }
+
   startPeerReconciliation(peer: PeerState) {
+    if (peer.role === "server" && peer.persistent) {
+      // Resume syncing unsynced CoValues asynchronously
+      this.resumeUnsyncedCoValues().catch((error) => {
+        logger.warn("Failed to resume unsynced CoValues:", error);
+      });
+    }
+
     const coValuesOrderedByDependency: CoValueCore[] = [];
 
     const seen = new Set<string>();
@@ -328,17 +428,87 @@
     }
   }
 
-  messagesQueue = new IncomingMessagesQueue();
+  messagesQueue = new IncomingMessagesQueue(() => this.processQueues());
+  private processing = false;
+
   pushMessage(incoming: SyncMessage, peer: PeerState) {
     this.messagesQueue.push(incoming, peer);
+  }
+
+  /**
+   * Get the storage streaming queue if available.
+   * Returns undefined if storage doesn't have a streaming queue.
+   */
+  private getStorageStreamingQueue(): StorageStreamingQueue | undefined {
+    const storage = this.local.storage;
+    if (storage && "streamingQueue" in storage) {
+      return storage.streamingQueue as StorageStreamingQueue;
+    }
+    return undefined;
+  }
 
-
+  /**
+   * Unified queue processing that coordinates both incoming messages
+   * and storage streaming entries.
+   *
+   * Processes items from both queues with priority ordering:
+   * - Incoming messages are processed via round-robin across peers
+   * - Storage streaming entries are processed by priority (MEDIUM before LOW)
+   *
+   * Implements time budget scheduling to avoid blocking the main thread.
+   */
+  private async processQueues() {
+    if (this.processing) {
       return;
     }
 
-    this.
-
-
+    this.processing = true;
+    let lastTimer = performance.now();
+
+    const streamingQueue = this.getStorageStreamingQueue();
+
+    while (true) {
+      // First, try to pull from incoming messages queue
+      const messageEntry = this.messagesQueue.pull();
+      if (messageEntry) {
+        try {
+          this.handleSyncMessage(messageEntry.msg, messageEntry.peer);
+        } catch (err) {
+          logger.error("Error processing message", { err });
+        }
+      }
+
+      // Then, try to pull from storage streaming queue
+      const pushStreamingContent = streamingQueue?.pull();
+      if (pushStreamingContent) {
+        try {
+          // Invoke the pushContent callback to stream the content
+          pushStreamingContent();
+        } catch (err) {
+          logger.error("Error processing storage streaming entry", {
+            err,
+          });
+        }
+      }
+
+      // If both queues are empty, we're done
+      if (!messageEntry && !pushStreamingContent) {
+        break;
+      }
+
+      // Check if we have blocked the main thread for too long
+      // and if so, yield to the event loop
      const currentTimer = performance.now();
+      if (
+        currentTimer - lastTimer >
+        SYNC_SCHEDULER_CONFIG.INCOMING_MESSAGES_TIME_BUDGET
+      ) {
+        await new Promise<void>((resolve) => setTimeout(resolve));
+        lastTimer = performance.now();
+      }
+    }
+
+    this.processing = false;
   }
 
   addPeer(peer: Peer, skipReconciliation: boolean = false) {
@@ -732,6 +902,9 @@
 
     if (from !== "storage" && hasNewContent) {
       this.storeContent(validNewContent);
+      if (from === "import") {
+        this.trackSyncState(coValue.id);
+      }
     }
 
     for (const peer of this.getPeers(coValue.id)) {
@@ -787,6 +960,8 @@
 
     this.storeContent(content);
 
+    this.trackSyncState(coValue.id);
+
     const contentKnownState = knownStateFromContent(content);
 
     for (const peer of this.getPeers(coValue.id)) {
@@ -811,6 +986,37 @@
     }
   }
 
+  private trackSyncState(coValueId: RawCoID): void {
+    const peers = this.getPersistentServerPeers(coValueId);
+
+    const isSyncRequired = this.local.syncWhen !== "never";
+    if (isSyncRequired && peers.length === 0) {
+      this.unsyncedTracker.add(coValueId);
+      return;
+    }
+
+    for (const peer of peers) {
+      if (this.syncState.isSynced(peer, coValueId)) {
+        continue;
+      }
+      const alreadyTracked = this.unsyncedTracker.add(coValueId, peer.id);
+      if (alreadyTracked) {
+        continue;
+      }
+
+      const unsubscribe = this.syncState.subscribeToPeerUpdates(
+        peer.id,
+        coValueId,
+        (_knownState, syncState) => {
+          if (syncState.uploaded) {
+            this.unsyncedTracker.remove(coValueId, peer.id);
+            unsubscribe();
+          }
+        },
+      );
+    }
+  }
+
   private storeContent(content: NewContentMessage) {
     const storage = this.local.storage;
 
@@ -860,8 +1066,9 @@
     return new Promise((resolve, reject) => {
       const unsubscribe = this.syncState.subscribeToPeerUpdates(
         peerId,
-
-
+        id,
+        (_knownState, syncState) => {
+          if (syncState.uploaded) {
             resolve(true);
             unsubscribe?.();
             clearTimeout(timeoutId);
@@ -916,10 +1123,30 @@
     );
   }
 
-
+  setStorage(storage: StorageAPI) {
+    this.unsyncedTracker.setStorage(storage);
+
+    const storageStreamingQueue = this.getStorageStreamingQueue();
+    if (storageStreamingQueue) {
+      storageStreamingQueue.setListener(() => {
+        this.processQueues();
+      });
+    }
+  }
+
+  removeStorage() {
+    this.unsyncedTracker.removeStorage();
+  }
+
+  /**
+   * Closes all the peer connections and ensures the list of unsynced coValues is persisted to storage.
+   * @returns Promise of the current pending store operation, if any.
+   */
+  gracefulShutdown(): Promise<void> | undefined {
     for (const peer of Object.values(this.peers)) {
       peer.gracefulShutdown();
     }
+    return this.unsyncedTracker.forcePersist();
   }
 }
 
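Note: the `processQueues` loop added above follows a common time-budget pattern: drain queued work synchronously, but yield back to the event loop once a budget is exceeded. The sketch below illustrates that pattern in isolation; `SimpleQueue`, `drainWithBudget`, and the 50 ms budget are assumptions made for this example, not cojson exports.

```ts
// Illustrative sketch (not a cojson API): drain a pull-based queue but yield
// back to the event loop once a time budget is exceeded, mirroring the
// scheduling pattern used by processQueues above.
const TIME_BUDGET_MS = 50; // assumed budget for the example

class SimpleQueue<T> {
  private items: T[] = [];
  push(item: T) {
    this.items.push(item);
  }
  pull(): T | undefined {
    return this.items.shift();
  }
}

async function drainWithBudget<T>(
  queue: SimpleQueue<T>,
  handle: (item: T) => void,
): Promise<void> {
  let lastYield = performance.now();

  while (true) {
    const item = queue.pull();
    if (item === undefined) {
      break; // queue drained
    }

    try {
      handle(item);
    } catch (err) {
      console.error("Error processing item", err);
    }

    // If we've held the thread longer than the budget, give timers,
    // I/O callbacks and rendering a chance to run before continuing.
    if (performance.now() - lastYield > TIME_BUDGET_MS) {
      await new Promise<void>((resolve) => setTimeout(resolve));
      lastYield = performance.now();
    }
  }
}
```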
package/src/tests/IncomingMessagesQueue.test.ts
CHANGED
@@ -1,4 +1,4 @@
-import { afterEach,
+import { afterEach, describe, expect, test, vi } from "vitest";
 import { PeerState } from "../PeerState.js";
 import { IncomingMessagesQueue } from "../queue/IncomingMessagesQueue.js";
 import { ConnectedPeerChannel } from "../streamUtils.js";
@@ -8,13 +8,6 @@ import {
   tearDownTestMetricReader,
 } from "./testUtils.js";
 
-// Mock performance.now for consistent timing tests
-let mockPerformanceNow = vi.spyOn(performance, "now");
-
-beforeEach(() => {
-  vi.clearAllMocks();
-});
-
 function createMockPeer(id: string): Peer {
   return {
     id,
@@ -60,18 +53,15 @@ function createMockSyncMessage(
 }
 
 function setup() {
+  const processQueues = vi.fn();
   const metricReader = createTestMetricReader();
-  const queue = new IncomingMessagesQueue();
+  const queue = new IncomingMessagesQueue(processQueues);
   const peer1 = createMockPeerState("peer1");
   const peer2 = createMockPeerState("peer2");
 
-  return { queue, peer1, peer2, metricReader };
+  return { queue, peer1, peer2, metricReader, processQueues };
 }
 
-beforeEach(() => {
-  mockPerformanceNow.mockReturnValue(0);
-});
-
 afterEach(() => {
   tearDownTestMetricReader();
 });
@@ -82,7 +72,6 @@ describe("IncomingMessagesQueue", () => {
     const { queue } = setup();
     expect(queue["queues"]).toEqual([]);
     expect(queue.currentQueue).toBe(0);
-    expect(queue.processing).toBe(false);
   });
 });
 
@@ -217,142 +206,6 @@ describe("IncomingMessagesQueue", () => {
    });
  });
 
-  describe("processQueue", () => {
-    test("should process all messages in queue", async () => {
-      const { queue, peer1, peer2 } = setup();
-      const msg1 = createMockSyncMessage("test1");
-      const msg2 = createMockSyncMessage("test2");
-      const msg3 = createMockSyncMessage("test3");
-
-      queue.push(msg1, peer1);
-      queue.push(msg2, peer1);
-      queue.push(msg3, peer2);
-
-      const processedMessages: Array<{ msg: SyncMessage; peer: PeerState }> =
-        [];
-
-      await queue.processQueue((msg, peer) => {
-        processedMessages.push({ msg, peer });
-      });
-
-      expect(processedMessages).toEqual([
-        { msg: msg1, peer: peer1 },
-        { msg: msg3, peer: peer2 },
-        { msg: msg2, peer: peer1 },
-      ]);
-      expect(queue.processing).toBe(false);
-    });
-
-    test("should set processing flag during execution", async () => {
-      const { queue, peer1 } = setup();
-      const msg = createMockSyncMessage("test");
-      queue.push(msg, peer1);
-
-      let processingFlagDuringExecution = false;
-      const processingPromise = queue.processQueue(() => {
-        processingFlagDuringExecution = queue.processing;
-      });
-
-      await processingPromise;
-      expect(processingFlagDuringExecution).toBe(true);
-      expect(queue.processing).toBe(false);
-    });
-
-    test("should handle empty queue", async () => {
-      const { queue } = setup();
-      const callback = vi.fn();
-
-      await queue.processQueue(callback);
-
-      expect(callback).not.toHaveBeenCalled();
-      expect(queue.processing).toBe(false);
-    });
-
-    test("should yield to event loop when processing takes too long", async () => {
-      const { queue, peer1 } = setup();
-      const msg1 = createMockSyncMessage("test1");
-      const msg2 = createMockSyncMessage("test2");
-
-      queue.push(msg1, peer1);
-      queue.push(msg2, peer1);
-
-      // Mock timing to simulate long processing
-      mockPerformanceNow
-        .mockReturnValueOnce(0) // Initial time
-        .mockReturnValueOnce(60); // After first message (60ms > 50ms threshold)
-
-      const setTimeoutSpy = vi.spyOn(global, "setTimeout");
-
-      await queue.processQueue(() => {
-        // Simulate some processing time
-      });
-
-      expect(setTimeoutSpy).toHaveBeenCalledWith(expect.any(Function), 0);
-    });
-
-    test("should not yield to event loop when processing is fast", async () => {
-      const { queue, peer1 } = setup();
-      const msg = createMockSyncMessage("test");
-      queue.push(msg, peer1);
-
-      // Mock timing to simulate fast processing
-      mockPerformanceNow
-        .mockReturnValueOnce(0) // Initial time
-        .mockReturnValueOnce(30); // After message (30ms < 50ms threshold)
-
-      const setTimeoutSpy = vi.spyOn(global, "setTimeout");
-
-      await queue.processQueue(() => {
-        // Simulate some processing time
-      });
-
-      expect(setTimeoutSpy).not.toHaveBeenCalled();
-    });
-
-    test("should handle callback errors gracefully", async () => {
-      const { queue, peer1 } = setup();
-      const msg = createMockSyncMessage("test");
-      queue.push(msg, peer1);
-
-      const error = new Error("Callback error");
-
-      await queue.processQueue(() => {
-        throw error;
-      });
-
-      // The processing flag should be reset even when an error occurs
-      expect(queue.processing).toBe(false);
-    });
-
-    test("should process messages in correct round-robin order", async () => {
-      const { queue, peer1, peer2 } = setup();
-      const msg1 = createMockSyncMessage("test1");
-      const msg2 = createMockSyncMessage("test2");
-      const msg3 = createMockSyncMessage("test3");
-      const msg4 = createMockSyncMessage("test4");
-
-      queue.push(msg1, peer1);
-      queue.push(msg2, peer1);
-      queue.push(msg3, peer2);
-      queue.push(msg4, peer2);
-
-      const processedMessages: Array<{ msg: SyncMessage; peer: PeerState }> =
-        [];
-
-      await queue.processQueue((msg, peer) => {
-        processedMessages.push({ msg, peer });
-      });
-
-      // Should process in round-robin: peer1, peer2, peer1, peer2
-      expect(processedMessages).toEqual([
-        { msg: msg1, peer: peer1 },
-        { msg: msg3, peer: peer2 },
-        { msg: msg2, peer: peer1 },
-        { msg: msg4, peer: peer2 },
-      ]);
-    });
-  });
-
  describe("edge cases", () => {
    test("should handle peer with multiple messages correctly", () => {
      const { queue, peer1 } = setup();
@@ -411,35 +264,6 @@
    });
  });
 
-  describe("concurrent operations", () => {
-    test("should prevent multiple concurrent processQueue calls", async () => {
-      const { queue, peer1 } = setup();
-      const msg = createMockSyncMessage("test");
-      queue.push(msg, peer1);
-
-      const firstProcessSpy = vi.fn();
-
-      const firstProcess = queue.processQueue((msg, peer) => {
-        firstProcessSpy(msg, peer);
-      });
-
-      const secondProcessSpy = vi.fn();
-
-      // Second process should not interfere
-      const secondProcess = queue.processQueue(() => {
-        secondProcessSpy();
-      });
-
-      await firstProcess;
-      await secondProcess;
-
-      expect(firstProcessSpy).toHaveBeenCalled();
-      expect(secondProcessSpy).not.toHaveBeenCalled();
-
-      expect(queue.processing).toBe(false);
-    });
-  });
-
  describe("metrics", () => {
    test("should increment push counter when pushing messages", async () => {
      const { queue, peer1, metricReader } = setup();
@@ -594,31 +418,5 @@
      expect(clientPullValue).toBe(0);
      expect(serverPullValue).toBe(0);
    });
-
-    test("should track metrics during processQueue execution", async () => {
-      const { queue, peer1, metricReader } = setup();
-      const msg1 = createMockSyncMessage("test1");
-      const msg2 = createMockSyncMessage("test2");
-
-      queue.push(msg1, peer1);
-      queue.push(msg2, peer1);
-
-      await queue.processQueue(() => {
-        // Process messages
-      });
-
-      const pushValue = await metricReader.getMetricValue(
-        "jazz.messagequeue.incoming.pushed",
-        { peerRole: "client" },
-      );
-
-      const pullValue = await metricReader.getMetricValue(
-        "jazz.messagequeue.incoming.pulled",
-        { peerRole: "client" },
-      );
-
-      expect(pushValue).toBe(2);
-      expect(pullValue).toBe(2);
-    });
  });
});
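Note: the test changes above mirror the new `IncomingMessagesQueue` constructor, which now takes a callback invoked on `push` so that the owner (the SyncManager's `processQueues`) decides when to drain via `pull`, instead of the queue running its own `processQueue` loop. Below is a minimal, hypothetical sketch of that push-notify / pull-drain shape; the class and names are illustrative only and omit the real queue's per-peer round-robin and metrics.

```ts
// Illustrative sketch (hypothetical names): a queue that only notifies a
// listener on push and exposes pull; the owner decides when and how to drain.
type Listener = () => void;

class NotifyingQueue<T> {
  private items: T[] = [];

  constructor(private onPush: Listener) {}

  push(item: T) {
    this.items.push(item);
    // Let the owner decide when to drain, e.g. by scheduling its own loop.
    this.onPush();
  }

  pull(): T | undefined {
    return this.items.shift();
  }
}

// Usage: draining (and guards such as a `processing` flag) live in the owner,
// which is why the updated tests no longer assert on queue.processing.
const queue = new NotifyingQueue<string>(() => {
  let item: string | undefined;
  while ((item = queue.pull()) !== undefined) {
    console.log("handled", item);
  }
});

queue.push("hello");
```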