@webex/plugin-meetings 3.12.0-mobius-socket.2 → 3.12.0-mobius-socket.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/AGENTS.md +9 -0
- package/dist/aiEnableRequest/index.js +15 -2
- package/dist/aiEnableRequest/index.js.map +1 -1
- package/dist/breakouts/breakout.js +8 -3
- package/dist/breakouts/breakout.js.map +1 -1
- package/dist/breakouts/index.js +3 -2
- package/dist/breakouts/index.js.map +1 -1
- package/dist/config.js +1 -0
- package/dist/config.js.map +1 -1
- package/dist/constants.js +6 -3
- package/dist/constants.js.map +1 -1
- package/dist/controls-options-manager/constants.js +11 -1
- package/dist/controls-options-manager/constants.js.map +1 -1
- package/dist/controls-options-manager/index.js +38 -24
- package/dist/controls-options-manager/index.js.map +1 -1
- package/dist/controls-options-manager/util.js +91 -0
- package/dist/controls-options-manager/util.js.map +1 -1
- package/dist/hashTree/constants.js +10 -1
- package/dist/hashTree/constants.js.map +1 -1
- package/dist/hashTree/hashTreeParser.js +651 -382
- package/dist/hashTree/hashTreeParser.js.map +1 -1
- package/dist/hashTree/utils.js +22 -0
- package/dist/hashTree/utils.js.map +1 -1
- package/dist/index.js +7 -0
- package/dist/index.js.map +1 -1
- package/dist/interceptors/locusRetry.js +23 -8
- package/dist/interceptors/locusRetry.js.map +1 -1
- package/dist/interpretation/index.js +10 -1
- package/dist/interpretation/index.js.map +1 -1
- package/dist/interpretation/siLanguage.js +1 -1
- package/dist/locus-info/controlsUtils.js +4 -1
- package/dist/locus-info/controlsUtils.js.map +1 -1
- package/dist/locus-info/index.js +289 -87
- package/dist/locus-info/index.js.map +1 -1
- package/dist/locus-info/types.js +19 -0
- package/dist/locus-info/types.js.map +1 -1
- package/dist/media/properties.js +1 -0
- package/dist/media/properties.js.map +1 -1
- package/dist/meeting/in-meeting-actions.js +3 -1
- package/dist/meeting/in-meeting-actions.js.map +1 -1
- package/dist/meeting/index.js +848 -582
- package/dist/meeting/index.js.map +1 -1
- package/dist/meeting/util.js +19 -2
- package/dist/meeting/util.js.map +1 -1
- package/dist/meetings/index.js +205 -77
- package/dist/meetings/index.js.map +1 -1
- package/dist/meetings/meetings.types.js +6 -1
- package/dist/meetings/meetings.types.js.map +1 -1
- package/dist/meetings/request.js +39 -0
- package/dist/meetings/request.js.map +1 -1
- package/dist/meetings/util.js +67 -5
- package/dist/meetings/util.js.map +1 -1
- package/dist/member/index.js +10 -0
- package/dist/member/index.js.map +1 -1
- package/dist/member/types.js.map +1 -1
- package/dist/member/util.js +3 -0
- package/dist/member/util.js.map +1 -1
- package/dist/metrics/constants.js +4 -1
- package/dist/metrics/constants.js.map +1 -1
- package/dist/multistream/receiveSlot.js +9 -0
- package/dist/multistream/receiveSlot.js.map +1 -1
- package/dist/reactions/reactions.type.js.map +1 -1
- package/dist/recording-controller/index.js +1 -3
- package/dist/recording-controller/index.js.map +1 -1
- package/dist/types/config.d.ts +1 -0
- package/dist/types/constants.d.ts +2 -0
- package/dist/types/controls-options-manager/constants.d.ts +6 -1
- package/dist/types/controls-options-manager/index.d.ts +10 -0
- package/dist/types/hashTree/constants.d.ts +1 -0
- package/dist/types/hashTree/hashTreeParser.d.ts +83 -16
- package/dist/types/hashTree/utils.d.ts +11 -0
- package/dist/types/index.d.ts +2 -0
- package/dist/types/interceptors/locusRetry.d.ts +4 -4
- package/dist/types/locus-info/index.d.ts +46 -6
- package/dist/types/locus-info/types.d.ts +21 -1
- package/dist/types/media/properties.d.ts +1 -0
- package/dist/types/meeting/in-meeting-actions.d.ts +2 -0
- package/dist/types/meeting/index.d.ts +65 -1
- package/dist/types/meeting/util.d.ts +8 -0
- package/dist/types/meetings/index.d.ts +20 -2
- package/dist/types/meetings/meetings.types.d.ts +15 -0
- package/dist/types/meetings/request.d.ts +14 -0
- package/dist/types/member/index.d.ts +1 -0
- package/dist/types/member/types.d.ts +1 -0
- package/dist/types/member/util.d.ts +1 -0
- package/dist/types/metrics/constants.d.ts +3 -0
- package/dist/types/reactions/reactions.type.d.ts +3 -0
- package/dist/webinar/index.js +68 -17
- package/dist/webinar/index.js.map +1 -1
- package/package.json +22 -22
- package/src/aiEnableRequest/index.ts +16 -0
- package/src/breakouts/breakout.ts +3 -1
- package/src/breakouts/index.ts +1 -0
- package/src/config.ts +1 -0
- package/src/constants.ts +5 -1
- package/src/controls-options-manager/constants.ts +14 -1
- package/src/controls-options-manager/index.ts +47 -24
- package/src/controls-options-manager/util.ts +81 -1
- package/src/hashTree/constants.ts +9 -0
- package/src/hashTree/hashTreeParser.ts +375 -197
- package/src/hashTree/utils.ts +17 -0
- package/src/index.ts +5 -0
- package/src/interceptors/locusRetry.ts +25 -4
- package/src/interpretation/index.ts +25 -8
- package/src/locus-info/controlsUtils.ts +3 -1
- package/src/locus-info/index.ts +291 -97
- package/src/locus-info/types.ts +25 -1
- package/src/media/properties.ts +1 -0
- package/src/meeting/in-meeting-actions.ts +4 -0
- package/src/meeting/index.ts +260 -23
- package/src/meeting/util.ts +20 -2
- package/src/meetings/index.ts +109 -43
- package/src/meetings/meetings.types.ts +19 -0
- package/src/meetings/request.ts +43 -0
- package/src/meetings/util.ts +80 -1
- package/src/member/index.ts +10 -0
- package/src/member/types.ts +1 -0
- package/src/member/util.ts +3 -0
- package/src/metrics/constants.ts +3 -0
- package/src/multistream/receiveSlot.ts +18 -0
- package/src/reactions/reactions.type.ts +3 -0
- package/src/recording-controller/index.ts +1 -2
- package/src/webinar/index.ts +88 -21
- package/test/unit/spec/aiEnableRequest/index.ts +86 -0
- package/test/unit/spec/breakouts/breakout.ts +9 -3
- package/test/unit/spec/breakouts/index.ts +2 -0
- package/test/unit/spec/controls-options-manager/index.js +140 -29
- package/test/unit/spec/controls-options-manager/util.js +165 -0
- package/test/unit/spec/hashTree/hashTreeParser.ts +1263 -157
- package/test/unit/spec/hashTree/utils.ts +88 -1
- package/test/unit/spec/interceptors/locusRetry.ts +205 -4
- package/test/unit/spec/interpretation/index.ts +26 -4
- package/test/unit/spec/locus-info/controlsUtils.js +172 -57
- package/test/unit/spec/locus-info/index.js +475 -81
- package/test/unit/spec/meeting/in-meeting-actions.ts +2 -0
- package/test/unit/spec/meeting/index.js +902 -14
- package/test/unit/spec/meeting/muteState.js +3 -0
- package/test/unit/spec/meeting/utils.js +33 -0
- package/test/unit/spec/meetings/index.js +309 -10
- package/test/unit/spec/meetings/request.js +141 -0
- package/test/unit/spec/meetings/utils.js +161 -0
- package/test/unit/spec/member/index.js +7 -0
- package/test/unit/spec/member/util.js +24 -0
- package/test/unit/spec/recording-controller/index.js +9 -8
- package/test/unit/spec/webinar/index.ts +81 -16
|
@@ -1,11 +1,13 @@
|
|
|
1
1
|
import {cloneDeep, isEmpty, zip} from 'lodash';
|
|
2
2
|
import HashTree, {LeafDataItem} from './hashTree';
|
|
3
3
|
import LoggerProxy from '../common/logs/logger-proxy';
|
|
4
|
+
import Metrics from '../metrics';
|
|
5
|
+
import BEHAVIORAL_METRICS from '../metrics/constants';
|
|
4
6
|
import {Enum, HTTP_VERBS} from '../constants';
|
|
5
|
-
import {DataSetNames, EMPTY_HASH} from './constants';
|
|
7
|
+
import {DataSetNames, DATA_SET_INIT_PRIORITY, EMPTY_HASH} from './constants';
|
|
6
8
|
import {ObjectType, HtMeta, HashTreeObject} from './types';
|
|
7
|
-
import {LocusDTO} from '../locus-info/types';
|
|
8
|
-
import {deleteNestedObjectsWithHtMeta, isMetadata} from './utils';
|
|
9
|
+
import {LocusDTO, LocusErrorCodes} from '../locus-info/types';
|
|
10
|
+
import {deleteNestedObjectsWithHtMeta, isMetadata, sortByInitPriority} from './utils';
|
|
9
11
|
|
|
10
12
|
export interface DataSet {
|
|
11
13
|
url: string;
|
|
@@ -54,13 +56,24 @@ type WebexRequestMethod = (options: Record<string, any>) => Promise<any>;
|
|
|
54
56
|
export const LocusInfoUpdateType = {
|
|
55
57
|
OBJECTS_UPDATED: 'OBJECTS_UPDATED',
|
|
56
58
|
MEETING_ENDED: 'MEETING_ENDED',
|
|
59
|
+
LOCUS_NOT_FOUND: 'LOCUS_NOT_FOUND',
|
|
57
60
|
} as const;
|
|
58
61
|
|
|
59
62
|
export type LocusInfoUpdateType = Enum<typeof LocusInfoUpdateType>;
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
|
|
63
|
+
|
|
64
|
+
interface LocusUpdatePayloads {
|
|
65
|
+
[LocusInfoUpdateType.OBJECTS_UPDATED]: {updatedObjects: HashTreeObject[]};
|
|
66
|
+
[LocusInfoUpdateType.MEETING_ENDED]: unknown; // No extra data
|
|
67
|
+
[LocusInfoUpdateType.LOCUS_NOT_FOUND]: unknown; // No extra data
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
export type LocusInfoUpdate = {
|
|
71
|
+
[K in keyof LocusUpdatePayloads]: {
|
|
72
|
+
updateType: K;
|
|
73
|
+
} & LocusUpdatePayloads[K];
|
|
74
|
+
}[keyof LocusUpdatePayloads];
|
|
75
|
+
|
|
76
|
+
export type LocusInfoUpdateCallback = (update: LocusInfoUpdate) => void;
|
|
64
77
|
|
|
65
78
|
interface LeafInfo {
|
|
66
79
|
type: ObjectType;
|
|
@@ -75,6 +88,13 @@ interface LeafInfo {
|
|
|
75
88
|
*/
|
|
76
89
|
export class MeetingEndedError extends Error {}
|
|
77
90
|
|
|
91
|
+
/**
|
|
92
|
+
* This error is thrown when a 404 is received from Locus hash tree endpoints, indicating that the locus URL
|
|
93
|
+
* is no longer valid (e.g. participant moved to a breakout room, or meeting ended).
|
|
94
|
+
* It's handled internally by HashTreeParser and results in LOCUS_NOT_FOUND being sent up.
|
|
95
|
+
*/
|
|
96
|
+
export class LocusNotFoundError extends Error {}
|
|
97
|
+
|
|
78
98
|
/* Currently Locus always sends Metadata objects only in the "self" dataset.
|
|
79
99
|
* If this ever changes, update all the code that relies on this constant.
|
|
80
100
|
*/
|
|
@@ -99,6 +119,10 @@ class HashTreeParser {
|
|
|
99
119
|
heartbeatIntervalMs?: number;
|
|
100
120
|
private excludedDataSets: string[];
|
|
101
121
|
state: 'active' | 'stopped';
|
|
122
|
+
private syncQueue: Array<{dataSetName: string; reason: string; isInitialization?: boolean}> = [];
|
|
123
|
+
private isSyncInProgress = false;
|
|
124
|
+
private isSyncAllInProgress = false;
|
|
125
|
+
private syncQueueProcessingPromise: Promise<void> = Promise.resolve();
|
|
102
126
|
|
|
103
127
|
/**
|
|
104
128
|
* Constructor for HashTreeParser
|
|
@@ -224,16 +248,16 @@ class HashTreeParser {
|
|
|
224
248
|
* @param {DataSet} dataSetInfo The new data set to be added
|
|
225
249
|
* @returns {Promise}
|
|
226
250
|
*/
|
|
227
|
-
private initializeNewVisibleDataSet(
|
|
251
|
+
private async initializeNewVisibleDataSet(
|
|
228
252
|
visibleDataSetInfo: VisibleDataSetInfo,
|
|
229
253
|
dataSetInfo: DataSet
|
|
230
|
-
): Promise<
|
|
254
|
+
): Promise<void> {
|
|
231
255
|
if (this.isVisibleDataSet(dataSetInfo.name)) {
|
|
232
256
|
LoggerProxy.logger.info(
|
|
233
257
|
`HashTreeParser#initializeNewVisibleDataSet --> ${this.debugId} Data set "${dataSetInfo.name}" already exists, skipping init`
|
|
234
258
|
);
|
|
235
259
|
|
|
236
|
-
return
|
|
260
|
+
return;
|
|
237
261
|
}
|
|
238
262
|
|
|
239
263
|
LoggerProxy.logger.info(
|
|
@@ -241,7 +265,7 @@ class HashTreeParser {
|
|
|
241
265
|
);
|
|
242
266
|
|
|
243
267
|
if (!this.addToVisibleDataSetsList(visibleDataSetInfo)) {
|
|
244
|
-
return
|
|
268
|
+
return;
|
|
245
269
|
}
|
|
246
270
|
|
|
247
271
|
const hashTree = new HashTree([], dataSetInfo.leafCount);
|
|
@@ -251,51 +275,8 @@ class HashTreeParser {
|
|
|
251
275
|
hashTree,
|
|
252
276
|
};
|
|
253
277
|
|
|
254
|
-
|
|
255
|
-
|
|
256
|
-
|
|
257
|
-
/**
|
|
258
|
-
* Sends a special sync request to Locus with all leaves empty - this is a way to get all the data for a given dataset.
|
|
259
|
-
*
|
|
260
|
-
* @param {string} datasetName - name of the dataset for which to send the request
|
|
261
|
-
* @param {string} debugText - text to include in logs
|
|
262
|
-
* @returns {Promise}
|
|
263
|
-
*/
|
|
264
|
-
private sendInitializationSyncRequestToLocus(
|
|
265
|
-
datasetName: string,
|
|
266
|
-
debugText: string
|
|
267
|
-
): Promise<{updateType: LocusInfoUpdateType; updatedObjects?: HashTreeObject[]}> {
|
|
268
|
-
const dataset = this.dataSets[datasetName];
|
|
269
|
-
|
|
270
|
-
if (!dataset) {
|
|
271
|
-
LoggerProxy.logger.warn(
|
|
272
|
-
`HashTreeParser#sendInitializationSyncRequestToLocus --> ${this.debugId} No data set found for ${datasetName}, cannot send the request for leaf data`
|
|
273
|
-
);
|
|
274
|
-
|
|
275
|
-
return Promise.resolve(null);
|
|
276
|
-
}
|
|
277
|
-
|
|
278
|
-
const emptyLeavesData = new Array(dataset.leafCount).fill([]);
|
|
279
|
-
|
|
280
|
-
LoggerProxy.logger.info(
|
|
281
|
-
`HashTreeParser#sendInitializationSyncRequestToLocus --> ${this.debugId} Sending initial sync request to Locus for data set "${datasetName}" with empty leaf data`
|
|
282
|
-
);
|
|
283
|
-
|
|
284
|
-
return this.sendSyncRequestToLocus(this.dataSets[datasetName], emptyLeavesData).then(
|
|
285
|
-
(syncResponse) => {
|
|
286
|
-
if (syncResponse) {
|
|
287
|
-
return {
|
|
288
|
-
updateType: LocusInfoUpdateType.OBJECTS_UPDATED,
|
|
289
|
-
updatedObjects: this.parseMessage(
|
|
290
|
-
syncResponse,
|
|
291
|
-
`via empty leaves /sync API call for ${debugText}`
|
|
292
|
-
),
|
|
293
|
-
};
|
|
294
|
-
}
|
|
295
|
-
|
|
296
|
-
return {updateType: LocusInfoUpdateType.OBJECTS_UPDATED, updatedObjects: []};
|
|
297
|
-
}
|
|
298
|
-
);
|
|
278
|
+
this.enqueueSyncForDataset(dataSetInfo.name, 'new visible data set initialization', true);
|
|
279
|
+
await this.syncQueueProcessingPromise;
|
|
299
280
|
}
|
|
300
281
|
|
|
301
282
|
/**
|
|
@@ -382,9 +363,8 @@ class HashTreeParser {
|
|
|
382
363
|
if (this.state === 'stopped') {
|
|
383
364
|
return;
|
|
384
365
|
}
|
|
385
|
-
const updatedObjects: HashTreeObject[] = [];
|
|
386
366
|
|
|
387
|
-
for (const dataSet of visibleDataSets) {
|
|
367
|
+
for (const dataSet of sortByInitPriority(visibleDataSets, DATA_SET_INIT_PRIORITY)) {
|
|
388
368
|
const {name, leafCount, url} = dataSet;
|
|
389
369
|
|
|
390
370
|
if (!this.dataSets[name]) {
|
|
@@ -420,19 +400,12 @@ class HashTreeParser {
|
|
|
420
400
|
);
|
|
421
401
|
this.dataSets[name].hashTree = new HashTree([], leafCount);
|
|
422
402
|
|
|
423
|
-
|
|
424
|
-
const data = await this.sendInitializationSyncRequestToLocus(name, debugText);
|
|
425
|
-
|
|
426
|
-
if (data.updateType === LocusInfoUpdateType.OBJECTS_UPDATED) {
|
|
427
|
-
updatedObjects.push(...(data.updatedObjects || []));
|
|
428
|
-
}
|
|
403
|
+
this.enqueueSyncForDataset(name, `initialization sync for ${debugText}`, true);
|
|
429
404
|
}
|
|
430
405
|
}
|
|
431
406
|
|
|
432
|
-
|
|
433
|
-
|
|
434
|
-
updatedObjects,
|
|
435
|
-
});
|
|
407
|
+
// wait for all enqueued initialization syncs to complete
|
|
408
|
+
await this.syncQueueProcessingPromise;
|
|
436
409
|
}
|
|
437
410
|
|
|
438
411
|
/**
|
|
@@ -573,25 +546,38 @@ class HashTreeParser {
|
|
|
573
546
|
private handleRootHashHeartBeatMessage(message: RootHashMessage): void {
|
|
574
547
|
const {dataSets} = message;
|
|
575
548
|
|
|
576
|
-
LoggerProxy.logger.info(
|
|
577
|
-
`HashTreeParser#handleRootHashMessage --> ${
|
|
578
|
-
this.debugId
|
|
579
|
-
} Received heartbeat root hash message with data sets: ${JSON.stringify(
|
|
580
|
-
dataSets.map(({name, root, leafCount, version}) => ({
|
|
581
|
-
name,
|
|
582
|
-
root,
|
|
583
|
-
leafCount,
|
|
584
|
-
version,
|
|
585
|
-
}))
|
|
586
|
-
)}`
|
|
587
|
-
);
|
|
588
|
-
|
|
589
549
|
dataSets.forEach((dataSet) => {
|
|
590
550
|
this.updateDataSetInfo(dataSet);
|
|
591
551
|
this.runSyncAlgorithm(dataSet);
|
|
592
552
|
});
|
|
593
553
|
}
|
|
594
554
|
|
|
555
|
+
/**
|
|
556
|
+
* Handles known errors that can happen during syncs
|
|
557
|
+
*
|
|
558
|
+
* @param {any} error - The error to handle
|
|
559
|
+
* @returns {boolean} true if the error was recognized and handled, false otherwise
|
|
560
|
+
*/
|
|
561
|
+
private handleSyncErrors(error: any) {
|
|
562
|
+
if (error instanceof MeetingEndedError) {
|
|
563
|
+
this.callLocusInfoUpdateCallback({
|
|
564
|
+
updateType: LocusInfoUpdateType.MEETING_ENDED,
|
|
565
|
+
});
|
|
566
|
+
|
|
567
|
+
return true;
|
|
568
|
+
}
|
|
569
|
+
if (error instanceof LocusNotFoundError) {
|
|
570
|
+
this.callLocusInfoUpdateCallback({
|
|
571
|
+
updateType: LocusInfoUpdateType.LOCUS_NOT_FOUND,
|
|
572
|
+
});
|
|
573
|
+
this.stop();
|
|
574
|
+
|
|
575
|
+
return true;
|
|
576
|
+
}
|
|
577
|
+
|
|
578
|
+
return false;
|
|
579
|
+
}
|
|
580
|
+
|
|
595
581
|
/**
|
|
596
582
|
* Asynchronously initializes new visible data sets
|
|
597
583
|
*
|
|
@@ -608,11 +594,7 @@ class HashTreeParser {
|
|
|
608
594
|
);
|
|
609
595
|
queueMicrotask(() => {
|
|
610
596
|
this.initializeNewVisibleDataSets(dataSetsRequiringInitialization).catch((error) => {
|
|
611
|
-
if (error
|
|
612
|
-
this.callLocusInfoUpdateCallback({
|
|
613
|
-
updateType: LocusInfoUpdateType.MEETING_ENDED,
|
|
614
|
-
});
|
|
615
|
-
} else {
|
|
597
|
+
if (!this.handleSyncErrors(error)) {
|
|
616
598
|
LoggerProxy.logger.warn(
|
|
617
599
|
`HashTreeParser#queueInitForNewVisibleDataSets --> ${
|
|
618
600
|
this.debugId
|
|
@@ -690,7 +672,14 @@ class HashTreeParser {
|
|
|
690
672
|
|
|
691
673
|
const {dataSets, locus, metadata} = update;
|
|
692
674
|
|
|
675
|
+
LoggerProxy.logger.info(
|
|
676
|
+
`HashTreeParser#handleLocusUpdate --> ${this.debugId} received update with dataSets=${dataSets
|
|
677
|
+
?.map((ds) => ds.name)
|
|
678
|
+
.join(',')} metadata=${metadata ? 'yes' : 'no'}`
|
|
679
|
+
);
|
|
680
|
+
|
|
693
681
|
if (!dataSets) {
|
|
682
|
+
// this happens for example when we handle GET /loci response
|
|
694
683
|
LoggerProxy.logger.info(
|
|
695
684
|
`HashTreeParser#handleLocusUpdate --> ${this.debugId} received hash tree update without dataSets`
|
|
696
685
|
);
|
|
@@ -794,6 +783,18 @@ class HashTreeParser {
|
|
|
794
783
|
}
|
|
795
784
|
}
|
|
796
785
|
|
|
786
|
+
/**
|
|
787
|
+
* Updates the leaf count for a data set, resizing its hash tree accordingly.
|
|
788
|
+
*
|
|
789
|
+
* @param {InternalDataSet} dataSet - The data set to update
|
|
790
|
+
* @param {number} newLeafCount - The new leaf count
|
|
791
|
+
* @returns {void}
|
|
792
|
+
*/
|
|
793
|
+
private updateDataSetLeafCount(dataSet: InternalDataSet, newLeafCount: number): void {
|
|
794
|
+
dataSet.hashTree?.resize(newLeafCount);
|
|
795
|
+
dataSet.leafCount = newLeafCount;
|
|
796
|
+
}
|
|
797
|
+
|
|
797
798
|
/**
|
|
798
799
|
* Checks for changes in the visible data sets based on the updated objects.
|
|
799
800
|
* @param {HashTreeObject[]} updatedObjects - The list of updated hash tree objects.
|
|
@@ -960,7 +961,7 @@ class HashTreeParser {
|
|
|
960
961
|
}
|
|
961
962
|
const allDataSets = await this.getAllVisibleDataSetsFromLocus();
|
|
962
963
|
|
|
963
|
-
for (const ds of addedDataSets) {
|
|
964
|
+
for (const ds of sortByInitPriority(addedDataSets, DATA_SET_INIT_PRIORITY)) {
|
|
964
965
|
const dataSetInfo = allDataSets.find((d) => d.name === ds.name);
|
|
965
966
|
|
|
966
967
|
LoggerProxy.logger.info(
|
|
@@ -972,12 +973,8 @@ class HashTreeParser {
|
|
|
972
973
|
`HashTreeParser#initializeNewVisibleDataSets --> ${this.debugId} missing info about data set "${ds.name}" in Locus response from visibleDataSetsUrl`
|
|
973
974
|
);
|
|
974
975
|
} else {
|
|
975
|
-
// we're awaiting in a loop, because in practice there will be only one new data set at a time,
|
|
976
|
-
// so no point in trying to parallelize this
|
|
977
976
|
// eslint-disable-next-line no-await-in-loop
|
|
978
|
-
|
|
979
|
-
|
|
980
|
-
this.callLocusInfoUpdateCallback(updates);
|
|
977
|
+
await this.initializeNewVisibleDataSet(ds, dataSetInfo);
|
|
981
978
|
}
|
|
982
979
|
}
|
|
983
980
|
}
|
|
@@ -997,14 +994,27 @@ class HashTreeParser {
|
|
|
997
994
|
const {dataSets, visibleDataSetsUrl} = message;
|
|
998
995
|
|
|
999
996
|
LoggerProxy.logger.info(
|
|
1000
|
-
`HashTreeParser#parseMessage --> ${this.debugId}
|
|
1001
|
-
|
|
997
|
+
`HashTreeParser#parseMessage --> ${this.debugId} ${
|
|
998
|
+
debugText || ''
|
|
999
|
+
} dataSets: ${message.dataSets
|
|
1000
|
+
?.map(({name, version}) => `${name}:${version}`)
|
|
1001
|
+
.join(',')}, elements: ${message.locusStateElements
|
|
1002
|
+
?.map(
|
|
1003
|
+
(el) =>
|
|
1004
|
+
`${el.htMeta.elementId.type}:${el.htMeta.elementId.id}:${el.htMeta.elementId.version}${
|
|
1005
|
+
el.data ? '+' : '-'
|
|
1006
|
+
}`
|
|
1007
|
+
)
|
|
1008
|
+
.join(',')}`
|
|
1002
1009
|
);
|
|
1010
|
+
|
|
1003
1011
|
if (message.locusStateElements?.length === 0) {
|
|
1004
1012
|
LoggerProxy.logger.warn(
|
|
1005
1013
|
`HashTreeParser#parseMessage --> ${this.debugId} got empty locusStateElements!!!`
|
|
1006
1014
|
);
|
|
1007
|
-
|
|
1015
|
+
Metrics.sendBehavioralMetric(BEHAVIORAL_METRICS.HASH_TREE_EMPTY_LOCUS_STATE_ELEMENTS, {
|
|
1016
|
+
debugId: this.debugId,
|
|
1017
|
+
});
|
|
1008
1018
|
}
|
|
1009
1019
|
|
|
1010
1020
|
// first, update our metadata about the datasets with info from the message
|
|
@@ -1015,7 +1025,7 @@ class HashTreeParser {
|
|
|
1015
1025
|
|
|
1016
1026
|
// when we detect new visible datasets, it may be that the metadata about them is not
|
|
1017
1027
|
// available in the message, they will require separate async initialization
|
|
1018
|
-
let dataSetsRequiringInitialization = [];
|
|
1028
|
+
let dataSetsRequiringInitialization: VisibleDataSetInfo[] = [];
|
|
1019
1029
|
|
|
1020
1030
|
// first find out if there are any visible data set changes - they're signalled in Metadata object updates
|
|
1021
1031
|
const metadataUpdates = (message.locusStateElements || []).filter((object) =>
|
|
@@ -1023,7 +1033,7 @@ class HashTreeParser {
|
|
|
1023
1033
|
);
|
|
1024
1034
|
|
|
1025
1035
|
if (metadataUpdates.length > 0) {
|
|
1026
|
-
const updatedMetadataObjects = [];
|
|
1036
|
+
const updatedMetadataObjects: HashTreeObject[] = [];
|
|
1027
1037
|
|
|
1028
1038
|
metadataUpdates.forEach((object) => {
|
|
1029
1039
|
// todo: once Locus supports it, we will use the "view" field here instead of dataSetNames
|
|
@@ -1052,7 +1062,7 @@ class HashTreeParser {
|
|
|
1052
1062
|
}
|
|
1053
1063
|
}
|
|
1054
1064
|
|
|
1055
|
-
if (message.locusStateElements
|
|
1065
|
+
if (message.locusStateElements && message.locusStateElements.length > 0) {
|
|
1056
1066
|
// by this point we now have this.dataSets setup for data sets from this message
|
|
1057
1067
|
// and hash trees created for the new visible data sets,
|
|
1058
1068
|
// so we can now process all the updates from the message
|
|
@@ -1148,20 +1158,17 @@ class HashTreeParser {
|
|
|
1148
1158
|
* @param {Object} updates parsed from a Locus message
|
|
1149
1159
|
* @returns {void}
|
|
1150
1160
|
*/
|
|
1151
|
-
private callLocusInfoUpdateCallback(updates: {
|
|
1152
|
-
updateType: LocusInfoUpdateType;
|
|
1153
|
-
updatedObjects?: HashTreeObject[];
|
|
1154
|
-
}) {
|
|
1161
|
+
private callLocusInfoUpdateCallback(updates: LocusInfoUpdate) {
|
|
1155
1162
|
if (this.state === 'stopped') {
|
|
1156
1163
|
return;
|
|
1157
1164
|
}
|
|
1158
1165
|
|
|
1159
|
-
const {updateType
|
|
1166
|
+
const {updateType} = updates;
|
|
1160
1167
|
|
|
1161
|
-
if (updateType === LocusInfoUpdateType.OBJECTS_UPDATED && updatedObjects?.length > 0) {
|
|
1168
|
+
if (updateType === LocusInfoUpdateType.OBJECTS_UPDATED && updates.updatedObjects?.length > 0) {
|
|
1162
1169
|
// Filter out updates for objects that already have a higher version in their datasets,
|
|
1163
1170
|
// or removals for objects that still exist in any of their datasets
|
|
1164
|
-
const filteredUpdates = updatedObjects.filter((object) => {
|
|
1171
|
+
const filteredUpdates = updates.updatedObjects.filter((object) => {
|
|
1165
1172
|
const {elementId} = object.htMeta;
|
|
1166
1173
|
const {type, id, version} = elementId;
|
|
1167
1174
|
|
|
@@ -1198,10 +1205,10 @@ class HashTreeParser {
|
|
|
1198
1205
|
});
|
|
1199
1206
|
|
|
1200
1207
|
if (filteredUpdates.length > 0) {
|
|
1201
|
-
this.locusInfoUpdateCallback(updateType,
|
|
1208
|
+
this.locusInfoUpdateCallback({updateType, updatedObjects: filteredUpdates});
|
|
1202
1209
|
}
|
|
1203
1210
|
} else if (updateType !== LocusInfoUpdateType.OBJECTS_UPDATED) {
|
|
1204
|
-
this.locusInfoUpdateCallback(updateType
|
|
1211
|
+
this.locusInfoUpdateCallback({updateType});
|
|
1205
1212
|
}
|
|
1206
1213
|
}
|
|
1207
1214
|
|
|
@@ -1223,76 +1230,89 @@ class HashTreeParser {
|
|
|
1223
1230
|
* Performs a sync for the given data set.
|
|
1224
1231
|
*
|
|
1225
1232
|
* @param {InternalDataSet} dataSet - The data set to sync
|
|
1226
|
-
* @param {string} rootHash - Our current root hash for this data set
|
|
1227
1233
|
* @param {string} reason - The reason for the sync (used for logging)
|
|
1234
|
+
* @param {boolean} [isInitialization] - Whether this is an initialization sync (sends empty leaves data instead of comparing hashes)
|
|
1228
1235
|
* @returns {Promise<void>}
|
|
1229
1236
|
*/
|
|
1230
1237
|
private async performSync(
|
|
1231
1238
|
dataSet: InternalDataSet,
|
|
1232
|
-
|
|
1233
|
-
|
|
1239
|
+
reason: string,
|
|
1240
|
+
isInitialization?: boolean
|
|
1234
1241
|
): Promise<void> {
|
|
1235
1242
|
if (!dataSet.hashTree) {
|
|
1236
1243
|
return;
|
|
1237
1244
|
}
|
|
1238
1245
|
|
|
1246
|
+
const {hashTree} = dataSet;
|
|
1247
|
+
const rootHash = hashTree.getRootHash();
|
|
1248
|
+
|
|
1239
1249
|
try {
|
|
1240
1250
|
LoggerProxy.logger.info(
|
|
1241
1251
|
`HashTreeParser#performSync --> ${this.debugId} ${reason}, syncing data set "${dataSet.name}"`
|
|
1242
1252
|
);
|
|
1243
1253
|
|
|
1244
|
-
|
|
1254
|
+
let leavesData: Record<number, LeafDataItem[]> = {};
|
|
1245
1255
|
|
|
1246
|
-
if (
|
|
1247
|
-
|
|
1256
|
+
if (!isInitialization) {
|
|
1257
|
+
if (dataSet.leafCount !== 1) {
|
|
1258
|
+
let receivedHashes;
|
|
1248
1259
|
|
|
1249
|
-
|
|
1250
|
-
|
|
1251
|
-
|
|
1252
|
-
dataSet.name,
|
|
1253
|
-
rootHash
|
|
1254
|
-
);
|
|
1260
|
+
try {
|
|
1261
|
+
// request hashes from sender
|
|
1262
|
+
const hashesResult = await this.getHashesFromLocus(dataSet.name, rootHash);
|
|
1255
1263
|
|
|
1256
|
-
|
|
1264
|
+
if (!hashesResult) {
|
|
1265
|
+
// hashes match, no sync needed
|
|
1266
|
+
return;
|
|
1267
|
+
}
|
|
1257
1268
|
|
|
1258
|
-
|
|
1259
|
-
|
|
1260
|
-
|
|
1261
|
-
|
|
1262
|
-
|
|
1263
|
-
|
|
1264
|
-
|
|
1269
|
+
receivedHashes = hashesResult.hashes;
|
|
1270
|
+
|
|
1271
|
+
this.updateDataSetLeafCount(dataSet, hashesResult.dataSet.leafCount);
|
|
1272
|
+
} catch (error: any) {
|
|
1273
|
+
if (error?.statusCode === 409) {
|
|
1274
|
+
// this is a leaf count mismatch, we should do nothing, just wait for another heartbeat message from Locus
|
|
1275
|
+
LoggerProxy.logger.info(
|
|
1276
|
+
`HashTreeParser#getHashesFromLocus --> ${this.debugId} Got 409 when fetching hashes for data set "${dataSet.name}": ${error.message}`
|
|
1277
|
+
);
|
|
1265
1278
|
|
|
1266
|
-
|
|
1279
|
+
return;
|
|
1280
|
+
}
|
|
1281
|
+
throw error;
|
|
1267
1282
|
}
|
|
1268
|
-
throw error;
|
|
1269
|
-
}
|
|
1270
1283
|
|
|
1271
|
-
|
|
1272
|
-
|
|
1284
|
+
// identify mismatched leaves
|
|
1285
|
+
const mismatchedLeaveIndexes = hashTree.diffHashes(receivedHashes);
|
|
1273
1286
|
|
|
1274
|
-
|
|
1275
|
-
|
|
1276
|
-
|
|
1277
|
-
|
|
1278
|
-
|
|
1287
|
+
mismatchedLeaveIndexes.forEach((index) => {
|
|
1288
|
+
leavesData[index] = hashTree.getLeafData(index);
|
|
1289
|
+
});
|
|
1290
|
+
} else {
|
|
1291
|
+
leavesData = {0: hashTree.getLeafData(0)};
|
|
1292
|
+
}
|
|
1279
1293
|
}
|
|
1280
1294
|
// request sync for mismatched leaves
|
|
1281
|
-
|
|
1282
|
-
const syncResponse = await this.sendSyncRequestToLocus(dataSet, mismatchedLeavesData);
|
|
1295
|
+
let syncResponse: HashTreeMessage | null = null;
|
|
1283
1296
|
|
|
1284
|
-
|
|
1285
|
-
|
|
1286
|
-
|
|
1287
|
-
|
|
1288
|
-
|
|
1297
|
+
if (isInitialization) {
|
|
1298
|
+
syncResponse = await this.sendSyncRequestToLocus(dataSet, {isInitialization: true});
|
|
1299
|
+
} else if (Object.keys(leavesData).length > 0) {
|
|
1300
|
+
syncResponse = await this.sendSyncRequestToLocus(dataSet, {
|
|
1301
|
+
mismatchedLeavesData: leavesData,
|
|
1302
|
+
});
|
|
1303
|
+
}
|
|
1304
|
+
|
|
1305
|
+
// sync API may return nothing (in that case data will arrive via messages)
|
|
1306
|
+
// or it may return a response in the same format as messages
|
|
1307
|
+
// We still need to restart the sync timer as a safety net in case the messages don't arrive.
|
|
1308
|
+
this.runSyncAlgorithm(dataSet);
|
|
1309
|
+
|
|
1310
|
+
if (syncResponse) {
|
|
1311
|
+
// the format of sync response is the same as messages, so we can reuse the same handler
|
|
1312
|
+
this.handleMessage(syncResponse, 'via sync API');
|
|
1289
1313
|
}
|
|
1290
1314
|
} catch (error) {
|
|
1291
|
-
if (error
|
|
1292
|
-
this.callLocusInfoUpdateCallback({
|
|
1293
|
-
updateType: LocusInfoUpdateType.MEETING_ENDED,
|
|
1294
|
-
});
|
|
1295
|
-
} else {
|
|
1315
|
+
if (!this.handleSyncErrors(error)) {
|
|
1296
1316
|
LoggerProxy.logger.warn(
|
|
1297
1317
|
`HashTreeParser#performSync --> ${this.debugId} error during sync for data set "${dataSet.name}":`,
|
|
1298
1318
|
error
|
|
@@ -1301,6 +1321,105 @@ class HashTreeParser {
|
|
|
1301
1321
|
}
|
|
1302
1322
|
}
|
|
1303
1323
|
|
|
1324
|
+
/**
|
|
1325
|
+
* Enqueues a sync for the given data set. If the data set is already in the queue, the request is ignored.
|
|
1326
|
+
* This ensures that all syncs are executed sequentially and no more than 1 sync runs at a time.
|
|
1327
|
+
*
|
|
1328
|
+
* @param {string} dataSetName - The name of the data set to sync
|
|
1329
|
+
* @param {string} reason - The reason for the sync (used for logging)
|
|
1330
|
+
* @param {boolean} [isInitialization=false] - Whether this is an initialization sync (uses empty leaves data instead of hash comparison)
|
|
1331
|
+
* @returns {void}
|
|
1332
|
+
*/
|
|
1333
|
+
private enqueueSyncForDataset(
|
|
1334
|
+
dataSetName: string,
|
|
1335
|
+
reason: string,
|
|
1336
|
+
isInitialization = false
|
|
1337
|
+
): void {
|
|
1338
|
+
if (this.state === 'stopped') return;
|
|
1339
|
+
|
|
1340
|
+
const existingEntry = this.syncQueue.find((entry) => entry.dataSetName === dataSetName);
|
|
1341
|
+
|
|
1342
|
+
if (existingEntry) {
|
|
1343
|
+
if (isInitialization) {
|
|
1344
|
+
existingEntry.isInitialization = true;
|
|
1345
|
+
}
|
|
1346
|
+
LoggerProxy.logger.info(
|
|
1347
|
+
`HashTreeParser#enqueueSyncForDataset --> ${this.debugId} data set "${dataSetName}" already in sync queue, skipping`
|
|
1348
|
+
);
|
|
1349
|
+
|
|
1350
|
+
return;
|
|
1351
|
+
}
|
|
1352
|
+
|
|
1353
|
+
this.syncQueue.push({dataSetName, reason, isInitialization});
|
|
1354
|
+
|
|
1355
|
+
if (!this.isSyncInProgress) {
|
|
1356
|
+
this.syncQueueProcessingPromise = this.processSyncQueue();
|
|
1357
|
+
}
|
|
1358
|
+
}
|
|
1359
|
+
|
|
1360
|
+
/**
|
|
1361
|
+
* Processes the sync queue sequentially. Only one instance of this method runs at a time.
|
|
1362
|
+
*
|
|
1363
|
+
* @returns {Promise<void>}
|
|
1364
|
+
*/
|
|
1365
|
+
private async processSyncQueue(): Promise<void> {
|
|
1366
|
+
if (this.isSyncInProgress) return;
|
|
1367
|
+
|
|
1368
|
+
this.isSyncInProgress = true;
|
|
1369
|
+
try {
|
|
1370
|
+
while (this.syncQueue.length > 0 && this.state !== 'stopped') {
|
|
1371
|
+
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
|
|
1372
|
+
const {dataSetName, reason, isInitialization} = this.syncQueue.shift()!;
|
|
1373
|
+
const dataSet = this.dataSets[dataSetName];
|
|
1374
|
+
|
|
1375
|
+
if (!dataSet?.hashTree) {
|
|
1376
|
+
// eslint-disable-next-line no-continue
|
|
1377
|
+
continue;
|
|
1378
|
+
}
|
|
1379
|
+
|
|
1380
|
+
// eslint-disable-next-line no-await-in-loop
|
|
1381
|
+
await this.performSync(dataSet, reason, isInitialization);
|
|
1382
|
+
}
|
|
1383
|
+
} finally {
|
|
1384
|
+
this.isSyncInProgress = false;
|
|
1385
|
+
}
|
|
1386
|
+
}
|
|
1387
|
+
|
|
1388
|
+
/**
|
|
1389
|
+
* Syncs all data sets that have hash trees, one by one in sequence, using the priority order
|
|
1390
|
+
* provided by sortByInitPriority(). Does nothing if the parser is stopped or if a syncAllDatasets
|
|
1391
|
+
* call is already in progress.
|
|
1392
|
+
*
|
|
1393
|
+
* @returns {Promise<void>}
|
|
1394
|
+
*/
|
|
1395
|
+
public async syncAllDatasets(): Promise<void> {
|
|
1396
|
+
if (this.state === 'stopped') return;
|
|
1397
|
+
if (this.isSyncAllInProgress) return;
|
|
1398
|
+
|
|
1399
|
+
this.isSyncAllInProgress = true;
|
|
1400
|
+
try {
|
|
1401
|
+
const dataSetsWithHashTrees = Object.values(this.dataSets)
|
|
1402
|
+
.filter((dataSet) => dataSet?.hashTree)
|
|
1403
|
+
.map((dataSet) => ({name: dataSet.name}));
|
|
1404
|
+
|
|
1405
|
+
const sorted = sortByInitPriority(dataSetsWithHashTrees, DATA_SET_INIT_PRIORITY);
|
|
1406
|
+
|
|
1407
|
+
LoggerProxy.logger.info(
|
|
1408
|
+
`HashTreeParser#syncAllDatasets --> ${this.debugId} syncing datasets: ${sorted
|
|
1409
|
+
.map((ds) => ds.name)
|
|
1410
|
+
.join(', ')}`
|
|
1411
|
+
);
|
|
1412
|
+
|
|
1413
|
+
for (const ds of sorted) {
|
|
1414
|
+
this.enqueueSyncForDataset(ds.name, 'syncAllDatasets');
|
|
1415
|
+
}
|
|
1416
|
+
|
|
1417
|
+
await this.syncQueueProcessingPromise;
|
|
1418
|
+
} finally {
|
|
1419
|
+
this.isSyncAllInProgress = false;
|
|
1420
|
+
}
|
|
1421
|
+
}
|
|
1422
|
+
|
|
1304
1423
|
/**
|
|
1305
1424
|
* Runs the sync algorithm for the given data set.
|
|
1306
1425
|
*
|
|
@@ -1319,21 +1438,14 @@ class HashTreeParser {
|
|
|
1319
1438
|
}
|
|
1320
1439
|
|
|
1321
1440
|
if (!dataSet.hashTree) {
|
|
1322
|
-
|
|
1323
|
-
|
|
1324
|
-
);
|
|
1441
|
+
// no hash tree, so no need to do any syncing
|
|
1442
|
+
// we fall into this branch often, because Locus sends dataSets in messages that are not visible to us
|
|
1325
1443
|
|
|
1326
1444
|
return;
|
|
1327
1445
|
}
|
|
1328
1446
|
|
|
1329
1447
|
dataSet.hashTree.resize(receivedDataSet.leafCount);
|
|
1330
1448
|
|
|
1331
|
-
// temporary log for the workshop // todo: remove
|
|
1332
|
-
const ourCurrentRootHash = dataSet.hashTree.getRootHash();
|
|
1333
|
-
LoggerProxy.logger.info(
|
|
1334
|
-
`HashTreeParser#runSyncAlgorithm --> ${this.debugId} dataSet="${dataSet.name}" version=${dataSet.version} hashes before starting timer: ours=${ourCurrentRootHash} Locus=${dataSet.root}`
|
|
1335
|
-
);
|
|
1336
|
-
|
|
1337
1449
|
const delay = dataSet.idleMs + this.getWeightedBackoffTime(dataSet.backoff);
|
|
1338
1450
|
|
|
1339
1451
|
if (delay > 0) {
|
|
@@ -1341,11 +1453,7 @@ class HashTreeParser {
|
|
|
1341
1453
|
clearTimeout(dataSet.timer);
|
|
1342
1454
|
}
|
|
1343
1455
|
|
|
1344
|
-
|
|
1345
|
-
`HashTreeParser#runSyncAlgorithm --> ${this.debugId} setting "${dataSet.name}" sync timer for ${delay}`
|
|
1346
|
-
);
|
|
1347
|
-
|
|
1348
|
-
dataSet.timer = setTimeout(async () => {
|
|
1456
|
+
dataSet.timer = setTimeout(() => {
|
|
1349
1457
|
dataSet.timer = undefined;
|
|
1350
1458
|
|
|
1351
1459
|
if (!dataSet.hashTree) {
|
|
@@ -1359,15 +1467,10 @@ class HashTreeParser {
|
|
|
1359
1467
|
const rootHash = dataSet.hashTree.getRootHash();
|
|
1360
1468
|
|
|
1361
1469
|
if (dataSet.root !== rootHash) {
|
|
1362
|
-
|
|
1363
|
-
dataSet,
|
|
1364
|
-
rootHash,
|
|
1470
|
+
this.enqueueSyncForDataset(
|
|
1471
|
+
dataSet.name,
|
|
1365
1472
|
`Root hash mismatch: received=${dataSet.root}, ours=${rootHash}`
|
|
1366
1473
|
);
|
|
1367
|
-
} else {
|
|
1368
|
-
LoggerProxy.logger.info(
|
|
1369
|
-
`HashTreeParser#runSyncAlgorithm --> ${this.debugId} "${dataSet.name}" root hash matching: ${rootHash}, version=${dataSet.version}`
|
|
1370
|
-
);
|
|
1371
1474
|
}
|
|
1372
1475
|
}, delay);
|
|
1373
1476
|
} else {
|
|
@@ -1407,18 +1510,20 @@ class HashTreeParser {
|
|
|
1407
1510
|
const backoffTime = this.getWeightedBackoffTime(dataSet.backoff);
|
|
1408
1511
|
const delay = this.heartbeatIntervalMs + backoffTime;
|
|
1409
1512
|
|
|
1410
|
-
dataSet.heartbeatWatchdogTimer = setTimeout(
|
|
1513
|
+
dataSet.heartbeatWatchdogTimer = setTimeout(() => {
|
|
1411
1514
|
dataSet.heartbeatWatchdogTimer = undefined;
|
|
1412
1515
|
|
|
1413
1516
|
LoggerProxy.logger.warn(
|
|
1414
1517
|
`HashTreeParser#resetHeartbeatWatchdogs --> ${this.debugId} Heartbeat watchdog fired for data set "${dataSet.name}" - no heartbeat received within expected interval, initiating sync`
|
|
1415
1518
|
);
|
|
1416
1519
|
|
|
1417
|
-
|
|
1418
|
-
|
|
1419
|
-
dataSet.
|
|
1420
|
-
|
|
1421
|
-
|
|
1520
|
+
Metrics.sendBehavioralMetric(BEHAVIORAL_METRICS.HASH_TREE_HEARTBEAT_WATCHDOG_EXPIRED, {
|
|
1521
|
+
debugId: this.debugId,
|
|
1522
|
+
dataSetName: dataSet.name,
|
|
1523
|
+
});
|
|
1524
|
+
|
|
1525
|
+
this.enqueueSyncForDataset(dataSet.name, `heartbeat watchdog expired`);
|
|
1526
|
+
this.resetHeartbeatWatchdogs([dataSet]);
|
|
1422
1527
|
}, delay);
|
|
1423
1528
|
}
|
|
1424
1529
|
}
|
|
@@ -1451,6 +1556,7 @@ class HashTreeParser {
|
|
|
1451
1556
|
`HashTreeParser#stop --> ${this.debugId} Stopping HashTreeParser, clearing timers and hash trees`
|
|
1452
1557
|
);
|
|
1453
1558
|
this.stopAllTimers();
|
|
1559
|
+
this.syncQueue = [];
|
|
1454
1560
|
Object.values(this.dataSets).forEach((dataSet) => {
|
|
1455
1561
|
dataSet.hashTree = undefined;
|
|
1456
1562
|
});
|
|
@@ -1459,17 +1565,27 @@ class HashTreeParser {
|
|
|
1459
1565
|
}
|
|
1460
1566
|
|
|
1461
1567
|
/**
|
|
1462
|
-
*
|
|
1568
|
+
* Cleans up the HashTreeParser, stopping all timers and clearing all internal state.
|
|
1569
|
+
* After calling this, the parser should not be used anymore.
|
|
1570
|
+
* @returns {void}
|
|
1571
|
+
*/
|
|
1572
|
+
public cleanUp() {
  // stop() halts all timers, empties the sync queue and drops every hash tree
  this.stop();
  // must come after stop(), which iterates this.dataSets; forget all data set
  // state so the parser cannot act on stale information
  this.dataSets = {};
}
|
|
1576
|
+
|
|
1577
|
+
/**
|
|
1578
|
+
* Resumes the HashTreeParser that was previously stopped, using a hash tree message.
|
|
1463
1579
|
* @param {HashTreeMessage} message - The message to resume with, it must contain metadata with visible data sets info
|
|
1464
1580
|
* @returns {void}
|
|
1465
1581
|
*/
|
|
1466
|
-
public
|
|
1582
|
+
public resumeFromMessage(message: HashTreeMessage) {
|
|
1467
1583
|
// check that message contains metadata with visible data sets - this is essential to be able to resume
|
|
1468
1584
|
const metadataObject = message.locusStateElements?.find((el) => isMetadata(el));
|
|
1469
1585
|
|
|
1470
1586
|
if (!metadataObject?.data?.visibleDataSets) {
|
|
1471
1587
|
LoggerProxy.logger.warn(
|
|
1472
|
-
`HashTreeParser#
|
|
1588
|
+
`HashTreeParser#resumeFromMessage --> ${this.debugId} Cannot resume HashTreeParser because the message is missing metadata with visible data sets info`
|
|
1473
1589
|
);
|
|
1474
1590
|
|
|
1475
1591
|
return;
|
|
@@ -1490,7 +1606,7 @@ class HashTreeParser {
|
|
|
1490
1606
|
};
|
|
1491
1607
|
}
|
|
1492
1608
|
LoggerProxy.logger.info(
|
|
1493
|
-
`HashTreeParser#
|
|
1609
|
+
`HashTreeParser#resumeFromMessage --> ${
|
|
1494
1610
|
this.debugId
|
|
1495
1611
|
} Resuming HashTreeParser with data sets: ${Object.keys(this.dataSets).join(
|
|
1496
1612
|
', '
|
|
@@ -1501,18 +1617,47 @@ class HashTreeParser {
|
|
|
1501
1617
|
this.handleMessage(message, 'on resume');
|
|
1502
1618
|
}
|
|
1503
1619
|
|
|
1620
|
+
/**
 * Resumes the HashTreeParser that was previously stopped, using a Locus API response.
 * Unlike resumeFromMessage(), this does not require metadata/dataSets in the input,
 * as it fetches all necessary information from Locus via initializeFromGetLociResponse.
 *
 * Note: any previously known data sets are discarded and rebuilt from the response.
 * @param {LocusDTO} locus - locus object from an API response
 * @returns {Promise<void>} resolves once the parser has been re-initialized from the response
 */
public async resumeFromApiResponse(locus: LocusDTO) {
  this.state = 'active';
  // drop all previously known data sets; they will be rebuilt from the Locus response
  this.dataSets = {};

  LoggerProxy.logger.info(
    `HashTreeParser#resumeFromApiResponse --> ${this.debugId} Resuming HashTreeParser from API response`
  );

  await this.initializeFromGetLociResponse(locus);
}
|
|
1637
|
+
|
|
1504
1638
|
private checkForSentinelHttpResponse(error: any, dataSetName?: string) {
|
|
1639
|
+
// 404 for any dataset means the locus is no longer available at this URL - could be replaced or ended
|
|
1640
|
+
// if a dataset is just not visible, we would get a 400
|
|
1641
|
+
if (error.statusCode === 404) {
|
|
1642
|
+
LoggerProxy.logger.info(
|
|
1643
|
+
`HashTreeParser#checkForSentinelHttpResponse --> ${this.debugId} Received 404 for data set "${dataSetName}", locus not found`
|
|
1644
|
+
);
|
|
1645
|
+
this.stopAllTimers();
|
|
1646
|
+
|
|
1647
|
+
throw new LocusNotFoundError();
|
|
1648
|
+
}
|
|
1649
|
+
|
|
1505
1650
|
const isValidDataSetForSentinel =
|
|
1506
1651
|
dataSetName === undefined ||
|
|
1507
1652
|
PossibleSentinelMessageDataSetNames.includes(dataSetName.toLowerCase());
|
|
1508
1653
|
|
|
1509
1654
|
if (
|
|
1510
|
-
|
|
1511
|
-
|
|
1655
|
+
error.statusCode === 409 &&
|
|
1656
|
+
error.body?.errorCode === LocusErrorCodes.LOCUS_INACTIVE &&
|
|
1512
1657
|
isValidDataSetForSentinel
|
|
1513
1658
|
) {
|
|
1514
1659
|
LoggerProxy.logger.info(
|
|
1515
|
-
`HashTreeParser#checkForSentinelHttpResponse --> ${this.debugId} Received ${error.statusCode} for data set "${dataSetName}", indicating that the meeting has ended`
|
|
1660
|
+
`HashTreeParser#checkForSentinelHttpResponse --> ${this.debugId} Received ${error.statusCode}/${error.body?.errorCode} for data set "${dataSetName}", indicating that the meeting has ended`
|
|
1516
1661
|
);
|
|
1517
1662
|
this.stopAllTimers();
|
|
1518
1663
|
|
|
@@ -1524,7 +1669,7 @@ class HashTreeParser {
|
|
|
1524
1669
|
* Gets the current hashes from the locus for a specific data set.
|
|
1525
1670
|
* @param {string} dataSetName
|
|
1526
1671
|
* @param {string} currentRootHash
|
|
1527
|
-
* @returns {
|
|
1672
|
+
* @returns {Object|null} An object containing the hashes and leaf count, or null if the hashes match and no sync is needed
|
|
1528
1673
|
*/
|
|
1529
1674
|
private getHashesFromLocus(dataSetName: string, currentRootHash: string) {
|
|
1530
1675
|
LoggerProxy.logger.info(
|
|
@@ -1543,6 +1688,15 @@ class HashTreeParser {
|
|
|
1543
1688
|
},
|
|
1544
1689
|
})
|
|
1545
1690
|
.then((response) => {
|
|
1691
|
+
if (!response.body || isEmpty(response.body)) {
|
|
1692
|
+
// 204 with empty body means our hashes match Locus, no sync needed
|
|
1693
|
+
LoggerProxy.logger.info(
|
|
1694
|
+
`HashTreeParser#getHashesFromLocus --> ${this.debugId} Got ${response.statusCode} with empty body for data set "${dataSetName}", hashes match - no sync needed`
|
|
1695
|
+
);
|
|
1696
|
+
|
|
1697
|
+
return null;
|
|
1698
|
+
}
|
|
1699
|
+
|
|
1546
1700
|
const hashes = response.body?.hashes as string[] | undefined;
|
|
1547
1701
|
const dataSetFromResponse = response.body?.dataSet;
|
|
1548
1702
|
|
|
@@ -1571,6 +1725,13 @@ class HashTreeParser {
|
|
|
1571
1725
|
error
|
|
1572
1726
|
);
|
|
1573
1727
|
this.checkForSentinelHttpResponse(error, dataSet.name);
|
|
1728
|
+
Metrics.sendBehavioralMetric(BEHAVIORAL_METRICS.HASH_TREE_SYNC_FAILURE, {
|
|
1729
|
+
debugId: this.debugId,
|
|
1730
|
+
dataSetName,
|
|
1731
|
+
request: 'GET /hashtree',
|
|
1732
|
+
statusCode: error.statusCode,
|
|
1733
|
+
reason: error.message,
|
|
1734
|
+
});
|
|
1574
1735
|
|
|
1575
1736
|
throw error;
|
|
1576
1737
|
});
|
|
@@ -1580,29 +1741,43 @@ class HashTreeParser {
|
|
|
1580
1741
|
* Sends a sync request to Locus for the specified data set.
|
|
1581
1742
|
*
|
|
1582
1743
|
* @param {InternalDataSet} dataSet The data set to sync.
|
|
1583
|
-
* @param {
|
|
1744
|
+
* @param {Object} options Either `{ isInitialization: true }` for init syncs (uses leafCount=1 with empty leaf data) or `{ mismatchedLeavesData }` for normal syncs.
|
|
1584
1745
|
* @returns {Promise<HashTreeMessage|null>}
|
|
1585
1746
|
*/
|
|
1586
1747
|
private sendSyncRequestToLocus(
|
|
1587
1748
|
dataSet: InternalDataSet,
|
|
1588
|
-
mismatchedLeavesData: Record<number, LeafDataItem[]>
|
|
1749
|
+
options: {isInitialization: true} | {mismatchedLeavesData: Record<number, LeafDataItem[]>}
|
|
1589
1750
|
): Promise<HashTreeMessage | null> {
|
|
1590
1751
|
LoggerProxy.logger.info(
|
|
1591
1752
|
`HashTreeParser#sendSyncRequestToLocus --> ${this.debugId} Sending sync request for data set "${dataSet.name}"`
|
|
1592
1753
|
);
|
|
1593
1754
|
|
|
1755
|
+
const isInitialization = 'isInitialization' in options;
|
|
1756
|
+
|
|
1594
1757
|
const url = `${dataSet.url}/sync`;
|
|
1595
|
-
const body
|
|
1596
|
-
leafCount:
|
|
1758
|
+
const body: {
|
|
1759
|
+
leafCount: number;
|
|
1760
|
+
leafDataEntries: {leafIndex: number; elementIds: LeafDataItem[]}[];
|
|
1761
|
+
} = {
|
|
1762
|
+
leafCount: isInitialization ? 1 : dataSet.leafCount,
|
|
1597
1763
|
leafDataEntries: [],
|
|
1598
1764
|
};
|
|
1599
1765
|
|
|
1600
|
-
|
|
1601
|
-
|
|
1602
|
-
|
|
1603
|
-
|
|
1766
|
+
if (isInitialization) {
|
|
1767
|
+
// initialization sync: Locus requires leafCount=1 with a single empty leaf
|
|
1768
|
+
body.leafDataEntries.push({leafIndex: 0, elementIds: []});
|
|
1769
|
+
} else {
|
|
1770
|
+
const {mismatchedLeavesData} = options;
|
|
1771
|
+
|
|
1772
|
+
Object.keys(mismatchedLeavesData).forEach((index) => {
|
|
1773
|
+
const leafIndex = parseInt(index, 10);
|
|
1774
|
+
|
|
1775
|
+
body.leafDataEntries.push({
|
|
1776
|
+
leafIndex,
|
|
1777
|
+
elementIds: mismatchedLeavesData[leafIndex],
|
|
1778
|
+
});
|
|
1604
1779
|
});
|
|
1605
|
-
}
|
|
1780
|
+
}
|
|
1606
1781
|
|
|
1607
1782
|
const ourCurrentRootHash = dataSet.hashTree ? dataSet.hashTree.getRootHash() : EMPTY_HASH;
|
|
1608
1783
|
|
|
@@ -1615,10 +1790,6 @@ class HashTreeParser {
|
|
|
1615
1790
|
body,
|
|
1616
1791
|
})
|
|
1617
1792
|
.then((resp) => {
|
|
1618
|
-
LoggerProxy.logger.info(
|
|
1619
|
-
`HashTreeParser#sendSyncRequestToLocus --> ${this.debugId} Sync request succeeded for "${dataSet.name}"`
|
|
1620
|
-
);
|
|
1621
|
-
|
|
1622
1793
|
if (!resp.body || isEmpty(resp.body)) {
|
|
1623
1794
|
LoggerProxy.logger.info(
|
|
1624
1795
|
`HashTreeParser#sendSyncRequestToLocus --> ${this.debugId} Got ${resp.statusCode} with empty body for sync request for data set "${dataSet.name}", data should arrive via messages`
|
|
@@ -1635,6 +1806,13 @@ class HashTreeParser {
|
|
|
1635
1806
|
error
|
|
1636
1807
|
);
|
|
1637
1808
|
this.checkForSentinelHttpResponse(error, dataSet.name);
|
|
1809
|
+
Metrics.sendBehavioralMetric(BEHAVIORAL_METRICS.HASH_TREE_SYNC_FAILURE, {
|
|
1810
|
+
debugId: this.debugId,
|
|
1811
|
+
dataSetName: dataSet.name,
|
|
1812
|
+
request: 'POST /sync',
|
|
1813
|
+
statusCode: error.statusCode,
|
|
1814
|
+
reason: error.message,
|
|
1815
|
+
});
|
|
1638
1816
|
|
|
1639
1817
|
throw error;
|
|
1640
1818
|
});
|