@webex/plugin-meetings 3.11.0 → 3.12.0-next.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/aiEnableRequest/index.js +184 -0
- package/dist/aiEnableRequest/index.js.map +1 -0
- package/dist/aiEnableRequest/utils.js +36 -0
- package/dist/aiEnableRequest/utils.js.map +1 -0
- package/dist/annotation/index.js +14 -5
- package/dist/annotation/index.js.map +1 -1
- package/dist/breakouts/breakout.js +1 -1
- package/dist/breakouts/index.js +1 -1
- package/dist/config.js +7 -2
- package/dist/config.js.map +1 -1
- package/dist/constants.js +28 -6
- package/dist/constants.js.map +1 -1
- package/dist/hashTree/constants.js +3 -1
- package/dist/hashTree/constants.js.map +1 -1
- package/dist/hashTree/hashTree.js +18 -0
- package/dist/hashTree/hashTree.js.map +1 -1
- package/dist/hashTree/hashTreeParser.js +850 -410
- package/dist/hashTree/hashTreeParser.js.map +1 -1
- package/dist/hashTree/types.js +4 -2
- package/dist/hashTree/types.js.map +1 -1
- package/dist/hashTree/utils.js +10 -0
- package/dist/hashTree/utils.js.map +1 -1
- package/dist/index.js +11 -2
- package/dist/index.js.map +1 -1
- package/dist/interceptors/constant.js +12 -0
- package/dist/interceptors/constant.js.map +1 -0
- package/dist/interceptors/dataChannelAuthToken.js +290 -0
- package/dist/interceptors/dataChannelAuthToken.js.map +1 -0
- package/dist/interceptors/index.js +7 -0
- package/dist/interceptors/index.js.map +1 -1
- package/dist/interceptors/utils.js +27 -0
- package/dist/interceptors/utils.js.map +1 -0
- package/dist/interpretation/index.js +2 -2
- package/dist/interpretation/index.js.map +1 -1
- package/dist/interpretation/siLanguage.js +1 -1
- package/dist/locus-info/controlsUtils.js +5 -3
- package/dist/locus-info/controlsUtils.js.map +1 -1
- package/dist/locus-info/index.js +522 -131
- package/dist/locus-info/index.js.map +1 -1
- package/dist/locus-info/selfUtils.js +1 -0
- package/dist/locus-info/selfUtils.js.map +1 -1
- package/dist/locus-info/types.js.map +1 -1
- package/dist/media/MediaConnectionAwaiter.js +57 -1
- package/dist/media/MediaConnectionAwaiter.js.map +1 -1
- package/dist/media/properties.js +4 -2
- package/dist/media/properties.js.map +1 -1
- package/dist/meeting/in-meeting-actions.js +7 -1
- package/dist/meeting/in-meeting-actions.js.map +1 -1
- package/dist/meeting/index.js +1173 -877
- package/dist/meeting/index.js.map +1 -1
- package/dist/meeting/request.js +50 -0
- package/dist/meeting/request.js.map +1 -1
- package/dist/meeting/request.type.js.map +1 -1
- package/dist/meeting/util.js +133 -3
- package/dist/meeting/util.js.map +1 -1
- package/dist/meetings/index.js +117 -48
- package/dist/meetings/index.js.map +1 -1
- package/dist/member/index.js +10 -0
- package/dist/member/index.js.map +1 -1
- package/dist/member/util.js +10 -0
- package/dist/member/util.js.map +1 -1
- package/dist/metrics/constants.js +2 -1
- package/dist/metrics/constants.js.map +1 -1
- package/dist/multistream/mediaRequestManager.js +9 -60
- package/dist/multistream/mediaRequestManager.js.map +1 -1
- package/dist/multistream/remoteMediaManager.js +11 -0
- package/dist/multistream/remoteMediaManager.js.map +1 -1
- package/dist/reachability/index.js +18 -10
- package/dist/reachability/index.js.map +1 -1
- package/dist/reactions/reactions.type.js.map +1 -1
- package/dist/reconnection-manager/index.js +0 -1
- package/dist/reconnection-manager/index.js.map +1 -1
- package/dist/types/aiEnableRequest/index.d.ts +5 -0
- package/dist/types/aiEnableRequest/utils.d.ts +2 -0
- package/dist/types/config.d.ts +4 -0
- package/dist/types/constants.d.ts +23 -1
- package/dist/types/hashTree/constants.d.ts +1 -0
- package/dist/types/hashTree/hashTree.d.ts +7 -0
- package/dist/types/hashTree/hashTreeParser.d.ts +122 -14
- package/dist/types/hashTree/types.d.ts +3 -0
- package/dist/types/hashTree/utils.d.ts +6 -0
- package/dist/types/index.d.ts +1 -0
- package/dist/types/interceptors/constant.d.ts +5 -0
- package/dist/types/interceptors/dataChannelAuthToken.d.ts +43 -0
- package/dist/types/interceptors/index.d.ts +2 -1
- package/dist/types/interceptors/utils.d.ts +1 -0
- package/dist/types/locus-info/index.d.ts +60 -8
- package/dist/types/locus-info/types.d.ts +7 -0
- package/dist/types/media/MediaConnectionAwaiter.d.ts +10 -1
- package/dist/types/media/properties.d.ts +2 -1
- package/dist/types/meeting/in-meeting-actions.d.ts +6 -0
- package/dist/types/meeting/index.d.ts +61 -7
- package/dist/types/meeting/request.d.ts +16 -1
- package/dist/types/meeting/request.type.d.ts +5 -0
- package/dist/types/meeting/util.d.ts +31 -0
- package/dist/types/meetings/index.d.ts +4 -2
- package/dist/types/member/index.d.ts +1 -0
- package/dist/types/member/util.d.ts +5 -0
- package/dist/types/metrics/constants.d.ts +1 -0
- package/dist/types/multistream/mediaRequestManager.d.ts +0 -23
- package/dist/types/reactions/reactions.type.d.ts +1 -0
- package/dist/types/webinar/utils.d.ts +6 -0
- package/dist/webinar/index.js +291 -91
- package/dist/webinar/index.js.map +1 -1
- package/dist/webinar/utils.js +25 -0
- package/dist/webinar/utils.js.map +1 -0
- package/package.json +24 -23
- package/src/aiEnableRequest/README.md +84 -0
- package/src/aiEnableRequest/index.ts +170 -0
- package/src/aiEnableRequest/utils.ts +25 -0
- package/src/annotation/index.ts +27 -7
- package/src/config.ts +4 -0
- package/src/constants.ts +29 -1
- package/src/hashTree/constants.ts +1 -0
- package/src/hashTree/hashTree.ts +17 -0
- package/src/hashTree/hashTreeParser.ts +745 -252
- package/src/hashTree/types.ts +4 -0
- package/src/hashTree/utils.ts +9 -0
- package/src/index.ts +8 -1
- package/src/interceptors/constant.ts +6 -0
- package/src/interceptors/dataChannelAuthToken.ts +170 -0
- package/src/interceptors/index.ts +2 -1
- package/src/interceptors/utils.ts +16 -0
- package/src/interpretation/index.ts +2 -2
- package/src/locus-info/controlsUtils.ts +11 -0
- package/src/locus-info/index.ts +579 -113
- package/src/locus-info/selfUtils.ts +1 -0
- package/src/locus-info/types.ts +8 -0
- package/src/media/MediaConnectionAwaiter.ts +41 -1
- package/src/media/properties.ts +3 -1
- package/src/meeting/in-meeting-actions.ts +12 -0
- package/src/meeting/index.ts +291 -76
- package/src/meeting/request.ts +42 -0
- package/src/meeting/request.type.ts +6 -0
- package/src/meeting/util.ts +160 -2
- package/src/meetings/index.ts +157 -44
- package/src/member/index.ts +10 -0
- package/src/member/util.ts +12 -0
- package/src/metrics/constants.ts +1 -0
- package/src/multistream/mediaRequestManager.ts +4 -54
- package/src/multistream/remoteMediaManager.ts +13 -0
- package/src/reachability/index.ts +9 -0
- package/src/reactions/reactions.type.ts +1 -0
- package/src/reconnection-manager/index.ts +0 -1
- package/src/webinar/index.ts +191 -6
- package/src/webinar/utils.ts +16 -0
- package/test/unit/spec/aiEnableRequest/index.ts +981 -0
- package/test/unit/spec/aiEnableRequest/utils.ts +130 -0
- package/test/unit/spec/annotation/index.ts +69 -7
- package/test/unit/spec/hashTree/hashTree.ts +66 -0
- package/test/unit/spec/hashTree/hashTreeParser.ts +2225 -189
- package/test/unit/spec/interceptors/dataChannelAuthToken.ts +210 -0
- package/test/unit/spec/interceptors/utils.ts +75 -0
- package/test/unit/spec/locus-info/controlsUtils.js +29 -0
- package/test/unit/spec/locus-info/index.js +1134 -55
- package/test/unit/spec/media/MediaConnectionAwaiter.ts +41 -1
- package/test/unit/spec/media/properties.ts +12 -3
- package/test/unit/spec/meeting/in-meeting-actions.ts +8 -2
- package/test/unit/spec/meeting/index.js +829 -115
- package/test/unit/spec/meeting/request.js +70 -0
- package/test/unit/spec/meeting/utils.js +438 -26
- package/test/unit/spec/meetings/index.js +653 -32
- package/test/unit/spec/member/index.js +28 -4
- package/test/unit/spec/member/util.js +65 -27
- package/test/unit/spec/multistream/mediaRequestManager.ts +2 -85
- package/test/unit/spec/multistream/remoteMediaManager.ts +30 -0
- package/test/unit/spec/reachability/index.ts +23 -0
- package/test/unit/spec/reconnection-manager/index.js +4 -8
- package/test/unit/spec/webinar/index.ts +474 -37
- package/test/unit/spec/webinar/utils.ts +39 -0
|
@@ -5,7 +5,7 @@ import {Enum, HTTP_VERBS} from '../constants';
|
|
|
5
5
|
import {DataSetNames, EMPTY_HASH} from './constants';
|
|
6
6
|
import {ObjectType, HtMeta, HashTreeObject} from './types';
|
|
7
7
|
import {LocusDTO} from '../locus-info/types';
|
|
8
|
-
import {deleteNestedObjectsWithHtMeta,
|
|
8
|
+
import {deleteNestedObjectsWithHtMeta, isMetadata} from './utils';
|
|
9
9
|
|
|
10
10
|
export interface DataSet {
|
|
11
11
|
url: string;
|
|
@@ -29,11 +29,24 @@ export interface HashTreeMessage {
|
|
|
29
29
|
locusStateElements?: Array<HashTreeObject>;
|
|
30
30
|
locusSessionId?: string;
|
|
31
31
|
locusUrl: string;
|
|
32
|
+
heartbeatIntervalMs?: number;
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
export interface VisibleDataSetInfo {
|
|
36
|
+
name: string;
|
|
37
|
+
url: string;
|
|
38
|
+
dataChannelUrl?: string;
|
|
39
|
+
}
|
|
40
|
+
|
|
41
|
+
export interface Metadata {
|
|
42
|
+
htMeta: HtMeta;
|
|
43
|
+
visibleDataSets: VisibleDataSetInfo[];
|
|
32
44
|
}
|
|
33
45
|
|
|
34
46
|
interface InternalDataSet extends DataSet {
|
|
35
47
|
hashTree?: HashTree; // set only for visible data sets
|
|
36
48
|
timer?: ReturnType<typeof setTimeout>;
|
|
49
|
+
heartbeatWatchdogTimer?: ReturnType<typeof setTimeout>;
|
|
37
50
|
}
|
|
38
51
|
|
|
39
52
|
type WebexRequestMethod = (options: Record<string, any>) => Promise<any>;
|
|
@@ -49,11 +62,29 @@ export type LocusInfoUpdateCallback = (
|
|
|
49
62
|
data?: {updatedObjects: HashTreeObject[]}
|
|
50
63
|
) => void;
|
|
51
64
|
|
|
65
|
+
interface LeafInfo {
|
|
66
|
+
type: ObjectType;
|
|
67
|
+
id: number;
|
|
68
|
+
version: number;
|
|
69
|
+
data?: any;
|
|
70
|
+
}
|
|
71
|
+
|
|
52
72
|
/**
|
|
53
73
|
* This error is thrown if we receive information that the meeting has ended while we're processing some hash messages.
|
|
54
74
|
* It's handled internally by HashTreeParser and results in MEETING_ENDED being sent up.
|
|
55
75
|
*/
|
|
56
|
-
class MeetingEndedError extends Error {}
|
|
76
|
+
export class MeetingEndedError extends Error {}
|
|
77
|
+
|
|
78
|
+
/* Currently Locus always sends Metadata objects only in the "self" dataset.
|
|
79
|
+
* If this ever changes, update all the code that relies on this constant.
|
|
80
|
+
*/
|
|
81
|
+
const MetadataDataSetName = DataSetNames.SELF;
|
|
82
|
+
|
|
83
|
+
const PossibleSentinelMessageDataSetNames = [
|
|
84
|
+
DataSetNames.MAIN,
|
|
85
|
+
DataSetNames.SELF,
|
|
86
|
+
DataSetNames.UNJOINED,
|
|
87
|
+
];
|
|
57
88
|
|
|
58
89
|
/**
|
|
59
90
|
* Parses hash tree eventing locus data
|
|
@@ -63,8 +94,11 @@ class HashTreeParser {
|
|
|
63
94
|
visibleDataSetsUrl: string; // url from which we can get info about all data sets
|
|
64
95
|
webexRequest: WebexRequestMethod;
|
|
65
96
|
locusInfoUpdateCallback: LocusInfoUpdateCallback;
|
|
66
|
-
visibleDataSets:
|
|
97
|
+
visibleDataSets: VisibleDataSetInfo[];
|
|
67
98
|
debugId: string;
|
|
99
|
+
heartbeatIntervalMs?: number;
|
|
100
|
+
private excludedDataSets: string[];
|
|
101
|
+
state: 'active' | 'stopped';
|
|
68
102
|
|
|
69
103
|
/**
|
|
70
104
|
* Constructor for HashTreeParser
|
|
@@ -76,29 +110,42 @@ class HashTreeParser {
|
|
|
76
110
|
dataSets: Array<DataSet>;
|
|
77
111
|
locus: any;
|
|
78
112
|
};
|
|
113
|
+
metadata: Metadata | null;
|
|
79
114
|
webexRequest: WebexRequestMethod;
|
|
80
115
|
locusInfoUpdateCallback: LocusInfoUpdateCallback;
|
|
81
116
|
debugId: string;
|
|
117
|
+
excludedDataSets?: string[];
|
|
82
118
|
}) {
|
|
83
119
|
const {dataSets, locus} = options.initialLocus; // extract dataSets from initialLocus
|
|
84
120
|
|
|
85
121
|
this.debugId = options.debugId;
|
|
86
122
|
this.webexRequest = options.webexRequest;
|
|
87
123
|
this.locusInfoUpdateCallback = options.locusInfoUpdateCallback;
|
|
88
|
-
this.
|
|
124
|
+
this.excludedDataSets = options.excludedDataSets || [];
|
|
125
|
+
this.visibleDataSetsUrl = locus?.links?.resources?.visibleDataSets?.url;
|
|
126
|
+
this.setVisibleDataSets(options.metadata?.visibleDataSets || [], dataSets);
|
|
127
|
+
|
|
128
|
+
this.state = 'active';
|
|
89
129
|
|
|
90
|
-
if (
|
|
130
|
+
if (options.metadata?.visibleDataSets?.length === 0) {
|
|
91
131
|
LoggerProxy.logger.warn(
|
|
92
|
-
`HashTreeParser#constructor --> ${this.debugId} No visibleDataSets found in
|
|
132
|
+
`HashTreeParser#constructor --> ${this.debugId} No visibleDataSets found in Metadata`
|
|
93
133
|
);
|
|
94
134
|
}
|
|
95
135
|
// object mapping dataset names to arrays of leaf data
|
|
96
136
|
const leafData = this.analyzeLocusHtMeta(locus);
|
|
97
137
|
|
|
138
|
+
if (options.metadata) {
|
|
139
|
+
// add also the metadata that's outside of locus object itself
|
|
140
|
+
this.analyzeMetadata(leafData, options.metadata);
|
|
141
|
+
}
|
|
142
|
+
|
|
98
143
|
LoggerProxy.logger.info(
|
|
99
|
-
`HashTreeParser#constructor -->
|
|
144
|
+
`HashTreeParser#constructor --> ${
|
|
145
|
+
this.debugId
|
|
146
|
+
} creating HashTreeParser for datasets: ${JSON.stringify(
|
|
100
147
|
dataSets.map((ds) => ds.name)
|
|
101
|
-
)}`
|
|
148
|
+
)} with visible datasets: ${JSON.stringify(this.visibleDataSets.map((vds) => vds.name))}`
|
|
102
149
|
);
|
|
103
150
|
|
|
104
151
|
for (const dataSet of dataSets) {
|
|
@@ -106,46 +153,105 @@ class HashTreeParser {
|
|
|
106
153
|
|
|
107
154
|
this.dataSets[name] = {
|
|
108
155
|
...dataSet,
|
|
109
|
-
hashTree: this.
|
|
156
|
+
hashTree: this.isVisibleDataSet(name)
|
|
110
157
|
? new HashTree(leafData[name] || [], leafCount)
|
|
111
158
|
: undefined,
|
|
112
159
|
};
|
|
113
160
|
}
|
|
114
161
|
}
|
|
115
162
|
|
|
163
|
+
/**
|
|
164
|
+
* Sets the visible data sets list for the HashTreeParser. This method should be called only at the start, to initialize
|
|
165
|
+
* the visible data sets, before any message processsing, so for example from the constructor or when resuming the parser.
|
|
166
|
+
*
|
|
167
|
+
* @param {Array<VisibleDataSetInfo>} visibleDataSets - The visible data sets to set
|
|
168
|
+
* @param {Array<DataSet>} dataSets - The "dataSets" list from Locus (yes, Locus sends visibleDataSets and dataSets as separate lists and they can differ)
|
|
169
|
+
* @returns {void}
|
|
170
|
+
*/
|
|
171
|
+
private setVisibleDataSets(visibleDataSets: VisibleDataSetInfo[], dataSets: Array<DataSet>) {
|
|
172
|
+
this.visibleDataSets = cloneDeep(visibleDataSets).filter(
|
|
173
|
+
(vds) =>
|
|
174
|
+
// exclude data sets we will never care about
|
|
175
|
+
!this.isExcludedDataSet(vds.name) &&
|
|
176
|
+
// and make sure that visibleDataSets list is consistent with dataSets list
|
|
177
|
+
dataSets.some((ds) => ds.name === vds.name)
|
|
178
|
+
);
|
|
179
|
+
}
|
|
180
|
+
|
|
181
|
+
/**
|
|
182
|
+
* Checks if the given data set name is in the list of visible data sets
|
|
183
|
+
* @param {string} dataSetName data set name to check
|
|
184
|
+
* @returns {Boolean} True if the data set is visible, false otherwise
|
|
185
|
+
*/
|
|
186
|
+
private isVisibleDataSet(dataSetName: string): boolean {
|
|
187
|
+
return this.visibleDataSets.some((vds) => vds.name === dataSetName);
|
|
188
|
+
}
|
|
189
|
+
|
|
190
|
+
/**
|
|
191
|
+
* Checks if the given data set name is in the excluded list
|
|
192
|
+
* @param {string} dataSetName data set name to check
|
|
193
|
+
* @returns {boolean} True if the data set is excluded, false otherwise
|
|
194
|
+
*/
|
|
195
|
+
private isExcludedDataSet(dataSetName: string): boolean {
|
|
196
|
+
return this.excludedDataSets.some((name) => name === dataSetName);
|
|
197
|
+
}
|
|
198
|
+
|
|
199
|
+
/**
|
|
200
|
+
* Adds a data set to the visible data sets list, unless it is in the excluded list.
|
|
201
|
+
* @param {VisibleDataSetInfo} dataSetInfo data set info to add
|
|
202
|
+
* @returns {boolean} True if the data set was added, false if it was excluded
|
|
203
|
+
*/
|
|
204
|
+
private addToVisibleDataSetsList(dataSetInfo: VisibleDataSetInfo): boolean {
|
|
205
|
+
if (this.isExcludedDataSet(dataSetInfo.name)) {
|
|
206
|
+
LoggerProxy.logger.info(
|
|
207
|
+
`HashTreeParser#addToVisibleDataSetsList --> ${this.debugId} Data set "${dataSetInfo.name}" is in the excluded list, ignoring`
|
|
208
|
+
);
|
|
209
|
+
|
|
210
|
+
return false;
|
|
211
|
+
}
|
|
212
|
+
|
|
213
|
+
this.visibleDataSets.push(dataSetInfo);
|
|
214
|
+
|
|
215
|
+
return true;
|
|
216
|
+
}
|
|
217
|
+
|
|
116
218
|
/**
|
|
117
219
|
* Initializes a new visible data set by creating a hash tree for it, adding it to all the internal structures,
|
|
118
220
|
* and sending an initial sync request to Locus with empty leaf data - that will trigger Locus to gives us all the data
|
|
119
221
|
* from that dataset (in the response or via messages).
|
|
120
222
|
*
|
|
121
|
-
* @param {
|
|
223
|
+
* @param {VisibleDataSetInfo} visibleDataSetInfo Information about the new visible data set
|
|
224
|
+
* @param {DataSet} dataSetInfo The new data set to be added
|
|
122
225
|
* @returns {Promise}
|
|
123
226
|
*/
|
|
124
227
|
private initializeNewVisibleDataSet(
|
|
125
|
-
|
|
228
|
+
visibleDataSetInfo: VisibleDataSetInfo,
|
|
229
|
+
dataSetInfo: DataSet
|
|
126
230
|
): Promise<{updateType: LocusInfoUpdateType; updatedObjects?: HashTreeObject[]}> {
|
|
127
|
-
if (this.
|
|
231
|
+
if (this.isVisibleDataSet(dataSetInfo.name)) {
|
|
128
232
|
LoggerProxy.logger.info(
|
|
129
|
-
`HashTreeParser#initializeNewVisibleDataSet --> ${this.debugId} Data set "${
|
|
233
|
+
`HashTreeParser#initializeNewVisibleDataSet --> ${this.debugId} Data set "${dataSetInfo.name}" already exists, skipping init`
|
|
130
234
|
);
|
|
131
235
|
|
|
132
236
|
return Promise.resolve({updateType: LocusInfoUpdateType.OBJECTS_UPDATED, updatedObjects: []});
|
|
133
237
|
}
|
|
134
238
|
|
|
135
239
|
LoggerProxy.logger.info(
|
|
136
|
-
`HashTreeParser#initializeNewVisibleDataSet --> ${this.debugId} Adding visible data set "${
|
|
240
|
+
`HashTreeParser#initializeNewVisibleDataSet --> ${this.debugId} Adding visible data set "${dataSetInfo.name}"`
|
|
137
241
|
);
|
|
138
242
|
|
|
139
|
-
this.
|
|
243
|
+
if (!this.addToVisibleDataSetsList(visibleDataSetInfo)) {
|
|
244
|
+
return Promise.resolve({updateType: LocusInfoUpdateType.OBJECTS_UPDATED, updatedObjects: []});
|
|
245
|
+
}
|
|
140
246
|
|
|
141
|
-
const hashTree = new HashTree([],
|
|
247
|
+
const hashTree = new HashTree([], dataSetInfo.leafCount);
|
|
142
248
|
|
|
143
|
-
this.dataSets[
|
|
144
|
-
...
|
|
249
|
+
this.dataSets[dataSetInfo.name] = {
|
|
250
|
+
...dataSetInfo,
|
|
145
251
|
hashTree,
|
|
146
252
|
};
|
|
147
253
|
|
|
148
|
-
return this.sendInitializationSyncRequestToLocus(
|
|
254
|
+
return this.sendInitializationSyncRequestToLocus(dataSetInfo.name, 'new visible data set');
|
|
149
255
|
}
|
|
150
256
|
|
|
151
257
|
/**
|
|
@@ -178,10 +284,13 @@ class HashTreeParser {
|
|
|
178
284
|
return this.sendSyncRequestToLocus(this.dataSets[datasetName], emptyLeavesData).then(
|
|
179
285
|
(syncResponse) => {
|
|
180
286
|
if (syncResponse) {
|
|
181
|
-
return
|
|
182
|
-
|
|
183
|
-
|
|
184
|
-
|
|
287
|
+
return {
|
|
288
|
+
updateType: LocusInfoUpdateType.OBJECTS_UPDATED,
|
|
289
|
+
updatedObjects: this.parseMessage(
|
|
290
|
+
syncResponse,
|
|
291
|
+
`via empty leaves /sync API call for ${debugText}`
|
|
292
|
+
),
|
|
293
|
+
};
|
|
185
294
|
}
|
|
186
295
|
|
|
187
296
|
return {updateType: LocusInfoUpdateType.OBJECTS_UPDATED, updatedObjects: []};
|
|
@@ -190,18 +299,31 @@ class HashTreeParser {
|
|
|
190
299
|
}
|
|
191
300
|
|
|
192
301
|
/**
|
|
193
|
-
* Queries Locus for information about all
|
|
302
|
+
* Queries Locus for all up-to-date information about all visible data sets
|
|
194
303
|
*
|
|
195
|
-
* @param {string} url - url from which we can get info about all data sets
|
|
196
304
|
* @returns {Promise}
|
|
197
305
|
*/
|
|
198
|
-
private
|
|
306
|
+
private getAllVisibleDataSetsFromLocus() {
|
|
307
|
+
if (!this.visibleDataSetsUrl) {
|
|
308
|
+
LoggerProxy.logger.warn(
|
|
309
|
+
`HashTreeParser#getAllVisibleDataSetsFromLocus --> ${this.debugId} No visibleDataSetsUrl, cannot get data sets information`
|
|
310
|
+
);
|
|
311
|
+
|
|
312
|
+
return Promise.resolve([]);
|
|
313
|
+
}
|
|
314
|
+
|
|
199
315
|
return this.webexRequest({
|
|
200
316
|
method: HTTP_VERBS.GET,
|
|
201
|
-
uri:
|
|
202
|
-
})
|
|
203
|
-
|
|
204
|
-
|
|
317
|
+
uri: this.visibleDataSetsUrl,
|
|
318
|
+
})
|
|
319
|
+
.then((response) => {
|
|
320
|
+
return response.body.dataSets as Array<DataSet>;
|
|
321
|
+
})
|
|
322
|
+
.catch((error) => {
|
|
323
|
+
this.checkForSentinelHttpResponse(error);
|
|
324
|
+
|
|
325
|
+
throw error;
|
|
326
|
+
});
|
|
205
327
|
}
|
|
206
328
|
|
|
207
329
|
/**
|
|
@@ -211,12 +333,14 @@ class HashTreeParser {
|
|
|
211
333
|
* @returns {Promise}
|
|
212
334
|
*/
|
|
213
335
|
async initializeFromMessage(message: HashTreeMessage) {
|
|
336
|
+
this.visibleDataSetsUrl = message.visibleDataSetsUrl;
|
|
337
|
+
|
|
214
338
|
LoggerProxy.logger.info(
|
|
215
|
-
`HashTreeParser#initializeFromMessage --> ${this.debugId} visibleDataSetsUrl=${
|
|
339
|
+
`HashTreeParser#initializeFromMessage --> ${this.debugId} visibleDataSetsUrl=${this.visibleDataSetsUrl}`
|
|
216
340
|
);
|
|
217
|
-
const
|
|
341
|
+
const visibleDataSets = await this.getAllVisibleDataSetsFromLocus();
|
|
218
342
|
|
|
219
|
-
await this.initializeDataSets(
|
|
343
|
+
await this.initializeDataSets(visibleDataSets, 'initialization from message');
|
|
220
344
|
}
|
|
221
345
|
|
|
222
346
|
/**
|
|
@@ -236,28 +360,32 @@ class HashTreeParser {
|
|
|
236
360
|
|
|
237
361
|
return;
|
|
238
362
|
}
|
|
363
|
+
this.visibleDataSetsUrl = locus.links.resources.visibleDataSets.url;
|
|
239
364
|
|
|
240
365
|
LoggerProxy.logger.info(
|
|
241
|
-
`HashTreeParser#initializeFromGetLociResponse --> ${this.debugId} visibleDataSets url: ${
|
|
366
|
+
`HashTreeParser#initializeFromGetLociResponse --> ${this.debugId} visibleDataSets url: ${this.visibleDataSetsUrl}`
|
|
242
367
|
);
|
|
243
368
|
|
|
244
|
-
const
|
|
369
|
+
const visibleDataSets = await this.getAllVisibleDataSetsFromLocus();
|
|
245
370
|
|
|
246
|
-
await this.initializeDataSets(
|
|
371
|
+
await this.initializeDataSets(visibleDataSets, 'initialization from GET /loci response');
|
|
247
372
|
}
|
|
248
373
|
|
|
249
374
|
/**
|
|
250
375
|
* Initializes data sets by doing an initialization sync on each visible data set that doesn't have a hash tree yet.
|
|
251
376
|
*
|
|
252
|
-
* @param {DataSet[]}
|
|
377
|
+
* @param {DataSet[]} visibleDataSets Array of visible DataSet objects to initialize
|
|
253
378
|
* @param {string} debugText Text to include in logs for debugging purposes
|
|
254
379
|
* @returns {Promise}
|
|
255
380
|
*/
|
|
256
|
-
private async initializeDataSets(
|
|
381
|
+
private async initializeDataSets(visibleDataSets: Array<DataSet>, debugText: string) {
|
|
382
|
+
if (this.state === 'stopped') {
|
|
383
|
+
return;
|
|
384
|
+
}
|
|
257
385
|
const updatedObjects: HashTreeObject[] = [];
|
|
258
386
|
|
|
259
|
-
for (const dataSet of
|
|
260
|
-
const {name, leafCount} = dataSet;
|
|
387
|
+
for (const dataSet of visibleDataSets) {
|
|
388
|
+
const {name, leafCount, url} = dataSet;
|
|
261
389
|
|
|
262
390
|
if (!this.dataSets[name]) {
|
|
263
391
|
LoggerProxy.logger.info(
|
|
@@ -273,7 +401,20 @@ class HashTreeParser {
|
|
|
273
401
|
);
|
|
274
402
|
}
|
|
275
403
|
|
|
276
|
-
if (this.
|
|
404
|
+
if (!this.isVisibleDataSet(name)) {
|
|
405
|
+
if (
|
|
406
|
+
!this.addToVisibleDataSetsList({
|
|
407
|
+
name,
|
|
408
|
+
url,
|
|
409
|
+
})
|
|
410
|
+
) {
|
|
411
|
+
// dataset is excluded, skip it
|
|
412
|
+
// eslint-disable-next-line no-continue
|
|
413
|
+
continue;
|
|
414
|
+
}
|
|
415
|
+
}
|
|
416
|
+
|
|
417
|
+
if (!this.dataSets[name].hashTree) {
|
|
277
418
|
LoggerProxy.logger.info(
|
|
278
419
|
`HashTreeParser#initializeDataSets --> ${this.debugId} creating hash tree for visible dataset "${name}" (${debugText})`
|
|
279
420
|
);
|
|
@@ -282,15 +423,6 @@ class HashTreeParser {
|
|
|
282
423
|
// eslint-disable-next-line no-await-in-loop
|
|
283
424
|
const data = await this.sendInitializationSyncRequestToLocus(name, debugText);
|
|
284
425
|
|
|
285
|
-
if (data.updateType === LocusInfoUpdateType.MEETING_ENDED) {
|
|
286
|
-
LoggerProxy.logger.warn(
|
|
287
|
-
`HashTreeParser#initializeDataSets --> ${this.debugId} meeting ended while initializing new visible data set "${name}"`
|
|
288
|
-
);
|
|
289
|
-
|
|
290
|
-
// throw an error, it will be caught higher up and the meeting will be destroyed
|
|
291
|
-
throw new MeetingEndedError();
|
|
292
|
-
}
|
|
293
|
-
|
|
294
426
|
if (data.updateType === LocusInfoUpdateType.OBJECTS_UPDATED) {
|
|
295
427
|
updatedObjects.push(...(data.updatedObjects || []));
|
|
296
428
|
}
|
|
@@ -316,10 +448,7 @@ class HashTreeParser {
|
|
|
316
448
|
private analyzeLocusHtMeta(locus: any, options?: {copyData?: boolean}) {
|
|
317
449
|
const {copyData = false} = options || {};
|
|
318
450
|
// object mapping dataset names to arrays of leaf data
|
|
319
|
-
const leafInfo: Record<
|
|
320
|
-
string,
|
|
321
|
-
Array<{type: ObjectType; id: number; version: number; data?: any}>
|
|
322
|
-
> = {};
|
|
451
|
+
const leafInfo: Record<string, Array<LeafInfo>> = {};
|
|
323
452
|
|
|
324
453
|
const findAndStoreMetaData = (currentLocusPart: any) => {
|
|
325
454
|
if (typeof currentLocusPart !== 'object' || currentLocusPart === null) {
|
|
@@ -329,7 +458,7 @@ class HashTreeParser {
|
|
|
329
458
|
if (currentLocusPart.htMeta && currentLocusPart.htMeta.dataSetNames) {
|
|
330
459
|
const {type, id, version} = currentLocusPart.htMeta.elementId;
|
|
331
460
|
const {dataSetNames} = currentLocusPart.htMeta;
|
|
332
|
-
const newLeafInfo:
|
|
461
|
+
const newLeafInfo: LeafInfo = {
|
|
333
462
|
type,
|
|
334
463
|
id,
|
|
335
464
|
version,
|
|
@@ -369,27 +498,62 @@ class HashTreeParser {
|
|
|
369
498
|
}
|
|
370
499
|
|
|
371
500
|
/**
|
|
372
|
-
*
|
|
501
|
+
* Analyzes the Metadata object that is sent outside of Locus object, and appends its data to passed in leafInfo
|
|
502
|
+
* structure.
|
|
373
503
|
*
|
|
374
|
-
* @param {
|
|
375
|
-
* @
|
|
504
|
+
* @param {Record<string, LeafInfo[]>} leafInfo the structure to which the Metadata info will be appended
|
|
505
|
+
* @param {Metadata} metadata Metadata object
|
|
506
|
+
* @returns {void}
|
|
376
507
|
*/
|
|
377
|
-
private
|
|
378
|
-
const
|
|
379
|
-
(dataSet) => dataSet.name.toLowerCase() === DataSetNames.MAIN
|
|
380
|
-
);
|
|
508
|
+
private analyzeMetadata(leafInfo: Record<string, LeafInfo[]>, metadata: Metadata) {
|
|
509
|
+
const {htMeta} = metadata;
|
|
381
510
|
|
|
382
511
|
if (
|
|
383
|
-
|
|
384
|
-
|
|
385
|
-
mainDataSet.root === EMPTY_HASH &&
|
|
386
|
-
this.dataSets[DataSetNames.MAIN].version < mainDataSet.version
|
|
512
|
+
htMeta?.dataSetNames?.length === 1 &&
|
|
513
|
+
htMeta.dataSetNames[0].toLowerCase() === MetadataDataSetName
|
|
387
514
|
) {
|
|
388
|
-
|
|
389
|
-
|
|
515
|
+
const {type, id, version} = metadata.htMeta.elementId;
|
|
516
|
+
|
|
517
|
+
const dataSetName = htMeta.dataSetNames[0];
|
|
518
|
+
|
|
519
|
+
if (!leafInfo[dataSetName]) {
|
|
520
|
+
leafInfo[dataSetName] = [];
|
|
521
|
+
}
|
|
522
|
+
|
|
523
|
+
leafInfo[dataSetName].push({
|
|
524
|
+
type,
|
|
525
|
+
id,
|
|
526
|
+
version,
|
|
527
|
+
});
|
|
528
|
+
} else {
|
|
529
|
+
throw new Error(
|
|
530
|
+
`${this.debugId} Metadata htMeta has unexpected dataSetNames: ${
|
|
531
|
+
htMeta && htMeta.dataSetNames.join(',')
|
|
532
|
+
}`
|
|
533
|
+
);
|
|
390
534
|
}
|
|
535
|
+
}
|
|
391
536
|
|
|
392
|
-
|
|
537
|
+
/**
|
|
538
|
+
* Checks if the provided hash tree message indicates the end of the meeting and that there won't be any more updates.
|
|
539
|
+
*
|
|
540
|
+
* @param {HashTreeMessage} message - The hash tree message to check
|
|
541
|
+
* @returns {boolean} - Returns true if the message indicates the end of the meeting, false otherwise
|
|
542
|
+
*/
|
|
543
|
+
private isEndMessage(message: HashTreeMessage) {
|
|
544
|
+
return message.dataSets.some((dataSet) => {
|
|
545
|
+
if (
|
|
546
|
+
dataSet.leafCount === 1 &&
|
|
547
|
+
dataSet.root === EMPTY_HASH &&
|
|
548
|
+
(!this.dataSets[dataSet.name] || this.dataSets[dataSet.name].version < dataSet.version) &&
|
|
549
|
+
PossibleSentinelMessageDataSetNames.includes(dataSet.name.toLowerCase())
|
|
550
|
+
) {
|
|
551
|
+
// this is a special way for Locus to indicate that this meeting has ended
|
|
552
|
+
return true;
|
|
553
|
+
}
|
|
554
|
+
|
|
555
|
+
return false;
|
|
556
|
+
});
|
|
393
557
|
}
|
|
394
558
|
|
|
395
559
|
/**
|
|
@@ -420,6 +584,90 @@ class HashTreeParser {
|
|
|
420
584
|
});
|
|
421
585
|
}
|
|
422
586
|
|
|
587
|
+
/**
|
|
588
|
+
* Asynchronously initializes new visible data sets
|
|
589
|
+
*
|
|
590
|
+
* @param {VisibleDataSetInfo[]} dataSetsRequiringInitialization list of datasets to initialize
|
|
591
|
+
* @returns {void}
|
|
592
|
+
*/
|
|
593
|
+
private queueInitForNewVisibleDataSets(dataSetsRequiringInitialization: VisibleDataSetInfo[]) {
|
|
594
|
+
LoggerProxy.logger.info(
|
|
595
|
+
`HashTreeParser#queueInitForNewVisibleDataSets --> ${
|
|
596
|
+
this.debugId
|
|
597
|
+
} queuing initialization of new visible datasets: ${dataSetsRequiringInitialization
|
|
598
|
+
.map((ds) => ds.name)
|
|
599
|
+
.join(', ')}`
|
|
600
|
+
);
|
|
601
|
+
queueMicrotask(() => {
|
|
602
|
+
this.initializeNewVisibleDataSets(dataSetsRequiringInitialization).catch((error) => {
|
|
603
|
+
if (error instanceof MeetingEndedError) {
|
|
604
|
+
this.callLocusInfoUpdateCallback({
|
|
605
|
+
updateType: LocusInfoUpdateType.MEETING_ENDED,
|
|
606
|
+
});
|
|
607
|
+
} else {
|
|
608
|
+
LoggerProxy.logger.warn(
|
|
609
|
+
`HashTreeParser#queueInitForNewVisibleDataSets --> ${
|
|
610
|
+
this.debugId
|
|
611
|
+
} error while initializing new visible datasets: ${dataSetsRequiringInitialization
|
|
612
|
+
.map((ds) => ds.name)
|
|
613
|
+
.join(', ')}: `,
|
|
614
|
+
error
|
|
615
|
+
);
|
|
616
|
+
}
|
|
617
|
+
});
|
|
618
|
+
});
|
|
619
|
+
}
|
|
620
|
+
|
|
621
|
+
/**
|
|
622
|
+
* Handles updates to Metadata object that we receive from Locus via other means than messages. Right now
|
|
623
|
+
* that means only in the API response alongside locus object.
|
|
624
|
+
*
|
|
625
|
+
* @param {Metadata} metadata received in Locus update other than a message (for example in an API response)
|
|
626
|
+
* @param {HashTreeObject[]} updatedObjects a list of updated hash tree objects to which any updates resulting from new Metadata will be added
|
|
627
|
+
* @returns {void}
|
|
628
|
+
*/
|
|
629
|
+
handleMetadataUpdate(metadata: Metadata, updatedObjects: HashTreeObject[]): void {
|
|
630
|
+
let dataSetsRequiringInitialization: VisibleDataSetInfo[] = [];
|
|
631
|
+
|
|
632
|
+
// current assumption based on Locus docs is that Metadata object lives always in "self" data set
|
|
633
|
+
const hashTree = this.dataSets[MetadataDataSetName]?.hashTree;
|
|
634
|
+
|
|
635
|
+
if (!hashTree) {
|
|
636
|
+
LoggerProxy.logger.warn(
|
|
637
|
+
`HashTreeParser#handleLocusUpdate --> ${this.debugId} received Metadata object but no hash tree for "${MetadataDataSetName}" data set exists`
|
|
638
|
+
);
|
|
639
|
+
} else {
|
|
640
|
+
const metadataUpdated = hashTree.putItem(metadata.htMeta.elementId);
|
|
641
|
+
|
|
642
|
+
if (metadataUpdated) {
|
|
643
|
+
// metadata in Locus API response is in a slightly different format than the objects in messages, so need to adapt it
|
|
644
|
+
const metadataObject: HashTreeObject = {
|
|
645
|
+
htMeta: metadata.htMeta,
|
|
646
|
+
data: metadata,
|
|
647
|
+
};
|
|
648
|
+
|
|
649
|
+
updatedObjects.push(metadataObject);
|
|
650
|
+
|
|
651
|
+
const {changeDetected, removedDataSets, addedDataSets} = this.checkForVisibleDataSetChanges(
|
|
652
|
+
[metadataObject]
|
|
653
|
+
);
|
|
654
|
+
|
|
655
|
+
if (changeDetected) {
|
|
656
|
+
dataSetsRequiringInitialization = this.processVisibleDataSetChanges(
|
|
657
|
+
removedDataSets,
|
|
658
|
+
addedDataSets,
|
|
659
|
+
updatedObjects
|
|
660
|
+
);
|
|
661
|
+
}
|
|
662
|
+
|
|
663
|
+
if (dataSetsRequiringInitialization.length > 0) {
|
|
664
|
+
// there are some data sets that we need to initialize asynchronously
|
|
665
|
+
this.queueInitForNewVisibleDataSets(dataSetsRequiringInitialization);
|
|
666
|
+
}
|
|
667
|
+
}
|
|
668
|
+
}
|
|
669
|
+
}
|
|
670
|
+
|
|
423
671
|
/**
|
|
424
672
|
* This method should be called when we receive a partial locus DTO that contains dataSets and htMeta information
|
|
425
673
|
* It updates the hash trees with the new leaf data based on the received Locus
|
|
@@ -427,22 +675,32 @@ class HashTreeParser {
|
|
|
427
675
|
* @param {Object} update - The locus update containing data sets and locus information
|
|
428
676
|
* @returns {void}
|
|
429
677
|
*/
|
|
430
|
-
handleLocusUpdate(update: {dataSets?: Array<DataSet>; locus: any}): void {
|
|
431
|
-
|
|
678
|
+
handleLocusUpdate(update: {dataSets?: Array<DataSet>; locus: any; metadata?: Metadata}): void {
|
|
679
|
+
if (this.state === 'stopped') {
|
|
680
|
+
return;
|
|
681
|
+
}
|
|
682
|
+
|
|
683
|
+
const {dataSets, locus, metadata} = update;
|
|
432
684
|
|
|
433
685
|
if (!dataSets) {
|
|
434
|
-
LoggerProxy.logger.
|
|
686
|
+
LoggerProxy.logger.info(
|
|
435
687
|
`HashTreeParser#handleLocusUpdate --> ${this.debugId} received hash tree update without dataSets`
|
|
436
688
|
);
|
|
437
|
-
}
|
|
438
|
-
|
|
439
|
-
|
|
689
|
+
} else {
|
|
690
|
+
for (const dataSet of dataSets) {
|
|
691
|
+
this.updateDataSetInfo(dataSet);
|
|
692
|
+
}
|
|
440
693
|
}
|
|
441
694
|
const updatedObjects: HashTreeObject[] = [];
|
|
442
695
|
|
|
443
696
|
// first, analyze the locus object to extract the hash tree objects' htMeta and data from it
|
|
444
697
|
const leafInfo = this.analyzeLocusHtMeta(locus, {copyData: true});
|
|
445
698
|
|
|
699
|
+
// if we got metadata, process it (currently that means only potential visible data set list changes)
|
|
700
|
+
if (metadata) {
|
|
701
|
+
this.handleMetadataUpdate(metadata, updatedObjects);
|
|
702
|
+
}
|
|
703
|
+
|
|
446
704
|
// then process the data in hash trees, if it is a new version, then add it to updatedObjects
|
|
447
705
|
Object.keys(leafInfo).forEach((dataSetName) => {
|
|
448
706
|
if (this.dataSets[dataSetName]) {
|
|
@@ -477,7 +735,7 @@ class HashTreeParser {
|
|
|
477
735
|
);
|
|
478
736
|
}
|
|
479
737
|
} else {
|
|
480
|
-
LoggerProxy.logger.
|
|
738
|
+
LoggerProxy.logger.info(
|
|
481
739
|
`HashTreeParser#handleLocusUpdate --> ${this.debugId} received leaf data for unknown data set "${dataSetName}", ignoring`
|
|
482
740
|
);
|
|
483
741
|
}
|
|
@@ -493,9 +751,6 @@ class HashTreeParser {
|
|
|
493
751
|
updatedObjects,
|
|
494
752
|
});
|
|
495
753
|
}
|
|
496
|
-
|
|
497
|
-
// todo: once Locus design on how visible data sets will be communicated in subsequent API responses is confirmed,
|
|
498
|
-
// we'll need to check here if visible data sets have changed and update this.visibleDataSets, remove/create hash trees etc
|
|
499
754
|
}
|
|
500
755
|
|
|
501
756
|
/**
|
|
@@ -511,7 +766,7 @@ class HashTreeParser {
|
|
|
511
766
|
};
|
|
512
767
|
|
|
513
768
|
LoggerProxy.logger.info(
|
|
514
|
-
`HashTreeParser#
|
|
769
|
+
`HashTreeParser#updateDataSetInfo --> ${this.debugId} created entry for "${receivedDataSet.name}" dataset: version=${receivedDataSet.version}, root=${receivedDataSet.root}`
|
|
515
770
|
);
|
|
516
771
|
|
|
517
772
|
return;
|
|
@@ -526,7 +781,7 @@ class HashTreeParser {
|
|
|
526
781
|
exponent: receivedDataSet.backoff.exponent,
|
|
527
782
|
};
|
|
528
783
|
LoggerProxy.logger.info(
|
|
529
|
-
`HashTreeParser#
|
|
784
|
+
`HashTreeParser#updateDataSetInfo --> ${this.debugId} updated "${receivedDataSet.name}" dataset to version=${receivedDataSet.version}, root=${receivedDataSet.root}`
|
|
530
785
|
);
|
|
531
786
|
}
|
|
532
787
|
}
|
|
@@ -537,25 +792,30 @@ class HashTreeParser {
|
|
|
537
792
|
* @returns {Object} An object containing the removed and added visible data sets.
|
|
538
793
|
*/
|
|
539
794
|
private checkForVisibleDataSetChanges(updatedObjects: HashTreeObject[]) {
|
|
540
|
-
let removedDataSets:
|
|
541
|
-
let addedDataSets:
|
|
795
|
+
let removedDataSets: VisibleDataSetInfo[] = [];
|
|
796
|
+
let addedDataSets: VisibleDataSetInfo[] = [];
|
|
542
797
|
|
|
543
|
-
// visibleDataSets can only be changed by
|
|
798
|
+
// visibleDataSets can only be changed by Metadata object updates
|
|
544
799
|
updatedObjects.forEach((object) => {
|
|
545
|
-
|
|
546
|
-
|
|
547
|
-
|
|
800
|
+
if (isMetadata(object) && object.data?.visibleDataSets) {
|
|
801
|
+
const newVisibleDataSets = object.data.visibleDataSets.filter(
|
|
802
|
+
(vds) => !this.isExcludedDataSet(vds.name)
|
|
803
|
+
);
|
|
548
804
|
|
|
549
|
-
removedDataSets = this.visibleDataSets.filter(
|
|
550
|
-
|
|
805
|
+
removedDataSets = this.visibleDataSets.filter(
|
|
806
|
+
(ds) => !newVisibleDataSets.some((nvs) => nvs.name === ds.name)
|
|
807
|
+
);
|
|
808
|
+
addedDataSets = newVisibleDataSets.filter((nvs) =>
|
|
809
|
+
this.visibleDataSets.every((ds) => ds.name !== nvs.name)
|
|
810
|
+
);
|
|
551
811
|
|
|
552
812
|
if (removedDataSets.length > 0 || addedDataSets.length > 0) {
|
|
553
813
|
LoggerProxy.logger.info(
|
|
554
814
|
`HashTreeParser#checkForVisibleDataSetChanges --> ${
|
|
555
815
|
this.debugId
|
|
556
|
-
} visible data sets change: removed: ${removedDataSets
|
|
557
|
-
|
|
558
|
-
|
|
816
|
+
} visible data sets change: removed: ${removedDataSets
|
|
817
|
+
.map((ds) => ds.name)
|
|
818
|
+
.join(', ')}, added: ${addedDataSets.map((ds) => ds.name).join(', ')}`
|
|
559
819
|
);
|
|
560
820
|
}
|
|
561
821
|
}
|
|
@@ -577,11 +837,15 @@ class HashTreeParser {
|
|
|
577
837
|
private deleteHashTree(dataSetName: string) {
|
|
578
838
|
this.dataSets[dataSetName].hashTree = undefined;
|
|
579
839
|
|
|
580
|
-
// we also need to stop the
|
|
840
|
+
// we also need to stop the timers as there is no hash tree anymore to sync
|
|
581
841
|
if (this.dataSets[dataSetName].timer) {
|
|
582
842
|
clearTimeout(this.dataSets[dataSetName].timer);
|
|
583
843
|
this.dataSets[dataSetName].timer = undefined;
|
|
584
844
|
}
|
|
845
|
+
if (this.dataSets[dataSetName].heartbeatWatchdogTimer) {
|
|
846
|
+
clearTimeout(this.dataSets[dataSetName].heartbeatWatchdogTimer);
|
|
847
|
+
this.dataSets[dataSetName].heartbeatWatchdogTimer = undefined;
|
|
848
|
+
}
|
|
585
849
|
}
|
|
586
850
|
|
|
587
851
|
/**
|
|
@@ -593,49 +857,51 @@ class HashTreeParser {
|
|
|
593
857
|
* visible data sets and they require async initialization, the names of these data sets
|
|
594
858
|
* are returned in an array.
|
|
595
859
|
*
|
|
596
|
-
* @param {
|
|
597
|
-
* @param {
|
|
860
|
+
* @param {VisibleDataSetInfo[]} removedDataSets - The list of removed data sets.
|
|
861
|
+
* @param {VisibleDataSetInfo[]} addedDataSets - The list of added data sets.
|
|
598
862
|
* @param {HashTreeObject[]} updatedObjects - The list of updated hash tree objects to which changes will be added.
|
|
599
|
-
* @returns {
|
|
863
|
+
* @returns {VisibleDataSetInfo[]} list of data sets that couldn't be initialized synchronously
|
|
600
864
|
*/
|
|
601
865
|
private processVisibleDataSetChanges(
|
|
602
|
-
removedDataSets:
|
|
603
|
-
addedDataSets:
|
|
866
|
+
removedDataSets: VisibleDataSetInfo[],
|
|
867
|
+
addedDataSets: VisibleDataSetInfo[],
|
|
604
868
|
updatedObjects: HashTreeObject[]
|
|
605
|
-
):
|
|
606
|
-
const dataSetsRequiringInitialization = [];
|
|
869
|
+
): VisibleDataSetInfo[] {
|
|
870
|
+
const dataSetsRequiringInitialization: VisibleDataSetInfo[] = [];
|
|
607
871
|
|
|
608
872
|
// if a visible data set was removed, we need to tell our client that all objects from it are removed
|
|
609
873
|
const removedObjects: HashTreeObject[] = [];
|
|
610
874
|
|
|
611
875
|
removedDataSets.forEach((ds) => {
|
|
612
|
-
if (this.dataSets[ds]?.hashTree) {
|
|
613
|
-
for (let i = 0; i < this.dataSets[ds].hashTree.numLeaves; i += 1) {
|
|
876
|
+
if (this.dataSets[ds.name]?.hashTree) {
|
|
877
|
+
for (let i = 0; i < this.dataSets[ds.name].hashTree.numLeaves; i += 1) {
|
|
614
878
|
removedObjects.push(
|
|
615
|
-
...this.dataSets[ds].hashTree.getLeafData(i).map((elementId) => ({
|
|
879
|
+
...this.dataSets[ds.name].hashTree.getLeafData(i).map((elementId) => ({
|
|
616
880
|
htMeta: {
|
|
617
881
|
elementId,
|
|
618
|
-
dataSetNames: [ds],
|
|
882
|
+
dataSetNames: [ds.name],
|
|
619
883
|
},
|
|
620
884
|
data: null,
|
|
621
885
|
}))
|
|
622
886
|
);
|
|
623
887
|
}
|
|
624
888
|
|
|
625
|
-
this.deleteHashTree(ds);
|
|
889
|
+
this.deleteHashTree(ds.name);
|
|
626
890
|
}
|
|
627
891
|
});
|
|
628
|
-
this.visibleDataSets = this.visibleDataSets.filter(
|
|
892
|
+
this.visibleDataSets = this.visibleDataSets.filter(
|
|
893
|
+
(vds) => !removedDataSets.some((rds) => rds.name === vds.name)
|
|
894
|
+
);
|
|
629
895
|
updatedObjects.push(...removedObjects);
|
|
630
896
|
|
|
631
897
|
// now setup the new visible data sets
|
|
632
898
|
for (const ds of addedDataSets) {
|
|
633
|
-
const dataSetInfo = this.dataSets[ds];
|
|
899
|
+
const dataSetInfo = this.dataSets[ds.name];
|
|
634
900
|
|
|
635
901
|
if (dataSetInfo) {
|
|
636
|
-
if (this.
|
|
902
|
+
if (this.isVisibleDataSet(dataSetInfo.name)) {
|
|
637
903
|
LoggerProxy.logger.info(
|
|
638
|
-
`HashTreeParser#processVisibleDataSetChanges --> ${this.debugId} Data set "${ds}" is already visible, skipping`
|
|
904
|
+
`HashTreeParser#processVisibleDataSetChanges --> ${this.debugId} Data set "${ds.name}" is already visible, skipping`
|
|
639
905
|
);
|
|
640
906
|
|
|
641
907
|
// eslint-disable-next-line no-continue
|
|
@@ -643,10 +909,13 @@ class HashTreeParser {
|
|
|
643
909
|
}
|
|
644
910
|
|
|
645
911
|
LoggerProxy.logger.info(
|
|
646
|
-
`HashTreeParser#processVisibleDataSetChanges --> ${this.debugId} Adding visible data set "${ds}"`
|
|
912
|
+
`HashTreeParser#processVisibleDataSetChanges --> ${this.debugId} Adding visible data set "${ds.name}"`
|
|
647
913
|
);
|
|
648
914
|
|
|
649
|
-
this.
|
|
915
|
+
if (!this.addToVisibleDataSetsList(ds)) {
|
|
916
|
+
// eslint-disable-next-line no-continue
|
|
917
|
+
continue;
|
|
918
|
+
}
|
|
650
919
|
|
|
651
920
|
const hashTree = new HashTree([], dataSetInfo.leafCount);
|
|
652
921
|
|
|
@@ -654,9 +923,13 @@ class HashTreeParser {
|
|
|
654
923
|
...dataSetInfo,
|
|
655
924
|
hashTree,
|
|
656
925
|
};
|
|
926
|
+
|
|
927
|
+
// this call is needed here for the edge case where we receive a message with new visible data sets
|
|
928
|
+
// and there are no objects belonging to these data sets in the message but we already have the info about them in this.dataSets
|
|
929
|
+
this.runSyncAlgorithm(this.dataSets[dataSetInfo.name]);
|
|
657
930
|
} else {
|
|
658
931
|
LoggerProxy.logger.info(
|
|
659
|
-
`HashTreeParser#processVisibleDataSetChanges --> ${this.debugId} visible data set "${ds}" added but no info about it in our dataSets structures`
|
|
932
|
+
`HashTreeParser#processVisibleDataSetChanges --> ${this.debugId} visible data set "${ds.name}" added but no info about it in our dataSets structures`
|
|
660
933
|
);
|
|
661
934
|
// todo: add a metric here
|
|
662
935
|
dataSetsRequiringInitialization.push(ds);
|
|
@@ -670,32 +943,31 @@ class HashTreeParser {
|
|
|
670
943
|
* Adds entries to the passed in updateObjects array
|
|
671
944
|
* for the changes that result from adding and removing visible data sets.
|
|
672
945
|
*
|
|
673
|
-
* @param {
|
|
674
|
-
* @param {string[]} addedDataSets - The list of added data sets.
|
|
946
|
+
* @param {VisibleDataSetInfo[]} addedDataSets - The list of added data sets.
|
|
675
947
|
* @returns {Promise<void>}
|
|
676
948
|
*/
|
|
677
|
-
private async initializeNewVisibleDataSets(
|
|
678
|
-
|
|
679
|
-
|
|
680
|
-
|
|
681
|
-
const allDataSets = await this.
|
|
949
|
+
private async initializeNewVisibleDataSets(addedDataSets: VisibleDataSetInfo[]): Promise<void> {
|
|
950
|
+
if (this.state === 'stopped') {
|
|
951
|
+
return;
|
|
952
|
+
}
|
|
953
|
+
const allDataSets = await this.getAllVisibleDataSetsFromLocus();
|
|
682
954
|
|
|
683
955
|
for (const ds of addedDataSets) {
|
|
684
|
-
const dataSetInfo = allDataSets.find((d) => d.name === ds);
|
|
956
|
+
const dataSetInfo = allDataSets.find((d) => d.name === ds.name);
|
|
685
957
|
|
|
686
958
|
LoggerProxy.logger.info(
|
|
687
|
-
`HashTreeParser#initializeNewVisibleDataSets --> ${this.debugId} initializing data set "${ds}"`
|
|
959
|
+
`HashTreeParser#initializeNewVisibleDataSets --> ${this.debugId} initializing data set "${ds.name}"`
|
|
688
960
|
);
|
|
689
961
|
|
|
690
962
|
if (!dataSetInfo) {
|
|
691
963
|
LoggerProxy.logger.warn(
|
|
692
|
-
`HashTreeParser#
|
|
964
|
+
`HashTreeParser#initializeNewVisibleDataSets --> ${this.debugId} missing info about data set "${ds.name}" in Locus response from visibleDataSetsUrl`
|
|
693
965
|
);
|
|
694
966
|
} else {
|
|
695
967
|
// we're awaiting in a loop, because in practice there will be only one new data set at a time,
|
|
696
968
|
// so no point in trying to parallelize this
|
|
697
969
|
// eslint-disable-next-line no-await-in-loop
|
|
698
|
-
const updates = await this.initializeNewVisibleDataSet(dataSetInfo);
|
|
970
|
+
const updates = await this.initializeNewVisibleDataSet(ds, dataSetInfo);
|
|
699
971
|
|
|
700
972
|
this.callLocusInfoUpdateCallback(updates);
|
|
701
973
|
}
|
|
@@ -707,12 +979,13 @@ class HashTreeParser {
|
|
|
707
979
|
*
|
|
708
980
|
* @param {HashTreeMessage} message - The hash tree message containing data sets and objects to be processed
|
|
709
981
|
* @param {string} [debugText] - Optional debug text to include in logs
|
|
710
|
-
* @returns {
|
|
982
|
+
* @returns {HashTreeObject[]} list of hash tree objects that were updated as a result of processing the message
|
|
711
983
|
*/
|
|
712
|
-
private
|
|
713
|
-
|
|
714
|
-
|
|
715
|
-
|
|
984
|
+
private parseMessage(message: HashTreeMessage, debugText?: string): HashTreeObject[] {
|
|
985
|
+
if (this.state === 'stopped') {
|
|
986
|
+
return [];
|
|
987
|
+
}
|
|
988
|
+
|
|
716
989
|
const {dataSets, visibleDataSetsUrl} = message;
|
|
717
990
|
|
|
718
991
|
LoggerProxy.logger.info(
|
|
@@ -730,48 +1003,37 @@ class HashTreeParser {
|
|
|
730
1003
|
this.visibleDataSetsUrl = visibleDataSetsUrl;
|
|
731
1004
|
dataSets.forEach((dataSet) => this.updateDataSetInfo(dataSet));
|
|
732
1005
|
|
|
733
|
-
if (this.isEndMessage(message)) {
|
|
734
|
-
LoggerProxy.logger.info(
|
|
735
|
-
`HashTreeParser#parseMessage --> ${this.debugId} received END message`
|
|
736
|
-
);
|
|
737
|
-
this.stopAllTimers();
|
|
738
|
-
|
|
739
|
-
return {updateType: LocusInfoUpdateType.MEETING_ENDED};
|
|
740
|
-
}
|
|
741
|
-
|
|
742
|
-
let isRosterDropped = false;
|
|
743
1006
|
const updatedObjects: HashTreeObject[] = [];
|
|
744
1007
|
|
|
745
1008
|
// when we detect new visible datasets, it may be that the metadata about them is not
|
|
746
1009
|
// available in the message, they will require separate async initialization
|
|
747
1010
|
let dataSetsRequiringInitialization = [];
|
|
748
1011
|
|
|
749
|
-
// first find out if there are any visible data set changes - they're signalled in
|
|
750
|
-
const
|
|
751
|
-
|
|
752
|
-
isSelf(object)
|
|
1012
|
+
// first find out if there are any visible data set changes - they're signalled in Metadata object updates
|
|
1013
|
+
const metadataUpdates = (message.locusStateElements || []).filter((object) =>
|
|
1014
|
+
isMetadata(object)
|
|
753
1015
|
);
|
|
754
1016
|
|
|
755
|
-
if (
|
|
756
|
-
const
|
|
1017
|
+
if (metadataUpdates.length > 0) {
|
|
1018
|
+
const updatedMetadataObjects = [];
|
|
757
1019
|
|
|
758
|
-
|
|
1020
|
+
metadataUpdates.forEach((object) => {
|
|
759
1021
|
// todo: once Locus supports it, we will use the "view" field here instead of dataSetNames
|
|
760
1022
|
for (const dataSetName of object.htMeta.dataSetNames) {
|
|
761
1023
|
const hashTree = this.dataSets[dataSetName]?.hashTree;
|
|
762
1024
|
|
|
763
1025
|
if (hashTree && object.data) {
|
|
764
1026
|
if (hashTree.putItem(object.htMeta.elementId)) {
|
|
765
|
-
|
|
1027
|
+
updatedMetadataObjects.push(object);
|
|
766
1028
|
}
|
|
767
1029
|
}
|
|
768
1030
|
}
|
|
769
1031
|
});
|
|
770
1032
|
|
|
771
|
-
updatedObjects.push(...
|
|
1033
|
+
updatedObjects.push(...updatedMetadataObjects);
|
|
772
1034
|
|
|
773
1035
|
const {changeDetected, removedDataSets, addedDataSets} =
|
|
774
|
-
this.checkForVisibleDataSetChanges(
|
|
1036
|
+
this.checkForVisibleDataSetChanges(updatedMetadataObjects);
|
|
775
1037
|
|
|
776
1038
|
if (changeDetected) {
|
|
777
1039
|
dataSetsRequiringInitialization = this.processVisibleDataSetChanges(
|
|
@@ -782,64 +1044,49 @@ class HashTreeParser {
|
|
|
782
1044
|
}
|
|
783
1045
|
}
|
|
784
1046
|
|
|
785
|
-
|
|
786
|
-
|
|
787
|
-
|
|
788
|
-
|
|
789
|
-
|
|
790
|
-
|
|
791
|
-
|
|
792
|
-
|
|
793
|
-
|
|
794
|
-
|
|
795
|
-
|
|
796
|
-
|
|
797
|
-
|
|
798
|
-
|
|
799
|
-
object
|
|
800
|
-
|
|
801
|
-
|
|
802
|
-
|
|
803
|
-
|
|
804
|
-
|
|
805
|
-
|
|
806
|
-
(
|
|
807
|
-
|
|
808
|
-
if (
|
|
809
|
-
|
|
1047
|
+
if (message.locusStateElements?.length > 0) {
|
|
1048
|
+
// by this point we now have this.dataSets setup for data sets from this message
|
|
1049
|
+
// and hash trees created for the new visible data sets,
|
|
1050
|
+
// so we can now process all the updates from the message
|
|
1051
|
+
dataSets.forEach((dataSet) => {
|
|
1052
|
+
if (this.dataSets[dataSet.name]) {
|
|
1053
|
+
const {hashTree} = this.dataSets[dataSet.name];
|
|
1054
|
+
|
|
1055
|
+
if (hashTree) {
|
|
1056
|
+
const locusStateElementsForThisSet = message.locusStateElements.filter((object) =>
|
|
1057
|
+
object.htMeta.dataSetNames.includes(dataSet.name)
|
|
1058
|
+
);
|
|
1059
|
+
|
|
1060
|
+
const appliedChangesList = hashTree.updateItems(
|
|
1061
|
+
locusStateElementsForThisSet.map((object) =>
|
|
1062
|
+
object.data
|
|
1063
|
+
? {operation: 'update', item: object.htMeta.elementId}
|
|
1064
|
+
: {operation: 'remove', item: object.htMeta.elementId}
|
|
1065
|
+
)
|
|
1066
|
+
);
|
|
1067
|
+
|
|
1068
|
+
zip(appliedChangesList, locusStateElementsForThisSet).forEach(
|
|
1069
|
+
([changeApplied, object]) => {
|
|
1070
|
+
if (changeApplied) {
|
|
1071
|
+
// add to updatedObjects so that our locus DTO will get updated with the new object
|
|
1072
|
+
updatedObjects.push(object);
|
|
810
1073
|
}
|
|
811
|
-
// add to updatedObjects so that our locus DTO will get updated with the new object
|
|
812
|
-
updatedObjects.push(object);
|
|
813
1074
|
}
|
|
814
|
-
|
|
815
|
-
|
|
816
|
-
|
|
817
|
-
|
|
818
|
-
|
|
819
|
-
|
|
1075
|
+
);
|
|
1076
|
+
} else {
|
|
1077
|
+
LoggerProxy.logger.info(
|
|
1078
|
+
`Locus-info:index#parseMessage --> ${this.debugId} unexpected (not visible) dataSet ${dataSet.name} received in hash tree message`
|
|
1079
|
+
);
|
|
1080
|
+
}
|
|
820
1081
|
}
|
|
821
|
-
}
|
|
822
1082
|
|
|
823
|
-
if (!isRosterDropped) {
|
|
824
1083
|
this.runSyncAlgorithm(dataSet);
|
|
825
|
-
}
|
|
826
|
-
});
|
|
827
|
-
|
|
828
|
-
if (isRosterDropped) {
|
|
829
|
-
LoggerProxy.logger.info(
|
|
830
|
-
`HashTreeParser#parseMessage --> ${this.debugId} detected roster drop`
|
|
831
|
-
);
|
|
832
|
-
this.stopAllTimers();
|
|
833
|
-
|
|
834
|
-
// in case of roster drop we don't care about other updates
|
|
835
|
-
return {updateType: LocusInfoUpdateType.MEETING_ENDED};
|
|
1084
|
+
});
|
|
836
1085
|
}
|
|
837
1086
|
|
|
838
1087
|
if (dataSetsRequiringInitialization.length > 0) {
|
|
839
1088
|
// there are some data sets that we need to initialize asynchronously
|
|
840
|
-
|
|
841
|
-
this.initializeNewVisibleDataSets(message, dataSetsRequiringInitialization);
|
|
842
|
-
});
|
|
1089
|
+
this.queueInitForNewVisibleDataSets(dataSetsRequiringInitialization);
|
|
843
1090
|
}
|
|
844
1091
|
|
|
845
1092
|
if (updatedObjects.length === 0) {
|
|
@@ -848,7 +1095,7 @@ class HashTreeParser {
|
|
|
848
1095
|
);
|
|
849
1096
|
}
|
|
850
1097
|
|
|
851
|
-
return
|
|
1098
|
+
return updatedObjects;
|
|
852
1099
|
}
|
|
853
1100
|
|
|
854
1101
|
/**
|
|
@@ -858,13 +1105,32 @@ class HashTreeParser {
|
|
|
858
1105
|
* @param {string} [debugText] - Optional debug text to include in logs
|
|
859
1106
|
* @returns {void}
|
|
860
1107
|
*/
|
|
861
|
-
|
|
862
|
-
if (
|
|
1108
|
+
handleMessage(message: HashTreeMessage, debugText?: string) {
|
|
1109
|
+
if (this.state === 'stopped') {
|
|
1110
|
+
return;
|
|
1111
|
+
}
|
|
1112
|
+
|
|
1113
|
+
if (message.heartbeatIntervalMs) {
|
|
1114
|
+
this.heartbeatIntervalMs = message.heartbeatIntervalMs;
|
|
1115
|
+
}
|
|
1116
|
+
if (this.isEndMessage(message)) {
|
|
1117
|
+
LoggerProxy.logger.info(
|
|
1118
|
+
`HashTreeParser#handleMessage --> ${this.debugId} received sentinel END MEETING message`
|
|
1119
|
+
);
|
|
1120
|
+
this.stopAllTimers();
|
|
1121
|
+
|
|
1122
|
+
this.callLocusInfoUpdateCallback({updateType: LocusInfoUpdateType.MEETING_ENDED});
|
|
1123
|
+
} else if (message.locusStateElements === undefined) {
|
|
863
1124
|
this.handleRootHashHeartBeatMessage(message);
|
|
1125
|
+
this.resetHeartbeatWatchdogs(message.dataSets);
|
|
864
1126
|
} else {
|
|
865
|
-
const
|
|
1127
|
+
const updatedObjects = this.parseMessage(message, debugText);
|
|
866
1128
|
|
|
867
|
-
this.
|
|
1129
|
+
this.resetHeartbeatWatchdogs(message.dataSets);
|
|
1130
|
+
this.callLocusInfoUpdateCallback({
|
|
1131
|
+
updateType: LocusInfoUpdateType.OBJECTS_UPDATED,
|
|
1132
|
+
updatedObjects,
|
|
1133
|
+
});
|
|
868
1134
|
}
|
|
869
1135
|
}
|
|
870
1136
|
|
|
@@ -878,9 +1144,55 @@ class HashTreeParser {
|
|
|
878
1144
|
updateType: LocusInfoUpdateType;
|
|
879
1145
|
updatedObjects?: HashTreeObject[];
|
|
880
1146
|
}) {
|
|
1147
|
+
if (this.state === 'stopped') {
|
|
1148
|
+
return;
|
|
1149
|
+
}
|
|
1150
|
+
|
|
881
1151
|
const {updateType, updatedObjects} = updates;
|
|
882
1152
|
|
|
883
|
-
if (updateType
|
|
1153
|
+
if (updateType === LocusInfoUpdateType.OBJECTS_UPDATED && updatedObjects?.length > 0) {
|
|
1154
|
+
// Filter out updates for objects that already have a higher version in their datasets,
|
|
1155
|
+
// or removals for objects that still exist in any of their datasets
|
|
1156
|
+
const filteredUpdates = updatedObjects.filter((object) => {
|
|
1157
|
+
const {elementId} = object.htMeta;
|
|
1158
|
+
const {type, id, version} = elementId;
|
|
1159
|
+
|
|
1160
|
+
// Check all datasets
|
|
1161
|
+
for (const dataSetName of Object.keys(this.dataSets)) {
|
|
1162
|
+
const dataSet = this.dataSets[dataSetName];
|
|
1163
|
+
|
|
1164
|
+
// only visible datasets have hash trees set
|
|
1165
|
+
if (dataSet?.hashTree) {
|
|
1166
|
+
const existingVersion = dataSet.hashTree.getItemVersion(id, type);
|
|
1167
|
+
if (existingVersion !== undefined) {
|
|
1168
|
+
if (object.data) {
|
|
1169
|
+
// For updates: filter out if any dataset has a higher version
|
|
1170
|
+
if (existingVersion > version) {
|
|
1171
|
+
LoggerProxy.logger.info(
|
|
1172
|
+
`HashTreeParser#callLocusInfoUpdateCallback --> ${this.debugId} Filtering out update for ${type}:${id} v${version} because dataset "${dataSetName}" has v${existingVersion}`
|
|
1173
|
+
);
|
|
1174
|
+
|
|
1175
|
+
return false;
|
|
1176
|
+
}
|
|
1177
|
+
} else if (existingVersion >= version) {
|
|
1178
|
+
// For removals: filter out if the object still exists in any dataset
|
|
1179
|
+
LoggerProxy.logger.info(
|
|
1180
|
+
`HashTreeParser#callLocusInfoUpdateCallback --> ${this.debugId} Filtering out removal for ${type}:${id} v${version} because dataset "${dataSetName}" still has v${existingVersion}`
|
|
1181
|
+
);
|
|
1182
|
+
|
|
1183
|
+
return false;
|
|
1184
|
+
}
|
|
1185
|
+
}
|
|
1186
|
+
}
|
|
1187
|
+
}
|
|
1188
|
+
|
|
1189
|
+
return true;
|
|
1190
|
+
});
|
|
1191
|
+
|
|
1192
|
+
if (filteredUpdates.length > 0) {
|
|
1193
|
+
this.locusInfoUpdateCallback(updateType, {updatedObjects: filteredUpdates});
|
|
1194
|
+
}
|
|
1195
|
+
} else if (updateType !== LocusInfoUpdateType.OBJECTS_UPDATED) {
|
|
884
1196
|
this.locusInfoUpdateCallback(updateType, {updatedObjects});
|
|
885
1197
|
}
|
|
886
1198
|
}
|
|
@@ -899,6 +1211,88 @@ class HashTreeParser {
|
|
|
899
1211
|
return Math.round(randomValue ** exponent * maxMs);
|
|
900
1212
|
}
|
|
901
1213
|
|
|
1214
|
+
/**
|
|
1215
|
+
* Performs a sync for the given data set.
|
|
1216
|
+
*
|
|
1217
|
+
* @param {InternalDataSet} dataSet - The data set to sync
|
|
1218
|
+
* @param {string} rootHash - Our current root hash for this data set
|
|
1219
|
+
* @param {string} reason - The reason for the sync (used for logging)
|
|
1220
|
+
* @returns {Promise<void>}
|
|
1221
|
+
*/
|
|
1222
|
+
private async performSync(
|
|
1223
|
+
dataSet: InternalDataSet,
|
|
1224
|
+
rootHash: string,
|
|
1225
|
+
reason: string
|
|
1226
|
+
): Promise<void> {
|
|
1227
|
+
if (!dataSet.hashTree) {
|
|
1228
|
+
return;
|
|
1229
|
+
}
|
|
1230
|
+
|
|
1231
|
+
try {
|
|
1232
|
+
LoggerProxy.logger.info(
|
|
1233
|
+
`HashTreeParser#performSync --> ${this.debugId} ${reason}, syncing data set "${dataSet.name}"`
|
|
1234
|
+
);
|
|
1235
|
+
|
|
1236
|
+
const mismatchedLeavesData: Record<number, LeafDataItem[]> = {};
|
|
1237
|
+
|
|
1238
|
+
if (dataSet.leafCount !== 1) {
|
|
1239
|
+
let receivedHashes;
|
|
1240
|
+
|
|
1241
|
+
try {
|
|
1242
|
+
// request hashes from sender
|
|
1243
|
+
const {hashes, dataSet: latestDataSetInfo} = await this.getHashesFromLocus(
|
|
1244
|
+
dataSet.name,
|
|
1245
|
+
rootHash
|
|
1246
|
+
);
|
|
1247
|
+
|
|
1248
|
+
receivedHashes = hashes;
|
|
1249
|
+
|
|
1250
|
+
dataSet.hashTree.resize(latestDataSetInfo.leafCount);
|
|
1251
|
+
} catch (error) {
|
|
1252
|
+
if (error.statusCode === 409) {
|
|
1253
|
+
// this is a leaf count mismatch, we should do nothing, just wait for another heartbeat message from Locus
|
|
1254
|
+
LoggerProxy.logger.info(
|
|
1255
|
+
`HashTreeParser#getHashesFromLocus --> ${this.debugId} Got 409 when fetching hashes for data set "${dataSet.name}": ${error.message}`
|
|
1256
|
+
);
|
|
1257
|
+
|
|
1258
|
+
return;
|
|
1259
|
+
}
|
|
1260
|
+
throw error;
|
|
1261
|
+
}
|
|
1262
|
+
|
|
1263
|
+
// identify mismatched leaves
|
|
1264
|
+
const mismatchedLeaveIndexes = dataSet.hashTree.diffHashes(receivedHashes);
|
|
1265
|
+
|
|
1266
|
+
mismatchedLeaveIndexes.forEach((index) => {
|
|
1267
|
+
mismatchedLeavesData[index] = dataSet.hashTree.getLeafData(index);
|
|
1268
|
+
});
|
|
1269
|
+
} else {
|
|
1270
|
+
mismatchedLeavesData[0] = dataSet.hashTree.getLeafData(0);
|
|
1271
|
+
}
|
|
1272
|
+
// request sync for mismatched leaves
|
|
1273
|
+
if (Object.keys(mismatchedLeavesData).length > 0) {
|
|
1274
|
+
const syncResponse = await this.sendSyncRequestToLocus(dataSet, mismatchedLeavesData);
|
|
1275
|
+
|
|
1276
|
+
// sync API may return nothing (in that case data will arrive via messages)
|
|
1277
|
+
// or it may return a response in the same format as messages
|
|
1278
|
+
if (syncResponse) {
|
|
1279
|
+
this.handleMessage(syncResponse, 'via sync API');
|
|
1280
|
+
}
|
|
1281
|
+
}
|
|
1282
|
+
} catch (error) {
|
|
1283
|
+
if (error instanceof MeetingEndedError) {
|
|
1284
|
+
this.callLocusInfoUpdateCallback({
|
|
1285
|
+
updateType: LocusInfoUpdateType.MEETING_ENDED,
|
|
1286
|
+
});
|
|
1287
|
+
} else {
|
|
1288
|
+
LoggerProxy.logger.warn(
|
|
1289
|
+
`HashTreeParser#performSync --> ${this.debugId} error during sync for data set "${dataSet.name}":`,
|
|
1290
|
+
error
|
|
1291
|
+
);
|
|
1292
|
+
}
|
|
1293
|
+
}
|
|
1294
|
+
}
|
|
1295
|
+
|
|
902
1296
|
/**
|
|
903
1297
|
* Runs the sync algorithm for the given data set.
|
|
904
1298
|
*
|
|
@@ -957,55 +1351,11 @@ class HashTreeParser {
|
|
|
957
1351
|
const rootHash = dataSet.hashTree.getRootHash();
|
|
958
1352
|
|
|
959
1353
|
if (dataSet.root !== rootHash) {
|
|
960
|
-
|
|
961
|
-
|
|
1354
|
+
await this.performSync(
|
|
1355
|
+
dataSet,
|
|
1356
|
+
rootHash,
|
|
1357
|
+
`Root hash mismatch: received=${dataSet.root}, ours=${rootHash}`
|
|
962
1358
|
);
|
|
963
|
-
|
|
964
|
-
const mismatchedLeavesData: Record<number, LeafDataItem[]> = {};
|
|
965
|
-
|
|
966
|
-
if (dataSet.leafCount !== 1) {
|
|
967
|
-
let receivedHashes;
|
|
968
|
-
|
|
969
|
-
try {
|
|
970
|
-
// request hashes from sender
|
|
971
|
-
const {hashes, dataSet: latestDataSetInfo} = await this.getHashesFromLocus(
|
|
972
|
-
dataSet.name
|
|
973
|
-
);
|
|
974
|
-
|
|
975
|
-
receivedHashes = hashes;
|
|
976
|
-
|
|
977
|
-
dataSet.hashTree.resize(latestDataSetInfo.leafCount);
|
|
978
|
-
} catch (error) {
|
|
979
|
-
if (error.statusCode === 409) {
|
|
980
|
-
// this is a leaf count mismatch, we should do nothing, just wait for another heartbeat message from Locus
|
|
981
|
-
LoggerProxy.logger.info(
|
|
982
|
-
`HashTreeParser#getHashesFromLocus --> ${this.debugId} Got 409 when fetching hashes for data set "${dataSet.name}": ${error.message}`
|
|
983
|
-
);
|
|
984
|
-
|
|
985
|
-
return;
|
|
986
|
-
}
|
|
987
|
-
throw error;
|
|
988
|
-
}
|
|
989
|
-
|
|
990
|
-
// identify mismatched leaves
|
|
991
|
-
const mismatchedLeaveIndexes = dataSet.hashTree.diffHashes(receivedHashes);
|
|
992
|
-
|
|
993
|
-
mismatchedLeaveIndexes.forEach((index) => {
|
|
994
|
-
mismatchedLeavesData[index] = dataSet.hashTree.getLeafData(index);
|
|
995
|
-
});
|
|
996
|
-
} else {
|
|
997
|
-
mismatchedLeavesData[0] = dataSet.hashTree.getLeafData(0);
|
|
998
|
-
}
|
|
999
|
-
// request sync for mismatched leaves
|
|
1000
|
-
if (Object.keys(mismatchedLeavesData).length > 0) {
|
|
1001
|
-
const syncResponse = await this.sendSyncRequestToLocus(dataSet, mismatchedLeavesData);
|
|
1002
|
-
|
|
1003
|
-
// sync API may return nothing (in that case data will arrive via messages)
|
|
1004
|
-
// or it may return a response in the same format as messages
|
|
1005
|
-
if (syncResponse) {
|
|
1006
|
-
this.handleMessage(syncResponse, 'via sync API');
|
|
1007
|
-
}
|
|
1008
|
-
}
|
|
1009
1359
|
} else {
|
|
1010
1360
|
LoggerProxy.logger.info(
|
|
1011
1361
|
`HashTreeParser#runSyncAlgorithm --> ${this.debugId} "${dataSet.name}" root hash matching: ${rootHash}, version=${dataSet.version}`
|
|
@@ -1019,6 +1369,52 @@ class HashTreeParser {
|
|
|
1019
1369
|
}
|
|
1020
1370
|
}
|
|
1021
1371
|
|
|
1372
|
+
/**
|
|
1373
|
+
* Resets the heartbeat watchdog timers for the specified data sets. Each data set has its own
|
|
1374
|
+
* watchdog timer that monitors whether heartbeats are being received within the expected interval.
|
|
1375
|
+
* If a heartbeat is not received for a specific data set within heartbeatIntervalMs plus
|
|
1376
|
+
* a backoff-calculated time, the sync algorithm is initiated for that data set
|
|
1377
|
+
*
|
|
1378
|
+
* @param {Array<DataSet>} receivedDataSets - The data sets from the received message for which watchdog timers should be reset
|
|
1379
|
+
* @returns {void}
|
|
1380
|
+
*/
|
|
1381
|
+
private resetHeartbeatWatchdogs(receivedDataSets: Array<DataSet>): void {
|
|
1382
|
+
if (!this.heartbeatIntervalMs) {
|
|
1383
|
+
return;
|
|
1384
|
+
}
|
|
1385
|
+
|
|
1386
|
+
for (const receivedDataSet of receivedDataSets) {
|
|
1387
|
+
const dataSet = this.dataSets[receivedDataSet.name];
|
|
1388
|
+
|
|
1389
|
+
if (!dataSet?.hashTree) {
|
|
1390
|
+
// eslint-disable-next-line no-continue
|
|
1391
|
+
continue;
|
|
1392
|
+
}
|
|
1393
|
+
|
|
1394
|
+
if (dataSet.heartbeatWatchdogTimer) {
|
|
1395
|
+
clearTimeout(dataSet.heartbeatWatchdogTimer);
|
|
1396
|
+
dataSet.heartbeatWatchdogTimer = undefined;
|
|
1397
|
+
}
|
|
1398
|
+
|
|
1399
|
+
const backoffTime = this.getWeightedBackoffTime(dataSet.backoff);
|
|
1400
|
+
const delay = this.heartbeatIntervalMs + backoffTime;
|
|
1401
|
+
|
|
1402
|
+
dataSet.heartbeatWatchdogTimer = setTimeout(async () => {
|
|
1403
|
+
dataSet.heartbeatWatchdogTimer = undefined;
|
|
1404
|
+
|
|
1405
|
+
LoggerProxy.logger.warn(
|
|
1406
|
+
`HashTreeParser#resetHeartbeatWatchdogs --> ${this.debugId} Heartbeat watchdog fired for data set "${dataSet.name}" - no heartbeat received within expected interval, initiating sync`
|
|
1407
|
+
);
|
|
1408
|
+
|
|
1409
|
+
await this.performSync(
|
|
1410
|
+
dataSet,
|
|
1411
|
+
dataSet.hashTree.getRootHash(),
|
|
1412
|
+
`heartbeat watchdog expired`
|
|
1413
|
+
);
|
|
1414
|
+
}, delay);
|
|
1415
|
+
}
|
|
1416
|
+
}
|
|
1417
|
+
|
|
1022
1418
|
/**
|
|
1023
1419
|
* Stops all timers for the data sets to prevent any further sync attempts.
|
|
1024
1420
|
* @returns {void}
|
|
@@ -1029,15 +1425,100 @@ class HashTreeParser {
|
|
|
1029
1425
|
clearTimeout(dataSet.timer);
|
|
1030
1426
|
dataSet.timer = undefined;
|
|
1031
1427
|
}
|
|
1428
|
+
if (dataSet.heartbeatWatchdogTimer) {
|
|
1429
|
+
clearTimeout(dataSet.heartbeatWatchdogTimer);
|
|
1430
|
+
dataSet.heartbeatWatchdogTimer = undefined;
|
|
1431
|
+
}
|
|
1032
1432
|
});
|
|
1033
1433
|
}
|
|
1034
1434
|
|
|
1435
|
+
/**
|
|
1436
|
+
* Stops the HashTreeParser, preventing it from processing any further messages and clearing all timers.
|
|
1437
|
+
* It also clears all the hash trees, so if the parser is resumed later, it will need to do a sync
|
|
1438
|
+
* to be up-to-date.
|
|
1439
|
+
* @returns {void}
|
|
1440
|
+
*/
|
|
1441
|
+
public stop() {
|
|
1442
|
+
LoggerProxy.logger.info(
|
|
1443
|
+
`HashTreeParser#stop --> ${this.debugId} Stopping HashTreeParser, clearing timers and hash trees`
|
|
1444
|
+
);
|
|
1445
|
+
this.stopAllTimers();
|
|
1446
|
+
Object.values(this.dataSets).forEach((dataSet) => {
|
|
1447
|
+
dataSet.hashTree = undefined;
|
|
1448
|
+
});
|
|
1449
|
+
this.visibleDataSets = [];
|
|
1450
|
+
this.state = 'stopped';
|
|
1451
|
+
}
|
|
1452
|
+
|
|
1453
|
+
/**
|
|
1454
|
+
* Resumes the HashTreeParser that was previously stopped.
|
|
1455
|
+
* @param {HashTreeMessage} message - The message to resume with, it must contain metadata with visible data sets info
|
|
1456
|
+
* @returns {void}
|
|
1457
|
+
*/
|
|
1458
|
+
public resume(message: HashTreeMessage) {
|
|
1459
|
+
// check that message contains metadata with visible data sets - this is essential to be able to resume
|
|
1460
|
+
const metadataObject = message.locusStateElements?.find((el) => isMetadata(el));
|
|
1461
|
+
|
|
1462
|
+
if (!metadataObject?.data?.visibleDataSets) {
|
|
1463
|
+
LoggerProxy.logger.warn(
|
|
1464
|
+
`HashTreeParser#resume --> ${this.debugId} Cannot resume HashTreeParser because the message is missing metadata with visible data sets info`
|
|
1465
|
+
);
|
|
1466
|
+
|
|
1467
|
+
return;
|
|
1468
|
+
}
|
|
1469
|
+
this.setVisibleDataSets(
|
|
1470
|
+
metadataObject.data.visibleDataSets as VisibleDataSetInfo[],
|
|
1471
|
+
message.dataSets
|
|
1472
|
+
);
|
|
1473
|
+
|
|
1474
|
+
this.dataSets = {};
|
|
1475
|
+
|
|
1476
|
+
for (const dataSet of message.dataSets) {
|
|
1477
|
+
const {name, leafCount} = dataSet;
|
|
1478
|
+
|
|
1479
|
+
this.dataSets[name] = {
|
|
1480
|
+
...dataSet,
|
|
1481
|
+
hashTree: this.isVisibleDataSet(name) ? new HashTree([], leafCount) : undefined,
|
|
1482
|
+
};
|
|
1483
|
+
}
|
|
1484
|
+
LoggerProxy.logger.info(
|
|
1485
|
+
`HashTreeParser#resume --> ${
|
|
1486
|
+
this.debugId
|
|
1487
|
+
} Resuming HashTreeParser with data sets: ${Object.keys(this.dataSets).join(
|
|
1488
|
+
', '
|
|
1489
|
+
)}, visible data sets: ${this.visibleDataSets.map((ds) => ds.name).join(', ')}`
|
|
1490
|
+
);
|
|
1491
|
+
this.state = 'active';
|
|
1492
|
+
|
|
1493
|
+
this.handleMessage(message, 'on resume');
|
|
1494
|
+
}
|
|
1495
|
+
|
|
1496
|
+
/**
 * Examines an HTTP error from a Locus request and, when it matches one of the
 * "meeting ended" sentinel responses (409 with errorCode 2403004, or a plain 404)
 * for an eligible data set, stops all timers and throws MeetingEndedError so the
 * caller can tear down.
 *
 * @param {any} error - the HTTP error received from a Locus request
 * @param {string} [dataSetName] - name of the data set the failed request was for, if any
 * @returns {void}
 * @throws {MeetingEndedError} when the error indicates the meeting has ended
 */
private checkForSentinelHttpResponse(error: any, dataSetName?: string) {
  // requests not tied to a data set are always eligible; otherwise the data set
  // must be one of the known sentinel-capable ones
  const isValidDataSetForSentinel =
    dataSetName === undefined ||
    PossibleSentinelMessageDataSetNames.includes(dataSetName.toLowerCase());

  const isConflictSentinel = error.statusCode === 409 && error.body?.errorCode === 2403004;
  const isNotFound = error.statusCode === 404;

  if ((isConflictSentinel || isNotFound) && isValidDataSetForSentinel) {
    LoggerProxy.logger.info(
      `HashTreeParser#checkForSentinelHttpResponse --> ${this.debugId} Received ${error.statusCode} for data set "${dataSetName}", indicating that the meeting has ended`
    );
    this.stopAllTimers();

    throw new MeetingEndedError();
  }
}
|
|
1514
|
+
|
|
1035
1515
|
/**
|
|
1036
1516
|
* Gets the current hashes from the locus for a specific data set.
|
|
1037
1517
|
* @param {string} dataSetName
|
|
1518
|
+
* @param {string} currentRootHash
|
|
1038
1519
|
* @returns {string[]}
|
|
1039
1520
|
*/
|
|
1040
|
-
private getHashesFromLocus(dataSetName: string) {
|
|
1521
|
+
private getHashesFromLocus(dataSetName: string, currentRootHash: string) {
|
|
1041
1522
|
LoggerProxy.logger.info(
|
|
1042
1523
|
`HashTreeParser#getHashesFromLocus --> ${this.debugId} Requesting hashes for data set "${dataSetName}"`
|
|
1043
1524
|
);
|
|
@@ -1049,6 +1530,9 @@ class HashTreeParser {
|
|
|
1049
1530
|
return this.webexRequest({
|
|
1050
1531
|
method: HTTP_VERBS.GET,
|
|
1051
1532
|
uri: url,
|
|
1533
|
+
qs: {
|
|
1534
|
+
rootHash: currentRootHash,
|
|
1535
|
+
},
|
|
1052
1536
|
})
|
|
1053
1537
|
.then((response) => {
|
|
1054
1538
|
const hashes = response.body?.hashes as string[] | undefined;
|
|
@@ -1078,6 +1562,8 @@ class HashTreeParser {
|
|
|
1078
1562
|
`HashTreeParser#getHashesFromLocus --> ${this.debugId} Error ${error.statusCode} fetching hashes for data set "${dataSetName}":`,
|
|
1079
1563
|
error
|
|
1080
1564
|
);
|
|
1565
|
+
this.checkForSentinelHttpResponse(error, dataSet.name);
|
|
1566
|
+
|
|
1081
1567
|
throw error;
|
|
1082
1568
|
});
|
|
1083
1569
|
}
|
|
@@ -1110,9 +1596,14 @@ class HashTreeParser {
|
|
|
1110
1596
|
});
|
|
1111
1597
|
});
|
|
1112
1598
|
|
|
1599
|
+
const ourCurrentRootHash = dataSet.hashTree ? dataSet.hashTree.getRootHash() : EMPTY_HASH;
|
|
1600
|
+
|
|
1113
1601
|
return this.webexRequest({
|
|
1114
1602
|
method: HTTP_VERBS.POST,
|
|
1115
1603
|
uri: url,
|
|
1604
|
+
qs: {
|
|
1605
|
+
rootHash: ourCurrentRootHash,
|
|
1606
|
+
},
|
|
1116
1607
|
body,
|
|
1117
1608
|
})
|
|
1118
1609
|
.then((resp) => {
|
|
@@ -1135,6 +1626,8 @@ class HashTreeParser {
|
|
|
1135
1626
|
`HashTreeParser#sendSyncRequestToLocus --> ${this.debugId} Error ${error.statusCode} sending sync request for data set "${dataSet.name}":`,
|
|
1136
1627
|
error
|
|
1137
1628
|
);
|
|
1629
|
+
this.checkForSentinelHttpResponse(error, dataSet.name);
|
|
1630
|
+
|
|
1138
1631
|
throw error;
|
|
1139
1632
|
});
|
|
1140
1633
|
}
|