@webex/plugin-meetings 3.10.0-next.9 → 3.10.0-webex-services-ready.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/breakouts/breakout.js +1 -1
- package/dist/breakouts/index.js +1 -1
- package/dist/constants.js +11 -3
- package/dist/constants.js.map +1 -1
- package/dist/hashTree/constants.js +20 -0
- package/dist/hashTree/constants.js.map +1 -0
- package/dist/hashTree/hashTree.js +515 -0
- package/dist/hashTree/hashTree.js.map +1 -0
- package/dist/hashTree/hashTreeParser.js +1266 -0
- package/dist/hashTree/hashTreeParser.js.map +1 -0
- package/dist/hashTree/types.js +21 -0
- package/dist/hashTree/types.js.map +1 -0
- package/dist/hashTree/utils.js +48 -0
- package/dist/hashTree/utils.js.map +1 -0
- package/dist/interpretation/index.js +1 -1
- package/dist/interpretation/siLanguage.js +1 -1
- package/dist/locus-info/index.js +511 -48
- package/dist/locus-info/index.js.map +1 -1
- package/dist/locus-info/types.js +7 -0
- package/dist/locus-info/types.js.map +1 -0
- package/dist/meeting/index.js +41 -15
- package/dist/meeting/index.js.map +1 -1
- package/dist/meeting/util.js +1 -0
- package/dist/meeting/util.js.map +1 -1
- package/dist/meetings/index.js +112 -70
- package/dist/meetings/index.js.map +1 -1
- package/dist/metrics/constants.js +3 -1
- package/dist/metrics/constants.js.map +1 -1
- package/dist/reachability/clusterReachability.js +44 -358
- package/dist/reachability/clusterReachability.js.map +1 -1
- package/dist/reachability/reachability.types.js +14 -1
- package/dist/reachability/reachability.types.js.map +1 -1
- package/dist/reachability/reachabilityPeerConnection.js +445 -0
- package/dist/reachability/reachabilityPeerConnection.js.map +1 -0
- package/dist/types/constants.d.ts +26 -21
- package/dist/types/hashTree/constants.d.ts +8 -0
- package/dist/types/hashTree/hashTree.d.ts +129 -0
- package/dist/types/hashTree/hashTreeParser.d.ts +260 -0
- package/dist/types/hashTree/types.d.ts +25 -0
- package/dist/types/hashTree/utils.d.ts +9 -0
- package/dist/types/locus-info/index.d.ts +91 -42
- package/dist/types/locus-info/types.d.ts +46 -0
- package/dist/types/meeting/index.d.ts +22 -9
- package/dist/types/meetings/index.d.ts +9 -2
- package/dist/types/metrics/constants.d.ts +2 -0
- package/dist/types/reachability/clusterReachability.d.ts +10 -88
- package/dist/types/reachability/reachability.types.d.ts +12 -1
- package/dist/types/reachability/reachabilityPeerConnection.d.ts +111 -0
- package/dist/webinar/index.js +1 -1
- package/package.json +22 -21
- package/src/constants.ts +13 -1
- package/src/hashTree/constants.ts +9 -0
- package/src/hashTree/hashTree.ts +463 -0
- package/src/hashTree/hashTreeParser.ts +1161 -0
- package/src/hashTree/types.ts +30 -0
- package/src/hashTree/utils.ts +42 -0
- package/src/locus-info/index.ts +556 -85
- package/src/locus-info/types.ts +48 -0
- package/src/meeting/index.ts +58 -26
- package/src/meeting/util.ts +1 -0
- package/src/meetings/index.ts +104 -51
- package/src/metrics/constants.ts +2 -0
- package/src/reachability/clusterReachability.ts +50 -347
- package/src/reachability/reachability.types.ts +15 -1
- package/src/reachability/reachabilityPeerConnection.ts +416 -0
- package/test/unit/spec/hashTree/hashTree.ts +655 -0
- package/test/unit/spec/hashTree/hashTreeParser.ts +1532 -0
- package/test/unit/spec/hashTree/utils.ts +103 -0
- package/test/unit/spec/locus-info/index.js +667 -1
- package/test/unit/spec/meeting/index.js +91 -20
- package/test/unit/spec/meeting/utils.js +77 -0
- package/test/unit/spec/meetings/index.js +71 -26
- package/test/unit/spec/reachability/clusterReachability.ts +281 -138
|
@@ -0,0 +1,1266 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
var _Reflect$construct = require("@babel/runtime-corejs2/core-js/reflect/construct");
|
|
4
|
+
var _Array$from = require("@babel/runtime-corejs2/core-js/array/from");
|
|
5
|
+
var _Symbol = require("@babel/runtime-corejs2/core-js/symbol");
|
|
6
|
+
var _Symbol$iterator = require("@babel/runtime-corejs2/core-js/symbol/iterator");
|
|
7
|
+
var _Array$isArray2 = require("@babel/runtime-corejs2/core-js/array/is-array");
|
|
8
|
+
var _Object$keys3 = require("@babel/runtime-corejs2/core-js/object/keys");
|
|
9
|
+
var _Object$getOwnPropertySymbols = require("@babel/runtime-corejs2/core-js/object/get-own-property-symbols");
|
|
10
|
+
var _Object$getOwnPropertyDescriptor = require("@babel/runtime-corejs2/core-js/object/get-own-property-descriptor");
|
|
11
|
+
var _Object$getOwnPropertyDescriptors = require("@babel/runtime-corejs2/core-js/object/get-own-property-descriptors");
|
|
12
|
+
var _Object$defineProperties = require("@babel/runtime-corejs2/core-js/object/define-properties");
|
|
13
|
+
var _Object$defineProperty = require("@babel/runtime-corejs2/core-js/object/define-property");
|
|
14
|
+
var _interopRequireDefault = require("@babel/runtime-corejs2/helpers/interopRequireDefault");
|
|
15
|
+
_Object$defineProperty(exports, "__esModule", {
|
|
16
|
+
value: true
|
|
17
|
+
});
|
|
18
|
+
exports.default = exports.LocusInfoUpdateType = void 0;
|
|
19
|
+
exports.isSelf = isSelf;
|
|
20
|
+
var _regenerator = _interopRequireDefault(require("@babel/runtime-corejs2/regenerator"));
|
|
21
|
+
var _stringify = _interopRequireDefault(require("@babel/runtime-corejs2/core-js/json/stringify"));
|
|
22
|
+
var _promise = _interopRequireDefault(require("@babel/runtime-corejs2/core-js/promise"));
|
|
23
|
+
var _isArray = _interopRequireDefault(require("@babel/runtime-corejs2/core-js/array/is-array"));
|
|
24
|
+
var _keys = _interopRequireDefault(require("@babel/runtime-corejs2/core-js/object/keys"));
|
|
25
|
+
var _values = _interopRequireDefault(require("@babel/runtime-corejs2/core-js/object/values"));
|
|
26
|
+
var _parseInt2 = _interopRequireDefault(require("@babel/runtime-corejs2/core-js/parse-int"));
|
|
27
|
+
var _slicedToArray2 = _interopRequireDefault(require("@babel/runtime-corejs2/helpers/slicedToArray"));
|
|
28
|
+
var _typeof2 = _interopRequireDefault(require("@babel/runtime-corejs2/helpers/typeof"));
|
|
29
|
+
var _toConsumableArray2 = _interopRequireDefault(require("@babel/runtime-corejs2/helpers/toConsumableArray"));
|
|
30
|
+
var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime-corejs2/helpers/asyncToGenerator"));
|
|
31
|
+
var _defineProperty2 = _interopRequireDefault(require("@babel/runtime-corejs2/helpers/defineProperty"));
|
|
32
|
+
var _createClass2 = _interopRequireDefault(require("@babel/runtime-corejs2/helpers/createClass"));
|
|
33
|
+
var _classCallCheck2 = _interopRequireDefault(require("@babel/runtime-corejs2/helpers/classCallCheck"));
|
|
34
|
+
var _possibleConstructorReturn2 = _interopRequireDefault(require("@babel/runtime-corejs2/helpers/possibleConstructorReturn"));
|
|
35
|
+
var _getPrototypeOf2 = _interopRequireDefault(require("@babel/runtime-corejs2/helpers/getPrototypeOf"));
|
|
36
|
+
var _inherits2 = _interopRequireDefault(require("@babel/runtime-corejs2/helpers/inherits"));
|
|
37
|
+
var _wrapNativeSuper2 = _interopRequireDefault(require("@babel/runtime-corejs2/helpers/wrapNativeSuper"));
|
|
38
|
+
var _lodash = require("lodash");
|
|
39
|
+
var _hashTree = _interopRequireDefault(require("./hashTree"));
|
|
40
|
+
var _loggerProxy = _interopRequireDefault(require("../common/logs/logger-proxy"));
|
|
41
|
+
var _constants = require("../constants");
|
|
42
|
+
var _constants2 = require("./constants");
|
|
43
|
+
var _types = require("./types");
|
|
44
|
+
var _utils = require("./utils");
|
|
45
|
+
function ownKeys(e, r) { var t = _Object$keys3(e); if (_Object$getOwnPropertySymbols) { var o = _Object$getOwnPropertySymbols(e); r && (o = o.filter(function (r) { return _Object$getOwnPropertyDescriptor(e, r).enumerable; })), t.push.apply(t, o); } return t; }
|
|
46
|
+
function _objectSpread(e) { for (var r = 1; r < arguments.length; r++) { var t = null != arguments[r] ? arguments[r] : {}; r % 2 ? ownKeys(Object(t), !0).forEach(function (r) { (0, _defineProperty2.default)(e, r, t[r]); }) : _Object$getOwnPropertyDescriptors ? _Object$defineProperties(e, _Object$getOwnPropertyDescriptors(t)) : ownKeys(Object(t)).forEach(function (r) { _Object$defineProperty(e, r, _Object$getOwnPropertyDescriptor(t, r)); }); } return e; }
|
|
47
|
+
function _createForOfIteratorHelper(r, e) { var t = "undefined" != typeof _Symbol && r[_Symbol$iterator] || r["@@iterator"]; if (!t) { if (_Array$isArray2(r) || (t = _unsupportedIterableToArray(r)) || e && r && "number" == typeof r.length) { t && (r = t); var _n = 0, F = function F() {}; return { s: F, n: function n() { return _n >= r.length ? { done: !0 } : { done: !1, value: r[_n++] }; }, e: function e(r) { throw r; }, f: F }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } var o, a = !0, u = !1; return { s: function s() { t = t.call(r); }, n: function n() { var r = t.next(); return a = r.done, r; }, e: function e(r) { u = !0, o = r; }, f: function f() { try { a || null == t.return || t.return(); } finally { if (u) throw o; } } }; }
|
|
48
|
+
function _unsupportedIterableToArray(r, a) { if (r) { if ("string" == typeof r) return _arrayLikeToArray(r, a); var t = {}.toString.call(r).slice(8, -1); return "Object" === t && r.constructor && (t = r.constructor.name), "Map" === t || "Set" === t ? _Array$from(r) : "Arguments" === t || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(t) ? _arrayLikeToArray(r, a) : void 0; } }
|
|
49
|
+
function _arrayLikeToArray(r, a) { (null == a || a > r.length) && (a = r.length); for (var e = 0, n = Array(a); e < a; e++) n[e] = r[e]; return n; }
|
|
50
|
+
function _callSuper(t, o, e) { return o = (0, _getPrototypeOf2.default)(o), (0, _possibleConstructorReturn2.default)(t, _isNativeReflectConstruct() ? _Reflect$construct(o, e || [], (0, _getPrototypeOf2.default)(t).constructor) : o.apply(t, e)); }
|
|
51
|
+
function _isNativeReflectConstruct() { try { var t = !Boolean.prototype.valueOf.call(_Reflect$construct(Boolean, [], function () {})); } catch (t) {} return (_isNativeReflectConstruct = function _isNativeReflectConstruct() { return !!t; })(); }
|
|
52
|
+
var LocusInfoUpdateType = exports.LocusInfoUpdateType = {
|
|
53
|
+
OBJECTS_UPDATED: 'OBJECTS_UPDATED',
|
|
54
|
+
MEETING_ENDED: 'MEETING_ENDED'
|
|
55
|
+
};
|
|
56
|
+
/**
|
|
57
|
+
* This error is thrown if we receive information that the meeting has ended while we're processing some hash messages.
|
|
58
|
+
* It's handled internally by HashTreeParser and results in MEETING_ENDED being sent up.
|
|
59
|
+
*/
|
|
60
|
+
var MeetingEndedError = /*#__PURE__*/function (_Error) {
|
|
61
|
+
function MeetingEndedError() {
|
|
62
|
+
(0, _classCallCheck2.default)(this, MeetingEndedError);
|
|
63
|
+
return _callSuper(this, MeetingEndedError, arguments);
|
|
64
|
+
}
|
|
65
|
+
(0, _inherits2.default)(MeetingEndedError, _Error);
|
|
66
|
+
return (0, _createClass2.default)(MeetingEndedError);
|
|
67
|
+
}(/*#__PURE__*/(0, _wrapNativeSuper2.default)(Error));
|
|
68
|
+
/**
|
|
69
|
+
* Checks if the given hash tree object is of type "self"
|
|
70
|
+
* @param {HashTreeObject} object object to check
|
|
71
|
+
* @returns {boolean} True if the object is of type "self", false otherwise
|
|
72
|
+
*/
|
|
73
|
+
function isSelf(object) {
|
|
74
|
+
return object.htMeta.elementId.type.toLowerCase() === _types.ObjectType.self;
|
|
75
|
+
}
|
|
76
|
+
|
|
77
|
+
/**
|
|
78
|
+
* Parses hash tree eventing locus data
|
|
79
|
+
*/
|
|
80
|
+
var HashTreeParser = /*#__PURE__*/function () {
|
|
81
|
+
/**
|
|
82
|
+
* Constructor for HashTreeParser
|
|
83
|
+
* @param {Object} options
|
|
84
|
+
* @param {Object} options.initialLocus The initial locus data containing the hash tree information
|
|
85
|
+
*/
|
|
86
|
+
function HashTreeParser(options) {
|
|
87
|
+
var _locus$self;
|
|
88
|
+
(0, _classCallCheck2.default)(this, HashTreeParser);
|
|
89
|
+
(0, _defineProperty2.default)(this, "dataSets", {});
|
|
90
|
+
(0, _defineProperty2.default)(this, "visibleDataSetsUrl", void 0);
|
|
91
|
+
// url from which we can get info about all data sets
|
|
92
|
+
(0, _defineProperty2.default)(this, "webexRequest", void 0);
|
|
93
|
+
(0, _defineProperty2.default)(this, "locusInfoUpdateCallback", void 0);
|
|
94
|
+
(0, _defineProperty2.default)(this, "visibleDataSets", void 0);
|
|
95
|
+
(0, _defineProperty2.default)(this, "debugId", void 0);
|
|
96
|
+
var _options$initialLocus = options.initialLocus,
|
|
97
|
+
dataSets = _options$initialLocus.dataSets,
|
|
98
|
+
locus = _options$initialLocus.locus; // extract dataSets from initialLocus
|
|
99
|
+
|
|
100
|
+
this.debugId = options.debugId;
|
|
101
|
+
this.webexRequest = options.webexRequest;
|
|
102
|
+
this.locusInfoUpdateCallback = options.locusInfoUpdateCallback;
|
|
103
|
+
this.visibleDataSets = (locus === null || locus === void 0 ? void 0 : (_locus$self = locus.self) === null || _locus$self === void 0 ? void 0 : _locus$self.visibleDataSets) || [];
|
|
104
|
+
if (this.visibleDataSets.length === 0) {
|
|
105
|
+
_loggerProxy.default.logger.warn("HashTreeParser#constructor --> ".concat(this.debugId, " No visibleDataSets found in locus.self"));
|
|
106
|
+
}
|
|
107
|
+
// object mapping dataset names to arrays of leaf data
|
|
108
|
+
var leafData = this.analyzeLocusHtMeta(locus);
|
|
109
|
+
_loggerProxy.default.logger.info("HashTreeParser#constructor --> creating HashTreeParser for datasets: ".concat((0, _stringify.default)(dataSets.map(function (ds) {
|
|
110
|
+
return ds.name;
|
|
111
|
+
}))));
|
|
112
|
+
var _iterator = _createForOfIteratorHelper(dataSets),
|
|
113
|
+
_step;
|
|
114
|
+
try {
|
|
115
|
+
for (_iterator.s(); !(_step = _iterator.n()).done;) {
|
|
116
|
+
var dataSet = _step.value;
|
|
117
|
+
var name = dataSet.name,
|
|
118
|
+
leafCount = dataSet.leafCount;
|
|
119
|
+
this.dataSets[name] = _objectSpread(_objectSpread({}, dataSet), {}, {
|
|
120
|
+
hashTree: this.visibleDataSets.includes(name) ? new _hashTree.default(leafData[name] || [], leafCount) : undefined
|
|
121
|
+
});
|
|
122
|
+
}
|
|
123
|
+
} catch (err) {
|
|
124
|
+
_iterator.e(err);
|
|
125
|
+
} finally {
|
|
126
|
+
_iterator.f();
|
|
127
|
+
}
|
|
128
|
+
}
|
|
129
|
+
|
|
130
|
+
/**
|
|
131
|
+
* Initializes a new visible data set by creating a hash tree for it, adding it to all the internal structures,
|
|
132
|
+
* and sending an initial sync request to Locus with empty leaf data - that will trigger Locus to gives us all the data
|
|
133
|
+
* from that dataset (in the response or via messages).
|
|
134
|
+
*
|
|
135
|
+
* @param {DataSet} dataSet The new data set to be added
|
|
136
|
+
* @returns {Promise}
|
|
137
|
+
*/
|
|
138
|
+
return (0, _createClass2.default)(HashTreeParser, [{
|
|
139
|
+
key: "initializeNewVisibleDataSet",
|
|
140
|
+
value: function initializeNewVisibleDataSet(dataSet) {
|
|
141
|
+
if (this.visibleDataSets.includes(dataSet.name)) {
|
|
142
|
+
_loggerProxy.default.logger.info("HashTreeParser#initializeNewVisibleDataSet --> ".concat(this.debugId, " Data set \"").concat(dataSet.name, "\" already exists, skipping init"));
|
|
143
|
+
return _promise.default.resolve({
|
|
144
|
+
updateType: LocusInfoUpdateType.OBJECTS_UPDATED,
|
|
145
|
+
updatedObjects: []
|
|
146
|
+
});
|
|
147
|
+
}
|
|
148
|
+
_loggerProxy.default.logger.info("HashTreeParser#initializeNewVisibleDataSet --> ".concat(this.debugId, " Adding visible data set \"").concat(dataSet.name, "\""));
|
|
149
|
+
this.visibleDataSets.push(dataSet.name);
|
|
150
|
+
var hashTree = new _hashTree.default([], dataSet.leafCount);
|
|
151
|
+
this.dataSets[dataSet.name] = _objectSpread(_objectSpread({}, dataSet), {}, {
|
|
152
|
+
hashTree: hashTree
|
|
153
|
+
});
|
|
154
|
+
return this.sendInitializationSyncRequestToLocus(dataSet.name, 'new visible data set');
|
|
155
|
+
}
|
|
156
|
+
|
|
157
|
+
/**
|
|
158
|
+
* Sends a special sync request to Locus with all leaves empty - this is a way to get all the data for a given dataset.
|
|
159
|
+
*
|
|
160
|
+
* @param {string} datasetName - name of the dataset for which to send the request
|
|
161
|
+
* @param {string} debugText - text to include in logs
|
|
162
|
+
* @returns {Promise}
|
|
163
|
+
*/
|
|
164
|
+
}, {
|
|
165
|
+
key: "sendInitializationSyncRequestToLocus",
|
|
166
|
+
value: function sendInitializationSyncRequestToLocus(datasetName, debugText) {
|
|
167
|
+
var _this = this;
|
|
168
|
+
var dataset = this.dataSets[datasetName];
|
|
169
|
+
if (!dataset) {
|
|
170
|
+
_loggerProxy.default.logger.warn("HashTreeParser#sendInitializationSyncRequestToLocus --> ".concat(this.debugId, " No data set found for ").concat(datasetName, ", cannot send the request for leaf data"));
|
|
171
|
+
return _promise.default.resolve(null);
|
|
172
|
+
}
|
|
173
|
+
var emptyLeavesData = new Array(dataset.leafCount).fill([]);
|
|
174
|
+
_loggerProxy.default.logger.info("HashTreeParser#sendInitializationSyncRequestToLocus --> ".concat(this.debugId, " Sending initial sync request to Locus for data set \"").concat(datasetName, "\" with empty leaf data"));
|
|
175
|
+
return this.sendSyncRequestToLocus(this.dataSets[datasetName], emptyLeavesData).then(function (syncResponse) {
|
|
176
|
+
if (syncResponse) {
|
|
177
|
+
return _this.parseMessage(syncResponse, "via empty leaves /sync API call for ".concat(debugText));
|
|
178
|
+
}
|
|
179
|
+
return {
|
|
180
|
+
updateType: LocusInfoUpdateType.OBJECTS_UPDATED,
|
|
181
|
+
updatedObjects: []
|
|
182
|
+
};
|
|
183
|
+
});
|
|
184
|
+
}
|
|
185
|
+
|
|
186
|
+
/**
|
|
187
|
+
* Queries Locus for information about all the data sets
|
|
188
|
+
*
|
|
189
|
+
* @param {string} url - url from which we can get info about all data sets
|
|
190
|
+
* @returns {Promise}
|
|
191
|
+
*/
|
|
192
|
+
}, {
|
|
193
|
+
key: "getAllDataSetsMetadata",
|
|
194
|
+
value: function getAllDataSetsMetadata(url) {
|
|
195
|
+
return this.webexRequest({
|
|
196
|
+
method: _constants.HTTP_VERBS.GET,
|
|
197
|
+
uri: url
|
|
198
|
+
}).then(function (response) {
|
|
199
|
+
return response.body.dataSets;
|
|
200
|
+
});
|
|
201
|
+
}
|
|
202
|
+
|
|
203
|
+
/**
|
|
204
|
+
* Initializes the hash tree parser from a message received from Locus.
|
|
205
|
+
*
|
|
206
|
+
* @param {HashTreeMessage} message - initial hash tree message received from Locus
|
|
207
|
+
* @returns {Promise}
|
|
208
|
+
*/
|
|
209
|
+
}, {
|
|
210
|
+
key: "initializeFromMessage",
|
|
211
|
+
value: (function () {
|
|
212
|
+
var _initializeFromMessage = (0, _asyncToGenerator2.default)(/*#__PURE__*/_regenerator.default.mark(function _callee(message) {
|
|
213
|
+
var dataSets;
|
|
214
|
+
return _regenerator.default.wrap(function (_context) {
|
|
215
|
+
while (1) switch (_context.prev = _context.next) {
|
|
216
|
+
case 0:
|
|
217
|
+
_loggerProxy.default.logger.info("HashTreeParser#initializeFromMessage --> ".concat(this.debugId, " visibleDataSetsUrl=").concat(message.visibleDataSetsUrl));
|
|
218
|
+
_context.next = 1;
|
|
219
|
+
return this.getAllDataSetsMetadata(message.visibleDataSetsUrl);
|
|
220
|
+
case 1:
|
|
221
|
+
dataSets = _context.sent;
|
|
222
|
+
_context.next = 2;
|
|
223
|
+
return this.initializeDataSets(dataSets, 'initialization from message');
|
|
224
|
+
case 2:
|
|
225
|
+
case "end":
|
|
226
|
+
return _context.stop();
|
|
227
|
+
}
|
|
228
|
+
}, _callee, this);
|
|
229
|
+
}));
|
|
230
|
+
function initializeFromMessage(_x) {
|
|
231
|
+
return _initializeFromMessage.apply(this, arguments);
|
|
232
|
+
}
|
|
233
|
+
return initializeFromMessage;
|
|
234
|
+
}()
|
|
235
|
+
/**
|
|
236
|
+
* Initializes the hash tree parser from GET /loci API response by fetching all data sets metadata
|
|
237
|
+
* first and then doing an initialization sync on each data set
|
|
238
|
+
*
|
|
239
|
+
* This function requires that this.visibleDataSets have been already populated correctly by the constructor.
|
|
240
|
+
*
|
|
241
|
+
* @param {LocusDTO} locus - locus object received from GET /loci
|
|
242
|
+
* @returns {Promise}
|
|
243
|
+
*/
|
|
244
|
+
)
|
|
245
|
+
}, {
|
|
246
|
+
key: "initializeFromGetLociResponse",
|
|
247
|
+
value: (function () {
|
|
248
|
+
var _initializeFromGetLociResponse = (0, _asyncToGenerator2.default)(/*#__PURE__*/_regenerator.default.mark(function _callee2(locus) {
|
|
249
|
+
var _locus$links, _locus$links$resource, _locus$links$resource2;
|
|
250
|
+
var dataSets;
|
|
251
|
+
return _regenerator.default.wrap(function (_context2) {
|
|
252
|
+
while (1) switch (_context2.prev = _context2.next) {
|
|
253
|
+
case 0:
|
|
254
|
+
if (locus !== null && locus !== void 0 && (_locus$links = locus.links) !== null && _locus$links !== void 0 && (_locus$links$resource = _locus$links.resources) !== null && _locus$links$resource !== void 0 && (_locus$links$resource2 = _locus$links$resource.visibleDataSets) !== null && _locus$links$resource2 !== void 0 && _locus$links$resource2.url) {
|
|
255
|
+
_context2.next = 1;
|
|
256
|
+
break;
|
|
257
|
+
}
|
|
258
|
+
_loggerProxy.default.logger.warn("HashTreeParser#initializeFromGetLociResponse --> ".concat(this.debugId, " missing visibleDataSets url in GET Loci response, cannot initialize hash trees"));
|
|
259
|
+
return _context2.abrupt("return");
|
|
260
|
+
case 1:
|
|
261
|
+
_loggerProxy.default.logger.info("HashTreeParser#initializeFromGetLociResponse --> ".concat(this.debugId, " visibleDataSets url: ").concat(locus.links.resources.visibleDataSets.url));
|
|
262
|
+
_context2.next = 2;
|
|
263
|
+
return this.getAllDataSetsMetadata(locus.links.resources.visibleDataSets.url);
|
|
264
|
+
case 2:
|
|
265
|
+
dataSets = _context2.sent;
|
|
266
|
+
_context2.next = 3;
|
|
267
|
+
return this.initializeDataSets(dataSets, 'initialization from GET /loci response');
|
|
268
|
+
case 3:
|
|
269
|
+
case "end":
|
|
270
|
+
return _context2.stop();
|
|
271
|
+
}
|
|
272
|
+
}, _callee2, this);
|
|
273
|
+
}));
|
|
274
|
+
function initializeFromGetLociResponse(_x2) {
|
|
275
|
+
return _initializeFromGetLociResponse.apply(this, arguments);
|
|
276
|
+
}
|
|
277
|
+
return initializeFromGetLociResponse;
|
|
278
|
+
}()
|
|
279
|
+
/**
|
|
280
|
+
* Initializes data sets by doing an initialization sync on each visible data set that doesn't have a hash tree yet.
|
|
281
|
+
*
|
|
282
|
+
* @param {DataSet[]} dataSets Array of DataSet objects to initialize
|
|
283
|
+
* @param {string} debugText Text to include in logs for debugging purposes
|
|
284
|
+
* @returns {Promise}
|
|
285
|
+
*/
|
|
286
|
+
)
|
|
287
|
+
}, {
|
|
288
|
+
key: "initializeDataSets",
|
|
289
|
+
value: (function () {
|
|
290
|
+
var _initializeDataSets = (0, _asyncToGenerator2.default)(/*#__PURE__*/_regenerator.default.mark(function _callee3(dataSets, debugText) {
|
|
291
|
+
var updatedObjects, _iterator2, _step2, dataSet, name, leafCount, _data, _t;
|
|
292
|
+
return _regenerator.default.wrap(function (_context3) {
|
|
293
|
+
while (1) switch (_context3.prev = _context3.next) {
|
|
294
|
+
case 0:
|
|
295
|
+
updatedObjects = [];
|
|
296
|
+
_iterator2 = _createForOfIteratorHelper(dataSets);
|
|
297
|
+
_context3.prev = 1;
|
|
298
|
+
_iterator2.s();
|
|
299
|
+
case 2:
|
|
300
|
+
if ((_step2 = _iterator2.n()).done) {
|
|
301
|
+
_context3.next = 6;
|
|
302
|
+
break;
|
|
303
|
+
}
|
|
304
|
+
dataSet = _step2.value;
|
|
305
|
+
name = dataSet.name, leafCount = dataSet.leafCount;
|
|
306
|
+
if (!this.dataSets[name]) {
|
|
307
|
+
_loggerProxy.default.logger.info("HashTreeParser#initializeDataSets --> ".concat(this.debugId, " initializing dataset \"").concat(name, "\" (").concat(debugText, ")"));
|
|
308
|
+
this.dataSets[name] = _objectSpread({}, dataSet);
|
|
309
|
+
} else {
|
|
310
|
+
_loggerProxy.default.logger.info("HashTreeParser#initializeDataSets --> ".concat(this.debugId, " dataset \"").concat(name, "\" already exists (").concat(debugText, ")"));
|
|
311
|
+
}
|
|
312
|
+
if (!(this.visibleDataSets.includes(name) && !this.dataSets[name].hashTree)) {
|
|
313
|
+
_context3.next = 5;
|
|
314
|
+
break;
|
|
315
|
+
}
|
|
316
|
+
_loggerProxy.default.logger.info("HashTreeParser#initializeDataSets --> ".concat(this.debugId, " creating hash tree for visible dataset \"").concat(name, "\" (").concat(debugText, ")"));
|
|
317
|
+
this.dataSets[name].hashTree = new _hashTree.default([], leafCount);
|
|
318
|
+
|
|
319
|
+
// eslint-disable-next-line no-await-in-loop
|
|
320
|
+
_context3.next = 3;
|
|
321
|
+
return this.sendInitializationSyncRequestToLocus(name, debugText);
|
|
322
|
+
case 3:
|
|
323
|
+
_data = _context3.sent;
|
|
324
|
+
if (!(_data.updateType === LocusInfoUpdateType.MEETING_ENDED)) {
|
|
325
|
+
_context3.next = 4;
|
|
326
|
+
break;
|
|
327
|
+
}
|
|
328
|
+
_loggerProxy.default.logger.warn("HashTreeParser#initializeDataSets --> ".concat(this.debugId, " meeting ended while initializing new visible data set \"").concat(name, "\""));
|
|
329
|
+
|
|
330
|
+
// throw an error, it will be caught higher up and the meeting will be destroyed
|
|
331
|
+
throw new MeetingEndedError();
|
|
332
|
+
case 4:
|
|
333
|
+
if (_data.updateType === LocusInfoUpdateType.OBJECTS_UPDATED) {
|
|
334
|
+
updatedObjects.push.apply(updatedObjects, (0, _toConsumableArray2.default)(_data.updatedObjects || []));
|
|
335
|
+
}
|
|
336
|
+
case 5:
|
|
337
|
+
_context3.next = 2;
|
|
338
|
+
break;
|
|
339
|
+
case 6:
|
|
340
|
+
_context3.next = 8;
|
|
341
|
+
break;
|
|
342
|
+
case 7:
|
|
343
|
+
_context3.prev = 7;
|
|
344
|
+
_t = _context3["catch"](1);
|
|
345
|
+
_iterator2.e(_t);
|
|
346
|
+
case 8:
|
|
347
|
+
_context3.prev = 8;
|
|
348
|
+
_iterator2.f();
|
|
349
|
+
return _context3.finish(8);
|
|
350
|
+
case 9:
|
|
351
|
+
this.callLocusInfoUpdateCallback({
|
|
352
|
+
updateType: LocusInfoUpdateType.OBJECTS_UPDATED,
|
|
353
|
+
updatedObjects: updatedObjects
|
|
354
|
+
});
|
|
355
|
+
case 10:
|
|
356
|
+
case "end":
|
|
357
|
+
return _context3.stop();
|
|
358
|
+
}
|
|
359
|
+
}, _callee3, this, [[1, 7, 8, 9]]);
|
|
360
|
+
}));
|
|
361
|
+
function initializeDataSets(_x3, _x4) {
|
|
362
|
+
return _initializeDataSets.apply(this, arguments);
|
|
363
|
+
}
|
|
364
|
+
return initializeDataSets;
|
|
365
|
+
}()
|
|
366
|
+
/**
|
|
367
|
+
* Each dataset exists at a different place in the dto
|
|
368
|
+
* iterate recursively over the locus and if it has a htMeta key,
|
|
369
|
+
* create an object with the type, id and version and add it to the appropriate leafData array
|
|
370
|
+
*
|
|
371
|
+
* @param {any} locus - The current part of the locus being processed
|
|
372
|
+
* @param {Object} [options]
|
|
373
|
+
* @param {boolean} [options.copyData=false] - Whether to copy the data for each leaf into returned result
|
|
374
|
+
* @returns {any} - An object mapping dataset names to arrays of leaf data
|
|
375
|
+
*/
|
|
376
|
+
)
|
|
377
|
+
}, {
|
|
378
|
+
key: "analyzeLocusHtMeta",
|
|
379
|
+
value: function analyzeLocusHtMeta(locus, options) {
|
|
380
|
+
var _ref = options || {},
|
|
381
|
+
_ref$copyData = _ref.copyData,
|
|
382
|
+
copyData = _ref$copyData === void 0 ? false : _ref$copyData;
|
|
383
|
+
// object mapping dataset names to arrays of leaf data
|
|
384
|
+
var leafInfo = {};
|
|
385
|
+
var _findAndStoreMetaData = function findAndStoreMetaData(currentLocusPart) {
|
|
386
|
+
if ((0, _typeof2.default)(currentLocusPart) !== 'object' || currentLocusPart === null) {
|
|
387
|
+
return;
|
|
388
|
+
}
|
|
389
|
+
if (currentLocusPart.htMeta && currentLocusPart.htMeta.dataSetNames) {
|
|
390
|
+
var _currentLocusPart$htM = currentLocusPart.htMeta.elementId,
|
|
391
|
+
type = _currentLocusPart$htM.type,
|
|
392
|
+
id = _currentLocusPart$htM.id,
|
|
393
|
+
version = _currentLocusPart$htM.version;
|
|
394
|
+
var dataSetNames = currentLocusPart.htMeta.dataSetNames;
|
|
395
|
+
var newLeafInfo = {
|
|
396
|
+
type: type,
|
|
397
|
+
id: id,
|
|
398
|
+
version: version
|
|
399
|
+
};
|
|
400
|
+
if (copyData) {
|
|
401
|
+
newLeafInfo.data = (0, _lodash.cloneDeep)(currentLocusPart);
|
|
402
|
+
|
|
403
|
+
// remove any nested other objects that have their own htMeta
|
|
404
|
+
(0, _utils.deleteNestedObjectsWithHtMeta)(newLeafInfo.data);
|
|
405
|
+
}
|
|
406
|
+
var _iterator3 = _createForOfIteratorHelper(dataSetNames),
|
|
407
|
+
_step3;
|
|
408
|
+
try {
|
|
409
|
+
for (_iterator3.s(); !(_step3 = _iterator3.n()).done;) {
|
|
410
|
+
var dataSetName = _step3.value;
|
|
411
|
+
if (!leafInfo[dataSetName]) {
|
|
412
|
+
leafInfo[dataSetName] = [];
|
|
413
|
+
}
|
|
414
|
+
leafInfo[dataSetName].push(newLeafInfo);
|
|
415
|
+
}
|
|
416
|
+
} catch (err) {
|
|
417
|
+
_iterator3.e(err);
|
|
418
|
+
} finally {
|
|
419
|
+
_iterator3.f();
|
|
420
|
+
}
|
|
421
|
+
}
|
|
422
|
+
if ((0, _isArray.default)(currentLocusPart)) {
|
|
423
|
+
var _iterator4 = _createForOfIteratorHelper(currentLocusPart),
|
|
424
|
+
_step4;
|
|
425
|
+
try {
|
|
426
|
+
for (_iterator4.s(); !(_step4 = _iterator4.n()).done;) {
|
|
427
|
+
var item = _step4.value;
|
|
428
|
+
_findAndStoreMetaData(item);
|
|
429
|
+
}
|
|
430
|
+
} catch (err) {
|
|
431
|
+
_iterator4.e(err);
|
|
432
|
+
} finally {
|
|
433
|
+
_iterator4.f();
|
|
434
|
+
}
|
|
435
|
+
} else {
|
|
436
|
+
for (var _i = 0, _Object$keys = (0, _keys.default)(currentLocusPart); _i < _Object$keys.length; _i++) {
|
|
437
|
+
var key = _Object$keys[_i];
|
|
438
|
+
if (Object.prototype.hasOwnProperty.call(currentLocusPart, key)) {
|
|
439
|
+
_findAndStoreMetaData(currentLocusPart[key]);
|
|
440
|
+
}
|
|
441
|
+
}
|
|
442
|
+
}
|
|
443
|
+
};
|
|
444
|
+
_findAndStoreMetaData(locus);
|
|
445
|
+
return leafInfo;
|
|
446
|
+
}
|
|
447
|
+
|
|
448
|
+
/**
|
|
449
|
+
* Checks if the provided hash tree message indicates the end of the meeting and that there won't be any more updates.
|
|
450
|
+
*
|
|
451
|
+
* @param {HashTreeMessage} message - The hash tree message to check
|
|
452
|
+
* @returns {boolean} - Returns true if the message indicates the end of the meeting, false otherwise
|
|
453
|
+
*/
|
|
454
|
+
}, {
|
|
455
|
+
key: "isEndMessage",
|
|
456
|
+
value: function isEndMessage(message) {
|
|
457
|
+
var mainDataSet = message.dataSets.find(function (dataSet) {
|
|
458
|
+
return dataSet.name.toLowerCase() === _constants2.DataSetNames.MAIN;
|
|
459
|
+
});
|
|
460
|
+
if (mainDataSet && mainDataSet.leafCount === 1 && mainDataSet.root === _constants2.EMPTY_HASH && this.dataSets[_constants2.DataSetNames.MAIN].version < mainDataSet.version) {
|
|
461
|
+
// this is a special way for Locus to indicate that this meeting has ended
|
|
462
|
+
return true;
|
|
463
|
+
}
|
|
464
|
+
return false;
|
|
465
|
+
}
|
|
466
|
+
|
|
467
|
+
/**
|
|
468
|
+
* Handles the root hash heartbeat message
|
|
469
|
+
*
|
|
470
|
+
* @param {RootHashMessage} message - The root hash heartbeat message
|
|
471
|
+
* @returns {void}
|
|
472
|
+
*/
|
|
473
|
+
}, {
|
|
474
|
+
key: "handleRootHashHeartBeatMessage",
|
|
475
|
+
value: function handleRootHashHeartBeatMessage(message) {
|
|
476
|
+
var _this2 = this;
|
|
477
|
+
var dataSets = message.dataSets;
|
|
478
|
+
_loggerProxy.default.logger.info("HashTreeParser#handleRootHashMessage --> ".concat(this.debugId, " Received heartbeat root hash message with data sets: ").concat((0, _stringify.default)(dataSets.map(function (_ref2) {
|
|
479
|
+
var name = _ref2.name,
|
|
480
|
+
root = _ref2.root,
|
|
481
|
+
leafCount = _ref2.leafCount,
|
|
482
|
+
version = _ref2.version;
|
|
483
|
+
return {
|
|
484
|
+
name: name,
|
|
485
|
+
root: root,
|
|
486
|
+
leafCount: leafCount,
|
|
487
|
+
version: version
|
|
488
|
+
};
|
|
489
|
+
}))));
|
|
490
|
+
dataSets.forEach(function (dataSet) {
|
|
491
|
+
_this2.updateDataSetInfo(dataSet);
|
|
492
|
+
_this2.runSyncAlgorithm(dataSet);
|
|
493
|
+
});
|
|
494
|
+
}
|
|
495
|
+
|
|
496
|
+
/**
|
|
497
|
+
* This method should be called when we receive a partial locus DTO that contains dataSets and htMeta information
|
|
498
|
+
* It updates the hash trees with the new leaf data based on the received Locus
|
|
499
|
+
*
|
|
500
|
+
* @param {Object} update - The locus update containing data sets and locus information
|
|
501
|
+
* @returns {void}
|
|
502
|
+
*/
|
|
503
|
+
}, {
|
|
504
|
+
key: "handleLocusUpdate",
|
|
505
|
+
value: function handleLocusUpdate(update) {
|
|
506
|
+
var _this3 = this;
|
|
507
|
+
var dataSets = update.dataSets,
|
|
508
|
+
locus = update.locus;
|
|
509
|
+
if (!dataSets) {
|
|
510
|
+
_loggerProxy.default.logger.warn("HashTreeParser#handleLocusUpdate --> ".concat(this.debugId, " received hash tree update without dataSets"));
|
|
511
|
+
}
|
|
512
|
+
var _iterator5 = _createForOfIteratorHelper(dataSets),
|
|
513
|
+
_step5;
|
|
514
|
+
try {
|
|
515
|
+
for (_iterator5.s(); !(_step5 = _iterator5.n()).done;) {
|
|
516
|
+
var dataSet = _step5.value;
|
|
517
|
+
this.updateDataSetInfo(dataSet);
|
|
518
|
+
}
|
|
519
|
+
} catch (err) {
|
|
520
|
+
_iterator5.e(err);
|
|
521
|
+
} finally {
|
|
522
|
+
_iterator5.f();
|
|
523
|
+
}
|
|
524
|
+
var updatedObjects = [];
|
|
525
|
+
|
|
526
|
+
// first, analyze the locus object to extract the hash tree objects' htMeta and data from it
|
|
527
|
+
var leafInfo = this.analyzeLocusHtMeta(locus, {
|
|
528
|
+
copyData: true
|
|
529
|
+
});
|
|
530
|
+
|
|
531
|
+
// then process the data in hash trees, if it is a new version, then add it to updatedObjects
|
|
532
|
+
(0, _keys.default)(leafInfo).forEach(function (dataSetName) {
|
|
533
|
+
if (_this3.dataSets[dataSetName]) {
|
|
534
|
+
if (_this3.dataSets[dataSetName].hashTree) {
|
|
535
|
+
var appliedChangesList = _this3.dataSets[dataSetName].hashTree.putItems(leafInfo[dataSetName].map(function (leaf) {
|
|
536
|
+
return {
|
|
537
|
+
id: leaf.id,
|
|
538
|
+
type: leaf.type,
|
|
539
|
+
version: leaf.version
|
|
540
|
+
};
|
|
541
|
+
}));
|
|
542
|
+
(0, _lodash.zip)(appliedChangesList, leafInfo[dataSetName]).forEach(function (_ref3) {
|
|
543
|
+
var _ref4 = (0, _slicedToArray2.default)(_ref3, 2),
|
|
544
|
+
changeApplied = _ref4[0],
|
|
545
|
+
leaf = _ref4[1];
|
|
546
|
+
if (changeApplied) {
|
|
547
|
+
updatedObjects.push({
|
|
548
|
+
htMeta: {
|
|
549
|
+
elementId: {
|
|
550
|
+
type: leaf.type,
|
|
551
|
+
id: leaf.id,
|
|
552
|
+
version: leaf.version
|
|
553
|
+
},
|
|
554
|
+
dataSetNames: [dataSetName]
|
|
555
|
+
},
|
|
556
|
+
data: leaf.data
|
|
557
|
+
});
|
|
558
|
+
}
|
|
559
|
+
});
|
|
560
|
+
} else {
|
|
561
|
+
// no hash tree means that the data set is not visible
|
|
562
|
+
_loggerProxy.default.logger.warn("HashTreeParser#handleLocusUpdate --> ".concat(_this3.debugId, " received leaf data for data set \"").concat(dataSetName, "\" that has no hash tree created, ignoring"));
|
|
563
|
+
}
|
|
564
|
+
} else {
|
|
565
|
+
_loggerProxy.default.logger.warn("HashTreeParser#handleLocusUpdate --> ".concat(_this3.debugId, " received leaf data for unknown data set \"").concat(dataSetName, "\", ignoring"));
|
|
566
|
+
}
|
|
567
|
+
});
|
|
568
|
+
if (updatedObjects.length === 0) {
|
|
569
|
+
_loggerProxy.default.logger.info("HashTreeParser#handleLocusUpdate --> ".concat(this.debugId, " No objects updated as a result of received API response"));
|
|
570
|
+
} else {
|
|
571
|
+
this.callLocusInfoUpdateCallback({
|
|
572
|
+
updateType: LocusInfoUpdateType.OBJECTS_UPDATED,
|
|
573
|
+
updatedObjects: updatedObjects
|
|
574
|
+
});
|
|
575
|
+
}
|
|
576
|
+
|
|
577
|
+
// todo: once Locus design on how visible data sets will be communicated in subsequent API responses is confirmed,
|
|
578
|
+
// we'll need to check here if visible data sets have changed and update this.visibleDataSets, remove/create hash trees etc
|
|
579
|
+
}
|
|
580
|
+
|
|
581
|
+
/**
|
|
582
|
+
* Updates the internal data set information based on the received data set from Locus.
|
|
583
|
+
*
|
|
584
|
+
* @param {DataSet} receivedDataSet - The latest data set information received from Locus to update the internal state.
|
|
585
|
+
* @returns {void}
|
|
586
|
+
*/
|
|
587
|
+
}, {
|
|
588
|
+
key: "updateDataSetInfo",
|
|
589
|
+
value: function updateDataSetInfo(receivedDataSet) {
|
|
590
|
+
if (!this.dataSets[receivedDataSet.name]) {
|
|
591
|
+
this.dataSets[receivedDataSet.name] = _objectSpread({}, receivedDataSet);
|
|
592
|
+
_loggerProxy.default.logger.info("HashTreeParser#handleMessage --> ".concat(this.debugId, " created entry for \"").concat(receivedDataSet.name, "\" dataset: version=").concat(receivedDataSet.version, ", root=").concat(receivedDataSet.root));
|
|
593
|
+
return;
|
|
594
|
+
}
|
|
595
|
+
// update our version of the dataSet
|
|
596
|
+
if (this.dataSets[receivedDataSet.name].version < receivedDataSet.version) {
|
|
597
|
+
this.dataSets[receivedDataSet.name].version = receivedDataSet.version;
|
|
598
|
+
this.dataSets[receivedDataSet.name].root = receivedDataSet.root;
|
|
599
|
+
this.dataSets[receivedDataSet.name].idleMs = receivedDataSet.idleMs;
|
|
600
|
+
this.dataSets[receivedDataSet.name].backoff = {
|
|
601
|
+
maxMs: receivedDataSet.backoff.maxMs,
|
|
602
|
+
exponent: receivedDataSet.backoff.exponent
|
|
603
|
+
};
|
|
604
|
+
_loggerProxy.default.logger.info("HashTreeParser#handleMessage --> ".concat(this.debugId, " updated \"").concat(receivedDataSet.name, "\" to version=").concat(receivedDataSet.version, ", root=").concat(receivedDataSet.root));
|
|
605
|
+
}
|
|
606
|
+
}
|
|
607
|
+
|
|
608
|
+
/**
|
|
609
|
+
* Checks for changes in the visible data sets based on the updated objects.
|
|
610
|
+
* @param {HashTreeObject[]} updatedObjects - The list of updated hash tree objects.
|
|
611
|
+
* @returns {Object} An object containing the removed and added visible data sets.
|
|
612
|
+
*/
|
|
613
|
+
}, {
|
|
614
|
+
key: "checkForVisibleDataSetChanges",
|
|
615
|
+
value: function checkForVisibleDataSetChanges(updatedObjects) {
|
|
616
|
+
var _this4 = this;
|
|
617
|
+
var removedDataSets = [];
|
|
618
|
+
var addedDataSets = [];
|
|
619
|
+
|
|
620
|
+
// visibleDataSets can only be changed by self object updates
|
|
621
|
+
updatedObjects.forEach(function (object) {
|
|
622
|
+
var _object$data;
|
|
623
|
+
// todo: in the future visibleDataSets will be in "Metadata" object, not in "self"
|
|
624
|
+
if (isSelf(object) && (_object$data = object.data) !== null && _object$data !== void 0 && _object$data.visibleDataSets) {
|
|
625
|
+
var newVisibleDataSets = object.data.visibleDataSets;
|
|
626
|
+
removedDataSets = _this4.visibleDataSets.filter(function (ds) {
|
|
627
|
+
return !newVisibleDataSets.includes(ds);
|
|
628
|
+
});
|
|
629
|
+
addedDataSets = newVisibleDataSets.filter(function (ds) {
|
|
630
|
+
return !_this4.visibleDataSets.includes(ds);
|
|
631
|
+
});
|
|
632
|
+
if (removedDataSets.length > 0 || addedDataSets.length > 0) {
|
|
633
|
+
_loggerProxy.default.logger.info("HashTreeParser#checkForVisibleDataSetChanges --> ".concat(_this4.debugId, " visible data sets change: removed: ").concat(removedDataSets.join(', '), ", added: ").concat(addedDataSets.join(', ')));
|
|
634
|
+
}
|
|
635
|
+
}
|
|
636
|
+
});
|
|
637
|
+
return {
|
|
638
|
+
changeDetected: removedDataSets.length > 0 || addedDataSets.length > 0,
|
|
639
|
+
removedDataSets: removedDataSets,
|
|
640
|
+
addedDataSets: addedDataSets
|
|
641
|
+
};
|
|
642
|
+
}
|
|
643
|
+
|
|
644
|
+
/**
|
|
645
|
+
* Deletes the hash tree for the specified data set.
|
|
646
|
+
*
|
|
647
|
+
* @param {string} dataSetName name of the data set to delete
|
|
648
|
+
* @returns {void}
|
|
649
|
+
*/
|
|
650
|
+
}, {
|
|
651
|
+
key: "deleteHashTree",
|
|
652
|
+
value: function deleteHashTree(dataSetName) {
|
|
653
|
+
this.dataSets[dataSetName].hashTree = undefined;
|
|
654
|
+
|
|
655
|
+
// we also need to stop the timer as there is no hash tree anymore to sync
|
|
656
|
+
if (this.dataSets[dataSetName].timer) {
|
|
657
|
+
clearTimeout(this.dataSets[dataSetName].timer);
|
|
658
|
+
this.dataSets[dataSetName].timer = undefined;
|
|
659
|
+
}
|
|
660
|
+
}
|
|
661
|
+
|
|
662
|
+
/**
|
|
663
|
+
* Adds entries to the passed in updateObjects array
|
|
664
|
+
* for the changes that result from removing visible data sets and creates hash
|
|
665
|
+
* trees for the new visible data sets, but without populating the hash trees.
|
|
666
|
+
*
|
|
667
|
+
* This function is synchronous. If we are missing information about some new
|
|
668
|
+
* visible data sets and they require async initialization, the names of these data sets
|
|
669
|
+
* are returned in an array.
|
|
670
|
+
*
|
|
671
|
+
* @param {string[]} removedDataSets - The list of removed data sets.
|
|
672
|
+
* @param {string[]} addedDataSets - The list of added data sets.
|
|
673
|
+
* @param {HashTreeObject[]} updatedObjects - The list of updated hash tree objects to which changes will be added.
|
|
674
|
+
* @returns {string[]} names of data sets that couldn't be initialized synchronously
|
|
675
|
+
*/
|
|
676
|
+
}, {
|
|
677
|
+
key: "processVisibleDataSetChanges",
|
|
678
|
+
value: function processVisibleDataSetChanges(removedDataSets, addedDataSets, updatedObjects) {
|
|
679
|
+
var _this5 = this;
|
|
680
|
+
var dataSetsRequiringInitialization = [];
|
|
681
|
+
|
|
682
|
+
// if a visible data set was removed, we need to tell our client that all objects from it are removed
|
|
683
|
+
var removedObjects = [];
|
|
684
|
+
removedDataSets.forEach(function (ds) {
|
|
685
|
+
var _this5$dataSets$ds;
|
|
686
|
+
if ((_this5$dataSets$ds = _this5.dataSets[ds]) !== null && _this5$dataSets$ds !== void 0 && _this5$dataSets$ds.hashTree) {
|
|
687
|
+
for (var i = 0; i < _this5.dataSets[ds].hashTree.numLeaves; i += 1) {
|
|
688
|
+
removedObjects.push.apply(removedObjects, (0, _toConsumableArray2.default)(_this5.dataSets[ds].hashTree.getLeafData(i).map(function (elementId) {
|
|
689
|
+
return {
|
|
690
|
+
htMeta: {
|
|
691
|
+
elementId: elementId,
|
|
692
|
+
dataSetNames: [ds]
|
|
693
|
+
},
|
|
694
|
+
data: null
|
|
695
|
+
};
|
|
696
|
+
})));
|
|
697
|
+
}
|
|
698
|
+
_this5.deleteHashTree(ds);
|
|
699
|
+
}
|
|
700
|
+
});
|
|
701
|
+
this.visibleDataSets = this.visibleDataSets.filter(function (vds) {
|
|
702
|
+
return !removedDataSets.includes(vds);
|
|
703
|
+
});
|
|
704
|
+
updatedObjects.push.apply(updatedObjects, removedObjects);
|
|
705
|
+
|
|
706
|
+
// now setup the new visible data sets
|
|
707
|
+
var _iterator6 = _createForOfIteratorHelper(addedDataSets),
|
|
708
|
+
_step6;
|
|
709
|
+
try {
|
|
710
|
+
for (_iterator6.s(); !(_step6 = _iterator6.n()).done;) {
|
|
711
|
+
var ds = _step6.value;
|
|
712
|
+
var dataSetInfo = this.dataSets[ds];
|
|
713
|
+
if (dataSetInfo) {
|
|
714
|
+
if (this.visibleDataSets.includes(dataSetInfo.name)) {
|
|
715
|
+
_loggerProxy.default.logger.info("HashTreeParser#processVisibleDataSetChanges --> ".concat(this.debugId, " Data set \"").concat(ds, "\" is already visible, skipping"));
|
|
716
|
+
|
|
717
|
+
// eslint-disable-next-line no-continue
|
|
718
|
+
continue;
|
|
719
|
+
}
|
|
720
|
+
_loggerProxy.default.logger.info("HashTreeParser#processVisibleDataSetChanges --> ".concat(this.debugId, " Adding visible data set \"").concat(ds, "\""));
|
|
721
|
+
this.visibleDataSets.push(ds);
|
|
722
|
+
var hashTree = new _hashTree.default([], dataSetInfo.leafCount);
|
|
723
|
+
this.dataSets[dataSetInfo.name] = _objectSpread(_objectSpread({}, dataSetInfo), {}, {
|
|
724
|
+
hashTree: hashTree
|
|
725
|
+
});
|
|
726
|
+
} else {
|
|
727
|
+
_loggerProxy.default.logger.info("HashTreeParser#processVisibleDataSetChanges --> ".concat(this.debugId, " visible data set \"").concat(ds, "\" added but no info about it in our dataSets structures"));
|
|
728
|
+
// todo: add a metric here
|
|
729
|
+
dataSetsRequiringInitialization.push(ds);
|
|
730
|
+
}
|
|
731
|
+
}
|
|
732
|
+
} catch (err) {
|
|
733
|
+
_iterator6.e(err);
|
|
734
|
+
} finally {
|
|
735
|
+
_iterator6.f();
|
|
736
|
+
}
|
|
737
|
+
return dataSetsRequiringInitialization;
|
|
738
|
+
}
|
|
739
|
+
|
|
740
|
+
/**
|
|
741
|
+
* Adds entries to the passed in updateObjects array
|
|
742
|
+
* for the changes that result from adding and removing visible data sets.
|
|
743
|
+
*
|
|
744
|
+
* @param {HashTreeMessage} message - The hash tree message that triggered the visible data set changes.
|
|
745
|
+
* @param {string[]} addedDataSets - The list of added data sets.
|
|
746
|
+
* @returns {Promise<void>}
|
|
747
|
+
*/
|
|
748
|
+
}, {
|
|
749
|
+
key: "initializeNewVisibleDataSets",
|
|
750
|
+
value: (function () {
|
|
751
|
+
var _initializeNewVisibleDataSets = (0, _asyncToGenerator2.default)(/*#__PURE__*/_regenerator.default.mark(function _callee4(message, addedDataSets) {
|
|
752
|
+
var _this6 = this;
|
|
753
|
+
var allDataSets, _iterator7, _step7, _loop, _t2;
|
|
754
|
+
return _regenerator.default.wrap(function (_context5) {
|
|
755
|
+
while (1) switch (_context5.prev = _context5.next) {
|
|
756
|
+
case 0:
|
|
757
|
+
_context5.next = 1;
|
|
758
|
+
return this.getAllDataSetsMetadata(message.visibleDataSetsUrl);
|
|
759
|
+
case 1:
|
|
760
|
+
allDataSets = _context5.sent;
|
|
761
|
+
_iterator7 = _createForOfIteratorHelper(addedDataSets);
|
|
762
|
+
_context5.prev = 2;
|
|
763
|
+
_loop = /*#__PURE__*/_regenerator.default.mark(function _loop() {
|
|
764
|
+
var ds, dataSetInfo, updates;
|
|
765
|
+
return _regenerator.default.wrap(function (_context4) {
|
|
766
|
+
while (1) switch (_context4.prev = _context4.next) {
|
|
767
|
+
case 0:
|
|
768
|
+
ds = _step7.value;
|
|
769
|
+
dataSetInfo = allDataSets.find(function (d) {
|
|
770
|
+
return d.name === ds;
|
|
771
|
+
});
|
|
772
|
+
_loggerProxy.default.logger.info("HashTreeParser#initializeNewVisibleDataSets --> ".concat(_this6.debugId, " initializing data set \"").concat(ds, "\""));
|
|
773
|
+
if (dataSetInfo) {
|
|
774
|
+
_context4.next = 1;
|
|
775
|
+
break;
|
|
776
|
+
}
|
|
777
|
+
_loggerProxy.default.logger.warn("HashTreeParser#handleHashTreeMessage --> ".concat(_this6.debugId, " missing info about data set \"").concat(ds, "\" in Locus response from visibleDataSetsUrl"));
|
|
778
|
+
_context4.next = 3;
|
|
779
|
+
break;
|
|
780
|
+
case 1:
|
|
781
|
+
_context4.next = 2;
|
|
782
|
+
return _this6.initializeNewVisibleDataSet(dataSetInfo);
|
|
783
|
+
case 2:
|
|
784
|
+
updates = _context4.sent;
|
|
785
|
+
_this6.callLocusInfoUpdateCallback(updates);
|
|
786
|
+
case 3:
|
|
787
|
+
case "end":
|
|
788
|
+
return _context4.stop();
|
|
789
|
+
}
|
|
790
|
+
}, _loop);
|
|
791
|
+
});
|
|
792
|
+
_iterator7.s();
|
|
793
|
+
case 3:
|
|
794
|
+
if ((_step7 = _iterator7.n()).done) {
|
|
795
|
+
_context5.next = 5;
|
|
796
|
+
break;
|
|
797
|
+
}
|
|
798
|
+
return _context5.delegateYield(_loop(), "t0", 4);
|
|
799
|
+
case 4:
|
|
800
|
+
_context5.next = 3;
|
|
801
|
+
break;
|
|
802
|
+
case 5:
|
|
803
|
+
_context5.next = 7;
|
|
804
|
+
break;
|
|
805
|
+
case 6:
|
|
806
|
+
_context5.prev = 6;
|
|
807
|
+
_t2 = _context5["catch"](2);
|
|
808
|
+
_iterator7.e(_t2);
|
|
809
|
+
case 7:
|
|
810
|
+
_context5.prev = 7;
|
|
811
|
+
_iterator7.f();
|
|
812
|
+
return _context5.finish(7);
|
|
813
|
+
case 8:
|
|
814
|
+
case "end":
|
|
815
|
+
return _context5.stop();
|
|
816
|
+
}
|
|
817
|
+
}, _callee4, this, [[2, 6, 7, 8]]);
|
|
818
|
+
}));
|
|
819
|
+
function initializeNewVisibleDataSets(_x5, _x6) {
|
|
820
|
+
return _initializeNewVisibleDataSets.apply(this, arguments);
|
|
821
|
+
}
|
|
822
|
+
return initializeNewVisibleDataSets;
|
|
823
|
+
}()
|
|
824
|
+
/**
|
|
825
|
+
* Parses incoming hash tree messages, updates the hash trees and returns information about the changes
|
|
826
|
+
*
|
|
827
|
+
* @param {HashTreeMessage} message - The hash tree message containing data sets and objects to be processed
|
|
828
|
+
* @param {string} [debugText] - Optional debug text to include in logs
|
|
829
|
+
* @returns {Promise}
|
|
830
|
+
*/
|
|
831
|
+
)
|
|
832
|
+
}, {
|
|
833
|
+
key: "parseMessage",
|
|
834
|
+
value: (function () {
|
|
835
|
+
var _parseMessage = (0, _asyncToGenerator2.default)(/*#__PURE__*/_regenerator.default.mark(function _callee5(message, debugText) {
|
|
836
|
+
var _message$locusStateEl,
|
|
837
|
+
_this7 = this;
|
|
838
|
+
var dataSets, visibleDataSetsUrl, isRosterDropped, updatedObjects, dataSetsRequiringInitialization, selfUpdates, updatedSelfObjects, _this$checkForVisible, changeDetected, removedDataSets, addedDataSets;
|
|
839
|
+
return _regenerator.default.wrap(function (_context6) {
|
|
840
|
+
while (1) switch (_context6.prev = _context6.next) {
|
|
841
|
+
case 0:
|
|
842
|
+
dataSets = message.dataSets, visibleDataSetsUrl = message.visibleDataSetsUrl;
|
|
843
|
+
_loggerProxy.default.logger.info("HashTreeParser#parseMessage --> ".concat(this.debugId, " received message ").concat(debugText || '', ":"), message);
|
|
844
|
+
if (((_message$locusStateEl = message.locusStateElements) === null || _message$locusStateEl === void 0 ? void 0 : _message$locusStateEl.length) === 0) {
|
|
845
|
+
_loggerProxy.default.logger.warn("HashTreeParser#parseMessage --> ".concat(this.debugId, " got empty locusStateElements!!!"));
|
|
846
|
+
// todo: send a metric
|
|
847
|
+
}
|
|
848
|
+
|
|
849
|
+
// first, update our metadata about the datasets with info from the message
|
|
850
|
+
this.visibleDataSetsUrl = visibleDataSetsUrl;
|
|
851
|
+
dataSets.forEach(function (dataSet) {
|
|
852
|
+
return _this7.updateDataSetInfo(dataSet);
|
|
853
|
+
});
|
|
854
|
+
if (!this.isEndMessage(message)) {
|
|
855
|
+
_context6.next = 1;
|
|
856
|
+
break;
|
|
857
|
+
}
|
|
858
|
+
_loggerProxy.default.logger.info("HashTreeParser#parseMessage --> ".concat(this.debugId, " received END message"));
|
|
859
|
+
this.stopAllTimers();
|
|
860
|
+
return _context6.abrupt("return", {
|
|
861
|
+
updateType: LocusInfoUpdateType.MEETING_ENDED
|
|
862
|
+
});
|
|
863
|
+
case 1:
|
|
864
|
+
isRosterDropped = false;
|
|
865
|
+
updatedObjects = []; // when we detect new visible datasets, it may be that the metadata about them is not
|
|
866
|
+
// available in the message, they will require separate async initialization
|
|
867
|
+
dataSetsRequiringInitialization = []; // first find out if there are any visible data set changes - they're signalled in SELF object updates
|
|
868
|
+
selfUpdates = (message.locusStateElements || []).filter(function (object) {
|
|
869
|
+
return (
|
|
870
|
+
// todo: SPARK-744859 once Locus supports it, we will filter for "Metadata" type here instead of "self"
|
|
871
|
+
isSelf(object)
|
|
872
|
+
);
|
|
873
|
+
});
|
|
874
|
+
if (selfUpdates.length > 0) {
|
|
875
|
+
updatedSelfObjects = [];
|
|
876
|
+
selfUpdates.forEach(function (object) {
|
|
877
|
+
// todo: once Locus supports it, we will use the "view" field here instead of dataSetNames
|
|
878
|
+
var _iterator8 = _createForOfIteratorHelper(object.htMeta.dataSetNames),
|
|
879
|
+
_step8;
|
|
880
|
+
try {
|
|
881
|
+
for (_iterator8.s(); !(_step8 = _iterator8.n()).done;) {
|
|
882
|
+
var _this7$dataSets$dataS;
|
|
883
|
+
var dataSetName = _step8.value;
|
|
884
|
+
var hashTree = (_this7$dataSets$dataS = _this7.dataSets[dataSetName]) === null || _this7$dataSets$dataS === void 0 ? void 0 : _this7$dataSets$dataS.hashTree;
|
|
885
|
+
if (hashTree && object.data) {
|
|
886
|
+
if (hashTree.putItem(object.htMeta.elementId)) {
|
|
887
|
+
updatedSelfObjects.push(object);
|
|
888
|
+
}
|
|
889
|
+
}
|
|
890
|
+
}
|
|
891
|
+
} catch (err) {
|
|
892
|
+
_iterator8.e(err);
|
|
893
|
+
} finally {
|
|
894
|
+
_iterator8.f();
|
|
895
|
+
}
|
|
896
|
+
});
|
|
897
|
+
updatedObjects.push.apply(updatedObjects, updatedSelfObjects);
|
|
898
|
+
_this$checkForVisible = this.checkForVisibleDataSetChanges(updatedSelfObjects), changeDetected = _this$checkForVisible.changeDetected, removedDataSets = _this$checkForVisible.removedDataSets, addedDataSets = _this$checkForVisible.addedDataSets;
|
|
899
|
+
if (changeDetected) {
|
|
900
|
+
dataSetsRequiringInitialization = this.processVisibleDataSetChanges(removedDataSets, addedDataSets, updatedObjects);
|
|
901
|
+
}
|
|
902
|
+
}
|
|
903
|
+
|
|
904
|
+
// by this point we now have this.dataSets setup for data sets from this message
|
|
905
|
+
// and hash trees created for the new visible data sets,
|
|
906
|
+
// so we can now process all the updates from the message
|
|
907
|
+
dataSets.forEach(function (dataSet) {
|
|
908
|
+
if (_this7.dataSets[dataSet.name]) {
|
|
909
|
+
var hashTree = _this7.dataSets[dataSet.name].hashTree;
|
|
910
|
+
if (hashTree) {
|
|
911
|
+
var locusStateElementsForThisSet = message.locusStateElements.filter(function (object) {
|
|
912
|
+
return object.htMeta.dataSetNames.includes(dataSet.name);
|
|
913
|
+
});
|
|
914
|
+
var appliedChangesList = hashTree.updateItems(locusStateElementsForThisSet.map(function (object) {
|
|
915
|
+
return object.data ? {
|
|
916
|
+
operation: 'update',
|
|
917
|
+
item: object.htMeta.elementId
|
|
918
|
+
} : {
|
|
919
|
+
operation: 'remove',
|
|
920
|
+
item: object.htMeta.elementId
|
|
921
|
+
};
|
|
922
|
+
}));
|
|
923
|
+
(0, _lodash.zip)(appliedChangesList, locusStateElementsForThisSet).forEach(function (_ref5) {
|
|
924
|
+
var _ref6 = (0, _slicedToArray2.default)(_ref5, 2),
|
|
925
|
+
changeApplied = _ref6[0],
|
|
926
|
+
object = _ref6[1];
|
|
927
|
+
if (changeApplied) {
|
|
928
|
+
if (isSelf(object) && !object.data) {
|
|
929
|
+
isRosterDropped = true;
|
|
930
|
+
}
|
|
931
|
+
// add to updatedObjects so that our locus DTO will get updated with the new object
|
|
932
|
+
updatedObjects.push(object);
|
|
933
|
+
}
|
|
934
|
+
});
|
|
935
|
+
} else {
|
|
936
|
+
_loggerProxy.default.logger.info("Locus-info:index#parseMessage --> ".concat(_this7.debugId, " unexpected (not visible) dataSet ").concat(dataSet.name, " received in hash tree message"));
|
|
937
|
+
}
|
|
938
|
+
}
|
|
939
|
+
if (!isRosterDropped) {
|
|
940
|
+
_this7.runSyncAlgorithm(dataSet);
|
|
941
|
+
}
|
|
942
|
+
});
|
|
943
|
+
if (!isRosterDropped) {
|
|
944
|
+
_context6.next = 2;
|
|
945
|
+
break;
|
|
946
|
+
}
|
|
947
|
+
_loggerProxy.default.logger.info("HashTreeParser#parseMessage --> ".concat(this.debugId, " detected roster drop"));
|
|
948
|
+
this.stopAllTimers();
|
|
949
|
+
|
|
950
|
+
// in case of roster drop we don't care about other updates
|
|
951
|
+
return _context6.abrupt("return", {
|
|
952
|
+
updateType: LocusInfoUpdateType.MEETING_ENDED
|
|
953
|
+
});
|
|
954
|
+
case 2:
|
|
955
|
+
if (dataSetsRequiringInitialization.length > 0) {
|
|
956
|
+
// there are some data sets that we need to initialize asynchronously
|
|
957
|
+
queueMicrotask(function () {
|
|
958
|
+
_this7.initializeNewVisibleDataSets(message, dataSetsRequiringInitialization);
|
|
959
|
+
});
|
|
960
|
+
}
|
|
961
|
+
if (updatedObjects.length === 0) {
|
|
962
|
+
_loggerProxy.default.logger.info("HashTreeParser#parseMessage --> ".concat(this.debugId, " No objects updated as a result of received message"));
|
|
963
|
+
}
|
|
964
|
+
return _context6.abrupt("return", {
|
|
965
|
+
updateType: LocusInfoUpdateType.OBJECTS_UPDATED,
|
|
966
|
+
updatedObjects: updatedObjects
|
|
967
|
+
});
|
|
968
|
+
case 3:
|
|
969
|
+
case "end":
|
|
970
|
+
return _context6.stop();
|
|
971
|
+
}
|
|
972
|
+
}, _callee5, this);
|
|
973
|
+
}));
|
|
974
|
+
function parseMessage(_x7, _x8) {
|
|
975
|
+
return _parseMessage.apply(this, arguments);
|
|
976
|
+
}
|
|
977
|
+
return parseMessage;
|
|
978
|
+
}()
+ /**
+ * Handles incoming hash tree messages, updates the hash trees and calls locusInfoUpdateCallback
+ *
+ * @param {HashTreeMessage} message - The hash tree message containing data sets and objects to be processed
+ * @param {string} [debugText] - Optional debug text to include in logs
+ * @returns {void}
+ */
+ )
+ }, {
+ key: "handleMessage",
+ value: (function () {
+ var _handleMessage = (0, _asyncToGenerator2.default)(/*#__PURE__*/_regenerator.default.mark(function _callee6(message, debugText) {
+ var updates;
+ return _regenerator.default.wrap(function (_context7) {
+ while (1) switch (_context7.prev = _context7.next) {
+ case 0:
+ if (!(message.locusStateElements === undefined)) {
+ _context7.next = 1;
+ break;
+ }
+ this.handleRootHashHeartBeatMessage(message);
+ _context7.next = 3;
+ break;
+ case 1:
+ _context7.next = 2;
+ return this.parseMessage(message, debugText);
+ case 2:
+ updates = _context7.sent;
+ this.callLocusInfoUpdateCallback(updates);
+ case 3:
+ case "end":
+ return _context7.stop();
+ }
+ }, _callee6, this);
+ }));
+ function handleMessage(_x9, _x0) {
+ return _handleMessage.apply(this, arguments);
+ }
+ return handleMessage;
+ }()
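The dispatch that this transpiled _callee6 implements can be sketched in plain async/await; the method and field names come from the diff above, while the typing of this is purely illustrative:

```ts
// Readable sketch of handleMessage: messages without locusStateElements are root-hash
// heartbeats, everything else is parsed and reported via the update callback.
interface HashTreeMessage {
  locusStateElements?: unknown[];
  // other fields elided
}

async function handleMessage(
  this: {
    handleRootHashHeartBeatMessage(m: HashTreeMessage): void;
    parseMessage(m: HashTreeMessage, debugText?: string): Promise<unknown>;
    callLocusInfoUpdateCallback(updates: unknown): void;
  },
  message: HashTreeMessage,
  debugText?: string
): Promise<void> {
  if (message.locusStateElements === undefined) {
    // no state elements -> this is just a root-hash heartbeat
    this.handleRootHashHeartBeatMessage(message);
  } else {
    const updates = await this.parseMessage(message, debugText);
    this.callLocusInfoUpdateCallback(updates);
  }
}
```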
+ /**
+ * Calls the updateInfo callback if there are any updates to report
+ *
+ * @param {Object} updates parsed from a Locus message
+ * @returns {void}
+ */
+ )
+ }, {
+ key: "callLocusInfoUpdateCallback",
+ value: function callLocusInfoUpdateCallback(updates) {
+ var updateType = updates.updateType,
+ updatedObjects = updates.updatedObjects;
+ if (updateType !== LocusInfoUpdateType.OBJECTS_UPDATED || (updatedObjects === null || updatedObjects === void 0 ? void 0 : updatedObjects.length) > 0) {
+ this.locusInfoUpdateCallback(updateType, {
+ updatedObjects: updatedObjects
+ });
+ }
+ }
+
+ /**
+ * Calculates a weighted backoff time that should be used for syncs
+ *
+ * @param {Object} backoff - The backoff configuration containing maxMs and exponent
+ * @returns {number} - A weighted backoff time based on the provided configuration, using algorithm supplied by Locus team
+ */
+ }, {
+ key: "getWeightedBackoffTime",
+ value: function getWeightedBackoffTime(backoff) {
+ var maxMs = backoff.maxMs,
+ exponent = backoff.exponent;
+ var randomValue = Math.random();
+ return Math.round(Math.pow(randomValue, exponent) * maxMs);
+ }
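The backoff formula above in standalone form (a sketch for reference, not an exported helper of the package); the {maxMs, exponent} config shape matches the code:

```ts
// Weighted backoff: random() is in [0, 1), so an exponent greater than 1 skews the draw
// toward short delays, spreading clients' sync requests while keeping most of them early.
function getWeightedBackoffTime(backoff: {maxMs: number; exponent: number}): number {
  const {maxMs, exponent} = backoff;
  return Math.round(Math.random() ** exponent * maxMs);
}

// e.g. with {maxMs: 10000, exponent: 3}, a random draw of 0.5 gives round(0.125 * 10000) = 1250 ms
```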
+
+ /**
+ * Runs the sync algorithm for the given data set.
+ *
+ * @param {DataSet} receivedDataSet - The data set to run the sync algorithm for.
+ * @returns {void}
+ */
+ }, {
+ key: "runSyncAlgorithm",
+ value: function runSyncAlgorithm(receivedDataSet) {
+ var _this8 = this;
+ var dataSet = this.dataSets[receivedDataSet.name];
+ if (!dataSet) {
+ _loggerProxy.default.logger.warn("HashTreeParser#runSyncAlgorithm --> ".concat(this.debugId, " No data set found for ").concat(receivedDataSet.name, ", skipping sync algorithm"));
+ return;
+ }
+ if (!dataSet.hashTree) {
+ _loggerProxy.default.logger.info("HashTreeParser#runSyncAlgorithm --> ".concat(this.debugId, " Data set \"").concat(dataSet.name, "\" has no hash tree, skipping sync algorithm"));
+ return;
+ }
+ dataSet.hashTree.resize(receivedDataSet.leafCount);
+
+ // temporary log for the workshop // todo: remove
+ var ourCurrentRootHash = dataSet.hashTree.getRootHash();
+ _loggerProxy.default.logger.info("HashTreeParser#runSyncAlgorithm --> ".concat(this.debugId, " dataSet=\"").concat(dataSet.name, "\" version=").concat(dataSet.version, " hashes before starting timer: ours=").concat(ourCurrentRootHash, " Locus=").concat(dataSet.root));
+ var delay = dataSet.idleMs + this.getWeightedBackoffTime(dataSet.backoff);
+ if (delay > 0) {
+ if (dataSet.timer) {
+ clearTimeout(dataSet.timer);
+ }
+ _loggerProxy.default.logger.info("HashTreeParser#runSyncAlgorithm --> ".concat(this.debugId, " setting \"").concat(dataSet.name, "\" sync timer for ").concat(delay));
+ dataSet.timer = setTimeout(/*#__PURE__*/(0, _asyncToGenerator2.default)(/*#__PURE__*/_regenerator.default.mark(function _callee7() {
+ var rootHash, mismatchedLeavesData, receivedHashes, _yield$_this8$getHash, hashes, latestDataSetInfo, mismatchedLeaveIndexes, syncResponse, _t3;
+ return _regenerator.default.wrap(function (_context8) {
+ while (1) switch (_context8.prev = _context8.next) {
+ case 0:
+ dataSet.timer = undefined;
+ if (dataSet.hashTree) {
+ _context8.next = 1;
+ break;
+ }
+ _loggerProxy.default.logger.warn("HashTreeParser#runSyncAlgorithm --> ".concat(_this8.debugId, " Data set \"").concat(dataSet.name, "\" no longer has a hash tree, cannot run sync algorithm"));
+ return _context8.abrupt("return");
+ case 1:
+ rootHash = dataSet.hashTree.getRootHash();
+ if (!(dataSet.root !== rootHash)) {
+ _context8.next = 11;
+ break;
+ }
+ _loggerProxy.default.logger.info("HashTreeParser#runSyncAlgorithm --> ".concat(_this8.debugId, " Root hash mismatch: received=").concat(dataSet.root, ", ours=").concat(rootHash, ", syncing data set \"").concat(dataSet.name, "\""));
+ mismatchedLeavesData = {};
+ if (!(dataSet.leafCount !== 1)) {
+ _context8.next = 7;
+ break;
+ }
+ _context8.prev = 2;
+ _context8.next = 3;
+ return _this8.getHashesFromLocus(dataSet.name);
+ case 3:
+ _yield$_this8$getHash = _context8.sent;
+ hashes = _yield$_this8$getHash.hashes;
+ latestDataSetInfo = _yield$_this8$getHash.dataSet;
+ receivedHashes = hashes;
+ dataSet.hashTree.resize(latestDataSetInfo.leafCount);
+ _context8.next = 6;
+ break;
+ case 4:
+ _context8.prev = 4;
+ _t3 = _context8["catch"](2);
+ if (!(_t3.statusCode === 409)) {
+ _context8.next = 5;
+ break;
+ }
+ // this is a leaf count mismatch, we should do nothing, just wait for another heartbeat message from Locus
+ _loggerProxy.default.logger.info("HashTreeParser#getHashesFromLocus --> ".concat(_this8.debugId, " Got 409 when fetching hashes for data set \"").concat(dataSet.name, "\": ").concat(_t3.message));
+ return _context8.abrupt("return");
+ case 5:
+ throw _t3;
+ case 6:
+ // identify mismatched leaves
+ mismatchedLeaveIndexes = dataSet.hashTree.diffHashes(receivedHashes);
+ mismatchedLeaveIndexes.forEach(function (index) {
+ mismatchedLeavesData[index] = dataSet.hashTree.getLeafData(index);
+ });
+ _context8.next = 8;
+ break;
+ case 7:
+ mismatchedLeavesData[0] = dataSet.hashTree.getLeafData(0);
+ case 8:
+ if (!((0, _keys.default)(mismatchedLeavesData).length > 0)) {
+ _context8.next = 10;
+ break;
+ }
+ _context8.next = 9;
+ return _this8.sendSyncRequestToLocus(dataSet, mismatchedLeavesData);
+ case 9:
+ syncResponse = _context8.sent;
+ // sync API may return nothing (in that case data will arrive via messages)
+ // or it may return a response in the same format as messages
+ if (syncResponse) {
+ _this8.handleMessage(syncResponse, 'via sync API');
+ }
+ case 10:
+ _context8.next = 12;
+ break;
+ case 11:
+ _loggerProxy.default.logger.info("HashTreeParser#runSyncAlgorithm --> ".concat(_this8.debugId, " \"").concat(dataSet.name, "\" root hash matching: ").concat(rootHash, ", version=").concat(dataSet.version));
+ case 12:
+ case "end":
+ return _context8.stop();
+ }
+ }, _callee7, null, [[2, 4]]);
+ })), delay);
+ } else {
+ _loggerProxy.default.logger.info("HashTreeParser#runSyncAlgorithm --> ".concat(this.debugId, " No delay for \"").concat(dataSet.name, "\" data set, skipping sync timer reset/setup"));
+ }
+ }
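Because the timer callback above is regenerator output, here is a readable reconstruction of its core flow under simplified types (a sketch, not the shipped implementation); the method names come from the diff, everything else is illustrative:

```ts
// Sketch of the sync step that fires when the backoff timer elapses.
interface HashTreeLike {
  getRootHash(): string;
  resize(leafCount: number): void;
  diffHashes(remote: string[]): number[];
  getLeafData(index: number): unknown;
}

interface DataSetLike {
  name: string;
  root: string;       // root hash last advertised by Locus
  leafCount: number;
  hashTree?: HashTreeLike;
}

interface SyncDeps {
  getHashesFromLocus(name: string): Promise<{hashes: string[]; dataSet: {leafCount: number}}>;
  sendSyncRequestToLocus(dataSet: DataSetLike, leaves: Record<number, unknown>): Promise<unknown | null>;
  handleMessage(msg: unknown, debugText?: string): Promise<void>;
}

async function syncDataSet(dataSet: DataSetLike, deps: SyncDeps): Promise<void> {
  const hashTree = dataSet.hashTree;
  if (!hashTree) return;

  // nothing to do if our tree already matches the root hash Locus advertised
  if (hashTree.getRootHash() === dataSet.root) return;

  const mismatchedLeavesData: Record<number, unknown> = {};

  if (dataSet.leafCount !== 1) {
    try {
      // multi-leaf set: ask Locus for its per-leaf hashes and diff them against ours
      const {hashes, dataSet: latest} = await deps.getHashesFromLocus(dataSet.name);
      hashTree.resize(latest.leafCount);
      for (const index of hashTree.diffHashes(hashes)) {
        mismatchedLeavesData[index] = hashTree.getLeafData(index);
      }
    } catch (error: any) {
      if (error.statusCode === 409) return; // leaf count mismatch: wait for the next heartbeat
      throw error;
    }
  } else {
    // single-leaf set: the only leaf is the mismatch
    mismatchedLeavesData[0] = hashTree.getLeafData(0);
  }

  if (Object.keys(mismatchedLeavesData).length > 0) {
    // the sync API may answer inline (same shape as a hash tree message) or via later messages
    const syncResponse = await deps.sendSyncRequestToLocus(dataSet, mismatchedLeavesData);
    if (syncResponse) await deps.handleMessage(syncResponse, 'via sync API');
  }
}
```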
+
+ /**
+ * Stops all timers for the data sets to prevent any further sync attempts.
+ * @returns {void}
+ */
+ }, {
+ key: "stopAllTimers",
+ value: function stopAllTimers() {
+ (0, _values.default)(this.dataSets).forEach(function (dataSet) {
+ if (dataSet.timer) {
+ clearTimeout(dataSet.timer);
+ dataSet.timer = undefined;
+ }
+ });
+ }
+
+ /**
+ * Gets the current hashes from the locus for a specific data set.
+ * @param {string} dataSetName
+ * @returns {string[]}
+ */
+ }, {
+ key: "getHashesFromLocus",
+ value: function getHashesFromLocus(dataSetName) {
+ var _this9 = this;
+ _loggerProxy.default.logger.info("HashTreeParser#getHashesFromLocus --> ".concat(this.debugId, " Requesting hashes for data set \"").concat(dataSetName, "\""));
+ var dataSet = this.dataSets[dataSetName];
+ var url = "".concat(dataSet.url, "/hashtree");
+ return this.webexRequest({
+ method: _constants.HTTP_VERBS.GET,
+ uri: url
+ }).then(function (response) {
+ var _response$body, _response$body2;
+ var hashes = (_response$body = response.body) === null || _response$body === void 0 ? void 0 : _response$body.hashes;
+ var dataSetFromResponse = (_response$body2 = response.body) === null || _response$body2 === void 0 ? void 0 : _response$body2.dataSet;
+ if (!hashes || !(0, _isArray.default)(hashes)) {
+ _loggerProxy.default.logger.warn("HashTreeParser#getHashesFromLocus --> ".concat(_this9.debugId, " Locus returned invalid hashes, response body="), response.body);
+ throw new Error("Locus returned invalid hashes: ".concat(hashes));
+ }
+ _loggerProxy.default.logger.info("HashTreeParser#getHashesFromLocus --> ".concat(_this9.debugId, " Received hashes for data set \"").concat(dataSetName, "\": ").concat((0, _stringify.default)(hashes)));
+ return {
+ hashes: hashes,
+ dataSet: dataSetFromResponse
+ };
+ }).catch(function (error) {
+ _loggerProxy.default.logger.error("HashTreeParser#getHashesFromLocus --> ".concat(_this9.debugId, " Error ").concat(error.statusCode, " fetching hashes for data set \"").concat(dataSetName, "\":"), error);
+ throw error;
+ });
+ }
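The handling above implies a response shape for the GET .../hashtree request; the interface below is inferred from this code only, not from an API spec, so treat it as an assumption:

```ts
// Inferred response shape for GET <dataSet.url>/hashtree.
interface HashTreeHashesResponse {
  // per-leaf hashes as Locus currently has them; the parser rejects anything that isn't an array
  hashes: string[];
  // only leafCount is read by the sync algorithm above (to resize the local tree before diffing)
  dataSet: {
    leafCount: number;
  };
}
```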
+
+ /**
+ * Sends a sync request to Locus for the specified data set.
+ *
+ * @param {InternalDataSet} dataSet The data set to sync.
+ * @param {Record<number, LeafDataItem[]>} mismatchedLeavesData The mismatched leaves data to include in the sync request.
+ * @returns {Promise<HashTreeMessage|null>}
+ */
+ }, {
+ key: "sendSyncRequestToLocus",
+ value: function sendSyncRequestToLocus(dataSet, mismatchedLeavesData) {
+ var _dataSet$hashTree,
+ _this0 = this;
+ _loggerProxy.default.logger.info("HashTreeParser#sendSyncRequestToLocus --> ".concat(this.debugId, " Sending sync request for data set \"").concat(dataSet.name, "\""));
+ var url = "".concat(dataSet.url, "/sync");
+ var body = {
+ dataSet: {
+ name: dataSet.name,
+ leafCount: dataSet.leafCount,
+ root: (_dataSet$hashTree = dataSet.hashTree) === null || _dataSet$hashTree === void 0 ? void 0 : _dataSet$hashTree.getRootHash()
+ },
+ leafDataEntries: []
+ };
+ (0, _keys.default)(mismatchedLeavesData).forEach(function (index) {
+ body.leafDataEntries.push({
+ leafIndex: (0, _parseInt2.default)(index, 10),
+ elementIds: mismatchedLeavesData[index]
+ });
+ });
+ return this.webexRequest({
+ method: _constants.HTTP_VERBS.POST,
+ uri: url,
+ body: body
+ }).then(function (resp) {
+ _loggerProxy.default.logger.info("HashTreeParser#sendSyncRequestToLocus --> ".concat(_this0.debugId, " Sync request succeeded for \"").concat(dataSet.name, "\""));
+ if (!resp.body || (0, _lodash.isEmpty)(resp.body)) {
+ _loggerProxy.default.logger.info("HashTreeParser#sendSyncRequestToLocus --> ".concat(_this0.debugId, " Got ").concat(resp.statusCode, " with empty body for sync request for data set \"").concat(dataSet.name, "\", data should arrive via messages"));
+ return null;
+ }
+ return resp.body;
+ }).catch(function (error) {
+ _loggerProxy.default.logger.error("HashTreeParser#sendSyncRequestToLocus --> ".concat(_this0.debugId, " Error ").concat(error.statusCode, " sending sync request for data set \"").concat(dataSet.name, "\":"), error);
+ throw error;
+ });
+ }
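For clarity, the POST .../sync body assembled above has the following shape; the example values (data set name, hash, element ids) are made up for illustration:

```ts
// Shape of the sync request body built in sendSyncRequestToLocus above.
interface SyncRequestBody {
  dataSet: {
    name: string;
    leafCount: number;
    root?: string;              // our current root hash, if we still have a tree
  };
  leafDataEntries: Array<{
    leafIndex: number;
    elementIds: unknown;        // the local leaf data for each mismatched leaf
  }>;
}

const exampleBody: SyncRequestBody = {
  dataSet: {name: 'participants', leafCount: 4, root: 'abc123'},
  leafDataEntries: [{leafIndex: 2, elementIds: ['element-1', 'element-7']}],
};
```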
+ }]);
+ }();
+ var _default = exports.default = HashTreeParser;
+ //# sourceMappingURL=hashTreeParser.js.map