@splitsoftware/splitio-commons 1.16.1-rc.0 → 1.16.1-rc.2
This diff compares the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their public registry.
- package/cjs/logger/messages/info.js +1 -1
- package/cjs/readiness/readinessManager.js +4 -5
- package/cjs/storages/pluggable/inMemoryWrapper.js +1 -1
- package/cjs/sync/polling/fetchers/mySegmentsFetcher.js +5 -1
- package/cjs/sync/polling/pollingManagerCS.js +11 -17
- package/cjs/sync/polling/syncTasks/mySegmentsSyncTask.js +2 -2
- package/cjs/sync/polling/updaters/mySegmentsUpdater.js +17 -23
- package/cjs/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.js +10 -1
- package/cjs/sync/streaming/parseUtils.js +3 -8
- package/cjs/sync/streaming/pushManager.js +15 -18
- package/esm/logger/messages/info.js +1 -1
- package/esm/readiness/readinessManager.js +4 -5
- package/esm/storages/pluggable/inMemoryWrapper.js +1 -1
- package/esm/sync/polling/fetchers/mySegmentsFetcher.js +5 -1
- package/esm/sync/polling/pollingManagerCS.js +11 -17
- package/esm/sync/polling/syncTasks/mySegmentsSyncTask.js +2 -2
- package/esm/sync/polling/updaters/mySegmentsUpdater.js +17 -23
- package/esm/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.js +10 -1
- package/esm/sync/streaming/parseUtils.js +3 -8
- package/esm/sync/streaming/pushManager.js +15 -18
- package/package.json +1 -1
- package/src/dtos/types.ts +10 -6
- package/src/logger/messages/info.ts +1 -1
- package/src/readiness/readinessManager.ts +3 -5
- package/src/storages/pluggable/inMemoryWrapper.ts +1 -1
- package/src/storages/types.ts +1 -1
- package/src/sync/polling/fetchers/mySegmentsFetcher.ts +6 -2
- package/src/sync/polling/pollingManagerCS.ts +12 -15
- package/src/sync/polling/syncTasks/mySegmentsSyncTask.ts +3 -2
- package/src/sync/polling/types.ts +2 -2
- package/src/sync/polling/updaters/mySegmentsUpdater.ts +14 -20
- package/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts +12 -2
- package/src/sync/streaming/UpdateWorkers/SegmentsUpdateWorker.ts +1 -1
- package/src/sync/streaming/UpdateWorkers/SplitsUpdateWorker.ts +1 -1
- package/src/sync/streaming/UpdateWorkers/types.ts +2 -2
- package/src/sync/streaming/parseUtils.ts +6 -10
- package/src/sync/streaming/pushManager.ts +12 -16
- package/types/dtos/types.d.ts +9 -5
- package/types/storages/pluggable/inMemoryWrapper.d.ts +1 -1
- package/types/sync/polling/syncTasks/mySegmentsSyncTask.d.ts +1 -1
- package/types/sync/polling/types.d.ts +2 -2
- package/types/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.d.ts +2 -2
- package/types/sync/streaming/UpdateWorkers/SegmentsUpdateWorker.d.ts +2 -1
- package/types/sync/streaming/UpdateWorkers/SplitsUpdateWorker.d.ts +3 -2
- package/types/sync/streaming/UpdateWorkers/types.d.ts +2 -2
- package/types/sync/streaming/parseUtils.d.ts +2 -4
- package/types/sync/streaming/pushManager.d.ts +1 -1
package/cjs/logger/messages/info.js
CHANGED
@@ -23,7 +23,7 @@ exports.codesInfo = warn_1.codesWarn.concat([
     [c.POLLING_START, c.LOG_PREFIX_SYNC_POLLING + 'Starting polling'],
     [c.POLLING_STOP, c.LOG_PREFIX_SYNC_POLLING + 'Stopping polling'],
     [c.SYNC_SPLITS_FETCH_RETRY, c.LOG_PREFIX_SYNC_SPLITS + 'Retrying download of feature flags #%s. Reason: %s'],
-    [c.SUBMITTERS_PUSH_FULL_QUEUE, c.LOG_PREFIX_SYNC_SUBMITTERS + 'Flushing full %s queue and
+    [c.SUBMITTERS_PUSH_FULL_QUEUE, c.LOG_PREFIX_SYNC_SUBMITTERS + 'Flushing full %s queue and resetting timer.'],
     [c.SUBMITTERS_PUSH, c.LOG_PREFIX_SYNC_SUBMITTERS + 'Pushing %s.'],
     [c.STREAMING_REFRESH_TOKEN, c.LOG_PREFIX_SYNC_STREAMING + 'Refreshing streaming token in %s seconds, and connecting streaming in %s seconds.'],
     [c.STREAMING_RECONNECT, c.LOG_PREFIX_SYNC_STREAMING + 'Attempting to reconnect streaming in %s seconds.'],
package/cjs/readiness/readinessManager.js
CHANGED
@@ -16,10 +16,9 @@ function splitsEventEmitterFactory(EventEmitter) {
     splitsEventEmitter.once(constants_1.SDK_SPLITS_CACHE_LOADED, function () { splitsEventEmitter.splitsCacheLoaded = true; });
     return splitsEventEmitter;
 }
-function segmentsEventEmitterFactory(EventEmitter) {
-    …
-    …
-    });
+function segmentsEventEmitterFactory(EventEmitter, segmentsArrived) {
+    if (segmentsArrived === void 0) { segmentsArrived = false; }
+    var segmentsEventEmitter = (0, objectAssign_1.objectAssign)(new EventEmitter(), { segmentsArrived: segmentsArrived });
     segmentsEventEmitter.once(constants_1.SDK_SEGMENTS_ARRIVED, function () { segmentsEventEmitter.segmentsArrived = true; });
     return segmentsEventEmitter;
 }
@@ -30,7 +29,7 @@ function readinessManagerFactory(EventEmitter, settings, splits) {
     if (splits === void 0) { splits = splitsEventEmitterFactory(EventEmitter); }
     var _a = settings.startup, readyTimeout = _a.readyTimeout, waitForLargeSegments = _a.waitForLargeSegments, largeSegmentsEnabled = settings.sync.largeSegmentsEnabled;
     var segments = segmentsEventEmitterFactory(EventEmitter);
-    var largeSegments = largeSegmentsEnabled
+    var largeSegments = largeSegmentsEnabled ? segmentsEventEmitterFactory(EventEmitter, !waitForLargeSegments) : undefined;
     var gate = new EventEmitter();
     // emit SDK_READY_FROM_CACHE
     var isReadyFromCache = false;
package/cjs/storages/pluggable/inMemoryWrapper.js
CHANGED
@@ -8,7 +8,7 @@ var sets_1 = require("../../utils/lang/sets");
  * The `_cache` property is the object were items are stored.
  * Intended for testing purposes.
  *
- * @param connDelay delay in millis for `connect` resolve. If not provided, `connect` resolves
+ * @param connDelay delay in millis for `connect` resolve. If not provided, `connect` resolves immediately.
  */
 function inMemoryWrapperFactory(connDelay) {
     var _cache = {};
package/cjs/sync/polling/fetchers/mySegmentsFetcher.js
CHANGED
@@ -15,7 +15,11 @@ function mySegmentsFetcherFactory(fetchMySegments) {
         // Extract segment names
         return mySegmentsPromise
             .then(function (resp) { return resp.json(); })
-            .then(function (json) {
+            .then(function (json) {
+            return json.mySegments ?
+                json.mySegments.map(function (segment) { return segment.name; }) :
+                json.myLargeSegments;
+        });
     };
 }
 exports.mySegmentsFetcherFactory = mySegmentsFetcherFactory;
package/cjs/sync/polling/pollingManagerCS.js
CHANGED
@@ -28,12 +28,12 @@ function pollingManagerCSFactory(params) {
             if (splitsHaveSegments)
                 msSyncTask.start();
             else
-                msSyncTask.stop();
+                msSyncTask.stop(); // smart pausing
             if (mlsSyncTask) {
                 if (splitsHaveLargeSegments)
                     mlsSyncTask.start();
                 else
-                    mlsSyncTask.stop();
+                    mlsSyncTask.stop(); // smart pausing
             }
         });
     }
@@ -44,37 +44,31 @@ function pollingManagerCSFactory(params) {
             mlsSyncTask && mlsSyncTask.stop();
         });
     }
-    // smart pausing
     readiness.splits.on(constants_1.SDK_SPLITS_ARRIVED, function () {
-        // smart pausing of mySegments polling
         if (splitsSyncTask.isRunning())
             startMySegmentsSyncTasks();
     });
     function add(matchingKey, readiness, storage) {
-        var msSyncTask = (0, mySegmentsSyncTask_1.mySegmentsSyncTaskFactory)(splitApi.fetchMySegments, storage.segments, function () {
-            …
-            readiness.segments.emit(constants_1.SDK_SEGMENTS_ARRIVED);
-        }, settings, matchingKey, settings.scheduler.segmentsRefreshRate);
+        var msSyncTask = (0, mySegmentsSyncTask_1.mySegmentsSyncTaskFactory)(splitApi.fetchMySegments, storage.segments, function () { if (storage.splits.usesMatcher(constants_3.IN_SEGMENT))
+            readiness.segments.emit(constants_1.SDK_SEGMENTS_ARRIVED); }, settings, matchingKey, settings.scheduler.segmentsRefreshRate, 'mySegmentsUpdater');
         var mlsSyncTask;
         if (settings.sync.largeSegmentsEnabled) {
-            mlsSyncTask = (0, mySegmentsSyncTask_1.mySegmentsSyncTaskFactory)(splitApi.fetchMyLargeSegments, storage.largeSegments, function () {
-                …
-                readiness.largeSegments.emit(constants_1.SDK_SEGMENTS_ARRIVED);
-            }, settings, matchingKey, settings.scheduler.largeSegmentsRefreshRate);
+            mlsSyncTask = (0, mySegmentsSyncTask_1.mySegmentsSyncTaskFactory)(splitApi.fetchMyLargeSegments, storage.largeSegments, function () { if (readiness.largeSegments && storage.splits.usesMatcher(constants_3.IN_LARGE_SEGMENT))
+                readiness.largeSegments.emit(constants_1.SDK_SEGMENTS_ARRIVED); }, settings, matchingKey, settings.scheduler.largeSegmentsRefreshRate, 'myLargeSegmentsUpdater');
         }
         // smart ready
         function smartReady() {
             if (!readiness.isReady()) {
-                if (!storage.splits.usesMatcher(constants_3.IN_SEGMENT))
-                    readiness.segments.emit(constants_1.SDK_SEGMENTS_ARRIVED);
                 if (readiness.largeSegments && !storage.splits.usesMatcher(constants_3.IN_LARGE_SEGMENT))
                     readiness.largeSegments.emit(constants_1.SDK_SEGMENTS_ARRIVED);
+                if (!storage.splits.usesMatcher(constants_3.IN_SEGMENT))
+                    readiness.segments.emit(constants_1.SDK_SEGMENTS_ARRIVED);
             }
         }
-        if (
-            setTimeout(smartReady, 0);
-        else
+        if (storage.splits.usesMatcher(constants_3.IN_SEGMENT) && storage.splits.usesMatcher(constants_3.IN_LARGE_SEGMENT))
             readiness.splits.once(constants_1.SDK_SPLITS_ARRIVED, smartReady);
+        else
+            setTimeout(smartReady, 0);
         mySegmentsSyncTasks[matchingKey] = { msSyncTask: msSyncTask, mlsSyncTask: mlsSyncTask };
         return {
             msSyncTask: msSyncTask,
package/cjs/sync/polling/syncTasks/mySegmentsSyncTask.js
CHANGED
@@ -7,7 +7,7 @@ var mySegmentsUpdater_1 = require("../updaters/mySegmentsUpdater");
 /**
  * Creates a sync task that periodically executes a `mySegmentsUpdater` task
  */
-function mySegmentsSyncTaskFactory(fetchMySegments, mySegmentsCache, notifyUpdate, settings, matchingKey, segmentsRefreshRate) {
-    return (0, syncTask_1.syncTaskFactory)(settings.log, (0, mySegmentsUpdater_1.mySegmentsUpdaterFactory)(settings.log, (0, mySegmentsFetcher_1.mySegmentsFetcherFactory)(fetchMySegments), mySegmentsCache, notifyUpdate, settings.startup.requestTimeoutBeforeReady, settings.startup.retriesOnFailureBeforeReady, matchingKey), segmentsRefreshRate,
+function mySegmentsSyncTaskFactory(fetchMySegments, mySegmentsCache, notifyUpdate, settings, matchingKey, segmentsRefreshRate, NAME) {
+    return (0, syncTask_1.syncTaskFactory)(settings.log, (0, mySegmentsUpdater_1.mySegmentsUpdaterFactory)(settings.log, (0, mySegmentsFetcher_1.mySegmentsFetcherFactory)(fetchMySegments), mySegmentsCache, notifyUpdate, settings.startup.requestTimeoutBeforeReady, settings.startup.retriesOnFailureBeforeReady, matchingKey), segmentsRefreshRate, NAME);
 }
 exports.mySegmentsSyncTaskFactory = mySegmentsSyncTaskFactory;
package/cjs/sync/polling/updaters/mySegmentsUpdater.js
CHANGED
@@ -3,6 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.mySegmentsUpdaterFactory = void 0;
 var timeout_1 = require("../../../utils/promise/timeout");
 var constants_1 = require("../../../logger/constants");
+var lang_1 = require("../../../utils/lang");
 /**
  * factory of MySegments updater, a task that:
  *  - fetches mySegments using `mySegmentsFetcher`
@@ -21,23 +22,22 @@ function mySegmentsUpdaterFactory(log, mySegmentsFetcher, mySegmentsCache, notif
     // @TODO if allowing pluggable storages, handle async execution
     function updateSegments(segmentsData) {
         var shouldNotifyUpdate;
-        if (
-            //
-            …
+        if ((0, lang_1.isObject)(segmentsData[0])) {
+            // Add/Delete the segment names
+            segmentsData.forEach(function (_a) {
+                var name = _a.name, add = _a.add;
+                if (mySegmentsCache.isInSegment(name) !== add) {
+                    shouldNotifyUpdate = true;
+                    if (add)
+                        mySegmentsCache.addToSegment(name);
+                    else
+                        mySegmentsCache.removeFromSegment(name);
+                }
+            });
         }
         else {
-            //
-            …
-            if (mySegmentsCache.isInSegment(name_1) !== add) {
-                shouldNotifyUpdate = true;
-                if (add)
-                    mySegmentsCache.addToSegment(name_1);
-                else
-                    mySegmentsCache.removeFromSegment(name_1);
-            }
-            else {
-                shouldNotifyUpdate = false;
-            }
+            // Reset the list of segment names
+            shouldNotifyUpdate = mySegmentsCache.resetSegments(segmentsData);
         }
         // Notify update if required
         if (shouldNotifyUpdate || readyOnAlreadyExistentState) {
@@ -78,14 +78,8 @@ function mySegmentsUpdaterFactory(log, mySegmentsFetcher, mySegmentsCache, notif
      * (3) or `undefined`, for which the updater will fetch mySegments in order to sync the storage.
      * @param {boolean | undefined} noCache true to revalidate data to fetch
      */
-    return function mySegmentsUpdater(segmentsData, noCache
-        return
-            new Promise(function (res) {
-                setTimeout(function () {
-                    _mySegmentsUpdater(0, segmentsData, noCache).then(res);
-                }, delay);
-            }) :
-            _mySegmentsUpdater(0, segmentsData, noCache);
+    return function mySegmentsUpdater(segmentsData, noCache) {
+        return _mySegmentsUpdater(0, segmentsData, noCache);
     };
 }
 exports.mySegmentsUpdaterFactory = mySegmentsUpdaterFactory;
package/cjs/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.js
CHANGED
@@ -12,6 +12,7 @@ function MySegmentsUpdateWorker(mySegmentsSyncTask, telemetryTracker, updateType
     var isHandlingEvent;
     var _segmentsData; // keeps the segmentsData (if included in notification payload) from the queued event with maximum changeNumber
     var _delay;
+    var _delayTimeoutID;
     var backoff = new Backoff_1.Backoff(__handleMySegmentsUpdateCall);
     function __handleMySegmentsUpdateCall() {
         isHandlingEvent = true;
@@ -19,7 +20,14 @@ function MySegmentsUpdateWorker(mySegmentsSyncTask, telemetryTracker, updateType
             handleNewEvent = false;
             var currentMaxChangeNumber_1 = maxChangeNumber;
             // fetch mySegments revalidating data if cached
-            …
+            var syncTask = _delay ?
+                new Promise(function (res) {
+                    _delayTimeoutID = setTimeout(function () {
+                        mySegmentsSyncTask.execute(_segmentsData, true).then(res);
+                    }, _delay);
+                }) :
+                mySegmentsSyncTask.execute(_segmentsData, true);
+            syncTask.then(function (result) {
                 if (!isHandlingEvent)
                     return; // halt if `stop` has been called
                 if (result !== false) { // Unlike `Splits|SegmentsUpdateWorker`, we cannot use `mySegmentsCache.getChangeNumber` since `/mySegments` endpoint doesn't provide this value.
@@ -58,6 +66,7 @@ function MySegmentsUpdateWorker(mySegmentsSyncTask, telemetryTracker, updateType
             backoff.reset();
         },
         stop: function () {
+            clearTimeout(_delayTimeoutID);
             isHandlingEvent = false;
             backoff.reset();
         }
package/cjs/sync/streaming/parseUtils.js
CHANGED
@@ -80,15 +80,10 @@ function isInBitmap(bitmap, hash64hex) {
 exports.isInBitmap = isInBitmap;
 /**
  * Parse feature flags notifications for instant feature flag updates
- *
- * @param {ISplitUpdateData} data
- * @returns {KeyList}
  */
 function parseFFUpdatePayload(compression, data) {
-    …
-    …
-    …
-    else
-        return JSON.parse((0, base64_1.decodeFromBase64)(data));
+    return compression > 0 ?
+        parseKeyList(data, compression, false) :
+        JSON.parse((0, base64_1.decodeFromBase64)(data));
 }
 exports.parseFFUpdatePayload = parseFFUpdatePayload;
package/cjs/sync/streaming/pushManager.js
CHANGED
@@ -21,7 +21,6 @@ var murmur3_64_1 = require("../../utils/murmur3/murmur3_64");
 var constants_3 = require("../../utils/constants");
 function getDelay(parsedData, matchingKey) {
     var interval = parsedData.i || 60000;
-    // const hashType = parsedData.h || 0;
     var seed = parsedData.s || 0;
     return (0, murmur3_1.hash)(matchingKey, seed) % interval;
 }
@@ -245,14 +244,14 @@ function pushManagerFactory(params, pollingManager) {
                 var add = added_1.has(hash64.dec) ? true : removed_1.has(hash64.dec) ? false : undefined;
                 if (add !== undefined) {
                     isLS ?
-                        workerLarge && workerLarge.put(parsedData.changeNumber, {
-                            …
-                            …
-                            …
-                        worker.put(parsedData.changeNumber, {
-                            …
-                            …
-                            …
+                        workerLarge && workerLarge.put(parsedData.changeNumber, [{
+                            name: parsedData.largeSegments[0],
+                            add: add
+                        }]) :
+                        worker.put(parsedData.changeNumber, [{
+                            name: parsedData.segmentName,
+                            add: add
+                        }]);
                 }
             });
             return;
@@ -265,16 +264,14 @@ function pushManagerFactory(params, pollingManager) {
             (0, lang_1.forOwn)(clients, function (_a) {
                 var worker = _a.worker, workerLarge = _a.workerLarge;
                 isLS ?
-                    workerLarge && parsedData.largeSegments.
-                        …
-                        name: largeSegment,
-                        add: false
-                    });
-                }) :
-                    worker.put(parsedData.changeNumber, {
-                        name: parsedData.segmentName,
+                    workerLarge && workerLarge.put(parsedData.changeNumber, parsedData.largeSegments.map(function (largeSegment) { return ({
+                        name: largeSegment,
                         add: false
-                    });
+                    }); })) :
+                    worker.put(parsedData.changeNumber, [{
+                        name: parsedData.segmentName,
+                        add: false
+                    }]);
             });
             return;
         }
package/esm/logger/messages/info.js
CHANGED
@@ -19,7 +19,7 @@ export var codesInfo = codesWarn.concat([
     [c.POLLING_START, c.LOG_PREFIX_SYNC_POLLING + 'Starting polling'],
     [c.POLLING_STOP, c.LOG_PREFIX_SYNC_POLLING + 'Stopping polling'],
     [c.SYNC_SPLITS_FETCH_RETRY, c.LOG_PREFIX_SYNC_SPLITS + 'Retrying download of feature flags #%s. Reason: %s'],
-    [c.SUBMITTERS_PUSH_FULL_QUEUE, c.LOG_PREFIX_SYNC_SUBMITTERS + 'Flushing full %s queue and
+    [c.SUBMITTERS_PUSH_FULL_QUEUE, c.LOG_PREFIX_SYNC_SUBMITTERS + 'Flushing full %s queue and resetting timer.'],
     [c.SUBMITTERS_PUSH, c.LOG_PREFIX_SYNC_SUBMITTERS + 'Pushing %s.'],
     [c.STREAMING_REFRESH_TOKEN, c.LOG_PREFIX_SYNC_STREAMING + 'Refreshing streaming token in %s seconds, and connecting streaming in %s seconds.'],
     [c.STREAMING_RECONNECT, c.LOG_PREFIX_SYNC_STREAMING + 'Attempting to reconnect streaming in %s seconds.'],
package/esm/readiness/readinessManager.js
CHANGED
@@ -13,10 +13,9 @@ function splitsEventEmitterFactory(EventEmitter) {
     splitsEventEmitter.once(SDK_SPLITS_CACHE_LOADED, function () { splitsEventEmitter.splitsCacheLoaded = true; });
     return splitsEventEmitter;
 }
-function segmentsEventEmitterFactory(EventEmitter) {
-    …
-    …
-    });
+function segmentsEventEmitterFactory(EventEmitter, segmentsArrived) {
+    if (segmentsArrived === void 0) { segmentsArrived = false; }
+    var segmentsEventEmitter = objectAssign(new EventEmitter(), { segmentsArrived: segmentsArrived });
     segmentsEventEmitter.once(SDK_SEGMENTS_ARRIVED, function () { segmentsEventEmitter.segmentsArrived = true; });
     return segmentsEventEmitter;
 }
@@ -27,7 +26,7 @@ export function readinessManagerFactory(EventEmitter, settings, splits) {
     if (splits === void 0) { splits = splitsEventEmitterFactory(EventEmitter); }
     var _a = settings.startup, readyTimeout = _a.readyTimeout, waitForLargeSegments = _a.waitForLargeSegments, largeSegmentsEnabled = settings.sync.largeSegmentsEnabled;
     var segments = segmentsEventEmitterFactory(EventEmitter);
-    var largeSegments = largeSegmentsEnabled
+    var largeSegments = largeSegmentsEnabled ? segmentsEventEmitterFactory(EventEmitter, !waitForLargeSegments) : undefined;
     var gate = new EventEmitter();
     // emit SDK_READY_FROM_CACHE
     var isReadyFromCache = false;
package/esm/storages/pluggable/inMemoryWrapper.js
CHANGED
@@ -5,7 +5,7 @@ import { setToArray, _Set } from '../../utils/lang/sets';
  * The `_cache` property is the object were items are stored.
  * Intended for testing purposes.
  *
- * @param connDelay delay in millis for `connect` resolve. If not provided, `connect` resolves
+ * @param connDelay delay in millis for `connect` resolve. If not provided, `connect` resolves immediately.
  */
 export function inMemoryWrapperFactory(connDelay) {
     var _cache = {};
package/esm/sync/polling/fetchers/mySegmentsFetcher.js
CHANGED
@@ -12,6 +12,10 @@ export function mySegmentsFetcherFactory(fetchMySegments) {
         // Extract segment names
         return mySegmentsPromise
             .then(function (resp) { return resp.json(); })
-            .then(function (json) {
+            .then(function (json) {
+            return json.mySegments ?
+                json.mySegments.map(function (segment) { return segment.name; }) :
+                json.myLargeSegments;
+        });
     };
 }
package/esm/sync/polling/pollingManagerCS.js
CHANGED
@@ -25,12 +25,12 @@ export function pollingManagerCSFactory(params) {
             if (splitsHaveSegments)
                 msSyncTask.start();
             else
-                msSyncTask.stop();
+                msSyncTask.stop(); // smart pausing
             if (mlsSyncTask) {
                 if (splitsHaveLargeSegments)
                     mlsSyncTask.start();
                 else
-                    mlsSyncTask.stop();
+                    mlsSyncTask.stop(); // smart pausing
             }
         });
     }
@@ -41,37 +41,31 @@ export function pollingManagerCSFactory(params) {
             mlsSyncTask && mlsSyncTask.stop();
         });
     }
-    // smart pausing
     readiness.splits.on(SDK_SPLITS_ARRIVED, function () {
-        // smart pausing of mySegments polling
        if (splitsSyncTask.isRunning())
            startMySegmentsSyncTasks();
    });
    function add(matchingKey, readiness, storage) {
-        var msSyncTask = mySegmentsSyncTaskFactory(splitApi.fetchMySegments, storage.segments, function () {
-            …
-            readiness.segments.emit(SDK_SEGMENTS_ARRIVED);
-        }, settings, matchingKey, settings.scheduler.segmentsRefreshRate);
+        var msSyncTask = mySegmentsSyncTaskFactory(splitApi.fetchMySegments, storage.segments, function () { if (storage.splits.usesMatcher(IN_SEGMENT))
+            readiness.segments.emit(SDK_SEGMENTS_ARRIVED); }, settings, matchingKey, settings.scheduler.segmentsRefreshRate, 'mySegmentsUpdater');
        var mlsSyncTask;
        if (settings.sync.largeSegmentsEnabled) {
-            mlsSyncTask = mySegmentsSyncTaskFactory(splitApi.fetchMyLargeSegments, storage.largeSegments, function () {
-                …
-                readiness.largeSegments.emit(SDK_SEGMENTS_ARRIVED);
-            }, settings, matchingKey, settings.scheduler.largeSegmentsRefreshRate);
+            mlsSyncTask = mySegmentsSyncTaskFactory(splitApi.fetchMyLargeSegments, storage.largeSegments, function () { if (readiness.largeSegments && storage.splits.usesMatcher(IN_LARGE_SEGMENT))
+                readiness.largeSegments.emit(SDK_SEGMENTS_ARRIVED); }, settings, matchingKey, settings.scheduler.largeSegmentsRefreshRate, 'myLargeSegmentsUpdater');
        }
        // smart ready
        function smartReady() {
            if (!readiness.isReady()) {
-                if (!storage.splits.usesMatcher(IN_SEGMENT))
-                    readiness.segments.emit(SDK_SEGMENTS_ARRIVED);
                if (readiness.largeSegments && !storage.splits.usesMatcher(IN_LARGE_SEGMENT))
                    readiness.largeSegments.emit(SDK_SEGMENTS_ARRIVED);
+                if (!storage.splits.usesMatcher(IN_SEGMENT))
+                    readiness.segments.emit(SDK_SEGMENTS_ARRIVED);
            }
        }
-        if (
-            setTimeout(smartReady, 0);
-        else
+        if (storage.splits.usesMatcher(IN_SEGMENT) && storage.splits.usesMatcher(IN_LARGE_SEGMENT))
            readiness.splits.once(SDK_SPLITS_ARRIVED, smartReady);
+        else
+            setTimeout(smartReady, 0);
        mySegmentsSyncTasks[matchingKey] = { msSyncTask: msSyncTask, mlsSyncTask: mlsSyncTask };
        return {
            msSyncTask: msSyncTask,
package/esm/sync/polling/syncTasks/mySegmentsSyncTask.js
CHANGED
@@ -4,6 +4,6 @@ import { mySegmentsUpdaterFactory } from '../updaters/mySegmentsUpdater';
 /**
  * Creates a sync task that periodically executes a `mySegmentsUpdater` task
  */
-export function mySegmentsSyncTaskFactory(fetchMySegments, mySegmentsCache, notifyUpdate, settings, matchingKey, segmentsRefreshRate) {
-    return syncTaskFactory(settings.log, mySegmentsUpdaterFactory(settings.log, mySegmentsFetcherFactory(fetchMySegments), mySegmentsCache, notifyUpdate, settings.startup.requestTimeoutBeforeReady, settings.startup.retriesOnFailureBeforeReady, matchingKey), segmentsRefreshRate,
+export function mySegmentsSyncTaskFactory(fetchMySegments, mySegmentsCache, notifyUpdate, settings, matchingKey, segmentsRefreshRate, NAME) {
+    return syncTaskFactory(settings.log, mySegmentsUpdaterFactory(settings.log, mySegmentsFetcherFactory(fetchMySegments), mySegmentsCache, notifyUpdate, settings.startup.requestTimeoutBeforeReady, settings.startup.retriesOnFailureBeforeReady, matchingKey), segmentsRefreshRate, NAME);
 }
package/esm/sync/polling/updaters/mySegmentsUpdater.js
CHANGED
@@ -1,5 +1,6 @@
 import { timeout } from '../../../utils/promise/timeout';
 import { SYNC_MYSEGMENTS_FETCH_RETRY } from '../../../logger/constants';
+import { isObject } from '../../../utils/lang';
 /**
  * factory of MySegments updater, a task that:
  *  - fetches mySegments using `mySegmentsFetcher`
@@ -18,23 +19,22 @@ export function mySegmentsUpdaterFactory(log, mySegmentsFetcher, mySegmentsCache
     // @TODO if allowing pluggable storages, handle async execution
     function updateSegments(segmentsData) {
         var shouldNotifyUpdate;
-        if (
-            //
-            …
+        if (isObject(segmentsData[0])) {
+            // Add/Delete the segment names
+            segmentsData.forEach(function (_a) {
+                var name = _a.name, add = _a.add;
+                if (mySegmentsCache.isInSegment(name) !== add) {
+                    shouldNotifyUpdate = true;
+                    if (add)
+                        mySegmentsCache.addToSegment(name);
+                    else
+                        mySegmentsCache.removeFromSegment(name);
+                }
+            });
        }
        else {
-            //
-            …
-            if (mySegmentsCache.isInSegment(name_1) !== add) {
-                shouldNotifyUpdate = true;
-                if (add)
-                    mySegmentsCache.addToSegment(name_1);
-                else
-                    mySegmentsCache.removeFromSegment(name_1);
-            }
-            else {
-                shouldNotifyUpdate = false;
-            }
+            // Reset the list of segment names
+            shouldNotifyUpdate = mySegmentsCache.resetSegments(segmentsData);
        }
        // Notify update if required
        if (shouldNotifyUpdate || readyOnAlreadyExistentState) {
@@ -75,13 +75,7 @@ export function mySegmentsUpdaterFactory(log, mySegmentsFetcher, mySegmentsCache
      * (3) or `undefined`, for which the updater will fetch mySegments in order to sync the storage.
      * @param {boolean | undefined} noCache true to revalidate data to fetch
      */
-    return function mySegmentsUpdater(segmentsData, noCache
-        return
-            new Promise(function (res) {
-                setTimeout(function () {
-                    _mySegmentsUpdater(0, segmentsData, noCache).then(res);
-                }, delay);
-            }) :
-            _mySegmentsUpdater(0, segmentsData, noCache);
+    return function mySegmentsUpdater(segmentsData, noCache) {
+        return _mySegmentsUpdater(0, segmentsData, noCache);
    };
 }
package/esm/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.js
CHANGED
@@ -9,6 +9,7 @@ export function MySegmentsUpdateWorker(mySegmentsSyncTask, telemetryTracker, upd
     var isHandlingEvent;
     var _segmentsData; // keeps the segmentsData (if included in notification payload) from the queued event with maximum changeNumber
     var _delay;
+    var _delayTimeoutID;
     var backoff = new Backoff(__handleMySegmentsUpdateCall);
     function __handleMySegmentsUpdateCall() {
         isHandlingEvent = true;
@@ -16,7 +17,14 @@ export function MySegmentsUpdateWorker(mySegmentsSyncTask, telemetryTracker, upd
             handleNewEvent = false;
             var currentMaxChangeNumber_1 = maxChangeNumber;
             // fetch mySegments revalidating data if cached
-            …
+            var syncTask = _delay ?
+                new Promise(function (res) {
+                    _delayTimeoutID = setTimeout(function () {
+                        mySegmentsSyncTask.execute(_segmentsData, true).then(res);
+                    }, _delay);
+                }) :
+                mySegmentsSyncTask.execute(_segmentsData, true);
+            syncTask.then(function (result) {
                 if (!isHandlingEvent)
                     return; // halt if `stop` has been called
                 if (result !== false) { // Unlike `Splits|SegmentsUpdateWorker`, we cannot use `mySegmentsCache.getChangeNumber` since `/mySegments` endpoint doesn't provide this value.
@@ -55,6 +63,7 @@ export function MySegmentsUpdateWorker(mySegmentsSyncTask, telemetryTracker, upd
             backoff.reset();
         },
         stop: function () {
+            clearTimeout(_delayTimeoutID);
             isHandlingEvent = false;
             backoff.reset();
         }
package/esm/sync/streaming/parseUtils.js
CHANGED
@@ -74,14 +74,9 @@ export function isInBitmap(bitmap, hash64hex) {
 }
 /**
  * Parse feature flags notifications for instant feature flag updates
- *
- * @param {ISplitUpdateData} data
- * @returns {KeyList}
  */
 export function parseFFUpdatePayload(compression, data) {
-    …
-    …
-    …
-    else
-        return JSON.parse(decodeFromBase64(data));
+    return compression > 0 ?
+        parseKeyList(data, compression, false) :
+        JSON.parse(decodeFromBase64(data));
 }
package/esm/sync/streaming/pushManager.js
CHANGED
@@ -18,7 +18,6 @@ import { hash64 } from '../../utils/murmur3/murmur3_64';
 import { TOKEN_REFRESH, AUTH_REJECTION, MY_LARGE_SEGMENT, MY_SEGMENT } from '../../utils/constants';
 export function getDelay(parsedData, matchingKey) {
     var interval = parsedData.i || 60000;
-    // const hashType = parsedData.h || 0;
     var seed = parsedData.s || 0;
     return hash(matchingKey, seed) % interval;
 }
@@ -241,14 +240,14 @@ export function pushManagerFactory(params, pollingManager) {
                 var add = added_1.has(hash64.dec) ? true : removed_1.has(hash64.dec) ? false : undefined;
                 if (add !== undefined) {
                     isLS ?
-                        workerLarge && workerLarge.put(parsedData.changeNumber, {
-                            …
-                            …
-                            …
-                        worker.put(parsedData.changeNumber, {
-                            …
-                            …
-                            …
+                        workerLarge && workerLarge.put(parsedData.changeNumber, [{
+                            name: parsedData.largeSegments[0],
+                            add: add
+                        }]) :
+                        worker.put(parsedData.changeNumber, [{
+                            name: parsedData.segmentName,
+                            add: add
+                        }]);
                 }
             });
             return;
@@ -261,16 +260,14 @@ export function pushManagerFactory(params, pollingManager) {
             forOwn(clients, function (_a) {
                 var worker = _a.worker, workerLarge = _a.workerLarge;
                 isLS ?
-                    workerLarge && parsedData.largeSegments.
-                        …
-                        name: largeSegment,
-                        add: false
-                    });
-                }) :
-                    worker.put(parsedData.changeNumber, {
-                        name: parsedData.segmentName,
+                    workerLarge && workerLarge.put(parsedData.changeNumber, parsedData.largeSegments.map(largeSegment => ({
+                        name: largeSegment,
                         add: false
-                    });
+                    }); })) :
+                    worker.put(parsedData.changeNumber, [{
+                        name: parsedData.segmentName,
+                        add: false
+                    }]);
            });
            return;
        }
package/package.json
CHANGED
package/src/dtos/types.ts
CHANGED
@@ -224,14 +224,18 @@ export interface ISegmentChangesResponse {
   till: number
 }

-export interface IMySegmentsResponseItem {
-  id: string,
-  name: string
-}
-
 /** Interface of the parsed JSON response of `/mySegments/{userKey}` */
 export interface IMySegmentsResponse {
-  mySegments:
+  mySegments: {
+    id: string,
+    name: string
+  }[]
+}
+
+/** Interface of the parsed JSON response of `/myLargeSegments/{userKey}` */
+export interface IMyLargeSegmentsResponse {
+  myLargeSegments: string[],
+  changeNumber: number
 }

 /** Metadata internal type for storages */
package/src/logger/messages/info.ts
CHANGED
@@ -22,7 +22,7 @@ export const codesInfo: [number, string][] = codesWarn.concat([
   [c.POLLING_START, c.LOG_PREFIX_SYNC_POLLING + 'Starting polling'],
   [c.POLLING_STOP, c.LOG_PREFIX_SYNC_POLLING + 'Stopping polling'],
   [c.SYNC_SPLITS_FETCH_RETRY, c.LOG_PREFIX_SYNC_SPLITS + 'Retrying download of feature flags #%s. Reason: %s'],
-  [c.SUBMITTERS_PUSH_FULL_QUEUE, c.LOG_PREFIX_SYNC_SUBMITTERS + 'Flushing full %s queue and
+  [c.SUBMITTERS_PUSH_FULL_QUEUE, c.LOG_PREFIX_SYNC_SUBMITTERS + 'Flushing full %s queue and resetting timer.'],
   [c.SUBMITTERS_PUSH, c.LOG_PREFIX_SYNC_SUBMITTERS + 'Pushing %s.'],
   [c.STREAMING_REFRESH_TOKEN, c.LOG_PREFIX_SYNC_STREAMING + 'Refreshing streaming token in %s seconds, and connecting streaming in %s seconds.'],
   [c.STREAMING_RECONNECT, c.LOG_PREFIX_SYNC_STREAMING + 'Attempting to reconnect streaming in %s seconds.'],
package/src/readiness/readinessManager.ts
CHANGED
@@ -18,10 +18,8 @@ function splitsEventEmitterFactory(EventEmitter: new () => IEventEmitter): ISpli
   return splitsEventEmitter;
 }

-function segmentsEventEmitterFactory(EventEmitter: new () => IEventEmitter): ISegmentsEventEmitter {
-  const segmentsEventEmitter = objectAssign(new EventEmitter(), {
-    segmentsArrived: false
-  });
+function segmentsEventEmitterFactory(EventEmitter: new () => IEventEmitter, segmentsArrived = false): ISegmentsEventEmitter {
+  const segmentsEventEmitter = objectAssign(new EventEmitter(), { segmentsArrived });

   segmentsEventEmitter.once(SDK_SEGMENTS_ARRIVED, () => { segmentsEventEmitter.segmentsArrived = true; });

@@ -39,7 +37,7 @@ export function readinessManagerFactory(
   const { startup: { readyTimeout, waitForLargeSegments }, sync: { largeSegmentsEnabled } } = settings;

   const segments: ISegmentsEventEmitter = segmentsEventEmitterFactory(EventEmitter);
-  const largeSegments = largeSegmentsEnabled
+  const largeSegments = largeSegmentsEnabled ? segmentsEventEmitterFactory(EventEmitter, !waitForLargeSegments) : undefined;
   const gate: IReadinessEventEmitter = new EventEmitter();

   // emit SDK_READY_FROM_CACHE
package/src/storages/pluggable/inMemoryWrapper.ts
CHANGED
@@ -7,7 +7,7 @@ import { ISet, setToArray, _Set } from '../../utils/lang/sets';
  * The `_cache` property is the object were items are stored.
  * Intended for testing purposes.
  *
- * @param connDelay delay in millis for `connect` resolve. If not provided, `connect` resolves
+ * @param connDelay delay in millis for `connect` resolve. If not provided, `connect` resolves immediately.
  */
 export function inMemoryWrapperFactory(connDelay?: number): IPluggableStorageWrapper & { _cache: Record<string, string | string[] | ISet<string>>, _setConnDelay(connDelay: number): void } {

package/src/storages/types.ts
CHANGED
@@ -204,7 +204,7 @@ export interface ISplitsCacheBase {
   getSplitNames(): MaybeThenable<string[]>,
   // should never reject or throw an exception. Instead return true by default, asssuming the TT might exist.
   trafficTypeExists(trafficType: string): MaybeThenable<boolean>,
-  // only for Client-Side
+  // only for Client-Side. Returns true if the storage is not synchronized yet (getChangeNumber() === 1) or contains a FF using the given matcher
   usesMatcher(matcherType: string): MaybeThenable<boolean>,
   clear(): MaybeThenable<boolean | void>,
   // should never reject or throw an exception. Instead return false by default, to avoid emitting SDK_READY_FROM_CACHE.
package/src/sync/polling/fetchers/mySegmentsFetcher.ts
CHANGED
@@ -1,5 +1,5 @@
 import { IFetchMySegments, IResponse } from '../../../services/types';
-import {
+import { IMySegmentsResponse, IMyLargeSegmentsResponse } from '../../../dtos/types';
 import { IMySegmentsFetcher } from './types';

 /**
@@ -21,7 +21,11 @@ export function mySegmentsFetcherFactory(fetchMySegments: IFetchMySegments): IMy
     // Extract segment names
     return mySegmentsPromise
       .then(resp => resp.json())
-      .then(json
+      .then((json: IMySegmentsResponse | IMyLargeSegmentsResponse) => {
+        return (json as IMySegmentsResponse).mySegments ?
+          (json as IMySegmentsResponse).mySegments.map((segment) => segment.name) :
+          (json as IMyLargeSegmentsResponse).myLargeSegments;
+      });
   };

 }
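The fetcher change above lets a single code path serve both endpoints: `/mySegments/{userKey}` returns `{ mySegments: [{ id, name }] }` while `/myLargeSegments/{userKey}` returns `{ myLargeSegments: string[] }`, and either shape is normalized to a plain list of segment names. A minimal TypeScript sketch of that normalization follows; the response type aliases are local copies of the DTOs above and the sample payload values are invented, not real service responses.

// Sketch only: mirrors the normalization added in mySegmentsFetcher.ts.
type MySegmentsResponse = { mySegments: { id: string, name: string }[] };
type MyLargeSegmentsResponse = { myLargeSegments: string[], changeNumber: number };

function extractSegmentNames(json: MySegmentsResponse | MyLargeSegmentsResponse): string[] {
  // Regular segments come as objects and need their names pulled out;
  // large segments already arrive as a list of names.
  return 'mySegments' in json ?
    json.mySegments.map((segment) => segment.name) :
    json.myLargeSegments;
}

// Both shapes normalize to the same list of names (sample values only):
extractSegmentNames({ mySegments: [{ id: '1', name: 'beta_testers' }] });     // ['beta_testers']
extractSegmentNames({ myLargeSegments: ['heavy_users'], changeNumber: 100 }); // ['heavy_users']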
package/src/sync/polling/pollingManagerCS.ts
CHANGED
@@ -35,11 +35,11 @@ export function pollingManagerCSFactory(

     forOwn(mySegmentsSyncTasks, ({ msSyncTask, mlsSyncTask }) => {
       if (splitsHaveSegments) msSyncTask.start();
-      else msSyncTask.stop();
+      else msSyncTask.stop(); // smart pausing

       if (mlsSyncTask) {
         if (splitsHaveLargeSegments) mlsSyncTask.start();
-        else mlsSyncTask.stop();
+        else mlsSyncTask.stop(); // smart pausing
       }
     });
   }
@@ -51,9 +51,7 @@ export function pollingManagerCSFactory(
     });
   }

-  // smart pausing
   readiness.splits.on(SDK_SPLITS_ARRIVED, () => {
-    // smart pausing of mySegments polling
     if (splitsSyncTask.isRunning()) startMySegmentsSyncTasks();
   });

@@ -61,12 +59,11 @@ export function pollingManagerCSFactory(
     const msSyncTask = mySegmentsSyncTaskFactory(
       splitApi.fetchMySegments,
       storage.segments,
-      () => {
-        if (storage.splits.usesMatcher(IN_SEGMENT)) readiness.segments.emit(SDK_SEGMENTS_ARRIVED);
-      },
+      () => { if (storage.splits.usesMatcher(IN_SEGMENT)) readiness.segments.emit(SDK_SEGMENTS_ARRIVED); },
       settings,
       matchingKey,
-      settings.scheduler.segmentsRefreshRate
+      settings.scheduler.segmentsRefreshRate,
+      'mySegmentsUpdater'
     );

     let mlsSyncTask;
@@ -74,24 +71,24 @@ export function pollingManagerCSFactory(
       mlsSyncTask = mySegmentsSyncTaskFactory(
         splitApi.fetchMyLargeSegments,
         storage.largeSegments!,
-        () => {
-          if (readiness.largeSegments && storage.splits.usesMatcher(IN_LARGE_SEGMENT)) readiness.largeSegments.emit(SDK_SEGMENTS_ARRIVED);
-        },
+        () => { if (readiness.largeSegments && storage.splits.usesMatcher(IN_LARGE_SEGMENT)) readiness.largeSegments.emit(SDK_SEGMENTS_ARRIVED); },
         settings,
         matchingKey,
-        settings.scheduler.largeSegmentsRefreshRate
+        settings.scheduler.largeSegmentsRefreshRate,
+        'myLargeSegmentsUpdater'
       );
     }

     // smart ready
     function smartReady() {
       if (!readiness.isReady()) {
-        if (!storage.splits.usesMatcher(IN_SEGMENT)) readiness.segments.emit(SDK_SEGMENTS_ARRIVED);
         if (readiness.largeSegments && !storage.splits.usesMatcher(IN_LARGE_SEGMENT)) readiness.largeSegments.emit(SDK_SEGMENTS_ARRIVED);
+        if (!storage.splits.usesMatcher(IN_SEGMENT)) readiness.segments.emit(SDK_SEGMENTS_ARRIVED);
       }
     }
-    …
-    …
+
+    if (storage.splits.usesMatcher(IN_SEGMENT) && storage.splits.usesMatcher(IN_LARGE_SEGMENT)) readiness.splits.once(SDK_SPLITS_ARRIVED, smartReady);
+    else setTimeout(smartReady, 0);

     mySegmentsSyncTasks[matchingKey] = { msSyncTask: msSyncTask, mlsSyncTask: mlsSyncTask };

package/src/sync/polling/syncTasks/mySegmentsSyncTask.ts
CHANGED
@@ -15,7 +15,8 @@ export function mySegmentsSyncTaskFactory(
   notifyUpdate: () => void,
   settings: ISettings,
   matchingKey: string,
-  segmentsRefreshRate: number
+  segmentsRefreshRate: number,
+  NAME: string
 ): IMySegmentsSyncTask {
   return syncTaskFactory(
     settings.log,
@@ -29,6 +30,6 @@ export function mySegmentsSyncTaskFactory(
       matchingKey
     ),
     segmentsRefreshRate,
-    …
+    NAME,
   );
 }
package/src/sync/polling/types.ts
CHANGED
@@ -12,9 +12,9 @@ export type MySegmentsData = string[] | {
   name: string,
   /* action: `true` for add, and `false` for delete */
   add: boolean
-}
+}[]

-export interface IMySegmentsSyncTask extends ISyncTask<[segmentsData?: MySegmentsData, noCache?: boolean
+export interface IMySegmentsSyncTask extends ISyncTask<[segmentsData?: MySegmentsData, noCache?: boolean], boolean> { }

 export interface IPollingManager extends ITask {
   syncAll(): Promise<any>
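With the `MySegmentsData` change above, an instant update is now an array of add/delete instructions rather than a single object, while a plain string array still denotes a full segment-list reset. A small illustrative sketch of the two accepted shapes; the segment names are invented.

// Sketch only: the two payload shapes accepted by MySegmentsData after this change.
type MySegmentsData = string[] | {
  name: string,
  /* action: `true` for add, and `false` for delete */
  add: boolean
}[];

// Instant update carried by a streaming notification: add one segment, remove another.
const instantUpdate: MySegmentsData = [
  { name: 'beta_testers', add: true },
  { name: 'legacy_users', add: false },
];

// Full reset produced by a polling fetch: the complete list of segment names.
const fullReset: MySegmentsData = ['beta_testers', 'early_adopters'];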
package/src/sync/polling/updaters/mySegmentsUpdater.ts
CHANGED
@@ -4,6 +4,7 @@ import { timeout } from '../../../utils/promise/timeout';
 import { ILogger } from '../../../logger/types';
 import { SYNC_MYSEGMENTS_FETCH_RETRY } from '../../../logger/constants';
 import { MySegmentsData } from '../types';
+import { isObject } from '../../../utils/lang';

 type IMySegmentsUpdater = (segmentList?: MySegmentsData, noCache?: boolean) => Promise<boolean>

@@ -36,19 +37,18 @@ export function mySegmentsUpdaterFactory(
   function updateSegments(segmentsData: MySegmentsData) {

     let shouldNotifyUpdate;
-    if (
-      //
-      …
+    if (isObject(segmentsData[0])) {
+      // Add/Delete the segment names
+      (segmentsData as { name: string, add: boolean }[]).forEach(({ name, add }) => {
+        if (mySegmentsCache.isInSegment(name) !== add) {
+          shouldNotifyUpdate = true;
+          if (add) mySegmentsCache.addToSegment(name);
+          else mySegmentsCache.removeFromSegment(name);
+        }
+      });
     } else {
-      //
-      …
-      if (mySegmentsCache.isInSegment(name) !== add) {
-        shouldNotifyUpdate = true;
-        if (add) mySegmentsCache.addToSegment(name);
-        else mySegmentsCache.removeFromSegment(name);
-      } else {
-        shouldNotifyUpdate = false;
-      }
+      // Reset the list of segment names
+      shouldNotifyUpdate = mySegmentsCache.resetSegments(segmentsData as string[]);
     }

     // Notify update if required
@@ -94,14 +94,8 @@ export function mySegmentsUpdaterFactory(
    * (3) or `undefined`, for which the updater will fetch mySegments in order to sync the storage.
    * @param {boolean | undefined} noCache true to revalidate data to fetch
    */
-  return function mySegmentsUpdater(segmentsData?: MySegmentsData, noCache?: boolean
-    return
-      new Promise(res => {
-        setTimeout(() => {
-          _mySegmentsUpdater(0, segmentsData, noCache).then(res);
-        }, delay);
-      }) :
-      _mySegmentsUpdater(0, segmentsData, noCache);
+  return function mySegmentsUpdater(segmentsData?: MySegmentsData, noCache?: boolean) {
+    return _mySegmentsUpdater(0, segmentsData, noCache);
   };

 }
package/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts
CHANGED
@@ -7,7 +7,7 @@ import { UpdatesFromSSEEnum } from '../../submitters/types';
 /**
  * MySegmentsUpdateWorker factory
  */
-export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask, telemetryTracker: ITelemetryTracker, updateType: UpdatesFromSSEEnum): IUpdateWorker {
+export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask, telemetryTracker: ITelemetryTracker, updateType: UpdatesFromSSEEnum): IUpdateWorker<[changeNumber: number, segmentsData?: MySegmentsData, delay?: number]> {

   let maxChangeNumber = 0; // keeps the maximum changeNumber among queued events
   let currentChangeNumber = -1;
@@ -15,6 +15,7 @@ export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask,
   let isHandlingEvent: boolean;
   let _segmentsData: MySegmentsData | undefined; // keeps the segmentsData (if included in notification payload) from the queued event with maximum changeNumber
   let _delay: undefined | number;
+  let _delayTimeoutID: undefined | number;
   const backoff = new Backoff(__handleMySegmentsUpdateCall);

   function __handleMySegmentsUpdateCall() {
@@ -24,7 +25,15 @@ export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask,
       const currentMaxChangeNumber = maxChangeNumber;

       // fetch mySegments revalidating data if cached
-      …
+      const syncTask = _delay ?
+        new Promise(res => {
+          _delayTimeoutID = setTimeout(() => {
+            mySegmentsSyncTask.execute(_segmentsData, true).then(res);
+          }, _delay);
+        }) :
+        mySegmentsSyncTask.execute(_segmentsData, true);
+
+      syncTask.then((result) => {
         if (!isHandlingEvent) return; // halt if `stop` has been called
         if (result !== false) {// Unlike `Splits|SegmentsUpdateWorker`, we cannot use `mySegmentsCache.getChangeNumber` since `/mySegments` endpoint doesn't provide this value.
           if (_segmentsData) telemetryTracker.trackUpdatesFromSSE(updateType);
@@ -61,6 +70,7 @@ export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask,
     },

     stop() {
+      clearTimeout(_delayTimeoutID);
       isHandlingEvent = false;
       backoff.reset();
     }
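The worker change above moves the optional fetch delay into `MySegmentsUpdateWorker` itself: when `_delay` is set, the sync task runs inside a `setTimeout` whose id is kept so `stop()` can cancel a pending fetch. A stripped-down sketch of that delayed, cancellable execution pattern; the `execute` callback here is a stand-in for `mySegmentsSyncTask.execute`, not the SDK's actual wiring.

// Sketch only: the scheduling pattern introduced in MySegmentsUpdateWorker.ts.
let delayTimeoutID: ReturnType<typeof setTimeout> | undefined;

function run(execute: () => Promise<boolean>, delay?: number): Promise<boolean> {
  // With a delay, schedule the fetch and remember the timeout id; otherwise run immediately.
  return delay ?
    new Promise<boolean>((res) => {
      delayTimeoutID = setTimeout(() => { execute().then(res); }, delay);
    }) :
    execute();
}

function stop() {
  clearTimeout(delayTimeoutID); // no-op when no fetch is pending
}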
package/src/sync/streaming/UpdateWorkers/SegmentsUpdateWorker.ts
CHANGED
@@ -9,7 +9,7 @@ import { IUpdateWorker } from './types';
 /**
  * SegmentsUpdateWorker factory
  */
-export function SegmentsUpdateWorker(log: ILogger, segmentsSyncTask: ISegmentsSyncTask, segmentsCache: ISegmentsCacheSync): IUpdateWorker {
+export function SegmentsUpdateWorker(log: ILogger, segmentsSyncTask: ISegmentsSyncTask, segmentsCache: ISegmentsCacheSync): IUpdateWorker<[ISegmentUpdateData]> {

   // Handles retries with CDN bypass per segment name
   function SegmentUpdateWorker(segment: string) {
package/src/sync/streaming/UpdateWorkers/SplitsUpdateWorker.ts
CHANGED
@@ -14,7 +14,7 @@ import { IUpdateWorker } from './types';
 /**
  * SplitsUpdateWorker factory
  */
-export function SplitsUpdateWorker(log: ILogger, splitsCache: ISplitsCacheSync, splitsSyncTask: ISplitsSyncTask, splitsEventEmitter: ISplitsEventEmitter, telemetryTracker: ITelemetryTracker, segmentsSyncTask?: ISegmentsSyncTask): IUpdateWorker & { killSplit(event: ISplitKillData): void } {
+export function SplitsUpdateWorker(log: ILogger, splitsCache: ISplitsCacheSync, splitsSyncTask: ISplitsSyncTask, splitsEventEmitter: ISplitsEventEmitter, telemetryTracker: ITelemetryTracker, segmentsSyncTask?: ISegmentsSyncTask): IUpdateWorker<[updateData: ISplitUpdateData, payload?: ISplit]> & { killSplit(event: ISplitKillData): void } {

   let maxChangeNumber = 0;
   let handleNewEvent = false;
package/src/sync/streaming/parseUtils.ts
CHANGED
@@ -1,6 +1,7 @@
 import { algorithms } from '../../utils/decompress';
 import { decodeFromBase64 } from '../../utils/base64';
 import { Compression, KeyList } from './SSEHandler/types';
+import { ISplit } from '../../dtos/types';

 const GZIP = 1;
 const ZLIB = 2;
@@ -42,7 +43,7 @@ function decompress(data: string, compression: Compression) {
  * @returns {{a?: string[], r?: string[] }}
  * @throws if data string cannot be decoded, decompressed or parsed
  */
-export function parseKeyList(data: string, compression: Compression, avoidPrecisionLoss
+export function parseKeyList(data: string, compression: Compression, avoidPrecisionLoss = true): KeyList {
   const binKeyList = decompress(data, compression);
   let strKeyList = Uint8ArrayToString(binKeyList);
   // replace numbers to strings, to avoid losing precision
@@ -80,14 +81,9 @@ export function isInBitmap(bitmap: Uint8Array, hash64hex: string) {

 /**
  * Parse feature flags notifications for instant feature flag updates
- *
- * @param {ISplitUpdateData} data
- * @returns {KeyList}
  */
-export function parseFFUpdatePayload(compression: Compression, data: string):
-  …
-  …
-  …
-  else
-    return JSON.parse(decodeFromBase64(data));
+export function parseFFUpdatePayload(compression: Compression, data: string): ISplit | undefined {
+  return compression > 0 ?
+    parseKeyList(data, compression, false) :
+    JSON.parse(decodeFromBase64(data));
 }
package/src/sync/streaming/pushManager.ts
CHANGED
@@ -21,11 +21,9 @@ import { Hash64, hash64 } from '../../utils/murmur3/murmur3_64';
 import { IAuthTokenPushEnabled } from './AuthClient/types';
 import { TOKEN_REFRESH, AUTH_REJECTION, MY_LARGE_SEGMENT, MY_SEGMENT } from '../../utils/constants';
 import { ISdkFactoryContextSync } from '../../sdkFactory/types';
-import { IUpdateWorker } from './UpdateWorkers/types';

-export function getDelay(parsedData: IMyLargeSegmentsUpdateData, matchingKey: string) {
+export function getDelay(parsedData: Pick<IMyLargeSegmentsUpdateData, 'i' | 'h' | 's'>, matchingKey: string) {
   const interval = parsedData.i || 60000;
-  // const hashType = parsedData.h || 0;
   const seed = parsedData.s || 0;

   return hash(matchingKey, seed) % interval;
@@ -73,7 +71,7 @@ export function pushManagerFactory(
   const userKeyHashes: Record<string, string> = {};
   // [Only for client-side] map of user keys to their corresponding hash64 and MySegmentsUpdateWorkers.
   // Hash64 is used to process MY_SEGMENTS_UPDATE_V2 events and dispatch actions to the corresponding MySegmentsUpdateWorker.
-  const clients: Record<string, { hash64: Hash64, worker:
+  const clients: Record<string, { hash64: Hash64, worker: ReturnType<typeof MySegmentsUpdateWorker>, workerLarge?: ReturnType<typeof MySegmentsUpdateWorker> }> = {};

   // [Only for client-side] variable to flag that a new client was added. It is needed to reconnect streaming.
   let connectForNewClient = false;
@@ -285,14 +283,14 @@ export function pushManagerFactory(
             const add = added.has(hash64.dec) ? true : removed.has(hash64.dec) ? false : undefined;
             if (add !== undefined) {
               isLS ?
-                workerLarge && workerLarge.put(parsedData.changeNumber, {
+                workerLarge && workerLarge.put(parsedData.changeNumber, [{
                   name: parsedData.largeSegments[0],
                   add
-                }) :
-                worker.put(parsedData.changeNumber, {
+                }]) :
+                worker.put(parsedData.changeNumber, [{
                   name: parsedData.segmentName,
                   add
-                });
+                }]);
             }
           });
           return;
@@ -305,16 +303,14 @@ export function pushManagerFactory(

           forOwn(clients, ({ worker, workerLarge }) => {
             isLS ?
-              workerLarge && parsedData.largeSegments.
-                …
-                …
-                …
-                …
-              }) :
-              worker.put(parsedData.changeNumber, {
+              workerLarge && workerLarge.put(parsedData.changeNumber, parsedData.largeSegments.map(largeSegment => ({
+                name: largeSegment,
+                add: false
+              }))) :
+              worker.put(parsedData.changeNumber, [{
                 name: parsedData.segmentName,
                 add: false
-              });
+              }]);
           });
           return;
         }
package/types/dtos/types.d.ts
CHANGED
@@ -177,13 +177,17 @@ export interface ISegmentChangesResponse {
     since: number;
     till: number;
 }
-export interface IMySegmentsResponseItem {
-    id: string;
-    name: string;
-}
 /** Interface of the parsed JSON response of `/mySegments/{userKey}` */
 export interface IMySegmentsResponse {
-    mySegments:
+    mySegments: {
+        id: string;
+        name: string;
+    }[];
+}
+/** Interface of the parsed JSON response of `/myLargeSegments/{userKey}` */
+export interface IMyLargeSegmentsResponse {
+    myLargeSegments: string[];
+    changeNumber: number;
 }
 /** Metadata internal type for storages */
 export interface IMetadata {
package/types/storages/pluggable/inMemoryWrapper.d.ts
CHANGED
@@ -5,7 +5,7 @@ import { ISet } from '../../utils/lang/sets';
  * The `_cache` property is the object were items are stored.
  * Intended for testing purposes.
  *
- * @param connDelay delay in millis for `connect` resolve. If not provided, `connect` resolves
+ * @param connDelay delay in millis for `connect` resolve. If not provided, `connect` resolves immediately.
  */
 export declare function inMemoryWrapperFactory(connDelay?: number): IPluggableStorageWrapper & {
     _cache: Record<string, string | string[] | ISet<string>>;
package/types/sync/polling/syncTasks/mySegmentsSyncTask.d.ts
CHANGED
@@ -5,4 +5,4 @@ import { ISettings } from '../../../types';
 /**
  * Creates a sync task that periodically executes a `mySegmentsUpdater` task
  */
-export declare function mySegmentsSyncTaskFactory(fetchMySegments: IFetchMySegments, mySegmentsCache: ISegmentsCacheSync, notifyUpdate: () => void, settings: ISettings, matchingKey: string, segmentsRefreshRate: number): IMySegmentsSyncTask;
+export declare function mySegmentsSyncTaskFactory(fetchMySegments: IFetchMySegments, mySegmentsCache: ISegmentsCacheSync, notifyUpdate: () => void, settings: ISettings, matchingKey: string, segmentsRefreshRate: number, NAME: string): IMySegmentsSyncTask;
package/types/sync/polling/types.d.ts
CHANGED
@@ -12,8 +12,8 @@ export interface ISegmentsSyncTask extends ISyncTask<[fetchOnlyNew?: boolean, se
 export declare type MySegmentsData = string[] | {
     name: string;
     add: boolean;
-};
-export interface IMySegmentsSyncTask extends ISyncTask<[segmentsData?: MySegmentsData, noCache?: boolean
+}[];
+export interface IMySegmentsSyncTask extends ISyncTask<[segmentsData?: MySegmentsData, noCache?: boolean], boolean> {
 }
 export interface IPollingManager extends ITask {
     syncAll(): Promise<any>;
package/types/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.d.ts
CHANGED
@@ -1,8 +1,8 @@
-import { IMySegmentsSyncTask } from '../../polling/types';
+import { IMySegmentsSyncTask, MySegmentsData } from '../../polling/types';
 import { IUpdateWorker } from './types';
 import { ITelemetryTracker } from '../../../trackers/types';
 import { UpdatesFromSSEEnum } from '../../submitters/types';
 /**
  * MySegmentsUpdateWorker factory
  */
-export declare function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask, telemetryTracker: ITelemetryTracker, updateType: UpdatesFromSSEEnum): IUpdateWorker
+export declare function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask, telemetryTracker: ITelemetryTracker, updateType: UpdatesFromSSEEnum): IUpdateWorker<[changeNumber: number, segmentsData?: MySegmentsData, delay?: number]>;
package/types/sync/streaming/UpdateWorkers/SegmentsUpdateWorker.d.ts
CHANGED
@@ -1,8 +1,9 @@
 import { ILogger } from '../../../logger/types';
 import { ISegmentsCacheSync } from '../../../storages/types';
 import { ISegmentsSyncTask } from '../../polling/types';
+import { ISegmentUpdateData } from '../SSEHandler/types';
 import { IUpdateWorker } from './types';
 /**
  * SegmentsUpdateWorker factory
  */
-export declare function SegmentsUpdateWorker(log: ILogger, segmentsSyncTask: ISegmentsSyncTask, segmentsCache: ISegmentsCacheSync): IUpdateWorker
+export declare function SegmentsUpdateWorker(log: ILogger, segmentsSyncTask: ISegmentsSyncTask, segmentsCache: ISegmentsCacheSync): IUpdateWorker<[ISegmentUpdateData]>;
package/types/sync/streaming/UpdateWorkers/SplitsUpdateWorker.d.ts
CHANGED
@@ -1,13 +1,14 @@
+import { ISplit } from '../../../dtos/types';
 import { ILogger } from '../../../logger/types';
 import { ISplitsEventEmitter } from '../../../readiness/types';
 import { ISplitsCacheSync } from '../../../storages/types';
 import { ITelemetryTracker } from '../../../trackers/types';
 import { ISegmentsSyncTask, ISplitsSyncTask } from '../../polling/types';
-import { ISplitKillData } from '../SSEHandler/types';
+import { ISplitKillData, ISplitUpdateData } from '../SSEHandler/types';
 import { IUpdateWorker } from './types';
 /**
  * SplitsUpdateWorker factory
  */
-export declare function SplitsUpdateWorker(log: ILogger, splitsCache: ISplitsCacheSync, splitsSyncTask: ISplitsSyncTask, splitsEventEmitter: ISplitsEventEmitter, telemetryTracker: ITelemetryTracker, segmentsSyncTask?: ISegmentsSyncTask): IUpdateWorker & {
+export declare function SplitsUpdateWorker(log: ILogger, splitsCache: ISplitsCacheSync, splitsSyncTask: ISplitsSyncTask, splitsEventEmitter: ISplitsEventEmitter, telemetryTracker: ITelemetryTracker, segmentsSyncTask?: ISegmentsSyncTask): IUpdateWorker<[updateData: ISplitUpdateData, payload?: ISplit]> & {
     killSplit(event: ISplitKillData): void;
 };
package/types/sync/streaming/parseUtils.d.ts
CHANGED
@@ -1,4 +1,5 @@
 import { Compression, KeyList } from './SSEHandler/types';
+import { ISplit } from '../../dtos/types';
 /**
  * Decode, decompress and parse the provided 'data' into a KeyList object
  *
@@ -28,8 +29,5 @@ export declare function parseBitmap(data: string, compression: Compression): Uin
 export declare function isInBitmap(bitmap: Uint8Array, hash64hex: string): boolean;
 /**
  * Parse feature flags notifications for instant feature flag updates
- *
- * @param {ISplitUpdateData} data
- * @returns {KeyList}
  */
-export declare function parseFFUpdatePayload(compression: Compression, data: string):
+export declare function parseFFUpdatePayload(compression: Compression, data: string): ISplit | undefined;
package/types/sync/streaming/pushManager.d.ts
CHANGED
@@ -2,7 +2,7 @@ import { IPushManager } from './types';
 import { IPollingManager } from '../polling/types';
 import { IMyLargeSegmentsUpdateData } from './SSEHandler/types';
 import { ISdkFactoryContextSync } from '../../sdkFactory/types';
-export declare function getDelay(parsedData: IMyLargeSegmentsUpdateData, matchingKey: string): number;
+export declare function getDelay(parsedData: Pick<IMyLargeSegmentsUpdateData, 'i' | 'h' | 's'>, matchingKey: string): number;
 /**
  * PushManager factory:
  * - for server-side if key is not provided in settings.