@splitsoftware/splitio-commons 1.16.1-rc.0 → 1.16.1-rc.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGES.txt +4 -0
- package/cjs/logger/constants.js +5 -4
- package/cjs/logger/messages/info.js +2 -1
- package/cjs/logger/messages/warn.js +1 -1
- package/cjs/readiness/readinessManager.js +3 -9
- package/cjs/services/splitApi.js +4 -8
- package/cjs/storages/AbstractSegmentsCacheSync.js +1 -6
- package/cjs/storages/AbstractSplitsCacheAsync.js +2 -2
- package/cjs/storages/AbstractSplitsCacheSync.js +7 -5
- package/cjs/storages/KeyBuilder.js +0 -3
- package/cjs/storages/KeyBuilderCS.js +6 -0
- package/cjs/storages/inLocalStorage/MySegmentsCacheInLocal.js +23 -2
- package/cjs/storages/inLocalStorage/SplitsCacheInLocal.js +4 -16
- package/cjs/storages/inMemory/MySegmentsCacheInMemory.js +5 -1
- package/cjs/storages/inMemory/SplitsCacheInMemory.js +6 -15
- package/cjs/storages/pluggable/inMemoryWrapper.js +1 -1
- package/cjs/sync/polling/fetchers/mySegmentsFetcher.js +4 -7
- package/cjs/sync/polling/fetchers/segmentChangesFetcher.js +1 -1
- package/cjs/sync/polling/pollingManagerCS.js +30 -54
- package/cjs/sync/polling/syncTasks/mySegmentsSyncTask.js +2 -2
- package/cjs/sync/polling/updaters/mySegmentsUpdater.js +25 -27
- package/cjs/sync/streaming/SSEHandler/index.js +7 -8
- package/cjs/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.js +18 -5
- package/cjs/sync/streaming/constants.js +2 -3
- package/cjs/sync/streaming/parseUtils.js +14 -9
- package/cjs/sync/streaming/pushManager.js +29 -53
- package/cjs/sync/submitters/telemetrySubmitter.js +0 -2
- package/cjs/sync/syncManagerOnline.js +14 -24
- package/cjs/utils/constants/index.js +1 -1
- package/cjs/utils/settingsValidation/index.js +1 -5
- package/esm/logger/constants.js +2 -1
- package/esm/logger/messages/info.js +2 -1
- package/esm/logger/messages/warn.js +1 -1
- package/esm/readiness/readinessManager.js +3 -9
- package/esm/services/splitApi.js +5 -9
- package/esm/storages/AbstractSegmentsCacheSync.js +1 -6
- package/esm/storages/AbstractSplitsCacheAsync.js +2 -2
- package/esm/storages/AbstractSplitsCacheSync.js +5 -3
- package/esm/storages/KeyBuilder.js +0 -3
- package/esm/storages/KeyBuilderCS.js +6 -0
- package/esm/storages/inLocalStorage/MySegmentsCacheInLocal.js +23 -2
- package/esm/storages/inLocalStorage/SplitsCacheInLocal.js +5 -17
- package/esm/storages/inMemory/MySegmentsCacheInMemory.js +5 -1
- package/esm/storages/inMemory/SplitsCacheInMemory.js +7 -16
- package/esm/storages/pluggable/inMemoryWrapper.js +1 -1
- package/esm/sync/polling/fetchers/mySegmentsFetcher.js +4 -7
- package/esm/sync/polling/fetchers/segmentChangesFetcher.js +1 -1
- package/esm/sync/polling/pollingManagerCS.js +31 -55
- package/esm/sync/polling/syncTasks/mySegmentsSyncTask.js +2 -2
- package/esm/sync/polling/updaters/mySegmentsUpdater.js +23 -25
- package/esm/sync/streaming/SSEHandler/index.js +8 -9
- package/esm/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.js +18 -5
- package/esm/sync/streaming/constants.js +1 -2
- package/esm/sync/streaming/parseUtils.js +12 -8
- package/esm/sync/streaming/pushManager.js +31 -54
- package/esm/sync/submitters/telemetrySubmitter.js +0 -2
- package/esm/sync/syncManagerOnline.js +15 -25
- package/esm/utils/constants/index.js +1 -1
- package/esm/utils/settingsValidation/index.js +1 -5
- package/package.json +1 -1
- package/src/dtos/types.ts +14 -8
- package/src/logger/constants.ts +2 -1
- package/src/logger/messages/info.ts +2 -1
- package/src/logger/messages/warn.ts +1 -1
- package/src/readiness/readinessManager.ts +3 -7
- package/src/readiness/types.ts +0 -1
- package/src/services/splitApi.ts +6 -11
- package/src/services/splitHttpClient.ts +1 -1
- package/src/services/types.ts +2 -3
- package/src/storages/AbstractSegmentsCacheSync.ts +2 -6
- package/src/storages/AbstractSplitsCacheAsync.ts +2 -2
- package/src/storages/AbstractSplitsCacheSync.ts +6 -4
- package/src/storages/KeyBuilder.ts +0 -3
- package/src/storages/KeyBuilderCS.ts +9 -0
- package/src/storages/inLocalStorage/MySegmentsCacheInLocal.ts +26 -2
- package/src/storages/inLocalStorage/SplitsCacheInLocal.ts +5 -20
- package/src/storages/inMemory/MySegmentsCacheInMemory.ts +7 -1
- package/src/storages/inMemory/SplitsCacheInMemory.ts +7 -13
- package/src/storages/pluggable/inMemoryWrapper.ts +1 -1
- package/src/storages/types.ts +6 -5
- package/src/sync/polling/fetchers/mySegmentsFetcher.ts +7 -10
- package/src/sync/polling/fetchers/segmentChangesFetcher.ts +1 -1
- package/src/sync/polling/fetchers/types.ts +2 -2
- package/src/sync/polling/pollingManagerCS.ts +27 -62
- package/src/sync/polling/syncTasks/mySegmentsSyncTask.ts +11 -11
- package/src/sync/polling/types.ts +9 -8
- package/src/sync/polling/updaters/mySegmentsUpdater.ts +22 -24
- package/src/sync/streaming/SSEClient/index.ts +4 -6
- package/src/sync/streaming/SSEHandler/index.ts +11 -13
- package/src/sync/streaming/SSEHandler/types.ts +13 -25
- package/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts +21 -7
- package/src/sync/streaming/UpdateWorkers/SegmentsUpdateWorker.ts +1 -1
- package/src/sync/streaming/UpdateWorkers/SplitsUpdateWorker.ts +1 -1
- package/src/sync/streaming/UpdateWorkers/types.ts +2 -2
- package/src/sync/streaming/constants.ts +1 -2
- package/src/sync/streaming/parseUtils.ts +19 -11
- package/src/sync/streaming/pushManager.ts +37 -65
- package/src/sync/streaming/types.ts +9 -11
- package/src/sync/submitters/telemetrySubmitter.ts +0 -2
- package/src/sync/submitters/types.ts +1 -3
- package/src/sync/syncManagerOnline.ts +11 -19
- package/src/types.ts +1 -26
- package/src/utils/constants/index.ts +1 -1
- package/src/utils/settingsValidation/index.ts +1 -5
- package/types/dtos/types.d.ts +14 -7
- package/types/logger/constants.d.ts +2 -1
- package/types/readiness/types.d.ts +0 -1
- package/types/services/decorateHeaders.d.ts +2 -0
- package/types/services/splitApi.d.ts +1 -1
- package/types/services/splitHttpClient.d.ts +1 -1
- package/types/services/types.d.ts +2 -3
- package/types/storages/AbstractSegmentsCacheSync.d.ts +2 -6
- package/types/storages/AbstractSplitsCacheAsync.d.ts +1 -1
- package/types/storages/AbstractSplitsCacheSync.d.ts +3 -3
- package/types/storages/KeyBuilder.d.ts +0 -1
- package/types/storages/KeyBuilderCS.d.ts +2 -0
- package/types/storages/inLocalStorage/MySegmentsCacheInLocal.d.ts +3 -2
- package/types/storages/inLocalStorage/SplitsCacheInLocal.d.ts +1 -1
- package/types/storages/inMemory/MySegmentsCacheInMemory.d.ts +3 -1
- package/types/storages/inMemory/SplitsCacheInMemory.d.ts +1 -2
- package/types/storages/pluggable/inMemoryWrapper.d.ts +1 -1
- package/types/storages/types.d.ts +4 -4
- package/types/sync/polling/fetchers/mySegmentsFetcher.d.ts +2 -2
- package/types/sync/polling/fetchers/types.d.ts +2 -2
- package/types/sync/polling/syncTasks/mySegmentsSyncTask.d.ts +4 -3
- package/types/sync/polling/types.d.ts +7 -13
- package/types/sync/polling/updaters/mySegmentsUpdater.d.ts +3 -2
- package/types/sync/streaming/SSEHandler/types.d.ts +13 -22
- package/types/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.d.ts +2 -2
- package/types/sync/streaming/UpdateWorkers/SegmentsUpdateWorker.d.ts +2 -1
- package/types/sync/streaming/UpdateWorkers/SplitsUpdateWorker.d.ts +3 -2
- package/types/sync/streaming/UpdateWorkers/types.d.ts +2 -2
- package/types/sync/streaming/constants.d.ts +1 -2
- package/types/sync/streaming/parseUtils.d.ts +4 -5
- package/types/sync/streaming/pushManager.d.ts +0 -2
- package/types/sync/streaming/pushManagerCS_Spec1_3.d.ts +9 -0
- package/types/sync/streaming/pushManager_Spec1_3.d.ts +9 -0
- package/types/sync/streaming/types.d.ts +8 -9
- package/types/sync/submitters/types.d.ts +1 -3
- package/types/types.d.ts +0 -25
- package/types/utils/constants/index.d.ts +1 -1
- package/types/utils/settingsValidation/index.d.ts +0 -2

package/src/sync/polling/types.ts

@@ -1,4 +1,4 @@
-import { ISplit } from '../../dtos/types';
+import { IMembershipsResponse, ISplit } from '../../dtos/types';
 import { IReadinessManager } from '../../readiness/types';
 import { IStorageSync } from '../../storages/types';
 import { ITask, ISyncTask } from '../types';
@@ -7,27 +7,28 @@ export interface ISplitsSyncTask extends ISyncTask<[noCache?: boolean, till?: nu

 export interface ISegmentsSyncTask extends ISyncTask<[fetchOnlyNew?: boolean, segmentName?: string, noCache?: boolean, till?: number], boolean> { }

-export type MySegmentsData =
+export type MySegmentsData = IMembershipsResponse | {
+  /* segment type */
+  isLS?: boolean
   /* segment name */
-  name: string
+  name: string
   /* action: `true` for add, and `false` for delete */
   add: boolean
-}
+}[]

-export interface IMySegmentsSyncTask extends ISyncTask<[segmentsData?: MySegmentsData, noCache?: boolean
+export interface IMySegmentsSyncTask extends ISyncTask<[segmentsData?: MySegmentsData, noCache?: boolean], boolean> { }

 export interface IPollingManager extends ITask {
   syncAll(): Promise<any>
   splitsSyncTask: ISplitsSyncTask
   segmentsSyncTask: ISyncTask
-  largeSegmentsSyncTask?: ISyncTask
 }

 /**
  * PollingManager for client-side with support for multiple clients
  */
 export interface IPollingManagerCS extends IPollingManager {
-  add(matchingKey: string, readiness: IReadinessManager, storage: IStorageSync):
+  add(matchingKey: string, readiness: IReadinessManager, storage: IStorageSync): IMySegmentsSyncTask
   remove(matchingKey: string): void;
-  get(matchingKey: string):
+  get(matchingKey: string): IMySegmentsSyncTask | undefined
 }
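
For orientation, the reworked MySegmentsData union accepts either a full memberships response or a list of instant add/delete entries. The following is a minimal standalone TypeScript sketch of both shapes; IMembershipsResponse is stubbed locally (its shape is inferred from how the mySegmentsUpdater hunk below reads ms/ls), so treat it as illustrative rather than the package's exact definition.

// Sketch only: MembershipsResponseStub is an assumption, not the package's IMembershipsResponse.
type MembershipsResponseStub = {
  ms?: { k?: { n: string }[], cn?: number }, // standard segment memberships
  ls?: { k?: { n: string }[], cn?: number }  // large segment memberships
};

type MySegmentsDataSketch = MembershipsResponseStub | {
  isLS?: boolean, // segment type: true for large segments
  name: string,   // segment name
  add: boolean    // `true` for add, `false` for delete
}[];

// (1) Full memberships payload: the updater resets both caches from it.
const fullSync: MySegmentsDataSketch = { ms: { k: [{ n: 'beta_users' }], cn: 100 }, ls: { k: [], cn: 100 } };

// (2) Instant update entries: the updater adds/removes individual segment names.
const instantUpdate: MySegmentsDataSketch = [{ isLS: false, name: 'beta_users', add: true }];

console.log(fullSync, instantUpdate);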

package/src/sync/polling/updaters/mySegmentsUpdater.ts

@@ -1,6 +1,8 @@
 import { IMySegmentsFetcher } from '../fetchers/types';
-import {
+import { IStorageSync } from '../../../storages/types';
+import { ISegmentsEventEmitter } from '../../../readiness/types';
 import { timeout } from '../../../utils/promise/timeout';
+import { SDK_SEGMENTS_ARRIVED } from '../../../readiness/constants';
 import { ILogger } from '../../../logger/types';
 import { SYNC_MYSEGMENTS_FETCH_RETRY } from '../../../logger/constants';
 import { MySegmentsData } from '../types';
@@ -16,13 +18,14 @@ type IMySegmentsUpdater = (segmentList?: MySegmentsData, noCache?: boolean) => P
 export function mySegmentsUpdaterFactory(
   log: ILogger,
   mySegmentsFetcher: IMySegmentsFetcher,
-
-
+  storage: IStorageSync,
+  segmentsEventEmitter: ISegmentsEventEmitter,
   requestTimeoutBeforeReady: number,
   retriesOnFailureBeforeReady: number,
   matchingKey: string
 ): IMySegmentsUpdater {

+  const { splits, segments, largeSegments } = storage;
   let readyOnAlreadyExistentState = true;
   let startingUp = true;

@@ -37,24 +40,25 @@ export function mySegmentsUpdaterFactory(

     let shouldNotifyUpdate;
     if (Array.isArray(segmentsData)) {
-      //
-
+      // Add/Delete the segment names
+      segmentsData.forEach(({ isLS, name, add }) => {
+        const cache = isLS ? largeSegments : segments;
+        if (cache!.isInSegment(name) !== add) {
+          shouldNotifyUpdate = true;
+          if (add) cache!.addToSegment(name);
+          else cache!.removeFromSegment(name);
+        }
+      });
     } else {
-      //
-
-
-      shouldNotifyUpdate = true;
-      if (add) mySegmentsCache.addToSegment(name);
-      else mySegmentsCache.removeFromSegment(name);
-      } else {
-      shouldNotifyUpdate = false;
-      }
+      // Reset the list of segment names
+      shouldNotifyUpdate = segments.resetSegments((segmentsData.ms?.k || []).map((segment) => segment.n), segmentsData.ms?.cn);
+      shouldNotifyUpdate = largeSegments!.resetSegments((segmentsData.ls?.k || []).map((segment) => segment.n), segmentsData.ls?.cn) || shouldNotifyUpdate;
     }

     // Notify update if required
-    if (shouldNotifyUpdate || readyOnAlreadyExistentState) {
+    if (splits.usesSegments() && (shouldNotifyUpdate || readyOnAlreadyExistentState)) {
       readyOnAlreadyExistentState = false;
-
+      segmentsEventEmitter.emit(SDK_SEGMENTS_ARRIVED);
     }
   }

@@ -94,14 +98,8 @@ export function mySegmentsUpdaterFactory(
    * (3) or `undefined`, for which the updater will fetch mySegments in order to sync the storage.
    * @param {boolean | undefined} noCache true to revalidate data to fetch
    */
-  return function mySegmentsUpdater(segmentsData?: MySegmentsData, noCache?: boolean
-    return
-      new Promise(res => {
-        setTimeout(() => {
-          _mySegmentsUpdater(0, segmentsData, noCache).then(res);
-        }, delay);
-      }) :
-      _mySegmentsUpdater(0, segmentsData, noCache);
+  return function mySegmentsUpdater(segmentsData?: MySegmentsData, noCache?: boolean) {
+    return _mySegmentsUpdater(0, segmentsData, noCache);
   };

 }
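
To make the non-array branch concrete, here is a minimal standalone sketch of reducing a memberships-style payload to the segment-name lists passed to resetSegments. The cache class is a local stub (the real caches live under package/src/storages), and returning a "changed" boolean mirrors how the updater uses the result to decide whether to emit SDK_SEGMENTS_ARRIVED.

// Stub cache: only the resetSegments surface used by the updater is sketched.
class SegmentsCacheStub {
  private names = new Set<string>();
  resetSegments(names: string[], _changeNumber?: number): boolean {
    const changed = names.length !== this.names.size || names.some(n => !this.names.has(n));
    this.names = new Set(names);
    return changed; // true when the stored segment list actually changed
  }
}

const segments = new SegmentsCacheStub();
const largeSegments = new SegmentsCacheStub();

// Memberships-style payload as read by the updater (ms = segments, ls = large segments).
const segmentsData = {
  ms: { k: [{ n: 'beta_users' }, { n: 'qa' }], cn: 100 },
  ls: { k: [{ n: 'high_value' }], cn: 100 }
};

let shouldNotifyUpdate = segments.resetSegments((segmentsData.ms?.k || []).map(s => s.n), segmentsData.ms?.cn);
shouldNotifyUpdate = largeSegments.resetSegments((segmentsData.ls?.k || []).map(s => s.n), segmentsData.ls?.cn) || shouldNotifyUpdate;
console.log(shouldNotifyUpdate); // true: both caches changed on this first sync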

package/src/sync/streaming/SSEClient/index.ts

@@ -76,12 +76,10 @@ export class SSEClient implements ISSEClient {
   open(authToken: IAuthTokenPushEnabled) {
     this.close(); // it closes connection if previously opened

-    const channelsQueryParam = Object.keys(authToken.channels).map(
-
-
-
-    }
-    ).join(',');
+    const channelsQueryParam = Object.keys(authToken.channels).map((channel) => {
+      const params = CONTROL_CHANNEL_REGEX.test(channel) ? '[?occupancy=metrics.publishers]' : '';
+      return encodeURIComponent(params + channel);
+    }).join(',');
     const url = `${this.streamingUrl}?channels=${channelsQueryParam}&accessToken=${authToken.token}&v=${ABLY_API_VERSION}&heartbeats=true`; // same results using `&heartbeats=false`

     this.connection = new this.eventSource!(
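
As a quick illustration of the rewritten block, the sketch below builds the same query parameter for two example channels. The channel names and the CONTROL_CHANNEL_REGEX pattern are illustrative assumptions; the real regex is defined elsewhere in SSEClient/index.ts.

// Sketch only: pattern and channel names are assumptions, not the package's definitions.
const CONTROL_CHANNEL_REGEX = /^control_/;

const channels: Record<string, string[]> = {
  'control_pri': ['subscribe'],
  'MzM5Nj_MTIyMTcxOTQ5Mg_mySegments': ['subscribe']
};

const channelsQueryParam = Object.keys(channels).map((channel) => {
  // Occupancy metadata is only requested for control channels.
  const params = CONTROL_CHANNEL_REGEX.test(channel) ? '[?occupancy=metrics.publishers]' : '';
  return encodeURIComponent(params + channel);
}).join(',');

console.log(channelsQueryParam);
// e.g. "%5B%3Foccupancy%3Dmetrics.publishers%5Dcontrol_pri,MzM5Nj_MTIyMTcxOTQ5Mg_mySegments"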

package/src/sync/streaming/SSEHandler/index.ts

@@ -1,9 +1,9 @@
 import { errorParser, messageParser } from './NotificationParser';
 import { notificationKeeperFactory } from './NotificationKeeper';
-import { PUSH_RETRYABLE_ERROR, PUSH_NONRETRYABLE_ERROR, OCCUPANCY, CONTROL,
+import { PUSH_RETRYABLE_ERROR, PUSH_NONRETRYABLE_ERROR, OCCUPANCY, CONTROL, MY_SEGMENTS_UPDATE_V3, SEGMENT_UPDATE, SPLIT_KILL, SPLIT_UPDATE, MY_LARGE_SEGMENTS_UPDATE } from '../constants';
 import { IPushEventEmitter } from '../types';
 import { ISseEventHandler } from '../SSEClient/types';
-import { INotificationError, INotificationMessage } from './types';
+import { IControlData, INotificationError, INotificationMessage, IOccupancyData } from './types';
 import { ILogger } from '../../../logger/types';
 import { STREAMING_PARSING_ERROR_FAILS, ERROR_STREAMING_SSE, STREAMING_PARSING_MESSAGE_FAILS, STREAMING_NEW_MESSAGE } from '../../../logger/constants';
 import { ABLY_ERROR, NON_REQUESTED, SSE_CONNECTION_ERROR } from '../../../utils/constants';
@@ -74,29 +74,27 @@ export function SSEHandlerFactory(log: ILogger, pushEmitter: IPushEventEmitter,
     const { parsedData, data, channel, timestamp } = messageWithParsedData;
     log.debug(STREAMING_NEW_MESSAGE, [data]);

-    // we only handle update events if streaming is up
-
-
+    // we only handle update events if streaming is up
+    // @ts-expect-error
+    const type = parsedData.type || parsedData.t;
+    if (!notificationKeeper.isStreamingUp() && [OCCUPANCY, CONTROL].indexOf(type) === -1) return;

-    switch (
+    switch (type) {
       /* update events */
       case SPLIT_UPDATE:
       case SEGMENT_UPDATE:
-      case
+      case MY_SEGMENTS_UPDATE_V3:
       case MY_LARGE_SEGMENTS_UPDATE:
       case SPLIT_KILL:
-        pushEmitter.emit(
-        break;
-      case MY_SEGMENTS_UPDATE:
-        pushEmitter.emit(parsedData.type, parsedData, channel);
+        pushEmitter.emit(type, parsedData);
         break;

       /* occupancy & control events, handled by NotificationManagerKeeper */
       case OCCUPANCY:
-        notificationKeeper.handleOccupancyEvent(parsedData.metrics.publishers, channel, timestamp);
+        notificationKeeper.handleOccupancyEvent((parsedData as IOccupancyData).metrics.publishers, channel, timestamp);
         break;
       case CONTROL:
-        notificationKeeper.handleControlEvent(parsedData.controlType, channel, timestamp);
+        notificationKeeper.handleControlEvent((parsedData as IControlData).controlType, channel, timestamp);
         break;

       default:

package/src/sync/streaming/SSEHandler/types.ts

@@ -1,12 +1,5 @@
 import { ControlType } from '../constants';
-import {
-
-export interface IMySegmentsUpdateData {
-  type: MY_SEGMENTS_UPDATE,
-  changeNumber: number,
-  includesPayload: boolean,
-  segmentList?: string[]
-}
+import { SEGMENT_UPDATE, SPLIT_UPDATE, SPLIT_KILL, CONTROL, OCCUPANCY, MY_LARGE_SEGMENTS_UPDATE, MY_SEGMENTS_UPDATE_V3 } from '../types';

 export enum Compression {
   None = 0,
@@ -26,27 +19,22 @@ export interface KeyList {
   r?: string[], // decimal hash64 of user keys
 }

-
-
-
-
-  c
-  d
-  u: UpdateStrategy,
-}
-
-export interface IMyLargeSegmentsUpdateData {
-  type: MY_LARGE_SEGMENTS_UPDATE,
-  changeNumber: number,
-  largeSegments: string[],
-  c: Compression,
-  d: string,
+interface IMySegmentsUpdateData<T extends string> {
+  t: T,
+  cn: number,
+  n?: string[],
+  c?: Compression,
+  d?: string,
   u: UpdateStrategy,
   i?: number, // time interval in millis
-  h?: number, // hash function
+  h?: number, // hash function
   s?: number, // seed for hash function
 }

+export interface IMySegmentsUpdateV3Data extends IMySegmentsUpdateData<MY_SEGMENTS_UPDATE_V3> { }
+
+export interface IMyLargeSegmentsUpdateData extends IMySegmentsUpdateData<MY_LARGE_SEGMENTS_UPDATE> { }
+
 export interface ISegmentUpdateData {
   type: SEGMENT_UPDATE,
   changeNumber: number,
@@ -80,6 +68,6 @@ export interface IOccupancyData {
   }
 }

-export type INotificationData =
+export type INotificationData = IMySegmentsUpdateV3Data | IMyLargeSegmentsUpdateData | ISegmentUpdateData | ISplitUpdateData | ISplitKillData | IControlData | IOccupancyData
 export type INotificationMessage = { parsedData: INotificationData, channel: string, timestamp: number, data: string }
 export type INotificationError = Event & { parsedData?: any, message?: string }
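
To make the consolidated notification shape concrete, here are two sample objects written against a local copy of the interface. Field names (t, cn, n, c, d, u, i, h, s) come from the diff above; the member names and ordering of the local enums and all field values are illustrative assumptions.

// Local copies for a standalone sketch; enum members/ordering are assumed here, not taken from the package.
enum CompressionSketch { None = 0, Gzip = 1, Zlib = 2 }
enum UpdateStrategySketch { UnboundedFetchRequest, BoundedFetchRequest, KeyList, SegmentRemoval }

interface MySegmentsUpdateSketch<T extends string> {
  t: T,                  // notification type
  cn: number,            // change number
  n?: string[],          // segment name(s)
  c?: CompressionSketch, // payload compression
  d?: string,            // compressed payload
  u: UpdateStrategySketch,
  i?: number,            // time interval in millis
  h?: number,            // hash function
  s?: number             // seed for hash function
}

// Unbounded fetch for large segments: no payload; clients re-fetch after a per-key delay.
const unbounded: MySegmentsUpdateSketch<'MY_LARGE_SEGMENTS_UPDATE'> = {
  t: 'MY_LARGE_SEGMENTS_UPDATE', cn: 1700000000000, u: UpdateStrategySketch.UnboundedFetchRequest, i: 60000, s: 1234
};

// Instant removal: affected segment names travel in `n`, no fetch required.
const removal: MySegmentsUpdateSketch<'MY_SEGMENTS_UPDATE_V3'> = {
  t: 'MY_SEGMENTS_UPDATE_V3', cn: 1700000000001, u: UpdateStrategySketch.SegmentRemoval, n: ['beta_users']
};

console.log(unbounded, removal, CompressionSketch.None);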

package/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts

@@ -7,7 +7,7 @@ import { UpdatesFromSSEEnum } from '../../submitters/types';
 /**
  * MySegmentsUpdateWorker factory
  */
-export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask, telemetryTracker: ITelemetryTracker, updateType: UpdatesFromSSEEnum): IUpdateWorker {
+export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask, telemetryTracker: ITelemetryTracker, updateType: UpdatesFromSSEEnum): IUpdateWorker<[changeNumber: number, segmentsData?: MySegmentsData, delay?: number]> {

   let maxChangeNumber = 0; // keeps the maximum changeNumber among queued events
   let currentChangeNumber = -1;
@@ -15,6 +15,7 @@ export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask,
   let isHandlingEvent: boolean;
   let _segmentsData: MySegmentsData | undefined; // keeps the segmentsData (if included in notification payload) from the queued event with maximum changeNumber
   let _delay: undefined | number;
+  let _delayTimeoutID: any;
   const backoff = new Backoff(__handleMySegmentsUpdateCall);

   function __handleMySegmentsUpdateCall() {
@@ -24,9 +25,18 @@ export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask,
       const currentMaxChangeNumber = maxChangeNumber;

       // fetch mySegments revalidating data if cached
-
+      const syncTask = _delay ?
+        new Promise(res => {
+          _delayTimeoutID = setTimeout(() => {
+            _delay = undefined;
+            mySegmentsSyncTask.execute(_segmentsData, true).then(res);
+          }, _delay);
+        }) :
+        mySegmentsSyncTask.execute(_segmentsData, true);
+
+      syncTask.then((result) => {
         if (!isHandlingEvent) return; // halt if `stop` has been called
-        if (result !== false) {// Unlike `Splits|SegmentsUpdateWorker`, we cannot use `mySegmentsCache.getChangeNumber` since `/mySegments` endpoint doesn't provide this value.
+        if (result !== false) { // Unlike `Splits|SegmentsUpdateWorker`, we cannot use `mySegmentsCache.getChangeNumber` since `/mySegments` endpoint doesn't provide this value.
           if (_segmentsData) telemetryTracker.trackUpdatesFromSSE(updateType);
           currentChangeNumber = Math.max(currentChangeNumber, currentMaxChangeNumber); // use `currentMaxChangeNumber`, in case that `maxChangeNumber` was updated during fetch.
         }
@@ -43,13 +53,15 @@ export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask,

   return {
     /**
-     * Invoked by NotificationProcessor on
+     * Invoked by NotificationProcessor on MY_(LARGE)_SEGMENTS_UPDATE notifications
      *
-     * @param
-     * @param
+     * @param changeNumber change number of the notification
+     * @param segmentsData data for KeyList or SegmentRemoval instant updates
+     * @param delay optional time to wait for BoundedFetchRequest or BoundedFetchRequest updates
      */
     put(changeNumber: number, segmentsData?: MySegmentsData, delay?: number) {
-      if
+      // Ignore event if it is outdated or if there is a pending fetch request (_delay is set)
+      if (changeNumber <= currentChangeNumber || changeNumber <= maxChangeNumber || _delay) return;

       maxChangeNumber = changeNumber;
       handleNewEvent = true;
@@ -61,6 +73,8 @@ export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask,
     },

     stop() {
+      clearTimeout(_delayTimeoutID);
+      _delay = undefined;
       isHandlingEvent = false;
       backoff.reset();
     }
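
The delay handling added above can be isolated into a small self-contained sketch: when a delay is pending, the fetch is scheduled with setTimeout and the timeout id is kept so stop() can cancel it. The execute stub below stands in for IMySegmentsSyncTask.execute; it is not the package's worker.

// Self-contained sketch of the optionally-delayed fetch pattern.
let _delay: number | undefined;
let _delayTimeoutID: ReturnType<typeof setTimeout> | undefined;

function execute(): Promise<boolean> { // stand-in for mySegmentsSyncTask.execute(_segmentsData, true)
  console.log('fetching memberships');
  return Promise.resolve(true);
}

function handleUpdateCall(): Promise<boolean> {
  return _delay ?
    new Promise<boolean>(res => {
      _delayTimeoutID = setTimeout(() => {
        _delay = undefined;
        execute().then(res);
      }, _delay);
    }) :
    execute();
}

function stop() { // cancels a pending delayed fetch, mirroring the added stop() lines
  clearTimeout(_delayTimeoutID);
  _delay = undefined;
}

_delay = 1500; // e.g. a value like the one computed by getDelay for this user key
handleUpdateCall().then(result => console.log('fetch finished:', result));
// stop(); // calling this before the delay elapses would cancel the scheduled fetch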

package/src/sync/streaming/UpdateWorkers/SegmentsUpdateWorker.ts

@@ -9,7 +9,7 @@ import { IUpdateWorker } from './types';
 /**
  * SegmentsUpdateWorker factory
  */
-export function SegmentsUpdateWorker(log: ILogger, segmentsSyncTask: ISegmentsSyncTask, segmentsCache: ISegmentsCacheSync): IUpdateWorker {
+export function SegmentsUpdateWorker(log: ILogger, segmentsSyncTask: ISegmentsSyncTask, segmentsCache: ISegmentsCacheSync): IUpdateWorker<[ISegmentUpdateData]> {

   // Handles retries with CDN bypass per segment name
   function SegmentUpdateWorker(segment: string) {

package/src/sync/streaming/UpdateWorkers/SplitsUpdateWorker.ts

@@ -14,7 +14,7 @@ import { IUpdateWorker } from './types';
 /**
  * SplitsUpdateWorker factory
  */
-export function SplitsUpdateWorker(log: ILogger, splitsCache: ISplitsCacheSync, splitsSyncTask: ISplitsSyncTask, splitsEventEmitter: ISplitsEventEmitter, telemetryTracker: ITelemetryTracker, segmentsSyncTask?: ISegmentsSyncTask): IUpdateWorker & { killSplit(event: ISplitKillData): void } {
+export function SplitsUpdateWorker(log: ILogger, splitsCache: ISplitsCacheSync, splitsSyncTask: ISplitsSyncTask, splitsEventEmitter: ISplitsEventEmitter, telemetryTracker: ITelemetryTracker, segmentsSyncTask?: ISegmentsSyncTask): IUpdateWorker<[updateData: ISplitUpdateData, payload?: ISplit]> & { killSplit(event: ISplitKillData): void } {

   let maxChangeNumber = 0;
   let handleNewEvent = false;

package/src/sync/streaming/constants.ts

@@ -25,8 +25,7 @@ export const PUSH_SUBSYSTEM_UP = 'PUSH_SUBSYSTEM_UP';
 export const PUSH_SUBSYSTEM_DOWN = 'PUSH_SUBSYSTEM_DOWN';

 // Update-type push notifications, handled by NotificationProcessor
-export const
-export const MY_SEGMENTS_UPDATE_V2 = 'MY_SEGMENTS_UPDATE_V2';
+export const MY_SEGMENTS_UPDATE_V3 = 'MY_SEGMENTS_UPDATE_V3';
 export const SEGMENT_UPDATE = 'SEGMENT_UPDATE';
 export const SPLIT_KILL = 'SPLIT_KILL';
 export const SPLIT_UPDATE = 'SPLIT_UPDATE';

package/src/sync/streaming/parseUtils.ts

@@ -1,6 +1,8 @@
 import { algorithms } from '../../utils/decompress';
 import { decodeFromBase64 } from '../../utils/base64';
-import {
+import { hash } from '../../utils/murmur3/murmur3';
+import { Compression, IMyLargeSegmentsUpdateData, KeyList } from './SSEHandler/types';
+import { ISplit } from '../../dtos/types';

 const GZIP = 1;
 const ZLIB = 2;
@@ -42,7 +44,7 @@ function decompress(data: string, compression: Compression) {
  * @returns {{a?: string[], r?: string[] }}
  * @throws if data string cannot be decoded, decompressed or parsed
  */
-export function parseKeyList(data: string, compression: Compression, avoidPrecisionLoss
+export function parseKeyList(data: string, compression: Compression, avoidPrecisionLoss = true): KeyList {
   const binKeyList = decompress(data, compression);
   let strKeyList = Uint8ArrayToString(binKeyList);
   // replace numbers to strings, to avoid losing precision
@@ -80,14 +82,20 @@ export function isInBitmap(bitmap: Uint8Array, hash64hex: string) {

 /**
  * Parse feature flags notifications for instant feature flag updates
- *
- * @param {ISplitUpdateData} data
- * @returns {KeyList}
  */
-export function parseFFUpdatePayload(compression: Compression, data: string):
-
-
-
-
-
+export function parseFFUpdatePayload(compression: Compression, data: string): ISplit | undefined {
+  return compression > 0 ?
+    parseKeyList(data, compression, false) :
+    JSON.parse(decodeFromBase64(data));
+}
+
+const DEFAULT_MAX_INTERVAL = 60000;
+
+export function getDelay(parsedData: Pick<IMyLargeSegmentsUpdateData, 'i' | 'h' | 's'>, matchingKey: string) {
+  if (parsedData.h === 0) return 0;
+
+  const interval = parsedData.i || DEFAULT_MAX_INTERVAL;
+  const seed = parsedData.s || 0;
+
+  return hash(matchingKey, seed) % interval;
 }
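
The new getDelay spreads fetch requests across a time interval, deterministically per user key. A minimal standalone sketch of the same computation, with a toy hash standing in for the package's murmur3 so it runs on its own:

// Sketch of the delay computation added above; toyHash is an assumption, not the SDK's murmur3.
function toyHash(key: string, seed: number): number {
  let h = seed >>> 0;
  for (let i = 0; i < key.length; i++) h = Math.imul(h ^ key.charCodeAt(i), 2654435761) >>> 0;
  return h;
}

const DEFAULT_MAX_INTERVAL = 60000;

function getDelaySketch(parsedData: { i?: number, h?: number, s?: number }, matchingKey: string) {
  if (parsedData.h === 0) return 0;             // hash function 0 means no delay
  const interval = parsedData.i || DEFAULT_MAX_INTERVAL;
  const seed = parsedData.s || 0;
  return toyHash(matchingKey, seed) % interval; // deterministic per key, in [0, interval)
}

console.log(getDelaySketch({ i: 60000, s: 1234 }, 'user-key-1')); // some value below 60000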

package/src/sync/streaming/pushManager.ts

@@ -11,25 +11,15 @@ import { authenticateFactory, hashUserKey } from './AuthClient';
 import { forOwn } from '../../utils/lang';
 import { SSEClient } from './SSEClient';
 import { getMatching } from '../../utils/key';
-import {
-import { STREAMING_FALLBACK, STREAMING_REFRESH_TOKEN, STREAMING_CONNECTING, STREAMING_DISABLED, ERROR_STREAMING_AUTH, STREAMING_DISCONNECTING, STREAMING_RECONNECT,
-import { IMyLargeSegmentsUpdateData,
-import { isInBitmap, parseBitmap, parseFFUpdatePayload, parseKeyList } from './parseUtils';
+import { MY_SEGMENTS_UPDATE_V3, PUSH_NONRETRYABLE_ERROR, PUSH_SUBSYSTEM_DOWN, SECONDS_BEFORE_EXPIRATION, SEGMENT_UPDATE, SPLIT_KILL, SPLIT_UPDATE, PUSH_RETRYABLE_ERROR, PUSH_SUBSYSTEM_UP, ControlType, MY_LARGE_SEGMENTS_UPDATE } from './constants';
+import { STREAMING_FALLBACK, STREAMING_REFRESH_TOKEN, STREAMING_CONNECTING, STREAMING_DISABLED, ERROR_STREAMING_AUTH, STREAMING_DISCONNECTING, STREAMING_RECONNECT, STREAMING_PARSING_MY_SEGMENTS_UPDATE, STREAMING_PARSING_SPLIT_UPDATE } from '../../logger/constants';
+import { IMyLargeSegmentsUpdateData, IMySegmentsUpdateV3Data, KeyList, UpdateStrategy } from './SSEHandler/types';
+import { getDelay, isInBitmap, parseBitmap, parseFFUpdatePayload, parseKeyList } from './parseUtils';
 import { ISet, _Set } from '../../utils/lang/sets';
-import { hash } from '../../utils/murmur3/murmur3';
 import { Hash64, hash64 } from '../../utils/murmur3/murmur3_64';
 import { IAuthTokenPushEnabled } from './AuthClient/types';
 import { TOKEN_REFRESH, AUTH_REJECTION, MY_LARGE_SEGMENT, MY_SEGMENT } from '../../utils/constants';
 import { ISdkFactoryContextSync } from '../../sdkFactory/types';
-import { IUpdateWorker } from './UpdateWorkers/types';
-
-export function getDelay(parsedData: IMyLargeSegmentsUpdateData, matchingKey: string) {
-  const interval = parsedData.i || 60000;
-  // const hashType = parsedData.h || 0;
-  const seed = parsedData.s || 0;
-
-  return hash(matchingKey, seed) % interval;
-}

 /**
  * PushManager factory:
@@ -72,8 +62,8 @@ export function pushManagerFactory(
   // [Only for client-side] map of hashes to user keys, to dispatch MY_SEGMENTS_UPDATE events to the corresponding MySegmentsUpdateWorker
   const userKeyHashes: Record<string, string> = {};
   // [Only for client-side] map of user keys to their corresponding hash64 and MySegmentsUpdateWorkers.
-  // Hash64 is used to process
-  const clients: Record<string, { hash64: Hash64, worker:
+  // Hash64 is used to process MY_SEGMENTS_UPDATE events and dispatch actions to the corresponding MySegmentsUpdateWorker.
+  const clients: Record<string, { hash64: Hash64, worker: ReturnType<typeof MySegmentsUpdateWorker>, workerLarge: ReturnType<typeof MySegmentsUpdateWorker> }> = {};

   // [Only for client-side] variable to flag that a new client was added. It is needed to reconnect streaming.
   let connectForNewClient = false;
@@ -182,7 +172,7 @@ export function pushManagerFactory(
     splitsUpdateWorker.stop();
     if (userKey) forOwn(clients, ({ worker, workerLarge }) => {
       worker.stop();
-      workerLarge
+      workerLarge.stop();
     });
     else segmentsUpdateWorker!.stop();
   }
@@ -248,24 +238,22 @@ export function pushManagerFactory(
     splitsUpdateWorker.put(parsedData);
   });

-  function handleMySegmentsUpdate(parsedData:
-    const isLS = parsedData.
+  function handleMySegmentsUpdate(parsedData: IMySegmentsUpdateV3Data | IMyLargeSegmentsUpdateData) {
+    const isLS = parsedData.t === MY_LARGE_SEGMENTS_UPDATE;

     switch (parsedData.u) {
       case UpdateStrategy.BoundedFetchRequest: {
         let bitmap: Uint8Array;
         try {
-          bitmap = parseBitmap(parsedData.d
+          bitmap = parseBitmap(parsedData.d!, parsedData.c!);
         } catch (e) {
-          log.warn(
+          log.warn(STREAMING_PARSING_MY_SEGMENTS_UPDATE, ['BoundedFetchRequest', e]);
           break;
         }

         forOwn(clients, ({ hash64, worker, workerLarge }, matchingKey) => {
           if (isInBitmap(bitmap, hash64.hex)) {
-            isLS ?
-              workerLarge && workerLarge.put(parsedData.changeNumber, undefined, getDelay(parsedData, matchingKey)) :
-              worker.put(parsedData.changeNumber);
+            (isLS ? workerLarge : worker).put(parsedData.cn, undefined, getDelay(parsedData, matchingKey));
           }
         });
         return;
@@ -273,72 +261,56 @@ export function pushManagerFactory(
       case UpdateStrategy.KeyList: {
         let keyList: KeyList, added: ISet<string>, removed: ISet<string>;
         try {
-          keyList = parseKeyList(parsedData.d
+          keyList = parseKeyList(parsedData.d!, parsedData.c!);
           added = new _Set(keyList.a);
           removed = new _Set(keyList.r);
         } catch (e) {
-          log.warn(
+          log.warn(STREAMING_PARSING_MY_SEGMENTS_UPDATE, ['KeyList', e]);
+          break;
+        }
+
+        if (!parsedData.n || !parsedData.n.length) {
+          log.warn(STREAMING_PARSING_MY_SEGMENTS_UPDATE, ['KeyList', 'No segment name was provided']);
           break;
         }

         forOwn(clients, ({ hash64, worker, workerLarge }) => {
           const add = added.has(hash64.dec) ? true : removed.has(hash64.dec) ? false : undefined;
           if (add !== undefined) {
-            isLS ?
-
-
-
-
-            worker.put(parsedData.changeNumber, {
-              name: parsedData.segmentName,
-              add
-            });
+            (isLS ? workerLarge : worker).put(parsedData.cn, [{
+              isLS,
+              name: parsedData.n![0],
+              add,
+            }]);
           }
         });
         return;
       }
       case UpdateStrategy.SegmentRemoval:
-        if (
-          log.warn(
+        if (!parsedData.n || !parsedData.n.length) {
+          log.warn(STREAMING_PARSING_MY_SEGMENTS_UPDATE, ['SegmentRemoval', 'No segment name was provided']);
          break;
        }

        forOwn(clients, ({ worker, workerLarge }) => {
-          isLS ?
-
-
-
-
-
-          }) :
-          worker.put(parsedData.changeNumber, {
-            name: parsedData.segmentName,
-            add: false
-          });
+          (isLS ? workerLarge : worker).put(parsedData.cn, parsedData.n!.map(largeSegment => ({
+            isLS,
+            name: largeSegment,
+            add: false,
+            cn: parsedData.cn
+          })));
        });
        return;
    }

    // `UpdateStrategy.UnboundedFetchRequest` and fallbacks of other cases
    forOwn(clients, ({ worker, workerLarge }, matchingKey) => {
-      isLS ?
-        workerLarge && workerLarge.put(parsedData.changeNumber, undefined, getDelay(parsedData, matchingKey)) :
-        worker.put(parsedData.changeNumber);
+      (isLS ? workerLarge : worker).put(parsedData.cn, undefined, getDelay(parsedData, matchingKey));
    });
  }

  if (userKey) {
-    pushEmitter.on(
-      const userKeyHash = channel.split('_')[2];
-      const userKey = userKeyHashes[userKeyHash];
-      if (userKey && clients[userKey]) { // check existence since it can be undefined if client has been destroyed
-        clients[userKey].worker.put(
-          parsedData.changeNumber,
-          parsedData.includesPayload ? parsedData.segmentList ? parsedData.segmentList : [] : undefined);
-      }
-    });
-
-    pushEmitter.on(MY_SEGMENTS_UPDATE_V2, handleMySegmentsUpdate);
+    pushEmitter.on(MY_SEGMENTS_UPDATE_V3, handleMySegmentsUpdate);
    pushEmitter.on(MY_LARGE_SEGMENTS_UPDATE, handleMySegmentsUpdate);
  } else {
    pushEmitter.on(SEGMENT_UPDATE, segmentsUpdateWorker!.put);
@@ -362,7 +334,7 @@ export function pushManagerFactory(
      if (disabled || disconnected === false) return;
      disconnected = false;

-      if (userKey) this.add(userKey, pollingManager.segmentsSyncTask
+      if (userKey) this.add(userKey, pollingManager.segmentsSyncTask); // client-side
      else setTimeout(connectPush); // server-side runs in next cycle as in client-side, for consistency with client-side
    },

@@ -372,7 +344,7 @@ export function pushManagerFactory(
    },

    // [Only for client-side]
-    add(userKey: string, mySegmentsSyncTask: IMySegmentsSyncTask
+    add(userKey: string, mySegmentsSyncTask: IMySegmentsSyncTask) {
      const hash = hashUserKey(userKey);

      if (!userKeyHashes[hash]) {
@@ -380,7 +352,7 @@ export function pushManagerFactory(
        clients[userKey] = {
          hash64: hash64(userKey),
          worker: MySegmentsUpdateWorker(mySegmentsSyncTask, telemetryTracker, MY_SEGMENT),
-          workerLarge:
+          workerLarge: MySegmentsUpdateWorker(mySegmentsSyncTask, telemetryTracker, MY_LARGE_SEGMENT)
        };
        connectForNewClient = true; // we must reconnect on start, to listen the channel for the new user key