@splitsoftware/splitio-commons 1.16.0 → 1.16.1-rc.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGES.txt +3 -0
- package/cjs/evaluator/matchers/index.js +3 -1
- package/cjs/evaluator/matchers/large_segment.js +16 -0
- package/cjs/evaluator/matchers/matcherTypes.js +1 -0
- package/cjs/evaluator/matchersTransform/index.js +1 -1
- package/cjs/logger/constants.js +4 -4
- package/cjs/logger/messages/info.js +0 -1
- package/cjs/readiness/readinessManager.js +11 -6
- package/cjs/readiness/sdkReadinessManager.js +5 -6
- package/cjs/sdkClient/sdkClientMethodCS.js +3 -4
- package/cjs/sdkClient/sdkClientMethodCSWithTT.js +4 -5
- package/cjs/sdkFactory/index.js +1 -1
- package/cjs/services/splitApi.js +4 -0
- package/cjs/storages/AbstractSplitsCacheAsync.js +2 -2
- package/cjs/storages/AbstractSplitsCacheSync.js +5 -5
- package/cjs/storages/KeyBuilder.js +3 -0
- package/cjs/storages/KeyBuilderCS.js +17 -5
- package/cjs/storages/inLocalStorage/SplitsCacheInLocal.js +16 -4
- package/cjs/storages/inLocalStorage/index.js +6 -2
- package/cjs/storages/inMemory/InMemoryStorageCS.js +5 -0
- package/cjs/storages/inMemory/SplitsCacheInMemory.js +20 -11
- package/cjs/storages/inMemory/TelemetryCacheInMemory.js +7 -10
- package/cjs/sync/polling/pollingManagerCS.js +54 -30
- package/cjs/sync/polling/syncTasks/mySegmentsSyncTask.js +2 -2
- package/cjs/sync/polling/updaters/mySegmentsUpdater.js +13 -8
- package/cjs/sync/polling/updaters/splitChangesUpdater.js +2 -1
- package/cjs/sync/streaming/SSEHandler/index.js +1 -0
- package/cjs/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.js +6 -5
- package/cjs/sync/streaming/constants.js +2 -1
- package/cjs/sync/streaming/pushManager.js +96 -64
- package/cjs/sync/submitters/telemetrySubmitter.js +2 -0
- package/cjs/sync/syncManagerOnline.js +24 -14
- package/cjs/utils/constants/index.js +5 -1
- package/cjs/utils/settingsValidation/index.js +9 -4
- package/esm/evaluator/matchers/index.js +3 -1
- package/esm/evaluator/matchers/large_segment.js +12 -0
- package/esm/evaluator/matchers/matcherTypes.js +1 -0
- package/esm/evaluator/matchersTransform/index.js +1 -1
- package/esm/logger/constants.js +1 -1
- package/esm/logger/messages/info.js +0 -1
- package/esm/readiness/readinessManager.js +11 -6
- package/esm/readiness/sdkReadinessManager.js +5 -6
- package/esm/sdkClient/sdkClientMethodCS.js +4 -5
- package/esm/sdkClient/sdkClientMethodCSWithTT.js +5 -6
- package/esm/sdkFactory/index.js +1 -1
- package/esm/services/splitApi.js +5 -1
- package/esm/storages/AbstractSplitsCacheAsync.js +2 -2
- package/esm/storages/AbstractSplitsCacheSync.js +3 -3
- package/esm/storages/KeyBuilder.js +3 -0
- package/esm/storages/KeyBuilderCS.js +15 -4
- package/esm/storages/inLocalStorage/SplitsCacheInLocal.js +17 -5
- package/esm/storages/inLocalStorage/index.js +7 -3
- package/esm/storages/inMemory/InMemoryStorageCS.js +5 -0
- package/esm/storages/inMemory/SplitsCacheInMemory.js +21 -12
- package/esm/storages/inMemory/TelemetryCacheInMemory.js +7 -10
- package/esm/sync/polling/pollingManagerCS.js +55 -31
- package/esm/sync/polling/syncTasks/mySegmentsSyncTask.js +2 -2
- package/esm/sync/polling/updaters/mySegmentsUpdater.js +11 -6
- package/esm/sync/polling/updaters/splitChangesUpdater.js +2 -1
- package/esm/sync/streaming/SSEHandler/index.js +2 -1
- package/esm/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.js +6 -5
- package/esm/sync/streaming/constants.js +1 -0
- package/esm/sync/streaming/pushManager.js +96 -65
- package/esm/sync/submitters/telemetrySubmitter.js +2 -0
- package/esm/sync/syncManagerOnline.js +25 -15
- package/esm/utils/constants/index.js +4 -0
- package/esm/utils/settingsValidation/index.js +10 -5
- package/package.json +1 -1
- package/src/dtos/types.ts +7 -1
- package/src/evaluator/matchers/index.ts +2 -0
- package/src/evaluator/matchers/large_segment.ts +18 -0
- package/src/evaluator/matchers/matcherTypes.ts +1 -0
- package/src/evaluator/matchersTransform/index.ts +1 -1
- package/src/logger/constants.ts +1 -1
- package/src/logger/messages/info.ts +0 -1
- package/src/readiness/readinessManager.ts +11 -5
- package/src/readiness/sdkReadinessManager.ts +7 -7
- package/src/readiness/types.ts +3 -2
- package/src/sdkClient/sdkClientMethodCS.ts +4 -6
- package/src/sdkClient/sdkClientMethodCSWithTT.ts +5 -7
- package/src/sdkFactory/index.ts +1 -1
- package/src/services/splitApi.ts +6 -1
- package/src/services/types.ts +1 -0
- package/src/storages/AbstractSplitsCacheAsync.ts +2 -2
- package/src/storages/AbstractSplitsCacheSync.ts +4 -4
- package/src/storages/KeyBuilder.ts +3 -0
- package/src/storages/KeyBuilderCS.ts +25 -5
- package/src/storages/inLocalStorage/MySegmentsCacheInLocal.ts +3 -3
- package/src/storages/inLocalStorage/SplitsCacheInLocal.ts +20 -5
- package/src/storages/inLocalStorage/index.ts +8 -4
- package/src/storages/inMemory/InMemoryStorageCS.ts +5 -0
- package/src/storages/inMemory/SplitsCacheInMemory.ts +15 -10
- package/src/storages/inMemory/TelemetryCacheInMemory.ts +7 -11
- package/src/storages/types.ts +6 -4
- package/src/sync/polling/pollingManagerCS.ts +62 -27
- package/src/sync/polling/syncTasks/mySegmentsSyncTask.ts +8 -9
- package/src/sync/polling/types.ts +4 -3
- package/src/sync/polling/updaters/mySegmentsUpdater.ts +13 -10
- package/src/sync/polling/updaters/splitChangesUpdater.ts +4 -3
- package/src/sync/streaming/SSEHandler/index.ts +2 -1
- package/src/sync/streaming/SSEHandler/types.ts +14 -2
- package/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts +7 -5
- package/src/sync/streaming/constants.ts +1 -0
- package/src/sync/streaming/pushManager.ts +101 -63
- package/src/sync/streaming/types.ts +5 -3
- package/src/sync/submitters/telemetrySubmitter.ts +2 -0
- package/src/sync/submitters/types.ts +10 -4
- package/src/sync/syncManagerOnline.ts +19 -11
- package/src/types.ts +26 -1
- package/src/utils/constants/index.ts +5 -0
- package/src/utils/settingsValidation/index.ts +11 -6
- package/src/utils/settingsValidation/types.ts +1 -1
- package/types/dtos/types.d.ts +5 -1
- package/types/evaluator/matchers/large_segment.d.ts +5 -0
- package/types/logger/constants.d.ts +1 -1
- package/types/readiness/readinessManager.d.ts +2 -2
- package/types/readiness/sdkReadinessManager.d.ts +2 -3
- package/types/readiness/types.d.ts +3 -2
- package/types/services/types.d.ts +1 -0
- package/types/storages/AbstractSplitsCacheAsync.d.ts +1 -1
- package/types/storages/AbstractSplitsCacheSync.d.ts +3 -3
- package/types/storages/KeyBuilder.d.ts +1 -0
- package/types/storages/KeyBuilderCS.d.ts +7 -2
- package/types/storages/inLocalStorage/MySegmentsCacheInLocal.d.ts +2 -2
- package/types/storages/inLocalStorage/SplitsCacheInLocal.d.ts +1 -1
- package/types/storages/inMemory/SplitsCacheInMemory.d.ts +3 -2
- package/types/storages/inMemory/TelemetryCacheInMemory.d.ts +4 -6
- package/types/storages/types.d.ts +4 -3
- package/types/sync/polling/syncTasks/mySegmentsSyncTask.d.ts +2 -3
- package/types/sync/polling/types.d.ts +10 -3
- package/types/sync/polling/updaters/mySegmentsUpdater.d.ts +4 -4
- package/types/sync/streaming/SSEHandler/types.d.ts +13 -2
- package/types/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.d.ts +2 -1
- package/types/sync/streaming/constants.d.ts +1 -0
- package/types/sync/streaming/pushManager.d.ts +2 -0
- package/types/sync/streaming/types.d.ts +5 -4
- package/types/sync/submitters/types.d.ts +9 -3
- package/types/types.d.ts +25 -0
- package/types/utils/constants/index.d.ts +3 -0
- package/types/utils/settingsValidation/index.d.ts +2 -0
- package/types/utils/settingsValidation/types.d.ts +1 -1
package/src/sync/streaming/SSEHandler/types.ts
CHANGED
@@ -1,5 +1,5 @@
 import { ControlType } from '../constants';
-import { MY_SEGMENTS_UPDATE, MY_SEGMENTS_UPDATE_V2, SEGMENT_UPDATE, SPLIT_UPDATE, SPLIT_KILL, CONTROL, OCCUPANCY } from '../types';
+import { MY_SEGMENTS_UPDATE, MY_SEGMENTS_UPDATE_V2, SEGMENT_UPDATE, SPLIT_UPDATE, SPLIT_KILL, CONTROL, OCCUPANCY, MY_LARGE_SEGMENTS_UPDATE } from '../types';

 export interface IMySegmentsUpdateData {
   type: MY_SEGMENTS_UPDATE,
@@ -35,6 +35,18 @@ export interface IMySegmentsUpdateV2Data {
   u: UpdateStrategy,
 }

+export interface IMyLargeSegmentsUpdateData {
+  type: MY_LARGE_SEGMENTS_UPDATE,
+  changeNumber: number,
+  largeSegments: string[],
+  c: Compression,
+  d: string,
+  u: UpdateStrategy,
+  i?: number, // time interval in millis
+  h?: number, // hash function. 0 for murmur3_32, 1 for murmur3_64
+  s?: number, // seed for hash function
+}
+
 export interface ISegmentUpdateData {
   type: SEGMENT_UPDATE,
   changeNumber: number,
@@ -68,6 +80,6 @@ export interface IOccupancyData {
   }
 }

-export type INotificationData = IMySegmentsUpdateData | IMySegmentsUpdateV2Data | ISegmentUpdateData | ISplitUpdateData | ISplitKillData | IControlData | IOccupancyData
+export type INotificationData = IMySegmentsUpdateData | IMySegmentsUpdateV2Data | IMyLargeSegmentsUpdateData | ISegmentUpdateData | ISplitUpdateData | ISplitKillData | IControlData | IOccupancyData
 export type INotificationMessage = { parsedData: INotificationData, channel: string, timestamp: number, data: string }
 export type INotificationError = Event & { parsedData?: any, message?: string }
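For illustration, a MY_LARGE_SEGMENTS_UPDATE notification parsed into the new IMyLargeSegmentsUpdateData shape could look roughly as follows. All values are hypothetical; only the field names and meanings come from the interface above, and the UpdateStrategy numbering is assumed to match the one used for MY_SEGMENTS_UPDATE_V2.

// Hypothetical parsed notification (illustrative values only):
const exampleParsedData = {
  type: 'MY_LARGE_SEGMENTS_UPDATE',
  changeNumber: 1700000000000,
  largeSegments: ['large_segment_A'],
  c: 0,     // Compression of the `d` payload
  d: '',    // encoded payload, empty when no payload is pushed
  u: 0,     // UpdateStrategy (assumed 0 = UnboundedFetchRequest, as in MY_SEGMENTS_UPDATE_V2)
  i: 60000, // optional time interval in millis used to spread fetches
  h: 0,     // hash function: 0 for murmur3_32
  s: 10,    // seed for the hash function
};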
package/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts
CHANGED
@@ -1,19 +1,20 @@
 import { IMySegmentsSyncTask, MySegmentsData } from '../../polling/types';
 import { Backoff } from '../../../utils/Backoff';
 import { IUpdateWorker } from './types';
-import { MY_SEGMENT } from '../../../utils/constants';
 import { ITelemetryTracker } from '../../../trackers/types';
+import { UpdatesFromSSEEnum } from '../../submitters/types';

 /**
  * MySegmentsUpdateWorker factory
  */
-export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask, telemetryTracker: ITelemetryTracker): IUpdateWorker {
+export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask, telemetryTracker: ITelemetryTracker, updateType: UpdatesFromSSEEnum): IUpdateWorker {

   let maxChangeNumber = 0; // keeps the maximum changeNumber among queued events
   let currentChangeNumber = -1;
   let handleNewEvent = false;
   let isHandlingEvent: boolean;
   let _segmentsData: MySegmentsData | undefined; // keeps the segmentsData (if included in notification payload) from the queued event with maximum changeNumber
+  let _delay: undefined | number;
   const backoff = new Backoff(__handleMySegmentsUpdateCall);

   function __handleMySegmentsUpdateCall() {
@@ -23,10 +24,10 @@ export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask,
     const currentMaxChangeNumber = maxChangeNumber;

     // fetch mySegments revalidating data if cached
-    mySegmentsSyncTask.execute(_segmentsData, true).then((result) => {
+    mySegmentsSyncTask.execute(_segmentsData, true, _delay).then((result) => {
       if (!isHandlingEvent) return; // halt if `stop` has been called
       if (result !== false) {// Unlike `Splits|SegmentsUpdateWorker`, we cannot use `mySegmentsCache.getChangeNumber` since `/mySegments` endpoint doesn't provide this value.
-        if (_segmentsData) telemetryTracker.trackUpdatesFromSSE(
+        if (_segmentsData) telemetryTracker.trackUpdatesFromSSE(updateType);
         currentChangeNumber = Math.max(currentChangeNumber, currentMaxChangeNumber); // use `currentMaxChangeNumber`, in case that `maxChangeNumber` was updated during fetch.
       }
       if (handleNewEvent) {
@@ -47,12 +48,13 @@ export function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask,
     * @param {number} changeNumber change number of the MY_SEGMENTS_UPDATE notification
     * @param {SegmentsData | undefined} segmentsData might be undefined
     */
-    put(changeNumber: number, segmentsData?: MySegmentsData) {
+    put(changeNumber: number, segmentsData?: MySegmentsData, delay?: number) {
      if (changeNumber <= currentChangeNumber || changeNumber <= maxChangeNumber) return;

      maxChangeNumber = changeNumber;
      handleNewEvent = true;
      _segmentsData = segmentsData;
+      _delay = delay;

      if (backoff.timeoutID || !isHandlingEvent) __handleMySegmentsUpdateCall();
      backoff.reset();
package/src/sync/streaming/constants.ts
CHANGED
@@ -30,6 +30,7 @@ export const MY_SEGMENTS_UPDATE_V2 = 'MY_SEGMENTS_UPDATE_V2';
 export const SEGMENT_UPDATE = 'SEGMENT_UPDATE';
 export const SPLIT_KILL = 'SPLIT_KILL';
 export const SPLIT_UPDATE = 'SPLIT_UPDATE';
+export const MY_LARGE_SEGMENTS_UPDATE = 'MY_LARGE_SEGMENTS_UPDATE';

 // Control-type push notifications, handled by NotificationKeeper
 export const CONTROL = 'CONTROL';
package/src/sync/streaming/pushManager.ts
CHANGED
@@ -11,17 +11,26 @@ import { authenticateFactory, hashUserKey } from './AuthClient';
 import { forOwn } from '../../utils/lang';
 import { SSEClient } from './SSEClient';
 import { getMatching } from '../../utils/key';
-import { MY_SEGMENTS_UPDATE, MY_SEGMENTS_UPDATE_V2, PUSH_NONRETRYABLE_ERROR, PUSH_SUBSYSTEM_DOWN, SECONDS_BEFORE_EXPIRATION, SEGMENT_UPDATE, SPLIT_KILL, SPLIT_UPDATE, PUSH_RETRYABLE_ERROR, PUSH_SUBSYSTEM_UP, ControlType } from './constants';
+import { MY_SEGMENTS_UPDATE, MY_SEGMENTS_UPDATE_V2, PUSH_NONRETRYABLE_ERROR, PUSH_SUBSYSTEM_DOWN, SECONDS_BEFORE_EXPIRATION, SEGMENT_UPDATE, SPLIT_KILL, SPLIT_UPDATE, PUSH_RETRYABLE_ERROR, PUSH_SUBSYSTEM_UP, ControlType, MY_LARGE_SEGMENTS_UPDATE } from './constants';
 import { STREAMING_FALLBACK, STREAMING_REFRESH_TOKEN, STREAMING_CONNECTING, STREAMING_DISABLED, ERROR_STREAMING_AUTH, STREAMING_DISCONNECTING, STREAMING_RECONNECT, STREAMING_PARSING_MY_SEGMENTS_UPDATE_V2, STREAMING_PARSING_SPLIT_UPDATE } from '../../logger/constants';
-import { KeyList, UpdateStrategy } from './SSEHandler/types';
+import { IMyLargeSegmentsUpdateData, IMySegmentsUpdateV2Data, KeyList, UpdateStrategy } from './SSEHandler/types';
 import { isInBitmap, parseBitmap, parseFFUpdatePayload, parseKeyList } from './parseUtils';
 import { ISet, _Set } from '../../utils/lang/sets';
+import { hash } from '../../utils/murmur3/murmur3';
 import { Hash64, hash64 } from '../../utils/murmur3/murmur3_64';
 import { IAuthTokenPushEnabled } from './AuthClient/types';
-import { TOKEN_REFRESH, AUTH_REJECTION } from '../../utils/constants';
+import { TOKEN_REFRESH, AUTH_REJECTION, MY_LARGE_SEGMENT, MY_SEGMENT } from '../../utils/constants';
 import { ISdkFactoryContextSync } from '../../sdkFactory/types';
 import { IUpdateWorker } from './UpdateWorkers/types';

+export function getDelay(parsedData: IMyLargeSegmentsUpdateData, matchingKey: string) {
+  const interval = parsedData.i || 60000;
+  // const hashType = parsedData.h || 0;
+  const seed = parsedData.s || 0;
+
+  return hash(matchingKey, seed) % interval;
+}
+
 /**
  * PushManager factory:
  * - for server-side if key is not provided in settings.
@@ -64,7 +73,7 @@ export function pushManagerFactory(
   const userKeyHashes: Record<string, string> = {};
   // [Only for client-side] map of user keys to their corresponding hash64 and MySegmentsUpdateWorkers.
   // Hash64 is used to process MY_SEGMENTS_UPDATE_V2 events and dispatch actions to the corresponding MySegmentsUpdateWorker.
-  const clients: Record<string, { hash64: Hash64, worker: IUpdateWorker }> = {};
+  const clients: Record<string, { hash64: Hash64, worker: IUpdateWorker, workerLarge?: IUpdateWorker }> = {};

   // [Only for client-side] variable to flag that a new client was added. It is needed to reconnect streaming.
   let connectForNewClient = false;
@@ -171,7 +180,10 @@ export function pushManagerFactory(
   // cancel scheduled fetch retries of Splits, Segments, and MySegments Update Workers
   function stopWorkers() {
     splitsUpdateWorker.stop();
-    if (userKey) forOwn(clients, ({ worker }) =>
+    if (userKey) forOwn(clients, ({ worker, workerLarge }) => {
+      worker.stop();
+      workerLarge && workerLarge.stop();
+    });
     else segmentsUpdateWorker!.stop();
   }

@@ -236,76 +248,98 @@ export function pushManagerFactory(
       splitsUpdateWorker.put(parsedData);
     });

-… (11 removed lines not rendered in the source diff view)
-      switch (parsedData.u) {
-        case UpdateStrategy.BoundedFetchRequest: {
-          let bitmap: Uint8Array;
-          try {
-            bitmap = parseBitmap(parsedData.d, parsedData.c);
-          } catch (e) {
-            log.warn(STREAMING_PARSING_MY_SEGMENTS_UPDATE_V2, ['BoundedFetchRequest', e]);
-            break;
-          }
-
-          forOwn(clients, ({ hash64, worker }) => {
-            if (isInBitmap(bitmap, hash64.hex)) {
-              worker.put(parsedData.changeNumber); // fetch mySegments
-            }
-          });
-          return;
+    function handleMySegmentsUpdate(parsedData: IMySegmentsUpdateV2Data | IMyLargeSegmentsUpdateData) {
+      const isLS = parsedData.type === MY_LARGE_SEGMENTS_UPDATE;
+
+      switch (parsedData.u) {
+        case UpdateStrategy.BoundedFetchRequest: {
+          let bitmap: Uint8Array;
+          try {
+            bitmap = parseBitmap(parsedData.d, parsedData.c);
+          } catch (e) {
+            log.warn(STREAMING_PARSING_MY_SEGMENTS_UPDATE_V2, ['BoundedFetchRequest', e]);
+            break;
           }
-… (6 removed lines not rendered in the source diff view)
-          } catch (e) {
-            log.warn(STREAMING_PARSING_MY_SEGMENTS_UPDATE_V2, ['KeyList', e]);
-            break;
+
+          forOwn(clients, ({ hash64, worker, workerLarge }, matchingKey) => {
+            if (isInBitmap(bitmap, hash64.hex)) {
+              isLS ?
+                workerLarge && workerLarge.put(parsedData.changeNumber, undefined, getDelay(parsedData, matchingKey)) :
+                worker.put(parsedData.changeNumber);
            }
+          });
+          return;
+        }
+        case UpdateStrategy.KeyList: {
+          let keyList: KeyList, added: ISet<string>, removed: ISet<string>;
+          try {
+            keyList = parseKeyList(parsedData.d, parsedData.c);
+            added = new _Set(keyList.a);
+            removed = new _Set(keyList.r);
+          } catch (e) {
+            log.warn(STREAMING_PARSING_MY_SEGMENTS_UPDATE_V2, ['KeyList', e]);
+            break;
+          }

-… (3 removed lines not rendered in the source diff view)
+          forOwn(clients, ({ hash64, worker, workerLarge }) => {
+            const add = added.has(hash64.dec) ? true : removed.has(hash64.dec) ? false : undefined;
+            if (add !== undefined) {
+              isLS ?
+                workerLarge && workerLarge.put(parsedData.changeNumber, {
+                  name: parsedData.largeSegments[0],
+                  add
+                }) :
                worker.put(parsedData.changeNumber, {
                  name: parsedData.segmentName,
                  add
                });
-            }
-          });
-          return;
-        }
-        case UpdateStrategy.SegmentRemoval:
-          if (!parsedData.segmentName) {
-            log.warn(STREAMING_PARSING_MY_SEGMENTS_UPDATE_V2, ['SegmentRemoval', 'No segment name was provided']);
-            break;
            }
+          });
+          return;
+        }
+        case UpdateStrategy.SegmentRemoval:
+          if ((isLS && parsedData.largeSegments.length === 0) || (!isLS && !parsedData.segmentName)) {
+            log.warn(STREAMING_PARSING_MY_SEGMENTS_UPDATE_V2, ['SegmentRemoval', 'No segment name was provided']);
+            break;
+          }

-… (1 removed line not rendered in the source diff view)
+          forOwn(clients, ({ worker, workerLarge }) => {
+            isLS ?
+              workerLarge && parsedData.largeSegments.forEach(largeSegment => {
+                workerLarge.put(parsedData.changeNumber, {
+                  name: largeSegment,
+                  add: false
+                });
+              }) :
              worker.put(parsedData.changeNumber, {
                name: parsedData.segmentName,
                add: false
-              })
-… (3 removed lines not rendered in the source diff view)
+              });
+          });
+          return;
+      }

-… (2 removed lines not rendered in the source diff view)
+      // `UpdateStrategy.UnboundedFetchRequest` and fallbacks of other cases
+      forOwn(clients, ({ worker, workerLarge }, matchingKey) => {
+        isLS ?
+          workerLarge && workerLarge.put(parsedData.changeNumber, undefined, getDelay(parsedData, matchingKey)) :
          worker.put(parsedData.changeNumber);
-      });
      });
+    }
+
+    if (userKey) {
+      pushEmitter.on(MY_SEGMENTS_UPDATE, function handleMySegmentsUpdate(parsedData, channel) {
+        const userKeyHash = channel.split('_')[2];
+        const userKey = userKeyHashes[userKeyHash];
+        if (userKey && clients[userKey]) { // check existence since it can be undefined if client has been destroyed
+          clients[userKey].worker.put(
+            parsedData.changeNumber,
+            parsedData.includesPayload ? parsedData.segmentList ? parsedData.segmentList : [] : undefined);
+        }
+      });
+
+      pushEmitter.on(MY_SEGMENTS_UPDATE_V2, handleMySegmentsUpdate);
+      pushEmitter.on(MY_LARGE_SEGMENTS_UPDATE, handleMySegmentsUpdate);
    } else {
      pushEmitter.on(SEGMENT_UPDATE, segmentsUpdateWorker!.put);
    }
@@ -328,7 +362,7 @@ export function pushManagerFactory(
     if (disabled || disconnected === false) return;
     disconnected = false;

-    if (userKey) this.add(userKey, pollingManager.segmentsSyncTask
+    if (userKey) this.add(userKey, pollingManager.segmentsSyncTask, pollingManager.largeSegmentsSyncTask!); // client-side
     else setTimeout(connectPush); // server-side runs in next cycle as in client-side, for consistency with client-side
   },

@@ -338,12 +372,16 @@ export function pushManagerFactory(
   },

   // [Only for client-side]
-  add(userKey: string, mySegmentsSyncTask: IMySegmentsSyncTask) {
+  add(userKey: string, mySegmentsSyncTask: IMySegmentsSyncTask, myLargeSegmentsSyncTask?: IMySegmentsSyncTask) {
     const hash = hashUserKey(userKey);

     if (!userKeyHashes[hash]) {
       userKeyHashes[hash] = userKey;
-      clients[userKey] = {
+      clients[userKey] = {
+        hash64: hash64(userKey),
+        worker: MySegmentsUpdateWorker(mySegmentsSyncTask, telemetryTracker, MY_SEGMENT),
+        workerLarge: myLargeSegmentsSyncTask ? MySegmentsUpdateWorker(myLargeSegmentsSyncTask, telemetryTracker, MY_LARGE_SEGMENT) : undefined
+      };
       connectForNewClient = true; // we must reconnect on start, to listen the channel for the new user key

       // Reconnects in case of a new client.
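As a worked example of the getDelay helper introduced above: each client derives a deterministic delay from the murmur3_32 hash of its matching key, modulo the interval carried in the notification (defaulting to 60000 ms), so fetches triggered by the same MY_LARGE_SEGMENTS_UPDATE event are spread across that window instead of arriving at once. The keys below are made up; hash is the same murmur3 helper that pushManager.ts imports, and the import path here is only illustrative.

// Sketch: the delay always lands in [0, interval) and is stable per key.
// pushManager.ts imports this helper as '../../utils/murmur3/murmur3'.
import { hash } from './utils/murmur3/murmur3';

const interval = 60000; // parsedData.i || 60000
const seed = 0;         // parsedData.s || 0

['user-1', 'user-2', 'anonymous'].forEach((matchingKey) => {
  const delay = hash(matchingKey, seed) % interval;
  console.log(matchingKey, delay); // each key gets its own offset within the interval
});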
package/src/sync/streaming/types.ts
CHANGED
@@ -1,4 +1,4 @@
-import { IMySegmentsUpdateData, IMySegmentsUpdateV2Data, ISegmentUpdateData, ISplitUpdateData, ISplitKillData } from './SSEHandler/types';
+import { IMySegmentsUpdateData, IMySegmentsUpdateV2Data, ISegmentUpdateData, ISplitUpdateData, ISplitKillData, IMyLargeSegmentsUpdateData } from './SSEHandler/types';
 import { ITask } from '../types';
 import { IMySegmentsSyncTask } from '../polling/types';
 import { IEventEmitter } from '../../types';
@@ -16,16 +16,18 @@ export type MY_SEGMENTS_UPDATE_V2 = 'MY_SEGMENTS_UPDATE_V2';
 export type SEGMENT_UPDATE = 'SEGMENT_UPDATE';
 export type SPLIT_KILL = 'SPLIT_KILL';
 export type SPLIT_UPDATE = 'SPLIT_UPDATE';
+export type MY_LARGE_SEGMENTS_UPDATE = 'MY_LARGE_SEGMENTS_UPDATE';

 // Control-type push notifications, handled by NotificationKeeper
 export type CONTROL = 'CONTROL';
 export type OCCUPANCY = 'OCCUPANCY';

-export type IPushEvent = PUSH_SUBSYSTEM_UP | PUSH_SUBSYSTEM_DOWN | PUSH_NONRETRYABLE_ERROR | PUSH_RETRYABLE_ERROR | MY_SEGMENTS_UPDATE | MY_SEGMENTS_UPDATE_V2 | SEGMENT_UPDATE | SPLIT_UPDATE | SPLIT_KILL | ControlType.STREAMING_RESET
+export type IPushEvent = PUSH_SUBSYSTEM_UP | PUSH_SUBSYSTEM_DOWN | PUSH_NONRETRYABLE_ERROR | PUSH_RETRYABLE_ERROR | MY_SEGMENTS_UPDATE | MY_SEGMENTS_UPDATE_V2 | SEGMENT_UPDATE | SPLIT_UPDATE | SPLIT_KILL | MY_LARGE_SEGMENTS_UPDATE | ControlType.STREAMING_RESET

 type IParsedData<T extends IPushEvent> =
   T extends MY_SEGMENTS_UPDATE ? IMySegmentsUpdateData :
   T extends MY_SEGMENTS_UPDATE_V2 ? IMySegmentsUpdateV2Data :
+  T extends MY_LARGE_SEGMENTS_UPDATE ? IMyLargeSegmentsUpdateData :
   T extends SEGMENT_UPDATE ? ISegmentUpdateData :
   T extends SPLIT_UPDATE ? ISplitUpdateData :
   T extends SPLIT_KILL ? ISplitKillData : undefined;
@@ -45,6 +47,6 @@ export interface IPushEventEmitter extends IEventEmitter {
  */
 export interface IPushManager extends ITask, IPushEventEmitter {
   // Methods used in client-side, to support multiple clients
-  add(userKey: string, mySegmentsSyncTask: IMySegmentsSyncTask): void,
+  add(userKey: string, mySegmentsSyncTask: IMySegmentsSyncTask, myLargeSegmentsSyncTask?: IMySegmentsSyncTask): void,
   remove(userKey: string): void
 }
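A small type-level sketch of what the extended IParsedData mapping buys: a listener registered for MY_LARGE_SEGMENTS_UPDATE now receives IMyLargeSegmentsUpdateData. Since IParsedData itself is not exported, this check only compiles inside sync/streaming/types.ts (or with a local copy of the alias).

// Within the module, the new conditional branch resolves as expected:
type LargeSegmentsPayload = IParsedData<MY_LARGE_SEGMENTS_UPDATE>; // resolves to IMyLargeSegmentsUpdateData
const check: LargeSegmentsPayload extends IMyLargeSegmentsUpdateData ? true : never = true;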
package/src/sync/submitters/telemetrySubmitter.ts
CHANGED
@@ -76,10 +76,12 @@ export function telemetryCacheConfigAdapter(telemetry: ITelemetryCacheSync, sett

   return objectAssign(getTelemetryConfigStats(settings.mode, settings.storage.type), {
     sE: settings.streamingEnabled,
+    lE: isClientSide ? settings.sync.largeSegmentsEnabled : undefined,
     rR: {
       sp: scheduler.featuresRefreshRate / 1000,
       se: isClientSide ? undefined : scheduler.segmentsRefreshRate / 1000,
       ms: isClientSide ? scheduler.segmentsRefreshRate / 1000 : undefined,
+      mls: isClientSide && settings.sync.largeSegmentsEnabled ? scheduler.largeSegmentsRefreshRate / 1000 : undefined,
       im: scheduler.impressionsRefreshRate / 1000,
       ev: scheduler.eventsPushRate / 1000,
       te: scheduler.telemetryRefreshRate / 1000,
package/src/sync/submitters/types.ts
CHANGED
@@ -103,7 +103,7 @@ export type DROPPED = 1;
 export type DEDUPED = 2;
 export type ImpressionDataType = QUEUED | DROPPED | DEDUPED
 export type EventDataType = QUEUED | DROPPED;
-export type UpdatesFromSSEEnum = SPLITS | MY_SEGMENT;
+export type UpdatesFromSSEEnum = SPLITS | MY_SEGMENT | MY_LARGE_SEGMENT;

 export type SPLITS = 'sp';
 export type IMPRESSIONS = 'im';
@@ -113,7 +113,8 @@ export type TELEMETRY = 'te';
 export type TOKEN = 'to';
 export type SEGMENT = 'se';
 export type MY_SEGMENT = 'ms';
-export type
+export type MY_LARGE_SEGMENT = 'mls';
+export type OperationType = SPLITS | IMPRESSIONS | IMPRESSIONS_COUNT | EVENTS | TELEMETRY | TOKEN | SEGMENT | MY_SEGMENT | MY_LARGE_SEGMENT;

 export type LastSync = Partial<Record<OperationType, number | undefined>>
 export type HttpErrors = Partial<Record<OperationType, { [statusCode: string]: number }>>
@@ -158,8 +159,9 @@ export type TelemetryUsageStats = {

 // amount of instant updates that we are doing by avoiding fetching to Split servers
 export type UpdatesFromSSE = {
-  sp
+  sp?: number, // splits
   ms?: number, // my segments
+  mls?: number // my large segments
 }

 // 'metrics/usage' JSON request body
@@ -175,12 +177,14 @@ export type TelemetryUsageStatsPayload = TelemetryUsageStats & {
   spC?: number, // splitCount
   seC?: number, // segmentCount
   skC?: number, // segmentKeyCount
+  lseC?: number, // largeSegmentCount
+  lskC?: number, // largeSegmentKeyCount
   sL?: number, // sessionLengthMs
   eQ: number, // eventsQueued
   eD: number, // eventsDropped
   sE: Array<StreamingEvent>, // streamingEvents
   t?: Array<string>, // tags
-  ufs?: UpdatesFromSSE, //
+  ufs?: UpdatesFromSSE, // instant updates
 }

 /**
@@ -201,6 +205,7 @@ export type RefreshRates = {
   sp: number, // splits
   se?: number, // segments
   ms?: number, // mySegments
+  mls?: number, // myLargeSegments
   im: number, // impressions
   ev: number, // events
   te: number, // telemetry
@@ -226,6 +231,7 @@ export type TelemetryConfigStats = {
 // 'metrics/config' JSON request body
 export type TelemetryConfigStatsPayload = TelemetryConfigStats & {
   sE: boolean, // streamingEnabled
+  lE?: boolean, // largeSegmentsEnabled
   rR: RefreshRates, // refreshRates
   uO: UrlOverrides, // urlOverrides
   iQ: number, // impressionsQueueSize
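Putting the new telemetry fields together, a client-side 'metrics/config' payload with large segments enabled would carry roughly the fragment below. The values are illustrative (they mirror the defaults added in settingsValidation later in this diff); only the field names and their meanings come from the types above.

// Hypothetical fragment of a TelemetryConfigStatsPayload (client-side, largeSegmentsEnabled = true):
const configStatsFragment = {
  sE: true,   // streamingEnabled
  lE: true,   // largeSegmentsEnabled (new)
  rR: {       // refresh rates, in seconds
    sp: 60,   // splits
    ms: 60,   // mySegments
    mls: 60,  // myLargeSegments (new)
    im: 300,  // impressions
    ev: 60,   // events
    te: 3600, // telemetry
  },
};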
package/src/sync/syncManagerOnline.ts
CHANGED
@@ -7,7 +7,7 @@ import { IPollingManager, IPollingManagerCS } from './polling/types';
 import { PUSH_SUBSYSTEM_UP, PUSH_SUBSYSTEM_DOWN } from './streaming/constants';
 import { SYNC_START_POLLING, SYNC_CONTINUE_POLLING, SYNC_STOP_POLLING } from '../logger/constants';
 import { isConsentGranted } from '../consent';
-import { POLLING, STREAMING, SYNC_MODE_UPDATE } from '../utils/constants';
+import { IN_LARGE_SEGMENT, IN_SEGMENT, POLLING, STREAMING, SYNC_MODE_UPDATE } from '../utils/constants';
 import { ISdkFactoryContextSync } from '../sdkFactory/types';

 /**
@@ -141,36 +141,44 @@ export function syncManagerOnlineFactory(
     shared(matchingKey: string, readinessManager: IReadinessManager, storage: IStorageSync) {
       if (!pollingManager) return;

-      const
+      const { msSyncTask, mlsSyncTask } = (pollingManager as IPollingManagerCS).add(matchingKey, readinessManager, storage);

       return {
-        isRunning:
+        isRunning: msSyncTask.isRunning,
         start() {
           if (syncEnabled) {
             if (pushManager) {
               if (pollingManager!.isRunning()) {
                 // if doing polling, we must start the periodic fetch of data
-                if (storage.splits.
+                if (storage.splits.usesMatcher(IN_SEGMENT)) msSyncTask.start();
+                if (mlsSyncTask && storage.splits.usesMatcher(IN_LARGE_SEGMENT)) mlsSyncTask.start();
               } else {
                 // if not polling, we must execute the sync task for the initial fetch
                 // of segments since `syncAll` was already executed when starting the main client
-… (1 removed line not rendered in the source diff view)
+                msSyncTask.execute();
+                mlsSyncTask && mlsSyncTask.execute();
               }
-              pushManager.add(matchingKey,
+              pushManager.add(matchingKey, msSyncTask, mlsSyncTask);
             } else {
-              if (storage.splits.
+              if (storage.splits.usesMatcher(IN_SEGMENT)) msSyncTask.start();
+              if (mlsSyncTask && storage.splits.usesMatcher(IN_LARGE_SEGMENT)) mlsSyncTask.start();
             }
           } else {
-            if (!readinessManager.isReady())
+            if (!readinessManager.isReady()) {
+              msSyncTask.execute();
+              mlsSyncTask && mlsSyncTask.execute();
+            }
           }
         },
         stop() {
          // check in case `client.destroy()` has been invoked more than once for the same client
-          const
-          if (
+          const syncTasks = (pollingManager as IPollingManagerCS).get(matchingKey);
+          if (syncTasks) {
+            const { msSyncTask, mlsSyncTask } = syncTasks;
            // stop syncing
            if (pushManager) pushManager.remove(matchingKey);
-            if (
+            if (msSyncTask.isRunning()) msSyncTask.stop();
+            if (mlsSyncTask && mlsSyncTask.isRunning()) mlsSyncTask.stop();

            (pollingManager as IPollingManagerCS).remove(matchingKey);
          }
package/src/types.ts
CHANGED
@@ -86,6 +86,7 @@ export interface ISettings {
     metricsRefreshRate?: number,
     telemetryRefreshRate: number,
     segmentsRefreshRate: number,
+    largeSegmentsRefreshRate: number,
     offlineRefreshRate: number,
     eventsPushRate: number,
     eventsQueueSize: number,
@@ -95,7 +96,8 @@ export interface ISettings {
     readyTimeout: number,
     requestTimeoutBeforeReady: number,
     retriesOnFailureBeforeReady: number,
-    eventsFirstPushWindow: number
+    eventsFirstPushWindow: number,
+    waitForLargeSegments: boolean
   },
   readonly storage: IStorageSyncFactory | IStorageAsyncFactory,
   readonly integrations: Array<{
@@ -119,6 +121,7 @@ export interface ISettings {
     __splitFiltersValidation: ISplitFiltersValidation,
     localhostMode?: SplitIO.LocalhostFactory,
     enabled: boolean,
+    largeSegmentsEnabled: boolean,
     flagSpecVersion: string
   },
   readonly runtime: {
@@ -813,6 +816,13 @@ export namespace SplitIO {
       * @default 10
       */
      eventsFirstPushWindow?: number,
+      /**
+       * Whether the SDK should wait for large segments to be ready before emitting SDK_READY event.
+       * It only applies if largeSegmentsEnabled is true.
+       * @property {number} waitForLargeSegments
+       * @default true
+       */
+      waitForLargeSegments?: boolean
     },
     /**
      * SDK scheduler settings.
@@ -857,6 +867,13 @@ export namespace SplitIO {
       * @default 60
       */
      segmentsRefreshRate?: number,
+      /**
+       * The SDK polls Split servers for changes to large segment definitions. This parameter controls this polling period in seconds.
+       * It only applies if largeSegmentsEnabled is true.
+       * @property {number} largeSegmentsRefreshRate
+       * @default 60
+       */
+      largeSegmentsRefreshRate?: number,
      /**
       * The SDK posts the queued events data in bulks. This parameter controls the posting rate in seconds.
       * @property {number} eventsPushRate
@@ -929,6 +946,14 @@ export namespace SplitIO {
      * @property {Object} urls
      */
     urls?: UrlSettings,
+    sync?: ISharedSettings['sync'] & {
+      /**
+       * Enables synchronization of large segments.
+       * @property {boolean} largeSegmentsEnabled
+       * @default false
+       */
+      largeSegmentsEnabled?: boolean
+    }
   }
   /**
    * Settings interface for SDK instances created on NodeJS.
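For SDKs built on top of this package, the new public options declared above would be set roughly like this. The shape of the settings object is the one documented in the SplitIO namespace; the surrounding factory and the example values are assumptions.

// Hypothetical client-side settings exercising the new large-segments options:
const settings = {
  core: { authorizationKey: 'YOUR_CLIENT_SIDE_SDK_KEY', key: 'user-key' },
  sync: {
    largeSegmentsEnabled: true,   // default: false
  },
  scheduler: {
    largeSegmentsRefreshRate: 60, // in seconds, default 60; only applies if largeSegmentsEnabled is true
  },
  startup: {
    waitForLargeSegments: true,   // default: true; wait for large segments before emitting SDK_READY
  },
};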
package/src/utils/constants/index.ts
CHANGED
@@ -76,6 +76,7 @@ export const TELEMETRY = 'te';
 export const TOKEN = 'to';
 export const SEGMENT = 'se';
 export const MY_SEGMENT = 'ms';
+export const MY_LARGE_SEGMENT = 'mls';

 export const TREATMENT = 't';
 export const TREATMENTS = 'ts';
@@ -106,3 +107,7 @@ export const ENABLED = 1;
 export const PAUSED = 2;

 export const FLAG_SPEC_VERSION = '1.1';
+
+// Matcher types
+export const IN_SEGMENT = 'IN_SEGMENT';
+export const IN_LARGE_SEGMENT = 'IN_LARGE_SEGMENT';
package/src/utils/settingsValidation/index.ts
CHANGED
@@ -7,7 +7,7 @@ import { ISettingsValidationParams } from './types';
 import { ISettings } from '../../types';
 import { validateKey } from '../inputValidation/key';
 import { validateTrafficType } from '../inputValidation/trafficType';
-import { ERROR_MIN_CONFIG_PARAM } from '../../logger/constants';
+import { ERROR_MIN_CONFIG_PARAM, LOG_PREFIX_CLIENT_INSTANTIATION } from '../../logger/constants';

 // Exported for telemetry
 export const base = {
@@ -32,6 +32,8 @@ export const base = {
     featuresRefreshRate: 60,
     // fetch segments updates each 60 sec
     segmentsRefreshRate: 60,
+    // fetch large segments updates each 60 sec
+    largeSegmentsRefreshRate: 60,
     // publish telemetry stats each 3600 secs (1 hour)
     telemetryRefreshRate: 3600,
     // publish evaluations each 300 sec (default value for OPTIMIZED impressions mode)
@@ -85,7 +87,8 @@ export const base = {
     impressionsMode: OPTIMIZED,
     localhostMode: undefined,
     enabled: true,
-    flagSpecVersion: FLAG_SPEC_VERSION
+    flagSpecVersion: FLAG_SPEC_VERSION,
+    largeSegmentsEnabled: false
   },

   // Logger
@@ -132,6 +135,7 @@ export function settingsValidation(config: unknown, validationParams: ISettingsV
   const { scheduler, startup } = withDefaults;
   scheduler.featuresRefreshRate = fromSecondsToMillis(scheduler.featuresRefreshRate);
   scheduler.segmentsRefreshRate = fromSecondsToMillis(scheduler.segmentsRefreshRate);
+  scheduler.largeSegmentsRefreshRate = fromSecondsToMillis(scheduler.largeSegmentsRefreshRate);
   scheduler.offlineRefreshRate = fromSecondsToMillis(scheduler.offlineRefreshRate);
   scheduler.eventsPushRate = fromSecondsToMillis(scheduler.eventsPushRate);
   scheduler.telemetryRefreshRate = fromSecondsToMillis(validateMinValue('telemetryRefreshRate', scheduler.telemetryRefreshRate, 60));
@@ -166,13 +170,13 @@ export function settingsValidation(config: unknown, validationParams: ISettingsV
     // Keeping same behaviour than JS SDK: if settings key or TT are invalid,
     // `false` value is used as bound key/TT of the default client, which leads to some issues.
     // @ts-ignore, @TODO handle invalid keys as a non-recoverable error?
-    withDefaults.core.key = validateKey(log, maybeKey,
+    withDefaults.core.key = validateKey(log, maybeKey, LOG_PREFIX_CLIENT_INSTANTIATION);
   }

   if (validationParams.acceptTT) {
     const maybeTT = withDefaults.core.trafficType;
     if (maybeTT !== undefined) { // @ts-ignore
-      withDefaults.core.trafficType = validateTrafficType(log, maybeTT,
+      withDefaults.core.trafficType = validateTrafficType(log, maybeTT, LOG_PREFIX_CLIENT_INSTANTIATION);
     }
   }
 } else {
@@ -209,11 +213,12 @@ export function settingsValidation(config: unknown, validationParams: ISettingsV
   const splitFiltersValidation = validateSplitFilters(log, sync.splitFilters, withDefaults.mode);
   sync.splitFilters = splitFiltersValidation.validFilters;
   sync.__splitFiltersValidation = splitFiltersValidation;
-  sync.flagSpecVersion = flagSpec ? flagSpec(withDefaults) : FLAG_SPEC_VERSION;

+  // ensure a valid flag spec version
+  sync.flagSpecVersion = flagSpec ? flagSpec(withDefaults) : FLAG_SPEC_VERSION;
   // ensure a valid user consent value
   // @ts-ignore, modify readonly prop
-  withDefaults.userConsent = consent(withDefaults);
+  withDefaults.userConsent = consent ? consent(withDefaults) : undefined;

   return withDefaults;
 }