@splitsoftware/splitio-commons 1.5.1-rc.0 → 1.5.1-rc.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGES.txt +4 -2
- package/cjs/integrations/ga/GaToSplit.js +1 -1
- package/cjs/services/splitApi.js +4 -4
- package/cjs/sync/polling/fetchers/segmentChangesFetcher.js +5 -5
- package/cjs/sync/polling/fetchers/splitChangesFetcher.js +2 -2
- package/cjs/sync/polling/updaters/segmentChangesUpdater.js +34 -34
- package/cjs/sync/polling/updaters/splitChangesUpdater.js +4 -3
- package/cjs/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.js +46 -46
- package/cjs/sync/streaming/UpdateWorkers/SegmentsUpdateWorker.js +82 -64
- package/cjs/sync/streaming/UpdateWorkers/SplitsUpdateWorker.js +74 -58
- package/cjs/sync/streaming/UpdateWorkers/constants.js +6 -0
- package/cjs/sync/streaming/pushManager.js +6 -7
- package/cjs/sync/syncTask.js +13 -16
- package/cjs/utils/Backoff.js +3 -2
- package/esm/integrations/ga/GaToSplit.js +1 -1
- package/esm/services/splitApi.js +4 -4
- package/esm/sync/polling/fetchers/segmentChangesFetcher.js +5 -5
- package/esm/sync/polling/fetchers/splitChangesFetcher.js +2 -2
- package/esm/sync/polling/updaters/segmentChangesUpdater.js +34 -34
- package/esm/sync/polling/updaters/splitChangesUpdater.js +4 -3
- package/esm/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.js +46 -47
- package/esm/sync/streaming/UpdateWorkers/SegmentsUpdateWorker.js +82 -65
- package/esm/sync/streaming/UpdateWorkers/SplitsUpdateWorker.js +74 -59
- package/esm/sync/streaming/UpdateWorkers/constants.js +3 -0
- package/esm/sync/streaming/pushManager.js +6 -7
- package/esm/sync/syncTask.js +13 -16
- package/esm/utils/Backoff.js +3 -2
- package/package.json +1 -5
- package/src/integrations/ga/GaToSplit.ts +1 -1
- package/src/integrations/ga/autoRequire.js +16 -16
- package/src/services/splitApi.ts +4 -4
- package/src/services/types.ts +2 -2
- package/src/sync/polling/fetchers/segmentChangesFetcher.ts +5 -4
- package/src/sync/polling/fetchers/splitChangesFetcher.ts +2 -1
- package/src/sync/polling/fetchers/types.ts +2 -0
- package/src/sync/polling/pollingManagerCS.ts +5 -5
- package/src/sync/polling/syncTasks/mySegmentsSyncTask.ts +2 -2
- package/src/sync/polling/types.ts +14 -6
- package/src/sync/polling/updaters/mySegmentsUpdater.ts +4 -4
- package/src/sync/polling/updaters/segmentChangesUpdater.ts +34 -32
- package/src/sync/polling/updaters/splitChangesUpdater.ts +5 -4
- package/src/sync/streaming/SSEHandler/types.ts +0 -7
- package/src/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.ts +45 -54
- package/src/sync/streaming/UpdateWorkers/SegmentsUpdateWorker.ts +78 -63
- package/src/sync/streaming/UpdateWorkers/SplitsUpdateWorker.ts +73 -61
- package/src/sync/streaming/UpdateWorkers/constants.ts +3 -0
- package/src/sync/streaming/UpdateWorkers/types.ts +2 -4
- package/src/sync/streaming/pushManager.ts +12 -12
- package/src/sync/streaming/types.ts +2 -2
- package/src/sync/syncTask.ts +16 -18
- package/src/utils/Backoff.ts +7 -2
- package/types/services/types.d.ts +2 -2
- package/types/sync/polling/fetchers/types.d.ts +2 -2
- package/types/sync/polling/syncTasks/mySegmentsSyncTask.d.ts +2 -2
- package/types/sync/polling/types.d.ts +11 -6
- package/types/sync/polling/updaters/segmentChangesUpdater.d.ts +1 -1
- package/types/sync/polling/updaters/splitChangesUpdater.d.ts +1 -1
- package/types/sync/streaming/SSEHandler/types.d.ts +0 -4
- package/types/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.d.ts +3 -24
- package/types/sync/streaming/UpdateWorkers/SegmentsUpdateWorker.d.ts +3 -23
- package/types/sync/streaming/UpdateWorkers/SplitsUpdateWorker.d.ts +6 -33
- package/types/sync/streaming/UpdateWorkers/types.d.ts +1 -2
- package/types/sync/streaming/types.d.ts +2 -2
- package/types/sync/syncTask.d.ts +2 -3
- package/types/utils/Backoff.d.ts +2 -0
- package/cjs/sync/offline/LocalhostFromFile.js +0 -13
- package/cjs/sync/offline/splitsParser/splitsParserFromFile.js +0 -151
- package/esm/sync/offline/LocalhostFromFile.js +0 -9
- package/esm/sync/offline/splitsParser/splitsParserFromFile.js +0 -146
- package/src/sync/offline/LocalhostFromFile.ts +0 -12
- package/src/sync/offline/splitsParser/splitsParserFromFile.ts +0 -182

package/src/sync/streaming/pushManager.ts
CHANGED

@@ -1,6 +1,6 @@
 import { IPushEventEmitter, IPushManager } from './types';
 import { ISSEClient } from './SSEClient/types';
-import {
+import { IMySegmentsSyncTask, IPollingManager, ISegmentsSyncTask } from '../polling/types';
 import { objectAssign } from '../../utils/lang/objectAssign';
 import { Backoff } from '../../utils/Backoff';
 import { SSEHandlerFactory } from './SSEHandler';
@@ -20,6 +20,7 @@ import { Hash64, hash64 } from '../../utils/murmur3/murmur3_64';
 import { IAuthTokenPushEnabled } from './AuthClient/types';
 import { TOKEN_REFRESH, AUTH_REJECTION } from '../../utils/constants';
 import { ISdkFactoryContextSync } from '../../sdkFactory/types';
+import { IUpdateWorker } from './UpdateWorkers/types';

 /**
  * PushManager factory:
@@ -55,15 +56,15 @@ export function pushManagerFactory(

   // init workers
   // MySegmentsUpdateWorker (client-side) are initiated in `add` method
-  const segmentsUpdateWorker = userKey ? undefined :
+  const segmentsUpdateWorker = userKey ? undefined : SegmentsUpdateWorker(log, pollingManager.segmentsSyncTask as ISegmentsSyncTask, storage.segments);
   // For server-side we pass the segmentsSyncTask, used by SplitsUpdateWorker to fetch new segments
-  const splitsUpdateWorker =
+  const splitsUpdateWorker = SplitsUpdateWorker(log, storage.splits, pollingManager.splitsSyncTask, readiness.splits, userKey ? undefined : pollingManager.segmentsSyncTask as ISegmentsSyncTask);

   // [Only for client-side] map of hashes to user keys, to dispatch MY_SEGMENTS_UPDATE events to the corresponding MySegmentsUpdateWorker
   const userKeyHashes: Record<string, string> = {};
   // [Only for client-side] map of user keys to their corresponding hash64 and MySegmentsUpdateWorkers.
   // Hash64 is used to process MY_SEGMENTS_UPDATE_V2 events and dispatch actions to the corresponding MySegmentsUpdateWorker.
-  const clients: Record<string, { hash64: Hash64, worker:
+  const clients: Record<string, { hash64: Hash64, worker: IUpdateWorker }> = {};

   // [Only for client-side] variable to flag that a new client was added. It is needed to reconnect streaming.
   let connectForNewClient = false;
@@ -169,9 +170,9 @@ export function pushManagerFactory(

   // cancel scheduled fetch retries of Splits, Segments, and MySegments Update Workers
   function stopWorkers() {
-    splitsUpdateWorker.
-    if (userKey) forOwn(clients, ({ worker }) => worker.
-    else
+    splitsUpdateWorker.stop();
+    if (userKey) forOwn(clients, ({ worker }) => worker.stop());
+    else segmentsUpdateWorker!.stop();
   }

   pushEmitter.on(PUSH_SUBSYSTEM_DOWN, stopWorkers);
@@ -180,7 +181,6 @@ export function pushManagerFactory(
   // Otherwise it is unnecessary (e.g, STREAMING_RESUMED).
   pushEmitter.on(PUSH_SUBSYSTEM_UP, () => {
     connectPushRetryBackoff.reset();
-    stopWorkers();
   });

   /** Fallback to polling without retry due to: STREAMING_DISABLED control event, or 'pushEnabled: false', or non-recoverable SSE and Authentication errors */
@@ -294,7 +294,7 @@ export function pushManagerFactory(
       });
     });
   } else {
-    pushEmitter.on(SEGMENT_UPDATE,
+    pushEmitter.on(SEGMENT_UPDATE, segmentsUpdateWorker!.put);
   }

   return objectAssign(
@@ -315,7 +315,7 @@ export function pushManagerFactory(
       if (disabled || disconnected === false) return;
       disconnected = false;

-      if (userKey) this.add(userKey, pollingManager.segmentsSyncTask); // client-side
+      if (userKey) this.add(userKey, pollingManager.segmentsSyncTask as IMySegmentsSyncTask); // client-side
       else setTimeout(connectPush); // server-side runs in next cycle as in client-side, for consistency with client-side
     },

@@ -325,12 +325,12 @@ export function pushManagerFactory(
     },

     // [Only for client-side]
-    add(userKey: string, mySegmentsSyncTask:
+    add(userKey: string, mySegmentsSyncTask: IMySegmentsSyncTask) {
       const hash = hashUserKey(userKey);

       if (!userKeyHashes[hash]) {
         userKeyHashes[hash] = userKey;
-        clients[userKey] = { hash64: hash64(userKey), worker:
+        clients[userKey] = { hash64: hash64(userKey), worker: MySegmentsUpdateWorker(mySegmentsSyncTask) };
         connectForNewClient = true; // we must reconnect on start, to listen the channel for the new user key

         // Reconnects in case of a new client.
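
For orientation (not part of the package diff): the workers constructed above are now plain objects returned by factory functions, exposing a `put` handler for streaming notifications and a `stop` method that cancels scheduled fetch retries, which is what `stopWorkers` relies on. A minimal TypeScript sketch of that shape, with hypothetical names, assuming only the calls visible in this file (`worker.put(...)`, `worker.stop()`):

// Illustrative sketch of an IUpdateWorker-like factory; names and retry delay are hypothetical.
type UpdateEvent = { changeNumber: number };

function ExampleUpdateWorker(sync: (till?: number) => Promise<boolean>) {
  let retryTimeout: ReturnType<typeof setTimeout> | undefined;

  function put(event: UpdateEvent) {
    sync(event.changeNumber).then((success) => {
      // If the fetch did not reach the notified change number, schedule a retry
      if (!success) retryTimeout = setTimeout(() => put(event), 1000);
    });
  }

  // Cancels any scheduled fetch retry (what stopWorkers() calls above)
  function stop() {
    if (retryTimeout) clearTimeout(retryTimeout);
    retryTimeout = undefined;
  }

  return { put, stop };
}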

package/src/sync/streaming/types.ts
CHANGED

@@ -1,6 +1,6 @@
 import { IMySegmentsUpdateData, IMySegmentsUpdateV2Data, ISegmentUpdateData, ISplitUpdateData, ISplitKillData } from './SSEHandler/types';
 import { ITask } from '../types';
-import {
+import { IMySegmentsSyncTask } from '../polling/types';
 import { IEventEmitter } from '../../types';
 import { ControlType } from './constants';

@@ -45,6 +45,6 @@ export interface IPushEventEmitter extends IEventEmitter {
  */
 export interface IPushManager extends ITask, IPushEventEmitter {
   // Methods used in client-side, to support multiple clients
-  add(userKey: string, mySegmentsSyncTask:
+  add(userKey: string, mySegmentsSyncTask: IMySegmentsSyncTask): void,
   remove(userKey: string): void
 }

package/src/sync/syncTask.ts
CHANGED

@@ -3,9 +3,8 @@ import { ILogger } from '../logger/types';
 import { ISyncTask } from './types';

 /**
- * Creates
- * The task can be also executed by calling the "execute" method. Multiple
- * For example, submitters executed on SDK destroy or full queue, while periodic execution is pending.
+ * Creates an object that handles the periodic execution of a given task via "start" and "stop" methods.
+ * The task can be also executed by calling the "execute" method. Multiple calls run sequentially to avoid race conditions (e.g., submitters executed on SDK destroy or full queue, while periodic execution is pending).
 *
 * @param log Logger instance.
 * @param task Task to execute that returns a promise that NEVER REJECTS. Otherwise, periodic execution can result in Unhandled Promise Rejections.
@@ -15,8 +14,10 @@ import { ISyncTask } from './types';
 */
 export function syncTaskFactory<Input extends any[], Output = any>(log: ILogger, task: (...args: Input) => Promise<Output>, period: number, taskName = 'task'): ISyncTask<Input, Output> {

-  //
-  let
+  // Flag that indicates if the task is executing
+  let executing = 0;
+  // Promise chain to resolve tasks sequentially
+  let promiseChain: Promise<Output> | undefined;
   // flag that indicates if the task periodic execution has been started/stopped.
   let running = false;
   // Auxiliar counter used to avoid race condition when calling `start` & `stop` intermittently
@@ -27,20 +28,17 @@ export function syncTaskFactory<Input extends any[], Output = any>(log: ILogger,
   let timeoutID: any;

   function execute(...args: Input): Promise<Output> {
-
-
-
-
+    executing++;
+    log.debug(SYNC_TASK_EXECUTE, [taskName]);
+
+    // Update `promiseChain` with last promise, to run tasks serially
+    promiseChain = (promiseChain ? promiseChain.then(() => task(...args)) : task(...args))
+      .then(result => {
+        executing--;
+        return result;
       });
-    }

-
-    log.debug(SYNC_TASK_EXECUTE, [taskName]);
-    pendingTask = task(...args).then(result => {
-      pendingTask = undefined;
-      return result;
-    });
-    return pendingTask;
+    return promiseChain;
   }

   function periodicExecute(currentRunningId: number) {
@@ -56,7 +54,7 @@ export function syncTaskFactory<Input extends any[], Output = any>(log: ILogger,
     execute,

     isExecuting() {
-      return
+      return executing > 0;
     },

     start(...args: Input) {
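
The rewritten `execute` serializes overlapping calls by chaining each run onto the previous promise and keeping an `executing` counter for `isExecuting`, instead of tracking a single `pendingTask`. A standalone TypeScript sketch of the same chaining technique (illustrative only, not the package's code):

// Serialize async executions with a promise chain and an execution counter.
function makeSerialRunner<T>(run: () => Promise<T>) {
  let executing = 0;
  let chain: Promise<T> | undefined;

  return {
    execute(): Promise<T> {
      executing++;
      // Chain onto the previous execution so runs never overlap
      chain = (chain ? chain.then(() => run()) : run())
        .then(result => {
          executing--;
          return result;
        });
      return chain;
    },
    isExecuting() {
      return executing > 0;
    }
  };
}

As in the package's doc comment, `run` is expected to never reject; a rejection would otherwise break the chain for subsequent calls.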

package/src/utils/Backoff.ts
CHANGED

@@ -1,5 +1,9 @@
 export class Backoff {

+  // For testing purposes, assign to overwrite the provided value by param
+  static __TEST__BASE_MILLIS?: number;
+  static __TEST__MAX_MILLIS?: number;
+
   static DEFAULT_BASE_MILLIS = 1000; // 1 second
   static DEFAULT_MAX_MILLIS = 1800000; // 30 minutes

@@ -17,8 +21,8 @@ export class Backoff {
   * @param {number} maxMillis
   */
   constructor(cb: (...args: any[]) => any, baseMillis?: number, maxMillis?: number) {
-    this.baseMillis = baseMillis || Backoff.DEFAULT_BASE_MILLIS;
-    this.maxMillis = maxMillis || Backoff.DEFAULT_MAX_MILLIS;
+    this.baseMillis = Backoff.__TEST__BASE_MILLIS || baseMillis || Backoff.DEFAULT_BASE_MILLIS;
+    this.maxMillis = Backoff.__TEST__MAX_MILLIS || maxMillis || Backoff.DEFAULT_MAX_MILLIS;
     this.attempts = 0;
     this.cb = cb;
   }
@@ -32,6 +36,7 @@ export class Backoff {

     if (this.timeoutID) clearTimeout(this.timeoutID);
     this.timeoutID = setTimeout(() => {
+      this.timeoutID = undefined;
       this.cb();
     }, delayInMillis);
     this.attempts++;
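
Because `__TEST__BASE_MILLIS` and `__TEST__MAX_MILLIS` take precedence over the constructor arguments, a test can shrink every backoff delay globally. A hedged usage sketch, assuming the `Backoff` class above is in scope:

// Test setup: shrink all backoff delays regardless of what callers pass to the constructor.
Backoff.__TEST__BASE_MILLIS = 10;  // takes precedence over baseMillis
Backoff.__TEST__MAX_MILLIS = 100;  // takes precedence over maxMillis

const backoff = new Backoff(() => { /* retry callback */ }, 5000, 60000);
// backoff.baseMillis === 10 and backoff.maxMillis === 100, per the constructor change above.

// Test teardown: restore the real defaults for other suites.
Backoff.__TEST__BASE_MILLIS = undefined;
Backoff.__TEST__MAX_MILLIS = undefined;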

package/types/services/types.d.ts
CHANGED

@@ -16,8 +16,8 @@ export declare type IFetch = (url: string, options?: IRequestOptions) => Promise
 export declare type IHealthCheckAPI = () => Promise<boolean>;
 export declare type ISplitHttpClient = (url: string, options?: IRequestOptions, latencyTracker?: (error?: NetworkError) => void, logErrorsAsInfo?: boolean) => Promise<IResponse>;
 export declare type IFetchAuth = (userKeys?: string[]) => Promise<IResponse>;
-export declare type IFetchSplitChanges = (since: number, noCache?: boolean) => Promise<IResponse>;
-export declare type IFetchSegmentChanges = (since: number, segmentName: string, noCache?: boolean) => Promise<IResponse>;
+export declare type IFetchSplitChanges = (since: number, noCache?: boolean, till?: number) => Promise<IResponse>;
+export declare type IFetchSegmentChanges = (since: number, segmentName: string, noCache?: boolean, till?: number) => Promise<IResponse>;
 export declare type IFetchMySegments = (userMatchingKey: string, noCache?: boolean) => Promise<IResponse>;
 export declare type IPostEventsBulk = (body: string, headers?: Record<string, string>) => Promise<IResponse>;
 export declare type IPostTestImpressionsBulk = (body: string, headers?: Record<string, string>) => Promise<IResponse>;

package/types/sync/polling/fetchers/types.d.ts
CHANGED

@@ -1,5 +1,5 @@
 import { ISplitChangesResponse, ISegmentChangesResponse } from '../../../dtos/types';
 import { IResponse } from '../../../services/types';
-export declare type ISplitChangesFetcher = (since: number, noCache?: boolean, decorator?: (promise: Promise<IResponse>) => Promise<IResponse>) => Promise<ISplitChangesResponse>;
-export declare type ISegmentChangesFetcher = (since: number, segmentName: string, noCache?: boolean, decorator?: (promise: Promise<ISegmentChangesResponse[]>) => Promise<ISegmentChangesResponse[]>) => Promise<ISegmentChangesResponse[]>;
+export declare type ISplitChangesFetcher = (since: number, noCache?: boolean, till?: number, decorator?: (promise: Promise<IResponse>) => Promise<IResponse>) => Promise<ISplitChangesResponse>;
+export declare type ISegmentChangesFetcher = (since: number, segmentName: string, noCache?: boolean, till?: number, decorator?: (promise: Promise<ISegmentChangesResponse[]>) => Promise<ISegmentChangesResponse[]>) => Promise<ISegmentChangesResponse[]>;
 export declare type IMySegmentsFetcher = (userMatchingKey: string, noCache?: boolean, decorator?: (promise: Promise<IResponse>) => Promise<IResponse>) => Promise<string[]>;
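
Both fetcher types now take an optional `till` parameter right after `noCache`, mirroring the service-layer signatures in types/services/types.d.ts above. An illustrative call sketch (the fetcher instances and values are placeholders, the types are assumed to be imported from this file, and `till` presumably carries a target change number):

declare const splitChangesFetcher: ISplitChangesFetcher;
declare const segmentChangesFetcher: ISegmentChangesFetcher;

// since, noCache, till
const splitChanges = splitChangesFetcher(-1, true, 1650000000000);

// since, segmentName, noCache, till
const segmentChanges = segmentChangesFetcher(-1, 'beta_users', true, 1650000000000);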

package/types/sync/polling/syncTasks/mySegmentsSyncTask.d.ts
CHANGED

@@ -1,9 +1,9 @@
 import { IStorageSync } from '../../../storages/types';
 import { IReadinessManager } from '../../../readiness/types';
-import {
+import { IMySegmentsSyncTask } from '../types';
 import { IFetchMySegments } from '../../../services/types';
 import { ISettings } from '../../../types';
 /**
  * Creates a sync task that periodically executes a `mySegmentsUpdater` task
  */
-export declare function mySegmentsSyncTaskFactory(fetchMySegments: IFetchMySegments, storage: IStorageSync, readiness: IReadinessManager, settings: ISettings, matchingKey: string):
+export declare function mySegmentsSyncTaskFactory(fetchMySegments: IFetchMySegments, storage: IStorageSync, readiness: IReadinessManager, settings: ISettings, matchingKey: string): IMySegmentsSyncTask;

package/types/sync/polling/types.d.ts
CHANGED

@@ -1,21 +1,26 @@
 import { IReadinessManager } from '../../readiness/types';
 import { IStorageSync } from '../../storages/types';
-import { SegmentsData } from '../streaming/SSEHandler/types';
 import { ITask, ISyncTask } from '../types';
-export interface ISplitsSyncTask extends ISyncTask<[noCache?: boolean], boolean> {
+export interface ISplitsSyncTask extends ISyncTask<[noCache?: boolean, till?: number], boolean> {
 }
-export interface ISegmentsSyncTask extends ISyncTask<[
+export interface ISegmentsSyncTask extends ISyncTask<[fetchOnlyNew?: boolean, segmentName?: string, noCache?: boolean, till?: number], boolean> {
+}
+export declare type MySegmentsData = string[] | {
+    name: string;
+    add: boolean;
+};
+export interface IMySegmentsSyncTask extends ISyncTask<[segmentsData?: MySegmentsData, noCache?: boolean], boolean> {
 }
 export interface IPollingManager extends ITask {
     syncAll(): Promise<any>;
     splitsSyncTask: ISplitsSyncTask;
-    segmentsSyncTask:
+    segmentsSyncTask: ISyncTask;
 }
 /**
  * PollingManager for client-side with support for multiple clients
 */
 export interface IPollingManagerCS extends IPollingManager {
-    add(matchingKey: string, readiness: IReadinessManager, storage: IStorageSync):
+    add(matchingKey: string, readiness: IReadinessManager, storage: IStorageSync): IMySegmentsSyncTask;
     remove(matchingKey: string): void;
-    get(matchingKey: string):
+    get(matchingKey: string): IMySegmentsSyncTask | undefined;
 }
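
The new `MySegmentsData` union means a MySegments sync can be driven either by a full list of segment names or by a single `{ name, add }` instruction. Illustrative values of both forms (segment names are placeholders; the `MySegmentsData` type is assumed to be imported from this file):

// Full snapshot of the user's segments:
const fullList: MySegmentsData = ['segment_a', 'segment_b'];

// Single incremental instruction: add one segment, or remove it when `add` is false.
const addOne: MySegmentsData = { name: 'segment_c', add: true };
const removeOne: MySegmentsData = { name: 'segment_a', add: false };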

package/types/sync/polling/updaters/segmentChangesUpdater.d.ts
CHANGED

@@ -2,7 +2,7 @@ import { ISegmentChangesFetcher } from '../fetchers/types';
 import { ISegmentsCacheBase } from '../../../storages/types';
 import { IReadinessManager } from '../../../readiness/types';
 import { ILogger } from '../../../logger/types';
-declare type ISegmentChangesUpdater = (
+declare type ISegmentChangesUpdater = (fetchOnlyNew?: boolean, segmentName?: string, noCache?: boolean, till?: number) => Promise<boolean>;
 /**
  * Factory of SegmentChanges updater, a task that:
  * - fetches segment changes using `segmentChangesFetcher`
|
|
|
4
4
|
import { ISplit } from '../../../dtos/types';
|
|
5
5
|
import { ISplitsEventEmitter } from '../../../readiness/types';
|
|
6
6
|
import { ILogger } from '../../../logger/types';
|
|
7
|
-
declare type ISplitChangesUpdater = (noCache?: boolean) => Promise<boolean>;
|
|
7
|
+
declare type ISplitChangesUpdater = (noCache?: boolean, till?: number) => Promise<boolean>;
|
|
8
8
|
/**
|
|
9
9
|
* Collect segments from a raw split definition.
|
|
10
10
|
* Exported for testing purposes.
|
|

package/types/sync/streaming/UpdateWorkers/MySegmentsUpdateWorker.d.ts
CHANGED

@@ -1,27 +1,6 @@
-import {
-import { Backoff } from '../../../utils/Backoff';
+import { IMySegmentsSyncTask } from '../../polling/types';
 import { IUpdateWorker } from './types';
-import { SegmentsData } from '../SSEHandler/types';
 /**
- * MySegmentsUpdateWorker
+ * MySegmentsUpdateWorker factory
 */
-export declare
-    private readonly mySegmentsSyncTask;
-    private maxChangeNumber;
-    private handleNewEvent;
-    private segmentsData?;
-    private currentChangeNumber;
-    readonly backoff: Backoff;
-    /**
-     * @param {Object} mySegmentsSyncTask task for syncing mySegments data
-     */
-    constructor(mySegmentsSyncTask: ISegmentsSyncTask);
-    __handleMySegmentsUpdateCall(): void;
-    /**
-     * Invoked by NotificationProcessor on MY_SEGMENTS_UPDATE event
-     *
-     * @param {number} changeNumber change number of the MY_SEGMENTS_UPDATE notification
-     * @param {SegmentsData | undefined} segmentsData might be undefined
-     */
-    put(changeNumber: number, segmentsData?: SegmentsData): void;
-}
+export declare function MySegmentsUpdateWorker(mySegmentsSyncTask: IMySegmentsSyncTask): IUpdateWorker;

package/types/sync/streaming/UpdateWorkers/SegmentsUpdateWorker.d.ts
CHANGED

@@ -1,28 +1,8 @@
+import { ILogger } from '../../../logger/types';
 import { ISegmentsCacheSync } from '../../../storages/types';
-import { Backoff } from '../../../utils/Backoff';
 import { ISegmentsSyncTask } from '../../polling/types';
-import { ISegmentUpdateData } from '../SSEHandler/types';
 import { IUpdateWorker } from './types';
 /**
- *
+ * SegmentsUpdateWorker factory
 */
-export declare
-    private readonly segmentsCache;
-    private readonly segmentsSyncTask;
-    private readonly maxChangeNumbers;
-    private handleNewEvent;
-    readonly backoff: Backoff;
-    /**
-     * @param {Object} segmentsCache segments data cache
-     * @param {Object} segmentsSyncTask task for syncing segments data
-     */
-    constructor(segmentsSyncTask: ISegmentsSyncTask, segmentsCache: ISegmentsCacheSync);
-    __handleSegmentUpdateCall(): void;
-    /**
-     * Invoked by NotificationProcessor on SEGMENT_UPDATE event
-     *
-     * @param {number} changeNumber change number of the SEGMENT_UPDATE notification
-     * @param {string} segmentName segment name of the SEGMENT_UPDATE notification
-     */
-    put({ changeNumber, segmentName }: ISegmentUpdateData): void;
-}
+export declare function SegmentsUpdateWorker(log: ILogger, segmentsSyncTask: ISegmentsSyncTask, segmentsCache: ISegmentsCacheSync): IUpdateWorker;

package/types/sync/streaming/UpdateWorkers/SplitsUpdateWorker.d.ts
CHANGED

@@ -1,39 +1,12 @@
+import { ILogger } from '../../../logger/types';
 import { ISplitsEventEmitter } from '../../../readiness/types';
 import { ISplitsCacheSync } from '../../../storages/types';
-import { Backoff } from '../../../utils/Backoff';
 import { ISegmentsSyncTask, ISplitsSyncTask } from '../../polling/types';
-import { ISplitKillData
+import { ISplitKillData } from '../SSEHandler/types';
 import { IUpdateWorker } from './types';
 /**
- * SplitsUpdateWorker
+ * SplitsUpdateWorker factory
 */
-export declare
-
-
-    private readonly splitsEventEmitter;
-    private readonly segmentsSyncTask?;
-    private maxChangeNumber;
-    private handleNewEvent;
-    readonly backoff: Backoff;
-    /**
-     * @param {Object} splitsCache splits data cache
-     * @param {Object} splitsSyncTask task for syncing splits data
-     * @param {Object} splitsEventEmitter emitter for splits data events
-     */
-    constructor(splitsCache: ISplitsCacheSync, splitsSyncTask: ISplitsSyncTask, splitsEventEmitter: ISplitsEventEmitter, segmentsSyncTask?: ISegmentsSyncTask);
-    __handleSplitUpdateCall(): void;
-    /**
-     * Invoked by NotificationProcessor on SPLIT_UPDATE event
-     *
-     * @param {number} changeNumber change number of the SPLIT_UPDATE notification
-     */
-    put({ changeNumber }: Pick<ISplitUpdateData, 'changeNumber'>): void;
-    /**
-     * Invoked by NotificationProcessor on SPLIT_KILL event
-     *
-     * @param {number} changeNumber change number of the SPLIT_UPDATE notification
-     * @param {string} splitName name of split to kill
-     * @param {string} defaultTreatment default treatment value
-     */
-    killSplit({ changeNumber, splitName, defaultTreatment }: ISplitKillData): void;
-}
+export declare function SplitsUpdateWorker(log: ILogger, splitsCache: ISplitsCacheSync, splitsSyncTask: ISplitsSyncTask, splitsEventEmitter: ISplitsEventEmitter, segmentsSyncTask?: ISegmentsSyncTask): IUpdateWorker & {
+    killSplit(event: ISplitKillData): void;
+};
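
Per these declarations, all three workers are now factory functions rather than classes, and `SplitsUpdateWorker` layers a `killSplit` method on top of the shared `IUpdateWorker` shape. A hedged wiring sketch using only the declared signatures (the dependency instances are assumed to exist and the types to be imported):

declare const log: ILogger;
declare const splitsCache: ISplitsCacheSync;
declare const splitsSyncTask: ISplitsSyncTask;
declare const splitsEventEmitter: ISplitsEventEmitter;
declare const segmentsSyncTask: ISegmentsSyncTask;
declare const splitKillEvent: ISplitKillData;

// Server-side style wiring: segmentsSyncTask is passed so segments referenced by updated splits can be fetched.
const splitsWorker = SplitsUpdateWorker(log, splitsCache, splitsSyncTask, splitsEventEmitter, segmentsSyncTask);

splitsWorker.killSplit(splitKillEvent); // extra method beyond IUpdateWorker
splitsWorker.stop();                    // IUpdateWorker: cancels any scheduled fetch retry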

package/types/sync/streaming/types.d.ts
CHANGED

@@ -1,6 +1,6 @@
 import { IMySegmentsUpdateData, IMySegmentsUpdateV2Data, ISegmentUpdateData, ISplitUpdateData, ISplitKillData } from './SSEHandler/types';
 import { ITask } from '../types';
-import {
+import { IMySegmentsSyncTask } from '../polling/types';
 import { IEventEmitter } from '../../types';
 import { ControlType } from './constants';
 export declare type PUSH_SUBSYSTEM_UP = 'PUSH_SUBSYSTEM_UP';
@@ -29,7 +29,7 @@ export interface IPushEventEmitter extends IEventEmitter {
 * PushManager
 */
 export interface IPushManager extends ITask, IPushEventEmitter {
-    add(userKey: string, mySegmentsSyncTask:
+    add(userKey: string, mySegmentsSyncTask: IMySegmentsSyncTask): void;
     remove(userKey: string): void;
 }
 export {};

package/types/sync/syncTask.d.ts
CHANGED

@@ -1,9 +1,8 @@
 import { ILogger } from '../logger/types';
 import { ISyncTask } from './types';
 /**
- * Creates
- * The task can be also executed by calling the "execute" method. Multiple
- * For example, submitters executed on SDK destroy or full queue, while periodic execution is pending.
+ * Creates an object that handles the periodic execution of a given task via "start" and "stop" methods.
+ * The task can be also executed by calling the "execute" method. Multiple calls run sequentially to avoid race conditions (e.g., submitters executed on SDK destroy or full queue, while periodic execution is pending).
 *
 * @param log Logger instance.
 * @param task Task to execute that returns a promise that NEVER REJECTS. Otherwise, periodic execution can result in Unhandled Promise Rejections.

package/types/utils/Backoff.d.ts
CHANGED

package/cjs/sync/offline/LocalhostFromFile.js
DELETED

@@ -1,13 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.LocalhostFromFile = void 0;
-var splitsParserFromFile_1 = require("./splitsParser/splitsParserFromFile");
-var syncManagerOffline_1 = require("./syncManagerOffline");
-// Singleton instance of the factory function for offline SyncManager from YAML file (a.k.a. localhostFromFile)
-// Requires Node 'fs' and 'path' APIs.
-var localhostFromFile = (0, syncManagerOffline_1.syncManagerOfflineFactory)(splitsParserFromFile_1.splitsParserFromFileFactory);
-localhostFromFile.type = 'LocalhostFromFile';
-function LocalhostFromFile() {
-    return localhostFromFile;
-}
-exports.LocalhostFromFile = LocalhostFromFile;

package/cjs/sync/offline/splitsParser/splitsParserFromFile.js
DELETED

@@ -1,151 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.splitsParserFromFileFactory = void 0;
-var tslib_1 = require("tslib");
-/* eslint-disable no-undef */
-// @TODO consider moving it to Node-SDK and remove js-yaml dependency from Js-commons
-var fs_1 = (0, tslib_1.__importDefault)(require("fs"));
-var path_1 = (0, tslib_1.__importDefault)(require("path"));
-// @ts-ignore
-var js_yaml_1 = (0, tslib_1.__importDefault)(require("js-yaml"));
-var lang_1 = require("../../../utils/lang");
-var parseCondition_1 = require("./parseCondition");
-var logPrefix = 'sync:offline:splits-fetcher: ';
-var DEFAULT_FILENAME = '.split';
-function configFilesPath(configFilePath) {
-    if (configFilePath === DEFAULT_FILENAME || !(0, lang_1.isString)(configFilePath)) {
-        var root = process.env.HOME;
-        if (process.env.SPLIT_CONFIG_ROOT)
-            root = process.env.SPLIT_CONFIG_ROOT;
-        if (!root)
-            throw new Error('Missing split mock configuration root.');
-        configFilePath = path_1.default.join(root, DEFAULT_FILENAME);
-    }
-    // Validate the extensions
-    if (!((0, lang_1.endsWith)(configFilePath, '.yaml', true) || (0, lang_1.endsWith)(configFilePath, '.yml', true) || (0, lang_1.endsWith)(configFilePath, '.split', true)))
-        throw new Error("Invalid extension specified for Splits mock file. Accepted extensions are \".yml\" and \".yaml\". Your specified file is " + configFilePath);
-    if (!fs_1.default.existsSync(configFilePath))
-        throw new Error("Split configuration not found in " + configFilePath + " - Please review your Split file location.");
-    return configFilePath;
-}
-// This function is not pure nor meant to be. Here we apply modifications to cover
-// for behaviour that's ensured by the BE.
-function arrangeConditions(mocksData) {
-    // Iterate through each Split data
-    (0, lang_1.forOwn)(mocksData, function (data) {
-        var conditions = data.conditions;
-        // On the manager, as the split jsons come with all treatments on the partitions prop,
-        // we'll add all the treatments to the first condition.
-        var firstRolloutCondition = (0, lang_1.find)(conditions, function (cond) { return cond.conditionType === 'ROLLOUT'; });
-        // Malformed mocks may have
-        var treatments = (0, lang_1.uniq)(data.treatments);
-        // If they're only specifying a whitelist we add the treatments there.
-        var allTreatmentsCondition = firstRolloutCondition ? firstRolloutCondition : conditions[0];
-        var fullyAllocatedTreatment = allTreatmentsCondition.partitions[0].treatment;
-        treatments.forEach(function (treatment) {
-            if (treatment !== fullyAllocatedTreatment) {
-                allTreatmentsCondition.partitions.push({
-                    treatment: treatment,
-                    size: 0
-                });
-            }
-        });
-        // Don't need these anymore
-        // @ts-expect-error
-        delete data.treatments;
-    });
-}
-function splitsParserFromFileFactory() {
-    var previousMock = 'NO_MOCK_LOADED';
-    // Parse `.split` configuration file and return a map of "Split Objects"
-    function readSplitConfigFile(log, filePath) {
-        var SPLIT_POSITION = 0;
-        var TREATMENT_POSITION = 1;
-        var data;
-        try {
-            data = fs_1.default.readFileSync(filePath, 'utf-8');
-        }
-        catch (e) {
-            log.error(e && e.message);
-            return {};
-        }
-        if (data === previousMock)
-            return false;
-        previousMock = data;
-        var splitObjects = data.split(/\r?\n/).reduce(function (accum, line, index) {
-            var tuple = line.trim();
-            if (tuple === '' || tuple.charAt(0) === '#') {
-                log.debug(logPrefix + ("Ignoring empty line or comment at #" + index));
-            }
-            else {
-                tuple = tuple.split(/\s+/);
-                if (tuple.length !== 2) {
-                    log.debug(logPrefix + ("Ignoring line since it does not have exactly two columns #" + index));
-                }
-                else {
-                    var splitName = tuple[SPLIT_POSITION];
-                    var condition = (0, parseCondition_1.parseCondition)({ treatment: tuple[TREATMENT_POSITION] });
-                    accum[splitName] = { conditions: [condition], configurations: {}, trafficTypeName: 'localhost' };
-                }
-            }
-            return accum;
-        }, {});
-        return splitObjects;
-    }
-    // Parse `.yml` or `.yaml` configuration files and return a map of "Split Objects"
-    function readYAMLConfigFile(log, filePath) {
-        var data = '';
-        var yamldoc = null;
-        try {
-            data = fs_1.default.readFileSync(filePath, 'utf8');
-            if (data === previousMock)
-                return false;
-            previousMock = data;
-            yamldoc = js_yaml_1.default.safeLoad(data);
-        }
-        catch (e) {
-            log.error(e);
-            return {};
-        }
-        // Each entry will be mapped to a condition, but we'll also keep the configurations map.
-        var mocksData = yamldoc.reduce(function (accum, splitEntry) {
-            var splitName = Object.keys(splitEntry)[0];
-            if (!splitName || !(0, lang_1.isString)(splitEntry[splitName].treatment))
-                log.error(logPrefix + 'Ignoring entry on YAML since the format is incorrect.');
-            var mockData = splitEntry[splitName];
-            // "Template" for each split accumulated data
-            if (!accum[splitName]) {
-                accum[splitName] = {
-                    configurations: {}, conditions: [], treatments: [], trafficTypeName: 'localhost'
-                };
-            }
-            // Assign the config if there is one on the mock
-            if (mockData.config)
-                accum[splitName].configurations[mockData.treatment] = mockData.config;
-            // Parse the condition from the entry.
-            var condition = (0, parseCondition_1.parseCondition)(mockData);
-            accum[splitName].conditions[condition.conditionType === 'ROLLOUT' ? 'push' : 'unshift'](condition);
-            // Also keep track of the treatments, will be useful for manager functionality.
-            accum[splitName].treatments.push(mockData.treatment);
-            return accum;
-        }, {});
-        arrangeConditions(mocksData);
-        return mocksData;
-    }
-    // Load the content of a configuration file into an Object
-    return function splitsParserFromFile(_a) {
-        var features = _a.features, log = _a.log;
-        var filePath = configFilesPath(features);
-        var mockData;
-        // If we have a filePath, it means the extension is correct, choose the parser.
-        if ((0, lang_1.endsWith)(filePath, '.split')) {
-            log.warn(logPrefix + '.split mocks will be deprecated soon in favor of YAML files, which provide more targeting power. Take a look in our documentation.');
-            mockData = readSplitConfigFile(log, filePath);
-        }
-        else {
-            mockData = readYAMLConfigFile(log, filePath);
-        }
-        return mockData;
-    };
-}
-exports.splitsParserFromFileFactory = splitsParserFromFileFactory;

package/esm/sync/offline/LocalhostFromFile.js
DELETED

@@ -1,9 +0,0 @@
-import { splitsParserFromFileFactory } from './splitsParser/splitsParserFromFile';
-import { syncManagerOfflineFactory } from './syncManagerOffline';
-// Singleton instance of the factory function for offline SyncManager from YAML file (a.k.a. localhostFromFile)
-// Requires Node 'fs' and 'path' APIs.
-var localhostFromFile = syncManagerOfflineFactory(splitsParserFromFileFactory);
-localhostFromFile.type = 'LocalhostFromFile';
-export function LocalhostFromFile() {
-    return localhostFromFile;
-}