@statsig/client-core 3.31.1-beta.1 → 3.31.1-beta.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/src/BatchedEventsQueue.d.ts +14 -0
- package/src/BatchedEventsQueue.js +51 -0
- package/src/ErrorBoundary.d.ts +2 -0
- package/src/ErrorBoundary.js +23 -3
- package/src/EventLogger.d.ts +14 -23
- package/src/EventLogger.js +85 -185
- package/src/EventRetryConstants.d.ts +2 -0
- package/src/EventRetryConstants.js +2 -0
- package/src/EventSender.d.ts +23 -0
- package/src/EventSender.js +96 -0
- package/src/FlushCoordinator.d.ts +50 -0
- package/src/FlushCoordinator.js +332 -0
- package/src/FlushInterval.d.ts +13 -0
- package/src/FlushInterval.js +44 -0
- package/src/FlushTypes.d.ts +7 -0
- package/src/FlushTypes.js +12 -0
- package/src/NetworkCore.d.ts +1 -0
- package/src/NetworkCore.js +5 -3
- package/src/PendingEvents.d.ts +10 -0
- package/src/PendingEvents.js +26 -0
- package/src/StatsigClientBase.js +1 -1
- package/src/StatsigMetadata.d.ts +1 -1
- package/src/StatsigMetadata.js +1 -1
package/package.json
CHANGED
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
import { EventBatch } from './EventBatch';
|
|
2
|
+
import { StatsigEventInternal } from './StatsigEvent';
|
|
3
|
+
export declare class BatchQueue {
|
|
4
|
+
private _batches;
|
|
5
|
+
private _batchSize;
|
|
6
|
+
constructor(batchSize?: number);
|
|
7
|
+
batchSize(): number;
|
|
8
|
+
requeueBatch(batch: EventBatch): number;
|
|
9
|
+
hasFullBatch(): boolean;
|
|
10
|
+
takeNextBatch(): EventBatch | null;
|
|
11
|
+
takeAllBatches(): EventBatch[];
|
|
12
|
+
createBatches(events: StatsigEventInternal[]): number;
|
|
13
|
+
private _enqueueBatch;
|
|
14
|
+
}
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.BatchQueue = void 0;
|
|
4
|
+
const EventBatch_1 = require("./EventBatch");
|
|
5
|
+
const EventRetryConstants_1 = require("./EventRetryConstants");
|
|
6
|
+
class BatchQueue {
|
|
7
|
+
constructor(batchSize = EventRetryConstants_1.EventRetryConstants.DEFAULT_BATCH_SIZE) {
|
|
8
|
+
this._batches = [];
|
|
9
|
+
this._batchSize = batchSize;
|
|
10
|
+
}
|
|
11
|
+
batchSize() {
|
|
12
|
+
return this._batchSize;
|
|
13
|
+
}
|
|
14
|
+
requeueBatch(batch) {
|
|
15
|
+
return this._enqueueBatch(batch);
|
|
16
|
+
}
|
|
17
|
+
hasFullBatch() {
|
|
18
|
+
return this._batches.some((batch) => batch.events.length >= this._batchSize);
|
|
19
|
+
}
|
|
20
|
+
takeNextBatch() {
|
|
21
|
+
var _a;
|
|
22
|
+
return (_a = this._batches.shift()) !== null && _a !== void 0 ? _a : null;
|
|
23
|
+
}
|
|
24
|
+
takeAllBatches() {
|
|
25
|
+
const batches = this._batches;
|
|
26
|
+
this._batches = [];
|
|
27
|
+
return batches;
|
|
28
|
+
}
|
|
29
|
+
createBatches(events) {
|
|
30
|
+
let i = 0;
|
|
31
|
+
let droppedCount = 0;
|
|
32
|
+
while (i < events.length) {
|
|
33
|
+
const chunk = events.slice(i, i + this._batchSize);
|
|
34
|
+
droppedCount += this._enqueueBatch(new EventBatch_1.EventBatch(chunk));
|
|
35
|
+
i += this._batchSize;
|
|
36
|
+
}
|
|
37
|
+
return droppedCount;
|
|
38
|
+
}
|
|
39
|
+
_enqueueBatch(batch) {
|
|
40
|
+
this._batches.push(batch);
|
|
41
|
+
let droppedEventCount = 0;
|
|
42
|
+
while (this._batches.length > EventRetryConstants_1.EventRetryConstants.MAX_PENDING_BATCHES) {
|
|
43
|
+
const dropped = this._batches.shift();
|
|
44
|
+
if (dropped) {
|
|
45
|
+
droppedEventCount += dropped.events.length;
|
|
46
|
+
}
|
|
47
|
+
}
|
|
48
|
+
return droppedEventCount;
|
|
49
|
+
}
|
|
50
|
+
}
|
|
51
|
+
exports.BatchQueue = BatchQueue;
|
package/src/ErrorBoundary.d.ts
CHANGED
|
@@ -10,6 +10,8 @@ export declare class ErrorBoundary {
|
|
|
10
10
|
constructor(_sdkKey: string, _options: AnyStatsigOptions | null, _emitter?: StatsigClientEmitEventFunc | undefined, _lastSeenError?: Error | undefined);
|
|
11
11
|
wrap(instance: unknown, namePrefix?: string): void;
|
|
12
12
|
logError(tag: string, error: unknown): void;
|
|
13
|
+
logDroppedEvents(count: number, reason: string, metadata?: Record<string, unknown>): void;
|
|
14
|
+
logEventRequestFailure(count: number, reason: string, flushType: string, statusCode: number): void;
|
|
13
15
|
getLastSeenErrorAndReset(): Error | null;
|
|
14
16
|
attachErrorIfNoneExists(error: unknown): void;
|
|
15
17
|
private _capture;
|
package/src/ErrorBoundary.js
CHANGED
|
@@ -44,6 +44,25 @@ class ErrorBoundary {
|
|
|
44
44
|
logError(tag, error) {
|
|
45
45
|
this._onError(tag, error);
|
|
46
46
|
}
|
|
47
|
+
logDroppedEvents(count, reason, metadata) {
|
|
48
|
+
const extra = {
|
|
49
|
+
eventCount: String(count),
|
|
50
|
+
};
|
|
51
|
+
if (metadata) {
|
|
52
|
+
Object.entries(metadata).forEach(([key, value]) => {
|
|
53
|
+
extra[key] = String(value);
|
|
54
|
+
});
|
|
55
|
+
}
|
|
56
|
+
this._onError(`statsig::log_event_dropped_event_count`, new Error(reason), true, extra);
|
|
57
|
+
}
|
|
58
|
+
logEventRequestFailure(count, reason, flushType, statusCode) {
|
|
59
|
+
const extra = {
|
|
60
|
+
eventCount: String(count),
|
|
61
|
+
flushType: flushType,
|
|
62
|
+
statusCode: String(statusCode),
|
|
63
|
+
};
|
|
64
|
+
this._onError(`statsig::log_event_failed`, new Error(reason), true, extra);
|
|
65
|
+
}
|
|
47
66
|
getLastSeenErrorAndReset() {
|
|
48
67
|
const tempError = this._lastSeenError;
|
|
49
68
|
this._lastSeenError = undefined;
|
|
@@ -68,7 +87,7 @@ class ErrorBoundary {
|
|
|
68
87
|
return null;
|
|
69
88
|
}
|
|
70
89
|
}
|
|
71
|
-
_onError(tag, error) {
|
|
90
|
+
_onError(tag, error, bypassDedupe = false, extra) {
|
|
72
91
|
try {
|
|
73
92
|
Log_1.Log.warn(`Caught error in ${tag}`, { error });
|
|
74
93
|
const impl = () => __awaiter(this, void 0, void 0, function* () {
|
|
@@ -78,7 +97,7 @@ class ErrorBoundary {
|
|
|
78
97
|
const name = isError ? unwrapped.name : 'No Name';
|
|
79
98
|
const resolvedError = _resolveError(unwrapped);
|
|
80
99
|
this._lastSeenError = resolvedError;
|
|
81
|
-
if (this._seen.has(name)) {
|
|
100
|
+
if (!bypassDedupe && this._seen.has(name)) {
|
|
82
101
|
return;
|
|
83
102
|
}
|
|
84
103
|
this._seen.add(name);
|
|
@@ -93,7 +112,8 @@ class ErrorBoundary {
|
|
|
93
112
|
const sdkType = SDKType_1.SDKType._get(this._sdkKey);
|
|
94
113
|
const statsigMetadata = StatsigMetadata_1.StatsigMetadataProvider.get();
|
|
95
114
|
const info = isError ? unwrapped.stack : _getDescription(unwrapped);
|
|
96
|
-
const body = Object.assign({ tag, exception: name, info,
|
|
115
|
+
const body = Object.assign({ tag, exception: name, info,
|
|
116
|
+
extra, statsigOptions: _getStatsigOptionLoggingCopy(this._options) }, Object.assign(Object.assign({}, statsigMetadata), { sdkType }));
|
|
97
117
|
const func = (_f = (_e = (_d = this._options) === null || _d === void 0 ? void 0 : _d.networkConfig) === null || _e === void 0 ? void 0 : _e.networkOverrideFunc) !== null && _f !== void 0 ? _f : fetch;
|
|
98
118
|
yield func(exports.EXCEPTION_ENDPOINT, {
|
|
99
119
|
method: 'POST',
|
package/src/EventLogger.d.ts
CHANGED
|
@@ -1,3 +1,4 @@
|
|
|
1
|
+
import { ErrorBoundary } from './ErrorBoundary';
|
|
1
2
|
import { NetworkCore } from './NetworkCore';
|
|
2
3
|
import { StatsigClientEmitEventFunc } from './StatsigClientBase';
|
|
3
4
|
import { StatsigEventInternal } from './StatsigEvent';
|
|
@@ -7,18 +8,18 @@ export declare class EventLogger {
|
|
|
7
8
|
private _emitter;
|
|
8
9
|
private _network;
|
|
9
10
|
private _options;
|
|
10
|
-
private
|
|
11
|
-
private
|
|
11
|
+
private _errorBoundary;
|
|
12
|
+
private _pendingEvents;
|
|
13
|
+
private _batchQueue;
|
|
14
|
+
private _flushCoordinator;
|
|
12
15
|
private _lastExposureTimeMap;
|
|
13
16
|
private _nonExposedChecks;
|
|
14
|
-
private _maxQueueSize;
|
|
15
|
-
private _hasRunQuickFlush;
|
|
16
|
-
private _creationTime;
|
|
17
17
|
private _loggingEnabled;
|
|
18
18
|
private _logEventUrlConfig;
|
|
19
|
+
private _isShuttingDown;
|
|
20
|
+
private _storageKey;
|
|
19
21
|
private static _safeFlushAndForget;
|
|
20
|
-
|
|
21
|
-
constructor(_sdkKey: string, _emitter: StatsigClientEmitEventFunc, _network: NetworkCore, _options: StatsigOptionsCommon<NetworkConfigCommon> | null);
|
|
22
|
+
constructor(_sdkKey: string, _emitter: StatsigClientEmitEventFunc, _network: NetworkCore, _options: StatsigOptionsCommon<NetworkConfigCommon> | null, _errorBoundary: ErrorBoundary);
|
|
22
23
|
setLogEventCompressionMode(mode: LogEventCompressionMode): void;
|
|
23
24
|
setLoggingEnabled(loggingEnabled: LoggingEnabledOption): void;
|
|
24
25
|
enqueue(event: StatsigEventInternal): void;
|
|
@@ -27,22 +28,12 @@ export declare class EventLogger {
|
|
|
27
28
|
start(): void;
|
|
28
29
|
stop(): Promise<void>;
|
|
29
30
|
flush(): Promise<void>;
|
|
30
|
-
|
|
31
|
-
* We 'Quick Flush' following the very first event enqueued
|
|
32
|
-
* within the quick flush window
|
|
33
|
-
*/
|
|
34
|
-
private _quickFlushIfNeeded;
|
|
31
|
+
appendAndResetNonExposedChecks(): void;
|
|
35
32
|
private _shouldLogEvent;
|
|
36
|
-
private _sendEvents;
|
|
37
|
-
private _sendEventsViaPost;
|
|
38
|
-
private _sendEventsViaBeacon;
|
|
39
|
-
private _getRequestData;
|
|
40
|
-
private _saveFailedLogsToStorage;
|
|
41
|
-
private _getFailedLogsFromStorage;
|
|
42
|
-
private _retryFailedLogs;
|
|
43
|
-
private _getStorageKey;
|
|
44
|
-
private _normalizeAndAppendEvent;
|
|
45
|
-
private _appendAndResetNonExposedChecks;
|
|
46
33
|
private _getCurrentPageUrl;
|
|
47
|
-
private
|
|
34
|
+
private _getStorageKey;
|
|
35
|
+
private _storeEventToStorage;
|
|
36
|
+
private _getEventsFromStorage;
|
|
37
|
+
private _loadStoredEvents;
|
|
38
|
+
private _normalizeEvent;
|
|
48
39
|
}
|
package/src/EventLogger.js
CHANGED
|
@@ -10,22 +10,22 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
|
|
|
10
10
|
};
|
|
11
11
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
12
12
|
exports.EventLogger = void 0;
|
|
13
|
+
const BatchedEventsQueue_1 = require("./BatchedEventsQueue");
|
|
13
14
|
const CacheKey_1 = require("./CacheKey");
|
|
15
|
+
const EventRetryConstants_1 = require("./EventRetryConstants");
|
|
16
|
+
const FlushCoordinator_1 = require("./FlushCoordinator");
|
|
14
17
|
const Hashing_1 = require("./Hashing");
|
|
15
18
|
const Log_1 = require("./Log");
|
|
16
19
|
const NetworkConfig_1 = require("./NetworkConfig");
|
|
20
|
+
const PendingEvents_1 = require("./PendingEvents");
|
|
17
21
|
const SafeJs_1 = require("./SafeJs");
|
|
18
22
|
const StatsigEvent_1 = require("./StatsigEvent");
|
|
19
23
|
const StatsigOptionsCommon_1 = require("./StatsigOptionsCommon");
|
|
20
24
|
const StorageProvider_1 = require("./StorageProvider");
|
|
21
25
|
const UrlConfiguration_1 = require("./UrlConfiguration");
|
|
22
26
|
const VisibilityObserving_1 = require("./VisibilityObserving");
|
|
23
|
-
const DEFAULT_QUEUE_SIZE = 100;
|
|
24
|
-
const DEFAULT_FLUSH_INTERVAL_MS = 10000;
|
|
25
27
|
const MAX_DEDUPER_KEYS = 1000;
|
|
26
28
|
const DEDUPER_WINDOW_DURATION_MS = 600000;
|
|
27
|
-
const MAX_FAILED_LOGS = 500;
|
|
28
|
-
const QUICK_FLUSH_WINDOW_MS = 200;
|
|
29
29
|
const EVENT_LOGGER_MAP = {};
|
|
30
30
|
class EventLogger {
|
|
31
31
|
static _safeFlushAndForget(sdkKey) {
|
|
@@ -34,21 +34,17 @@ class EventLogger {
|
|
|
34
34
|
// noop
|
|
35
35
|
});
|
|
36
36
|
}
|
|
37
|
-
|
|
38
|
-
var _a;
|
|
39
|
-
(_a = EVENT_LOGGER_MAP[sdkKey]) === null || _a === void 0 ? void 0 : _a._retryFailedLogs();
|
|
40
|
-
}
|
|
41
|
-
constructor(_sdkKey, _emitter, _network, _options) {
|
|
37
|
+
constructor(_sdkKey, _emitter, _network, _options, _errorBoundary) {
|
|
42
38
|
var _a, _b;
|
|
43
39
|
this._sdkKey = _sdkKey;
|
|
44
40
|
this._emitter = _emitter;
|
|
45
41
|
this._network = _network;
|
|
46
42
|
this._options = _options;
|
|
47
|
-
this.
|
|
43
|
+
this._errorBoundary = _errorBoundary;
|
|
48
44
|
this._lastExposureTimeMap = {};
|
|
49
45
|
this._nonExposedChecks = {};
|
|
50
|
-
this.
|
|
51
|
-
this.
|
|
46
|
+
this._isShuttingDown = false;
|
|
47
|
+
this._storageKey = null;
|
|
52
48
|
this._loggingEnabled =
|
|
53
49
|
(_a = _options === null || _options === void 0 ? void 0 : _options.loggingEnabled) !== null && _a !== void 0 ? _a : ((_options === null || _options === void 0 ? void 0 : _options.disableLogging) === true
|
|
54
50
|
? StatsigOptionsCommon_1.LoggingEnabledOption.disabled
|
|
@@ -56,34 +52,45 @@ class EventLogger {
|
|
|
56
52
|
if ((_options === null || _options === void 0 ? void 0 : _options.loggingEnabled) && _options.disableLogging !== undefined) {
|
|
57
53
|
Log_1.Log.warn('Detected both loggingEnabled and disableLogging options. loggingEnabled takes precedence - please remove disableLogging.');
|
|
58
54
|
}
|
|
59
|
-
this._maxQueueSize = (_b = _options === null || _options === void 0 ? void 0 : _options.loggingBufferMaxSize) !== null && _b !== void 0 ? _b : DEFAULT_QUEUE_SIZE;
|
|
60
55
|
const config = _options === null || _options === void 0 ? void 0 : _options.networkConfig;
|
|
61
56
|
this._logEventUrlConfig = new UrlConfiguration_1.UrlConfiguration(NetworkConfig_1.Endpoint._rgstr, config === null || config === void 0 ? void 0 : config.logEventUrl, config === null || config === void 0 ? void 0 : config.api, config === null || config === void 0 ? void 0 : config.logEventFallbackUrls);
|
|
57
|
+
const batchSize = (_b = _options === null || _options === void 0 ? void 0 : _options.loggingBufferMaxSize) !== null && _b !== void 0 ? _b : EventRetryConstants_1.EventRetryConstants.DEFAULT_BATCH_SIZE;
|
|
58
|
+
this._pendingEvents = new PendingEvents_1.PendingEvents(batchSize);
|
|
59
|
+
this._batchQueue = new BatchedEventsQueue_1.BatchQueue(batchSize);
|
|
60
|
+
this._flushCoordinator = new FlushCoordinator_1.FlushCoordinator(this._batchQueue, this._pendingEvents, () => this.appendAndResetNonExposedChecks(), this._sdkKey, this._network, this._emitter, this._logEventUrlConfig, this._options, this._loggingEnabled, this._errorBoundary);
|
|
62
61
|
}
|
|
63
62
|
setLogEventCompressionMode(mode) {
|
|
64
|
-
this.
|
|
63
|
+
this._flushCoordinator.setLogEventCompressionMode(mode);
|
|
65
64
|
}
|
|
66
65
|
setLoggingEnabled(loggingEnabled) {
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
66
|
+
const wasDisabled = this._loggingEnabled === 'disabled';
|
|
67
|
+
const isNowEnabled = loggingEnabled !== 'disabled';
|
|
68
|
+
this._loggingEnabled = loggingEnabled;
|
|
69
|
+
this._flushCoordinator.setLoggingEnabled(loggingEnabled);
|
|
70
|
+
if (wasDisabled && isNowEnabled) {
|
|
71
|
+
const events = this._loadStoredEvents();
|
|
72
|
+
Log_1.Log.debug(`Loaded ${events.length} stored event(s) from storage`);
|
|
73
|
+
if (events.length > 0) {
|
|
74
|
+
events.forEach((event) => {
|
|
75
|
+
this._flushCoordinator.addEvent(event);
|
|
76
|
+
});
|
|
77
|
+
this.flush().catch((error) => {
|
|
78
|
+
Log_1.Log.warn('Failed to flush events after enabling logging:', error);
|
|
79
|
+
});
|
|
73
80
|
}
|
|
74
|
-
StorageProvider_1.Storage.removeItem(storageKey);
|
|
75
81
|
}
|
|
76
|
-
this._loggingEnabled = loggingEnabled;
|
|
77
82
|
}
|
|
78
83
|
enqueue(event) {
|
|
79
84
|
if (!this._shouldLogEvent(event)) {
|
|
80
85
|
return;
|
|
81
86
|
}
|
|
82
|
-
this.
|
|
83
|
-
this.
|
|
84
|
-
|
|
85
|
-
|
|
87
|
+
const normalizedEvent = this._normalizeEvent(event);
|
|
88
|
+
if (this._loggingEnabled === 'disabled') {
|
|
89
|
+
this._storeEventToStorage(normalizedEvent);
|
|
90
|
+
return;
|
|
86
91
|
}
|
|
92
|
+
this._flushCoordinator.addEvent(normalizedEvent);
|
|
93
|
+
this._flushCoordinator.checkQuickFlush();
|
|
87
94
|
}
|
|
88
95
|
incrementNonExposureCount(name) {
|
|
89
96
|
var _a;
|
|
@@ -110,47 +117,41 @@ class EventLogger {
|
|
|
110
117
|
EventLogger._safeFlushAndForget(this._sdkKey);
|
|
111
118
|
}
|
|
112
119
|
else if (visibility === 'foreground') {
|
|
113
|
-
|
|
120
|
+
this._flushCoordinator.startScheduledFlushCycle();
|
|
114
121
|
}
|
|
115
122
|
});
|
|
116
123
|
}
|
|
117
|
-
this.
|
|
118
|
-
|
|
124
|
+
this._flushCoordinator.loadAndRetryShutdownFailedEvents().catch((error) => {
|
|
125
|
+
Log_1.Log.warn('Failed to load failed shutdown events:', error);
|
|
126
|
+
});
|
|
127
|
+
this._flushCoordinator.startScheduledFlushCycle();
|
|
119
128
|
}
|
|
120
129
|
stop() {
|
|
121
130
|
return __awaiter(this, void 0, void 0, function* () {
|
|
122
|
-
|
|
123
|
-
|
|
124
|
-
this._flushIntervalId = null;
|
|
125
|
-
}
|
|
131
|
+
this._isShuttingDown = true;
|
|
132
|
+
yield this._flushCoordinator.processShutdown();
|
|
126
133
|
delete EVENT_LOGGER_MAP[this._sdkKey];
|
|
127
|
-
yield this.flush();
|
|
128
134
|
});
|
|
129
135
|
}
|
|
130
136
|
flush() {
|
|
131
137
|
return __awaiter(this, void 0, void 0, function* () {
|
|
132
|
-
this.
|
|
133
|
-
if (this._queue.length === 0) {
|
|
134
|
-
return;
|
|
135
|
-
}
|
|
136
|
-
const events = this._queue;
|
|
137
|
-
this._queue = [];
|
|
138
|
-
yield this._sendEvents(events);
|
|
138
|
+
return this._flushCoordinator.processManualFlush();
|
|
139
139
|
});
|
|
140
140
|
}
|
|
141
|
-
|
|
142
|
-
|
|
143
|
-
* within the quick flush window
|
|
144
|
-
*/
|
|
145
|
-
_quickFlushIfNeeded() {
|
|
146
|
-
if (this._hasRunQuickFlush) {
|
|
147
|
-
return;
|
|
148
|
-
}
|
|
149
|
-
this._hasRunQuickFlush = true;
|
|
150
|
-
if (Date.now() - this._creationTime > QUICK_FLUSH_WINDOW_MS) {
|
|
141
|
+
appendAndResetNonExposedChecks() {
|
|
142
|
+
if (Object.keys(this._nonExposedChecks).length === 0) {
|
|
151
143
|
return;
|
|
152
144
|
}
|
|
153
|
-
|
|
145
|
+
const event = this._normalizeEvent({
|
|
146
|
+
eventName: 'statsig::non_exposed_checks',
|
|
147
|
+
user: null,
|
|
148
|
+
time: Date.now(),
|
|
149
|
+
metadata: {
|
|
150
|
+
checks: Object.assign({}, this._nonExposedChecks),
|
|
151
|
+
},
|
|
152
|
+
});
|
|
153
|
+
this._flushCoordinator.addEvent(event);
|
|
154
|
+
this._nonExposedChecks = {};
|
|
154
155
|
}
|
|
155
156
|
_shouldLogEvent(event) {
|
|
156
157
|
var _a;
|
|
@@ -185,118 +186,54 @@ class EventLogger {
|
|
|
185
186
|
this._lastExposureTimeMap[key] = now;
|
|
186
187
|
return true;
|
|
187
188
|
}
|
|
188
|
-
|
|
189
|
-
|
|
190
|
-
|
|
191
|
-
|
|
192
|
-
|
|
193
|
-
|
|
194
|
-
}
|
|
195
|
-
try {
|
|
196
|
-
const isClosing = (0, VisibilityObserving_1._isUnloading)();
|
|
197
|
-
const shouldUseBeacon = isClosing &&
|
|
198
|
-
this._network.isBeaconSupported() &&
|
|
199
|
-
((_b = (_a = this._options) === null || _a === void 0 ? void 0 : _a.networkConfig) === null || _b === void 0 ? void 0 : _b.networkOverrideFunc) == null;
|
|
200
|
-
this._emitter({
|
|
201
|
-
name: 'pre_logs_flushed',
|
|
202
|
-
events,
|
|
203
|
-
});
|
|
204
|
-
const response = shouldUseBeacon
|
|
205
|
-
? this._sendEventsViaBeacon(events)
|
|
206
|
-
: yield this._sendEventsViaPost(events);
|
|
207
|
-
if (response.success) {
|
|
208
|
-
this._emitter({
|
|
209
|
-
name: 'logs_flushed',
|
|
210
|
-
events,
|
|
211
|
-
});
|
|
212
|
-
return true;
|
|
213
|
-
}
|
|
214
|
-
else {
|
|
215
|
-
Log_1.Log.warn('Failed to flush events.');
|
|
216
|
-
this._saveFailedLogsToStorage(events);
|
|
217
|
-
return false;
|
|
218
|
-
}
|
|
219
|
-
}
|
|
220
|
-
catch (_c) {
|
|
221
|
-
Log_1.Log.warn('Failed to flush events.');
|
|
222
|
-
return false;
|
|
223
|
-
}
|
|
224
|
-
});
|
|
225
|
-
}
|
|
226
|
-
_sendEventsViaPost(events) {
|
|
227
|
-
return __awaiter(this, void 0, void 0, function* () {
|
|
228
|
-
var _a;
|
|
229
|
-
const result = yield this._network.post(this._getRequestData(events));
|
|
230
|
-
const code = (_a = result === null || result === void 0 ? void 0 : result.code) !== null && _a !== void 0 ? _a : -1;
|
|
231
|
-
return { success: code >= 200 && code < 300 };
|
|
232
|
-
});
|
|
233
|
-
}
|
|
234
|
-
_sendEventsViaBeacon(events) {
|
|
235
|
-
return {
|
|
236
|
-
success: this._network.beacon(this._getRequestData(events)),
|
|
237
|
-
};
|
|
238
|
-
}
|
|
239
|
-
_getRequestData(events) {
|
|
240
|
-
return {
|
|
241
|
-
sdkKey: this._sdkKey,
|
|
242
|
-
data: {
|
|
243
|
-
events,
|
|
244
|
-
},
|
|
245
|
-
urlConfig: this._logEventUrlConfig,
|
|
246
|
-
retries: 3,
|
|
247
|
-
isCompressable: true,
|
|
248
|
-
params: {
|
|
249
|
-
[NetworkConfig_1.NetworkParam.EventCount]: String(events.length),
|
|
250
|
-
},
|
|
251
|
-
credentials: 'same-origin',
|
|
252
|
-
};
|
|
189
|
+
_getCurrentPageUrl() {
|
|
190
|
+
var _a;
|
|
191
|
+
if (((_a = this._options) === null || _a === void 0 ? void 0 : _a.includeCurrentPageUrlWithEvents) === false) {
|
|
192
|
+
return;
|
|
193
|
+
}
|
|
194
|
+
return (0, SafeJs_1._getCurrentPageUrlSafe)();
|
|
253
195
|
}
|
|
254
|
-
|
|
255
|
-
|
|
256
|
-
|
|
196
|
+
_getStorageKey() {
|
|
197
|
+
if (!this._storageKey) {
|
|
198
|
+
this._storageKey = `statsig.pending_events.${(0, Hashing_1._DJB2)(this._sdkKey)}`;
|
|
257
199
|
}
|
|
200
|
+
return this._storageKey;
|
|
201
|
+
}
|
|
202
|
+
_storeEventToStorage(event) {
|
|
258
203
|
const storageKey = this._getStorageKey();
|
|
259
204
|
try {
|
|
260
|
-
|
|
261
|
-
|
|
205
|
+
let existingEvents = this._getEventsFromStorage(storageKey);
|
|
206
|
+
existingEvents.push(event);
|
|
207
|
+
if (existingEvents.length > EventRetryConstants_1.EventRetryConstants.MAX_LOCAL_STORAGE) {
|
|
208
|
+
existingEvents = existingEvents.slice(-EventRetryConstants_1.EventRetryConstants.MAX_LOCAL_STORAGE);
|
|
209
|
+
}
|
|
210
|
+
(0, StorageProvider_1._setObjectInStorage)(storageKey, existingEvents);
|
|
262
211
|
}
|
|
263
|
-
catch (
|
|
264
|
-
Log_1.Log.warn('Unable to save
|
|
212
|
+
catch (error) {
|
|
213
|
+
Log_1.Log.warn('Unable to save events to storage');
|
|
265
214
|
}
|
|
266
215
|
}
|
|
267
|
-
|
|
268
|
-
let savedEvents = [];
|
|
216
|
+
_getEventsFromStorage(storageKey) {
|
|
269
217
|
try {
|
|
270
|
-
const
|
|
271
|
-
if (Array.isArray(
|
|
272
|
-
|
|
218
|
+
const events = (0, StorageProvider_1._getObjectFromStorage)(storageKey);
|
|
219
|
+
if (Array.isArray(events)) {
|
|
220
|
+
return events;
|
|
273
221
|
}
|
|
274
|
-
return
|
|
222
|
+
return [];
|
|
275
223
|
}
|
|
276
224
|
catch (_a) {
|
|
277
225
|
return [];
|
|
278
226
|
}
|
|
279
227
|
}
|
|
280
|
-
|
|
228
|
+
_loadStoredEvents() {
|
|
281
229
|
const storageKey = this._getStorageKey();
|
|
282
|
-
|
|
283
|
-
|
|
284
|
-
yield StorageProvider_1.Storage.isReadyResolver();
|
|
285
|
-
}
|
|
286
|
-
const events = (0, StorageProvider_1._getObjectFromStorage)(storageKey);
|
|
287
|
-
if (!events) {
|
|
288
|
-
return;
|
|
289
|
-
}
|
|
230
|
+
const events = this._getEventsFromStorage(storageKey);
|
|
231
|
+
if (events.length > 0) {
|
|
290
232
|
StorageProvider_1.Storage.removeItem(storageKey);
|
|
291
|
-
|
|
292
|
-
|
|
293
|
-
Log_1.Log.warn('Failed to flush stored logs');
|
|
294
|
-
});
|
|
295
|
-
}
|
|
296
|
-
_getStorageKey() {
|
|
297
|
-
return `statsig.failed_logs.${(0, Hashing_1._DJB2)(this._sdkKey)}`;
|
|
233
|
+
}
|
|
234
|
+
return events;
|
|
298
235
|
}
|
|
299
|
-
|
|
236
|
+
_normalizeEvent(event) {
|
|
300
237
|
if (event.user) {
|
|
301
238
|
event.user = Object.assign({}, event.user);
|
|
302
239
|
delete event.user.privateAttributes;
|
|
@@ -306,44 +243,7 @@ class EventLogger {
|
|
|
306
243
|
if (currentPage) {
|
|
307
244
|
extras.statsigMetadata = { currentPage };
|
|
308
245
|
}
|
|
309
|
-
|
|
310
|
-
Log_1.Log.debug('Enqueued Event:', final);
|
|
311
|
-
this._queue.push(final);
|
|
312
|
-
}
|
|
313
|
-
_appendAndResetNonExposedChecks() {
|
|
314
|
-
if (Object.keys(this._nonExposedChecks).length === 0) {
|
|
315
|
-
return;
|
|
316
|
-
}
|
|
317
|
-
this._normalizeAndAppendEvent({
|
|
318
|
-
eventName: 'statsig::non_exposed_checks',
|
|
319
|
-
user: null,
|
|
320
|
-
time: Date.now(),
|
|
321
|
-
metadata: {
|
|
322
|
-
checks: Object.assign({}, this._nonExposedChecks),
|
|
323
|
-
},
|
|
324
|
-
});
|
|
325
|
-
this._nonExposedChecks = {};
|
|
326
|
-
}
|
|
327
|
-
_getCurrentPageUrl() {
|
|
328
|
-
var _a;
|
|
329
|
-
if (((_a = this._options) === null || _a === void 0 ? void 0 : _a.includeCurrentPageUrlWithEvents) === false) {
|
|
330
|
-
return;
|
|
331
|
-
}
|
|
332
|
-
return (0, SafeJs_1._getCurrentPageUrlSafe)();
|
|
333
|
-
}
|
|
334
|
-
_startBackgroundFlushInterval() {
|
|
335
|
-
var _a, _b;
|
|
336
|
-
const flushInterval = (_b = (_a = this._options) === null || _a === void 0 ? void 0 : _a.loggingIntervalMs) !== null && _b !== void 0 ? _b : DEFAULT_FLUSH_INTERVAL_MS;
|
|
337
|
-
const intervalId = setInterval(() => {
|
|
338
|
-
const logger = EVENT_LOGGER_MAP[this._sdkKey];
|
|
339
|
-
if (!logger || logger._flushIntervalId !== intervalId) {
|
|
340
|
-
clearInterval(intervalId);
|
|
341
|
-
}
|
|
342
|
-
else {
|
|
343
|
-
EventLogger._safeFlushAndForget(this._sdkKey);
|
|
344
|
-
}
|
|
345
|
-
}, flushInterval);
|
|
346
|
-
this._flushIntervalId = intervalId;
|
|
246
|
+
return Object.assign(Object.assign({}, event), extras);
|
|
347
247
|
}
|
|
348
248
|
}
|
|
349
249
|
exports.EventLogger = EventLogger;
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
import { EventBatch } from './EventBatch';
|
|
2
|
+
import { NetworkCore } from './NetworkCore';
|
|
3
|
+
import { StatsigClientEmitEventFunc } from './StatsigClientBase';
|
|
4
|
+
import { LogEventCompressionMode, LoggingEnabledOption, NetworkConfigCommon, StatsigOptionsCommon } from './StatsigOptionsCommon';
|
|
5
|
+
import { UrlConfiguration } from './UrlConfiguration';
|
|
6
|
+
export declare class EventSender {
|
|
7
|
+
private _network;
|
|
8
|
+
private _sdkKey;
|
|
9
|
+
private _options;
|
|
10
|
+
private _logEventUrlConfig;
|
|
11
|
+
private _emitter;
|
|
12
|
+
private _loggingEnabled;
|
|
13
|
+
constructor(sdkKey: string, network: NetworkCore, emitter: StatsigClientEmitEventFunc, logEventUrlConfig: UrlConfiguration, options: StatsigOptionsCommon<NetworkConfigCommon> | null, loggingEnabled: LoggingEnabledOption);
|
|
14
|
+
setLogEventCompressionMode(mode: LogEventCompressionMode): void;
|
|
15
|
+
setLoggingEnabled(enabled: LoggingEnabledOption): void;
|
|
16
|
+
sendBatch(batch: EventBatch): Promise<{
|
|
17
|
+
success: boolean;
|
|
18
|
+
statusCode: number;
|
|
19
|
+
}>;
|
|
20
|
+
private _sendEventsViaPost;
|
|
21
|
+
private _sendEventsViaBeacon;
|
|
22
|
+
private _getRequestData;
|
|
23
|
+
}
|
|
@@ -0,0 +1,96 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
3
|
+
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
4
|
+
return new (P || (P = Promise))(function (resolve, reject) {
|
|
5
|
+
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
6
|
+
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
7
|
+
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
8
|
+
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
9
|
+
});
|
|
10
|
+
};
|
|
11
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
12
|
+
exports.EventSender = void 0;
|
|
13
|
+
const Log_1 = require("./Log");
|
|
14
|
+
const NetworkConfig_1 = require("./NetworkConfig");
|
|
15
|
+
const VisibilityObserving_1 = require("./VisibilityObserving");
|
|
16
|
+
class EventSender {
|
|
17
|
+
constructor(sdkKey, network, emitter, logEventUrlConfig, options, loggingEnabled) {
|
|
18
|
+
this._sdkKey = sdkKey;
|
|
19
|
+
this._network = network;
|
|
20
|
+
this._emitter = emitter;
|
|
21
|
+
this._options = options;
|
|
22
|
+
this._logEventUrlConfig = logEventUrlConfig;
|
|
23
|
+
this._loggingEnabled = loggingEnabled;
|
|
24
|
+
}
|
|
25
|
+
setLogEventCompressionMode(mode) {
|
|
26
|
+
this._network.setLogEventCompressionMode(mode);
|
|
27
|
+
}
|
|
28
|
+
setLoggingEnabled(enabled) {
|
|
29
|
+
this._loggingEnabled = enabled;
|
|
30
|
+
}
|
|
31
|
+
sendBatch(batch) {
|
|
32
|
+
return __awaiter(this, void 0, void 0, function* () {
|
|
33
|
+
var _a, _b;
|
|
34
|
+
try {
|
|
35
|
+
const isClosing = (0, VisibilityObserving_1._isUnloading)();
|
|
36
|
+
const shouldUseBeacon = isClosing &&
|
|
37
|
+
this._network.isBeaconSupported() &&
|
|
38
|
+
((_b = (_a = this._options) === null || _a === void 0 ? void 0 : _a.networkConfig) === null || _b === void 0 ? void 0 : _b.networkOverrideFunc) == null;
|
|
39
|
+
this._emitter({
|
|
40
|
+
name: 'pre_logs_flushed',
|
|
41
|
+
events: batch.events,
|
|
42
|
+
});
|
|
43
|
+
const response = shouldUseBeacon
|
|
44
|
+
? this._sendEventsViaBeacon(batch)
|
|
45
|
+
: yield this._sendEventsViaPost(batch);
|
|
46
|
+
if (response.success) {
|
|
47
|
+
this._emitter({
|
|
48
|
+
name: 'logs_flushed',
|
|
49
|
+
events: batch.events,
|
|
50
|
+
});
|
|
51
|
+
return response;
|
|
52
|
+
}
|
|
53
|
+
return { success: false, statusCode: -1 };
|
|
54
|
+
}
|
|
55
|
+
catch (error) {
|
|
56
|
+
Log_1.Log.warn('Failed to send batch:', error);
|
|
57
|
+
return { success: false, statusCode: -1 };
|
|
58
|
+
}
|
|
59
|
+
});
|
|
60
|
+
}
|
|
61
|
+
_sendEventsViaPost(batch) {
|
|
62
|
+
return __awaiter(this, void 0, void 0, function* () {
|
|
63
|
+
var _a;
|
|
64
|
+
const result = yield this._network.post(this._getRequestData(batch));
|
|
65
|
+
const code = (_a = result === null || result === void 0 ? void 0 : result.code) !== null && _a !== void 0 ? _a : -1;
|
|
66
|
+
return { success: code >= 200 && code < 300, statusCode: code };
|
|
67
|
+
});
|
|
68
|
+
}
|
|
69
|
+
_sendEventsViaBeacon(batch) {
|
|
70
|
+
const success = this._network.beacon(this._getRequestData(batch));
|
|
71
|
+
return {
|
|
72
|
+
success,
|
|
73
|
+
statusCode: success ? 200 : -1,
|
|
74
|
+
};
|
|
75
|
+
}
|
|
76
|
+
_getRequestData(batch) {
|
|
77
|
+
return {
|
|
78
|
+
sdkKey: this._sdkKey,
|
|
79
|
+
data: {
|
|
80
|
+
events: batch.events,
|
|
81
|
+
},
|
|
82
|
+
urlConfig: this._logEventUrlConfig,
|
|
83
|
+
retries: 3,
|
|
84
|
+
isCompressable: true,
|
|
85
|
+
params: {
|
|
86
|
+
[NetworkConfig_1.NetworkParam.EventCount]: String(batch.events.length),
|
|
87
|
+
},
|
|
88
|
+
headers: {
|
|
89
|
+
'statsig-event-count': String(batch.events.length),
|
|
90
|
+
'statsig-retry-count': String(batch.attempts),
|
|
91
|
+
},
|
|
92
|
+
credentials: 'same-origin',
|
|
93
|
+
};
|
|
94
|
+
}
|
|
95
|
+
}
|
|
96
|
+
exports.EventSender = EventSender;
|
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
import { BatchQueue } from './BatchedEventsQueue';
|
|
2
|
+
import { ErrorBoundary } from './ErrorBoundary';
|
|
3
|
+
import { NetworkCore } from './NetworkCore';
|
|
4
|
+
import { PendingEvents } from './PendingEvents';
|
|
5
|
+
import { StatsigClientEmitEventFunc } from './StatsigClientBase';
|
|
6
|
+
import { StatsigEventInternal } from './StatsigEvent';
|
|
7
|
+
import { LogEventCompressionMode, LoggingEnabledOption, NetworkConfigCommon, StatsigOptionsCommon } from './StatsigOptionsCommon';
|
|
8
|
+
import { UrlConfiguration } from './UrlConfiguration';
|
|
9
|
+
/** Callback invoked just before a flush so the owner can finalize pending state. */
type PrepareFlushCallBack = () => void;
/**
 * Coordinates when and how queued events are flushed over the network.
 *
 * Owns the adaptive flush timer (FlushInterval), converts pending events into
 * batches, sends them through an internally-constructed EventSender, and
 * handles retry/backoff plus persistence of events that fail during shutdown.
 */
export declare class FlushCoordinator {
    private _flushInterval;
    private _batchQueue;
    private _pendingEvents;
    private _eventSender;
    private _onPrepareFlush;
    private _errorBoundary;
    private _cooldownTimer;
    private _maxIntervalTimer;
    private _hasRunQuickFlush;
    private _currentFlushPromise;
    private _creationTime;
    private _sdkKey;
    private _storageKey;
    constructor(batchQueue: BatchQueue, pendingEvents: PendingEvents, onPrepareFlush: PrepareFlushCallBack, sdkKey: string, network: NetworkCore, emitter: StatsigClientEmitEventFunc, logEventUrlConfig: UrlConfiguration, options: StatsigOptionsCommon<NetworkConfigCommon> | null, loggingEnabled: LoggingEnabledOption, errorBoundary: ErrorBoundary);
    /** Forwards the logging-enabled setting to the underlying EventSender. */
    setLoggingEnabled(loggingEnabled: LoggingEnabledOption): void;
    /** Forwards the compression mode to the underlying EventSender. */
    setLogEventCompressionMode(mode: LogEventCompressionMode): void;
    /** Arms the scheduled (cooldown + max-interval) flush timers. */
    startScheduledFlushCycle(): void;
    /** Cancels any armed scheduled flush timers. */
    stopScheduledFlushCycle(): void;
    /** Queues one event; may trigger an immediate "limit" flush when a full batch accumulates. */
    addEvent(event: StatsigEventInternal): void;
    /** Caller-initiated flush of all queued batches; awaits any in-flight flush first. */
    processManualFlush(): Promise<void>;
    /** Final flush on shutdown; failed events are persisted for the next session. */
    processShutdown(): Promise<void>;
    private _executeFlush;
    /** Schedules a one-off early flush shortly after client creation (runs at most once). */
    checkQuickFlush(): void;
    private _attemptScheduledFlush;
    /** Flush triggered by the pending-events queue reaching a full batch. */
    processLimitFlush(): void;
    private _processLimitFlushInternal;
    private _scheduleNextFlush;
    private _clearAllTimers;
    private _processNextBatch;
    private _processOneBatch;
    private _prepareQueueForFlush;
    /** True when either the pending queue or the batch queue holds a full batch. */
    containsAtLeastOneFullBatch(): boolean;
    /** Drains pending events into batches; returns the number of events dropped. */
    convertPendingEventsToBatches(): number;
    private _handleFailure;
    /** Re-enqueues and flushes events persisted by a failed shutdown flush in a prior session. */
    loadAndRetryShutdownFailedEvents(): Promise<void>;
    private _getStorageKey;
    private _saveShutdownFailedEventsToStorage;
    private _getShutdownFailedEventsFromStorage;
}
export {};
|
|
@@ -0,0 +1,332 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
// TypeScript-emitted async/await downlevel helper: runs a generator-based
// function body inside a Promise, stepping the generator and resolving or
// rejecting as it completes. Compiler boilerplate — do not edit by hand.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
|
11
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
12
|
+
exports.FlushCoordinator = void 0;
|
|
13
|
+
const EventRetryConstants_1 = require("./EventRetryConstants");
|
|
14
|
+
const EventSender_1 = require("./EventSender");
|
|
15
|
+
const FlushInterval_1 = require("./FlushInterval");
|
|
16
|
+
const FlushTypes_1 = require("./FlushTypes");
|
|
17
|
+
const Hashing_1 = require("./Hashing");
|
|
18
|
+
const Log_1 = require("./Log");
|
|
19
|
+
const NetworkCore_1 = require("./NetworkCore");
|
|
20
|
+
const StorageProvider_1 = require("./StorageProvider");
|
|
21
|
+
// Central scheduler for event delivery. Owns the adaptive flush timer
// (FlushInterval), converts pending events into batches, sends them via an
// internally-constructed EventSender, and handles retry/backoff, limit
// flushes, manual flushes, shutdown flushes, and persistence of events that
// fail during shutdown.
//
// Concurrency model: at most one flush runs at a time, tracked by
// _currentFlushPromise; every entry point either awaits it or bails out when
// it is set.
class FlushCoordinator {
    constructor(batchQueue, pendingEvents, onPrepareFlush,
    // The remaining arguments are forwarded to the EventSender built below.
    sdkKey, network, emitter, logEventUrlConfig, options, loggingEnabled, errorBoundary) {
        this._cooldownTimer = null;
        this._maxIntervalTimer = null;
        this._hasRunQuickFlush = false;
        this._currentFlushPromise = null;
        this._creationTime = Date.now();
        this._storageKey = null;
        this._flushInterval = new FlushInterval_1.FlushInterval();
        this._batchQueue = batchQueue;
        this._pendingEvents = pendingEvents;
        this._onPrepareFlush = onPrepareFlush;
        this._errorBoundary = errorBoundary;
        this._sdkKey = sdkKey;
        this._eventSender = new EventSender_1.EventSender(sdkKey, network, emitter, logEventUrlConfig, options, loggingEnabled);
    }
    // Forwards the logging-enabled flag to the underlying sender.
    setLoggingEnabled(loggingEnabled) {
        this._eventSender.setLoggingEnabled(loggingEnabled);
    }
    // Forwards the log-event compression mode to the underlying sender.
    setLogEventCompressionMode(mode) {
        this._eventSender.setLogEventCompressionMode(mode);
    }
    // Arms the cooldown and max-interval timers.
    startScheduledFlushCycle() {
        this._scheduleNextFlush();
    }
    // Cancels both scheduled-flush timers.
    // NOTE(review): does not cancel the one-off timer armed by
    // checkQuickFlush (see below) — confirm intended.
    stopScheduledFlushCycle() {
        this._clearAllTimers();
    }
    // Enqueues one event; when the pending queue reaches a full batch this
    // kicks off a "limit" flush immediately.
    addEvent(event) {
        this._pendingEvents.addToPendingEventsQueue(event);
        if (this._pendingEvents.hasEventsForFullBatch()) {
            this.processLimitFlush();
        }
    }
    // Caller-initiated flush. Waits for any in-flight flush to finish, then
    // flushes every queued batch and re-arms the scheduled timers.
    processManualFlush() {
        return __awaiter(this, void 0, void 0, function* () {
            if (this._currentFlushPromise) {
                yield this._currentFlushPromise;
            }
            this._currentFlushPromise = this._executeFlush(FlushTypes_1.FlushType.Manual).finally(() => {
                this._currentFlushPromise = null;
                this._scheduleNextFlush();
            });
            return this._currentFlushPromise;
        });
    }
    // Final flush on shutdown. Errors are logged rather than propagated so
    // shutdown never throws; batches that fail here are saved to storage by
    // _handleFailure for retry in the next session.
    processShutdown() {
        return __awaiter(this, void 0, void 0, function* () {
            if (this._currentFlushPromise) {
                yield this._currentFlushPromise;
            }
            this._currentFlushPromise = this._executeFlush(FlushTypes_1.FlushType.Shutdown)
                .catch((error) => {
                Log_1.Log.error(`Error during shutdown flush: ${error}`);
            })
                .finally(() => {
                this._currentFlushPromise = null;
            });
            return this._currentFlushPromise;
        });
    }
    // Drains the pending queue into batches, takes ALL queued batches, and
    // sends them concurrently. Timers are paused for the duration.
    // NOTE(review): the finally re-arms the scheduled timers even when
    // flushType is Shutdown — confirm intended.
    _executeFlush(flushType) {
        return __awaiter(this, void 0, void 0, function* () {
            this._clearAllTimers();
            try {
                this._prepareQueueForFlush(flushType);
                const batches = this._batchQueue.takeAllBatches();
                if (batches.length === 0) {
                    return;
                }
                yield Promise.all(batches.map((batch) => this._processOneBatch(batch, flushType)));
            }
            finally {
                this._scheduleNextFlush();
            }
        });
    }
    // Schedules a single early "quick" flush shortly after construction, but
    // only if we are still within the quick-flush window. Runs at most once.
    // NOTE(review): this setTimeout handle is not tracked by _clearAllTimers,
    // so it can still fire after stopScheduledFlushCycle — confirm intended.
    checkQuickFlush() {
        if (this._hasRunQuickFlush) {
            return;
        }
        if (Date.now() - this._creationTime >
            EventRetryConstants_1.EventRetryConstants.QUICK_FLUSH_WINDOW_MS) {
            return;
        }
        this._hasRunQuickFlush = true;
        setTimeout(() => {
            this.processManualFlush().catch((error) => {
                Log_1.Log.warn('Quick flush failed:', error);
            });
        }, EventRetryConstants_1.EventRetryConstants.QUICK_FLUSH_WINDOW_MS);
    }
    // Timer callback. Skips (and reschedules) when a flush is already in
    // flight or when there is nothing worth sending yet; otherwise sends one
    // batch, tagged by whether size or elapsed time triggered it.
    _attemptScheduledFlush() {
        if (this._currentFlushPromise) {
            this._scheduleNextFlush();
            return;
        }
        const shouldFlushBySize = this.containsAtLeastOneFullBatch();
        const shouldFlushByTime = this._flushInterval.hasReachedMaxInterval();
        if (!shouldFlushBySize && !shouldFlushByTime) {
            this._scheduleNextFlush();
            return;
        }
        this._flushInterval.markFlushAttempt();
        let flushType;
        if (shouldFlushBySize) {
            flushType = FlushTypes_1.FlushType.ScheduledFullBatch;
        }
        else {
            flushType = FlushTypes_1.FlushType.ScheduledMaxTime;
        }
        this._currentFlushPromise = this._processNextBatch(flushType)
            .then(() => {
            // Discards the boolean result; the promise only tracks completion.
        })
            .catch((error) => {
            Log_1.Log.error('Error during scheduled flush:', error);
        })
            .finally(() => {
            this._currentFlushPromise = null;
            this._scheduleNextFlush();
        });
    }
    // Flush triggered by a full batch accumulating. Suppressed while in
    // backoff (interval above the floor) or while another flush is running.
    processLimitFlush() {
        if (!this._flushInterval.hasCompletelyRecoveredFromBackoff()) {
            return;
        }
        if (this._currentFlushPromise) {
            return;
        }
        this._currentFlushPromise = this._processLimitFlushInternal()
            .catch((error) => {
            Log_1.Log.error(`Error during limit flush`, error);
        })
            .finally(() => {
            this._currentFlushPromise = null;
            this._scheduleNextFlush();
        });
    }
    // Sends batches one at a time, continuing only while each send succeeds,
    // backoff has not kicked in, and another full batch is available.
    _processLimitFlushInternal() {
        return __awaiter(this, void 0, void 0, function* () {
            const success = yield this._processNextBatch(FlushTypes_1.FlushType.Limit);
            if (!success) {
                return;
            }
            while (this._flushInterval.hasCompletelyRecoveredFromBackoff() &&
                this.containsAtLeastOneFullBatch()) {
                const success = yield this._processNextBatch(FlushTypes_1.FlushType.Limit);
                if (!success) {
                    break;
                }
            }
        });
    }
    // Re-arms both timers: the cooldown timer fires after the current
    // (possibly backed-off) interval; the max-interval timer guarantees a
    // flush attempt even when no full batch forms.
    _scheduleNextFlush() {
        this._clearAllTimers();
        const cooldownDelay = Math.max(0, this._flushInterval.getTimeUntilNextFlush());
        this._cooldownTimer = setTimeout(() => {
            this._cooldownTimer = null;
            this._attemptScheduledFlush();
        }, cooldownDelay);
        const maxIntervalDelay = Math.max(0, this._flushInterval.getTimeTillMaxInterval());
        this._maxIntervalTimer = setTimeout(() => {
            this._maxIntervalTimer = null;
            this._attemptScheduledFlush();
        }, maxIntervalDelay);
    }
    // Cancels whichever of the two scheduled timers are armed.
    _clearAllTimers() {
        if (this._cooldownTimer !== null) {
            clearTimeout(this._cooldownTimer);
            this._cooldownTimer = null;
        }
        if (this._maxIntervalTimer !== null) {
            clearTimeout(this._maxIntervalTimer);
            this._maxIntervalTimer = null;
        }
    }
    // Sends the single oldest batch. Returns false when there is nothing to
    // send or the send failed; true on success.
    _processNextBatch(flushType) {
        return __awaiter(this, void 0, void 0, function* () {
            this._prepareQueueForFlush(flushType);
            const batch = this._batchQueue.takeNextBatch();
            if (!batch) {
                return false;
            }
            return this._processOneBatch(batch, flushType);
        });
    }
    // Sends one batch and feeds the outcome into the backoff interval.
    // Failures are routed to _handleFailure (retry/persist/drop).
    _processOneBatch(batch, flushType) {
        return __awaiter(this, void 0, void 0, function* () {
            const result = yield this._eventSender.sendBatch(batch);
            if (result.success) {
                this._flushInterval.adjustForSuccess();
                return true;
            }
            this._flushInterval.adjustForFailure();
            this._handleFailure(batch, flushType, result.statusCode);
            return false;
        });
    }
    // Lets the owner finalize state, then drains pending events into batches,
    // reporting any events dropped because the batch queue was full.
    _prepareQueueForFlush(flushType) {
        this._onPrepareFlush();
        const droppedCount = this.convertPendingEventsToBatches();
        if (droppedCount > 0) {
            Log_1.Log.warn(`Dropped ${droppedCount} events`);
            this._errorBoundary.logDroppedEvents(droppedCount, `Batch queue limit reached`, {
                loggingInterval: this._flushInterval.getCurrentIntervalMs(),
                batchSize: this._batchQueue.batchSize(),
                maxPendingBatches: EventRetryConstants_1.EventRetryConstants.MAX_PENDING_BATCHES,
                flushType: flushType,
            });
        }
    }
    // True when either the raw pending queue or the batch queue already
    // holds at least one full batch's worth of events.
    containsAtLeastOneFullBatch() {
        return (this._pendingEvents.hasEventsForFullBatch() ||
            this._batchQueue.hasFullBatch());
    }
    // Moves all pending events into the batch queue.
    // Returns the number of events the batch queue had to drop.
    convertPendingEventsToBatches() {
        if (this._pendingEvents.isEmpty()) {
            return 0;
        }
        const allEvents = this._pendingEvents.takeAll();
        return this._batchQueue.createBatches(allEvents);
    }
    // Failure policy, in priority order:
    //   1. Shutdown flush  -> persist events to storage for the next session.
    //   2. Non-retryable   -> drop the batch and report.
    //   3. Retries spent   -> drop the batch and report.
    //   4. Otherwise       -> bump attempt count and requeue for later.
    _handleFailure(batch, flushType, statusCode) {
        if (flushType === FlushTypes_1.FlushType.Shutdown) {
            Log_1.Log.warn(`${flushType} flush failed during shutdown. ` +
                `${batch.events.length} event(s) will be saved to storage for retry in next session.`);
            this._saveShutdownFailedEventsToStorage(batch.events);
            this._errorBoundary.logEventRequestFailure(batch.events.length, `flush failed during shutdown - saved to storage`, flushType, statusCode);
            return;
        }
        if (!NetworkCore_1.RETRYABLE_CODES.has(statusCode)) {
            Log_1.Log.warn(`${flushType} flush failed after ${batch.attempts} attempt(s). ` +
                `${batch.events.length} event(s) will be dropped. Non-retryable error: ${statusCode}`);
            this._errorBoundary.logEventRequestFailure(batch.events.length, `non-retryable error`, flushType, statusCode);
            return;
        }
        if (batch.attempts >= EventRetryConstants_1.EventRetryConstants.MAX_RETRY_ATTEMPTS) {
            Log_1.Log.warn(`${flushType} flush failed after ${batch.attempts} attempt(s). ` +
                `${batch.events.length} event(s) will be dropped.`);
            this._errorBoundary.logEventRequestFailure(batch.events.length, `max retry attempts exceeded`, flushType, statusCode);
            return;
        }
        batch.incrementAttempts();
        const droppedCount = this._batchQueue.requeueBatch(batch);
        if (droppedCount > 0) {
            Log_1.Log.warn(`Failed to requeue batch : dropped ${droppedCount} events due to full queue`);
            this._errorBoundary.logDroppedEvents(droppedCount, `Batch queue limit reached`, {
                loggingInterval: this._flushInterval.getCurrentIntervalMs(),
                batchSize: this._batchQueue.batchSize(),
                maxPendingBatches: EventRetryConstants_1.EventRetryConstants.MAX_PENDING_BATCHES,
                flushType: flushType,
            });
        }
    }
    // On startup: reload events persisted by a failed shutdown flush, clear
    // them from storage, re-enqueue them, and flush. Best-effort — any error
    // is logged and swallowed.
    loadAndRetryShutdownFailedEvents() {
        return __awaiter(this, void 0, void 0, function* () {
            const storageKey = this._getStorageKey();
            try {
                const events = this._getShutdownFailedEventsFromStorage(storageKey);
                if (events.length === 0) {
                    return;
                }
                Log_1.Log.debug(`Loading ${events.length} failed shutdown event(s) from storage for retry`);
                StorageProvider_1.Storage.removeItem(storageKey);
                events.forEach((event) => {
                    this.addEvent(event);
                });
                yield this.processManualFlush();
            }
            catch (error) {
                Log_1.Log.warn('Failed to load and retry failed shutdown events:', error);
            }
        });
    }
    // Lazily computes the per-SDK-key storage key (DJB2 hash keeps the raw
    // SDK key out of storage).
    _getStorageKey() {
        if (!this._storageKey) {
            this._storageKey = `statsig.failed_shutdown_events.${(0, Hashing_1._DJB2)(this._sdkKey)}`;
        }
        return this._storageKey;
    }
    // Appends events to any already-persisted ones, capped at
    // MAX_LOCAL_STORAGE (oldest events are discarded first). Best-effort.
    _saveShutdownFailedEventsToStorage(events) {
        const storageKey = this._getStorageKey();
        try {
            const existingEvents = this._getShutdownFailedEventsFromStorage(storageKey);
            let allEvents = [...existingEvents, ...events];
            if (allEvents.length > EventRetryConstants_1.EventRetryConstants.MAX_LOCAL_STORAGE) {
                allEvents = allEvents.slice(-EventRetryConstants_1.EventRetryConstants.MAX_LOCAL_STORAGE);
            }
            (0, StorageProvider_1._setObjectInStorage)(storageKey, allEvents);
            Log_1.Log.debug(`Saved ${events.length} failed shutdown event(s) to storage (total stored: ${allEvents.length})`);
        }
        catch (error) {
            Log_1.Log.warn('Unable to save failed shutdown events to storage:', error);
        }
    }
    // Reads the persisted event list; returns [] for missing, malformed, or
    // unreadable storage.
    _getShutdownFailedEventsFromStorage(storageKey) {
        try {
            const events = (0, StorageProvider_1._getObjectFromStorage)(storageKey);
            if (Array.isArray(events)) {
                return events;
            }
            return [];
        }
        catch (_a) {
            return [];
        }
    }
}
|
|
332
|
+
exports.FlushCoordinator = FlushCoordinator;
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
/**
 * Adaptive flush cadence with exponential backoff: the interval doubles on
 * each failed flush (up to a ceiling) and halves after each success (down to
 * a floor). Also tracks when the last flush attempt happened.
 */
export declare class FlushInterval {
    private _currentIntervalMs;
    private _lastFlushAttemptTime;
    /** Current interval between flush attempts, in milliseconds. */
    getCurrentIntervalMs(): number;
    /** Records "now" as the most recent flush attempt. */
    markFlushAttempt(): void;
    /** Milliseconds elapsed since the last recorded flush attempt. */
    getTimeSinceLastAttempt(): number;
    /** True once the max interval has elapsed since the last attempt. */
    hasReachedMaxInterval(): boolean;
    /** Milliseconds remaining until the max interval elapses (may be negative). */
    getTimeTillMaxInterval(): number;
    /** True when the interval is back at its floor (i.e. not backing off). */
    hasCompletelyRecoveredFromBackoff(): boolean;
    /** Halves the interval (bounded below by the floor) after a successful flush. */
    adjustForSuccess(): void;
    /** Doubles the interval (bounded above by the ceiling) after a failed flush. */
    adjustForFailure(): void;
    /** Milliseconds until the next flush is due (may be negative if overdue). */
    getTimeUntilNextFlush(): number;
}
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.FlushInterval = void 0;
|
|
4
|
+
// Adaptive flush cadence: doubles on failure up to the ceiling, halves on
// success back down to the floor, and tracks the last flush-attempt time.
const MIN_FLUSH_INTERVAL_MS = 1000;
const MAX_FLUSH_INTERVAL_MS = 60000;
class FlushInterval {
    constructor() {
        // Start fully "recovered", at the minimum interval.
        this._currentIntervalMs = MIN_FLUSH_INTERVAL_MS;
        // Treat construction time as the first attempt.
        this._lastFlushAttemptTime = Date.now();
    }
    // Current interval between flush attempts, in ms.
    getCurrentIntervalMs() {
        return this._currentIntervalMs;
    }
    // Record "now" as the most recent flush attempt.
    markFlushAttempt() {
        this._lastFlushAttemptTime = Date.now();
    }
    // Milliseconds elapsed since the last recorded attempt.
    getTimeSinceLastAttempt() {
        return Date.now() - this._lastFlushAttemptTime;
    }
    // True once a full max interval has passed without an attempt.
    hasReachedMaxInterval() {
        return this.getTimeSinceLastAttempt() >= MAX_FLUSH_INTERVAL_MS;
    }
    // Milliseconds left before the max interval elapses (may go negative).
    getTimeTillMaxInterval() {
        return MAX_FLUSH_INTERVAL_MS - this.getTimeSinceLastAttempt();
    }
    // True when no backoff is in effect (interval is at the floor).
    hasCompletelyRecoveredFromBackoff() {
        return this._currentIntervalMs <= MIN_FLUSH_INTERVAL_MS;
    }
    // Successful flush: halve the interval, never dropping below the floor.
    adjustForSuccess() {
        if (this._currentIntervalMs === MIN_FLUSH_INTERVAL_MS) {
            return;
        }
        const halved = Math.floor(this._currentIntervalMs / 2);
        this._currentIntervalMs = Math.max(MIN_FLUSH_INTERVAL_MS, halved);
    }
    // Failed flush: double the interval, never exceeding the ceiling.
    adjustForFailure() {
        const doubled = this._currentIntervalMs * 2;
        this._currentIntervalMs = Math.min(MAX_FLUSH_INTERVAL_MS, doubled);
    }
    // Milliseconds until the next flush is due (negative when overdue).
    getTimeUntilNextFlush() {
        return this.getCurrentIntervalMs() - this.getTimeSinceLastAttempt();
    }
}
|
|
44
|
+
exports.FlushInterval = FlushInterval;
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.FlushType = void 0;
|
|
4
|
+
/* eslint-disable no-restricted-syntax */
|
|
5
|
+
// Compiled TypeScript string enum describing what triggered a flush.
var FlushType;
(function (FlushType) {
    // Scheduled timer fired because the max interval elapsed.
    FlushType["ScheduledMaxTime"] = "scheduled:max_time";
    // Scheduled timer fired and a full batch was available.
    FlushType["ScheduledFullBatch"] = "scheduled:full_batch";
    // Pending-event queue reached a full batch.
    FlushType["Limit"] = "limit";
    // Caller-initiated flush.
    FlushType["Manual"] = "manual";
    // Final flush during client shutdown.
    FlushType["Shutdown"] = "shutdown";
})(FlushType || (exports.FlushType = FlushType = {}));
|
package/src/NetworkCore.d.ts
CHANGED
|
@@ -5,6 +5,7 @@ import { StatsigClientEmitEventFunc } from './StatsigClientBase';
|
|
|
5
5
|
import { AnyStatsigOptions, LogEventCompressionMode } from './StatsigOptionsCommon';
|
|
6
6
|
import { Flatten } from './TypingUtils';
|
|
7
7
|
import { UrlConfiguration } from './UrlConfiguration';
|
|
8
|
+
export declare const RETRYABLE_CODES: Set<number>;
|
|
8
9
|
type RequestArgs = {
|
|
9
10
|
sdkKey: string;
|
|
10
11
|
urlConfig: UrlConfiguration;
|
package/src/NetworkCore.js
CHANGED
|
@@ -9,7 +9,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
|
|
|
9
9
|
});
|
|
10
10
|
};
|
|
11
11
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
12
|
-
exports.NetworkCore = void 0;
|
|
12
|
+
exports.NetworkCore = exports.RETRYABLE_CODES = void 0;
|
|
13
13
|
require("./$_StatsigGlobal");
|
|
14
14
|
const __StatsigGlobal_1 = require("./$_StatsigGlobal");
|
|
15
15
|
const Diagnostics_1 = require("./Diagnostics");
|
|
@@ -31,7 +31,9 @@ const BACKOFF_MAX_MS = 30000;
|
|
|
31
31
|
const RATE_LIMIT_WINDOW_MS = 1000;
|
|
32
32
|
const RATE_LIMIT_MAX_REQ_COUNT = 50;
|
|
33
33
|
const LEAK_RATE = RATE_LIMIT_MAX_REQ_COUNT / RATE_LIMIT_WINDOW_MS;
|
|
34
|
-
|
|
34
|
+
exports.RETRYABLE_CODES = new Set([
|
|
35
|
+
408, 500, 502, 503, 504, 522, 524, 599,
|
|
36
|
+
]);
|
|
35
37
|
class NetworkCore {
|
|
36
38
|
constructor(options, _emitter) {
|
|
37
39
|
this._emitter = _emitter;
|
|
@@ -161,7 +163,7 @@ class NetworkCore {
|
|
|
161
163
|
}
|
|
162
164
|
if (!retries ||
|
|
163
165
|
currentAttempt > retries ||
|
|
164
|
-
!RETRYABLE_CODES.has((_b = response === null || response === void 0 ? void 0 : response.status) !== null && _b !== void 0 ? _b : 500)) {
|
|
166
|
+
!exports.RETRYABLE_CODES.has((_b = response === null || response === void 0 ? void 0 : response.status) !== null && _b !== void 0 ? _b : 500)) {
|
|
165
167
|
(_c = this._emitter) === null || _c === void 0 ? void 0 : _c.call(this, {
|
|
166
168
|
name: 'error',
|
|
167
169
|
error,
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
import { StatsigEventInternal } from './StatsigEvent';
|
|
2
|
+
/**
 * FIFO buffer for events that have not yet been grouped into batches.
 * Reports when enough events have accumulated to form a full batch.
 */
export declare class PendingEvents {
    private _pendingEvents;
    private _batchSize;
    constructor(batchSize: number);
    /** Appends one event to the end of the queue. */
    addToPendingEventsQueue(event: StatsigEventInternal): void;
    /** True when at least `batchSize` events are buffered. */
    hasEventsForFullBatch(): boolean;
    /** Removes and returns all buffered events, leaving the queue empty. */
    takeAll(): StatsigEventInternal[];
    /** True when no events are buffered. */
    isEmpty(): boolean;
}
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.PendingEvents = void 0;
|
|
4
|
+
const Log_1 = require("./Log");
|
|
5
|
+
// FIFO holding area for events that have not yet been grouped into batches.
class PendingEvents {
    constructor(batchSize) {
        // Number of events that constitutes one full batch.
        this._batchSize = batchSize;
        this._pendingEvents = [];
    }
    // Append one event and log it for debugging.
    addToPendingEventsQueue(event) {
        this._pendingEvents.push(event);
        Log_1.Log.debug('Enqueued Event:', event);
    }
    // True once enough events are buffered to fill a batch.
    hasEventsForFullBatch() {
        return this._pendingEvents.length >= this._batchSize;
    }
    // Drain the queue, returning every buffered event in insertion order.
    takeAll() {
        return this._pendingEvents.splice(0, this._pendingEvents.length);
    }
    // True when nothing is buffered.
    isEmpty() {
        return this._pendingEvents.length === 0;
    }
}
|
|
26
|
+
exports.PendingEvents = PendingEvents;
|
package/src/StatsigClientBase.js
CHANGED
|
@@ -41,8 +41,8 @@ class StatsigClientBase {
|
|
|
41
41
|
this._options = options !== null && options !== void 0 ? options : {};
|
|
42
42
|
this._memoCache = {};
|
|
43
43
|
this.overrideAdapter = (_a = options === null || options === void 0 ? void 0 : options.overrideAdapter) !== null && _a !== void 0 ? _a : null;
|
|
44
|
-
this._logger = new EventLogger_1.EventLogger(sdkKey, emitter, network, options);
|
|
45
44
|
this._errorBoundary = new ErrorBoundary_1.ErrorBoundary(sdkKey, options, emitter);
|
|
45
|
+
this._logger = new EventLogger_1.EventLogger(sdkKey, emitter, network, options, this._errorBoundary);
|
|
46
46
|
this._errorBoundary.wrap(this);
|
|
47
47
|
this._errorBoundary.wrap(adapter);
|
|
48
48
|
this._errorBoundary.wrap(this._logger);
|
package/src/StatsigMetadata.d.ts
CHANGED
package/src/StatsigMetadata.js
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
3
|
exports.StatsigMetadataProvider = exports.SDK_VERSION = void 0;
|
|
4
|
-
exports.SDK_VERSION = '3.31.1-beta.1';
|
|
4
|
+
exports.SDK_VERSION = '3.31.1-beta.2';
|
|
5
5
|
let metadata = {
|
|
6
6
|
sdkVersion: exports.SDK_VERSION,
|
|
7
7
|
sdkType: 'js-mono', // js-mono is overwritten by Precomp and OnDevice clients
|