@launchdarkly/js-client-sdk-common 1.14.0 → 1.15.0
This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +14 -0
- package/dist/cjs/LDClientImpl.d.ts +11 -2
- package/dist/cjs/LDClientImpl.d.ts.map +1 -1
- package/dist/cjs/api/LDClient.d.ts +37 -0
- package/dist/cjs/api/LDClient.d.ts.map +1 -1
- package/dist/cjs/api/LDIdentifyOptions.d.ts +10 -0
- package/dist/cjs/api/LDIdentifyOptions.d.ts.map +1 -1
- package/dist/cjs/api/LDIdentifyResult.d.ts +28 -0
- package/dist/cjs/api/LDIdentifyResult.d.ts.map +1 -0
- package/dist/cjs/api/index.d.ts +1 -0
- package/dist/cjs/api/index.d.ts.map +1 -1
- package/dist/cjs/api/integrations/Hooks.d.ts +10 -1
- package/dist/cjs/api/integrations/Hooks.d.ts.map +1 -1
- package/dist/cjs/async/AsyncTaskQueue.d.ts +28 -9
- package/dist/cjs/async/AsyncTaskQueue.d.ts.map +1 -1
- package/dist/cjs/index.cjs +269 -37
- package/dist/cjs/index.cjs.map +1 -1
- package/dist/cjs/index.d.ts +1 -1
- package/dist/cjs/index.d.ts.map +1 -1
- package/dist/cjs/streaming/StreamingProcessor.d.ts +1 -0
- package/dist/cjs/streaming/StreamingProcessor.d.ts.map +1 -1
- package/dist/esm/LDClientImpl.d.ts +11 -2
- package/dist/esm/LDClientImpl.d.ts.map +1 -1
- package/dist/esm/api/LDClient.d.ts +37 -0
- package/dist/esm/api/LDClient.d.ts.map +1 -1
- package/dist/esm/api/LDIdentifyOptions.d.ts +10 -0
- package/dist/esm/api/LDIdentifyOptions.d.ts.map +1 -1
- package/dist/esm/api/LDIdentifyResult.d.ts +28 -0
- package/dist/esm/api/LDIdentifyResult.d.ts.map +1 -0
- package/dist/esm/api/index.d.ts +1 -0
- package/dist/esm/api/index.d.ts.map +1 -1
- package/dist/esm/api/integrations/Hooks.d.ts +10 -1
- package/dist/esm/api/integrations/Hooks.d.ts.map +1 -1
- package/dist/esm/async/AsyncTaskQueue.d.ts +28 -9
- package/dist/esm/async/AsyncTaskQueue.d.ts.map +1 -1
- package/dist/esm/index.d.ts +1 -1
- package/dist/esm/index.d.ts.map +1 -1
- package/dist/esm/index.mjs +270 -38
- package/dist/esm/index.mjs.map +1 -1
- package/dist/esm/streaming/StreamingProcessor.d.ts +1 -0
- package/dist/esm/streaming/StreamingProcessor.d.ts.map +1 -1
- package/package.json +1 -1
package/dist/cjs/index.cjs
CHANGED
@@ -98,6 +98,143 @@ function makeRequestor(plainContextString, serviceEndpoints, paths, requests, en
     return new Requestor(requests, uri, headers, method, body);
 }

+const duplicateExecutionError = new Error('Task has already been executed or shed. This is likely an implementation error. The task will not be executed again.');
+/**
+ * Creates a pending task.
+ * @param task The async function to execute.
+ * @param sheddable Whether the task can be shed from the queue.
+ * @returns A pending task.
+ */
+function makePending(task, _logger, sheddable = false) {
+    let resolveTask;
+    const promise = new Promise((resolve) => {
+        resolveTask = (result, beforeResult) => {
+            try {
+                task.after?.(result, beforeResult);
+            }
+            catch (error) {
+                _logger?.error(`Error in after callback: ${error}`);
+            }
+            resolve(result);
+        };
+    });
+    const beforePromise = task.before ? task.before() : Promise.resolve(undefined);
+    let executedOrShed = false;
+    return {
+        execute: () => {
+            if (executedOrShed) {
+                // This should never happen. If it does, then it represents an implementation error in the SDK.
+                _logger?.error(duplicateExecutionError);
+            }
+            executedOrShed = true;
+            beforePromise
+                .then((beforeResult) => {
+                task
+                    .execute(beforeResult)
+                    .then((result) => resolveTask({ status: 'complete', result }, beforeResult))
+                    .catch((error) => resolveTask({ status: 'error', error }, beforeResult));
+            })
+                .catch((error) => {
+                _logger?.error(error);
+                resolveTask({ status: 'error', error }, undefined);
+            });
+        },
+        shed: () => {
+            if (executedOrShed) {
+                // This should never happen. If it does, then it represents an implementation error in the SDK.
+                _logger?.error(duplicateExecutionError);
+            }
+            executedOrShed = true;
+            beforePromise.then((beforeResult) => {
+                resolveTask({ status: 'shed' }, beforeResult);
+            });
+        },
+        promise,
+        sheddable,
+    };
+}
+/**
+ * An asynchronous task queue with the ability to replace pending tasks.
+ *
+ * This is useful when you have asynchronous operations which much execute in order, and for cases where intermediate
+ * operations can be discarded.
+ *
+ * For instance, the SDK can only have one active context at a time, if you request identification of many contexts,
+ * then the ultimate state will be based on the last request. The intermediate identifies can be discarded.
+ *
+ * This queue will always begin execution of the first item added to the queue, at that point the item itself is not
+ * queued, but active. If another request is made while that item is still active, then it is added to the queue.
+ * A third request would then replace the second request if the second request had not yet become active, and it was
+ * sheddable.
+ *
+ * Once a task is active the queue will complete it. It doesn't cancel tasks that it has started, but it can shed tasks
+ * that have not started.
+ *
+ * TTaskResult Is the return type of the task to be executed. Tasks accept no parameters. So if you need parameters
+ * you should use a lambda to capture them.
+ *
+ * Exceptions from tasks are always handled and the execute method will never reject a promise.
+ *
+ * Queue management should be done synchronously. There should not be asynchronous operations between checking the queue
+ * and acting on the results of said check.
+ */
+function createAsyncTaskQueue(logger) {
+    let activeTask;
+    const queue = [];
+    function checkPending() {
+        // There is an existing active task, so we don't need to do anything.
+        if (activeTask) {
+            return;
+        }
+        // There are pending tasks, so we need to execute the next one.
+        if (queue.length > 0) {
+            const nextTask = queue.shift();
+            activeTask = nextTask.promise.finally(() => {
+                activeTask = undefined;
+                checkPending();
+            });
+            nextTask.execute();
+        }
+    }
+    return {
+        /**
+         * Execute a task using the queue.
+         *
+         * @param task The async function to execute.
+         * @param sheddable Whether the task can be shed from the queue.
+         * @returns A promise that resolves to the result of the task.
+         */
+        execute(task, sheddable = false) {
+            const pending = makePending(task, logger, sheddable);
+            if (!activeTask) {
+                activeTask = pending.promise.finally(() => {
+                    activeTask = undefined;
+                    checkPending();
+                });
+                pending.execute();
+            }
+            else {
+                // If the last pending task is sheddable, we need to shed it before adding the new task.
+                if (queue[queue.length - 1]?.sheddable) {
+                    queue.pop()?.shed();
+                }
+                queue.push(pending);
+            }
+            return pending.promise;
+        },
+        /**
+         * Returns the number of pending tasks in the queue.
+         * Intended for testing purposes only.
+         *
+         * @internal
+         * @returns The number of pending tasks in the queue.
+         */
+        pendingCount() {
+            return queue.length;
+        },
+    };
+}
+
 // eslint-disable-next-line max-classes-per-file
 const validators = {
     logger: jsSdkCommon.TypeValidators.Object,
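The queue above is an internal utility (its declarations are in dist/cjs/async/AsyncTaskQueue.d.ts and dist/esm/async/AsyncTaskQueue.d.ts). As a rough behavioral sketch of the shedding rules described in its JSDoc, assuming the helper is in scope and using a hand-written declaration rather than the package's real one:

// TypeScript sketch of the queue behavior shown in the compiled code above.
// The declaration below is an assumption restating that code, not the package's API.
type TaskResult<T> =
  | { status: 'complete'; result: T }
  | { status: 'error'; error: unknown }
  | { status: 'shed' };

declare function createAsyncTaskQueue<T>(logger?: { error(msg: unknown): void }): {
  execute(
    task: {
      before?(): Promise<unknown>;
      execute(beforeResult?: unknown): Promise<T>;
      after?(res: TaskResult<T>, beforeResult?: unknown): void;
    },
    sheddable?: boolean,
  ): Promise<TaskResult<T>>;
  pendingCount(): number;
};

async function demo(): Promise<void> {
  const queue = createAsyncTaskQueue<string>();
  const first = queue.execute({ execute: async () => 'first' });          // becomes active immediately
  const second = queue.execute({ execute: async () => 'second' }, true);  // queued while `first` is active, sheddable
  const third = queue.execute({ execute: async () => 'third' }, true);    // replaces (sheds) `second`, which never became active
  console.log(await first);  // { status: 'complete', result: 'first' }
  console.log(await second); // { status: 'shed' }
  console.log(await third);  // { status: 'complete', result: 'third' }
}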
@@ -1212,6 +1349,7 @@ class LDClientImpl {
         this._eventFactoryDefault = new EventFactory(false);
         this._eventFactoryWithReasons = new EventFactory(true);
         this._eventSendingEnabled = false;
+        this._identifyQueue = createAsyncTaskQueue();
         if (!sdkKey) {
             throw new Error('You must configure the client with a client-side SDK key');
         }
@@ -1277,7 +1415,7 @@ class LDClientImpl {
     }
     getContext() {
         // The LDContext returned here may have been modified by the SDK (for example: adding auto env attributes).
-        // We are returning an LDContext here to maintain a consistent
+        // We are returning an LDContext here to maintain a consistent representation of context to the consuming
         // code. We are returned the unchecked context so that if a consumer identifies with an invalid context
         // and then calls getContext, they get back the same context they provided, without any assertion about
         // validity.
@@ -1286,28 +1424,25 @@ class LDClientImpl {
     getInternalContext() {
         return this._checkedContext;
     }
-    _createIdentifyPromise(
+    _createIdentifyPromise() {
         let res;
         let rej;
         const basePromise = new Promise((resolve, reject) => {
            res = resolve;
            rej = reject;
         });
-
-        return { identifyPromise: basePromise, identifyResolve: res, identifyReject: rej };
-        }
-        const timed = jsSdkCommon.timedPromise(timeout, 'identify');
-        const raced = Promise.race([timed, basePromise]).catch((e) => {
-            if (e.message.includes('timed out')) {
-                this.logger.error(`identify error: ${e}`);
-            }
-            throw e;
-        });
-        return { identifyPromise: raced, identifyResolve: res, identifyReject: rej };
+        return { identifyPromise: basePromise, identifyResolve: res, identifyReject: rej };
     }
     /**
      * Identifies a context to LaunchDarkly. See {@link LDClient.identify}.
      *
+     * If used with the `sheddable` option set to true, then the identify operation will be sheddable. This means that if
+     * multiple identify operations are done, without waiting for the previous one to complete, then intermediate
+     * operations may be discarded.
+     *
+     * It is recommended to use the `identifyResult` method instead when the operation is sheddable. In a future release,
+     * all identify operations will default to being sheddable.
+     *
      * @param pristineContext The LDContext object to be identified.
      * @param identifyOptions Optional configuration. See {@link LDIdentifyOptions}.
      * @returns A Promise which resolves when the flag values for the specified
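A hypothetical consumer-side sketch of the new options, assuming a platform SDK built on this package has already constructed `client`. The `sheddable` option, `identifyResult`, and the result statuses are taken from the compiled code in this diff; the exact type exports available to application code may differ per platform SDK.

import type { LDClient, LDContext } from '@launchdarkly/js-client-sdk-common';

async function switchContext(client: LDClient, context: LDContext): Promise<void> {
  // identify(): resolves on success, rejects on error or timeout.
  await client.identify(context, { timeout: 5 });

  // identifyResult(): reports the outcome as a status instead of throwing, and is the
  // recommended entry point when the operation is sheddable.
  const result = await client.identifyResult(context, { sheddable: true });
  switch (result.status) {
    case 'completed':
      break; // flag values for the new context are ready
    case 'shed':
      break; // a newer identify replaced this one before it started
    case 'timeout':
      break; // gave up after result.timeout seconds; the identify may still finish later
    case 'error':
      break; // result.error describes the failure
  }
}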
@@ -1321,39 +1456,95 @@ class LDClientImpl {
      * 3. A network error is encountered during initialization.
      */
     async identify(pristineContext, identifyOptions) {
+        // In order to manage customization in the derived classes it is important that `identify` MUST be implemented in
+        // terms of `identifyResult`. So that the logic of the identification process can be extended in one place.
+        const result = await this.identifyResult(pristineContext, identifyOptions);
+        if (result.status === 'error') {
+            throw result.error;
+        }
+        else if (result.status === 'timeout') {
+            const timeoutError = new jsSdkCommon.LDTimeoutError(`identify timed out after ${result.timeout} seconds.`);
+            this.logger.error(timeoutError.message);
+            throw timeoutError;
+        }
+        // If completed or shed, then we are done.
+    }
+    async identifyResult(pristineContext, identifyOptions) {
         const identifyTimeout = identifyOptions?.timeout ?? DEFAULT_IDENTIFY_TIMEOUT_SECONDS;
         const noTimeout = identifyOptions?.timeout === undefined && identifyOptions?.noTimeout === true;
-        // When noTimeout is specified, and a timeout is not
+        // When noTimeout is specified, and a timeout is not specified, then this condition cannot
         // be encountered. (Our default would need to be greater)
         if (identifyTimeout > this._highTimeoutThreshold) {
            this.logger.warn('The identify function was called with a timeout greater than ' +
                `${this._highTimeoutThreshold} seconds. We recommend a timeout of less than ` +
                `${this._highTimeoutThreshold} seconds.`);
         }
-        [old lines 1333-1355 removed: previous identify implementation, not shown]
+        const callSitePromise = this._identifyQueue
+            .execute({
+            before: async () => {
+                let context = await ensureKey(pristineContext, this.platform);
+                if (this.autoEnvAttributes === jsSdkCommon.AutoEnvAttributes.Enabled) {
+                    context = await addAutoEnv(context, this.platform, this._config);
+                }
+                const checkedContext = jsSdkCommon.Context.fromLDContext(context);
+                if (checkedContext.valid) {
+                    const afterIdentify = this._hookRunner.identify(context, identifyOptions?.timeout);
+                    return {
+                        context,
+                        checkedContext,
+                        afterIdentify,
+                    };
+                }
+                return {
+                    context,
+                    checkedContext,
+                };
+            },
+            execute: async (beforeResult) => {
+                const { context, checkedContext } = beforeResult;
+                if (!checkedContext.valid) {
+                    const error = new Error('Context was unspecified or had no key');
+                    this.emitter.emit('error', context, error);
+                    return Promise.reject(error);
+                }
+                this._uncheckedContext = context;
+                this._checkedContext = checkedContext;
+                this._eventProcessor?.sendEvent(this._eventFactoryDefault.identifyEvent(this._checkedContext));
+                const { identifyPromise, identifyResolve, identifyReject } = this._createIdentifyPromise();
+                this.logger.debug(`Identifying ${JSON.stringify(this._checkedContext)}`);
+                await this.dataManager.identify(identifyResolve, identifyReject, checkedContext, identifyOptions);
+                return identifyPromise;
+            },
+            after: async (res, beforeResult) => {
+                if (res.status === 'complete') {
+                    beforeResult?.afterIdentify?.({ status: 'completed' });
+                }
+                else if (res.status === 'shed') {
+                    beforeResult?.afterIdentify?.({ status: 'shed' });
+                }
+                else if (res.status === 'error') {
+                    beforeResult?.afterIdentify?.({ status: 'error' });
+                }
+            },
+        }, identifyOptions?.sheddable ?? false)
+            .then((res) => {
+            if (res.status === 'error') {
+                return { status: 'error', error: res.error };
+            }
+            if (res.status === 'shed') {
+                return { status: 'shed' };
+            }
+            return { status: 'completed' };
+        });
+        if (noTimeout) {
+            return callSitePromise;
+        }
+        const timeoutPromise = new Promise((resolve) => {
+            setTimeout(() => {
+                resolve({ status: 'timeout', timeout: identifyTimeout });
+            }, identifyTimeout * 1000);
         });
+        return Promise.race([callSitePromise, timeoutPromise]);
     }
     on(eventName, listener) {
         this.emitter.on(eventName, listener);
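A minimal, generic restatement of the timeout pattern identifyResult uses above: instead of rejecting on timeout, the operation is raced against a timer that resolves to a 'timeout' status, so callers always receive a plain result object. The names here (`withTimeout`, `Result`) are illustrative only; the losing promise is not cancelled, so the underlying operation keeps running, just as in the SDK code.

type Result<T> =
  | { status: 'completed'; value: T }
  | { status: 'timeout'; timeout: number };

async function withTimeout<T>(operation: Promise<T>, timeoutSeconds: number): Promise<Result<T>> {
  // A timer that resolves (never rejects) with a timeout status.
  const timeoutPromise = new Promise<Result<T>>((resolve) => {
    setTimeout(() => resolve({ status: 'timeout', timeout: timeoutSeconds }), timeoutSeconds * 1000);
  });
  const completed = operation.then((value): Result<T> => ({ status: 'completed', value }));
  return Promise.race([completed, timeoutPromise]);
}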
@@ -1650,6 +1841,9 @@ class DataSourceStatusManager {
     }
 }

+function reportClosed(logger) {
+    logger?.debug(`Poll completed after the processor was closed. Skipping processing.`);
+}
 /**
  * @internal
  */
@@ -1676,6 +1870,12 @@ class PollingProcessor {
         try {
             const res = await this._requestor.requestPayload();
             try {
+                // If the processor has been stopped, we discard the response.
+                // This response could be for a no longer active context.
+                if (this._stopped) {
+                    reportClosed(this._logger);
+                    return;
+                }
                 const flags = JSON.parse(res);
                 try {
                     this._dataHandler?.(flags);
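The same guard appears again for polling errors and for streaming events and pings in the hunks that follow: any state checked before an await must be re-checked after it, because the processor may have been stopped (for example by a newer identify switching contexts) while the request was in flight. A hypothetical minimal version of the pattern, with invented names (`Poller`, `pollOnce`, `onData`):

class Poller {
  private stopped = false;

  constructor(
    private request: () => Promise<string>,
    private onData: (flags: unknown) => void,
  ) {}

  async pollOnce(): Promise<void> {
    const res = await this.request();
    // Re-check after the await: the poller may have been stopped mid-request.
    if (this.stopped) {
      return; // discard a response that may belong to a context that is no longer active
    }
    this.onData(JSON.parse(res));
  }

  stop(): void {
    this.stopped = true;
  }
}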
@@ -1689,6 +1889,12 @@ class PollingProcessor {
             }
         }
         catch (err) {
+            // If the processor has been stopped, we discard this error.
+            // The original caller would consider this connection no longer active.
+            if (this._stopped) {
+                reportClosed(this._logger);
+                return;
+            }
             const requestError = err;
             if (requestError.status !== undefined) {
                 if (!jsSdkCommon.isHttpRecoverable(requestError.status)) {
@@ -1726,6 +1932,12 @@ const reportJsonError = (type, data, logger, errorHandler) => {
     logger?.debug(`Invalid JSON follows: ${data}`);
     errorHandler?.(new jsSdkCommon.LDStreamingError(jsSdkCommon.DataSourceErrorKind.InvalidData, 'Malformed JSON data in event stream'));
 };
+function reportEventClosed(eventName, logger) {
+    logger?.debug(`Received ${eventName} event after processor was closed. Skipping processing.`);
+}
+function reportPingClosed(logger) {
+    logger?.debug('Ping completed after processor was closed. Skipping processing.');
+}
 class StreamingProcessor {
     constructor(_plainContextString, _dataSourceConfig, _listeners, _requests, encoding, _pollingRequestor, _diagnosticsManager, _errorHandler, _logger) {
         this._plainContextString = _plainContextString;
@@ -1736,6 +1948,7 @@ class StreamingProcessor {
         this._diagnosticsManager = _diagnosticsManager;
         this._errorHandler = _errorHandler;
         this._logger = _logger;
+        this._stopped = false;
         let path;
         if (_dataSourceConfig.useReport && !_requests.getEventSourceCapabilities().customMethod) {
             path = _dataSourceConfig.paths.pathPing(encoding, _plainContextString);
@@ -1823,6 +2036,12 @@ class StreamingProcessor {
         };
         this._listeners.forEach(({ deserializeData, processJson }, eventName) => {
             eventSource.addEventListener(eventName, (event) => {
+                // If an event comes in after the processor has been stopped, we skip processing it.
+                // This event could be for a context which is no longer active.
+                if (this._stopped) {
+                    reportEventClosed(eventName, this._logger);
+                    return;
+                }
                 this._logger?.debug(`Received ${eventName} event`);
                 if (event?.data) {
                     this._logConnectionResult(true);
@@ -1845,6 +2064,12 @@ class StreamingProcessor {
         try {
             const res = await this._pollingRequestor.requestPayload();
             try {
+                // If the ping completes after the processor has been stopped, then we discard it.
+                // This event could be for a context which is no longer active.
+                if (this._stopped) {
+                    reportPingClosed(this._logger);
+                    return;
+                }
                 const payload = JSON.parse(res);
                 try {
                     // forward the payload on to the PUT listener
@@ -1861,6 +2086,12 @@ class StreamingProcessor {
             }
         }
         catch (err) {
+            if (this._stopped) {
+                // If the ping errors after the processor has been stopped, then we discard it.
+                // The original caller would consider this connection no longer active.
+                reportPingClosed(this._logger);
+                return;
+            }
             const requestError = err;
             this._errorHandler?.(new jsSdkCommon.LDPollingError(jsSdkCommon.DataSourceErrorKind.ErrorResponse, requestError.message, requestError.status));
         }
@@ -1869,6 +2100,7 @@ class StreamingProcessor {
     stop() {
         this._eventSource?.close();
         this._eventSource = undefined;
+        this._stopped = true;
     }
     close() {
         this.stop();