@microsoft/1ds-post-js 3.2.2 → 3.2.3

This diff shows the changes between two publicly released versions of the package as they appear in the public registry. It is provided for informational purposes only.
Files changed (47)
  1. package/README.md +1 -1
  2. package/bundle/{ms.post-3.2.2.gbl.js → ms.post-3.2.3.gbl.js} +732 -726
  3. package/bundle/ms.post-3.2.3.gbl.js.map +1 -0
  4. package/bundle/ms.post-3.2.3.gbl.min.js +7 -0
  5. package/bundle/ms.post-3.2.3.gbl.min.js.map +1 -0
  6. package/bundle/ms.post-3.2.3.integrity.json +46 -0
  7. package/bundle/{ms.post-3.2.2.js → ms.post-3.2.3.js} +732 -726
  8. package/bundle/ms.post-3.2.3.js.map +1 -0
  9. package/bundle/ms.post-3.2.3.min.js +7 -0
  10. package/bundle/ms.post-3.2.3.min.js.map +1 -0
  11. package/bundle/ms.post.gbl.js +731 -725
  12. package/bundle/ms.post.gbl.js.map +1 -1
  13. package/bundle/ms.post.gbl.min.js +2 -2
  14. package/bundle/ms.post.gbl.min.js.map +1 -1
  15. package/bundle/ms.post.integrity.json +17 -17
  16. package/bundle/ms.post.js +731 -725
  17. package/bundle/ms.post.js.map +1 -1
  18. package/bundle/ms.post.min.js +2 -2
  19. package/bundle/ms.post.min.js.map +1 -1
  20. package/dist/ms.post.js +18 -12
  21. package/dist/ms.post.js.map +1 -1
  22. package/dist/ms.post.min.js +2 -2
  23. package/dist/ms.post.min.js.map +1 -1
  24. package/dist-esm/src/BatchNotificationActions.js +1 -1
  25. package/dist-esm/src/ClockSkewManager.js +1 -1
  26. package/dist-esm/src/Constants.js +1 -1
  27. package/dist-esm/src/DataModels.js +1 -1
  28. package/dist-esm/src/EventBatch.js +1 -1
  29. package/dist-esm/src/HttpManager.js +63 -63
  30. package/dist-esm/src/HttpManager.js.map +1 -1
  31. package/dist-esm/src/Index.js +1 -1
  32. package/dist-esm/src/KillSwitch.js +1 -1
  33. package/dist-esm/src/PostChannel.js +65 -57
  34. package/dist-esm/src/PostChannel.js.map +1 -1
  35. package/dist-esm/src/RetryPolicy.js +1 -1
  36. package/dist-esm/src/Serializer.js +1 -1
  37. package/dist-esm/src/TimeoutOverrideWrapper.js +1 -1
  38. package/dist-esm/src/typings/XDomainRequest.js +1 -1
  39. package/package.json +2 -2
  40. package/src/PostChannel.ts +29 -16
  41. package/bundle/ms.post-3.2.2.gbl.js.map +0 -1
  42. package/bundle/ms.post-3.2.2.gbl.min.js +0 -7
  43. package/bundle/ms.post-3.2.2.gbl.min.js.map +0 -1
  44. package/bundle/ms.post-3.2.2.integrity.json +0 -46
  45. package/bundle/ms.post-3.2.2.js.map +0 -1
  46. package/bundle/ms.post-3.2.2.min.js +0 -7
  47. package/bundle/ms.post-3.2.2.min.js.map +0 -1
@@ -1,5 +1,5 @@
  /*
- * 1DS JS SDK POST plugin, 3.2.2
+ * 1DS JS SDK POST plugin, 3.2.3
  * Copyright (c) Microsoft and contributors. All rights reserved.
  * (Microsoft Internal Only)
  */
@@ -40,7 +40,7 @@ var PostChannel = /** @class */ (function (_super) {
  var _this = _super.call(this) || this;
  _this.identifier = "PostChannel";
  _this.priority = 1011;
- _this.version = '3.2.2';
+ _this.version = '3.2.3';
  var _config;
  var _isTeardownCalled = false;
  var _flushCallbackQueue = [];
@@ -66,7 +66,6 @@ var PostChannel = /** @class */ (function (_super) {
  var _delayedBatchReason;
  var _optimizeObject = true;
  var _isPageUnloadTriggered = false;
- var _disableXhrSync = false;
  var _maxEventSendAttempts = MaxSendAttempts;
  var _maxUnloadEventSendAttempts = MaxSyncUnloadSendAttempts;
  var _evtNamespace;
@@ -100,7 +99,6 @@ var PostChannel = /** @class */ (function (_super) {
  if (_config.autoFlushEventsLimit > 0) {
  _autoFlushEventsLimit = _config.autoFlushEventsLimit;
  }
- _disableXhrSync = _config.disableXhrSync;
  if (isNumber(_config[strMaxEventRetryAttempts])) {
  _maxEventSendAttempts = _config[strMaxEventRetryAttempts];
  }
@@ -157,7 +155,7 @@ var PostChannel = /** @class */ (function (_super) {
  _addEventToQueues(event, true);
  if (_isPageUnloadTriggered) {
  // Unload event has been received so we need to try and flush new events
- _releaseAllQueues(2 /* SendBeacon */, 2 /* Unload */);
+ _releaseAllQueues(2 /* EventSendType.SendBeacon */, 2 /* SendRequestReason.Unload */);
  }
  else {
  _scheduleTimer();
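
Most of the line churn in this diff is cosmetic: inlined const enum literals such as 2 /* SendBeacon */ are re-emitted as 2 /* EventSendType.SendBeacon */. This is consistent with the package being rebuilt with a newer TypeScript compiler, which includes the fully qualified member name in the inlining comment; the runtime values are unchanged. A minimal illustration follows (the enum mirrors names seen in the diff, and the emitted forms in the comments are representative, not taken from this package):

// Const enum members are erased at compile time and inlined at each use site.
const enum EventSendType {
    Batched = 0,
    Synchronous = 1,
    SendBeacon = 2
}

function release(sendType: EventSendType): void {
    // Logs the numeric value that the compiler inlined at the call site.
    console.log("sendType", sendType);
}

// Older TypeScript emits:  release(2 /* SendBeacon */);
// Newer TypeScript emits:  release(2 /* EventSendType.SendBeacon */);
release(EventSendType.SendBeacon);
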
@@ -166,7 +164,7 @@ var PostChannel = /** @class */ (function (_super) {
  _self.processNext(event, itemCtx);
  };
  _self._doTeardown = function (unloadCtx, unloadState) {
- _releaseAllQueues(2 /* SendBeacon */, 2 /* Unload */);
+ _releaseAllQueues(2 /* EventSendType.SendBeacon */, 2 /* SendRequestReason.Unload */);
  _isTeardownCalled = true;
  _httpManager.teardown();
  removePageUnloadEventListener(null, _evtNamespace);
@@ -193,7 +191,7 @@ var PostChannel = /** @class */ (function (_super) {
  _isPageUnloadTriggered = true;
  _httpManager.setUnloading(_isPageUnloadTriggered);
  }
- _releaseAllQueues(2 /* SendBeacon */, 2 /* Unload */);
+ _releaseAllQueues(2 /* EventSendType.SendBeacon */, 2 /* SendRequestReason.Unload */);
  }
  function _handleShowEvents(evt) {
  // Handle the page becoming visible again
@@ -207,7 +205,7 @@ var PostChannel = /** @class */ (function (_super) {
  }
  // Add default latency
  if (!event.latency) {
- event.latency = 1 /* Normal */;
+ event.latency = 1 /* EventLatencyValue.Normal */;
  }
  // Remove extra AI properties if present
  if (event.ext && event.ext["trace"]) {
@@ -230,7 +228,7 @@ var PostChannel = /** @class */ (function (_super) {
  // If the transmission is backed off then do not send synchronous events.
  // We will convert these events to Real time latency instead.
  if (_currentBackoffCount || _paused) {
- event.latency = 3 /* RealTime */;
+ event.latency = 3 /* EventLatencyValue.RealTime */;
  event.sync = false;
  }
  else {
@@ -240,7 +238,7 @@ var PostChannel = /** @class */ (function (_super) {
  if (_optimizeObject) {
  event = optimizeObject(event);
  }
- _httpManager.sendSynchronousBatch(EventBatch.create(event.iKey, [event]), event.sync === true ? 1 /* Synchronous */ : event.sync, 3 /* SyncEvent */);
+ _httpManager.sendSynchronousBatch(EventBatch.create(event.iKey, [event]), event.sync === true ? 1 /* EventSendType.Synchronous */ : event.sync, 3 /* SendRequestReason.SyncEvent */);
  return;
  }
  }
@@ -248,7 +246,7 @@ var PostChannel = /** @class */ (function (_super) {
  var evtLatency = event.latency;
  var queueSize = _queueSize;
  var queueLimit = _queueSizeLimit;
- if (evtLatency === 4 /* Immediate */) {
+ if (evtLatency === 4 /* EventLatencyValue.Immediate */) {
  queueSize = _immediateQueueSize;
  queueLimit = _immediateQueueSizeLimit;
  }
@@ -258,11 +256,11 @@ var PostChannel = /** @class */ (function (_super) {
  eventDropped = !_addEventToProperQueue(event, append);
  }
  else {
- var dropLatency = 1 /* Normal */;
+ var dropLatency = 1 /* EventLatencyValue.Normal */;
  var dropNumber = EventsDroppedAtOneTime;
- if (evtLatency === 4 /* Immediate */) {
+ if (evtLatency === 4 /* EventLatencyValue.Immediate */) {
  // Only drop other immediate events as they are not technically sharing the general queue
- dropLatency = 4 /* Immediate */;
+ dropLatency = 4 /* EventLatencyValue.Immediate */;
  dropNumber = 1;
  }
  // Drop old event from lower or equal latency
@@ -284,7 +282,7 @@ var PostChannel = /** @class */ (function (_super) {
  var doFlush = _queueSize > eventLimit;
  if (!doFlush && _autoFlushBatchLimit > 0) {
  // Check the auto flush max batch size
- for (var latency = 1 /* Normal */; !doFlush && latency <= 3 /* RealTime */; latency++) {
+ for (var latency = 1 /* EventLatencyValue.Normal */; !doFlush && latency <= 3 /* EventLatencyValue.RealTime */; latency++) {
  var batchQueue = _batchQueues[latency];
  if (batchQueue && batchQueue.batches) {
  arrForEach(batchQueue.batches, function (theBatch) {
@@ -341,16 +339,15 @@ var PostChannel = /** @class */ (function (_super) {
  _self.flush = function (async, callback, sendReason) {
  if (async === void 0) { async = true; }
  if (!_paused) {
- // Clear the normal schedule timer as we are going to try and flush ASAP
- _clearScheduledTimer();
- sendReason = sendReason || 1 /* ManualFlush */;
+ sendReason = sendReason || 1 /* SendRequestReason.ManualFlush */;
  if (async) {
- // Move all queued events to the HttpManager
- _queueBatches(1 /* Normal */, 0 /* Batched */, sendReason);
- // All events (should) have been queue -- lets just make sure the queue counts are correct to avoid queue exhaustion (previous bug #9685112)
- _resetQueueCounts();
  if (_flushCallbackTimerId == null) {
+ // Clear the normal schedule timer as we are going to try and flush ASAP
+ _clearScheduledTimer();
+ // Move all queued events to the HttpManager so that we don't discard new events (Auto flush scenario)
+ _queueBatches(1 /* EventLatencyValue.Normal */, 0 /* EventSendType.Batched */, sendReason);
  _flushCallbackTimerId = _createTimer(function () {
+ _flushCallbackTimerId = null;
  _flushImpl(callback, sendReason);
  }, 0);
  }
@@ -361,11 +358,17 @@
  }
  }
  else {
+ // Clear the normal schedule timer as we are going to try and flush ASAP
+ var cleared = _clearScheduledTimer();
  // Now cause all queued events to be sent synchronously
- _sendEventsForLatencyAndAbove(1 /* Normal */, 1 /* Synchronous */, sendReason);
+ _sendEventsForLatencyAndAbove(1 /* EventLatencyValue.Normal */, 1 /* EventSendType.Synchronous */, sendReason);
  if (callback !== null && callback !== undefined) {
  callback();
  }
+ if (cleared) {
+ // restart the normal event timer if it was cleared
+ _scheduleTimer();
+ }
  }
  }
  };
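
The flush changes above tighten the timer handling: the schedule timer is now cleared only when a flush is actually started, the pending flush timer id is reset inside its own callback, and a synchronous flush restarts the schedule timer only if it cancelled an active one (via the new boolean returned by _clearScheduledTimer). A simplified, self-contained TypeScript sketch of that pattern, using illustrative names rather than the SDK's internals:

// Sketch: a channel that batches work on a schedule timer and supports async/sync flush.
type Callback = () => void;

class TimerFlushSketch {
    private scheduledTimer: ReturnType<typeof setTimeout> | null = null;
    private flushTimer: ReturnType<typeof setTimeout> | null = null;

    // Returns true when an active schedule timer was cancelled.
    private clearScheduledTimer(): boolean {
        if (this.scheduledTimer !== null) {
            clearTimeout(this.scheduledTimer);
            this.scheduledTimer = null;
            return true;
        }
        return false;
    }

    private scheduleTimer(): void {
        if (this.scheduledTimer === null) {
            this.scheduledTimer = setTimeout(() => {
                this.scheduledTimer = null;
                this.sendQueuedWork();
            }, 1000);
        }
    }

    private sendQueuedWork(): void {
        // Placeholder for handing batches to a transport.
    }

    flush(isAsync: boolean = true, callback?: Callback): void {
        if (isAsync) {
            // Only clear the schedule timer when this call actually starts a flush.
            if (this.flushTimer === null) {
                this.clearScheduledTimer();
                this.flushTimer = setTimeout(() => {
                    this.flushTimer = null;   // reset inside the callback so later flushes are not blocked
                    this.sendQueuedWork();
                    callback && callback();
                    this.scheduleTimer();     // resume the normal cadence
                }, 0);
            }
        } else {
            const cleared = this.clearScheduledTimer();
            this.sendQueuedWork();            // synchronous path
            callback && callback();
            if (cleared) {
                this.scheduleTimer();         // restart only if an active timer was cancelled
            }
        }
    }
}
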
@@ -401,8 +404,8 @@ var PostChannel = /** @class */ (function (_super) {
  function _scheduleTimer() {
  // If we had previously attempted to send requests, but the http manager didn't have any idle connections then the requests where delayed
  // so try and requeue then again now
- if (_delayedBatchSendLatency >= 0 && _queueBatches(_delayedBatchSendLatency, 0 /* Batched */, _delayedBatchReason)) {
- _httpManager.sendQueuedRequests(0 /* Batched */, _delayedBatchReason);
+ if (_delayedBatchSendLatency >= 0 && _queueBatches(_delayedBatchSendLatency, 0 /* EventSendType.Batched */, _delayedBatchReason)) {
+ _httpManager.sendQueuedRequests(0 /* EventSendType.Batched */, _delayedBatchReason);
  }
  if (_immediateQueueSize > 0 && !_immediateTimerId && !_paused) {
  // During initialization _profiles enforce that the direct [2] is less than real time [1] timer value
@@ -412,7 +415,7 @@ var PostChannel = /** @class */ (function (_super) {
  _immediateTimerId = _createTimer(function () {
  _immediateTimerId = null;
  // Only try to send direct events
- _sendEventsForLatencyAndAbove(4 /* Immediate */, 0 /* Batched */, 1 /* NormalSchedule */);
+ _sendEventsForLatencyAndAbove(4 /* EventLatencyValue.Immediate */, 0 /* EventSendType.Batched */, 1 /* SendRequestReason.NormalSchedule */);
  _scheduleTimer();
  }, immediateTimeOut);
  }
@@ -423,7 +426,7 @@ var PostChannel = /** @class */ (function (_super) {
  if (_hasEvents()) {
  _scheduledTimerId = _createTimer(function () {
  _scheduledTimerId = null;
- _sendEventsForLatencyAndAbove(_timerCount === 0 ? 3 /* RealTime */ : 1 /* Normal */, 0 /* Batched */, 1 /* NormalSchedule */);
+ _sendEventsForLatencyAndAbove(_timerCount === 0 ? 3 /* EventLatencyValue.RealTime */ : 1 /* EventLatencyValue.Normal */, 0 /* EventSendType.Batched */, 1 /* SendRequestReason.NormalSchedule */);
  // Increment the count for next cycle
  _timerCount++;
  _timerCount %= 2;
@@ -474,7 +477,6 @@ var PostChannel = /** @class */ (function (_super) {
  _delayedBatchReason = null;
  _optimizeObject = true;
  _isPageUnloadTriggered = false;
- _disableXhrSync = false;
  _maxEventSendAttempts = MaxSendAttempts;
  _maxUnloadEventSendAttempts = MaxSyncUnloadSendAttempts;
  _evtNamespace = null;
@@ -507,7 +509,9 @@ var PostChannel = /** @class */ (function (_super) {
  _timeoutWrapper.clear(_scheduledTimerId);
  _scheduledTimerId = null;
  _timerCount = 0;
+ return true;
  }
+ return false;
  }
  // Try to send all queued events using beacons if available
  function _releaseAllQueues(sendType, sendReason) {
@@ -519,7 +523,7 @@ var PostChannel = /** @class */ (function (_super) {
  }
  if (!_paused) {
  // Queue all the remaining requests to be sent. The requests will be sent using HTML5 Beacons if they are available.
- _sendEventsForLatencyAndAbove(1 /* Normal */, sendType, sendReason);
+ _sendEventsForLatencyAndAbove(1 /* EventLatencyValue.Normal */, sendType, sendReason);
  }
  }
  /**
@@ -528,19 +532,19 @@ var PostChannel = /** @class */ (function (_super) {
  * after flush are stored separately till we flush the current events.
  */
  function _clearQueues() {
- _batchQueues[4 /* Immediate */] = {
+ _batchQueues[4 /* EventLatencyValue.Immediate */] = {
  batches: [],
  iKeyMap: {}
  };
- _batchQueues[3 /* RealTime */] = {
+ _batchQueues[3 /* EventLatencyValue.RealTime */] = {
  batches: [],
  iKeyMap: {}
  };
- _batchQueues[2 /* CostDeferred */] = {
+ _batchQueues[2 /* EventLatencyValue.CostDeferred */] = {
  batches: [],
  iKeyMap: {}
  };
- _batchQueues[1 /* Normal */] = {
+ _batchQueues[1 /* EventLatencyValue.Normal */] = {
  batches: [],
  iKeyMap: {}
  };
@@ -548,7 +552,7 @@ var PostChannel = /** @class */ (function (_super) {
  function _getEventBatch(iKey, latency, create) {
  var batchQueue = _batchQueues[latency];
  if (!batchQueue) {
- latency = 1 /* Normal */;
+ latency = 1 /* EventLatencyValue.Normal */;
  batchQueue = _batchQueues[latency];
  }
  var eventBatch = batchQueue.iKeyMap[iKey];
@@ -568,7 +572,7 @@ var PostChannel = /** @class */ (function (_super) {
  }
  if (doFlush && _flushCallbackTimerId == null) {
  // Auto flush the queue
- _self.flush(isAsync, null, 20 /* MaxQueuedEvents */);
+ _self.flush(isAsync, null, 20 /* SendRequestReason.MaxQueuedEvents */);
  }
  }
  }
@@ -580,7 +584,7 @@ var PostChannel = /** @class */ (function (_super) {
  var latency = event.latency;
  var eventBatch = _getEventBatch(event.iKey, latency, true);
  if (eventBatch.addEvent(event)) {
- if (latency !== 4 /* Immediate */) {
+ if (latency !== 4 /* EventLatencyValue.Immediate */) {
  _queueSize++;
  // Check for auto flushing based on total events in the queue, but not for requeued or retry events
  if (append && event.sendAttempt === 0) {
@@ -604,7 +608,7 @@ var PostChannel = /** @class */ (function (_super) {
  var droppedEvents = eventBatch.split(0, dropNumber);
  var droppedCount = droppedEvents.count();
  if (droppedCount > 0) {
- if (currentLatency === 4 /* Immediate */) {
+ if (currentLatency === 4 /* EventLatencyValue.Immediate */) {
  _immediateQueueSize -= droppedCount;
  }
  else {
@@ -631,7 +635,7 @@ var PostChannel = /** @class */ (function (_super) {
  var batchQueue = _batchQueues[latency];
  if (batchQueue && batchQueue.batches) {
  arrForEach(batchQueue.batches, function (theBatch) {
- if (latency === 4 /* Immediate */) {
+ if (latency === 4 /* EventLatencyValue.Immediate */) {
  immediateQueue += theBatch.count();
  }
  else {
@@ -640,7 +644,7 @@ var PostChannel = /** @class */ (function (_super) {
  });
  }
  };
- for (var latency = 1 /* Normal */; latency <= 4 /* Immediate */; latency++) {
+ for (var latency = 1 /* EventLatencyValue.Normal */; latency <= 4 /* EventLatencyValue.Immediate */; latency++) {
  _loop_1(latency);
  }
  _queueSize = normalQueue;
@@ -648,14 +652,14 @@ var PostChannel = /** @class */ (function (_super) {
  }
  function _queueBatches(latency, sendType, sendReason) {
  var eventsQueued = false;
- var isAsync = sendType === 0 /* Batched */;
+ var isAsync = sendType === 0 /* EventSendType.Batched */;
  // Only queue batches (to the HttpManager) if this is a sync request or the httpManager has an idle connection
  // Thus keeping the events within the PostChannel until the HttpManager has a connection available
  // This is so we can drop "old" events if the queue is getting full because we can't successfully send events
  if (!isAsync || _httpManager.canSendRequest()) {
  doPerf(_self.core, function () { return "PostChannel._queueBatches"; }, function () {
  var droppedEvents = [];
- var latencyToProcess = 4 /* Immediate */;
+ var latencyToProcess = 4 /* EventLatencyValue.Immediate */;
  while (latencyToProcess >= latency) {
  var batchQueue = _batchQueues[latencyToProcess];
  if (batchQueue && batchQueue.batches && batchQueue.batches.length > 0) {
@@ -668,7 +672,7 @@ var PostChannel = /** @class */ (function (_super) {
  else {
  eventsQueued = eventsQueued || (theBatch && theBatch.count() > 0);
  }
- if (latencyToProcess === 4 /* Immediate */) {
+ if (latencyToProcess === 4 /* EventLatencyValue.Immediate */) {
  _immediateQueueSize -= theBatch.count();
  }
  else {
@@ -687,7 +691,7 @@ var PostChannel = /** @class */ (function (_super) {
  if (eventsQueued && _delayedBatchSendLatency >= latency) {
  // We have queued events at the same level as the delayed values so clear the setting
  _delayedBatchSendLatency = -1;
- _delayedBatchReason = 0 /* Undefined */;
+ _delayedBatchReason = 0 /* SendRequestReason.Undefined */;
  }
  }, function () { return ({ latency: latency, sendType: sendType, sendReason: sendReason }); }, !isAsync);
  }
@@ -705,22 +709,25 @@ var PostChannel = /** @class */ (function (_super) {
  */
  function _flushImpl(callback, sendReason) {
  // Add any additional queued events and cause all queued events to be sent asynchronously
- _sendEventsForLatencyAndAbove(1 /* Normal */, 0 /* Batched */, sendReason);
+ _sendEventsForLatencyAndAbove(1 /* EventLatencyValue.Normal */, 0 /* EventSendType.Batched */, sendReason);
+ // All events (should) have been queue -- lets just make sure the queue counts are correct to avoid queue exhaustion (previous bug #9685112)
+ _resetQueueCounts();
  _waitForIdleManager(function () {
  // Only called AFTER the httpManager does not have any outstanding requests
  if (callback) {
  callback();
  }
  if (_flushCallbackQueue.length > 0) {
- _flushCallbackTimerId = _createTimer(function () { return _flushImpl(_flushCallbackQueue.shift(), sendReason); }, 0);
+ _flushCallbackTimerId = _createTimer(function () {
+ _flushCallbackTimerId = null;
+ _flushImpl(_flushCallbackQueue.shift(), sendReason);
+ }, 0);
  }
  else {
  // No more flush requests
  _flushCallbackTimerId = null;
- if (_hasEvents()) {
- // We still have events, so restart the normal timer schedule
- _scheduleTimer();
- }
+ // Restart the normal timer schedule
+ _scheduleTimer();
  }
  });
  }
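
_flushImpl now resets the queue counts after queuing, clears _flushCallbackTimerId inside each zero-delay timer callback before chaining the next queued flush, and always restarts the schedule timer once no flush callbacks remain. A minimal sketch of that drain-one-callback-per-tick pattern, with assumed names in place of the SDK's internals:

// Sketch: drain a queue of flush callbacks one per macrotask, tracking the active timer id.
type FlushCallback = (() => void) | undefined;

const flushCallbackQueue: FlushCallback[] = [];
let flushTimerId: ReturnType<typeof setTimeout> | null = null;

function flushImpl(callback: FlushCallback): void {
    // ... hand queued batches to the transport here ...
    if (callback) {
        callback();
    }
    if (flushCallbackQueue.length > 0) {
        // Chain the next pending flush on a fresh zero-delay timer.
        flushTimerId = setTimeout(() => {
            flushTimerId = null;                 // clear before re-entering so the guard stays accurate
            flushImpl(flushCallbackQueue.shift());
        }, 0);
    } else {
        flushTimerId = null;                     // no more pending flushes
        // ... restart the normal schedule timer here ...
    }
}
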
@@ -730,6 +737,7 @@ var PostChannel = /** @class */ (function (_super) {
  }
  else {
  _flushCallbackTimerId = _createTimer(function () {
+ _flushCallbackTimerId = null;
  _waitForIdleManager(callback);
  }, FlushCheckTimer);
  }
@@ -767,7 +775,7 @@ var PostChannel = /** @class */ (function (_super) {
  if (theEvent) {
  // Check if the request being added back is for a sync event in which case mark it no longer a sync event
  if (theEvent.sync) {
- theEvent.latency = 4 /* Immediate */;
+ theEvent.latency = 4 /* EventLatencyValue.Immediate */;
  theEvent.sync = false;
  }
  if (theEvent.sendAttempt < maxSendAttempts) {
@@ -787,7 +795,7 @@ var PostChannel = /** @class */ (function (_super) {
  }
  if (_isPageUnloadTriggered) {
  // Unload event has been received so we need to try and flush new events
- _releaseAllQueues(2 /* SendBeacon */, 2 /* Unload */);
+ _releaseAllQueues(2 /* EventSendType.SendBeacon */, 2 /* SendRequestReason.Unload */);
  }
  }
  function _callNotification(evtName, theArgs) {
@@ -798,7 +806,7 @@ var PostChannel = /** @class */ (function (_super) {
  notifyFunc.apply(manager, theArgs);
  }
  catch (e) {
- _throwInternal(_self.diagLog(), 1 /* CRITICAL */, 74 /* NotificationException */, evtName + " notification failed: " + e);
+ _throwInternal(_self.diagLog(), 1 /* eLoggingSeverity.CRITICAL */, 74 /* _eInternalMessageId.NotificationException */, evtName + " notification failed: " + e);
  }
  }
@@ -830,9 +838,9 @@ var PostChannel = /** @class */ (function (_super) {
  */
  function _sendingEvent(batches, reason, isSyncRequest) {
  if (batches && batches.length > 0) {
- _callNotification("eventsSendRequest", [(reason >= 1000 /* SendingUndefined */ && reason <= 1999 /* SendingEventMax */ ?
- reason - 1000 /* SendingUndefined */ :
- 0 /* Undefined */), isSyncRequest !== true]);
+ _callNotification("eventsSendRequest", [(reason >= 1000 /* EventBatchNotificationReason.SendingUndefined */ && reason <= 1999 /* EventBatchNotificationReason.SendingEventMax */ ?
+ reason - 1000 /* EventBatchNotificationReason.SendingUndefined */ :
+ 0 /* SendRequestReason.Undefined */), isSyncRequest !== true]);
  }
  }
  /**
@@ -846,8 +854,8 @@ var PostChannel = /** @class */ (function (_super) {
  _scheduleTimer();
  }
  function _eventsDropped(batches, reason) {
- _notifyBatchEvents(strEventsDiscarded, batches, (reason >= 8000 /* EventsDropped */ && reason <= 8999 /* EventsDroppedMax */ ?
- reason - 8000 /* EventsDropped */ :
+ _notifyBatchEvents(strEventsDiscarded, batches, (reason >= 8000 /* EventBatchNotificationReason.EventsDropped */ && reason <= 8999 /* EventBatchNotificationReason.EventsDroppedMax */ ?
+ reason - 8000 /* EventBatchNotificationReason.EventsDropped */ :
  EventsDiscardedReason.Unknown));
  }
  function _eventsResponseFail(batches) {
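
The two notification helpers above recover the underlying reason from a ranged notification code: eventsSendRequest subtracts the 1000 base of the sending range, and eventsDiscarded subtracts the 8000 base of the dropped range, falling back to a default when the code is outside the range. A small self-contained sketch of that offset encoding (the enum names mirror the inlined comments in the diff; values other than the range bounds shown there are illustrative):

// Sketch of the offset-based reason mapping used by the batch notifications above.
const enum EventBatchNotificationReason {
    SendingUndefined = 1000,
    SendingEventMax = 1999,
    EventsDropped = 8000,
    EventsDroppedMax = 8999
}

const enum SendRequestReason {
    Undefined = 0
}

// Reasons inside a range encode their detail as an offset from the range base.
function toSendReason(reason: number): number {
    return reason >= EventBatchNotificationReason.SendingUndefined && reason <= EventBatchNotificationReason.SendingEventMax
        ? reason - EventBatchNotificationReason.SendingUndefined
        : SendRequestReason.Undefined;
}

function toDropReason(reason: number, unknown: number): number {
    return reason >= EventBatchNotificationReason.EventsDropped && reason <= EventBatchNotificationReason.EventsDroppedMax
        ? reason - EventBatchNotificationReason.EventsDropped
        : unknown;
}

console.log(toSendReason(1002));     // 2 (matches SendRequestReason.Unload in the hunks above)
console.log(toDropReason(8001, -1)); // 1
console.log(toDropReason(42, -1));   // -1 (outside the dropped range, use the fallback)
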