hls.js 1.5.3 → 1.5.5-0.canary.9977

This diff shows the changes between two publicly released package versions as they appear in their public registries. It is provided for informational purposes only.
Files changed (68)
  1. package/README.md +1 -0
  2. package/dist/hls-demo.js +10 -0
  3. package/dist/hls-demo.js.map +1 -1
  4. package/dist/hls.js +1954 -1103
  5. package/dist/hls.js.d.ts +63 -50
  6. package/dist/hls.js.map +1 -1
  7. package/dist/hls.light.js +1631 -784
  8. package/dist/hls.light.js.map +1 -1
  9. package/dist/hls.light.min.js +1 -1
  10. package/dist/hls.light.min.js.map +1 -1
  11. package/dist/hls.light.mjs +1428 -590
  12. package/dist/hls.light.mjs.map +1 -1
  13. package/dist/hls.min.js +1 -1
  14. package/dist/hls.min.js.map +1 -1
  15. package/dist/hls.mjs +1703 -866
  16. package/dist/hls.mjs.map +1 -1
  17. package/dist/hls.worker.js +1 -1
  18. package/dist/hls.worker.js.map +1 -1
  19. package/package.json +18 -18
  20. package/src/config.ts +3 -2
  21. package/src/controller/abr-controller.ts +24 -22
  22. package/src/controller/audio-stream-controller.ts +16 -17
  23. package/src/controller/audio-track-controller.ts +1 -1
  24. package/src/controller/base-playlist-controller.ts +7 -7
  25. package/src/controller/base-stream-controller.ts +56 -29
  26. package/src/controller/buffer-controller.ts +11 -11
  27. package/src/controller/cap-level-controller.ts +1 -2
  28. package/src/controller/cmcd-controller.ts +25 -3
  29. package/src/controller/content-steering-controller.ts +8 -6
  30. package/src/controller/eme-controller.ts +9 -22
  31. package/src/controller/error-controller.ts +6 -8
  32. package/src/controller/fps-controller.ts +2 -3
  33. package/src/controller/gap-controller.ts +43 -16
  34. package/src/controller/latency-controller.ts +9 -11
  35. package/src/controller/level-controller.ts +5 -17
  36. package/src/controller/stream-controller.ts +27 -33
  37. package/src/controller/subtitle-stream-controller.ts +14 -15
  38. package/src/controller/subtitle-track-controller.ts +5 -3
  39. package/src/controller/timeline-controller.ts +23 -30
  40. package/src/crypt/aes-crypto.ts +21 -2
  41. package/src/crypt/decrypter-aes-mode.ts +4 -0
  42. package/src/crypt/decrypter.ts +32 -18
  43. package/src/crypt/fast-aes-key.ts +24 -5
  44. package/src/demux/audio/adts.ts +9 -4
  45. package/src/demux/sample-aes.ts +2 -0
  46. package/src/demux/transmuxer-interface.ts +4 -12
  47. package/src/demux/transmuxer-worker.ts +4 -4
  48. package/src/demux/transmuxer.ts +16 -3
  49. package/src/demux/tsdemuxer.ts +63 -37
  50. package/src/demux/video/avc-video-parser.ts +208 -119
  51. package/src/demux/video/base-video-parser.ts +134 -2
  52. package/src/demux/video/exp-golomb.ts +0 -208
  53. package/src/demux/video/hevc-video-parser.ts +746 -0
  54. package/src/events.ts +7 -0
  55. package/src/hls.ts +42 -34
  56. package/src/loader/fragment-loader.ts +9 -2
  57. package/src/loader/key-loader.ts +2 -0
  58. package/src/loader/level-key.ts +10 -9
  59. package/src/remux/mp4-generator.ts +196 -1
  60. package/src/remux/mp4-remuxer.ts +23 -7
  61. package/src/task-loop.ts +5 -2
  62. package/src/types/component-api.ts +2 -0
  63. package/src/types/demuxer.ts +3 -0
  64. package/src/types/events.ts +4 -0
  65. package/src/utils/codecs.ts +33 -4
  66. package/src/utils/encryption-methods-util.ts +21 -0
  67. package/src/utils/logger.ts +53 -24
  68. package/src/utils/mp4-tools.ts +28 -9
package/dist/hls.mjs CHANGED
@@ -256,6 +256,7 @@ let Events = /*#__PURE__*/function (Events) {
  Events["MEDIA_ATTACHED"] = "hlsMediaAttached";
  Events["MEDIA_DETACHING"] = "hlsMediaDetaching";
  Events["MEDIA_DETACHED"] = "hlsMediaDetached";
+ Events["MEDIA_ENDED"] = "hlsMediaEnded";
  Events["BUFFER_RESET"] = "hlsBufferReset";
  Events["BUFFER_CODECS"] = "hlsBufferCodecs";
  Events["BUFFER_CREATED"] = "hlsBufferCreated";
@@ -369,6 +370,23 @@ let ErrorDetails = /*#__PURE__*/function (ErrorDetails) {
  return ErrorDetails;
  }({});

+ class Logger {
+ constructor(label, logger) {
+ this.trace = void 0;
+ this.debug = void 0;
+ this.log = void 0;
+ this.warn = void 0;
+ this.info = void 0;
+ this.error = void 0;
+ const lb = `[${label}]:`;
+ this.trace = noop;
+ this.debug = logger.debug.bind(null, lb);
+ this.log = logger.log.bind(null, lb);
+ this.warn = logger.warn.bind(null, lb);
+ this.info = logger.info.bind(null, lb);
+ this.error = logger.error.bind(null, lb);
+ }
+ }
  const noop = function noop() {};
  const fakeLogger = {
  trace: noop,
@@ -378,7 +396,9 @@ const fakeLogger = {
  info: noop,
  error: noop
  };
- let exportedLogger = fakeLogger;
+ function createLogger() {
+ return _extends({}, fakeLogger);
+ }

  // let lastCallTime;
  // function formatMsgWithTimeInfo(type, msg) {
@@ -389,35 +409,36 @@ let exportedLogger = fakeLogger;
  // return msg;
  // }

- function consolePrintFn(type) {
+ function consolePrintFn(type, id) {
  const func = self.console[type];
- if (func) {
- return func.bind(self.console, `[${type}] >`);
- }
- return noop;
+ return func ? func.bind(self.console, `${id ? '[' + id + '] ' : ''}[${type}] >`) : noop;
  }
- function exportLoggerFunctions(debugConfig, ...functions) {
- functions.forEach(function (type) {
- exportedLogger[type] = debugConfig[type] ? debugConfig[type].bind(debugConfig) : consolePrintFn(type);
- });
+ function getLoggerFn(key, debugConfig, id) {
+ return debugConfig[key] ? debugConfig[key].bind(debugConfig) : consolePrintFn(key, id);
  }
- function enableLogs(debugConfig, id) {
+ let exportedLogger = createLogger();
+ function enableLogs(debugConfig, context, id) {
  // check that console is available
+ const newLogger = createLogger();
  if (typeof console === 'object' && debugConfig === true || typeof debugConfig === 'object') {
- exportLoggerFunctions(debugConfig,
+ const keys = [
  // Remove out from list here to hard-disable a log-level
  // 'trace',
- 'debug', 'log', 'info', 'warn', 'error');
+ 'debug', 'log', 'info', 'warn', 'error'];
+ keys.forEach(key => {
+ newLogger[key] = getLoggerFn(key, debugConfig, id);
+ });
  // Some browsers don't allow to use bind on console object anyway
  // fallback to default if needed
  try {
- exportedLogger.log(`Debug logs enabled for "${id}" in hls.js version ${"1.5.3"}`);
+ newLogger.log(`Debug logs enabled for "${context}" in hls.js version ${"1.5.5-0.canary.9977"}`);
  } catch (e) {
- exportedLogger = fakeLogger;
+ /* log fn threw an exception. All logger methods are no-ops. */
+ return createLogger();
  }
- } else {
- exportedLogger = fakeLogger;
  }
+ exportedLogger = newLogger;
+ return newLogger;
  }
  const logger = exportedLogger;

@@ -1036,6 +1057,26 @@ function strToUtf8array(str) {
  return Uint8Array.from(unescape(encodeURIComponent(str)), c => c.charCodeAt(0));
  }

+ var DecrypterAesMode = {
+ cbc: 0,
+ ctr: 1
+ };
+
+ function isFullSegmentEncryption(method) {
+ return method === 'AES-128' || method === 'AES-256' || method === 'AES-256-CTR';
+ }
+ function getAesModeFromFullSegmentMethod(method) {
+ switch (method) {
+ case 'AES-128':
+ case 'AES-256':
+ return DecrypterAesMode.cbc;
+ case 'AES-256-CTR':
+ return DecrypterAesMode.ctr;
+ default:
+ throw new Error(`invalid full segment method ${method}`);
+ }
+ }
+
  /** returns `undefined` is `self` is missing, e.g. in node */
  const optionalSelf = typeof self !== 'undefined' ? self : undefined;

@@ -1550,6 +1591,12 @@ function readUint32(buffer, offset) {
  const val = readSint32(buffer, offset);
  return val < 0 ? 4294967296 + val : val;
  }
+ function readUint64(buffer, offset) {
+ let result = readUint32(buffer, offset);
+ result *= Math.pow(2, 32);
+ result += readUint32(buffer, offset + 4);
+ return result;
+ }
  function readSint32(buffer, offset) {
  return buffer[offset] << 24 | buffer[offset + 1] << 16 | buffer[offset + 2] << 8 | buffer[offset + 3];
  }
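
readUint64 composes two big-endian 32-bit reads because a JavaScript number cannot hold a full 64-bit integer; the result is exact only up to Number.MAX_SAFE_INTEGER (2^53 - 1), which is ample for sidx timestamps. A standalone sketch of the same idea using DataView:

```ts
// Sketch: read a big-endian unsigned 64-bit value as a JS number.
// Exact only while the value fits in 53 bits - enough for typical
// earliestPresentationTime / firstOffset values in a sidx box.
function readUint64BE(bytes: Uint8Array, offset: number): number {
  const view = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength);
  const hi = view.getUint32(offset);     // high 32 bits (big-endian by default)
  const lo = view.getUint32(offset + 4); // low 32 bits
  return hi * 2 ** 32 + lo;
}
```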
@@ -1612,15 +1659,14 @@ function parseSegmentIndex(sidx) {
  let index = 8;
  const timescale = readUint32(sidx, index);
  index += 4;
-
- // TODO: parse earliestPresentationTime and firstOffset
- // usually zero in our case
- const earliestPresentationTime = 0;
- const firstOffset = 0;
+ let earliestPresentationTime = 0;
+ let firstOffset = 0;
  if (version === 0) {
- index += 8;
+ earliestPresentationTime = readUint32(sidx, index += 4);
+ firstOffset = readUint32(sidx, index += 4);
  } else {
- index += 16;
+ earliestPresentationTime = readUint64(sidx, index += 8);
+ firstOffset = readUint64(sidx, index += 8);
  }

  // skip reserved
@@ -2068,15 +2114,22 @@ function getDuration(data, initData) {
  }
  if (videoDuration === 0 && audioDuration === 0) {
  // If duration samples are not available in the traf use sidx subsegment_duration
+ let sidxMinStart = Infinity;
+ let sidxMaxEnd = 0;
  let sidxDuration = 0;
  const sidxs = findBox(data, ['sidx']);
  for (let i = 0; i < sidxs.length; i++) {
  const sidx = parseSegmentIndex(sidxs[i]);
  if (sidx != null && sidx.references) {
- sidxDuration += sidx.references.reduce((dur, ref) => dur + ref.info.duration || 0, 0);
+ sidxMinStart = Math.min(sidxMinStart, sidx.earliestPresentationTime / sidx.timescale);
+ const subSegmentDuration = sidx.references.reduce((dur, ref) => dur + ref.info.duration || 0, 0);
+ sidxMaxEnd = Math.max(sidxMaxEnd, subSegmentDuration + sidx.earliestPresentationTime / sidx.timescale);
+ sidxDuration = sidxMaxEnd - sidxMinStart;
  }
  }
- return sidxDuration;
+ if (sidxDuration && isFiniteNumber(sidxDuration)) {
+ return sidxDuration;
+ }
  }
  if (videoDuration) {
  return videoDuration;
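
The duration derived from multiple sidx boxes now spans from the earliest presentation time to the latest subsegment end instead of summing reference durations. A numeric sketch of that computation (the box values are made up):

```ts
// Sketch: duration across two sidx boxes with a gap between them.
const sidxs = [
  { earliestPresentationTime: 90000, timescale: 90000, subSegmentDuration: 6 },  // starts at 1 s, 6 s of references
  { earliestPresentationTime: 720000, timescale: 90000, subSegmentDuration: 4 }, // starts at 8 s, 4 s of references
];
let minStart = Infinity;
let maxEnd = 0;
for (const s of sidxs) {
  const start = s.earliestPresentationTime / s.timescale;
  minStart = Math.min(minStart, start);
  maxEnd = Math.max(maxEnd, start + s.subSegmentDuration);
}
console.log(maxEnd - minStart); // 11 - summing reference durations would have reported 10
```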
@@ -2674,12 +2727,12 @@ class LevelKey {
  this.keyFormatVersions = formatversions;
  this.iv = iv;
  this.encrypted = method ? method !== 'NONE' : false;
- this.isCommonEncryption = this.encrypted && method !== 'AES-128';
+ this.isCommonEncryption = this.encrypted && !isFullSegmentEncryption(method);
  }
  isSupported() {
  // If it's Segment encryption or No encryption, just select that key system
  if (this.method) {
- if (this.method === 'AES-128' || this.method === 'NONE') {
+ if (isFullSegmentEncryption(this.method) || this.method === 'NONE') {
  return true;
  }
  if (this.keyFormat === 'identity') {
@@ -2701,14 +2754,13 @@ class LevelKey {
  if (!this.encrypted || !this.uri) {
  return null;
  }
- if (this.method === 'AES-128' && this.uri && !this.iv) {
+ if (isFullSegmentEncryption(this.method) && this.uri && !this.iv) {
  if (typeof sn !== 'number') {
  // We are fetching decryption data for a initialization segment
- // If the segment was encrypted with AES-128
+ // If the segment was encrypted with AES-128/256
  // It must have an IV defined. We cannot substitute the Segment Number in.
- if (this.method === 'AES-128' && !this.iv) {
- logger.warn(`missing IV for initialization segment with method="${this.method}" - compliance issue`);
- }
+ logger.warn(`missing IV for initialization segment with method="${this.method}" - compliance issue`);
+
  // Explicitly set sn to resulting value from implicit conversions 'initSegment' values for IV generation.
  sn = 0;
  }
@@ -2987,23 +3039,28 @@ function getCodecCompatibleNameLower(lowerCaseCodec, preferManagedMediaSource =
  if (CODEC_COMPATIBLE_NAMES[lowerCaseCodec]) {
  return CODEC_COMPATIBLE_NAMES[lowerCaseCodec];
  }
-
- // Idealy fLaC and Opus would be first (spec-compliant) but
- // some browsers will report that fLaC is supported then fail.
- // see: https://bugs.chromium.org/p/chromium/issues/detail?id=1422728
  const codecsToCheck = {
+ // Idealy fLaC and Opus would be first (spec-compliant) but
+ // some browsers will report that fLaC is supported then fail.
+ // see: https://bugs.chromium.org/p/chromium/issues/detail?id=1422728
  flac: ['flac', 'fLaC', 'FLAC'],
- opus: ['opus', 'Opus']
+ opus: ['opus', 'Opus'],
+ // Replace audio codec info if browser does not support mp4a.40.34,
+ // and demuxer can fallback to 'audio/mpeg' or 'audio/mp4;codecs="mp3"'
+ 'mp4a.40.34': ['mp3']
  }[lowerCaseCodec];
  for (let i = 0; i < codecsToCheck.length; i++) {
+ var _getMediaSource;
  if (isCodecMediaSourceSupported(codecsToCheck[i], 'audio', preferManagedMediaSource)) {
  CODEC_COMPATIBLE_NAMES[lowerCaseCodec] = codecsToCheck[i];
  return codecsToCheck[i];
+ } else if (codecsToCheck[i] === 'mp3' && (_getMediaSource = getMediaSource(preferManagedMediaSource)) != null && _getMediaSource.isTypeSupported('audio/mpeg')) {
+ return '';
  }
  }
  return lowerCaseCodec;
  }
- const AUDIO_CODEC_REGEXP = /flac|opus/i;
+ const AUDIO_CODEC_REGEXP = /flac|opus|mp4a\.40\.34/i;
  function getCodecCompatibleName(codec, preferManagedMediaSource = true) {
  return codec.replace(AUDIO_CODEC_REGEXP, m => getCodecCompatibleNameLower(m.toLowerCase(), preferManagedMediaSource));
  }
@@ -3026,6 +3083,16 @@ function convertAVC1ToAVCOTI(codec) {
  }
  return codec;
  }
+ function getM2TSSupportedAudioTypes(preferManagedMediaSource) {
+ const MediaSource = getMediaSource(preferManagedMediaSource) || {
+ isTypeSupported: () => false
+ };
+ return {
+ mpeg: MediaSource.isTypeSupported('audio/mpeg'),
+ mp3: MediaSource.isTypeSupported('audio/mp4; codecs="mp3"'),
+ ac3: MediaSource.isTypeSupported('audio/mp4; codecs="ac-3"')
+ };
+ }

  const MASTER_PLAYLIST_REGEX = /#EXT-X-STREAM-INF:([^\r\n]*)(?:[\r\n](?:#[^\r\n]*)?)*([^\r\n]+)|#EXT-X-(SESSION-DATA|SESSION-KEY|DEFINE|CONTENT-STEERING|START):([^\r\n]*)[\r\n]+/g;
  const MASTER_PLAYLIST_MEDIA_REGEX = /#EXT-X-MEDIA:(.*)/g;
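
getM2TSSupportedAudioTypes simply probes MediaSource.isTypeSupported for the MPEG-TS audio fallbacks. The same probe can be run standalone; ManagedMediaSource is used here only when the browser provides it:

```ts
// Sketch: check which TS audio fallbacks the current browser can play via MSE.
// Prefers ManagedMediaSource when available, otherwise falls back to MediaSource.
const MS: typeof MediaSource | undefined =
  (self as any).ManagedMediaSource || self.MediaSource;

const tsAudioSupport = {
  mpeg: MS?.isTypeSupported('audio/mpeg') ?? false,
  mp3: MS?.isTypeSupported('audio/mp4; codecs="mp3"') ?? false,
  ac3: MS?.isTypeSupported('audio/mp4; codecs="ac-3"') ?? false,
};
console.table(tsAudioSupport);
```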
@@ -4692,7 +4759,47 @@ class LatencyController {
  this.currentTime = 0;
  this.stallCount = 0;
  this._latency = null;
- this.timeupdateHandler = () => this.timeupdate();
+ this.onTimeupdate = () => {
+ const {
+ media,
+ levelDetails
+ } = this;
+ if (!media || !levelDetails) {
+ return;
+ }
+ this.currentTime = media.currentTime;
+ const latency = this.computeLatency();
+ if (latency === null) {
+ return;
+ }
+ this._latency = latency;
+
+ // Adapt playbackRate to meet target latency in low-latency mode
+ const {
+ lowLatencyMode,
+ maxLiveSyncPlaybackRate
+ } = this.config;
+ if (!lowLatencyMode || maxLiveSyncPlaybackRate === 1 || !levelDetails.live) {
+ return;
+ }
+ const targetLatency = this.targetLatency;
+ if (targetLatency === null) {
+ return;
+ }
+ const distanceFromTarget = latency - targetLatency;
+ // Only adjust playbackRate when within one target duration of targetLatency
+ // and more than one second from under-buffering.
+ // Playback further than one target duration from target can be considered DVR playback.
+ const liveMinLatencyDuration = Math.min(this.maxLatency, targetLatency + levelDetails.targetduration);
+ const inLiveRange = distanceFromTarget < liveMinLatencyDuration;
+ if (inLiveRange && distanceFromTarget > 0.05 && this.forwardBufferLength > 1) {
+ const max = Math.min(2, Math.max(1.0, maxLiveSyncPlaybackRate));
+ const rate = Math.round(2 / (1 + Math.exp(-0.75 * distanceFromTarget - this.edgeStalled)) * 20) / 20;
+ media.playbackRate = Math.min(max, Math.max(1, rate));
+ } else if (media.playbackRate !== 1 && media.playbackRate !== 0) {
+ media.playbackRate = 1;
+ }
+ };
  this.hls = hls;
  this.config = hls.config;
  this.registerListeners();
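
The catch-up logic itself is unchanged; it has only moved into a bound class field so no separate .bind() handler has to be tracked. It is still driven by the existing public low-latency options, sketched here with illustrative values:

```ts
import Hls from 'hls.js';

// Sketch: the config knobs consumed by the playback-rate logic above.
// Values are illustrative, not recommendations.
const hls = new Hls({
  lowLatencyMode: true,         // enable LL-HLS part loading and latency control
  maxLiveSyncPlaybackRate: 1.5, // values > 1 allow playback to speed up toward the live edge
});
console.log('current target latency:', hls.targetLatency);
```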
@@ -4784,7 +4891,7 @@ class LatencyController {
  this.onMediaDetaching();
  this.levelDetails = null;
  // @ts-ignore
- this.hls = this.timeupdateHandler = null;
+ this.hls = null;
  }
  registerListeners() {
  this.hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
@@ -4802,11 +4909,11 @@ class LatencyController {
  }
  onMediaAttached(event, data) {
  this.media = data.media;
- this.media.addEventListener('timeupdate', this.timeupdateHandler);
+ this.media.addEventListener('timeupdate', this.onTimeupdate);
  }
  onMediaDetaching() {
  if (this.media) {
- this.media.removeEventListener('timeupdate', this.timeupdateHandler);
+ this.media.removeEventListener('timeupdate', this.onTimeupdate);
  this.media = null;
  }
  }
@@ -4820,10 +4927,10 @@ class LatencyController {
  }) {
  this.levelDetails = details;
  if (details.advanced) {
- this.timeupdate();
+ this.onTimeupdate();
  }
  if (!details.live && this.media) {
- this.media.removeEventListener('timeupdate', this.timeupdateHandler);
+ this.media.removeEventListener('timeupdate', this.onTimeupdate);
  }
  }
  onError(event, data) {
@@ -4833,48 +4940,7 @@ class LatencyController {
  }
  this.stallCount++;
  if ((_this$levelDetails = this.levelDetails) != null && _this$levelDetails.live) {
- logger.warn('[playback-rate-controller]: Stall detected, adjusting target latency');
- }
- }
- timeupdate() {
- const {
- media,
- levelDetails
- } = this;
- if (!media || !levelDetails) {
- return;
- }
- this.currentTime = media.currentTime;
- const latency = this.computeLatency();
- if (latency === null) {
- return;
- }
- this._latency = latency;
-
- // Adapt playbackRate to meet target latency in low-latency mode
- const {
- lowLatencyMode,
- maxLiveSyncPlaybackRate
- } = this.config;
- if (!lowLatencyMode || maxLiveSyncPlaybackRate === 1 || !levelDetails.live) {
- return;
- }
- const targetLatency = this.targetLatency;
- if (targetLatency === null) {
- return;
- }
- const distanceFromTarget = latency - targetLatency;
- // Only adjust playbackRate when within one target duration of targetLatency
- // and more than one second from under-buffering.
- // Playback further than one target duration from target can be considered DVR playback.
- const liveMinLatencyDuration = Math.min(this.maxLatency, targetLatency + levelDetails.targetduration);
- const inLiveRange = distanceFromTarget < liveMinLatencyDuration;
- if (inLiveRange && distanceFromTarget > 0.05 && this.forwardBufferLength > 1) {
- const max = Math.min(2, Math.max(1.0, maxLiveSyncPlaybackRate));
- const rate = Math.round(2 / (1 + Math.exp(-0.75 * distanceFromTarget - this.edgeStalled)) * 20) / 20;
- media.playbackRate = Math.min(max, Math.max(1, rate));
- } else if (media.playbackRate !== 1 && media.playbackRate !== 0) {
- media.playbackRate = 1;
+ this.hls.logger.warn('[latency-controller]: Stall detected, adjusting target latency');
  }
  }
  estimateLiveEdge() {
@@ -5646,18 +5712,13 @@ var ErrorActionFlags = {
  MoveAllAlternatesMatchingHDCP: 2,
  SwitchToSDR: 4
  }; // Reserved for future use
- class ErrorController {
+ class ErrorController extends Logger {
  constructor(hls) {
+ super('error-controller', hls.logger);
  this.hls = void 0;
  this.playlistError = 0;
  this.penalizedRenditions = {};
- this.log = void 0;
- this.warn = void 0;
- this.error = void 0;
  this.hls = hls;
- this.log = logger.log.bind(logger, `[info]:`);
- this.warn = logger.warn.bind(logger, `[warning]:`);
- this.error = logger.error.bind(logger, `[error]:`);
  this.registerListeners();
  }
  registerListeners() {
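
Controllers now obtain their log/warn/error helpers by extending the shared Logger, labelled per instance through hls.logger, instead of binding module-level functions. For applications the related public surface is unchanged: the debug config option still accepts true or a custom logger object. A hedged sketch:

```ts
import Hls from 'hls.js';

// Illustrative sink; replace with your own logging backend.
function logSink(level: string, args: unknown[]): void {
  console.log(`[player] ${level}:`, ...args);
}

// Sketch: route hls.js logs through a custom object instead of the console.
// `debug` accepts `true` or an object implementing these six methods.
const hls = new Hls({
  debug: {
    trace: () => {},
    debug: (...args: unknown[]) => logSink('debug', args),
    log: (...args: unknown[]) => logSink('log', args),
    info: (...args: unknown[]) => logSink('info', args),
    warn: (...args: unknown[]) => logSink('warn', args),
    error: (...args: unknown[]) => logSink('error', args),
  },
});
```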
@@ -6009,16 +6070,13 @@ class ErrorController {
  }
  }

- class BasePlaylistController {
+ class BasePlaylistController extends Logger {
  constructor(hls, logPrefix) {
+ super(logPrefix, hls.logger);
  this.hls = void 0;
  this.timer = -1;
  this.requestScheduled = -1;
  this.canLoad = false;
- this.log = void 0;
- this.warn = void 0;
- this.log = logger.log.bind(logger, `${logPrefix}:`);
- this.warn = logger.warn.bind(logger, `${logPrefix}:`);
  this.hls = hls;
  }
  destroy() {
@@ -6051,7 +6109,7 @@ class BasePlaylistController {
  try {
  uri = new self.URL(attr.URI, previous.url).href;
  } catch (error) {
- logger.warn(`Could not construct new URL for Rendition Report: ${error}`);
+ this.warn(`Could not construct new URL for Rendition Report: ${error}`);
  uri = attr.URI || '';
  }
  // Use exact match. Otherwise, the last partial match, if any, will be used
@@ -6810,8 +6868,9 @@ function searchDownAndUpList(arr, searchIndex, predicate) {
  return -1;
  }

- class AbrController {
+ class AbrController extends Logger {
  constructor(_hls) {
+ super('abr', _hls.logger);
  this.hls = void 0;
  this.lastLevelLoadSec = 0;
  this.lastLoadedFragLevel = -1;
@@ -6925,7 +6984,7 @@ class AbrController {
  this.resetEstimator(nextLoadLevelBitrate);
  }
  this.clearTimer();
- logger.warn(`[abr] Fragment ${frag.sn}${part ? ' part ' + part.index : ''} of level ${frag.level} is loading too slowly;
+ this.warn(`Fragment ${frag.sn}${part ? ' part ' + part.index : ''} of level ${frag.level} is loading too slowly;
  Time to underbuffer: ${bufferStarvationDelay.toFixed(3)} s
  Estimated load time for current fragment: ${fragLoadedDelay.toFixed(3)} s
  Estimated load time for down switch fragment: ${fragLevelNextLoadedDelay.toFixed(3)} s
@@ -6945,7 +7004,7 @@ class AbrController {
  }
  resetEstimator(abrEwmaDefaultEstimate) {
  if (abrEwmaDefaultEstimate) {
- logger.log(`setting initial bwe to ${abrEwmaDefaultEstimate}`);
+ this.log(`setting initial bwe to ${abrEwmaDefaultEstimate}`);
  this.hls.config.abrEwmaDefaultEstimate = abrEwmaDefaultEstimate;
  }
  this.firstSelection = -1;
@@ -7177,7 +7236,7 @@ class AbrController {
  }
  const firstLevel = this.hls.firstLevel;
  const clamped = Math.min(Math.max(firstLevel, minAutoLevel), maxAutoLevel);
- logger.warn(`[abr] Could not find best starting auto level. Defaulting to first in playlist ${firstLevel} clamped to ${clamped}`);
+ this.warn(`Could not find best starting auto level. Defaulting to first in playlist ${firstLevel} clamped to ${clamped}`);
  return clamped;
  }
  get forcedAutoLevel() {
@@ -7262,13 +7321,13 @@ class AbrController {
  // cap maxLoadingDelay and ensure it is not bigger 'than bitrate test' frag duration
  const maxLoadingDelay = currentFragDuration ? Math.min(currentFragDuration, config.maxLoadingDelay) : config.maxLoadingDelay;
  maxStarvationDelay = maxLoadingDelay - bitrateTestDelay;
- logger.info(`[abr] bitrate test took ${Math.round(1000 * bitrateTestDelay)}ms, set first fragment max fetchDuration to ${Math.round(1000 * maxStarvationDelay)} ms`);
+ this.info(`bitrate test took ${Math.round(1000 * bitrateTestDelay)}ms, set first fragment max fetchDuration to ${Math.round(1000 * maxStarvationDelay)} ms`);
  // don't use conservative factor on bitrate test
  bwFactor = bwUpFactor = 1;
  }
  }
  const bestLevel = this.findBestLevel(avgbw, minAutoLevel, maxAutoLevel, bufferStarvationDelay, maxStarvationDelay, bwFactor, bwUpFactor);
- logger.info(`[abr] ${bufferStarvationDelay ? 'rebuffering expected' : 'buffer is empty'}, optimal quality level ${bestLevel}`);
+ this.info(`${bufferStarvationDelay ? 'rebuffering expected' : 'buffer is empty'}, optimal quality level ${bestLevel}`);
  if (bestLevel > -1) {
  return bestLevel;
  }
@@ -7330,7 +7389,7 @@ class AbrController {
  currentVideoRange = preferHDR ? videoRanges[videoRanges.length - 1] : videoRanges[0];
  currentFrameRate = minFramerate;
  currentBw = Math.max(currentBw, minBitrate);
- logger.log(`[abr] picked start tier ${JSON.stringify(startTier)}`);
+ this.log(`picked start tier ${JSON.stringify(startTier)}`);
  } else {
  currentCodecSet = level == null ? void 0 : level.codecSet;
  currentVideoRange = level == null ? void 0 : level.videoRange;
@@ -7354,11 +7413,11 @@ class AbrController {
  const levels = this.hls.levels;
  const index = levels.indexOf(levelInfo);
  if (decodingInfo.error) {
- logger.warn(`[abr] MediaCapabilities decodingInfo error: "${decodingInfo.error}" for level ${index} ${JSON.stringify(decodingInfo)}`);
+ this.warn(`MediaCapabilities decodingInfo error: "${decodingInfo.error}" for level ${index} ${JSON.stringify(decodingInfo)}`);
  } else if (!decodingInfo.supported) {
- logger.warn(`[abr] Unsupported MediaCapabilities decodingInfo result for level ${index} ${JSON.stringify(decodingInfo)}`);
+ this.warn(`Unsupported MediaCapabilities decodingInfo result for level ${index} ${JSON.stringify(decodingInfo)}`);
  if (index > -1 && levels.length > 1) {
- logger.log(`[abr] Removing unsupported level ${index}`);
+ this.log(`Removing unsupported level ${index}`);
  this.hls.removeLevel(index);
  }
  }
@@ -7405,9 +7464,9 @@ class AbrController {
  const forcedAutoLevel = this.forcedAutoLevel;
  if (i !== loadLevel && (forcedAutoLevel === -1 || forcedAutoLevel !== loadLevel)) {
  if (levelsSkipped.length) {
- logger.trace(`[abr] Skipped level(s) ${levelsSkipped.join(',')} of ${maxAutoLevel} max with CODECS and VIDEO-RANGE:"${levels[levelsSkipped[0]].codecs}" ${levels[levelsSkipped[0]].videoRange}; not compatible with "${level.codecs}" ${currentVideoRange}`);
+ this.trace(`Skipped level(s) ${levelsSkipped.join(',')} of ${maxAutoLevel} max with CODECS and VIDEO-RANGE:"${levels[levelsSkipped[0]].codecs}" ${levels[levelsSkipped[0]].videoRange}; not compatible with "${level.codecs}" ${currentVideoRange}`);
  }
- logger.info(`[abr] switch candidate:${selectionBaseLevel}->${i} adjustedbw(${Math.round(adjustedbw)})-bitrate=${Math.round(adjustedbw - bitrate)} ttfb:${ttfbEstimateSec.toFixed(1)} avgDuration:${avgDuration.toFixed(1)} maxFetchDuration:${maxFetchDuration.toFixed(1)} fetchDuration:${fetchDuration.toFixed(1)} firstSelection:${firstSelection} codecSet:${currentCodecSet} videoRange:${currentVideoRange} hls.loadLevel:${loadLevel}`);
+ this.info(`switch candidate:${selectionBaseLevel}->${i} adjustedbw(${Math.round(adjustedbw)})-bitrate=${Math.round(adjustedbw - bitrate)} ttfb:${ttfbEstimateSec.toFixed(1)} avgDuration:${avgDuration.toFixed(1)} maxFetchDuration:${maxFetchDuration.toFixed(1)} fetchDuration:${fetchDuration.toFixed(1)} firstSelection:${firstSelection} codecSet:${currentCodecSet} videoRange:${currentVideoRange} hls.loadLevel:${loadLevel}`);
  }
  if (firstSelection) {
  this.firstSelection = i;
@@ -7420,8 +7479,12 @@ class AbrController {
  return -1;
  }
  set nextAutoLevel(nextLevel) {
- const value = Math.max(this.hls.minAutoLevel, nextLevel);
- if (this._nextAutoLevel != value) {
+ const {
+ maxAutoLevel,
+ minAutoLevel
+ } = this.hls;
+ const value = Math.min(Math.max(nextLevel, minAutoLevel), maxAutoLevel);
+ if (this._nextAutoLevel !== value) {
  this.nextAutoLevelKey = '';
  this._nextAutoLevel = value;
  }
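
The setter now clamps requests against both ends of the auto-level range instead of only the lower bound. A hedged sketch of what that means for API callers (the capping value is illustrative):

```ts
import Hls from 'hls.js';

// Sketch: nextAutoLevel assignments are now clamped to [minAutoLevel, maxAutoLevel].
const hls = new Hls();
hls.autoLevelCapping = 2; // limits maxAutoLevel to index 2
// Previously only the lower bound was enforced, so an out-of-range request could
// force a level above the cap; it is now reduced to maxAutoLevel instead.
hls.nextAutoLevel = 99;
```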
@@ -7457,8 +7520,9 @@ class AbrController {
  * we are limiting the task execution per call stack to exactly one, but scheduling/post-poning further
  * task processing on the next main loop iteration (also known as "next tick" in the Node/JS runtime lingo).
  */
- class TaskLoop {
- constructor() {
+ class TaskLoop extends Logger {
+ constructor(label, logger) {
+ super(label, logger);
  this._boundTick = void 0;
  this._tickTimer = null;
  this._tickInterval = null;
@@ -8549,8 +8613,8 @@ function createLoaderContext(frag, part = null) {
  var _frag$decryptdata;
  let byteRangeStart = start;
  let byteRangeEnd = end;
- if (frag.sn === 'initSegment' && ((_frag$decryptdata = frag.decryptdata) == null ? void 0 : _frag$decryptdata.method) === 'AES-128') {
- // MAP segment encrypted with method 'AES-128', when served with HTTP Range,
+ if (frag.sn === 'initSegment' && isMethodFullSegmentAesCbc((_frag$decryptdata = frag.decryptdata) == null ? void 0 : _frag$decryptdata.method)) {
+ // MAP segment encrypted with method 'AES-128' or 'AES-256' (cbc), when served with HTTP Range,
  // has the unencrypted size specified in the range.
  // Ref: https://tools.ietf.org/html/draft-pantos-hls-rfc8216bis-08#section-6.3.6
  const fragmentLen = end - start;
@@ -8583,6 +8647,9 @@ function createGapLoadError(frag, part) {
  (part ? part : frag).stats.aborted = true;
  return new LoadError(errorData);
  }
+ function isMethodFullSegmentAesCbc(method) {
+ return method === 'AES-128' || method === 'AES-256';
+ }
  class LoadError extends Error {
  constructor(data) {
  super(data.error.message);
@@ -8592,33 +8659,61 @@ class LoadError extends Error {
  }

  class AESCrypto {
- constructor(subtle, iv) {
+ constructor(subtle, iv, aesMode) {
  this.subtle = void 0;
  this.aesIV = void 0;
+ this.aesMode = void 0;
  this.subtle = subtle;
  this.aesIV = iv;
+ this.aesMode = aesMode;
  }
  decrypt(data, key) {
- return this.subtle.decrypt({
- name: 'AES-CBC',
- iv: this.aesIV
- }, key, data);
+ switch (this.aesMode) {
+ case DecrypterAesMode.cbc:
+ return this.subtle.decrypt({
+ name: 'AES-CBC',
+ iv: this.aesIV
+ }, key, data);
+ case DecrypterAesMode.ctr:
+ return this.subtle.decrypt({
+ name: 'AES-CTR',
+ counter: this.aesIV,
+ length: 64
+ },
+ //64 : NIST SP800-38A standard suggests that the counter should occupy half of the counter block
+ key, data);
+ default:
+ throw new Error(`[AESCrypto] invalid aes mode ${this.aesMode}`);
+ }
  }
  }

  class FastAESKey {
- constructor(subtle, key) {
+ constructor(subtle, key, aesMode) {
  this.subtle = void 0;
  this.key = void 0;
+ this.aesMode = void 0;
  this.subtle = subtle;
  this.key = key;
+ this.aesMode = aesMode;
  }
  expandKey() {
+ const subtleAlgoName = getSubtleAlgoName(this.aesMode);
  return this.subtle.importKey('raw', this.key, {
- name: 'AES-CBC'
+ name: subtleAlgoName
  }, false, ['encrypt', 'decrypt']);
  }
  }
+ function getSubtleAlgoName(aesMode) {
+ switch (aesMode) {
+ case DecrypterAesMode.cbc:
+ return 'AES-CBC';
+ case DecrypterAesMode.ctr:
+ return 'AES-CTR';
+ default:
+ throw new Error(`[FastAESKey] invalid aes mode ${aesMode}`);
+ }
+ }

  // PKCS7
  function removePadding(array) {
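
The WebCrypto calls added for CTR mode follow the standard SubtleCrypto API. A standalone sketch of decrypting one AES-256-CTR segment, with the 16-byte IV used as the initial counter block (key, IV and payload are placeholders):

```ts
// Sketch: full-segment AES-256-CTR decryption with WebCrypto, mirroring the new code path.
// `rawKey` (32 bytes), `iv` (16 bytes) and `encrypted` are placeholders.
async function decryptAes256Ctr(
  rawKey: ArrayBuffer,
  iv: Uint8Array,
  encrypted: ArrayBuffer,
): Promise<ArrayBuffer> {
  const key = await crypto.subtle.importKey('raw', rawKey, { name: 'AES-CTR' }, false, ['decrypt']);
  // length: 64 - the low 64 bits of the counter block act as the counter (NIST SP 800-38A)
  return crypto.subtle.decrypt({ name: 'AES-CTR', counter: iv, length: 64 }, key, encrypted);
}
```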
@@ -8868,7 +8963,8 @@ class Decrypter {
  this.currentIV = null;
  this.currentResult = null;
  this.useSoftware = void 0;
- this.useSoftware = config.enableSoftwareAES;
+ this.enableSoftwareAES = void 0;
+ this.enableSoftwareAES = config.enableSoftwareAES;
  this.removePKCS7Padding = removePKCS7Padding;
  // built in decryptor expects PKCS7 padding
  if (removePKCS7Padding) {
@@ -8881,9 +8977,7 @@ class Decrypter {
  /* no-op */
  }
  }
- if (this.subtle === null) {
- this.useSoftware = true;
- }
+ this.useSoftware = this.subtle === null;
  }
  destroy() {
  this.subtle = null;
@@ -8921,10 +9015,10 @@ class Decrypter {
  this.softwareDecrypter = null;
  }
  }
- decrypt(data, key, iv) {
+ decrypt(data, key, iv, aesMode) {
  if (this.useSoftware) {
  return new Promise((resolve, reject) => {
- this.softwareDecrypt(new Uint8Array(data), key, iv);
+ this.softwareDecrypt(new Uint8Array(data), key, iv, aesMode);
  const decryptResult = this.flush();
  if (decryptResult) {
  resolve(decryptResult.buffer);
@@ -8933,17 +9027,21 @@ class Decrypter {
  }
  });
  }
- return this.webCryptoDecrypt(new Uint8Array(data), key, iv);
+ return this.webCryptoDecrypt(new Uint8Array(data), key, iv, aesMode);
  }

  // Software decryption is progressive. Progressive decryption may not return a result on each call. Any cached
  // data is handled in the flush() call
- softwareDecrypt(data, key, iv) {
+ softwareDecrypt(data, key, iv, aesMode) {
  const {
  currentIV,
  currentResult,
  remainderData
  } = this;
+ if (aesMode !== DecrypterAesMode.cbc || key.byteLength !== 16) {
+ logger.warn('SoftwareDecrypt: can only handle AES-128-CBC');
+ return null;
+ }
  this.logOnce('JS AES decrypt');
  // The output is staggered during progressive parsing - the current result is cached, and emitted on the next call
  // This is done in order to strip PKCS7 padding, which is found at the end of each segment. We only know we've reached
@@ -8976,11 +9074,11 @@ class Decrypter {
  }
  return result;
  }
- webCryptoDecrypt(data, key, iv) {
+ webCryptoDecrypt(data, key, iv, aesMode) {
  const subtle = this.subtle;
  if (this.key !== key || !this.fastAesKey) {
  this.key = key;
- this.fastAesKey = new FastAESKey(subtle, key);
+ this.fastAesKey = new FastAESKey(subtle, key, aesMode);
  }
  return this.fastAesKey.expandKey().then(aesKey => {
  // decrypt using web crypto
@@ -8988,22 +9086,25 @@ class Decrypter {
  return Promise.reject(new Error('web crypto not initialized'));
  }
  this.logOnce('WebCrypto AES decrypt');
- const crypto = new AESCrypto(subtle, new Uint8Array(iv));
+ const crypto = new AESCrypto(subtle, new Uint8Array(iv), aesMode);
  return crypto.decrypt(data.buffer, aesKey);
  }).catch(err => {
  logger.warn(`[decrypter]: WebCrypto Error, disable WebCrypto API, ${err.name}: ${err.message}`);
- return this.onWebCryptoError(data, key, iv);
+ return this.onWebCryptoError(data, key, iv, aesMode);
  });
  }
- onWebCryptoError(data, key, iv) {
- this.useSoftware = true;
- this.logEnabled = true;
- this.softwareDecrypt(data, key, iv);
- const decryptResult = this.flush();
- if (decryptResult) {
- return decryptResult.buffer;
+ onWebCryptoError(data, key, iv, aesMode) {
+ const enableSoftwareAES = this.enableSoftwareAES;
+ if (enableSoftwareAES) {
+ this.useSoftware = true;
+ this.logEnabled = true;
+ this.softwareDecrypt(data, key, iv, aesMode);
+ const decryptResult = this.flush();
+ if (decryptResult) {
+ return decryptResult.buffer;
+ }
  }
- throw new Error('WebCrypto and softwareDecrypt: failed to decrypt data');
+ throw new Error('WebCrypto' + (enableSoftwareAES ? ' and softwareDecrypt' : '') + ': failed to decrypt data');
  }
  getValidChunk(data) {
  let currentChunk = data;
@@ -9054,7 +9155,7 @@ const State = {
  };
  class BaseStreamController extends TaskLoop {
  constructor(hls, fragmentTracker, keyLoader, logPrefix, playlistType) {
- super();
+ super(logPrefix, hls.logger);
  this.hls = void 0;
  this.fragPrevious = null;
  this.fragCurrent = null;
@@ -9079,22 +9180,89 @@ class BaseStreamController extends TaskLoop {
  this.startFragRequested = false;
  this.decrypter = void 0;
  this.initPTS = [];
- this.onvseeking = null;
- this.onvended = null;
- this.logPrefix = '';
- this.log = void 0;
- this.warn = void 0;
+ this.buffering = true;
+ this.onMediaSeeking = () => {
+ const {
+ config,
+ fragCurrent,
+ media,
+ mediaBuffer,
+ state
+ } = this;
+ const currentTime = media ? media.currentTime : 0;
+ const bufferInfo = BufferHelper.bufferInfo(mediaBuffer ? mediaBuffer : media, currentTime, config.maxBufferHole);
+ this.log(`media seeking to ${isFiniteNumber(currentTime) ? currentTime.toFixed(3) : currentTime}, state: ${state}`);
+ if (this.state === State.ENDED) {
+ this.resetLoadingState();
+ } else if (fragCurrent) {
+ // Seeking while frag load is in progress
+ const tolerance = config.maxFragLookUpTolerance;
+ const fragStartOffset = fragCurrent.start - tolerance;
+ const fragEndOffset = fragCurrent.start + fragCurrent.duration + tolerance;
+ // if seeking out of buffered range or into new one
+ if (!bufferInfo.len || fragEndOffset < bufferInfo.start || fragStartOffset > bufferInfo.end) {
+ const pastFragment = currentTime > fragEndOffset;
+ // if the seek position is outside the current fragment range
+ if (currentTime < fragStartOffset || pastFragment) {
+ if (pastFragment && fragCurrent.loader) {
+ this.log('seeking outside of buffer while fragment load in progress, cancel fragment load');
+ fragCurrent.abortRequests();
+ this.resetLoadingState();
+ }
+ this.fragPrevious = null;
+ }
+ }
+ }
+ if (media) {
+ // Remove gap fragments
+ this.fragmentTracker.removeFragmentsInRange(currentTime, Infinity, this.playlistType, true);
+ this.lastCurrentTime = currentTime;
+ }
+
+ // in case seeking occurs although no media buffered, adjust startPosition and nextLoadPosition to seek target
+ if (!this.loadedmetadata && !bufferInfo.len) {
+ this.nextLoadPosition = this.startPosition = currentTime;
+ }
+
+ // Async tick to speed up processing
+ this.tickImmediate();
+ };
+ this.onMediaEnded = () => {
+ // reset startPosition and lastCurrentTime to restart playback @ stream beginning
+ this.startPosition = this.lastCurrentTime = 0;
+ if (this.playlistType === PlaylistLevelType.MAIN) {
+ this.hls.trigger(Events.MEDIA_ENDED, {
+ stalled: false
+ });
+ }
+ };
  this.playlistType = playlistType;
- this.logPrefix = logPrefix;
- this.log = logger.log.bind(logger, `${logPrefix}:`);
- this.warn = logger.warn.bind(logger, `${logPrefix}:`);
  this.hls = hls;
  this.fragmentLoader = new FragmentLoader(hls.config);
  this.keyLoader = keyLoader;
  this.fragmentTracker = fragmentTracker;
  this.config = hls.config;
  this.decrypter = new Decrypter(hls.config);
+ }
+ registerListeners() {
+ const {
+ hls
+ } = this;
+ hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
+ hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
+ hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
  hls.on(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
+ hls.on(Events.ERROR, this.onError, this);
+ }
+ unregisterListeners() {
+ const {
+ hls
+ } = this;
+ hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
+ hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
+ hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
+ hls.off(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
+ hls.off(Events.ERROR, this.onError, this);
  }
  doTick() {
  this.onTickEnd();
@@ -9118,6 +9286,12 @@ class BaseStreamController extends TaskLoop {
  this.clearNextTick();
  this.state = State.STOPPED;
  }
+ pauseBuffering() {
+ this.buffering = false;
+ }
+ resumeBuffering() {
+ this.buffering = true;
+ }
  _streamEnded(bufferInfo, levelDetails) {
  // If playlist is live, there is another buffered range after the current range, nothing buffered, media is detached,
  // of nothing loading/loaded return false
@@ -9148,10 +9322,8 @@ class BaseStreamController extends TaskLoop {
  }
  onMediaAttached(event, data) {
  const media = this.media = this.mediaBuffer = data.media;
- this.onvseeking = this.onMediaSeeking.bind(this);
- this.onvended = this.onMediaEnded.bind(this);
- media.addEventListener('seeking', this.onvseeking);
- media.addEventListener('ended', this.onvended);
+ media.addEventListener('seeking', this.onMediaSeeking);
+ media.addEventListener('ended', this.onMediaEnded);
  const config = this.config;
  if (this.levels && config.autoStartLoad && this.state === State.STOPPED) {
  this.startLoad(config.startPosition);
@@ -9165,10 +9337,9 @@ class BaseStreamController extends TaskLoop {
  }

  // remove video listeners
- if (media && this.onvseeking && this.onvended) {
- media.removeEventListener('seeking', this.onvseeking);
- media.removeEventListener('ended', this.onvended);
- this.onvseeking = this.onvended = null;
+ if (media) {
+ media.removeEventListener('seeking', this.onMediaSeeking);
+ media.removeEventListener('ended', this.onMediaEnded);
  }
  if (this.keyLoader) {
  this.keyLoader.detach();
@@ -9178,56 +9349,8 @@ class BaseStreamController extends TaskLoop {
  this.fragmentTracker.removeAllFragments();
  this.stopLoad();
  }
- onMediaSeeking() {
- const {
- config,
- fragCurrent,
- media,
- mediaBuffer,
- state
- } = this;
- const currentTime = media ? media.currentTime : 0;
- const bufferInfo = BufferHelper.bufferInfo(mediaBuffer ? mediaBuffer : media, currentTime, config.maxBufferHole);
- this.log(`media seeking to ${isFiniteNumber(currentTime) ? currentTime.toFixed(3) : currentTime}, state: ${state}`);
- if (this.state === State.ENDED) {
- this.resetLoadingState();
- } else if (fragCurrent) {
- // Seeking while frag load is in progress
- const tolerance = config.maxFragLookUpTolerance;
- const fragStartOffset = fragCurrent.start - tolerance;
- const fragEndOffset = fragCurrent.start + fragCurrent.duration + tolerance;
- // if seeking out of buffered range or into new one
- if (!bufferInfo.len || fragEndOffset < bufferInfo.start || fragStartOffset > bufferInfo.end) {
- const pastFragment = currentTime > fragEndOffset;
- // if the seek position is outside the current fragment range
- if (currentTime < fragStartOffset || pastFragment) {
- if (pastFragment && fragCurrent.loader) {
- this.log('seeking outside of buffer while fragment load in progress, cancel fragment load');
- fragCurrent.abortRequests();
- this.resetLoadingState();
- }
- this.fragPrevious = null;
- }
- }
- }
- if (media) {
- // Remove gap fragments
- this.fragmentTracker.removeFragmentsInRange(currentTime, Infinity, this.playlistType, true);
- this.lastCurrentTime = currentTime;
- }
-
- // in case seeking occurs although no media buffered, adjust startPosition and nextLoadPosition to seek target
- if (!this.loadedmetadata && !bufferInfo.len) {
- this.nextLoadPosition = this.startPosition = currentTime;
- }
-
- // Async tick to speed up processing
- this.tickImmediate();
- }
- onMediaEnded() {
- // reset startPosition and lastCurrentTime to restart playback @ stream beginning
- this.startPosition = this.lastCurrentTime = 0;
- }
+ onManifestLoading() {}
+ onError(event, data) {}
  onManifestLoaded(event, data) {
  this.startTimeOffset = data.startTimeOffset;
  this.initPTS = [];
@@ -9237,7 +9360,7 @@ class BaseStreamController extends TaskLoop {
  this.stopLoad();
  super.onHandlerDestroying();
  // @ts-ignore
- this.hls = null;
+ this.hls = this.onMediaSeeking = this.onMediaEnded = null;
  }
  onHandlerDestroyed() {
  this.state = State.STOPPED;
@@ -9368,10 +9491,10 @@ class BaseStreamController extends TaskLoop {
  const decryptData = frag.decryptdata;

  // check to see if the payload needs to be decrypted
- if (payload && payload.byteLength > 0 && decryptData != null && decryptData.key && decryptData.iv && decryptData.method === 'AES-128') {
+ if (payload && payload.byteLength > 0 && decryptData != null && decryptData.key && decryptData.iv && isFullSegmentEncryption(decryptData.method)) {
  const startTime = self.performance.now();
  // decrypt init segment data
- return this.decrypter.decrypt(new Uint8Array(payload), decryptData.key.buffer, decryptData.iv.buffer).catch(err => {
+ return this.decrypter.decrypt(new Uint8Array(payload), decryptData.key.buffer, decryptData.iv.buffer, getAesModeFromFullSegmentMethod(decryptData.method)).catch(err => {
  hls.trigger(Events.ERROR, {
  type: ErrorTypes.MEDIA_ERROR,
  details: ErrorDetails.FRAG_DECRYPT_ERROR,
@@ -9483,7 +9606,7 @@ class BaseStreamController extends TaskLoop {
  }
  let keyLoadingPromise = null;
  if (frag.encrypted && !((_frag$decryptdata = frag.decryptdata) != null && _frag$decryptdata.key)) {
- this.log(`Loading key for ${frag.sn} of [${details.startSN}-${details.endSN}], ${this.logPrefix === '[stream-controller]' ? 'level' : 'track'} ${frag.level}`);
+ this.log(`Loading key for ${frag.sn} of [${details.startSN}-${details.endSN}], ${this.playlistType === PlaylistLevelType.MAIN ? 'level' : 'track'} ${frag.level}`);
  this.state = State.KEY_LOADING;
  this.fragCurrent = frag;
  keyLoadingPromise = this.keyLoader.load(frag).then(keyLoadedData => {
@@ -9514,7 +9637,7 @@ class BaseStreamController extends TaskLoop {
  const partIndex = this.getNextPart(partList, frag, targetBufferTime);
  if (partIndex > -1) {
  const part = partList[partIndex];
- this.log(`Loading part sn: ${frag.sn} p: ${part.index} cc: ${frag.cc} of playlist [${details.startSN}-${details.endSN}] parts [0-${partIndex}-${partList.length - 1}] ${this.logPrefix === '[stream-controller]' ? 'level' : 'track'}: ${frag.level}, target: ${parseFloat(targetBufferTime.toFixed(3))}`);
+ this.log(`Loading part sn: ${frag.sn} p: ${part.index} cc: ${frag.cc} of playlist [${details.startSN}-${details.endSN}] parts [0-${partIndex}-${partList.length - 1}] ${this.playlistType === PlaylistLevelType.MAIN ? 'level' : 'track'}: ${frag.level}, target: ${parseFloat(targetBufferTime.toFixed(3))}`);
  this.nextLoadPosition = part.start + part.duration;
  this.state = State.FRAG_LOADING;
  let _result;
@@ -9543,7 +9666,7 @@ class BaseStreamController extends TaskLoop {
  }
  }
  }
- this.log(`Loading fragment ${frag.sn} cc: ${frag.cc} ${details ? 'of [' + details.startSN + '-' + details.endSN + '] ' : ''}${this.logPrefix === '[stream-controller]' ? 'level' : 'track'}: ${frag.level}, target: ${parseFloat(targetBufferTime.toFixed(3))}`);
+ this.log(`Loading fragment ${frag.sn} cc: ${frag.cc} ${details ? 'of [' + details.startSN + '-' + details.endSN + '] ' : ''}${this.playlistType === PlaylistLevelType.MAIN ? 'level' : 'track'}: ${frag.level}, target: ${parseFloat(targetBufferTime.toFixed(3))}`);
  // Don't update nextLoadPosition for fragments which are not buffered
  if (isFiniteNumber(frag.sn) && !this.bitrateTest) {
  this.nextLoadPosition = frag.start + frag.duration;
@@ -10128,7 +10251,7 @@ class BaseStreamController extends TaskLoop {
  errorAction.resolved = true;
  }
  } else {
- logger.warn(`${data.details} reached or exceeded max retry (${retryCount})`);
+ this.warn(`${data.details} reached or exceeded max retry (${retryCount})`);
  return;
  }
  } else if ((errorAction == null ? void 0 : errorAction.action) === NetworkErrorAction.SendAlternateToPenaltyBox) {
@@ -10537,6 +10660,7 @@ const initPTSFn = (timestamp, timeOffset, initPTS) => {
  */
  function getAudioConfig(observer, data, offset, audioCodec) {
  let adtsObjectType;
+ let originalAdtsObjectType;
  let adtsExtensionSamplingIndex;
  let adtsChannelConfig;
  let config;
@@ -10544,7 +10668,7 @@ function getAudioConfig(observer, data, offset, audioCodec) {
  const manifestCodec = audioCodec;
  const adtsSamplingRates = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
  // byte 2
- adtsObjectType = ((data[offset + 2] & 0xc0) >>> 6) + 1;
+ adtsObjectType = originalAdtsObjectType = ((data[offset + 2] & 0xc0) >>> 6) + 1;
  const adtsSamplingIndex = (data[offset + 2] & 0x3c) >>> 2;
  if (adtsSamplingIndex > adtsSamplingRates.length - 1) {
  const error = new Error(`invalid ADTS sampling index:${adtsSamplingIndex}`);
@@ -10561,8 +10685,8 @@ function getAudioConfig(observer, data, offset, audioCodec) {
  // byte 3
  adtsChannelConfig |= (data[offset + 3] & 0xc0) >>> 6;
  logger.log(`manifest codec:${audioCodec}, ADTS type:${adtsObjectType}, samplingIndex:${adtsSamplingIndex}`);
- // firefox: freq less than 24kHz = AAC SBR (HE-AAC)
- if (/firefox/i.test(userAgent)) {
+ // Firefox and Pale Moon: freq less than 24kHz = AAC SBR (HE-AAC)
+ if (/firefox|palemoon/i.test(userAgent)) {
  if (adtsSamplingIndex >= 6) {
  adtsObjectType = 5;
  config = new Array(4);
@@ -10656,6 +10780,7 @@ function getAudioConfig(observer, data, offset, audioCodec) {
  samplerate: adtsSamplingRates[adtsSamplingIndex],
  channelCount: adtsChannelConfig,
  codec: 'mp4a.40.' + adtsObjectType,
+ parsedCodec: 'mp4a.40.' + originalAdtsObjectType,
  manifestCodec
  };
  }
@@ -10710,7 +10835,8 @@ function initTrackConfig(track, observer, data, offset, audioCodec) {
  track.channelCount = config.channelCount;
  track.codec = config.codec;
  track.manifestCodec = config.manifestCodec;
- logger.log(`parsed codec:${track.codec}, rate:${config.samplerate}, channels:${config.channelCount}`);
+ track.parsedCodec = config.parsedCodec;
+ logger.log(`parsed codec:${track.parsedCodec}, codec:${track.codec}, rate:${config.samplerate}, channels:${config.channelCount}`);
  }
  }
  function getFrameDuration(samplerate) {
@@ -11301,49 +11427,153 @@ class BaseVideoParser {
11301
11427
  logger.log(VideoSample.pts + '/' + VideoSample.dts + ':' + VideoSample.debug);
11302
11428
  }
11303
11429
  }
11304
- }
11305
-
11306
- /**
11307
- * Parser for exponential Golomb codes, a variable-bitwidth number encoding scheme used by h264.
11308
- */
11309
-
11310
- class ExpGolomb {
11311
- constructor(data) {
11312
- this.data = void 0;
11313
- this.bytesAvailable = void 0;
11314
- this.word = void 0;
11315
- this.bitsAvailable = void 0;
11316
- this.data = data;
11317
- // the number of bytes left to examine in this.data
11318
- this.bytesAvailable = data.byteLength;
11319
- // the current word being examined
11320
- this.word = 0; // :uint
11321
- // the number of bits left to examine in the current word
11322
- this.bitsAvailable = 0; // :uint
11323
- }
11430
+ parseNALu(track, array) {
11431
+ const len = array.byteLength;
11432
+ let state = track.naluState || 0;
11433
+ const lastState = state;
11434
+ const units = [];
11435
+ let i = 0;
11436
+ let value;
11437
+ let overflow;
11438
+ let unitType;
11439
+ let lastUnitStart = -1;
11440
+ let lastUnitType = 0;
11441
+ // logger.log('PES:' + Hex.hexDump(array));
11324
11442
 
11325
- // ():void
11326
- loadWord() {
11327
- const data = this.data;
11328
- const bytesAvailable = this.bytesAvailable;
11329
- const position = data.byteLength - bytesAvailable;
11330
- const workingBytes = new Uint8Array(4);
11331
- const availableBytes = Math.min(4, bytesAvailable);
11332
- if (availableBytes === 0) {
11333
- throw new Error('no bytes available');
11443
+ if (state === -1) {
11444
+ // special use case where we found 3 or 4-byte start codes exactly at the end of previous PES packet
11445
+ lastUnitStart = 0;
11446
+ // NALu type is value read from offset 0
11447
+ lastUnitType = this.getNALuType(array, 0);
11448
+ state = 0;
11449
+ i = 1;
11334
11450
  }
11335
- workingBytes.set(data.subarray(position, position + availableBytes));
11336
- this.word = new DataView(workingBytes.buffer).getUint32(0);
11337
- // track the amount of this.data that has been processed
11338
- this.bitsAvailable = availableBytes * 8;
11339
- this.bytesAvailable -= availableBytes;
11340
- }
11451
+ while (i < len) {
11452
+ value = array[i++];
11453
+ // optimization. state 0 and 1 are the predominant case. let's handle them outside of the switch/case
11454
+ if (!state) {
11455
+ state = value ? 0 : 1;
11456
+ continue;
11457
+ }
11458
+ if (state === 1) {
11459
+ state = value ? 0 : 2;
11460
+ continue;
11461
+ }
11462
+ // here we have state either equal to 2 or 3
11463
+ if (!value) {
11464
+ state = 3;
11465
+ } else if (value === 1) {
11466
+ overflow = i - state - 1;
11467
+ if (lastUnitStart >= 0) {
11468
+ const unit = {
11469
+ data: array.subarray(lastUnitStart, overflow),
11470
+ type: lastUnitType
11471
+ };
11472
+ // logger.log('pushing NALU, type/size:' + unit.type + '/' + unit.data.byteLength);
11473
+ units.push(unit);
11474
+ } else {
11475
+ // lastUnitStart is undefined => this is the first start code found in this PES packet
11476
+ // first check if start code delimiter is overlapping between 2 PES packets,
11477
+ // ie it started in last packet (lastState not zero)
11478
+ // and ended at the beginning of this PES packet (i <= 4 - lastState)
11479
+ const lastUnit = this.getLastNalUnit(track.samples);
11480
+ if (lastUnit) {
11481
+ if (lastState && i <= 4 - lastState) {
11482
+ // start delimiter overlapping between PES packets
11483
+ // strip start delimiter bytes from the end of last NAL unit
11484
+ // check if lastUnit had a state different from zero
11485
+ if (lastUnit.state) {
11486
+ // strip last bytes
11487
+ lastUnit.data = lastUnit.data.subarray(0, lastUnit.data.byteLength - lastState);
11488
+ }
11489
+ }
11490
+ // If NAL units are not starting right at the beginning of the PES packet, push preceding data into previous NAL unit.
11341
11491
 
11342
- // (count:int):void
11343
- skipBits(count) {
11344
- let skipBytes; // :int
11345
- count = Math.min(count, this.bytesAvailable * 8 + this.bitsAvailable);
11346
- if (this.bitsAvailable > count) {
11492
+ if (overflow > 0) {
11493
+ // logger.log('first NALU found with overflow:' + overflow);
11494
+ lastUnit.data = appendUint8Array(lastUnit.data, array.subarray(0, overflow));
11495
+ lastUnit.state = 0;
11496
+ }
11497
+ }
11498
+ }
11499
+ // check if we can read unit type
11500
+ if (i < len) {
11501
+ unitType = this.getNALuType(array, i);
11502
+ // logger.log('find NALU @ offset:' + i + ',type:' + unitType);
11503
+ lastUnitStart = i;
11504
+ lastUnitType = unitType;
11505
+ state = 0;
11506
+ } else {
11507
+ // not enough bytes to read unit type. let's read it on next PES parsing
11508
+ state = -1;
11509
+ }
11510
+ } else {
11511
+ state = 0;
11512
+ }
11513
+ }
11514
+ if (lastUnitStart >= 0 && state >= 0) {
11515
+ const unit = {
11516
+ data: array.subarray(lastUnitStart, len),
11517
+ type: lastUnitType,
11518
+ state: state
11519
+ };
11520
+ units.push(unit);
11521
+ // logger.log('pushing NALU, type/size/state:' + unit.type + '/' + unit.data.byteLength + '/' + state);
11522
+ }
11523
+ // no NALu found
11524
+ if (units.length === 0) {
11525
+ // append pes.data to previous NAL unit
11526
+ const lastUnit = this.getLastNalUnit(track.samples);
11527
+ if (lastUnit) {
11528
+ lastUnit.data = appendUint8Array(lastUnit.data, array);
11529
+ }
11530
+ }
11531
+ track.naluState = state;
11532
+ return units;
11533
+ }
11534
+ }
11535
+
11536
+ /**
11537
+ * Parser for exponential Golomb codes, a variable-bitwidth number encoding scheme used by h264.
11538
+ */
11539
+
11540
+ class ExpGolomb {
11541
+ constructor(data) {
11542
+ this.data = void 0;
11543
+ this.bytesAvailable = void 0;
11544
+ this.word = void 0;
11545
+ this.bitsAvailable = void 0;
11546
+ this.data = data;
11547
+ // the number of bytes left to examine in this.data
11548
+ this.bytesAvailable = data.byteLength;
11549
+ // the current word being examined
11550
+ this.word = 0; // :uint
11551
+ // the number of bits left to examine in the current word
11552
+ this.bitsAvailable = 0; // :uint
11553
+ }
11554
+
11555
+ // ():void
11556
+ loadWord() {
11557
+ const data = this.data;
11558
+ const bytesAvailable = this.bytesAvailable;
11559
+ const position = data.byteLength - bytesAvailable;
11560
+ const workingBytes = new Uint8Array(4);
11561
+ const availableBytes = Math.min(4, bytesAvailable);
11562
+ if (availableBytes === 0) {
11563
+ throw new Error('no bytes available');
11564
+ }
11565
+ workingBytes.set(data.subarray(position, position + availableBytes));
11566
+ this.word = new DataView(workingBytes.buffer).getUint32(0);
11567
+ // track the amount of this.data that has been processed
11568
+ this.bitsAvailable = availableBytes * 8;
11569
+ this.bytesAvailable -= availableBytes;
11570
+ }
11571
+
11572
+ // (count:int):void
11573
+ skipBits(count) {
11574
+ let skipBytes; // :int
11575
+ count = Math.min(count, this.bytesAvailable * 8 + this.bitsAvailable);
11576
+ if (this.bitsAvailable > count) {
11347
11577
  this.word <<= count;
11348
11578
  this.bitsAvailable -= count;
11349
11579
  } else {
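In the hunk above, NAL unit scanning moves from the AVC-only parseAVCNALu into BaseVideoParser.parseNALu so the new HEVC parser can share it; the codec-specific part, reading the NAL unit type, is delegated to getNALuType. The scan itself is a small state machine over Annex B start codes (00 00 01, optionally preceded by another zero byte). A simplified standalone sketch of the same idea, without the cross-PES naluState bookkeeping the real method performs:

// Split an Annex B buffer on start codes. Simplified: assumes the buffer is
// self-contained, whereas parseNALu above also handles start codes and NAL
// headers that straddle two PES packets.
function splitAnnexB(data: Uint8Array): Uint8Array[] {
  const units: Uint8Array[] = [];
  let start = -1;
  for (let i = 0; i + 2 < data.length; i++) {
    if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 1) {
      if (start >= 0) {
        // trim the extra zero of a 4-byte start code from the previous unit
        const end = i > 0 && data[i - 1] === 0 ? i - 1 : i;
        units.push(data.subarray(start, end));
      }
      start = i + 3;
      i += 2;
    }
  }
  if (start >= 0) {
    units.push(data.subarray(start));
  }
  return units;
}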
@@ -11443,21 +11673,171 @@ class ExpGolomb {
11443
11673
  readUInt() {
11444
11674
  return this.readBits(32);
11445
11675
  }
11676
+ }
11677
+
11678
+ class AvcVideoParser extends BaseVideoParser {
11679
+ parsePES(track, textTrack, pes, last, duration) {
11680
+ const units = this.parseNALu(track, pes.data);
11681
+ let VideoSample = this.VideoSample;
11682
+ let push;
11683
+ let spsfound = false;
11684
+ // free pes.data to save up some memory
11685
+ pes.data = null;
11686
+
11687
+ // if new NAL units found and last sample still there, let's push ...
11688
+ // this helps parsing streams with missing AUD (only do this if AUD never found)
11689
+ if (VideoSample && units.length && !track.audFound) {
11690
+ this.pushAccessUnit(VideoSample, track);
11691
+ VideoSample = this.VideoSample = this.createVideoSample(false, pes.pts, pes.dts, '');
11692
+ }
11693
+ units.forEach(unit => {
11694
+ var _VideoSample2;
11695
+ switch (unit.type) {
11696
+ // NDR
11697
+ case 1:
11698
+ {
11699
+ let iskey = false;
11700
+ push = true;
11701
+ const data = unit.data;
11702
+ // only check slice type to detect KF in case SPS found in same packet (any keyframe is preceded by SPS ...)
11703
+ if (spsfound && data.length > 4) {
11704
+ // retrieve slice type by parsing beginning of NAL unit (follow H264 spec, slice_header definition) to detect keyframe embedded in NDR
11705
+ const sliceType = this.readSliceType(data);
11706
+ // 2 : I slice, 4 : SI slice, 7 : I slice, 9: SI slice
11707
+ // SI slice : A slice that is coded using intra prediction only and using quantisation of the prediction samples.
11708
+ // An SI slice can be coded such that its decoded samples can be constructed identically to an SP slice.
11709
+ // I slice: A slice that is not an SI slice that is decoded using intra prediction only.
11710
+ // if (sliceType === 2 || sliceType === 7) {
11711
+ if (sliceType === 2 || sliceType === 4 || sliceType === 7 || sliceType === 9) {
11712
+ iskey = true;
11713
+ }
11714
+ }
11715
+ if (iskey) {
11716
+ var _VideoSample;
11717
+ // if we have non-keyframe data already, that cannot belong to the same frame as a keyframe, so force a push
11718
+ if ((_VideoSample = VideoSample) != null && _VideoSample.frame && !VideoSample.key) {
11719
+ this.pushAccessUnit(VideoSample, track);
11720
+ VideoSample = this.VideoSample = null;
11721
+ }
11722
+ }
11723
+ if (!VideoSample) {
11724
+ VideoSample = this.VideoSample = this.createVideoSample(true, pes.pts, pes.dts, '');
11725
+ }
11726
+ VideoSample.frame = true;
11727
+ VideoSample.key = iskey;
11728
+ break;
11729
+ // IDR
11730
+ }
11731
+ case 5:
11732
+ push = true;
11733
+ // handle PES not starting with AUD
11734
+ // if we have frame data already, that cannot belong to the same frame, so force a push
11735
+ if ((_VideoSample2 = VideoSample) != null && _VideoSample2.frame && !VideoSample.key) {
11736
+ this.pushAccessUnit(VideoSample, track);
11737
+ VideoSample = this.VideoSample = null;
11738
+ }
11739
+ if (!VideoSample) {
11740
+ VideoSample = this.VideoSample = this.createVideoSample(true, pes.pts, pes.dts, '');
11741
+ }
11742
+ VideoSample.key = true;
11743
+ VideoSample.frame = true;
11744
+ break;
11745
+ // SEI
11746
+ case 6:
11747
+ {
11748
+ push = true;
11749
+ parseSEIMessageFromNALu(unit.data, 1, pes.pts, textTrack.samples);
11750
+ break;
11751
+ // SPS
11752
+ }
11753
+ case 7:
11754
+ {
11755
+ var _track$pixelRatio, _track$pixelRatio2;
11756
+ push = true;
11757
+ spsfound = true;
11758
+ const sps = unit.data;
11759
+ const config = this.readSPS(sps);
11760
+ if (!track.sps || track.width !== config.width || track.height !== config.height || ((_track$pixelRatio = track.pixelRatio) == null ? void 0 : _track$pixelRatio[0]) !== config.pixelRatio[0] || ((_track$pixelRatio2 = track.pixelRatio) == null ? void 0 : _track$pixelRatio2[1]) !== config.pixelRatio[1]) {
11761
+ track.width = config.width;
11762
+ track.height = config.height;
11763
+ track.pixelRatio = config.pixelRatio;
11764
+ track.sps = [sps];
11765
+ track.duration = duration;
11766
+ const codecarray = sps.subarray(1, 4);
11767
+ let codecstring = 'avc1.';
11768
+ for (let i = 0; i < 3; i++) {
11769
+ let h = codecarray[i].toString(16);
11770
+ if (h.length < 2) {
11771
+ h = '0' + h;
11772
+ }
11773
+ codecstring += h;
11774
+ }
11775
+ track.codec = codecstring;
11776
+ }
11777
+ break;
11778
+ }
11779
+ // PPS
11780
+ case 8:
11781
+ push = true;
11782
+ track.pps = [unit.data];
11783
+ break;
11784
+ // AUD
11785
+ case 9:
11786
+ push = true;
11787
+ track.audFound = true;
11788
+ if (VideoSample) {
11789
+ this.pushAccessUnit(VideoSample, track);
11790
+ }
11791
+ VideoSample = this.VideoSample = this.createVideoSample(false, pes.pts, pes.dts, '');
11792
+ break;
11793
+ // Filler Data
11794
+ case 12:
11795
+ push = true;
11796
+ break;
11797
+ default:
11798
+ push = false;
11799
+ if (VideoSample) {
11800
+ VideoSample.debug += 'unknown NAL ' + unit.type + ' ';
11801
+ }
11802
+ break;
11803
+ }
11804
+ if (VideoSample && push) {
11805
+ const units = VideoSample.units;
11806
+ units.push(unit);
11807
+ }
11808
+ });
11809
+ // if last PES packet, push samples
11810
+ if (last && VideoSample) {
11811
+ this.pushAccessUnit(VideoSample, track);
11812
+ this.VideoSample = null;
11813
+ }
11814
+ }
11815
+ getNALuType(data, offset) {
11816
+ return data[offset] & 0x1f;
11817
+ }
11818
+ readSliceType(data) {
11819
+ const eg = new ExpGolomb(data);
11820
+ // skip NALu type
11821
+ eg.readUByte();
11822
+ // discard first_mb_in_slice
11823
+ eg.readUEG();
11824
+ // return slice_type
11825
+ return eg.readUEG();
11826
+ }
11446
11827
 
11447
11828
  /**
11448
- * Advance the ExpGolomb decoder past a scaling list. The scaling
11449
- * list is optionally transmitted as part of a sequence parameter
11829
+ * The scaling list is optionally transmitted as part of a sequence parameter
11450
11830
  * set and is not relevant to transmuxing.
11451
11831
  * @param count the number of entries in this scaling list
11452
11832
  * @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
11453
11833
  */
11454
- skipScalingList(count) {
11834
+ skipScalingList(count, reader) {
11455
11835
  let lastScale = 8;
11456
11836
  let nextScale = 8;
11457
11837
  let deltaScale;
11458
11838
  for (let j = 0; j < count; j++) {
11459
11839
  if (nextScale !== 0) {
11460
- deltaScale = this.readEG();
11840
+ deltaScale = reader.readEG();
11461
11841
  nextScale = (lastScale + deltaScale + 256) % 256;
11462
11842
  }
11463
11843
  lastScale = nextScale === 0 ? lastScale : nextScale;
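readSliceType above wraps the NAL payload in an ExpGolomb reader and pulls two unsigned Exp-Golomb (ue(v)) values: first_mb_in_slice, which is discarded, and slice_type, which decides whether an NDR slice is a keyframe. ue(v) is simply "count leading zero bits, then read that many suffix bits"; a minimal standalone decoder for reference:

// Minimal ue(v) decoder over a byte array with a bit cursor. The bundled
// ExpGolomb class does the same thing, but word-by-word for speed.
function readUE(data: Uint8Array, cursor: { bit: number }): number {
  const readBit = (): number => {
    const byte = data[cursor.bit >> 3];
    const bit = (byte >> (7 - (cursor.bit & 7))) & 1;
    cursor.bit++;
    return bit;
  };
  let leadingZeros = 0;
  // the < 32 bound only guards against malformed, all-zero input
  while (leadingZeros < 32 && readBit() === 0) {
    leadingZeros++;
  }
  let suffix = 0;
  for (let k = 0; k < leadingZeros; k++) {
    suffix = (suffix << 1) | readBit();
  }
  return (1 << leadingZeros) - 1 + suffix;
}

// Example: the bit pattern 00111 decodes to (1 << 2) - 1 + 0b11 = 6.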
@@ -11472,7 +11852,8 @@ class ExpGolomb {
11472
11852
  * sequence parameter set, including the dimensions of the
11473
11853
  * associated video frames.
11474
11854
  */
11475
- readSPS() {
11855
+ readSPS(sps) {
11856
+ const eg = new ExpGolomb(sps);
11476
11857
  let frameCropLeftOffset = 0;
11477
11858
  let frameCropRightOffset = 0;
11478
11859
  let frameCropTopOffset = 0;
@@ -11480,13 +11861,13 @@ class ExpGolomb {
11480
11861
  let numRefFramesInPicOrderCntCycle;
11481
11862
  let scalingListCount;
11482
11863
  let i;
11483
- const readUByte = this.readUByte.bind(this);
11484
- const readBits = this.readBits.bind(this);
11485
- const readUEG = this.readUEG.bind(this);
11486
- const readBoolean = this.readBoolean.bind(this);
11487
- const skipBits = this.skipBits.bind(this);
11488
- const skipEG = this.skipEG.bind(this);
11489
- const skipUEG = this.skipUEG.bind(this);
11864
+ const readUByte = eg.readUByte.bind(eg);
11865
+ const readBits = eg.readBits.bind(eg);
11866
+ const readUEG = eg.readUEG.bind(eg);
11867
+ const readBoolean = eg.readBoolean.bind(eg);
11868
+ const skipBits = eg.skipBits.bind(eg);
11869
+ const skipEG = eg.skipEG.bind(eg);
11870
+ const skipUEG = eg.skipUEG.bind(eg);
11490
11871
  const skipScalingList = this.skipScalingList.bind(this);
11491
11872
  readUByte();
11492
11873
  const profileIdc = readUByte(); // profile_idc
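readSPS now receives the raw SPS bytes and builds its own ExpGolomb reader, which is also why skipScalingList gains an explicit reader argument in the neighbouring hunks. Earlier in this parser, the avc1 codec string is derived by hex-encoding SPS payload bytes 1-3 (profile_idc, constraint/compatibility flags, level_idc); a compact sketch of that derivation:

// Build the RFC 6381 avc1.PPCCLL string from the first three SPS payload bytes.
function avcCodecString(sps: Uint8Array): string {
  let codec = 'avc1.';
  for (let i = 1; i < 4; i++) {
    codec += sps[i].toString(16).padStart(2, '0');
  }
  return codec;
}

// e.g. bytes 0x64 0x00 0x1f (High profile, level 3.1) -> 'avc1.64001f'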
@@ -11511,9 +11892,9 @@ class ExpGolomb {
11511
11892
  if (readBoolean()) {
11512
11893
  // seq_scaling_list_present_flag[ i ]
11513
11894
  if (i < 6) {
11514
- skipScalingList(16);
11895
+ skipScalingList(16, eg);
11515
11896
  } else {
11516
- skipScalingList(64);
11897
+ skipScalingList(64, eg);
11517
11898
  }
11518
11899
  }
11519
11900
  }
@@ -11618,19 +11999,15 @@ class ExpGolomb {
11618
11999
  pixelRatio: pixelRatio
11619
12000
  };
11620
12001
  }
11621
- readSliceType() {
11622
- // skip NALu type
11623
- this.readUByte();
11624
- // discard first_mb_in_slice
11625
- this.readUEG();
11626
- // return slice_type
11627
- return this.readUEG();
11628
- }
11629
12002
  }
11630
12003
 
11631
- class AvcVideoParser extends BaseVideoParser {
11632
- parseAVCPES(track, textTrack, pes, last, duration) {
11633
- const units = this.parseAVCNALu(track, pes.data);
12004
+ class HevcVideoParser extends BaseVideoParser {
12005
+ constructor(...args) {
12006
+ super(...args);
12007
+ this.initVPS = null;
12008
+ }
12009
+ parsePES(track, textTrack, pes, last, duration) {
12010
+ const units = this.parseNALu(track, pes.data);
11634
12011
  let VideoSample = this.VideoSample;
11635
12012
  let push;
11636
12013
  let spsfound = false;
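HevcVideoParser reuses the shared parsePES/parseNALu flow; the first codec-specific difference is the NAL unit header. H.264 packs the type into the low 5 bits of a single header byte, while H.265 uses a two-byte header with a 6-bit type in bits 1-6 of the first byte, which is exactly what the two getNALuType overrides in this diff compute. Side by side:

// H.264/AVC header byte: forbidden_zero_bit(1) | nal_ref_idc(2) | nal_unit_type(5)
function avcNaluType(headerByte: number): number {
  return headerByte & 0x1f;
}

// H.265/HEVC first header byte: forbidden_zero_bit(1) | nal_unit_type(6) | layer_id msb(1)
function hevcNaluType(headerByte: number): number {
  return (headerByte & 0x7e) >>> 1;
}

// Example: an HEVC IDR_W_RADL unit often starts 0x26 0x01 -> (0x26 & 0x7e) >> 1 = 19.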
@@ -11646,42 +12023,49 @@ class AvcVideoParser extends BaseVideoParser {
11646
12023
  units.forEach(unit => {
11647
12024
  var _VideoSample2;
11648
12025
  switch (unit.type) {
11649
- // NDR
12026
+ // NON-IDR, NON RANDOM ACCESS SLICE
12027
+ case 0:
11650
12028
  case 1:
11651
- {
11652
- let iskey = false;
11653
- push = true;
11654
- const data = unit.data;
11655
- // only check slice type to detect KF in case SPS found in same packet (any keyframe is preceded by SPS ...)
11656
- if (spsfound && data.length > 4) {
11657
- // retrieve slice type by parsing beginning of NAL unit (follow H264 spec, slice_header definition) to detect keyframe embedded in NDR
11658
- const sliceType = new ExpGolomb(data).readSliceType();
11659
- // 2 : I slice, 4 : SI slice, 7 : I slice, 9: SI slice
11660
- // SI slice : A slice that is coded using intra prediction only and using quantisation of the prediction samples.
11661
- // An SI slice can be coded such that its decoded samples can be constructed identically to an SP slice.
11662
- // I slice: A slice that is not an SI slice that is decoded using intra prediction only.
11663
- // if (sliceType === 2 || sliceType === 7) {
11664
- if (sliceType === 2 || sliceType === 4 || sliceType === 7 || sliceType === 9) {
11665
- iskey = true;
11666
- }
11667
- }
11668
- if (iskey) {
11669
- var _VideoSample;
11670
- // if we have non-keyframe data already, that cannot belong to the same frame as a keyframe, so force a push
11671
- if ((_VideoSample = VideoSample) != null && _VideoSample.frame && !VideoSample.key) {
11672
- this.pushAccessUnit(VideoSample, track);
11673
- VideoSample = this.VideoSample = null;
11674
- }
11675
- }
11676
- if (!VideoSample) {
11677
- VideoSample = this.VideoSample = this.createVideoSample(true, pes.pts, pes.dts, '');
12029
+ case 2:
12030
+ case 3:
12031
+ case 4:
12032
+ case 5:
12033
+ case 6:
12034
+ case 7:
12035
+ case 8:
12036
+ case 9:
12037
+ if (!VideoSample) {
12038
+ VideoSample = this.VideoSample = this.createVideoSample(false, pes.pts, pes.dts, '');
12039
+ }
12040
+ VideoSample.frame = true;
12041
+ push = true;
12042
+ break;
12043
+
12044
+ // CRA, BLA (random access picture)
12045
+ case 16:
12046
+ case 17:
12047
+ case 18:
12048
+ case 21:
12049
+ push = true;
12050
+ if (spsfound) {
12051
+ var _VideoSample;
12052
+ // handle PES not starting with AUD
12053
+ // if we have frame data already, that cannot belong to the same frame, so force a push
12054
+ if ((_VideoSample = VideoSample) != null && _VideoSample.frame && !VideoSample.key) {
12055
+ this.pushAccessUnit(VideoSample, track);
12056
+ VideoSample = this.VideoSample = null;
11678
12057
  }
11679
- VideoSample.frame = true;
11680
- VideoSample.key = iskey;
11681
- break;
11682
- // IDR
11683
12058
  }
11684
- case 5:
12059
+ if (!VideoSample) {
12060
+ VideoSample = this.VideoSample = this.createVideoSample(true, pes.pts, pes.dts, '');
12061
+ }
12062
+ VideoSample.key = true;
12063
+ VideoSample.frame = true;
12064
+ break;
12065
+
12066
+ // IDR
12067
+ case 19:
12068
+ case 20:
11685
12069
  push = true;
11686
12070
  // handle PES not starting with AUD
11687
12071
  // if we have frame data already, that cannot belong to the same frame, so force a push
@@ -11695,180 +12079,518 @@ class AvcVideoParser extends BaseVideoParser {
11695
12079
  VideoSample.key = true;
11696
12080
  VideoSample.frame = true;
11697
12081
  break;
12082
+
11698
12083
  // SEI
11699
- case 6:
11700
- {
11701
- push = true;
11702
- parseSEIMessageFromNALu(unit.data, 1, pes.pts, textTrack.samples);
11703
- break;
11704
- // SPS
11705
- }
11706
- case 7:
11707
- {
11708
- var _track$pixelRatio, _track$pixelRatio2;
11709
- push = true;
11710
- spsfound = true;
11711
- const sps = unit.data;
11712
- const expGolombDecoder = new ExpGolomb(sps);
11713
- const config = expGolombDecoder.readSPS();
11714
- if (!track.sps || track.width !== config.width || track.height !== config.height || ((_track$pixelRatio = track.pixelRatio) == null ? void 0 : _track$pixelRatio[0]) !== config.pixelRatio[0] || ((_track$pixelRatio2 = track.pixelRatio) == null ? void 0 : _track$pixelRatio2[1]) !== config.pixelRatio[1]) {
11715
- track.width = config.width;
11716
- track.height = config.height;
11717
- track.pixelRatio = config.pixelRatio;
11718
- track.sps = [sps];
11719
- track.duration = duration;
11720
- const codecarray = sps.subarray(1, 4);
11721
- let codecstring = 'avc1.';
11722
- for (let i = 0; i < 3; i++) {
11723
- let h = codecarray[i].toString(16);
11724
- if (h.length < 2) {
11725
- h = '0' + h;
11726
- }
11727
- codecstring += h;
11728
- }
11729
- track.codec = codecstring;
11730
- }
11731
- break;
11732
- }
11733
- // PPS
11734
- case 8:
11735
- push = true;
11736
- track.pps = [unit.data];
11737
- break;
11738
- // AUD
11739
- case 9:
12084
+ case 39:
11740
12085
  push = true;
11741
- track.audFound = true;
11742
- if (VideoSample) {
11743
- this.pushAccessUnit(VideoSample, track);
11744
- }
11745
- VideoSample = this.VideoSample = this.createVideoSample(false, pes.pts, pes.dts, '');
12086
+ parseSEIMessageFromNALu(unit.data, 2,
12087
+ // NALu header size
12088
+ pes.pts, textTrack.samples);
11746
12089
  break;
11747
- // Filler Data
11748
- case 12:
12090
+
12091
+ // VPS
12092
+ case 32:
11749
12093
  push = true;
11750
- break;
11751
- default:
11752
- push = false;
11753
- if (VideoSample) {
11754
- VideoSample.debug += 'unknown NAL ' + unit.type + ' ';
12094
+ if (!track.vps) {
12095
+ const config = this.readVPS(unit.data);
12096
+ track.params = _objectSpread2({}, config);
12097
+ this.initVPS = unit.data;
11755
12098
  }
12099
+ track.vps = [unit.data];
11756
12100
  break;
11757
- }
11758
- if (VideoSample && push) {
11759
- const units = VideoSample.units;
11760
- units.push(unit);
11761
- }
11762
- });
11763
- // if last PES packet, push samples
11764
- if (last && VideoSample) {
11765
- this.pushAccessUnit(VideoSample, track);
11766
- this.VideoSample = null;
11767
- }
11768
- }
11769
- parseAVCNALu(track, array) {
11770
- const len = array.byteLength;
11771
- let state = track.naluState || 0;
11772
- const lastState = state;
11773
- const units = [];
11774
- let i = 0;
11775
- let value;
11776
- let overflow;
11777
- let unitType;
11778
- let lastUnitStart = -1;
11779
- let lastUnitType = 0;
11780
- // logger.log('PES:' + Hex.hexDump(array));
11781
-
11782
- if (state === -1) {
11783
- // special use case where we found 3 or 4-byte start codes exactly at the end of previous PES packet
11784
- lastUnitStart = 0;
11785
- // NALu type is value read from offset 0
11786
- lastUnitType = array[0] & 0x1f;
11787
- state = 0;
11788
- i = 1;
11789
- }
11790
- while (i < len) {
11791
- value = array[i++];
11792
- // optimization. state 0 and 1 are the predominant case. let's handle them outside of the switch/case
11793
- if (!state) {
11794
- state = value ? 0 : 1;
11795
- continue;
11796
- }
11797
- if (state === 1) {
11798
- state = value ? 0 : 2;
11799
- continue;
11800
- }
11801
- // here we have state either equal to 2 or 3
11802
- if (!value) {
11803
- state = 3;
11804
- } else if (value === 1) {
11805
- overflow = i - state - 1;
11806
- if (lastUnitStart >= 0) {
11807
- const unit = {
11808
- data: array.subarray(lastUnitStart, overflow),
11809
- type: lastUnitType
11810
- };
11811
- // logger.log('pushing NALU, type/size:' + unit.type + '/' + unit.data.byteLength);
11812
- units.push(unit);
11813
- } else {
11814
- // lastUnitStart is undefined => this is the first start code found in this PES packet
11815
- // first check if start code delimiter is overlapping between 2 PES packets,
11816
- // ie it started in last packet (lastState not zero)
11817
- // and ended at the beginning of this PES packet (i <= 4 - lastState)
11818
- const lastUnit = this.getLastNalUnit(track.samples);
11819
- if (lastUnit) {
11820
- if (lastState && i <= 4 - lastState) {
11821
- // start delimiter overlapping between PES packets
11822
- // strip start delimiter bytes from the end of last NAL unit
11823
- // check if lastUnit had a state different from zero
11824
- if (lastUnit.state) {
11825
- // strip last bytes
11826
- lastUnit.data = lastUnit.data.subarray(0, lastUnit.data.byteLength - lastState);
12101
+
12102
+ // SPS
12103
+ case 33:
12104
+ push = true;
12105
+ spsfound = true;
12106
+ if (typeof track.params === 'object') {
12107
+ if (track.vps !== undefined && track.vps[0] !== this.initVPS && track.sps !== undefined && !this.matchSPS(track.sps[0], unit.data)) {
12108
+ this.initVPS = track.vps[0];
12109
+ track.sps = track.pps = undefined;
12110
+ }
12111
+ if (!track.sps) {
12112
+ const config = this.readSPS(unit.data);
12113
+ track.width = config.width;
12114
+ track.height = config.height;
12115
+ track.pixelRatio = config.pixelRatio;
12116
+ track.duration = duration;
12117
+ track.codec = config.codecString;
12118
+ track.sps = [];
12119
+ for (const prop in config.params) {
12120
+ track.params[prop] = config.params[prop];
11827
12121
  }
11828
12122
  }
11829
- // If NAL units are not starting right at the beginning of the PES packet, push preceding data into previous NAL unit.
12123
+ if (track.vps !== undefined && track.vps[0] === this.initVPS) {
12124
+ track.sps.push(unit.data);
12125
+ }
12126
+ }
12127
+ if (!VideoSample) {
12128
+ VideoSample = this.VideoSample = this.createVideoSample(true, pes.pts, pes.dts, '');
12129
+ }
12130
+ VideoSample.key = true;
12131
+ break;
11830
12132
 
11831
- if (overflow > 0) {
11832
- // logger.log('first NALU found with overflow:' + overflow);
11833
- lastUnit.data = appendUint8Array(lastUnit.data, array.subarray(0, overflow));
11834
- lastUnit.state = 0;
12133
+ // PPS
12134
+ case 34:
12135
+ push = true;
12136
+ if (typeof track.params === 'object') {
12137
+ if (!track.pps) {
12138
+ track.pps = [];
12139
+ const config = this.readPPS(unit.data);
12140
+ for (const prop in config) {
12141
+ track.params[prop] = config[prop];
12142
+ }
12143
+ }
12144
+ if (this.initVPS !== null || track.pps.length === 0) {
12145
+ track.pps.push(unit.data);
11835
12146
  }
11836
12147
  }
12148
+ break;
12149
+
12150
+ // ACCESS UNIT DELIMITER
12151
+ case 35:
12152
+ push = true;
12153
+ track.audFound = true;
12154
+ if (VideoSample) {
12155
+ this.pushAccessUnit(VideoSample, track);
12156
+ }
12157
+ VideoSample = this.VideoSample = this.createVideoSample(false, pes.pts, pes.dts, '');
12158
+ break;
12159
+ default:
12160
+ push = false;
12161
+ if (VideoSample) {
12162
+ VideoSample.debug += 'unknown or irrelevant NAL ' + unit.type + ' ';
12163
+ }
12164
+ break;
12165
+ }
12166
+ if (VideoSample && push) {
12167
+ const units = VideoSample.units;
12168
+ units.push(unit);
12169
+ }
12170
+ });
12171
+ // if last PES packet, push samples
12172
+ if (last && VideoSample) {
12173
+ this.pushAccessUnit(VideoSample, track);
12174
+ this.VideoSample = null;
12175
+ }
12176
+ }
12177
+ getNALuType(data, offset) {
12178
+ return (data[offset] & 0x7e) >>> 1;
12179
+ }
12180
+ ebsp2rbsp(arr) {
12181
+ const dst = new Uint8Array(arr.byteLength);
12182
+ let dstIdx = 0;
12183
+ for (let i = 0; i < arr.byteLength; i++) {
12184
+ if (i >= 2) {
12185
+ // Unescape: Skip 0x03 after 00 00
12186
+ if (arr[i] === 0x03 && arr[i - 1] === 0x00 && arr[i - 2] === 0x00) {
12187
+ continue;
11837
12188
  }
11838
- // check if we can read unit type
11839
- if (i < len) {
11840
- unitType = array[i] & 0x1f;
11841
- // logger.log('find NALU @ offset:' + i + ',type:' + unitType);
11842
- lastUnitStart = i;
11843
- lastUnitType = unitType;
11844
- state = 0;
11845
- } else {
11846
- // not enough byte to read unit type. let's read it on next PES parsing
11847
- state = -1;
11848
- }
11849
- } else {
11850
- state = 0;
11851
12189
  }
12190
+ dst[dstIdx] = arr[i];
12191
+ dstIdx++;
11852
12192
  }
11853
- if (lastUnitStart >= 0 && state >= 0) {
11854
- const unit = {
11855
- data: array.subarray(lastUnitStart, len),
11856
- type: lastUnitType,
11857
- state: state
11858
- };
11859
- units.push(unit);
11860
- // logger.log('pushing NALU, type/size/state:' + unit.type + '/' + unit.data.byteLength + '/' + state);
12193
+ return new Uint8Array(dst.buffer, 0, dstIdx);
12194
+ }
12195
+ readVPS(vps) {
12196
+ const eg = new ExpGolomb(vps);
12197
+ // remove header
12198
+ eg.readUByte();
12199
+ eg.readUByte();
12200
+ eg.readBits(4); // video_parameter_set_id
12201
+ eg.skipBits(2);
12202
+ eg.readBits(6); // max_layers_minus1
12203
+ const max_sub_layers_minus1 = eg.readBits(3);
12204
+ const temporal_id_nesting_flag = eg.readBoolean();
12205
+ // ...vui fps can be here, but empty fps value is not critical for metadata
12206
+
12207
+ return {
12208
+ numTemporalLayers: max_sub_layers_minus1 + 1,
12209
+ temporalIdNested: temporal_id_nesting_flag
12210
+ };
12211
+ }
12212
+ readSPS(sps) {
12213
+ const eg = new ExpGolomb(this.ebsp2rbsp(sps));
12214
+ eg.readUByte();
12215
+ eg.readUByte();
12216
+ eg.readBits(4); //video_parameter_set_id
12217
+ const max_sub_layers_minus1 = eg.readBits(3);
12218
+ eg.readBoolean(); // temporal_id_nesting_flag
12219
+
12220
+ // profile_tier_level
12221
+ const general_profile_space = eg.readBits(2);
12222
+ const general_tier_flag = eg.readBoolean();
12223
+ const general_profile_idc = eg.readBits(5);
12224
+ const general_profile_compatibility_flags_1 = eg.readUByte();
12225
+ const general_profile_compatibility_flags_2 = eg.readUByte();
12226
+ const general_profile_compatibility_flags_3 = eg.readUByte();
12227
+ const general_profile_compatibility_flags_4 = eg.readUByte();
12228
+ const general_constraint_indicator_flags_1 = eg.readUByte();
12229
+ const general_constraint_indicator_flags_2 = eg.readUByte();
12230
+ const general_constraint_indicator_flags_3 = eg.readUByte();
12231
+ const general_constraint_indicator_flags_4 = eg.readUByte();
12232
+ const general_constraint_indicator_flags_5 = eg.readUByte();
12233
+ const general_constraint_indicator_flags_6 = eg.readUByte();
12234
+ const general_level_idc = eg.readUByte();
12235
+ const sub_layer_profile_present_flags = [];
12236
+ const sub_layer_level_present_flags = [];
12237
+ for (let i = 0; i < max_sub_layers_minus1; i++) {
12238
+ sub_layer_profile_present_flags.push(eg.readBoolean());
12239
+ sub_layer_level_present_flags.push(eg.readBoolean());
12240
+ }
12241
+ if (max_sub_layers_minus1 > 0) {
12242
+ for (let i = max_sub_layers_minus1; i < 8; i++) {
12243
+ eg.readBits(2);
12244
+ }
12245
+ }
12246
+ for (let i = 0; i < max_sub_layers_minus1; i++) {
12247
+ if (sub_layer_profile_present_flags[i]) {
12248
+ eg.readUByte(); // sub_layer_profile_space, sub_layer_tier_flag, sub_layer_profile_idc
12249
+ eg.readUByte();
12250
+ eg.readUByte();
12251
+ eg.readUByte();
12252
+ eg.readUByte(); // sub_layer_profile_compatibility_flag
12253
+ eg.readUByte();
12254
+ eg.readUByte();
12255
+ eg.readUByte();
12256
+ eg.readUByte();
12257
+ eg.readUByte();
12258
+ eg.readUByte();
12259
+ }
12260
+ if (sub_layer_level_present_flags[i]) {
12261
+ eg.readUByte();
12262
+ }
12263
+ }
12264
+ eg.readUEG(); // seq_parameter_set_id
12265
+ const chroma_format_idc = eg.readUEG();
12266
+ if (chroma_format_idc == 3) {
12267
+ eg.skipBits(1); //separate_colour_plane_flag
12268
+ }
12269
+ const pic_width_in_luma_samples = eg.readUEG();
12270
+ const pic_height_in_luma_samples = eg.readUEG();
12271
+ const conformance_window_flag = eg.readBoolean();
12272
+ let pic_left_offset = 0,
12273
+ pic_right_offset = 0,
12274
+ pic_top_offset = 0,
12275
+ pic_bottom_offset = 0;
12276
+ if (conformance_window_flag) {
12277
+ pic_left_offset += eg.readUEG();
12278
+ pic_right_offset += eg.readUEG();
12279
+ pic_top_offset += eg.readUEG();
12280
+ pic_bottom_offset += eg.readUEG();
12281
+ }
12282
+ const bit_depth_luma_minus8 = eg.readUEG();
12283
+ const bit_depth_chroma_minus8 = eg.readUEG();
12284
+ const log2_max_pic_order_cnt_lsb_minus4 = eg.readUEG();
12285
+ const sub_layer_ordering_info_present_flag = eg.readBoolean();
12286
+ for (let i = sub_layer_ordering_info_present_flag ? 0 : max_sub_layers_minus1; i <= max_sub_layers_minus1; i++) {
12287
+ eg.skipUEG(); // max_dec_pic_buffering_minus1[i]
12288
+ eg.skipUEG(); // max_num_reorder_pics[i]
12289
+ eg.skipUEG(); // max_latency_increase_plus1[i]
12290
+ }
12291
+ eg.skipUEG(); // log2_min_luma_coding_block_size_minus3
12292
+ eg.skipUEG(); // log2_diff_max_min_luma_coding_block_size
12293
+ eg.skipUEG(); // log2_min_transform_block_size_minus2
12294
+ eg.skipUEG(); // log2_diff_max_min_transform_block_size
12295
+ eg.skipUEG(); // max_transform_hierarchy_depth_inter
12296
+ eg.skipUEG(); // max_transform_hierarchy_depth_intra
12297
+ const scaling_list_enabled_flag = eg.readBoolean();
12298
+ if (scaling_list_enabled_flag) {
12299
+ const sps_scaling_list_data_present_flag = eg.readBoolean();
12300
+ if (sps_scaling_list_data_present_flag) {
12301
+ for (let sizeId = 0; sizeId < 4; sizeId++) {
12302
+ for (let matrixId = 0; matrixId < (sizeId === 3 ? 2 : 6); matrixId++) {
12303
+ const scaling_list_pred_mode_flag = eg.readBoolean();
12304
+ if (!scaling_list_pred_mode_flag) {
12305
+ eg.readUEG(); // scaling_list_pred_matrix_id_delta
12306
+ } else {
12307
+ const coefNum = Math.min(64, 1 << 4 + (sizeId << 1));
12308
+ if (sizeId > 1) {
12309
+ eg.readEG();
12310
+ }
12311
+ for (let i = 0; i < coefNum; i++) {
12312
+ eg.readEG();
12313
+ }
12314
+ }
12315
+ }
12316
+ }
12317
+ }
11861
12318
  }
11862
- // no NALu found
11863
- if (units.length === 0) {
11864
- // append pes.data to previous NAL unit
11865
- const lastUnit = this.getLastNalUnit(track.samples);
11866
- if (lastUnit) {
11867
- lastUnit.data = appendUint8Array(lastUnit.data, array);
12319
+ eg.readBoolean(); // amp_enabled_flag
12320
+ eg.readBoolean(); // sample_adaptive_offset_enabled_flag
12321
+ const pcm_enabled_flag = eg.readBoolean();
12322
+ if (pcm_enabled_flag) {
12323
+ eg.readUByte();
12324
+ eg.skipUEG();
12325
+ eg.skipUEG();
12326
+ eg.readBoolean();
12327
+ }
12328
+ const num_short_term_ref_pic_sets = eg.readUEG();
12329
+ let num_delta_pocs = 0;
12330
+ for (let i = 0; i < num_short_term_ref_pic_sets; i++) {
12331
+ let inter_ref_pic_set_prediction_flag = false;
12332
+ if (i !== 0) {
12333
+ inter_ref_pic_set_prediction_flag = eg.readBoolean();
12334
+ }
12335
+ if (inter_ref_pic_set_prediction_flag) {
12336
+ if (i === num_short_term_ref_pic_sets) {
12337
+ eg.readUEG();
12338
+ }
12339
+ eg.readBoolean();
12340
+ eg.readUEG();
12341
+ let next_num_delta_pocs = 0;
12342
+ for (let j = 0; j <= num_delta_pocs; j++) {
12343
+ const used_by_curr_pic_flag = eg.readBoolean();
12344
+ let use_delta_flag = false;
12345
+ if (!used_by_curr_pic_flag) {
12346
+ use_delta_flag = eg.readBoolean();
12347
+ }
12348
+ if (used_by_curr_pic_flag || use_delta_flag) {
12349
+ next_num_delta_pocs++;
12350
+ }
12351
+ }
12352
+ num_delta_pocs = next_num_delta_pocs;
12353
+ } else {
12354
+ const num_negative_pics = eg.readUEG();
12355
+ const num_positive_pics = eg.readUEG();
12356
+ num_delta_pocs = num_negative_pics + num_positive_pics;
12357
+ for (let j = 0; j < num_negative_pics; j++) {
12358
+ eg.readUEG();
12359
+ eg.readBoolean();
12360
+ }
12361
+ for (let j = 0; j < num_positive_pics; j++) {
12362
+ eg.readUEG();
12363
+ eg.readBoolean();
12364
+ }
12365
+ }
12366
+ }
12367
+ const long_term_ref_pics_present_flag = eg.readBoolean();
12368
+ if (long_term_ref_pics_present_flag) {
12369
+ const num_long_term_ref_pics_sps = eg.readUEG();
12370
+ for (let i = 0; i < num_long_term_ref_pics_sps; i++) {
12371
+ for (let j = 0; j < log2_max_pic_order_cnt_lsb_minus4 + 4; j++) {
12372
+ eg.readBits(1);
12373
+ }
12374
+ eg.readBits(1);
12375
+ }
12376
+ }
12377
+ let min_spatial_segmentation_idc = 0;
12378
+ let sar_width = 1,
12379
+ sar_height = 1;
12380
+ let fps_fixed = true,
12381
+ fps_den = 1,
12382
+ fps_num = 0;
12383
+ eg.readBoolean(); // sps_temporal_mvp_enabled_flag
12384
+ eg.readBoolean(); // strong_intra_smoothing_enabled_flag
12385
+ let default_display_window_flag = false;
12386
+ const vui_parameters_present_flag = eg.readBoolean();
12387
+ if (vui_parameters_present_flag) {
12388
+ const aspect_ratio_info_present_flag = eg.readBoolean();
12389
+ if (aspect_ratio_info_present_flag) {
12390
+ const aspect_ratio_idc = eg.readUByte();
12391
+ const sar_width_table = [1, 12, 10, 16, 40, 24, 20, 32, 80, 18, 15, 64, 160, 4, 3, 2];
12392
+ const sar_height_table = [1, 11, 11, 11, 33, 11, 11, 11, 33, 11, 11, 33, 99, 3, 2, 1];
12393
+ if (aspect_ratio_idc > 0 && aspect_ratio_idc < 16) {
12394
+ sar_width = sar_width_table[aspect_ratio_idc - 1];
12395
+ sar_height = sar_height_table[aspect_ratio_idc - 1];
12396
+ } else if (aspect_ratio_idc === 255) {
12397
+ sar_width = eg.readBits(16);
12398
+ sar_height = eg.readBits(16);
12399
+ }
12400
+ }
12401
+ const overscan_info_present_flag = eg.readBoolean();
12402
+ if (overscan_info_present_flag) {
12403
+ eg.readBoolean();
12404
+ }
12405
+ const video_signal_type_present_flag = eg.readBoolean();
12406
+ if (video_signal_type_present_flag) {
12407
+ eg.readBits(3);
12408
+ eg.readBoolean();
12409
+ const colour_description_present_flag = eg.readBoolean();
12410
+ if (colour_description_present_flag) {
12411
+ eg.readUByte();
12412
+ eg.readUByte();
12413
+ eg.readUByte();
12414
+ }
12415
+ }
12416
+ const chroma_loc_info_present_flag = eg.readBoolean();
12417
+ if (chroma_loc_info_present_flag) {
12418
+ eg.readUEG();
12419
+ eg.readUEG();
12420
+ }
12421
+ eg.readBoolean(); // neutral_chroma_indication_flag
12422
+ eg.readBoolean(); // field_seq_flag
12423
+ eg.readBoolean(); // frame_field_info_present_flag
12424
+ default_display_window_flag = eg.readBoolean();
12425
+ if (default_display_window_flag) {
12426
+ pic_left_offset += eg.readUEG();
12427
+ pic_right_offset += eg.readUEG();
12428
+ pic_top_offset += eg.readUEG();
12429
+ pic_bottom_offset += eg.readUEG();
12430
+ }
12431
+ const vui_timing_info_present_flag = eg.readBoolean();
12432
+ if (vui_timing_info_present_flag) {
12433
+ fps_den = eg.readBits(32);
12434
+ fps_num = eg.readBits(32);
12435
+ const vui_poc_proportional_to_timing_flag = eg.readBoolean();
12436
+ if (vui_poc_proportional_to_timing_flag) {
12437
+ eg.readUEG();
12438
+ }
12439
+ const vui_hrd_parameters_present_flag = eg.readBoolean();
12440
+ if (vui_hrd_parameters_present_flag) {
12441
+ //const commonInfPresentFlag = true;
12442
+ //if (commonInfPresentFlag) {
12443
+ const nal_hrd_parameters_present_flag = eg.readBoolean();
12444
+ const vcl_hrd_parameters_present_flag = eg.readBoolean();
12445
+ let sub_pic_hrd_params_present_flag = false;
12446
+ if (nal_hrd_parameters_present_flag || vcl_hrd_parameters_present_flag) {
12447
+ sub_pic_hrd_params_present_flag = eg.readBoolean();
12448
+ if (sub_pic_hrd_params_present_flag) {
12449
+ eg.readUByte();
12450
+ eg.readBits(5);
12451
+ eg.readBoolean();
12452
+ eg.readBits(5);
12453
+ }
12454
+ eg.readBits(4); // bit_rate_scale
12455
+ eg.readBits(4); // cpb_size_scale
12456
+ if (sub_pic_hrd_params_present_flag) {
12457
+ eg.readBits(4);
12458
+ }
12459
+ eg.readBits(5);
12460
+ eg.readBits(5);
12461
+ eg.readBits(5);
12462
+ }
12463
+ //}
12464
+ for (let i = 0; i <= max_sub_layers_minus1; i++) {
12465
+ fps_fixed = eg.readBoolean(); // fixed_pic_rate_general_flag
12466
+ const fixed_pic_rate_within_cvs_flag = fps_fixed || eg.readBoolean();
12467
+ let low_delay_hrd_flag = false;
12468
+ if (fixed_pic_rate_within_cvs_flag) {
12469
+ eg.readEG();
12470
+ } else {
12471
+ low_delay_hrd_flag = eg.readBoolean();
12472
+ }
12473
+ const cpb_cnt = low_delay_hrd_flag ? 1 : eg.readUEG() + 1;
12474
+ if (nal_hrd_parameters_present_flag) {
12475
+ for (let j = 0; j < cpb_cnt; j++) {
12476
+ eg.readUEG();
12477
+ eg.readUEG();
12478
+ if (sub_pic_hrd_params_present_flag) {
12479
+ eg.readUEG();
12480
+ eg.readUEG();
12481
+ }
12482
+ eg.skipBits(1);
12483
+ }
12484
+ }
12485
+ if (vcl_hrd_parameters_present_flag) {
12486
+ for (let j = 0; j < cpb_cnt; j++) {
12487
+ eg.readUEG();
12488
+ eg.readUEG();
12489
+ if (sub_pic_hrd_params_present_flag) {
12490
+ eg.readUEG();
12491
+ eg.readUEG();
12492
+ }
12493
+ eg.skipBits(1);
12494
+ }
12495
+ }
12496
+ }
12497
+ }
11868
12498
  }
12499
+ const bitstream_restriction_flag = eg.readBoolean();
12500
+ if (bitstream_restriction_flag) {
12501
+ eg.readBoolean(); // tiles_fixed_structure_flag
12502
+ eg.readBoolean(); // motion_vectors_over_pic_boundaries_flag
12503
+ eg.readBoolean(); // restricted_ref_pic_lists_flag
12504
+ min_spatial_segmentation_idc = eg.readUEG();
12505
+ }
12506
+ }
12507
+ let width = pic_width_in_luma_samples,
12508
+ height = pic_height_in_luma_samples;
12509
+ if (conformance_window_flag || default_display_window_flag) {
12510
+ let chroma_scale_w = 1,
12511
+ chroma_scale_h = 1;
12512
+ if (chroma_format_idc === 1) {
12513
+ // YUV 420
12514
+ chroma_scale_w = chroma_scale_h = 2;
12515
+ } else if (chroma_format_idc == 2) {
12516
+ // YUV 422
12517
+ chroma_scale_w = 2;
12518
+ }
12519
+ width = pic_width_in_luma_samples - chroma_scale_w * pic_right_offset - chroma_scale_w * pic_left_offset;
12520
+ height = pic_height_in_luma_samples - chroma_scale_h * pic_bottom_offset - chroma_scale_h * pic_top_offset;
12521
+ }
12522
+ const profile_space_string = general_profile_space ? ['A', 'B', 'C'][general_profile_space] : '';
12523
+ const profile_compatibility_buf = general_profile_compatibility_flags_1 << 24 | general_profile_compatibility_flags_2 << 16 | general_profile_compatibility_flags_3 << 8 | general_profile_compatibility_flags_4;
12524
+ let profile_compatibility_rev = 0;
12525
+ for (let i = 0; i < 32; i++) {
12526
+ profile_compatibility_rev = (profile_compatibility_rev | (profile_compatibility_buf >> i & 1) << 31 - i) >>> 0; // reverse bit position (and cast as UInt32)
12527
+ }
12528
+ let profile_compatibility_flags_string = profile_compatibility_rev.toString(16);
12529
+ if (general_profile_idc === 1 && profile_compatibility_flags_string === '2') {
12530
+ profile_compatibility_flags_string = '6';
12531
+ }
12532
+ const tier_flag_string = general_tier_flag ? 'H' : 'L';
12533
+ return {
12534
+ codecString: `hvc1.${profile_space_string}${general_profile_idc}.${profile_compatibility_flags_string}.${tier_flag_string}${general_level_idc}.B0`,
12535
+ params: {
12536
+ general_tier_flag,
12537
+ general_profile_idc,
12538
+ general_profile_space,
12539
+ general_profile_compatibility_flags: [general_profile_compatibility_flags_1, general_profile_compatibility_flags_2, general_profile_compatibility_flags_3, general_profile_compatibility_flags_4],
12540
+ general_constraint_indicator_flags: [general_constraint_indicator_flags_1, general_constraint_indicator_flags_2, general_constraint_indicator_flags_3, general_constraint_indicator_flags_4, general_constraint_indicator_flags_5, general_constraint_indicator_flags_6],
12541
+ general_level_idc,
12542
+ bit_depth: bit_depth_luma_minus8 + 8,
12543
+ bit_depth_luma_minus8,
12544
+ bit_depth_chroma_minus8,
12545
+ min_spatial_segmentation_idc,
12546
+ chroma_format_idc: chroma_format_idc,
12547
+ frame_rate: {
12548
+ fixed: fps_fixed,
12549
+ fps: fps_num / fps_den
12550
+ }
12551
+ },
12552
+ width,
12553
+ height,
12554
+ pixelRatio: [sar_width, sar_height]
12555
+ };
12556
+ }
12557
+ readPPS(pps) {
12558
+ const eg = new ExpGolomb(this.ebsp2rbsp(pps));
12559
+ eg.readUByte();
12560
+ eg.readUByte();
12561
+ eg.skipUEG(); // pic_parameter_set_id
12562
+ eg.skipUEG(); // seq_parameter_set_id
12563
+ eg.skipBits(2); // dependent_slice_segments_enabled_flag, output_flag_present_flag
12564
+ eg.skipBits(3); // num_extra_slice_header_bits
12565
+ eg.skipBits(2); // sign_data_hiding_enabled_flag, cabac_init_present_flag
12566
+ eg.skipUEG();
12567
+ eg.skipUEG();
12568
+ eg.skipEG(); // init_qp_minus26
12569
+ eg.skipBits(2); // constrained_intra_pred_flag, transform_skip_enabled_flag
12570
+ const cu_qp_delta_enabled_flag = eg.readBoolean();
12571
+ if (cu_qp_delta_enabled_flag) {
12572
+ eg.skipUEG();
12573
+ }
12574
+ eg.skipEG(); // cb_qp_offset
12575
+ eg.skipEG(); // cr_qp_offset
12576
+ eg.skipBits(4); // pps_slice_chroma_qp_offsets_present_flag, weighted_pred_flag, weighted_bipred_flag, transquant_bypass_enabled_flag
12577
+ const tiles_enabled_flag = eg.readBoolean();
12578
+ const entropy_coding_sync_enabled_flag = eg.readBoolean();
12579
+ let parallelismType = 1; // slice-based parallel decoding
12580
+ if (entropy_coding_sync_enabled_flag && tiles_enabled_flag) {
12581
+ parallelismType = 0; // mixed-type parallel decoding
12582
+ } else if (entropy_coding_sync_enabled_flag) {
12583
+ parallelismType = 3; // wavefront-based parallel decoding
12584
+ } else if (tiles_enabled_flag) {
12585
+ parallelismType = 2; // tile-based parallel decoding
11869
12586
  }
11870
- track.naluState = state;
11871
- return units;
12587
+ return {
12588
+ parallelismType
12589
+ };
12590
+ }
12591
+ matchSPS(sps1, sps2) {
12592
+ // compare without headers and VPS related params
12593
+ return String.fromCharCode.apply(null, sps1).substr(3) === String.fromCharCode.apply(null, sps2).substr(3);
11872
12594
  }
11873
12595
  }
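The HEVC readSPS above ends by assembling the RFC 6381 codec string (hvc1.<profile space+idc>.<compatibility>.<tier+level>.B0). Per ISO/IEC 14496-15, the 32 general_profile_compatibility_flags are written in reverse bit order before being hex-encoded, which is what the profile_compatibility_rev loop does. A worked example, assuming the common Main-profile flag value 0x60000000:

// Reverse the 32 profile-compatibility bits and hex-encode them (>>> 0 keeps
// the intermediate value unsigned, as in the bundle code above).
function reverseBits32(v: number): number {
  let r = 0;
  for (let i = 0; i < 32; i++) {
    r = (r | (((v >> i) & 1) << (31 - i))) >>> 0;
  }
  return r;
}

const compatFlags = 0x60000000; // typical Main-profile compatibility flags (assumed example)
const compat = reverseBits32(compatFlags).toString(16);  // '6'
const codecString = `hvc1.1.${compat}.L93.B0`;           // Main profile, main tier, level 3.1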
11874
12596
 
@@ -11886,7 +12608,7 @@ class SampleAesDecrypter {
11886
12608
  });
11887
12609
  }
11888
12610
  decryptBuffer(encryptedData) {
11889
- return this.decrypter.decrypt(encryptedData, this.keyData.key.buffer, this.keyData.iv.buffer);
12611
+ return this.decrypter.decrypt(encryptedData, this.keyData.key.buffer, this.keyData.iv.buffer, DecrypterAesMode.cbc);
11890
12612
  }
11891
12613
 
11892
12614
  // AAC - encrypt all full 16 bytes blocks starting from offset 16
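The comment above describes the SAMPLE-AES pattern for AAC: the first 16 bytes of each frame stay clear and only whole 16-byte blocks after that are decrypted, now explicitly in CBC mode via the DecrypterAesMode.cbc argument added to decryptBuffer. A sketch of selecting the encrypted span under that rule:

// SAMPLE-AES AAC: skip a 16-byte clear lead, then cover only complete
// 16-byte blocks; any trailing partial block also stays clear.
function encryptedRange(frameLength: number): { start: number; end: number } | null {
  const clearLead = 16;
  const blocks = Math.floor((frameLength - clearLead) / 16);
  if (blocks <= 0) {
    return null; // frame too short to contain an encrypted block
  }
  return { start: clearLead, end: clearLead + blocks * 16 };
}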
@@ -12000,7 +12722,7 @@ class TSDemuxer {
12000
12722
  this.observer = observer;
12001
12723
  this.config = config;
12002
12724
  this.typeSupported = typeSupported;
12003
- this.videoParser = new AvcVideoParser();
12725
+ this.videoParser = null;
12004
12726
  }
12005
12727
  static probe(data) {
12006
12728
  const syncOffset = TSDemuxer.syncOffset(data);
@@ -12165,7 +12887,19 @@ class TSDemuxer {
12165
12887
  case videoPid:
12166
12888
  if (stt) {
12167
12889
  if (videoData && (pes = parsePES(videoData))) {
12168
- this.videoParser.parseAVCPES(videoTrack, textTrack, pes, false, this._duration);
12890
+ if (this.videoParser === null) {
12891
+ switch (videoTrack.segmentCodec) {
12892
+ case 'avc':
12893
+ this.videoParser = new AvcVideoParser();
12894
+ break;
12895
+ case 'hevc':
12896
+ this.videoParser = new HevcVideoParser();
12897
+ break;
12898
+ }
12899
+ }
12900
+ if (this.videoParser !== null) {
12901
+ this.videoParser.parsePES(videoTrack, textTrack, pes, false, this._duration);
12902
+ }
12169
12903
  }
12170
12904
  videoData = {
12171
12905
  data: [],
@@ -12332,8 +13066,20 @@ class TSDemuxer {
12332
13066
  // try to parse last PES packets
12333
13067
  let pes;
12334
13068
  if (videoData && (pes = parsePES(videoData))) {
12335
- this.videoParser.parseAVCPES(videoTrack, textTrack, pes, true, this._duration);
12336
- videoTrack.pesData = null;
13069
+ if (this.videoParser === null) {
13070
+ switch (videoTrack.segmentCodec) {
13071
+ case 'avc':
13072
+ this.videoParser = new AvcVideoParser();
13073
+ break;
13074
+ case 'hevc':
13075
+ this.videoParser = new HevcVideoParser();
13076
+ break;
13077
+ }
13078
+ }
13079
+ if (this.videoParser !== null) {
13080
+ this.videoParser.parsePES(videoTrack, textTrack, pes, true, this._duration);
13081
+ videoTrack.pesData = null;
13082
+ }
12337
13083
  } else {
12338
13084
  // either avcData null or PES truncated, keep it for next frag parsing
12339
13085
  videoTrack.pesData = videoData;
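Both demux paths above now create the video parser lazily from videoTrack.segmentCodec instead of always constructing an AvcVideoParser, so HEVC PES payloads get an HevcVideoParser and unknown codecs are skipped rather than misparsed. The shared selection, extracted as a sketch (AvcVideoParser and HevcVideoParser are the classes defined earlier in this bundle):

// Lazy parser selection used at both call sites above; the parser is created
// once, on the first video PES, from the PMT-derived segmentCodec.
function createVideoParser(segmentCodec: string): AvcVideoParser | HevcVideoParser | null {
  switch (segmentCodec) {
    case 'avc':
      return new AvcVideoParser();
    case 'hevc':
      return new HevcVideoParser();
    default:
      return null; // unsupported codec: leave the PES payload unparsed
  }
}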
@@ -12666,7 +13412,12 @@ function parsePMT(data, offset, typeSupported, isSampleAes) {
12666
13412
  logger.warn('Unsupported EC-3 in M2TS found');
12667
13413
  break;
12668
13414
  case 0x24:
12669
- logger.warn('Unsupported HEVC in M2TS found');
13415
+ // ITU-T Rec. H.265 and ISO/IEC 23008-2 (HEVC)
13416
+ if (result.videoPid === -1) {
13417
+ result.videoPid = pid;
13418
+ result.segmentVideoCodec = 'hevc';
13419
+ logger.log('HEVC in M2TS found');
13420
+ }
12670
13421
  break;
12671
13422
  }
12672
13423
  // move to the next table entry
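parsePMT now maps elementary-stream type 0x24 (ITU-T H.265 / ISO/IEC 23008-2) to a video PID with segmentVideoCodec 'hevc' instead of only logging a warning. Together with the 0x1b (H.264) branch, which sits in the unchanged part of parsePMT and is assumed here, the mapping amounts to:

// PMT stream_type to segment video codec. The 0x1b case is assumed from the
// unchanged portion of parsePMT; 0x24 is the branch added in this diff.
function videoCodecForStreamType(streamType: number): 'avc' | 'hevc' | null {
  switch (streamType) {
    case 0x1b: // ITU-T H.264 / ISO/IEC 14496-10
      return 'avc';
    case 0x24: // ITU-T H.265 / ISO/IEC 23008-2
      return 'hevc';
    default:
      return null;
  }
}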
@@ -12889,6 +13640,8 @@ class MP4 {
12889
13640
  avc1: [],
12890
13641
  // codingname
12891
13642
  avcC: [],
13643
+ hvc1: [],
13644
+ hvcC: [],
12892
13645
  btrt: [],
12893
13646
  dinf: [],
12894
13647
  dref: [],
@@ -13313,8 +14066,10 @@ class MP4 {
13313
14066
  return MP4.box(MP4.types.stsd, MP4.STSD, MP4.ac3(track));
13314
14067
  }
13315
14068
  return MP4.box(MP4.types.stsd, MP4.STSD, MP4.mp4a(track));
13316
- } else {
14069
+ } else if (track.segmentCodec === 'avc') {
13317
14070
  return MP4.box(MP4.types.stsd, MP4.STSD, MP4.avc1(track));
14071
+ } else {
14072
+ return MP4.box(MP4.types.stsd, MP4.STSD, MP4.hvc1(track));
13318
14073
  }
13319
14074
  }
13320
14075
  static tkhd(track) {
@@ -13452,6 +14207,84 @@ class MP4 {
13452
14207
  const result = appendUint8Array(MP4.FTYP, movie);
13453
14208
  return result;
13454
14209
  }
14210
+ static hvc1(track) {
14211
+ const ps = track.params;
14212
+ const units = [track.vps, track.sps, track.pps];
14213
+ const NALuLengthSize = 4;
14214
+ const config = new Uint8Array([0x01, ps.general_profile_space << 6 | (ps.general_tier_flag ? 32 : 0) | ps.general_profile_idc, ps.general_profile_compatibility_flags[0], ps.general_profile_compatibility_flags[1], ps.general_profile_compatibility_flags[2], ps.general_profile_compatibility_flags[3], ps.general_constraint_indicator_flags[0], ps.general_constraint_indicator_flags[1], ps.general_constraint_indicator_flags[2], ps.general_constraint_indicator_flags[3], ps.general_constraint_indicator_flags[4], ps.general_constraint_indicator_flags[5], ps.general_level_idc, 240 | ps.min_spatial_segmentation_idc >> 8, 255 & ps.min_spatial_segmentation_idc, 252 | ps.parallelismType, 252 | ps.chroma_format_idc, 248 | ps.bit_depth_luma_minus8, 248 | ps.bit_depth_chroma_minus8, 0x00, parseInt(ps.frame_rate.fps), NALuLengthSize - 1 | ps.temporal_id_nested << 2 | ps.num_temporal_layers << 3 | (ps.frame_rate.fixed ? 64 : 0), units.length]);
14215
+
14216
+ // compute hvcC size in bytes
14217
+ let length = config.length;
14218
+ for (let i = 0; i < units.length; i += 1) {
14219
+ length += 3;
14220
+ for (let j = 0; j < units[i].length; j += 1) {
14221
+ length += 2 + units[i][j].length;
14222
+ }
14223
+ }
14224
+ const hvcC = new Uint8Array(length);
14225
+ hvcC.set(config, 0);
14226
+ length = config.length;
14227
+ // append parameter set units: one vps, one or more sps and pps
14228
+ const iMax = units.length - 1;
14229
+ for (let i = 0; i < units.length; i += 1) {
14230
+ hvcC.set(new Uint8Array([32 + i | (i === iMax ? 128 : 0), 0x00, units[i].length]), length);
14231
+ length += 3;
14232
+ for (let j = 0; j < units[i].length; j += 1) {
14233
+ hvcC.set(new Uint8Array([units[i][j].length >> 8, units[i][j].length & 255]), length);
14234
+ length += 2;
14235
+ hvcC.set(units[i][j], length);
14236
+ length += units[i][j].length;
14237
+ }
14238
+ }
14239
+ const hvcc = MP4.box(MP4.types.hvcC, hvcC);
14240
+ const width = track.width;
14241
+ const height = track.height;
14242
+ const hSpacing = track.pixelRatio[0];
14243
+ const vSpacing = track.pixelRatio[1];
14244
+ return MP4.box(MP4.types.hvc1, new Uint8Array([0x00, 0x00, 0x00,
14245
+ // reserved
14246
+ 0x00, 0x00, 0x00,
14247
+ // reserved
14248
+ 0x00, 0x01,
14249
+ // data_reference_index
14250
+ 0x00, 0x00,
14251
+ // pre_defined
14252
+ 0x00, 0x00,
14253
+ // reserved
14254
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
14255
+ // pre_defined
14256
+ width >> 8 & 0xff, width & 0xff,
14257
+ // width
14258
+ height >> 8 & 0xff, height & 0xff,
14259
+ // height
14260
+ 0x00, 0x48, 0x00, 0x00,
14261
+ // horizresolution
14262
+ 0x00, 0x48, 0x00, 0x00,
14263
+ // vertresolution
14264
+ 0x00, 0x00, 0x00, 0x00,
14265
+ // reserved
14266
+ 0x00, 0x01,
14267
+ // frame_count
14268
+ 0x12, 0x64, 0x61, 0x69, 0x6c,
14269
+ // dailymotion/hls.js
14270
+ 0x79, 0x6d, 0x6f, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x68, 0x6c, 0x73, 0x2e, 0x6a, 0x73, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
14271
+ // compressorname
14272
+ 0x00, 0x18,
14273
+ // depth = 24
14274
+ 0x11, 0x11]),
14275
+ // pre_defined = -1
14276
+ hvcc, MP4.box(MP4.types.btrt, new Uint8Array([0x00, 0x1c, 0x9c, 0x80,
14277
+ // bufferSizeDB
14278
+ 0x00, 0x2d, 0xc6, 0xc0,
14279
+ // maxBitrate
14280
+ 0x00, 0x2d, 0xc6, 0xc0])),
14281
+ // avgBitrate
14282
+ MP4.box(MP4.types.pasp, new Uint8Array([hSpacing >> 24,
14283
+ // hSpacing
14284
+ hSpacing >> 16 & 0xff, hSpacing >> 8 & 0xff, hSpacing & 0xff, vSpacing >> 24,
14285
+ // vSpacing
14286
+ vSpacing >> 16 & 0xff, vSpacing >> 8 & 0xff, vSpacing & 0xff])));
14287
+ }
13455
14288
  }
13456
14289
  MP4.types = void 0;
13457
14290
  MP4.HDLR_TYPES = void 0;
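The new MP4.hvc1 box wraps an hvcC (HEVCDecoderConfigurationRecord): a 23-byte configuration header (ending in the array count) followed by one array per parameter-set type (VPS, SPS, PPS), where each array is a 3-byte header (completeness flag plus NAL type, then a 16-bit unit count) followed by length-prefixed NAL units. That layout is where the size arithmetic above (+3 per array, +2 + unit.length per unit) comes from; a sketch of serializing one such array:

// Serialize one hvcC parameter-set array:
// [completeness|nal_type][count hi][count lo] then, per unit, a 16-bit
// big-endian length followed by the unit bytes.
function serializeParamSetArray(nalType: number, units: Uint8Array[], complete: boolean): Uint8Array {
  let size = 3;
  for (const u of units) {
    size += 2 + u.length;
  }
  const out = new Uint8Array(size);
  out[0] = (complete ? 0x80 : 0) | nalType;
  out[1] = (units.length >> 8) & 0xff;
  out[2] = units.length & 0xff;
  let offset = 3;
  for (const u of units) {
    out[offset] = (u.length >> 8) & 0xff;
    out[offset + 1] = u.length & 0xff;
    out.set(u, offset + 2);
    offset += 2 + u.length;
  }
  return out;
}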
@@ -13833,9 +14666,9 @@ class MP4Remuxer {
13833
14666
  const foundOverlap = delta < -1;
13834
14667
  if (foundHole || foundOverlap) {
13835
14668
  if (foundHole) {
13836
- logger.warn(`AVC: ${toMsFromMpegTsClock(delta, true)} ms (${delta}dts) hole between fragments detected at ${timeOffset.toFixed(3)}`);
14669
+ logger.warn(`${(track.segmentCodec || '').toUpperCase()}: ${toMsFromMpegTsClock(delta, true)} ms (${delta}dts) hole between fragments detected at ${timeOffset.toFixed(3)}`);
13837
14670
  } else {
13838
- logger.warn(`AVC: ${toMsFromMpegTsClock(-delta, true)} ms (${delta}dts) overlapping between fragments detected at ${timeOffset.toFixed(3)}`);
14671
+ logger.warn(`${(track.segmentCodec || '').toUpperCase()}: ${toMsFromMpegTsClock(-delta, true)} ms (${delta}dts) overlapping between fragments detected at ${timeOffset.toFixed(3)}`);
13839
14672
  }
13840
14673
  if (!foundOverlap || nextAvcDts >= inputSamples[0].pts || chromeVersion) {
13841
14674
  firstDTS = nextAvcDts;
@@ -13844,12 +14677,24 @@ class MP4Remuxer {
13844
14677
  inputSamples[0].dts = firstDTS;
13845
14678
  inputSamples[0].pts = firstPTS;
13846
14679
  } else {
14680
+ let isPTSOrderRetained = true;
13847
14681
  for (let i = 0; i < inputSamples.length; i++) {
13848
- if (inputSamples[i].dts > firstPTS) {
14682
+ if (inputSamples[i].dts > firstPTS && isPTSOrderRetained) {
13849
14683
  break;
13850
14684
  }
14685
+ const prevPTS = inputSamples[i].pts;
13851
14686
  inputSamples[i].dts -= delta;
13852
14687
  inputSamples[i].pts -= delta;
14688
+
14689
+ // check to see if this sample's PTS order has changed
14690
+ // relative to the next one
14691
+ if (i < inputSamples.length - 1) {
14692
+ const nextSamplePTS = inputSamples[i + 1].pts;
14693
+ const currentSamplePTS = inputSamples[i].pts;
14694
+ const currentOrder = nextSamplePTS <= currentSamplePTS;
14695
+ const prevOrder = nextSamplePTS <= prevPTS;
14696
+ isPTSOrderRetained = currentOrder == prevOrder;
14697
+ }
13853
14698
  }
13854
14699
  }
13855
14700
  logger.log(`Video: Initial PTS/DTS adjusted: ${toMsFromMpegTsClock(firstPTS, true)}/${toMsFromMpegTsClock(firstDTS, true)}, delta: ${toMsFromMpegTsClock(delta, true)} ms`);
@@ -13997,7 +14842,7 @@ class MP4Remuxer {
13997
14842
  }
13998
14843
  }
13999
14844
  }
14000
- // next AVC sample DTS should be equal to last sample DTS + last sample duration (in PES timescale)
14845
+ // next AVC/HEVC sample DTS should be equal to last sample DTS + last sample duration (in PES timescale)
14001
14846
  mp4SampleDuration = stretchedLastFrame || !mp4SampleDuration ? averageSampleDuration : mp4SampleDuration;
14002
14847
  this.nextAvcDts = nextAvcDts = lastDTS + mp4SampleDuration;
14003
14848
  this.videoSampleDuration = mp4SampleDuration;
@@ -14130,7 +14975,7 @@ class MP4Remuxer {
14130
14975
  logger.warn(`[mp4-remuxer]: Injecting ${missing} audio frame @ ${(nextPts / inputTimeScale).toFixed(3)}s due to ${Math.round(1000 * delta / inputTimeScale)} ms gap.`);
14131
14976
  for (let j = 0; j < missing; j++) {
14132
14977
  const newStamp = Math.max(nextPts, 0);
14133
- let fillFrame = AAC.getSilentFrame(track.manifestCodec || track.codec, track.channelCount);
14978
+ let fillFrame = AAC.getSilentFrame(track.parsedCodec || track.manifestCodec || track.codec, track.channelCount);
14134
14979
  if (!fillFrame) {
14135
14980
  logger.log('[mp4-remuxer]: Unable to get silent frame for given audio codec; duplicating last frame instead.');
14136
14981
  fillFrame = sample.unit.subarray();
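The loop above fills a detected audio gap by injecting silent AAC frames, now preferring track.parsedCodec when picking the silent-frame template. The frame count follows directly from the gap length and the per-frame duration in MPEG-TS ticks; a worked example with illustrative numbers (1024 samples per frame is standard AAC, the rest are assumptions for the example):

// How many silent frames cover a gap, in 90 kHz MPEG-TS ticks.
const inputTimeScale = 90000;
const sampleRate = 48000;
const samplesPerFrame = 1024; // AAC
const frameDuration = (samplesPerFrame * inputTimeScale) / sampleRate; // 1920 ticks

const gap = 11520;                               // expected PTS minus actual PTS
const missing = Math.round(gap / frameDuration); // 6 silent frames to inject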
@@ -14258,7 +15103,7 @@ class MP4Remuxer {
14258
15103
  // samples count of this segment's duration
14259
15104
  const nbSamples = Math.ceil((endDTS - startDTS) / frameDuration);
14260
15105
  // silent frame
14261
- const silentFrame = AAC.getSilentFrame(track.manifestCodec || track.codec, track.channelCount);
15106
+ const silentFrame = AAC.getSilentFrame(track.parsedCodec || track.manifestCodec || track.codec, track.channelCount);
14262
15107
  logger.warn('[mp4-remuxer]: remux empty Audio');
14263
15108
  // Can't remux if we can't generate a silent frame...
14264
15109
  if (!silentFrame) {
@@ -14652,13 +15497,15 @@ class Transmuxer {
14652
15497
  initSegmentData
14653
15498
  } = transmuxConfig;
14654
15499
  const keyData = getEncryptionType(uintData, decryptdata);
14655
- if (keyData && keyData.method === 'AES-128') {
15500
+ if (keyData && isFullSegmentEncryption(keyData.method)) {
14656
15501
  const decrypter = this.getDecrypter();
15502
+ const aesMode = getAesModeFromFullSegmentMethod(keyData.method);
15503
+
14657
15504
  // Software decryption is synchronous; webCrypto is not
14658
15505
  if (decrypter.isSync()) {
14659
15506
  // Software decryption is progressive. Progressive decryption may not return a result on each call. Any cached
14660
15507
  // data is handled in the flush() call
14661
- let decryptedData = decrypter.softwareDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer);
15508
+ let decryptedData = decrypter.softwareDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer, aesMode);
14662
15509
  // For Low-Latency HLS Parts, decrypt in place, since part parsing is expected on push progress
14663
15510
  const loadingParts = chunkMeta.part > -1;
14664
15511
  if (loadingParts) {
@@ -14670,7 +15517,7 @@ class Transmuxer {
14670
15517
  }
14671
15518
  uintData = new Uint8Array(decryptedData);
14672
15519
  } else {
14673
- this.decryptionPromise = decrypter.webCryptoDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer).then(decryptedData => {
15520
+ this.decryptionPromise = decrypter.webCryptoDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer, aesMode).then(decryptedData => {
14674
15521
  // Calling push here is important; if flush() is called while this is still resolving, this ensures that
14675
15522
  // the decrypted data has been transmuxed
14676
15523
  const result = this.push(decryptedData, null, chunkMeta);
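The transmuxer no longer special-cases the literal 'AES-128' method: isFullSegmentEncryption() decides whether the whole segment is encrypted, and getAesModeFromFullSegmentMethod() picks the AES mode that is then threaded through both softwareDecrypt and webCryptoDecrypt. Those helpers live in src/utils/encryption-methods-util.ts, which is not part of this excerpt, so the sketch below is only a plausible shape for them, not the shipped implementation (the ctr member is likewise an assumption; only DecrypterAesMode.cbc appears in this diff):

// Hedged sketch of the helpers referenced above; names from the diff, bodies assumed.
enum DecrypterAesMode {
  cbc,
  ctr, // assumed second mode; only cbc is visible in this diff
}

function isFullSegmentEncryption(method: string): boolean {
  return method === 'AES-128' || method === 'AES-256' || method === 'AES-256-CTR';
}

function getAesModeFromFullSegmentMethod(method: string): DecrypterAesMode {
  switch (method) {
    case 'AES-128':
    case 'AES-256':
      return DecrypterAesMode.cbc;
    case 'AES-256-CTR':
      return DecrypterAesMode.ctr;
    default:
      throw new Error(`invalid full-segment method ${method}`);
  }
}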
@@ -15324,14 +16171,7 @@ class TransmuxerInterface {
  this.observer = new EventEmitter();
  this.observer.on(Events.FRAG_DECRYPTED, forwardMessage);
  this.observer.on(Events.ERROR, forwardMessage);
- const MediaSource = getMediaSource(config.preferManagedMediaSource) || {
- isTypeSupported: () => false
- };
- const m2tsTypeSupported = {
- mpeg: MediaSource.isTypeSupported('audio/mpeg'),
- mp3: MediaSource.isTypeSupported('audio/mp4; codecs="mp3"'),
- ac3: MediaSource.isTypeSupported('audio/mp4; codecs="ac-3"')
- };
+ const m2tsTypeSupported = getM2TSSupportedAudioTypes(config.preferManagedMediaSource);

  // navigator.vendor is not always available in Web Worker
  // refer to https://developer.mozilla.org/en-US/docs/Web/API/WorkerGlobalScope/navigator
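Note: the inline `isTypeSupported` probes were folded into a shared helper. A sketch of what `getM2TSSupportedAudioTypes` plausibly does, mirroring the removed block (only the name and argument come from the call site; the body is assumed to match the deleted code):

```ts
// Sketch only — getMediaSource is the same helper the removed inline block called
// (it returns MediaSource, ManagedMediaSource, or undefined).
function getM2TSSupportedAudioTypes(preferManagedMediaSource: boolean) {
  const MediaSource = getMediaSource(preferManagedMediaSource) || {
    isTypeSupported: () => false,
  };
  return {
    mpeg: MediaSource.isTypeSupported('audio/mpeg'),
    mp3: MediaSource.isTypeSupported('audio/mp4; codecs="mp3"'),
    ac3: MediaSource.isTypeSupported('audio/mp4; codecs="ac-3"'),
  };
}
```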
@@ -15619,7 +16459,7 @@ const TICK_INTERVAL$2 = 100; // how often to tick in ms

  class AudioStreamController extends BaseStreamController {
  constructor(hls, fragmentTracker, keyLoader) {
- super(hls, fragmentTracker, keyLoader, '[audio-stream-controller]', PlaylistLevelType.AUDIO);
+ super(hls, fragmentTracker, keyLoader, 'audio-stream-controller', PlaylistLevelType.AUDIO);
  this.videoBuffer = null;
  this.videoTrackCC = -1;
  this.waitingVideoCC = -1;
@@ -15631,27 +16471,24 @@ class AudioStreamController extends BaseStreamController {
  this.flushing = false;
  this.bufferFlushed = false;
  this.cachedTrackLoadedData = null;
- this._registerListeners();
+ this.registerListeners();
  }
  onHandlerDestroying() {
- this._unregisterListeners();
+ this.unregisterListeners();
  super.onHandlerDestroying();
  this.mainDetails = null;
  this.bufferedTrack = null;
  this.switchingTrack = null;
  }
- _registerListeners() {
+ registerListeners() {
+ super.registerListeners();
  const {
  hls
  } = this;
- hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
- hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
- hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
  hls.on(Events.LEVEL_LOADED, this.onLevelLoaded, this);
  hls.on(Events.AUDIO_TRACKS_UPDATED, this.onAudioTracksUpdated, this);
  hls.on(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
  hls.on(Events.AUDIO_TRACK_LOADED, this.onAudioTrackLoaded, this);
- hls.on(Events.ERROR, this.onError, this);
  hls.on(Events.BUFFER_RESET, this.onBufferReset, this);
  hls.on(Events.BUFFER_CREATED, this.onBufferCreated, this);
  hls.on(Events.BUFFER_FLUSHING, this.onBufferFlushing, this);
@@ -15659,18 +16496,18 @@ class AudioStreamController extends BaseStreamController {
  hls.on(Events.INIT_PTS_FOUND, this.onInitPtsFound, this);
  hls.on(Events.FRAG_BUFFERED, this.onFragBuffered, this);
  }
- _unregisterListeners() {
+ unregisterListeners() {
  const {
  hls
  } = this;
- hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
- hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
- hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
+ if (!hls) {
+ return;
+ }
+ super.unregisterListeners();
  hls.off(Events.LEVEL_LOADED, this.onLevelLoaded, this);
  hls.off(Events.AUDIO_TRACKS_UPDATED, this.onAudioTracksUpdated, this);
  hls.off(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
  hls.off(Events.AUDIO_TRACK_LOADED, this.onAudioTrackLoaded, this);
- hls.off(Events.ERROR, this.onError, this);
  hls.off(Events.BUFFER_RESET, this.onBufferReset, this);
  hls.off(Events.BUFFER_CREATED, this.onBufferCreated, this);
  hls.off(Events.BUFFER_FLUSHING, this.onBufferFlushing, this);
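Note: the stream controllers now defer shared event wiring to the base class. Inferred from the listeners the subclasses stopped registering themselves, the new base-class hooks presumably look roughly like the fragment below (a sketch, not the actual BaseStreamController source):

```ts
// Assumed shape of the hooks on BaseStreamController (method fragment, not a full class).
protected registerListeners(): void {
  const { hls } = this;
  hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
  hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
  hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
  hls.on(Events.ERROR, this.onError, this);
}
protected unregisterListeners(): void {
  const { hls } = this;
  hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
  hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
  hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
  hls.off(Events.ERROR, this.onError, this);
}
```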
@@ -15839,12 +16676,13 @@ class AudioStreamController extends BaseStreamController {
  } = this;
  const config = hls.config;

- // 1. if video not attached AND
+ // 1. if buffering is suspended
+ // 2. if video not attached AND
  // start fragment already requested OR start frag prefetch not enabled
- // 2. if tracks or track not loaded and selected
+ // 3. if tracks or track not loaded and selected
  // then exit loop
  // => if media not attached but start frag prefetch is enabled and start frag not requested yet, we will not exit loop
- if (!media && (this.startFragRequested || !config.startFragPrefetch) || !(levels != null && levels[trackId])) {
+ if (!this.buffering || !media && (this.startFragRequested || !config.startFragPrefetch) || !(levels != null && levels[trackId])) {
  return;
  }
  const levelInfo = levels[trackId];
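Note: the new `this.buffering` guard ties fragment loading to the public buffering toggle (and `BufferController.onBufferReset` calls `hls.resumeBuffering()` later in this diff). A hedged usage example, assuming the pause/resume methods of the 1.5 public API:

```ts
import Hls from 'hls.js';

const hls = new Hls();
hls.pauseBuffering();  // e.g. while a client-side ad plays; forward fragment loading stops
// ...later...
hls.resumeBuffering(); // loading resumes on the controllers' next tick
```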
@@ -16037,7 +16875,7 @@ class AudioStreamController extends BaseStreamController {

  // compute start position if we are aligned with the main playlist
  if (!this.startFragRequested && (this.mainDetails || !newDetails.live)) {
- this.setStartPosition(track.details, sliding);
+ this.setStartPosition(this.mainDetails || newDetails, sliding);
  }
  // only switch back to IDLE state if we were waiting for track to start downloading a new fragment
  if (this.state === State.WAITING_TRACK && !this.waitForCdnTuneIn(newDetails)) {
@@ -16402,7 +17240,7 @@ class AudioStreamController extends BaseStreamController {

  class AudioTrackController extends BasePlaylistController {
  constructor(hls) {
- super(hls, '[audio-track-controller]');
+ super(hls, 'audio-track-controller');
  this.tracks = [];
  this.groupIds = null;
  this.tracksInGroup = [];
@@ -16721,26 +17559,23 @@ const TICK_INTERVAL$1 = 500; // how often to tick in ms
16721
17559
 
16722
17560
  class SubtitleStreamController extends BaseStreamController {
16723
17561
  constructor(hls, fragmentTracker, keyLoader) {
16724
- super(hls, fragmentTracker, keyLoader, '[subtitle-stream-controller]', PlaylistLevelType.SUBTITLE);
17562
+ super(hls, fragmentTracker, keyLoader, 'subtitle-stream-controller', PlaylistLevelType.SUBTITLE);
16725
17563
  this.currentTrackId = -1;
16726
17564
  this.tracksBuffered = [];
16727
17565
  this.mainDetails = null;
16728
- this._registerListeners();
17566
+ this.registerListeners();
16729
17567
  }
16730
17568
  onHandlerDestroying() {
16731
- this._unregisterListeners();
17569
+ this.unregisterListeners();
16732
17570
  super.onHandlerDestroying();
16733
17571
  this.mainDetails = null;
16734
17572
  }
16735
- _registerListeners() {
17573
+ registerListeners() {
17574
+ super.registerListeners();
16736
17575
  const {
16737
17576
  hls
16738
17577
  } = this;
16739
- hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
16740
- hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
16741
- hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
16742
17578
  hls.on(Events.LEVEL_LOADED, this.onLevelLoaded, this);
16743
- hls.on(Events.ERROR, this.onError, this);
16744
17579
  hls.on(Events.SUBTITLE_TRACKS_UPDATED, this.onSubtitleTracksUpdated, this);
16745
17580
  hls.on(Events.SUBTITLE_TRACK_SWITCH, this.onSubtitleTrackSwitch, this);
16746
17581
  hls.on(Events.SUBTITLE_TRACK_LOADED, this.onSubtitleTrackLoaded, this);
@@ -16748,15 +17583,12 @@ class SubtitleStreamController extends BaseStreamController {
16748
17583
  hls.on(Events.BUFFER_FLUSHING, this.onBufferFlushing, this);
16749
17584
  hls.on(Events.FRAG_BUFFERED, this.onFragBuffered, this);
16750
17585
  }
16751
- _unregisterListeners() {
17586
+ unregisterListeners() {
17587
+ super.unregisterListeners();
16752
17588
  const {
16753
17589
  hls
16754
17590
  } = this;
16755
- hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
16756
- hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
16757
- hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
16758
17591
  hls.off(Events.LEVEL_LOADED, this.onLevelLoaded, this);
16759
- hls.off(Events.ERROR, this.onError, this);
16760
17592
  hls.off(Events.SUBTITLE_TRACKS_UPDATED, this.onSubtitleTracksUpdated, this);
16761
17593
  hls.off(Events.SUBTITLE_TRACK_SWITCH, this.onSubtitleTrackSwitch, this);
16762
17594
  hls.off(Events.SUBTITLE_TRACK_LOADED, this.onSubtitleTrackLoaded, this);
@@ -16957,7 +17789,7 @@ class SubtitleStreamController extends BaseStreamController {
  track.details = newDetails;
  this.levelLastLoaded = track;
  if (!this.startFragRequested && (this.mainDetails || !newDetails.live)) {
- this.setStartPosition(track.details, sliding);
+ this.setStartPosition(this.mainDetails || newDetails, sliding);
  }

  // trigger handler right now
@@ -16983,10 +17815,10 @@ class SubtitleStreamController extends BaseStreamController {
  return;
  }
  // check to see if the payload needs to be decrypted
- if (payload && payload.byteLength > 0 && decryptData != null && decryptData.key && decryptData.iv && decryptData.method === 'AES-128') {
+ if (payload && payload.byteLength > 0 && decryptData != null && decryptData.key && decryptData.iv && isFullSegmentEncryption(decryptData.method)) {
  const startTime = performance.now();
  // decrypt the subtitles
- this.decrypter.decrypt(new Uint8Array(payload), decryptData.key.buffer, decryptData.iv.buffer).catch(err => {
+ this.decrypter.decrypt(new Uint8Array(payload), decryptData.key.buffer, decryptData.iv.buffer, getAesModeFromFullSegmentMethod(decryptData.method)).catch(err => {
  hls.trigger(Events.ERROR, {
  type: ErrorTypes.MEDIA_ERROR,
  details: ErrorDetails.FRAG_DECRYPT_ERROR,
@@ -17120,7 +17952,7 @@ class BufferableInstance {

  class SubtitleTrackController extends BasePlaylistController {
  constructor(hls) {
- super(hls, '[subtitle-track-controller]');
+ super(hls, 'subtitle-track-controller');
  this.media = null;
  this.tracks = [];
  this.groupIds = null;
@@ -17129,10 +17961,10 @@ class SubtitleTrackController extends BasePlaylistController {
17129
17961
  this.currentTrack = null;
17130
17962
  this.selectDefaultTrack = true;
17131
17963
  this.queuedDefaultTrack = -1;
17132
- this.asyncPollTrackChange = () => this.pollTrackChange(0);
17133
17964
  this.useTextTrackPolling = false;
17134
17965
  this.subtitlePollingInterval = -1;
17135
17966
  this._subtitleDisplay = true;
17967
+ this.asyncPollTrackChange = () => this.pollTrackChange(0);
17136
17968
  this.onTextTracksChanged = () => {
17137
17969
  if (!this.useTextTrackPolling) {
17138
17970
  self.clearInterval(this.subtitlePollingInterval);
@@ -17166,6 +17998,7 @@ class SubtitleTrackController extends BasePlaylistController {
17166
17998
  this.tracks.length = 0;
17167
17999
  this.tracksInGroup.length = 0;
17168
18000
  this.currentTrack = null;
18001
+ // @ts-ignore
17169
18002
  this.onTextTracksChanged = this.asyncPollTrackChange = null;
17170
18003
  super.destroy();
17171
18004
  }
@@ -17626,8 +18459,9 @@ class BufferOperationQueue {
  }

  const VIDEO_CODEC_PROFILE_REPLACE = /(avc[1234]|hvc1|hev1|dvh[1e]|vp09|av01)(?:\.[^.,]+)+/;
- class BufferController {
+ class BufferController extends Logger {
  constructor(hls) {
+ super('buffer-controller', hls.logger);
  // The level details used to determine duration, target-duration and live
  this.details = null;
  // cache the self generated object url to detect hijack of video tag
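Note: several controllers in this diff (BufferController here, and ContentSteeringController, EMEController and GapController further down) now extend a shared `Logger` base instead of binding prefixed `logger.*` methods in each constructor. The base class itself is not shown in this excerpt; a sketch of the assumed pattern, inferred from `super('buffer-controller', hls.logger)` and the removed `logger.log.bind(logger, logPrefix)` boilerplate:

```ts
// Assumed shape only; the logger argument is typed loosely here.
class Logger {
  log: (...args: any[]) => void;
  warn: (...args: any[]) => void;
  error: (...args: any[]) => void;
  debug: (...args: any[]) => void;
  constructor(label: string, logger: Pick<Console, 'log' | 'warn' | 'error' | 'debug'>) {
    const prefix = `[${label}]:`;
    this.log = logger.log.bind(logger, prefix);
    this.warn = logger.warn.bind(logger, prefix);
    this.error = logger.error.bind(logger, prefix);
    this.debug = logger.debug.bind(logger, prefix);
  }
}
```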
@@ -17657,9 +18491,6 @@ class BufferController {
  this.tracks = {};
  this.pendingTracks = {};
  this.sourceBuffer = void 0;
- this.log = void 0;
- this.warn = void 0;
- this.error = void 0;
  this._onEndStreaming = event => {
  if (!this.hls) {
  return;
@@ -17705,15 +18536,11 @@ class BufferController {
  _objectUrl
  } = this;
  if (mediaSrc !== _objectUrl) {
- logger.error(`Media element src was set while attaching MediaSource (${_objectUrl} > ${mediaSrc})`);
+ this.error(`Media element src was set while attaching MediaSource (${_objectUrl} > ${mediaSrc})`);
  }
  };
  this.hls = hls;
- const logPrefix = '[buffer-controller]';
  this.appendSource = hls.config.preferManagedMediaSource;
- this.log = logger.log.bind(logger, logPrefix);
- this.warn = logger.warn.bind(logger, logPrefix);
- this.error = logger.error.bind(logger, logPrefix);
  this._initSourceBuffer();
  this.registerListeners();
  }
@@ -17726,6 +18553,12 @@ class BufferController {
17726
18553
  this.lastMpegAudioChunk = null;
17727
18554
  // @ts-ignore
17728
18555
  this.hls = null;
18556
+ // @ts-ignore
18557
+ this._onMediaSourceOpen = this._onMediaSourceClose = null;
18558
+ // @ts-ignore
18559
+ this._onMediaSourceEnded = null;
18560
+ // @ts-ignore
18561
+ this._onStartStreaming = this._onEndStreaming = null;
17729
18562
  }
17730
18563
  registerListeners() {
17731
18564
  const {
@@ -17888,6 +18721,7 @@ class BufferController {
  this.resetBuffer(type);
  });
  this._initSourceBuffer();
+ this.hls.resumeBuffering();
  }
  resetBuffer(type) {
  const sb = this.sourceBuffer[type];
@@ -20990,14 +21824,12 @@ class TimelineController {
20990
21824
  this.cea608Parser1 = this.cea608Parser2 = undefined;
20991
21825
  }
20992
21826
  initCea608Parsers() {
20993
- if (this.config.enableCEA708Captions && (!this.cea608Parser1 || !this.cea608Parser2)) {
20994
- const channel1 = new OutputFilter(this, 'textTrack1');
20995
- const channel2 = new OutputFilter(this, 'textTrack2');
20996
- const channel3 = new OutputFilter(this, 'textTrack3');
20997
- const channel4 = new OutputFilter(this, 'textTrack4');
20998
- this.cea608Parser1 = new Cea608Parser(1, channel1, channel2);
20999
- this.cea608Parser2 = new Cea608Parser(3, channel3, channel4);
21000
- }
21827
+ const channel1 = new OutputFilter(this, 'textTrack1');
21828
+ const channel2 = new OutputFilter(this, 'textTrack2');
21829
+ const channel3 = new OutputFilter(this, 'textTrack3');
21830
+ const channel4 = new OutputFilter(this, 'textTrack4');
21831
+ this.cea608Parser1 = new Cea608Parser(1, channel1, channel2);
21832
+ this.cea608Parser2 = new Cea608Parser(3, channel3, channel4);
21001
21833
  }
21002
21834
  addCues(trackName, startTime, endTime, screen, cueRanges) {
21003
21835
  // skip cues which overlap more than 50% with previously parsed time ranges
@@ -21235,7 +22067,7 @@ class TimelineController {
21235
22067
  if (inUseTracks != null && inUseTracks.length) {
21236
22068
  const unusedTextTracks = inUseTracks.filter(t => t !== null).map(t => t.label);
21237
22069
  if (unusedTextTracks.length) {
21238
- logger.warn(`Media element contains unused subtitle tracks: ${unusedTextTracks.join(', ')}. Replace media element for each source to clear TextTracks and captions menu.`);
22070
+ this.hls.logger.warn(`Media element contains unused subtitle tracks: ${unusedTextTracks.join(', ')}. Replace media element for each source to clear TextTracks and captions menu.`);
21239
22071
  }
21240
22072
  }
21241
22073
  } else if (this.tracks.length) {
@@ -21280,26 +22112,23 @@ class TimelineController {
21280
22112
  return level == null ? void 0 : level.attrs['CLOSED-CAPTIONS'];
21281
22113
  }
21282
22114
  onFragLoading(event, data) {
21283
- this.initCea608Parsers();
21284
- const {
21285
- cea608Parser1,
21286
- cea608Parser2,
21287
- lastCc,
21288
- lastSn,
21289
- lastPartIndex
21290
- } = this;
21291
- if (!this.enabled || !cea608Parser1 || !cea608Parser2) {
21292
- return;
21293
- }
21294
22115
  // if this frag isn't contiguous, clear the parser so cues with bad start/end times aren't added to the textTrack
21295
- if (data.frag.type === PlaylistLevelType.MAIN) {
22116
+ if (this.enabled && data.frag.type === PlaylistLevelType.MAIN) {
21296
22117
  var _data$part$index, _data$part;
22118
+ const {
22119
+ cea608Parser1,
22120
+ cea608Parser2,
22121
+ lastSn
22122
+ } = this;
22123
+ if (!cea608Parser1 || !cea608Parser2) {
22124
+ return;
22125
+ }
21297
22126
  const {
21298
22127
  cc,
21299
22128
  sn
21300
22129
  } = data.frag;
21301
- const partIndex = (_data$part$index = data == null ? void 0 : (_data$part = data.part) == null ? void 0 : _data$part.index) != null ? _data$part$index : -1;
21302
- if (!(sn === lastSn + 1 || sn === lastSn && partIndex === lastPartIndex + 1 || cc === lastCc)) {
22130
+ const partIndex = (_data$part$index = (_data$part = data.part) == null ? void 0 : _data$part.index) != null ? _data$part$index : -1;
22131
+ if (!(sn === lastSn + 1 || sn === lastSn && partIndex === this.lastPartIndex + 1 || cc === this.lastCc)) {
21303
22132
  cea608Parser1.reset();
21304
22133
  cea608Parser2.reset();
21305
22134
  }
@@ -21356,7 +22185,7 @@ class TimelineController {
21356
22185
  frag: frag
21357
22186
  });
21358
22187
  }, error => {
21359
- logger.log(`Failed to parse IMSC1: ${error}`);
22188
+ hls.logger.log(`Failed to parse IMSC1: ${error}`);
21360
22189
  hls.trigger(Events.SUBTITLE_FRAG_PROCESSED, {
21361
22190
  success: false,
21362
22191
  frag: frag,
@@ -21397,7 +22226,7 @@ class TimelineController {
21397
22226
  this._fallbackToIMSC1(frag, payload);
21398
22227
  }
21399
22228
  // Something went wrong while parsing. Trigger event with success false.
21400
- logger.log(`Failed to parse VTT cue: ${error}`);
22229
+ hls.logger.log(`Failed to parse VTT cue: ${error}`);
21401
22230
  if (missingInitPTS && maxAvCC > frag.cc) {
21402
22231
  return;
21403
22232
  }
@@ -21458,12 +22287,7 @@ class TimelineController {
21458
22287
  this.captionsTracks = {};
21459
22288
  }
21460
22289
  onFragParsingUserdata(event, data) {
21461
- this.initCea608Parsers();
21462
- const {
21463
- cea608Parser1,
21464
- cea608Parser2
21465
- } = this;
21466
- if (!this.enabled || !cea608Parser1 || !cea608Parser2) {
22290
+ if (!this.enabled || !this.config.enableCEA708Captions) {
21467
22291
  return;
21468
22292
  }
21469
22293
  const {
@@ -21478,9 +22302,12 @@ class TimelineController {
21478
22302
  for (let i = 0; i < samples.length; i++) {
21479
22303
  const ccBytes = samples[i].bytes;
21480
22304
  if (ccBytes) {
22305
+ if (!this.cea608Parser1) {
22306
+ this.initCea608Parsers();
22307
+ }
21481
22308
  const ccdatas = this.extractCea608Data(ccBytes);
21482
- cea608Parser1.addData(samples[i].pts, ccdatas[0]);
21483
- cea608Parser2.addData(samples[i].pts, ccdatas[1]);
22309
+ this.cea608Parser1.addData(samples[i].pts, ccdatas[0]);
22310
+ this.cea608Parser2.addData(samples[i].pts, ccdatas[1]);
21484
22311
  }
21485
22312
  }
21486
22313
  }
@@ -21676,7 +22503,7 @@ class CapLevelController {
21676
22503
  const hls = this.hls;
21677
22504
  const maxLevel = this.getMaxLevel(levels.length - 1);
21678
22505
  if (maxLevel !== this.autoLevelCapping) {
21679
- logger.log(`Setting autoLevelCapping to ${maxLevel}: ${levels[maxLevel].height}p@${levels[maxLevel].bitrate} for media ${this.mediaWidth}x${this.mediaHeight}`);
22506
+ hls.logger.log(`Setting autoLevelCapping to ${maxLevel}: ${levels[maxLevel].height}p@${levels[maxLevel].bitrate} for media ${this.mediaWidth}x${this.mediaHeight}`);
21680
22507
  }
21681
22508
  hls.autoLevelCapping = maxLevel;
21682
22509
  if (hls.autoLevelCapping > this.autoLevelCapping && this.streamController) {
@@ -21854,10 +22681,10 @@ class FPSController {
21854
22681
  totalDroppedFrames: droppedFrames
21855
22682
  });
21856
22683
  if (droppedFPS > 0) {
21857
- // logger.log('checkFPS : droppedFPS/decodedFPS:' + droppedFPS/(1000 * currentDecoded / currentPeriod));
22684
+ // hls.logger.log('checkFPS : droppedFPS/decodedFPS:' + droppedFPS/(1000 * currentDecoded / currentPeriod));
21858
22685
  if (currentDropped > hls.config.fpsDroppedMonitoringThreshold * currentDecoded) {
21859
22686
  let currentLevel = hls.currentLevel;
21860
- logger.warn('drop FPS ratio greater than max allowed value for currentLevel: ' + currentLevel);
22687
+ hls.logger.warn('drop FPS ratio greater than max allowed value for currentLevel: ' + currentLevel);
21861
22688
  if (currentLevel > 0 && (hls.autoLevelCapping === -1 || hls.autoLevelCapping >= currentLevel)) {
21862
22689
  currentLevel = currentLevel - 1;
21863
22690
  hls.trigger(Events.FPS_DROP_LEVEL_CAPPING, {
@@ -21889,7 +22716,6 @@ class FPSController {
21889
22716
  }
21890
22717
  }
21891
22718
 
21892
- const LOGGER_PREFIX = '[eme]';
21893
22719
  /**
21894
22720
  * Controller to deal with encrypted media extensions (EME)
21895
22721
  * @see https://developer.mozilla.org/en-US/docs/Web/API/Encrypted_Media_Extensions_API
@@ -21897,8 +22723,9 @@ const LOGGER_PREFIX = '[eme]';
21897
22723
  * @class
21898
22724
  * @constructor
21899
22725
  */
21900
- class EMEController {
22726
+ class EMEController extends Logger {
21901
22727
  constructor(hls) {
22728
+ super('eme', hls.logger);
21902
22729
  this.hls = void 0;
21903
22730
  this.config = void 0;
21904
22731
  this.media = null;
@@ -21908,12 +22735,100 @@ class EMEController {
21908
22735
  this.mediaKeySessions = [];
21909
22736
  this.keyIdToKeySessionPromise = {};
21910
22737
  this.setMediaKeysQueue = EMEController.CDMCleanupPromise ? [EMEController.CDMCleanupPromise] : [];
21911
- this.onMediaEncrypted = this._onMediaEncrypted.bind(this);
21912
- this.onWaitingForKey = this._onWaitingForKey.bind(this);
21913
- this.debug = logger.debug.bind(logger, LOGGER_PREFIX);
21914
- this.log = logger.log.bind(logger, LOGGER_PREFIX);
21915
- this.warn = logger.warn.bind(logger, LOGGER_PREFIX);
21916
- this.error = logger.error.bind(logger, LOGGER_PREFIX);
22738
+ this.onMediaEncrypted = event => {
22739
+ const {
22740
+ initDataType,
22741
+ initData
22742
+ } = event;
22743
+ this.debug(`"${event.type}" event: init data type: "${initDataType}"`);
22744
+
22745
+ // Ignore event when initData is null
22746
+ if (initData === null) {
22747
+ return;
22748
+ }
22749
+ let keyId;
22750
+ let keySystemDomain;
22751
+ if (initDataType === 'sinf' && this.config.drmSystems[KeySystems.FAIRPLAY]) {
22752
+ // Match sinf keyId to playlist skd://keyId=
22753
+ const json = bin2str(new Uint8Array(initData));
22754
+ try {
22755
+ const sinf = base64Decode(JSON.parse(json).sinf);
22756
+ const tenc = parseSinf(new Uint8Array(sinf));
22757
+ if (!tenc) {
22758
+ return;
22759
+ }
22760
+ keyId = tenc.subarray(8, 24);
22761
+ keySystemDomain = KeySystems.FAIRPLAY;
22762
+ } catch (error) {
22763
+ this.warn('Failed to parse sinf "encrypted" event message initData');
22764
+ return;
22765
+ }
22766
+ } else {
22767
+ // Support clear-lead key-session creation (otherwise depend on playlist keys)
22768
+ const psshInfo = parsePssh(initData);
22769
+ if (psshInfo === null) {
22770
+ return;
22771
+ }
22772
+ if (psshInfo.version === 0 && psshInfo.systemId === KeySystemIds.WIDEVINE && psshInfo.data) {
22773
+ keyId = psshInfo.data.subarray(8, 24);
22774
+ }
22775
+ keySystemDomain = keySystemIdToKeySystemDomain(psshInfo.systemId);
22776
+ }
22777
+ if (!keySystemDomain || !keyId) {
22778
+ return;
22779
+ }
22780
+ const keyIdHex = Hex.hexDump(keyId);
22781
+ const {
22782
+ keyIdToKeySessionPromise,
22783
+ mediaKeySessions
22784
+ } = this;
22785
+ let keySessionContextPromise = keyIdToKeySessionPromise[keyIdHex];
22786
+ for (let i = 0; i < mediaKeySessions.length; i++) {
22787
+ // Match playlist key
22788
+ const keyContext = mediaKeySessions[i];
22789
+ const decryptdata = keyContext.decryptdata;
22790
+ if (decryptdata.pssh || !decryptdata.keyId) {
22791
+ continue;
22792
+ }
22793
+ const oldKeyIdHex = Hex.hexDump(decryptdata.keyId);
22794
+ if (keyIdHex === oldKeyIdHex || decryptdata.uri.replace(/-/g, '').indexOf(keyIdHex) !== -1) {
22795
+ keySessionContextPromise = keyIdToKeySessionPromise[oldKeyIdHex];
22796
+ delete keyIdToKeySessionPromise[oldKeyIdHex];
22797
+ decryptdata.pssh = new Uint8Array(initData);
22798
+ decryptdata.keyId = keyId;
22799
+ keySessionContextPromise = keyIdToKeySessionPromise[keyIdHex] = keySessionContextPromise.then(() => {
22800
+ return this.generateRequestWithPreferredKeySession(keyContext, initDataType, initData, 'encrypted-event-key-match');
22801
+ });
22802
+ break;
22803
+ }
22804
+ }
22805
+ if (!keySessionContextPromise) {
22806
+ // Clear-lead key (not encountered in playlist)
22807
+ keySessionContextPromise = keyIdToKeySessionPromise[keyIdHex] = this.getKeySystemSelectionPromise([keySystemDomain]).then(({
22808
+ keySystem,
22809
+ mediaKeys
22810
+ }) => {
22811
+ var _keySystemToKeySystem;
22812
+ this.throwIfDestroyed();
22813
+ const decryptdata = new LevelKey('ISO-23001-7', keyIdHex, (_keySystemToKeySystem = keySystemDomainToKeySystemFormat(keySystem)) != null ? _keySystemToKeySystem : '');
22814
+ decryptdata.pssh = new Uint8Array(initData);
22815
+ decryptdata.keyId = keyId;
22816
+ return this.attemptSetMediaKeys(keySystem, mediaKeys).then(() => {
22817
+ this.throwIfDestroyed();
22818
+ const keySessionContext = this.createMediaKeySessionContext({
22819
+ decryptdata,
22820
+ keySystem,
22821
+ mediaKeys
22822
+ });
22823
+ return this.generateRequestWithPreferredKeySession(keySessionContext, initDataType, initData, 'encrypted-event-no-match');
22824
+ });
22825
+ });
22826
+ }
22827
+ keySessionContextPromise.catch(error => this.handleError(error));
22828
+ };
22829
+ this.onWaitingForKey = event => {
22830
+ this.log(`"${event.type}" event`);
22831
+ };
21917
22832
  this.hls = hls;
21918
22833
  this.config = hls.config;
21919
22834
  this.registerListeners();
@@ -21927,9 +22842,9 @@ class EMEController {
21927
22842
  config.licenseXhrSetup = config.licenseResponseCallback = undefined;
21928
22843
  config.drmSystems = config.drmSystemOptions = {};
21929
22844
  // @ts-ignore
21930
- this.hls = this.onMediaEncrypted = this.onWaitingForKey = this.keyIdToKeySessionPromise = null;
22845
+ this.hls = this.config = this.keyIdToKeySessionPromise = null;
21931
22846
  // @ts-ignore
21932
- this.config = null;
22847
+ this.onMediaEncrypted = this.onWaitingForKey = null;
21933
22848
  }
21934
22849
  registerListeners() {
21935
22850
  this.hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
@@ -22193,100 +23108,6 @@ class EMEController {
22193
23108
  }
22194
23109
  return this.attemptKeySystemAccess(keySystemsToAttempt);
22195
23110
  }
22196
- _onMediaEncrypted(event) {
22197
- const {
22198
- initDataType,
22199
- initData
22200
- } = event;
22201
- this.debug(`"${event.type}" event: init data type: "${initDataType}"`);
22202
-
22203
- // Ignore event when initData is null
22204
- if (initData === null) {
22205
- return;
22206
- }
22207
- let keyId;
22208
- let keySystemDomain;
22209
- if (initDataType === 'sinf' && this.config.drmSystems[KeySystems.FAIRPLAY]) {
22210
- // Match sinf keyId to playlist skd://keyId=
22211
- const json = bin2str(new Uint8Array(initData));
22212
- try {
22213
- const sinf = base64Decode(JSON.parse(json).sinf);
22214
- const tenc = parseSinf(new Uint8Array(sinf));
22215
- if (!tenc) {
22216
- return;
22217
- }
22218
- keyId = tenc.subarray(8, 24);
22219
- keySystemDomain = KeySystems.FAIRPLAY;
22220
- } catch (error) {
22221
- this.warn('Failed to parse sinf "encrypted" event message initData');
22222
- return;
22223
- }
22224
- } else {
22225
- // Support clear-lead key-session creation (otherwise depend on playlist keys)
22226
- const psshInfo = parsePssh(initData);
22227
- if (psshInfo === null) {
22228
- return;
22229
- }
22230
- if (psshInfo.version === 0 && psshInfo.systemId === KeySystemIds.WIDEVINE && psshInfo.data) {
22231
- keyId = psshInfo.data.subarray(8, 24);
22232
- }
22233
- keySystemDomain = keySystemIdToKeySystemDomain(psshInfo.systemId);
22234
- }
22235
- if (!keySystemDomain || !keyId) {
22236
- return;
22237
- }
22238
- const keyIdHex = Hex.hexDump(keyId);
22239
- const {
22240
- keyIdToKeySessionPromise,
22241
- mediaKeySessions
22242
- } = this;
22243
- let keySessionContextPromise = keyIdToKeySessionPromise[keyIdHex];
22244
- for (let i = 0; i < mediaKeySessions.length; i++) {
22245
- // Match playlist key
22246
- const keyContext = mediaKeySessions[i];
22247
- const decryptdata = keyContext.decryptdata;
22248
- if (decryptdata.pssh || !decryptdata.keyId) {
22249
- continue;
22250
- }
22251
- const oldKeyIdHex = Hex.hexDump(decryptdata.keyId);
22252
- if (keyIdHex === oldKeyIdHex || decryptdata.uri.replace(/-/g, '').indexOf(keyIdHex) !== -1) {
22253
- keySessionContextPromise = keyIdToKeySessionPromise[oldKeyIdHex];
22254
- delete keyIdToKeySessionPromise[oldKeyIdHex];
22255
- decryptdata.pssh = new Uint8Array(initData);
22256
- decryptdata.keyId = keyId;
22257
- keySessionContextPromise = keyIdToKeySessionPromise[keyIdHex] = keySessionContextPromise.then(() => {
22258
- return this.generateRequestWithPreferredKeySession(keyContext, initDataType, initData, 'encrypted-event-key-match');
22259
- });
22260
- break;
22261
- }
22262
- }
22263
- if (!keySessionContextPromise) {
22264
- // Clear-lead key (not encountered in playlist)
22265
- keySessionContextPromise = keyIdToKeySessionPromise[keyIdHex] = this.getKeySystemSelectionPromise([keySystemDomain]).then(({
22266
- keySystem,
22267
- mediaKeys
22268
- }) => {
22269
- var _keySystemToKeySystem;
22270
- this.throwIfDestroyed();
22271
- const decryptdata = new LevelKey('ISO-23001-7', keyIdHex, (_keySystemToKeySystem = keySystemDomainToKeySystemFormat(keySystem)) != null ? _keySystemToKeySystem : '');
22272
- decryptdata.pssh = new Uint8Array(initData);
22273
- decryptdata.keyId = keyId;
22274
- return this.attemptSetMediaKeys(keySystem, mediaKeys).then(() => {
22275
- this.throwIfDestroyed();
22276
- const keySessionContext = this.createMediaKeySessionContext({
22277
- decryptdata,
22278
- keySystem,
22279
- mediaKeys
22280
- });
22281
- return this.generateRequestWithPreferredKeySession(keySessionContext, initDataType, initData, 'encrypted-event-no-match');
22282
- });
22283
- });
22284
- }
22285
- keySessionContextPromise.catch(error => this.handleError(error));
22286
- }
22287
- _onWaitingForKey(event) {
22288
- this.log(`"${event.type}" event`);
22289
- }
22290
23111
  attemptSetMediaKeys(keySystem, mediaKeys) {
22291
23112
  const queue = this.setMediaKeysQueue.slice();
22292
23113
  this.log(`Setting media-keys for "${keySystem}"`);
@@ -22879,20 +23700,6 @@ class SfItem {
22879
23700
  }
22880
23701
  }
22881
23702
 
22882
- /**
22883
- * A class to represent structured field tokens when `Symbol` is not available.
22884
- *
22885
- * @group Structured Field
22886
- *
22887
- * @beta
22888
- */
22889
- class SfToken {
22890
- constructor(description) {
22891
- this.description = void 0;
22892
- this.description = description;
22893
- }
22894
- }
22895
-
22896
23703
  const DICT = 'Dict';
22897
23704
 
22898
23705
  function format(value) {
@@ -22916,29 +23723,27 @@ function throwError(action, src, type, cause) {
22916
23723
  });
22917
23724
  }
22918
23725
 
22919
- const BARE_ITEM = 'Bare Item';
22920
-
22921
- const BOOLEAN = 'Boolean';
22922
-
22923
- const BYTES = 'Byte Sequence';
22924
-
22925
- const DECIMAL = 'Decimal';
22926
-
22927
- const INTEGER = 'Integer';
22928
-
22929
- function isInvalidInt(value) {
22930
- return value < -999999999999999 || 999999999999999 < value;
23726
+ function serializeError(src, type, cause) {
23727
+ return throwError('serialize', src, type, cause);
22931
23728
  }
22932
23729
 
22933
- const STRING_REGEX = /[\x00-\x1f\x7f]+/; // eslint-disable-line no-control-regex
22934
-
22935
- const TOKEN = 'Token';
23730
+ /**
23731
+ * A class to represent structured field tokens when `Symbol` is not available.
23732
+ *
23733
+ * @group Structured Field
23734
+ *
23735
+ * @beta
23736
+ */
23737
+ class SfToken {
23738
+ constructor(description) {
23739
+ this.description = void 0;
23740
+ this.description = description;
23741
+ }
23742
+ }
22936
23743
 
22937
- const KEY = 'Key';
23744
+ const BARE_ITEM = 'Bare Item';
22938
23745
 
22939
- function serializeError(src, type, cause) {
22940
- return throwError('serialize', src, type, cause);
22941
- }
23746
+ const BOOLEAN = 'Boolean';
22942
23747
 
22943
23748
  // 4.1.9. Serializing a Boolean
22944
23749
  //
@@ -22977,6 +23782,8 @@ function base64encode(binary) {
22977
23782
  return btoa(String.fromCharCode(...binary));
22978
23783
  }
22979
23784
 
23785
+ const BYTES = 'Byte Sequence';
23786
+
22980
23787
  // 4.1.8. Serializing a Byte Sequence
22981
23788
  //
22982
23789
  // Given a Byte Sequence as input_bytes, return an ASCII string suitable
@@ -23008,6 +23815,12 @@ function serializeByteSequence(value) {
23008
23815
  return `:${base64encode(value)}:`;
23009
23816
  }
23010
23817
 
23818
+ const INTEGER = 'Integer';
23819
+
23820
+ function isInvalidInt(value) {
23821
+ return value < -999999999999999 || 999999999999999 < value;
23822
+ }
23823
+
23011
23824
  // 4.1.4. Serializing an Integer
23012
23825
  //
23013
23826
  // Given an Integer as input_integer, return an ASCII string suitable
@@ -23073,6 +23886,8 @@ function roundToEven(value, precision) {
23073
23886
  }
23074
23887
  }
23075
23888
 
23889
+ const DECIMAL = 'Decimal';
23890
+
23076
23891
  // 4.1.5. Serializing a Decimal
23077
23892
  //
23078
23893
  // Given a decimal number as input_decimal, return an ASCII string
@@ -23118,6 +23933,8 @@ function serializeDecimal(value) {
23118
23933
 
23119
23934
  const STRING = 'String';
23120
23935
 
23936
+ const STRING_REGEX = /[\x00-\x1f\x7f]+/; // eslint-disable-line no-control-regex
23937
+
23121
23938
  // 4.1.6. Serializing a String
23122
23939
  //
23123
23940
  // Given a String as input_string, return an ASCII string suitable for
@@ -23153,6 +23970,8 @@ function symbolToStr(symbol) {
23153
23970
  return symbol.description || symbol.toString().slice(7, -1);
23154
23971
  }
23155
23972
 
23973
+ const TOKEN = 'Token';
23974
+
23156
23975
  function serializeToken(token) {
23157
23976
  const value = symbolToStr(token);
23158
23977
  if (/^([a-zA-Z*])([!#$%&'*+\-.^_`|~\w:/]*)$/.test(value) === false) {
@@ -23220,6 +24039,8 @@ function serializeBareItem(value) {
23220
24039
  }
23221
24040
  }
23222
24041
 
24042
+ const KEY = 'Key';
24043
+
23223
24044
  // 4.1.1.3. Serializing a Key
23224
24045
  //
23225
24046
  // Given a key as input_key, return an ASCII string suitable for use in
@@ -23461,36 +24282,6 @@ function urlToRelativePath(url, base) {
23461
24282
  return toPath.join('/');
23462
24283
  }
23463
24284
 
23464
- /**
23465
- * Generate a random v4 UUID
23466
- *
23467
- * @returns A random v4 UUID
23468
- *
23469
- * @group Utils
23470
- *
23471
- * @beta
23472
- */
23473
- function uuid() {
23474
- try {
23475
- return crypto.randomUUID();
23476
- } catch (error) {
23477
- try {
23478
- const url = URL.createObjectURL(new Blob());
23479
- const uuid = url.toString();
23480
- URL.revokeObjectURL(url);
23481
- return uuid.slice(uuid.lastIndexOf('/') + 1);
23482
- } catch (error) {
23483
- let dt = new Date().getTime();
23484
- const uuid = 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, c => {
23485
- const r = (dt + Math.random() * 16) % 16 | 0;
23486
- dt = Math.floor(dt / 16);
23487
- return (c == 'x' ? r : r & 0x3 | 0x8).toString(16);
23488
- });
23489
- return uuid;
23490
- }
23491
- }
23492
- }
23493
-
23494
24285
  const toRounded = value => Math.round(value);
23495
24286
  const toUrlSafe = (value, options) => {
23496
24287
  if (options != null && options.baseUrl) {
@@ -23716,6 +24507,36 @@ function appendCmcdQuery(url, cmcd, options) {
23716
24507
  return `${url}${separator}${query}`;
23717
24508
  }
23718
24509
 
24510
+ /**
24511
+ * Generate a random v4 UUID
24512
+ *
24513
+ * @returns A random v4 UUID
24514
+ *
24515
+ * @group Utils
24516
+ *
24517
+ * @beta
24518
+ */
24519
+ function uuid() {
24520
+ try {
24521
+ return crypto.randomUUID();
24522
+ } catch (error) {
24523
+ try {
24524
+ const url = URL.createObjectURL(new Blob());
24525
+ const uuid = url.toString();
24526
+ URL.revokeObjectURL(url);
24527
+ return uuid.slice(uuid.lastIndexOf('/') + 1);
24528
+ } catch (error) {
24529
+ let dt = new Date().getTime();
24530
+ const uuid = 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, c => {
24531
+ const r = (dt + Math.random() * 16) % 16 | 0;
24532
+ dt = Math.floor(dt / 16);
24533
+ return (c == 'x' ? r : r & 0x3 | 0x8).toString(16);
24534
+ });
24535
+ return uuid;
24536
+ }
24537
+ }
24538
+ }
24539
+
23719
24540
  /**
23720
24541
  * Controller to deal with Common Media Client Data (CMCD)
23721
24542
  * @see https://cdn.cta.tech/cta/media/media/resources/standards/pdfs/cta-5004-final.pdf
@@ -23779,6 +24600,12 @@ class CMCDController {
  data.tb = this.getTopBandwidth(ot) / 1000;
  data.bl = this.getBufferLength(ot);
  }
+ const next = this.getNextFrag(fragment);
+ if (next) {
+ if (next.url && next.url !== fragment.url) {
+ data.nor = next.url;
+ }
+ }
  this.apply(context, data);
  } catch (error) {
  logger.warn('Could not generate segment CMCD data.', error);
@@ -23871,7 +24698,7 @@ class CMCDController {
23871
24698
  data.su = this.buffering;
23872
24699
  }
23873
24700
 
23874
- // TODO: Implement rtp, nrr, nor, dl
24701
+ // TODO: Implement rtp, nrr, dl
23875
24702
 
23876
24703
  const {
23877
24704
  includeKeys
@@ -23882,15 +24709,28 @@ class CMCDController {
23882
24709
  return acc;
23883
24710
  }, {});
23884
24711
  }
24712
+ const options = {
24713
+ baseUrl: context.url
24714
+ };
23885
24715
  if (this.useHeaders) {
23886
24716
  if (!context.headers) {
23887
24717
  context.headers = {};
23888
24718
  }
23889
- appendCmcdHeaders(context.headers, data);
24719
+ appendCmcdHeaders(context.headers, data, options);
23890
24720
  } else {
23891
- context.url = appendCmcdQuery(context.url, data);
24721
+ context.url = appendCmcdQuery(context.url, data, options);
24722
+ }
24723
+ }
24724
+ getNextFrag(fragment) {
24725
+ var _this$hls$levels$frag;
24726
+ const levelDetails = (_this$hls$levels$frag = this.hls.levels[fragment.level]) == null ? void 0 : _this$hls$levels$frag.details;
24727
+ if (levelDetails) {
24728
+ const index = fragment.sn - levelDetails.startSN;
24729
+ return levelDetails.fragments[index + 1];
23892
24730
  }
24731
+ return undefined;
23893
24732
  }
24733
+
23894
24734
  /**
23895
24735
  * The CMCD object type.
23896
24736
  */
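Note: with `getNextFrag` resolving the upcoming fragment and `baseUrl` now passed through to the serializer, the CMCD `nor` (next object request) key can be emitted relative to the requesting URL. A hedged illustration (URLs are made up; `appendCmcdQuery` is the bundled common-media-library helper used in `apply()` above):

```ts
// Illustration only; the exact encoding of the CMCD query is defined by the serializer.
const segmentUrl = 'https://cdn.example.com/video/seg_10.m4s';
const withCmcd = appendCmcdQuery(
  segmentUrl,
  { nor: 'https://cdn.example.com/video/seg_11.m4s' },
  { baseUrl: segmentUrl },
);
// withCmcd now carries a CMCD payload whose nor value should refer to "seg_11.m4s"
// rather than the absolute URL.
```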
@@ -24019,10 +24859,10 @@ class CMCDController {
24019
24859
  }
24020
24860
 
24021
24861
  const PATHWAY_PENALTY_DURATION_MS = 300000;
24022
- class ContentSteeringController {
24862
+ class ContentSteeringController extends Logger {
24023
24863
  constructor(hls) {
24864
+ super('content-steering', hls.logger);
24024
24865
  this.hls = void 0;
24025
- this.log = void 0;
24026
24866
  this.loader = null;
24027
24867
  this.uri = null;
24028
24868
  this.pathwayId = '.';
@@ -24037,7 +24877,6 @@ class ContentSteeringController {
24037
24877
  this.subtitleTracks = null;
24038
24878
  this.penalizedPathways = {};
24039
24879
  this.hls = hls;
24040
- this.log = logger.log.bind(logger, `[content-steering]:`);
24041
24880
  this.registerListeners();
24042
24881
  }
24043
24882
  registerListeners() {
@@ -24161,7 +25000,7 @@ class ContentSteeringController {
24161
25000
  errorAction.resolved = this.pathwayId !== errorPathway;
24162
25001
  }
24163
25002
  if (!errorAction.resolved) {
24164
- logger.warn(`Could not resolve ${data.details} ("${data.error.message}") with content-steering for Pathway: ${errorPathway} levels: ${levels ? levels.length : levels} priorities: ${JSON.stringify(pathwayPriority)} penalized: ${JSON.stringify(this.penalizedPathways)}`);
25003
+ this.warn(`Could not resolve ${data.details} ("${data.error.message}") with content-steering for Pathway: ${errorPathway} levels: ${levels ? levels.length : levels} priorities: ${JSON.stringify(pathwayPriority)} penalized: ${JSON.stringify(this.penalizedPathways)}`);
24165
25004
  }
24166
25005
  }
24167
25006
  }
@@ -24332,7 +25171,7 @@ class ContentSteeringController {
24332
25171
  onSuccess: (response, stats, context, networkDetails) => {
24333
25172
  this.log(`Loaded steering manifest: "${url}"`);
24334
25173
  const steeringData = response.data;
24335
- if (steeringData.VERSION !== 1) {
25174
+ if ((steeringData == null ? void 0 : steeringData.VERSION) !== 1) {
24336
25175
  this.log(`Steering VERSION ${steeringData.VERSION} not supported!`);
24337
25176
  return;
24338
25177
  }
@@ -25302,7 +26141,7 @@ function timelineConfig() {
25302
26141
  /**
25303
26142
  * @ignore
25304
26143
  */
25305
- function mergeConfig(defaultConfig, userConfig) {
26144
+ function mergeConfig(defaultConfig, userConfig, logger) {
25306
26145
  if ((userConfig.liveSyncDurationCount || userConfig.liveMaxLatencyDurationCount) && (userConfig.liveSyncDuration || userConfig.liveMaxLatencyDuration)) {
25307
26146
  throw new Error("Illegal hls.js config: don't mix up liveSyncDurationCount/liveMaxLatencyDurationCount and liveSyncDuration/liveMaxLatencyDuration");
25308
26147
  }
@@ -25372,7 +26211,7 @@ function deepCpy(obj) {
25372
26211
  /**
25373
26212
  * @ignore
25374
26213
  */
25375
- function enableStreamingMode(config) {
26214
+ function enableStreamingMode(config, logger) {
25376
26215
  const currentLoader = config.loader;
25377
26216
  if (currentLoader !== FetchLoader && currentLoader !== XhrLoader) {
25378
26217
  // If a developer has configured their own loader, respect that choice
@@ -25389,10 +26228,9 @@ function enableStreamingMode(config) {
25389
26228
  }
25390
26229
  }
25391
26230
 
25392
- let chromeOrFirefox;
25393
26231
  class LevelController extends BasePlaylistController {
25394
26232
  constructor(hls, contentSteeringController) {
25395
- super(hls, '[level-controller]');
26233
+ super(hls, 'level-controller');
25396
26234
  this._levels = [];
25397
26235
  this._firstLevel = -1;
25398
26236
  this._maxAutoLevel = -1;
@@ -25463,23 +26301,15 @@ class LevelController extends BasePlaylistController {
25463
26301
  let videoCodecFound = false;
25464
26302
  let audioCodecFound = false;
25465
26303
  data.levels.forEach(levelParsed => {
25466
- var _audioCodec, _videoCodec;
26304
+ var _videoCodec;
25467
26305
  const attributes = levelParsed.attrs;
25468
-
25469
- // erase audio codec info if browser does not support mp4a.40.34.
25470
- // demuxer will autodetect codec and fallback to mpeg/audio
25471
26306
  let {
25472
26307
  audioCodec,
25473
26308
  videoCodec
25474
26309
  } = levelParsed;
25475
- if (((_audioCodec = audioCodec) == null ? void 0 : _audioCodec.indexOf('mp4a.40.34')) !== -1) {
25476
- chromeOrFirefox || (chromeOrFirefox = /chrome|firefox/i.test(navigator.userAgent));
25477
- if (chromeOrFirefox) {
25478
- levelParsed.audioCodec = audioCodec = undefined;
25479
- }
25480
- }
25481
26310
  if (audioCodec) {
25482
- levelParsed.audioCodec = audioCodec = getCodecCompatibleName(audioCodec, preferManagedMediaSource);
26311
+ // Returns empty and set to undefined for 'mp4a.40.34' with fallback to 'audio/mpeg' SourceBuffer
26312
+ levelParsed.audioCodec = audioCodec = getCodecCompatibleName(audioCodec, preferManagedMediaSource) || undefined;
25483
26313
  }
25484
26314
  if (((_videoCodec = videoCodec) == null ? void 0 : _videoCodec.indexOf('avc1')) === 0) {
25485
26315
  videoCodec = levelParsed.videoCodec = convertAVC1ToAVCOTI(videoCodec);
@@ -26065,6 +26895,8 @@ class KeyLoader {
  }
  return this.loadKeyEME(keyInfo, frag);
  case 'AES-128':
+ case 'AES-256':
+ case 'AES-256-CTR':
  return this.loadKeyHTTP(keyInfo, frag);
  default:
  return Promise.reject(this.createKeyLoadError(frag, ErrorDetails.KEY_LOAD_ERROR, new Error(`Key supplied with unsupported METHOD: "${decryptdata.method}"`)));
@@ -26202,8 +27034,9 @@ const STALL_MINIMUM_DURATION_MS = 250;
  const MAX_START_GAP_JUMP = 2.0;
  const SKIP_BUFFER_HOLE_STEP_SECONDS = 0.1;
  const SKIP_BUFFER_RANGE_START = 0.05;
- class GapController {
+ class GapController extends Logger {
  constructor(config, media, fragmentTracker, hls) {
+ super('gap-controller', hls.logger);
  this.config = void 0;
  this.media = null;
  this.fragmentTracker = void 0;
@@ -26213,6 +27046,7 @@ class GapController {
  this.stalled = null;
  this.moved = false;
  this.seeking = false;
+ this.ended = 0;
  this.config = config;
  this.media = media;
  this.fragmentTracker = fragmentTracker;
@@ -26230,7 +27064,7 @@ class GapController {
  *
  * @param lastCurrentTime - Previously read playhead position
  */
- poll(lastCurrentTime, activeFrag) {
+ poll(lastCurrentTime, activeFrag, levelDetails, state) {
  const {
  config,
  media,
@@ -26249,6 +27083,7 @@ class GapController {
26249
27083
 
26250
27084
  // The playhead is moving, no-op
26251
27085
  if (currentTime !== lastCurrentTime) {
27086
+ this.ended = 0;
26252
27087
  this.moved = true;
26253
27088
  if (!seeking) {
26254
27089
  this.nudgeRetry = 0;
@@ -26257,7 +27092,7 @@ class GapController {
26257
27092
  // The playhead is now moving, but was previously stalled
26258
27093
  if (this.stallReported) {
26259
27094
  const _stalledDuration = self.performance.now() - stalled;
26260
- logger.warn(`playback not stuck anymore @${currentTime}, after ${Math.round(_stalledDuration)}ms`);
27095
+ this.warn(`playback not stuck anymore @${currentTime}, after ${Math.round(_stalledDuration)}ms`);
26261
27096
  this.stallReported = false;
26262
27097
  }
26263
27098
  this.stalled = null;
@@ -26293,7 +27128,6 @@ class GapController {
26293
27128
  // Skip start gaps if we haven't played, but the last poll detected the start of a stall
26294
27129
  // The addition poll gives the browser a chance to jump the gap for us
26295
27130
  if (!this.moved && this.stalled !== null) {
26296
- var _level$details;
26297
27131
  // There is no playable buffer (seeked, waiting for buffer)
26298
27132
  const isBuffered = bufferInfo.len > 0;
26299
27133
  if (!isBuffered && !nextStart) {
@@ -26305,9 +27139,8 @@ class GapController {
26305
27139
  // When joining a live stream with audio tracks, account for live playlist window sliding by allowing
26306
27140
  // a larger jump over start gaps caused by the audio-stream-controller buffering a start fragment
26307
27141
  // that begins over 1 target duration after the video start position.
26308
- const level = this.hls.levels ? this.hls.levels[this.hls.currentLevel] : null;
26309
- const isLive = level == null ? void 0 : (_level$details = level.details) == null ? void 0 : _level$details.live;
26310
- const maxStartGapJump = isLive ? level.details.targetduration * 2 : MAX_START_GAP_JUMP;
27142
+ const isLive = !!(levelDetails != null && levelDetails.live);
27143
+ const maxStartGapJump = isLive ? levelDetails.targetduration * 2 : MAX_START_GAP_JUMP;
26311
27144
  const partialOrGap = this.fragmentTracker.getPartialFragment(currentTime);
26312
27145
  if (startJump > 0 && (startJump <= maxStartGapJump || partialOrGap)) {
26313
27146
  if (!media.paused) {
@@ -26325,6 +27158,17 @@ class GapController {
  }
  const stalledDuration = tnow - stalled;
  if (!seeking && stalledDuration >= STALL_MINIMUM_DURATION_MS) {
+ // Dispatch MEDIA_ENDED when media.ended/ended event is not signalled at end of stream
+ if (state === State.ENDED && !(levelDetails && levelDetails.live) && Math.abs(currentTime - ((levelDetails == null ? void 0 : levelDetails.edge) || 0)) < 1) {
+ if (stalledDuration < 1000 || this.ended) {
+ return;
+ }
+ this.ended = currentTime;
+ this.hls.trigger(Events.MEDIA_ENDED, {
+ stalled: true
+ });
+ return;
+ }
  // Report stalling after trying to fix
  this._reportStall(bufferInfo);
  if (!this.media) {
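Note: when playback stalls within a second of the end of a non-live playlist while the controller is in the ENDED state, GapController now fires `MEDIA_ENDED` with `stalled: true` instead of nudging. A hedged consumer example (the overlay function is a made-up application hook; the payload shape is taken from the trigger call above):

```ts
import Hls, { Events } from 'hls.js';

declare function showReplayOverlay(): void; // hypothetical app hook

const hls = new Hls();
hls.on(Events.MEDIA_ENDED, (_event, data) => {
  if (data.stalled) {
    // The media element never reported "ended"; treat this as end of stream anyway.
    showReplayOverlay();
  }
});
```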
@@ -26368,7 +27212,7 @@ class GapController {
26368
27212
  // needs to cross some sort of threshold covering all source-buffers content
26369
27213
  // to start playing properly.
26370
27214
  if ((bufferInfo.len > config.maxBufferHole || bufferInfo.nextStart && bufferInfo.nextStart - currentTime < config.maxBufferHole) && stalledDurationMs > config.highBufferWatchdogPeriod * 1000) {
26371
- logger.warn('Trying to nudge playhead over buffer-hole');
27215
+ this.warn('Trying to nudge playhead over buffer-hole');
26372
27216
  // Try to nudge currentTime over a buffer hole if we've been stalling for the configured amount of seconds
26373
27217
  // We only try to jump the hole if it's under the configured size
26374
27218
  // Reset stalled so to rearm watchdog timer
@@ -26392,7 +27236,7 @@ class GapController {
26392
27236
  // Report stalled error once
26393
27237
  this.stallReported = true;
26394
27238
  const error = new Error(`Playback stalling at @${media.currentTime} due to low buffer (${JSON.stringify(bufferInfo)})`);
26395
- logger.warn(error.message);
27239
+ this.warn(error.message);
26396
27240
  hls.trigger(Events.ERROR, {
26397
27241
  type: ErrorTypes.MEDIA_ERROR,
26398
27242
  details: ErrorDetails.BUFFER_STALLED_ERROR,
@@ -26460,7 +27304,7 @@ class GapController {
26460
27304
  }
26461
27305
  }
26462
27306
  const targetTime = Math.max(startTime + SKIP_BUFFER_RANGE_START, currentTime + SKIP_BUFFER_HOLE_STEP_SECONDS);
26463
- logger.warn(`skipping hole, adjusting currentTime from ${currentTime} to ${targetTime}`);
27307
+ this.warn(`skipping hole, adjusting currentTime from ${currentTime} to ${targetTime}`);
26464
27308
  this.moved = true;
26465
27309
  this.stalled = null;
26466
27310
  media.currentTime = targetTime;
@@ -26501,7 +27345,7 @@ class GapController {
26501
27345
  const targetTime = currentTime + (nudgeRetry + 1) * config.nudgeOffset;
26502
27346
  // playback stalled in buffered area ... let's nudge currentTime to try to overcome this
26503
27347
  const error = new Error(`Nudging 'currentTime' from ${currentTime} to ${targetTime}`);
26504
- logger.warn(error.message);
27348
+ this.warn(error.message);
26505
27349
  media.currentTime = targetTime;
26506
27350
  hls.trigger(Events.ERROR, {
26507
27351
  type: ErrorTypes.MEDIA_ERROR,
@@ -26511,7 +27355,7 @@ class GapController {
26511
27355
  });
26512
27356
  } else {
26513
27357
  const error = new Error(`Playhead still not moving while enough data buffered @${currentTime} after ${config.nudgeMaxRetry} nudges`);
26514
- logger.error(error.message);
27358
+ this.error(error.message);
26515
27359
  hls.trigger(Events.ERROR, {
26516
27360
  type: ErrorTypes.MEDIA_ERROR,
26517
27361
  details: ErrorDetails.BUFFER_STALLED_ERROR,
@@ -26526,7 +27370,7 @@ const TICK_INTERVAL = 100; // how often to tick in ms

  class StreamController extends BaseStreamController {
  constructor(hls, fragmentTracker, keyLoader) {
- super(hls, fragmentTracker, keyLoader, '[stream-controller]', PlaylistLevelType.MAIN);
+ super(hls, fragmentTracker, keyLoader, 'stream-controller', PlaylistLevelType.MAIN);
  this.audioCodecSwap = false;
  this.gapController = null;
  this.level = -1;
@@ -26534,27 +27378,43 @@ class StreamController extends BaseStreamController {
26534
27378
  this.altAudio = false;
26535
27379
  this.audioOnly = false;
26536
27380
  this.fragPlaying = null;
26537
- this.onvplaying = null;
26538
- this.onvseeked = null;
26539
27381
  this.fragLastKbps = 0;
26540
27382
  this.couldBacktrack = false;
26541
27383
  this.backtrackFragment = null;
26542
27384
  this.audioCodecSwitch = false;
26543
27385
  this.videoBuffer = null;
26544
- this._registerListeners();
27386
+ this.onMediaPlaying = () => {
27387
+ // tick to speed up FRAG_CHANGED triggering
27388
+ this.tick();
27389
+ };
27390
+ this.onMediaSeeked = () => {
27391
+ const media = this.media;
27392
+ const currentTime = media ? media.currentTime : null;
27393
+ if (isFiniteNumber(currentTime)) {
27394
+ this.log(`Media seeked to ${currentTime.toFixed(3)}`);
27395
+ }
27396
+
27397
+ // If seeked was issued before buffer was appended do not tick immediately
27398
+ const bufferInfo = this.getMainFwdBufferInfo();
27399
+ if (bufferInfo === null || bufferInfo.len === 0) {
27400
+ this.warn(`Main forward buffer length on "seeked" event ${bufferInfo ? bufferInfo.len : 'empty'})`);
27401
+ return;
27402
+ }
27403
+
27404
+ // tick to speed up FRAG_CHANGED triggering
27405
+ this.tick();
27406
+ };
27407
+ this.registerListeners();
26545
27408
  }
26546
- _registerListeners() {
27409
+ registerListeners() {
27410
+ super.registerListeners();
26547
27411
  const {
26548
27412
  hls
26549
27413
  } = this;
26550
- hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
26551
- hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
26552
- hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
26553
27414
  hls.on(Events.MANIFEST_PARSED, this.onManifestParsed, this);
26554
27415
  hls.on(Events.LEVEL_LOADING, this.onLevelLoading, this);
26555
27416
  hls.on(Events.LEVEL_LOADED, this.onLevelLoaded, this);
26556
27417
  hls.on(Events.FRAG_LOAD_EMERGENCY_ABORTED, this.onFragLoadEmergencyAborted, this);
26557
- hls.on(Events.ERROR, this.onError, this);
26558
27418
  hls.on(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
26559
27419
  hls.on(Events.AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this);
26560
27420
  hls.on(Events.BUFFER_CREATED, this.onBufferCreated, this);
@@ -26562,17 +27422,14 @@ class StreamController extends BaseStreamController {
26562
27422
  hls.on(Events.LEVELS_UPDATED, this.onLevelsUpdated, this);
26563
27423
  hls.on(Events.FRAG_BUFFERED, this.onFragBuffered, this);
26564
27424
  }
26565
- _unregisterListeners() {
27425
+ unregisterListeners() {
27426
+ super.unregisterListeners();
26566
27427
  const {
26567
27428
  hls
26568
27429
  } = this;
26569
- hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
26570
- hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
26571
- hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
26572
27430
  hls.off(Events.MANIFEST_PARSED, this.onManifestParsed, this);
26573
27431
  hls.off(Events.LEVEL_LOADED, this.onLevelLoaded, this);
26574
27432
  hls.off(Events.FRAG_LOAD_EMERGENCY_ABORTED, this.onFragLoadEmergencyAborted, this);
26575
- hls.off(Events.ERROR, this.onError, this);
26576
27433
  hls.off(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
26577
27434
  hls.off(Events.AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this);
26578
27435
  hls.off(Events.BUFFER_CREATED, this.onBufferCreated, this);
@@ -26581,7 +27438,9 @@ class StreamController extends BaseStreamController {
26581
27438
  hls.off(Events.FRAG_BUFFERED, this.onFragBuffered, this);
26582
27439
  }
26583
27440
  onHandlerDestroying() {
26584
- this._unregisterListeners();
27441
+ // @ts-ignore
27442
+ this.onMediaPlaying = this.onMediaSeeked = null;
27443
+ this.unregisterListeners();
26585
27444
  super.onHandlerDestroying();
26586
27445
  }
26587
27446
  startLoad(startPosition) {
@@ -26607,7 +27466,8 @@ class StreamController extends BaseStreamController {
  }
  // set new level to playlist loader : this will trigger start level load
  // hls.nextLoadLevel remains until it is set to a new value or until a new frag is successfully loaded
- this.level = hls.nextLoadLevel = startLevel;
+ hls.nextLoadLevel = startLevel;
+ this.level = hls.loadLevel;
  this.loadedmetadata = false;
  }
  // if startPosition undefined but lastCurrentTime set, set startPosition to last currentTime
@@ -26700,7 +27560,7 @@ class StreamController extends BaseStreamController {
  if (this.altAudio && this.audioOnly) {
  return;
  }
- if (!(levels != null && levels[level])) {
+ if (!this.buffering || !(levels != null && levels[level])) {
  return;
  }
  const levelInfo = levels[level];
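The added `!this.buffering` guard skips fragment loading while buffering is paused; combined with the `pauseBuffering`/`resumeBuffering` delegation later in this diff, it points to a simple flag-based gate. A sketch of such a gate, with all names assumed for illustration only:

// Hypothetical controller showing a buffering flag checked by the load tick.
class FragmentLoaderSketch {
  private buffering = true;

  pauseBuffering(): void {
    this.buffering = false;
  }

  resumeBuffering(): void {
    this.buffering = true;
  }

  // Called periodically; skips new fragment loads while paused.
  doTick(): void {
    if (!this.buffering) {
      return;
    }
    // ...select and load the next fragment here...
  }
}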
@@ -26908,20 +27768,17 @@ class StreamController extends BaseStreamController {
  onMediaAttached(event, data) {
  super.onMediaAttached(event, data);
  const media = data.media;
- this.onvplaying = this.onMediaPlaying.bind(this);
- this.onvseeked = this.onMediaSeeked.bind(this);
- media.addEventListener('playing', this.onvplaying);
- media.addEventListener('seeked', this.onvseeked);
+ media.addEventListener('playing', this.onMediaPlaying);
+ media.addEventListener('seeked', this.onMediaSeeked);
  this.gapController = new GapController(this.config, media, this.fragmentTracker, this.hls);
  }
  onMediaDetaching() {
  const {
  media
  } = this;
- if (media && this.onvplaying && this.onvseeked) {
- media.removeEventListener('playing', this.onvplaying);
- media.removeEventListener('seeked', this.onvseeked);
- this.onvplaying = this.onvseeked = null;
+ if (media) {
+ media.removeEventListener('playing', this.onMediaPlaying);
+ media.removeEventListener('seeked', this.onMediaSeeked);
  this.videoBuffer = null;
  }
  this.fragPlaying = null;
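The `onvplaying`/`onvseeked` bookkeeping (manual `.bind(this)` plus nulling on detach) is replaced by handlers that are already bound, so the same reference works for both `addEventListener` and `removeEventListener`. A minimal sketch of the arrow-function class-field pattern, simplified and not the actual StreamController:

// Arrow-function class fields capture `this`, so no bind/unbind bookkeeping is needed.
class MediaWatcher {
  private media: HTMLMediaElement | null = null;

  attach(media: HTMLMediaElement): void {
    this.media = media;
    media.addEventListener('playing', this.onMediaPlaying);
    media.addEventListener('seeked', this.onMediaSeeked);
  }

  detach(): void {
    if (this.media) {
      this.media.removeEventListener('playing', this.onMediaPlaying);
      this.media.removeEventListener('seeked', this.onMediaSeeked);
      this.media = null;
    }
  }

  private onMediaPlaying = () => {
    // `this` is the MediaWatcher instance even when called by the DOM.
    console.log('playing');
  };

  private onMediaSeeked = () => {
    console.log('seeked to', this.media?.currentTime);
  };
}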
@@ -26931,27 +27788,6 @@ class StreamController extends BaseStreamController {
  }
  super.onMediaDetaching();
  }
- onMediaPlaying() {
- // tick to speed up FRAG_CHANGED triggering
- this.tick();
- }
- onMediaSeeked() {
- const media = this.media;
- const currentTime = media ? media.currentTime : null;
- if (isFiniteNumber(currentTime)) {
- this.log(`Media seeked to ${currentTime.toFixed(3)}`);
- }
-
- // If seeked was issued before buffer was appended do not tick immediately
- const bufferInfo = this.getMainFwdBufferInfo();
- if (bufferInfo === null || bufferInfo.len === 0) {
- this.warn(`Main forward buffer length on "seeked" event ${bufferInfo ? bufferInfo.len : 'empty'})`);
- return;
- }
-
- // tick to speed up FRAG_CHANGED triggering
- this.tick();
- }
  onManifestLoading() {
  // reset buffer on manifest loading
  this.log('Trigger BUFFER_RESET');
@@ -27243,8 +28079,10 @@ class StreamController extends BaseStreamController {
  }
  if (this.loadedmetadata || !BufferHelper.getBuffered(media).length) {
  // Resolve gaps using the main buffer, whose ranges are the intersections of the A/V sourcebuffers
- const activeFrag = this.state !== State.IDLE ? this.fragCurrent : null;
- gapController.poll(this.lastCurrentTime, activeFrag);
+ const state = this.state;
+ const activeFrag = state !== State.IDLE ? this.fragCurrent : null;
+ const levelDetails = this.getLevelDetails();
+ gapController.poll(this.lastCurrentTime, activeFrag, levelDetails, state);
  }
  this.lastCurrentTime = media.currentTime;
  }
@@ -27682,7 +28520,7 @@ class Hls {
  * Get the video-dev/hls.js package version.
  */
  static get version() {
- return "1.5.3";
+ return "1.5.5-0.canary.9977";
  }

  /**
@@ -27745,9 +28583,12 @@ class Hls {
  * The configuration object provided on player instantiation.
  */
  this.userConfig = void 0;
+ /**
+ * The logger functions used by this player instance, configured on player instantiation.
+ */
+ this.logger = void 0;
  this.coreComponents = void 0;
  this.networkControllers = void 0;
- this.started = false;
  this._emitter = new EventEmitter();
  this._autoLevelCapping = -1;
  this._maxHdcpLevel = null;
@@ -27764,11 +28605,11 @@ class Hls {
  this._media = null;
  this.url = null;
  this.triggeringException = void 0;
- enableLogs(userConfig.debug || false, 'Hls instance');
- const config = this.config = mergeConfig(Hls.DefaultConfig, userConfig);
+ const logger = this.logger = enableLogs(userConfig.debug || false, 'Hls instance');
+ const config = this.config = mergeConfig(Hls.DefaultConfig, userConfig, logger);
  this.userConfig = userConfig;
  if (config.progressive) {
- enableStreamingMode(config);
+ enableStreamingMode(config, logger);
  }

  // core controllers and network loaders
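`enableLogs` now returns the logger it configures; the instance stores it and threads it into `mergeConfig` and `enableStreamingMode`, so log output is scoped to this player rather than a module-level singleton. The `debug` option that feeds `enableLogs` can, per the hls.js documentation, also be a custom logger object; a usage sketch (the logger shape below is an assumption limited to console-like methods):

import Hls from 'hls.js';

// Assumed console-like logger; hls.js forwards its log calls to these methods.
const myLogger = {
  trace: (...args: unknown[]) => console.trace('[hls]', ...args),
  debug: (...args: unknown[]) => console.debug('[hls]', ...args),
  log: (...args: unknown[]) => console.log('[hls]', ...args),
  warn: (...args: unknown[]) => console.warn('[hls]', ...args),
  info: (...args: unknown[]) => console.info('[hls]', ...args),
  error: (...args: unknown[]) => console.error('[hls]', ...args),
};

const hls = new Hls({ debug: myLogger }); // or { debug: true } for default logging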
@@ -27867,7 +28708,7 @@ class Hls {
  try {
  return this.emit(event, event, eventObject);
  } catch (error) {
- logger.error('An internal error happened while handling event ' + event + '. Error message: "' + error.message + '". Here is a stacktrace:', error);
+ this.logger.error('An internal error happened while handling event ' + event + '. Error message: "' + error.message + '". Here is a stacktrace:', error);
  // Prevent recursion in error event handlers that throw #5497
  if (!this.triggeringException) {
  this.triggeringException = true;
@@ -27893,7 +28734,7 @@ class Hls {
  * Dispose of the instance
  */
  destroy() {
- logger.log('destroy');
+ this.logger.log('destroy');
  this.trigger(Events.DESTROYING, undefined);
  this.detachMedia();
  this.removeAllListeners();
@@ -27914,7 +28755,7 @@ class Hls {
  * Attaches Hls.js to a media element
  */
  attachMedia(media) {
- logger.log('attachMedia');
+ this.logger.log('attachMedia');
  this._media = media;
  this.trigger(Events.MEDIA_ATTACHING, {
  media: media
@@ -27925,7 +28766,7 @@ class Hls {
  * Detach Hls.js from the media
  */
  detachMedia() {
- logger.log('detachMedia');
+ this.logger.log('detachMedia');
  this.trigger(Events.MEDIA_DETACHING, undefined);
  this._media = null;
  }
@@ -27942,7 +28783,7 @@ class Hls {
  });
  this._autoLevelCapping = -1;
  this._maxHdcpLevel = null;
- logger.log(`loadSource:${loadingSource}`);
+ this.logger.log(`loadSource:${loadingSource}`);
  if (media && loadedSource && (loadedSource !== loadingSource || this.bufferController.hasSourceTypes())) {
  this.detachMedia();
  this.attachMedia(media);
@@ -27961,8 +28802,7 @@ class Hls {
  * Defaults to -1 (None: starts from earliest point)
  */
  startLoad(startPosition = -1) {
- logger.log(`startLoad(${startPosition})`);
- this.started = true;
+ this.logger.log(`startLoad(${startPosition})`);
  this.networkControllers.forEach(controller => {
  controller.startLoad(startPosition);
  });
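With the `started` flag removed, `startLoad` simply forwards the call to every network controller. A typical use of the public API when `autoStartLoad` is disabled (the manifest URL below is a placeholder):

import Hls from 'hls.js';

const video = document.querySelector('video') as HTMLVideoElement;
const hls = new Hls({ autoStartLoad: false }); // defer segment loading

hls.attachMedia(video);
hls.loadSource('https://example.com/stream.m3u8'); // placeholder URL
// Begin loading once the manifest is parsed; -1 starts from the default position.
hls.on(Hls.Events.MANIFEST_PARSED, () => {
  hls.startLoad();
});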
@@ -27972,34 +28812,31 @@ class Hls {
  * Stop loading of any stream data.
  */
  stopLoad() {
- logger.log('stopLoad');
- this.started = false;
+ this.logger.log('stopLoad');
  this.networkControllers.forEach(controller => {
  controller.stopLoad();
  });
  }

  /**
- * Resumes stream controller segment loading if previously started.
+ * Resumes stream controller segment loading after `pauseBuffering` has been called.
  */
  resumeBuffering() {
- if (this.started) {
- this.networkControllers.forEach(controller => {
- if ('fragmentLoader' in controller) {
- controller.startLoad(-1);
- }
- });
- }
+ this.networkControllers.forEach(controller => {
+ if (controller.resumeBuffering) {
+ controller.resumeBuffering();
+ }
+ });
  }

  /**
- * Stops stream controller segment loading without changing 'started' state like stopLoad().
+ * Prevents stream controller from loading new segments until `resumeBuffering` is called.
  * This allows for media buffering to be paused without interupting playlist loading.
  */
  pauseBuffering() {
  this.networkControllers.forEach(controller => {
- if ('fragmentLoader' in controller) {
- controller.stopLoad();
+ if (controller.pauseBuffering) {
+ controller.pauseBuffering();
  }
  });
  }
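`pauseBuffering` and `resumeBuffering` now delegate to controllers that implement methods of the same name instead of checking for a `fragmentLoader` property and toggling the removed `started` flag. As the updated JSDoc says, this pauses media buffering without interrupting playlist loading; a usage sketch of the public API (the visibility trigger is only an example):

import Hls from 'hls.js';

const hls = new Hls();
// Example: stop filling the media buffer while the page is hidden,
// then resume without restarting playlist loading.
document.addEventListener('visibilitychange', () => {
  if (document.hidden) {
    hls.pauseBuffering();
  } else {
    hls.resumeBuffering();
  }
});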
@@ -28008,7 +28845,7 @@ class Hls {
  * Swap through possible audio codecs in the stream (for example to switch from stereo to 5.1)
  */
  swapAudioCodec() {
- logger.log('swapAudioCodec');
+ this.logger.log('swapAudioCodec');
  this.streamController.swapAudioCodec();
  }

@@ -28019,7 +28856,7 @@ class Hls {
  * Automatic recovery of media-errors by this process is configurable.
  */
  recoverMediaError() {
- logger.log('recoverMediaError');
+ this.logger.log('recoverMediaError');
  const media = this._media;
  this.detachMedia();
  if (media) {
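`recoverMediaError` detaches and re-attaches the media element to recover from fatal media errors. The customary pattern from the hls.js documentation pairs it with the ERROR event:

import Hls from 'hls.js';

const hls = new Hls();
hls.on(Hls.Events.ERROR, (_event, data) => {
  if (!data.fatal) {
    return;
  }
  switch (data.type) {
    case Hls.ErrorTypes.MEDIA_ERROR:
      hls.recoverMediaError(); // try to recover decode/append errors
      break;
    case Hls.ErrorTypes.NETWORK_ERROR:
      hls.startLoad(); // retry loading
      break;
    default:
      hls.destroy(); // unrecoverable
  }
});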
@@ -28049,7 +28886,7 @@ class Hls {
  * Set quality level index immediately. This will flush the current buffer to replace the quality asap. That means playback will interrupt at least shortly to re-buffer and re-sync eventually. Set to -1 for automatic level selection.
  */
  set currentLevel(newLevel) {
- logger.log(`set currentLevel:${newLevel}`);
+ this.logger.log(`set currentLevel:${newLevel}`);
  this.levelController.manualLevel = newLevel;
  this.streamController.immediateLevelSwitch();
  }
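The level setters differ in how aggressively they apply a manual choice: `currentLevel` flushes and switches immediately, `nextLevel` requests the switch for upcoming fragments, and `loadLevel` only changes what is loaded next. A short sketch of the public API:

import Hls from 'hls.js';

const hls = new Hls();
// Force the lowest rendition immediately (flushes the buffer; playback may rebuffer briefly).
hls.currentLevel = 0;
// Or request the switch for upcoming fragments instead (may flush part of the forward buffer).
hls.nextLevel = 2;
// Return to automatic ABR selection.
hls.currentLevel = -1;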
@@ -28068,7 +28905,7 @@ class Hls {
  * @param newLevel - Pass -1 for automatic level selection
  */
  set nextLevel(newLevel) {
- logger.log(`set nextLevel:${newLevel}`);
+ this.logger.log(`set nextLevel:${newLevel}`);
  this.levelController.manualLevel = newLevel;
  this.streamController.nextLevelSwitch();
  }
@@ -28087,7 +28924,7 @@ class Hls {
  * @param newLevel - Pass -1 for automatic level selection
  */
  set loadLevel(newLevel) {
- logger.log(`set loadLevel:${newLevel}`);
+ this.logger.log(`set loadLevel:${newLevel}`);
  this.levelController.manualLevel = newLevel;
  }

@@ -28118,7 +28955,7 @@ class Hls {
  * Sets "first-level", see getter.
  */
  set firstLevel(newLevel) {
- logger.log(`set firstLevel:${newLevel}`);
+ this.logger.log(`set firstLevel:${newLevel}`);
  this.levelController.firstLevel = newLevel;
  }

@@ -28143,7 +28980,7 @@ class Hls {
  * (determined from download of first segment)
  */
  set startLevel(newLevel) {
- logger.log(`set startLevel:${newLevel}`);
+ this.logger.log(`set startLevel:${newLevel}`);
  // if not in automatic start level detection, ensure startLevel is greater than minAutoLevel
  if (newLevel !== -1) {
  newLevel = Math.max(newLevel, this.minAutoLevel);
@@ -28218,7 +29055,7 @@ class Hls {
  */
  set autoLevelCapping(newLevel) {
  if (this._autoLevelCapping !== newLevel) {
- logger.log(`set autoLevelCapping:${newLevel}`);
+ this.logger.log(`set autoLevelCapping:${newLevel}`);
  this._autoLevelCapping = newLevel;
  this.levelController.checkMaxAutoUpdated();
  }
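`autoLevelCapping` limits the highest level the ABR logic may pick without affecting manual selection, and setting it triggers `checkMaxAutoUpdated` on the level controller. A usage sketch (the cap value is illustrative):

import Hls from 'hls.js';

const hls = new Hls();
hls.on(Hls.Events.MANIFEST_PARSED, () => {
  // Cap automatic selection below the top renditions, e.g. for small viewports.
  hls.autoLevelCapping = Math.max(hls.levels.length - 3, 0);
});
// Remove the cap later with:
// hls.autoLevelCapping = -1;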