hls.js 1.5.4 → 1.5.5-0.canary.9977

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (67)
  1. package/README.md +1 -0
  2. package/dist/hls-demo.js +10 -0
  3. package/dist/hls-demo.js.map +1 -1
  4. package/dist/hls.js +1930 -1095
  5. package/dist/hls.js.d.ts +63 -50
  6. package/dist/hls.js.map +1 -1
  7. package/dist/hls.light.js +1609 -778
  8. package/dist/hls.light.js.map +1 -1
  9. package/dist/hls.light.min.js +1 -1
  10. package/dist/hls.light.min.js.map +1 -1
  11. package/dist/hls.light.mjs +1363 -542
  12. package/dist/hls.light.mjs.map +1 -1
  13. package/dist/hls.min.js +1 -1
  14. package/dist/hls.min.js.map +1 -1
  15. package/dist/hls.mjs +1635 -815
  16. package/dist/hls.mjs.map +1 -1
  17. package/dist/hls.worker.js +1 -1
  18. package/dist/hls.worker.js.map +1 -1
  19. package/package.json +18 -18
  20. package/src/config.ts +3 -2
  21. package/src/controller/abr-controller.ts +21 -20
  22. package/src/controller/audio-stream-controller.ts +15 -16
  23. package/src/controller/audio-track-controller.ts +1 -1
  24. package/src/controller/base-playlist-controller.ts +7 -7
  25. package/src/controller/base-stream-controller.ts +56 -29
  26. package/src/controller/buffer-controller.ts +11 -11
  27. package/src/controller/cap-level-controller.ts +1 -2
  28. package/src/controller/cmcd-controller.ts +25 -3
  29. package/src/controller/content-steering-controller.ts +8 -6
  30. package/src/controller/eme-controller.ts +9 -22
  31. package/src/controller/error-controller.ts +6 -8
  32. package/src/controller/fps-controller.ts +2 -3
  33. package/src/controller/gap-controller.ts +43 -16
  34. package/src/controller/latency-controller.ts +9 -11
  35. package/src/controller/level-controller.ts +5 -17
  36. package/src/controller/stream-controller.ts +25 -32
  37. package/src/controller/subtitle-stream-controller.ts +13 -14
  38. package/src/controller/subtitle-track-controller.ts +5 -3
  39. package/src/controller/timeline-controller.ts +23 -30
  40. package/src/crypt/aes-crypto.ts +21 -2
  41. package/src/crypt/decrypter-aes-mode.ts +4 -0
  42. package/src/crypt/decrypter.ts +32 -18
  43. package/src/crypt/fast-aes-key.ts +24 -5
  44. package/src/demux/audio/adts.ts +9 -4
  45. package/src/demux/sample-aes.ts +2 -0
  46. package/src/demux/transmuxer-interface.ts +4 -12
  47. package/src/demux/transmuxer-worker.ts +4 -4
  48. package/src/demux/transmuxer.ts +16 -3
  49. package/src/demux/tsdemuxer.ts +63 -37
  50. package/src/demux/video/avc-video-parser.ts +208 -119
  51. package/src/demux/video/base-video-parser.ts +134 -2
  52. package/src/demux/video/exp-golomb.ts +0 -208
  53. package/src/demux/video/hevc-video-parser.ts +746 -0
  54. package/src/events.ts +7 -0
  55. package/src/hls.ts +42 -34
  56. package/src/loader/fragment-loader.ts +9 -2
  57. package/src/loader/key-loader.ts +2 -0
  58. package/src/loader/level-key.ts +10 -9
  59. package/src/remux/mp4-generator.ts +196 -1
  60. package/src/remux/mp4-remuxer.ts +23 -7
  61. package/src/task-loop.ts +5 -2
  62. package/src/types/component-api.ts +2 -0
  63. package/src/types/demuxer.ts +3 -0
  64. package/src/types/events.ts +4 -0
  65. package/src/utils/codecs.ts +33 -4
  66. package/src/utils/encryption-methods-util.ts +21 -0
  67. package/src/utils/logger.ts +53 -24
@@ -256,6 +256,7 @@ let Events = /*#__PURE__*/function (Events) {
256
256
  Events["MEDIA_ATTACHED"] = "hlsMediaAttached";
257
257
  Events["MEDIA_DETACHING"] = "hlsMediaDetaching";
258
258
  Events["MEDIA_DETACHED"] = "hlsMediaDetached";
259
+ Events["MEDIA_ENDED"] = "hlsMediaEnded";
259
260
  Events["BUFFER_RESET"] = "hlsBufferReset";
260
261
  Events["BUFFER_CODECS"] = "hlsBufferCodecs";
261
262
  Events["BUFFER_CREATED"] = "hlsBufferCreated";
@@ -369,6 +370,23 @@ let ErrorDetails = /*#__PURE__*/function (ErrorDetails) {
369
370
  return ErrorDetails;
370
371
  }({});
371
372
 
373
+ class Logger {
374
+ constructor(label, logger) {
375
+ this.trace = void 0;
376
+ this.debug = void 0;
377
+ this.log = void 0;
378
+ this.warn = void 0;
379
+ this.info = void 0;
380
+ this.error = void 0;
381
+ const lb = `[${label}]:`;
382
+ this.trace = noop;
383
+ this.debug = logger.debug.bind(null, lb);
384
+ this.log = logger.log.bind(null, lb);
385
+ this.warn = logger.warn.bind(null, lb);
386
+ this.info = logger.info.bind(null, lb);
387
+ this.error = logger.error.bind(null, lb);
388
+ }
389
+ }
372
390
  const noop = function noop() {};
373
391
  const fakeLogger = {
374
392
  trace: noop,
@@ -378,7 +396,9 @@ const fakeLogger = {
378
396
  info: noop,
379
397
  error: noop
380
398
  };
381
- let exportedLogger = fakeLogger;
399
+ function createLogger() {
400
+ return _extends({}, fakeLogger);
401
+ }
382
402
 
383
403
  // let lastCallTime;
384
404
  // function formatMsgWithTimeInfo(type, msg) {
@@ -389,35 +409,36 @@ let exportedLogger = fakeLogger;
389
409
  // return msg;
390
410
  // }
391
411
 
392
- function consolePrintFn(type) {
412
+ function consolePrintFn(type, id) {
393
413
  const func = self.console[type];
394
- if (func) {
395
- return func.bind(self.console, `[${type}] >`);
396
- }
397
- return noop;
414
+ return func ? func.bind(self.console, `${id ? '[' + id + '] ' : ''}[${type}] >`) : noop;
398
415
  }
399
- function exportLoggerFunctions(debugConfig, ...functions) {
400
- functions.forEach(function (type) {
401
- exportedLogger[type] = debugConfig[type] ? debugConfig[type].bind(debugConfig) : consolePrintFn(type);
402
- });
416
+ function getLoggerFn(key, debugConfig, id) {
417
+ return debugConfig[key] ? debugConfig[key].bind(debugConfig) : consolePrintFn(key, id);
403
418
  }
404
- function enableLogs(debugConfig, id) {
419
+ let exportedLogger = createLogger();
420
+ function enableLogs(debugConfig, context, id) {
405
421
  // check that console is available
422
+ const newLogger = createLogger();
406
423
  if (typeof console === 'object' && debugConfig === true || typeof debugConfig === 'object') {
407
- exportLoggerFunctions(debugConfig,
424
+ const keys = [
408
425
  // Remove out from list here to hard-disable a log-level
409
426
  // 'trace',
410
- 'debug', 'log', 'info', 'warn', 'error');
427
+ 'debug', 'log', 'info', 'warn', 'error'];
428
+ keys.forEach(key => {
429
+ newLogger[key] = getLoggerFn(key, debugConfig, id);
430
+ });
411
431
  // Some browsers don't allow to use bind on console object anyway
412
432
  // fallback to default if needed
413
433
  try {
414
- exportedLogger.log(`Debug logs enabled for "${id}" in hls.js version ${"1.5.4"}`);
434
+ newLogger.log(`Debug logs enabled for "${context}" in hls.js version ${"1.5.5-0.canary.9977"}`);
415
435
  } catch (e) {
416
- exportedLogger = fakeLogger;
436
+ /* log fn threw an exception. All logger methods are no-ops. */
437
+ return createLogger();
417
438
  }
418
- } else {
419
- exportedLogger = fakeLogger;
420
439
  }
440
+ exportedLogger = newLogger;
441
+ return newLogger;
421
442
  }
422
443
  const logger = exportedLogger;
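The logger refactor above replaces the single mutable exportedLogger with instance-scoped loggers: enableLogs(debugConfig, context, id) now builds and returns a fresh logger (optionally tagged with an id), and the new Logger class wraps any such logger with a "[label]:" prefix. A minimal sketch of the pattern, with 'my-component' as a hypothetical label:

const componentLog = new Logger('my-component', hls.logger);
componentLog.log('init');    // -> "[my-component]: init", routed through this hls instance's logger
componentLog.trace('noisy'); // trace is hardwired to a no-op in the Logger class shown above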
423
444
 
@@ -991,6 +1012,26 @@ class LevelDetails {
991
1012
  }
992
1013
  }
993
1014
 
1015
+ var DecrypterAesMode = {
1016
+ cbc: 0,
1017
+ ctr: 1
1018
+ };
1019
+
1020
+ function isFullSegmentEncryption(method) {
1021
+ return method === 'AES-128' || method === 'AES-256' || method === 'AES-256-CTR';
1022
+ }
1023
+ function getAesModeFromFullSegmentMethod(method) {
1024
+ switch (method) {
1025
+ case 'AES-128':
1026
+ case 'AES-256':
1027
+ return DecrypterAesMode.cbc;
1028
+ case 'AES-256-CTR':
1029
+ return DecrypterAesMode.ctr;
1030
+ default:
1031
+ throw new Error(`invalid full segment method ${method}`);
1032
+ }
1033
+ }
1034
+
994
1035
  // This file is inserted as a shim for modules which we do not want to include into the distro.
995
1036
  // This replacement is done in the "alias" plugin of the rollup config.
996
1037
  var empty = undefined;
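DecrypterAesMode and the two helpers above are what let this release treat AES-256 and AES-256-CTR as full-segment encryption alongside AES-128. Illustrative calls, with results per the code above:

isFullSegmentEncryption('AES-256-CTR');         // true
isFullSegmentEncryption('SAMPLE-AES');          // false (handled as common encryption)
getAesModeFromFullSegmentMethod('AES-256');     // DecrypterAesMode.cbc (0)
getAesModeFromFullSegmentMethod('AES-256-CTR'); // DecrypterAesMode.ctr (1)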
@@ -2431,12 +2472,12 @@ class LevelKey {
2431
2472
  this.keyFormatVersions = formatversions;
2432
2473
  this.iv = iv;
2433
2474
  this.encrypted = method ? method !== 'NONE' : false;
2434
- this.isCommonEncryption = this.encrypted && method !== 'AES-128';
2475
+ this.isCommonEncryption = this.encrypted && !isFullSegmentEncryption(method);
2435
2476
  }
2436
2477
  isSupported() {
2437
2478
  // If it's Segment encryption or No encryption, just select that key system
2438
2479
  if (this.method) {
2439
- if (this.method === 'AES-128' || this.method === 'NONE') {
2480
+ if (isFullSegmentEncryption(this.method) || this.method === 'NONE') {
2440
2481
  return true;
2441
2482
  }
2442
2483
  if (this.keyFormat === 'identity') {
@@ -2450,14 +2491,13 @@ class LevelKey {
2450
2491
  if (!this.encrypted || !this.uri) {
2451
2492
  return null;
2452
2493
  }
2453
- if (this.method === 'AES-128' && this.uri && !this.iv) {
2494
+ if (isFullSegmentEncryption(this.method) && this.uri && !this.iv) {
2454
2495
  if (typeof sn !== 'number') {
2455
2496
  // We are fetching decryption data for a initialization segment
2456
- // If the segment was encrypted with AES-128
2497
+ // If the segment was encrypted with AES-128/256
2457
2498
  // It must have an IV defined. We cannot substitute the Segment Number in.
2458
- if (this.method === 'AES-128' && !this.iv) {
2459
- logger.warn(`missing IV for initialization segment with method="${this.method}" - compliance issue`);
2460
- }
2499
+ logger.warn(`missing IV for initialization segment with method="${this.method}" - compliance issue`);
2500
+
2461
2501
  // Explicitly set sn to resulting value from implicit conversions 'initSegment' values for IV generation.
2462
2502
  sn = 0;
2463
2503
  }
@@ -2604,23 +2644,28 @@ function getCodecCompatibleNameLower(lowerCaseCodec, preferManagedMediaSource =
2604
2644
  if (CODEC_COMPATIBLE_NAMES[lowerCaseCodec]) {
2605
2645
  return CODEC_COMPATIBLE_NAMES[lowerCaseCodec];
2606
2646
  }
2607
-
2608
- // Idealy fLaC and Opus would be first (spec-compliant) but
2609
- // some browsers will report that fLaC is supported then fail.
2610
- // see: https://bugs.chromium.org/p/chromium/issues/detail?id=1422728
2611
2647
  const codecsToCheck = {
2648
+ // Idealy fLaC and Opus would be first (spec-compliant) but
2649
+ // some browsers will report that fLaC is supported then fail.
2650
+ // see: https://bugs.chromium.org/p/chromium/issues/detail?id=1422728
2612
2651
  flac: ['flac', 'fLaC', 'FLAC'],
2613
- opus: ['opus', 'Opus']
2652
+ opus: ['opus', 'Opus'],
2653
+ // Replace audio codec info if browser does not support mp4a.40.34,
2654
+ // and demuxer can fallback to 'audio/mpeg' or 'audio/mp4;codecs="mp3"'
2655
+ 'mp4a.40.34': ['mp3']
2614
2656
  }[lowerCaseCodec];
2615
2657
  for (let i = 0; i < codecsToCheck.length; i++) {
2658
+ var _getMediaSource;
2616
2659
  if (isCodecMediaSourceSupported(codecsToCheck[i], 'audio', preferManagedMediaSource)) {
2617
2660
  CODEC_COMPATIBLE_NAMES[lowerCaseCodec] = codecsToCheck[i];
2618
2661
  return codecsToCheck[i];
2662
+ } else if (codecsToCheck[i] === 'mp3' && (_getMediaSource = getMediaSource(preferManagedMediaSource)) != null && _getMediaSource.isTypeSupported('audio/mpeg')) {
2663
+ return '';
2619
2664
  }
2620
2665
  }
2621
2666
  return lowerCaseCodec;
2622
2667
  }
2623
- const AUDIO_CODEC_REGEXP = /flac|opus/i;
2668
+ const AUDIO_CODEC_REGEXP = /flac|opus|mp4a\.40\.34/i;
2624
2669
  function getCodecCompatibleName(codec, preferManagedMediaSource = true) {
2625
2670
  return codec.replace(AUDIO_CODEC_REGEXP, m => getCodecCompatibleNameLower(m.toLowerCase(), preferManagedMediaSource));
2626
2671
  }
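With the 'mp4a.40.34' entry added above, getCodecCompatibleName can now rewrite an MPEG audio codec string to 'mp3' when audio/mp4;codecs="mp3" is supported, or return an empty string when only audio/mpeg is, so callers can drop the codec and let the demuxer fall back. Illustrative calls; actual results depend on the browser's MediaSource/ManagedMediaSource support:

getCodecCompatibleName('mp4a.40.34'); // 'mp3', '' or 'mp4a.40.34'
getCodecCompatibleName('flac');       // 'flac', 'fLaC' or 'FLAC', whichever isTypeSupported accepts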
@@ -2643,6 +2688,16 @@ function convertAVC1ToAVCOTI(codec) {
2643
2688
  }
2644
2689
  return codec;
2645
2690
  }
2691
+ function getM2TSSupportedAudioTypes(preferManagedMediaSource) {
2692
+ const MediaSource = getMediaSource(preferManagedMediaSource) || {
2693
+ isTypeSupported: () => false
2694
+ };
2695
+ return {
2696
+ mpeg: MediaSource.isTypeSupported('audio/mpeg'),
2697
+ mp3: MediaSource.isTypeSupported('audio/mp4; codecs="mp3"'),
2698
+ ac3: false
2699
+ };
2700
+ }
2646
2701
 
2647
2702
  const MASTER_PLAYLIST_REGEX = /#EXT-X-STREAM-INF:([^\r\n]*)(?:[\r\n](?:#[^\r\n]*)?)*([^\r\n]+)|#EXT-X-(SESSION-DATA|SESSION-KEY|DEFINE|CONTENT-STEERING|START):([^\r\n]*)[\r\n]+/g;
2648
2703
  const MASTER_PLAYLIST_MEDIA_REGEX = /#EXT-X-MEDIA:(.*)/g;
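getM2TSSupportedAudioTypes above is a small feature-detection helper for the transport-stream demuxer: it asks the selected MediaSource implementation which MPEG audio MIME types can be buffered directly. A rough standalone equivalent, assuming getMediaSource() prefers ManagedMediaSource when asked to:

function m2tsSupportedAudioTypes(preferManagedMediaSource) {
  const MS = (preferManagedMediaSource && self.ManagedMediaSource) || self.MediaSource;
  return {
    mpeg: MS ? MS.isTypeSupported('audio/mpeg') : false,
    mp3: MS ? MS.isTypeSupported('audio/mp4; codecs="mp3"') : false,
    ac3: false, // hard-disabled in this build
  };
}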
@@ -4220,7 +4275,47 @@ class LatencyController {
4220
4275
  this.currentTime = 0;
4221
4276
  this.stallCount = 0;
4222
4277
  this._latency = null;
4223
- this.timeupdateHandler = () => this.timeupdate();
4278
+ this.onTimeupdate = () => {
4279
+ const {
4280
+ media,
4281
+ levelDetails
4282
+ } = this;
4283
+ if (!media || !levelDetails) {
4284
+ return;
4285
+ }
4286
+ this.currentTime = media.currentTime;
4287
+ const latency = this.computeLatency();
4288
+ if (latency === null) {
4289
+ return;
4290
+ }
4291
+ this._latency = latency;
4292
+
4293
+ // Adapt playbackRate to meet target latency in low-latency mode
4294
+ const {
4295
+ lowLatencyMode,
4296
+ maxLiveSyncPlaybackRate
4297
+ } = this.config;
4298
+ if (!lowLatencyMode || maxLiveSyncPlaybackRate === 1 || !levelDetails.live) {
4299
+ return;
4300
+ }
4301
+ const targetLatency = this.targetLatency;
4302
+ if (targetLatency === null) {
4303
+ return;
4304
+ }
4305
+ const distanceFromTarget = latency - targetLatency;
4306
+ // Only adjust playbackRate when within one target duration of targetLatency
4307
+ // and more than one second from under-buffering.
4308
+ // Playback further than one target duration from target can be considered DVR playback.
4309
+ const liveMinLatencyDuration = Math.min(this.maxLatency, targetLatency + levelDetails.targetduration);
4310
+ const inLiveRange = distanceFromTarget < liveMinLatencyDuration;
4311
+ if (inLiveRange && distanceFromTarget > 0.05 && this.forwardBufferLength > 1) {
4312
+ const max = Math.min(2, Math.max(1.0, maxLiveSyncPlaybackRate));
4313
+ const rate = Math.round(2 / (1 + Math.exp(-0.75 * distanceFromTarget - this.edgeStalled)) * 20) / 20;
4314
+ media.playbackRate = Math.min(max, Math.max(1, rate));
4315
+ } else if (media.playbackRate !== 1 && media.playbackRate !== 0) {
4316
+ media.playbackRate = 1;
4317
+ }
4318
+ };
4224
4319
  this.hls = hls;
4225
4320
  this.config = hls.config;
4226
4321
  this.registerListeners();
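For a feel of the playbackRate curve in the relocated onTimeupdate handler above: with edgeStalled = 0 and playback 1 s behind the target latency (distanceFromTarget = 1),

Math.round(2 / (1 + Math.exp(-0.75 * 1 - 0)) * 20) / 20; // 1.35

so the media plays at 1.35x (capped at min(2, maxLiveSyncPlaybackRate)), and the rate relaxes back toward 1 as the latency gap closes.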
@@ -4312,7 +4407,7 @@ class LatencyController {
4312
4407
  this.onMediaDetaching();
4313
4408
  this.levelDetails = null;
4314
4409
  // @ts-ignore
4315
- this.hls = this.timeupdateHandler = null;
4410
+ this.hls = null;
4316
4411
  }
4317
4412
  registerListeners() {
4318
4413
  this.hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
@@ -4330,11 +4425,11 @@ class LatencyController {
4330
4425
  }
4331
4426
  onMediaAttached(event, data) {
4332
4427
  this.media = data.media;
4333
- this.media.addEventListener('timeupdate', this.timeupdateHandler);
4428
+ this.media.addEventListener('timeupdate', this.onTimeupdate);
4334
4429
  }
4335
4430
  onMediaDetaching() {
4336
4431
  if (this.media) {
4337
- this.media.removeEventListener('timeupdate', this.timeupdateHandler);
4432
+ this.media.removeEventListener('timeupdate', this.onTimeupdate);
4338
4433
  this.media = null;
4339
4434
  }
4340
4435
  }
@@ -4348,10 +4443,10 @@ class LatencyController {
4348
4443
  }) {
4349
4444
  this.levelDetails = details;
4350
4445
  if (details.advanced) {
4351
- this.timeupdate();
4446
+ this.onTimeupdate();
4352
4447
  }
4353
4448
  if (!details.live && this.media) {
4354
- this.media.removeEventListener('timeupdate', this.timeupdateHandler);
4449
+ this.media.removeEventListener('timeupdate', this.onTimeupdate);
4355
4450
  }
4356
4451
  }
4357
4452
  onError(event, data) {
@@ -4361,48 +4456,7 @@ class LatencyController {
4361
4456
  }
4362
4457
  this.stallCount++;
4363
4458
  if ((_this$levelDetails = this.levelDetails) != null && _this$levelDetails.live) {
4364
- logger.warn('[playback-rate-controller]: Stall detected, adjusting target latency');
4365
- }
4366
- }
4367
- timeupdate() {
4368
- const {
4369
- media,
4370
- levelDetails
4371
- } = this;
4372
- if (!media || !levelDetails) {
4373
- return;
4374
- }
4375
- this.currentTime = media.currentTime;
4376
- const latency = this.computeLatency();
4377
- if (latency === null) {
4378
- return;
4379
- }
4380
- this._latency = latency;
4381
-
4382
- // Adapt playbackRate to meet target latency in low-latency mode
4383
- const {
4384
- lowLatencyMode,
4385
- maxLiveSyncPlaybackRate
4386
- } = this.config;
4387
- if (!lowLatencyMode || maxLiveSyncPlaybackRate === 1 || !levelDetails.live) {
4388
- return;
4389
- }
4390
- const targetLatency = this.targetLatency;
4391
- if (targetLatency === null) {
4392
- return;
4393
- }
4394
- const distanceFromTarget = latency - targetLatency;
4395
- // Only adjust playbackRate when within one target duration of targetLatency
4396
- // and more than one second from under-buffering.
4397
- // Playback further than one target duration from target can be considered DVR playback.
4398
- const liveMinLatencyDuration = Math.min(this.maxLatency, targetLatency + levelDetails.targetduration);
4399
- const inLiveRange = distanceFromTarget < liveMinLatencyDuration;
4400
- if (inLiveRange && distanceFromTarget > 0.05 && this.forwardBufferLength > 1) {
4401
- const max = Math.min(2, Math.max(1.0, maxLiveSyncPlaybackRate));
4402
- const rate = Math.round(2 / (1 + Math.exp(-0.75 * distanceFromTarget - this.edgeStalled)) * 20) / 20;
4403
- media.playbackRate = Math.min(max, Math.max(1, rate));
4404
- } else if (media.playbackRate !== 1 && media.playbackRate !== 0) {
4405
- media.playbackRate = 1;
4459
+ this.hls.logger.warn('[latency-controller]: Stall detected, adjusting target latency');
4406
4460
  }
4407
4461
  }
4408
4462
  estimateLiveEdge() {
@@ -5174,18 +5228,13 @@ var ErrorActionFlags = {
5174
5228
  MoveAllAlternatesMatchingHDCP: 2,
5175
5229
  SwitchToSDR: 4
5176
5230
  }; // Reserved for future use
5177
- class ErrorController {
5231
+ class ErrorController extends Logger {
5178
5232
  constructor(hls) {
5233
+ super('error-controller', hls.logger);
5179
5234
  this.hls = void 0;
5180
5235
  this.playlistError = 0;
5181
5236
  this.penalizedRenditions = {};
5182
- this.log = void 0;
5183
- this.warn = void 0;
5184
- this.error = void 0;
5185
5237
  this.hls = hls;
5186
- this.log = logger.log.bind(logger, `[info]:`);
5187
- this.warn = logger.warn.bind(logger, `[warning]:`);
5188
- this.error = logger.error.bind(logger, `[error]:`);
5189
5238
  this.registerListeners();
5190
5239
  }
5191
5240
  registerListeners() {
@@ -5537,16 +5586,13 @@ class ErrorController {
5537
5586
  }
5538
5587
  }
5539
5588
 
5540
- class BasePlaylistController {
5589
+ class BasePlaylistController extends Logger {
5541
5590
  constructor(hls, logPrefix) {
5591
+ super(logPrefix, hls.logger);
5542
5592
  this.hls = void 0;
5543
5593
  this.timer = -1;
5544
5594
  this.requestScheduled = -1;
5545
5595
  this.canLoad = false;
5546
- this.log = void 0;
5547
- this.warn = void 0;
5548
- this.log = logger.log.bind(logger, `${logPrefix}:`);
5549
- this.warn = logger.warn.bind(logger, `${logPrefix}:`);
5550
5596
  this.hls = hls;
5551
5597
  }
5552
5598
  destroy() {
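ErrorController above, BasePlaylistController here, and the controllers below (AbrController, BufferController, ContentSteeringController, TaskLoop) all drop their hand-bound log/warn/error fields in favour of extending the new Logger base class. The pattern, as a sketch with a hypothetical subclass:

class ExampleController extends Logger {
  constructor(hls) {
    super('example-controller', hls.logger); // every message gets the "[example-controller]:" prefix
    this.hls = hls;
  }
  onSomething() {
    this.warn('something happened'); // was: logger.warn('[example-controller]: something happened')
  }
}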
@@ -5579,7 +5625,7 @@ class BasePlaylistController {
5579
5625
  try {
5580
5626
  uri = new self.URL(attr.URI, previous.url).href;
5581
5627
  } catch (error) {
5582
- logger.warn(`Could not construct new URL for Rendition Report: ${error}`);
5628
+ this.warn(`Could not construct new URL for Rendition Report: ${error}`);
5583
5629
  uri = attr.URI || '';
5584
5630
  }
5585
5631
  // Use exact match. Otherwise, the last partial match, if any, will be used
@@ -6125,8 +6171,9 @@ function getCodecTiers(levels, audioTracksByGroup, minAutoLevel, maxAutoLevel) {
6125
6171
  }, {});
6126
6172
  }
6127
6173
 
6128
- class AbrController {
6174
+ class AbrController extends Logger {
6129
6175
  constructor(_hls) {
6176
+ super('abr', _hls.logger);
6130
6177
  this.hls = void 0;
6131
6178
  this.lastLevelLoadSec = 0;
6132
6179
  this.lastLoadedFragLevel = -1;
@@ -6240,7 +6287,7 @@ class AbrController {
6240
6287
  this.resetEstimator(nextLoadLevelBitrate);
6241
6288
  }
6242
6289
  this.clearTimer();
6243
- logger.warn(`[abr] Fragment ${frag.sn}${part ? ' part ' + part.index : ''} of level ${frag.level} is loading too slowly;
6290
+ this.warn(`Fragment ${frag.sn}${part ? ' part ' + part.index : ''} of level ${frag.level} is loading too slowly;
6244
6291
  Time to underbuffer: ${bufferStarvationDelay.toFixed(3)} s
6245
6292
  Estimated load time for current fragment: ${fragLoadedDelay.toFixed(3)} s
6246
6293
  Estimated load time for down switch fragment: ${fragLevelNextLoadedDelay.toFixed(3)} s
@@ -6260,7 +6307,7 @@ class AbrController {
6260
6307
  }
6261
6308
  resetEstimator(abrEwmaDefaultEstimate) {
6262
6309
  if (abrEwmaDefaultEstimate) {
6263
- logger.log(`setting initial bwe to ${abrEwmaDefaultEstimate}`);
6310
+ this.log(`setting initial bwe to ${abrEwmaDefaultEstimate}`);
6264
6311
  this.hls.config.abrEwmaDefaultEstimate = abrEwmaDefaultEstimate;
6265
6312
  }
6266
6313
  this.firstSelection = -1;
@@ -6492,7 +6539,7 @@ class AbrController {
6492
6539
  }
6493
6540
  const firstLevel = this.hls.firstLevel;
6494
6541
  const clamped = Math.min(Math.max(firstLevel, minAutoLevel), maxAutoLevel);
6495
- logger.warn(`[abr] Could not find best starting auto level. Defaulting to first in playlist ${firstLevel} clamped to ${clamped}`);
6542
+ this.warn(`Could not find best starting auto level. Defaulting to first in playlist ${firstLevel} clamped to ${clamped}`);
6496
6543
  return clamped;
6497
6544
  }
6498
6545
  get forcedAutoLevel() {
@@ -6577,13 +6624,13 @@ class AbrController {
6577
6624
  // cap maxLoadingDelay and ensure it is not bigger 'than bitrate test' frag duration
6578
6625
  const maxLoadingDelay = currentFragDuration ? Math.min(currentFragDuration, config.maxLoadingDelay) : config.maxLoadingDelay;
6579
6626
  maxStarvationDelay = maxLoadingDelay - bitrateTestDelay;
6580
- logger.info(`[abr] bitrate test took ${Math.round(1000 * bitrateTestDelay)}ms, set first fragment max fetchDuration to ${Math.round(1000 * maxStarvationDelay)} ms`);
6627
+ this.info(`bitrate test took ${Math.round(1000 * bitrateTestDelay)}ms, set first fragment max fetchDuration to ${Math.round(1000 * maxStarvationDelay)} ms`);
6581
6628
  // don't use conservative factor on bitrate test
6582
6629
  bwFactor = bwUpFactor = 1;
6583
6630
  }
6584
6631
  }
6585
6632
  const bestLevel = this.findBestLevel(avgbw, minAutoLevel, maxAutoLevel, bufferStarvationDelay, maxStarvationDelay, bwFactor, bwUpFactor);
6586
- logger.info(`[abr] ${bufferStarvationDelay ? 'rebuffering expected' : 'buffer is empty'}, optimal quality level ${bestLevel}`);
6633
+ this.info(`${bufferStarvationDelay ? 'rebuffering expected' : 'buffer is empty'}, optimal quality level ${bestLevel}`);
6587
6634
  if (bestLevel > -1) {
6588
6635
  return bestLevel;
6589
6636
  }
@@ -6645,7 +6692,7 @@ class AbrController {
6645
6692
  currentVideoRange = preferHDR ? videoRanges[videoRanges.length - 1] : videoRanges[0];
6646
6693
  currentFrameRate = minFramerate;
6647
6694
  currentBw = Math.max(currentBw, minBitrate);
6648
- logger.log(`[abr] picked start tier ${JSON.stringify(startTier)}`);
6695
+ this.log(`picked start tier ${JSON.stringify(startTier)}`);
6649
6696
  } else {
6650
6697
  currentCodecSet = level == null ? void 0 : level.codecSet;
6651
6698
  currentVideoRange = level == null ? void 0 : level.videoRange;
@@ -6698,9 +6745,9 @@ class AbrController {
6698
6745
  const forcedAutoLevel = this.forcedAutoLevel;
6699
6746
  if (i !== loadLevel && (forcedAutoLevel === -1 || forcedAutoLevel !== loadLevel)) {
6700
6747
  if (levelsSkipped.length) {
6701
- logger.trace(`[abr] Skipped level(s) ${levelsSkipped.join(',')} of ${maxAutoLevel} max with CODECS and VIDEO-RANGE:"${levels[levelsSkipped[0]].codecs}" ${levels[levelsSkipped[0]].videoRange}; not compatible with "${level.codecs}" ${currentVideoRange}`);
6748
+ this.trace(`Skipped level(s) ${levelsSkipped.join(',')} of ${maxAutoLevel} max with CODECS and VIDEO-RANGE:"${levels[levelsSkipped[0]].codecs}" ${levels[levelsSkipped[0]].videoRange}; not compatible with "${level.codecs}" ${currentVideoRange}`);
6702
6749
  }
6703
- logger.info(`[abr] switch candidate:${selectionBaseLevel}->${i} adjustedbw(${Math.round(adjustedbw)})-bitrate=${Math.round(adjustedbw - bitrate)} ttfb:${ttfbEstimateSec.toFixed(1)} avgDuration:${avgDuration.toFixed(1)} maxFetchDuration:${maxFetchDuration.toFixed(1)} fetchDuration:${fetchDuration.toFixed(1)} firstSelection:${firstSelection} codecSet:${currentCodecSet} videoRange:${currentVideoRange} hls.loadLevel:${loadLevel}`);
6750
+ this.info(`switch candidate:${selectionBaseLevel}->${i} adjustedbw(${Math.round(adjustedbw)})-bitrate=${Math.round(adjustedbw - bitrate)} ttfb:${ttfbEstimateSec.toFixed(1)} avgDuration:${avgDuration.toFixed(1)} maxFetchDuration:${maxFetchDuration.toFixed(1)} fetchDuration:${fetchDuration.toFixed(1)} firstSelection:${firstSelection} codecSet:${currentCodecSet} videoRange:${currentVideoRange} hls.loadLevel:${loadLevel}`);
6704
6751
  }
6705
6752
  if (firstSelection) {
6706
6753
  this.firstSelection = i;
@@ -6936,8 +6983,9 @@ class BufferOperationQueue {
6936
6983
  }
6937
6984
 
6938
6985
  const VIDEO_CODEC_PROFILE_REPLACE = /(avc[1234]|hvc1|hev1|dvh[1e]|vp09|av01)(?:\.[^.,]+)+/;
6939
- class BufferController {
6986
+ class BufferController extends Logger {
6940
6987
  constructor(hls) {
6988
+ super('buffer-controller', hls.logger);
6941
6989
  // The level details used to determine duration, target-duration and live
6942
6990
  this.details = null;
6943
6991
  // cache the self generated object url to detect hijack of video tag
@@ -6967,9 +7015,6 @@ class BufferController {
6967
7015
  this.tracks = {};
6968
7016
  this.pendingTracks = {};
6969
7017
  this.sourceBuffer = void 0;
6970
- this.log = void 0;
6971
- this.warn = void 0;
6972
- this.error = void 0;
6973
7018
  this._onEndStreaming = event => {
6974
7019
  if (!this.hls) {
6975
7020
  return;
@@ -7015,15 +7060,11 @@ class BufferController {
7015
7060
  _objectUrl
7016
7061
  } = this;
7017
7062
  if (mediaSrc !== _objectUrl) {
7018
- logger.error(`Media element src was set while attaching MediaSource (${_objectUrl} > ${mediaSrc})`);
7063
+ this.error(`Media element src was set while attaching MediaSource (${_objectUrl} > ${mediaSrc})`);
7019
7064
  }
7020
7065
  };
7021
7066
  this.hls = hls;
7022
- const logPrefix = '[buffer-controller]';
7023
7067
  this.appendSource = hls.config.preferManagedMediaSource;
7024
- this.log = logger.log.bind(logger, logPrefix);
7025
- this.warn = logger.warn.bind(logger, logPrefix);
7026
- this.error = logger.error.bind(logger, logPrefix);
7027
7068
  this._initSourceBuffer();
7028
7069
  this.registerListeners();
7029
7070
  }
@@ -7036,6 +7077,12 @@ class BufferController {
7036
7077
  this.lastMpegAudioChunk = null;
7037
7078
  // @ts-ignore
7038
7079
  this.hls = null;
7080
+ // @ts-ignore
7081
+ this._onMediaSourceOpen = this._onMediaSourceClose = null;
7082
+ // @ts-ignore
7083
+ this._onMediaSourceEnded = null;
7084
+ // @ts-ignore
7085
+ this._onStartStreaming = this._onEndStreaming = null;
7039
7086
  }
7040
7087
  registerListeners() {
7041
7088
  const {
@@ -7198,6 +7245,7 @@ class BufferController {
7198
7245
  this.resetBuffer(type);
7199
7246
  });
7200
7247
  this._initSourceBuffer();
7248
+ this.hls.resumeBuffering();
7201
7249
  }
7202
7250
  resetBuffer(type) {
7203
7251
  const sb = this.sourceBuffer[type];
@@ -8035,7 +8083,7 @@ class CapLevelController {
8035
8083
  const hls = this.hls;
8036
8084
  const maxLevel = this.getMaxLevel(levels.length - 1);
8037
8085
  if (maxLevel !== this.autoLevelCapping) {
8038
- logger.log(`Setting autoLevelCapping to ${maxLevel}: ${levels[maxLevel].height}p@${levels[maxLevel].bitrate} for media ${this.mediaWidth}x${this.mediaHeight}`);
8086
+ hls.logger.log(`Setting autoLevelCapping to ${maxLevel}: ${levels[maxLevel].height}p@${levels[maxLevel].bitrate} for media ${this.mediaWidth}x${this.mediaHeight}`);
8039
8087
  }
8040
8088
  hls.autoLevelCapping = maxLevel;
8041
8089
  if (hls.autoLevelCapping > this.autoLevelCapping && this.streamController) {
@@ -8213,10 +8261,10 @@ class FPSController {
8213
8261
  totalDroppedFrames: droppedFrames
8214
8262
  });
8215
8263
  if (droppedFPS > 0) {
8216
- // logger.log('checkFPS : droppedFPS/decodedFPS:' + droppedFPS/(1000 * currentDecoded / currentPeriod));
8264
+ // hls.logger.log('checkFPS : droppedFPS/decodedFPS:' + droppedFPS/(1000 * currentDecoded / currentPeriod));
8217
8265
  if (currentDropped > hls.config.fpsDroppedMonitoringThreshold * currentDecoded) {
8218
8266
  let currentLevel = hls.currentLevel;
8219
- logger.warn('drop FPS ratio greater than max allowed value for currentLevel: ' + currentLevel);
8267
+ hls.logger.warn('drop FPS ratio greater than max allowed value for currentLevel: ' + currentLevel);
8220
8268
  if (currentLevel > 0 && (hls.autoLevelCapping === -1 || hls.autoLevelCapping >= currentLevel)) {
8221
8269
  currentLevel = currentLevel - 1;
8222
8270
  hls.trigger(Events.FPS_DROP_LEVEL_CAPPING, {
@@ -8249,10 +8297,10 @@ class FPSController {
8249
8297
  }
8250
8298
 
8251
8299
  const PATHWAY_PENALTY_DURATION_MS = 300000;
8252
- class ContentSteeringController {
8300
+ class ContentSteeringController extends Logger {
8253
8301
  constructor(hls) {
8302
+ super('content-steering', hls.logger);
8254
8303
  this.hls = void 0;
8255
- this.log = void 0;
8256
8304
  this.loader = null;
8257
8305
  this.uri = null;
8258
8306
  this.pathwayId = '.';
@@ -8267,7 +8315,6 @@ class ContentSteeringController {
8267
8315
  this.subtitleTracks = null;
8268
8316
  this.penalizedPathways = {};
8269
8317
  this.hls = hls;
8270
- this.log = logger.log.bind(logger, `[content-steering]:`);
8271
8318
  this.registerListeners();
8272
8319
  }
8273
8320
  registerListeners() {
@@ -8391,7 +8438,7 @@ class ContentSteeringController {
8391
8438
  errorAction.resolved = this.pathwayId !== errorPathway;
8392
8439
  }
8393
8440
  if (!errorAction.resolved) {
8394
- logger.warn(`Could not resolve ${data.details} ("${data.error.message}") with content-steering for Pathway: ${errorPathway} levels: ${levels ? levels.length : levels} priorities: ${JSON.stringify(pathwayPriority)} penalized: ${JSON.stringify(this.penalizedPathways)}`);
8441
+ this.warn(`Could not resolve ${data.details} ("${data.error.message}") with content-steering for Pathway: ${errorPathway} levels: ${levels ? levels.length : levels} priorities: ${JSON.stringify(pathwayPriority)} penalized: ${JSON.stringify(this.penalizedPathways)}`);
8395
8442
  }
8396
8443
  }
8397
8444
  }
@@ -8562,7 +8609,7 @@ class ContentSteeringController {
8562
8609
  onSuccess: (response, stats, context, networkDetails) => {
8563
8610
  this.log(`Loaded steering manifest: "${url}"`);
8564
8611
  const steeringData = response.data;
8565
- if (steeringData.VERSION !== 1) {
8612
+ if ((steeringData == null ? void 0 : steeringData.VERSION) !== 1) {
8566
8613
  this.log(`Steering VERSION ${steeringData.VERSION} not supported!`);
8567
8614
  return;
8568
8615
  }
@@ -9501,7 +9548,7 @@ function timelineConfig() {
9501
9548
  /**
9502
9549
  * @ignore
9503
9550
  */
9504
- function mergeConfig(defaultConfig, userConfig) {
9551
+ function mergeConfig(defaultConfig, userConfig, logger) {
9505
9552
  if ((userConfig.liveSyncDurationCount || userConfig.liveMaxLatencyDurationCount) && (userConfig.liveSyncDuration || userConfig.liveMaxLatencyDuration)) {
9506
9553
  throw new Error("Illegal hls.js config: don't mix up liveSyncDurationCount/liveMaxLatencyDurationCount and liveSyncDuration/liveMaxLatencyDuration");
9507
9554
  }
@@ -9571,7 +9618,7 @@ function deepCpy(obj) {
9571
9618
  /**
9572
9619
  * @ignore
9573
9620
  */
9574
- function enableStreamingMode(config) {
9621
+ function enableStreamingMode(config, logger) {
9575
9622
  const currentLoader = config.loader;
9576
9623
  if (currentLoader !== FetchLoader && currentLoader !== XhrLoader) {
9577
9624
  // If a developer has configured their own loader, respect that choice
@@ -9588,10 +9635,9 @@ function enableStreamingMode(config) {
9588
9635
  }
9589
9636
  }
9590
9637
 
9591
- let chromeOrFirefox;
9592
9638
  class LevelController extends BasePlaylistController {
9593
9639
  constructor(hls, contentSteeringController) {
9594
- super(hls, '[level-controller]');
9640
+ super(hls, 'level-controller');
9595
9641
  this._levels = [];
9596
9642
  this._firstLevel = -1;
9597
9643
  this._maxAutoLevel = -1;
@@ -9662,23 +9708,15 @@ class LevelController extends BasePlaylistController {
9662
9708
  let videoCodecFound = false;
9663
9709
  let audioCodecFound = false;
9664
9710
  data.levels.forEach(levelParsed => {
9665
- var _audioCodec, _videoCodec;
9711
+ var _videoCodec;
9666
9712
  const attributes = levelParsed.attrs;
9667
-
9668
- // erase audio codec info if browser does not support mp4a.40.34.
9669
- // demuxer will autodetect codec and fallback to mpeg/audio
9670
9713
  let {
9671
9714
  audioCodec,
9672
9715
  videoCodec
9673
9716
  } = levelParsed;
9674
- if (((_audioCodec = audioCodec) == null ? void 0 : _audioCodec.indexOf('mp4a.40.34')) !== -1) {
9675
- chromeOrFirefox || (chromeOrFirefox = /chrome|firefox/i.test(navigator.userAgent));
9676
- if (chromeOrFirefox) {
9677
- levelParsed.audioCodec = audioCodec = undefined;
9678
- }
9679
- }
9680
9717
  if (audioCodec) {
9681
- levelParsed.audioCodec = audioCodec = getCodecCompatibleName(audioCodec, preferManagedMediaSource);
9718
+ // Returns empty and set to undefined for 'mp4a.40.34' with fallback to 'audio/mpeg' SourceBuffer
9719
+ levelParsed.audioCodec = audioCodec = getCodecCompatibleName(audioCodec, preferManagedMediaSource) || undefined;
9682
9720
  }
9683
9721
  if (((_videoCodec = videoCodec) == null ? void 0 : _videoCodec.indexOf('avc1')) === 0) {
9684
9722
  videoCodec = levelParsed.videoCodec = convertAVC1ToAVCOTI(videoCodec);
@@ -10806,8 +10844,8 @@ function createLoaderContext(frag, part = null) {
10806
10844
  var _frag$decryptdata;
10807
10845
  let byteRangeStart = start;
10808
10846
  let byteRangeEnd = end;
10809
- if (frag.sn === 'initSegment' && ((_frag$decryptdata = frag.decryptdata) == null ? void 0 : _frag$decryptdata.method) === 'AES-128') {
10810
- // MAP segment encrypted with method 'AES-128', when served with HTTP Range,
10847
+ if (frag.sn === 'initSegment' && isMethodFullSegmentAesCbc((_frag$decryptdata = frag.decryptdata) == null ? void 0 : _frag$decryptdata.method)) {
10848
+ // MAP segment encrypted with method 'AES-128' or 'AES-256' (cbc), when served with HTTP Range,
10811
10849
  // has the unencrypted size specified in the range.
10812
10850
  // Ref: https://tools.ietf.org/html/draft-pantos-hls-rfc8216bis-08#section-6.3.6
10813
10851
  const fragmentLen = end - start;
@@ -10840,6 +10878,9 @@ function createGapLoadError(frag, part) {
10840
10878
  (part ? part : frag).stats.aborted = true;
10841
10879
  return new LoadError(errorData);
10842
10880
  }
10881
+ function isMethodFullSegmentAesCbc(method) {
10882
+ return method === 'AES-128' || method === 'AES-256';
10883
+ }
10843
10884
  class LoadError extends Error {
10844
10885
  constructor(data) {
10845
10886
  super(data.error.message);
@@ -10985,6 +11026,8 @@ class KeyLoader {
10985
11026
  }
10986
11027
  return this.loadKeyEME(keyInfo, frag);
10987
11028
  case 'AES-128':
11029
+ case 'AES-256':
11030
+ case 'AES-256-CTR':
10988
11031
  return this.loadKeyHTTP(keyInfo, frag);
10989
11032
  default:
10990
11033
  return Promise.reject(this.createKeyLoadError(frag, ErrorDetails.KEY_LOAD_ERROR, new Error(`Key supplied with unsupported METHOD: "${decryptdata.method}"`)));
@@ -11120,8 +11163,9 @@ class KeyLoader {
11120
11163
  * we are limiting the task execution per call stack to exactly one, but scheduling/post-poning further
11121
11164
  * task processing on the next main loop iteration (also known as "next tick" in the Node/JS runtime lingo).
11122
11165
  */
11123
- class TaskLoop {
11124
- constructor() {
11166
+ class TaskLoop extends Logger {
11167
+ constructor(label, logger) {
11168
+ super(label, logger);
11125
11169
  this._boundTick = void 0;
11126
11170
  this._tickTimer = null;
11127
11171
  this._tickInterval = null;
@@ -11389,33 +11433,61 @@ function alignMediaPlaylistByPDT(details, refDetails) {
11389
11433
  }
11390
11434
 
11391
11435
  class AESCrypto {
11392
- constructor(subtle, iv) {
11436
+ constructor(subtle, iv, aesMode) {
11393
11437
  this.subtle = void 0;
11394
11438
  this.aesIV = void 0;
11439
+ this.aesMode = void 0;
11395
11440
  this.subtle = subtle;
11396
11441
  this.aesIV = iv;
11442
+ this.aesMode = aesMode;
11397
11443
  }
11398
11444
  decrypt(data, key) {
11399
- return this.subtle.decrypt({
11400
- name: 'AES-CBC',
11401
- iv: this.aesIV
11402
- }, key, data);
11445
+ switch (this.aesMode) {
11446
+ case DecrypterAesMode.cbc:
11447
+ return this.subtle.decrypt({
11448
+ name: 'AES-CBC',
11449
+ iv: this.aesIV
11450
+ }, key, data);
11451
+ case DecrypterAesMode.ctr:
11452
+ return this.subtle.decrypt({
11453
+ name: 'AES-CTR',
11454
+ counter: this.aesIV,
11455
+ length: 64
11456
+ },
11457
+ //64 : NIST SP800-38A standard suggests that the counter should occupy half of the counter block
11458
+ key, data);
11459
+ default:
11460
+ throw new Error(`[AESCrypto] invalid aes mode ${this.aesMode}`);
11461
+ }
11403
11462
  }
11404
11463
  }
11405
11464
 
11406
11465
  class FastAESKey {
11407
- constructor(subtle, key) {
11466
+ constructor(subtle, key, aesMode) {
11408
11467
  this.subtle = void 0;
11409
11468
  this.key = void 0;
11469
+ this.aesMode = void 0;
11410
11470
  this.subtle = subtle;
11411
11471
  this.key = key;
11472
+ this.aesMode = aesMode;
11412
11473
  }
11413
11474
  expandKey() {
11475
+ const subtleAlgoName = getSubtleAlgoName(this.aesMode);
11414
11476
  return this.subtle.importKey('raw', this.key, {
11415
- name: 'AES-CBC'
11477
+ name: subtleAlgoName
11416
11478
  }, false, ['encrypt', 'decrypt']);
11417
11479
  }
11418
11480
  }
11481
+ function getSubtleAlgoName(aesMode) {
11482
+ switch (aesMode) {
11483
+ case DecrypterAesMode.cbc:
11484
+ return 'AES-CBC';
11485
+ case DecrypterAesMode.ctr:
11486
+ return 'AES-CTR';
11487
+ default:
11488
+ throw new Error(`[FastAESKey] invalid aes mode ${aesMode}`);
11489
+ }
11490
+ }
11419
11491
 
11420
11492
  // PKCS7
11421
11493
  function removePadding(array) {
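AESCrypto and FastAESKey now carry the AES mode through to WebCrypto, so CTR-mode segments use the standard SubtleCrypto parameters shown above (a 64-bit counter, per the NIST SP800-38A note). For reference, the two underlying calls outside hls.js look like:

// key must be a CryptoKey imported with the matching algorithm name
crypto.subtle.decrypt({ name: 'AES-CBC', iv }, key, data);                      // AES-128 / AES-256
crypto.subtle.decrypt({ name: 'AES-CTR', counter: iv, length: 64 }, key, data); // AES-256-CTR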
@@ -11665,7 +11737,8 @@ class Decrypter {
11665
11737
  this.currentIV = null;
11666
11738
  this.currentResult = null;
11667
11739
  this.useSoftware = void 0;
11668
- this.useSoftware = config.enableSoftwareAES;
11740
+ this.enableSoftwareAES = void 0;
11741
+ this.enableSoftwareAES = config.enableSoftwareAES;
11669
11742
  this.removePKCS7Padding = removePKCS7Padding;
11670
11743
  // built in decryptor expects PKCS7 padding
11671
11744
  if (removePKCS7Padding) {
@@ -11678,9 +11751,7 @@ class Decrypter {
11678
11751
  /* no-op */
11679
11752
  }
11680
11753
  }
11681
- if (this.subtle === null) {
11682
- this.useSoftware = true;
11683
- }
11754
+ this.useSoftware = this.subtle === null;
11684
11755
  }
11685
11756
  destroy() {
11686
11757
  this.subtle = null;
@@ -11718,10 +11789,10 @@ class Decrypter {
11718
11789
  this.softwareDecrypter = null;
11719
11790
  }
11720
11791
  }
11721
- decrypt(data, key, iv) {
11792
+ decrypt(data, key, iv, aesMode) {
11722
11793
  if (this.useSoftware) {
11723
11794
  return new Promise((resolve, reject) => {
11724
- this.softwareDecrypt(new Uint8Array(data), key, iv);
11795
+ this.softwareDecrypt(new Uint8Array(data), key, iv, aesMode);
11725
11796
  const decryptResult = this.flush();
11726
11797
  if (decryptResult) {
11727
11798
  resolve(decryptResult.buffer);
@@ -11730,17 +11801,21 @@ class Decrypter {
11730
11801
  }
11731
11802
  });
11732
11803
  }
11733
- return this.webCryptoDecrypt(new Uint8Array(data), key, iv);
11804
+ return this.webCryptoDecrypt(new Uint8Array(data), key, iv, aesMode);
11734
11805
  }
11735
11806
 
11736
11807
  // Software decryption is progressive. Progressive decryption may not return a result on each call. Any cached
11737
11808
  // data is handled in the flush() call
11738
- softwareDecrypt(data, key, iv) {
11809
+ softwareDecrypt(data, key, iv, aesMode) {
11739
11810
  const {
11740
11811
  currentIV,
11741
11812
  currentResult,
11742
11813
  remainderData
11743
11814
  } = this;
11815
+ if (aesMode !== DecrypterAesMode.cbc || key.byteLength !== 16) {
11816
+ logger.warn('SoftwareDecrypt: can only handle AES-128-CBC');
11817
+ return null;
11818
+ }
11744
11819
  this.logOnce('JS AES decrypt');
11745
11820
  // The output is staggered during progressive parsing - the current result is cached, and emitted on the next call
11746
11821
  // This is done in order to strip PKCS7 padding, which is found at the end of each segment. We only know we've reached
@@ -11773,11 +11848,11 @@ class Decrypter {
11773
11848
  }
11774
11849
  return result;
11775
11850
  }
11776
- webCryptoDecrypt(data, key, iv) {
11851
+ webCryptoDecrypt(data, key, iv, aesMode) {
11777
11852
  const subtle = this.subtle;
11778
11853
  if (this.key !== key || !this.fastAesKey) {
11779
11854
  this.key = key;
11780
- this.fastAesKey = new FastAESKey(subtle, key);
11855
+ this.fastAesKey = new FastAESKey(subtle, key, aesMode);
11781
11856
  }
11782
11857
  return this.fastAesKey.expandKey().then(aesKey => {
11783
11858
  // decrypt using web crypto
@@ -11785,22 +11860,25 @@ class Decrypter {
11785
11860
  return Promise.reject(new Error('web crypto not initialized'));
11786
11861
  }
11787
11862
  this.logOnce('WebCrypto AES decrypt');
11788
- const crypto = new AESCrypto(subtle, new Uint8Array(iv));
11863
+ const crypto = new AESCrypto(subtle, new Uint8Array(iv), aesMode);
11789
11864
  return crypto.decrypt(data.buffer, aesKey);
11790
11865
  }).catch(err => {
11791
11866
  logger.warn(`[decrypter]: WebCrypto Error, disable WebCrypto API, ${err.name}: ${err.message}`);
11792
- return this.onWebCryptoError(data, key, iv);
11867
+ return this.onWebCryptoError(data, key, iv, aesMode);
11793
11868
  });
11794
11869
  }
11795
- onWebCryptoError(data, key, iv) {
11796
- this.useSoftware = true;
11797
- this.logEnabled = true;
11798
- this.softwareDecrypt(data, key, iv);
11799
- const decryptResult = this.flush();
11800
- if (decryptResult) {
11801
- return decryptResult.buffer;
11870
+ onWebCryptoError(data, key, iv, aesMode) {
11871
+ const enableSoftwareAES = this.enableSoftwareAES;
11872
+ if (enableSoftwareAES) {
11873
+ this.useSoftware = true;
11874
+ this.logEnabled = true;
11875
+ this.softwareDecrypt(data, key, iv, aesMode);
11876
+ const decryptResult = this.flush();
11877
+ if (decryptResult) {
11878
+ return decryptResult.buffer;
11879
+ }
11802
11880
  }
11803
- throw new Error('WebCrypto and softwareDecrypt: failed to decrypt data');
11881
+ throw new Error('WebCrypto' + (enableSoftwareAES ? ' and softwareDecrypt' : '') + ': failed to decrypt data');
11804
11882
  }
11805
11883
  getValidChunk(data) {
11806
11884
  let currentChunk = data;
@@ -11851,7 +11929,7 @@ const State = {
11851
11929
  };
11852
11930
  class BaseStreamController extends TaskLoop {
11853
11931
  constructor(hls, fragmentTracker, keyLoader, logPrefix, playlistType) {
11854
- super();
11932
+ super(logPrefix, hls.logger);
11855
11933
  this.hls = void 0;
11856
11934
  this.fragPrevious = null;
11857
11935
  this.fragCurrent = null;
@@ -11876,22 +11954,89 @@ class BaseStreamController extends TaskLoop {
11876
11954
  this.startFragRequested = false;
11877
11955
  this.decrypter = void 0;
11878
11956
  this.initPTS = [];
11879
- this.onvseeking = null;
11880
- this.onvended = null;
11881
- this.logPrefix = '';
11882
- this.log = void 0;
11883
- this.warn = void 0;
11957
+ this.buffering = true;
11958
+ this.onMediaSeeking = () => {
11959
+ const {
11960
+ config,
11961
+ fragCurrent,
11962
+ media,
11963
+ mediaBuffer,
11964
+ state
11965
+ } = this;
11966
+ const currentTime = media ? media.currentTime : 0;
11967
+ const bufferInfo = BufferHelper.bufferInfo(mediaBuffer ? mediaBuffer : media, currentTime, config.maxBufferHole);
11968
+ this.log(`media seeking to ${isFiniteNumber(currentTime) ? currentTime.toFixed(3) : currentTime}, state: ${state}`);
11969
+ if (this.state === State.ENDED) {
11970
+ this.resetLoadingState();
11971
+ } else if (fragCurrent) {
11972
+ // Seeking while frag load is in progress
11973
+ const tolerance = config.maxFragLookUpTolerance;
11974
+ const fragStartOffset = fragCurrent.start - tolerance;
11975
+ const fragEndOffset = fragCurrent.start + fragCurrent.duration + tolerance;
11976
+ // if seeking out of buffered range or into new one
11977
+ if (!bufferInfo.len || fragEndOffset < bufferInfo.start || fragStartOffset > bufferInfo.end) {
11978
+ const pastFragment = currentTime > fragEndOffset;
11979
+ // if the seek position is outside the current fragment range
11980
+ if (currentTime < fragStartOffset || pastFragment) {
11981
+ if (pastFragment && fragCurrent.loader) {
11982
+ this.log('seeking outside of buffer while fragment load in progress, cancel fragment load');
11983
+ fragCurrent.abortRequests();
11984
+ this.resetLoadingState();
11985
+ }
11986
+ this.fragPrevious = null;
11987
+ }
11988
+ }
11989
+ }
11990
+ if (media) {
11991
+ // Remove gap fragments
11992
+ this.fragmentTracker.removeFragmentsInRange(currentTime, Infinity, this.playlistType, true);
11993
+ this.lastCurrentTime = currentTime;
11994
+ }
11995
+
11996
+ // in case seeking occurs although no media buffered, adjust startPosition and nextLoadPosition to seek target
11997
+ if (!this.loadedmetadata && !bufferInfo.len) {
11998
+ this.nextLoadPosition = this.startPosition = currentTime;
11999
+ }
12000
+
12001
+ // Async tick to speed up processing
12002
+ this.tickImmediate();
12003
+ };
12004
+ this.onMediaEnded = () => {
12005
+ // reset startPosition and lastCurrentTime to restart playback @ stream beginning
12006
+ this.startPosition = this.lastCurrentTime = 0;
12007
+ if (this.playlistType === PlaylistLevelType.MAIN) {
12008
+ this.hls.trigger(Events.MEDIA_ENDED, {
12009
+ stalled: false
12010
+ });
12011
+ }
12012
+ };
11884
12013
  this.playlistType = playlistType;
11885
- this.logPrefix = logPrefix;
11886
- this.log = logger.log.bind(logger, `${logPrefix}:`);
11887
- this.warn = logger.warn.bind(logger, `${logPrefix}:`);
11888
12014
  this.hls = hls;
11889
12015
  this.fragmentLoader = new FragmentLoader(hls.config);
11890
12016
  this.keyLoader = keyLoader;
11891
12017
  this.fragmentTracker = fragmentTracker;
11892
12018
  this.config = hls.config;
11893
12019
  this.decrypter = new Decrypter(hls.config);
12020
+ }
12021
+ registerListeners() {
12022
+ const {
12023
+ hls
12024
+ } = this;
12025
+ hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
12026
+ hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
12027
+ hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
11894
12028
  hls.on(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
12029
+ hls.on(Events.ERROR, this.onError, this);
12030
+ }
12031
+ unregisterListeners() {
12032
+ const {
12033
+ hls
12034
+ } = this;
12035
+ hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
12036
+ hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
12037
+ hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
12038
+ hls.off(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
12039
+ hls.off(Events.ERROR, this.onError, this);
11895
12040
  }
11896
12041
  doTick() {
11897
12042
  this.onTickEnd();
@@ -11915,6 +12060,12 @@ class BaseStreamController extends TaskLoop {
11915
12060
  this.clearNextTick();
11916
12061
  this.state = State.STOPPED;
11917
12062
  }
12063
+ pauseBuffering() {
12064
+ this.buffering = false;
12065
+ }
12066
+ resumeBuffering() {
12067
+ this.buffering = true;
12068
+ }
11918
12069
  _streamEnded(bufferInfo, levelDetails) {
11919
12070
  // If playlist is live, there is another buffered range after the current range, nothing buffered, media is detached,
11920
12071
  // of nothing loading/loaded return false
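The new buffering flag and pauseBuffering()/resumeBuffering() methods above let fragment loading be suspended without stopping the controllers, and the buffer-controller hunk earlier calls hls.resumeBuffering() after the source buffers are reset. Assuming the matching public methods on the Hls instance (src/hls.ts is also updated in this release), usage would be:

hls.pauseBuffering();  // stop requesting new fragments, e.g. while an ad or interstitial plays
hls.resumeBuffering(); // pick fragment loading back up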
@@ -11945,10 +12096,8 @@ class BaseStreamController extends TaskLoop {
11945
12096
  }
11946
12097
  onMediaAttached(event, data) {
11947
12098
  const media = this.media = this.mediaBuffer = data.media;
11948
- this.onvseeking = this.onMediaSeeking.bind(this);
11949
- this.onvended = this.onMediaEnded.bind(this);
11950
- media.addEventListener('seeking', this.onvseeking);
11951
- media.addEventListener('ended', this.onvended);
12099
+ media.addEventListener('seeking', this.onMediaSeeking);
12100
+ media.addEventListener('ended', this.onMediaEnded);
11952
12101
  const config = this.config;
11953
12102
  if (this.levels && config.autoStartLoad && this.state === State.STOPPED) {
11954
12103
  this.startLoad(config.startPosition);
@@ -11962,10 +12111,9 @@ class BaseStreamController extends TaskLoop {
11962
12111
  }
11963
12112
 
11964
12113
  // remove video listeners
11965
- if (media && this.onvseeking && this.onvended) {
11966
- media.removeEventListener('seeking', this.onvseeking);
11967
- media.removeEventListener('ended', this.onvended);
11968
- this.onvseeking = this.onvended = null;
12114
+ if (media) {
12115
+ media.removeEventListener('seeking', this.onMediaSeeking);
12116
+ media.removeEventListener('ended', this.onMediaEnded);
11969
12117
  }
11970
12118
  if (this.keyLoader) {
11971
12119
  this.keyLoader.detach();
@@ -11975,56 +12123,8 @@ class BaseStreamController extends TaskLoop {
11975
12123
  this.fragmentTracker.removeAllFragments();
11976
12124
  this.stopLoad();
11977
12125
  }
11978
- onMediaSeeking() {
11979
- const {
11980
- config,
11981
- fragCurrent,
11982
- media,
11983
- mediaBuffer,
11984
- state
11985
- } = this;
11986
- const currentTime = media ? media.currentTime : 0;
11987
- const bufferInfo = BufferHelper.bufferInfo(mediaBuffer ? mediaBuffer : media, currentTime, config.maxBufferHole);
11988
- this.log(`media seeking to ${isFiniteNumber(currentTime) ? currentTime.toFixed(3) : currentTime}, state: ${state}`);
11989
- if (this.state === State.ENDED) {
11990
- this.resetLoadingState();
11991
- } else if (fragCurrent) {
11992
- // Seeking while frag load is in progress
11993
- const tolerance = config.maxFragLookUpTolerance;
11994
- const fragStartOffset = fragCurrent.start - tolerance;
11995
- const fragEndOffset = fragCurrent.start + fragCurrent.duration + tolerance;
11996
- // if seeking out of buffered range or into new one
11997
- if (!bufferInfo.len || fragEndOffset < bufferInfo.start || fragStartOffset > bufferInfo.end) {
11998
- const pastFragment = currentTime > fragEndOffset;
11999
- // if the seek position is outside the current fragment range
12000
- if (currentTime < fragStartOffset || pastFragment) {
12001
- if (pastFragment && fragCurrent.loader) {
12002
- this.log('seeking outside of buffer while fragment load in progress, cancel fragment load');
12003
- fragCurrent.abortRequests();
12004
- this.resetLoadingState();
12005
- }
12006
- this.fragPrevious = null;
12007
- }
12008
- }
12009
- }
12010
- if (media) {
12011
- // Remove gap fragments
12012
- this.fragmentTracker.removeFragmentsInRange(currentTime, Infinity, this.playlistType, true);
12013
- this.lastCurrentTime = currentTime;
12014
- }
12015
-
12016
- // in case seeking occurs although no media buffered, adjust startPosition and nextLoadPosition to seek target
12017
- if (!this.loadedmetadata && !bufferInfo.len) {
12018
- this.nextLoadPosition = this.startPosition = currentTime;
12019
- }
12020
-
12021
- // Async tick to speed up processing
12022
- this.tickImmediate();
12023
- }
12024
- onMediaEnded() {
12025
- // reset startPosition and lastCurrentTime to restart playback @ stream beginning
12026
- this.startPosition = this.lastCurrentTime = 0;
12027
- }
12126
+ onManifestLoading() {}
12127
+ onError(event, data) {}
12028
12128
  onManifestLoaded(event, data) {
12029
12129
  this.startTimeOffset = data.startTimeOffset;
12030
12130
  this.initPTS = [];
@@ -12034,7 +12134,7 @@ class BaseStreamController extends TaskLoop {
12034
12134
  this.stopLoad();
12035
12135
  super.onHandlerDestroying();
12036
12136
  // @ts-ignore
12037
- this.hls = null;
12137
+ this.hls = this.onMediaSeeking = this.onMediaEnded = null;
12038
12138
  }
12039
12139
  onHandlerDestroyed() {
12040
12140
  this.state = State.STOPPED;
@@ -12165,10 +12265,10 @@ class BaseStreamController extends TaskLoop {
12165
12265
  const decryptData = frag.decryptdata;
12166
12266
 
12167
12267
  // check to see if the payload needs to be decrypted
12168
- if (payload && payload.byteLength > 0 && decryptData != null && decryptData.key && decryptData.iv && decryptData.method === 'AES-128') {
12268
+ if (payload && payload.byteLength > 0 && decryptData != null && decryptData.key && decryptData.iv && isFullSegmentEncryption(decryptData.method)) {
12169
12269
  const startTime = self.performance.now();
12170
12270
  // decrypt init segment data
12171
- return this.decrypter.decrypt(new Uint8Array(payload), decryptData.key.buffer, decryptData.iv.buffer).catch(err => {
12271
+ return this.decrypter.decrypt(new Uint8Array(payload), decryptData.key.buffer, decryptData.iv.buffer, getAesModeFromFullSegmentMethod(decryptData.method)).catch(err => {
12172
12272
  hls.trigger(Events.ERROR, {
12173
12273
  type: ErrorTypes.MEDIA_ERROR,
12174
12274
  details: ErrorDetails.FRAG_DECRYPT_ERROR,
@@ -12280,7 +12380,7 @@ class BaseStreamController extends TaskLoop {
12280
12380
  }
12281
12381
  let keyLoadingPromise = null;
12282
12382
  if (frag.encrypted && !((_frag$decryptdata = frag.decryptdata) != null && _frag$decryptdata.key)) {
12283
- this.log(`Loading key for ${frag.sn} of [${details.startSN}-${details.endSN}], ${this.logPrefix === '[stream-controller]' ? 'level' : 'track'} ${frag.level}`);
12383
+ this.log(`Loading key for ${frag.sn} of [${details.startSN}-${details.endSN}], ${this.playlistType === PlaylistLevelType.MAIN ? 'level' : 'track'} ${frag.level}`);
12284
12384
  this.state = State.KEY_LOADING;
12285
12385
  this.fragCurrent = frag;
12286
12386
  keyLoadingPromise = this.keyLoader.load(frag).then(keyLoadedData => {
@@ -12311,7 +12411,7 @@ class BaseStreamController extends TaskLoop {
12311
12411
  const partIndex = this.getNextPart(partList, frag, targetBufferTime);
12312
12412
  if (partIndex > -1) {
12313
12413
  const part = partList[partIndex];
12314
- this.log(`Loading part sn: ${frag.sn} p: ${part.index} cc: ${frag.cc} of playlist [${details.startSN}-${details.endSN}] parts [0-${partIndex}-${partList.length - 1}] ${this.logPrefix === '[stream-controller]' ? 'level' : 'track'}: ${frag.level}, target: ${parseFloat(targetBufferTime.toFixed(3))}`);
12414
+ this.log(`Loading part sn: ${frag.sn} p: ${part.index} cc: ${frag.cc} of playlist [${details.startSN}-${details.endSN}] parts [0-${partIndex}-${partList.length - 1}] ${this.playlistType === PlaylistLevelType.MAIN ? 'level' : 'track'}: ${frag.level}, target: ${parseFloat(targetBufferTime.toFixed(3))}`);
12315
12415
  this.nextLoadPosition = part.start + part.duration;
12316
12416
  this.state = State.FRAG_LOADING;
12317
12417
  let _result;
@@ -12340,7 +12440,7 @@ class BaseStreamController extends TaskLoop {
12340
12440
  }
12341
12441
  }
12342
12442
  }
12343
- this.log(`Loading fragment ${frag.sn} cc: ${frag.cc} ${details ? 'of [' + details.startSN + '-' + details.endSN + '] ' : ''}${this.logPrefix === '[stream-controller]' ? 'level' : 'track'}: ${frag.level}, target: ${parseFloat(targetBufferTime.toFixed(3))}`);
12443
+ this.log(`Loading fragment ${frag.sn} cc: ${frag.cc} ${details ? 'of [' + details.startSN + '-' + details.endSN + '] ' : ''}${this.playlistType === PlaylistLevelType.MAIN ? 'level' : 'track'}: ${frag.level}, target: ${parseFloat(targetBufferTime.toFixed(3))}`);
12344
12444
  // Don't update nextLoadPosition for fragments which are not buffered
12345
12445
  if (isFiniteNumber(frag.sn) && !this.bitrateTest) {
12346
12446
  this.nextLoadPosition = frag.start + frag.duration;
@@ -12925,7 +13025,7 @@ class BaseStreamController extends TaskLoop {
12925
13025
  errorAction.resolved = true;
12926
13026
  }
12927
13027
  } else {
12928
- logger.warn(`${data.details} reached or exceeded max retry (${retryCount})`);
13028
+ this.warn(`${data.details} reached or exceeded max retry (${retryCount})`);
12929
13029
  return;
12930
13030
  }
12931
13031
  } else if ((errorAction == null ? void 0 : errorAction.action) === NetworkErrorAction.SendAlternateToPenaltyBox) {
@@ -13320,6 +13420,7 @@ const initPTSFn = (timestamp, timeOffset, initPTS) => {
13320
13420
  */
13321
13421
  function getAudioConfig(observer, data, offset, audioCodec) {
13322
13422
  let adtsObjectType;
13423
+ let originalAdtsObjectType;
13323
13424
  let adtsExtensionSamplingIndex;
13324
13425
  let adtsChannelConfig;
13325
13426
  let config;
@@ -13327,7 +13428,7 @@ function getAudioConfig(observer, data, offset, audioCodec) {
13327
13428
  const manifestCodec = audioCodec;
13328
13429
  const adtsSamplingRates = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
13329
13430
  // byte 2
13330
- adtsObjectType = ((data[offset + 2] & 0xc0) >>> 6) + 1;
13431
+ adtsObjectType = originalAdtsObjectType = ((data[offset + 2] & 0xc0) >>> 6) + 1;
13331
13432
  const adtsSamplingIndex = (data[offset + 2] & 0x3c) >>> 2;
13332
13433
  if (adtsSamplingIndex > adtsSamplingRates.length - 1) {
13333
13434
  const error = new Error(`invalid ADTS sampling index:${adtsSamplingIndex}`);
@@ -13344,8 +13445,8 @@ function getAudioConfig(observer, data, offset, audioCodec) {
13344
13445
  // byte 3
13345
13446
  adtsChannelConfig |= (data[offset + 3] & 0xc0) >>> 6;
13346
13447
  logger.log(`manifest codec:${audioCodec}, ADTS type:${adtsObjectType}, samplingIndex:${adtsSamplingIndex}`);
13347
- // firefox: freq less than 24kHz = AAC SBR (HE-AAC)
13348
- if (/firefox/i.test(userAgent)) {
13448
+ // Firefox and Pale Moon: freq less than 24kHz = AAC SBR (HE-AAC)
13449
+ if (/firefox|palemoon/i.test(userAgent)) {
13349
13450
  if (adtsSamplingIndex >= 6) {
13350
13451
  adtsObjectType = 5;
13351
13452
  config = new Array(4);
@@ -13439,6 +13540,7 @@ function getAudioConfig(observer, data, offset, audioCodec) {
13439
13540
  samplerate: adtsSamplingRates[adtsSamplingIndex],
13440
13541
  channelCount: adtsChannelConfig,
13441
13542
  codec: 'mp4a.40.' + adtsObjectType,
13543
+ parsedCodec: 'mp4a.40.' + originalAdtsObjectType,
13442
13544
  manifestCodec
13443
13545
  };
13444
13546
  }
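getAudioConfig now keeps the ADTS object type it actually parsed (originalAdtsObjectType) next to the possibly rewritten one, so the remuxer can later request a silent frame that matches the real bitstream (see the AAC.getSilentFrame call sites further down, which prefer track.parsedCodec). An illustration with assumed values of how codec and parsedCodec diverge when the user-agent workaround kicks in:

// Hypothetical input: AAC-LC (object type 2) at 22.05 kHz (sampling index 7)
// played in Firefox, where the workaround forces the HE-AAC object type.
const userAgent = 'Mozilla/5.0 ... Firefox/120.0';
const adtsSamplingIndex = 7;
const originalAdtsObjectType = 2;

let adtsObjectType = originalAdtsObjectType;
if (/firefox|palemoon/i.test(userAgent) && adtsSamplingIndex >= 6) {
  adtsObjectType = 5; // signal HE-AAC (SBR) so the decoder upsamples
}

const codec = 'mp4a.40.' + adtsObjectType;                // 'mp4a.40.5'
const parsedCodec = 'mp4a.40.' + originalAdtsObjectType;  // 'mp4a.40.2'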
@@ -13493,7 +13595,8 @@ function initTrackConfig(track, observer, data, offset, audioCodec) {
13493
13595
  track.channelCount = config.channelCount;
13494
13596
  track.codec = config.codec;
13495
13597
  track.manifestCodec = config.manifestCodec;
13496
- logger.log(`parsed codec:${track.codec}, rate:${config.samplerate}, channels:${config.channelCount}`);
13598
+ track.parsedCodec = config.parsedCodec;
13599
+ logger.log(`parsed codec:${track.parsedCodec}, codec:${track.codec}, rate:${config.samplerate}, channels:${config.channelCount}`);
13497
13600
  }
13498
13601
  }
13499
13602
  function getFrameDuration(samplerate) {
@@ -13971,6 +14074,110 @@ class BaseVideoParser {
13971
14074
  logger.log(VideoSample.pts + '/' + VideoSample.dts + ':' + VideoSample.debug);
13972
14075
  }
13973
14076
  }
14077
+ parseNALu(track, array) {
14078
+ const len = array.byteLength;
14079
+ let state = track.naluState || 0;
14080
+ const lastState = state;
14081
+ const units = [];
14082
+ let i = 0;
14083
+ let value;
14084
+ let overflow;
14085
+ let unitType;
14086
+ let lastUnitStart = -1;
14087
+ let lastUnitType = 0;
14088
+ // logger.log('PES:' + Hex.hexDump(array));
14089
+
14090
+ if (state === -1) {
14091
+ // special use case where we found 3 or 4-byte start codes exactly at the end of previous PES packet
14092
+ lastUnitStart = 0;
14093
+ // NALu type is value read from offset 0
14094
+ lastUnitType = this.getNALuType(array, 0);
14095
+ state = 0;
14096
+ i = 1;
14097
+ }
14098
+ while (i < len) {
14099
+ value = array[i++];
14100
+ // optimization: states 0 and 1 are the predominant cases, so handle them outside of the switch/case
14101
+ if (!state) {
14102
+ state = value ? 0 : 1;
14103
+ continue;
14104
+ }
14105
+ if (state === 1) {
14106
+ state = value ? 0 : 2;
14107
+ continue;
14108
+ }
14109
+ // here we have state either equal to 2 or 3
14110
+ if (!value) {
14111
+ state = 3;
14112
+ } else if (value === 1) {
14113
+ overflow = i - state - 1;
14114
+ if (lastUnitStart >= 0) {
14115
+ const unit = {
14116
+ data: array.subarray(lastUnitStart, overflow),
14117
+ type: lastUnitType
14118
+ };
14119
+ // logger.log('pushing NALU, type/size:' + unit.type + '/' + unit.data.byteLength);
14120
+ units.push(unit);
14121
+ } else {
14122
+ // lastUnitStart is undefined => this is the first start code found in this PES packet
14123
+ // first check if start code delimiter is overlapping between 2 PES packets,
14124
+ // ie it started in last packet (lastState not zero)
14125
+ // and ended at the beginning of this PES packet (i <= 4 - lastState)
14126
+ const lastUnit = this.getLastNalUnit(track.samples);
14127
+ if (lastUnit) {
14128
+ if (lastState && i <= 4 - lastState) {
14129
+ // start delimiter overlapping between PES packets
14130
+ // strip start delimiter bytes from the end of last NAL unit
14131
+ // check if lastUnit had a state different from zero
14132
+ if (lastUnit.state) {
14133
+ // strip last bytes
14134
+ lastUnit.data = lastUnit.data.subarray(0, lastUnit.data.byteLength - lastState);
14135
+ }
14136
+ }
14137
+ // If NAL units are not starting right at the beginning of the PES packet, push preceding data into previous NAL unit.
14138
+
14139
+ if (overflow > 0) {
14140
+ // logger.log('first NALU found with overflow:' + overflow);
14141
+ lastUnit.data = appendUint8Array(lastUnit.data, array.subarray(0, overflow));
14142
+ lastUnit.state = 0;
14143
+ }
14144
+ }
14145
+ }
14146
+ // check if we can read unit type
14147
+ if (i < len) {
14148
+ unitType = this.getNALuType(array, i);
14149
+ // logger.log('find NALU @ offset:' + i + ',type:' + unitType);
14150
+ lastUnitStart = i;
14151
+ lastUnitType = unitType;
14152
+ state = 0;
14153
+ } else {
14154
+ // not enough byte to read unit type. let's read it on next PES parsing
14155
+ state = -1;
14156
+ }
14157
+ } else {
14158
+ state = 0;
14159
+ }
14160
+ }
14161
+ if (lastUnitStart >= 0 && state >= 0) {
14162
+ const unit = {
14163
+ data: array.subarray(lastUnitStart, len),
14164
+ type: lastUnitType,
14165
+ state: state
14166
+ };
14167
+ units.push(unit);
14168
+ // logger.log('pushing NALU, type/size/state:' + unit.type + '/' + unit.data.byteLength + '/' + state);
14169
+ }
14170
+ // no NALu found
14171
+ if (units.length === 0) {
14172
+ // append pes.data to previous NAL unit
14173
+ const lastUnit = this.getLastNalUnit(track.samples);
14174
+ if (lastUnit) {
14175
+ lastUnit.data = appendUint8Array(lastUnit.data, array);
14176
+ }
14177
+ }
14178
+ track.naluState = state;
14179
+ return units;
14180
+ }
13974
14181
  }
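parseNALu — hoisted into BaseVideoParser so the AVC and HEVC parsers can share it — scans Annex-B start codes with a small per-byte state machine: states 1–3 count the zero bytes seen so far, state -1 records that a start code ended exactly at a PES boundary, and track.naluState carries that state into the next call. A simplified, self-contained sketch of the same start-code split, without the cross-PES bookkeeping the method above layers on top:

// Split an Annex-B buffer on 00 00 01 / 00 00 00 01 start codes (sketch only).
function splitAnnexB(data: Uint8Array): Uint8Array[] {
  const units: Uint8Array[] = [];
  let zeros = 0;
  let unitStart = -1;
  for (let i = 0; i < data.length; i++) {
    const byte = data[i];
    if (byte === 0) {
      zeros++;
      continue;
    }
    if (byte === 1 && zeros >= 2) {
      if (unitStart >= 0) {
        // close the previous unit, stripping the start-code zeros
        units.push(data.subarray(unitStart, i - zeros));
      }
      unitStart = i + 1;
    }
    zeros = 0;
  }
  if (unitStart >= 0) {
    units.push(data.subarray(unitStart));
  }
  return units;
}

// splitAnnexB(Uint8Array.of(0, 0, 1, 0x65, 0xaa, 0, 0, 0, 1, 0x68, 0xbb))
// -> [Uint8Array [0x65, 0xaa], Uint8Array [0x68, 0xbb]]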
13975
14182
 
13976
14183
  /**
@@ -14113,21 +14320,171 @@ class ExpGolomb {
14113
14320
  readUInt() {
14114
14321
  return this.readBits(32);
14115
14322
  }
14323
+ }
14324
+
14325
+ class AvcVideoParser extends BaseVideoParser {
14326
+ parsePES(track, textTrack, pes, last, duration) {
14327
+ const units = this.parseNALu(track, pes.data);
14328
+ let VideoSample = this.VideoSample;
14329
+ let push;
14330
+ let spsfound = false;
14331
+ // free pes.data to save up some memory
14332
+ pes.data = null;
14333
+
14334
+ // if new NAL units found and last sample still there, let's push ...
14335
+ // this helps parsing streams with missing AUD (only do this if AUD never found)
14336
+ if (VideoSample && units.length && !track.audFound) {
14337
+ this.pushAccessUnit(VideoSample, track);
14338
+ VideoSample = this.VideoSample = this.createVideoSample(false, pes.pts, pes.dts, '');
14339
+ }
14340
+ units.forEach(unit => {
14341
+ var _VideoSample2;
14342
+ switch (unit.type) {
14343
+ // NDR
14344
+ case 1:
14345
+ {
14346
+ let iskey = false;
14347
+ push = true;
14348
+ const data = unit.data;
14349
+ // only check slice type to detect KF in case SPS found in same packet (any keyframe is preceded by SPS ...)
14350
+ if (spsfound && data.length > 4) {
14351
+ // retrieve slice type by parsing beginning of NAL unit (follow H264 spec, slice_header definition) to detect keyframe embedded in NDR
14352
+ const sliceType = this.readSliceType(data);
14353
+ // 2 : I slice, 4 : SI slice, 7 : I slice, 9: SI slice
14354
+ // SI slice : A slice that is coded using intra prediction only and using quantisation of the prediction samples.
14355
+ // An SI slice can be coded such that its decoded samples can be constructed identically to an SP slice.
14356
+ // I slice: A slice that is not an SI slice that is decoded using intra prediction only.
14357
+ // if (sliceType === 2 || sliceType === 7) {
14358
+ if (sliceType === 2 || sliceType === 4 || sliceType === 7 || sliceType === 9) {
14359
+ iskey = true;
14360
+ }
14361
+ }
14362
+ if (iskey) {
14363
+ var _VideoSample;
14364
+ // if we have non-keyframe data already, that cannot belong to the same frame as a keyframe, so force a push
14365
+ if ((_VideoSample = VideoSample) != null && _VideoSample.frame && !VideoSample.key) {
14366
+ this.pushAccessUnit(VideoSample, track);
14367
+ VideoSample = this.VideoSample = null;
14368
+ }
14369
+ }
14370
+ if (!VideoSample) {
14371
+ VideoSample = this.VideoSample = this.createVideoSample(true, pes.pts, pes.dts, '');
14372
+ }
14373
+ VideoSample.frame = true;
14374
+ VideoSample.key = iskey;
14375
+ break;
14376
+ // IDR
14377
+ }
14378
+ case 5:
14379
+ push = true;
14380
+ // handle PES not starting with AUD
14381
+ // if we have frame data already, that cannot belong to the same frame, so force a push
14382
+ if ((_VideoSample2 = VideoSample) != null && _VideoSample2.frame && !VideoSample.key) {
14383
+ this.pushAccessUnit(VideoSample, track);
14384
+ VideoSample = this.VideoSample = null;
14385
+ }
14386
+ if (!VideoSample) {
14387
+ VideoSample = this.VideoSample = this.createVideoSample(true, pes.pts, pes.dts, '');
14388
+ }
14389
+ VideoSample.key = true;
14390
+ VideoSample.frame = true;
14391
+ break;
14392
+ // SEI
14393
+ case 6:
14394
+ {
14395
+ push = true;
14396
+ parseSEIMessageFromNALu(unit.data, 1, pes.pts, textTrack.samples);
14397
+ break;
14398
+ // SPS
14399
+ }
14400
+ case 7:
14401
+ {
14402
+ var _track$pixelRatio, _track$pixelRatio2;
14403
+ push = true;
14404
+ spsfound = true;
14405
+ const sps = unit.data;
14406
+ const config = this.readSPS(sps);
14407
+ if (!track.sps || track.width !== config.width || track.height !== config.height || ((_track$pixelRatio = track.pixelRatio) == null ? void 0 : _track$pixelRatio[0]) !== config.pixelRatio[0] || ((_track$pixelRatio2 = track.pixelRatio) == null ? void 0 : _track$pixelRatio2[1]) !== config.pixelRatio[1]) {
14408
+ track.width = config.width;
14409
+ track.height = config.height;
14410
+ track.pixelRatio = config.pixelRatio;
14411
+ track.sps = [sps];
14412
+ track.duration = duration;
14413
+ const codecarray = sps.subarray(1, 4);
14414
+ let codecstring = 'avc1.';
14415
+ for (let i = 0; i < 3; i++) {
14416
+ let h = codecarray[i].toString(16);
14417
+ if (h.length < 2) {
14418
+ h = '0' + h;
14419
+ }
14420
+ codecstring += h;
14421
+ }
14422
+ track.codec = codecstring;
14423
+ }
14424
+ break;
14425
+ }
14426
+ // PPS
14427
+ case 8:
14428
+ push = true;
14429
+ track.pps = [unit.data];
14430
+ break;
14431
+ // AUD
14432
+ case 9:
14433
+ push = true;
14434
+ track.audFound = true;
14435
+ if (VideoSample) {
14436
+ this.pushAccessUnit(VideoSample, track);
14437
+ }
14438
+ VideoSample = this.VideoSample = this.createVideoSample(false, pes.pts, pes.dts, '');
14439
+ break;
14440
+ // Filler Data
14441
+ case 12:
14442
+ push = true;
14443
+ break;
14444
+ default:
14445
+ push = false;
14446
+ if (VideoSample) {
14447
+ VideoSample.debug += 'unknown NAL ' + unit.type + ' ';
14448
+ }
14449
+ break;
14450
+ }
14451
+ if (VideoSample && push) {
14452
+ const units = VideoSample.units;
14453
+ units.push(unit);
14454
+ }
14455
+ });
14456
+ // if last PES packet, push samples
14457
+ if (last && VideoSample) {
14458
+ this.pushAccessUnit(VideoSample, track);
14459
+ this.VideoSample = null;
14460
+ }
14461
+ }
14462
+ getNALuType(data, offset) {
14463
+ return data[offset] & 0x1f;
14464
+ }
14465
+ readSliceType(data) {
14466
+ const eg = new ExpGolomb(data);
14467
+ // skip NALu type
14468
+ eg.readUByte();
14469
+ // discard first_mb_in_slice
14470
+ eg.readUEG();
14471
+ // return slice_type
14472
+ return eg.readUEG();
14473
+ }
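readSliceType decodes two Exp-Golomb ue(v) fields from the start of the slice header to decide whether an NDR unit is really an intra picture (slice_type 2, 4, 7 or 9). A worked example with a hypothetical slice-header byte, using a toy ue(v) reader over a bit string:

// Toy ue(v) reader, for illustration only (the parser above uses ExpGolomb).
function ue(bits: string, pos: { i: number }): number {
  let leadingZeros = 0;
  while (bits[pos.i] === '0') {
    leadingZeros++;
    pos.i++;
  }
  let value = 0;
  for (let k = 0; k <= leadingZeros; k++) {
    value = (value << 1) | (bits[pos.i++] === '1' ? 1 : 0);
  }
  return value - 1;
}

// Hypothetical slice header starting with byte 0x88 = '10001000'
// (read after skipping the one-byte NAL header):
const pos = { i: 0 };
const firstMbInSlice = ue('10001000', pos); // 0
const sliceType = ue('10001000', pos);      // 7 -> I slice -> keyframe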
14116
14474
 
14117
14475
  /**
14118
- * Advance the ExpGolomb decoder past a scaling list. The scaling
14119
- * list is optionally transmitted as part of a sequence parameter
14476
+ * The scaling list is optionally transmitted as part of a sequence parameter
14120
14477
  * set and is not relevant to transmuxing.
14121
14478
  * @param count the number of entries in this scaling list
14122
14479
  * @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
14123
14480
  */
14124
- skipScalingList(count) {
14481
+ skipScalingList(count, reader) {
14125
14482
  let lastScale = 8;
14126
14483
  let nextScale = 8;
14127
14484
  let deltaScale;
14128
14485
  for (let j = 0; j < count; j++) {
14129
14486
  if (nextScale !== 0) {
14130
- deltaScale = this.readEG();
14487
+ deltaScale = reader.readEG();
14131
14488
  nextScale = (lastScale + deltaScale + 256) % 256;
14132
14489
  }
14133
14490
  lastScale = nextScale === 0 ? lastScale : nextScale;
@@ -14142,7 +14499,8 @@ class ExpGolomb {
14142
14499
  * sequence parameter set, including the dimensions of the
14143
14500
  * associated video frames.
14144
14501
  */
14145
- readSPS() {
14502
+ readSPS(sps) {
14503
+ const eg = new ExpGolomb(sps);
14146
14504
  let frameCropLeftOffset = 0;
14147
14505
  let frameCropRightOffset = 0;
14148
14506
  let frameCropTopOffset = 0;
@@ -14150,13 +14508,13 @@ class ExpGolomb {
14150
14508
  let numRefFramesInPicOrderCntCycle;
14151
14509
  let scalingListCount;
14152
14510
  let i;
14153
- const readUByte = this.readUByte.bind(this);
14154
- const readBits = this.readBits.bind(this);
14155
- const readUEG = this.readUEG.bind(this);
14156
- const readBoolean = this.readBoolean.bind(this);
14157
- const skipBits = this.skipBits.bind(this);
14158
- const skipEG = this.skipEG.bind(this);
14159
- const skipUEG = this.skipUEG.bind(this);
14511
+ const readUByte = eg.readUByte.bind(eg);
14512
+ const readBits = eg.readBits.bind(eg);
14513
+ const readUEG = eg.readUEG.bind(eg);
14514
+ const readBoolean = eg.readBoolean.bind(eg);
14515
+ const skipBits = eg.skipBits.bind(eg);
14516
+ const skipEG = eg.skipEG.bind(eg);
14517
+ const skipUEG = eg.skipUEG.bind(eg);
14160
14518
  const skipScalingList = this.skipScalingList.bind(this);
14161
14519
  readUByte();
14162
14520
  const profileIdc = readUByte(); // profile_idc
@@ -14181,9 +14539,9 @@ class ExpGolomb {
14181
14539
  if (readBoolean()) {
14182
14540
  // seq_scaling_list_present_flag[ i ]
14183
14541
  if (i < 6) {
14184
- skipScalingList(16);
14542
+ skipScalingList(16, eg);
14185
14543
  } else {
14186
- skipScalingList(64);
14544
+ skipScalingList(64, eg);
14187
14545
  }
14188
14546
  }
14189
14547
  }
@@ -14288,19 +14646,15 @@ class ExpGolomb {
14288
14646
  pixelRatio: pixelRatio
14289
14647
  };
14290
14648
  }
14291
- readSliceType() {
14292
- // skip NALu type
14293
- this.readUByte();
14294
- // discard first_mb_in_slice
14295
- this.readUEG();
14296
- // return slice_type
14297
- return this.readUEG();
14298
- }
14299
14649
  }
14300
14650
 
14301
- class AvcVideoParser extends BaseVideoParser {
14302
- parseAVCPES(track, textTrack, pes, last, duration) {
14303
- const units = this.parseAVCNALu(track, pes.data);
14651
+ class HevcVideoParser extends BaseVideoParser {
14652
+ constructor(...args) {
14653
+ super(...args);
14654
+ this.initVPS = null;
14655
+ }
14656
+ parsePES(track, textTrack, pes, last, duration) {
14657
+ const units = this.parseNALu(track, pes.data);
14304
14658
  let VideoSample = this.VideoSample;
14305
14659
  let push;
14306
14660
  let spsfound = false;
@@ -14316,42 +14670,49 @@ class AvcVideoParser extends BaseVideoParser {
14316
14670
  units.forEach(unit => {
14317
14671
  var _VideoSample2;
14318
14672
  switch (unit.type) {
14319
- // NDR
14673
+ // NON-IDR, NON RANDOM ACCESS SLICE
14674
+ case 0:
14320
14675
  case 1:
14321
- {
14322
- let iskey = false;
14323
- push = true;
14324
- const data = unit.data;
14325
- // only check slice type to detect KF in case SPS found in same packet (any keyframe is preceded by SPS ...)
14326
- if (spsfound && data.length > 4) {
14327
- // retrieve slice type by parsing beginning of NAL unit (follow H264 spec, slice_header definition) to detect keyframe embedded in NDR
14328
- const sliceType = new ExpGolomb(data).readSliceType();
14329
- // 2 : I slice, 4 : SI slice, 7 : I slice, 9: SI slice
14330
- // SI slice : A slice that is coded using intra prediction only and using quantisation of the prediction samples.
14331
- // An SI slice can be coded such that its decoded samples can be constructed identically to an SP slice.
14332
- // I slice: A slice that is not an SI slice that is decoded using intra prediction only.
14333
- // if (sliceType === 2 || sliceType === 7) {
14334
- if (sliceType === 2 || sliceType === 4 || sliceType === 7 || sliceType === 9) {
14335
- iskey = true;
14336
- }
14337
- }
14338
- if (iskey) {
14339
- var _VideoSample;
14340
- // if we have non-keyframe data already, that cannot belong to the same frame as a keyframe, so force a push
14341
- if ((_VideoSample = VideoSample) != null && _VideoSample.frame && !VideoSample.key) {
14342
- this.pushAccessUnit(VideoSample, track);
14343
- VideoSample = this.VideoSample = null;
14344
- }
14345
- }
14346
- if (!VideoSample) {
14347
- VideoSample = this.VideoSample = this.createVideoSample(true, pes.pts, pes.dts, '');
14676
+ case 2:
14677
+ case 3:
14678
+ case 4:
14679
+ case 5:
14680
+ case 6:
14681
+ case 7:
14682
+ case 8:
14683
+ case 9:
14684
+ if (!VideoSample) {
14685
+ VideoSample = this.VideoSample = this.createVideoSample(false, pes.pts, pes.dts, '');
14686
+ }
14687
+ VideoSample.frame = true;
14688
+ push = true;
14689
+ break;
14690
+
14691
+ // CRA, BLA (random access picture)
14692
+ case 16:
14693
+ case 17:
14694
+ case 18:
14695
+ case 21:
14696
+ push = true;
14697
+ if (spsfound) {
14698
+ var _VideoSample;
14699
+ // handle PES not starting with AUD
14700
+ // if we have frame data already, that cannot belong to the same frame, so force a push
14701
+ if ((_VideoSample = VideoSample) != null && _VideoSample.frame && !VideoSample.key) {
14702
+ this.pushAccessUnit(VideoSample, track);
14703
+ VideoSample = this.VideoSample = null;
14348
14704
  }
14349
- VideoSample.frame = true;
14350
- VideoSample.key = iskey;
14351
- break;
14352
- // IDR
14353
14705
  }
14354
- case 5:
14706
+ if (!VideoSample) {
14707
+ VideoSample = this.VideoSample = this.createVideoSample(true, pes.pts, pes.dts, '');
14708
+ }
14709
+ VideoSample.key = true;
14710
+ VideoSample.frame = true;
14711
+ break;
14712
+
14713
+ // IDR
14714
+ case 19:
14715
+ case 20:
14355
14716
  push = true;
14356
14717
  // handle PES not starting with AUD
14357
14718
  // if we have frame data already, that cannot belong to the same frame, so force a push
@@ -14365,48 +14726,76 @@ class AvcVideoParser extends BaseVideoParser {
14365
14726
  VideoSample.key = true;
14366
14727
  VideoSample.frame = true;
14367
14728
  break;
14729
+
14368
14730
  // SEI
14369
- case 6:
14370
- {
14371
- push = true;
14372
- parseSEIMessageFromNALu(unit.data, 1, pes.pts, textTrack.samples);
14373
- break;
14374
- // SPS
14731
+ case 39:
14732
+ push = true;
14733
+ parseSEIMessageFromNALu(unit.data, 2,
14734
+ // NALu header size
14735
+ pes.pts, textTrack.samples);
14736
+ break;
14737
+
14738
+ // VPS
14739
+ case 32:
14740
+ push = true;
14741
+ if (!track.vps) {
14742
+ const config = this.readVPS(unit.data);
14743
+ track.params = _objectSpread2({}, config);
14744
+ this.initVPS = unit.data;
14375
14745
  }
14376
- case 7:
14377
- {
14378
- var _track$pixelRatio, _track$pixelRatio2;
14379
- push = true;
14380
- spsfound = true;
14381
- const sps = unit.data;
14382
- const expGolombDecoder = new ExpGolomb(sps);
14383
- const config = expGolombDecoder.readSPS();
14384
- if (!track.sps || track.width !== config.width || track.height !== config.height || ((_track$pixelRatio = track.pixelRatio) == null ? void 0 : _track$pixelRatio[0]) !== config.pixelRatio[0] || ((_track$pixelRatio2 = track.pixelRatio) == null ? void 0 : _track$pixelRatio2[1]) !== config.pixelRatio[1]) {
14746
+ track.vps = [unit.data];
14747
+ break;
14748
+
14749
+ // SPS
14750
+ case 33:
14751
+ push = true;
14752
+ spsfound = true;
14753
+ if (typeof track.params === 'object') {
14754
+ if (track.vps !== undefined && track.vps[0] !== this.initVPS && track.sps !== undefined && !this.matchSPS(track.sps[0], unit.data)) {
14755
+ this.initVPS = track.vps[0];
14756
+ track.sps = track.pps = undefined;
14757
+ }
14758
+ if (!track.sps) {
14759
+ const config = this.readSPS(unit.data);
14385
14760
  track.width = config.width;
14386
14761
  track.height = config.height;
14387
14762
  track.pixelRatio = config.pixelRatio;
14388
- track.sps = [sps];
14389
14763
  track.duration = duration;
14390
- const codecarray = sps.subarray(1, 4);
14391
- let codecstring = 'avc1.';
14392
- for (let i = 0; i < 3; i++) {
14393
- let h = codecarray[i].toString(16);
14394
- if (h.length < 2) {
14395
- h = '0' + h;
14396
- }
14397
- codecstring += h;
14764
+ track.codec = config.codecString;
14765
+ track.sps = [];
14766
+ for (const prop in config.params) {
14767
+ track.params[prop] = config.params[prop];
14398
14768
  }
14399
- track.codec = codecstring;
14400
14769
  }
14401
- break;
14770
+ if (track.vps !== undefined && track.vps[0] === this.initVPS) {
14771
+ track.sps.push(unit.data);
14772
+ }
14773
+ }
14774
+ if (!VideoSample) {
14775
+ VideoSample = this.VideoSample = this.createVideoSample(true, pes.pts, pes.dts, '');
14402
14776
  }
14777
+ VideoSample.key = true;
14778
+ break;
14779
+
14403
14780
  // PPS
14404
- case 8:
14781
+ case 34:
14405
14782
  push = true;
14406
- track.pps = [unit.data];
14783
+ if (typeof track.params === 'object') {
14784
+ if (!track.pps) {
14785
+ track.pps = [];
14786
+ const config = this.readPPS(unit.data);
14787
+ for (const prop in config) {
14788
+ track.params[prop] = config[prop];
14789
+ }
14790
+ }
14791
+ if (this.initVPS !== null || track.pps.length === 0) {
14792
+ track.pps.push(unit.data);
14793
+ }
14794
+ }
14407
14795
  break;
14408
- // AUD
14409
- case 9:
14796
+
14797
+ // ACCESS UNIT DELIMITER
14798
+ case 35:
14410
14799
  push = true;
14411
14800
  track.audFound = true;
14412
14801
  if (VideoSample) {
@@ -14414,14 +14803,10 @@ class AvcVideoParser extends BaseVideoParser {
14414
14803
  }
14415
14804
  VideoSample = this.VideoSample = this.createVideoSample(false, pes.pts, pes.dts, '');
14416
14805
  break;
14417
- // Filler Data
14418
- case 12:
14419
- push = true;
14420
- break;
14421
14806
  default:
14422
14807
  push = false;
14423
14808
  if (VideoSample) {
14424
- VideoSample.debug += 'unknown NAL ' + unit.type + ' ';
14809
+ VideoSample.debug += 'unknown or irrelevant NAL ' + unit.type + ' ';
14425
14810
  }
14426
14811
  break;
14427
14812
  }
@@ -14436,109 +14821,423 @@ class AvcVideoParser extends BaseVideoParser {
14436
14821
  this.VideoSample = null;
14437
14822
  }
14438
14823
  }
14439
- parseAVCNALu(track, array) {
14440
- const len = array.byteLength;
14441
- let state = track.naluState || 0;
14442
- const lastState = state;
14443
- const units = [];
14444
- let i = 0;
14445
- let value;
14446
- let overflow;
14447
- let unitType;
14448
- let lastUnitStart = -1;
14449
- let lastUnitType = 0;
14450
- // logger.log('PES:' + Hex.hexDump(array));
14451
-
14452
- if (state === -1) {
14453
- // special use case where we found 3 or 4-byte start codes exactly at the end of previous PES packet
14454
- lastUnitStart = 0;
14455
- // NALu type is value read from offset 0
14456
- lastUnitType = array[0] & 0x1f;
14457
- state = 0;
14458
- i = 1;
14459
- }
14460
- while (i < len) {
14461
- value = array[i++];
14462
- // optimization. state 0 and 1 are the predominant case. let's handle them outside of the switch/case
14463
- if (!state) {
14464
- state = value ? 0 : 1;
14465
- continue;
14466
- }
14467
- if (state === 1) {
14468
- state = value ? 0 : 2;
14469
- continue;
14824
+ getNALuType(data, offset) {
14825
+ return (data[offset] & 0x7e) >>> 1;
14826
+ }
14827
+ ebsp2rbsp(arr) {
14828
+ const dst = new Uint8Array(arr.byteLength);
14829
+ let dstIdx = 0;
14830
+ for (let i = 0; i < arr.byteLength; i++) {
14831
+ if (i >= 2) {
14832
+ // Unescape: Skip 0x03 after 00 00
14833
+ if (arr[i] === 0x03 && arr[i - 1] === 0x00 && arr[i - 2] === 0x00) {
14834
+ continue;
14835
+ }
14470
14836
  }
14471
- // here we have state either equal to 2 or 3
14472
- if (!value) {
14473
- state = 3;
14474
- } else if (value === 1) {
14475
- overflow = i - state - 1;
14476
- if (lastUnitStart >= 0) {
14477
- const unit = {
14478
- data: array.subarray(lastUnitStart, overflow),
14479
- type: lastUnitType
14480
- };
14481
- // logger.log('pushing NALU, type/size:' + unit.type + '/' + unit.data.byteLength);
14482
- units.push(unit);
14483
- } else {
14484
- // lastUnitStart is undefined => this is the first start code found in this PES packet
14485
- // first check if start code delimiter is overlapping between 2 PES packets,
14486
- // ie it started in last packet (lastState not zero)
14487
- // and ended at the beginning of this PES packet (i <= 4 - lastState)
14488
- const lastUnit = this.getLastNalUnit(track.samples);
14489
- if (lastUnit) {
14490
- if (lastState && i <= 4 - lastState) {
14491
- // start delimiter overlapping between PES packets
14492
- // strip start delimiter bytes from the end of last NAL unit
14493
- // check if lastUnit had a state different from zero
14494
- if (lastUnit.state) {
14495
- // strip last bytes
14496
- lastUnit.data = lastUnit.data.subarray(0, lastUnit.data.byteLength - lastState);
14497
- }
14498
- }
14499
- // If NAL units are not starting right at the beginning of the PES packet, push preceding data into previous NAL unit.
14837
+ dst[dstIdx] = arr[i];
14838
+ dstIdx++;
14839
+ }
14840
+ return new Uint8Array(dst.buffer, 0, dstIdx);
14841
+ }
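ebsp2rbsp undoes the emulation-prevention escaping defined by H.265 (and H.264): any 0x03 that follows two zero bytes in the raw NAL payload was inserted by the encoder and must be dropped before Exp-Golomb parsing. A standalone copy of the same loop with a worked example:

function ebsp2rbsp(arr: Uint8Array): Uint8Array {
  const dst = new Uint8Array(arr.byteLength);
  let dstIdx = 0;
  for (let i = 0; i < arr.byteLength; i++) {
    // skip the emulation-prevention byte 0x03 inserted after 00 00
    if (i >= 2 && arr[i] === 0x03 && arr[i - 1] === 0x00 && arr[i - 2] === 0x00) {
      continue;
    }
    dst[dstIdx++] = arr[i];
  }
  return dst.subarray(0, dstIdx);
}

// ebsp2rbsp(Uint8Array.of(0x40, 0x01, 0x00, 0x00, 0x03, 0x00, 0x01))
// -> Uint8Array [0x40, 0x01, 0x00, 0x00, 0x00, 0x01]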
14842
+ readVPS(vps) {
14843
+ const eg = new ExpGolomb(vps);
14844
+ // remove header
14845
+ eg.readUByte();
14846
+ eg.readUByte();
14847
+ eg.readBits(4); // video_parameter_set_id
14848
+ eg.skipBits(2);
14849
+ eg.readBits(6); // max_layers_minus1
14850
+ const max_sub_layers_minus1 = eg.readBits(3);
14851
+ const temporal_id_nesting_flag = eg.readBoolean();
14852
+ // ...vui fps can be here, but empty fps value is not critical for metadata
14500
14853
 
14501
- if (overflow > 0) {
14502
- // logger.log('first NALU found with overflow:' + overflow);
14503
- lastUnit.data = appendUint8Array(lastUnit.data, array.subarray(0, overflow));
14504
- lastUnit.state = 0;
14854
+ return {
14855
+ numTemporalLayers: max_sub_layers_minus1 + 1,
14856
+ temporalIdNested: temporal_id_nesting_flag
14857
+ };
14858
+ }
14859
+ readSPS(sps) {
14860
+ const eg = new ExpGolomb(this.ebsp2rbsp(sps));
14861
+ eg.readUByte();
14862
+ eg.readUByte();
14863
+ eg.readBits(4); //video_parameter_set_id
14864
+ const max_sub_layers_minus1 = eg.readBits(3);
14865
+ eg.readBoolean(); // temporal_id_nesting_flag
14866
+
14867
+ // profile_tier_level
14868
+ const general_profile_space = eg.readBits(2);
14869
+ const general_tier_flag = eg.readBoolean();
14870
+ const general_profile_idc = eg.readBits(5);
14871
+ const general_profile_compatibility_flags_1 = eg.readUByte();
14872
+ const general_profile_compatibility_flags_2 = eg.readUByte();
14873
+ const general_profile_compatibility_flags_3 = eg.readUByte();
14874
+ const general_profile_compatibility_flags_4 = eg.readUByte();
14875
+ const general_constraint_indicator_flags_1 = eg.readUByte();
14876
+ const general_constraint_indicator_flags_2 = eg.readUByte();
14877
+ const general_constraint_indicator_flags_3 = eg.readUByte();
14878
+ const general_constraint_indicator_flags_4 = eg.readUByte();
14879
+ const general_constraint_indicator_flags_5 = eg.readUByte();
14880
+ const general_constraint_indicator_flags_6 = eg.readUByte();
14881
+ const general_level_idc = eg.readUByte();
14882
+ const sub_layer_profile_present_flags = [];
14883
+ const sub_layer_level_present_flags = [];
14884
+ for (let i = 0; i < max_sub_layers_minus1; i++) {
14885
+ sub_layer_profile_present_flags.push(eg.readBoolean());
14886
+ sub_layer_level_present_flags.push(eg.readBoolean());
14887
+ }
14888
+ if (max_sub_layers_minus1 > 0) {
14889
+ for (let i = max_sub_layers_minus1; i < 8; i++) {
14890
+ eg.readBits(2);
14891
+ }
14892
+ }
14893
+ for (let i = 0; i < max_sub_layers_minus1; i++) {
14894
+ if (sub_layer_profile_present_flags[i]) {
14895
+ eg.readUByte(); // sub_layer_profile_space, sub_layer_tier_flag, sub_layer_profile_idc
14896
+ eg.readUByte();
14897
+ eg.readUByte();
14898
+ eg.readUByte();
14899
+ eg.readUByte(); // sub_layer_profile_compatibility_flag
14900
+ eg.readUByte();
14901
+ eg.readUByte();
14902
+ eg.readUByte();
14903
+ eg.readUByte();
14904
+ eg.readUByte();
14905
+ eg.readUByte();
14906
+ }
14907
+ if (sub_layer_level_present_flags[i]) {
14908
+ eg.readUByte();
14909
+ }
14910
+ }
14911
+ eg.readUEG(); // seq_parameter_set_id
14912
+ const chroma_format_idc = eg.readUEG();
14913
+ if (chroma_format_idc == 3) {
14914
+ eg.skipBits(1); //separate_colour_plane_flag
14915
+ }
14916
+ const pic_width_in_luma_samples = eg.readUEG();
14917
+ const pic_height_in_luma_samples = eg.readUEG();
14918
+ const conformance_window_flag = eg.readBoolean();
14919
+ let pic_left_offset = 0,
14920
+ pic_right_offset = 0,
14921
+ pic_top_offset = 0,
14922
+ pic_bottom_offset = 0;
14923
+ if (conformance_window_flag) {
14924
+ pic_left_offset += eg.readUEG();
14925
+ pic_right_offset += eg.readUEG();
14926
+ pic_top_offset += eg.readUEG();
14927
+ pic_bottom_offset += eg.readUEG();
14928
+ }
14929
+ const bit_depth_luma_minus8 = eg.readUEG();
14930
+ const bit_depth_chroma_minus8 = eg.readUEG();
14931
+ const log2_max_pic_order_cnt_lsb_minus4 = eg.readUEG();
14932
+ const sub_layer_ordering_info_present_flag = eg.readBoolean();
14933
+ for (let i = sub_layer_ordering_info_present_flag ? 0 : max_sub_layers_minus1; i <= max_sub_layers_minus1; i++) {
14934
+ eg.skipUEG(); // max_dec_pic_buffering_minus1[i]
14935
+ eg.skipUEG(); // max_num_reorder_pics[i]
14936
+ eg.skipUEG(); // max_latency_increase_plus1[i]
14937
+ }
14938
+ eg.skipUEG(); // log2_min_luma_coding_block_size_minus3
14939
+ eg.skipUEG(); // log2_diff_max_min_luma_coding_block_size
14940
+ eg.skipUEG(); // log2_min_transform_block_size_minus2
14941
+ eg.skipUEG(); // log2_diff_max_min_transform_block_size
14942
+ eg.skipUEG(); // max_transform_hierarchy_depth_inter
14943
+ eg.skipUEG(); // max_transform_hierarchy_depth_intra
14944
+ const scaling_list_enabled_flag = eg.readBoolean();
14945
+ if (scaling_list_enabled_flag) {
14946
+ const sps_scaling_list_data_present_flag = eg.readBoolean();
14947
+ if (sps_scaling_list_data_present_flag) {
14948
+ for (let sizeId = 0; sizeId < 4; sizeId++) {
14949
+ for (let matrixId = 0; matrixId < (sizeId === 3 ? 2 : 6); matrixId++) {
14950
+ const scaling_list_pred_mode_flag = eg.readBoolean();
14951
+ if (!scaling_list_pred_mode_flag) {
14952
+ eg.readUEG(); // scaling_list_pred_matrix_id_delta
14953
+ } else {
14954
+ const coefNum = Math.min(64, 1 << 4 + (sizeId << 1));
14955
+ if (sizeId > 1) {
14956
+ eg.readEG();
14957
+ }
14958
+ for (let i = 0; i < coefNum; i++) {
14959
+ eg.readEG();
14960
+ }
14505
14961
  }
14506
14962
  }
14507
14963
  }
14508
- // check if we can read unit type
14509
- if (i < len) {
14510
- unitType = array[i] & 0x1f;
14511
- // logger.log('find NALU @ offset:' + i + ',type:' + unitType);
14512
- lastUnitStart = i;
14513
- lastUnitType = unitType;
14514
- state = 0;
14515
- } else {
14516
- // not enough byte to read unit type. let's read it on next PES parsing
14517
- state = -1;
14964
+ }
14965
+ }
14966
+ eg.readBoolean(); // amp_enabled_flag
14967
+ eg.readBoolean(); // sample_adaptive_offset_enabled_flag
14968
+ const pcm_enabled_flag = eg.readBoolean();
14969
+ if (pcm_enabled_flag) {
14970
+ eg.readUByte();
14971
+ eg.skipUEG();
14972
+ eg.skipUEG();
14973
+ eg.readBoolean();
14974
+ }
14975
+ const num_short_term_ref_pic_sets = eg.readUEG();
14976
+ let num_delta_pocs = 0;
14977
+ for (let i = 0; i < num_short_term_ref_pic_sets; i++) {
14978
+ let inter_ref_pic_set_prediction_flag = false;
14979
+ if (i !== 0) {
14980
+ inter_ref_pic_set_prediction_flag = eg.readBoolean();
14981
+ }
14982
+ if (inter_ref_pic_set_prediction_flag) {
14983
+ if (i === num_short_term_ref_pic_sets) {
14984
+ eg.readUEG();
14518
14985
  }
14986
+ eg.readBoolean();
14987
+ eg.readUEG();
14988
+ let next_num_delta_pocs = 0;
14989
+ for (let j = 0; j <= num_delta_pocs; j++) {
14990
+ const used_by_curr_pic_flag = eg.readBoolean();
14991
+ let use_delta_flag = false;
14992
+ if (!used_by_curr_pic_flag) {
14993
+ use_delta_flag = eg.readBoolean();
14994
+ }
14995
+ if (used_by_curr_pic_flag || use_delta_flag) {
14996
+ next_num_delta_pocs++;
14997
+ }
14998
+ }
14999
+ num_delta_pocs = next_num_delta_pocs;
14519
15000
  } else {
14520
- state = 0;
15001
+ const num_negative_pics = eg.readUEG();
15002
+ const num_positive_pics = eg.readUEG();
15003
+ num_delta_pocs = num_negative_pics + num_positive_pics;
15004
+ for (let j = 0; j < num_negative_pics; j++) {
15005
+ eg.readUEG();
15006
+ eg.readBoolean();
15007
+ }
15008
+ for (let j = 0; j < num_positive_pics; j++) {
15009
+ eg.readUEG();
15010
+ eg.readBoolean();
15011
+ }
14521
15012
  }
14522
15013
  }
14523
- if (lastUnitStart >= 0 && state >= 0) {
14524
- const unit = {
14525
- data: array.subarray(lastUnitStart, len),
14526
- type: lastUnitType,
14527
- state: state
14528
- };
14529
- units.push(unit);
14530
- // logger.log('pushing NALU, type/size/state:' + unit.type + '/' + unit.data.byteLength + '/' + state);
14531
- }
14532
- // no NALu found
14533
- if (units.length === 0) {
14534
- // append pes.data to previous NAL unit
14535
- const lastUnit = this.getLastNalUnit(track.samples);
14536
- if (lastUnit) {
14537
- lastUnit.data = appendUint8Array(lastUnit.data, array);
15014
+ const long_term_ref_pics_present_flag = eg.readBoolean();
15015
+ if (long_term_ref_pics_present_flag) {
15016
+ const num_long_term_ref_pics_sps = eg.readUEG();
15017
+ for (let i = 0; i < num_long_term_ref_pics_sps; i++) {
15018
+ for (let j = 0; j < log2_max_pic_order_cnt_lsb_minus4 + 4; j++) {
15019
+ eg.readBits(1);
15020
+ }
15021
+ eg.readBits(1);
15022
+ }
15023
+ }
15024
+ let min_spatial_segmentation_idc = 0;
15025
+ let sar_width = 1,
15026
+ sar_height = 1;
15027
+ let fps_fixed = true,
15028
+ fps_den = 1,
15029
+ fps_num = 0;
15030
+ eg.readBoolean(); // sps_temporal_mvp_enabled_flag
15031
+ eg.readBoolean(); // strong_intra_smoothing_enabled_flag
15032
+ let default_display_window_flag = false;
15033
+ const vui_parameters_present_flag = eg.readBoolean();
15034
+ if (vui_parameters_present_flag) {
15035
+ const aspect_ratio_info_present_flag = eg.readBoolean();
15036
+ if (aspect_ratio_info_present_flag) {
15037
+ const aspect_ratio_idc = eg.readUByte();
15038
+ const sar_width_table = [1, 12, 10, 16, 40, 24, 20, 32, 80, 18, 15, 64, 160, 4, 3, 2];
15039
+ const sar_height_table = [1, 11, 11, 11, 33, 11, 11, 11, 33, 11, 11, 33, 99, 3, 2, 1];
15040
+ if (aspect_ratio_idc > 0 && aspect_ratio_idc < 16) {
15041
+ sar_width = sar_width_table[aspect_ratio_idc - 1];
15042
+ sar_height = sar_height_table[aspect_ratio_idc - 1];
15043
+ } else if (aspect_ratio_idc === 255) {
15044
+ sar_width = eg.readBits(16);
15045
+ sar_height = eg.readBits(16);
15046
+ }
15047
+ }
15048
+ const overscan_info_present_flag = eg.readBoolean();
15049
+ if (overscan_info_present_flag) {
15050
+ eg.readBoolean();
15051
+ }
15052
+ const video_signal_type_present_flag = eg.readBoolean();
15053
+ if (video_signal_type_present_flag) {
15054
+ eg.readBits(3);
15055
+ eg.readBoolean();
15056
+ const colour_description_present_flag = eg.readBoolean();
15057
+ if (colour_description_present_flag) {
15058
+ eg.readUByte();
15059
+ eg.readUByte();
15060
+ eg.readUByte();
15061
+ }
14538
15062
  }
15063
+ const chroma_loc_info_present_flag = eg.readBoolean();
15064
+ if (chroma_loc_info_present_flag) {
15065
+ eg.readUEG();
15066
+ eg.readUEG();
15067
+ }
15068
+ eg.readBoolean(); // neutral_chroma_indication_flag
15069
+ eg.readBoolean(); // field_seq_flag
15070
+ eg.readBoolean(); // frame_field_info_present_flag
15071
+ default_display_window_flag = eg.readBoolean();
15072
+ if (default_display_window_flag) {
15073
+ pic_left_offset += eg.readUEG();
15074
+ pic_right_offset += eg.readUEG();
15075
+ pic_top_offset += eg.readUEG();
15076
+ pic_bottom_offset += eg.readUEG();
15077
+ }
15078
+ const vui_timing_info_present_flag = eg.readBoolean();
15079
+ if (vui_timing_info_present_flag) {
15080
+ fps_den = eg.readBits(32);
15081
+ fps_num = eg.readBits(32);
15082
+ const vui_poc_proportional_to_timing_flag = eg.readBoolean();
15083
+ if (vui_poc_proportional_to_timing_flag) {
15084
+ eg.readUEG();
15085
+ }
15086
+ const vui_hrd_parameters_present_flag = eg.readBoolean();
15087
+ if (vui_hrd_parameters_present_flag) {
15088
+ //const commonInfPresentFlag = true;
15089
+ //if (commonInfPresentFlag) {
15090
+ const nal_hrd_parameters_present_flag = eg.readBoolean();
15091
+ const vcl_hrd_parameters_present_flag = eg.readBoolean();
15092
+ let sub_pic_hrd_params_present_flag = false;
15093
+ if (nal_hrd_parameters_present_flag || vcl_hrd_parameters_present_flag) {
15094
+ sub_pic_hrd_params_present_flag = eg.readBoolean();
15095
+ if (sub_pic_hrd_params_present_flag) {
15096
+ eg.readUByte();
15097
+ eg.readBits(5);
15098
+ eg.readBoolean();
15099
+ eg.readBits(5);
15100
+ }
15101
+ eg.readBits(4); // bit_rate_scale
15102
+ eg.readBits(4); // cpb_size_scale
15103
+ if (sub_pic_hrd_params_present_flag) {
15104
+ eg.readBits(4);
15105
+ }
15106
+ eg.readBits(5);
15107
+ eg.readBits(5);
15108
+ eg.readBits(5);
15109
+ }
15110
+ //}
15111
+ for (let i = 0; i <= max_sub_layers_minus1; i++) {
15112
+ fps_fixed = eg.readBoolean(); // fixed_pic_rate_general_flag
15113
+ const fixed_pic_rate_within_cvs_flag = fps_fixed || eg.readBoolean();
15114
+ let low_delay_hrd_flag = false;
15115
+ if (fixed_pic_rate_within_cvs_flag) {
15116
+ eg.readEG();
15117
+ } else {
15118
+ low_delay_hrd_flag = eg.readBoolean();
15119
+ }
15120
+ const cpb_cnt = low_delay_hrd_flag ? 1 : eg.readUEG() + 1;
15121
+ if (nal_hrd_parameters_present_flag) {
15122
+ for (let j = 0; j < cpb_cnt; j++) {
15123
+ eg.readUEG();
15124
+ eg.readUEG();
15125
+ if (sub_pic_hrd_params_present_flag) {
15126
+ eg.readUEG();
15127
+ eg.readUEG();
15128
+ }
15129
+ eg.skipBits(1);
15130
+ }
15131
+ }
15132
+ if (vcl_hrd_parameters_present_flag) {
15133
+ for (let j = 0; j < cpb_cnt; j++) {
15134
+ eg.readUEG();
15135
+ eg.readUEG();
15136
+ if (sub_pic_hrd_params_present_flag) {
15137
+ eg.readUEG();
15138
+ eg.readUEG();
15139
+ }
15140
+ eg.skipBits(1);
15141
+ }
15142
+ }
15143
+ }
15144
+ }
15145
+ }
15146
+ const bitstream_restriction_flag = eg.readBoolean();
15147
+ if (bitstream_restriction_flag) {
15148
+ eg.readBoolean(); // tiles_fixed_structure_flag
15149
+ eg.readBoolean(); // motion_vectors_over_pic_boundaries_flag
15150
+ eg.readBoolean(); // restricted_ref_pic_lists_flag
15151
+ min_spatial_segmentation_idc = eg.readUEG();
15152
+ }
15153
+ }
15154
+ let width = pic_width_in_luma_samples,
15155
+ height = pic_height_in_luma_samples;
15156
+ if (conformance_window_flag || default_display_window_flag) {
15157
+ let chroma_scale_w = 1,
15158
+ chroma_scale_h = 1;
15159
+ if (chroma_format_idc === 1) {
15160
+ // YUV 420
15161
+ chroma_scale_w = chroma_scale_h = 2;
15162
+ } else if (chroma_format_idc == 2) {
15163
+ // YUV 422
15164
+ chroma_scale_w = 2;
15165
+ }
15166
+ width = pic_width_in_luma_samples - chroma_scale_w * pic_right_offset - chroma_scale_w * pic_left_offset;
15167
+ height = pic_height_in_luma_samples - chroma_scale_h * pic_bottom_offset - chroma_scale_h * pic_top_offset;
15168
+ }
15169
+ const profile_space_string = general_profile_space ? ['A', 'B', 'C'][general_profile_space] : '';
15170
+ const profile_compatibility_buf = general_profile_compatibility_flags_1 << 24 | general_profile_compatibility_flags_2 << 16 | general_profile_compatibility_flags_3 << 8 | general_profile_compatibility_flags_4;
15171
+ let profile_compatibility_rev = 0;
15172
+ for (let i = 0; i < 32; i++) {
15173
+ profile_compatibility_rev = (profile_compatibility_rev | (profile_compatibility_buf >> i & 1) << 31 - i) >>> 0; // reverse bit position (and cast as UInt32)
15174
+ }
15175
+ let profile_compatibility_flags_string = profile_compatibility_rev.toString(16);
15176
+ if (general_profile_idc === 1 && profile_compatibility_flags_string === '2') {
15177
+ profile_compatibility_flags_string = '6';
15178
+ }
15179
+ const tier_flag_string = general_tier_flag ? 'H' : 'L';
15180
+ return {
15181
+ codecString: `hvc1.${profile_space_string}${general_profile_idc}.${profile_compatibility_flags_string}.${tier_flag_string}${general_level_idc}.B0`,
15182
+ params: {
15183
+ general_tier_flag,
15184
+ general_profile_idc,
15185
+ general_profile_space,
15186
+ general_profile_compatibility_flags: [general_profile_compatibility_flags_1, general_profile_compatibility_flags_2, general_profile_compatibility_flags_3, general_profile_compatibility_flags_4],
15187
+ general_constraint_indicator_flags: [general_constraint_indicator_flags_1, general_constraint_indicator_flags_2, general_constraint_indicator_flags_3, general_constraint_indicator_flags_4, general_constraint_indicator_flags_5, general_constraint_indicator_flags_6],
15188
+ general_level_idc,
15189
+ bit_depth: bit_depth_luma_minus8 + 8,
15190
+ bit_depth_luma_minus8,
15191
+ bit_depth_chroma_minus8,
15192
+ min_spatial_segmentation_idc,
15193
+ chroma_format_idc: chroma_format_idc,
15194
+ frame_rate: {
15195
+ fixed: fps_fixed,
15196
+ fps: fps_num / fps_den
15197
+ }
15198
+ },
15199
+ width,
15200
+ height,
15201
+ pixelRatio: [sar_width, sar_height]
15202
+ };
15203
+ }
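The returned codecString follows the ISO/IEC 14496-15 / RFC 6381 convention for HEVC: profile space plus profile_idc, the 32 profile-compatibility bits printed in hex after bit reversal, tier ('L' or 'H') plus level_idc, and a constraint byte that this builder emits as a fixed 'B0' suffix. A small example with hypothetical values — Main profile, compatibility flag bytes 0x60 00 00 00, Main tier, level_idc 93 (level 3.1):

// Reverse the 32 profile-compatibility bits, as the loop above does.
function reverseBits32(v: number): number {
  let r = 0;
  for (let i = 0; i < 32; i++) {
    r = (r | (((v >>> i) & 1) << (31 - i))) >>> 0;
  }
  return r;
}

const compatibilityHex = reverseBits32(0x60000000).toString(16); // '6'
const codecString = `hvc1.1.${compatibilityHex}.L93.B0`;
// -> 'hvc1.1.6.L93.B0'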
15204
+ readPPS(pps) {
15205
+ const eg = new ExpGolomb(this.ebsp2rbsp(pps));
15206
+ eg.readUByte();
15207
+ eg.readUByte();
15208
+ eg.skipUEG(); // pic_parameter_set_id
15209
+ eg.skipUEG(); // seq_parameter_set_id
15210
+ eg.skipBits(2); // dependent_slice_segments_enabled_flag, output_flag_present_flag
15211
+ eg.skipBits(3); // num_extra_slice_header_bits
15212
+ eg.skipBits(2); // sign_data_hiding_enabled_flag, cabac_init_present_flag
15213
+ eg.skipUEG();
15214
+ eg.skipUEG();
15215
+ eg.skipEG(); // init_qp_minus26
15216
+ eg.skipBits(2); // constrained_intra_pred_flag, transform_skip_enabled_flag
15217
+ const cu_qp_delta_enabled_flag = eg.readBoolean();
15218
+ if (cu_qp_delta_enabled_flag) {
15219
+ eg.skipUEG();
15220
+ }
15221
+ eg.skipEG(); // cb_qp_offset
15222
+ eg.skipEG(); // cr_qp_offset
15223
+ eg.skipBits(4); // pps_slice_chroma_qp_offsets_present_flag, weighted_pred_flag, weighted_bipred_flag, transquant_bypass_enabled_flag
15224
+ const tiles_enabled_flag = eg.readBoolean();
15225
+ const entropy_coding_sync_enabled_flag = eg.readBoolean();
15226
+ let parallelismType = 1; // slice-based parallel decoding
15227
+ if (entropy_coding_sync_enabled_flag && tiles_enabled_flag) {
15228
+ parallelismType = 0; // mixed-type parallel decoding
15229
+ } else if (entropy_coding_sync_enabled_flag) {
15230
+ parallelismType = 3; // wavefront-based parallel decoding
15231
+ } else if (tiles_enabled_flag) {
15232
+ parallelismType = 2; // tile-based parallel decoding
14539
15233
  }
14540
- track.naluState = state;
14541
- return units;
15234
+ return {
15235
+ parallelismType
15236
+ };
15237
+ }
15238
+ matchSPS(sps1, sps2) {
15239
+ // compare without headers and VPS related params
15240
+ return String.fromCharCode.apply(null, sps1).substr(3) === String.fromCharCode.apply(null, sps2).substr(3);
14542
15241
  }
14543
15242
  }
14544
15243
 
@@ -14556,7 +15255,7 @@ class SampleAesDecrypter {
14556
15255
  });
14557
15256
  }
14558
15257
  decryptBuffer(encryptedData) {
14559
- return this.decrypter.decrypt(encryptedData, this.keyData.key.buffer, this.keyData.iv.buffer);
15258
+ return this.decrypter.decrypt(encryptedData, this.keyData.key.buffer, this.keyData.iv.buffer, DecrypterAesMode.cbc);
14560
15259
  }
14561
15260
 
14562
15261
  // AAC - encrypt all full 16 bytes blocks starting from offset 16
@@ -14670,7 +15369,7 @@ class TSDemuxer {
14670
15369
  this.observer = observer;
14671
15370
  this.config = config;
14672
15371
  this.typeSupported = typeSupported;
14673
- this.videoParser = new AvcVideoParser();
15372
+ this.videoParser = null;
14674
15373
  }
14675
15374
  static probe(data) {
14676
15375
  const syncOffset = TSDemuxer.syncOffset(data);
@@ -14835,7 +15534,19 @@ class TSDemuxer {
14835
15534
  case videoPid:
14836
15535
  if (stt) {
14837
15536
  if (videoData && (pes = parsePES(videoData))) {
14838
- this.videoParser.parseAVCPES(videoTrack, textTrack, pes, false, this._duration);
15537
+ if (this.videoParser === null) {
15538
+ switch (videoTrack.segmentCodec) {
15539
+ case 'avc':
15540
+ this.videoParser = new AvcVideoParser();
15541
+ break;
15542
+ case 'hevc':
15543
+ this.videoParser = new HevcVideoParser();
15544
+ break;
15545
+ }
15546
+ }
15547
+ if (this.videoParser !== null) {
15548
+ this.videoParser.parsePES(videoTrack, textTrack, pes, false, this._duration);
15549
+ }
14839
15550
  }
14840
15551
  videoData = {
14841
15552
  data: [],
@@ -14997,8 +15708,20 @@ class TSDemuxer {
14997
15708
  // try to parse last PES packets
14998
15709
  let pes;
14999
15710
  if (videoData && (pes = parsePES(videoData))) {
15000
- this.videoParser.parseAVCPES(videoTrack, textTrack, pes, true, this._duration);
15001
- videoTrack.pesData = null;
15711
+ if (this.videoParser === null) {
15712
+ switch (videoTrack.segmentCodec) {
15713
+ case 'avc':
15714
+ this.videoParser = new AvcVideoParser();
15715
+ break;
15716
+ case 'hevc':
15717
+ this.videoParser = new HevcVideoParser();
15718
+ break;
15719
+ }
15720
+ }
15721
+ if (this.videoParser !== null) {
15722
+ this.videoParser.parsePES(videoTrack, textTrack, pes, true, this._duration);
15723
+ videoTrack.pesData = null;
15724
+ }
15002
15725
  } else {
15003
15726
  // either avcData null or PES truncated, keep it for next frag parsing
15004
15727
  videoTrack.pesData = videoData;
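Both the streaming path above and this flush path now build the video parser lazily from videoTrack.segmentCodec instead of always constructing an AVC parser up front, so only the parser matching the codec selected from the PMT is ever instantiated. A minimal sketch of the shared selection logic (the factory function is illustrative; the demuxer inlines this switch in both places):

function createVideoParser(segmentCodec: string): AvcVideoParser | HevcVideoParser | null {
  switch (segmentCodec) {
    case 'avc':
      return new AvcVideoParser();
    case 'hevc':
      return new HevcVideoParser();
    default:
      return null;
  }
}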
@@ -15301,7 +16024,12 @@ function parsePMT(data, offset, typeSupported, isSampleAes) {
15301
16024
  logger.warn('Unsupported EC-3 in M2TS found');
15302
16025
  break;
15303
16026
  case 0x24:
15304
- logger.warn('Unsupported HEVC in M2TS found');
16027
+ // ITU-T Rec. H.265 and ISO/IEC 23008-2 (HEVC)
16028
+ if (result.videoPid === -1) {
16029
+ result.videoPid = pid;
16030
+ result.segmentVideoCodec = 'hevc';
16031
+ logger.log('HEVC in M2TS found');
16032
+ }
15305
16033
  break;
15306
16034
  }
15307
16035
  // move to the next table entry
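Stream type 0x24 in the PMT is no longer rejected: it now assigns the video PID and marks the segment video codec as 'hevc', complementing the existing AVC handling. A compact sketch of the resulting mapping (stream_type values per ISO/IEC 13818-1; the helper name is illustrative, not part of the library):

function videoCodecForStreamType(streamType: number): 'avc' | 'hevc' | undefined {
  switch (streamType) {
    case 0x1b:
      return 'avc';  // AVC / H.264
    case 0x24:
      return 'hevc'; // HEVC / H.265, newly mapped to a video PID here
    default:
      return undefined;
  }
}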
@@ -15524,6 +16252,8 @@ class MP4 {
15524
16252
  avc1: [],
15525
16253
  // codingname
15526
16254
  avcC: [],
16255
+ hvc1: [],
16256
+ hvcC: [],
15527
16257
  btrt: [],
15528
16258
  dinf: [],
15529
16259
  dref: [],
@@ -15948,8 +16678,10 @@ class MP4 {
15948
16678
  return MP4.box(MP4.types.stsd, MP4.STSD, MP4.ac3(track));
15949
16679
  }
15950
16680
  return MP4.box(MP4.types.stsd, MP4.STSD, MP4.mp4a(track));
15951
- } else {
16681
+ } else if (track.segmentCodec === 'avc') {
15952
16682
  return MP4.box(MP4.types.stsd, MP4.STSD, MP4.avc1(track));
16683
+ } else {
16684
+ return MP4.box(MP4.types.stsd, MP4.STSD, MP4.hvc1(track));
15953
16685
  }
15954
16686
  }
15955
16687
  static tkhd(track) {
@@ -16087,6 +16819,84 @@ class MP4 {
16087
16819
  const result = appendUint8Array(MP4.FTYP, movie);
16088
16820
  return result;
16089
16821
  }
16822
+ static hvc1(track) {
16823
+ const ps = track.params;
16824
+ const units = [track.vps, track.sps, track.pps];
16825
+ const NALuLengthSize = 4;
16826
+ const config = new Uint8Array([0x01, ps.general_profile_space << 6 | (ps.general_tier_flag ? 32 : 0) | ps.general_profile_idc, ps.general_profile_compatibility_flags[0], ps.general_profile_compatibility_flags[1], ps.general_profile_compatibility_flags[2], ps.general_profile_compatibility_flags[3], ps.general_constraint_indicator_flags[0], ps.general_constraint_indicator_flags[1], ps.general_constraint_indicator_flags[2], ps.general_constraint_indicator_flags[3], ps.general_constraint_indicator_flags[4], ps.general_constraint_indicator_flags[5], ps.general_level_idc, 240 | ps.min_spatial_segmentation_idc >> 8, 255 & ps.min_spatial_segmentation_idc, 252 | ps.parallelismType, 252 | ps.chroma_format_idc, 248 | ps.bit_depth_luma_minus8, 248 | ps.bit_depth_chroma_minus8, 0x00, parseInt(ps.frame_rate.fps), NALuLengthSize - 1 | ps.temporal_id_nested << 2 | ps.num_temporal_layers << 3 | (ps.frame_rate.fixed ? 64 : 0), units.length]);
16827
+
16828
+ // compute hvcC size in bytes
16829
+ let length = config.length;
16830
+ for (let i = 0; i < units.length; i += 1) {
16831
+ length += 3;
16832
+ for (let j = 0; j < units[i].length; j += 1) {
16833
+ length += 2 + units[i][j].length;
16834
+ }
16835
+ }
16836
+ const hvcC = new Uint8Array(length);
16837
+ hvcC.set(config, 0);
16838
+ length = config.length;
16839
+ // append parameter set units: one vps, one or more sps and pps
16840
+ const iMax = units.length - 1;
16841
+ for (let i = 0; i < units.length; i += 1) {
16842
+ hvcC.set(new Uint8Array([32 + i | (i === iMax ? 128 : 0), 0x00, units[i].length]), length);
16843
+ length += 3;
16844
+ for (let j = 0; j < units[i].length; j += 1) {
16845
+ hvcC.set(new Uint8Array([units[i][j].length >> 8, units[i][j].length & 255]), length);
16846
+ length += 2;
16847
+ hvcC.set(units[i][j], length);
16848
+ length += units[i][j].length;
16849
+ }
16850
+ }
16851
+ const hvcc = MP4.box(MP4.types.hvcC, hvcC);
16852
+ const width = track.width;
16853
+ const height = track.height;
16854
+ const hSpacing = track.pixelRatio[0];
16855
+ const vSpacing = track.pixelRatio[1];
16856
+ return MP4.box(MP4.types.hvc1, new Uint8Array([0x00, 0x00, 0x00,
16857
+ // reserved
16858
+ 0x00, 0x00, 0x00,
16859
+ // reserved
16860
+ 0x00, 0x01,
16861
+ // data_reference_index
16862
+ 0x00, 0x00,
16863
+ // pre_defined
16864
+ 0x00, 0x00,
16865
+ // reserved
16866
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
16867
+ // pre_defined
16868
+ width >> 8 & 0xff, width & 0xff,
16869
+ // width
16870
+ height >> 8 & 0xff, height & 0xff,
16871
+ // height
16872
+ 0x00, 0x48, 0x00, 0x00,
16873
+ // horizresolution
16874
+ 0x00, 0x48, 0x00, 0x00,
16875
+ // vertresolution
16876
+ 0x00, 0x00, 0x00, 0x00,
16877
+ // reserved
16878
+ 0x00, 0x01,
16879
+ // frame_count
16880
+ 0x12, 0x64, 0x61, 0x69, 0x6c,
16881
+ // dailymotion/hls.js
16882
+ 0x79, 0x6d, 0x6f, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x68, 0x6c, 0x73, 0x2e, 0x6a, 0x73, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
16883
+ // compressorname
16884
+ 0x00, 0x18,
16885
+ // depth = 24
16886
+ 0x11, 0x11]),
16887
+ // pre_defined = -1
16888
+ hvcc, MP4.box(MP4.types.btrt, new Uint8Array([0x00, 0x1c, 0x9c, 0x80,
16889
+ // bufferSizeDB
16890
+ 0x00, 0x2d, 0xc6, 0xc0,
16891
+ // maxBitrate
16892
+ 0x00, 0x2d, 0xc6, 0xc0])),
16893
+ // avgBitrate
16894
+ MP4.box(MP4.types.pasp, new Uint8Array([hSpacing >> 24,
16895
+ // hSpacing
16896
+ hSpacing >> 16 & 0xff, hSpacing >> 8 & 0xff, hSpacing & 0xff, vSpacing >> 24,
16897
+ // vSpacing
16898
+ vSpacing >> 16 & 0xff, vSpacing >> 8 & 0xff, vSpacing & 0xff])));
16899
+ }
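The hvcC payload mirrors the HEVCDecoderConfigurationRecord of ISO/IEC 14496-15: the 23-byte fixed header built in `config` above, followed by one array per parameter-set type (VPS, SPS, PPS), with each NAL unit prefixed by a two-byte length. A worked example of the sizing loop with hypothetical parameter-set lengths:

// Hypothetical sizes: one 24-byte VPS, one 40-byte SPS, one 7-byte PPS.
const units = [[new Uint8Array(24)], [new Uint8Array(40)], [new Uint8Array(7)]];
let length = 23; // fixed configuration-record header
for (const array of units) {
  length += 3; // completeness/NAL-type byte + 2-byte NAL unit count
  for (const nalu of array) {
    length += 2 + nalu.length; // 2-byte nalUnitLength + payload
  }
}
// length === 23 + 3 * 3 + (2 + 24) + (2 + 40) + (2 + 7) === 109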
16090
16900
  }
16091
16901
  MP4.types = void 0;
16092
16902
  MP4.HDLR_TYPES = void 0;
@@ -16462,9 +17272,9 @@ class MP4Remuxer {
16462
17272
  const foundOverlap = delta < -1;
16463
17273
  if (foundHole || foundOverlap) {
16464
17274
  if (foundHole) {
16465
- logger.warn(`AVC: ${toMsFromMpegTsClock(delta, true)} ms (${delta}dts) hole between fragments detected at ${timeOffset.toFixed(3)}`);
17275
+ logger.warn(`${(track.segmentCodec || '').toUpperCase()}: ${toMsFromMpegTsClock(delta, true)} ms (${delta}dts) hole between fragments detected at ${timeOffset.toFixed(3)}`);
16466
17276
  } else {
16467
- logger.warn(`AVC: ${toMsFromMpegTsClock(-delta, true)} ms (${delta}dts) overlapping between fragments detected at ${timeOffset.toFixed(3)}`);
17277
+ logger.warn(`${(track.segmentCodec || '').toUpperCase()}: ${toMsFromMpegTsClock(-delta, true)} ms (${delta}dts) overlapping between fragments detected at ${timeOffset.toFixed(3)}`);
16468
17278
  }
16469
17279
  if (!foundOverlap || nextAvcDts >= inputSamples[0].pts || chromeVersion) {
16470
17280
  firstDTS = nextAvcDts;
@@ -16473,12 +17283,24 @@ class MP4Remuxer {
16473
17283
  inputSamples[0].dts = firstDTS;
16474
17284
  inputSamples[0].pts = firstPTS;
16475
17285
  } else {
17286
+ let isPTSOrderRetained = true;
16476
17287
  for (let i = 0; i < inputSamples.length; i++) {
16477
- if (inputSamples[i].dts > firstPTS) {
17288
+ if (inputSamples[i].dts > firstPTS && isPTSOrderRetained) {
16478
17289
  break;
16479
17290
  }
17291
+ const prevPTS = inputSamples[i].pts;
16480
17292
  inputSamples[i].dts -= delta;
16481
17293
  inputSamples[i].pts -= delta;
17294
+
17295
+ // check to see if this sample's PTS order has changed
17296
+ // relative to the next one
17297
+ if (i < inputSamples.length - 1) {
17298
+ const nextSamplePTS = inputSamples[i + 1].pts;
17299
+ const currentSamplePTS = inputSamples[i].pts;
17300
+ const currentOrder = nextSamplePTS <= currentSamplePTS;
17301
+ const prevOrder = nextSamplePTS <= prevPTS;
17302
+ isPTSOrderRetained = currentOrder == prevOrder;
17303
+ }
16482
17304
  }
16483
17305
  }
16484
17306
  logger.log(`Video: Initial PTS/DTS adjusted: ${toMsFromMpegTsClock(firstPTS, true)}/${toMsFromMpegTsClock(firstDTS, true)}, delta: ${toMsFromMpegTsClock(delta, true)} ms`);
@@ -16626,7 +17448,7 @@ class MP4Remuxer {
16626
17448
  }
16627
17449
  }
16628
17450
  }
16629
- // next AVC sample DTS should be equal to last sample DTS + last sample duration (in PES timescale)
17451
+ // next AVC/HEVC sample DTS should be equal to last sample DTS + last sample duration (in PES timescale)
16630
17452
  mp4SampleDuration = stretchedLastFrame || !mp4SampleDuration ? averageSampleDuration : mp4SampleDuration;
16631
17453
  this.nextAvcDts = nextAvcDts = lastDTS + mp4SampleDuration;
16632
17454
  this.videoSampleDuration = mp4SampleDuration;
@@ -16759,7 +17581,7 @@ class MP4Remuxer {
16759
17581
  logger.warn(`[mp4-remuxer]: Injecting ${missing} audio frame @ ${(nextPts / inputTimeScale).toFixed(3)}s due to ${Math.round(1000 * delta / inputTimeScale)} ms gap.`);
16760
17582
  for (let j = 0; j < missing; j++) {
16761
17583
  const newStamp = Math.max(nextPts, 0);
16762
- let fillFrame = AAC.getSilentFrame(track.manifestCodec || track.codec, track.channelCount);
17584
+ let fillFrame = AAC.getSilentFrame(track.parsedCodec || track.manifestCodec || track.codec, track.channelCount);
16763
17585
  if (!fillFrame) {
16764
17586
  logger.log('[mp4-remuxer]: Unable to get silent frame for given audio codec; duplicating last frame instead.');
16765
17587
  fillFrame = sample.unit.subarray();
@@ -16887,7 +17709,7 @@ class MP4Remuxer {
16887
17709
  // samples count of this segment's duration
16888
17710
  const nbSamples = Math.ceil((endDTS - startDTS) / frameDuration);
16889
17711
  // silent frame
16890
- const silentFrame = AAC.getSilentFrame(track.manifestCodec || track.codec, track.channelCount);
17712
+ const silentFrame = AAC.getSilentFrame(track.parsedCodec || track.manifestCodec || track.codec, track.channelCount);
16891
17713
  logger.warn('[mp4-remuxer]: remux empty Audio');
16892
17714
  // Can't remux if we can't generate a silent frame...
16893
17715
  if (!silentFrame) {
@@ -17278,13 +18100,15 @@ class Transmuxer {
  initSegmentData
  } = transmuxConfig;
  const keyData = getEncryptionType(uintData, decryptdata);
- if (keyData && keyData.method === 'AES-128') {
+ if (keyData && isFullSegmentEncryption(keyData.method)) {
  const decrypter = this.getDecrypter();
+ const aesMode = getAesModeFromFullSegmentMethod(keyData.method);
+
  // Software decryption is synchronous; webCrypto is not
  if (decrypter.isSync()) {
  // Software decryption is progressive. Progressive decryption may not return a result on each call. Any cached
  // data is handled in the flush() call
- let decryptedData = decrypter.softwareDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer);
+ let decryptedData = decrypter.softwareDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer, aesMode);
  // For Low-Latency HLS Parts, decrypt in place, since part parsing is expected on push progress
  const loadingParts = chunkMeta.part > -1;
  if (loadingParts) {
@@ -17296,7 +18120,7 @@ class Transmuxer {
  }
  uintData = new Uint8Array(decryptedData);
  } else {
- this.decryptionPromise = decrypter.webCryptoDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer).then(decryptedData => {
+ this.decryptionPromise = decrypter.webCryptoDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer, aesMode).then(decryptedData => {
  // Calling push here is important; if flush() is called while this is still resolving, this ensures that
  // the decrypted data has been transmuxed
  const result = this.push(decryptedData, null, chunkMeta);
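`isFullSegmentEncryption` and `getAesModeFromFullSegmentMethod` come from the new `src/utils/encryption-methods-util.ts` (see the file list), and the extra `aesMode` argument threads through `src/crypt/decrypter.ts`. Their bodies are not shown in this hunk; a sketch of what they plausibly do, inferred only from how they are called here, with the `DecrypterAesMode` values assumed:

```ts
// Assumed enum corresponding to src/crypt/decrypter-aes-mode.ts
enum DecrypterAesMode {
  cbc,
  ctr,
}

// Sketch: AES-128 is no longer the only full-segment method handled by the transmuxer.
function isFullSegmentEncryption(method: string): boolean {
  return method === 'AES-128' || method === 'AES-256' || method === 'AES-256-CTR';
}

function getAesModeFromFullSegmentMethod(method: string): DecrypterAesMode {
  switch (method) {
    case 'AES-128':
    case 'AES-256':
      return DecrypterAesMode.cbc;
    case 'AES-256-CTR':
      return DecrypterAesMode.ctr;
    default:
      throw new Error(`invalid full-segment encryption method: ${method}`);
  }
}
```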
@@ -17950,14 +18774,7 @@ class TransmuxerInterface {
  this.observer = new EventEmitter();
  this.observer.on(Events.FRAG_DECRYPTED, forwardMessage);
  this.observer.on(Events.ERROR, forwardMessage);
- const MediaSource = getMediaSource(config.preferManagedMediaSource) || {
- isTypeSupported: () => false
- };
- const m2tsTypeSupported = {
- mpeg: MediaSource.isTypeSupported('audio/mpeg'),
- mp3: MediaSource.isTypeSupported('audio/mp4; codecs="mp3"'),
- ac3: false
- };
+ const m2tsTypeSupported = getM2TSSupportedAudioTypes(config.preferManagedMediaSource);

  // navigator.vendor is not always available in Web Worker
  // refer to https://developer.mozilla.org/en-US/docs/Web/API/WorkerGlobalScope/navigator
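The inlined capability probe is replaced by a shared `getM2TSSupportedAudioTypes` helper. A sketch reconstructed from the removed block; the parameter shape and the AC-3 probe are assumptions, since the real helper receives `preferManagedMediaSource` and resolves the MediaSource implementation itself:

```ts
type M2TSAudioSupport = { mpeg: boolean; mp3: boolean; ac3: boolean };

function getM2TSSupportedAudioTypes(
  mediaSource: { isTypeSupported(type: string): boolean } | null,
): M2TSAudioSupport {
  const ms = mediaSource ?? { isTypeSupported: () => false };
  return {
    mpeg: ms.isTypeSupported('audio/mpeg'),
    mp3: ms.isTypeSupported('audio/mp4; codecs="mp3"'),
    // 1.5.4 hard-coded ac3: false here; probing it is an assumption about the new helper.
    ac3: ms.isTypeSupported('audio/mp4; codecs="ac-3"'),
  };
}
```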
@@ -18221,8 +19038,9 @@ const STALL_MINIMUM_DURATION_MS = 250;
  const MAX_START_GAP_JUMP = 2.0;
  const SKIP_BUFFER_HOLE_STEP_SECONDS = 0.1;
  const SKIP_BUFFER_RANGE_START = 0.05;
- class GapController {
+ class GapController extends Logger {
  constructor(config, media, fragmentTracker, hls) {
+ super('gap-controller', hls.logger);
  this.config = void 0;
  this.media = null;
  this.fragmentTracker = void 0;
@@ -18232,6 +19050,7 @@ class GapController {
  this.stalled = null;
  this.moved = false;
  this.seeking = false;
+ this.ended = 0;
  this.config = config;
  this.media = media;
  this.fragmentTracker = fragmentTracker;
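`GapController` now extends the `Logger` base class added in `src/utils/logger.ts`, so its `this.warn(...)`/`this.error(...)` calls below are scoped to the owning `Hls` instance instead of the module-level singleton. A sketch of that pattern (illustrative, not the library's exact implementation):

```ts
type LogFn = (...args: unknown[]) => void;

interface ILogger {
  log: LogFn;
  warn: LogFn;
  error: LogFn;
}

class Logger implements ILogger {
  log: LogFn;
  warn: LogFn;
  error: LogFn;

  constructor(label: string, logger: ILogger) {
    // Bind the instance logger with a component prefix, e.g. "[gap-controller]:"
    const prefix = `[${label}]:`;
    this.log = logger.log.bind(logger, prefix);
    this.warn = logger.warn.bind(logger, prefix);
    this.error = logger.error.bind(logger, prefix);
  }
}
```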
@@ -18249,7 +19068,7 @@ class GapController {
  *
  * @param lastCurrentTime - Previously read playhead position
  */
- poll(lastCurrentTime, activeFrag) {
+ poll(lastCurrentTime, activeFrag, levelDetails, state) {
  const {
  config,
  media,
@@ -18268,6 +19087,7 @@ class GapController {

  // The playhead is moving, no-op
  if (currentTime !== lastCurrentTime) {
+ this.ended = 0;
  this.moved = true;
  if (!seeking) {
  this.nudgeRetry = 0;
@@ -18276,7 +19096,7 @@ class GapController {
  // The playhead is now moving, but was previously stalled
  if (this.stallReported) {
  const _stalledDuration = self.performance.now() - stalled;
- logger.warn(`playback not stuck anymore @${currentTime}, after ${Math.round(_stalledDuration)}ms`);
+ this.warn(`playback not stuck anymore @${currentTime}, after ${Math.round(_stalledDuration)}ms`);
  this.stallReported = false;
  }
  this.stalled = null;
@@ -18312,7 +19132,6 @@ class GapController {
  // Skip start gaps if we haven't played, but the last poll detected the start of a stall
  // The addition poll gives the browser a chance to jump the gap for us
  if (!this.moved && this.stalled !== null) {
- var _level$details;
  // There is no playable buffer (seeked, waiting for buffer)
  const isBuffered = bufferInfo.len > 0;
  if (!isBuffered && !nextStart) {
@@ -18324,9 +19143,8 @@ class GapController {
  // When joining a live stream with audio tracks, account for live playlist window sliding by allowing
  // a larger jump over start gaps caused by the audio-stream-controller buffering a start fragment
  // that begins over 1 target duration after the video start position.
- const level = this.hls.levels ? this.hls.levels[this.hls.currentLevel] : null;
- const isLive = level == null ? void 0 : (_level$details = level.details) == null ? void 0 : _level$details.live;
- const maxStartGapJump = isLive ? level.details.targetduration * 2 : MAX_START_GAP_JUMP;
+ const isLive = !!(levelDetails != null && levelDetails.live);
+ const maxStartGapJump = isLive ? levelDetails.targetduration * 2 : MAX_START_GAP_JUMP;
  const partialOrGap = this.fragmentTracker.getPartialFragment(currentTime);
  if (startJump > 0 && (startJump <= maxStartGapJump || partialOrGap)) {
  if (!media.paused) {
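With the playlist now passed into `poll()`, the allowed start-gap jump is computed from `levelDetails` directly rather than re-reading `hls.levels[hls.currentLevel]`. A worked example with hypothetical numbers:

```ts
const MAX_START_GAP_JUMP = 2.0; // seconds, the VOD default from above

// Live playlist with a 6-second target duration: start gaps up to 12s may be jumped.
const liveTargetDuration = 6;
const liveMaxStartGapJump = liveTargetDuration * 2; // 12

// VOD (or no playlist yet): the jump stays capped at 2 seconds.
const vodMaxStartGapJump = MAX_START_GAP_JUMP; // 2

console.log({ liveMaxStartGapJump, vodMaxStartGapJump });
```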
@@ -18344,6 +19162,17 @@ class GapController {
  }
  const stalledDuration = tnow - stalled;
  if (!seeking && stalledDuration >= STALL_MINIMUM_DURATION_MS) {
+ // Dispatch MEDIA_ENDED when media.ended/ended event is not signalled at end of stream
+ if (state === State.ENDED && !(levelDetails && levelDetails.live) && Math.abs(currentTime - ((levelDetails == null ? void 0 : levelDetails.edge) || 0)) < 1) {
+ if (stalledDuration < 1000 || this.ended) {
+ return;
+ }
+ this.ended = currentTime;
+ this.hls.trigger(Events.MEDIA_ENDED, {
+ stalled: true
+ });
+ return;
+ }
  // Report stalling after trying to fix
  this._reportStall(bufferInfo);
  if (!this.media) {
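This new branch makes the gap controller emit `MEDIA_ENDED` with `stalled: true` when playback stops within a second of the VOD edge but the media element never fires `ended`. Applications can treat it like the regular end-of-stream signal; a usage sketch (assumes media is attached and a source is loaded elsewhere):

```ts
import Hls from 'hls.js';

const hls = new Hls();
hls.on(Hls.Events.MEDIA_ENDED, (event, data) => {
  // `stalled` is true when this fallback fired instead of the media 'ended' event
  if (data.stalled) {
    console.log('End of stream detected by the gap controller');
  }
});
```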
@@ -18387,7 +19216,7 @@ class GapController {
  // needs to cross some sort of threshold covering all source-buffers content
  // to start playing properly.
  if ((bufferInfo.len > config.maxBufferHole || bufferInfo.nextStart && bufferInfo.nextStart - currentTime < config.maxBufferHole) && stalledDurationMs > config.highBufferWatchdogPeriod * 1000) {
- logger.warn('Trying to nudge playhead over buffer-hole');
+ this.warn('Trying to nudge playhead over buffer-hole');
  // Try to nudge currentTime over a buffer hole if we've been stalling for the configured amount of seconds
  // We only try to jump the hole if it's under the configured size
  // Reset stalled so to rearm watchdog timer
@@ -18411,7 +19240,7 @@ class GapController {
  // Report stalled error once
  this.stallReported = true;
  const error = new Error(`Playback stalling at @${media.currentTime} due to low buffer (${JSON.stringify(bufferInfo)})`);
- logger.warn(error.message);
+ this.warn(error.message);
  hls.trigger(Events.ERROR, {
  type: ErrorTypes.MEDIA_ERROR,
  details: ErrorDetails.BUFFER_STALLED_ERROR,
@@ -18479,7 +19308,7 @@ class GapController {
  }
  }
  const targetTime = Math.max(startTime + SKIP_BUFFER_RANGE_START, currentTime + SKIP_BUFFER_HOLE_STEP_SECONDS);
- logger.warn(`skipping hole, adjusting currentTime from ${currentTime} to ${targetTime}`);
+ this.warn(`skipping hole, adjusting currentTime from ${currentTime} to ${targetTime}`);
  this.moved = true;
  this.stalled = null;
  media.currentTime = targetTime;
@@ -18520,7 +19349,7 @@ class GapController {
  const targetTime = currentTime + (nudgeRetry + 1) * config.nudgeOffset;
  // playback stalled in buffered area ... let's nudge currentTime to try to overcome this
  const error = new Error(`Nudging 'currentTime' from ${currentTime} to ${targetTime}`);
- logger.warn(error.message);
+ this.warn(error.message);
  media.currentTime = targetTime;
  hls.trigger(Events.ERROR, {
  type: ErrorTypes.MEDIA_ERROR,
@@ -18530,7 +19359,7 @@ class GapController {
  });
  } else {
  const error = new Error(`Playhead still not moving while enough data buffered @${currentTime} after ${config.nudgeMaxRetry} nudges`);
- logger.error(error.message);
+ this.error(error.message);
  hls.trigger(Events.ERROR, {
  type: ErrorTypes.MEDIA_ERROR,
  details: ErrorDetails.BUFFER_STALLED_ERROR,
@@ -18545,7 +19374,7 @@ const TICK_INTERVAL = 100; // how often to tick in ms

  class StreamController extends BaseStreamController {
  constructor(hls, fragmentTracker, keyLoader) {
- super(hls, fragmentTracker, keyLoader, '[stream-controller]', PlaylistLevelType.MAIN);
+ super(hls, fragmentTracker, keyLoader, 'stream-controller', PlaylistLevelType.MAIN);
  this.audioCodecSwap = false;
  this.gapController = null;
  this.level = -1;
@@ -18553,27 +19382,43 @@ class StreamController extends BaseStreamController {
  this.altAudio = false;
  this.audioOnly = false;
  this.fragPlaying = null;
- this.onvplaying = null;
- this.onvseeked = null;
  this.fragLastKbps = 0;
  this.couldBacktrack = false;
  this.backtrackFragment = null;
  this.audioCodecSwitch = false;
  this.videoBuffer = null;
- this._registerListeners();
+ this.onMediaPlaying = () => {
+ // tick to speed up FRAG_CHANGED triggering
+ this.tick();
+ };
+ this.onMediaSeeked = () => {
+ const media = this.media;
+ const currentTime = media ? media.currentTime : null;
+ if (isFiniteNumber(currentTime)) {
+ this.log(`Media seeked to ${currentTime.toFixed(3)}`);
+ }
+
+ // If seeked was issued before buffer was appended do not tick immediately
+ const bufferInfo = this.getMainFwdBufferInfo();
+ if (bufferInfo === null || bufferInfo.len === 0) {
+ this.warn(`Main forward buffer length on "seeked" event ${bufferInfo ? bufferInfo.len : 'empty'})`);
+ return;
+ }
+
+ // tick to speed up FRAG_CHANGED triggering
+ this.tick();
+ };
+ this.registerListeners();
  }
- _registerListeners() {
+ registerListeners() {
+ super.registerListeners();
  const {
  hls
  } = this;
- hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
- hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
- hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
  hls.on(Events.MANIFEST_PARSED, this.onManifestParsed, this);
  hls.on(Events.LEVEL_LOADING, this.onLevelLoading, this);
  hls.on(Events.LEVEL_LOADED, this.onLevelLoaded, this);
  hls.on(Events.FRAG_LOAD_EMERGENCY_ABORTED, this.onFragLoadEmergencyAborted, this);
- hls.on(Events.ERROR, this.onError, this);
  hls.on(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
  hls.on(Events.AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this);
  hls.on(Events.BUFFER_CREATED, this.onBufferCreated, this);
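`onMediaPlaying` and `onMediaSeeked` become class-field arrow functions, so the same stable reference can be handed to `addEventListener`/`removeEventListener` without the old `onvplaying`/`onvseeked` bound copies. A minimal sketch of the pattern outside hls.js:

```ts
class MediaObserver {
  private media: HTMLMediaElement | null = null;

  // Arrow-function class field: `this` is the instance and the reference never changes.
  private onPlaying = () => {
    console.log('playing at', this.media?.currentTime);
  };

  attach(media: HTMLMediaElement): void {
    this.media = media;
    media.addEventListener('playing', this.onPlaying);
  }

  detach(): void {
    this.media?.removeEventListener('playing', this.onPlaying);
    this.media = null;
  }
}
```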
@@ -18581,17 +19426,14 @@ class StreamController extends BaseStreamController {
  hls.on(Events.LEVELS_UPDATED, this.onLevelsUpdated, this);
  hls.on(Events.FRAG_BUFFERED, this.onFragBuffered, this);
  }
- _unregisterListeners() {
+ unregisterListeners() {
+ super.unregisterListeners();
  const {
  hls
  } = this;
- hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
- hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
- hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
  hls.off(Events.MANIFEST_PARSED, this.onManifestParsed, this);
  hls.off(Events.LEVEL_LOADED, this.onLevelLoaded, this);
  hls.off(Events.FRAG_LOAD_EMERGENCY_ABORTED, this.onFragLoadEmergencyAborted, this);
- hls.off(Events.ERROR, this.onError, this);
  hls.off(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
  hls.off(Events.AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this);
  hls.off(Events.BUFFER_CREATED, this.onBufferCreated, this);
@@ -18600,7 +19442,9 @@ class StreamController extends BaseStreamController {
  hls.off(Events.FRAG_BUFFERED, this.onFragBuffered, this);
  }
  onHandlerDestroying() {
- this._unregisterListeners();
+ // @ts-ignore
+ this.onMediaPlaying = this.onMediaSeeked = null;
+ this.unregisterListeners();
  super.onHandlerDestroying();
  }
  startLoad(startPosition) {
@@ -18720,7 +19564,7 @@ class StreamController extends BaseStreamController {
  if (this.altAudio && this.audioOnly) {
  return;
  }
- if (!(levels != null && levels[level])) {
+ if (!this.buffering || !(levels != null && levels[level])) {
  return;
  }
  const levelInfo = levels[level];
@@ -18928,20 +19772,17 @@ class StreamController extends BaseStreamController {
  onMediaAttached(event, data) {
  super.onMediaAttached(event, data);
  const media = data.media;
- this.onvplaying = this.onMediaPlaying.bind(this);
- this.onvseeked = this.onMediaSeeked.bind(this);
- media.addEventListener('playing', this.onvplaying);
- media.addEventListener('seeked', this.onvseeked);
+ media.addEventListener('playing', this.onMediaPlaying);
+ media.addEventListener('seeked', this.onMediaSeeked);
  this.gapController = new GapController(this.config, media, this.fragmentTracker, this.hls);
  }
  onMediaDetaching() {
  const {
  media
  } = this;
- if (media && this.onvplaying && this.onvseeked) {
- media.removeEventListener('playing', this.onvplaying);
- media.removeEventListener('seeked', this.onvseeked);
- this.onvplaying = this.onvseeked = null;
+ if (media) {
+ media.removeEventListener('playing', this.onMediaPlaying);
+ media.removeEventListener('seeked', this.onMediaSeeked);
  this.videoBuffer = null;
  }
  this.fragPlaying = null;
@@ -18951,27 +19792,6 @@ class StreamController extends BaseStreamController {
  }
  super.onMediaDetaching();
  }
- onMediaPlaying() {
- // tick to speed up FRAG_CHANGED triggering
- this.tick();
- }
- onMediaSeeked() {
- const media = this.media;
- const currentTime = media ? media.currentTime : null;
- if (isFiniteNumber(currentTime)) {
- this.log(`Media seeked to ${currentTime.toFixed(3)}`);
- }
-
- // If seeked was issued before buffer was appended do not tick immediately
- const bufferInfo = this.getMainFwdBufferInfo();
- if (bufferInfo === null || bufferInfo.len === 0) {
- this.warn(`Main forward buffer length on "seeked" event ${bufferInfo ? bufferInfo.len : 'empty'})`);
- return;
- }
-
- // tick to speed up FRAG_CHANGED triggering
- this.tick();
- }
  onManifestLoading() {
  // reset buffer on manifest loading
  this.log('Trigger BUFFER_RESET');
@@ -19263,8 +20083,10 @@ class StreamController extends BaseStreamController {
  }
  if (this.loadedmetadata || !BufferHelper.getBuffered(media).length) {
  // Resolve gaps using the main buffer, whose ranges are the intersections of the A/V sourcebuffers
- const activeFrag = this.state !== State.IDLE ? this.fragCurrent : null;
- gapController.poll(this.lastCurrentTime, activeFrag);
+ const state = this.state;
+ const activeFrag = state !== State.IDLE ? this.fragCurrent : null;
+ const levelDetails = this.getLevelDetails();
+ gapController.poll(this.lastCurrentTime, activeFrag, levelDetails, state);
  }
  this.lastCurrentTime = media.currentTime;
  }
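The stream controller now forwards its own state and the loaded playlist into the gap controller. The parameter shape implied by this call site, written as illustrative TypeScript (these are stand-ins, not the library's declarations):

```ts
interface LevelDetailsLike {
  live: boolean;
  edge: number; // end of the playlist window, in seconds
  targetduration: number;
}

interface GapControllerLike {
  poll(
    lastCurrentTime: number,
    activeFrag: object | null,
    levelDetails: LevelDetailsLike | undefined,
    state: string, // e.g. State.IDLE or State.ENDED
  ): void;
}
```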
@@ -19702,7 +20524,7 @@ class Hls {
  * Get the video-dev/hls.js package version.
  */
  static get version() {
- return "1.5.4";
+ return "1.5.5-0.canary.9977";
  }

  /**
@@ -19765,9 +20587,12 @@ class Hls {
  * The configuration object provided on player instantiation.
  */
  this.userConfig = void 0;
+ /**
+ * The logger functions used by this player instance, configured on player instantiation.
+ */
+ this.logger = void 0;
  this.coreComponents = void 0;
  this.networkControllers = void 0;
- this.started = false;
  this._emitter = new EventEmitter();
  this._autoLevelCapping = -1;
  this._maxHdcpLevel = null;
@@ -19784,11 +20609,11 @@ class Hls {
  this._media = null;
  this.url = null;
  this.triggeringException = void 0;
- enableLogs(userConfig.debug || false, 'Hls instance');
- const config = this.config = mergeConfig(Hls.DefaultConfig, userConfig);
+ const logger = this.logger = enableLogs(userConfig.debug || false, 'Hls instance');
+ const config = this.config = mergeConfig(Hls.DefaultConfig, userConfig, logger);
  this.userConfig = userConfig;
  if (config.progressive) {
- enableStreamingMode(config);
+ enableStreamingMode(config, logger);
  }

  // core controllers and network loaders
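Logging is now per instance: `enableLogs` returns the logger, which is stored on the new public `hls.logger` field and passed into `mergeConfig`/`enableStreamingMode`. The `debug` option still accepts either a boolean or a custom logger object; the exact set of methods shown below follows the fields used in this bundle and should be treated as an assumption:

```ts
import Hls from 'hls.js';

const hls = new Hls({
  debug: {
    trace: () => {},
    debug: () => {},
    log: (...args: unknown[]) => console.log('[hls]', ...args),
    info: (...args: unknown[]) => console.info('[hls]', ...args),
    warn: (...args: unknown[]) => console.warn('[hls]', ...args),
    error: (...args: unknown[]) => console.error('[hls]', ...args),
  },
});

// New in this version: the resolved logger is exposed on the instance.
hls.logger.log('instance-scoped logging instead of the module-level singleton');
```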
@@ -19887,7 +20712,7 @@ class Hls {
  try {
  return this.emit(event, event, eventObject);
  } catch (error) {
- logger.error('An internal error happened while handling event ' + event + '. Error message: "' + error.message + '". Here is a stacktrace:', error);
+ this.logger.error('An internal error happened while handling event ' + event + '. Error message: "' + error.message + '". Here is a stacktrace:', error);
  // Prevent recursion in error event handlers that throw #5497
  if (!this.triggeringException) {
  this.triggeringException = true;
@@ -19913,7 +20738,7 @@ class Hls {
  * Dispose of the instance
  */
  destroy() {
- logger.log('destroy');
+ this.logger.log('destroy');
  this.trigger(Events.DESTROYING, undefined);
  this.detachMedia();
  this.removeAllListeners();
@@ -19934,7 +20759,7 @@ class Hls {
  * Attaches Hls.js to a media element
  */
  attachMedia(media) {
- logger.log('attachMedia');
+ this.logger.log('attachMedia');
  this._media = media;
  this.trigger(Events.MEDIA_ATTACHING, {
  media: media
@@ -19945,7 +20770,7 @@ class Hls {
  * Detach Hls.js from the media
  */
  detachMedia() {
- logger.log('detachMedia');
+ this.logger.log('detachMedia');
  this.trigger(Events.MEDIA_DETACHING, undefined);
  this._media = null;
  }
@@ -19962,7 +20787,7 @@ class Hls {
  });
  this._autoLevelCapping = -1;
  this._maxHdcpLevel = null;
- logger.log(`loadSource:${loadingSource}`);
+ this.logger.log(`loadSource:${loadingSource}`);
  if (media && loadedSource && (loadedSource !== loadingSource || this.bufferController.hasSourceTypes())) {
  this.detachMedia();
  this.attachMedia(media);
@@ -19981,8 +20806,7 @@ class Hls {
  * Defaults to -1 (None: starts from earliest point)
  */
  startLoad(startPosition = -1) {
- logger.log(`startLoad(${startPosition})`);
- this.started = true;
+ this.logger.log(`startLoad(${startPosition})`);
  this.networkControllers.forEach(controller => {
  controller.startLoad(startPosition);
  });
@@ -19992,34 +20816,31 @@ class Hls {
  * Stop loading of any stream data.
  */
  stopLoad() {
- logger.log('stopLoad');
- this.started = false;
+ this.logger.log('stopLoad');
  this.networkControllers.forEach(controller => {
  controller.stopLoad();
  });
  }

  /**
- * Resumes stream controller segment loading if previously started.
+ * Resumes stream controller segment loading after `pauseBuffering` has been called.
  */
  resumeBuffering() {
- if (this.started) {
- this.networkControllers.forEach(controller => {
- if ('fragmentLoader' in controller) {
- controller.startLoad(-1);
- }
- });
- }
+ this.networkControllers.forEach(controller => {
+ if (controller.resumeBuffering) {
+ controller.resumeBuffering();
+ }
+ });
  }

  /**
- * Stops stream controller segment loading without changing 'started' state like stopLoad().
+ * Prevents stream controller from loading new segments until `resumeBuffering` is called.
  * This allows for media buffering to be paused without interupting playlist loading.
  */
  pauseBuffering() {
  this.networkControllers.forEach(controller => {
- if ('fragmentLoader' in controller) {
- controller.stopLoad();
+ if (controller.pauseBuffering) {
+ controller.pauseBuffering();
  }
  });
  }
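`pauseBuffering`/`resumeBuffering` no longer fake a `stopLoad()`/`startLoad(-1)` cycle guarded by the removed `started` flag; they delegate to controllers that implement the optional hooks (paired with the new `buffering` check in the stream controller earlier in this diff). Usage is unchanged from the application's point of view:

```ts
import Hls from 'hls.js';

const hls = new Hls();

// Hold segment loading while a preloaded, off-screen player is idle;
// playlist loading and live playlist refresh continue.
hls.pauseBuffering();

// When the player becomes active again, let segment loading continue.
hls.resumeBuffering();
```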
@@ -20028,7 +20849,7 @@ class Hls {
  * Swap through possible audio codecs in the stream (for example to switch from stereo to 5.1)
  */
  swapAudioCodec() {
- logger.log('swapAudioCodec');
+ this.logger.log('swapAudioCodec');
  this.streamController.swapAudioCodec();
  }

@@ -20039,7 +20860,7 @@ class Hls {
  * Automatic recovery of media-errors by this process is configurable.
  */
  recoverMediaError() {
- logger.log('recoverMediaError');
+ this.logger.log('recoverMediaError');
  const media = this._media;
  this.detachMedia();
  if (media) {
@@ -20069,7 +20890,7 @@ class Hls {
  * Set quality level index immediately. This will flush the current buffer to replace the quality asap. That means playback will interrupt at least shortly to re-buffer and re-sync eventually. Set to -1 for automatic level selection.
  */
  set currentLevel(newLevel) {
- logger.log(`set currentLevel:${newLevel}`);
+ this.logger.log(`set currentLevel:${newLevel}`);
  this.levelController.manualLevel = newLevel;
  this.streamController.immediateLevelSwitch();
  }
@@ -20088,7 +20909,7 @@ class Hls {
  * @param newLevel - Pass -1 for automatic level selection
  */
  set nextLevel(newLevel) {
- logger.log(`set nextLevel:${newLevel}`);
+ this.logger.log(`set nextLevel:${newLevel}`);
  this.levelController.manualLevel = newLevel;
  this.streamController.nextLevelSwitch();
  }
@@ -20107,7 +20928,7 @@ class Hls {
  * @param newLevel - Pass -1 for automatic level selection
  */
  set loadLevel(newLevel) {
- logger.log(`set loadLevel:${newLevel}`);
+ this.logger.log(`set loadLevel:${newLevel}`);
  this.levelController.manualLevel = newLevel;
  }

@@ -20138,7 +20959,7 @@ class Hls {
  * Sets "first-level", see getter.
  */
  set firstLevel(newLevel) {
- logger.log(`set firstLevel:${newLevel}`);
+ this.logger.log(`set firstLevel:${newLevel}`);
  this.levelController.firstLevel = newLevel;
  }

@@ -20163,7 +20984,7 @@ class Hls {
  * (determined from download of first segment)
  */
  set startLevel(newLevel) {
- logger.log(`set startLevel:${newLevel}`);
+ this.logger.log(`set startLevel:${newLevel}`);
  // if not in automatic start level detection, ensure startLevel is greater than minAutoLevel
  if (newLevel !== -1) {
  newLevel = Math.max(newLevel, this.minAutoLevel);
@@ -20238,7 +21059,7 @@ class Hls {
  */
  set autoLevelCapping(newLevel) {
  if (this._autoLevelCapping !== newLevel) {
- logger.log(`set autoLevelCapping:${newLevel}`);
+ this.logger.log(`set autoLevelCapping:${newLevel}`);
  this._autoLevelCapping = newLevel;
  this.levelController.checkMaxAutoUpdated();
  }