hls.js 1.5.5 → 1.5.6-0.canary.10003

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (68)
  1. package/README.md +1 -0
  2. package/dist/hls-demo.js +10 -0
  3. package/dist/hls-demo.js.map +1 -1
  4. package/dist/hls.js +2075 -1166
  5. package/dist/hls.js.d.ts +65 -50
  6. package/dist/hls.js.map +1 -1
  7. package/dist/hls.light.js +1148 -859
  8. package/dist/hls.light.js.map +1 -1
  9. package/dist/hls.light.min.js +1 -1
  10. package/dist/hls.light.min.js.map +1 -1
  11. package/dist/hls.light.mjs +984 -696
  12. package/dist/hls.light.mjs.map +1 -1
  13. package/dist/hls.min.js +1 -1
  14. package/dist/hls.min.js.map +1 -1
  15. package/dist/hls.mjs +1757 -863
  16. package/dist/hls.mjs.map +1 -1
  17. package/dist/hls.worker.js +1 -1
  18. package/dist/hls.worker.js.map +1 -1
  19. package/package.json +20 -20
  20. package/src/config.ts +3 -2
  21. package/src/controller/abr-controller.ts +21 -20
  22. package/src/controller/audio-stream-controller.ts +15 -16
  23. package/src/controller/audio-track-controller.ts +1 -1
  24. package/src/controller/base-playlist-controller.ts +20 -8
  25. package/src/controller/base-stream-controller.ts +149 -33
  26. package/src/controller/buffer-controller.ts +11 -11
  27. package/src/controller/cap-level-controller.ts +1 -2
  28. package/src/controller/cmcd-controller.ts +27 -6
  29. package/src/controller/content-steering-controller.ts +8 -6
  30. package/src/controller/eme-controller.ts +9 -22
  31. package/src/controller/error-controller.ts +6 -8
  32. package/src/controller/fps-controller.ts +2 -3
  33. package/src/controller/gap-controller.ts +43 -16
  34. package/src/controller/latency-controller.ts +9 -11
  35. package/src/controller/level-controller.ts +12 -18
  36. package/src/controller/stream-controller.ts +25 -32
  37. package/src/controller/subtitle-stream-controller.ts +13 -14
  38. package/src/controller/subtitle-track-controller.ts +5 -3
  39. package/src/controller/timeline-controller.ts +23 -30
  40. package/src/crypt/aes-crypto.ts +21 -2
  41. package/src/crypt/decrypter-aes-mode.ts +4 -0
  42. package/src/crypt/decrypter.ts +32 -18
  43. package/src/crypt/fast-aes-key.ts +24 -5
  44. package/src/demux/audio/adts.ts +9 -4
  45. package/src/demux/sample-aes.ts +2 -0
  46. package/src/demux/transmuxer-interface.ts +4 -12
  47. package/src/demux/transmuxer-worker.ts +4 -4
  48. package/src/demux/transmuxer.ts +16 -3
  49. package/src/demux/tsdemuxer.ts +71 -37
  50. package/src/demux/video/avc-video-parser.ts +208 -119
  51. package/src/demux/video/base-video-parser.ts +134 -2
  52. package/src/demux/video/exp-golomb.ts +0 -208
  53. package/src/demux/video/hevc-video-parser.ts +746 -0
  54. package/src/events.ts +7 -0
  55. package/src/hls.ts +42 -34
  56. package/src/loader/fragment-loader.ts +9 -2
  57. package/src/loader/key-loader.ts +2 -0
  58. package/src/loader/level-key.ts +10 -9
  59. package/src/loader/playlist-loader.ts +4 -5
  60. package/src/remux/mp4-generator.ts +196 -1
  61. package/src/remux/mp4-remuxer.ts +23 -7
  62. package/src/task-loop.ts +5 -2
  63. package/src/types/component-api.ts +2 -0
  64. package/src/types/demuxer.ts +3 -0
  65. package/src/types/events.ts +4 -0
  66. package/src/utils/codecs.ts +33 -4
  67. package/src/utils/encryption-methods-util.ts +21 -0
  68. package/src/utils/logger.ts +54 -24
@@ -256,6 +256,7 @@ let Events = /*#__PURE__*/function (Events) {
256
256
  Events["MEDIA_ATTACHED"] = "hlsMediaAttached";
257
257
  Events["MEDIA_DETACHING"] = "hlsMediaDetaching";
258
258
  Events["MEDIA_DETACHED"] = "hlsMediaDetached";
259
+ Events["MEDIA_ENDED"] = "hlsMediaEnded";
259
260
  Events["BUFFER_RESET"] = "hlsBufferReset";
260
261
  Events["BUFFER_CODECS"] = "hlsBufferCodecs";
261
262
  Events["BUFFER_CREATED"] = "hlsBufferCreated";
@@ -369,58 +370,6 @@ let ErrorDetails = /*#__PURE__*/function (ErrorDetails) {
369
370
  return ErrorDetails;
370
371
  }({});
371
372
 
372
- const noop = function noop() {};
373
- const fakeLogger = {
374
- trace: noop,
375
- debug: noop,
376
- log: noop,
377
- warn: noop,
378
- info: noop,
379
- error: noop
380
- };
381
- let exportedLogger = fakeLogger;
382
-
383
- // let lastCallTime;
384
- // function formatMsgWithTimeInfo(type, msg) {
385
- // const now = Date.now();
386
- // const diff = lastCallTime ? '+' + (now - lastCallTime) : '0';
387
- // lastCallTime = now;
388
- // msg = (new Date(now)).toISOString() + ' | [' + type + '] > ' + msg + ' ( ' + diff + ' ms )';
389
- // return msg;
390
- // }
391
-
392
- function consolePrintFn(type) {
393
- const func = self.console[type];
394
- if (func) {
395
- return func.bind(self.console, `[${type}] >`);
396
- }
397
- return noop;
398
- }
399
- function exportLoggerFunctions(debugConfig, ...functions) {
400
- functions.forEach(function (type) {
401
- exportedLogger[type] = debugConfig[type] ? debugConfig[type].bind(debugConfig) : consolePrintFn(type);
402
- });
403
- }
404
- function enableLogs(debugConfig, id) {
405
- // check that console is available
406
- if (typeof console === 'object' && debugConfig === true || typeof debugConfig === 'object') {
407
- exportLoggerFunctions(debugConfig,
408
- // Remove out from list here to hard-disable a log-level
409
- // 'trace',
410
- 'debug', 'log', 'info', 'warn', 'error');
411
- // Some browsers don't allow to use bind on console object anyway
412
- // fallback to default if needed
413
- try {
414
- exportedLogger.log(`Debug logs enabled for "${id}" in hls.js version ${"1.5.5"}`);
415
- } catch (e) {
416
- exportedLogger = fakeLogger;
417
- }
418
- } else {
419
- exportedLogger = fakeLogger;
420
- }
421
- }
422
- const logger = exportedLogger;
423
-
424
373
  const DECIMAL_RESOLUTION_REGEX = /^(\d+)x(\d+)$/;
425
374
  const ATTR_LIST_REGEX = /(.+?)=(".*?"|.*?)(?:,|$)/g;
426
375
 
@@ -502,6 +451,79 @@ class AttrList {
502
451
  }
503
452
  }
504
453
 
454
+ class Logger {
455
+ constructor(label, logger) {
456
+ this.trace = void 0;
457
+ this.debug = void 0;
458
+ this.log = void 0;
459
+ this.warn = void 0;
460
+ this.info = void 0;
461
+ this.error = void 0;
462
+ const lb = `[${label}]:`;
463
+ this.trace = noop;
464
+ this.debug = logger.debug.bind(null, lb);
465
+ this.log = logger.log.bind(null, lb);
466
+ this.warn = logger.warn.bind(null, lb);
467
+ this.info = logger.info.bind(null, lb);
468
+ this.error = logger.error.bind(null, lb);
469
+ }
470
+ }
471
+ const noop = function noop() {};
472
+ const fakeLogger = {
473
+ trace: noop,
474
+ debug: noop,
475
+ log: noop,
476
+ warn: noop,
477
+ info: noop,
478
+ error: noop
479
+ };
480
+ function createLogger() {
481
+ return _extends({}, fakeLogger);
482
+ }
483
+
484
+ // let lastCallTime;
485
+ // function formatMsgWithTimeInfo(type, msg) {
486
+ // const now = Date.now();
487
+ // const diff = lastCallTime ? '+' + (now - lastCallTime) : '0';
488
+ // lastCallTime = now;
489
+ // msg = (new Date(now)).toISOString() + ' | [' + type + '] > ' + msg + ' ( ' + diff + ' ms )';
490
+ // return msg;
491
+ // }
492
+
493
+ function consolePrintFn(type, id) {
494
+ const func = self.console[type];
495
+ return func ? func.bind(self.console, `${id ? '[' + id + '] ' : ''}[${type}] >`) : noop;
496
+ }
497
+ function getLoggerFn(key, debugConfig, id) {
498
+ return debugConfig[key] ? debugConfig[key].bind(debugConfig) : consolePrintFn(key, id);
499
+ }
500
+ const exportedLogger = createLogger();
501
+ function enableLogs(debugConfig, context, id) {
502
+ // check that console is available
503
+ const newLogger = createLogger();
504
+ if (typeof console === 'object' && debugConfig === true || typeof debugConfig === 'object') {
505
+ const keys = [
506
+ // Remove out from list here to hard-disable a log-level
507
+ // 'trace',
508
+ 'debug', 'log', 'info', 'warn', 'error'];
509
+ keys.forEach(key => {
510
+ newLogger[key] = getLoggerFn(key, debugConfig, id);
511
+ });
512
+ // Some browsers don't allow to use bind on console object anyway
513
+ // fallback to default if needed
514
+ try {
515
+ newLogger.log(`Debug logs enabled for "${context}" in hls.js version ${"1.5.6-0.canary.10003"}`);
516
+ } catch (e) {
517
+ /* log fn threw an exception. All logger methods are no-ops. */
518
+ return createLogger();
519
+ }
520
+ }
521
+ // global exported logger uses the log methods from last call to `enableLogs`
522
+ _extends(exportedLogger, newLogger);
523
+ return newLogger;
524
+ }
525
+ const logger = exportedLogger;
526
+
505
527
  // Avoid exporting const enum so that these values can be inlined
506
528
 
507
529
  function isDateRangeCueAttribute(attrName) {
@@ -991,10 +1013,30 @@ class LevelDetails {
991
1013
  }
992
1014
  }
993
1015
 
1016
+ var DecrypterAesMode = {
1017
+ cbc: 0,
1018
+ ctr: 1
1019
+ };
1020
+
1021
+ function isFullSegmentEncryption(method) {
1022
+ return method === 'AES-128' || method === 'AES-256' || method === 'AES-256-CTR';
1023
+ }
1024
+ function getAesModeFromFullSegmentMethod(method) {
1025
+ switch (method) {
1026
+ case 'AES-128':
1027
+ case 'AES-256':
1028
+ return DecrypterAesMode.cbc;
1029
+ case 'AES-256-CTR':
1030
+ return DecrypterAesMode.ctr;
1031
+ default:
1032
+ throw new Error(`invalid full segment method ${method}`);
1033
+ }
1034
+ }
1035
+
994
1036
  // This file is inserted as a shim for modules which we do not want to include into the distro.
995
1037
  // This replacement is done in the "alias" plugin of the rollup config.
996
1038
  var empty = undefined;
997
- var Cues = /*@__PURE__*/getDefaultExportFromCjs(empty);
1039
+ var HevcVideoParser = /*@__PURE__*/getDefaultExportFromCjs(empty);
998
1040
 
999
1041
  function sliceUint8(array, start, end) {
1000
1042
  // @ts-expect-error This polyfills IE11 usage of Uint8Array slice.
@@ -2431,12 +2473,12 @@ class LevelKey {
2431
2473
  this.keyFormatVersions = formatversions;
2432
2474
  this.iv = iv;
2433
2475
  this.encrypted = method ? method !== 'NONE' : false;
2434
- this.isCommonEncryption = this.encrypted && method !== 'AES-128';
2476
+ this.isCommonEncryption = this.encrypted && !isFullSegmentEncryption(method);
2435
2477
  }
2436
2478
  isSupported() {
2437
2479
  // If it's Segment encryption or No encryption, just select that key system
2438
2480
  if (this.method) {
2439
- if (this.method === 'AES-128' || this.method === 'NONE') {
2481
+ if (isFullSegmentEncryption(this.method) || this.method === 'NONE') {
2440
2482
  return true;
2441
2483
  }
2442
2484
  if (this.keyFormat === 'identity') {
@@ -2450,14 +2492,13 @@ class LevelKey {
2450
2492
  if (!this.encrypted || !this.uri) {
2451
2493
  return null;
2452
2494
  }
2453
- if (this.method === 'AES-128' && this.uri && !this.iv) {
2495
+ if (isFullSegmentEncryption(this.method) && this.uri && !this.iv) {
2454
2496
  if (typeof sn !== 'number') {
2455
2497
  // We are fetching decryption data for a initialization segment
2456
- // If the segment was encrypted with AES-128
2498
+ // If the segment was encrypted with AES-128/256
2457
2499
  // It must have an IV defined. We cannot substitute the Segment Number in.
2458
- if (this.method === 'AES-128' && !this.iv) {
2459
- logger.warn(`missing IV for initialization segment with method="${this.method}" - compliance issue`);
2460
- }
2500
+ logger.warn(`missing IV for initialization segment with method="${this.method}" - compliance issue`);
2501
+
2461
2502
  // Explicitly set sn to resulting value from implicit conversions 'initSegment' values for IV generation.
2462
2503
  sn = 0;
2463
2504
  }
@@ -2604,23 +2645,28 @@ function getCodecCompatibleNameLower(lowerCaseCodec, preferManagedMediaSource =
2604
2645
  if (CODEC_COMPATIBLE_NAMES[lowerCaseCodec]) {
2605
2646
  return CODEC_COMPATIBLE_NAMES[lowerCaseCodec];
2606
2647
  }
2607
-
2608
- // Idealy fLaC and Opus would be first (spec-compliant) but
2609
- // some browsers will report that fLaC is supported then fail.
2610
- // see: https://bugs.chromium.org/p/chromium/issues/detail?id=1422728
2611
2648
  const codecsToCheck = {
2649
+ // Idealy fLaC and Opus would be first (spec-compliant) but
2650
+ // some browsers will report that fLaC is supported then fail.
2651
+ // see: https://bugs.chromium.org/p/chromium/issues/detail?id=1422728
2612
2652
  flac: ['flac', 'fLaC', 'FLAC'],
2613
- opus: ['opus', 'Opus']
2653
+ opus: ['opus', 'Opus'],
2654
+ // Replace audio codec info if browser does not support mp4a.40.34,
2655
+ // and demuxer can fallback to 'audio/mpeg' or 'audio/mp4;codecs="mp3"'
2656
+ 'mp4a.40.34': ['mp3']
2614
2657
  }[lowerCaseCodec];
2615
2658
  for (let i = 0; i < codecsToCheck.length; i++) {
2659
+ var _getMediaSource;
2616
2660
  if (isCodecMediaSourceSupported(codecsToCheck[i], 'audio', preferManagedMediaSource)) {
2617
2661
  CODEC_COMPATIBLE_NAMES[lowerCaseCodec] = codecsToCheck[i];
2618
2662
  return codecsToCheck[i];
2663
+ } else if (codecsToCheck[i] === 'mp3' && (_getMediaSource = getMediaSource(preferManagedMediaSource)) != null && _getMediaSource.isTypeSupported('audio/mpeg')) {
2664
+ return '';
2619
2665
  }
2620
2666
  }
2621
2667
  return lowerCaseCodec;
2622
2668
  }
2623
- const AUDIO_CODEC_REGEXP = /flac|opus/i;
2669
+ const AUDIO_CODEC_REGEXP = /flac|opus|mp4a\.40\.34/i;
2624
2670
  function getCodecCompatibleName(codec, preferManagedMediaSource = true) {
2625
2671
  return codec.replace(AUDIO_CODEC_REGEXP, m => getCodecCompatibleNameLower(m.toLowerCase(), preferManagedMediaSource));
2626
2672
  }
@@ -2643,6 +2689,16 @@ function convertAVC1ToAVCOTI(codec) {
2643
2689
  }
2644
2690
  return codec;
2645
2691
  }
2692
+ function getM2TSSupportedAudioTypes(preferManagedMediaSource) {
2693
+ const MediaSource = getMediaSource(preferManagedMediaSource) || {
2694
+ isTypeSupported: () => false
2695
+ };
2696
+ return {
2697
+ mpeg: MediaSource.isTypeSupported('audio/mpeg'),
2698
+ mp3: MediaSource.isTypeSupported('audio/mp4; codecs="mp3"'),
2699
+ ac3: false
2700
+ };
2701
+ }
2646
2702
 
2647
2703
  const MASTER_PLAYLIST_REGEX = /#EXT-X-STREAM-INF:([^\r\n]*)(?:[\r\n](?:#[^\r\n]*)?)*([^\r\n]+)|#EXT-X-(SESSION-DATA|SESSION-KEY|DEFINE|CONTENT-STEERING|START):([^\r\n]*)[\r\n]+/g;
2648
2704
  const MASTER_PLAYLIST_MEDIA_REGEX = /#EXT-X-MEDIA:(.*)/g;
@@ -3443,10 +3499,10 @@ class PlaylistLoader {
3443
3499
  const loaderContext = loader.context;
3444
3500
  if (loaderContext && loaderContext.url === context.url && loaderContext.level === context.level) {
3445
3501
  // same URL can't overlap
3446
- logger.trace('[playlist-loader]: playlist request ongoing');
3502
+ this.hls.logger.trace('[playlist-loader]: playlist request ongoing');
3447
3503
  return;
3448
3504
  }
3449
- logger.log(`[playlist-loader]: aborting previous loader for type: ${context.type}`);
3505
+ this.hls.logger.log(`[playlist-loader]: aborting previous loader for type: ${context.type}`);
3450
3506
  loader.abort();
3451
3507
  }
3452
3508
 
@@ -3556,7 +3612,7 @@ class PlaylistLoader {
3556
3612
  // alt audio rendition in which quality levels (main)
3557
3613
  // contains both audio+video. but with mixed audio track not signaled
3558
3614
  if (!embeddedAudioFound && levels[0].audioCodec && !levels[0].attrs.AUDIO) {
3559
- logger.log('[playlist-loader]: audio codec signaled in quality level, but no embedded audio track signaled, create one');
3615
+ this.hls.logger.log('[playlist-loader]: audio codec signaled in quality level, but no embedded audio track signaled, create one');
3560
3616
  audioTracks.unshift({
3561
3617
  type: 'main',
3562
3618
  name: 'main',
@@ -3655,7 +3711,7 @@ class PlaylistLoader {
3655
3711
  message += ` id: ${context.id} group-id: "${context.groupId}"`;
3656
3712
  }
3657
3713
  const error = new Error(message);
3658
- logger.warn(`[playlist-loader]: ${message}`);
3714
+ this.hls.logger.warn(`[playlist-loader]: ${message}`);
3659
3715
  let details = ErrorDetails.UNKNOWN;
3660
3716
  let fatal = false;
3661
3717
  const loader = this.getInternalLoader(context);
@@ -4220,7 +4276,47 @@ class LatencyController {
4220
4276
  this.currentTime = 0;
4221
4277
  this.stallCount = 0;
4222
4278
  this._latency = null;
4223
- this.timeupdateHandler = () => this.timeupdate();
4279
+ this.onTimeupdate = () => {
4280
+ const {
4281
+ media,
4282
+ levelDetails
4283
+ } = this;
4284
+ if (!media || !levelDetails) {
4285
+ return;
4286
+ }
4287
+ this.currentTime = media.currentTime;
4288
+ const latency = this.computeLatency();
4289
+ if (latency === null) {
4290
+ return;
4291
+ }
4292
+ this._latency = latency;
4293
+
4294
+ // Adapt playbackRate to meet target latency in low-latency mode
4295
+ const {
4296
+ lowLatencyMode,
4297
+ maxLiveSyncPlaybackRate
4298
+ } = this.config;
4299
+ if (!lowLatencyMode || maxLiveSyncPlaybackRate === 1 || !levelDetails.live) {
4300
+ return;
4301
+ }
4302
+ const targetLatency = this.targetLatency;
4303
+ if (targetLatency === null) {
4304
+ return;
4305
+ }
4306
+ const distanceFromTarget = latency - targetLatency;
4307
+ // Only adjust playbackRate when within one target duration of targetLatency
4308
+ // and more than one second from under-buffering.
4309
+ // Playback further than one target duration from target can be considered DVR playback.
4310
+ const liveMinLatencyDuration = Math.min(this.maxLatency, targetLatency + levelDetails.targetduration);
4311
+ const inLiveRange = distanceFromTarget < liveMinLatencyDuration;
4312
+ if (inLiveRange && distanceFromTarget > 0.05 && this.forwardBufferLength > 1) {
4313
+ const max = Math.min(2, Math.max(1.0, maxLiveSyncPlaybackRate));
4314
+ const rate = Math.round(2 / (1 + Math.exp(-0.75 * distanceFromTarget - this.edgeStalled)) * 20) / 20;
4315
+ media.playbackRate = Math.min(max, Math.max(1, rate));
4316
+ } else if (media.playbackRate !== 1 && media.playbackRate !== 0) {
4317
+ media.playbackRate = 1;
4318
+ }
4319
+ };
4224
4320
  this.hls = hls;
4225
4321
  this.config = hls.config;
4226
4322
  this.registerListeners();
@@ -4312,7 +4408,7 @@ class LatencyController {
4312
4408
  this.onMediaDetaching();
4313
4409
  this.levelDetails = null;
4314
4410
  // @ts-ignore
4315
- this.hls = this.timeupdateHandler = null;
4411
+ this.hls = null;
4316
4412
  }
4317
4413
  registerListeners() {
4318
4414
  this.hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
@@ -4330,11 +4426,11 @@ class LatencyController {
4330
4426
  }
4331
4427
  onMediaAttached(event, data) {
4332
4428
  this.media = data.media;
4333
- this.media.addEventListener('timeupdate', this.timeupdateHandler);
4429
+ this.media.addEventListener('timeupdate', this.onTimeupdate);
4334
4430
  }
4335
4431
  onMediaDetaching() {
4336
4432
  if (this.media) {
4337
- this.media.removeEventListener('timeupdate', this.timeupdateHandler);
4433
+ this.media.removeEventListener('timeupdate', this.onTimeupdate);
4338
4434
  this.media = null;
4339
4435
  }
4340
4436
  }
@@ -4348,10 +4444,10 @@ class LatencyController {
4348
4444
  }) {
4349
4445
  this.levelDetails = details;
4350
4446
  if (details.advanced) {
4351
- this.timeupdate();
4447
+ this.onTimeupdate();
4352
4448
  }
4353
4449
  if (!details.live && this.media) {
4354
- this.media.removeEventListener('timeupdate', this.timeupdateHandler);
4450
+ this.media.removeEventListener('timeupdate', this.onTimeupdate);
4355
4451
  }
4356
4452
  }
4357
4453
  onError(event, data) {
@@ -4361,48 +4457,7 @@ class LatencyController {
4361
4457
  }
4362
4458
  this.stallCount++;
4363
4459
  if ((_this$levelDetails = this.levelDetails) != null && _this$levelDetails.live) {
4364
- logger.warn('[playback-rate-controller]: Stall detected, adjusting target latency');
4365
- }
4366
- }
4367
- timeupdate() {
4368
- const {
4369
- media,
4370
- levelDetails
4371
- } = this;
4372
- if (!media || !levelDetails) {
4373
- return;
4374
- }
4375
- this.currentTime = media.currentTime;
4376
- const latency = this.computeLatency();
4377
- if (latency === null) {
4378
- return;
4379
- }
4380
- this._latency = latency;
4381
-
4382
- // Adapt playbackRate to meet target latency in low-latency mode
4383
- const {
4384
- lowLatencyMode,
4385
- maxLiveSyncPlaybackRate
4386
- } = this.config;
4387
- if (!lowLatencyMode || maxLiveSyncPlaybackRate === 1 || !levelDetails.live) {
4388
- return;
4389
- }
4390
- const targetLatency = this.targetLatency;
4391
- if (targetLatency === null) {
4392
- return;
4393
- }
4394
- const distanceFromTarget = latency - targetLatency;
4395
- // Only adjust playbackRate when within one target duration of targetLatency
4396
- // and more than one second from under-buffering.
4397
- // Playback further than one target duration from target can be considered DVR playback.
4398
- const liveMinLatencyDuration = Math.min(this.maxLatency, targetLatency + levelDetails.targetduration);
4399
- const inLiveRange = distanceFromTarget < liveMinLatencyDuration;
4400
- if (inLiveRange && distanceFromTarget > 0.05 && this.forwardBufferLength > 1) {
4401
- const max = Math.min(2, Math.max(1.0, maxLiveSyncPlaybackRate));
4402
- const rate = Math.round(2 / (1 + Math.exp(-0.75 * distanceFromTarget - this.edgeStalled)) * 20) / 20;
4403
- media.playbackRate = Math.min(max, Math.max(1, rate));
4404
- } else if (media.playbackRate !== 1 && media.playbackRate !== 0) {
4405
- media.playbackRate = 1;
4460
+ this.hls.logger.warn('[latency-controller]: Stall detected, adjusting target latency');
4406
4461
  }
4407
4462
  }
4408
4463
  estimateLiveEdge() {
@@ -5174,18 +5229,13 @@ var ErrorActionFlags = {
5174
5229
  MoveAllAlternatesMatchingHDCP: 2,
5175
5230
  SwitchToSDR: 4
5176
5231
  }; // Reserved for future use
5177
- class ErrorController {
5232
+ class ErrorController extends Logger {
5178
5233
  constructor(hls) {
5234
+ super('error-controller', hls.logger);
5179
5235
  this.hls = void 0;
5180
5236
  this.playlistError = 0;
5181
5237
  this.penalizedRenditions = {};
5182
- this.log = void 0;
5183
- this.warn = void 0;
5184
- this.error = void 0;
5185
5238
  this.hls = hls;
5186
- this.log = logger.log.bind(logger, `[info]:`);
5187
- this.warn = logger.warn.bind(logger, `[warning]:`);
5188
- this.error = logger.error.bind(logger, `[error]:`);
5189
5239
  this.registerListeners();
5190
5240
  }
5191
5241
  registerListeners() {
@@ -5537,16 +5587,13 @@ class ErrorController {
5537
5587
  }
5538
5588
  }
5539
5589
 
5540
- class BasePlaylistController {
5590
+ class BasePlaylistController extends Logger {
5541
5591
  constructor(hls, logPrefix) {
5592
+ super(logPrefix, hls.logger);
5542
5593
  this.hls = void 0;
5543
5594
  this.timer = -1;
5544
5595
  this.requestScheduled = -1;
5545
5596
  this.canLoad = false;
5546
- this.log = void 0;
5547
- this.warn = void 0;
5548
- this.log = logger.log.bind(logger, `${logPrefix}:`);
5549
- this.warn = logger.warn.bind(logger, `${logPrefix}:`);
5550
5597
  this.hls = hls;
5551
5598
  }
5552
5599
  destroy() {
@@ -5579,7 +5626,7 @@ class BasePlaylistController {
5579
5626
  try {
5580
5627
  uri = new self.URL(attr.URI, previous.url).href;
5581
5628
  } catch (error) {
5582
- logger.warn(`Could not construct new URL for Rendition Report: ${error}`);
5629
+ this.warn(`Could not construct new URL for Rendition Report: ${error}`);
5583
5630
  uri = attr.URI || '';
5584
5631
  }
5585
5632
  // Use exact match. Otherwise, the last partial match, if any, will be used
@@ -5666,7 +5713,12 @@ class BasePlaylistController {
5666
5713
  const cdnAge = lastAdvanced + details.ageHeader;
5667
5714
  let currentGoal = Math.min(cdnAge - details.partTarget, details.targetduration * 1.5);
5668
5715
  if (currentGoal > 0) {
5669
- if (previousDetails && currentGoal > previousDetails.tuneInGoal) {
5716
+ if (cdnAge > details.targetduration * 3) {
5717
+ // Omit segment and part directives when the last response was more than 3 target durations ago,
5718
+ this.log(`Playlist last advanced ${lastAdvanced.toFixed(2)}s ago. Omitting segment and part directives.`);
5719
+ msn = undefined;
5720
+ part = undefined;
5721
+ } else if (previousDetails != null && previousDetails.tuneInGoal && cdnAge - details.partTarget > previousDetails.tuneInGoal) {
5670
5722
  // If we attempted to get the next or latest playlist update, but currentGoal increased,
5671
5723
  // then we either can't catchup, or the "age" header cannot be trusted.
5672
5724
  this.warn(`CDN Tune-in goal increased from: ${previousDetails.tuneInGoal} to: ${currentGoal} with playlist age: ${details.age}`);
@@ -6125,8 +6177,9 @@ function getCodecTiers(levels, audioTracksByGroup, minAutoLevel, maxAutoLevel) {
6125
6177
  }, {});
6126
6178
  }
6127
6179
 
6128
- class AbrController {
6180
+ class AbrController extends Logger {
6129
6181
  constructor(_hls) {
6182
+ super('abr', _hls.logger);
6130
6183
  this.hls = void 0;
6131
6184
  this.lastLevelLoadSec = 0;
6132
6185
  this.lastLoadedFragLevel = -1;
@@ -6240,7 +6293,7 @@ class AbrController {
6240
6293
  this.resetEstimator(nextLoadLevelBitrate);
6241
6294
  }
6242
6295
  this.clearTimer();
6243
- logger.warn(`[abr] Fragment ${frag.sn}${part ? ' part ' + part.index : ''} of level ${frag.level} is loading too slowly;
6296
+ this.warn(`Fragment ${frag.sn}${part ? ' part ' + part.index : ''} of level ${frag.level} is loading too slowly;
6244
6297
  Time to underbuffer: ${bufferStarvationDelay.toFixed(3)} s
6245
6298
  Estimated load time for current fragment: ${fragLoadedDelay.toFixed(3)} s
6246
6299
  Estimated load time for down switch fragment: ${fragLevelNextLoadedDelay.toFixed(3)} s
@@ -6260,7 +6313,7 @@ class AbrController {
6260
6313
  }
6261
6314
  resetEstimator(abrEwmaDefaultEstimate) {
6262
6315
  if (abrEwmaDefaultEstimate) {
6263
- logger.log(`setting initial bwe to ${abrEwmaDefaultEstimate}`);
6316
+ this.log(`setting initial bwe to ${abrEwmaDefaultEstimate}`);
6264
6317
  this.hls.config.abrEwmaDefaultEstimate = abrEwmaDefaultEstimate;
6265
6318
  }
6266
6319
  this.firstSelection = -1;
@@ -6492,7 +6545,7 @@ class AbrController {
6492
6545
  }
6493
6546
  const firstLevel = this.hls.firstLevel;
6494
6547
  const clamped = Math.min(Math.max(firstLevel, minAutoLevel), maxAutoLevel);
6495
- logger.warn(`[abr] Could not find best starting auto level. Defaulting to first in playlist ${firstLevel} clamped to ${clamped}`);
6548
+ this.warn(`Could not find best starting auto level. Defaulting to first in playlist ${firstLevel} clamped to ${clamped}`);
6496
6549
  return clamped;
6497
6550
  }
6498
6551
  get forcedAutoLevel() {
@@ -6577,13 +6630,13 @@ class AbrController {
6577
6630
  // cap maxLoadingDelay and ensure it is not bigger 'than bitrate test' frag duration
6578
6631
  const maxLoadingDelay = currentFragDuration ? Math.min(currentFragDuration, config.maxLoadingDelay) : config.maxLoadingDelay;
6579
6632
  maxStarvationDelay = maxLoadingDelay - bitrateTestDelay;
6580
- logger.info(`[abr] bitrate test took ${Math.round(1000 * bitrateTestDelay)}ms, set first fragment max fetchDuration to ${Math.round(1000 * maxStarvationDelay)} ms`);
6633
+ this.info(`bitrate test took ${Math.round(1000 * bitrateTestDelay)}ms, set first fragment max fetchDuration to ${Math.round(1000 * maxStarvationDelay)} ms`);
6581
6634
  // don't use conservative factor on bitrate test
6582
6635
  bwFactor = bwUpFactor = 1;
6583
6636
  }
6584
6637
  }
6585
6638
  const bestLevel = this.findBestLevel(avgbw, minAutoLevel, maxAutoLevel, bufferStarvationDelay, maxStarvationDelay, bwFactor, bwUpFactor);
6586
- logger.info(`[abr] ${bufferStarvationDelay ? 'rebuffering expected' : 'buffer is empty'}, optimal quality level ${bestLevel}`);
6639
+ this.info(`${bufferStarvationDelay ? 'rebuffering expected' : 'buffer is empty'}, optimal quality level ${bestLevel}`);
6587
6640
  if (bestLevel > -1) {
6588
6641
  return bestLevel;
6589
6642
  }
@@ -6645,7 +6698,7 @@ class AbrController {
6645
6698
  currentVideoRange = preferHDR ? videoRanges[videoRanges.length - 1] : videoRanges[0];
6646
6699
  currentFrameRate = minFramerate;
6647
6700
  currentBw = Math.max(currentBw, minBitrate);
6648
- logger.log(`[abr] picked start tier ${JSON.stringify(startTier)}`);
6701
+ this.log(`picked start tier ${JSON.stringify(startTier)}`);
6649
6702
  } else {
6650
6703
  currentCodecSet = level == null ? void 0 : level.codecSet;
6651
6704
  currentVideoRange = level == null ? void 0 : level.videoRange;
@@ -6698,9 +6751,9 @@ class AbrController {
6698
6751
  const forcedAutoLevel = this.forcedAutoLevel;
6699
6752
  if (i !== loadLevel && (forcedAutoLevel === -1 || forcedAutoLevel !== loadLevel)) {
6700
6753
  if (levelsSkipped.length) {
6701
- logger.trace(`[abr] Skipped level(s) ${levelsSkipped.join(',')} of ${maxAutoLevel} max with CODECS and VIDEO-RANGE:"${levels[levelsSkipped[0]].codecs}" ${levels[levelsSkipped[0]].videoRange}; not compatible with "${level.codecs}" ${currentVideoRange}`);
6754
+ this.trace(`Skipped level(s) ${levelsSkipped.join(',')} of ${maxAutoLevel} max with CODECS and VIDEO-RANGE:"${levels[levelsSkipped[0]].codecs}" ${levels[levelsSkipped[0]].videoRange}; not compatible with "${level.codecs}" ${currentVideoRange}`);
6702
6755
  }
6703
- logger.info(`[abr] switch candidate:${selectionBaseLevel}->${i} adjustedbw(${Math.round(adjustedbw)})-bitrate=${Math.round(adjustedbw - bitrate)} ttfb:${ttfbEstimateSec.toFixed(1)} avgDuration:${avgDuration.toFixed(1)} maxFetchDuration:${maxFetchDuration.toFixed(1)} fetchDuration:${fetchDuration.toFixed(1)} firstSelection:${firstSelection} codecSet:${currentCodecSet} videoRange:${currentVideoRange} hls.loadLevel:${loadLevel}`);
6756
+ this.info(`switch candidate:${selectionBaseLevel}->${i} adjustedbw(${Math.round(adjustedbw)})-bitrate=${Math.round(adjustedbw - bitrate)} ttfb:${ttfbEstimateSec.toFixed(1)} avgDuration:${avgDuration.toFixed(1)} maxFetchDuration:${maxFetchDuration.toFixed(1)} fetchDuration:${fetchDuration.toFixed(1)} firstSelection:${firstSelection} codecSet:${currentCodecSet} videoRange:${currentVideoRange} hls.loadLevel:${loadLevel}`);
6704
6757
  }
6705
6758
  if (firstSelection) {
6706
6759
  this.firstSelection = i;
@@ -6936,8 +6989,9 @@ class BufferOperationQueue {
6936
6989
  }
6937
6990
 
6938
6991
  const VIDEO_CODEC_PROFILE_REPLACE = /(avc[1234]|hvc1|hev1|dvh[1e]|vp09|av01)(?:\.[^.,]+)+/;
6939
- class BufferController {
6992
+ class BufferController extends Logger {
6940
6993
  constructor(hls) {
6994
+ super('buffer-controller', hls.logger);
6941
6995
  // The level details used to determine duration, target-duration and live
6942
6996
  this.details = null;
6943
6997
  // cache the self generated object url to detect hijack of video tag
@@ -6967,9 +7021,6 @@ class BufferController {
6967
7021
  this.tracks = {};
6968
7022
  this.pendingTracks = {};
6969
7023
  this.sourceBuffer = void 0;
6970
- this.log = void 0;
6971
- this.warn = void 0;
6972
- this.error = void 0;
6973
7024
  this._onEndStreaming = event => {
6974
7025
  if (!this.hls) {
6975
7026
  return;
@@ -7015,15 +7066,11 @@ class BufferController {
7015
7066
  _objectUrl
7016
7067
  } = this;
7017
7068
  if (mediaSrc !== _objectUrl) {
7018
- logger.error(`Media element src was set while attaching MediaSource (${_objectUrl} > ${mediaSrc})`);
7069
+ this.error(`Media element src was set while attaching MediaSource (${_objectUrl} > ${mediaSrc})`);
7019
7070
  }
7020
7071
  };
7021
7072
  this.hls = hls;
7022
- const logPrefix = '[buffer-controller]';
7023
7073
  this.appendSource = hls.config.preferManagedMediaSource;
7024
- this.log = logger.log.bind(logger, logPrefix);
7025
- this.warn = logger.warn.bind(logger, logPrefix);
7026
- this.error = logger.error.bind(logger, logPrefix);
7027
7074
  this._initSourceBuffer();
7028
7075
  this.registerListeners();
7029
7076
  }
@@ -7036,6 +7083,12 @@ class BufferController {
7036
7083
  this.lastMpegAudioChunk = null;
7037
7084
  // @ts-ignore
7038
7085
  this.hls = null;
7086
+ // @ts-ignore
7087
+ this._onMediaSourceOpen = this._onMediaSourceClose = null;
7088
+ // @ts-ignore
7089
+ this._onMediaSourceEnded = null;
7090
+ // @ts-ignore
7091
+ this._onStartStreaming = this._onEndStreaming = null;
7039
7092
  }
7040
7093
  registerListeners() {
7041
7094
  const {
@@ -7198,6 +7251,7 @@ class BufferController {
7198
7251
  this.resetBuffer(type);
7199
7252
  });
7200
7253
  this._initSourceBuffer();
7254
+ this.hls.resumeBuffering();
7201
7255
  }
7202
7256
  resetBuffer(type) {
7203
7257
  const sb = this.sourceBuffer[type];
@@ -8035,7 +8089,7 @@ class CapLevelController {
8035
8089
  const hls = this.hls;
8036
8090
  const maxLevel = this.getMaxLevel(levels.length - 1);
8037
8091
  if (maxLevel !== this.autoLevelCapping) {
8038
- logger.log(`Setting autoLevelCapping to ${maxLevel}: ${levels[maxLevel].height}p@${levels[maxLevel].bitrate} for media ${this.mediaWidth}x${this.mediaHeight}`);
8092
+ hls.logger.log(`Setting autoLevelCapping to ${maxLevel}: ${levels[maxLevel].height}p@${levels[maxLevel].bitrate} for media ${this.mediaWidth}x${this.mediaHeight}`);
8039
8093
  }
8040
8094
  hls.autoLevelCapping = maxLevel;
8041
8095
  if (hls.autoLevelCapping > this.autoLevelCapping && this.streamController) {
@@ -8213,10 +8267,10 @@ class FPSController {
8213
8267
  totalDroppedFrames: droppedFrames
8214
8268
  });
8215
8269
  if (droppedFPS > 0) {
8216
- // logger.log('checkFPS : droppedFPS/decodedFPS:' + droppedFPS/(1000 * currentDecoded / currentPeriod));
8270
+ // hls.logger.log('checkFPS : droppedFPS/decodedFPS:' + droppedFPS/(1000 * currentDecoded / currentPeriod));
8217
8271
  if (currentDropped > hls.config.fpsDroppedMonitoringThreshold * currentDecoded) {
8218
8272
  let currentLevel = hls.currentLevel;
8219
- logger.warn('drop FPS ratio greater than max allowed value for currentLevel: ' + currentLevel);
8273
+ hls.logger.warn('drop FPS ratio greater than max allowed value for currentLevel: ' + currentLevel);
8220
8274
  if (currentLevel > 0 && (hls.autoLevelCapping === -1 || hls.autoLevelCapping >= currentLevel)) {
8221
8275
  currentLevel = currentLevel - 1;
8222
8276
  hls.trigger(Events.FPS_DROP_LEVEL_CAPPING, {
@@ -8249,10 +8303,10 @@ class FPSController {
8249
8303
  }
8250
8304
 
8251
8305
  const PATHWAY_PENALTY_DURATION_MS = 300000;
8252
- class ContentSteeringController {
8306
+ class ContentSteeringController extends Logger {
8253
8307
  constructor(hls) {
8308
+ super('content-steering', hls.logger);
8254
8309
  this.hls = void 0;
8255
- this.log = void 0;
8256
8310
  this.loader = null;
8257
8311
  this.uri = null;
8258
8312
  this.pathwayId = '.';
@@ -8267,7 +8321,6 @@ class ContentSteeringController {
8267
8321
  this.subtitleTracks = null;
8268
8322
  this.penalizedPathways = {};
8269
8323
  this.hls = hls;
8270
- this.log = logger.log.bind(logger, `[content-steering]:`);
8271
8324
  this.registerListeners();
8272
8325
  }
8273
8326
  registerListeners() {
@@ -8391,7 +8444,7 @@ class ContentSteeringController {
8391
8444
  errorAction.resolved = this.pathwayId !== errorPathway;
8392
8445
  }
8393
8446
  if (!errorAction.resolved) {
8394
- logger.warn(`Could not resolve ${data.details} ("${data.error.message}") with content-steering for Pathway: ${errorPathway} levels: ${levels ? levels.length : levels} priorities: ${JSON.stringify(pathwayPriority)} penalized: ${JSON.stringify(this.penalizedPathways)}`);
8447
+ this.warn(`Could not resolve ${data.details} ("${data.error.message}") with content-steering for Pathway: ${errorPathway} levels: ${levels ? levels.length : levels} priorities: ${JSON.stringify(pathwayPriority)} penalized: ${JSON.stringify(this.penalizedPathways)}`);
8395
8448
  }
8396
8449
  }
8397
8450
  }
@@ -8562,7 +8615,7 @@ class ContentSteeringController {
8562
8615
  onSuccess: (response, stats, context, networkDetails) => {
8563
8616
  this.log(`Loaded steering manifest: "${url}"`);
8564
8617
  const steeringData = response.data;
8565
- if (steeringData.VERSION !== 1) {
8618
+ if ((steeringData == null ? void 0 : steeringData.VERSION) !== 1) {
8566
8619
  this.log(`Steering VERSION ${steeringData.VERSION} not supported!`);
8567
8620
  return;
8568
8621
  }
@@ -9470,7 +9523,7 @@ const hlsDefaultConfig = _objectSpread2(_objectSpread2({
9470
9523
  });
9471
9524
  function timelineConfig() {
9472
9525
  return {
9473
- cueHandler: Cues,
9526
+ cueHandler: HevcVideoParser,
9474
9527
  // used by timeline-controller
9475
9528
  enableWebVTT: false,
9476
9529
  // used by timeline-controller
@@ -9501,7 +9554,7 @@ function timelineConfig() {
9501
9554
  /**
9502
9555
  * @ignore
9503
9556
  */
9504
- function mergeConfig(defaultConfig, userConfig) {
9557
+ function mergeConfig(defaultConfig, userConfig, logger) {
9505
9558
  if ((userConfig.liveSyncDurationCount || userConfig.liveMaxLatencyDurationCount) && (userConfig.liveSyncDuration || userConfig.liveMaxLatencyDuration)) {
9506
9559
  throw new Error("Illegal hls.js config: don't mix up liveSyncDurationCount/liveMaxLatencyDurationCount and liveSyncDuration/liveMaxLatencyDuration");
9507
9560
  }
@@ -9571,7 +9624,7 @@ function deepCpy(obj) {
9571
9624
  /**
9572
9625
  * @ignore
9573
9626
  */
9574
- function enableStreamingMode(config) {
9627
+ function enableStreamingMode(config, logger) {
9575
9628
  const currentLoader = config.loader;
9576
9629
  if (currentLoader !== FetchLoader && currentLoader !== XhrLoader) {
9577
9630
  // If a developer has configured their own loader, respect that choice
@@ -9588,10 +9641,9 @@ function enableStreamingMode(config) {
9588
9641
  }
9589
9642
  }
9590
9643
 
9591
- let chromeOrFirefox;
9592
9644
  class LevelController extends BasePlaylistController {
9593
9645
  constructor(hls, contentSteeringController) {
9594
- super(hls, '[level-controller]');
9646
+ super(hls, 'level-controller');
9595
9647
  this._levels = [];
9596
9648
  this._firstLevel = -1;
9597
9649
  this._maxAutoLevel = -1;
@@ -9662,23 +9714,15 @@ class LevelController extends BasePlaylistController {
9662
9714
  let videoCodecFound = false;
9663
9715
  let audioCodecFound = false;
9664
9716
  data.levels.forEach(levelParsed => {
9665
- var _audioCodec, _videoCodec;
9717
+ var _videoCodec;
9666
9718
  const attributes = levelParsed.attrs;
9667
-
9668
- // erase audio codec info if browser does not support mp4a.40.34.
9669
- // demuxer will autodetect codec and fallback to mpeg/audio
9670
9719
  let {
9671
9720
  audioCodec,
9672
9721
  videoCodec
9673
9722
  } = levelParsed;
9674
- if (((_audioCodec = audioCodec) == null ? void 0 : _audioCodec.indexOf('mp4a.40.34')) !== -1) {
9675
- chromeOrFirefox || (chromeOrFirefox = /chrome|firefox/i.test(navigator.userAgent));
9676
- if (chromeOrFirefox) {
9677
- levelParsed.audioCodec = audioCodec = undefined;
9678
- }
9679
- }
9680
9723
  if (audioCodec) {
9681
- levelParsed.audioCodec = audioCodec = getCodecCompatibleName(audioCodec, preferManagedMediaSource);
9724
+ // Returns empty and set to undefined for 'mp4a.40.34' with fallback to 'audio/mpeg' SourceBuffer
9725
+ levelParsed.audioCodec = audioCodec = getCodecCompatibleName(audioCodec, preferManagedMediaSource) || undefined;
9682
9726
  }
9683
9727
  if (((_videoCodec = videoCodec) == null ? void 0 : _videoCodec.indexOf('avc1')) === 0) {
9684
9728
  videoCodec = levelParsed.videoCodec = convertAVC1ToAVCOTI(videoCodec);
@@ -10020,7 +10064,12 @@ class LevelController extends BasePlaylistController {
10020
10064
  if (curLevel.fragmentError === 0) {
10021
10065
  curLevel.loadError = 0;
10022
10066
  }
10023
- this.playlistLoaded(level, data, curLevel.details);
10067
+ // Ignore matching details populated by loading a Media Playlist directly
10068
+ let previousDetails = curLevel.details;
10069
+ if (previousDetails === data.details && previousDetails.advanced) {
10070
+ previousDetails = undefined;
10071
+ }
10072
+ this.playlistLoaded(level, data, previousDetails);
10024
10073
  } else if ((_data$deliveryDirecti2 = data.deliveryDirectives) != null && _data$deliveryDirecti2.skip) {
10025
10074
  // received a delta playlist update that cannot be merged
10026
10075
  details.deltaUpdateFailed = true;
@@ -10806,8 +10855,8 @@ function createLoaderContext(frag, part = null) {
10806
10855
  var _frag$decryptdata;
10807
10856
  let byteRangeStart = start;
10808
10857
  let byteRangeEnd = end;
10809
- if (frag.sn === 'initSegment' && ((_frag$decryptdata = frag.decryptdata) == null ? void 0 : _frag$decryptdata.method) === 'AES-128') {
10810
- // MAP segment encrypted with method 'AES-128', when served with HTTP Range,
10858
+ if (frag.sn === 'initSegment' && isMethodFullSegmentAesCbc((_frag$decryptdata = frag.decryptdata) == null ? void 0 : _frag$decryptdata.method)) {
10859
+ // MAP segment encrypted with method 'AES-128' or 'AES-256' (cbc), when served with HTTP Range,
10811
10860
  // has the unencrypted size specified in the range.
10812
10861
  // Ref: https://tools.ietf.org/html/draft-pantos-hls-rfc8216bis-08#section-6.3.6
10813
10862
  const fragmentLen = end - start;
@@ -10840,6 +10889,9 @@ function createGapLoadError(frag, part) {
10840
10889
  (part ? part : frag).stats.aborted = true;
10841
10890
  return new LoadError(errorData);
10842
10891
  }
10892
+ function isMethodFullSegmentAesCbc(method) {
10893
+ return method === 'AES-128' || method === 'AES-256';
10894
+ }
10843
10895
  class LoadError extends Error {
10844
10896
  constructor(data) {
10845
10897
  super(data.error.message);
@@ -10985,6 +11037,8 @@ class KeyLoader {
10985
11037
  }
10986
11038
  return this.loadKeyEME(keyInfo, frag);
10987
11039
  case 'AES-128':
11040
+ case 'AES-256':
11041
+ case 'AES-256-CTR':
10988
11042
  return this.loadKeyHTTP(keyInfo, frag);
10989
11043
  default:
10990
11044
  return Promise.reject(this.createKeyLoadError(frag, ErrorDetails.KEY_LOAD_ERROR, new Error(`Key supplied with unsupported METHOD: "${decryptdata.method}"`)));
@@ -11120,8 +11174,9 @@ class KeyLoader {
11120
11174
  * we are limiting the task execution per call stack to exactly one, but scheduling/post-poning further
11121
11175
  * task processing on the next main loop iteration (also known as "next tick" in the Node/JS runtime lingo).
11122
11176
  */
11123
- class TaskLoop {
11124
- constructor() {
11177
+ class TaskLoop extends Logger {
11178
+ constructor(label, logger) {
11179
+ super(label, logger);
11125
11180
  this._boundTick = void 0;
11126
11181
  this._tickTimer = null;
11127
11182
  this._tickInterval = null;
@@ -11389,33 +11444,61 @@ function alignMediaPlaylistByPDT(details, refDetails) {
11389
11444
  }
11390
11445
 
11391
11446
  class AESCrypto {
11392
- constructor(subtle, iv) {
11447
+ constructor(subtle, iv, aesMode) {
11393
11448
  this.subtle = void 0;
11394
11449
  this.aesIV = void 0;
11450
+ this.aesMode = void 0;
11395
11451
  this.subtle = subtle;
11396
11452
  this.aesIV = iv;
11453
+ this.aesMode = aesMode;
11397
11454
  }
11398
11455
  decrypt(data, key) {
11399
- return this.subtle.decrypt({
11400
- name: 'AES-CBC',
11401
- iv: this.aesIV
11402
- }, key, data);
11456
+ switch (this.aesMode) {
11457
+ case DecrypterAesMode.cbc:
11458
+ return this.subtle.decrypt({
11459
+ name: 'AES-CBC',
11460
+ iv: this.aesIV
11461
+ }, key, data);
11462
+ case DecrypterAesMode.ctr:
11463
+ return this.subtle.decrypt({
11464
+ name: 'AES-CTR',
11465
+ counter: this.aesIV,
11466
+ length: 64
11467
+ },
11468
+ //64 : NIST SP800-38A standard suggests that the counter should occupy half of the counter block
11469
+ key, data);
11470
+ default:
11471
+ throw new Error(`[AESCrypto] invalid aes mode ${this.aesMode}`);
11472
+ }
11403
11473
  }
11404
11474
  }
11405
11475
 
11406
11476
  class FastAESKey {
11407
- constructor(subtle, key) {
11477
+ constructor(subtle, key, aesMode) {
11408
11478
  this.subtle = void 0;
11409
11479
  this.key = void 0;
11480
+ this.aesMode = void 0;
11410
11481
  this.subtle = subtle;
11411
11482
  this.key = key;
11483
+ this.aesMode = aesMode;
11412
11484
  }
11413
11485
  expandKey() {
11486
+ const subtleAlgoName = getSubtleAlgoName(this.aesMode);
11414
11487
  return this.subtle.importKey('raw', this.key, {
11415
- name: 'AES-CBC'
11488
+ name: subtleAlgoName
11416
11489
  }, false, ['encrypt', 'decrypt']);
11417
11490
  }
11418
11491
  }
11492
+ function getSubtleAlgoName(aesMode) {
11493
+ switch (aesMode) {
11494
+ case DecrypterAesMode.cbc:
11495
+ return 'AES-CBC';
11496
+ case DecrypterAesMode.ctr:
11497
+ return 'AES-CTR';
11498
+ default:
11499
+ throw new Error(`[FastAESKey] invalid aes mode ${aesMode}`);
11500
+ }
11501
+ }
11419
11502
 
11420
11503
  // PKCS7
11421
11504
  function removePadding(array) {
@@ -11665,7 +11748,8 @@ class Decrypter {
11665
11748
  this.currentIV = null;
11666
11749
  this.currentResult = null;
11667
11750
  this.useSoftware = void 0;
11668
- this.useSoftware = config.enableSoftwareAES;
11751
+ this.enableSoftwareAES = void 0;
11752
+ this.enableSoftwareAES = config.enableSoftwareAES;
11669
11753
  this.removePKCS7Padding = removePKCS7Padding;
11670
11754
  // built in decryptor expects PKCS7 padding
11671
11755
  if (removePKCS7Padding) {
@@ -11678,9 +11762,7 @@ class Decrypter {
11678
11762
  /* no-op */
11679
11763
  }
11680
11764
  }
11681
- if (this.subtle === null) {
11682
- this.useSoftware = true;
11683
- }
11765
+ this.useSoftware = this.subtle === null;
11684
11766
  }
11685
11767
  destroy() {
11686
11768
  this.subtle = null;
@@ -11718,10 +11800,10 @@ class Decrypter {
11718
11800
  this.softwareDecrypter = null;
11719
11801
  }
11720
11802
  }
11721
- decrypt(data, key, iv) {
11803
+ decrypt(data, key, iv, aesMode) {
11722
11804
  if (this.useSoftware) {
11723
11805
  return new Promise((resolve, reject) => {
11724
- this.softwareDecrypt(new Uint8Array(data), key, iv);
11806
+ this.softwareDecrypt(new Uint8Array(data), key, iv, aesMode);
11725
11807
  const decryptResult = this.flush();
11726
11808
  if (decryptResult) {
11727
11809
  resolve(decryptResult.buffer);
@@ -11730,17 +11812,21 @@ class Decrypter {
11730
11812
  }
11731
11813
  });
11732
11814
  }
11733
- return this.webCryptoDecrypt(new Uint8Array(data), key, iv);
11815
+ return this.webCryptoDecrypt(new Uint8Array(data), key, iv, aesMode);
11734
11816
  }
11735
11817
 
11736
11818
  // Software decryption is progressive. Progressive decryption may not return a result on each call. Any cached
11737
11819
  // data is handled in the flush() call
11738
- softwareDecrypt(data, key, iv) {
11820
+ softwareDecrypt(data, key, iv, aesMode) {
11739
11821
  const {
11740
11822
  currentIV,
11741
11823
  currentResult,
11742
11824
  remainderData
11743
11825
  } = this;
11826
+ if (aesMode !== DecrypterAesMode.cbc || key.byteLength !== 16) {
11827
+ logger.warn('SoftwareDecrypt: can only handle AES-128-CBC');
11828
+ return null;
11829
+ }
11744
11830
  this.logOnce('JS AES decrypt');
11745
11831
  // The output is staggered during progressive parsing - the current result is cached, and emitted on the next call
11746
11832
  // This is done in order to strip PKCS7 padding, which is found at the end of each segment. We only know we've reached
@@ -11773,11 +11859,11 @@ class Decrypter {
11773
11859
  }
11774
11860
  return result;
11775
11861
  }
11776
- webCryptoDecrypt(data, key, iv) {
11862
+ webCryptoDecrypt(data, key, iv, aesMode) {
11777
11863
  const subtle = this.subtle;
11778
11864
  if (this.key !== key || !this.fastAesKey) {
11779
11865
  this.key = key;
11780
- this.fastAesKey = new FastAESKey(subtle, key);
11866
+ this.fastAesKey = new FastAESKey(subtle, key, aesMode);
11781
11867
  }
11782
11868
  return this.fastAesKey.expandKey().then(aesKey => {
11783
11869
  // decrypt using web crypto
@@ -11785,22 +11871,25 @@ class Decrypter {
11785
11871
  return Promise.reject(new Error('web crypto not initialized'));
11786
11872
  }
11787
11873
  this.logOnce('WebCrypto AES decrypt');
11788
- const crypto = new AESCrypto(subtle, new Uint8Array(iv));
11874
+ const crypto = new AESCrypto(subtle, new Uint8Array(iv), aesMode);
11789
11875
  return crypto.decrypt(data.buffer, aesKey);
11790
11876
  }).catch(err => {
11791
11877
  logger.warn(`[decrypter]: WebCrypto Error, disable WebCrypto API, ${err.name}: ${err.message}`);
11792
- return this.onWebCryptoError(data, key, iv);
11878
+ return this.onWebCryptoError(data, key, iv, aesMode);
11793
11879
  });
11794
11880
  }
11795
- onWebCryptoError(data, key, iv) {
11796
- this.useSoftware = true;
11797
- this.logEnabled = true;
11798
- this.softwareDecrypt(data, key, iv);
11799
- const decryptResult = this.flush();
11800
- if (decryptResult) {
11801
- return decryptResult.buffer;
11881
+ onWebCryptoError(data, key, iv, aesMode) {
11882
+ const enableSoftwareAES = this.enableSoftwareAES;
11883
+ if (enableSoftwareAES) {
11884
+ this.useSoftware = true;
11885
+ this.logEnabled = true;
11886
+ this.softwareDecrypt(data, key, iv, aesMode);
11887
+ const decryptResult = this.flush();
11888
+ if (decryptResult) {
11889
+ return decryptResult.buffer;
11890
+ }
11802
11891
  }
11803
- throw new Error('WebCrypto and softwareDecrypt: failed to decrypt data');
11892
+ throw new Error('WebCrypto' + (enableSoftwareAES ? ' and softwareDecrypt' : '') + ': failed to decrypt data');
11804
11893
  }
11805
11894
  getValidChunk(data) {
11806
11895
  let currentChunk = data;
@@ -11851,7 +11940,7 @@ const State = {
11851
11940
  };
11852
11941
  class BaseStreamController extends TaskLoop {
11853
11942
  constructor(hls, fragmentTracker, keyLoader, logPrefix, playlistType) {
11854
- super();
11943
+ super(logPrefix, hls.logger);
11855
11944
  this.hls = void 0;
11856
11945
  this.fragPrevious = null;
11857
11946
  this.fragCurrent = null;
@@ -11876,22 +11965,98 @@ class BaseStreamController extends TaskLoop {
11876
11965
  this.startFragRequested = false;
11877
11966
  this.decrypter = void 0;
11878
11967
  this.initPTS = [];
11879
- this.onvseeking = null;
11880
- this.onvended = null;
11881
- this.logPrefix = '';
11882
- this.log = void 0;
11883
- this.warn = void 0;
11968
+ this.buffering = true;
11969
+ this.loadingParts = false;
11970
+ this.onMediaSeeking = () => {
11971
+ const {
11972
+ config,
11973
+ fragCurrent,
11974
+ media,
11975
+ mediaBuffer,
11976
+ state
11977
+ } = this;
11978
+ const currentTime = media ? media.currentTime : 0;
11979
+ const bufferInfo = BufferHelper.bufferInfo(mediaBuffer ? mediaBuffer : media, currentTime, config.maxBufferHole);
11980
+ this.log(`media seeking to ${isFiniteNumber(currentTime) ? currentTime.toFixed(3) : currentTime}, state: ${state}`);
11981
+ if (this.state === State.ENDED) {
11982
+ this.resetLoadingState();
11983
+ } else if (fragCurrent) {
11984
+ // Seeking while frag load is in progress
11985
+ const tolerance = config.maxFragLookUpTolerance;
11986
+ const fragStartOffset = fragCurrent.start - tolerance;
11987
+ const fragEndOffset = fragCurrent.start + fragCurrent.duration + tolerance;
11988
+ // if seeking out of buffered range or into new one
11989
+ if (!bufferInfo.len || fragEndOffset < bufferInfo.start || fragStartOffset > bufferInfo.end) {
11990
+ const pastFragment = currentTime > fragEndOffset;
11991
+ // if the seek position is outside the current fragment range
11992
+ if (currentTime < fragStartOffset || pastFragment) {
11993
+ if (pastFragment && fragCurrent.loader) {
11994
+ this.log('seeking outside of buffer while fragment load in progress, cancel fragment load');
11995
+ fragCurrent.abortRequests();
11996
+ this.resetLoadingState();
11997
+ }
11998
+ this.fragPrevious = null;
11999
+ }
12000
+ }
12001
+ }
12002
+ if (media) {
12003
+ // Remove gap fragments
12004
+ this.fragmentTracker.removeFragmentsInRange(currentTime, Infinity, this.playlistType, true);
12005
+ this.lastCurrentTime = currentTime;
12006
+ if (!this.loadingParts) {
12007
+ const bufferEnd = Math.max(bufferInfo.end, currentTime);
12008
+ const shouldLoadParts = this.shouldLoadParts(this.getLevelDetails(), bufferEnd);
12009
+ if (shouldLoadParts) {
12010
+ this.log(`LL-Part loading ON after seeking to ${currentTime.toFixed(2)} with buffer @${bufferEnd.toFixed(2)}`);
12011
+ this.loadingParts = shouldLoadParts;
12012
+ }
12013
+ }
12014
+ }
12015
+
12016
+ // in case seeking occurs although no media buffered, adjust startPosition and nextLoadPosition to seek target
12017
+ if (!this.loadedmetadata && !bufferInfo.len) {
12018
+ this.nextLoadPosition = this.startPosition = currentTime;
12019
+ }
12020
+
12021
+ // Async tick to speed up processing
12022
+ this.tickImmediate();
12023
+ };
12024
+ this.onMediaEnded = () => {
12025
+ // reset startPosition and lastCurrentTime to restart playback @ stream beginning
12026
+ this.startPosition = this.lastCurrentTime = 0;
12027
+ if (this.playlistType === PlaylistLevelType.MAIN) {
12028
+ this.hls.trigger(Events.MEDIA_ENDED, {
12029
+ stalled: false
12030
+ });
12031
+ }
12032
+ };
11884
12033
  this.playlistType = playlistType;
11885
- this.logPrefix = logPrefix;
11886
- this.log = logger.log.bind(logger, `${logPrefix}:`);
11887
- this.warn = logger.warn.bind(logger, `${logPrefix}:`);
11888
12034
  this.hls = hls;
11889
12035
  this.fragmentLoader = new FragmentLoader(hls.config);
11890
12036
  this.keyLoader = keyLoader;
11891
12037
  this.fragmentTracker = fragmentTracker;
11892
12038
  this.config = hls.config;
11893
12039
  this.decrypter = new Decrypter(hls.config);
12040
+ }
12041
+ registerListeners() {
12042
+ const {
12043
+ hls
12044
+ } = this;
12045
+ hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
12046
+ hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
12047
+ hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
11894
12048
  hls.on(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
12049
+ hls.on(Events.ERROR, this.onError, this);
12050
+ }
12051
+ unregisterListeners() {
12052
+ const {
12053
+ hls
12054
+ } = this;
12055
+ hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
12056
+ hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
12057
+ hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
12058
+ hls.off(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
12059
+ hls.off(Events.ERROR, this.onError, this);
11895
12060
  }
11896
12061
  doTick() {
11897
12062
  this.onTickEnd();
@@ -11915,6 +12080,12 @@ class BaseStreamController extends TaskLoop {
11915
12080
  this.clearNextTick();
11916
12081
  this.state = State.STOPPED;
11917
12082
  }
12083
+ pauseBuffering() {
12084
+ this.buffering = false;
12085
+ }
12086
+ resumeBuffering() {
12087
+ this.buffering = true;
12088
+ }
11918
12089
  _streamEnded(bufferInfo, levelDetails) {
11919
12090
  // If playlist is live, there is another buffered range after the current range, nothing buffered, media is detached,
11920
12091
  // of nothing loading/loaded return false
@@ -11945,10 +12116,8 @@ class BaseStreamController extends TaskLoop {
11945
12116
  }
11946
12117
  onMediaAttached(event, data) {
11947
12118
  const media = this.media = this.mediaBuffer = data.media;
11948
- this.onvseeking = this.onMediaSeeking.bind(this);
11949
- this.onvended = this.onMediaEnded.bind(this);
11950
- media.addEventListener('seeking', this.onvseeking);
11951
- media.addEventListener('ended', this.onvended);
12119
+ media.addEventListener('seeking', this.onMediaSeeking);
12120
+ media.addEventListener('ended', this.onMediaEnded);
11952
12121
  const config = this.config;
11953
12122
  if (this.levels && config.autoStartLoad && this.state === State.STOPPED) {
11954
12123
  this.startLoad(config.startPosition);
@@ -11962,10 +12131,9 @@ class BaseStreamController extends TaskLoop {
11962
12131
  }
11963
12132
 
11964
12133
  // remove video listeners
11965
- if (media && this.onvseeking && this.onvended) {
11966
- media.removeEventListener('seeking', this.onvseeking);
11967
- media.removeEventListener('ended', this.onvended);
11968
- this.onvseeking = this.onvended = null;
12134
+ if (media) {
12135
+ media.removeEventListener('seeking', this.onMediaSeeking);
12136
+ media.removeEventListener('ended', this.onMediaEnded);
11969
12137
  }
11970
12138
  if (this.keyLoader) {
11971
12139
  this.keyLoader.detach();
@@ -11975,56 +12143,8 @@ class BaseStreamController extends TaskLoop {
11975
12143
  this.fragmentTracker.removeAllFragments();
11976
12144
  this.stopLoad();
11977
12145
  }
11978
- onMediaSeeking() {
11979
- const {
11980
- config,
11981
- fragCurrent,
11982
- media,
11983
- mediaBuffer,
11984
- state
11985
- } = this;
11986
- const currentTime = media ? media.currentTime : 0;
11987
- const bufferInfo = BufferHelper.bufferInfo(mediaBuffer ? mediaBuffer : media, currentTime, config.maxBufferHole);
11988
- this.log(`media seeking to ${isFiniteNumber(currentTime) ? currentTime.toFixed(3) : currentTime}, state: ${state}`);
11989
- if (this.state === State.ENDED) {
11990
- this.resetLoadingState();
11991
- } else if (fragCurrent) {
11992
- // Seeking while frag load is in progress
11993
- const tolerance = config.maxFragLookUpTolerance;
11994
- const fragStartOffset = fragCurrent.start - tolerance;
11995
- const fragEndOffset = fragCurrent.start + fragCurrent.duration + tolerance;
11996
- // if seeking out of buffered range or into new one
11997
- if (!bufferInfo.len || fragEndOffset < bufferInfo.start || fragStartOffset > bufferInfo.end) {
11998
- const pastFragment = currentTime > fragEndOffset;
11999
- // if the seek position is outside the current fragment range
12000
- if (currentTime < fragStartOffset || pastFragment) {
12001
- if (pastFragment && fragCurrent.loader) {
12002
- this.log('seeking outside of buffer while fragment load in progress, cancel fragment load');
12003
- fragCurrent.abortRequests();
12004
- this.resetLoadingState();
12005
- }
12006
- this.fragPrevious = null;
12007
- }
12008
- }
12009
- }
12010
- if (media) {
12011
- // Remove gap fragments
12012
- this.fragmentTracker.removeFragmentsInRange(currentTime, Infinity, this.playlistType, true);
12013
- this.lastCurrentTime = currentTime;
12014
- }
12015
-
12016
- // in case seeking occurs although no media buffered, adjust startPosition and nextLoadPosition to seek target
12017
- if (!this.loadedmetadata && !bufferInfo.len) {
12018
- this.nextLoadPosition = this.startPosition = currentTime;
12019
- }
12020
-
12021
- // Async tick to speed up processing
12022
- this.tickImmediate();
12023
- }
12024
- onMediaEnded() {
12025
- // reset startPosition and lastCurrentTime to restart playback @ stream beginning
12026
- this.startPosition = this.lastCurrentTime = 0;
12027
- }
12146
+ onManifestLoading() {}
12147
+ onError(event, data) {}
12028
12148
  onManifestLoaded(event, data) {
12029
12149
  this.startTimeOffset = data.startTimeOffset;
12030
12150
  this.initPTS = [];
@@ -12034,7 +12154,7 @@ class BaseStreamController extends TaskLoop {
12034
12154
  this.stopLoad();
12035
12155
  super.onHandlerDestroying();
12036
12156
  // @ts-ignore
12037
- this.hls = null;
12157
+ this.hls = this.onMediaSeeking = this.onMediaEnded = null;
12038
12158
  }
12039
12159
  onHandlerDestroyed() {
12040
12160
  this.state = State.STOPPED;
@@ -12165,10 +12285,10 @@ class BaseStreamController extends TaskLoop {
12165
12285
  const decryptData = frag.decryptdata;
12166
12286
 
12167
12287
  // check to see if the payload needs to be decrypted
12168
- if (payload && payload.byteLength > 0 && decryptData != null && decryptData.key && decryptData.iv && decryptData.method === 'AES-128') {
12288
+ if (payload && payload.byteLength > 0 && decryptData != null && decryptData.key && decryptData.iv && isFullSegmentEncryption(decryptData.method)) {
12169
12289
  const startTime = self.performance.now();
12170
12290
  // decrypt init segment data
12171
- return this.decrypter.decrypt(new Uint8Array(payload), decryptData.key.buffer, decryptData.iv.buffer).catch(err => {
12291
+ return this.decrypter.decrypt(new Uint8Array(payload), decryptData.key.buffer, decryptData.iv.buffer, getAesModeFromFullSegmentMethod(decryptData.method)).catch(err => {
12172
12292
  hls.trigger(Events.ERROR, {
12173
12293
  type: ErrorTypes.MEDIA_ERROR,
12174
12294
  details: ErrorDetails.FRAG_DECRYPT_ERROR,
@@ -12280,7 +12400,7 @@ class BaseStreamController extends TaskLoop {
12280
12400
  }
12281
12401
  let keyLoadingPromise = null;
12282
12402
  if (frag.encrypted && !((_frag$decryptdata = frag.decryptdata) != null && _frag$decryptdata.key)) {
12283
- this.log(`Loading key for ${frag.sn} of [${details.startSN}-${details.endSN}], ${this.logPrefix === '[stream-controller]' ? 'level' : 'track'} ${frag.level}`);
12403
+ this.log(`Loading key for ${frag.sn} of [${details.startSN}-${details.endSN}], ${this.playlistType === PlaylistLevelType.MAIN ? 'level' : 'track'} ${frag.level}`);
12284
12404
  this.state = State.KEY_LOADING;
12285
12405
  this.fragCurrent = frag;
12286
12406
  keyLoadingPromise = this.keyLoader.load(frag).then(keyLoadedData => {
@@ -12301,8 +12421,16 @@ class BaseStreamController extends TaskLoop {
12301
12421
  } else if (!frag.encrypted && details.encryptedFragments.length) {
12302
12422
  this.keyLoader.loadClear(frag, details.encryptedFragments);
12303
12423
  }
12424
+ const fragPrevious = this.fragPrevious;
12425
+ if (frag.sn !== 'initSegment' && (!fragPrevious || frag.sn !== fragPrevious.sn)) {
12426
+ const shouldLoadParts = this.shouldLoadParts(level.details, frag.end);
12427
+ if (shouldLoadParts !== this.loadingParts) {
12428
+ this.log(`LL-Part loading ${shouldLoadParts ? 'ON' : 'OFF'} loading sn ${fragPrevious == null ? void 0 : fragPrevious.sn}->${frag.sn}`);
12429
+ this.loadingParts = shouldLoadParts;
12430
+ }
12431
+ }
12304
12432
  targetBufferTime = Math.max(frag.start, targetBufferTime || 0);
12305
- if (this.config.lowLatencyMode && frag.sn !== 'initSegment') {
12433
+ if (this.loadingParts && frag.sn !== 'initSegment') {
12306
12434
  const partList = details.partList;
12307
12435
  if (partList && progressCallback) {
12308
12436
  if (targetBufferTime > frag.end && details.fragmentHint) {
@@ -12311,7 +12439,7 @@ class BaseStreamController extends TaskLoop {
12311
12439
  const partIndex = this.getNextPart(partList, frag, targetBufferTime);
12312
12440
  if (partIndex > -1) {
12313
12441
  const part = partList[partIndex];
12314
- this.log(`Loading part sn: ${frag.sn} p: ${part.index} cc: ${frag.cc} of playlist [${details.startSN}-${details.endSN}] parts [0-${partIndex}-${partList.length - 1}] ${this.logPrefix === '[stream-controller]' ? 'level' : 'track'}: ${frag.level}, target: ${parseFloat(targetBufferTime.toFixed(3))}`);
12442
+ this.log(`Loading part sn: ${frag.sn} p: ${part.index} cc: ${frag.cc} of playlist [${details.startSN}-${details.endSN}] parts [0-${partIndex}-${partList.length - 1}] ${this.playlistType === PlaylistLevelType.MAIN ? 'level' : 'track'}: ${frag.level}, target: ${parseFloat(targetBufferTime.toFixed(3))}`);
12315
12443
  this.nextLoadPosition = part.start + part.duration;
12316
12444
  this.state = State.FRAG_LOADING;
12317
12445
  let _result;
@@ -12340,7 +12468,14 @@ class BaseStreamController extends TaskLoop {
12340
12468
  }
12341
12469
  }
12342
12470
  }
12343
- this.log(`Loading fragment ${frag.sn} cc: ${frag.cc} ${details ? 'of [' + details.startSN + '-' + details.endSN + '] ' : ''}${this.logPrefix === '[stream-controller]' ? 'level' : 'track'}: ${frag.level}, target: ${parseFloat(targetBufferTime.toFixed(3))}`);
12471
+ if (frag.sn !== 'initSegment' && this.loadingParts) {
12472
+ this.log(`LL-Part loading OFF after next part miss @${targetBufferTime.toFixed(2)}`);
12473
+ this.loadingParts = false;
12474
+ } else if (!frag.url) {
12475
+ // Selected fragment hint for part but not loading parts
12476
+ return Promise.resolve(null);
12477
+ }
12478
+ this.log(`Loading fragment ${frag.sn} cc: ${frag.cc} ${details ? 'of [' + details.startSN + '-' + details.endSN + '] ' : ''}${this.playlistType === PlaylistLevelType.MAIN ? 'level' : 'track'}: ${frag.level}, target: ${parseFloat(targetBufferTime.toFixed(3))}`);
12344
12479
  // Don't update nextLoadPosition for fragments which are not buffered
12345
12480
  if (isFiniteNumber(frag.sn) && !this.bitrateTest) {
12346
12481
  this.nextLoadPosition = frag.start + frag.duration;
@@ -12438,8 +12573,36 @@ class BaseStreamController extends TaskLoop {
12438
12573
  if (part) {
12439
12574
  part.stats.parsing.end = now;
12440
12575
  }
12576
+ // See if part loading should be disabled/enabled based on buffer and playback position.
12577
+ if (frag.sn !== 'initSegment') {
12578
+ const levelDetails = this.getLevelDetails();
12579
+ const loadingPartsAtEdge = levelDetails && frag.sn > levelDetails.endSN;
12580
+ const shouldLoadParts = loadingPartsAtEdge || this.shouldLoadParts(levelDetails, frag.end);
12581
+ if (shouldLoadParts !== this.loadingParts) {
12582
+ this.log(`LL-Part loading ${shouldLoadParts ? 'ON' : 'OFF'} after parsing segment ending @${frag.end.toFixed(2)}`);
12583
+ this.loadingParts = shouldLoadParts;
12584
+ }
12585
+ }
12441
12586
  this.updateLevelTiming(frag, part, level, chunkMeta.partial);
12442
12587
  }
12588
+ shouldLoadParts(details, bufferEnd) {
12589
+ if (this.config.lowLatencyMode) {
12590
+ if (!details) {
12591
+ return this.loadingParts;
12592
+ }
12593
+ if (details != null && details.partList) {
12594
+ var _details$fragmentHint;
12595
+ // Buffer must be ahead of first part + duration of parts after last segment
12596
+ // and playback must be at or past segment adjacent to part list
12597
+ const firstPart = details.partList[0];
12598
+ const safePartStart = firstPart.end + (((_details$fragmentHint = details.fragmentHint) == null ? void 0 : _details$fragmentHint.duration) || 0);
12599
+ if (bufferEnd >= safePartStart && this.lastCurrentTime > firstPart.start - firstPart.fragment.duration) {
12600
+ return true;
12601
+ }
12602
+ }
12603
+ }
12604
+ return false;
12605
+ }
12443
12606
  getCurrentContext(chunkMeta) {
12444
12607
  const {
12445
12608
  levels,
@@ -12588,7 +12751,8 @@ class BaseStreamController extends TaskLoop {
12588
12751
  config
12589
12752
  } = this;
12590
12753
  const start = fragments[0].start;
12591
- let frag;
12754
+ const canLoadParts = config.lowLatencyMode && !!levelDetails.partList;
12755
+ let frag = null;
12592
12756
  if (levelDetails.live) {
12593
12757
  const initialLiveManifestSize = config.initialLiveManifestSize;
12594
12758
  if (fragLen < initialLiveManifestSize) {
@@ -12600,6 +12764,10 @@ class BaseStreamController extends TaskLoop {
12600
12764
  // Do not load using live logic if the starting frag is requested - we want to use getFragmentAtPosition() so that
12601
12765
  // we get the fragment matching that start time
12602
12766
  if (!levelDetails.PTSKnown && !this.startFragRequested && this.startPosition === -1 || pos < start) {
12767
+ if (canLoadParts && !this.loadingParts) {
12768
+ this.log(`LL-Part loading ON for initial live fragment`);
12769
+ this.loadingParts = true;
12770
+ }
12603
12771
  frag = this.getInitialLiveFragment(levelDetails, fragments);
12604
12772
  this.startPosition = this.nextLoadPosition = frag ? this.hls.liveSyncPosition || frag.start : pos;
12605
12773
  }
@@ -12610,7 +12778,7 @@ class BaseStreamController extends TaskLoop {
12610
12778
 
12611
12779
  // If we haven't run into any special cases already, just load the fragment most closely matching the requested position
12612
12780
  if (!frag) {
12613
- const end = config.lowLatencyMode ? levelDetails.partEnd : levelDetails.fragmentEnd;
12781
+ const end = this.loadingParts ? levelDetails.partEnd : levelDetails.fragmentEnd;
12614
12782
  frag = this.getFragmentAtPosition(pos, end, levelDetails);
12615
12783
  }
12616
12784
  return this.mapToInitFragWhenRequired(frag);
@@ -12732,7 +12900,7 @@ class BaseStreamController extends TaskLoop {
12732
12900
  } = levelDetails;
12733
12901
  const tolerance = config.maxFragLookUpTolerance;
12734
12902
  const partList = levelDetails.partList;
12735
- const loadingParts = !!(config.lowLatencyMode && partList != null && partList.length && fragmentHint);
12903
+ const loadingParts = !!(this.loadingParts && partList != null && partList.length && fragmentHint);
12736
12904
  if (loadingParts && fragmentHint && !this.bitrateTest) {
12737
12905
  // Include incomplete fragment with parts at end
12738
12906
  fragments = fragments.concat(fragmentHint);
@@ -12925,7 +13093,7 @@ class BaseStreamController extends TaskLoop {
12925
13093
  errorAction.resolved = true;
12926
13094
  }
12927
13095
  } else {
12928
- logger.warn(`${data.details} reached or exceeded max retry (${retryCount})`);
13096
+ this.warn(`${data.details} reached or exceeded max retry (${retryCount})`);
12929
13097
  return;
12930
13098
  }
12931
13099
  } else if ((errorAction == null ? void 0 : errorAction.action) === NetworkErrorAction.SendAlternateToPenaltyBox) {
@@ -13320,6 +13488,7 @@ const initPTSFn = (timestamp, timeOffset, initPTS) => {
13320
13488
  */
13321
13489
  function getAudioConfig(observer, data, offset, audioCodec) {
13322
13490
  let adtsObjectType;
13491
+ let originalAdtsObjectType;
13323
13492
  let adtsExtensionSamplingIndex;
13324
13493
  let adtsChannelConfig;
13325
13494
  let config;
@@ -13327,7 +13496,7 @@ function getAudioConfig(observer, data, offset, audioCodec) {
13327
13496
  const manifestCodec = audioCodec;
13328
13497
  const adtsSamplingRates = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
13329
13498
  // byte 2
13330
- adtsObjectType = ((data[offset + 2] & 0xc0) >>> 6) + 1;
13499
+ adtsObjectType = originalAdtsObjectType = ((data[offset + 2] & 0xc0) >>> 6) + 1;
13331
13500
  const adtsSamplingIndex = (data[offset + 2] & 0x3c) >>> 2;
13332
13501
  if (adtsSamplingIndex > adtsSamplingRates.length - 1) {
13333
13502
  const error = new Error(`invalid ADTS sampling index:${adtsSamplingIndex}`);
@@ -13344,8 +13513,8 @@ function getAudioConfig(observer, data, offset, audioCodec) {
13344
13513
  // byte 3
13345
13514
  adtsChannelConfig |= (data[offset + 3] & 0xc0) >>> 6;
13346
13515
  logger.log(`manifest codec:${audioCodec}, ADTS type:${adtsObjectType}, samplingIndex:${adtsSamplingIndex}`);
13347
- // firefox: freq less than 24kHz = AAC SBR (HE-AAC)
13348
- if (/firefox/i.test(userAgent)) {
13516
+ // Firefox and Pale Moon: freq less than 24kHz = AAC SBR (HE-AAC)
13517
+ if (/firefox|palemoon/i.test(userAgent)) {
13349
13518
  if (adtsSamplingIndex >= 6) {
13350
13519
  adtsObjectType = 5;
13351
13520
  config = new Array(4);
@@ -13439,6 +13608,7 @@ function getAudioConfig(observer, data, offset, audioCodec) {
13439
13608
  samplerate: adtsSamplingRates[adtsSamplingIndex],
13440
13609
  channelCount: adtsChannelConfig,
13441
13610
  codec: 'mp4a.40.' + adtsObjectType,
13611
+ parsedCodec: 'mp4a.40.' + originalAdtsObjectType,
13442
13612
  manifestCodec
13443
13613
  };
13444
13614
  }
@@ -13493,7 +13663,8 @@ function initTrackConfig(track, observer, data, offset, audioCodec) {
13493
13663
  track.channelCount = config.channelCount;
13494
13664
  track.codec = config.codec;
13495
13665
  track.manifestCodec = config.manifestCodec;
13496
- logger.log(`parsed codec:${track.codec}, rate:${config.samplerate}, channels:${config.channelCount}`);
13666
+ track.parsedCodec = config.parsedCodec;
13667
+ logger.log(`parsed codec:${track.parsedCodec}, codec:${track.codec}, rate:${config.samplerate}, channels:${config.channelCount}`);
13497
13668
  }
13498
13669
  }
13499
13670
  function getFrameDuration(samplerate) {
@@ -13971,17 +14142,121 @@ class BaseVideoParser {
13971
14142
  logger.log(VideoSample.pts + '/' + VideoSample.dts + ':' + VideoSample.debug);
13972
14143
  }
13973
14144
  }
13974
- }
13975
-
13976
- /**
13977
- * Parser for exponential Golomb codes, a variable-bitwidth number encoding scheme used by h264.
13978
- */
13979
-
13980
- class ExpGolomb {
13981
- constructor(data) {
13982
- this.data = void 0;
13983
- this.bytesAvailable = void 0;
13984
- this.word = void 0;
14145
+ parseNALu(track, array) {
14146
+ const len = array.byteLength;
14147
+ let state = track.naluState || 0;
14148
+ const lastState = state;
14149
+ const units = [];
14150
+ let i = 0;
14151
+ let value;
14152
+ let overflow;
14153
+ let unitType;
14154
+ let lastUnitStart = -1;
14155
+ let lastUnitType = 0;
14156
+ // logger.log('PES:' + Hex.hexDump(array));
14157
+
14158
+ if (state === -1) {
14159
+ // special use case where we found 3 or 4-byte start codes exactly at the end of previous PES packet
14160
+ lastUnitStart = 0;
14161
+ // NALu type is value read from offset 0
14162
+ lastUnitType = this.getNALuType(array, 0);
14163
+ state = 0;
14164
+ i = 1;
14165
+ }
14166
+ while (i < len) {
14167
+ value = array[i++];
14168
+ // optimization. state 0 and 1 are the predominant case. let's handle them outside of the switch/case
14169
+ if (!state) {
14170
+ state = value ? 0 : 1;
14171
+ continue;
14172
+ }
14173
+ if (state === 1) {
14174
+ state = value ? 0 : 2;
14175
+ continue;
14176
+ }
14177
+ // here we have state either equal to 2 or 3
14178
+ if (!value) {
14179
+ state = 3;
14180
+ } else if (value === 1) {
14181
+ overflow = i - state - 1;
14182
+ if (lastUnitStart >= 0) {
14183
+ const unit = {
14184
+ data: array.subarray(lastUnitStart, overflow),
14185
+ type: lastUnitType
14186
+ };
14187
+ // logger.log('pushing NALU, type/size:' + unit.type + '/' + unit.data.byteLength);
14188
+ units.push(unit);
14189
+ } else {
14190
+ // lastUnitStart is undefined => this is the first start code found in this PES packet
14191
+ // first check if start code delimiter is overlapping between 2 PES packets,
14192
+ // ie it started in last packet (lastState not zero)
14193
+ // and ended at the beginning of this PES packet (i <= 4 - lastState)
14194
+ const lastUnit = this.getLastNalUnit(track.samples);
14195
+ if (lastUnit) {
14196
+ if (lastState && i <= 4 - lastState) {
14197
+ // start delimiter overlapping between PES packets
14198
+ // strip start delimiter bytes from the end of last NAL unit
14199
+ // check if lastUnit had a state different from zero
14200
+ if (lastUnit.state) {
14201
+ // strip last bytes
14202
+ lastUnit.data = lastUnit.data.subarray(0, lastUnit.data.byteLength - lastState);
14203
+ }
14204
+ }
14205
+ // If NAL units are not starting right at the beginning of the PES packet, push preceding data into previous NAL unit.
14206
+
14207
+ if (overflow > 0) {
14208
+ // logger.log('first NALU found with overflow:' + overflow);
14209
+ lastUnit.data = appendUint8Array(lastUnit.data, array.subarray(0, overflow));
14210
+ lastUnit.state = 0;
14211
+ }
14212
+ }
14213
+ }
14214
+ // check if we can read unit type
14215
+ if (i < len) {
14216
+ unitType = this.getNALuType(array, i);
14217
+ // logger.log('find NALU @ offset:' + i + ',type:' + unitType);
14218
+ lastUnitStart = i;
14219
+ lastUnitType = unitType;
14220
+ state = 0;
14221
+ } else {
14222
+ // not enough byte to read unit type. let's read it on next PES parsing
14223
+ state = -1;
14224
+ }
14225
+ } else {
14226
+ state = 0;
14227
+ }
14228
+ }
14229
+ if (lastUnitStart >= 0 && state >= 0) {
14230
+ const unit = {
14231
+ data: array.subarray(lastUnitStart, len),
14232
+ type: lastUnitType,
14233
+ state: state
14234
+ };
14235
+ units.push(unit);
14236
+ // logger.log('pushing NALU, type/size/state:' + unit.type + '/' + unit.data.byteLength + '/' + state);
14237
+ }
14238
+ // no NALu found
14239
+ if (units.length === 0) {
14240
+ // append pes.data to previous NAL unit
14241
+ const lastUnit = this.getLastNalUnit(track.samples);
14242
+ if (lastUnit) {
14243
+ lastUnit.data = appendUint8Array(lastUnit.data, array);
14244
+ }
14245
+ }
14246
+ track.naluState = state;
14247
+ return units;
14248
+ }
14249
+ }
14250
+
14251
+ /**
14252
+ * Parser for exponential Golomb codes, a variable-bitwidth number encoding scheme used by h264.
14253
+ */
14254
+
14255
+ class ExpGolomb {
14256
+ constructor(data) {
14257
+ this.data = void 0;
14258
+ this.bytesAvailable = void 0;
14259
+ this.word = void 0;
13985
14260
  this.bitsAvailable = void 0;
13986
14261
  this.data = data;
13987
14262
  // the number of bytes left to examine in this.data
@@ -14113,194 +14388,11 @@ class ExpGolomb {
14113
14388
  readUInt() {
14114
14389
  return this.readBits(32);
14115
14390
  }
14116
-
14117
- /**
14118
- * Advance the ExpGolomb decoder past a scaling list. The scaling
14119
- * list is optionally transmitted as part of a sequence parameter
14120
- * set and is not relevant to transmuxing.
14121
- * @param count the number of entries in this scaling list
14122
- * @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
14123
- */
14124
- skipScalingList(count) {
14125
- let lastScale = 8;
14126
- let nextScale = 8;
14127
- let deltaScale;
14128
- for (let j = 0; j < count; j++) {
14129
- if (nextScale !== 0) {
14130
- deltaScale = this.readEG();
14131
- nextScale = (lastScale + deltaScale + 256) % 256;
14132
- }
14133
- lastScale = nextScale === 0 ? lastScale : nextScale;
14134
- }
14135
- }
14136
-
14137
- /**
14138
- * Read a sequence parameter set and return some interesting video
14139
- * properties. A sequence parameter set is the H264 metadata that
14140
- * describes the properties of upcoming video frames.
14141
- * @returns an object with configuration parsed from the
14142
- * sequence parameter set, including the dimensions of the
14143
- * associated video frames.
14144
- */
14145
- readSPS() {
14146
- let frameCropLeftOffset = 0;
14147
- let frameCropRightOffset = 0;
14148
- let frameCropTopOffset = 0;
14149
- let frameCropBottomOffset = 0;
14150
- let numRefFramesInPicOrderCntCycle;
14151
- let scalingListCount;
14152
- let i;
14153
- const readUByte = this.readUByte.bind(this);
14154
- const readBits = this.readBits.bind(this);
14155
- const readUEG = this.readUEG.bind(this);
14156
- const readBoolean = this.readBoolean.bind(this);
14157
- const skipBits = this.skipBits.bind(this);
14158
- const skipEG = this.skipEG.bind(this);
14159
- const skipUEG = this.skipUEG.bind(this);
14160
- const skipScalingList = this.skipScalingList.bind(this);
14161
- readUByte();
14162
- const profileIdc = readUByte(); // profile_idc
14163
- readBits(5); // profileCompat constraint_set[0-4]_flag, u(5)
14164
- skipBits(3); // reserved_zero_3bits u(3),
14165
- readUByte(); // level_idc u(8)
14166
- skipUEG(); // seq_parameter_set_id
14167
- // some profiles have more optional data we don't need
14168
- if (profileIdc === 100 || profileIdc === 110 || profileIdc === 122 || profileIdc === 244 || profileIdc === 44 || profileIdc === 83 || profileIdc === 86 || profileIdc === 118 || profileIdc === 128) {
14169
- const chromaFormatIdc = readUEG();
14170
- if (chromaFormatIdc === 3) {
14171
- skipBits(1);
14172
- } // separate_colour_plane_flag
14173
-
14174
- skipUEG(); // bit_depth_luma_minus8
14175
- skipUEG(); // bit_depth_chroma_minus8
14176
- skipBits(1); // qpprime_y_zero_transform_bypass_flag
14177
- if (readBoolean()) {
14178
- // seq_scaling_matrix_present_flag
14179
- scalingListCount = chromaFormatIdc !== 3 ? 8 : 12;
14180
- for (i = 0; i < scalingListCount; i++) {
14181
- if (readBoolean()) {
14182
- // seq_scaling_list_present_flag[ i ]
14183
- if (i < 6) {
14184
- skipScalingList(16);
14185
- } else {
14186
- skipScalingList(64);
14187
- }
14188
- }
14189
- }
14190
- }
14191
- }
14192
- skipUEG(); // log2_max_frame_num_minus4
14193
- const picOrderCntType = readUEG();
14194
- if (picOrderCntType === 0) {
14195
- readUEG(); // log2_max_pic_order_cnt_lsb_minus4
14196
- } else if (picOrderCntType === 1) {
14197
- skipBits(1); // delta_pic_order_always_zero_flag
14198
- skipEG(); // offset_for_non_ref_pic
14199
- skipEG(); // offset_for_top_to_bottom_field
14200
- numRefFramesInPicOrderCntCycle = readUEG();
14201
- for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
14202
- skipEG();
14203
- } // offset_for_ref_frame[ i ]
14204
- }
14205
- skipUEG(); // max_num_ref_frames
14206
- skipBits(1); // gaps_in_frame_num_value_allowed_flag
14207
- const picWidthInMbsMinus1 = readUEG();
14208
- const picHeightInMapUnitsMinus1 = readUEG();
14209
- const frameMbsOnlyFlag = readBits(1);
14210
- if (frameMbsOnlyFlag === 0) {
14211
- skipBits(1);
14212
- } // mb_adaptive_frame_field_flag
14213
-
14214
- skipBits(1); // direct_8x8_inference_flag
14215
- if (readBoolean()) {
14216
- // frame_cropping_flag
14217
- frameCropLeftOffset = readUEG();
14218
- frameCropRightOffset = readUEG();
14219
- frameCropTopOffset = readUEG();
14220
- frameCropBottomOffset = readUEG();
14221
- }
14222
- let pixelRatio = [1, 1];
14223
- if (readBoolean()) {
14224
- // vui_parameters_present_flag
14225
- if (readBoolean()) {
14226
- // aspect_ratio_info_present_flag
14227
- const aspectRatioIdc = readUByte();
14228
- switch (aspectRatioIdc) {
14229
- case 1:
14230
- pixelRatio = [1, 1];
14231
- break;
14232
- case 2:
14233
- pixelRatio = [12, 11];
14234
- break;
14235
- case 3:
14236
- pixelRatio = [10, 11];
14237
- break;
14238
- case 4:
14239
- pixelRatio = [16, 11];
14240
- break;
14241
- case 5:
14242
- pixelRatio = [40, 33];
14243
- break;
14244
- case 6:
14245
- pixelRatio = [24, 11];
14246
- break;
14247
- case 7:
14248
- pixelRatio = [20, 11];
14249
- break;
14250
- case 8:
14251
- pixelRatio = [32, 11];
14252
- break;
14253
- case 9:
14254
- pixelRatio = [80, 33];
14255
- break;
14256
- case 10:
14257
- pixelRatio = [18, 11];
14258
- break;
14259
- case 11:
14260
- pixelRatio = [15, 11];
14261
- break;
14262
- case 12:
14263
- pixelRatio = [64, 33];
14264
- break;
14265
- case 13:
14266
- pixelRatio = [160, 99];
14267
- break;
14268
- case 14:
14269
- pixelRatio = [4, 3];
14270
- break;
14271
- case 15:
14272
- pixelRatio = [3, 2];
14273
- break;
14274
- case 16:
14275
- pixelRatio = [2, 1];
14276
- break;
14277
- case 255:
14278
- {
14279
- pixelRatio = [readUByte() << 8 | readUByte(), readUByte() << 8 | readUByte()];
14280
- break;
14281
- }
14282
- }
14283
- }
14284
- }
14285
- return {
14286
- width: Math.ceil((picWidthInMbsMinus1 + 1) * 16 - frameCropLeftOffset * 2 - frameCropRightOffset * 2),
14287
- height: (2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16 - (frameMbsOnlyFlag ? 2 : 4) * (frameCropTopOffset + frameCropBottomOffset),
14288
- pixelRatio: pixelRatio
14289
- };
14290
- }
14291
- readSliceType() {
14292
- // skip NALu type
14293
- this.readUByte();
14294
- // discard first_mb_in_slice
14295
- this.readUEG();
14296
- // return slice_type
14297
- return this.readUEG();
14298
- }
14299
14391
  }
14300
14392
 
14301
14393
  class AvcVideoParser extends BaseVideoParser {
14302
- parseAVCPES(track, textTrack, pes, last, duration) {
14303
- const units = this.parseAVCNALu(track, pes.data);
14394
+ parsePES(track, textTrack, pes, last, duration) {
14395
+ const units = this.parseNALu(track, pes.data);
14304
14396
  let VideoSample = this.VideoSample;
14305
14397
  let push;
14306
14398
  let spsfound = false;
@@ -14325,7 +14417,7 @@ class AvcVideoParser extends BaseVideoParser {
14325
14417
  // only check slice type to detect KF in case SPS found in same packet (any keyframe is preceded by SPS ...)
14326
14418
  if (spsfound && data.length > 4) {
14327
14419
  // retrieve slice type by parsing beginning of NAL unit (follow H264 spec, slice_header definition) to detect keyframe embedded in NDR
14328
- const sliceType = new ExpGolomb(data).readSliceType();
14420
+ const sliceType = this.readSliceType(data);
14329
14421
  // 2 : I slice, 4 : SI slice, 7 : I slice, 9: SI slice
14330
14422
  // SI slice : A slice that is coded using intra prediction only and using quantisation of the prediction samples.
14331
14423
  // An SI slice can be coded such that its decoded samples can be constructed identically to an SP slice.
@@ -14379,8 +14471,7 @@ class AvcVideoParser extends BaseVideoParser {
14379
14471
  push = true;
14380
14472
  spsfound = true;
14381
14473
  const sps = unit.data;
14382
- const expGolombDecoder = new ExpGolomb(sps);
14383
- const config = expGolombDecoder.readSPS();
14474
+ const config = this.readSPS(sps);
14384
14475
  if (!track.sps || track.width !== config.width || track.height !== config.height || ((_track$pixelRatio = track.pixelRatio) == null ? void 0 : _track$pixelRatio[0]) !== config.pixelRatio[0] || ((_track$pixelRatio2 = track.pixelRatio) == null ? void 0 : _track$pixelRatio2[1]) !== config.pixelRatio[1]) {
14385
14476
  track.width = config.width;
14386
14477
  track.height = config.height;
@@ -14436,109 +14527,192 @@ class AvcVideoParser extends BaseVideoParser {
14436
14527
  this.VideoSample = null;
14437
14528
  }
14438
14529
  }
14439
- parseAVCNALu(track, array) {
14440
- const len = array.byteLength;
14441
- let state = track.naluState || 0;
14442
- const lastState = state;
14443
- const units = [];
14444
- let i = 0;
14445
- let value;
14446
- let overflow;
14447
- let unitType;
14448
- let lastUnitStart = -1;
14449
- let lastUnitType = 0;
14450
- // logger.log('PES:' + Hex.hexDump(array));
14530
+ getNALuType(data, offset) {
14531
+ return data[offset] & 0x1f;
14532
+ }
14533
+ readSliceType(data) {
14534
+ const eg = new ExpGolomb(data);
14535
+ // skip NALu type
14536
+ eg.readUByte();
14537
+ // discard first_mb_in_slice
14538
+ eg.readUEG();
14539
+ // return slice_type
14540
+ return eg.readUEG();
14541
+ }
14451
14542
 
14452
- if (state === -1) {
14453
- // special use case where we found 3 or 4-byte start codes exactly at the end of previous PES packet
14454
- lastUnitStart = 0;
14455
- // NALu type is value read from offset 0
14456
- lastUnitType = array[0] & 0x1f;
14457
- state = 0;
14458
- i = 1;
14459
- }
14460
- while (i < len) {
14461
- value = array[i++];
14462
- // optimization. state 0 and 1 are the predominant case. let's handle them outside of the switch/case
14463
- if (!state) {
14464
- state = value ? 0 : 1;
14465
- continue;
14466
- }
14467
- if (state === 1) {
14468
- state = value ? 0 : 2;
14469
- continue;
14543
+ /**
14544
+ * The scaling list is optionally transmitted as part of a sequence parameter
14545
+ * set and is not relevant to transmuxing.
14546
+ * @param count the number of entries in this scaling list
14547
+ * @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
14548
+ */
14549
+ skipScalingList(count, reader) {
14550
+ let lastScale = 8;
14551
+ let nextScale = 8;
14552
+ let deltaScale;
14553
+ for (let j = 0; j < count; j++) {
14554
+ if (nextScale !== 0) {
14555
+ deltaScale = reader.readEG();
14556
+ nextScale = (lastScale + deltaScale + 256) % 256;
14470
14557
  }
14471
- // here we have state either equal to 2 or 3
14472
- if (!value) {
14473
- state = 3;
14474
- } else if (value === 1) {
14475
- overflow = i - state - 1;
14476
- if (lastUnitStart >= 0) {
14477
- const unit = {
14478
- data: array.subarray(lastUnitStart, overflow),
14479
- type: lastUnitType
14480
- };
14481
- // logger.log('pushing NALU, type/size:' + unit.type + '/' + unit.data.byteLength);
14482
- units.push(unit);
14483
- } else {
14484
- // lastUnitStart is undefined => this is the first start code found in this PES packet
14485
- // first check if start code delimiter is overlapping between 2 PES packets,
14486
- // ie it started in last packet (lastState not zero)
14487
- // and ended at the beginning of this PES packet (i <= 4 - lastState)
14488
- const lastUnit = this.getLastNalUnit(track.samples);
14489
- if (lastUnit) {
14490
- if (lastState && i <= 4 - lastState) {
14491
- // start delimiter overlapping between PES packets
14492
- // strip start delimiter bytes from the end of last NAL unit
14493
- // check if lastUnit had a state different from zero
14494
- if (lastUnit.state) {
14495
- // strip last bytes
14496
- lastUnit.data = lastUnit.data.subarray(0, lastUnit.data.byteLength - lastState);
14497
- }
14498
- }
14499
- // If NAL units are not starting right at the beginning of the PES packet, push preceding data into previous NAL unit.
14558
+ lastScale = nextScale === 0 ? lastScale : nextScale;
14559
+ }
14560
+ }
14500
14561
 
14501
- if (overflow > 0) {
14502
- // logger.log('first NALU found with overflow:' + overflow);
14503
- lastUnit.data = appendUint8Array(lastUnit.data, array.subarray(0, overflow));
14504
- lastUnit.state = 0;
14562
+ /**
14563
+ * Read a sequence parameter set and return some interesting video
14564
+ * properties. A sequence parameter set is the H264 metadata that
14565
+ * describes the properties of upcoming video frames.
14566
+ * @returns an object with configuration parsed from the
14567
+ * sequence parameter set, including the dimensions of the
14568
+ * associated video frames.
14569
+ */
14570
+ readSPS(sps) {
14571
+ const eg = new ExpGolomb(sps);
14572
+ let frameCropLeftOffset = 0;
14573
+ let frameCropRightOffset = 0;
14574
+ let frameCropTopOffset = 0;
14575
+ let frameCropBottomOffset = 0;
14576
+ let numRefFramesInPicOrderCntCycle;
14577
+ let scalingListCount;
14578
+ let i;
14579
+ const readUByte = eg.readUByte.bind(eg);
14580
+ const readBits = eg.readBits.bind(eg);
14581
+ const readUEG = eg.readUEG.bind(eg);
14582
+ const readBoolean = eg.readBoolean.bind(eg);
14583
+ const skipBits = eg.skipBits.bind(eg);
14584
+ const skipEG = eg.skipEG.bind(eg);
14585
+ const skipUEG = eg.skipUEG.bind(eg);
14586
+ const skipScalingList = this.skipScalingList.bind(this);
14587
+ readUByte();
14588
+ const profileIdc = readUByte(); // profile_idc
14589
+ readBits(5); // profileCompat constraint_set[0-4]_flag, u(5)
14590
+ skipBits(3); // reserved_zero_3bits u(3),
14591
+ readUByte(); // level_idc u(8)
14592
+ skipUEG(); // seq_parameter_set_id
14593
+ // some profiles have more optional data we don't need
14594
+ if (profileIdc === 100 || profileIdc === 110 || profileIdc === 122 || profileIdc === 244 || profileIdc === 44 || profileIdc === 83 || profileIdc === 86 || profileIdc === 118 || profileIdc === 128) {
14595
+ const chromaFormatIdc = readUEG();
14596
+ if (chromaFormatIdc === 3) {
14597
+ skipBits(1);
14598
+ } // separate_colour_plane_flag
14599
+
14600
+ skipUEG(); // bit_depth_luma_minus8
14601
+ skipUEG(); // bit_depth_chroma_minus8
14602
+ skipBits(1); // qpprime_y_zero_transform_bypass_flag
14603
+ if (readBoolean()) {
14604
+ // seq_scaling_matrix_present_flag
14605
+ scalingListCount = chromaFormatIdc !== 3 ? 8 : 12;
14606
+ for (i = 0; i < scalingListCount; i++) {
14607
+ if (readBoolean()) {
14608
+ // seq_scaling_list_present_flag[ i ]
14609
+ if (i < 6) {
14610
+ skipScalingList(16, eg);
14611
+ } else {
14612
+ skipScalingList(64, eg);
14505
14613
  }
14506
14614
  }
14507
14615
  }
14508
- // check if we can read unit type
14509
- if (i < len) {
14510
- unitType = array[i] & 0x1f;
14511
- // logger.log('find NALU @ offset:' + i + ',type:' + unitType);
14512
- lastUnitStart = i;
14513
- lastUnitType = unitType;
14514
- state = 0;
14515
- } else {
14516
- // not enough byte to read unit type. let's read it on next PES parsing
14517
- state = -1;
14518
- }
14519
- } else {
14520
- state = 0;
14521
14616
  }
14522
14617
  }
14523
- if (lastUnitStart >= 0 && state >= 0) {
14524
- const unit = {
14525
- data: array.subarray(lastUnitStart, len),
14526
- type: lastUnitType,
14527
- state: state
14528
- };
14529
- units.push(unit);
14530
- // logger.log('pushing NALU, type/size/state:' + unit.type + '/' + unit.data.byteLength + '/' + state);
14618
+ skipUEG(); // log2_max_frame_num_minus4
14619
+ const picOrderCntType = readUEG();
14620
+ if (picOrderCntType === 0) {
14621
+ readUEG(); // log2_max_pic_order_cnt_lsb_minus4
14622
+ } else if (picOrderCntType === 1) {
14623
+ skipBits(1); // delta_pic_order_always_zero_flag
14624
+ skipEG(); // offset_for_non_ref_pic
14625
+ skipEG(); // offset_for_top_to_bottom_field
14626
+ numRefFramesInPicOrderCntCycle = readUEG();
14627
+ for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
14628
+ skipEG();
14629
+ } // offset_for_ref_frame[ i ]
14531
14630
  }
14532
- // no NALu found
14533
- if (units.length === 0) {
14534
- // append pes.data to previous NAL unit
14535
- const lastUnit = this.getLastNalUnit(track.samples);
14536
- if (lastUnit) {
14537
- lastUnit.data = appendUint8Array(lastUnit.data, array);
14631
+ skipUEG(); // max_num_ref_frames
14632
+ skipBits(1); // gaps_in_frame_num_value_allowed_flag
14633
+ const picWidthInMbsMinus1 = readUEG();
14634
+ const picHeightInMapUnitsMinus1 = readUEG();
14635
+ const frameMbsOnlyFlag = readBits(1);
14636
+ if (frameMbsOnlyFlag === 0) {
14637
+ skipBits(1);
14638
+ } // mb_adaptive_frame_field_flag
14639
+
14640
+ skipBits(1); // direct_8x8_inference_flag
14641
+ if (readBoolean()) {
14642
+ // frame_cropping_flag
14643
+ frameCropLeftOffset = readUEG();
14644
+ frameCropRightOffset = readUEG();
14645
+ frameCropTopOffset = readUEG();
14646
+ frameCropBottomOffset = readUEG();
14647
+ }
14648
+ let pixelRatio = [1, 1];
14649
+ if (readBoolean()) {
14650
+ // vui_parameters_present_flag
14651
+ if (readBoolean()) {
14652
+ // aspect_ratio_info_present_flag
14653
+ const aspectRatioIdc = readUByte();
14654
+ switch (aspectRatioIdc) {
14655
+ case 1:
14656
+ pixelRatio = [1, 1];
14657
+ break;
14658
+ case 2:
14659
+ pixelRatio = [12, 11];
14660
+ break;
14661
+ case 3:
14662
+ pixelRatio = [10, 11];
14663
+ break;
14664
+ case 4:
14665
+ pixelRatio = [16, 11];
14666
+ break;
14667
+ case 5:
14668
+ pixelRatio = [40, 33];
14669
+ break;
14670
+ case 6:
14671
+ pixelRatio = [24, 11];
14672
+ break;
14673
+ case 7:
14674
+ pixelRatio = [20, 11];
14675
+ break;
14676
+ case 8:
14677
+ pixelRatio = [32, 11];
14678
+ break;
14679
+ case 9:
14680
+ pixelRatio = [80, 33];
14681
+ break;
14682
+ case 10:
14683
+ pixelRatio = [18, 11];
14684
+ break;
14685
+ case 11:
14686
+ pixelRatio = [15, 11];
14687
+ break;
14688
+ case 12:
14689
+ pixelRatio = [64, 33];
14690
+ break;
14691
+ case 13:
14692
+ pixelRatio = [160, 99];
14693
+ break;
14694
+ case 14:
14695
+ pixelRatio = [4, 3];
14696
+ break;
14697
+ case 15:
14698
+ pixelRatio = [3, 2];
14699
+ break;
14700
+ case 16:
14701
+ pixelRatio = [2, 1];
14702
+ break;
14703
+ case 255:
14704
+ {
14705
+ pixelRatio = [readUByte() << 8 | readUByte(), readUByte() << 8 | readUByte()];
14706
+ break;
14707
+ }
14708
+ }
14538
14709
  }
14539
14710
  }
14540
- track.naluState = state;
14541
- return units;
14711
+ return {
14712
+ width: Math.ceil((picWidthInMbsMinus1 + 1) * 16 - frameCropLeftOffset * 2 - frameCropRightOffset * 2),
14713
+ height: (2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16 - (frameMbsOnlyFlag ? 2 : 4) * (frameCropTopOffset + frameCropBottomOffset),
14714
+ pixelRatio: pixelRatio
14715
+ };
14542
14716
  }
14543
14717
  }
14544
14718
 
@@ -14556,7 +14730,7 @@ class SampleAesDecrypter {
14556
14730
  });
14557
14731
  }
14558
14732
  decryptBuffer(encryptedData) {
14559
- return this.decrypter.decrypt(encryptedData, this.keyData.key.buffer, this.keyData.iv.buffer);
14733
+ return this.decrypter.decrypt(encryptedData, this.keyData.key.buffer, this.keyData.iv.buffer, DecrypterAesMode.cbc);
14560
14734
  }
14561
14735
 
14562
14736
  // AAC - encrypt all full 16 bytes blocks starting from offset 16
@@ -14670,7 +14844,7 @@ class TSDemuxer {
14670
14844
  this.observer = observer;
14671
14845
  this.config = config;
14672
14846
  this.typeSupported = typeSupported;
14673
- this.videoParser = new AvcVideoParser();
14847
+ this.videoParser = null;
14674
14848
  }
14675
14849
  static probe(data) {
14676
14850
  const syncOffset = TSDemuxer.syncOffset(data);
@@ -14835,7 +15009,16 @@ class TSDemuxer {
14835
15009
  case videoPid:
14836
15010
  if (stt) {
14837
15011
  if (videoData && (pes = parsePES(videoData))) {
14838
- this.videoParser.parseAVCPES(videoTrack, textTrack, pes, false, this._duration);
15012
+ if (this.videoParser === null) {
15013
+ switch (videoTrack.segmentCodec) {
15014
+ case 'avc':
15015
+ this.videoParser = new AvcVideoParser();
15016
+ break;
15017
+ }
15018
+ }
15019
+ if (this.videoParser !== null) {
15020
+ this.videoParser.parsePES(videoTrack, textTrack, pes, false, this._duration);
15021
+ }
14839
15022
  }
14840
15023
  videoData = {
14841
15024
  data: [],
@@ -14997,8 +15180,17 @@ class TSDemuxer {
14997
15180
  // try to parse last PES packets
14998
15181
  let pes;
14999
15182
  if (videoData && (pes = parsePES(videoData))) {
15000
- this.videoParser.parseAVCPES(videoTrack, textTrack, pes, true, this._duration);
15001
- videoTrack.pesData = null;
15183
+ if (this.videoParser === null) {
15184
+ switch (videoTrack.segmentCodec) {
15185
+ case 'avc':
15186
+ this.videoParser = new AvcVideoParser();
15187
+ break;
15188
+ }
15189
+ }
15190
+ if (this.videoParser !== null) {
15191
+ this.videoParser.parsePES(videoTrack, textTrack, pes, true, this._duration);
15192
+ videoTrack.pesData = null;
15193
+ }
15002
15194
  } else {
15003
15195
  // either avcData null or PES truncated, keep it for next frag parsing
15004
15196
  videoTrack.pesData = videoData;
@@ -15301,7 +15493,10 @@ function parsePMT(data, offset, typeSupported, isSampleAes) {
15301
15493
  logger.warn('Unsupported EC-3 in M2TS found');
15302
15494
  break;
15303
15495
  case 0x24:
15304
- logger.warn('Unsupported HEVC in M2TS found');
15496
+ // ITU-T Rec. H.265 and ISO/IEC 23008-2 (HEVC)
15497
+ {
15498
+ logger.warn('Unsupported HEVC in M2TS found');
15499
+ }
15305
15500
  break;
15306
15501
  }
15307
15502
  // move to the next table entry
@@ -15524,6 +15719,8 @@ class MP4 {
15524
15719
  avc1: [],
15525
15720
  // codingname
15526
15721
  avcC: [],
15722
+ hvc1: [],
15723
+ hvcC: [],
15527
15724
  btrt: [],
15528
15725
  dinf: [],
15529
15726
  dref: [],
@@ -15948,8 +16145,10 @@ class MP4 {
15948
16145
  return MP4.box(MP4.types.stsd, MP4.STSD, MP4.ac3(track));
15949
16146
  }
15950
16147
  return MP4.box(MP4.types.stsd, MP4.STSD, MP4.mp4a(track));
15951
- } else {
16148
+ } else if (track.segmentCodec === 'avc') {
15952
16149
  return MP4.box(MP4.types.stsd, MP4.STSD, MP4.avc1(track));
16150
+ } else {
16151
+ return MP4.box(MP4.types.stsd, MP4.STSD, MP4.hvc1(track));
15953
16152
  }
15954
16153
  }
15955
16154
  static tkhd(track) {
@@ -16087,6 +16286,84 @@ class MP4 {
16087
16286
  const result = appendUint8Array(MP4.FTYP, movie);
16088
16287
  return result;
16089
16288
  }
16289
+ static hvc1(track) {
16290
+ const ps = track.params;
16291
+ const units = [track.vps, track.sps, track.pps];
16292
+ const NALuLengthSize = 4;
16293
+ const config = new Uint8Array([0x01, ps.general_profile_space << 6 | (ps.general_tier_flag ? 32 : 0) | ps.general_profile_idc, ps.general_profile_compatibility_flags[0], ps.general_profile_compatibility_flags[1], ps.general_profile_compatibility_flags[2], ps.general_profile_compatibility_flags[3], ps.general_constraint_indicator_flags[0], ps.general_constraint_indicator_flags[1], ps.general_constraint_indicator_flags[2], ps.general_constraint_indicator_flags[3], ps.general_constraint_indicator_flags[4], ps.general_constraint_indicator_flags[5], ps.general_level_idc, 240 | ps.min_spatial_segmentation_idc >> 8, 255 & ps.min_spatial_segmentation_idc, 252 | ps.parallelismType, 252 | ps.chroma_format_idc, 248 | ps.bit_depth_luma_minus8, 248 | ps.bit_depth_chroma_minus8, 0x00, parseInt(ps.frame_rate.fps), NALuLengthSize - 1 | ps.temporal_id_nested << 2 | ps.num_temporal_layers << 3 | (ps.frame_rate.fixed ? 64 : 0), units.length]);
16294
+
16295
+ // compute hvcC size in bytes
16296
+ let length = config.length;
16297
+ for (let i = 0; i < units.length; i += 1) {
16298
+ length += 3;
16299
+ for (let j = 0; j < units[i].length; j += 1) {
16300
+ length += 2 + units[i][j].length;
16301
+ }
16302
+ }
16303
+ const hvcC = new Uint8Array(length);
16304
+ hvcC.set(config, 0);
16305
+ length = config.length;
16306
+ // append parameter set units: one vps, one or more sps and pps
16307
+ const iMax = units.length - 1;
16308
+ for (let i = 0; i < units.length; i += 1) {
16309
+ hvcC.set(new Uint8Array([32 + i | (i === iMax ? 128 : 0), 0x00, units[i].length]), length);
16310
+ length += 3;
16311
+ for (let j = 0; j < units[i].length; j += 1) {
16312
+ hvcC.set(new Uint8Array([units[i][j].length >> 8, units[i][j].length & 255]), length);
16313
+ length += 2;
16314
+ hvcC.set(units[i][j], length);
16315
+ length += units[i][j].length;
16316
+ }
16317
+ }
16318
+ const hvcc = MP4.box(MP4.types.hvcC, hvcC);
16319
+ const width = track.width;
16320
+ const height = track.height;
16321
+ const hSpacing = track.pixelRatio[0];
16322
+ const vSpacing = track.pixelRatio[1];
16323
+ return MP4.box(MP4.types.hvc1, new Uint8Array([0x00, 0x00, 0x00,
16324
+ // reserved
16325
+ 0x00, 0x00, 0x00,
16326
+ // reserved
16327
+ 0x00, 0x01,
16328
+ // data_reference_index
16329
+ 0x00, 0x00,
16330
+ // pre_defined
16331
+ 0x00, 0x00,
16332
+ // reserved
16333
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
16334
+ // pre_defined
16335
+ width >> 8 & 0xff, width & 0xff,
16336
+ // width
16337
+ height >> 8 & 0xff, height & 0xff,
16338
+ // height
16339
+ 0x00, 0x48, 0x00, 0x00,
16340
+ // horizresolution
16341
+ 0x00, 0x48, 0x00, 0x00,
16342
+ // vertresolution
16343
+ 0x00, 0x00, 0x00, 0x00,
16344
+ // reserved
16345
+ 0x00, 0x01,
16346
+ // frame_count
16347
+ 0x12, 0x64, 0x61, 0x69, 0x6c,
16348
+ // dailymotion/hls.js
16349
+ 0x79, 0x6d, 0x6f, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x68, 0x6c, 0x73, 0x2e, 0x6a, 0x73, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
16350
+ // compressorname
16351
+ 0x00, 0x18,
16352
+ // depth = 24
16353
+ 0x11, 0x11]),
16354
+ // pre_defined = -1
16355
+ hvcc, MP4.box(MP4.types.btrt, new Uint8Array([0x00, 0x1c, 0x9c, 0x80,
16356
+ // bufferSizeDB
16357
+ 0x00, 0x2d, 0xc6, 0xc0,
16358
+ // maxBitrate
16359
+ 0x00, 0x2d, 0xc6, 0xc0])),
16360
+ // avgBitrate
16361
+ MP4.box(MP4.types.pasp, new Uint8Array([hSpacing >> 24,
16362
+ // hSpacing
16363
+ hSpacing >> 16 & 0xff, hSpacing >> 8 & 0xff, hSpacing & 0xff, vSpacing >> 24,
16364
+ // vSpacing
16365
+ vSpacing >> 16 & 0xff, vSpacing >> 8 & 0xff, vSpacing & 0xff])));
16366
+ }
16090
16367
  }
16091
16368
  MP4.types = void 0;
16092
16369
  MP4.HDLR_TYPES = void 0;
@@ -16462,9 +16739,9 @@ class MP4Remuxer {
16462
16739
  const foundOverlap = delta < -1;
16463
16740
  if (foundHole || foundOverlap) {
16464
16741
  if (foundHole) {
16465
- logger.warn(`AVC: ${toMsFromMpegTsClock(delta, true)} ms (${delta}dts) hole between fragments detected at ${timeOffset.toFixed(3)}`);
16742
+ logger.warn(`${(track.segmentCodec || '').toUpperCase()}: ${toMsFromMpegTsClock(delta, true)} ms (${delta}dts) hole between fragments detected at ${timeOffset.toFixed(3)}`);
16466
16743
  } else {
16467
- logger.warn(`AVC: ${toMsFromMpegTsClock(-delta, true)} ms (${delta}dts) overlapping between fragments detected at ${timeOffset.toFixed(3)}`);
16744
+ logger.warn(`${(track.segmentCodec || '').toUpperCase()}: ${toMsFromMpegTsClock(-delta, true)} ms (${delta}dts) overlapping between fragments detected at ${timeOffset.toFixed(3)}`);
16468
16745
  }
16469
16746
  if (!foundOverlap || nextAvcDts >= inputSamples[0].pts || chromeVersion) {
16470
16747
  firstDTS = nextAvcDts;
@@ -16473,12 +16750,24 @@ class MP4Remuxer {
16473
16750
  inputSamples[0].dts = firstDTS;
16474
16751
  inputSamples[0].pts = firstPTS;
16475
16752
  } else {
16753
+ let isPTSOrderRetained = true;
16476
16754
  for (let i = 0; i < inputSamples.length; i++) {
16477
- if (inputSamples[i].dts > firstPTS) {
16755
+ if (inputSamples[i].dts > firstPTS && isPTSOrderRetained) {
16478
16756
  break;
16479
16757
  }
16758
+ const prevPTS = inputSamples[i].pts;
16480
16759
  inputSamples[i].dts -= delta;
16481
16760
  inputSamples[i].pts -= delta;
16761
+
16762
+ // check to see if this sample's PTS order has changed
16763
+ // relative to the next one
16764
+ if (i < inputSamples.length - 1) {
16765
+ const nextSamplePTS = inputSamples[i + 1].pts;
16766
+ const currentSamplePTS = inputSamples[i].pts;
16767
+ const currentOrder = nextSamplePTS <= currentSamplePTS;
16768
+ const prevOrder = nextSamplePTS <= prevPTS;
16769
+ isPTSOrderRetained = currentOrder == prevOrder;
16770
+ }
16482
16771
  }
16483
16772
  }
16484
16773
  logger.log(`Video: Initial PTS/DTS adjusted: ${toMsFromMpegTsClock(firstPTS, true)}/${toMsFromMpegTsClock(firstDTS, true)}, delta: ${toMsFromMpegTsClock(delta, true)} ms`);
@@ -16626,7 +16915,7 @@ class MP4Remuxer {
16626
16915
  }
16627
16916
  }
16628
16917
  }
16629
- // next AVC sample DTS should be equal to last sample DTS + last sample duration (in PES timescale)
16918
+ // next AVC/HEVC sample DTS should be equal to last sample DTS + last sample duration (in PES timescale)
16630
16919
  mp4SampleDuration = stretchedLastFrame || !mp4SampleDuration ? averageSampleDuration : mp4SampleDuration;
16631
16920
  this.nextAvcDts = nextAvcDts = lastDTS + mp4SampleDuration;
16632
16921
  this.videoSampleDuration = mp4SampleDuration;
@@ -16759,7 +17048,7 @@ class MP4Remuxer {
16759
17048
  logger.warn(`[mp4-remuxer]: Injecting ${missing} audio frame @ ${(nextPts / inputTimeScale).toFixed(3)}s due to ${Math.round(1000 * delta / inputTimeScale)} ms gap.`);
16760
17049
  for (let j = 0; j < missing; j++) {
16761
17050
  const newStamp = Math.max(nextPts, 0);
16762
- let fillFrame = AAC.getSilentFrame(track.manifestCodec || track.codec, track.channelCount);
17051
+ let fillFrame = AAC.getSilentFrame(track.parsedCodec || track.manifestCodec || track.codec, track.channelCount);
16763
17052
  if (!fillFrame) {
16764
17053
  logger.log('[mp4-remuxer]: Unable to get silent frame for given audio codec; duplicating last frame instead.');
16765
17054
  fillFrame = sample.unit.subarray();
@@ -16887,7 +17176,7 @@ class MP4Remuxer {
16887
17176
  // samples count of this segment's duration
16888
17177
  const nbSamples = Math.ceil((endDTS - startDTS) / frameDuration);
16889
17178
  // silent frame
16890
- const silentFrame = AAC.getSilentFrame(track.manifestCodec || track.codec, track.channelCount);
17179
+ const silentFrame = AAC.getSilentFrame(track.parsedCodec || track.manifestCodec || track.codec, track.channelCount);
16891
17180
  logger.warn('[mp4-remuxer]: remux empty Audio');
16892
17181
  // Can't remux if we can't generate a silent frame...
16893
17182
  if (!silentFrame) {
@@ -17278,13 +17567,15 @@ class Transmuxer {
17278
17567
  initSegmentData
17279
17568
  } = transmuxConfig;
17280
17569
  const keyData = getEncryptionType(uintData, decryptdata);
17281
- if (keyData && keyData.method === 'AES-128') {
17570
+ if (keyData && isFullSegmentEncryption(keyData.method)) {
17282
17571
  const decrypter = this.getDecrypter();
17572
+ const aesMode = getAesModeFromFullSegmentMethod(keyData.method);
17573
+
17283
17574
  // Software decryption is synchronous; webCrypto is not
17284
17575
  if (decrypter.isSync()) {
17285
17576
  // Software decryption is progressive. Progressive decryption may not return a result on each call. Any cached
17286
17577
  // data is handled in the flush() call
17287
- let decryptedData = decrypter.softwareDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer);
17578
+ let decryptedData = decrypter.softwareDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer, aesMode);
17288
17579
  // For Low-Latency HLS Parts, decrypt in place, since part parsing is expected on push progress
17289
17580
  const loadingParts = chunkMeta.part > -1;
17290
17581
  if (loadingParts) {
@@ -17296,7 +17587,7 @@ class Transmuxer {
17296
17587
  }
17297
17588
  uintData = new Uint8Array(decryptedData);
17298
17589
  } else {
17299
- this.decryptionPromise = decrypter.webCryptoDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer).then(decryptedData => {
17590
+ this.decryptionPromise = decrypter.webCryptoDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer, aesMode).then(decryptedData => {
17300
17591
  // Calling push here is important; if flush() is called while this is still resolving, this ensures that
17301
17592
  // the decrypted data has been transmuxed
17302
17593
  const result = this.push(decryptedData, null, chunkMeta);
@@ -17950,14 +18241,7 @@ class TransmuxerInterface {
17950
18241
  this.observer = new EventEmitter();
17951
18242
  this.observer.on(Events.FRAG_DECRYPTED, forwardMessage);
17952
18243
  this.observer.on(Events.ERROR, forwardMessage);
17953
- const MediaSource = getMediaSource(config.preferManagedMediaSource) || {
17954
- isTypeSupported: () => false
17955
- };
17956
- const m2tsTypeSupported = {
17957
- mpeg: MediaSource.isTypeSupported('audio/mpeg'),
17958
- mp3: MediaSource.isTypeSupported('audio/mp4; codecs="mp3"'),
17959
- ac3: false
17960
- };
18244
+ const m2tsTypeSupported = getM2TSSupportedAudioTypes(config.preferManagedMediaSource);
17961
18245
 
17962
18246
  // navigator.vendor is not always available in Web Worker
17963
18247
  // refer to https://developer.mozilla.org/en-US/docs/Web/API/WorkerGlobalScope/navigator
@@ -18221,8 +18505,9 @@ const STALL_MINIMUM_DURATION_MS = 250;
18221
18505
  const MAX_START_GAP_JUMP = 2.0;
18222
18506
  const SKIP_BUFFER_HOLE_STEP_SECONDS = 0.1;
18223
18507
  const SKIP_BUFFER_RANGE_START = 0.05;
18224
- class GapController {
18508
+ class GapController extends Logger {
18225
18509
  constructor(config, media, fragmentTracker, hls) {
18510
+ super('gap-controller', hls.logger);
18226
18511
  this.config = void 0;
18227
18512
  this.media = null;
18228
18513
  this.fragmentTracker = void 0;
@@ -18232,6 +18517,7 @@ class GapController {
18232
18517
  this.stalled = null;
18233
18518
  this.moved = false;
18234
18519
  this.seeking = false;
18520
+ this.ended = 0;
18235
18521
  this.config = config;
18236
18522
  this.media = media;
18237
18523
  this.fragmentTracker = fragmentTracker;
@@ -18249,7 +18535,7 @@ class GapController {
18249
18535
  *
18250
18536
  * @param lastCurrentTime - Previously read playhead position
18251
18537
  */
18252
- poll(lastCurrentTime, activeFrag) {
18538
+ poll(lastCurrentTime, activeFrag, levelDetails, state) {
18253
18539
  const {
18254
18540
  config,
18255
18541
  media,
@@ -18268,6 +18554,7 @@ class GapController {
18268
18554
 
18269
18555
  // The playhead is moving, no-op
18270
18556
  if (currentTime !== lastCurrentTime) {
18557
+ this.ended = 0;
18271
18558
  this.moved = true;
18272
18559
  if (!seeking) {
18273
18560
  this.nudgeRetry = 0;
@@ -18276,7 +18563,7 @@ class GapController {
18276
18563
  // The playhead is now moving, but was previously stalled
18277
18564
  if (this.stallReported) {
18278
18565
  const _stalledDuration = self.performance.now() - stalled;
18279
- logger.warn(`playback not stuck anymore @${currentTime}, after ${Math.round(_stalledDuration)}ms`);
18566
+ this.warn(`playback not stuck anymore @${currentTime}, after ${Math.round(_stalledDuration)}ms`);
18280
18567
  this.stallReported = false;
18281
18568
  }
18282
18569
  this.stalled = null;
@@ -18312,7 +18599,6 @@ class GapController {
18312
18599
  // Skip start gaps if we haven't played, but the last poll detected the start of a stall
18313
18600
  // The addition poll gives the browser a chance to jump the gap for us
18314
18601
  if (!this.moved && this.stalled !== null) {
18315
- var _level$details;
18316
18602
  // There is no playable buffer (seeked, waiting for buffer)
18317
18603
  const isBuffered = bufferInfo.len > 0;
18318
18604
  if (!isBuffered && !nextStart) {
@@ -18324,9 +18610,8 @@ class GapController {
18324
18610
  // When joining a live stream with audio tracks, account for live playlist window sliding by allowing
18325
18611
  // a larger jump over start gaps caused by the audio-stream-controller buffering a start fragment
18326
18612
  // that begins over 1 target duration after the video start position.
18327
- const level = this.hls.levels ? this.hls.levels[this.hls.currentLevel] : null;
18328
- const isLive = level == null ? void 0 : (_level$details = level.details) == null ? void 0 : _level$details.live;
18329
- const maxStartGapJump = isLive ? level.details.targetduration * 2 : MAX_START_GAP_JUMP;
18613
+ const isLive = !!(levelDetails != null && levelDetails.live);
18614
+ const maxStartGapJump = isLive ? levelDetails.targetduration * 2 : MAX_START_GAP_JUMP;
18330
18615
  const partialOrGap = this.fragmentTracker.getPartialFragment(currentTime);
18331
18616
  if (startJump > 0 && (startJump <= maxStartGapJump || partialOrGap)) {
18332
18617
  if (!media.paused) {
@@ -18344,6 +18629,17 @@ class GapController {
18344
18629
  }
18345
18630
  const stalledDuration = tnow - stalled;
18346
18631
  if (!seeking && stalledDuration >= STALL_MINIMUM_DURATION_MS) {
18632
+ // Dispatch MEDIA_ENDED when media.ended/ended event is not signalled at end of stream
18633
+ if (state === State.ENDED && !(levelDetails && levelDetails.live) && Math.abs(currentTime - ((levelDetails == null ? void 0 : levelDetails.edge) || 0)) < 1) {
18634
+ if (stalledDuration < 1000 || this.ended) {
18635
+ return;
18636
+ }
18637
+ this.ended = currentTime;
18638
+ this.hls.trigger(Events.MEDIA_ENDED, {
18639
+ stalled: true
18640
+ });
18641
+ return;
18642
+ }
18347
18643
  // Report stalling after trying to fix
18348
18644
  this._reportStall(bufferInfo);
18349
18645
  if (!this.media) {
@@ -18387,7 +18683,7 @@ class GapController {
18387
18683
  // needs to cross some sort of threshold covering all source-buffers content
18388
18684
  // to start playing properly.
18389
18685
  if ((bufferInfo.len > config.maxBufferHole || bufferInfo.nextStart && bufferInfo.nextStart - currentTime < config.maxBufferHole) && stalledDurationMs > config.highBufferWatchdogPeriod * 1000) {
18390
- logger.warn('Trying to nudge playhead over buffer-hole');
18686
+ this.warn('Trying to nudge playhead over buffer-hole');
18391
18687
  // Try to nudge currentTime over a buffer hole if we've been stalling for the configured amount of seconds
18392
18688
  // We only try to jump the hole if it's under the configured size
18393
18689
  // Reset stalled so to rearm watchdog timer
@@ -18411,7 +18707,7 @@ class GapController {
18411
18707
  // Report stalled error once
18412
18708
  this.stallReported = true;
18413
18709
  const error = new Error(`Playback stalling at @${media.currentTime} due to low buffer (${JSON.stringify(bufferInfo)})`);
18414
- logger.warn(error.message);
18710
+ this.warn(error.message);
18415
18711
  hls.trigger(Events.ERROR, {
18416
18712
  type: ErrorTypes.MEDIA_ERROR,
18417
18713
  details: ErrorDetails.BUFFER_STALLED_ERROR,
@@ -18479,7 +18775,7 @@ class GapController {
18479
18775
  }
18480
18776
  }
18481
18777
  const targetTime = Math.max(startTime + SKIP_BUFFER_RANGE_START, currentTime + SKIP_BUFFER_HOLE_STEP_SECONDS);
18482
- logger.warn(`skipping hole, adjusting currentTime from ${currentTime} to ${targetTime}`);
18778
+ this.warn(`skipping hole, adjusting currentTime from ${currentTime} to ${targetTime}`);
18483
18779
  this.moved = true;
18484
18780
  this.stalled = null;
18485
18781
  media.currentTime = targetTime;
@@ -18520,7 +18816,7 @@ class GapController {
18520
18816
  const targetTime = currentTime + (nudgeRetry + 1) * config.nudgeOffset;
18521
18817
  // playback stalled in buffered area ... let's nudge currentTime to try to overcome this
18522
18818
  const error = new Error(`Nudging 'currentTime' from ${currentTime} to ${targetTime}`);
18523
- logger.warn(error.message);
18819
+ this.warn(error.message);
18524
18820
  media.currentTime = targetTime;
18525
18821
  hls.trigger(Events.ERROR, {
18526
18822
  type: ErrorTypes.MEDIA_ERROR,
@@ -18530,7 +18826,7 @@ class GapController {
18530
18826
  });
18531
18827
  } else {
18532
18828
  const error = new Error(`Playhead still not moving while enough data buffered @${currentTime} after ${config.nudgeMaxRetry} nudges`);
18533
- logger.error(error.message);
18829
+ this.error(error.message);
18534
18830
  hls.trigger(Events.ERROR, {
18535
18831
  type: ErrorTypes.MEDIA_ERROR,
18536
18832
  details: ErrorDetails.BUFFER_STALLED_ERROR,
@@ -18545,7 +18841,7 @@ const TICK_INTERVAL = 100; // how often to tick in ms
18545
18841
 
18546
18842
  class StreamController extends BaseStreamController {
18547
18843
  constructor(hls, fragmentTracker, keyLoader) {
18548
- super(hls, fragmentTracker, keyLoader, '[stream-controller]', PlaylistLevelType.MAIN);
18844
+ super(hls, fragmentTracker, keyLoader, 'stream-controller', PlaylistLevelType.MAIN);
18549
18845
  this.audioCodecSwap = false;
18550
18846
  this.gapController = null;
18551
18847
  this.level = -1;
@@ -18553,27 +18849,43 @@ class StreamController extends BaseStreamController {
18553
18849
  this.altAudio = false;
18554
18850
  this.audioOnly = false;
18555
18851
  this.fragPlaying = null;
18556
- this.onvplaying = null;
18557
- this.onvseeked = null;
18558
18852
  this.fragLastKbps = 0;
18559
18853
  this.couldBacktrack = false;
18560
18854
  this.backtrackFragment = null;
18561
18855
  this.audioCodecSwitch = false;
18562
18856
  this.videoBuffer = null;
18563
- this._registerListeners();
18857
+ this.onMediaPlaying = () => {
18858
+ // tick to speed up FRAG_CHANGED triggering
18859
+ this.tick();
18860
+ };
18861
+ this.onMediaSeeked = () => {
18862
+ const media = this.media;
18863
+ const currentTime = media ? media.currentTime : null;
18864
+ if (isFiniteNumber(currentTime)) {
18865
+ this.log(`Media seeked to ${currentTime.toFixed(3)}`);
18866
+ }
18867
+
18868
+ // If seeked was issued before buffer was appended do not tick immediately
18869
+ const bufferInfo = this.getMainFwdBufferInfo();
18870
+ if (bufferInfo === null || bufferInfo.len === 0) {
18871
+ this.warn(`Main forward buffer length on "seeked" event ${bufferInfo ? bufferInfo.len : 'empty'})`);
18872
+ return;
18873
+ }
18874
+
18875
+ // tick to speed up FRAG_CHANGED triggering
18876
+ this.tick();
18877
+ };
18878
+ this.registerListeners();
18564
18879
  }
18565
- _registerListeners() {
18880
+ registerListeners() {
18881
+ super.registerListeners();
18566
18882
  const {
18567
18883
  hls
18568
18884
  } = this;
18569
- hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
18570
- hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
18571
- hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
18572
18885
  hls.on(Events.MANIFEST_PARSED, this.onManifestParsed, this);
18573
18886
  hls.on(Events.LEVEL_LOADING, this.onLevelLoading, this);
18574
18887
  hls.on(Events.LEVEL_LOADED, this.onLevelLoaded, this);
18575
18888
  hls.on(Events.FRAG_LOAD_EMERGENCY_ABORTED, this.onFragLoadEmergencyAborted, this);
18576
- hls.on(Events.ERROR, this.onError, this);
18577
18889
  hls.on(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
18578
18890
  hls.on(Events.AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this);
18579
18891
  hls.on(Events.BUFFER_CREATED, this.onBufferCreated, this);
@@ -18581,17 +18893,14 @@ class StreamController extends BaseStreamController {
18581
18893
  hls.on(Events.LEVELS_UPDATED, this.onLevelsUpdated, this);
18582
18894
  hls.on(Events.FRAG_BUFFERED, this.onFragBuffered, this);
18583
18895
  }
18584
- _unregisterListeners() {
18896
+ unregisterListeners() {
18897
+ super.unregisterListeners();
18585
18898
  const {
18586
18899
  hls
18587
18900
  } = this;
18588
- hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
18589
- hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
18590
- hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
18591
18901
  hls.off(Events.MANIFEST_PARSED, this.onManifestParsed, this);
18592
18902
  hls.off(Events.LEVEL_LOADED, this.onLevelLoaded, this);
18593
18903
  hls.off(Events.FRAG_LOAD_EMERGENCY_ABORTED, this.onFragLoadEmergencyAborted, this);
18594
- hls.off(Events.ERROR, this.onError, this);
18595
18904
  hls.off(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
18596
18905
  hls.off(Events.AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this);
18597
18906
  hls.off(Events.BUFFER_CREATED, this.onBufferCreated, this);
@@ -18600,7 +18909,9 @@ class StreamController extends BaseStreamController {
18600
18909
  hls.off(Events.FRAG_BUFFERED, this.onFragBuffered, this);
18601
18910
  }
18602
18911
  onHandlerDestroying() {
18603
- this._unregisterListeners();
18912
+ // @ts-ignore
18913
+ this.onMediaPlaying = this.onMediaSeeked = null;
18914
+ this.unregisterListeners();
18604
18915
  super.onHandlerDestroying();
18605
18916
  }
18606
18917
  startLoad(startPosition) {
@@ -18720,7 +19031,7 @@ class StreamController extends BaseStreamController {
18720
19031
  if (this.altAudio && this.audioOnly) {
18721
19032
  return;
18722
19033
  }
18723
- if (!(levels != null && levels[level])) {
19034
+ if (!this.buffering || !(levels != null && levels[level])) {
18724
19035
  return;
18725
19036
  }
18726
19037
  const levelInfo = levels[level];
@@ -18928,20 +19239,17 @@ class StreamController extends BaseStreamController {
18928
19239
  onMediaAttached(event, data) {
18929
19240
  super.onMediaAttached(event, data);
18930
19241
  const media = data.media;
18931
- this.onvplaying = this.onMediaPlaying.bind(this);
18932
- this.onvseeked = this.onMediaSeeked.bind(this);
18933
- media.addEventListener('playing', this.onvplaying);
18934
- media.addEventListener('seeked', this.onvseeked);
19242
+ media.addEventListener('playing', this.onMediaPlaying);
19243
+ media.addEventListener('seeked', this.onMediaSeeked);
18935
19244
  this.gapController = new GapController(this.config, media, this.fragmentTracker, this.hls);
18936
19245
  }
18937
19246
  onMediaDetaching() {
18938
19247
  const {
18939
19248
  media
18940
19249
  } = this;
18941
- if (media && this.onvplaying && this.onvseeked) {
18942
- media.removeEventListener('playing', this.onvplaying);
18943
- media.removeEventListener('seeked', this.onvseeked);
18944
- this.onvplaying = this.onvseeked = null;
19250
+ if (media) {
19251
+ media.removeEventListener('playing', this.onMediaPlaying);
19252
+ media.removeEventListener('seeked', this.onMediaSeeked);
18945
19253
  this.videoBuffer = null;
18946
19254
  }
18947
19255
  this.fragPlaying = null;
@@ -18951,27 +19259,6 @@ class StreamController extends BaseStreamController {
18951
19259
  }
18952
19260
  super.onMediaDetaching();
18953
19261
  }
18954
- onMediaPlaying() {
18955
- // tick to speed up FRAG_CHANGED triggering
18956
- this.tick();
18957
- }
18958
- onMediaSeeked() {
18959
- const media = this.media;
18960
- const currentTime = media ? media.currentTime : null;
18961
- if (isFiniteNumber(currentTime)) {
18962
- this.log(`Media seeked to ${currentTime.toFixed(3)}`);
18963
- }
18964
-
18965
- // If seeked was issued before buffer was appended do not tick immediately
18966
- const bufferInfo = this.getMainFwdBufferInfo();
18967
- if (bufferInfo === null || bufferInfo.len === 0) {
18968
- this.warn(`Main forward buffer length on "seeked" event ${bufferInfo ? bufferInfo.len : 'empty'})`);
18969
- return;
18970
- }
18971
-
18972
- // tick to speed up FRAG_CHANGED triggering
18973
- this.tick();
18974
- }
18975
19262
  onManifestLoading() {
18976
19263
  // reset buffer on manifest loading
18977
19264
  this.log('Trigger BUFFER_RESET');
@@ -19263,8 +19550,10 @@ class StreamController extends BaseStreamController {
19263
19550
  }
19264
19551
  if (this.loadedmetadata || !BufferHelper.getBuffered(media).length) {
19265
19552
  // Resolve gaps using the main buffer, whose ranges are the intersections of the A/V sourcebuffers
19266
- const activeFrag = this.state !== State.IDLE ? this.fragCurrent : null;
19267
- gapController.poll(this.lastCurrentTime, activeFrag);
19553
+ const state = this.state;
19554
+ const activeFrag = state !== State.IDLE ? this.fragCurrent : null;
19555
+ const levelDetails = this.getLevelDetails();
19556
+ gapController.poll(this.lastCurrentTime, activeFrag, levelDetails, state);
19268
19557
  }
19269
19558
  this.lastCurrentTime = media.currentTime;
19270
19559
  }
@@ -19702,7 +19991,7 @@ class Hls {
19702
19991
  * Get the video-dev/hls.js package version.
19703
19992
  */
19704
19993
  static get version() {
19705
- return "1.5.5";
19994
+ return "1.5.6-0.canary.10003";
19706
19995
  }
19707
19996
 
19708
19997
  /**
@@ -19765,9 +20054,12 @@ class Hls {
19765
20054
  * The configuration object provided on player instantiation.
19766
20055
  */
19767
20056
  this.userConfig = void 0;
20057
+ /**
20058
+ * The logger functions used by this player instance, configured on player instantiation.
20059
+ */
20060
+ this.logger = void 0;
19768
20061
  this.coreComponents = void 0;
19769
20062
  this.networkControllers = void 0;
19770
- this.started = false;
19771
20063
  this._emitter = new EventEmitter();
19772
20064
  this._autoLevelCapping = -1;
19773
20065
  this._maxHdcpLevel = null;
@@ -19784,11 +20076,11 @@ class Hls {
19784
20076
  this._media = null;
19785
20077
  this.url = null;
19786
20078
  this.triggeringException = void 0;
19787
- enableLogs(userConfig.debug || false, 'Hls instance');
19788
- const config = this.config = mergeConfig(Hls.DefaultConfig, userConfig);
20079
+ const logger = this.logger = enableLogs(userConfig.debug || false, 'Hls instance');
20080
+ const config = this.config = mergeConfig(Hls.DefaultConfig, userConfig, logger);
19789
20081
  this.userConfig = userConfig;
19790
20082
  if (config.progressive) {
19791
- enableStreamingMode(config);
20083
+ enableStreamingMode(config, logger);
19792
20084
  }
19793
20085
 
19794
20086
  // core controllers and network loaders
@@ -19887,7 +20179,7 @@ class Hls {
19887
20179
  try {
19888
20180
  return this.emit(event, event, eventObject);
19889
20181
  } catch (error) {
19890
- logger.error('An internal error happened while handling event ' + event + '. Error message: "' + error.message + '". Here is a stacktrace:', error);
20182
+ this.logger.error('An internal error happened while handling event ' + event + '. Error message: "' + error.message + '". Here is a stacktrace:', error);
19891
20183
  // Prevent recursion in error event handlers that throw #5497
19892
20184
  if (!this.triggeringException) {
19893
20185
  this.triggeringException = true;
@@ -19913,7 +20205,7 @@ class Hls {
19913
20205
  * Dispose of the instance
19914
20206
  */
19915
20207
  destroy() {
19916
- logger.log('destroy');
20208
+ this.logger.log('destroy');
19917
20209
  this.trigger(Events.DESTROYING, undefined);
19918
20210
  this.detachMedia();
19919
20211
  this.removeAllListeners();
@@ -19934,7 +20226,7 @@ class Hls {
19934
20226
  * Attaches Hls.js to a media element
19935
20227
  */
19936
20228
  attachMedia(media) {
19937
- logger.log('attachMedia');
20229
+ this.logger.log('attachMedia');
19938
20230
  this._media = media;
19939
20231
  this.trigger(Events.MEDIA_ATTACHING, {
19940
20232
  media: media
@@ -19945,7 +20237,7 @@ class Hls {
19945
20237
  * Detach Hls.js from the media
19946
20238
  */
19947
20239
  detachMedia() {
19948
- logger.log('detachMedia');
20240
+ this.logger.log('detachMedia');
19949
20241
  this.trigger(Events.MEDIA_DETACHING, undefined);
19950
20242
  this._media = null;
19951
20243
  }
@@ -19962,7 +20254,7 @@ class Hls {
19962
20254
  });
19963
20255
  this._autoLevelCapping = -1;
19964
20256
  this._maxHdcpLevel = null;
19965
- logger.log(`loadSource:${loadingSource}`);
20257
+ this.logger.log(`loadSource:${loadingSource}`);
19966
20258
  if (media && loadedSource && (loadedSource !== loadingSource || this.bufferController.hasSourceTypes())) {
19967
20259
  this.detachMedia();
19968
20260
  this.attachMedia(media);
@@ -19981,8 +20273,7 @@ class Hls {
19981
20273
  * Defaults to -1 (None: starts from earliest point)
19982
20274
  */
19983
20275
  startLoad(startPosition = -1) {
19984
- logger.log(`startLoad(${startPosition})`);
19985
- this.started = true;
20276
+ this.logger.log(`startLoad(${startPosition})`);
19986
20277
  this.networkControllers.forEach(controller => {
19987
20278
  controller.startLoad(startPosition);
19988
20279
  });
@@ -19992,34 +20283,31 @@ class Hls {
19992
20283
  * Stop loading of any stream data.
19993
20284
  */
19994
20285
  stopLoad() {
19995
- logger.log('stopLoad');
19996
- this.started = false;
20286
+ this.logger.log('stopLoad');
19997
20287
  this.networkControllers.forEach(controller => {
19998
20288
  controller.stopLoad();
19999
20289
  });
20000
20290
  }
20001
20291
 
20002
20292
  /**
20003
- * Resumes stream controller segment loading if previously started.
20293
+ * Resumes stream controller segment loading after `pauseBuffering` has been called.
20004
20294
  */
20005
20295
  resumeBuffering() {
20006
- if (this.started) {
20007
- this.networkControllers.forEach(controller => {
20008
- if ('fragmentLoader' in controller) {
20009
- controller.startLoad(-1);
20010
- }
20011
- });
20012
- }
20296
+ this.networkControllers.forEach(controller => {
20297
+ if (controller.resumeBuffering) {
20298
+ controller.resumeBuffering();
20299
+ }
20300
+ });
20013
20301
  }
20014
20302
 
20015
20303
  /**
20016
- * Stops stream controller segment loading without changing 'started' state like stopLoad().
20304
+ * Prevents stream controller from loading new segments until `resumeBuffering` is called.
20017
20305
  * This allows for media buffering to be paused without interupting playlist loading.
20018
20306
  */
20019
20307
  pauseBuffering() {
20020
20308
  this.networkControllers.forEach(controller => {
20021
- if ('fragmentLoader' in controller) {
20022
- controller.stopLoad();
20309
+ if (controller.pauseBuffering) {
20310
+ controller.pauseBuffering();
20023
20311
  }
20024
20312
  });
20025
20313
  }
@@ -20028,7 +20316,7 @@ class Hls {
20028
20316
  * Swap through possible audio codecs in the stream (for example to switch from stereo to 5.1)
20029
20317
  */
20030
20318
  swapAudioCodec() {
20031
- logger.log('swapAudioCodec');
20319
+ this.logger.log('swapAudioCodec');
20032
20320
  this.streamController.swapAudioCodec();
20033
20321
  }
20034
20322
 
@@ -20039,7 +20327,7 @@ class Hls {
20039
20327
  * Automatic recovery of media-errors by this process is configurable.
20040
20328
  */
20041
20329
  recoverMediaError() {
20042
- logger.log('recoverMediaError');
20330
+ this.logger.log('recoverMediaError');
20043
20331
  const media = this._media;
20044
20332
  this.detachMedia();
20045
20333
  if (media) {
@@ -20069,7 +20357,7 @@ class Hls {
20069
20357
  * Set quality level index immediately. This will flush the current buffer to replace the quality asap. That means playback will interrupt at least shortly to re-buffer and re-sync eventually. Set to -1 for automatic level selection.
20070
20358
  */
20071
20359
  set currentLevel(newLevel) {
20072
- logger.log(`set currentLevel:${newLevel}`);
20360
+ this.logger.log(`set currentLevel:${newLevel}`);
20073
20361
  this.levelController.manualLevel = newLevel;
20074
20362
  this.streamController.immediateLevelSwitch();
20075
20363
  }
@@ -20088,7 +20376,7 @@ class Hls {
20088
20376
  * @param newLevel - Pass -1 for automatic level selection
20089
20377
  */
20090
20378
  set nextLevel(newLevel) {
20091
- logger.log(`set nextLevel:${newLevel}`);
20379
+ this.logger.log(`set nextLevel:${newLevel}`);
20092
20380
  this.levelController.manualLevel = newLevel;
20093
20381
  this.streamController.nextLevelSwitch();
20094
20382
  }
@@ -20107,7 +20395,7 @@ class Hls {
20107
20395
  * @param newLevel - Pass -1 for automatic level selection
20108
20396
  */
20109
20397
  set loadLevel(newLevel) {
20110
- logger.log(`set loadLevel:${newLevel}`);
20398
+ this.logger.log(`set loadLevel:${newLevel}`);
20111
20399
  this.levelController.manualLevel = newLevel;
20112
20400
  }
20113
20401
 
@@ -20138,7 +20426,7 @@ class Hls {
20138
20426
  * Sets "first-level", see getter.
20139
20427
  */
20140
20428
  set firstLevel(newLevel) {
20141
- logger.log(`set firstLevel:${newLevel}`);
20429
+ this.logger.log(`set firstLevel:${newLevel}`);
20142
20430
  this.levelController.firstLevel = newLevel;
20143
20431
  }
20144
20432
 
@@ -20163,7 +20451,7 @@ class Hls {
20163
20451
  * (determined from download of first segment)
20164
20452
  */
20165
20453
  set startLevel(newLevel) {
20166
- logger.log(`set startLevel:${newLevel}`);
20454
+ this.logger.log(`set startLevel:${newLevel}`);
20167
20455
  // if not in automatic start level detection, ensure startLevel is greater than minAutoLevel
20168
20456
  if (newLevel !== -1) {
20169
20457
  newLevel = Math.max(newLevel, this.minAutoLevel);
@@ -20238,7 +20526,7 @@ class Hls {
20238
20526
  */
20239
20527
  set autoLevelCapping(newLevel) {
20240
20528
  if (this._autoLevelCapping !== newLevel) {
20241
- logger.log(`set autoLevelCapping:${newLevel}`);
20529
+ this.logger.log(`set autoLevelCapping:${newLevel}`);
20242
20530
  this._autoLevelCapping = newLevel;
20243
20531
  this.levelController.checkMaxAutoUpdated();
20244
20532
  }
@@ -20517,5 +20805,5 @@ var KeySystemFormats = empty.KeySystemFormats;
20517
20805
  var KeySystems = empty.KeySystems;
20518
20806
  var SubtitleStreamController = empty.SubtitleStreamController;
20519
20807
  var TimelineController = empty.TimelineController;
20520
- export { AbrController, AttrList, Cues as AudioStreamController, Cues as AudioTrackController, BasePlaylistController, BaseSegment, BaseStreamController, BufferController, Cues as CMCDController, CapLevelController, ChunkMetadata, ContentSteeringController, DateRange, Cues as EMEController, ErrorActionFlags, ErrorController, ErrorDetails, ErrorTypes, Events, FPSController, Fragment, Hls, HlsSkip, HlsUrlParameters, KeySystemFormats, KeySystems, Level, LevelDetails, LevelKey, LoadStats, MetadataSchema, NetworkErrorAction, Part, PlaylistLevelType, SubtitleStreamController, Cues as SubtitleTrackController, TimelineController, Hls as default, getMediaSource, isMSESupported, isSupported };
20808
+ export { AbrController, AttrList, HevcVideoParser as AudioStreamController, HevcVideoParser as AudioTrackController, BasePlaylistController, BaseSegment, BaseStreamController, BufferController, HevcVideoParser as CMCDController, CapLevelController, ChunkMetadata, ContentSteeringController, DateRange, HevcVideoParser as EMEController, ErrorActionFlags, ErrorController, ErrorDetails, ErrorTypes, Events, FPSController, Fragment, Hls, HlsSkip, HlsUrlParameters, KeySystemFormats, KeySystems, Level, LevelDetails, LevelKey, LoadStats, MetadataSchema, NetworkErrorAction, Part, PlaylistLevelType, SubtitleStreamController, HevcVideoParser as SubtitleTrackController, TimelineController, Hls as default, getMediaSource, isMSESupported, isSupported };
20521
20809
  //# sourceMappingURL=hls.light.mjs.map