@editframe/elements 0.18.3-beta.0 → 0.18.8-beta.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (110)
  1. package/dist/elements/EFAudio.d.ts +1 -2
  2. package/dist/elements/EFAudio.js +6 -9
  3. package/dist/elements/EFMedia/AssetMediaEngine.browsertest.d.ts +0 -0
  4. package/dist/elements/EFMedia/AssetMediaEngine.d.ts +2 -4
  5. package/dist/elements/EFMedia/AssetMediaEngine.js +34 -5
  6. package/dist/elements/EFMedia/BaseMediaEngine.js +20 -1
  7. package/dist/elements/EFMedia/BufferedSeekingInput.d.ts +5 -5
  8. package/dist/elements/EFMedia/BufferedSeekingInput.js +27 -7
  9. package/dist/elements/EFMedia/JitMediaEngine.d.ts +1 -1
  10. package/dist/elements/EFMedia/JitMediaEngine.js +22 -3
  11. package/dist/elements/EFMedia/audioTasks/makeAudioFrequencyAnalysisTask.js +4 -1
  12. package/dist/elements/EFMedia/audioTasks/makeAudioInputTask.js +11 -3
  13. package/dist/elements/EFMedia/audioTasks/makeAudioSeekTask.chunkboundary.regression.browsertest.d.ts +0 -0
  14. package/dist/elements/EFMedia/audioTasks/makeAudioSeekTask.js +17 -4
  15. package/dist/elements/EFMedia/audioTasks/makeAudioSegmentFetchTask.js +11 -1
  16. package/dist/elements/EFMedia/audioTasks/makeAudioSegmentIdTask.js +3 -2
  17. package/dist/elements/EFMedia/audioTasks/makeAudioTimeDomainAnalysisTask.js +4 -1
  18. package/dist/elements/EFMedia/shared/PrecisionUtils.d.ts +28 -0
  19. package/dist/elements/EFMedia/shared/PrecisionUtils.js +29 -0
  20. package/dist/elements/EFMedia/videoTasks/makeVideoSeekTask.js +11 -2
  21. package/dist/elements/EFMedia/videoTasks/makeVideoSegmentFetchTask.js +11 -1
  22. package/dist/elements/EFMedia/videoTasks/makeVideoSegmentIdTask.js +3 -2
  23. package/dist/elements/EFMedia.d.ts +0 -12
  24. package/dist/elements/EFMedia.js +4 -30
  25. package/dist/elements/EFTimegroup.js +12 -17
  26. package/dist/elements/EFVideo.d.ts +0 -9
  27. package/dist/elements/EFVideo.js +0 -7
  28. package/dist/elements/SampleBuffer.js +6 -6
  29. package/dist/getRenderInfo.d.ts +2 -2
  30. package/dist/gui/ContextMixin.js +71 -17
  31. package/dist/gui/TWMixin.js +1 -1
  32. package/dist/style.css +1 -1
  33. package/dist/transcoding/types/index.d.ts +9 -9
  34. package/package.json +2 -3
  35. package/src/elements/EFAudio.browsertest.ts +7 -7
  36. package/src/elements/EFAudio.ts +7 -20
  37. package/src/elements/EFMedia/AssetMediaEngine.browsertest.ts +100 -0
  38. package/src/elements/EFMedia/AssetMediaEngine.ts +72 -7
  39. package/src/elements/EFMedia/BaseMediaEngine.ts +50 -1
  40. package/src/elements/EFMedia/BufferedSeekingInput.browsertest.ts +135 -54
  41. package/src/elements/EFMedia/BufferedSeekingInput.ts +74 -17
  42. package/src/elements/EFMedia/JitMediaEngine.ts +58 -2
  43. package/src/elements/EFMedia/audioTasks/makeAudioFrequencyAnalysisTask.ts +10 -1
  44. package/src/elements/EFMedia/audioTasks/makeAudioInputTask.ts +16 -8
  45. package/src/elements/EFMedia/audioTasks/makeAudioSeekTask.chunkboundary.regression.browsertest.ts +199 -0
  46. package/src/elements/EFMedia/audioTasks/makeAudioSeekTask.ts +35 -4
  47. package/src/elements/EFMedia/audioTasks/makeAudioSegmentFetchTask.ts +12 -1
  48. package/src/elements/EFMedia/audioTasks/makeAudioSegmentIdTask.ts +3 -2
  49. package/src/elements/EFMedia/audioTasks/makeAudioTimeDomainAnalysisTask.ts +10 -1
  50. package/src/elements/EFMedia/shared/PrecisionUtils.ts +46 -0
  51. package/src/elements/EFMedia/videoTasks/makeVideoSeekTask.ts +27 -3
  52. package/src/elements/EFMedia/videoTasks/makeVideoSegmentFetchTask.ts +12 -1
  53. package/src/elements/EFMedia/videoTasks/makeVideoSegmentIdTask.ts +3 -2
  54. package/src/elements/EFMedia.browsertest.ts +73 -33
  55. package/src/elements/EFMedia.ts +11 -54
  56. package/src/elements/EFTimegroup.ts +21 -26
  57. package/src/elements/EFVideo.browsertest.ts +895 -162
  58. package/src/elements/EFVideo.ts +0 -16
  59. package/src/elements/SampleBuffer.ts +8 -10
  60. package/src/gui/ContextMixin.ts +104 -26
  61. package/src/transcoding/types/index.ts +10 -6
  62. package/test/EFVideo.framegen.browsertest.ts +1 -1
  63. package/test/__cache__/GET__api_v1_transcode_audio_1_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__32da3954ba60c96ad732020c65a08ebc/metadata.json +3 -3
  64. package/test/__cache__/GET__api_v1_transcode_audio_1_mp4_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4_bytes_0__9ed2d25c675aa6bb6ff5b3ae23887c71/data.bin +0 -0
  65. package/test/__cache__/GET__api_v1_transcode_audio_1_mp4_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4_bytes_0__9ed2d25c675aa6bb6ff5b3ae23887c71/metadata.json +22 -0
  66. package/test/__cache__/GET__api_v1_transcode_audio_2_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__b0b2b07efcf607de8ee0f650328c32f7/metadata.json +3 -3
  67. package/test/__cache__/GET__api_v1_transcode_audio_2_mp4_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4_bytes_0__d5a3309a2bf756dd6e304807eb402f56/data.bin +0 -0
  68. package/test/__cache__/GET__api_v1_transcode_audio_2_mp4_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4_bytes_0__d5a3309a2bf756dd6e304807eb402f56/metadata.json +22 -0
  69. package/test/__cache__/GET__api_v1_transcode_audio_3_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__a75c2252b542e0c152c780e9a8d7b154/metadata.json +3 -3
  70. package/test/__cache__/GET__api_v1_transcode_audio_3_mp4_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4_bytes_0__773254bb671e3466fca8677139fb239e/data.bin +0 -0
  71. package/test/__cache__/GET__api_v1_transcode_audio_3_mp4_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4_bytes_0__773254bb671e3466fca8677139fb239e/metadata.json +22 -0
  72. package/test/__cache__/GET__api_v1_transcode_audio_4_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__a64ff1cfb1b52cae14df4b5dfa1e222b/metadata.json +3 -3
  73. package/test/__cache__/GET__api_v1_transcode_audio_init_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__e66d2c831d951e74ad0aeaa6489795d0/metadata.json +3 -3
  74. package/test/__cache__/GET__api_v1_transcode_high_1_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__26197f6f7c46cacb0a71134131c3f775/metadata.json +3 -3
  75. package/test/__cache__/GET__api_v1_transcode_high_2_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__4cb6774cd3650ccf59c8f8dc6678c0b9/metadata.json +3 -3
  76. package/test/__cache__/GET__api_v1_transcode_high_4_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__a6fb05a22b18d850f7f2950bbcdbdeed/data.bin +0 -0
  77. package/test/__cache__/GET__api_v1_transcode_high_4_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__a6fb05a22b18d850f7f2950bbcdbdeed/metadata.json +21 -0
  78. package/test/__cache__/GET__api_v1_transcode_high_5_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__a50058c7c3602e90879fe3428ed891f4/data.bin +0 -0
  79. package/test/__cache__/GET__api_v1_transcode_high_5_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__a50058c7c3602e90879fe3428ed891f4/metadata.json +21 -0
  80. package/test/__cache__/GET__api_v1_transcode_high_init_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__0798c479b44aaeef850609a430f6e613/metadata.json +3 -3
  81. package/test/__cache__/GET__api_v1_transcode_manifest_json_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__3be92a0437de726b431ed5af2369158a/data.bin +1 -1
  82. package/test/__cache__/GET__api_v1_transcode_manifest_json_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__3be92a0437de726b431ed5af2369158a/metadata.json +4 -4
  83. package/test/recordReplayProxyPlugin.js +50 -0
  84. package/types.json +1 -1
  85. package/dist/DecoderResetFrequency.test.d.ts +0 -1
  86. package/dist/DecoderResetRecovery.test.d.ts +0 -1
  87. package/dist/ScrubTrackManager.d.ts +0 -96
  88. package/dist/elements/EFMedia/services/AudioElementFactory.browsertest.d.ts +0 -1
  89. package/dist/elements/EFMedia/services/AudioElementFactory.d.ts +0 -22
  90. package/dist/elements/EFMedia/services/AudioElementFactory.js +0 -72
  91. package/dist/elements/EFMedia/services/MediaSourceService.browsertest.d.ts +0 -1
  92. package/dist/elements/EFMedia/services/MediaSourceService.d.ts +0 -47
  93. package/dist/elements/EFMedia/services/MediaSourceService.js +0 -73
  94. package/dist/gui/services/ElementConnectionManager.browsertest.d.ts +0 -1
  95. package/dist/gui/services/ElementConnectionManager.d.ts +0 -59
  96. package/dist/gui/services/ElementConnectionManager.js +0 -128
  97. package/dist/gui/services/PlaybackController.browsertest.d.ts +0 -1
  98. package/dist/gui/services/PlaybackController.d.ts +0 -103
  99. package/dist/gui/services/PlaybackController.js +0 -290
  100. package/dist/services/MediaSourceManager.d.ts +0 -62
  101. package/dist/services/MediaSourceManager.js +0 -211
  102. package/src/elements/EFMedia/services/AudioElementFactory.browsertest.ts +0 -325
  103. package/src/elements/EFMedia/services/AudioElementFactory.ts +0 -119
  104. package/src/elements/EFMedia/services/MediaSourceService.browsertest.ts +0 -257
  105. package/src/elements/EFMedia/services/MediaSourceService.ts +0 -102
  106. package/src/gui/services/ElementConnectionManager.browsertest.ts +0 -263
  107. package/src/gui/services/ElementConnectionManager.ts +0 -224
  108. package/src/gui/services/PlaybackController.browsertest.ts +0 -437
  109. package/src/gui/services/PlaybackController.ts +0 -521
  110. package/src/services/MediaSourceManager.ts +0 -333
@@ -2,8 +2,7 @@ import { Task } from '@lit/task';
2
2
  import { EFMedia } from './EFMedia.js';
3
3
  declare const EFAudio_base: typeof EFMedia;
4
4
  export declare class EFAudio extends EFAudio_base {
5
- static get observedAttributes(): string[];
6
- attributeChangedCallback(name: string, old: string | null, value: string | null): void;
5
+ private _propertyHack;
7
6
  audioElementRef: import('lit-html/directives/ref.js').Ref<HTMLAudioElement>;
8
7
  render(): import('lit-html').TemplateResult<1>;
9
8
  frameTask: Task<readonly [import('@lit/task').TaskStatus, import('@lit/task').TaskStatus, import('@lit/task').TaskStatus, import('@lit/task').TaskStatus], void>;
@@ -2,12 +2,13 @@ import { EFMedia } from "./EFMedia.js";
2
2
  import { TWMixin } from "../gui/TWMixin2.js";
3
3
  import { Task } from "@lit/task";
4
4
  import { html } from "lit";
5
- import { customElement } from "lit/decorators.js";
5
+ import { customElement, property } from "lit/decorators.js";
6
6
  import _decorate from "@oxc-project/runtime/helpers/decorate";
7
7
  import { createRef, ref } from "lit/directives/ref.js";
8
8
  let EFAudio = class EFAudio$1 extends TWMixin(EFMedia) {
9
9
  constructor(..._args) {
10
10
  super(..._args);
11
+ this._propertyHack = false;
11
12
  this.audioElementRef = createRef();
12
13
  this.frameTask = new Task(this, {
13
14
  args: () => [
@@ -25,14 +26,6 @@ let EFAudio = class EFAudio$1 extends TWMixin(EFMedia) {
25
26
  }
26
27
  });
27
28
  }
28
- static get observedAttributes() {
29
- const parentAttributes = super.observedAttributes || [];
30
- return [...parentAttributes];
31
- }
32
- attributeChangedCallback(name, old, value) {
33
- super.attributeChangedCallback(name, old, value);
34
- if (name === "asset-id") this.assetId = value;
35
- }
36
29
  render() {
37
30
  return html`<audio ${ref(this.audioElementRef)}></audio>`;
38
31
  }
@@ -61,5 +54,9 @@ let EFAudio = class EFAudio$1 extends TWMixin(EFMedia) {
61
54
  return this.audioBufferTask;
62
55
  }
63
56
  };
57
+ _decorate([property({
58
+ type: Boolean,
59
+ attribute: "dummy-property"
60
+ })], EFAudio.prototype, "_propertyHack", void 0);
64
61
  EFAudio = _decorate([customElement("ef-audio")], EFAudio);
65
62
  export { EFAudio };
@@ -3,6 +3,7 @@ import { AudioRendition, InitSegmentPaths, MediaEngine, SegmentTimeRange } from
3
3
  import { UrlGenerator } from '../../transcoding/utils/UrlGenerator';
4
4
  import { EFMedia } from '../EFMedia';
5
5
  import { BaseMediaEngine } from './BaseMediaEngine';
6
+ import { MediaRendition } from './shared/MediaTaskUtils';
6
7
  export declare class AssetMediaEngine extends BaseMediaEngine implements MediaEngine {
7
8
  host: EFMedia;
8
9
  src: string;
@@ -40,8 +41,5 @@ export declare class AssetMediaEngine extends BaseMediaEngine implements MediaEn
40
41
  * Calculate audio segments for variable-duration segments using track fragment index
41
42
  */
42
43
  calculateAudioSegmentRange(fromMs: number, toMs: number, rendition: AudioRendition, _durationMs: number): SegmentTimeRange[];
43
- computeSegmentId(desiredSeekTimeMs: number, rendition: {
44
- trackId: number | undefined;
45
- src: string;
46
- }): number;
44
+ computeSegmentId(desiredSeekTimeMs: number, rendition: MediaRendition): number;
47
45
  }
@@ -1,4 +1,5 @@
1
1
  import { BaseMediaEngine } from "./BaseMediaEngine.js";
2
+ import { convertToScaledTime, roundToMilliseconds } from "./shared/PrecisionUtils.js";
2
3
  var AssetMediaEngine = class AssetMediaEngine extends BaseMediaEngine {
3
4
  static async fetch(host, urlGenerator, src) {
4
5
  const url = urlGenerator.generateTrackFragmentIndexUrl(src);
@@ -81,9 +82,15 @@ var AssetMediaEngine = class AssetMediaEngine extends BaseMediaEngine {
81
82
  * Calculate audio segments for variable-duration segments using track fragment index
82
83
  */
83
84
  calculateAudioSegmentRange(fromMs, toMs, rendition, _durationMs) {
84
- if (fromMs >= toMs || !rendition.trackId) return [];
85
+ if (fromMs >= toMs || !rendition.trackId) {
86
+ console.warn(`calculateAudioSegmentRange: invalid fromMs ${fromMs} toMs ${toMs} rendition ${JSON.stringify(rendition)}`);
87
+ return [];
88
+ }
85
89
  const track = this.data[rendition.trackId];
86
- if (!track) return [];
90
+ if (!track) {
91
+ console.warn(`calculateAudioSegmentRange: track not found for rendition ${JSON.stringify(rendition)}`);
92
+ return [];
93
+ }
87
94
  const { timescale, segments } = track;
88
95
  const segmentRanges = [];
89
96
  for (let i = 0; i < segments.length; i++) {
@@ -98,6 +105,10 @@ var AssetMediaEngine = class AssetMediaEngine extends BaseMediaEngine {
98
105
  endMs: segmentEndMs
99
106
  });
100
107
  }
108
+ if (segmentRanges.length === 0) console.warn(`calculateAudioSegmentRange: no segments found for fromMs ${fromMs} toMs ${toMs} rendition ${JSON.stringify({
109
+ rendition,
110
+ track
111
+ })}`);
101
112
  return segmentRanges;
102
113
  }
103
114
  computeSegmentId(desiredSeekTimeMs, rendition) {
@@ -105,12 +116,30 @@ var AssetMediaEngine = class AssetMediaEngine extends BaseMediaEngine {
105
116
  const track = this.data[rendition.trackId];
106
117
  if (!track) throw new Error("Track not found");
107
118
  const { timescale, segments } = track;
108
- const desiredSeekTime = desiredSeekTimeMs / 1e3 * timescale;
119
+ const startTimeOffsetMs = "startTimeOffsetMs" in rendition && rendition.startTimeOffsetMs || 0;
120
+ const mediaTimeMs = roundToMilliseconds(desiredSeekTimeMs + startTimeOffsetMs);
121
+ const scaledSeekTime = convertToScaledTime(mediaTimeMs, timescale);
109
122
  for (let i = segments.length - 1; i >= 0; i--) {
110
123
  const segment = segments[i];
111
- if (segment.cts <= desiredSeekTime) return i;
124
+ const segmentEndTime = segment.cts + segment.duration;
125
+ if (segment.cts <= scaledSeekTime && scaledSeekTime < segmentEndTime) return i;
126
+ }
127
+ let nearestSegmentIndex = 0;
128
+ let nearestDistance = Number.MAX_SAFE_INTEGER;
129
+ for (let i = 0; i < segments.length; i++) {
130
+ const segment = segments[i];
131
+ const segmentStartTime = segment.cts;
132
+ const segmentEndTime = segment.cts + segment.duration;
133
+ let distance;
134
+ if (scaledSeekTime < segmentStartTime) distance = segmentStartTime - scaledSeekTime;
135
+ else if (scaledSeekTime >= segmentEndTime) distance = scaledSeekTime - segmentEndTime;
136
+ else return i;
137
+ if (distance < nearestDistance) {
138
+ nearestDistance = distance;
139
+ nearestSegmentIndex = i;
140
+ }
112
141
  }
113
- return 0;
142
+ return nearestSegmentIndex;
114
143
  }
115
144
  };
116
145
  export { AssetMediaEngine };
@@ -76,14 +76,33 @@ var BaseMediaEngine = class {
76
76
  */
77
77
  calculateAudioSegmentRange(fromMs, toMs, rendition, durationMs) {
78
78
  if (fromMs >= toMs) return [];
79
- const segmentDurationMs = rendition.segmentDurationMs || 1e3;
80
79
  const segments = [];
80
+ if (rendition.segmentDurationsMs && rendition.segmentDurationsMs.length > 0) {
81
+ let cumulativeTime = 0;
82
+ for (let i = 0; i < rendition.segmentDurationsMs.length; i++) {
83
+ const segmentDuration = rendition.segmentDurationsMs[i];
84
+ if (segmentDuration === void 0) continue;
85
+ const segmentStartMs = cumulativeTime;
86
+ const segmentEndMs = Math.min(cumulativeTime + segmentDuration, durationMs);
87
+ if (segmentStartMs >= durationMs) break;
88
+ if (segmentStartMs < toMs && segmentEndMs > fromMs) segments.push({
89
+ segmentId: i + 1,
90
+ startMs: segmentStartMs,
91
+ endMs: segmentEndMs
92
+ });
93
+ cumulativeTime += segmentDuration;
94
+ if (cumulativeTime >= durationMs) break;
95
+ }
96
+ return segments;
97
+ }
98
+ const segmentDurationMs = rendition.segmentDurationMs || 1e3;
81
99
  const startSegmentIndex = Math.floor(fromMs / segmentDurationMs);
82
100
  const endSegmentIndex = Math.floor(toMs / segmentDurationMs);
83
101
  for (let i = startSegmentIndex; i <= endSegmentIndex; i++) {
84
102
  const segmentId = i + 1;
85
103
  const segmentStartMs = i * segmentDurationMs;
86
104
  const segmentEndMs = Math.min((i + 1) * segmentDurationMs, durationMs);
105
+ if (segmentStartMs >= durationMs) break;
87
106
  if (segmentStartMs < toMs && segmentEndMs > fromMs) segments.push({
88
107
  segmentId,
89
108
  startMs: segmentStartMs,
@@ -3,8 +3,8 @@ interface BufferedSeekingInputOptions {
3
3
  videoBufferSize?: number;
4
4
  audioBufferSize?: number;
5
5
  /**
6
- * FFmpeg start_time offset in milliseconds from the processed video.
7
- * Applied during seeking to correct for timing shifts introduced by FFmpeg processing.
6
+ * Timeline offset in milliseconds to map user timeline to media timeline.
7
+ * Applied during seeking to handle media that doesn't start at 0ms.
8
8
  */
9
9
  startTimeOffsetMs?: number;
10
10
  }
@@ -18,8 +18,8 @@ export declare class BufferedSeekingInput {
18
18
  private trackIteratorCreationPromises;
19
19
  private trackSeekPromises;
20
20
  /**
21
- * FFmpeg start_time offset in milliseconds from the processed video.
22
- * Applied during seeking to correct for timing shifts introduced by FFmpeg processing.
21
+ * Timeline offset in milliseconds to map user timeline to media timeline.
22
+ * Applied during seeking to handle media that doesn't start at 0ms.
23
23
  */
24
24
  private readonly startTimeOffsetMs;
25
25
  constructor(arrayBuffer: ArrayBuffer, options?: BufferedSeekingInputOptions);
@@ -36,7 +36,7 @@ export declare class BufferedSeekingInput {
36
36
  getTrackIterator(trackId: number): Promise<AsyncIterator<MediaSample, any, undefined>>;
37
37
  private createIteratorSafe;
38
38
  createTrackBuffer(trackId: number): Promise<void>;
39
- seek(trackId: number, timeMs: number): Promise<MediaSample>;
39
+ seek(trackId: number, timeMs: number): Promise<MediaSample | undefined>;
40
40
  private resetIterator;
41
41
  private seekSafe;
42
42
  }
@@ -1,3 +1,4 @@
1
+ import { roundToMilliseconds } from "./shared/PrecisionUtils.js";
1
2
  import { SampleBuffer } from "../SampleBuffer.js";
2
3
  import { AudioSampleSink, BufferSource, Input, MP4, VideoSampleSink } from "mediabunny";
3
4
  const defaultOptions = {
@@ -113,10 +114,11 @@ var BufferedSeekingInput = class {
113
114
  }
114
115
  }
115
116
  async seek(trackId, timeMs) {
116
- const correctedTimeMs = timeMs + this.startTimeOffsetMs;
117
+ const mediaTimeMs = timeMs + this.startTimeOffsetMs;
118
+ const roundedMediaTimeMs = roundToMilliseconds(mediaTimeMs);
117
119
  const existingSeek = this.trackSeekPromises.get(trackId);
118
120
  if (existingSeek) await existingSeek;
119
- const seekPromise = this.seekSafe(trackId, correctedTimeMs);
121
+ const seekPromise = this.seekSafe(trackId, roundedMediaTimeMs);
120
122
  this.trackSeekPromises.set(trackId, seekPromise);
121
123
  try {
122
124
  return await seekPromise;
@@ -138,12 +140,24 @@ var BufferedSeekingInput = class {
138
140
  async seekSafe(trackId, timeMs) {
139
141
  if (!this.trackBuffers.has(trackId)) await this.createTrackBuffer(trackId);
140
142
  const trackBuffer = this.trackBuffers.get(trackId);
141
- if (timeMs < trackBuffer.firstTimestamp * 1e3) await this.resetIterator(trackId);
142
- const alreadyInBuffer = trackBuffer.find(timeMs);
143
143
  const track = await this.getTrack(trackId);
144
- const firstTimestampMs = await track.getFirstTimestamp() * 1e3;
145
- const lastSampleEndMs = await track.computeDuration() * 1e3;
146
- if (timeMs < firstTimestampMs || timeMs >= lastSampleEndMs) throw new NoSample(`Seek time ${timeMs}ms is outside track range [${firstTimestampMs}ms, ${lastSampleEndMs}ms]`);
144
+ const firstTimestampMs = roundToMilliseconds(await track.getFirstTimestamp() * 1e3);
145
+ let roundedTimeMs = roundToMilliseconds(timeMs);
146
+ if (roundedTimeMs < firstTimestampMs) {
147
+ const bufferContents$1 = trackBuffer.getContents();
148
+ if (bufferContents$1.length > 0) {
149
+ timeMs = firstTimestampMs;
150
+ roundedTimeMs = roundToMilliseconds(timeMs);
151
+ }
152
+ }
153
+ const bufferContents = trackBuffer.getContents();
154
+ if (bufferContents.length > 0) {
155
+ const bufferStartMs = roundToMilliseconds(trackBuffer.firstTimestamp * 1e3);
156
+ const lastSample = bufferContents[bufferContents.length - 1];
157
+ const bufferEndMs = lastSample ? roundToMilliseconds((lastSample.timestamp + (lastSample.duration || 0)) * 1e3) : bufferStartMs;
158
+ if (roundedTimeMs < bufferStartMs || roundedTimeMs > bufferEndMs) await this.resetIterator(trackId);
159
+ }
160
+ const alreadyInBuffer = trackBuffer.find(timeMs);
147
161
  if (alreadyInBuffer) return alreadyInBuffer;
148
162
  const iterator = await this.getTrackIterator(trackId);
149
163
  while (true) {
@@ -153,6 +167,12 @@ var BufferedSeekingInput = class {
153
167
  if (foundSample) return foundSample;
154
168
  if (done) break;
155
169
  }
170
+ const finalBufferContents = trackBuffer.getContents();
171
+ if (finalBufferContents.length > 0) {
172
+ const lastSample = finalBufferContents[finalBufferContents.length - 1];
173
+ const lastSampleEndMs = roundToMilliseconds(((lastSample?.timestamp || 0) + (lastSample?.duration || 0)) * 1e3);
174
+ if (roundToMilliseconds(timeMs) >= lastSampleEndMs) return lastSample;
175
+ }
156
176
  throw new NoSample(`Sample not found for time ${timeMs} in ${track.type} track ${trackId}`);
157
177
  }
158
178
  };
@@ -27,5 +27,5 @@ export declare class JitMediaEngine extends BaseMediaEngine implements MediaEngi
27
27
  trackId: number | undefined;
28
28
  src: string;
29
29
  }): Promise<ArrayBuffer>;
30
- computeSegmentId(desiredSeekTimeMs: number, rendition: VideoRendition | AudioRendition): number;
30
+ computeSegmentId(desiredSeekTimeMs: number, rendition: VideoRendition | AudioRendition): number | undefined;
31
31
  }
@@ -24,7 +24,8 @@ var JitMediaEngine = class JitMediaEngine extends BaseMediaEngine {
24
24
  id: rendition.id,
25
25
  trackId: void 0,
26
26
  src: this.data.sourceUrl,
27
- segmentDurationMs: rendition.segmentDurationMs
27
+ segmentDurationMs: rendition.segmentDurationMs,
28
+ segmentDurationsMs: rendition.segmentDurationsMs
28
29
  };
29
30
  }
30
31
  get videoRendition() {
@@ -34,7 +35,8 @@ var JitMediaEngine = class JitMediaEngine extends BaseMediaEngine {
34
35
  id: rendition.id,
35
36
  trackId: void 0,
36
37
  src: this.data.sourceUrl,
37
- segmentDurationMs: rendition.segmentDurationMs
38
+ segmentDurationMs: rendition.segmentDurationMs,
39
+ segmentDurationsMs: rendition.segmentDurationsMs
38
40
  };
39
41
  }
40
42
  get templates() {
@@ -53,9 +55,26 @@ var JitMediaEngine = class JitMediaEngine extends BaseMediaEngine {
53
55
  return this.fetchMediaCache(url);
54
56
  }
55
57
  computeSegmentId(desiredSeekTimeMs, rendition) {
58
+ if (desiredSeekTimeMs > this.durationMs) return void 0;
59
+ if (rendition.segmentDurationsMs && rendition.segmentDurationsMs.length > 0) {
60
+ let cumulativeTime = 0;
61
+ for (let i = 0; i < rendition.segmentDurationsMs.length; i++) {
62
+ const segmentDuration = rendition.segmentDurationsMs[i];
63
+ if (segmentDuration === void 0) throw new Error("Segment duration is required for JIT metadata");
64
+ const segmentStartMs$1 = cumulativeTime;
65
+ const segmentEndMs = cumulativeTime + segmentDuration;
66
+ const isLastSegment = i === rendition.segmentDurationsMs.length - 1;
67
+ const includesEndTime = isLastSegment && desiredSeekTimeMs === this.durationMs;
68
+ if (desiredSeekTimeMs >= segmentStartMs$1 && (desiredSeekTimeMs < segmentEndMs || includesEndTime)) return i + 1;
69
+ cumulativeTime += segmentDuration;
70
+ if (cumulativeTime >= this.durationMs) break;
71
+ }
72
+ return void 0;
73
+ }
56
74
  if (!rendition.segmentDurationMs) throw new Error("Segment duration is required for JIT metadata");
57
75
  const segmentIndex = Math.floor(desiredSeekTimeMs / rendition.segmentDurationMs);
58
- if (segmentIndex * rendition.segmentDurationMs >= this.durationMs) return segmentIndex;
76
+ const segmentStartMs = segmentIndex * rendition.segmentDurationMs;
77
+ if (segmentStartMs >= this.durationMs) return void 0;
59
78
  return segmentIndex + 1;
60
79
  }
61
80
  };
@@ -57,7 +57,10 @@ function makeAudioFrequencyAnalysisTask(element) {
57
57
  const currentTimeMs = element.currentSourceTimeMs;
58
58
  const analysisWindowMs = 5e3;
59
59
  const fromMs = Math.max(0, currentTimeMs);
60
- const toMs = fromMs + analysisWindowMs;
60
+ const maxToMs = fromMs + analysisWindowMs;
61
+ const videoDurationMs = element.intrinsicDurationMs || 0;
62
+ const toMs = videoDurationMs > 0 ? Math.min(maxToMs, videoDurationMs) : maxToMs;
63
+ if (fromMs >= toMs) return null;
61
64
  const { fetchAudioSpanningTime: fetchAudioSpan } = await import("../shared/AudioSpanUtils.js");
62
65
  const audioSpan = await fetchAudioSpan(element, fromMs, toMs, new AbortController().signal);
63
66
  if (!audioSpan || !audioSpan.blob) {
@@ -8,13 +8,21 @@ const makeAudioInputTask = (host) => {
8
8
  console.error("audioInputTask error", error);
9
9
  },
10
10
  onComplete: (_value) => {},
11
- task: async () => {
11
+ task: async (_, { signal }) => {
12
12
  const initSegment = await host.audioInitSegmentFetchTask.taskComplete;
13
+ signal.throwIfAborted();
13
14
  const segment = await host.audioSegmentFetchTask.taskComplete;
15
+ signal.throwIfAborted();
14
16
  if (!initSegment || !segment) throw new Error("Init segment or segment is not available");
15
- return new BufferedSeekingInput(await new Blob([initSegment, segment]).arrayBuffer(), {
17
+ const mediaEngine = await host.mediaEngineTask.taskComplete;
18
+ const audioRendition = mediaEngine?.audioRendition;
19
+ const startTimeOffsetMs = audioRendition?.startTimeOffsetMs;
20
+ const arrayBuffer = await new Blob([initSegment, segment]).arrayBuffer();
21
+ signal.throwIfAborted();
22
+ return new BufferedSeekingInput(arrayBuffer, {
16
23
  videoBufferSize: EFMedia.VIDEO_SAMPLE_BUFFER_SIZE,
17
- audioBufferSize: EFMedia.AUDIO_SAMPLE_BUFFER_SIZE
24
+ audioBufferSize: EFMedia.AUDIO_SAMPLE_BUFFER_SIZE,
25
+ startTimeOffsetMs
18
26
  });
19
27
  }
20
28
  });
@@ -4,19 +4,32 @@ const makeAudioSeekTask = (host) => {
4
4
  return new Task(host, {
5
5
  args: () => [host.desiredSeekTimeMs, host.audioInputTask.value],
6
6
  onError: (error) => {
7
- if (error instanceof IgnorableError) console.info("audioSeekTask aborted");
8
- console.error("audioSeekTask error", error);
7
+ if (error instanceof IgnorableError) {
8
+ console.info("audioSeekTask aborted");
9
+ return;
10
+ }
11
+ if (error instanceof DOMException) console.error(`audioSeekTask error: ${error.message} ${error.name} ${error.code}`);
12
+ else if (error instanceof Error) console.error(`audioSeekTask error ${error.name}: ${error.message}`);
13
+ else console.error("audioSeekTask unknown error", error);
9
14
  },
10
15
  onComplete: (_value) => {},
11
- task: async (_) => {
16
+ task: async ([targetSeekTimeMs], { signal }) => {
12
17
  await host.audioSegmentIdTask.taskComplete;
18
+ signal.throwIfAborted();
13
19
  await host.audioSegmentFetchTask.taskComplete;
20
+ signal.throwIfAborted();
14
21
  await host.audioInitSegmentFetchTask.taskComplete;
22
+ signal.throwIfAborted();
15
23
  const audioInput = await host.audioInputTask.taskComplete;
24
+ signal.throwIfAborted();
16
25
  if (!audioInput) throw new Error("Audio input is not available");
17
26
  const audioTrack = await audioInput.getFirstAudioTrack();
18
27
  if (!audioTrack) throw new Error("Audio track is not available");
19
- const sample = await audioInput.seek(audioTrack.id, host.desiredSeekTimeMs);
28
+ signal.throwIfAborted();
29
+ const sample = await audioInput.seek(audioTrack.id, targetSeekTimeMs);
30
+ signal.throwIfAborted();
31
+ if (sample === void 0 && signal.aborted) return void 0;
32
+ if (sample === void 0) throw new Error("Audio seek failed to find sample");
20
33
  return sample;
21
34
  }
22
35
  });
@@ -10,7 +10,17 @@ const makeAudioSegmentFetchTask = (host) => {
10
10
  task: async (_, { signal }) => {
11
11
  const mediaEngine = await getLatestMediaEngine(host, signal);
12
12
  const segmentId = await host.audioSegmentIdTask.taskComplete;
13
- if (segmentId === void 0) throw new Error("Segment ID is not available");
13
+ if (segmentId === void 0) {
14
+ const rendition = mediaEngine.audioRendition;
15
+ const debugInfo = {
16
+ hasRendition: !!rendition,
17
+ segmentDurationMs: rendition?.segmentDurationMs,
18
+ segmentDurationsMs: rendition?.segmentDurationsMs?.length || 0,
19
+ desiredSeekTimeMs: host.desiredSeekTimeMs,
20
+ intrinsicDurationMs: host.intrinsicDurationMs
21
+ };
22
+ throw new Error(`Segment ID is not available for audio. Debug info: ${JSON.stringify(debugInfo)}`);
23
+ }
14
24
  return mediaEngine.fetchMediaSegment(segmentId, mediaEngine.getAudioRendition(), signal);
15
25
  }
16
26
  });
@@ -7,9 +7,10 @@ const makeAudioSegmentIdTask = (host) => {
7
7
  console.error("audioSegmentIdTask error", error);
8
8
  },
9
9
  onComplete: (_value) => {},
10
- task: async (_, { signal }) => {
10
+ task: async ([, targetSeekTimeMs], { signal }) => {
11
11
  const mediaEngine = await getLatestMediaEngine(host, signal);
12
- return mediaEngine.computeSegmentId(host.desiredSeekTimeMs, mediaEngine.getAudioRendition());
12
+ signal.throwIfAborted();
13
+ return mediaEngine.computeSegmentId(targetSeekTimeMs, mediaEngine.getAudioRendition());
13
14
  }
14
15
  });
15
16
  };
@@ -24,7 +24,10 @@ function makeAudioTimeDomainAnalysisTask(element) {
24
24
  const currentTimeMs = element.currentSourceTimeMs;
25
25
  const analysisWindowMs = 5e3;
26
26
  const fromMs = Math.max(0, currentTimeMs);
27
- const toMs = fromMs + analysisWindowMs;
27
+ const maxToMs = fromMs + analysisWindowMs;
28
+ const videoDurationMs = element.intrinsicDurationMs || 0;
29
+ const toMs = videoDurationMs > 0 ? Math.min(maxToMs, videoDurationMs) : maxToMs;
30
+ if (fromMs >= toMs) return null;
28
31
  const { fetchAudioSpanningTime: fetchAudioSpan } = await import("../shared/AudioSpanUtils.js");
29
32
  const audioSpan = await fetchAudioSpan(element, fromMs, toMs, new AbortController().signal);
30
33
  if (!audioSpan || !audioSpan.blob) {
@@ -0,0 +1,28 @@
1
+ /**
2
+ * Centralized precision utilities for consistent timing calculations across the media pipeline.
3
+ *
4
+ * The key insight is that floating-point precision errors can cause inconsistencies between:
5
+ * 1. Segment selection logic (in AssetMediaEngine.computeSegmentId)
6
+ * 2. Sample finding logic (in SampleBuffer.find)
7
+ * 3. Timeline mapping (in BufferedSeekingInput.seek)
8
+ *
9
+ * All timing calculations must use the same rounding strategy to ensure consistency.
10
+ */
11
+ /**
12
+ * Round time to sub-millisecond (1 µs) precision to handle floating-point precision issues.
13
+ * Uses Math.round for consistent behavior across the entire pipeline.
14
+ *
15
+ * This function should be used for ALL time-related calculations that need to be
16
+ * compared between different parts of the system.
17
+ */
18
+ export declare const roundToMilliseconds: (timeMs: number) => number;
19
+ /**
20
+ * Convert media time (in milliseconds) to scaled time units using consistent rounding.
21
+ * This is used in segment selection to convert from milliseconds to timescale units.
22
+ */
23
+ export declare const convertToScaledTime: (timeMs: number, timescale: number) => number;
24
+ /**
25
+ * Convert scaled time units back to media time (in milliseconds) using consistent rounding.
26
+ * This is the inverse of convertToScaledTime.
27
+ */
28
+ export declare const convertFromScaledTime: (scaledTime: number, timescale: number) => number;
@@ -0,0 +1,29 @@
1
+ /**
2
+ * Centralized precision utilities for consistent timing calculations across the media pipeline.
3
+ *
4
+ * The key insight is that floating-point precision errors can cause inconsistencies between:
5
+ * 1. Segment selection logic (in AssetMediaEngine.computeSegmentId)
6
+ * 2. Sample finding logic (in SampleBuffer.find)
7
+ * 3. Timeline mapping (in BufferedSeekingInput.seek)
8
+ *
9
+ * All timing calculations must use the same rounding strategy to ensure consistency.
10
+ */
11
+ /**
12
+ * Round time to millisecond precision to handle floating-point precision issues.
13
+ * Uses Math.round for consistent behavior across the entire pipeline.
14
+ *
15
+ * This function should be used for ALL time-related calculations that need to be
16
+ * compared between different parts of the system.
17
+ */
18
+ const roundToMilliseconds = (timeMs) => {
19
+ return Math.round(timeMs * 1e3) / 1e3;
20
+ };
21
/**
 * Convert a media time in milliseconds to scaled timescale units.
 *
 * NOTE: the input is milliseconds, not seconds — see the `/ 1e3` below and
 * the `timeMs: number` parameter in the matching PrecisionUtils.d.ts
 * declaration. (An earlier comment here incorrectly said "in seconds".)
 * Used by segment selection so that segment maths happens in integer
 * timescale units rather than error-prone floats.
 *
 * @param timeMs    time in milliseconds
 * @param timescale timescale units per second (e.g. 90000 for MPEG video)
 * @returns nearest integer count of timescale units
 */
const convertToScaledTime = (timeMs, timescale) => {
  // ms -> seconds -> timescale units, rounded once at the end so every
  // caller lands on the same integer.
  const scaledTime = timeMs / 1e3 * timescale;
  return Math.round(scaledTime);
};
29
+ export { convertToScaledTime, roundToMilliseconds };
@@ -8,16 +8,25 @@ const makeVideoSeekTask = (host) => {
8
8
  console.error("videoSeekTask error", error);
9
9
  },
10
10
  onComplete: (_value) => {},
11
- task: async (_) => {
11
+ task: async ([targetSeekTimeMs], { signal }) => {
12
12
  await host.mediaEngineTask.taskComplete;
13
+ signal.throwIfAborted();
13
14
  await host.videoSegmentIdTask.taskComplete;
15
+ signal.throwIfAborted();
14
16
  await host.videoSegmentFetchTask.taskComplete;
17
+ signal.throwIfAborted();
15
18
  await host.videoInitSegmentFetchTask.taskComplete;
19
+ signal.throwIfAborted();
16
20
  const videoInput = await host.videoInputTask.taskComplete;
21
+ signal.throwIfAborted();
17
22
  if (!videoInput) throw new Error("Video input is not available");
18
23
  const videoTrack = await videoInput.getFirstVideoTrack();
19
24
  if (!videoTrack) throw new Error("Video track is not available");
20
- const sample = await videoInput.seek(videoTrack.id, host.desiredSeekTimeMs);
25
+ signal.throwIfAborted();
26
+ const sample = await videoInput.seek(videoTrack.id, targetSeekTimeMs);
27
+ signal.throwIfAborted();
28
+ if (sample === void 0 && signal.aborted) return void 0;
29
+ if (sample === void 0) throw new Error("Video seek failed to find sample");
21
30
  return sample;
22
31
  }
23
32
  });
@@ -10,7 +10,17 @@ const makeVideoSegmentFetchTask = (host) => {
10
10
  task: async (_, { signal }) => {
11
11
  const mediaEngine = await getLatestMediaEngine(host, signal);
12
12
  const segmentId = await host.videoSegmentIdTask.taskComplete;
13
- if (segmentId === void 0) throw new Error("Segment ID is not available");
13
+ if (segmentId === void 0) {
14
+ const rendition = mediaEngine.videoRendition;
15
+ const debugInfo = {
16
+ hasRendition: !!rendition,
17
+ segmentDurationMs: rendition?.segmentDurationMs,
18
+ segmentDurationsMs: rendition?.segmentDurationsMs?.length || 0,
19
+ desiredSeekTimeMs: host.desiredSeekTimeMs,
20
+ intrinsicDurationMs: host.intrinsicDurationMs
21
+ };
22
+ throw new Error(`Segment ID is not available for video. Debug info: ${JSON.stringify(debugInfo)}`);
23
+ }
14
24
  return mediaEngine.fetchMediaSegment(segmentId, mediaEngine.getVideoRendition(), signal);
15
25
  }
16
26
  });
@@ -7,9 +7,10 @@ const makeVideoSegmentIdTask = (host) => {
7
7
  console.error("videoSegmentIdTask error", error);
8
8
  },
9
9
  onComplete: (_value) => {},
10
- task: async (_, { signal }) => {
10
+ task: async ([, targetSeekTimeMs], { signal }) => {
11
11
  const mediaEngine = await getLatestMediaEngine(host, signal);
12
- return mediaEngine.computeSegmentId(host.desiredSeekTimeMs, mediaEngine.getVideoRendition());
12
+ signal.throwIfAborted();
13
+ return mediaEngine.computeSegmentId(targetSeekTimeMs, mediaEngine.getVideoRendition());
13
14
  }
14
15
  });
15
16
  };
@@ -12,8 +12,6 @@ export declare class EFMedia extends EFMedia_base {
12
12
  static readonly VIDEO_SAMPLE_BUFFER_SIZE = 30;
13
13
  static readonly AUDIO_SAMPLE_BUFFER_SIZE = 120;
14
14
  static get observedAttributes(): string[];
15
- private mediaSourceService;
16
- private audioElementFactory;
17
15
  static styles: import('lit').CSSResult[];
18
16
  currentTimeMs: number;
19
17
  /**
@@ -86,10 +84,6 @@ export declare class EFMedia extends EFMedia_base {
86
84
  * Now powered by clean, testable utility functions
87
85
  */
88
86
  fetchAudioSpanningTime(fromMs: number, toMs: number, signal?: AbortSignal): Promise<AudioSpan>;
89
- /**
90
- * Get the HTML audio element for ContextMixin integration
91
- */
92
- get audioElement(): HTMLAudioElement | null;
93
87
  /**
94
88
  * Check if an audio segment is cached in the unified buffer system
95
89
  * Now uses the same caching approach as video for consistency
@@ -100,11 +94,5 @@ export declare class EFMedia extends EFMedia_base {
100
94
  * Now uses the same caching approach as video for consistency
101
95
  */
102
96
  getCachedAudioSegments(segmentIds: number[]): Set<number>;
103
- /**
104
- * Get MediaElementAudioSourceNode for ContextMixin integration
105
- * Uses AudioElementFactory for proper caching and lifecycle management
106
- */
107
- getMediaElementSource(audioContext: AudioContext): Promise<MediaElementAudioSourceNode>;
108
- disconnectedCallback(): void;
109
97
  }
110
98
  export {};