@editframe/elements 0.20.2-beta.0 → 0.20.4-beta.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (67)
  1. package/dist/EF_FRAMEGEN.js +3 -20
  2. package/dist/elements/EFMedia/AssetMediaEngine.d.ts +4 -4
  3. package/dist/elements/EFMedia/AssetMediaEngine.js +8 -4
  4. package/dist/elements/EFMedia/BaseMediaEngine.d.ts +10 -2
  5. package/dist/elements/EFMedia/BaseMediaEngine.js +8 -2
  6. package/dist/elements/EFMedia/JitMediaEngine.js +13 -4
  7. package/dist/elements/EFMedia/audioTasks/makeAudioBufferTask.js +1 -1
  8. package/dist/elements/EFMedia/audioTasks/makeAudioFrequencyAnalysisTask.js +0 -2
  9. package/dist/elements/EFMedia/audioTasks/makeAudioInitSegmentFetchTask.js +1 -1
  10. package/dist/elements/EFMedia/audioTasks/makeAudioInputTask.js +5 -4
  11. package/dist/elements/EFMedia/audioTasks/makeAudioSegmentFetchTask.js +2 -12
  12. package/dist/elements/EFMedia/audioTasks/makeAudioSegmentIdTask.js +1 -1
  13. package/dist/elements/EFMedia/audioTasks/makeAudioTasksVideoOnly.browsertest.d.ts +1 -0
  14. package/dist/elements/EFMedia/audioTasks/makeAudioTimeDomainAnalysisTask.js +5 -2
  15. package/dist/elements/EFMedia/shared/AudioSpanUtils.d.ts +1 -1
  16. package/dist/elements/EFMedia/shared/AudioSpanUtils.js +3 -3
  17. package/dist/elements/EFMedia/shared/BufferUtils.d.ts +1 -1
  18. package/dist/elements/EFMedia/shared/BufferUtils.js +3 -1
  19. package/dist/elements/EFMedia/shared/MediaTaskUtils.d.ts +1 -1
  20. package/dist/elements/EFMedia/shared/RenditionHelpers.d.ts +1 -9
  21. package/dist/elements/EFMedia/tasks/makeMediaEngineTask.d.ts +1 -2
  22. package/dist/elements/EFMedia/tasks/makeMediaEngineTask.js +1 -6
  23. package/dist/elements/EFMedia/videoTasks/makeScrubVideoBufferTask.js +2 -1
  24. package/dist/elements/EFMedia/videoTasks/makeScrubVideoInitSegmentFetchTask.js +0 -2
  25. package/dist/elements/EFMedia/videoTasks/makeScrubVideoInputTask.js +0 -2
  26. package/dist/elements/EFMedia/videoTasks/makeScrubVideoSegmentFetchTask.js +0 -2
  27. package/dist/elements/EFMedia/videoTasks/makeScrubVideoSegmentIdTask.js +0 -2
  28. package/dist/elements/EFMedia/videoTasks/makeUnifiedVideoSeekTask.js +4 -5
  29. package/dist/elements/EFMedia/videoTasks/makeVideoBufferTask.js +2 -2
  30. package/dist/elements/EFMedia.d.ts +2 -1
  31. package/dist/elements/EFMedia.js +1 -0
  32. package/dist/elements/EFTimegroup.js +1 -1
  33. package/dist/transcoding/types/index.d.ts +6 -4
  34. package/package.json +2 -2
  35. package/src/elements/EFMedia/AssetIdMediaEngine.test.ts +6 -4
  36. package/src/elements/EFMedia/AssetMediaEngine.browsertest.ts +25 -23
  37. package/src/elements/EFMedia/AssetMediaEngine.ts +16 -6
  38. package/src/elements/EFMedia/BaseMediaEngine.browsertest.ts +94 -0
  39. package/src/elements/EFMedia/BaseMediaEngine.ts +10 -8
  40. package/src/elements/EFMedia/JitMediaEngine.ts +20 -6
  41. package/src/elements/EFMedia/audioTasks/makeAudioBufferTask.ts +5 -2
  42. package/src/elements/EFMedia/audioTasks/makeAudioFrequencyAnalysisTask.ts +0 -5
  43. package/src/elements/EFMedia/audioTasks/makeAudioInitSegmentFetchTask.ts +2 -1
  44. package/src/elements/EFMedia/audioTasks/makeAudioInputTask.ts +11 -5
  45. package/src/elements/EFMedia/audioTasks/makeAudioSegmentFetchTask.ts +4 -16
  46. package/src/elements/EFMedia/audioTasks/makeAudioSegmentIdTask.ts +4 -2
  47. package/src/elements/EFMedia/audioTasks/makeAudioTasksVideoOnly.browsertest.ts +95 -0
  48. package/src/elements/EFMedia/audioTasks/makeAudioTimeDomainAnalysisTask.ts +5 -6
  49. package/src/elements/EFMedia/shared/AudioSpanUtils.ts +5 -4
  50. package/src/elements/EFMedia/shared/BufferUtils.ts +7 -3
  51. package/src/elements/EFMedia/shared/MediaTaskUtils.ts +1 -1
  52. package/src/elements/EFMedia/shared/RenditionHelpers.browsertest.ts +41 -42
  53. package/src/elements/EFMedia/shared/RenditionHelpers.ts +0 -23
  54. package/src/elements/EFMedia/tasks/makeMediaEngineTask.ts +1 -9
  55. package/src/elements/EFMedia/videoTasks/makeScrubVideoBufferTask.ts +3 -2
  56. package/src/elements/EFMedia/videoTasks/makeScrubVideoInitSegmentFetchTask.ts +0 -5
  57. package/src/elements/EFMedia/videoTasks/makeScrubVideoInputTask.ts +1 -5
  58. package/src/elements/EFMedia/videoTasks/makeScrubVideoSegmentFetchTask.ts +0 -5
  59. package/src/elements/EFMedia/videoTasks/makeScrubVideoSegmentIdTask.ts +0 -5
  60. package/src/elements/EFMedia/videoTasks/makeUnifiedVideoSeekTask.ts +10 -19
  61. package/src/elements/EFMedia/videoTasks/makeVideoBufferTask.ts +2 -5
  62. package/src/elements/EFMedia.ts +2 -1
  63. package/src/elements/EFThumbnailStrip.media-engine.browsertest.ts +2 -1
  64. package/src/elements/EFTimegroup.ts +1 -1
  65. package/src/transcoding/types/index.ts +6 -4
  66. package/src/utils/LRUCache.test.ts +3 -1
  67. package/types.json +1 -1

package/src/elements/EFMedia/AssetMediaEngine.browsertest.ts
@@ -57,8 +57,9 @@ describe("AssetMediaEngine", () => {
  expect,
  }) => {
  const audioRendition = mediaEngine.audioRendition;
- expect(audioRendition.trackId).toBe(2);
- expect(audioRendition.src).toBe(host.src);
+ expect(audioRendition).toBeDefined();
+ expect(audioRendition!.trackId).toBe(2);
+ expect(audioRendition!.src).toBe(host.src);
  });

  test("returns video rendition with correct properties", ({
@@ -67,9 +68,10 @@ describe("AssetMediaEngine", () => {
  expect,
  }) => {
  const videoRendition = mediaEngine.videoRendition;
- expect(videoRendition.trackId).toBe(1);
- expect(videoRendition.src).toBe(host.src);
- expect(videoRendition.startTimeOffsetMs).toBeCloseTo(66.6, 0);
+ expect(videoRendition).toBeDefined();
+ expect(videoRendition!.trackId).toBe(1);
+ expect(videoRendition!.src).toBe(host.src);
+ expect(videoRendition!.startTimeOffsetMs).toBeCloseTo(66.6, 0);
  });

  test("provides templates for asset endpoints", ({ mediaEngine, expect }) => {
@@ -100,39 +102,39 @@ describe("AssetMediaEngine", () => {

  describe("bars n tone segment id computation", () => {
  test("computes 0ms is 0", ({ expect, mediaEngine }) => {
- expect(
- mediaEngine.computeSegmentId(0, mediaEngine.getVideoRendition()),
- ).toBe(0);
+ const videoRendition = mediaEngine.getVideoRendition();
+ expect(videoRendition).toBeDefined();
+ expect(mediaEngine.computeSegmentId(0, videoRendition!)).toBe(0);
  });

  test("computes 2000 is 1", ({ expect, mediaEngine }) => {
- expect(
- mediaEngine.computeSegmentId(2000, mediaEngine.getVideoRendition()),
- ).toBe(1);
+ const videoRendition = mediaEngine.getVideoRendition();
+ expect(videoRendition).toBeDefined();
+ expect(mediaEngine.computeSegmentId(2000, videoRendition!)).toBe(1);
  });

  test("computes 4000 is 2", ({ expect, mediaEngine }) => {
- expect(
- mediaEngine.computeSegmentId(4000, mediaEngine.getVideoRendition()),
- ).toBe(2);
+ const videoRendition = mediaEngine.getVideoRendition();
+ expect(videoRendition).toBeDefined();
+ expect(mediaEngine.computeSegmentId(4000, videoRendition!)).toBe(2);
  });

  test("computes 6000 is 3", ({ expect, mediaEngine }) => {
- expect(
- mediaEngine.computeSegmentId(6000, mediaEngine.getVideoRendition()),
- ).toBe(3);
+ const videoRendition = mediaEngine.getVideoRendition();
+ expect(videoRendition).toBeDefined();
+ expect(mediaEngine.computeSegmentId(6000, videoRendition!)).toBe(3);
  });

  test("computes 8000 is 4", ({ expect, mediaEngine }) => {
- expect(
- mediaEngine.computeSegmentId(8000, mediaEngine.getVideoRendition()),
- ).toBe(4);
+ const videoRendition = mediaEngine.getVideoRendition();
+ expect(videoRendition).toBeDefined();
+ expect(mediaEngine.computeSegmentId(8000, videoRendition!)).toBe(4);
  });

  test("computes 7975 is 3", ({ expect, mediaEngine }) => {
- expect(
- mediaEngine.computeSegmentId(7975, mediaEngine.getVideoRendition()),
- ).toBe(3);
+ const videoRendition = mediaEngine.getVideoRendition();
+ expect(videoRendition).toBeDefined();
+ expect(mediaEngine.computeSegmentId(7975, videoRendition!)).toBe(3);
  });
  });
  });
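
The segment id expectations above all follow from one piece of arithmetic: with the fixed 2000 ms segment duration these fixtures imply, the segment id is the zero-based index of the segment containing the timestamp. A minimal sketch of that calculation (an assumption for illustration, not the package source):

// Minimal sketch, assuming the fixed 2000 ms segment duration the expectations
// above imply; not taken from the package source.
const SEGMENT_DURATION_MS = 2000;

function segmentIdForFixedDuration(timeMs: number): number {
  // Zero-based index of the segment that contains the timestamp.
  return Math.floor(timeMs / SEGMENT_DURATION_MS);
}

// Matches the tests: 0 -> 0, 2000 -> 1, 4000 -> 2, 6000 -> 3, 7975 -> 3, 8000 -> 4.
console.log([0, 2000, 4000, 6000, 7975, 8000].map(segmentIdForFixedDuration));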

package/src/elements/EFMedia/AssetMediaEngine.ts
@@ -54,16 +54,28 @@ export class AssetMediaEngine extends BaseMediaEngine implements MediaEngine {
  }

  get videoRendition() {
+ const videoTrack = this.videoTrackIndex;
+
+ if (!videoTrack || videoTrack.track === undefined) {
+ return undefined;
+ }
+
  return {
- trackId: this.videoTrackIndex?.track,
+ trackId: videoTrack.track,
  src: this.src,
- startTimeOffsetMs: this.videoTrackIndex?.startTimeOffsetMs,
+ startTimeOffsetMs: videoTrack.startTimeOffsetMs,
  };
  }

  get audioRendition() {
+ const audioTrack = this.audioTrackIndex;
+
+ if (!audioTrack || audioTrack.track === undefined) {
+ return undefined;
+ }
+
  return {
- trackId: this.audioTrackIndex?.track,
+ trackId: audioTrack.track,
  src: this.src,
  };
  }
@@ -322,9 +334,7 @@ export class AssetMediaEngine extends BaseMediaEngine implements MediaEngine {
  // This is because Asset segments are independent timeline fragments

  if (!rendition.trackId) {
- throw new Error(
- "[convertToSegmentRelativeTimestamps] Track ID is required for asset metadata",
- );
+ throw new Error("Track ID is required for asset metadata");
  }
  // For AssetMediaEngine, we need to calculate the actual segment start time
  // using the precise segment boundaries from the track fragment index
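
These getters, together with the BaseMediaEngine.getVideoRendition/getAudioRendition changes further down, make renditions optional: a missing track now surfaces as undefined rather than as a partially populated object or a thrown error. A minimal caller-side sketch of the resulting contract (hypothetical, simplified types; not the package's own code):

// Hypothetical sketch of the 0.20.4 caller contract: renditions are optional,
// so consumers branch on undefined instead of failing later in segment-id
// computation. Types here are simplified stand-ins, not the package's types.
interface AudioRenditionLike {
  trackId: number;
  src: string;
}

interface RenditionSource {
  getAudioRendition(): AudioRenditionLike | undefined;
}

function describeAudioTrack(engine: RenditionSource): string {
  const audioRendition = engine.getAudioRendition();
  if (!audioRendition) {
    // Video-only asset: skip audio work rather than throwing.
    return "no audio track";
  }
  return `audio track ${audioRendition.trackId} from ${audioRendition.src}`;
}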

package/src/elements/EFMedia/BaseMediaEngine.browsertest.ts
@@ -34,6 +34,100 @@ class TestMediaEngine extends BaseMediaEngine {
  }
  }

+ // Test implementation for video-only assets
+ // @ts-expect-error missing implementations
+ class VideoOnlyMediaEngine extends BaseMediaEngine {
+ fetchMediaSegment = vi.fn();
+ public host: EFMedia;
+
+ constructor(host: EFMedia) {
+ super(host);
+ this.host = host;
+ }
+
+ get videoRendition() {
+ return {
+ trackId: 1,
+ src: "test-video.mp4",
+ segmentDurationMs: 2000,
+ };
+ }
+
+ get audioRendition() {
+ return undefined; // Video-only asset
+ }
+ }
+
+ // Test implementation for audio-only assets
+ // @ts-expect-error missing implementations
+ class AudioOnlyMediaEngine extends BaseMediaEngine {
+ fetchMediaSegment = vi.fn();
+ public host: EFMedia;
+
+ constructor(host: EFMedia) {
+ super(host);
+ this.host = host;
+ }
+
+ get videoRendition() {
+ return undefined; // Audio-only asset
+ }
+
+ get audioRendition() {
+ return {
+ trackId: 1,
+ src: "test-audio.mp4",
+ segmentDurationMs: 1000,
+ };
+ }
+ }
+
+ describe("BaseMediaEngine API Contract", () => {
+ test("getAudioRendition returns audio rendition when available", ({
+ expect,
+ }) => {
+ const host = document.createElement("ef-video") as EFMedia;
+ const engine = new TestMediaEngine(host);
+
+ const result = engine.getAudioRendition();
+ expect(result).toBeDefined();
+ expect(result?.trackId).toBe(2);
+ expect(result?.src).toBe("test-audio.mp4");
+ });
+
+ test("getAudioRendition returns undefined for video-only assets", ({
+ expect,
+ }) => {
+ const host = document.createElement("ef-video") as EFMedia;
+ const engine = new VideoOnlyMediaEngine(host);
+
+ const result = engine.getAudioRendition();
+ expect(result).toBeUndefined();
+ });
+
+ test("getVideoRendition returns video rendition when available", ({
+ expect,
+ }) => {
+ const host = document.createElement("ef-video") as EFMedia;
+ const engine = new TestMediaEngine(host);
+
+ const result = engine.getVideoRendition();
+ expect(result).toBeDefined();
+ expect(result?.trackId).toBe(1);
+ expect(result?.src).toBe("test-video.mp4");
+ });
+
+ test("getVideoRendition returns undefined for audio-only assets", ({
+ expect,
+ }) => {
+ const host = document.createElement("ef-audio") as EFMedia;
+ const engine = new AudioOnlyMediaEngine(host);
+
+ const result = engine.getVideoRendition();
+ expect(result).toBeUndefined();
+ });
+ });
+
  describe("BaseMediaEngine deduplication", () => {
  test("should fetch segment successfully", async ({ expect }) => {
  const host = document.createElement("ef-video") as EFMedia;

package/src/elements/EFMedia/BaseMediaEngine.ts
@@ -23,17 +23,19 @@ export abstract class BaseMediaEngine {
  abstract get videoRendition(): VideoRendition | undefined;
  abstract get audioRendition(): AudioRendition | undefined;

- getVideoRendition(): VideoRendition {
- if (!this.videoRendition) {
- throw new Error("No video rendition available");
- }
+ /**
+ * Get video rendition if available. Returns undefined for audio-only assets.
+ * Callers should handle undefined gracefully.
+ */
+ getVideoRendition(): VideoRendition | undefined {
  return this.videoRendition;
  }

- getAudioRendition(): AudioRendition {
- if (!this.audioRendition) {
- throw new Error("No audio rendition available");
- }
+ /**
+ * Get audio rendition if available. Returns undefined for video-only assets.
+ * Callers should handle undefined gracefully.
+ */
+ getAudioRendition(): AudioRendition | undefined {
  return this.audioRendition;
  }


package/src/elements/EFMedia/JitMediaEngine.ts
@@ -215,15 +215,29 @@ export class JitMediaEngine extends BaseMediaEngine implements MediaEngine {
  async extractThumbnails(
  timestamps: number[],
  ): Promise<(ThumbnailResult | null)[]> {
- const mainRendition = this.videoRendition;
- const scrubRendition = this.getScrubVideoRendition();
-
- const rendition = mainRendition || scrubRendition;
-
- if (!rendition) {
+ // Use same rendition priority as video: try main rendition first for frame alignment
+ let rendition: VideoRendition;
+ try {
+ const mainRendition = this.getVideoRendition();
+ if (mainRendition) {
+ rendition = mainRendition;
+ } else {
+ const scrubRendition = this.getScrubVideoRendition();
+ if (scrubRendition) {
+ rendition = scrubRendition;
+ } else {
+ throw new Error("No video rendition available");
+ }
+ }
+ } catch (error) {
+ console.warn(
+ "JitMediaEngine: No video rendition available for thumbnails",
+ error,
+ );
  return timestamps.map(() => null);
  }

+ // Use shared thumbnail extraction logic
  return this.thumbnailExtractor.extractThumbnails(
  timestamps,
  rendition,
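
The thumbnail path above resolves its rendition in a fixed priority order: the main video rendition first, then the scrub rendition, and otherwise one null per requested timestamp. A condensed, illustrative reduction of that selection (not the package's implementation):

// Illustrative reduction of the selection order above, with stand-in types:
// prefer the main rendition, fall back to the scrub rendition, and let the
// caller degrade to null thumbnails when neither exists.
interface RenditionRef {
  trackId: number;
  src: string;
}

function pickThumbnailRendition(
  main: RenditionRef | undefined,
  scrub: RenditionRef | undefined,
): RenditionRef | undefined {
  return main ?? scrub;
}

// With no rendition available, thumbnail extraction returns one null per timestamp.
const chosen = pickThumbnailRendition(undefined, undefined);
const timestamps = [0, 1000, 2000];
const thumbnails = chosen === undefined ? timestamps.map(() => null) : [];
console.log(thumbnails); // [null, null, null]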

package/src/elements/EFMedia/audioTasks/makeAudioBufferTask.ts
@@ -42,15 +42,18 @@ export const makeAudioBufferTask = (host: EFMedia): AudioBufferTask => {
  task: async ([seekTimeMs], { signal }) => {
  // Skip buffering entirely in rendering mode
  if (EF_RENDERING()) {
- return currentState;
+ return currentState; // Return existing state without any buffering activity
  }

+ // Get media engine to potentially override buffer configuration
  const mediaEngine = await getLatestMediaEngine(host, signal);

+ // Return existing state if no audio rendition available
  if (!mediaEngine.audioRendition) {
  return currentState;
  }

+ // Use media engine's buffer config, falling back to host properties
  const engineConfig = mediaEngine.getBufferConfig();
  const bufferDurationMs = engineConfig.audioBufferDurationMs;
  const maxParallelFetches = engineConfig.maxAudioBufferFetches;
@@ -90,7 +93,7 @@ export const makeAudioBufferTask = (host: EFMedia): AudioBufferTask => {
  const mediaEngine = await getLatestMediaEngine(host, signal);
  const audioRendition = mediaEngine.audioRendition;
  if (!audioRendition) {
- throw new Error("No audio track available in source");
+ throw new Error("Audio rendition not available");
  }
  return audioRendition;
  },

package/src/elements/EFMedia/audioTasks/makeAudioFrequencyAnalysisTask.ts
@@ -95,11 +95,6 @@ export function makeAudioFrequencyAnalysisTask(element: EFMedia) {
  task: async (_, { signal }) => {
  if (element.currentSourceTimeMs < 0) return null;

- const mediaEngine = element.mediaEngineTask.value;
- if (!mediaEngine?.audioRendition) {
- return null;
- }
-
  const currentTimeMs = element.currentSourceTimeMs;

  // Calculate exact audio window needed based on fftDecay and frame timing

package/src/elements/EFMedia/audioTasks/makeAudioInitSegmentFetchTask.ts
@@ -14,8 +14,9 @@ export const makeAudioInitSegmentFetchTask = (
  onComplete: (_value) => {},
  task: async ([_mediaEngine], { signal }) => {
  const mediaEngine = await getLatestMediaEngine(host, signal);
+ const audioRendition = mediaEngine.getAudioRendition();

- const audioRendition = mediaEngine.audioRendition;
+ // Return undefined if no audio rendition available (video-only asset)
  if (!audioRendition) {
  return undefined;
  }

package/src/elements/EFMedia/audioTasks/makeAudioInputTask.ts
@@ -6,7 +6,7 @@ import type { InputTask } from "../shared/MediaTaskUtils";
  export const makeAudioInputTask = (host: EFMedia): InputTask => {
  return new Task<
  readonly [ArrayBuffer | undefined, ArrayBuffer | undefined],
- BufferedSeekingInput
+ BufferedSeekingInput | undefined
  >(host, {
  args: () =>
  [
@@ -18,18 +18,24 @@
  },
  onComplete: (_value) => {},
  task: async (_, { signal }) => {
+ const mediaEngine = await host.mediaEngineTask.taskComplete;
+ const audioRendition = mediaEngine?.audioRendition;
+
+ // Return undefined if no audio rendition available (video-only asset)
+ if (!audioRendition) {
+ return undefined;
+ }
+
  const initSegment = await host.audioInitSegmentFetchTask.taskComplete;
  signal.throwIfAborted();
  const segment = await host.audioSegmentFetchTask.taskComplete;
  signal.throwIfAborted();

  if (!initSegment || !segment) {
- throw new Error("No audio track available in source");
+ return undefined;
  }

- const mediaEngine = await host.mediaEngineTask.taskComplete;
- const audioRendition = mediaEngine?.audioRendition;
- const startTimeOffsetMs = audioRendition?.startTimeOffsetMs;
+ const startTimeOffsetMs = audioRendition.startTimeOffsetMs;

  const arrayBuffer = await new Blob([initSegment, segment]).arrayBuffer();
  signal.throwIfAborted();

package/src/elements/EFMedia/audioTasks/makeAudioSegmentFetchTask.ts
@@ -18,26 +18,14 @@ export const makeAudioSegmentFetchTask = (
  onComplete: (_value) => {},
  task: async (_, { signal }) => {
  const mediaEngine = await getLatestMediaEngine(host, signal);
+ const segmentId = await host.audioSegmentIdTask.taskComplete;
+ const audioRendition = mediaEngine.getAudioRendition();

- const audioRendition = mediaEngine.audioRendition;
- if (!audioRendition) {
+ // Return undefined if no audio rendition or segment ID available (video-only asset)
+ if (!audioRendition || segmentId === undefined) {
  return undefined;
  }

- const segmentId = await host.audioSegmentIdTask.taskComplete;
- if (segmentId === undefined) {
- const debugInfo = {
- hasRendition: true,
- segmentDurationMs: audioRendition.segmentDurationMs,
- segmentDurationsMs: audioRendition.segmentDurationsMs?.length || 0,
- desiredSeekTimeMs: host.desiredSeekTimeMs,
- intrinsicDurationMs: host.intrinsicDurationMs,
- };
- throw new Error(
- `Segment ID is not available for audio. Debug info: ${JSON.stringify(debugInfo)}`,
- );
- }
-
  return mediaEngine.fetchMediaSegment(segmentId, audioRendition, signal);
  },
  });

package/src/elements/EFMedia/audioTasks/makeAudioSegmentIdTask.ts
@@ -14,9 +14,11 @@ export const makeAudioSegmentIdTask = (
  onComplete: (_value) => {},
  task: async ([, targetSeekTimeMs], { signal }) => {
  const mediaEngine = await getLatestMediaEngine(host, signal);
- signal.throwIfAborted(); // Abort if a new seek started
+ signal.throwIfAborted();

- const audioRendition = mediaEngine.audioRendition;
+ const audioRendition = mediaEngine.getAudioRendition();
+
+ // Return undefined if no audio rendition available (video-only asset)
  if (!audioRendition) {
  return undefined;
  }

package/src/elements/EFMedia/audioTasks/makeAudioTasksVideoOnly.browsertest.ts
@@ -0,0 +1,95 @@
+ import { describe } from "vitest";
+ import { test as baseTest } from "../../../../test/useMSW.js";
+ import type { EFMedia } from "../../EFMedia.js";
+ import { AssetMediaEngine } from "../AssetMediaEngine.js";
+
+ const test = baseTest.extend<{
+ videoOnlyAssetEngine: AssetMediaEngine;
+ }>({
+ videoOnlyAssetEngine: async ({}, use) => {
+ const host = document.createElement("ef-video") as EFMedia;
+ const engine = new AssetMediaEngine(host, "test-video-only.mp4");
+
+ // Simulate video-only asset data (no audio track) - this is the exact scenario
+ // that caused "computeSegmentId: trackId not found for rendition {\"src\":\"uuid\"}"
+ (engine as any).data = {
+ 1: {
+ track: 1,
+ type: "video",
+ width: 480,
+ height: 270,
+ timescale: 15360,
+ sample_count: 1,
+ codec: "avc1.640015",
+ duration: 30208,
+ startTimeOffsetMs: 67,
+ initSegment: { offset: 0, size: 763 },
+ segments: [
+ { cts: 1024, dts: 0, duration: 30720, offset: 763, size: 13997 },
+ ],
+ },
+ // Note: No track 2 (audio) - this simulates the exact video-only asset scenario
+ };
+
+ await use(engine);
+ },
+ });
+
+ /**
+ * Regression test for: "computeSegmentId: trackId not found for rendition {\"src\":\"uuid\"}"
+ *
+ * This test ensures that AssetMediaEngine properly handles video-only assets
+ * by returning undefined for audio renditions instead of malformed objects.
+ *
+ * This test would FAIL with the old implementation and PASS with the new implementation.
+ */
+ describe("AssetMediaEngine - Video-Only Asset Handling", () => {
+ test("audioRendition returns undefined for video-only asset", ({
+ videoOnlyAssetEngine,
+ expect,
+ }) => {
+ // This is the core fix - should return undefined, not {src: "..."}
+ const audioRendition = videoOnlyAssetEngine.audioRendition;
+ expect(audioRendition).toBeUndefined();
+ });
+
+ test("videoRendition returns valid object for video-only asset", ({
+ videoOnlyAssetEngine,
+ expect,
+ }) => {
+ const videoRendition = videoOnlyAssetEngine.videoRendition;
+ expect(videoRendition).toBeDefined();
+ expect(videoRendition?.trackId).toBe(1);
+ expect(videoRendition?.src).toBe("test-video-only.mp4");
+ });
+
+ test("getAudioRendition returns undefined for video-only asset", ({
+ videoOnlyAssetEngine,
+ expect,
+ }) => {
+ // New API behavior - should return undefined gracefully
+ const result = videoOnlyAssetEngine.getAudioRendition();
+ expect(result).toBeUndefined();
+ });
+
+ test("original error scenario is prevented", ({
+ videoOnlyAssetEngine,
+ expect,
+ }) => {
+ // This is the exact scenario that caused the original error:
+ // "computeSegmentId: trackId not found for rendition {\"src\":\"uuid\"}"
+
+ const audioRendition = videoOnlyAssetEngine.getAudioRendition();
+
+ // Before fix: audioRendition would be {trackId: undefined, src: "..."}
+ // After fix: audioRendition should be undefined
+ expect(audioRendition).toBeUndefined();
+
+ // This prevents the downstream error where trackId was missing entirely
+ if (audioRendition !== undefined) {
+ // If audioRendition exists, it should have a valid trackId
+ expect(audioRendition.trackId).toBeDefined();
+ expect(typeof audioRendition.trackId).toBe("number");
+ }
+ });
+ });

package/src/elements/EFMedia/audioTasks/makeAudioTimeDomainAnalysisTask.ts
@@ -2,7 +2,7 @@ import { Task } from "@lit/task";

  import { EF_INTERACTIVE } from "../../../EF_INTERACTIVE.js";
  import { LRUCache } from "../../../utils/LRUCache.js";
- import type { EFMedia } from "../../EFMedia.js";
+ import { type EFMedia, IgnorableError } from "../../EFMedia.js";

  // DECAY_WEIGHT constant - same as original
  const DECAY_WEIGHT = 0.8;
@@ -14,6 +14,10 @@ export function makeAudioTimeDomainAnalysisTask(element: EFMedia) {
  return new Task(element, {
  autoRun: EF_INTERACTIVE,
  onError: (error) => {
+ if (error instanceof IgnorableError) {
+ console.info("byteTimeDomainTask skipped: no audio track");
+ return;
+ }
  console.error("byteTimeDomainTask error", error);
  },
  args: () =>
@@ -27,11 +31,6 @@ export function makeAudioTimeDomainAnalysisTask(element: EFMedia) {
  task: async (_, { signal }) => {
  if (element.currentSourceTimeMs < 0) return null;

- const mediaEngine = element.mediaEngineTask.value;
- if (!mediaEngine?.audioRendition) {
- return null;
- }
-
  const currentTimeMs = element.currentSourceTimeMs;

  // Calculate exact audio window needed based on fftDecay and frame timing

package/src/elements/EFMedia/shared/AudioSpanUtils.ts
@@ -16,7 +16,7 @@ const fetchAudioSegmentData = async (
  ): Promise<Map<number, ArrayBuffer>> => {
  const audioRendition = mediaEngine.audioRendition;
  if (!audioRendition) {
- throw new Error("No audio track available in source");
+ throw new Error("Audio rendition not available");
  }

  const segmentData = new Map<number, ArrayBuffer>();
@@ -62,7 +62,7 @@ export const fetchAudioSpanningTime = async (
  fromMs: number,
  toMs: number,
  signal: AbortSignal,
- ): Promise<AudioSpan> => {
+ ): Promise<AudioSpan | undefined> => {
  // Validate inputs
  if (fromMs >= toMs || fromMs < 0) {
  throw new Error(`Invalid time range: fromMs=${fromMs}, toMs=${toMs}`);
@@ -72,12 +72,13 @@ export const fetchAudioSpanningTime = async (
  const mediaEngine = await host.mediaEngineTask.taskComplete;
  const initSegment = await host.audioInitSegmentFetchTask.taskComplete;

+ // Return undefined if no audio rendition available
  if (!mediaEngine?.audioRendition) {
- throw new Error("No audio track available in source");
+ return undefined;
  }

  if (!initSegment) {
- throw new Error("Audio init segment is not available");
+ return undefined;
  }

  // Calculate segments needed using the media engine's method

package/src/elements/EFMedia/shared/BufferUtils.ts
@@ -35,7 +35,7 @@ export interface MediaBufferDependencies<
  ) => Promise<number | undefined>;
  prefetchSegment: (segmentId: number, rendition: T) => Promise<void>; // Just trigger prefetch, don't return data
  isSegmentCached: (segmentId: number, rendition: T) => boolean; // Check BaseMediaEngine cache
- getRendition: () => Promise<T>;
+ getRendition: () => Promise<T | undefined>;
  logError: (message: string, error: any) => void;
  }

@@ -208,6 +208,10 @@ export const manageMediaBuffer = async <
  }

  const rendition = await deps.getRendition();
+ if (!rendition) {
+ // Cannot buffer without a rendition
+ return currentState;
+ }
  const endTimeMs = seekTimeMs + config.bufferDurationMs;

  const desiredSegments = await computeSegmentRangeAsync(
@@ -217,7 +221,6 @@
  rendition,
  deps.computeSegmentId,
  );
-
  // Filter out segments already cached by BaseMediaEngine
  const uncachedSegments = desiredSegments.filter(
  (segmentId) => !deps.isSegmentCached(segmentId, rendition),
@@ -287,10 +290,11 @@
  startNextSegment();
  }

- return {
+ const result = {
  currentSeekTimeMs: seekTimeMs,
  requestedSegments: newRequestedSegments,
  activeRequests: newActiveRequests,
  requestQueue: remainingQueue, // What's left in the queue
  };
+ return result;
  };

package/src/elements/EFMedia/shared/MediaTaskUtils.ts
@@ -40,5 +40,5 @@ export type SegmentFetchTask = Task<
  */
  export type InputTask = Task<
  readonly [ArrayBuffer, ArrayBuffer],
- BufferedSeekingInput
+ BufferedSeekingInput | undefined
  >;