@editframe/elements 0.20.3-beta.0 → 0.21.0-beta.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (128)
  1. package/dist/DelayedLoadingState.js +0 -27
  2. package/dist/EF_FRAMEGEN.d.ts +5 -3
  3. package/dist/EF_FRAMEGEN.js +51 -29
  4. package/dist/_virtual/_@oxc-project_runtime@0.93.0/helpers/decorate.js +7 -0
  5. package/dist/elements/ContextProxiesController.js +2 -22
  6. package/dist/elements/EFAudio.js +4 -8
  7. package/dist/elements/EFCaptions.js +59 -84
  8. package/dist/elements/EFImage.js +5 -6
  9. package/dist/elements/EFMedia/AssetIdMediaEngine.js +2 -4
  10. package/dist/elements/EFMedia/AssetMediaEngine.d.ts +4 -4
  11. package/dist/elements/EFMedia/AssetMediaEngine.js +41 -32
  12. package/dist/elements/EFMedia/BaseMediaEngine.d.ts +10 -2
  13. package/dist/elements/EFMedia/BaseMediaEngine.js +57 -67
  14. package/dist/elements/EFMedia/BufferedSeekingInput.js +134 -76
  15. package/dist/elements/EFMedia/JitMediaEngine.js +22 -23
  16. package/dist/elements/EFMedia/audioTasks/makeAudioBufferTask.js +4 -7
  17. package/dist/elements/EFMedia/audioTasks/makeAudioFrequencyAnalysisTask.js +1 -3
  18. package/dist/elements/EFMedia/audioTasks/makeAudioInitSegmentFetchTask.js +2 -2
  19. package/dist/elements/EFMedia/audioTasks/makeAudioInputTask.js +9 -7
  20. package/dist/elements/EFMedia/audioTasks/makeAudioSeekTask.js +1 -3
  21. package/dist/elements/EFMedia/audioTasks/makeAudioSegmentFetchTask.js +2 -12
  22. package/dist/elements/EFMedia/audioTasks/makeAudioSegmentIdTask.js +2 -2
  23. package/dist/elements/EFMedia/audioTasks/makeAudioTasksVideoOnly.browsertest.d.ts +1 -0
  24. package/dist/elements/EFMedia/audioTasks/makeAudioTimeDomainAnalysisTask.js +6 -3
  25. package/dist/elements/EFMedia/shared/AudioSpanUtils.d.ts +1 -1
  26. package/dist/elements/EFMedia/shared/AudioSpanUtils.js +5 -17
  27. package/dist/elements/EFMedia/shared/BufferUtils.d.ts +1 -1
  28. package/dist/elements/EFMedia/shared/BufferUtils.js +2 -13
  29. package/dist/elements/EFMedia/shared/GlobalInputCache.js +0 -24
  30. package/dist/elements/EFMedia/shared/MediaTaskUtils.d.ts +1 -1
  31. package/dist/elements/EFMedia/shared/PrecisionUtils.js +0 -21
  32. package/dist/elements/EFMedia/shared/RenditionHelpers.d.ts +1 -9
  33. package/dist/elements/EFMedia/shared/ThumbnailExtractor.js +0 -17
  34. package/dist/elements/EFMedia/tasks/makeMediaEngineTask.d.ts +1 -2
  35. package/dist/elements/EFMedia/tasks/makeMediaEngineTask.js +2 -16
  36. package/dist/elements/EFMedia/videoTasks/MainVideoInputCache.d.ts +29 -0
  37. package/dist/elements/EFMedia/videoTasks/MainVideoInputCache.js +32 -0
  38. package/dist/elements/EFMedia/videoTasks/ScrubInputCache.js +1 -15
  39. package/dist/elements/EFMedia/videoTasks/makeScrubVideoBufferTask.js +3 -8
  40. package/dist/elements/EFMedia/videoTasks/makeScrubVideoInitSegmentFetchTask.js +0 -2
  41. package/dist/elements/EFMedia/videoTasks/makeScrubVideoInputTask.js +8 -7
  42. package/dist/elements/EFMedia/videoTasks/makeScrubVideoSeekTask.js +12 -13
  43. package/dist/elements/EFMedia/videoTasks/makeScrubVideoSegmentFetchTask.js +0 -2
  44. package/dist/elements/EFMedia/videoTasks/makeScrubVideoSegmentIdTask.js +1 -3
  45. package/dist/elements/EFMedia/videoTasks/makeUnifiedVideoSeekTask.js +134 -71
  46. package/dist/elements/EFMedia/videoTasks/makeVideoBufferTask.js +8 -12
  47. package/dist/elements/EFMedia.d.ts +2 -1
  48. package/dist/elements/EFMedia.js +26 -23
  49. package/dist/elements/EFSourceMixin.js +5 -7
  50. package/dist/elements/EFSurface.js +6 -9
  51. package/dist/elements/EFTemporal.js +19 -37
  52. package/dist/elements/EFThumbnailStrip.js +16 -59
  53. package/dist/elements/EFTimegroup.js +96 -91
  54. package/dist/elements/EFVideo.d.ts +6 -2
  55. package/dist/elements/EFVideo.js +142 -107
  56. package/dist/elements/EFWaveform.js +18 -27
  57. package/dist/elements/SampleBuffer.js +2 -5
  58. package/dist/elements/TargetController.js +3 -3
  59. package/dist/elements/durationConverter.js +4 -4
  60. package/dist/elements/updateAnimations.js +14 -35
  61. package/dist/gui/ContextMixin.js +23 -52
  62. package/dist/gui/EFConfiguration.js +7 -7
  63. package/dist/gui/EFControls.js +5 -5
  64. package/dist/gui/EFFilmstrip.js +77 -98
  65. package/dist/gui/EFFitScale.js +5 -6
  66. package/dist/gui/EFFocusOverlay.js +4 -4
  67. package/dist/gui/EFPreview.js +4 -4
  68. package/dist/gui/EFScrubber.js +9 -9
  69. package/dist/gui/EFTimeDisplay.js +5 -5
  70. package/dist/gui/EFToggleLoop.js +4 -4
  71. package/dist/gui/EFTogglePlay.js +5 -5
  72. package/dist/gui/EFWorkbench.js +5 -5
  73. package/dist/gui/TWMixin2.js +1 -1
  74. package/dist/index.d.ts +1 -0
  75. package/dist/otel/BridgeSpanExporter.d.ts +13 -0
  76. package/dist/otel/BridgeSpanExporter.js +87 -0
  77. package/dist/otel/setupBrowserTracing.d.ts +12 -0
  78. package/dist/otel/setupBrowserTracing.js +30 -0
  79. package/dist/otel/tracingHelpers.d.ts +34 -0
  80. package/dist/otel/tracingHelpers.js +113 -0
  81. package/dist/transcoding/cache/RequestDeduplicator.js +0 -21
  82. package/dist/transcoding/cache/URLTokenDeduplicator.js +1 -21
  83. package/dist/transcoding/types/index.d.ts +6 -4
  84. package/dist/transcoding/utils/UrlGenerator.js +2 -19
  85. package/dist/utils/LRUCache.js +6 -53
  86. package/package.json +10 -2
  87. package/src/elements/EFCaptions.browsertest.ts +2 -0
  88. package/src/elements/EFMedia/AssetIdMediaEngine.test.ts +6 -4
  89. package/src/elements/EFMedia/AssetMediaEngine.browsertest.ts +25 -23
  90. package/src/elements/EFMedia/AssetMediaEngine.ts +81 -43
  91. package/src/elements/EFMedia/BaseMediaEngine.browsertest.ts +94 -0
  92. package/src/elements/EFMedia/BaseMediaEngine.ts +120 -60
  93. package/src/elements/EFMedia/BufferedSeekingInput.ts +218 -101
  94. package/src/elements/EFMedia/JitMediaEngine.ts +20 -6
  95. package/src/elements/EFMedia/audioTasks/makeAudioBufferTask.ts +5 -2
  96. package/src/elements/EFMedia/audioTasks/makeAudioFrequencyAnalysisTask.ts +0 -5
  97. package/src/elements/EFMedia/audioTasks/makeAudioInitSegmentFetchTask.ts +2 -1
  98. package/src/elements/EFMedia/audioTasks/makeAudioInputTask.ts +18 -8
  99. package/src/elements/EFMedia/audioTasks/makeAudioSegmentFetchTask.ts +4 -16
  100. package/src/elements/EFMedia/audioTasks/makeAudioSegmentIdTask.ts +4 -2
  101. package/src/elements/EFMedia/audioTasks/makeAudioTasksVideoOnly.browsertest.ts +95 -0
  102. package/src/elements/EFMedia/audioTasks/makeAudioTimeDomainAnalysisTask.ts +5 -6
  103. package/src/elements/EFMedia/shared/AudioSpanUtils.ts +5 -4
  104. package/src/elements/EFMedia/shared/BufferUtils.ts +7 -3
  105. package/src/elements/EFMedia/shared/MediaTaskUtils.ts +1 -1
  106. package/src/elements/EFMedia/shared/RenditionHelpers.browsertest.ts +41 -42
  107. package/src/elements/EFMedia/shared/RenditionHelpers.ts +0 -23
  108. package/src/elements/EFMedia/tasks/makeMediaEngineTask.ts +1 -9
  109. package/src/elements/EFMedia/videoTasks/MainVideoInputCache.ts +76 -0
  110. package/src/elements/EFMedia/videoTasks/makeScrubVideoBufferTask.ts +3 -2
  111. package/src/elements/EFMedia/videoTasks/makeScrubVideoInitSegmentFetchTask.ts +0 -5
  112. package/src/elements/EFMedia/videoTasks/makeScrubVideoInputTask.ts +17 -15
  113. package/src/elements/EFMedia/videoTasks/makeScrubVideoSeekTask.ts +7 -1
  114. package/src/elements/EFMedia/videoTasks/makeScrubVideoSegmentFetchTask.ts +0 -5
  115. package/src/elements/EFMedia/videoTasks/makeScrubVideoSegmentIdTask.ts +0 -5
  116. package/src/elements/EFMedia/videoTasks/makeUnifiedVideoSeekTask.ts +222 -125
  117. package/src/elements/EFMedia/videoTasks/makeVideoBufferTask.ts +2 -5
  118. package/src/elements/EFMedia.ts +18 -2
  119. package/src/elements/EFThumbnailStrip.media-engine.browsertest.ts +2 -1
  120. package/src/elements/EFTimegroup.browsertest.ts +10 -8
  121. package/src/elements/EFTimegroup.ts +165 -77
  122. package/src/elements/EFVideo.browsertest.ts +19 -27
  123. package/src/elements/EFVideo.ts +203 -101
  124. package/src/otel/BridgeSpanExporter.ts +150 -0
  125. package/src/otel/setupBrowserTracing.ts +68 -0
  126. package/src/otel/tracingHelpers.ts +251 -0
  127. package/src/transcoding/types/index.ts +6 -4
  128. package/types.json +1 -1
@@ -8,6 +8,7 @@ import {
   MP4,
   VideoSampleSink,
 } from "mediabunny";
+import { withSpan } from "../../otel/tracingHelpers.js";
 import { type MediaSample, SampleBuffer } from "../SampleBuffer";
 import { roundToMilliseconds } from "./shared/PrecisionUtils";
 
@@ -177,26 +178,39 @@ export class BufferedSeekingInput {
   }
 
   async seek(trackId: number, timeMs: number) {
-    // Apply timeline offset to map user timeline to media timeline
-    const mediaTimeMs = timeMs + this.startTimeOffsetMs;
-
-    // Round using consistent precision handling
-    const roundedMediaTimeMs = roundToMilliseconds(mediaTimeMs);
-
-    // Serialize seek operations per track (but don't block iterator creation)
-    const existingSeek = this.trackSeekPromises.get(trackId);
-    if (existingSeek) {
-      await existingSeek;
-    }
+    return withSpan(
+      "bufferedInput.seek",
+      {
+        trackId,
+        timeMs,
+        startTimeOffsetMs: this.startTimeOffsetMs,
+      },
+      undefined,
+      async (span) => {
+        // Apply timeline offset to map user timeline to media timeline
+        const mediaTimeMs = timeMs + this.startTimeOffsetMs;
+
+        // Round using consistent precision handling
+        const roundedMediaTimeMs = roundToMilliseconds(mediaTimeMs);
+        span.setAttribute("roundedMediaTimeMs", roundedMediaTimeMs);
+
+        // Serialize seek operations per track (but don't block iterator creation)
+        const existingSeek = this.trackSeekPromises.get(trackId);
+        if (existingSeek) {
+          span.setAttribute("waitedForExistingSeek", true);
+          await existingSeek;
+        }
 
-    const seekPromise = this.seekSafe(trackId, roundedMediaTimeMs);
-    this.trackSeekPromises.set(trackId, seekPromise);
+        const seekPromise = this.seekSafe(trackId, roundedMediaTimeMs);
+        this.trackSeekPromises.set(trackId, seekPromise);
 
-    try {
-      return await seekPromise;
-    } finally {
-      this.trackSeekPromises.delete(trackId);
-    }
+        try {
+          return await seekPromise;
+        } finally {
+          this.trackSeekPromises.delete(trackId);
+        }
+      },
+    );
   }
 
   private async resetIterator(track: InputTrack) {
@@ -224,90 +238,193 @@ export class BufferedSeekingInput {
   #seekLock?: PromiseWithResolvers<void>;
 
   private async seekSafe(trackId: number, timeMs: number) {
-    if (this.#seekLock) {
-      await this.#seekLock.promise;
-    }
-    const seekLock = Promise.withResolvers<void>();
-    this.#seekLock = seekLock;
-
-    try {
-      const track = await this.getTrack(trackId);
-      const trackBuffer = this.getTrackBuffer(track);
-
-      const roundedTimeMs = roundToMilliseconds(timeMs);
-      const firstTimestampMs = roundToMilliseconds(
-        (await track.getFirstTimestamp()) * 1000,
-      );
-
-      if (roundedTimeMs < firstTimestampMs) {
-        console.error("Seeking outside bounds of input", {
-          roundedTimeMs,
-          firstTimestampMs,
-        });
-        throw new NoSample(
-          `Seeking outside bounds of input ${roundedTimeMs} < ${firstTimestampMs}`,
-        );
-      }
-
-      // Check if we need to reset iterator for seeks outside current buffer range
-      const bufferContents = trackBuffer.getContents();
-      if (bufferContents.length > 0) {
-        const bufferStartMs = roundToMilliseconds(
-          trackBuffer.firstTimestamp * 1000,
-        );
-
-        if (roundedTimeMs < bufferStartMs) {
-          await this.resetIterator(track);
-        }
-      }
-
-      const alreadyInBuffer = trackBuffer.find(timeMs);
-      if (alreadyInBuffer) return alreadyInBuffer;
-
-      const iterator = this.getTrackIterator(track);
-      while (true) {
-        const { done, value: decodedSample } = await iterator.next();
-
-        if (decodedSample) {
-          trackBuffer.push(decodedSample);
-        }
-        const foundSample = trackBuffer.find(roundedTimeMs);
-        if (foundSample) {
-          return foundSample;
-        }
-        if (done) {
-          break;
+    return withSpan(
+      "bufferedInput.seekSafe",
+      {
+        trackId,
+        timeMs,
+      },
+      undefined,
+      async (span) => {
+        if (this.#seekLock) {
+          span.setAttribute("waitedForSeekLock", true);
+          await this.#seekLock.promise;
         }
-      }
-
-      // Check if we're seeking to the exact end of the track (legitimate use case)
-      const finalBufferContents = trackBuffer.getContents();
-      if (finalBufferContents.length > 0) {
-        const lastSample = finalBufferContents[finalBufferContents.length - 1];
-        const lastSampleEndMs = roundToMilliseconds(
-          ((lastSample?.timestamp || 0) + (lastSample?.duration || 0)) * 1000,
-        );
-
-        // Only return last sample if seeking to exactly the track duration
-        // (end of video) AND we have the final segment loaded
-        const trackDurationMs = (await track.computeDuration()) * 1000;
-        const isSeekingToTrackEnd =
-          roundToMilliseconds(timeMs) === roundToMilliseconds(trackDurationMs);
-        const isAtEndOfTrack = roundToMilliseconds(timeMs) >= lastSampleEndMs;
-
-        if (isSeekingToTrackEnd && isAtEndOfTrack) {
-          return lastSample;
+        const seekLock = Promise.withResolvers<void>();
+        this.#seekLock = seekLock;
+
+        try {
+          const track = await this.getTrack(trackId);
+          span.setAttribute("trackType", track.type);
+
+          const trackBuffer = this.getTrackBuffer(track);
+
+          const roundedTimeMs = roundToMilliseconds(timeMs);
+          const firstTimestampMs = roundToMilliseconds(
+            (await track.getFirstTimestamp()) * 1000,
+          );
+          span.setAttribute("firstTimestampMs", firstTimestampMs);
+
+          if (roundedTimeMs < firstTimestampMs) {
+            console.error("Seeking outside bounds of input", {
+              roundedTimeMs,
+              firstTimestampMs,
+            });
+            throw new NoSample(
+              `Seeking outside bounds of input ${roundedTimeMs} < ${firstTimestampMs}`,
+            );
+          }
+
+          // Check if we need to reset iterator for seeks outside current buffer range
+          const bufferContents = trackBuffer.getContents();
+          span.setAttribute("bufferContentsLength", bufferContents.length);
+
+          if (bufferContents.length > 0) {
+            const bufferStartMs = roundToMilliseconds(
+              trackBuffer.firstTimestamp * 1000,
+            );
+            span.setAttribute("bufferStartMs", bufferStartMs);
+
+            if (roundedTimeMs < bufferStartMs) {
+              span.setAttribute("resetIterator", true);
+              await this.resetIterator(track);
+            }
+          }
+
+          const alreadyInBuffer = trackBuffer.find(timeMs);
+          if (alreadyInBuffer) {
+            span.setAttribute("foundInBuffer", true);
+            span.setAttribute("bufferSize", trackBuffer.length);
+            const contents = trackBuffer.getContents();
+            if (contents.length > 0) {
+              span.setAttribute(
+                "bufferTimestamps",
+                contents
+                  .map((s) => Math.round((s.timestamp || 0) * 1000))
+                  .slice(0, 10)
+                  .join(","),
+              );
+            }
+            return alreadyInBuffer;
+          }
+
+          // Buffer miss - record buffer state
+          span.setAttribute("foundInBuffer", false);
+          span.setAttribute("bufferSize", trackBuffer.length);
+          span.setAttribute("requestedTimeMs", Math.round(timeMs));
+
+          const contents = trackBuffer.getContents();
+          if (contents.length > 0) {
+            const firstSample = contents[0];
+            const lastSample = contents[contents.length - 1];
+            if (firstSample && lastSample) {
+              const bufferStartMs = Math.round(
+                (firstSample.timestamp || 0) * 1000,
+              );
+              const bufferEndMs = Math.round(
+                ((lastSample.timestamp || 0) + (lastSample.duration || 0)) *
+                  1000,
+              );
+              span.setAttribute("bufferStartMs", bufferStartMs);
+              span.setAttribute("bufferEndMs", bufferEndMs);
+              span.setAttribute(
+                "bufferRangeMs",
+                `${bufferStartMs}-${bufferEndMs}`,
+              );
+            }
+          }
+
+          const iterator = this.getTrackIterator(track);
+          let iterationCount = 0;
+          const decodeStart = performance.now();
+
+          while (true) {
+            iterationCount++;
+            const iterStart = performance.now();
+            const { done, value: decodedSample } = await iterator.next();
+            const iterEnd = performance.now();
+
+            // Record individual iteration timing for first 5 iterations
+            if (iterationCount <= 5) {
+              span.setAttribute(
+                `iter${iterationCount}Ms`,
+                Math.round((iterEnd - iterStart) * 100) / 100,
+              );
+            }
+
+            if (decodedSample) {
+              trackBuffer.push(decodedSample);
+              if (iterationCount <= 5) {
+                span.setAttribute(
+                  `iter${iterationCount}Timestamp`,
+                  Math.round((decodedSample.timestamp || 0) * 1000),
+                );
+              }
+            }
+
+            const foundSample = trackBuffer.find(roundedTimeMs);
+            if (foundSample) {
+              const decodeEnd = performance.now();
+              span.setAttribute("iterationCount", iterationCount);
+              span.setAttribute(
+                "decodeMs",
+                Math.round((decodeEnd - decodeStart) * 100) / 100,
+              );
+              span.setAttribute(
+                "avgIterMs",
+                Math.round(((decodeEnd - decodeStart) / iterationCount) * 100) /
+                  100,
+              );
+              span.setAttribute("foundSample", true);
+              span.setAttribute(
+                "foundTimestamp",
+                Math.round((foundSample.timestamp || 0) * 1000),
+              );
+              return foundSample;
+            }
+            if (done) {
+              break;
+            }
+          }
+
+          span.setAttribute("iterationCount", iterationCount);
+          span.setAttribute("reachedEnd", true);
+
+          // Check if we're seeking to the exact end of the track (legitimate use case)
+          const finalBufferContents = trackBuffer.getContents();
+          if (finalBufferContents.length > 0) {
+            const lastSample =
+              finalBufferContents[finalBufferContents.length - 1];
+            const lastSampleEndMs = roundToMilliseconds(
+              ((lastSample?.timestamp || 0) + (lastSample?.duration || 0)) *
+                1000,
+            );
+
+            // Only return last sample if seeking to exactly the track duration
+            // (end of video) AND we have the final segment loaded
+            const trackDurationMs = (await track.computeDuration()) * 1000;
+            const isSeekingToTrackEnd =
+              roundToMilliseconds(timeMs) ===
+              roundToMilliseconds(trackDurationMs);
+            const isAtEndOfTrack =
+              roundToMilliseconds(timeMs) >= lastSampleEndMs;
+
+            if (isSeekingToTrackEnd && isAtEndOfTrack) {
+              span.setAttribute("returnedLastSample", true);
+              return lastSample;
+            }
+          }
+
+          // For all other cases (seeking within track but outside buffer range), throw error
+          // The caller should ensure the correct segment is loaded before seeking
+          throw new NoSample(
+            `Sample not found for time ${timeMs} in ${track.type} track ${trackId}`,
+          );
+        } finally {
+          this.#seekLock = undefined;
+          seekLock.resolve();
         }
-      }
-
-      // For all other cases (seeking within track but outside buffer range), throw error
-      // The caller should ensure the correct segment is loaded before seeking
-      throw new NoSample(
-        `Sample not found for time ${timeMs} in ${track.type} track ${trackId}`,
-      );
-    } finally {
-      this.#seekLock = undefined;
-      seekLock.resolve();
-    }
+      },
+    );
   }
 }
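The `withSpan` helper used above is imported from the new `otel/tracingHelpers` module added in this release; its implementation is not part of this hunk. As a minimal sketch of the shape implied by the call sites (the helper name, attribute objects, and `span.setAttribute` usage come from the diff; the tracer name, error handling, and the meaning of the third argument passed as `undefined` here are assumptions, not the actual code in tracingHelpers.ts):

import {
  context,
  SpanStatusCode,
  trace,
  type Attributes,
  type Span,
} from "@opentelemetry/api";

// Hypothetical sketch: wrap an async callback in an OpenTelemetry span and
// forward the span so the callback can attach attributes as it progresses.
export async function withSpan<T>(
  name: string,
  attributes: Attributes,
  parent: Span | undefined,
  fn: (span: Span) => Promise<T>,
): Promise<T> {
  const tracer = trace.getTracer("@editframe/elements");
  // Attach to an explicit parent span when one is given, otherwise use the active context.
  const ctx = parent ? trace.setSpan(context.active(), parent) : context.active();
  return tracer.startActiveSpan(name, { attributes }, ctx, async (span) => {
    try {
      return await fn(span);
    } catch (error) {
      span.recordException(error as Error);
      span.setStatus({ code: SpanStatusCode.ERROR, message: String(error) });
      throw error;
    } finally {
      span.end();
    }
  });
}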
@@ -215,15 +215,29 @@ export class JitMediaEngine extends BaseMediaEngine implements MediaEngine {
   async extractThumbnails(
     timestamps: number[],
   ): Promise<(ThumbnailResult | null)[]> {
-    const mainRendition = this.videoRendition;
-    const scrubRendition = this.getScrubVideoRendition();
-
-    const rendition = mainRendition || scrubRendition;
-
-    if (!rendition) {
+    // Use same rendition priority as video: try main rendition first for frame alignment
+    let rendition: VideoRendition;
+    try {
+      const mainRendition = this.getVideoRendition();
+      if (mainRendition) {
+        rendition = mainRendition;
+      } else {
+        const scrubRendition = this.getScrubVideoRendition();
+        if (scrubRendition) {
+          rendition = scrubRendition;
+        } else {
+          throw new Error("No video rendition available");
+        }
+      }
+    } catch (error) {
+      console.warn(
+        "JitMediaEngine: No video rendition available for thumbnails",
+        error,
+      );
       return timestamps.map(() => null);
     }
 
+    // Use shared thumbnail extraction logic
     return this.thumbnailExtractor.extractThumbnails(
       timestamps,
       rendition,
@@ -42,15 +42,18 @@ export const makeAudioBufferTask = (host: EFMedia): AudioBufferTask => {
     task: async ([seekTimeMs], { signal }) => {
       // Skip buffering entirely in rendering mode
       if (EF_RENDERING()) {
-        return currentState;
+        return currentState; // Return existing state without any buffering activity
       }
 
+      // Get media engine to potentially override buffer configuration
       const mediaEngine = await getLatestMediaEngine(host, signal);
 
+      // Return existing state if no audio rendition available
       if (!mediaEngine.audioRendition) {
         return currentState;
       }
 
+      // Use media engine's buffer config, falling back to host properties
      const engineConfig = mediaEngine.getBufferConfig();
      const bufferDurationMs = engineConfig.audioBufferDurationMs;
      const maxParallelFetches = engineConfig.maxAudioBufferFetches;
@@ -90,7 +93,7 @@ export const makeAudioBufferTask = (host: EFMedia): AudioBufferTask => {
       const mediaEngine = await getLatestMediaEngine(host, signal);
       const audioRendition = mediaEngine.audioRendition;
       if (!audioRendition) {
-        throw new Error("No audio track available in source");
+        throw new Error("Audio rendition not available");
       }
       return audioRendition;
     },
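For orientation: the `getBufferConfig()` call introduced in the first hunk above belongs to the expanded `BaseMediaEngine` surface (BaseMediaEngine.d.ts grows by 10 lines in this release). Only the two audio fields read at this call site are visible in the diff; a hypothetical minimal shape, named here only for illustration:

// Sketch inferred from the call site above; the real declaration in
// BaseMediaEngine.d.ts may use different names and include more fields
// (for example, video buffer settings).
interface EngineBufferConfig {
  audioBufferDurationMs: number;
  maxAudioBufferFetches: number;
}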
@@ -95,11 +95,6 @@ export function makeAudioFrequencyAnalysisTask(element: EFMedia) {
     task: async (_, { signal }) => {
       if (element.currentSourceTimeMs < 0) return null;
 
-      const mediaEngine = element.mediaEngineTask.value;
-      if (!mediaEngine?.audioRendition) {
-        return null;
-      }
-
       const currentTimeMs = element.currentSourceTimeMs;
 
       // Calculate exact audio window needed based on fftDecay and frame timing
@@ -14,8 +14,9 @@ export const makeAudioInitSegmentFetchTask = (
     onComplete: (_value) => {},
     task: async ([_mediaEngine], { signal }) => {
       const mediaEngine = await getLatestMediaEngine(host, signal);
+      const audioRendition = mediaEngine.getAudioRendition();
 
-      const audioRendition = mediaEngine.audioRendition;
+      // Return undefined if no audio rendition available (video-only asset)
       if (!audioRendition) {
         return undefined;
       }
@@ -6,7 +6,7 @@ import type { InputTask } from "../shared/MediaTaskUtils";
 export const makeAudioInputTask = (host: EFMedia): InputTask => {
   return new Task<
     readonly [ArrayBuffer | undefined, ArrayBuffer | undefined],
-    BufferedSeekingInput
+    BufferedSeekingInput | undefined
   >(host, {
     args: () =>
       [
@@ -18,21 +18,31 @@ export const makeAudioInputTask = (host: EFMedia): InputTask => {
     },
     onComplete: (_value) => {},
     task: async (_, { signal }) => {
+      const mediaEngine = await host.mediaEngineTask.taskComplete;
+      if (signal.aborted) return undefined;
+
+      const audioRendition = mediaEngine?.audioRendition;
+
+      // Return undefined if no audio rendition available (video-only asset)
+      if (!audioRendition) {
+        return undefined;
+      }
+
       const initSegment = await host.audioInitSegmentFetchTask.taskComplete;
-      signal.throwIfAborted();
+      if (signal.aborted) return undefined;
+
       const segment = await host.audioSegmentFetchTask.taskComplete;
-      signal.throwIfAborted();
+      if (signal.aborted) return undefined;
 
       if (!initSegment || !segment) {
-        throw new Error("No audio track available in source");
+        return undefined;
       }
 
-      const mediaEngine = await host.mediaEngineTask.taskComplete;
-      const audioRendition = mediaEngine?.audioRendition;
-      const startTimeOffsetMs = audioRendition?.startTimeOffsetMs;
+      const startTimeOffsetMs = audioRendition.startTimeOffsetMs;
 
       const arrayBuffer = await new Blob([initSegment, segment]).arrayBuffer();
-      signal.throwIfAborted();
+      if (signal.aborted) return undefined;
+
       return new BufferedSeekingInput(arrayBuffer, {
         videoBufferSize: EFMedia.VIDEO_SAMPLE_BUFFER_SIZE,
         audioBufferSize: EFMedia.AUDIO_SAMPLE_BUFFER_SIZE,
@@ -18,26 +18,14 @@ export const makeAudioSegmentFetchTask = (
     onComplete: (_value) => {},
     task: async (_, { signal }) => {
       const mediaEngine = await getLatestMediaEngine(host, signal);
+      const segmentId = await host.audioSegmentIdTask.taskComplete;
+      const audioRendition = mediaEngine.getAudioRendition();
 
-      const audioRendition = mediaEngine.audioRendition;
-      if (!audioRendition) {
+      // Return undefined if no audio rendition or segment ID available (video-only asset)
+      if (!audioRendition || segmentId === undefined) {
         return undefined;
       }
 
-      const segmentId = await host.audioSegmentIdTask.taskComplete;
-      if (segmentId === undefined) {
-        const debugInfo = {
-          hasRendition: true,
-          segmentDurationMs: audioRendition.segmentDurationMs,
-          segmentDurationsMs: audioRendition.segmentDurationsMs?.length || 0,
-          desiredSeekTimeMs: host.desiredSeekTimeMs,
-          intrinsicDurationMs: host.intrinsicDurationMs,
-        };
-        throw new Error(
-          `Segment ID is not available for audio. Debug info: ${JSON.stringify(debugInfo)}`,
-        );
-      }
-
       return mediaEngine.fetchMediaSegment(segmentId, audioRendition, signal);
     },
   });
@@ -14,9 +14,11 @@ export const makeAudioSegmentIdTask = (
     onComplete: (_value) => {},
     task: async ([, targetSeekTimeMs], { signal }) => {
       const mediaEngine = await getLatestMediaEngine(host, signal);
-      signal.throwIfAborted(); // Abort if a new seek started
+      signal.throwIfAborted();
 
-      const audioRendition = mediaEngine.audioRendition;
+      const audioRendition = mediaEngine.getAudioRendition();
+
+      // Return undefined if no audio rendition available (video-only asset)
       if (!audioRendition) {
         return undefined;
       }
@@ -0,0 +1,95 @@
+import { describe } from "vitest";
+import { test as baseTest } from "../../../../test/useMSW.js";
+import type { EFMedia } from "../../EFMedia.js";
+import { AssetMediaEngine } from "../AssetMediaEngine.js";
+
+const test = baseTest.extend<{
+  videoOnlyAssetEngine: AssetMediaEngine;
+}>({
+  videoOnlyAssetEngine: async ({}, use) => {
+    const host = document.createElement("ef-video") as EFMedia;
+    const engine = new AssetMediaEngine(host, "test-video-only.mp4");
+
+    // Simulate video-only asset data (no audio track) - this is the exact scenario
+    // that caused "computeSegmentId: trackId not found for rendition {\"src\":\"uuid\"}"
+    (engine as any).data = {
+      1: {
+        track: 1,
+        type: "video",
+        width: 480,
+        height: 270,
+        timescale: 15360,
+        sample_count: 1,
+        codec: "avc1.640015",
+        duration: 30208,
+        startTimeOffsetMs: 67,
+        initSegment: { offset: 0, size: 763 },
+        segments: [
+          { cts: 1024, dts: 0, duration: 30720, offset: 763, size: 13997 },
+        ],
+      },
+      // Note: No track 2 (audio) - this simulates the exact video-only asset scenario
+    };
+
+    await use(engine);
+  },
+});
+
+/**
+ * Regression test for: "computeSegmentId: trackId not found for rendition {\"src\":\"uuid\"}"
+ *
+ * This test ensures that AssetMediaEngine properly handles video-only assets
+ * by returning undefined for audio renditions instead of malformed objects.
+ *
+ * This test would FAIL with the old implementation and PASS with the new implementation.
+ */
+describe("AssetMediaEngine - Video-Only Asset Handling", () => {
+  test("audioRendition returns undefined for video-only asset", ({
+    videoOnlyAssetEngine,
+    expect,
+  }) => {
+    // This is the core fix - should return undefined, not {src: "..."}
+    const audioRendition = videoOnlyAssetEngine.audioRendition;
+    expect(audioRendition).toBeUndefined();
+  });
+
+  test("videoRendition returns valid object for video-only asset", ({
+    videoOnlyAssetEngine,
+    expect,
+  }) => {
+    const videoRendition = videoOnlyAssetEngine.videoRendition;
+    expect(videoRendition).toBeDefined();
+    expect(videoRendition?.trackId).toBe(1);
+    expect(videoRendition?.src).toBe("test-video-only.mp4");
+  });
+
+  test("getAudioRendition returns undefined for video-only asset", ({
+    videoOnlyAssetEngine,
+    expect,
+  }) => {
+    // New API behavior - should return undefined gracefully
+    const result = videoOnlyAssetEngine.getAudioRendition();
+    expect(result).toBeUndefined();
+  });
+
+  test("original error scenario is prevented", ({
+    videoOnlyAssetEngine,
+    expect,
+  }) => {
+    // This is the exact scenario that caused the original error:
+    // "computeSegmentId: trackId not found for rendition {\"src\":\"uuid\"}"
+
+    const audioRendition = videoOnlyAssetEngine.getAudioRendition();
+
+    // Before fix: audioRendition would be {trackId: undefined, src: "..."}
+    // After fix: audioRendition should be undefined
+    expect(audioRendition).toBeUndefined();
+
+    // This prevents the downstream error where trackId was missing entirely
+    if (audioRendition !== undefined) {
+      // If audioRendition exists, it should have a valid trackId
+      expect(audioRendition.trackId).toBeDefined();
+      expect(typeof audioRendition.trackId).toBe("number");
+    }
+  });
+});
@@ -2,7 +2,7 @@ import { Task } from "@lit/task";
 
 import { EF_INTERACTIVE } from "../../../EF_INTERACTIVE.js";
 import { LRUCache } from "../../../utils/LRUCache.js";
-import type { EFMedia } from "../../EFMedia.js";
+import { type EFMedia, IgnorableError } from "../../EFMedia.js";
 
 // DECAY_WEIGHT constant - same as original
 const DECAY_WEIGHT = 0.8;
@@ -14,6 +14,10 @@ export function makeAudioTimeDomainAnalysisTask(element: EFMedia) {
   return new Task(element, {
     autoRun: EF_INTERACTIVE,
     onError: (error) => {
+      if (error instanceof IgnorableError) {
+        console.info("byteTimeDomainTask skipped: no audio track");
+        return;
+      }
       console.error("byteTimeDomainTask error", error);
     },
     args: () =>
@@ -27,11 +31,6 @@ export function makeAudioTimeDomainAnalysisTask(element: EFMedia) {
     task: async (_, { signal }) => {
       if (element.currentSourceTimeMs < 0) return null;
 
-      const mediaEngine = element.mediaEngineTask.value;
-      if (!mediaEngine?.audioRendition) {
-        return null;
-      }
-
      const currentTimeMs = element.currentSourceTimeMs;
 
      // Calculate exact audio window needed based on fftDecay and frame timing
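`IgnorableError` is newly exported from EFMedia.ts (that file grows by +18/-2 in this release) and is used in the onError handler above to distinguish expected "no audio track" conditions from real failures. Its definition is not part of this diff; a minimal sketch of how such a marker error class is typically declared and used (the class name and import path come from the diff, the body and the example throw site are assumptions):

// EFMedia.ts (sketch) - a marker error type that downstream task onError
// handlers can detect with `instanceof` and downgrade to an info-level log.
export class IgnorableError extends Error {
  constructor(message = "Operation skipped") {
    super(message);
    this.name = "IgnorableError";
  }
}

// Hypothetical usage in an audio task when the asset has no audio rendition:
// throw new IgnorableError("No audio rendition available");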