@editframe/elements 0.17.6-beta.0 → 0.18.7-beta.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/EF_FRAMEGEN.js +1 -1
- package/dist/elements/EFAudio.d.ts +21 -2
- package/dist/elements/EFAudio.js +41 -11
- package/dist/elements/EFImage.d.ts +1 -0
- package/dist/elements/EFImage.js +11 -3
- package/dist/elements/EFMedia/AssetIdMediaEngine.d.ts +18 -0
- package/dist/elements/EFMedia/AssetIdMediaEngine.js +41 -0
- package/dist/elements/EFMedia/AssetMediaEngine.browsertest.d.ts +0 -0
- package/dist/elements/EFMedia/AssetMediaEngine.d.ts +45 -0
- package/dist/elements/EFMedia/AssetMediaEngine.js +135 -0
- package/dist/elements/EFMedia/BaseMediaEngine.d.ts +55 -0
- package/dist/elements/EFMedia/BaseMediaEngine.js +115 -0
- package/dist/elements/EFMedia/BufferedSeekingInput.d.ts +43 -0
- package/dist/elements/EFMedia/BufferedSeekingInput.js +179 -0
- package/dist/elements/EFMedia/JitMediaEngine.browsertest.d.ts +0 -0
- package/dist/elements/EFMedia/JitMediaEngine.d.ts +31 -0
- package/dist/elements/EFMedia/JitMediaEngine.js +81 -0
- package/dist/elements/EFMedia/audioTasks/makeAudioBufferTask.browsertest.d.ts +9 -0
- package/dist/elements/EFMedia/audioTasks/makeAudioBufferTask.d.ts +16 -0
- package/dist/elements/EFMedia/audioTasks/makeAudioBufferTask.js +48 -0
- package/dist/elements/EFMedia/audioTasks/makeAudioFrequencyAnalysisTask.d.ts +3 -0
- package/dist/elements/EFMedia/audioTasks/makeAudioFrequencyAnalysisTask.js +141 -0
- package/dist/elements/EFMedia/audioTasks/makeAudioInitSegmentFetchTask.browsertest.d.ts +9 -0
- package/dist/elements/EFMedia/audioTasks/makeAudioInitSegmentFetchTask.d.ts +4 -0
- package/dist/elements/EFMedia/audioTasks/makeAudioInitSegmentFetchTask.js +16 -0
- package/dist/elements/EFMedia/audioTasks/makeAudioInputTask.browsertest.d.ts +9 -0
- package/dist/elements/EFMedia/audioTasks/makeAudioInputTask.d.ts +3 -0
- package/dist/elements/EFMedia/audioTasks/makeAudioInputTask.js +30 -0
- package/dist/elements/EFMedia/audioTasks/makeAudioSeekTask.chunkboundary.regression.browsertest.d.ts +0 -0
- package/dist/elements/EFMedia/audioTasks/makeAudioSeekTask.d.ts +7 -0
- package/dist/elements/EFMedia/audioTasks/makeAudioSeekTask.js +32 -0
- package/dist/elements/EFMedia/audioTasks/makeAudioSegmentFetchTask.d.ts +4 -0
- package/dist/elements/EFMedia/audioTasks/makeAudioSegmentFetchTask.js +28 -0
- package/dist/elements/EFMedia/audioTasks/makeAudioSegmentIdTask.d.ts +4 -0
- package/dist/elements/EFMedia/audioTasks/makeAudioSegmentIdTask.js +17 -0
- package/dist/elements/EFMedia/audioTasks/makeAudioTimeDomainAnalysisTask.d.ts +3 -0
- package/dist/elements/EFMedia/audioTasks/makeAudioTimeDomainAnalysisTask.js +107 -0
- package/dist/elements/EFMedia/shared/AudioSpanUtils.d.ts +7 -0
- package/dist/elements/EFMedia/shared/AudioSpanUtils.js +54 -0
- package/dist/elements/EFMedia/shared/BufferUtils.d.ts +70 -0
- package/dist/elements/EFMedia/shared/BufferUtils.js +89 -0
- package/dist/elements/EFMedia/shared/MediaTaskUtils.d.ts +23 -0
- package/dist/elements/EFMedia/shared/PrecisionUtils.d.ts +28 -0
- package/dist/elements/EFMedia/shared/PrecisionUtils.js +29 -0
- package/dist/elements/EFMedia/shared/RenditionHelpers.d.ts +19 -0
- package/dist/elements/EFMedia/tasks/makeMediaEngineTask.d.ts +18 -0
- package/dist/elements/EFMedia/tasks/makeMediaEngineTask.js +60 -0
- package/dist/elements/EFMedia/videoTasks/makeVideoBufferTask.browsertest.d.ts +9 -0
- package/dist/elements/EFMedia/videoTasks/makeVideoBufferTask.d.ts +16 -0
- package/dist/elements/EFMedia/videoTasks/makeVideoBufferTask.js +46 -0
- package/dist/elements/EFMedia/videoTasks/makeVideoInitSegmentFetchTask.browsertest.d.ts +9 -0
- package/dist/elements/EFMedia/videoTasks/makeVideoInitSegmentFetchTask.d.ts +4 -0
- package/dist/elements/EFMedia/videoTasks/makeVideoInitSegmentFetchTask.js +16 -0
- package/dist/elements/EFMedia/videoTasks/makeVideoInputTask.browsertest.d.ts +9 -0
- package/dist/elements/EFMedia/videoTasks/makeVideoInputTask.d.ts +3 -0
- package/dist/elements/EFMedia/videoTasks/makeVideoInputTask.js +27 -0
- package/dist/elements/EFMedia/videoTasks/makeVideoSeekTask.d.ts +7 -0
- package/dist/elements/EFMedia/videoTasks/makeVideoSeekTask.js +34 -0
- package/dist/elements/EFMedia/videoTasks/makeVideoSegmentFetchTask.browsertest.d.ts +9 -0
- package/dist/elements/EFMedia/videoTasks/makeVideoSegmentFetchTask.d.ts +4 -0
- package/dist/elements/EFMedia/videoTasks/makeVideoSegmentFetchTask.js +28 -0
- package/dist/elements/EFMedia/videoTasks/makeVideoSegmentIdTask.browsertest.d.ts +9 -0
- package/dist/elements/EFMedia/videoTasks/makeVideoSegmentIdTask.d.ts +4 -0
- package/dist/elements/EFMedia/videoTasks/makeVideoSegmentIdTask.js +17 -0
- package/dist/elements/EFMedia.browsertest.d.ts +1 -0
- package/dist/elements/EFMedia.d.ts +63 -111
- package/dist/elements/EFMedia.js +117 -1113
- package/dist/elements/EFTemporal.d.ts +1 -1
- package/dist/elements/EFTemporal.js +1 -1
- package/dist/elements/EFTimegroup.d.ts +11 -0
- package/dist/elements/EFTimegroup.js +83 -13
- package/dist/elements/EFVideo.d.ts +54 -32
- package/dist/elements/EFVideo.js +100 -207
- package/dist/elements/EFWaveform.js +2 -2
- package/dist/elements/SampleBuffer.d.ts +14 -0
- package/dist/elements/SampleBuffer.js +52 -0
- package/dist/getRenderInfo.js +2 -1
- package/dist/gui/ContextMixin.js +3 -2
- package/dist/gui/EFFilmstrip.d.ts +3 -3
- package/dist/gui/EFFilmstrip.js +1 -1
- package/dist/gui/EFFitScale.d.ts +2 -2
- package/dist/gui/TWMixin.js +1 -1
- package/dist/style.css +1 -1
- package/dist/transcoding/cache/CacheManager.d.ts +73 -0
- package/dist/transcoding/cache/RequestDeduplicator.d.ts +29 -0
- package/dist/transcoding/cache/RequestDeduplicator.js +53 -0
- package/dist/transcoding/cache/RequestDeduplicator.test.d.ts +1 -0
- package/dist/transcoding/types/index.d.ts +242 -0
- package/dist/transcoding/utils/MediaUtils.d.ts +9 -0
- package/dist/transcoding/utils/UrlGenerator.d.ts +26 -0
- package/dist/transcoding/utils/UrlGenerator.js +45 -0
- package/dist/transcoding/utils/constants.d.ts +27 -0
- package/dist/utils/LRUCache.d.ts +34 -0
- package/dist/utils/LRUCache.js +115 -0
- package/package.json +3 -3
- package/src/elements/EFAudio.browsertest.ts +189 -49
- package/src/elements/EFAudio.ts +59 -13
- package/src/elements/EFImage.browsertest.ts +42 -0
- package/src/elements/EFImage.ts +23 -3
- package/src/elements/EFMedia/AssetIdMediaEngine.test.ts +222 -0
- package/src/elements/EFMedia/AssetIdMediaEngine.ts +70 -0
- package/src/elements/EFMedia/AssetMediaEngine.browsertest.ts +100 -0
- package/src/elements/EFMedia/AssetMediaEngine.ts +255 -0
- package/src/elements/EFMedia/BaseMediaEngine.test.ts +164 -0
- package/src/elements/EFMedia/BaseMediaEngine.ts +219 -0
- package/src/elements/EFMedia/BufferedSeekingInput.browsertest.ts +481 -0
- package/src/elements/EFMedia/BufferedSeekingInput.ts +324 -0
- package/src/elements/EFMedia/JitMediaEngine.browsertest.ts +165 -0
- package/src/elements/EFMedia/JitMediaEngine.ts +166 -0
- package/src/elements/EFMedia/audioTasks/makeAudioBufferTask.browsertest.ts +554 -0
- package/src/elements/EFMedia/audioTasks/makeAudioBufferTask.ts +81 -0
- package/src/elements/EFMedia/audioTasks/makeAudioFrequencyAnalysisTask.ts +250 -0
- package/src/elements/EFMedia/audioTasks/makeAudioInitSegmentFetchTask.browsertest.ts +59 -0
- package/src/elements/EFMedia/audioTasks/makeAudioInitSegmentFetchTask.ts +23 -0
- package/src/elements/EFMedia/audioTasks/makeAudioInputTask.browsertest.ts +55 -0
- package/src/elements/EFMedia/audioTasks/makeAudioInputTask.ts +43 -0
- package/src/elements/EFMedia/audioTasks/makeAudioSeekTask.chunkboundary.regression.browsertest.ts +199 -0
- package/src/elements/EFMedia/audioTasks/makeAudioSeekTask.ts +64 -0
- package/src/elements/EFMedia/audioTasks/makeAudioSegmentFetchTask.ts +45 -0
- package/src/elements/EFMedia/audioTasks/makeAudioSegmentIdTask.ts +24 -0
- package/src/elements/EFMedia/audioTasks/makeAudioTimeDomainAnalysisTask.ts +183 -0
- package/src/elements/EFMedia/shared/AudioSpanUtils.ts +128 -0
- package/src/elements/EFMedia/shared/BufferUtils.ts +310 -0
- package/src/elements/EFMedia/shared/MediaTaskUtils.ts +44 -0
- package/src/elements/EFMedia/shared/PrecisionUtils.ts +46 -0
- package/src/elements/EFMedia/shared/RenditionHelpers.browsertest.ts +247 -0
- package/src/elements/EFMedia/shared/RenditionHelpers.ts +79 -0
- package/src/elements/EFMedia/tasks/makeMediaEngineTask.browsertest.ts +128 -0
- package/src/elements/EFMedia/tasks/makeMediaEngineTask.test.ts +233 -0
- package/src/elements/EFMedia/tasks/makeMediaEngineTask.ts +89 -0
- package/src/elements/EFMedia/videoTasks/makeVideoBufferTask.browsertest.ts +555 -0
- package/src/elements/EFMedia/videoTasks/makeVideoBufferTask.ts +79 -0
- package/src/elements/EFMedia/videoTasks/makeVideoInitSegmentFetchTask.browsertest.ts +59 -0
- package/src/elements/EFMedia/videoTasks/makeVideoInitSegmentFetchTask.ts +23 -0
- package/src/elements/EFMedia/videoTasks/makeVideoInputTask.browsertest.ts +55 -0
- package/src/elements/EFMedia/videoTasks/makeVideoInputTask.ts +45 -0
- package/src/elements/EFMedia/videoTasks/makeVideoSeekTask.ts +68 -0
- package/src/elements/EFMedia/videoTasks/makeVideoSegmentFetchTask.browsertest.ts +57 -0
- package/src/elements/EFMedia/videoTasks/makeVideoSegmentFetchTask.ts +43 -0
- package/src/elements/EFMedia/videoTasks/makeVideoSegmentIdTask.browsertest.ts +56 -0
- package/src/elements/EFMedia/videoTasks/makeVideoSegmentIdTask.ts +24 -0
- package/src/elements/EFMedia.browsertest.ts +706 -273
- package/src/elements/EFMedia.ts +136 -1769
- package/src/elements/EFTemporal.ts +3 -4
- package/src/elements/EFTimegroup.browsertest.ts +6 -3
- package/src/elements/EFTimegroup.ts +147 -21
- package/src/elements/EFVideo.browsertest.ts +980 -169
- package/src/elements/EFVideo.ts +113 -458
- package/src/elements/EFWaveform.ts +1 -1
- package/src/elements/MediaController.ts +2 -12
- package/src/elements/SampleBuffer.ts +95 -0
- package/src/gui/ContextMixin.ts +3 -6
- package/src/transcoding/cache/CacheManager.ts +208 -0
- package/src/transcoding/cache/RequestDeduplicator.test.ts +170 -0
- package/src/transcoding/cache/RequestDeduplicator.ts +65 -0
- package/src/transcoding/types/index.ts +269 -0
- package/src/transcoding/utils/MediaUtils.ts +63 -0
- package/src/transcoding/utils/UrlGenerator.ts +68 -0
- package/src/transcoding/utils/constants.ts +36 -0
- package/src/utils/LRUCache.ts +153 -0
- package/test/EFVideo.framegen.browsertest.ts +39 -30
- package/test/__cache__/GET__api_v1_transcode_audio_1_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__32da3954ba60c96ad732020c65a08ebc/data.bin +0 -0
- package/test/__cache__/GET__api_v1_transcode_audio_1_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__32da3954ba60c96ad732020c65a08ebc/metadata.json +21 -0
- package/test/__cache__/GET__api_v1_transcode_audio_1_mp4_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4_bytes_0__9ed2d25c675aa6bb6ff5b3ae23887c71/data.bin +0 -0
- package/test/__cache__/GET__api_v1_transcode_audio_1_mp4_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4_bytes_0__9ed2d25c675aa6bb6ff5b3ae23887c71/metadata.json +22 -0
- package/test/__cache__/GET__api_v1_transcode_audio_2_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__b0b2b07efcf607de8ee0f650328c32f7/data.bin +0 -0
- package/test/__cache__/GET__api_v1_transcode_audio_2_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__b0b2b07efcf607de8ee0f650328c32f7/metadata.json +21 -0
- package/test/__cache__/GET__api_v1_transcode_audio_2_mp4_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4_bytes_0__d5a3309a2bf756dd6e304807eb402f56/data.bin +0 -0
- package/test/__cache__/GET__api_v1_transcode_audio_2_mp4_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4_bytes_0__d5a3309a2bf756dd6e304807eb402f56/metadata.json +22 -0
- package/test/__cache__/GET__api_v1_transcode_audio_3_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__a75c2252b542e0c152c780e9a8d7b154/data.bin +0 -0
- package/test/__cache__/GET__api_v1_transcode_audio_3_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__a75c2252b542e0c152c780e9a8d7b154/metadata.json +21 -0
- package/test/__cache__/GET__api_v1_transcode_audio_3_mp4_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4_bytes_0__773254bb671e3466fca8677139fb239e/data.bin +0 -0
- package/test/__cache__/GET__api_v1_transcode_audio_3_mp4_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4_bytes_0__773254bb671e3466fca8677139fb239e/metadata.json +22 -0
- package/test/__cache__/GET__api_v1_transcode_audio_4_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__a64ff1cfb1b52cae14df4b5dfa1e222b/data.bin +0 -0
- package/test/__cache__/GET__api_v1_transcode_audio_4_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__a64ff1cfb1b52cae14df4b5dfa1e222b/metadata.json +21 -0
- package/test/__cache__/GET__api_v1_transcode_audio_5_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__91e8a522f950809b9f09f4173113b4b0/data.bin +0 -0
- package/test/__cache__/GET__api_v1_transcode_audio_5_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__91e8a522f950809b9f09f4173113b4b0/metadata.json +21 -0
- package/test/__cache__/GET__api_v1_transcode_audio_init_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__e66d2c831d951e74ad0aeaa6489795d0/data.bin +0 -0
- package/test/__cache__/GET__api_v1_transcode_audio_init_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__e66d2c831d951e74ad0aeaa6489795d0/metadata.json +21 -0
- package/test/__cache__/GET__api_v1_transcode_high_1_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__26197f6f7c46cacb0a71134131c3f775/data.bin +0 -0
- package/test/__cache__/GET__api_v1_transcode_high_1_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__26197f6f7c46cacb0a71134131c3f775/metadata.json +21 -0
- package/test/__cache__/GET__api_v1_transcode_high_2_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__4cb6774cd3650ccf59c8f8dc6678c0b9/data.bin +0 -0
- package/test/__cache__/GET__api_v1_transcode_high_2_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__4cb6774cd3650ccf59c8f8dc6678c0b9/metadata.json +21 -0
- package/test/__cache__/GET__api_v1_transcode_high_3_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__0b3b2b1c8933f7fcf8a9ecaa88d58b41/data.bin +0 -0
- package/test/__cache__/GET__api_v1_transcode_high_3_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__0b3b2b1c8933f7fcf8a9ecaa88d58b41/metadata.json +21 -0
- package/test/__cache__/GET__api_v1_transcode_high_4_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__a6fb05a22b18d850f7f2950bbcdbdeed/data.bin +0 -0
- package/test/__cache__/GET__api_v1_transcode_high_4_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__a6fb05a22b18d850f7f2950bbcdbdeed/metadata.json +21 -0
- package/test/__cache__/GET__api_v1_transcode_high_5_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__a50058c7c3602e90879fe3428ed891f4/data.bin +0 -0
- package/test/__cache__/GET__api_v1_transcode_high_5_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__a50058c7c3602e90879fe3428ed891f4/metadata.json +21 -0
- package/test/__cache__/GET__api_v1_transcode_high_init_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__0798c479b44aaeef850609a430f6e613/data.bin +0 -0
- package/test/__cache__/GET__api_v1_transcode_high_init_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__0798c479b44aaeef850609a430f6e613/metadata.json +21 -0
- package/test/__cache__/GET__api_v1_transcode_manifest_json_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__3be92a0437de726b431ed5af2369158a/data.bin +1 -0
- package/test/__cache__/GET__api_v1_transcode_manifest_json_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__3be92a0437de726b431ed5af2369158a/metadata.json +19 -0
- package/test/createJitTestClips.ts +320 -188
- package/test/recordReplayProxyPlugin.js +352 -0
- package/test/useAssetMSW.ts +1 -1
- package/test/useMSW.ts +35 -22
- package/types.json +1 -1
- package/dist/JitTranscodingClient.d.ts +0 -167
- package/dist/JitTranscodingClient.js +0 -373
- package/dist/ScrubTrackManager.d.ts +0 -96
- package/dist/ScrubTrackManager.js +0 -216
- package/dist/elements/printTaskStatus.js +0 -11
- package/src/elements/__screenshots__/EFMedia.browsertest.ts/EFMedia-JIT-audio-playback-audioBufferTask-should-work-in-JIT-mode-without-URL-errors-1.png +0 -0
- package/test/EFVideo.frame-tasks.browsertest.ts +0 -524
- /package/dist/{DecoderResetFrequency.test.d.ts → elements/EFMedia/AssetIdMediaEngine.test.d.ts} +0 -0
- /package/dist/{DecoderResetRecovery.test.d.ts → elements/EFMedia/BaseMediaEngine.test.d.ts} +0 -0
- /package/dist/{JitTranscodingClient.browsertest.d.ts → elements/EFMedia/BufferedSeekingInput.browsertest.d.ts} +0 -0
- /package/dist/{JitTranscodingClient.test.d.ts → elements/EFMedia/shared/RenditionHelpers.browsertest.d.ts} +0 -0
- /package/dist/{ScrubTrackIntegration.test.d.ts → elements/EFMedia/tasks/makeMediaEngineTask.browsertest.d.ts} +0 -0
- /package/dist/{SegmentSwitchLoading.test.d.ts → elements/EFMedia/tasks/makeMediaEngineTask.test.d.ts} +0 -0
package/src/elements/EFMedia/shared/BufferUtils.ts
@@ -0,0 +1,310 @@
import type {
  AudioRendition,
  VideoRendition,
} from "../../../transcoding/types";

/**
 * State interface for media buffering - generic for both audio and video
 */
export interface MediaBufferState {
  currentSeekTimeMs: number;
  activeRequests: Set<number>;
  cachedSegments: Set<number>;
  requestQueue: number[];
}

/**
 * Configuration interface for media buffering - generic for both audio and video
 */
export interface MediaBufferConfig {
  bufferDurationMs: number;
  maxParallelFetches: number;
  enableBuffering: boolean;
  enableContinuousBuffering?: boolean;
}

/**
 * Dependencies interface for media buffering - generic for both audio and video
 */
export interface MediaBufferDependencies<
  T extends AudioRendition | VideoRendition,
> {
  computeSegmentId: (
    timeMs: number,
    rendition: T,
  ) => Promise<number | undefined>;
  fetchSegment: (segmentId: number, rendition: T) => Promise<ArrayBuffer>;
  getRendition: () => Promise<T>;
  logError: (message: string, error: any) => void;
}

/**
 * Compute segment range for a time window
 * Pure function - determines which segments are needed for a time range
 */
export const computeSegmentRange = <T extends AudioRendition | VideoRendition>(
  startTimeMs: number,
  endTimeMs: number,
  rendition: T,
  computeSegmentId: (timeMs: number, rendition: T) => number | undefined,
): number[] => {
  const segments: number[] = [];
  const segmentDurationMs = (rendition as any).segmentDurationMs || 1000;

  // Calculate segment indices that overlap with [startTimeMs, endTimeMs]
  const startSegmentIndex = Math.floor(startTimeMs / segmentDurationMs);
  const endSegmentIndex = Math.floor(endTimeMs / segmentDurationMs);

  for (let i = startSegmentIndex; i <= endSegmentIndex; i++) {
    const segmentId = computeSegmentId(i * segmentDurationMs, rendition);
    if (segmentId !== undefined) {
      segments.push(segmentId);
    }
  }

  return segments.filter((id, index, arr) => arr.indexOf(id) === index); // Remove duplicates
};

/**
 * Async version of computeSegmentRange for when computeSegmentId is async
 */
export const computeSegmentRangeAsync = async <
  T extends AudioRendition | VideoRendition,
>(
  startTimeMs: number,
  endTimeMs: number,
  durationMs: number,
  rendition: T,
  computeSegmentId: (
    timeMs: number,
    rendition: T,
  ) => Promise<number | undefined>,
): Promise<number[]> => {
  const segments: number[] = [];
  const segmentDurationMs = (rendition as any).segmentDurationMs || 1000;

  // Calculate segment indices that overlap with [startTimeMs, endTimeMs]
  const startSegmentIndex = Math.floor(startTimeMs / segmentDurationMs);
  const endSegmentIndex = Math.floor(
    Math.min(endTimeMs, durationMs) / segmentDurationMs,
  );

  for (let i = startSegmentIndex; i <= endSegmentIndex; i++) {
    const timeMs = i * segmentDurationMs;
    if (timeMs < durationMs) {
      const segmentId = await computeSegmentId(timeMs, rendition);
      if (segmentId !== undefined) {
        segments.push(segmentId);
      }
    }
  }

  return segments.filter((id, index, arr) => arr.indexOf(id) === index); // Remove duplicates
};

/**
 * Compute buffer queue based on current state and desired segments
 * Pure function - determines what segments should be fetched
 */
export const computeBufferQueue = (
  desiredSegments: number[],
  activeRequests: Set<number>,
  cachedSegments: Set<number>,
): number[] => {
  return desiredSegments.filter(
    (segmentId) =>
      !activeRequests.has(segmentId) && !cachedSegments.has(segmentId),
  );
};

/**
 * Handle seek time change and recompute buffer queue
 * Pure function - computes new queue when seek time changes
 */
export const handleSeekTimeChange = <T extends AudioRendition | VideoRendition>(
  newSeekTimeMs: number,
  bufferDurationMs: number,
  rendition: T,
  currentState: MediaBufferState,
  computeSegmentId: (timeMs: number, rendition: T) => number | undefined,
): { newQueue: number[]; overlappingRequests: number[] } => {
  const endTimeMs = newSeekTimeMs + bufferDurationMs;
  const desiredSegments = computeSegmentRange(
    newSeekTimeMs,
    endTimeMs,
    rendition,
    computeSegmentId,
  );

  // Find segments that are already being requested
  const overlappingRequests = desiredSegments.filter((segmentId) =>
    currentState.activeRequests.has(segmentId),
  );

  const newQueue = computeBufferQueue(
    desiredSegments,
    currentState.activeRequests,
    currentState.cachedSegments,
  );

  return { newQueue, overlappingRequests };
};

/**
 * Check if a specific segment is cached in the buffer
 * Pure function for accessing buffer cache state
 */
export const getCachedSegment = (
  segmentId: number,
  bufferState: MediaBufferState | undefined,
): boolean => {
  return bufferState?.cachedSegments.has(segmentId) ?? false;
};

/**
 * Get cached segments from a list of segment IDs
 * Pure function that returns which segments are available in cache
 */
export const getCachedSegments = (
  segmentIds: number[],
  bufferState: MediaBufferState | undefined,
): Set<number> => {
  if (!bufferState) {
    return new Set();
  }
  return new Set(segmentIds.filter((id) => bufferState.cachedSegments.has(id)));
};

/**
 * Get missing segments from a list of segment IDs
 * Pure function that returns which segments need to be fetched
 */
export const getMissingSegments = (
  segmentIds: number[],
  bufferState: MediaBufferState | undefined,
): number[] => {
  if (!bufferState) {
    return segmentIds;
  }
  return segmentIds.filter((id) => !bufferState.cachedSegments.has(id));
};

/**
 * Core media buffering logic with explicit dependencies
 * Generic implementation that works for both audio and video
 */
export const manageMediaBuffer = async <
  T extends AudioRendition | VideoRendition,
>(
  seekTimeMs: number,
  config: MediaBufferConfig,
  currentState: MediaBufferState,
  durationMs: number,
  signal: AbortSignal,
  deps: MediaBufferDependencies<T>,
): Promise<MediaBufferState> => {
  if (!config.enableBuffering) {
    return currentState;
  }

  const rendition = await deps.getRendition();
  const endTimeMs = seekTimeMs + config.bufferDurationMs;

  const desiredSegments = await computeSegmentRangeAsync(
    seekTimeMs,
    endTimeMs,
    durationMs,
    rendition,
    deps.computeSegmentId,
  );

  const newQueue = computeBufferQueue(
    desiredSegments,
    currentState.activeRequests,
    currentState.cachedSegments,
  );

  // Start fetching segments up to maxParallelFetches limit
  const segmentsToFetch = newQueue.slice(0, config.maxParallelFetches);
  const newActiveRequests = new Set(currentState.activeRequests);
  const newCachedSegments = new Set(currentState.cachedSegments);

  // Function to start next individual segment when a slot becomes available
  const startNextSegment = (remainingQueue: number[]): void => {
    if (remainingQueue.length === 0 || signal.aborted) return;

    const availableSlots = config.maxParallelFetches - newActiveRequests.size;
    if (availableSlots <= 0) return;

    const nextSegmentId = remainingQueue[0];
    if (nextSegmentId === undefined) return;

    if (
      newActiveRequests.has(nextSegmentId) ||
      newCachedSegments.has(nextSegmentId)
    ) {
      // Skip this segment and try the next
      startNextSegment(remainingQueue.slice(1));
      return;
    }

    newActiveRequests.add(nextSegmentId);

    deps
      .fetchSegment(nextSegmentId, rendition)
      .then(() => {
        if (signal.aborted) return;
        newActiveRequests.delete(nextSegmentId);
        newCachedSegments.add(nextSegmentId);
        startNextSegment(remainingQueue.slice(1));
      })
      .catch((error) => {
        if (signal.aborted) return;
        newActiveRequests.delete(nextSegmentId);
        deps.logError(`Failed to fetch segment ${nextSegmentId}`, error);
        startNextSegment(remainingQueue.slice(1));
      });
  };

  // Start fetch promises for new segments
  for (const segmentId of segmentsToFetch) {
    if (signal.aborted) break;

    newActiveRequests.add(segmentId);

    // Start fetch (don't await - let it run in background)
    deps
      .fetchSegment(segmentId, rendition)
      .then(() => {
        if (signal.aborted) return;
        // On success, move from active to cached
        newActiveRequests.delete(segmentId);
        newCachedSegments.add(segmentId);

        // Continue buffering if there are more segments needed and continuous buffering is enabled
        if (config.enableContinuousBuffering ?? true) {
          const remainingQueue = newQueue.slice(segmentsToFetch.length);
          startNextSegment(remainingQueue);
        }
      })
      .catch((error) => {
        if (signal.aborted) return;
        // On error, remove from active and continue
        newActiveRequests.delete(segmentId);
        deps.logError(`Failed to fetch segment ${segmentId}`, error);

        // Continue buffering even after error if continuous buffering is enabled
        if (config.enableContinuousBuffering ?? true) {
          const remainingQueue = newQueue.slice(segmentsToFetch.length);
          startNextSegment(remainingQueue);
        }
      });
  }

  return {
    currentSeekTimeMs: seekTimeMs,
    activeRequests: newActiveRequests,
    cachedSegments: newCachedSegments,
    requestQueue: newQueue.slice(segmentsToFetch.length), // Remaining queue
  };
};
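
The new BufferUtils module keeps the buffering math in pure functions and routes every side effect through the MediaBufferDependencies callbacks, so a caller drives it by supplying its own computeSegmentId/fetchSegment and threading the returned MediaBufferState into the next call. A minimal driver sketch follows; it is not part of the released package, and the rendition literal, the 1-second segment-id formula, the 10-second duration, and the fetch URL are placeholders chosen for illustration.

// Illustrative sketch only - not part of the package diff.
// Assumes it sits next to BufferUtils.ts; all concrete values below are hypothetical.
import type { AudioRendition } from "../../../transcoding/types";
import {
  manageMediaBuffer,
  type MediaBufferConfig,
  type MediaBufferState,
} from "./BufferUtils";

// Partial rendition cast, mirroring the package's own test fixtures.
const rendition = {
  trackId: 2,
  src: "audio-track.mp4",
  segmentDurationMs: 1000,
} as AudioRendition;

const config: MediaBufferConfig = {
  bufferDurationMs: 5000, // keep roughly 5s of media ahead of the seek point
  maxParallelFetches: 2, // at most two in-flight segment requests
  enableBuffering: true,
};

let state: MediaBufferState = {
  currentSeekTimeMs: 0,
  activeRequests: new Set(),
  cachedSegments: new Set(),
  requestQueue: [],
};

const controller = new AbortController();

// Re-run the buffering step whenever the seek time changes, feeding the
// previous state back in so already-cached and in-flight segments are skipped.
async function onSeek(seekTimeMs: number): Promise<void> {
  state = await manageMediaBuffer(
    seekTimeMs,
    config,
    state,
    10_000, // assume a 10s asset for this sketch
    controller.signal,
    {
      // Hypothetical 1-based segment ids for fixed 1s segments.
      computeSegmentId: async (timeMs) =>
        Math.max(1, Math.floor(timeMs / 1000) + 1),
      // Hypothetical endpoint; a real engine resolves segment URLs itself.
      fetchSegment: async (segmentId) =>
        (await fetch(`/api/v1/transcode/audio/${segmentId}.m4s`)).arrayBuffer(),
      getRendition: async () => rendition,
      logError: (message, error) => console.error(message, error),
    },
  );
}

Calling onSeek repeatedly with the previous state is how a caller keeps the fetch queue consistent across seeks while staying within maxParallelFetches.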
package/src/elements/EFMedia/shared/MediaTaskUtils.ts
@@ -0,0 +1,44 @@
import type { Task } from "@lit/task";
import type {
  AudioRendition,
  MediaEngine,
  VideoRendition,
} from "../../../transcoding/types";
import type { BufferedSeekingInput } from "../BufferedSeekingInput";

/**
 * Generic rendition type that can be either audio or video
 */
export type MediaRendition = AudioRendition | VideoRendition;

/**
 * Generic task type for init segment fetch
 */
export type InitSegmentFetchTask = Task<
  readonly [MediaEngine | undefined],
  ArrayBuffer
>;

/**
 * Generic task type for segment ID calculation
 */
export type SegmentIdTask = Task<
  readonly [MediaEngine | undefined, number],
  number | undefined
>;

/**
 * Generic task type for segment fetch
 */
export type SegmentFetchTask = Task<
  readonly [MediaEngine | undefined, number | undefined],
  ArrayBuffer
>;

/**
 * Generic task type for input creation
 */
export type InputTask = Task<
  readonly [ArrayBuffer, ArrayBuffer],
  BufferedSeekingInput
>;
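
These aliases pin down the argument and result tuples that the EFMedia task factories pass to @lit/task. A hedged sketch of instantiating one of them on a Lit host is below; the host element, its fields, and the fixed 1-second segment duration are hypothetical and only demonstrate that the generics line up.

// Illustrative sketch only - not part of the package; the host and its fields are hypothetical.
import { LitElement } from "lit";
import { Task } from "@lit/task";
import type { MediaEngine } from "../../../transcoding/types";
import type { SegmentIdTask } from "./MediaTaskUtils";

class ExampleHost extends LitElement {
  engine?: MediaEngine;
  seekTimeMs = 0;

  // Args tuple: [MediaEngine | undefined, number]; result: number | undefined.
  segmentIdTask: SegmentIdTask = new Task(this, {
    task: async ([engine, timeMs]) => {
      if (!engine) return undefined;
      // Hypothetical fixed 1s segments; real code derives this from the rendition.
      return Math.floor(timeMs / 1000) + 1;
    },
    args: () => [this.engine, this.seekTimeMs] as const,
  });
}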
package/src/elements/EFMedia/shared/PrecisionUtils.ts
@@ -0,0 +1,46 @@
/**
 * Centralized precision utilities for consistent timing calculations across the media pipeline.
 *
 * The key insight is that floating-point precision errors can cause inconsistencies between:
 * 1. Segment selection logic (in AssetMediaEngine.computeSegmentId)
 * 2. Sample finding logic (in SampleBuffer.find)
 * 3. Timeline mapping (in BufferedSeekingInput.seek)
 *
 * All timing calculations must use the same rounding strategy to ensure consistency.
 */

/**
 * Round time to millisecond precision to handle floating-point precision issues.
 * Uses Math.round for consistent behavior across the entire pipeline.
 *
 * This function should be used for ALL time-related calculations that need to be
 * compared between different parts of the system.
 */
export const roundToMilliseconds = (timeMs: number): number => {
  // Round to 3 decimal places (microsecond precision)
  return Math.round(timeMs * 1000) / 1000;
};

/**
 * Convert media time (in seconds) to scaled time units using consistent rounding.
 * This is used in segment selection to convert from milliseconds to timescale units.
 */
export const convertToScaledTime = (
  timeMs: number,
  timescale: number,
): number => {
  const scaledTime = (timeMs / 1000) * timescale;
  return Math.round(scaledTime);
};

/**
 * Convert scaled time units back to media time (in milliseconds) using consistent rounding.
 * This is the inverse of convertToScaledTime.
 */
export const convertFromScaledTime = (
  scaledTime: number,
  timescale: number,
): number => {
  const timeMs = (scaledTime / timescale) * 1000;
  return roundToMilliseconds(timeMs);
};
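
Because convertToScaledTime and convertFromScaledTime share the same rounding strategy, a value survives the round trip to within roundToMilliseconds precision. A small worked example, not part of the package, using 90000 as an illustrative MP4 video timescale:

// Illustrative numbers only; 90000 is a common MP4 video timescale, chosen here as an example.
import {
  convertFromScaledTime,
  convertToScaledTime,
  roundToMilliseconds,
} from "./PrecisionUtils";

const timescale = 90_000;

// One frame at 30000/1001 fps lasts 1001/30 ≈ 33.3667 ms.
const frameMs = 1001 / 30;
const scaled = convertToScaledTime(frameMs, timescale); // Math.round(33.3667 / 1000 * 90000) = 3003
const backMs = convertFromScaledTime(scaled, timescale); // 3003 / 90000 * 1000, rounded to 33.367

console.log(scaled, backMs, roundToMilliseconds(frameMs)); // 3003, 33.367, 33.367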
package/src/elements/EFMedia/shared/RenditionHelpers.browsertest.ts
@@ -0,0 +1,247 @@
import { test as baseTest, describe, vi } from "vitest";
import type {
  AudioRendition,
  MediaEngine,
  VideoRendition,
} from "../../../transcoding/types";
import {
  calculateSegmentRange,
  computeSegmentId,
  getAudioRendition,
  getVideoRendition,
} from "./RenditionHelpers";

const test = baseTest.extend<{
  mockMediaEngine: MediaEngine;
  mockVideoRendition: VideoRendition;
  mockAudioRendition: AudioRendition;
  mockMediaEngineWithoutAudio: MediaEngine;
  mockMediaEngineWithoutVideo: MediaEngine;
}>({
  mockMediaEngine: async ({}, use) => {
    const mockVideoRendition = {
      trackId: 1,
      src: "video-track.mp4",
      segmentDurationMs: 1000,
    } as VideoRendition;

    const mockAudioRendition = {
      trackId: 2,
      src: "audio-track.mp4",
      segmentDurationMs: 1000,
    } as AudioRendition;

    const mockMediaEngine = {
      durationMs: 10000,
      src: "https://example.com/media.mp4",
      videoRendition: mockVideoRendition,
      audioRendition: mockAudioRendition,
      fetchMediaSegment: vi.fn(),
    } as unknown as MediaEngine;
    await use(mockMediaEngine);
  },

  mockVideoRendition: async ({}, use) => {
    const mockVideoRendition = {
      trackId: 1,
      src: "video-track.mp4",
      segmentDurationMs: 1000,
    } as VideoRendition;
    await use(mockVideoRendition);
  },

  mockAudioRendition: async ({}, use) => {
    const mockAudioRendition = {
      trackId: 2,
      src: "audio-track.mp4",
      segmentDurationMs: 1000,
    } as AudioRendition;
    await use(mockAudioRendition);
  },

  mockMediaEngineWithoutAudio: async ({}, use) => {
    const mockMediaEngine = {
      durationMs: 10000,
      src: "https://example.com/media.mp4",
      videoRendition: {
        trackId: 1,
        src: "video-track.mp4",
        segmentDurationMs: 1000,
      } as VideoRendition,
      audioRendition: null,
      fetchMediaSegment: vi.fn(),
    } as unknown as MediaEngine;
    await use(mockMediaEngine);
  },

  mockMediaEngineWithoutVideo: async ({}, use) => {
    const mockMediaEngine = {
      durationMs: 10000,
      src: "https://example.com/media.mp4",
      videoRendition: null,
      audioRendition: {
        trackId: 2,
        src: "audio-track.mp4",
        segmentDurationMs: 1000,
      } as AudioRendition,
      fetchMediaSegment: vi.fn(),
    } as unknown as MediaEngine;
    await use(mockMediaEngine);
  },
});

describe("RenditionHelpers", () => {
  describe("getAudioRendition", () => {
    test("returns audio rendition when available", ({
      mockMediaEngine,
      expect,
    }) => {
      const result = getAudioRendition(mockMediaEngine);
      expect(result).toBe(mockMediaEngine.audioRendition);
      expect(result.trackId).toBe(2);
      expect(result.src).toBe("audio-track.mp4");
    });

    test("throws error when audio rendition is not available", ({
      mockMediaEngineWithoutAudio,
      expect,
    }) => {
      expect(() => getAudioRendition(mockMediaEngineWithoutAudio)).toThrow(
        "Audio rendition is not available",
      );
    });
  });

  describe("getVideoRendition", () => {
    test("returns video rendition when available", ({
      mockMediaEngine,
      expect,
    }) => {
      const result = getVideoRendition(mockMediaEngine);
      expect(result).toBe(mockMediaEngine.videoRendition);
      expect(result.trackId).toBe(1);
      expect(result.src).toBe("video-track.mp4");
    });

    test("throws error when video rendition is not available", ({
      mockMediaEngineWithoutVideo,
      expect,
    }) => {
      expect(() => getVideoRendition(mockMediaEngineWithoutVideo)).toThrow(
        "Video rendition is not available",
      );
    });
  });

  describe("computeSegmentId", () => {
    test("calculates segment ID correctly for audio rendition", ({
      mockAudioRendition,
      expect,
    }) => {
      // Test various time points
      expect(computeSegmentId(0, mockAudioRendition)).toBe(1); // First segment
      expect(computeSegmentId(500, mockAudioRendition)).toBe(1); // Still first segment
      expect(computeSegmentId(999, mockAudioRendition)).toBe(1); // Still first segment
      expect(computeSegmentId(1000, mockAudioRendition)).toBe(2); // Second segment
      expect(computeSegmentId(1500, mockAudioRendition)).toBe(2); // Still second segment
      expect(computeSegmentId(2000, mockAudioRendition)).toBe(3); // Third segment
    });

    test("calculates segment ID correctly for video rendition", ({
      mockVideoRendition,
      expect,
    }) => {
      // Test various time points
      expect(computeSegmentId(0, mockVideoRendition)).toBe(1); // First segment
      expect(computeSegmentId(999, mockVideoRendition)).toBe(1); // Still first segment
      expect(computeSegmentId(1000, mockVideoRendition)).toBe(2); // Second segment
      expect(computeSegmentId(2500, mockVideoRendition)).toBe(3); // Third segment
    });

    test("returns undefined when segmentDurationMs is not available", ({
      expect,
    }) => {
      const renditionWithoutDuration = {
        trackId: 1,
        src: "test.mp4",
        segmentDurationMs: undefined,
      } as AudioRendition;

      expect(computeSegmentId(1000, renditionWithoutDuration)).toBeUndefined();
    });

    test("handles edge case of negative time", ({
      mockAudioRendition,
      expect,
    }) => {
      expect(computeSegmentId(-100, mockAudioRendition)).toBe(1); // Should clamp to segment 1
    });
  });

  describe("calculateSegmentRange", () => {
    test("calculates segment range for single segment", ({
      mockAudioRendition,
      expect,
    }) => {
      const result = calculateSegmentRange(100, 800, mockAudioRendition);
      expect(result).toEqual([1]);
    });

    test("calculates segment range spanning multiple segments", ({
      mockAudioRendition,
      expect,
    }) => {
      const result = calculateSegmentRange(500, 2500, mockAudioRendition);
      expect(result).toEqual([1, 2, 3]);
    });

    test("calculates segment range for exact segment boundaries", ({
      mockAudioRendition,
      expect,
    }) => {
      const result = calculateSegmentRange(1000, 2000, mockAudioRendition);
      expect(result).toEqual([2, 3]);
    });

    test("handles single time point (start equals end)", ({
      mockAudioRendition,
      expect,
    }) => {
      const result = calculateSegmentRange(1500, 1500, mockAudioRendition);
      expect(result).toEqual([2]);
    });

    test("returns empty array when segmentDurationMs is not available", ({
      expect,
    }) => {
      const renditionWithoutDuration = {
        trackId: 1,
        src: "test.mp4",
        segmentDurationMs: undefined,
      } as AudioRendition;

      const result = calculateSegmentRange(
        1000,
        2000,
        renditionWithoutDuration,
      );
      expect(result).toEqual([]);
    });

    test("works with video renditions too", ({
      mockVideoRendition,
      expect,
    }) => {
      const result = calculateSegmentRange(1500, 3500, mockVideoRendition);
      expect(result).toEqual([2, 3, 4]);
    });

    test("handles edge case where start time is negative", ({
      mockAudioRendition,
      expect,
    }) => {
      const result = calculateSegmentRange(-500, 1500, mockAudioRendition);
      expect(result).toEqual([1, 2]);
    });
  });
});