@editframe/elements 0.17.6-beta.0 → 0.18.7-beta.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (211)
  1. package/dist/EF_FRAMEGEN.js +1 -1
  2. package/dist/elements/EFAudio.d.ts +21 -2
  3. package/dist/elements/EFAudio.js +41 -11
  4. package/dist/elements/EFImage.d.ts +1 -0
  5. package/dist/elements/EFImage.js +11 -3
  6. package/dist/elements/EFMedia/AssetIdMediaEngine.d.ts +18 -0
  7. package/dist/elements/EFMedia/AssetIdMediaEngine.js +41 -0
  8. package/dist/elements/EFMedia/AssetMediaEngine.browsertest.d.ts +0 -0
  9. package/dist/elements/EFMedia/AssetMediaEngine.d.ts +45 -0
  10. package/dist/elements/EFMedia/AssetMediaEngine.js +135 -0
  11. package/dist/elements/EFMedia/BaseMediaEngine.d.ts +55 -0
  12. package/dist/elements/EFMedia/BaseMediaEngine.js +115 -0
  13. package/dist/elements/EFMedia/BufferedSeekingInput.d.ts +43 -0
  14. package/dist/elements/EFMedia/BufferedSeekingInput.js +179 -0
  15. package/dist/elements/EFMedia/JitMediaEngine.browsertest.d.ts +0 -0
  16. package/dist/elements/EFMedia/JitMediaEngine.d.ts +31 -0
  17. package/dist/elements/EFMedia/JitMediaEngine.js +81 -0
  18. package/dist/elements/EFMedia/audioTasks/makeAudioBufferTask.browsertest.d.ts +9 -0
  19. package/dist/elements/EFMedia/audioTasks/makeAudioBufferTask.d.ts +16 -0
  20. package/dist/elements/EFMedia/audioTasks/makeAudioBufferTask.js +48 -0
  21. package/dist/elements/EFMedia/audioTasks/makeAudioFrequencyAnalysisTask.d.ts +3 -0
  22. package/dist/elements/EFMedia/audioTasks/makeAudioFrequencyAnalysisTask.js +141 -0
  23. package/dist/elements/EFMedia/audioTasks/makeAudioInitSegmentFetchTask.browsertest.d.ts +9 -0
  24. package/dist/elements/EFMedia/audioTasks/makeAudioInitSegmentFetchTask.d.ts +4 -0
  25. package/dist/elements/EFMedia/audioTasks/makeAudioInitSegmentFetchTask.js +16 -0
  26. package/dist/elements/EFMedia/audioTasks/makeAudioInputTask.browsertest.d.ts +9 -0
  27. package/dist/elements/EFMedia/audioTasks/makeAudioInputTask.d.ts +3 -0
  28. package/dist/elements/EFMedia/audioTasks/makeAudioInputTask.js +30 -0
  29. package/dist/elements/EFMedia/audioTasks/makeAudioSeekTask.chunkboundary.regression.browsertest.d.ts +0 -0
  30. package/dist/elements/EFMedia/audioTasks/makeAudioSeekTask.d.ts +7 -0
  31. package/dist/elements/EFMedia/audioTasks/makeAudioSeekTask.js +32 -0
  32. package/dist/elements/EFMedia/audioTasks/makeAudioSegmentFetchTask.d.ts +4 -0
  33. package/dist/elements/EFMedia/audioTasks/makeAudioSegmentFetchTask.js +28 -0
  34. package/dist/elements/EFMedia/audioTasks/makeAudioSegmentIdTask.d.ts +4 -0
  35. package/dist/elements/EFMedia/audioTasks/makeAudioSegmentIdTask.js +17 -0
  36. package/dist/elements/EFMedia/audioTasks/makeAudioTimeDomainAnalysisTask.d.ts +3 -0
  37. package/dist/elements/EFMedia/audioTasks/makeAudioTimeDomainAnalysisTask.js +107 -0
  38. package/dist/elements/EFMedia/shared/AudioSpanUtils.d.ts +7 -0
  39. package/dist/elements/EFMedia/shared/AudioSpanUtils.js +54 -0
  40. package/dist/elements/EFMedia/shared/BufferUtils.d.ts +70 -0
  41. package/dist/elements/EFMedia/shared/BufferUtils.js +89 -0
  42. package/dist/elements/EFMedia/shared/MediaTaskUtils.d.ts +23 -0
  43. package/dist/elements/EFMedia/shared/PrecisionUtils.d.ts +28 -0
  44. package/dist/elements/EFMedia/shared/PrecisionUtils.js +29 -0
  45. package/dist/elements/EFMedia/shared/RenditionHelpers.d.ts +19 -0
  46. package/dist/elements/EFMedia/tasks/makeMediaEngineTask.d.ts +18 -0
  47. package/dist/elements/EFMedia/tasks/makeMediaEngineTask.js +60 -0
  48. package/dist/elements/EFMedia/videoTasks/makeVideoBufferTask.browsertest.d.ts +9 -0
  49. package/dist/elements/EFMedia/videoTasks/makeVideoBufferTask.d.ts +16 -0
  50. package/dist/elements/EFMedia/videoTasks/makeVideoBufferTask.js +46 -0
  51. package/dist/elements/EFMedia/videoTasks/makeVideoInitSegmentFetchTask.browsertest.d.ts +9 -0
  52. package/dist/elements/EFMedia/videoTasks/makeVideoInitSegmentFetchTask.d.ts +4 -0
  53. package/dist/elements/EFMedia/videoTasks/makeVideoInitSegmentFetchTask.js +16 -0
  54. package/dist/elements/EFMedia/videoTasks/makeVideoInputTask.browsertest.d.ts +9 -0
  55. package/dist/elements/EFMedia/videoTasks/makeVideoInputTask.d.ts +3 -0
  56. package/dist/elements/EFMedia/videoTasks/makeVideoInputTask.js +27 -0
  57. package/dist/elements/EFMedia/videoTasks/makeVideoSeekTask.d.ts +7 -0
  58. package/dist/elements/EFMedia/videoTasks/makeVideoSeekTask.js +34 -0
  59. package/dist/elements/EFMedia/videoTasks/makeVideoSegmentFetchTask.browsertest.d.ts +9 -0
  60. package/dist/elements/EFMedia/videoTasks/makeVideoSegmentFetchTask.d.ts +4 -0
  61. package/dist/elements/EFMedia/videoTasks/makeVideoSegmentFetchTask.js +28 -0
  62. package/dist/elements/EFMedia/videoTasks/makeVideoSegmentIdTask.browsertest.d.ts +9 -0
  63. package/dist/elements/EFMedia/videoTasks/makeVideoSegmentIdTask.d.ts +4 -0
  64. package/dist/elements/EFMedia/videoTasks/makeVideoSegmentIdTask.js +17 -0
  65. package/dist/elements/EFMedia.browsertest.d.ts +1 -0
  66. package/dist/elements/EFMedia.d.ts +63 -111
  67. package/dist/elements/EFMedia.js +117 -1113
  68. package/dist/elements/EFTemporal.d.ts +1 -1
  69. package/dist/elements/EFTemporal.js +1 -1
  70. package/dist/elements/EFTimegroup.d.ts +11 -0
  71. package/dist/elements/EFTimegroup.js +83 -13
  72. package/dist/elements/EFVideo.d.ts +54 -32
  73. package/dist/elements/EFVideo.js +100 -207
  74. package/dist/elements/EFWaveform.js +2 -2
  75. package/dist/elements/SampleBuffer.d.ts +14 -0
  76. package/dist/elements/SampleBuffer.js +52 -0
  77. package/dist/getRenderInfo.js +2 -1
  78. package/dist/gui/ContextMixin.js +3 -2
  79. package/dist/gui/EFFilmstrip.d.ts +3 -3
  80. package/dist/gui/EFFilmstrip.js +1 -1
  81. package/dist/gui/EFFitScale.d.ts +2 -2
  82. package/dist/gui/TWMixin.js +1 -1
  83. package/dist/style.css +1 -1
  84. package/dist/transcoding/cache/CacheManager.d.ts +73 -0
  85. package/dist/transcoding/cache/RequestDeduplicator.d.ts +29 -0
  86. package/dist/transcoding/cache/RequestDeduplicator.js +53 -0
  87. package/dist/transcoding/cache/RequestDeduplicator.test.d.ts +1 -0
  88. package/dist/transcoding/types/index.d.ts +242 -0
  89. package/dist/transcoding/utils/MediaUtils.d.ts +9 -0
  90. package/dist/transcoding/utils/UrlGenerator.d.ts +26 -0
  91. package/dist/transcoding/utils/UrlGenerator.js +45 -0
  92. package/dist/transcoding/utils/constants.d.ts +27 -0
  93. package/dist/utils/LRUCache.d.ts +34 -0
  94. package/dist/utils/LRUCache.js +115 -0
  95. package/package.json +3 -3
  96. package/src/elements/EFAudio.browsertest.ts +189 -49
  97. package/src/elements/EFAudio.ts +59 -13
  98. package/src/elements/EFImage.browsertest.ts +42 -0
  99. package/src/elements/EFImage.ts +23 -3
  100. package/src/elements/EFMedia/AssetIdMediaEngine.test.ts +222 -0
  101. package/src/elements/EFMedia/AssetIdMediaEngine.ts +70 -0
  102. package/src/elements/EFMedia/AssetMediaEngine.browsertest.ts +100 -0
  103. package/src/elements/EFMedia/AssetMediaEngine.ts +255 -0
  104. package/src/elements/EFMedia/BaseMediaEngine.test.ts +164 -0
  105. package/src/elements/EFMedia/BaseMediaEngine.ts +219 -0
  106. package/src/elements/EFMedia/BufferedSeekingInput.browsertest.ts +481 -0
  107. package/src/elements/EFMedia/BufferedSeekingInput.ts +324 -0
  108. package/src/elements/EFMedia/JitMediaEngine.browsertest.ts +165 -0
  109. package/src/elements/EFMedia/JitMediaEngine.ts +166 -0
  110. package/src/elements/EFMedia/audioTasks/makeAudioBufferTask.browsertest.ts +554 -0
  111. package/src/elements/EFMedia/audioTasks/makeAudioBufferTask.ts +81 -0
  112. package/src/elements/EFMedia/audioTasks/makeAudioFrequencyAnalysisTask.ts +250 -0
  113. package/src/elements/EFMedia/audioTasks/makeAudioInitSegmentFetchTask.browsertest.ts +59 -0
  114. package/src/elements/EFMedia/audioTasks/makeAudioInitSegmentFetchTask.ts +23 -0
  115. package/src/elements/EFMedia/audioTasks/makeAudioInputTask.browsertest.ts +55 -0
  116. package/src/elements/EFMedia/audioTasks/makeAudioInputTask.ts +43 -0
  117. package/src/elements/EFMedia/audioTasks/makeAudioSeekTask.chunkboundary.regression.browsertest.ts +199 -0
  118. package/src/elements/EFMedia/audioTasks/makeAudioSeekTask.ts +64 -0
  119. package/src/elements/EFMedia/audioTasks/makeAudioSegmentFetchTask.ts +45 -0
  120. package/src/elements/EFMedia/audioTasks/makeAudioSegmentIdTask.ts +24 -0
  121. package/src/elements/EFMedia/audioTasks/makeAudioTimeDomainAnalysisTask.ts +183 -0
  122. package/src/elements/EFMedia/shared/AudioSpanUtils.ts +128 -0
  123. package/src/elements/EFMedia/shared/BufferUtils.ts +310 -0
  124. package/src/elements/EFMedia/shared/MediaTaskUtils.ts +44 -0
  125. package/src/elements/EFMedia/shared/PrecisionUtils.ts +46 -0
  126. package/src/elements/EFMedia/shared/RenditionHelpers.browsertest.ts +247 -0
  127. package/src/elements/EFMedia/shared/RenditionHelpers.ts +79 -0
  128. package/src/elements/EFMedia/tasks/makeMediaEngineTask.browsertest.ts +128 -0
  129. package/src/elements/EFMedia/tasks/makeMediaEngineTask.test.ts +233 -0
  130. package/src/elements/EFMedia/tasks/makeMediaEngineTask.ts +89 -0
  131. package/src/elements/EFMedia/videoTasks/makeVideoBufferTask.browsertest.ts +555 -0
  132. package/src/elements/EFMedia/videoTasks/makeVideoBufferTask.ts +79 -0
  133. package/src/elements/EFMedia/videoTasks/makeVideoInitSegmentFetchTask.browsertest.ts +59 -0
  134. package/src/elements/EFMedia/videoTasks/makeVideoInitSegmentFetchTask.ts +23 -0
  135. package/src/elements/EFMedia/videoTasks/makeVideoInputTask.browsertest.ts +55 -0
  136. package/src/elements/EFMedia/videoTasks/makeVideoInputTask.ts +45 -0
  137. package/src/elements/EFMedia/videoTasks/makeVideoSeekTask.ts +68 -0
  138. package/src/elements/EFMedia/videoTasks/makeVideoSegmentFetchTask.browsertest.ts +57 -0
  139. package/src/elements/EFMedia/videoTasks/makeVideoSegmentFetchTask.ts +43 -0
  140. package/src/elements/EFMedia/videoTasks/makeVideoSegmentIdTask.browsertest.ts +56 -0
  141. package/src/elements/EFMedia/videoTasks/makeVideoSegmentIdTask.ts +24 -0
  142. package/src/elements/EFMedia.browsertest.ts +706 -273
  143. package/src/elements/EFMedia.ts +136 -1769
  144. package/src/elements/EFTemporal.ts +3 -4
  145. package/src/elements/EFTimegroup.browsertest.ts +6 -3
  146. package/src/elements/EFTimegroup.ts +147 -21
  147. package/src/elements/EFVideo.browsertest.ts +980 -169
  148. package/src/elements/EFVideo.ts +113 -458
  149. package/src/elements/EFWaveform.ts +1 -1
  150. package/src/elements/MediaController.ts +2 -12
  151. package/src/elements/SampleBuffer.ts +95 -0
  152. package/src/gui/ContextMixin.ts +3 -6
  153. package/src/transcoding/cache/CacheManager.ts +208 -0
  154. package/src/transcoding/cache/RequestDeduplicator.test.ts +170 -0
  155. package/src/transcoding/cache/RequestDeduplicator.ts +65 -0
  156. package/src/transcoding/types/index.ts +269 -0
  157. package/src/transcoding/utils/MediaUtils.ts +63 -0
  158. package/src/transcoding/utils/UrlGenerator.ts +68 -0
  159. package/src/transcoding/utils/constants.ts +36 -0
  160. package/src/utils/LRUCache.ts +153 -0
  161. package/test/EFVideo.framegen.browsertest.ts +39 -30
  162. package/test/__cache__/GET__api_v1_transcode_audio_1_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__32da3954ba60c96ad732020c65a08ebc/data.bin +0 -0
  163. package/test/__cache__/GET__api_v1_transcode_audio_1_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__32da3954ba60c96ad732020c65a08ebc/metadata.json +21 -0
  164. package/test/__cache__/GET__api_v1_transcode_audio_1_mp4_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4_bytes_0__9ed2d25c675aa6bb6ff5b3ae23887c71/data.bin +0 -0
  165. package/test/__cache__/GET__api_v1_transcode_audio_1_mp4_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4_bytes_0__9ed2d25c675aa6bb6ff5b3ae23887c71/metadata.json +22 -0
  166. package/test/__cache__/GET__api_v1_transcode_audio_2_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__b0b2b07efcf607de8ee0f650328c32f7/data.bin +0 -0
  167. package/test/__cache__/GET__api_v1_transcode_audio_2_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__b0b2b07efcf607de8ee0f650328c32f7/metadata.json +21 -0
  168. package/test/__cache__/GET__api_v1_transcode_audio_2_mp4_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4_bytes_0__d5a3309a2bf756dd6e304807eb402f56/data.bin +0 -0
  169. package/test/__cache__/GET__api_v1_transcode_audio_2_mp4_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4_bytes_0__d5a3309a2bf756dd6e304807eb402f56/metadata.json +22 -0
  170. package/test/__cache__/GET__api_v1_transcode_audio_3_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__a75c2252b542e0c152c780e9a8d7b154/data.bin +0 -0
  171. package/test/__cache__/GET__api_v1_transcode_audio_3_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__a75c2252b542e0c152c780e9a8d7b154/metadata.json +21 -0
  172. package/test/__cache__/GET__api_v1_transcode_audio_3_mp4_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4_bytes_0__773254bb671e3466fca8677139fb239e/data.bin +0 -0
  173. package/test/__cache__/GET__api_v1_transcode_audio_3_mp4_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4_bytes_0__773254bb671e3466fca8677139fb239e/metadata.json +22 -0
  174. package/test/__cache__/GET__api_v1_transcode_audio_4_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__a64ff1cfb1b52cae14df4b5dfa1e222b/data.bin +0 -0
  175. package/test/__cache__/GET__api_v1_transcode_audio_4_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__a64ff1cfb1b52cae14df4b5dfa1e222b/metadata.json +21 -0
  176. package/test/__cache__/GET__api_v1_transcode_audio_5_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__91e8a522f950809b9f09f4173113b4b0/data.bin +0 -0
  177. package/test/__cache__/GET__api_v1_transcode_audio_5_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__91e8a522f950809b9f09f4173113b4b0/metadata.json +21 -0
  178. package/test/__cache__/GET__api_v1_transcode_audio_init_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__e66d2c831d951e74ad0aeaa6489795d0/data.bin +0 -0
  179. package/test/__cache__/GET__api_v1_transcode_audio_init_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__e66d2c831d951e74ad0aeaa6489795d0/metadata.json +21 -0
  180. package/test/__cache__/GET__api_v1_transcode_high_1_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__26197f6f7c46cacb0a71134131c3f775/data.bin +0 -0
  181. package/test/__cache__/GET__api_v1_transcode_high_1_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__26197f6f7c46cacb0a71134131c3f775/metadata.json +21 -0
  182. package/test/__cache__/GET__api_v1_transcode_high_2_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__4cb6774cd3650ccf59c8f8dc6678c0b9/data.bin +0 -0
  183. package/test/__cache__/GET__api_v1_transcode_high_2_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__4cb6774cd3650ccf59c8f8dc6678c0b9/metadata.json +21 -0
  184. package/test/__cache__/GET__api_v1_transcode_high_3_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__0b3b2b1c8933f7fcf8a9ecaa88d58b41/data.bin +0 -0
  185. package/test/__cache__/GET__api_v1_transcode_high_3_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__0b3b2b1c8933f7fcf8a9ecaa88d58b41/metadata.json +21 -0
  186. package/test/__cache__/GET__api_v1_transcode_high_4_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__a6fb05a22b18d850f7f2950bbcdbdeed/data.bin +0 -0
  187. package/test/__cache__/GET__api_v1_transcode_high_4_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__a6fb05a22b18d850f7f2950bbcdbdeed/metadata.json +21 -0
  188. package/test/__cache__/GET__api_v1_transcode_high_5_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__a50058c7c3602e90879fe3428ed891f4/data.bin +0 -0
  189. package/test/__cache__/GET__api_v1_transcode_high_5_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__a50058c7c3602e90879fe3428ed891f4/metadata.json +21 -0
  190. package/test/__cache__/GET__api_v1_transcode_high_init_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__0798c479b44aaeef850609a430f6e613/data.bin +0 -0
  191. package/test/__cache__/GET__api_v1_transcode_high_init_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__0798c479b44aaeef850609a430f6e613/metadata.json +21 -0
  192. package/test/__cache__/GET__api_v1_transcode_manifest_json_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__3be92a0437de726b431ed5af2369158a/data.bin +1 -0
  193. package/test/__cache__/GET__api_v1_transcode_manifest_json_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__3be92a0437de726b431ed5af2369158a/metadata.json +19 -0
  194. package/test/createJitTestClips.ts +320 -188
  195. package/test/recordReplayProxyPlugin.js +352 -0
  196. package/test/useAssetMSW.ts +1 -1
  197. package/test/useMSW.ts +35 -22
  198. package/types.json +1 -1
  199. package/dist/JitTranscodingClient.d.ts +0 -167
  200. package/dist/JitTranscodingClient.js +0 -373
  201. package/dist/ScrubTrackManager.d.ts +0 -96
  202. package/dist/ScrubTrackManager.js +0 -216
  203. package/dist/elements/printTaskStatus.js +0 -11
  204. package/src/elements/__screenshots__/EFMedia.browsertest.ts/EFMedia-JIT-audio-playback-audioBufferTask-should-work-in-JIT-mode-without-URL-errors-1.png +0 -0
  205. package/test/EFVideo.frame-tasks.browsertest.ts +0 -524
  206. /package/dist/{DecoderResetFrequency.test.d.ts → elements/EFMedia/AssetIdMediaEngine.test.d.ts} +0 -0
  207. /package/dist/{DecoderResetRecovery.test.d.ts → elements/EFMedia/BaseMediaEngine.test.d.ts} +0 -0
  208. /package/dist/{JitTranscodingClient.browsertest.d.ts → elements/EFMedia/BufferedSeekingInput.browsertest.d.ts} +0 -0
  209. /package/dist/{JitTranscodingClient.test.d.ts → elements/EFMedia/shared/RenditionHelpers.browsertest.d.ts} +0 -0
  210. /package/dist/{ScrubTrackIntegration.test.d.ts → elements/EFMedia/tasks/makeMediaEngineTask.browsertest.d.ts} +0 -0
  211. /package/dist/{SegmentSwitchLoading.test.d.ts → elements/EFMedia/tasks/makeMediaEngineTask.test.d.ts} +0 -0
@@ -0,0 +1,28 @@
+ import { getLatestMediaEngine } from "../tasks/makeMediaEngineTask.js";
+ import { Task } from "@lit/task";
+ const makeAudioSegmentFetchTask = (host) => {
+   return new Task(host, {
+     args: () => [host.mediaEngineTask.value, host.audioSegmentIdTask.value],
+     onError: (error) => {
+       console.error("audioSegmentFetchTask error", error);
+     },
+     onComplete: (_value) => {},
+     task: async (_, { signal }) => {
+       const mediaEngine = await getLatestMediaEngine(host, signal);
+       const segmentId = await host.audioSegmentIdTask.taskComplete;
+       if (segmentId === void 0) {
+         const rendition = mediaEngine.audioRendition;
+         const debugInfo = {
+           hasRendition: !!rendition,
+           segmentDurationMs: rendition?.segmentDurationMs,
+           segmentDurationsMs: rendition?.segmentDurationsMs?.length || 0,
+           desiredSeekTimeMs: host.desiredSeekTimeMs,
+           intrinsicDurationMs: host.intrinsicDurationMs
+         };
+         throw new Error(`Segment ID is not available for audio. Debug info: ${JSON.stringify(debugInfo)}`);
+       }
+       return mediaEngine.fetchMediaSegment(segmentId, mediaEngine.getAudioRendition(), signal);
+     }
+   });
+ };
+ export { makeAudioSegmentFetchTask };
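
All of the new per-concern tasks in this release follow the same @lit/task shape seen above: `args` lists the reactive inputs on the host element, `task` recomputes the derived value when they change, and the stale run is cancelled through `signal`. For orientation, a minimal self-contained sketch of that wiring; the class, tag, property names, and fetch URL below are hypothetical, not taken from the package:

// Hypothetical host element showing the @lit/task pattern used by these factories.
import { LitElement, html } from "lit";
import { Task } from "@lit/task";

class DemoSegmentHost extends LitElement {
  static properties = { seekTimeMs: { type: Number } };
  declare seekTimeMs: number;

  constructor() {
    super();
    this.seekTimeMs = 0;
  }

  // Re-runs whenever seekTimeMs changes; the previous run is aborted via `signal`.
  segmentTask = new Task(this, {
    args: () => [this.seekTimeMs] as const,
    task: async ([seekTimeMs], { signal }) => {
      const res = await fetch(`/segments?t=${seekTimeMs}`, { signal }); // hypothetical endpoint
      return res.arrayBuffer();
    },
  });

  render() {
    return this.segmentTask.render({
      pending: () => html`<span>loading</span>`,
      complete: (buf) => html`<span>${buf.byteLength} bytes</span>`,
      error: (e) => html`<span>failed: ${e}</span>`,
    });
  }
}
customElements.define("demo-segment-host", DemoSegmentHost);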
@@ -0,0 +1,4 @@
+ import { Task } from '@lit/task';
+ import { MediaEngine } from '../../../transcoding/types';
+ import { EFMedia } from '../../EFMedia';
+ export declare const makeAudioSegmentIdTask: (host: EFMedia) => Task<readonly [MediaEngine | undefined, number], number | undefined>;
@@ -0,0 +1,17 @@
+ import { getLatestMediaEngine } from "../tasks/makeMediaEngineTask.js";
+ import { Task } from "@lit/task";
+ const makeAudioSegmentIdTask = (host) => {
+   return new Task(host, {
+     args: () => [host.mediaEngineTask.value, host.desiredSeekTimeMs],
+     onError: (error) => {
+       console.error("audioSegmentIdTask error", error);
+     },
+     onComplete: (_value) => {},
+     task: async ([, targetSeekTimeMs], { signal }) => {
+       const mediaEngine = await getLatestMediaEngine(host, signal);
+       signal.throwIfAborted();
+       return mediaEngine.computeSegmentId(targetSeekTimeMs, mediaEngine.getAudioRendition());
+     }
+   });
+ };
+ export { makeAudioSegmentIdTask };
@@ -0,0 +1,3 @@
+ import { Task } from '@lit/task';
+ import { EFMedia } from '../../EFMedia.js';
+ export declare function makeAudioTimeDomainAnalysisTask(element: EFMedia): Task<readonly [import('@lit/task').TaskStatus, number, number, number, number, boolean], Uint8Array | null>;
@@ -0,0 +1,107 @@
+ import { EF_INTERACTIVE } from "../../../EF_INTERACTIVE.js";
+ import { LRUCache } from "../../../utils/LRUCache.js";
+ import { Task } from "@lit/task";
+ const DECAY_WEIGHT = .8;
+ function makeAudioTimeDomainAnalysisTask(element) {
+   const cache = new LRUCache(1e3);
+   return new Task(element, {
+     autoRun: EF_INTERACTIVE,
+     onError: (error) => {
+       console.error("byteTimeDomainTask error", error);
+     },
+     args: () => [
+       element.audioBufferTask.status,
+       element.currentSourceTimeMs,
+       element.fftSize,
+       element.fftDecay,
+       element.fftGain,
+       element.shouldInterpolateFrequencies
+     ],
+     task: async () => {
+       await element.audioBufferTask.taskComplete;
+       if (!element.audioBufferTask.value) return null;
+       if (element.currentSourceTimeMs < 0) return null;
+       const currentTimeMs = element.currentSourceTimeMs;
+       const analysisWindowMs = 5e3;
+       const fromMs = Math.max(0, currentTimeMs);
+       const maxToMs = fromMs + analysisWindowMs;
+       const videoDurationMs = element.intrinsicDurationMs || 0;
+       const toMs = videoDurationMs > 0 ? Math.min(maxToMs, videoDurationMs) : maxToMs;
+       if (fromMs >= toMs) return null;
+       const { fetchAudioSpanningTime: fetchAudioSpan } = await import("../shared/AudioSpanUtils.js");
+       const audioSpan = await fetchAudioSpan(element, fromMs, toMs, new AbortController().signal);
+       if (!audioSpan || !audioSpan.blob) {
+         console.warn("Time domain analysis skipped: no audio data available");
+         return null;
+       }
+       const tempAudioContext = new OfflineAudioContext(2, 48e3, 48e3);
+       const arrayBuffer = await audioSpan.blob.arrayBuffer();
+       const audioBuffer = await tempAudioContext.decodeAudioData(arrayBuffer);
+       const startOffsetMs = audioSpan.startMs;
+       const smoothedKey = `${element.shouldInterpolateFrequencies}:${element.fftSize}:${element.fftDecay}:${element.fftGain}:${startOffsetMs}:${currentTimeMs}`;
+       const cachedData = cache.get(smoothedKey);
+       if (cachedData) return cachedData;
+       const framesData = await Promise.all(Array.from({ length: element.fftDecay }, async (_, frameIndex) => {
+         const frameOffset = frameIndex * (1e3 / 30);
+         const startTime = Math.max(0, (currentTimeMs - frameOffset - startOffsetMs) / 1e3);
+         const cacheKey = `${element.shouldInterpolateFrequencies}:${element.fftSize}:${element.fftGain}:${startOffsetMs}:${startTime}`;
+         const cachedFrame = cache.get(cacheKey);
+         if (cachedFrame) return cachedFrame;
+         let audioContext;
+         try {
+           audioContext = new OfflineAudioContext(2, 48e3 * (1 / 30), 48e3);
+         } catch (error) {
+           throw new Error(`[EFMedia.byteTimeDomainTask] Failed to create OfflineAudioContext(2, ${48e3 * (1 / 30)}, 48000) for frame ${frameIndex} at time ${startTime}s: ${error instanceof Error ? error.message : String(error)}. This is for audio time domain analysis.`);
+         }
+         const source = audioContext.createBufferSource();
+         source.buffer = audioBuffer;
+         const analyser = audioContext.createAnalyser();
+         analyser.fftSize = element.fftSize;
+         analyser.minDecibels = -90;
+         analyser.maxDecibels = -20;
+         const gainNode = audioContext.createGain();
+         gainNode.gain.value = element.fftGain;
+         source.connect(gainNode);
+         gainNode.connect(analyser);
+         analyser.connect(audioContext.destination);
+         source.start(0, startTime, 1 / 30);
+         const dataLength = analyser.fftSize / 2;
+         try {
+           await audioContext.startRendering();
+           const frameData = new Uint8Array(dataLength);
+           analyser.getByteTimeDomainData(frameData);
+           const points = new Uint8Array(dataLength);
+           for (let i = 0; i < dataLength; i++) {
+             const pointSamples = frameData.slice(i * (frameData.length / dataLength), (i + 1) * (frameData.length / dataLength));
+             const rms = Math.sqrt(pointSamples.reduce((sum, sample) => {
+               const normalized = (sample - 128) / 128;
+               return sum + normalized * normalized;
+             }, 0) / pointSamples.length);
+             const avgSign = Math.sign(pointSamples.reduce((sum, sample) => sum + (sample - 128), 0));
+             points[i] = Math.min(255, Math.round(128 + avgSign * rms * 128));
+           }
+           cache.set(cacheKey, points);
+           return points;
+         } finally {
+           source.disconnect();
+           analyser.disconnect();
+         }
+       }));
+       const frameLength = framesData[0]?.length ?? 0;
+       const smoothedData = new Uint8Array(frameLength);
+       for (let i = 0; i < frameLength; i++) {
+         let weightedSum = 0;
+         let weightSum = 0;
+         framesData.forEach((frame, frameIndex) => {
+           const decayWeight = DECAY_WEIGHT ** frameIndex;
+           weightedSum += (frame[i] ?? 0) * decayWeight;
+           weightSum += decayWeight;
+         });
+         smoothedData[i] = Math.min(255, Math.round(weightedSum / weightSum));
+       }
+       cache.set(smoothedKey, smoothedData);
+       return smoothedData;
+     }
+   });
+ }
+ export { makeAudioTimeDomainAnalysisTask };
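
The smoothing loop at the end of this task blends the most recent `fftDecay` frames with exponentially decaying weights (`DECAY_WEIGHT ** frameIndex`, newest frame first). A standalone sketch of just that step, with invented frame data, to make the arithmetic concrete:

// Sketch of the decay-weighted blend used above; the three input frames are made up.
const DECAY_WEIGHT = 0.8;

function blendFrames(framesData: Uint8Array[]): Uint8Array {
  const frameLength = framesData[0]?.length ?? 0;
  const smoothed = new Uint8Array(frameLength);
  for (let i = 0; i < frameLength; i++) {
    let weightedSum = 0;
    let weightSum = 0;
    framesData.forEach((frame, frameIndex) => {
      const w = DECAY_WEIGHT ** frameIndex; // weights 1, 0.8, 0.64, ...
      weightedSum += (frame[i] ?? 0) * w;
      weightSum += w;
    });
    smoothed[i] = Math.min(255, Math.round(weightedSum / weightSum));
  }
  return smoothed;
}

// (128*1 + 160*0.8 + 192*0.64) / (1 + 0.8 + 0.64) ≈ 155
console.log(blendFrames([new Uint8Array([128]), new Uint8Array([160]), new Uint8Array([192])]));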
@@ -0,0 +1,7 @@
+ import { AudioSpan } from '../../../transcoding/types';
+ import { EFMedia } from '../../EFMedia';
+ /**
+  * Fetch audio spanning a time range
+  * Main function that orchestrates segment calculation, fetching, and blob creation
+  */
+ export declare const fetchAudioSpanningTime: (host: EFMedia, fromMs: number, toMs: number, signal: AbortSignal) => Promise<AudioSpan>;
@@ -0,0 +1,54 @@
+ /**
+  * Fetch audio segment data using MediaEngine
+  * Pure function with explicit dependencies
+  */
+ const fetchAudioSegmentData = async (segmentIds, mediaEngine, signal) => {
+   const audioRendition = mediaEngine.audioRendition;
+   if (!audioRendition) throw new Error("Audio rendition not available");
+   const segmentData = /* @__PURE__ */ new Map();
+   const fetchPromises = segmentIds.map(async (segmentId) => {
+     const arrayBuffer = await mediaEngine.fetchMediaSegment(segmentId, audioRendition, signal);
+     return [segmentId, arrayBuffer];
+   });
+   const fetchedSegments = await Promise.all(fetchPromises);
+   signal.throwIfAborted();
+   for (const [segmentId, arrayBuffer] of fetchedSegments) segmentData.set(segmentId, arrayBuffer);
+   return segmentData;
+ };
+ /**
+  * Create audio span blob from init segment and media segments
+  * Pure function for blob creation
+  */
+ const createAudioSpanBlob = (initSegment, mediaSegments) => {
+   const chunks = [initSegment, ...mediaSegments];
+   return new Blob(chunks, { type: "audio/mp4" });
+ };
+ /**
+  * Fetch audio spanning a time range
+  * Main function that orchestrates segment calculation, fetching, and blob creation
+  */
+ const fetchAudioSpanningTime = async (host, fromMs, toMs, signal) => {
+   if (fromMs >= toMs || fromMs < 0) throw new Error(`Invalid time range: fromMs=${fromMs}, toMs=${toMs}`);
+   const mediaEngine = await host.mediaEngineTask.taskComplete;
+   const initSegment = await host.audioInitSegmentFetchTask.taskComplete;
+   if (!mediaEngine?.audioRendition) throw new Error("Audio rendition not available");
+   if (!initSegment) throw new Error("Audio init segment is not available");
+   const segmentRanges = mediaEngine.calculateAudioSegmentRange(fromMs, toMs, mediaEngine.audioRendition, host.intrinsicDurationMs || 1e4);
+   if (segmentRanges.length === 0) throw new Error(`No segments found for time range ${fromMs}-${toMs}ms`);
+   const segmentIds = segmentRanges.map((r) => r.segmentId);
+   const segmentData = await fetchAudioSegmentData(segmentIds, mediaEngine, signal);
+   const orderedSegments = segmentIds.map((id) => {
+     const segment = segmentData.get(id);
+     if (!segment) throw new Error(`Missing segment data for segment ID ${id}`);
+     return segment;
+   });
+   const blob = createAudioSpanBlob(initSegment, orderedSegments);
+   const actualStartMs = Math.min(...segmentRanges.map((r) => r.startMs));
+   const actualEndMs = Math.max(...segmentRanges.map((r) => r.endMs));
+   return {
+     startMs: actualStartMs,
+     endMs: actualEndMs,
+     blob
+   };
+ };
+ export { fetchAudioSpanningTime };
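
A hedged usage sketch of `fetchAudioSpanningTime`, mirroring how the time-domain task above consumes it: request a window, then decode the returned blob with an OfflineAudioContext. The `element` argument stands in for a connected EFMedia host, and the import path is illustrative:

// Sketch only; `element` is assumed to be an EFMedia host with its tasks initialized.
import { fetchAudioSpanningTime } from "./AudioSpanUtils.js";

async function decodeAudioWindow(element: any, fromMs: number, toMs: number): Promise<AudioBuffer> {
  const controller = new AbortController();
  // Resolves to { startMs, endMs, blob } covering at least [fromMs, toMs].
  const span = await fetchAudioSpanningTime(element, fromMs, toMs, controller.signal);
  const ctx = new OfflineAudioContext(2, 48000, 48000);
  return ctx.decodeAudioData(await span.blob.arrayBuffer());
}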
@@ -0,0 +1,70 @@
+ import { AudioRendition, VideoRendition } from '../../../transcoding/types';
+ /**
+  * State interface for media buffering - generic for both audio and video
+  */
+ export interface MediaBufferState {
+   currentSeekTimeMs: number;
+   activeRequests: Set<number>;
+   cachedSegments: Set<number>;
+   requestQueue: number[];
+ }
+ /**
+  * Configuration interface for media buffering - generic for both audio and video
+  */
+ export interface MediaBufferConfig {
+   bufferDurationMs: number;
+   maxParallelFetches: number;
+   enableBuffering: boolean;
+   enableContinuousBuffering?: boolean;
+ }
+ /**
+  * Dependencies interface for media buffering - generic for both audio and video
+  */
+ export interface MediaBufferDependencies<T extends AudioRendition | VideoRendition> {
+   computeSegmentId: (timeMs: number, rendition: T) => Promise<number | undefined>;
+   fetchSegment: (segmentId: number, rendition: T) => Promise<ArrayBuffer>;
+   getRendition: () => Promise<T>;
+   logError: (message: string, error: any) => void;
+ }
+ /**
+  * Compute segment range for a time window
+  * Pure function - determines which segments are needed for a time range
+  */
+ export declare const computeSegmentRange: <T extends AudioRendition | VideoRendition>(startTimeMs: number, endTimeMs: number, rendition: T, computeSegmentId: (timeMs: number, rendition: T) => number | undefined) => number[];
+ /**
+  * Async version of computeSegmentRange for when computeSegmentId is async
+  */
+ export declare const computeSegmentRangeAsync: <T extends AudioRendition | VideoRendition>(startTimeMs: number, endTimeMs: number, durationMs: number, rendition: T, computeSegmentId: (timeMs: number, rendition: T) => Promise<number | undefined>) => Promise<number[]>;
+ /**
+  * Compute buffer queue based on current state and desired segments
+  * Pure function - determines what segments should be fetched
+  */
+ export declare const computeBufferQueue: (desiredSegments: number[], activeRequests: Set<number>, cachedSegments: Set<number>) => number[];
+ /**
+  * Handle seek time change and recompute buffer queue
+  * Pure function - computes new queue when seek time changes
+  */
+ export declare const handleSeekTimeChange: <T extends AudioRendition | VideoRendition>(newSeekTimeMs: number, bufferDurationMs: number, rendition: T, currentState: MediaBufferState, computeSegmentId: (timeMs: number, rendition: T) => number | undefined) => {
+   newQueue: number[];
+   overlappingRequests: number[];
+ };
+ /**
+  * Check if a specific segment is cached in the buffer
+  * Pure function for accessing buffer cache state
+  */
+ export declare const getCachedSegment: (segmentId: number, bufferState: MediaBufferState | undefined) => boolean;
+ /**
+  * Get cached segments from a list of segment IDs
+  * Pure function that returns which segments are available in cache
+  */
+ export declare const getCachedSegments: (segmentIds: number[], bufferState: MediaBufferState | undefined) => Set<number>;
+ /**
+  * Get missing segments from a list of segment IDs
+  * Pure function that returns which segments need to be fetched
+  */
+ export declare const getMissingSegments: (segmentIds: number[], bufferState: MediaBufferState | undefined) => number[];
+ /**
+  * Core media buffering logic with explicit dependencies
+  * Generic implementation that works for both audio and video
+  */
+ export declare const manageMediaBuffer: <T extends AudioRendition | VideoRendition>(seekTimeMs: number, config: MediaBufferConfig, currentState: MediaBufferState, durationMs: number, signal: AbortSignal, deps: MediaBufferDependencies<T>) => Promise<MediaBufferState>;
@@ -0,0 +1,89 @@
+ /**
+  * Async version of computeSegmentRange for when computeSegmentId is async
+  */
+ const computeSegmentRangeAsync = async (startTimeMs, endTimeMs, durationMs, rendition, computeSegmentId) => {
+   const segments = [];
+   const segmentDurationMs = rendition.segmentDurationMs || 1e3;
+   const startSegmentIndex = Math.floor(startTimeMs / segmentDurationMs);
+   const endSegmentIndex = Math.floor(Math.min(endTimeMs, durationMs) / segmentDurationMs);
+   for (let i = startSegmentIndex; i <= endSegmentIndex; i++) {
+     const timeMs = i * segmentDurationMs;
+     if (timeMs < durationMs) {
+       const segmentId = await computeSegmentId(timeMs, rendition);
+       if (segmentId !== void 0) segments.push(segmentId);
+     }
+   }
+   return segments.filter((id, index, arr) => arr.indexOf(id) === index);
+ };
+ /**
+  * Compute buffer queue based on current state and desired segments
+  * Pure function - determines what segments should be fetched
+  */
+ const computeBufferQueue = (desiredSegments, activeRequests, cachedSegments) => {
+   return desiredSegments.filter((segmentId) => !activeRequests.has(segmentId) && !cachedSegments.has(segmentId));
+ };
+ /**
+  * Core media buffering logic with explicit dependencies
+  * Generic implementation that works for both audio and video
+  */
+ const manageMediaBuffer = async (seekTimeMs, config, currentState, durationMs, signal, deps) => {
+   if (!config.enableBuffering) return currentState;
+   const rendition = await deps.getRendition();
+   const endTimeMs = seekTimeMs + config.bufferDurationMs;
+   const desiredSegments = await computeSegmentRangeAsync(seekTimeMs, endTimeMs, durationMs, rendition, deps.computeSegmentId);
+   const newQueue = computeBufferQueue(desiredSegments, currentState.activeRequests, currentState.cachedSegments);
+   const segmentsToFetch = newQueue.slice(0, config.maxParallelFetches);
+   const newActiveRequests = new Set(currentState.activeRequests);
+   const newCachedSegments = new Set(currentState.cachedSegments);
+   const startNextSegment = (remainingQueue) => {
+     if (remainingQueue.length === 0 || signal.aborted) return;
+     const availableSlots = config.maxParallelFetches - newActiveRequests.size;
+     if (availableSlots <= 0) return;
+     const nextSegmentId = remainingQueue[0];
+     if (nextSegmentId === void 0) return;
+     if (newActiveRequests.has(nextSegmentId) || newCachedSegments.has(nextSegmentId)) {
+       startNextSegment(remainingQueue.slice(1));
+       return;
+     }
+     newActiveRequests.add(nextSegmentId);
+     deps.fetchSegment(nextSegmentId, rendition).then(() => {
+       if (signal.aborted) return;
+       newActiveRequests.delete(nextSegmentId);
+       newCachedSegments.add(nextSegmentId);
+       startNextSegment(remainingQueue.slice(1));
+     }).catch((error) => {
+       if (signal.aborted) return;
+       newActiveRequests.delete(nextSegmentId);
+       deps.logError(`Failed to fetch segment ${nextSegmentId}`, error);
+       startNextSegment(remainingQueue.slice(1));
+     });
+   };
+   for (const segmentId of segmentsToFetch) {
+     if (signal.aborted) break;
+     newActiveRequests.add(segmentId);
+     deps.fetchSegment(segmentId, rendition).then(() => {
+       if (signal.aborted) return;
+       newActiveRequests.delete(segmentId);
+       newCachedSegments.add(segmentId);
+       if (config.enableContinuousBuffering ?? true) {
+         const remainingQueue = newQueue.slice(segmentsToFetch.length);
+         startNextSegment(remainingQueue);
+       }
+     }).catch((error) => {
+       if (signal.aborted) return;
+       newActiveRequests.delete(segmentId);
+       deps.logError(`Failed to fetch segment ${segmentId}`, error);
+       if (config.enableContinuousBuffering ?? true) {
+         const remainingQueue = newQueue.slice(segmentsToFetch.length);
+         startNextSegment(remainingQueue);
+       }
+     });
+   }
+   return {
+     currentSeekTimeMs: seekTimeMs,
+     activeRequests: newActiveRequests,
+     cachedSegments: newCachedSegments,
+     requestQueue: newQueue.slice(segmentsToFetch.length)
+   };
+ };
+ export { manageMediaBuffer };
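
`manageMediaBuffer` is meant to be driven in a loop: feed the state it returns back in on the next seek, and supply the segment math and fetching through `deps`, which is exactly what makeVideoBufferTask.js does further down. A minimal sketch with invented stubs and illustrative paths:

// Sketch with invented stubs; real callers wire `deps` to a MediaEngine instance.
import { manageMediaBuffer, type MediaBufferState } from "./BufferUtils.js";

let state: MediaBufferState = {
  currentSeekTimeMs: 0,
  activeRequests: new Set(),
  cachedSegments: new Set(),
  requestQueue: [],
};

async function onSeek(seekTimeMs: number, signal: AbortSignal): Promise<void> {
  state = await manageMediaBuffer(
    seekTimeMs,
    { bufferDurationMs: 5000, maxParallelFetches: 3, enableBuffering: true },
    state,
    60_000, // assumed media duration in ms
    signal,
    {
      computeSegmentId: async (timeMs, r) => Math.floor(timeMs / (r.segmentDurationMs ?? 1000)) + 1,
      fetchSegment: async (segmentId) => (await fetch(`/segments/${segmentId}.m4s`, { signal })).arrayBuffer(),
      getRendition: async () => ({ segmentDurationMs: 1000 }) as any, // stubbed rendition
      logError: console.error,
    },
  );
}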
@@ -0,0 +1,23 @@
+ import { Task } from '@lit/task';
+ import { AudioRendition, MediaEngine, VideoRendition } from '../../../transcoding/types';
+ import { BufferedSeekingInput } from '../BufferedSeekingInput';
+ /**
+  * Generic rendition type that can be either audio or video
+  */
+ export type MediaRendition = AudioRendition | VideoRendition;
+ /**
+  * Generic task type for init segment fetch
+  */
+ export type InitSegmentFetchTask = Task<readonly [MediaEngine | undefined], ArrayBuffer>;
+ /**
+  * Generic task type for segment ID calculation
+  */
+ export type SegmentIdTask = Task<readonly [MediaEngine | undefined, number], number | undefined>;
+ /**
+  * Generic task type for segment fetch
+  */
+ export type SegmentFetchTask = Task<readonly [MediaEngine | undefined, number | undefined], ArrayBuffer>;
+ /**
+  * Generic task type for input creation
+  */
+ export type InputTask = Task<readonly [ArrayBuffer, ArrayBuffer], BufferedSeekingInput>;
@@ -0,0 +1,28 @@
+ /**
+  * Centralized precision utilities for consistent timing calculations across the media pipeline.
+  *
+  * The key insight is that floating-point precision errors can cause inconsistencies between:
+  * 1. Segment selection logic (in AssetMediaEngine.computeSegmentId)
+  * 2. Sample finding logic (in SampleBuffer.find)
+  * 3. Timeline mapping (in BufferedSeekingInput.seek)
+  *
+  * All timing calculations must use the same rounding strategy to ensure consistency.
+  */
+ /**
+  * Round time to millisecond precision to handle floating-point precision issues.
+  * Uses Math.round for consistent behavior across the entire pipeline.
+  *
+  * This function should be used for ALL time-related calculations that need to be
+  * compared between different parts of the system.
+  */
+ export declare const roundToMilliseconds: (timeMs: number) => number;
+ /**
+  * Convert media time (in seconds) to scaled time units using consistent rounding.
+  * This is used in segment selection to convert from milliseconds to timescale units.
+  */
+ export declare const convertToScaledTime: (timeMs: number, timescale: number) => number;
+ /**
+  * Convert scaled time units back to media time (in milliseconds) using consistent rounding.
+  * This is the inverse of convertToScaledTime.
+  */
+ export declare const convertFromScaledTime: (scaledTime: number, timescale: number) => number;
@@ -0,0 +1,29 @@
+ /**
+  * Centralized precision utilities for consistent timing calculations across the media pipeline.
+  *
+  * The key insight is that floating-point precision errors can cause inconsistencies between:
+  * 1. Segment selection logic (in AssetMediaEngine.computeSegmentId)
+  * 2. Sample finding logic (in SampleBuffer.find)
+  * 3. Timeline mapping (in BufferedSeekingInput.seek)
+  *
+  * All timing calculations must use the same rounding strategy to ensure consistency.
+  */
+ /**
+  * Round time to millisecond precision to handle floating-point precision issues.
+  * Uses Math.round for consistent behavior across the entire pipeline.
+  *
+  * This function should be used for ALL time-related calculations that need to be
+  * compared between different parts of the system.
+  */
+ const roundToMilliseconds = (timeMs) => {
+   return Math.round(timeMs * 1e3) / 1e3;
+ };
+ /**
+  * Convert media time (in seconds) to scaled time units using consistent rounding.
+  * This is used in segment selection to convert from milliseconds to timescale units.
+  */
+ const convertToScaledTime = (timeMs, timescale) => {
+   const scaledTime = timeMs / 1e3 * timescale;
+   return Math.round(scaledTime);
+ };
+ export { convertToScaledTime, roundToMilliseconds };
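
To make the shared rounding contract concrete: classic float drift (0.1 + 0.2 seconds) must land on the same 90 kHz tick as an exact 300 ms, otherwise segment selection and sample lookup could disagree by one tick. A small worked example (values invented):

import { convertToScaledTime, roundToMilliseconds } from "./PrecisionUtils.js";

const drifted = (0.1 + 0.2) * 1000; // 300.00000000000006 ms of float drift

console.log(roundToMilliseconds(drifted));        // 300
console.log(convertToScaledTime(drifted, 90000)); // 27000 (300 ms at a 90 kHz timescale)
console.log(convertToScaledTime(300, 90000));     // 27000, so both paths agree on the tick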
@@ -0,0 +1,19 @@
+ import { AudioRendition, MediaEngine, VideoRendition } from '../../../transcoding/types';
+ /**
+  * Get audio rendition from media engine, throwing if not available
+  */
+ export declare const getAudioRendition: (mediaEngine: MediaEngine) => AudioRendition;
+ /**
+  * Get video rendition from media engine, throwing if not available
+  */
+ export declare const getVideoRendition: (mediaEngine: MediaEngine) => VideoRendition;
+ /**
+  * Calculate which segment contains a given timestamp
+  * Returns 1-based segment ID, or undefined if segmentDurationMs is not available
+  */
+ export declare const computeSegmentId: (timeMs: number, rendition: AudioRendition | VideoRendition) => number | undefined;
+ /**
+  * Calculate range of segment IDs that overlap with a time range
+  * Returns array of 1-based segment IDs, or empty array if segmentDurationMs is not available
+  */
+ export declare const calculateSegmentRange: (startTimeMs: number, endTimeMs: number, rendition: AudioRendition | VideoRendition) => number[];
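
The declarations above document a 1-based segment numbering derived from `segmentDurationMs`; the implementation itself is not included in this hunk, so the following is only a sketch of what that documented contract implies, not the shipped code:

// Hypothetical illustration of the documented contract, not the package's implementation.
type RenditionLike = { segmentDurationMs?: number };

const computeSegmentIdSketch = (timeMs: number, rendition: RenditionLike): number | undefined => {
  if (!rendition.segmentDurationMs) return undefined;
  // Segment 1 covers [0, segmentDurationMs), segment 2 the next window, and so on.
  return Math.floor(timeMs / rendition.segmentDurationMs) + 1;
};

console.log(computeSegmentIdSketch(0, { segmentDurationMs: 2000 }));    // 1
console.log(computeSegmentIdSketch(3500, { segmentDurationMs: 2000 })); // 2
console.log(computeSegmentIdSketch(3500, {}));                          // undefined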
@@ -0,0 +1,18 @@
+ import { Task } from '@lit/task';
+ import { MediaEngine, VideoRendition } from '../../../transcoding/types';
+ import { EFMedia } from '../../EFMedia';
+ export declare const getLatestMediaEngine: (host: EFMedia, signal: AbortSignal) => Promise<MediaEngine>;
+ export declare const getVideoRendition: (mediaEngine: MediaEngine) => VideoRendition;
+ /**
+  * Core logic for creating a MediaEngine with explicit dependencies.
+  * Pure function that requires all dependencies to be provided.
+  */
+ export declare const createMediaEngine: (host: EFMedia) => Promise<MediaEngine>;
+ /**
+  * Handle completion of media engine task - triggers necessary updates.
+  * Extracted for testability.
+  */
+ export declare const handleMediaEngineComplete: (host: EFMedia) => void;
+ type MediaEngineTask = Task<readonly [string, string | null], MediaEngine>;
+ export declare const makeMediaEngineTask: (host: EFMedia) => MediaEngineTask;
+ export {};
@@ -0,0 +1,60 @@
+ import { EF_INTERACTIVE } from "../../../EF_INTERACTIVE.js";
+ import { AssetMediaEngine } from "../AssetMediaEngine.js";
+ import { AssetIdMediaEngine } from "../AssetIdMediaEngine.js";
+ import { JitMediaEngine } from "../JitMediaEngine.js";
+ import { Task } from "@lit/task";
+ const getLatestMediaEngine = async (host, signal) => {
+   const mediaEngine = await host.mediaEngineTask.taskComplete;
+   signal.throwIfAborted();
+   if (!mediaEngine) throw new Error("Media engine is not available");
+   return mediaEngine;
+ };
+ const getVideoRendition = (mediaEngine) => {
+   const videoRendition = mediaEngine.videoRendition;
+   if (!videoRendition) throw new Error("Video rendition is not available");
+   return videoRendition;
+ };
+ /**
+  * Core logic for creating a MediaEngine with explicit dependencies.
+  * Pure function that requires all dependencies to be provided.
+  */
+ const createMediaEngine = (host) => {
+   const { src, assetId, urlGenerator, apiHost } = host;
+   if (assetId !== null && assetId !== void 0 && assetId.trim() !== "") {
+     if (!apiHost) return Promise.reject(/* @__PURE__ */ new Error("API host is required for AssetID mode"));
+     return AssetIdMediaEngine.fetchByAssetId(host, urlGenerator, assetId, apiHost);
+   }
+   if (!src || typeof src !== "string" || src.trim() === "") {
+     console.error(`Unsupported media source: ${src}, assetId: ${assetId}`);
+     return Promise.reject(/* @__PURE__ */ new Error("Unsupported media source"));
+   }
+   const lowerSrc = src.toLowerCase();
+   if (lowerSrc.startsWith("http://") || lowerSrc.startsWith("https://")) {
+     const url = urlGenerator.generateManifestUrl(src);
+     return JitMediaEngine.fetch(host, urlGenerator, url);
+   }
+   return AssetMediaEngine.fetch(host, urlGenerator, src);
+ };
+ /**
+  * Handle completion of media engine task - triggers necessary updates.
+  * Extracted for testability.
+  */
+ const handleMediaEngineComplete = (host) => {
+   host.requestUpdate("intrinsicDurationMs");
+   host.requestUpdate("ownCurrentTimeMs");
+   host.rootTimegroup?.requestUpdate("ownCurrentTimeMs");
+   host.rootTimegroup?.requestUpdate("durationMs");
+ };
+ const makeMediaEngineTask = (host) => {
+   return new Task(host, {
+     autoRun: EF_INTERACTIVE,
+     args: () => [host.src, host.assetId],
+     task: async () => {
+       return createMediaEngine(host);
+     },
+     onComplete: (_value) => {
+       handleMediaEngineComplete(host);
+     }
+   });
+ };
+ export { getLatestMediaEngine, getVideoRendition, makeMediaEngineTask };
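
`createMediaEngine` above picks an engine from the host's inputs: a non-empty assetId selects AssetIdMediaEngine and requires apiHost, an http(s) src is turned into a manifest URL for JitMediaEngine, and any other non-empty src falls back to AssetMediaEngine. Restated as a small decision sketch (the sample URLs and asset IDs are invented):

// Mirrors the branching in createMediaEngine; returns the engine name only.
type EngineChoice = "AssetIdMediaEngine" | "JitMediaEngine" | "AssetMediaEngine" | "error";

function chooseEngine(src: string | null, assetId: string | null, apiHost: string | null): EngineChoice {
  if (assetId && assetId.trim() !== "") {
    return apiHost ? "AssetIdMediaEngine" : "error"; // assetId mode requires an API host
  }
  if (!src || src.trim() === "") return "error"; // unsupported media source
  const lower = src.toLowerCase();
  if (lower.startsWith("http://") || lower.startsWith("https://")) return "JitMediaEngine"; // manifest URL via urlGenerator
  return "AssetMediaEngine";
}

console.log(chooseEngine("https://cdn.example.com/clip.mp4", null, null)); // JitMediaEngine
console.log(chooseEngine(null, "asset_123", "https://api.example.com"));   // AssetIdMediaEngine
console.log(chooseEngine("some-local-asset", null, null));                 // AssetMediaEngine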
@@ -0,0 +1,9 @@
+ import { EFVideo } from '../../EFVideo';
+ declare class TestMediaVideoBuffer extends EFVideo {
+ }
+ declare global {
+   interface HTMLElementTagNameMap {
+     "test-media-video-buffer": TestMediaVideoBuffer;
+   }
+ }
+ export {};
@@ -0,0 +1,16 @@
+ import { Task } from '@lit/task';
+ import { EFVideo } from '../../EFVideo';
+ import { MediaBufferConfig, MediaBufferState } from '../shared/BufferUtils';
+ /**
+  * Configuration for video buffering - extends the generic interface
+  */
+ export interface VideoBufferConfig extends MediaBufferConfig {
+ }
+ /**
+  * State of the video buffer - uses the generic interface
+  */
+ export interface VideoBufferState extends MediaBufferState {
+ }
+ type VideoBufferTask = Task<readonly [number], VideoBufferState>;
+ export declare const makeVideoBufferTask: (host: EFVideo) => VideoBufferTask;
+ export {};
@@ -0,0 +1,46 @@
+ import { EF_INTERACTIVE } from "../../../EF_INTERACTIVE.js";
+ import { EF_RENDERING } from "../../../EF_RENDERING.js";
+ import { manageMediaBuffer } from "../shared/BufferUtils.js";
+ import { getLatestMediaEngine, getVideoRendition } from "../tasks/makeMediaEngineTask.js";
+ import { Task } from "@lit/task";
+ const makeVideoBufferTask = (host) => {
+   let currentState = {
+     currentSeekTimeMs: 0,
+     activeRequests: /* @__PURE__ */ new Set(),
+     cachedSegments: /* @__PURE__ */ new Set(),
+     requestQueue: []
+   };
+   return new Task(host, {
+     autoRun: EF_INTERACTIVE,
+     args: () => [host.desiredSeekTimeMs],
+     onError: (error) => {
+       console.error("videoBufferTask error", error);
+     },
+     onComplete: (value) => {
+       currentState = value;
+     },
+     task: async ([seekTimeMs], { signal }) => {
+       const currentConfig = {
+         bufferDurationMs: host.videoBufferDurationMs,
+         maxParallelFetches: host.maxVideoBufferFetches,
+         enableBuffering: host.enableVideoBuffering && !EF_RENDERING
+       };
+       return manageMediaBuffer(seekTimeMs, currentConfig, currentState, host.intrinsicDurationMs || 1e4, signal, {
+         computeSegmentId: async (timeMs, rendition) => {
+           const mediaEngine = await getLatestMediaEngine(host, signal);
+           return mediaEngine.computeSegmentId(timeMs, rendition);
+         },
+         fetchSegment: async (segmentId, rendition) => {
+           const mediaEngine = await getLatestMediaEngine(host, signal);
+           return mediaEngine.fetchMediaSegment(segmentId, rendition);
+         },
+         getRendition: async () => {
+           const mediaEngine = await getLatestMediaEngine(host, signal);
+           return getVideoRendition(mediaEngine);
+         },
+         logError: console.error
+       });
+     }
+   });
+ };
+ export { makeVideoBufferTask };
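
The task reads its budget from three host properties (videoBufferDurationMs, maxVideoBufferFetches, enableVideoBuffering), and buffering is disabled outright in EF_RENDERING builds. A hedged tuning sketch; whether these properties are also reflected as attributes, and the exact element tag name, are assumptions not confirmed by this hunk:

// Sketch only: property names come from the task above; the "ef-video" tag is assumed.
const video = document.querySelector("ef-video") as any;

// Keep roughly 8 s of video buffered ahead of the seek position,
// with at most two segment fetches in flight at a time.
video.videoBufferDurationMs = 8000;
video.maxVideoBufferFetches = 2;
video.enableVideoBuffering = true; // ignored while EF_RENDERING is set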
@@ -0,0 +1,9 @@
+ import { EFVideo } from '../../EFVideo';
+ declare class TestMediaVideoInitSegmentFetch extends EFVideo {
+ }
+ declare global {
+   interface HTMLElementTagNameMap {
+     "test-media-video-init-segment-fetch": TestMediaVideoInitSegmentFetch;
+   }
+ }
+ export {};