@editframe/elements 0.17.6-beta.0 → 0.18.7-beta.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (211)
  1. package/dist/EF_FRAMEGEN.js +1 -1
  2. package/dist/elements/EFAudio.d.ts +21 -2
  3. package/dist/elements/EFAudio.js +41 -11
  4. package/dist/elements/EFImage.d.ts +1 -0
  5. package/dist/elements/EFImage.js +11 -3
  6. package/dist/elements/EFMedia/AssetIdMediaEngine.d.ts +18 -0
  7. package/dist/elements/EFMedia/AssetIdMediaEngine.js +41 -0
  8. package/dist/elements/EFMedia/AssetMediaEngine.browsertest.d.ts +0 -0
  9. package/dist/elements/EFMedia/AssetMediaEngine.d.ts +45 -0
  10. package/dist/elements/EFMedia/AssetMediaEngine.js +135 -0
  11. package/dist/elements/EFMedia/BaseMediaEngine.d.ts +55 -0
  12. package/dist/elements/EFMedia/BaseMediaEngine.js +115 -0
  13. package/dist/elements/EFMedia/BufferedSeekingInput.d.ts +43 -0
  14. package/dist/elements/EFMedia/BufferedSeekingInput.js +179 -0
  15. package/dist/elements/EFMedia/JitMediaEngine.browsertest.d.ts +0 -0
  16. package/dist/elements/EFMedia/JitMediaEngine.d.ts +31 -0
  17. package/dist/elements/EFMedia/JitMediaEngine.js +81 -0
  18. package/dist/elements/EFMedia/audioTasks/makeAudioBufferTask.browsertest.d.ts +9 -0
  19. package/dist/elements/EFMedia/audioTasks/makeAudioBufferTask.d.ts +16 -0
  20. package/dist/elements/EFMedia/audioTasks/makeAudioBufferTask.js +48 -0
  21. package/dist/elements/EFMedia/audioTasks/makeAudioFrequencyAnalysisTask.d.ts +3 -0
  22. package/dist/elements/EFMedia/audioTasks/makeAudioFrequencyAnalysisTask.js +141 -0
  23. package/dist/elements/EFMedia/audioTasks/makeAudioInitSegmentFetchTask.browsertest.d.ts +9 -0
  24. package/dist/elements/EFMedia/audioTasks/makeAudioInitSegmentFetchTask.d.ts +4 -0
  25. package/dist/elements/EFMedia/audioTasks/makeAudioInitSegmentFetchTask.js +16 -0
  26. package/dist/elements/EFMedia/audioTasks/makeAudioInputTask.browsertest.d.ts +9 -0
  27. package/dist/elements/EFMedia/audioTasks/makeAudioInputTask.d.ts +3 -0
  28. package/dist/elements/EFMedia/audioTasks/makeAudioInputTask.js +30 -0
  29. package/dist/elements/EFMedia/audioTasks/makeAudioSeekTask.chunkboundary.regression.browsertest.d.ts +0 -0
  30. package/dist/elements/EFMedia/audioTasks/makeAudioSeekTask.d.ts +7 -0
  31. package/dist/elements/EFMedia/audioTasks/makeAudioSeekTask.js +32 -0
  32. package/dist/elements/EFMedia/audioTasks/makeAudioSegmentFetchTask.d.ts +4 -0
  33. package/dist/elements/EFMedia/audioTasks/makeAudioSegmentFetchTask.js +28 -0
  34. package/dist/elements/EFMedia/audioTasks/makeAudioSegmentIdTask.d.ts +4 -0
  35. package/dist/elements/EFMedia/audioTasks/makeAudioSegmentIdTask.js +17 -0
  36. package/dist/elements/EFMedia/audioTasks/makeAudioTimeDomainAnalysisTask.d.ts +3 -0
  37. package/dist/elements/EFMedia/audioTasks/makeAudioTimeDomainAnalysisTask.js +107 -0
  38. package/dist/elements/EFMedia/shared/AudioSpanUtils.d.ts +7 -0
  39. package/dist/elements/EFMedia/shared/AudioSpanUtils.js +54 -0
  40. package/dist/elements/EFMedia/shared/BufferUtils.d.ts +70 -0
  41. package/dist/elements/EFMedia/shared/BufferUtils.js +89 -0
  42. package/dist/elements/EFMedia/shared/MediaTaskUtils.d.ts +23 -0
  43. package/dist/elements/EFMedia/shared/PrecisionUtils.d.ts +28 -0
  44. package/dist/elements/EFMedia/shared/PrecisionUtils.js +29 -0
  45. package/dist/elements/EFMedia/shared/RenditionHelpers.d.ts +19 -0
  46. package/dist/elements/EFMedia/tasks/makeMediaEngineTask.d.ts +18 -0
  47. package/dist/elements/EFMedia/tasks/makeMediaEngineTask.js +60 -0
  48. package/dist/elements/EFMedia/videoTasks/makeVideoBufferTask.browsertest.d.ts +9 -0
  49. package/dist/elements/EFMedia/videoTasks/makeVideoBufferTask.d.ts +16 -0
  50. package/dist/elements/EFMedia/videoTasks/makeVideoBufferTask.js +46 -0
  51. package/dist/elements/EFMedia/videoTasks/makeVideoInitSegmentFetchTask.browsertest.d.ts +9 -0
  52. package/dist/elements/EFMedia/videoTasks/makeVideoInitSegmentFetchTask.d.ts +4 -0
  53. package/dist/elements/EFMedia/videoTasks/makeVideoInitSegmentFetchTask.js +16 -0
  54. package/dist/elements/EFMedia/videoTasks/makeVideoInputTask.browsertest.d.ts +9 -0
  55. package/dist/elements/EFMedia/videoTasks/makeVideoInputTask.d.ts +3 -0
  56. package/dist/elements/EFMedia/videoTasks/makeVideoInputTask.js +27 -0
  57. package/dist/elements/EFMedia/videoTasks/makeVideoSeekTask.d.ts +7 -0
  58. package/dist/elements/EFMedia/videoTasks/makeVideoSeekTask.js +34 -0
  59. package/dist/elements/EFMedia/videoTasks/makeVideoSegmentFetchTask.browsertest.d.ts +9 -0
  60. package/dist/elements/EFMedia/videoTasks/makeVideoSegmentFetchTask.d.ts +4 -0
  61. package/dist/elements/EFMedia/videoTasks/makeVideoSegmentFetchTask.js +28 -0
  62. package/dist/elements/EFMedia/videoTasks/makeVideoSegmentIdTask.browsertest.d.ts +9 -0
  63. package/dist/elements/EFMedia/videoTasks/makeVideoSegmentIdTask.d.ts +4 -0
  64. package/dist/elements/EFMedia/videoTasks/makeVideoSegmentIdTask.js +17 -0
  65. package/dist/elements/EFMedia.browsertest.d.ts +1 -0
  66. package/dist/elements/EFMedia.d.ts +63 -111
  67. package/dist/elements/EFMedia.js +117 -1113
  68. package/dist/elements/EFTemporal.d.ts +1 -1
  69. package/dist/elements/EFTemporal.js +1 -1
  70. package/dist/elements/EFTimegroup.d.ts +11 -0
  71. package/dist/elements/EFTimegroup.js +83 -13
  72. package/dist/elements/EFVideo.d.ts +54 -32
  73. package/dist/elements/EFVideo.js +100 -207
  74. package/dist/elements/EFWaveform.js +2 -2
  75. package/dist/elements/SampleBuffer.d.ts +14 -0
  76. package/dist/elements/SampleBuffer.js +52 -0
  77. package/dist/getRenderInfo.js +2 -1
  78. package/dist/gui/ContextMixin.js +3 -2
  79. package/dist/gui/EFFilmstrip.d.ts +3 -3
  80. package/dist/gui/EFFilmstrip.js +1 -1
  81. package/dist/gui/EFFitScale.d.ts +2 -2
  82. package/dist/gui/TWMixin.js +1 -1
  83. package/dist/style.css +1 -1
  84. package/dist/transcoding/cache/CacheManager.d.ts +73 -0
  85. package/dist/transcoding/cache/RequestDeduplicator.d.ts +29 -0
  86. package/dist/transcoding/cache/RequestDeduplicator.js +53 -0
  87. package/dist/transcoding/cache/RequestDeduplicator.test.d.ts +1 -0
  88. package/dist/transcoding/types/index.d.ts +242 -0
  89. package/dist/transcoding/utils/MediaUtils.d.ts +9 -0
  90. package/dist/transcoding/utils/UrlGenerator.d.ts +26 -0
  91. package/dist/transcoding/utils/UrlGenerator.js +45 -0
  92. package/dist/transcoding/utils/constants.d.ts +27 -0
  93. package/dist/utils/LRUCache.d.ts +34 -0
  94. package/dist/utils/LRUCache.js +115 -0
  95. package/package.json +3 -3
  96. package/src/elements/EFAudio.browsertest.ts +189 -49
  97. package/src/elements/EFAudio.ts +59 -13
  98. package/src/elements/EFImage.browsertest.ts +42 -0
  99. package/src/elements/EFImage.ts +23 -3
  100. package/src/elements/EFMedia/AssetIdMediaEngine.test.ts +222 -0
  101. package/src/elements/EFMedia/AssetIdMediaEngine.ts +70 -0
  102. package/src/elements/EFMedia/AssetMediaEngine.browsertest.ts +100 -0
  103. package/src/elements/EFMedia/AssetMediaEngine.ts +255 -0
  104. package/src/elements/EFMedia/BaseMediaEngine.test.ts +164 -0
  105. package/src/elements/EFMedia/BaseMediaEngine.ts +219 -0
  106. package/src/elements/EFMedia/BufferedSeekingInput.browsertest.ts +481 -0
  107. package/src/elements/EFMedia/BufferedSeekingInput.ts +324 -0
  108. package/src/elements/EFMedia/JitMediaEngine.browsertest.ts +165 -0
  109. package/src/elements/EFMedia/JitMediaEngine.ts +166 -0
  110. package/src/elements/EFMedia/audioTasks/makeAudioBufferTask.browsertest.ts +554 -0
  111. package/src/elements/EFMedia/audioTasks/makeAudioBufferTask.ts +81 -0
  112. package/src/elements/EFMedia/audioTasks/makeAudioFrequencyAnalysisTask.ts +250 -0
  113. package/src/elements/EFMedia/audioTasks/makeAudioInitSegmentFetchTask.browsertest.ts +59 -0
  114. package/src/elements/EFMedia/audioTasks/makeAudioInitSegmentFetchTask.ts +23 -0
  115. package/src/elements/EFMedia/audioTasks/makeAudioInputTask.browsertest.ts +55 -0
  116. package/src/elements/EFMedia/audioTasks/makeAudioInputTask.ts +43 -0
  117. package/src/elements/EFMedia/audioTasks/makeAudioSeekTask.chunkboundary.regression.browsertest.ts +199 -0
  118. package/src/elements/EFMedia/audioTasks/makeAudioSeekTask.ts +64 -0
  119. package/src/elements/EFMedia/audioTasks/makeAudioSegmentFetchTask.ts +45 -0
  120. package/src/elements/EFMedia/audioTasks/makeAudioSegmentIdTask.ts +24 -0
  121. package/src/elements/EFMedia/audioTasks/makeAudioTimeDomainAnalysisTask.ts +183 -0
  122. package/src/elements/EFMedia/shared/AudioSpanUtils.ts +128 -0
  123. package/src/elements/EFMedia/shared/BufferUtils.ts +310 -0
  124. package/src/elements/EFMedia/shared/MediaTaskUtils.ts +44 -0
  125. package/src/elements/EFMedia/shared/PrecisionUtils.ts +46 -0
  126. package/src/elements/EFMedia/shared/RenditionHelpers.browsertest.ts +247 -0
  127. package/src/elements/EFMedia/shared/RenditionHelpers.ts +79 -0
  128. package/src/elements/EFMedia/tasks/makeMediaEngineTask.browsertest.ts +128 -0
  129. package/src/elements/EFMedia/tasks/makeMediaEngineTask.test.ts +233 -0
  130. package/src/elements/EFMedia/tasks/makeMediaEngineTask.ts +89 -0
  131. package/src/elements/EFMedia/videoTasks/makeVideoBufferTask.browsertest.ts +555 -0
  132. package/src/elements/EFMedia/videoTasks/makeVideoBufferTask.ts +79 -0
  133. package/src/elements/EFMedia/videoTasks/makeVideoInitSegmentFetchTask.browsertest.ts +59 -0
  134. package/src/elements/EFMedia/videoTasks/makeVideoInitSegmentFetchTask.ts +23 -0
  135. package/src/elements/EFMedia/videoTasks/makeVideoInputTask.browsertest.ts +55 -0
  136. package/src/elements/EFMedia/videoTasks/makeVideoInputTask.ts +45 -0
  137. package/src/elements/EFMedia/videoTasks/makeVideoSeekTask.ts +68 -0
  138. package/src/elements/EFMedia/videoTasks/makeVideoSegmentFetchTask.browsertest.ts +57 -0
  139. package/src/elements/EFMedia/videoTasks/makeVideoSegmentFetchTask.ts +43 -0
  140. package/src/elements/EFMedia/videoTasks/makeVideoSegmentIdTask.browsertest.ts +56 -0
  141. package/src/elements/EFMedia/videoTasks/makeVideoSegmentIdTask.ts +24 -0
  142. package/src/elements/EFMedia.browsertest.ts +706 -273
  143. package/src/elements/EFMedia.ts +136 -1769
  144. package/src/elements/EFTemporal.ts +3 -4
  145. package/src/elements/EFTimegroup.browsertest.ts +6 -3
  146. package/src/elements/EFTimegroup.ts +147 -21
  147. package/src/elements/EFVideo.browsertest.ts +980 -169
  148. package/src/elements/EFVideo.ts +113 -458
  149. package/src/elements/EFWaveform.ts +1 -1
  150. package/src/elements/MediaController.ts +2 -12
  151. package/src/elements/SampleBuffer.ts +95 -0
  152. package/src/gui/ContextMixin.ts +3 -6
  153. package/src/transcoding/cache/CacheManager.ts +208 -0
  154. package/src/transcoding/cache/RequestDeduplicator.test.ts +170 -0
  155. package/src/transcoding/cache/RequestDeduplicator.ts +65 -0
  156. package/src/transcoding/types/index.ts +269 -0
  157. package/src/transcoding/utils/MediaUtils.ts +63 -0
  158. package/src/transcoding/utils/UrlGenerator.ts +68 -0
  159. package/src/transcoding/utils/constants.ts +36 -0
  160. package/src/utils/LRUCache.ts +153 -0
  161. package/test/EFVideo.framegen.browsertest.ts +39 -30
  162. package/test/__cache__/GET__api_v1_transcode_audio_1_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__32da3954ba60c96ad732020c65a08ebc/data.bin +0 -0
  163. package/test/__cache__/GET__api_v1_transcode_audio_1_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__32da3954ba60c96ad732020c65a08ebc/metadata.json +21 -0
  164. package/test/__cache__/GET__api_v1_transcode_audio_1_mp4_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4_bytes_0__9ed2d25c675aa6bb6ff5b3ae23887c71/data.bin +0 -0
  165. package/test/__cache__/GET__api_v1_transcode_audio_1_mp4_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4_bytes_0__9ed2d25c675aa6bb6ff5b3ae23887c71/metadata.json +22 -0
  166. package/test/__cache__/GET__api_v1_transcode_audio_2_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__b0b2b07efcf607de8ee0f650328c32f7/data.bin +0 -0
  167. package/test/__cache__/GET__api_v1_transcode_audio_2_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__b0b2b07efcf607de8ee0f650328c32f7/metadata.json +21 -0
  168. package/test/__cache__/GET__api_v1_transcode_audio_2_mp4_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4_bytes_0__d5a3309a2bf756dd6e304807eb402f56/data.bin +0 -0
  169. package/test/__cache__/GET__api_v1_transcode_audio_2_mp4_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4_bytes_0__d5a3309a2bf756dd6e304807eb402f56/metadata.json +22 -0
  170. package/test/__cache__/GET__api_v1_transcode_audio_3_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__a75c2252b542e0c152c780e9a8d7b154/data.bin +0 -0
  171. package/test/__cache__/GET__api_v1_transcode_audio_3_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__a75c2252b542e0c152c780e9a8d7b154/metadata.json +21 -0
  172. package/test/__cache__/GET__api_v1_transcode_audio_3_mp4_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4_bytes_0__773254bb671e3466fca8677139fb239e/data.bin +0 -0
  173. package/test/__cache__/GET__api_v1_transcode_audio_3_mp4_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4_bytes_0__773254bb671e3466fca8677139fb239e/metadata.json +22 -0
  174. package/test/__cache__/GET__api_v1_transcode_audio_4_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__a64ff1cfb1b52cae14df4b5dfa1e222b/data.bin +0 -0
  175. package/test/__cache__/GET__api_v1_transcode_audio_4_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__a64ff1cfb1b52cae14df4b5dfa1e222b/metadata.json +21 -0
  176. package/test/__cache__/GET__api_v1_transcode_audio_5_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__91e8a522f950809b9f09f4173113b4b0/data.bin +0 -0
  177. package/test/__cache__/GET__api_v1_transcode_audio_5_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__91e8a522f950809b9f09f4173113b4b0/metadata.json +21 -0
  178. package/test/__cache__/GET__api_v1_transcode_audio_init_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__e66d2c831d951e74ad0aeaa6489795d0/data.bin +0 -0
  179. package/test/__cache__/GET__api_v1_transcode_audio_init_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__e66d2c831d951e74ad0aeaa6489795d0/metadata.json +21 -0
  180. package/test/__cache__/GET__api_v1_transcode_high_1_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__26197f6f7c46cacb0a71134131c3f775/data.bin +0 -0
  181. package/test/__cache__/GET__api_v1_transcode_high_1_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__26197f6f7c46cacb0a71134131c3f775/metadata.json +21 -0
  182. package/test/__cache__/GET__api_v1_transcode_high_2_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__4cb6774cd3650ccf59c8f8dc6678c0b9/data.bin +0 -0
  183. package/test/__cache__/GET__api_v1_transcode_high_2_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__4cb6774cd3650ccf59c8f8dc6678c0b9/metadata.json +21 -0
  184. package/test/__cache__/GET__api_v1_transcode_high_3_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__0b3b2b1c8933f7fcf8a9ecaa88d58b41/data.bin +0 -0
  185. package/test/__cache__/GET__api_v1_transcode_high_3_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__0b3b2b1c8933f7fcf8a9ecaa88d58b41/metadata.json +21 -0
  186. package/test/__cache__/GET__api_v1_transcode_high_4_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__a6fb05a22b18d850f7f2950bbcdbdeed/data.bin +0 -0
  187. package/test/__cache__/GET__api_v1_transcode_high_4_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__a6fb05a22b18d850f7f2950bbcdbdeed/metadata.json +21 -0
  188. package/test/__cache__/GET__api_v1_transcode_high_5_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__a50058c7c3602e90879fe3428ed891f4/data.bin +0 -0
  189. package/test/__cache__/GET__api_v1_transcode_high_5_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__a50058c7c3602e90879fe3428ed891f4/metadata.json +21 -0
  190. package/test/__cache__/GET__api_v1_transcode_high_init_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__0798c479b44aaeef850609a430f6e613/data.bin +0 -0
  191. package/test/__cache__/GET__api_v1_transcode_high_init_m4s_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__0798c479b44aaeef850609a430f6e613/metadata.json +21 -0
  192. package/test/__cache__/GET__api_v1_transcode_manifest_json_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__3be92a0437de726b431ed5af2369158a/data.bin +1 -0
  193. package/test/__cache__/GET__api_v1_transcode_manifest_json_url_http_3A_2F_2Fweb_3A3000_2Fhead_moov_480p_mp4__3be92a0437de726b431ed5af2369158a/metadata.json +19 -0
  194. package/test/createJitTestClips.ts +320 -188
  195. package/test/recordReplayProxyPlugin.js +352 -0
  196. package/test/useAssetMSW.ts +1 -1
  197. package/test/useMSW.ts +35 -22
  198. package/types.json +1 -1
  199. package/dist/JitTranscodingClient.d.ts +0 -167
  200. package/dist/JitTranscodingClient.js +0 -373
  201. package/dist/ScrubTrackManager.d.ts +0 -96
  202. package/dist/ScrubTrackManager.js +0 -216
  203. package/dist/elements/printTaskStatus.js +0 -11
  204. package/src/elements/__screenshots__/EFMedia.browsertest.ts/EFMedia-JIT-audio-playback-audioBufferTask-should-work-in-JIT-mode-without-URL-errors-1.png +0 -0
  205. package/test/EFVideo.frame-tasks.browsertest.ts +0 -524
  206. /package/dist/{DecoderResetFrequency.test.d.ts → elements/EFMedia/AssetIdMediaEngine.test.d.ts} +0 -0
  207. /package/dist/{DecoderResetRecovery.test.d.ts → elements/EFMedia/BaseMediaEngine.test.d.ts} +0 -0
  208. /package/dist/{JitTranscodingClient.browsertest.d.ts → elements/EFMedia/BufferedSeekingInput.browsertest.d.ts} +0 -0
  209. /package/dist/{JitTranscodingClient.test.d.ts → elements/EFMedia/shared/RenditionHelpers.browsertest.d.ts} +0 -0
  210. /package/dist/{ScrubTrackIntegration.test.d.ts → elements/EFMedia/tasks/makeMediaEngineTask.browsertest.d.ts} +0 -0
  211. /package/dist/{SegmentSwitchLoading.test.d.ts → elements/EFMedia/tasks/makeMediaEngineTask.test.d.ts} +0 -0
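Most of the churn above comes from one refactor: the inline `@lit/task` definitions that the diff below shows being deleted from `dist/elements/EFMedia.js` (the JIT client and metadata loaders, segment/seek tasks, FFT analysis tasks, and a private `LRUCache`) now live in dedicated modules under `elements/EFMedia/` (`makeMediaEngineTask`, `makeAudioBufferTask`, `makeVideoSeekTask`, and so on), with the cache extracted to `utils/LRUCache`. The new constructor simply wires those factories to the element (`this.audioBufferTask = makeAudioBufferTask(this)`). The factory implementations themselves are not part of this excerpt, so the following is only a minimal sketch of the shape that wiring implies — the `EFMediaHost` interface and the task body are assumptions, not the package's actual code.

```ts
// Hypothetical sketch only: the real factories live in
// src/elements/EFMedia/audioTasks/* and are not shown in this diff.
import { Task } from "@lit/task";
import type { ReactiveControllerHost } from "lit";

// Assumed host surface; the real EFMedia element exposes far more than this.
interface EFMediaHost extends ReactiveControllerHost {
  defaultAudioTrackId?: number;
  desiredSeekTimeMs: number;
}

// Shape implied by `this.audioSegmentIdTask = makeAudioSegmentIdTask(this)`
// in the new constructor: each factory returns a @lit/task Task bound to the host.
export function makeAudioSegmentIdTask(host: EFMediaHost) {
  return new Task(host, {
    args: () => [host.defaultAudioTrackId, host.desiredSeekTimeMs] as const,
    task: async ([trackId, seekMs], { signal }) => {
      if (trackId === undefined) return null;
      void signal;
      // Placeholder: the real task presumably maps seekMs to a segment id for the track.
      return { trackId, seekMs };
    },
    onError: (error) => console.error("audioSegmentIdTask error", error),
  });
}
```

Having each factory return a ready-made `Task` keeps the element constructor declarative and lets the audio/video task graphs be exercised in isolation, which is consistent with the new `*.test.ts` and `*.browsertest.ts` files listed above.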
@@ -1,621 +1,73 @@
1
- import { EF_INTERACTIVE } from "../EF_INTERACTIVE.js";
2
- import { JitTranscodingClient } from "../JitTranscodingClient.js";
1
+ import { UrlGenerator } from "../transcoding/utils/UrlGenerator.js";
2
+ import { makeMediaEngineTask } from "./EFMedia/tasks/makeMediaEngineTask.js";
3
+ import { makeAudioBufferTask } from "./EFMedia/audioTasks/makeAudioBufferTask.js";
4
+ import { makeAudioFrequencyAnalysisTask } from "./EFMedia/audioTasks/makeAudioFrequencyAnalysisTask.js";
5
+ import { makeAudioInitSegmentFetchTask } from "./EFMedia/audioTasks/makeAudioInitSegmentFetchTask.js";
6
+ import { makeAudioInputTask } from "./EFMedia/audioTasks/makeAudioInputTask.js";
7
+ import { makeAudioSeekTask } from "./EFMedia/audioTasks/makeAudioSeekTask.js";
8
+ import { makeAudioSegmentFetchTask } from "./EFMedia/audioTasks/makeAudioSegmentFetchTask.js";
9
+ import { makeAudioSegmentIdTask } from "./EFMedia/audioTasks/makeAudioSegmentIdTask.js";
10
+ import { makeAudioTimeDomainAnalysisTask } from "./EFMedia/audioTasks/makeAudioTimeDomainAnalysisTask.js";
11
+ import { fetchAudioSpanningTime } from "./EFMedia/shared/AudioSpanUtils.js";
3
12
  import { EFSourceMixin } from "./EFSourceMixin.js";
4
13
  import { EFTemporal } from "./EFTemporal.js";
5
14
  import { FetchMixin } from "./FetchMixin.js";
6
15
  import { EFTargetable } from "./TargetController.js";
7
16
  import { updateAnimations } from "./updateAnimations.js";
8
- import { Task } from "@lit/task";
9
- import debug from "debug";
10
17
  import { LitElement, css } from "lit";
11
18
  import { property, state } from "lit/decorators.js";
12
19
  import _decorate from "@oxc-project/runtime/helpers/decorate";
13
- import { VideoAsset } from "@editframe/assets/EncodedAsset.js";
14
- import { MP4File } from "@editframe/assets/MP4File.js";
15
- import { deepArrayEquals } from "@lit/task/deep-equals.js";
16
- const log = debug("ef:elements:EFMedia");
17
20
  const freqWeightsCache = /* @__PURE__ */ new Map();
18
- var LRUCache = class {
19
- constructor(maxSize) {
20
- this.cache = /* @__PURE__ */ new Map();
21
- this.maxSize = maxSize;
22
- }
23
- get(key) {
24
- const value = this.cache.get(key);
25
- if (value) {
26
- this.cache.delete(key);
27
- this.cache.set(key, value);
28
- }
29
- return value;
30
- }
31
- set(key, value) {
32
- if (this.cache.has(key)) this.cache.delete(key);
33
- else if (this.cache.size >= this.maxSize) {
34
- const firstKey = this.cache.keys().next().value;
35
- if (firstKey) this.cache.delete(firstKey);
36
- }
37
- this.cache.set(key, value);
38
- }
39
- };
21
+ var IgnorableError = class extends Error {};
40
22
  const deepGetMediaElements = (element, medias = []) => {
41
23
  for (const child of Array.from(element.children)) if (child instanceof EFMedia) medias.push(child);
42
24
  else deepGetMediaElements(child, medias);
43
25
  return medias;
44
26
  };
45
- var EFMedia = class EFMedia extends EFTargetable(EFSourceMixin(EFTemporal(FetchMixin(LitElement)), { assetType: "isobmff_files" })) {
27
+ var EFMedia = class extends EFTargetable(EFSourceMixin(EFTemporal(FetchMixin(LitElement)), { assetType: "isobmff_files" })) {
46
28
  constructor(..._args) {
47
29
  super(..._args);
48
30
  this.currentTimeMs = 0;
49
- this._mode = "auto";
50
- this.prefetchSegments = 3;
51
- this.cacheSize = 20;
52
- this.enablePrefetch = true;
53
- this.jitLoadingState = "idle";
54
- this.jitErrorMessage = null;
55
- this.jitCacheStats = null;
56
- this.jitClientTask = new Task(this, {
57
- autoRun: EF_INTERACTIVE,
58
- onError: (error) => {
59
- console.error("jitClientTask error", error);
60
- },
61
- args: () => [
62
- this.apiHost,
63
- this.cacheSize,
64
- this.enablePrefetch,
65
- this.prefetchSegments
66
- ],
67
- task: ([apiHost, cacheSize, enablePrefetch, prefetchSegments]) => {
68
- const baseUrl = apiHost && apiHost !== "https://editframe.dev" ? apiHost : "http://localhost:3000";
69
- return new JitTranscodingClient({
70
- baseUrl,
71
- segmentCacheSize: cacheSize,
72
- enableNetworkAdaptation: enablePrefetch,
73
- enablePrefetch,
74
- prefetchSegments
75
- });
76
- }
77
- });
78
- this.jitMetadataLoader = new Task(this, {
79
- autoRun: EF_INTERACTIVE,
80
- onError: (error) => {
81
- console.error("jitMetadataLoader error", error);
82
- },
83
- args: () => [this.src, this.jitClientTask.value],
84
- task: async ([src, _jitClient], { signal: _signal }) => {
85
- if (this.effectiveMode !== "jit-transcode") return null;
86
- await this.jitClientTask.taskComplete;
87
- const jitClient = this.jitClientTask.value;
88
- if (!src || !jitClient) return null;
89
- try {
90
- this.jitLoadingState = "metadata";
91
- this.jitErrorMessage = null;
92
- const metadata = await jitClient.loadVideoMetadata(src);
93
- this.jitLoadingState = "idle";
94
- return metadata;
95
- } catch (error) {
96
- this.jitLoadingState = "error";
97
- this.jitErrorMessage = error instanceof Error ? error.message : "Failed to load video metadata";
98
- log("Failed to load JIT metadata:", error);
99
- return null;
100
- }
101
- },
102
- onComplete: () => {
103
- if (this.jitLoadingState === "metadata") this.jitLoadingState = "idle";
104
- this.requestUpdate("intrinsicDurationMs");
105
- this.requestUpdate("ownCurrentTimeMs");
106
- this.rootTimegroup?.requestUpdate("ownCurrentTimeMs");
107
- this.rootTimegroup?.requestUpdate("durationMs");
108
- }
109
- });
110
- this.audioBufferTask = new Task(this, {
111
- autoRun: EF_INTERACTIVE,
112
- onError: (error) => {
113
- console.error("audioBufferTask error", error);
114
- },
115
- args: () => [this.mediaSegmentsTask.value, this.seekTask.value],
116
- task: async ([files, segments], { signal: _signal }) => {
117
- if (!files || !segments) return;
118
- if (!this.defaultAudioTrackId) return;
119
- const segment = segments[this.defaultAudioTrackId];
120
- if (!segment) return;
121
- const audioFile = files[this.defaultAudioTrackId];
122
- if (!audioFile) return;
123
- return {
124
- buffer: await this.#audioContext.decodeAudioData(await audioFile.arrayBuffer()),
125
- startOffsetMs: segment.segment.cts / segment.track.timescale * 1e3
126
- };
127
- }
128
- });
129
- this.byteTimeDomainTask = new Task(this, {
130
- autoRun: EF_INTERACTIVE,
131
- onError: (error) => {
132
- console.error("byteTimeDomainTask error", error);
133
- },
134
- args: () => [
135
- this.audioBufferTask.status,
136
- this.currentSourceTimeMs,
137
- this.fftSize,
138
- this.fftDecay,
139
- this.fftGain,
140
- this.shouldInterpolateFrequencies
141
- ],
142
- task: async () => {
143
- await this.audioBufferTask.taskComplete;
144
- if (!this.audioBufferTask.value) return null;
145
- if (this.currentSourceTimeMs < 0) return null;
146
- const currentTimeMs = this.currentSourceTimeMs;
147
- const startOffsetMs = this.audioBufferTask.value.startOffsetMs;
148
- const audioBuffer = this.audioBufferTask.value.buffer;
149
- const smoothedKey = `${this.shouldInterpolateFrequencies}:${this.fftSize}:${this.fftDecay}:${this.fftGain}:${startOffsetMs}:${currentTimeMs}`;
150
- const cachedData = this.#byteTimeDomainCache.get(smoothedKey);
151
- if (cachedData) return cachedData;
152
- const framesData = await Promise.all(Array.from({ length: this.fftDecay }, async (_, frameIndex) => {
153
- const frameOffset = frameIndex * (1e3 / 30);
154
- const startTime = Math.max(0, (currentTimeMs - frameOffset - startOffsetMs) / 1e3);
155
- const cacheKey = `${this.shouldInterpolateFrequencies}:${this.fftSize}:${this.fftGain}:${startOffsetMs}:${startTime}`;
156
- const cachedFrame = this.#byteTimeDomainCache.get(cacheKey);
157
- if (cachedFrame) return cachedFrame;
158
- let audioContext;
159
- try {
160
- audioContext = new OfflineAudioContext(2, 48e3 * (1 / 30), 48e3);
161
- } catch (error) {
162
- throw new Error(`[EFMedia.byteTimeDomainTask] Failed to create OfflineAudioContext(2, ${48e3 * (1 / 30)}, 48000) for frame ${frameIndex} at time ${startTime}s: ${error instanceof Error ? error.message : String(error)}. This is for audio time domain analysis.`);
163
- }
164
- const source = audioContext.createBufferSource();
165
- source.buffer = audioBuffer;
166
- const analyser = audioContext.createAnalyser();
167
- analyser.fftSize = this.fftSize;
168
- analyser.minDecibels = -90;
169
- analyser.maxDecibels = -20;
170
- const gainNode = audioContext.createGain();
171
- gainNode.gain.value = this.fftGain;
172
- source.connect(gainNode);
173
- gainNode.connect(analyser);
174
- analyser.connect(audioContext.destination);
175
- source.start(0, startTime, 1 / 30);
176
- const dataLength = analyser.fftSize / 2;
177
- try {
178
- await audioContext.startRendering();
179
- const frameData = new Uint8Array(dataLength);
180
- analyser.getByteTimeDomainData(frameData);
181
- const points = new Uint8Array(dataLength);
182
- for (let i = 0; i < dataLength; i++) {
183
- const pointSamples = frameData.slice(i * (frameData.length / dataLength), (i + 1) * (frameData.length / dataLength));
184
- const rms = Math.sqrt(pointSamples.reduce((sum, sample) => {
185
- const normalized = (sample - 128) / 128;
186
- return sum + normalized * normalized;
187
- }, 0) / pointSamples.length);
188
- const avgSign = Math.sign(pointSamples.reduce((sum, sample) => sum + (sample - 128), 0));
189
- points[i] = Math.min(255, Math.round(128 + avgSign * rms * 128));
190
- }
191
- this.#byteTimeDomainCache.set(cacheKey, points);
192
- return points;
193
- } finally {
194
- source.disconnect();
195
- analyser.disconnect();
196
- }
197
- }));
198
- const frameLength = framesData[0]?.length ?? 0;
199
- const smoothedData = new Uint8Array(frameLength);
200
- for (let i = 0; i < frameLength; i++) {
201
- let weightedSum = 0;
202
- let weightSum = 0;
203
- framesData.forEach((frame, frameIndex) => {
204
- const decayWeight = EFMedia.DECAY_WEIGHT ** frameIndex;
205
- weightedSum += (frame[i] ?? 0) * decayWeight;
206
- weightSum += decayWeight;
207
- });
208
- smoothedData[i] = Math.min(255, Math.round(weightedSum / weightSum));
209
- }
210
- this.#byteTimeDomainCache.set(smoothedKey, smoothedData);
211
- return smoothedData;
212
- }
213
- });
214
- this.frequencyDataTask = new Task(this, {
215
- autoRun: EF_INTERACTIVE,
216
- onError: (error) => {
217
- console.error("frequencyDataTask error", error);
218
- },
219
- args: () => [
220
- this.audioBufferTask.status,
221
- this.currentSourceTimeMs,
222
- this.fftSize,
223
- this.fftDecay,
224
- this.fftGain,
225
- this.shouldInterpolateFrequencies
226
- ],
227
- task: async () => {
228
- await this.audioBufferTask.taskComplete;
229
- if (!this.audioBufferTask.value) return null;
230
- if (this.currentSourceTimeMs < 0) return null;
231
- const currentTimeMs = this.currentSourceTimeMs;
232
- const startOffsetMs = this.audioBufferTask.value.startOffsetMs;
233
- const audioBuffer = this.audioBufferTask.value.buffer;
234
- const smoothedKey = `${this.shouldInterpolateFrequencies}:${this.fftSize}:${this.fftDecay}:${this.fftGain}:${startOffsetMs}:${currentTimeMs}`;
235
- const cachedSmoothedData = this.#frequencyDataCache.get(smoothedKey);
236
- if (cachedSmoothedData) return cachedSmoothedData;
237
- const framesData = await Promise.all(Array.from({ length: this.fftDecay }, async (_, i) => {
238
- const frameOffset = i * (1e3 / 30);
239
- const startTime = Math.max(0, (currentTimeMs - frameOffset - startOffsetMs) / 1e3);
240
- const cacheKey = `${this.shouldInterpolateFrequencies}:${this.fftSize}:${this.fftGain}:${startOffsetMs}:${startTime}`;
241
- const cachedFrame = this.#frequencyDataCache.get(cacheKey);
242
- if (cachedFrame) return cachedFrame;
243
- const SIZE = 48e3 / 30;
244
- let audioContext;
245
- try {
246
- audioContext = new OfflineAudioContext(2, SIZE, 48e3);
247
- } catch (error) {
248
- throw new Error(`[EFMedia.frequencyDataTask] Failed to create OfflineAudioContext(2, ${SIZE}, 48000) for frame ${i} at time ${startTime}s: ${error instanceof Error ? error.message : String(error)}. This is for audio frequency analysis.`);
249
- }
250
- const analyser = audioContext.createAnalyser();
251
- analyser.fftSize = this.fftSize;
252
- analyser.minDecibels = -90;
253
- analyser.maxDecibels = -10;
254
- const gainNode = audioContext.createGain();
255
- gainNode.gain.value = this.fftGain;
256
- const filter = audioContext.createBiquadFilter();
257
- filter.type = "bandpass";
258
- filter.frequency.value = 15e3;
259
- filter.Q.value = .05;
260
- const audioBufferSource = audioContext.createBufferSource();
261
- audioBufferSource.buffer = audioBuffer;
262
- audioBufferSource.connect(filter);
263
- filter.connect(gainNode);
264
- gainNode.connect(analyser);
265
- analyser.connect(audioContext.destination);
266
- audioBufferSource.start(0, startTime, 1 / 30);
267
- try {
268
- await audioContext.startRendering();
269
- const frameData = new Uint8Array(this.fftSize / 2);
270
- analyser.getByteFrequencyData(frameData);
271
- this.#frequencyDataCache.set(cacheKey, frameData);
272
- return frameData;
273
- } finally {
274
- audioBufferSource.disconnect();
275
- analyser.disconnect();
276
- }
277
- }));
278
- const frameLength = framesData[0]?.length ?? 0;
279
- const smoothedData = new Uint8Array(frameLength);
280
- for (let i = 0; i < frameLength; i++) {
281
- let weightedSum = 0;
282
- let weightSum = 0;
283
- framesData.forEach((frame, frameIndex) => {
284
- const decayWeight = EFMedia.DECAY_WEIGHT ** frameIndex;
285
- weightedSum += (frame[i] ?? 0) * decayWeight;
286
- weightSum += decayWeight;
287
- });
288
- smoothedData[i] = Math.min(255, Math.round(weightedSum / weightSum));
289
- }
290
- smoothedData.forEach((value, i) => {
291
- const freqWeight = this.FREQ_WEIGHTS[i] ?? 0;
292
- smoothedData[i] = Math.min(255, Math.round(value * freqWeight));
293
- });
294
- const slicedData = smoothedData.slice(0, Math.floor(smoothedData.length / 2));
295
- const processedData = this.shouldInterpolateFrequencies ? processFFTData(slicedData) : slicedData;
296
- this.#frequencyDataCache.set(smoothedKey, processedData);
297
- return processedData;
298
- }
299
- });
300
- this.videoAssetTask = new Task(this, {
301
- autoRun: EF_INTERACTIVE,
302
- onError: (error) => {
303
- console.error("videoAssetTask error", error);
304
- },
305
- args: () => [this.effectiveMode, this.mediaSegmentsTask.value],
306
- task: async ([mode, files], { signal: _signal }) => {
307
- if (!files) return;
308
- const fragmentIndex = this.fragmentIndexTask.value;
309
- const computedVideoTrackId = Object.values(fragmentIndex ?? {}).find((track) => track.type === "video")?.track;
310
- if (computedVideoTrackId === void 0) return;
311
- const videoFile = files[computedVideoTrackId];
312
- if (!videoFile) return;
313
- const existingAsset = this.videoAssetTask.value;
314
- if (existingAsset) {
315
- for (const frame of existingAsset?.decodedFrames || []) frame.close();
316
- const maybeDecoder = existingAsset?.videoDecoder;
317
- if (maybeDecoder?.state !== "closed") maybeDecoder.close();
318
- }
319
- if (mode === "jit-transcode") return await VideoAsset.createFromCompleteMP4(`jit-segment-${computedVideoTrackId}`, videoFile);
320
- return await VideoAsset.createFromReadableStream("video.mp4", videoFile.stream(), videoFile);
321
- }
322
- });
323
- this._desiredSeekTimeMs = -1;
324
- this.assetIndexLoader = new Task(this, {
325
- autoRun: EF_INTERACTIVE,
326
- onError: (error) => {
327
- console.error("assetIndexLoader error", error);
328
- },
329
- args: () => [this.effectiveMode === "asset" ? this.fragmentIndexPath() : null, this.fetch],
330
- task: async ([path, fetch], { signal }) => {
331
- if (!path) return null;
332
- try {
333
- const response = await fetch(path, { signal });
334
- return await response.json();
335
- } catch (error) {
336
- console.error("Failed to load asset fragment index", error);
337
- return null;
338
- }
339
- },
340
- onComplete: () => {
341
- this.requestUpdate("intrinsicDurationMs");
342
- this.requestUpdate("ownCurrentTimeMs");
343
- this.rootTimegroup?.requestUpdate("ownCurrentTimeMs");
344
- this.rootTimegroup?.requestUpdate("durationMs");
345
- }
346
- });
347
- this.assetSegmentKeysTask = new Task(this, {
348
- autoRun: EF_INTERACTIVE,
349
- onError: (error) => {
350
- console.error("assetSegmentKeysTask error", error);
351
- },
352
- args: () => [this.effectiveMode === "asset" ? this.assetIndexLoader.value : null, this.desiredSeekTimeMs],
353
- task: async ([fragmentIndex, seekMs]) => {
354
- if (this.effectiveMode === "asset") {
355
- await this.assetIndexLoader.taskComplete;
356
- fragmentIndex = this.assetIndexLoader.value;
357
- }
358
- if (!fragmentIndex || seekMs == null) return null;
359
- return this.calculateAssetSegmentKeys(fragmentIndex, seekMs);
360
- }
361
- });
362
- this.assetInitSegmentsTask = new Task(this, {
363
- autoRun: EF_INTERACTIVE,
364
- onError: (error) => {
365
- console.error("assetInitSegmentsTask error", error);
366
- },
367
- args: () => [this.effectiveMode === "asset" ? this.assetIndexLoader.value : null, this.fetch],
368
- task: async ([fragmentIndex, fetch], { signal }) => {
369
- if (this.effectiveMode === "asset") {
370
- await this.assetIndexLoader.taskComplete;
371
- fragmentIndex = this.assetIndexLoader.value;
372
- }
373
- if (!fragmentIndex) return null;
374
- return await Promise.all(Object.entries(fragmentIndex).map(async ([trackId, track]) => {
375
- const start = track.initSegment.offset;
376
- const end = track.initSegment.offset + track.initSegment.size;
377
- const response = await fetch(this.fragmentTrackPath(trackId), {
378
- signal,
379
- headers: { Range: `bytes=${start}-${end - 1}` }
380
- });
381
- const buffer = await response.arrayBuffer();
382
- buffer.fileStart = 0;
383
- const mp4File = new MP4File();
384
- mp4File.appendBuffer(buffer, true);
385
- mp4File.flush();
386
- await mp4File.readyPromise;
387
- return {
388
- trackId,
389
- buffer,
390
- mp4File
391
- };
392
- }));
393
- }
394
- });
395
- this.assetSegmentLoader = new Task(this, {
396
- autoRun: EF_INTERACTIVE,
397
- onError: (error) => {
398
- console.error("assetSegmentLoader error", error);
399
- },
400
- argsEqual: deepArrayEquals,
401
- args: () => [
402
- this.assetIndexLoader.value,
403
- this.assetSegmentKeysTask.value,
404
- this.assetInitSegmentsTask.value,
405
- this.fetch
406
- ],
407
- task: async ([fragmentIndex, segmentKeys, initSegments, fetch], { signal }) => {
408
- if (this.effectiveMode === "asset") {
409
- await this.assetIndexLoader.taskComplete;
410
- fragmentIndex = this.assetIndexLoader.value;
411
- await this.assetSegmentKeysTask.taskComplete;
412
- segmentKeys = this.assetSegmentKeysTask.value;
413
- await this.assetInitSegmentsTask.taskComplete;
414
- initSegments = this.assetInitSegmentsTask.value;
415
- }
416
- if (!fragmentIndex || !segmentKeys || !initSegments) return null;
417
- const seekMs = this.desiredSeekTimeMs;
418
- if (seekMs == null) return null;
419
- const files = {};
420
- const seekResult = this.calculateAssetSeekResult(fragmentIndex, initSegments, seekMs);
421
- if (!seekResult) return null;
422
- for (const [trackId, { segment, track, nextSegment }] of Object.entries(seekResult)) {
423
- const start = segment.offset;
424
- const end = segment.offset + segment.size;
425
- const response = await fetch(this.fragmentTrackPath(trackId), {
426
- signal,
427
- headers: { Range: `bytes=${start}-${end - 1}` }
428
- });
429
- if (nextSegment) {
430
- const nextStart = nextSegment.offset;
431
- const nextEnd = nextSegment.offset + nextSegment.size;
432
- fetch(this.fragmentTrackPath(trackId), {
433
- signal,
434
- headers: { Range: `bytes=${nextStart}-${nextEnd - 1}` }
435
- }).catch(() => {});
436
- }
437
- const initSegment = initSegments.find((seg) => seg.trackId === String(track.id));
438
- if (!initSegment) continue;
439
- const mediaBuffer = await response.arrayBuffer();
440
- files[trackId] = new File([initSegment.buffer, mediaBuffer], "video.mp4", { type: "video/mp4" });
441
- }
442
- return files;
443
- }
444
- });
445
- this.jitSegmentKeysTask = new Task(this, {
446
- autoRun: EF_INTERACTIVE,
447
- onError: (error) => {
448
- console.error("jitSegmentKeysTask error", error);
449
- },
450
- args: () => [this.effectiveMode === "jit-transcode" ? this.jitMetadataLoader.value : null, this.desiredSeekTimeMs],
451
- task: ([metadata, seekMs]) => {
452
- if (!metadata || seekMs == null) return null;
453
- return this.calculateJitSegmentKeys(metadata, seekMs);
454
- }
455
- });
456
- this.jitSegmentLoader = new Task(this, {
457
- autoRun: EF_INTERACTIVE,
458
- onError: (error) => {
459
- console.error("jitSegmentLoader error", error);
460
- },
461
- argsEqual: deepArrayEquals,
462
- args: () => [
463
- this.src,
464
- this.jitSegmentKeysTask.value,
465
- this.jitMetadataLoader.value
466
- ],
467
- task: async ([src, segmentKeys, metadata], { signal: _signal }) => {
468
- await this.jitSegmentKeysTask.taskComplete;
469
- await this.jitMetadataLoader.taskComplete;
470
- if (!src || !segmentKeys || !metadata || !this.jitClientTask.value) return null;
471
- const seekMs = this.desiredSeekTimeMs;
472
- if (seekMs == null) return null;
473
- try {
474
- this.jitLoadingState = "segments";
475
- this.jitErrorMessage = null;
476
- const files = {};
477
- const quality = await this.jitClientTask.value.getAdaptiveQuality();
478
- const fragmentIndex = this.synthesizeFragmentIndex(metadata);
479
- const seekResult = this.calculateJitSeekResult(fragmentIndex, seekMs);
480
- for (const [trackId, { segment, track, nextSegment }] of Object.entries(seekResult)) {
481
- const startTimeMs = segment.dts / track.timescale * 1e3;
482
- const segmentBuffer = await this.jitClientTask.value.fetchSegment(src, startTimeMs, quality);
483
- files[trackId] = new File([segmentBuffer], "segment.mp4", { type: "video/mp4" });
484
- if (nextSegment && this.enablePrefetch) {
485
- const nextStartTimeMs = nextSegment.dts / track.timescale * 1e3;
486
- this.jitClientTask.value.fetchSegment(src, nextStartTimeMs, quality).catch(() => {});
487
- }
488
- }
489
- this.jitCacheStats = this.jitClientTask.value.getCacheStats();
490
- this.jitLoadingState = "idle";
491
- return files;
492
- } catch (error) {
493
- this.jitLoadingState = "error";
494
- this.jitErrorMessage = error instanceof Error ? error.message : "Failed to load video segments";
495
- throw error;
496
- }
497
- }
498
- });
499
- this.fragmentIndexTask = new Task(this, {
500
- autoRun: EF_INTERACTIVE,
501
- onError: (error) => {
502
- console.error("fragmentIndexTask error", error);
503
- },
504
- args: () => [this.assetIndexLoader.value, this.jitMetadataLoader.value],
505
- task: async ([assetIndex, jitMetadata]) => {
506
- await this.assetIndexLoader.taskComplete;
507
- await this.jitMetadataLoader.taskComplete;
508
- if (assetIndex) return assetIndex;
509
- if (jitMetadata) return this.synthesizeFragmentIndex(jitMetadata);
510
- return null;
511
- }
512
- });
513
- this.mediaSegmentsTask = new Task(this, {
514
- autoRun: EF_INTERACTIVE,
515
- onError: (error) => {
516
- console.error("mediaSegmentsTask error", error);
517
- },
518
- args: () => [this.assetSegmentLoader.value, this.jitSegmentLoader.value],
519
- task: async ([_assetFiles, _jitFiles], { signal }) => {
520
- log("🔍 SIGNAL: mediaSegmentsTask starting", { signalAborted: signal.aborted });
521
- await this.assetSegmentLoader.taskComplete;
522
- if (signal.aborted) {
523
- log("🔍 SIGNAL: mediaSegmentsTask aborted after assetSegmentLoader.taskComplete");
524
- return null;
525
- }
526
- await this.jitSegmentLoader.taskComplete;
527
- if (signal.aborted) {
528
- log("🔍 SIGNAL: mediaSegmentsTask aborted after jitSegmentLoader.taskComplete");
529
- return null;
530
- }
531
- const assetFiles = this.assetSegmentLoader.value;
532
- const jitFiles = this.jitSegmentLoader.value;
533
- log("🔍 SIGNAL: mediaSegmentsTask using fresh values", {
534
- hasAssetFiles: !!assetFiles,
535
- hasJitFiles: !!jitFiles,
536
- signalAborted: signal.aborted
537
- });
538
- const result = assetFiles || jitFiles || null;
539
- log("🔍 SIGNAL: mediaSegmentsTask resolved", {
540
- hasResult: !!result,
541
- signalAborted: signal.aborted
542
- });
543
- return result;
544
- }
545
- });
546
- this.seekTask = new Task(this, {
547
- autoRun: EF_INTERACTIVE,
548
- onError: (error) => {
549
- console.error("seekTask error", error);
550
- },
551
- args: () => [
552
- this.fragmentIndexTask.value,
553
- this.mediaSegmentsTask.value,
554
- this.desiredSeekTimeMs
555
- ],
556
- task: async ([_fragmentIndex, _files, seekMs], { signal }) => {
557
- log("🔍 SIGNAL: seekTask starting", {
558
- seekMs,
559
- signalAborted: signal.aborted
560
- });
561
- await this.fragmentIndexTask.taskComplete;
562
- if (signal.aborted) {
563
- log("🔍 SIGNAL: seekTask aborted after fragmentIndexTask.taskComplete");
564
- return null;
565
- }
566
- await this.mediaSegmentsTask.taskComplete;
567
- if (signal.aborted) {
568
- log("🔍 SIGNAL: seekTask aborted after mediaSegmentsTask.taskComplete");
569
- return null;
570
- }
571
- const fragmentIndex = this.fragmentIndexTask.value;
572
- const files = this.mediaSegmentsTask.value;
573
- log("🔍 SIGNAL: seekTask using fresh values", {
574
- hasFragmentIndex: !!fragmentIndex,
575
- hasFiles: !!files,
576
- seekMs,
577
- signalAborted: signal.aborted
578
- });
579
- const typedFragmentIndex = fragmentIndex;
580
- if (!typedFragmentIndex || !files) {
581
- log("🔍 SIGNAL: seekTask calculation aborted - missing required data");
582
- return null;
583
- }
584
- const result = {};
585
- for (const index of Object.values(typedFragmentIndex)) {
586
- const track = this.createTrackInfo(index);
587
- log("trace: processing track", {
588
- trackId: index.track,
589
- type: index.type
590
- });
591
- const segment = index.segments.toReversed().find((segment$1) => {
592
- const segmentStartMs = segment$1.dts / track.timescale * 1e3;
593
- return segmentStartMs <= seekMs;
594
- });
595
- const nextSegment = index.segments.find((segment$1) => {
596
- const segmentStartMs = segment$1.dts / track.timescale * 1e3;
597
- return segmentStartMs > seekMs;
598
- });
599
- if (segment) {
600
- result[index.track] = {
601
- segment,
602
- track,
603
- nextSegment
604
- };
605
- log("trace: found segment for track", {
606
- trackId: index.track,
607
- segmentDts: segment.dts,
608
- hasNextSegment: !!nextSegment
609
- });
610
- }
611
- }
612
- log("🔍 SIGNAL: seekTask calculation complete", {
613
- trackCount: Object.keys(result).length,
614
- signalAborted: signal.aborted
615
- });
616
- return result;
617
- }
618
- });
31
+ this.audioBufferDurationMs = 3e4;
32
+ this.maxAudioBufferFetches = 2;
33
+ this.enableAudioBuffering = true;
34
+ this.mute = false;
35
+ this.fftSize = 128;
36
+ this.fftDecay = 8;
37
+ this.fftGain = 3;
38
+ this.interpolateFrequencies = false;
39
+ this.mediaEngineTask = makeMediaEngineTask(this);
40
+ this.audioSegmentIdTask = makeAudioSegmentIdTask(this);
41
+ this.audioInitSegmentFetchTask = makeAudioInitSegmentFetchTask(this);
42
+ this.audioSegmentFetchTask = makeAudioSegmentFetchTask(this);
43
+ this.audioInputTask = makeAudioInputTask(this);
44
+ this.audioSeekTask = makeAudioSeekTask(this);
45
+ this.audioBufferTask = makeAudioBufferTask(this);
46
+ this.byteTimeDomainTask = makeAudioTimeDomainAnalysisTask(this);
47
+ this.frequencyDataTask = makeAudioFrequencyAnalysisTask(this);
48
+ this.assetId = null;
49
+ this._desiredSeekTimeMs = 0;
50
+ }
51
+ static {
52
+ this.VIDEO_SAMPLE_BUFFER_SIZE = 30;
53
+ }
54
+ static {
55
+ this.AUDIO_SAMPLE_BUFFER_SIZE = 120;
56
+ }
57
+ static get observedAttributes() {
58
+ const parentAttributes = super.observedAttributes || [];
59
+ return [
60
+ ...parentAttributes,
61
+ "mute",
62
+ "fft-size",
63
+ "fft-decay",
64
+ "fft-gain",
65
+ "interpolate-frequencies",
66
+ "asset-id",
67
+ "audio-buffer-duration",
68
+ "max-audio-buffer-fetches",
69
+ "enable-audio-buffering"
70
+ ];
619
71
  }
620
72
  static {
621
73
  this.styles = [css`
@@ -626,307 +78,6 @@ var EFMedia = class EFMedia extends EFTargetable(EFSourceMixin(EFTemporal(FetchM
626
78
  }
627
79
  `];
628
80
  }
629
- /**
630
- * Get the mode, prioritizing attribute values over property values
631
- */
632
- get mode() {
633
- const attr = this.getAttribute("mode");
634
- return attr || this._mode || "auto";
635
- }
636
- set mode(value) {
637
- const oldValue = this.mode;
638
- this._mode = value;
639
- this.setAttribute("mode", value);
640
- this.requestUpdate("mode", oldValue);
641
- }
642
- connectedCallback() {
643
- super.connectedCallback();
644
- const modeAttr = this.getAttribute("mode");
645
- if (modeAttr && modeAttr !== this._mode) {
646
- this._mode = modeAttr;
647
- this.requestUpdate("mode");
648
- }
649
- const prefetchSegmentsAttr = this.getAttribute("prefetch-segments");
650
- if (prefetchSegmentsAttr !== null) this.prefetchSegments = Number.parseInt(prefetchSegmentsAttr, 10) || 3;
651
- const cacheSizeAttr = this.getAttribute("cache-size");
652
- if (cacheSizeAttr !== null) this.cacheSize = Number.parseInt(cacheSizeAttr, 10) || 20;
653
- const enablePrefetchAttr = this.getAttribute("enable-prefetch");
654
- if (enablePrefetchAttr !== null) this.enablePrefetch = enablePrefetchAttr === "true";
655
- }
656
- /**
657
- * Detected loading mode based on URL patterns and manual override
658
- */
659
- get effectiveMode() {
660
- const actualMode = this.mode;
661
- if (actualMode === "asset" || actualMode === "jit-transcode") return actualMode;
662
- if (this.assetId) return "asset";
663
- if (!this.src) return "asset";
664
- if (JitTranscodingClient.isJitTranscodeEligible(this.src)) return "jit-transcode";
665
- return "asset";
666
- }
667
- #assetId = null;
668
- /**
669
- * The unique identifier for the media asset.
670
- * This property can be set programmatically or via the "asset-id" attribute.
671
- * @domAttribute "asset-id"
672
- */
673
- set assetId(value) {
674
- this.#assetId = value;
675
- }
676
- get assetId() {
677
- return this.#assetId || this.getAttribute("asset-id");
678
- }
679
- fragmentIndexPath() {
680
- if (this.assetId) return `${this.apiHost}/api/v1/isobmff_files/${this.assetId}/index`;
681
- const src = this.src ?? "";
682
- if (!src) return "/@ef-track-fragment-index/no-src-available";
683
- const normalizedSrc = src.startsWith("/") ? src.slice(1) : src;
684
- if (normalizedSrc.startsWith("@ef-")) return `/@ef-track-fragment-index/${normalizedSrc}`;
685
- return `/@ef-track-fragment-index/${normalizedSrc}`;
686
- }
687
- fragmentTrackPath(trackId) {
688
- if (this.assetId) return `${this.apiHost}/api/v1/isobmff_tracks/${this.assetId}/${trackId}`;
689
- const src = this.src ?? "";
690
- if (!src) return `/@ef-track/no-src-available?trackId=${trackId}`;
691
- const normalizedSrc = src.startsWith("/") ? src.slice(1) : src;
692
- if (normalizedSrc.startsWith("@ef-")) return `/@ef-track/${normalizedSrc}?trackId=${trackId}`;
693
- return `/@ef-track/${normalizedSrc}?trackId=${trackId}`;
694
- }
695
- get mediaDurationTask() {
696
- return this.fragmentIndexTask;
697
- }
698
- get defaultVideoTrackId() {
699
- const fragmentIndex = this.fragmentIndexTask.value;
700
- return Object.values(fragmentIndex ?? {}).find((track) => track.type === "video")?.track;
701
- }
702
- get defaultAudioTrackId() {
703
- const fragmentIndex = this.fragmentIndexTask.value;
704
- return Object.values(fragmentIndex ?? {}).find((track) => track.type === "audio")?.track;
705
- }
706
- get intrinsicDurationMs() {
707
- const fragmentIndex = this.fragmentIndexTask.value;
708
- if (!fragmentIndex) return 0;
709
- const durations = Object.values(fragmentIndex).map((track) => track.duration / track.timescale * 1e3);
710
- if (durations.length === 0) return 0;
711
- return Math.max(...durations);
712
- }
713
- #audioContext = (() => {
714
- try {
715
- return new OfflineAudioContext(2, 48e3 / 30, 48e3);
716
- } catch (error) {
717
- throw new Error(`[EFMedia.audioBufferTask] Failed to create OfflineAudioContext(2, ${48e3 / 30}, 48000): ${error instanceof Error ? error.message : String(error)}. This is the class field audioContext for audio buffer task processing.`);
718
- }
719
- })();
720
- async fetchAudioSpanningTime(fromMs, toMs) {
721
- toMs = Math.min(toMs, this.durationMs);
722
- if (this.sourceInMs) fromMs -= this.startTimeMs - (this.trimStartMs ?? 0) - (this.sourceInMs ?? 0);
723
- if (this.sourceOutMs) toMs -= this.startTimeMs - (this.trimStartMs ?? 0) - (this.sourceOutMs ?? 0);
724
- fromMs -= this.startTimeMs - (this.trimStartMs ?? 0);
725
- toMs -= this.startTimeMs - (this.trimStartMs ?? 0);
726
- await this.fragmentIndexTask.taskComplete;
727
- const fragmentIndex = this.fragmentIndexTask.value;
728
- const audioTrackId = this.defaultAudioTrackId;
729
- if (!audioTrackId) return void 0;
730
- const audioTrackIndex = fragmentIndex?.[audioTrackId];
731
- if (!audioTrackIndex) return void 0;
732
- if (this.effectiveMode === "jit-transcode" && this.src) {
733
- const jitClient = this.jitClientTask.value;
734
- if (!jitClient) return void 0;
735
- try {
736
- const segmentDuration = 2e3;
737
- const startSegmentIndex = Math.floor(fromMs / segmentDuration);
738
- const maxSegmentIndex = Math.floor(this.durationMs / segmentDuration) - 1;
739
- const endSegmentIndex = Math.min(Math.floor(toMs / segmentDuration), maxSegmentIndex);
740
- const quality = await jitClient.getAdaptiveQuality();
741
- const segmentPromises = [];
742
- for (let i = startSegmentIndex; i <= endSegmentIndex; i++) {
743
- const segmentStartMs = i * segmentDuration;
744
- const segmentEndMs = (i + 1) * segmentDuration;
745
- segmentPromises.push(jitClient.fetchSegment(this.src, segmentStartMs, quality).then((buffer) => ({
746
- buffer,
747
- startMs: segmentStartMs,
748
- endMs: segmentEndMs
749
- })));
750
- }
751
- const segments = await Promise.all(segmentPromises);
752
- const audioBuffers = [];
753
- for (const segment of segments) try {
754
- let tempContext;
755
- try {
756
- tempContext = new OfflineAudioContext(2, 48e3, 48e3);
757
- } catch (error) {
758
- throw new Error(`[EFMedia.fetchAudioSpanningTime JIT] Failed to create temp OfflineAudioContext(2, 48000, 48000) for segment ${segment.startMs}-${segment.endMs}ms: ${error instanceof Error ? error.message : String(error)}. This is for decoding audio from JIT video segments.`);
759
- }
760
- const clonedBuffer = segment.buffer.slice(0);
761
- const audioBuffer = await tempContext.decodeAudioData(clonedBuffer);
762
- audioBuffers.push({
763
- buffer: audioBuffer,
764
- startMs: segment.startMs,
765
- endMs: segment.endMs
766
- });
767
- } catch (error) {
768
- log(`Failed to decode audio from segment ${segment.startMs}-${segment.endMs}ms:`, error);
769
- throw error;
770
- }
771
- if (audioBuffers.length === 0) return void 0;
772
- const firstAudioBuffer = audioBuffers[0];
773
- const lastAudioBuffer = audioBuffers[audioBuffers.length - 1];
774
- if (!firstAudioBuffer || !lastAudioBuffer) return void 0;
775
- const sampleRate = firstAudioBuffer.buffer.sampleRate;
776
- const numberOfChannels = firstAudioBuffer.buffer.numberOfChannels;
777
- const actualStartMs = Math.max(fromMs, firstAudioBuffer.startMs);
778
- const actualEndMs = Math.min(toMs, lastAudioBuffer.endMs);
779
- const totalDurationMs = actualEndMs - actualStartMs;
- const totalSamples = Math.floor(totalDurationMs / 1e3 * sampleRate);
- if (totalSamples <= 0) return void 0;
- let finalContext;
- try {
- finalContext = new OfflineAudioContext(numberOfChannels, totalSamples, sampleRate);
- } catch (error) {
- throw new Error(`[EFMedia.fetchAudioSpanningTime final] Failed to create final OfflineAudioContext(${numberOfChannels}, ${totalSamples}, ${sampleRate}) for time range ${actualStartMs}-${actualEndMs}ms: ${error instanceof Error ? error.message : String(error)}. This is for creating the final concatenated audio buffer.`);
- }
- const finalBuffer = finalContext.createBuffer(numberOfChannels, totalSamples, sampleRate);
- let outputOffset = 0;
- for (const { buffer: audioBuffer, startMs: segmentStartMs, endMs: segmentEndMs } of audioBuffers) {
- const segmentNeedStart = Math.max(actualStartMs, segmentStartMs);
- const segmentNeedEnd = Math.min(actualEndMs, segmentEndMs);
- if (segmentNeedStart >= segmentNeedEnd) continue;
- const segmentStartSample = Math.floor((segmentNeedStart - segmentStartMs) / 1e3 * sampleRate);
- const segmentDurationSamples = Math.floor((segmentNeedEnd - segmentNeedStart) / 1e3 * sampleRate);
- const actualSamples = Math.min(segmentDurationSamples, audioBuffer.length - segmentStartSample, totalSamples - outputOffset);
- if (actualSamples <= 0) continue;
- for (let channel = 0; channel < numberOfChannels; channel++) {
- const sourceData = audioBuffer.getChannelData(channel);
- const targetData = finalBuffer.getChannelData(channel);
- for (let i = 0; i < actualSamples; i++) {
- const sourceIndex = segmentStartSample + i;
- const targetIndex = outputOffset + i;
- if (sourceIndex < sourceData.length && targetIndex < targetData.length) {
- const sample = sourceData[sourceIndex];
- if (sample !== void 0) targetData[targetIndex] = sample;
- }
- }
- }
- outputOffset += actualSamples;
- }
- const wavBlob = this.encodeWAVBuffer(finalBuffer);
- const result = {
- blob: wavBlob,
- startMs: actualStartMs - (this.trimStartMs ?? 0),
- endMs: actualEndMs - (this.trimEndMs ?? 0)
- };
- return result;
- } catch (error) {
- log("Failed to extract and concatenate audio from JIT video segments:", error);
- return void 0;
- }
- }
- const start = audioTrackIndex.initSegment.offset;
- const end = audioTrackIndex.initSegment.offset + audioTrackIndex.initSegment.size;
- const audioInitFragmentRequest = this.fetch(this.fragmentTrackPath(String(audioTrackId)), { headers: { Range: `bytes=${start}-${end - 1}` } });
- const fragments = Object.values(audioTrackIndex.segments).filter((segment) => {
- const segmentStartsBeforeEnd = segment.dts <= toMs * audioTrackIndex.timescale / 1e3;
- const segmentEndsAfterStart = segment.dts + segment.duration >= fromMs * audioTrackIndex.timescale / 1e3;
- return segmentStartsBeforeEnd && segmentEndsAfterStart;
- });
- const firstFragment = fragments[0];
- if (!firstFragment) return void 0;
- const lastFragment = fragments[fragments.length - 1];
- if (!lastFragment) return void 0;
- const fragmentStart = firstFragment.offset;
- const fragmentEnd = lastFragment.offset + lastFragment.size;
- const audioFragmentRequest = this.fetch(this.fragmentTrackPath(String(audioTrackId)), { headers: { Range: `bytes=${fragmentStart}-${fragmentEnd - 1}` } });
- const initResponse = await audioInitFragmentRequest;
- const dataResponse = await audioFragmentRequest;
- const initBuffer = await initResponse.arrayBuffer();
- const dataBuffer = await dataResponse.arrayBuffer();
- const audioBlob = new Blob([initBuffer, dataBuffer], { type: "audio/mp4" });
- return {
- blob: audioBlob,
- startMs: firstFragment.dts / audioTrackIndex.timescale * 1e3 - (this.trimStartMs ?? 0),
- endMs: lastFragment.dts / audioTrackIndex.timescale * 1e3 + lastFragment.duration / audioTrackIndex.timescale * 1e3 - (this.trimEndMs ?? 0)
- };
- }
- /**
- * Encode an AudioBuffer to a WAV blob
- */
- encodeWAVBuffer(audioBuffer) {
- const numberOfChannels = audioBuffer.numberOfChannels;
- const sampleRate = audioBuffer.sampleRate;
- const length = audioBuffer.length;
- const bytesPerSample = 2;
- const blockAlign = numberOfChannels * bytesPerSample;
- const byteRate = sampleRate * blockAlign;
- const dataSize = length * blockAlign;
- const fileSize = 36 + dataSize;
- const buffer = new ArrayBuffer(44 + dataSize);
- const view = new DataView(buffer);
- let offset = 0;
- view.setUint32(offset, 1380533830, false);
- offset += 4;
- view.setUint32(offset, fileSize, true);
- offset += 4;
- view.setUint32(offset, 1463899717, false);
- offset += 4;
- view.setUint32(offset, 1718449184, false);
- offset += 4;
- view.setUint32(offset, 16, true);
- offset += 4;
- view.setUint16(offset, 1, true);
- offset += 2;
- view.setUint16(offset, numberOfChannels, true);
- offset += 2;
- view.setUint32(offset, sampleRate, true);
- offset += 4;
- view.setUint32(offset, byteRate, true);
- offset += 4;
- view.setUint16(offset, blockAlign, true);
- offset += 2;
- view.setUint16(offset, 16, true);
- offset += 2;
- view.setUint32(offset, 1684108385, false);
- offset += 4;
- view.setUint32(offset, dataSize, true);
- offset += 4;
- for (let i = 0; i < length; i++) for (let channel = 0; channel < numberOfChannels; channel++) {
- const sample = audioBuffer.getChannelData(channel)[i] || 0;
- const pcmSample = Math.max(-32768, Math.min(32767, Math.floor(sample * 32767)));
- view.setInt16(offset, pcmSample, true);
- offset += 2;
- }
- return new Blob([buffer], { type: "audio/wav" });
- }
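
Note on the removed encodeWAVBuffer above: the bare integer literals it writes are the four canonical RIFF chunk tags encoded as big-endian uint32 values, and the 44-byte header is what drives the `36 + dataSize` / `44 + dataSize` arithmetic. A minimal sketch of that correspondence (constant names are illustrative, not part of the package):

// "RIFF", "WAVE", "fmt " and "data" as big-endian uint32 values,
// matching the literals written with setUint32(offset, n, false) above.
const RIFF_TAG = 0x52494646; // 1380533830
const WAVE_TAG = 0x57415645; // 1463899717
const FMT_TAG = 0x666d7420;  // 1718449184
const DATA_TAG = 0x64617461; // 1684108385
// Canonical PCM WAV layout: 12-byte RIFF descriptor + 24-byte fmt chunk
// + 8-byte data chunk header = 44 bytes, so the RIFF size field is
// 36 + dataSize and the full buffer is 44 + dataSize bytes.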
- set fftSize(value) {
- const oldValue = this.fftSize;
- this.setAttribute("fft-size", String(value));
- this.requestUpdate("fft-size", oldValue);
- }
- set fftDecay(value) {
- const oldValue = this.fftDecay;
- this.setAttribute("fft-decay", String(value));
- this.requestUpdate("fft-decay", oldValue);
- }
- get fftSize() {
- return Number.parseInt(this.getAttribute("fft-size") ?? "128", 10);
- }
- get fftDecay() {
- return Number.parseInt(this.getAttribute("fft-decay") ?? "8", 10);
- }
- set interpolateFrequencies(value) {
- const oldValue = this.interpolateFrequencies;
- this.setAttribute("interpolate-frequencies", String(value));
- this.requestUpdate("interpolate-frequencies", oldValue);
- }
- get interpolateFrequencies() {
- return this.getAttribute("interpolate-frequencies") !== "false";
- }
- get shouldInterpolateFrequencies() {
- if (this.hasAttribute("interpolate-frequencies")) return this.getAttribute("interpolate-frequencies") !== "false";
- return false;
- }
- static {
- this.DECAY_WEIGHT = .7;
- }
  get FREQ_WEIGHTS() {
  if (freqWeightsCache.has(this.fftSize)) return freqWeightsCache.get(this.fftSize);
  const weights = new Float32Array(this.fftSize / 2).map((_, i) => {
@@ -942,178 +93,19 @@ var EFMedia = class EFMedia extends EFTargetable(EFSourceMixin(EFTemporal(FetchM
  freqWeightsCache.set(this.fftSize, weights);
  return weights;
  }
- #byteTimeDomainCache = new LRUCache(100);
- #frequencyDataCache = new LRUCache(100);
- set fftGain(value) {
- const oldValue = this.fftGain;
- this.setAttribute("fft-gain", String(value));
- this.requestUpdate("fft-gain", oldValue);
- }
- get fftGain() {
- return Number.parseFloat(this.getAttribute("fft-gain") ?? "3.0");
- }
- synthesizeFragmentIndex(jitMetadata) {
- const segmentDuration = jitMetadata.segmentDuration || 2e3;
- const numSegments = Math.ceil(jitMetadata.durationMs / segmentDuration);
- const fragmentIndex = {};
- const videoStream = jitMetadata.streams.find((s) => s.type === "video");
- if (videoStream) {
- const segments = [];
- for (let i = 0; i < numSegments; i++) {
- const startMs = i * segmentDuration;
- const endMs = Math.min(startMs + segmentDuration, jitMetadata.durationMs);
- segments.push({
- dts: Math.floor(startMs * 90),
- cts: Math.floor(startMs * 90),
- duration: Math.floor((endMs - startMs) * 90),
- offset: 0,
- size: 0
- });
- }
- fragmentIndex[videoStream.index] = {
- track: videoStream.index,
- type: "video",
- timescale: 9e4,
- duration: Math.floor(jitMetadata.durationMs * 90),
- width: videoStream.width || 1920,
- height: videoStream.height || 1080,
- sample_count: numSegments * 50,
- codec: videoStream.codecName || "h264",
- segments,
- initSegment: {
- offset: 0,
- size: 0
- }
- };
- }
- const audioStream = jitMetadata.streams.find((s) => s.type === "audio");
- if (audioStream) {
- const segments = [];
- const audioTimescale = audioStream.sampleRate || 48e3;
- for (let i = 0; i < numSegments; i++) {
- const startMs = i * segmentDuration;
- const endMs = Math.min(startMs + segmentDuration, jitMetadata.durationMs);
- segments.push({
- dts: Math.floor(startMs * audioTimescale / 1e3),
- cts: Math.floor(startMs * audioTimescale / 1e3),
- duration: Math.floor((endMs - startMs) * audioTimescale / 1e3),
- offset: 0,
- size: 0
- });
- }
- fragmentIndex[audioStream.index] = {
- track: audioStream.index,
- type: "audio",
- timescale: audioTimescale,
- duration: Math.floor(jitMetadata.durationMs * audioTimescale / 1e3),
- channel_count: audioStream.channels || 2,
- sample_rate: audioStream.sampleRate || 48e3,
- sample_size: 16,
- sample_count: Math.floor(jitMetadata.durationMs * (audioStream.sampleRate || 48e3) / 1e3),
- codec: audioStream.codecName || "aac",
- segments,
- initSegment: {
- offset: 0,
- size: 0
- }
- };
- }
- return fragmentIndex;
- }
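
Note on the removed index/seek helpers above and below: they all share the same millisecond-to-timescale arithmetic (ticks = ms * timescale / 1e3, ms = ticks / timescale * 1e3); the bare `* 90` in the synthesized video segments is that formula with the 90 kHz (9e4) video timescale folded in. A minimal sketch of the conversion, with hypothetical helper names:

// Hypothetical helpers restating the ms <-> timescale-tick math used above.
const msToTicks = (ms, timescale) => Math.floor(ms * timescale / 1e3);
const ticksToMs = (ticks, timescale) => ticks / timescale * 1e3;

msToTicks(2e3, 9e4);    // 180000 video ticks, i.e. 2000 * 90
msToTicks(2e3, 48e3);   // 96000 audio ticks when the timescale is the 48 kHz sample rate
ticksToMs(96e3, 48e3);  // back to 2000 ms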
- calculateAssetSegmentKeys(fragmentIndex, seekMs) {
- const segmentKeys = {};
- for (const [trackId, index] of Object.entries(fragmentIndex)) {
- const segment = index.segments.toReversed().find((segment$1) => {
- const segmentStartMs = segment$1.dts / index.timescale * 1e3;
- return segmentStartMs <= seekMs;
- });
- if (segment) {
- const startTimeMs = segment.dts / index.timescale * 1e3;
- segmentKeys[trackId] = {
- startTimeMs,
- trackId
- };
- }
- }
- return segmentKeys;
- }
- calculateJitSegmentKeys(metadata, seekMs) {
- const segmentKeys = {};
- const segmentDuration = metadata.segmentDuration || 2e3;
- for (const stream of metadata.streams) {
- const segmentIndex = Math.floor(seekMs / segmentDuration);
- const startTimeMs = segmentIndex * segmentDuration;
- segmentKeys[stream.index] = {
- startTimeMs,
- trackId: String(stream.index)
- };
- }
- return segmentKeys;
- }
- calculateAssetSeekResult(fragmentIndex, initSegments, seekMs) {
- const result = {};
- for (const index of Object.values(fragmentIndex)) {
- const initTrack = initSegments.find((segment$1) => segment$1.trackId === String(index.track))?.mp4File.getInfo().tracks[0];
- if (!initTrack) continue;
- const segment = index.segments.toReversed().find((segment$1) => {
- const segmentStartMs = segment$1.dts / initTrack.timescale * 1e3;
- return segmentStartMs <= seekMs;
- });
- const nextSegment = index.segments.find((segment$1) => {
- return segment$1.dts / initTrack.timescale * 1e3 > seekMs;
- });
- if (segment) result[index.track] = {
- segment,
- track: initTrack,
- nextSegment
- };
- }
- return result;
+ get shouldInterpolateFrequencies() {
+ return this.interpolateFrequencies;
  }
- calculateJitSeekResult(fragmentIndex, seekMs) {
- const result = {};
- for (const index of Object.values(fragmentIndex)) {
- const track = this.createTrackInfo(index);
- const segment = index.segments.toReversed().find((segment$1) => {
- const segmentStartMs = segment$1.dts / track.timescale * 1e3;
- return segmentStartMs <= seekMs;
- });
- const nextSegment = index.segments.find((segment$1) => {
- return segment$1.dts / track.timescale * 1e3 > seekMs;
- });
- if (segment) result[index.track] = {
- segment,
- track,
- nextSegment
- };
- }
- return result;
+ get urlGenerator() {
+ return new UrlGenerator(() => this.apiHost ?? "");
  }
- createTrackInfo(index) {
- return {
- id: index.track,
- name: index.type,
- type: index.type,
- timescale: index.timescale,
- duration: index.duration,
- bitrate: index.type === "video" ? 1e6 : 128e3,
- created: /* @__PURE__ */ new Date(),
- modified: /* @__PURE__ */ new Date(),
- movie_duration: index.duration,
- movie_timescale: index.timescale,
- layer: 0,
- alternate_group: 0,
- volume: index.type === "audio" ? 1 : 0,
- track_width: index.type === "video" ? index.width || 0 : 0,
- track_height: index.type === "video" ? index.height || 0 : 0,
- samples_duration: index.duration,
- codec: index.codec || "unknown",
- language: "und",
- nb_samples: index.sample_count || 0
- };
+ get intrinsicDurationMs() {
+ return this.mediaEngineTask.value?.durationMs ?? 0;
  }
  updated(changedProperties) {
  super.updated(changedProperties);
+ const newCurrentSourceTimeMs = this.currentSourceTimeMs;
+ if (newCurrentSourceTimeMs !== this.desiredSeekTimeMs) this.executeSeek(newCurrentSourceTimeMs);
  if (changedProperties.has("ownCurrentTimeMs")) this.executeSeek(this.currentSourceTimeMs);
  if (changedProperties.has("currentTime") || changedProperties.has("ownCurrentTimeMs")) updateAnimations(this);
  }
@@ -1129,60 +121,72 @@ var EFMedia = class EFMedia extends EFTargetable(EFSourceMixin(EFTemporal(FetchM
  async executeSeek(seekToMs) {
  this.desiredSeekTimeMs = seekToMs;
  }
+ /**
+ * Main integration method for EFTimegroup audio playback
+ * Now powered by clean, testable utility functions
+ */
+ async fetchAudioSpanningTime(fromMs, toMs, signal = new AbortController().signal) {
+ return fetchAudioSpanningTime(this, fromMs, toMs, signal);
+ }
+ /**
+ * Check if an audio segment is cached in the unified buffer system
+ * Now uses the same caching approach as video for consistency
+ */
+ getCachedAudioSegment(segmentId) {
+ return this.audioBufferTask.value?.cachedSegments.has(segmentId) ?? false;
+ }
+ /**
+ * Get cached audio segments from the unified buffer system
+ * Now uses the same caching approach as video for consistency
+ */
+ getCachedAudioSegments(segmentIds) {
+ const bufferState = this.audioBufferTask.value;
+ if (!bufferState) return /* @__PURE__ */ new Set();
+ return new Set(segmentIds.filter((id) => bufferState.cachedSegments.has(id)));
+ }
  };
  _decorate([property({ type: Number })], EFMedia.prototype, "currentTimeMs", void 0);
  _decorate([property({
  type: Number,
- attribute: "prefetch-segments"
- })], EFMedia.prototype, "prefetchSegments", void 0);
+ attribute: "audio-buffer-duration"
+ })], EFMedia.prototype, "audioBufferDurationMs", void 0);
+ _decorate([property({
+ type: Number,
+ attribute: "max-audio-buffer-fetches"
+ })], EFMedia.prototype, "maxAudioBufferFetches", void 0);
+ _decorate([property({
+ type: Boolean,
+ attribute: "enable-audio-buffering"
+ })], EFMedia.prototype, "enableAudioBuffering", void 0);
+ _decorate([property({
+ type: Boolean,
+ attribute: "mute",
+ reflect: true
+ })], EFMedia.prototype, "mute", void 0);
+ _decorate([property({
+ type: Number,
+ attribute: "fft-size",
+ reflect: true
+ })], EFMedia.prototype, "fftSize", void 0);
  _decorate([property({
  type: Number,
- attribute: "cache-size"
- })], EFMedia.prototype, "cacheSize", void 0);
+ attribute: "fft-decay",
+ reflect: true
+ })], EFMedia.prototype, "fftDecay", void 0);
+ _decorate([property({
+ type: Number,
+ attribute: "fft-gain",
+ reflect: true
+ })], EFMedia.prototype, "fftGain", void 0);
  _decorate([property({
  type: Boolean,
- attribute: "enable-prefetch"
- })], EFMedia.prototype, "enablePrefetch", void 0);
- _decorate([state()], EFMedia.prototype, "jitLoadingState", void 0);
- _decorate([state()], EFMedia.prototype, "jitErrorMessage", void 0);
- _decorate([state()], EFMedia.prototype, "jitCacheStats", void 0);
+ attribute: "interpolate-frequencies",
+ reflect: true
+ })], EFMedia.prototype, "interpolateFrequencies", void 0);
  _decorate([property({
  type: String,
  attribute: "asset-id",
  reflect: true
- })], EFMedia.prototype, "assetId", null);
+ })], EFMedia.prototype, "assetId", void 0);
  _decorate([state()], EFMedia.prototype, "_desiredSeekTimeMs", void 0);
- function processFFTData(fftData, zeroThresholdPercent = .1) {
- const totalBins = fftData.length;
- const zeroThresholdCount = Math.floor(totalBins * zeroThresholdPercent);
- let zeroCount = 0;
- let cutoffIndex = totalBins;
- for (let i = totalBins - 1; i >= 0; i--) if (fftData[i] ?? true) zeroCount++;
- else if (zeroCount >= zeroThresholdCount) {
- cutoffIndex = i + 1;
- break;
- }
- if (cutoffIndex < zeroThresholdCount) return fftData;
- const goodData = fftData.slice(0, cutoffIndex);
- const resampledData = interpolateData(goodData, fftData.length);
- const attenuationStartIndex = Math.floor(totalBins * .9);
- for (let i = attenuationStartIndex; i < totalBins; i++) {
- const attenuationProgress = (i - attenuationStartIndex) / (totalBins - attenuationStartIndex) + .2;
- const attenuationFactor = Math.max(0, 1 - attenuationProgress);
- resampledData[i] = Math.floor((resampledData[i] ?? 0) * attenuationFactor);
- }
- return resampledData;
- }
- function interpolateData(data, targetSize) {
- const resampled = new Uint8Array(targetSize);
- const dataLength = data.length;
- for (let i = 0; i < targetSize; i++) {
- const ratio = i / (targetSize - 1) * (dataLength - 1);
- const index = Math.floor(ratio);
- const fraction = ratio - index;
- if (index >= dataLength - 1) resampled[i] = data[dataLength - 1] ?? 0;
- else resampled[i] = Math.round((data[index] ?? 0) * (1 - fraction) + (data[index + 1] ?? 0) * fraction);
- }
- return resampled;
- }
- export { EFMedia, deepGetMediaElements };
+ export { EFMedia, IgnorableError, deepGetMediaElements };
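
Note on the APIs added in this diff: the new property decorators register attributes for the audio buffer and FFT analysis settings (audio-buffer-duration, max-audio-buffer-fetches, enable-audio-buffering, mute, fft-size, fft-decay, fft-gain, interpolate-frequencies), and the class now exposes fetchAudioSpanningTime plus the cached-segment queries. A hypothetical usage sketch, inferred only from the decorators and method signatures above; the element lookup, attribute values, and logged fields are illustrative, not documentation:

// Grab any element backed by the EFMedia class; the selector is an assumption.
const media = document.querySelector("[asset-id]");

// Attributes registered by the new property decorators; values are arbitrary examples.
media.setAttribute("fft-size", "256");
media.setAttribute("fft-gain", "3.0");
media.setAttribute("interpolate-frequencies", "true");
media.setAttribute("audio-buffer-duration", "4000");
media.setAttribute("enable-audio-buffering", "");

// New audio span fetch; per the diff it delegates to the fetchAudioSpanningTime utility.
async function sampleAudioSpan() {
  const controller = new AbortController();
  const span = await media.fetchAudioSpanningTime(0, 2e3, controller.signal);
  // The pre-0.18 implementation resolved to { blob, startMs, endMs };
  // the new utility is assumed here to keep a compatible shape.
  if (span) console.log(span.startMs, span.endMs);
}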