@videojs/html 10.0.0-beta.1 → 10.0.0-beta.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (463)
  1. package/cdn/audio-minimal.dev.d.ts +1 -0
  2. package/cdn/audio-minimal.dev.js +113 -0
  3. package/cdn/audio-minimal.dev.js.map +1 -0
  4. package/cdn/audio-minimal.js +2 -0
  5. package/cdn/audio-minimal.js.map +1 -0
  6. package/cdn/audio.dev.d.ts +1 -0
  7. package/cdn/audio.dev.js +104 -0
  8. package/cdn/audio.dev.js.map +1 -0
  9. package/cdn/audio.js +2 -0
  10. package/cdn/audio.js.map +1 -0
  11. package/cdn/background.dev.d.ts +1 -0
  12. package/cdn/background.dev.js +160 -0
  13. package/cdn/background.dev.js.map +1 -0
  14. package/cdn/background.js +2 -0
  15. package/cdn/background.js.map +1 -0
  16. package/cdn/context-Be8C5kVd.js +166 -0
  17. package/cdn/context-Be8C5kVd.js.map +1 -0
  18. package/cdn/context-CUBywtsB.js +14 -0
  19. package/cdn/context-CUBywtsB.js.map +1 -0
  20. package/cdn/create-player-AcfnN3li.js +3218 -0
  21. package/cdn/create-player-AcfnN3li.js.map +1 -0
  22. package/cdn/create-player-s_qISCpw.js +7 -0
  23. package/cdn/create-player-s_qISCpw.js.map +1 -0
  24. package/cdn/custom-media-element-DqevSVgS.js +47 -0
  25. package/cdn/custom-media-element-DqevSVgS.js.map +1 -0
  26. package/cdn/custom-media-element-moFa3UZp.js +303 -0
  27. package/cdn/custom-media-element-moFa3UZp.js.map +1 -0
  28. package/cdn/default-GgKND7a8.js +2 -0
  29. package/cdn/default-GgKND7a8.js.map +1 -0
  30. package/cdn/default-cLso8BHO.js +28 -0
  31. package/cdn/default-cLso8BHO.js.map +1 -0
  32. package/cdn/delegate-CzAcT1xm.js +44 -0
  33. package/cdn/delegate-CzAcT1xm.js.map +1 -0
  34. package/cdn/delegate-Uc-6tQDR.js +2 -0
  35. package/cdn/delegate-Uc-6tQDR.js.map +1 -0
  36. package/cdn/listen-4jqsRSKo.js +2 -0
  37. package/cdn/listen-4jqsRSKo.js.map +1 -0
  38. package/cdn/listen-YSH3Jfyk.js +9 -0
  39. package/cdn/listen-YSH3Jfyk.js.map +1 -0
  40. package/cdn/media/dash-video.dev.d.ts +1 -0
  41. package/cdn/media/dash-video.dev.js +39167 -0
  42. package/cdn/media/dash-video.dev.js.map +1 -0
  43. package/cdn/media/dash-video.js +21 -0
  44. package/cdn/media/dash-video.js.map +1 -0
  45. package/cdn/media/hls-video.dev.d.ts +1 -0
  46. package/cdn/media/hls-video.dev.js +28359 -0
  47. package/cdn/media/hls-video.dev.js.map +1 -0
  48. package/cdn/media/hls-video.js +41 -0
  49. package/cdn/media/hls-video.js.map +1 -0
  50. package/cdn/media/simple-hls-video.dev.d.ts +1 -0
  51. package/cdn/media/simple-hls-video.dev.js +3603 -0
  52. package/cdn/media/simple-hls-video.dev.js.map +1 -0
  53. package/cdn/media/simple-hls-video.js +2 -0
  54. package/cdn/media/simple-hls-video.js.map +1 -0
  55. package/cdn/media-attach-mixin-D5_nfJpa.js +2 -0
  56. package/cdn/media-attach-mixin-D5_nfJpa.js.map +1 -0
  57. package/cdn/media-attach-mixin-U_KQB_9O.js +44 -0
  58. package/cdn/media-attach-mixin-U_KQB_9O.js.map +1 -0
  59. package/cdn/minimal-BJfleQcQ.js +2 -0
  60. package/cdn/minimal-BJfleQcQ.js.map +1 -0
  61. package/cdn/minimal-DBMdC_0I.js +28 -0
  62. package/cdn/minimal-DBMdC_0I.js.map +1 -0
  63. package/cdn/player-C46h14iP.js +2 -0
  64. package/cdn/player-C46h14iP.js.map +1 -0
  65. package/cdn/player-CvrOeLpy.js +15 -0
  66. package/cdn/player-CvrOeLpy.js.map +1 -0
  67. package/cdn/poster-Olv5zDI_.js +195 -0
  68. package/cdn/poster-Olv5zDI_.js.map +1 -0
  69. package/cdn/poster-odJ4iwIv.js +2 -0
  70. package/cdn/poster-odJ4iwIv.js.map +1 -0
  71. package/cdn/predicate-BG-dj_kF.js +26 -0
  72. package/cdn/predicate-BG-dj_kF.js.map +1 -0
  73. package/cdn/predicate-Y9jDHLpX.js +2 -0
  74. package/cdn/predicate-Y9jDHLpX.js.map +1 -0
  75. package/cdn/safe-define-B8lHgj_K.js +9 -0
  76. package/cdn/safe-define-B8lHgj_K.js.map +1 -0
  77. package/cdn/safe-define-GrHW3P9e.js +2 -0
  78. package/cdn/safe-define-GrHW3P9e.js.map +1 -0
  79. package/cdn/video-minimal.dev.d.ts +1 -0
  80. package/cdn/video-minimal.dev.js +156 -0
  81. package/cdn/video-minimal.dev.js.map +1 -0
  82. package/cdn/video-minimal.js +2 -0
  83. package/cdn/video-minimal.js.map +1 -0
  84. package/cdn/video.dev.d.ts +1 -0
  85. package/cdn/video.dev.js +170 -0
  86. package/cdn/video.dev.js.map +1 -0
  87. package/cdn/video.js +2 -0
  88. package/cdn/video.js.map +1 -0
  89. package/cdn/volume-slider-D7BOdSDF.js +2459 -0
  90. package/cdn/volume-slider-D7BOdSDF.js.map +1 -0
  91. package/cdn/volume-slider-DPeFF5tt.js +8 -0
  92. package/cdn/volume-slider-DPeFF5tt.js.map +1 -0
  93. package/dist/default/_virtual/inline-css_src/define/audio/minimal-skin.js +1 -1
  94. package/dist/default/_virtual/inline-css_src/define/audio/minimal-skin.js.map +1 -1
  95. package/dist/default/_virtual/inline-css_src/define/audio/skin.js +1 -1
  96. package/dist/default/_virtual/inline-css_src/define/audio/skin.js.map +1 -1
  97. package/dist/default/_virtual/inline-css_src/define/background/skin.js +6 -0
  98. package/dist/default/_virtual/inline-css_src/define/background/skin.js.map +1 -0
  99. package/dist/default/_virtual/inline-css_src/define/base.js +6 -0
  100. package/dist/default/_virtual/inline-css_src/define/base.js.map +1 -0
  101. package/dist/default/_virtual/inline-css_src/define/shared.js +6 -0
  102. package/dist/default/_virtual/inline-css_src/define/shared.js.map +1 -0
  103. package/dist/default/_virtual/inline-css_src/define/video/minimal-skin.js +1 -1
  104. package/dist/default/_virtual/inline-css_src/define/video/minimal-skin.js.map +1 -1
  105. package/dist/default/_virtual/inline-css_src/define/video/skin.js +1 -1
  106. package/dist/default/_virtual/inline-css_src/define/video/skin.js.map +1 -1
  107. package/dist/default/define/audio/minimal-skin.css +237 -157
  108. package/dist/default/define/audio/minimal-skin.js +2 -79
  109. package/dist/default/define/audio/minimal-skin.js.map +1 -1
  110. package/dist/default/define/audio/minimal-skin.tailwind.js +5 -82
  111. package/dist/default/define/audio/minimal-skin.tailwind.js.map +1 -1
  112. package/dist/default/define/audio/skin.css +234 -153
  113. package/dist/default/define/audio/skin.js +2 -70
  114. package/dist/default/define/audio/skin.js.map +1 -1
  115. package/dist/default/define/audio/skin.tailwind.js +6 -75
  116. package/dist/default/define/audio/skin.tailwind.js.map +1 -1
  117. package/dist/default/define/background/skin.css +1 -1
  118. package/dist/default/define/background/skin.js +11 -5
  119. package/dist/default/define/background/skin.js.map +1 -1
  120. package/dist/default/define/base.css +23 -0
  121. package/dist/default/define/media/dash-video.js +14 -0
  122. package/dist/default/define/media/dash-video.js.map +1 -0
  123. package/dist/default/define/shared.css +13 -0
  124. package/dist/default/define/skin-mixin.js +16 -33
  125. package/dist/default/define/skin-mixin.js.map +1 -1
  126. package/dist/default/define/video/minimal-skin.css +486 -301
  127. package/dist/default/define/video/minimal-skin.js +3 -121
  128. package/dist/default/define/video/minimal-skin.js.map +1 -1
  129. package/dist/default/define/video/minimal-skin.tailwind.js +8 -136
  130. package/dist/default/define/video/minimal-skin.tailwind.js.map +1 -1
  131. package/dist/default/define/video/skin.css +484 -289
  132. package/dist/default/define/video/skin.js +3 -116
  133. package/dist/default/define/video/skin.js.map +1 -1
  134. package/dist/default/define/video/skin.tailwind.js +8 -128
  135. package/dist/default/define/video/skin.tailwind.js.map +1 -1
  136. package/dist/default/icons/dist/render/default/index.js +2 -1
  137. package/dist/default/icons/dist/render/default/index.js.map +1 -1
  138. package/dist/default/icons/dist/render/minimal/index.js +2 -1
  139. package/dist/default/icons/dist/render/minimal/index.js.map +1 -1
  140. package/dist/default/index.js +4 -2
  141. package/dist/default/media/background-video/index.js +6 -19
  142. package/dist/default/media/background-video/index.js.map +1 -1
  143. package/dist/default/media/container-element.js +5 -2
  144. package/dist/default/media/container-element.js.map +1 -1
  145. package/dist/default/media/dash-video/index.js +26 -0
  146. package/dist/default/media/dash-video/index.js.map +1 -0
  147. package/dist/default/media/hls-video/index.js +2 -1
  148. package/dist/default/media/hls-video/index.js.map +1 -1
  149. package/dist/default/media/simple-hls-video/index.js +3 -1
  150. package/dist/default/media/simple-hls-video/index.js.map +1 -1
  151. package/dist/default/player/context.js +6 -2
  152. package/dist/default/player/context.js.map +1 -1
  153. package/dist/default/player/create-player.js +11 -3
  154. package/dist/default/player/create-player.js.map +1 -1
  155. package/dist/default/skins/dist/default/default/tailwind/audio.tailwind.js +10 -26
  156. package/dist/default/skins/dist/default/default/tailwind/audio.tailwind.js.map +1 -1
  157. package/dist/default/skins/dist/default/default/tailwind/components/button.js +4 -3
  158. package/dist/default/skins/dist/default/default/tailwind/components/button.js.map +1 -1
  159. package/dist/default/skins/dist/default/default/tailwind/components/controls.js +1 -1
  160. package/dist/default/skins/dist/default/default/tailwind/components/controls.js.map +1 -1
  161. package/dist/default/skins/dist/default/default/tailwind/components/error.js +3 -3
  162. package/dist/default/skins/dist/default/default/tailwind/components/error.js.map +1 -1
  163. package/dist/default/skins/dist/default/default/tailwind/components/overlay.js +1 -1
  164. package/dist/default/skins/dist/default/default/tailwind/components/overlay.js.map +1 -1
  165. package/dist/default/skins/dist/default/default/tailwind/components/popup.js +3 -3
  166. package/dist/default/skins/dist/default/default/tailwind/components/popup.js.map +1 -1
  167. package/dist/default/skins/dist/default/default/tailwind/components/poster.js +16 -0
  168. package/dist/default/skins/dist/default/default/tailwind/components/poster.js.map +1 -0
  169. package/dist/default/skins/dist/default/default/tailwind/components/preview.js +13 -0
  170. package/dist/default/skins/dist/default/default/tailwind/components/preview.js.map +1 -0
  171. package/dist/default/skins/dist/default/default/tailwind/components/root.js +1 -1
  172. package/dist/default/skins/dist/default/default/tailwind/components/root.js.map +1 -1
  173. package/dist/default/skins/dist/default/default/tailwind/components/seek.js +1 -1
  174. package/dist/default/skins/dist/default/default/tailwind/components/seek.js.map +1 -1
  175. package/dist/default/skins/dist/default/default/tailwind/components/slider.js +1 -1
  176. package/dist/default/skins/dist/default/default/tailwind/components/slider.js.map +1 -1
  177. package/dist/default/skins/dist/default/default/tailwind/components/surface.js +1 -1
  178. package/dist/default/skins/dist/default/default/tailwind/components/surface.js.map +1 -1
  179. package/dist/default/skins/dist/default/default/tailwind/video.tailwind.js +27 -16
  180. package/dist/default/skins/dist/default/default/tailwind/video.tailwind.js.map +1 -1
  181. package/dist/default/skins/dist/default/minimal/tailwind/audio.tailwind.js +11 -24
  182. package/dist/default/skins/dist/default/minimal/tailwind/audio.tailwind.js.map +1 -1
  183. package/dist/default/skins/dist/default/minimal/tailwind/components/button.js +4 -3
  184. package/dist/default/skins/dist/default/minimal/tailwind/components/button.js.map +1 -1
  185. package/dist/default/skins/dist/default/minimal/tailwind/components/controls.js +1 -1
  186. package/dist/default/skins/dist/default/minimal/tailwind/components/controls.js.map +1 -1
  187. package/dist/default/skins/dist/default/minimal/tailwind/components/error.js +3 -3
  188. package/dist/default/skins/dist/default/minimal/tailwind/components/error.js.map +1 -1
  189. package/dist/default/skins/dist/default/minimal/tailwind/components/overlay.js +1 -1
  190. package/dist/default/skins/dist/default/minimal/tailwind/components/overlay.js.map +1 -1
  191. package/dist/default/skins/dist/default/minimal/tailwind/components/playback-rate.js +6 -0
  192. package/dist/default/skins/dist/default/minimal/tailwind/components/playback-rate.js.map +1 -0
  193. package/dist/default/skins/dist/default/minimal/tailwind/components/popup.js +4 -3
  194. package/dist/default/skins/dist/default/minimal/tailwind/components/popup.js.map +1 -1
  195. package/dist/default/skins/dist/default/minimal/tailwind/components/poster.js +16 -0
  196. package/dist/default/skins/dist/default/minimal/tailwind/components/poster.js.map +1 -0
  197. package/dist/default/skins/dist/default/minimal/tailwind/components/preview.js +14 -0
  198. package/dist/default/skins/dist/default/minimal/tailwind/components/preview.js.map +1 -0
  199. package/dist/default/skins/dist/default/minimal/tailwind/components/root.js +1 -1
  200. package/dist/default/skins/dist/default/minimal/tailwind/components/root.js.map +1 -1
  201. package/dist/default/skins/dist/default/minimal/tailwind/components/seek.js +1 -1
  202. package/dist/default/skins/dist/default/minimal/tailwind/components/seek.js.map +1 -1
  203. package/dist/default/skins/dist/default/minimal/tailwind/components/slider.js +1 -1
  204. package/dist/default/skins/dist/default/minimal/tailwind/components/slider.js.map +1 -1
  205. package/dist/default/skins/dist/default/minimal/tailwind/components/time.js +1 -1
  206. package/dist/default/skins/dist/default/minimal/tailwind/components/time.js.map +1 -1
  207. package/dist/default/skins/dist/default/minimal/tailwind/video.tailwind.js +33 -16
  208. package/dist/default/skins/dist/default/minimal/tailwind/video.tailwind.js.map +1 -1
  209. package/dist/default/skins/dist/default/{default/tailwind/components → shared/tailwind}/icon-state.js +6 -1
  210. package/dist/default/skins/dist/default/shared/tailwind/icon-state.js.map +1 -0
  211. package/dist/{dev/skins/dist/default/default/tailwind/components → default/skins/dist/default/shared/tailwind}/tooltip-state.js +1 -1
  212. package/dist/default/skins/dist/default/shared/tailwind/tooltip-state.js.map +1 -0
  213. package/dist/default/store/container-mixin.js +16 -60
  214. package/dist/default/store/container-mixin.js.map +1 -1
  215. package/dist/default/store/media-attach-mixin.js +45 -0
  216. package/dist/default/store/media-attach-mixin.js.map +1 -0
  217. package/dist/default/store/provider-mixin.js +99 -9
  218. package/dist/default/store/provider-mixin.js.map +1 -1
  219. package/dist/default/ui/alert-dialog/alert-dialog-element.js +1 -1
  220. package/dist/default/ui/buffering-indicator/buffering-indicator-element.js +1 -1
  221. package/dist/default/ui/captions-button/captions-button-element.js +1 -1
  222. package/dist/default/ui/controls/controls-element.js +1 -1
  223. package/dist/default/ui/fullscreen-button/fullscreen-button-element.js +1 -1
  224. package/dist/default/ui/mute-button/mute-button-element.js +1 -1
  225. package/dist/default/ui/pip-button/pip-button-element.js +1 -1
  226. package/dist/default/ui/play-button/play-button-element.js +1 -1
  227. package/dist/default/ui/playback-rate-button/playback-rate-button-element.js +1 -1
  228. package/dist/default/ui/popover/popover-element.js +55 -4
  229. package/dist/default/ui/popover/popover-element.js.map +1 -1
  230. package/dist/default/ui/poster/poster-element.js +1 -1
  231. package/dist/default/ui/seek-button/seek-button-element.js +1 -1
  232. package/dist/default/ui/slider/slider-element.js +1 -1
  233. package/dist/default/ui/thumbnail/thumbnail-element.js +1 -1
  234. package/dist/default/ui/time/time-element.js +1 -1
  235. package/dist/default/ui/time-slider/time-slider-element.js +2 -2
  236. package/dist/default/ui/time-slider/time-slider-element.js.map +1 -1
  237. package/dist/default/ui/tooltip/tooltip-element.js +54 -4
  238. package/dist/default/ui/tooltip/tooltip-element.js.map +1 -1
  239. package/dist/default/ui/tooltip/tooltip-group-element.js +5 -2
  240. package/dist/default/ui/tooltip/tooltip-group-element.js.map +1 -1
  241. package/dist/default/ui/volume-slider/volume-slider-element.js +3 -3
  242. package/dist/default/ui/volume-slider/volume-slider-element.js.map +1 -1
  243. package/dist/dev/_virtual/inline-css_src/define/audio/minimal-skin.js +1 -1
  244. package/dist/dev/_virtual/inline-css_src/define/audio/minimal-skin.js.map +1 -1
  245. package/dist/dev/_virtual/inline-css_src/define/audio/skin.js +1 -1
  246. package/dist/dev/_virtual/inline-css_src/define/audio/skin.js.map +1 -1
  247. package/dist/dev/_virtual/inline-css_src/define/background/skin.js +6 -0
  248. package/dist/dev/_virtual/inline-css_src/define/background/skin.js.map +1 -0
  249. package/dist/dev/_virtual/inline-css_src/define/base.js +6 -0
  250. package/dist/dev/_virtual/inline-css_src/define/base.js.map +1 -0
  251. package/dist/dev/_virtual/inline-css_src/define/shared.js +6 -0
  252. package/dist/dev/_virtual/inline-css_src/define/shared.js.map +1 -0
  253. package/dist/dev/_virtual/inline-css_src/define/video/minimal-skin.js +1 -1
  254. package/dist/dev/_virtual/inline-css_src/define/video/minimal-skin.js.map +1 -1
  255. package/dist/dev/_virtual/inline-css_src/define/video/skin.js +1 -1
  256. package/dist/dev/_virtual/inline-css_src/define/video/skin.js.map +1 -1
  257. package/dist/dev/define/audio/minimal-skin.css +237 -157
  258. package/dist/dev/define/audio/minimal-skin.d.ts.map +1 -1
  259. package/dist/dev/define/audio/minimal-skin.js +69 -64
  260. package/dist/dev/define/audio/minimal-skin.js.map +1 -1
  261. package/dist/dev/define/audio/minimal-skin.tailwind.d.ts.map +1 -1
  262. package/dist/dev/define/audio/minimal-skin.tailwind.js +73 -66
  263. package/dist/dev/define/audio/minimal-skin.tailwind.js.map +1 -1
  264. package/dist/dev/define/audio/skin.css +234 -153
  265. package/dist/dev/define/audio/skin.d.ts.map +1 -1
  266. package/dist/dev/define/audio/skin.js +61 -56
  267. package/dist/dev/define/audio/skin.js.map +1 -1
  268. package/dist/dev/define/audio/skin.tailwind.d.ts.map +1 -1
  269. package/dist/dev/define/audio/skin.tailwind.js +67 -61
  270. package/dist/dev/define/audio/skin.tailwind.js.map +1 -1
  271. package/dist/dev/define/background/skin.css +1 -1
  272. package/dist/dev/define/background/skin.d.ts.map +1 -1
  273. package/dist/dev/define/background/skin.js +13 -1
  274. package/dist/dev/define/background/skin.js.map +1 -1
  275. package/dist/dev/define/base.css +23 -0
  276. package/dist/dev/define/media/dash-video.d.ts +14 -0
  277. package/dist/dev/define/media/dash-video.d.ts.map +1 -0
  278. package/dist/dev/define/media/dash-video.js +14 -0
  279. package/dist/dev/define/media/dash-video.js.map +1 -0
  280. package/dist/dev/define/shared.css +13 -0
  281. package/dist/dev/define/skin-mixin.d.ts +2 -2
  282. package/dist/dev/define/skin-mixin.d.ts.map +1 -1
  283. package/dist/dev/define/skin-mixin.js +16 -33
  284. package/dist/dev/define/skin-mixin.js.map +1 -1
  285. package/dist/dev/define/video/minimal-skin.css +486 -301
  286. package/dist/dev/define/video/minimal-skin.d.ts.map +1 -1
  287. package/dist/dev/define/video/minimal-skin.js +110 -103
  288. package/dist/dev/define/video/minimal-skin.js.map +1 -1
  289. package/dist/dev/define/video/minimal-skin.tailwind.d.ts.map +1 -1
  290. package/dist/dev/define/video/minimal-skin.tailwind.js +123 -116
  291. package/dist/dev/define/video/minimal-skin.tailwind.js.map +1 -1
  292. package/dist/dev/define/video/skin.css +484 -289
  293. package/dist/dev/define/video/skin.d.ts.map +1 -1
  294. package/dist/dev/define/video/skin.js +105 -100
  295. package/dist/dev/define/video/skin.js.map +1 -1
  296. package/dist/dev/define/video/skin.tailwind.d.ts.map +1 -1
  297. package/dist/dev/define/video/skin.tailwind.js +114 -108
  298. package/dist/dev/define/video/skin.tailwind.js.map +1 -1
  299. package/dist/dev/icons/dist/render/default/index.js +2 -1
  300. package/dist/dev/icons/dist/render/default/index.js.map +1 -1
  301. package/dist/dev/icons/dist/render/minimal/index.js +2 -1
  302. package/dist/dev/icons/dist/render/minimal/index.js.map +1 -1
  303. package/dist/dev/index.d.ts +6 -4
  304. package/dist/dev/index.js +4 -2
  305. package/dist/dev/media/background-video/index.d.ts +8 -1
  306. package/dist/dev/media/background-video/index.d.ts.map +1 -1
  307. package/dist/dev/media/background-video/index.js +5 -1
  308. package/dist/dev/media/background-video/index.js.map +1 -1
  309. package/dist/dev/media/container-element.js +5 -2
  310. package/dist/dev/media/container-element.js.map +1 -1
  311. package/dist/dev/media/dash-video/index.d.ts +13 -0
  312. package/dist/dev/media/dash-video/index.d.ts.map +1 -0
  313. package/dist/dev/media/dash-video/index.js +26 -0
  314. package/dist/dev/media/dash-video/index.js.map +1 -0
  315. package/dist/dev/media/hls-video/index.d.ts +2 -1
  316. package/dist/dev/media/hls-video/index.d.ts.map +1 -1
  317. package/dist/dev/media/hls-video/index.js +2 -1
  318. package/dist/dev/media/hls-video/index.js.map +1 -1
  319. package/dist/dev/media/simple-hls-video/index.d.ts +2 -1
  320. package/dist/dev/media/simple-hls-video/index.d.ts.map +1 -1
  321. package/dist/dev/media/simple-hls-video/index.js +3 -1
  322. package/dist/dev/media/simple-hls-video/index.js.map +1 -1
  323. package/dist/dev/player/context.d.ts +16 -2
  324. package/dist/dev/player/context.d.ts.map +1 -1
  325. package/dist/dev/player/context.js +6 -2
  326. package/dist/dev/player/context.js.map +1 -1
  327. package/dist/dev/player/create-player.d.ts +1 -1
  328. package/dist/dev/player/create-player.js +11 -3
  329. package/dist/dev/player/create-player.js.map +1 -1
  330. package/dist/dev/skins/dist/default/default/tailwind/audio.tailwind.js +10 -26
  331. package/dist/dev/skins/dist/default/default/tailwind/audio.tailwind.js.map +1 -1
  332. package/dist/dev/skins/dist/default/default/tailwind/components/button.js +4 -3
  333. package/dist/dev/skins/dist/default/default/tailwind/components/button.js.map +1 -1
  334. package/dist/dev/skins/dist/default/default/tailwind/components/controls.js +1 -1
  335. package/dist/dev/skins/dist/default/default/tailwind/components/controls.js.map +1 -1
  336. package/dist/dev/skins/dist/default/default/tailwind/components/error.js +3 -3
  337. package/dist/dev/skins/dist/default/default/tailwind/components/error.js.map +1 -1
  338. package/dist/dev/skins/dist/default/default/tailwind/components/overlay.js +1 -1
  339. package/dist/dev/skins/dist/default/default/tailwind/components/overlay.js.map +1 -1
  340. package/dist/dev/skins/dist/default/default/tailwind/components/popup.js +3 -3
  341. package/dist/dev/skins/dist/default/default/tailwind/components/popup.js.map +1 -1
  342. package/dist/dev/skins/dist/default/default/tailwind/components/poster.js +16 -0
  343. package/dist/dev/skins/dist/default/default/tailwind/components/poster.js.map +1 -0
  344. package/dist/dev/skins/dist/default/default/tailwind/components/preview.js +13 -0
  345. package/dist/dev/skins/dist/default/default/tailwind/components/preview.js.map +1 -0
  346. package/dist/dev/skins/dist/default/default/tailwind/components/root.js +1 -1
  347. package/dist/dev/skins/dist/default/default/tailwind/components/root.js.map +1 -1
  348. package/dist/dev/skins/dist/default/default/tailwind/components/seek.js +1 -1
  349. package/dist/dev/skins/dist/default/default/tailwind/components/seek.js.map +1 -1
  350. package/dist/dev/skins/dist/default/default/tailwind/components/slider.js +1 -1
  351. package/dist/dev/skins/dist/default/default/tailwind/components/slider.js.map +1 -1
  352. package/dist/dev/skins/dist/default/default/tailwind/components/surface.js +1 -1
  353. package/dist/dev/skins/dist/default/default/tailwind/components/surface.js.map +1 -1
  354. package/dist/dev/skins/dist/default/default/tailwind/video.tailwind.js +27 -16
  355. package/dist/dev/skins/dist/default/default/tailwind/video.tailwind.js.map +1 -1
  356. package/dist/dev/skins/dist/default/minimal/tailwind/audio.tailwind.js +11 -24
  357. package/dist/dev/skins/dist/default/minimal/tailwind/audio.tailwind.js.map +1 -1
  358. package/dist/dev/skins/dist/default/minimal/tailwind/components/button.js +4 -3
  359. package/dist/dev/skins/dist/default/minimal/tailwind/components/button.js.map +1 -1
  360. package/dist/dev/skins/dist/default/minimal/tailwind/components/controls.js +1 -1
  361. package/dist/dev/skins/dist/default/minimal/tailwind/components/controls.js.map +1 -1
  362. package/dist/dev/skins/dist/default/minimal/tailwind/components/error.js +3 -3
  363. package/dist/dev/skins/dist/default/minimal/tailwind/components/error.js.map +1 -1
  364. package/dist/dev/skins/dist/default/minimal/tailwind/components/overlay.js +1 -1
  365. package/dist/dev/skins/dist/default/minimal/tailwind/components/overlay.js.map +1 -1
  366. package/dist/dev/skins/dist/default/minimal/tailwind/components/playback-rate.js +6 -0
  367. package/dist/dev/skins/dist/default/minimal/tailwind/components/playback-rate.js.map +1 -0
  368. package/dist/dev/skins/dist/default/minimal/tailwind/components/popup.js +4 -3
  369. package/dist/dev/skins/dist/default/minimal/tailwind/components/popup.js.map +1 -1
  370. package/dist/dev/skins/dist/default/minimal/tailwind/components/poster.js +16 -0
  371. package/dist/dev/skins/dist/default/minimal/tailwind/components/poster.js.map +1 -0
  372. package/dist/dev/skins/dist/default/minimal/tailwind/components/preview.js +14 -0
  373. package/dist/dev/skins/dist/default/minimal/tailwind/components/preview.js.map +1 -0
  374. package/dist/dev/skins/dist/default/minimal/tailwind/components/root.js +1 -1
  375. package/dist/dev/skins/dist/default/minimal/tailwind/components/root.js.map +1 -1
  376. package/dist/dev/skins/dist/default/minimal/tailwind/components/seek.js +1 -1
  377. package/dist/dev/skins/dist/default/minimal/tailwind/components/seek.js.map +1 -1
  378. package/dist/dev/skins/dist/default/minimal/tailwind/components/slider.js +1 -1
  379. package/dist/dev/skins/dist/default/minimal/tailwind/components/slider.js.map +1 -1
  380. package/dist/dev/skins/dist/default/minimal/tailwind/components/time.js +1 -1
  381. package/dist/dev/skins/dist/default/minimal/tailwind/components/time.js.map +1 -1
  382. package/dist/dev/skins/dist/default/minimal/tailwind/video.tailwind.js +33 -16
  383. package/dist/dev/skins/dist/default/minimal/tailwind/video.tailwind.js.map +1 -1
  384. package/dist/{default/skins/dist/default/minimal/tailwind/components → dev/skins/dist/default/shared/tailwind}/icon-state.js +6 -1
  385. package/dist/dev/skins/dist/default/shared/tailwind/icon-state.js.map +1 -0
  386. package/dist/dev/skins/dist/default/{minimal/tailwind/components → shared/tailwind}/tooltip-state.js +1 -1
  387. package/dist/dev/skins/dist/default/shared/tailwind/tooltip-state.js.map +1 -0
  388. package/dist/dev/store/container-mixin.d.ts +10 -5
  389. package/dist/dev/store/container-mixin.d.ts.map +1 -1
  390. package/dist/dev/store/container-mixin.js +16 -60
  391. package/dist/dev/store/container-mixin.js.map +1 -1
  392. package/dist/dev/store/media-attach-mixin.d.ts +19 -0
  393. package/dist/dev/store/media-attach-mixin.d.ts.map +1 -0
  394. package/dist/dev/store/media-attach-mixin.js +45 -0
  395. package/dist/dev/store/media-attach-mixin.js.map +1 -0
  396. package/dist/dev/store/provider-mixin.d.ts +19 -6
  397. package/dist/dev/store/provider-mixin.d.ts.map +1 -1
  398. package/dist/dev/store/provider-mixin.js +99 -9
  399. package/dist/dev/store/provider-mixin.js.map +1 -1
  400. package/dist/dev/ui/alert-dialog/alert-dialog-description-element.d.ts +1 -1
  401. package/dist/dev/ui/alert-dialog/alert-dialog-element.js +1 -1
  402. package/dist/dev/ui/alert-dialog/alert-dialog-title-element.d.ts +1 -1
  403. package/dist/dev/ui/buffering-indicator/buffering-indicator-element.js +1 -1
  404. package/dist/dev/ui/captions-button/captions-button-element.d.ts +1 -1
  405. package/dist/dev/ui/captions-button/captions-button-element.js +1 -1
  406. package/dist/dev/ui/context-part-element.d.ts +1 -1
  407. package/dist/dev/ui/controls/controls-element.js +1 -1
  408. package/dist/dev/ui/controls/controls-group-element.d.ts +1 -1
  409. package/dist/dev/ui/fullscreen-button/fullscreen-button-element.d.ts +1 -1
  410. package/dist/dev/ui/fullscreen-button/fullscreen-button-element.js +1 -1
  411. package/dist/dev/ui/media-button-element.d.ts +1 -1
  412. package/dist/dev/ui/media-ui-element.d.ts +1 -1
  413. package/dist/dev/ui/mute-button/mute-button-element.d.ts +1 -1
  414. package/dist/dev/ui/mute-button/mute-button-element.js +1 -1
  415. package/dist/dev/ui/pip-button/pip-button-element.d.ts +1 -1
  416. package/dist/dev/ui/pip-button/pip-button-element.js +1 -1
  417. package/dist/dev/ui/play-button/play-button-element.d.ts +1 -1
  418. package/dist/dev/ui/play-button/play-button-element.js +1 -1
  419. package/dist/dev/ui/playback-rate-button/playback-rate-button-element.d.ts +1 -1
  420. package/dist/dev/ui/playback-rate-button/playback-rate-button-element.js +1 -1
  421. package/dist/dev/ui/popover/popover-element.d.ts +1 -1
  422. package/dist/dev/ui/popover/popover-element.d.ts.map +1 -1
  423. package/dist/dev/ui/popover/popover-element.js +55 -4
  424. package/dist/dev/ui/popover/popover-element.js.map +1 -1
  425. package/dist/dev/ui/poster/poster-element.d.ts +1 -1
  426. package/dist/dev/ui/poster/poster-element.js +1 -1
  427. package/dist/dev/ui/seek-button/seek-button-element.d.ts +1 -1
  428. package/dist/dev/ui/seek-button/seek-button-element.js +1 -1
  429. package/dist/dev/ui/slider/context.d.ts +1 -1
  430. package/dist/dev/ui/slider/slider-buffer-element.d.ts +1 -1
  431. package/dist/dev/ui/slider/slider-element.d.ts +1 -1
  432. package/dist/dev/ui/slider/slider-element.js +1 -1
  433. package/dist/dev/ui/slider/slider-fill-element.d.ts +1 -1
  434. package/dist/dev/ui/slider/slider-track-element.d.ts +1 -1
  435. package/dist/dev/ui/thumbnail/thumbnail-element.d.ts +1 -1
  436. package/dist/dev/ui/thumbnail/thumbnail-element.js +1 -1
  437. package/dist/dev/ui/time/time-element.d.ts +1 -1
  438. package/dist/dev/ui/time/time-element.js +1 -1
  439. package/dist/dev/ui/time-slider/time-slider-element.d.ts +1 -1
  440. package/dist/dev/ui/time-slider/time-slider-element.js +2 -2
  441. package/dist/dev/ui/time-slider/time-slider-element.js.map +1 -1
  442. package/dist/dev/ui/tooltip/tooltip-element.d.ts +1 -1
  443. package/dist/dev/ui/tooltip/tooltip-element.d.ts.map +1 -1
  444. package/dist/dev/ui/tooltip/tooltip-element.js +54 -4
  445. package/dist/dev/ui/tooltip/tooltip-element.js.map +1 -1
  446. package/dist/dev/ui/tooltip/tooltip-group-element.js +5 -2
  447. package/dist/dev/ui/tooltip/tooltip-group-element.js.map +1 -1
  448. package/dist/dev/ui/volume-slider/volume-slider-element.d.ts +1 -1
  449. package/dist/dev/ui/volume-slider/volume-slider-element.js +3 -3
  450. package/dist/dev/ui/volume-slider/volume-slider-element.js.map +1 -1
  451. package/package.json +26 -13
  452. package/dist/default/skins/dist/default/default/tailwind/components/icon-state.js.map +0 -1
  453. package/dist/default/skins/dist/default/default/tailwind/components/tooltip-state.js +0 -28
  454. package/dist/default/skins/dist/default/default/tailwind/components/tooltip-state.js.map +0 -1
  455. package/dist/default/skins/dist/default/minimal/tailwind/components/icon-state.js.map +0 -1
  456. package/dist/default/skins/dist/default/minimal/tailwind/components/tooltip-state.js +0 -28
  457. package/dist/default/skins/dist/default/minimal/tailwind/components/tooltip-state.js.map +0 -1
  458. package/dist/dev/skins/dist/default/default/tailwind/components/icon-state.js +0 -29
  459. package/dist/dev/skins/dist/default/default/tailwind/components/icon-state.js.map +0 -1
  460. package/dist/dev/skins/dist/default/default/tailwind/components/tooltip-state.js.map +0 -1
  461. package/dist/dev/skins/dist/default/minimal/tailwind/components/icon-state.js +0 -29
  462. package/dist/dev/skins/dist/default/minimal/tailwind/components/icon-state.js.map +0 -1
  463. package/dist/dev/skins/dist/default/minimal/tailwind/components/tooltip-state.js.map +0 -1
@@ -0,0 +1,3603 @@
1
+ import { n as isNil } from "../predicate-BG-dj_kF.js";
2
+ import "../context-Be8C5kVd.js";
3
+ import { t as listen } from "../listen-YSH3Jfyk.js";
4
+ import { t as DelegateMixin } from "../delegate-CzAcT1xm.js";
5
+ import { t as MediaAttachMixin } from "../media-attach-mixin-U_KQB_9O.js";
6
+ import { t as CustomMediaMixin } from "../custom-media-element-moFa3UZp.js";
7
+
8
+ //#region ../spf/dist/dev/core/state/create-state.js
9
+ /**
10
+ * Reactive state container with selectors, custom equality, and batched updates.
11
+ *
12
+ * Manages both immutable state values and mutable object references (e.g., HTMLMediaElement).
13
+ */
14
const STATE_SYMBOL = Symbol("@videojs/spf/state");
/**
 * Default equality function using Object.is.
 */
function defaultEquality(a, b) {
	return Object.is(a, b);
}
/**
 * State container implementation.
 *
 * Holds either an object state (patched key-by-key) or a primitive state
 * (replaced wholesale). Updates are staged in a pending slot and committed
 * on a batched microtask flush (or an explicit flush() call).
 */
var StateContainer = class {
	[STATE_SYMBOL] = true;
	#current;
	// Pending (unflushed) value. A separate boolean tracks presence so a
	// legitimately-null pending value is not confused with "nothing pending".
	// The previous implementation used `null` as the sentinel, which silently
	// dropped `patch(null)` on primitive state and left #pendingFlush stuck
	// true, permanently disabling all future scheduled flushes.
	#pending;
	#hasPending = false;
	#pendingFlush = false;
	#equalityFn;
	#listeners = /* @__PURE__ */ new Set();
	#selectorListeners = /* @__PURE__ */ new Set();
	/**
	 * @param initial - Initial state; objects are shallow-copied so the
	 *   caller's object is never mutated
	 * @param config - Optional `{ equalityFn }` overriding Object.is
	 */
	constructor(initial, config) {
		this.#current = typeof initial === "object" && initial !== null ? { ...initial } : initial;
		this.#equalityFn = config?.equalityFn ?? defaultEquality;
	}
	/** Latest value, including any pending (not yet flushed) update. */
	get current() {
		return this.#hasPending ? this.#pending : this.#current;
	}
	/**
	 * Merge a partial update into object state, or replace primitive state.
	 * No-op when nothing actually changes (per-key Object.is comparison).
	 * Changes are batched and committed on the next microtask flush.
	 */
	patch(partial) {
		const base = this.current;
		if (typeof base !== "object" || base === null) {
			// Primitive state: the "partial" is the full replacement value.
			if (!Object.is(base, partial)) this.#setPending(partial);
			return;
		}
		const next = { ...base };
		let changed = false;
		for (const key in partial) {
			if (!Object.hasOwn(partial, key)) continue;
			const value = partial[key];
			if (!Object.is(base[key], value)) {
				next[key] = value;
				changed = true;
			}
		}
		if (changed) this.#setPending(next);
	}
	/**
	 * Subscribe to all changes, or to a selected slice when a selector is
	 * given. Fires immediately with the current (possibly pending) value.
	 * On flushes, listeners receive (current, prev) — prev is undefined on
	 * the initial synchronous call.
	 * @returns Unsubscribe function
	 */
	subscribe(selectorOrListener, maybeListener, options) {
		if (maybeListener === void 0) {
			const listener = selectorOrListener;
			this.#listeners.add(listener);
			listener(this.current);
			return () => {
				this.#listeners.delete(listener);
			};
		}
		const entry = {
			selector: selectorOrListener,
			listener: maybeListener,
			options: options ?? {}
		};
		this.#selectorListeners.add(entry);
		maybeListener(entry.selector(this.current));
		return () => {
			this.#selectorListeners.delete(entry);
		};
	}
	/** Commit the pending value (if any) and notify listeners. */
	flush() {
		// Always clear the schedule flag, even when there is nothing pending,
		// so a future patch can schedule a new microtask.
		this.#pendingFlush = false;
		if (!this.#hasPending) return;
		const prev = this.#current;
		const next = this.#pending;
		this.#hasPending = false;
		this.#pending = void 0;
		if (this.#equalityFn(prev, next)) return;
		this.#current = next;
		for (const listener of this.#listeners) listener(this.#current, prev);
		for (const entry of this.#selectorListeners) {
			const prevSelected = entry.selector(prev);
			const nextSelected = entry.selector(this.#current);
			// Selector listeners fire only when their selected slice changed.
			if (!(entry.options.equalityFn ?? Object.is)(prevSelected, nextSelected)) {
				entry.listener(nextSelected, prevSelected);
			}
		}
	}
	#setPending(value) {
		this.#pending = value;
		this.#hasPending = true;
		this.#scheduleFlush();
	}
	#scheduleFlush() {
		if (this.#pendingFlush) return;
		this.#pendingFlush = true;
		queueMicrotask(() => this.flush());
	}
};
107
+ /**
108
+ * Create a reactive state container.
109
+ *
110
+ * @example
111
+ * ```typescript
112
+ * const state = createState({ count: 0 });
113
+ *
114
+ * // Subscribe to changes
115
+ * state.subscribe((current, prev) => {
116
+ * console.log('Changed:', prev, '->', current);
117
+ * });
118
+ *
119
+ * // Updates are batched
120
+ * state.patch({ count: 1 });
121
+ * state.patch({ count: 2 });
122
+ * // Only one notification fires (with count: 2)
123
+ * ```
124
+ *
125
+ * @example Selector subscriptions
126
+ * ```typescript
127
+ * const state = createState({ count: 0, name: 'test' });
128
+ *
129
+ * // Only notified when count changes
130
+ * state.subscribe(
131
+ * s => s.count,
132
+ * (current, prev) => console.log(current, prev)
133
+ * );
134
+ * ```
135
+ *
136
+ * @example Custom equality
137
+ * ```typescript
138
+ * const state = createState(
139
+ * { count: 0, name: 'test' },
140
+ * { equalityFn: (a, b) => a.count === b.count }
141
+ * );
142
+ * ```
143
+ */
144
function createState(initial, config) {
	// Thin factory over StateContainer; see the usage examples documented above.
	const container = new StateContainer(initial, config);
	return container;
}
147
+
148
+ //#endregion
149
+ //#region ../spf/dist/dev/core/abr/ewma.js
150
+ /**
151
+ * Exponentially Weighted Moving Average (EWMA)
152
+ *
153
+ * Pure functional implementation of EWMA calculations.
154
+ * Based on Shaka Player's EWMA algorithm.
155
+ */
156
/**
 * Decay factor (alpha) for a given half-life.
 *
 * After `halfLife` units of accumulated weight a sample's influence halves:
 * alpha = exp(ln(0.5) / halfLife), hence alpha ** halfLife === 0.5.
 *
 * @param halfLife - Positive half-life in sample-weight units
 * @returns Alpha in (0, 1) — near 1 means long memory, near 0 short memory
 *
 * @example
 * const alpha = calculateAlpha(2); // ≈ 0.7071 for a 2-second half-life
 */
function calculateAlpha(halfLife) {
	const LN_HALF = Math.log(.5);
	return Math.exp(LN_HALF / halfLife);
}
/**
 * Blend a new sample into an exponentially weighted moving average.
 *
 * Heavier (longer) samples move the estimate more: the decay factor is
 * alpha raised to the sample weight.
 *
 * @param prevEstimate - Previous EWMA value
 * @param value - New sample value
 * @param weight - Sample weight (typically duration in seconds)
 * @param halfLife - Half-life controlling decay speed
 * @returns Updated EWMA value
 *
 * @example
 * let estimate = 0;
 * estimate = calculateEwma(estimate, 1_000_000, 1, 2); // first sample
 * estimate = calculateEwma(estimate, 2_000_000, 1, 2); // second sample
 */
function calculateEwma(prevEstimate, value, weight, halfLife) {
	const decay = calculateAlpha(halfLife) ** weight;
	return decay * prevEstimate + (1 - decay) * value;
}
/**
 * Correct an EWMA estimate for zero-start bias.
 *
 * Early estimates are biased toward 0 because the average "remembers" the
 * zero initial value; dividing by (1 - alpha ** totalWeight) removes that
 * bias. As totalWeight grows the correction factor approaches 1.
 *
 * @param estimate - Raw (uncorrected) EWMA estimate
 * @param totalWeight - Accumulated weight from all samples
 * @param halfLife - Half-life used in the EWMA calculation
 * @returns Corrected estimate, or 0 when no weight has accumulated
 */
function applyZeroFactor(estimate, totalWeight, halfLife) {
	if (totalWeight === 0) return 0;
	const residual = calculateAlpha(halfLife) ** totalWeight;
	return estimate / (1 - residual);
}
216
+
217
+ //#endregion
218
+ //#region ../spf/dist/dev/core/abr/bandwidth-estimator.js
219
+ /**
220
+ * Dual EWMA Bandwidth Estimator
221
+ *
222
+ * Estimates available bandwidth using two EWMA calculations with different
223
+ * half-lives, taking the minimum of both. This approach (from Shaka Player):
224
+ *
225
+ * - **Fast EWMA** (2s half-life): Reacts quickly to bandwidth drops
226
+ * - **Slow EWMA** (5s half-life): Provides stability during fluctuations
227
+ * - **min(fast, slow)**: Adapts down quickly, up slowly
228
+ *
229
+ * This naturally provides asymmetric behavior needed for good QoE:
230
+ * avoiding stalls (quick downgrade) while preventing oscillation (slow upgrade).
231
+ */
232
/**
 * Default bandwidth estimator configuration.
 *
 * Values match Shaka Player defaults based on experimentation.
 */
const DEFAULT_BANDWIDTH_CONFIG = {
	fastHalfLife: 2, // seconds — fast EWMA half-life; reacts quickly to bandwidth drops
	slowHalfLife: 5, // seconds — slow EWMA half-life; damps short-lived fluctuations
	minTotalBytes: 128e3, // total bytes sampled before the estimate is trusted over the default
	minBytes: 16e3, // per-sample minimum size; filters TTFB-dominated small segments
	minDuration: 5 // per-sample minimum; compared against durationMs in sampleBandwidth, so effectively milliseconds — TODO confirm intended unit
};
244
/**
 * Fold one segment-download sample into the bandwidth estimator state.
 *
 * Samples smaller than `minBytes` (TTFB-dominated) or shorter than
 * `minDuration` (likely cache hits) only advance the byte counter; valid
 * samples update both the fast and slow EWMA estimates, weighted by the
 * download duration in seconds.
 *
 * @param state - Current estimator state
 * @param durationMs - Download duration in milliseconds
 * @param numBytes - Number of bytes downloaded
 * @param config - Estimator configuration (defaults to DEFAULT_BANDWIDTH_CONFIG)
 * @returns New estimator state (the input state is never mutated)
 */
function sampleBandwidth(state, durationMs, numBytes, config = DEFAULT_BANDWIDTH_CONFIG) {
	const bytesSampled = state.bytesSampled + numBytes;
	// Filtered samples still count toward the total-bytes warm-up threshold.
	if (numBytes < config.minBytes || durationMs < config.minDuration) {
		return {
			...state,
			bytesSampled
		};
	}
	// Bits per second: (bytes * 8) / (ms / 1000) === 8e3 * bytes / ms.
	const bitsPerSecond = 8e3 * numBytes / durationMs;
	const sampleWeight = durationMs / 1e3;
	return {
		fastEstimate: calculateEwma(state.fastEstimate, bitsPerSecond, sampleWeight, config.fastHalfLife),
		fastTotalWeight: state.fastTotalWeight + sampleWeight,
		slowEstimate: calculateEwma(state.slowEstimate, bitsPerSecond, sampleWeight, config.slowHalfLife),
		slowTotalWeight: state.slowTotalWeight + sampleWeight,
		bytesSampled
	};
}
284
/**
 * Current bandwidth estimate in bits per second.
 *
 * Returns min(fast, slow) of the zero-factor-corrected EWMA estimates:
 * drops are adopted quickly (the fast EWMA falls first) while rises are
 * adopted slowly (the slow EWMA lags). Until `minTotalBytes` have been
 * sampled, `defaultEstimate` is returned instead.
 *
 * @param state - Current estimator state
 * @param defaultEstimate - Fallback estimate before sufficient samples (bps)
 * @param config - Estimator configuration (defaults to DEFAULT_BANDWIDTH_CONFIG)
 * @returns Bandwidth estimate in bits per second
 */
function getBandwidthEstimate(state, defaultEstimate, config = DEFAULT_BANDWIDTH_CONFIG) {
	if (state.bytesSampled < config.minTotalBytes) return defaultEstimate;
	const fast = applyZeroFactor(state.fastEstimate, state.fastTotalWeight, config.fastHalfLife);
	const slow = applyZeroFactor(state.slowEstimate, state.slowTotalWeight, config.slowHalfLife);
	return Math.min(fast, slow);
}
308
+
309
+ //#endregion
310
+ //#region ../spf/dist/dev/core/buffer/forward-buffer.js
311
/**
 * Default forward buffer configuration (seconds of media to keep ahead).
 */
const DEFAULT_FORWARD_BUFFER_CONFIG = { bufferDuration: 30 };
/**
 * Start time from which forward-buffer content should be flushed.
 *
 * Anything buffered at or beyond `currentTime + bufferDuration` is no longer
 * needed for the current position and can be removed from the SourceBuffer.
 * This prevents unbounded accumulation of scattered content after seeks,
 * which can cause QuotaExceededError on long-form content.
 *
 * @param bufferedSegments - Segments currently tracked in the buffer model
 * @param currentTime - Current playback position in seconds
 * @param config - Optional forward buffer configuration
 * @returns Earliest startTime at/above the threshold (flush range is
 *   [result, Infinity)), or Infinity when nothing needs flushing
 *
 * @example
 * // Playing at 0s, buffered [0,6,12,18,24,30,36], bufferDuration=30
 * calculateForwardFlushPoint(segments, 0); // 30 — keep [0, 30)
 */
function calculateForwardFlushPoint(bufferedSegments, currentTime, config = DEFAULT_FORWARD_BUFFER_CONFIG) {
	const threshold = currentTime + config.bufferDuration;
	let flushStart = Infinity;
	for (const segment of bufferedSegments) {
		if (segment.startTime >= threshold && segment.startTime < flushStart) {
			flushStart = segment.startTime;
		}
	}
	return flushStart;
}
/**
 * Segments that must be loaded to maintain the target forward buffer.
 *
 * Selects every segment overlapping [currentTime, currentTime + bufferDuration)
 * that is not already buffered at its start time — filling gaps left by
 * seeks as well as extending the buffer toward the target.
 *
 * @param segments - All available segments from the playlist
 * @param bufferedSegments - Segments already buffered
 * @param currentTime - Current playback position in seconds
 * @param config - Optional forward buffer configuration
 * @returns Segments to load, in playlist order (empty when buffer suffices)
 *
 * @example
 * // After seek: buffered [0-12, 18-30], playing at 7s
 * getSegmentsToLoad(segments, buffered, 7, { bufferDuration: 24 });
 * // [seg-12, seg-30] — fills the gap, extends to the 31s target
 */
function getSegmentsToLoad(segments, bufferedSegments, currentTime, config = DEFAULT_FORWARD_BUFFER_CONFIG) {
	if (segments.length === 0) return [];
	const targetTime = currentTime + config.bufferDuration;
	const bufferedStarts = new Set();
	for (const segment of bufferedSegments) bufferedStarts.add(segment.startTime);
	const toLoad = [];
	for (const segment of segments) {
		const segmentEnd = segment.startTime + segment.duration;
		const overlapsWindow = segment.startTime < targetTime && segmentEnd > currentTime;
		if (overlapsWindow && !bufferedStarts.has(segment.startTime)) toLoad.push(segment);
	}
	return toLoad;
}
379
+
380
+ //#endregion
381
+ //#region ../spf/dist/dev/core/types/index.js
382
function isResolvedTrack(track) {
	// A resolved track is one that carries a `segments` property.
	return Reflect.has(track, "segments");
}
385
/**
 * Check whether a presentation has a duration (at least one track resolved).
 * Narrows the type to include the required duration.
 */
function hasPresentationDuration(presentation) {
	const { duration } = presentation;
	return duration !== undefined;
}
392
+
393
+ //#endregion
394
+ //#region ../spf/dist/dev/dom/network/chunked-stream-iterable.js
395
const DEFAULT_MIN_CHUNK_SIZE = 2 ** 17;
/**
 * Wraps a `ReadableStream<Uint8Array>` (e.g. `response.body`) as an
 * `AsyncIterable<Uint8Array>` whose chunks are at least `minChunkSize`
 * bytes: small network chunks are coalesced until the threshold is reached,
 * and any remainder is emitted when the stream ends.
 *
 * Errors from the underlying stream propagate to the consumer; the reader
 * lock is always released via `finally`.
 */
var ChunkedStreamIterable = class {
	minChunkSize;
	#readableStream;
	constructor(readableStream, { minChunkSize = DEFAULT_MIN_CHUNK_SIZE } = {}) {
		this.#readableStream = readableStream;
		this.minChunkSize = minChunkSize;
	}
	async *[Symbol.asyncIterator]() {
		const reader = this.#readableStream.getReader();
		// Bytes accumulated but not yet large enough to emit.
		let buffered;
		try {
			for (;;) {
				const { done, value } = await reader.read();
				if (done) {
					// Flush the remainder, even if below the threshold.
					if (buffered) yield buffered;
					return;
				}
				buffered = buffered ? concat(buffered, value) : value;
				if (buffered.length >= this.minChunkSize) {
					yield buffered;
					buffered = void 0;
				}
			}
		} finally {
			reader.releaseLock();
		}
	}
};
/** Concatenate two byte arrays into a freshly allocated Uint8Array. */
function concat(a, b) {
	const joined = new Uint8Array(a.length + b.length);
	joined.set(a, 0);
	joined.set(b, a.length);
	return joined;
}
439
+
440
+ //#endregion
441
+ //#region ../spf/dist/dev/dom/network/fetch.js
442
/**
 * Fetch resolvable from AddressableObject.
 *
 * Handles byte range requests if byteRange is present: a
 * `Range: bytes=start-end` header is attached and MERGED with any
 * caller-supplied headers. Returns the native fetch Response for
 * composability (can extract text, stream, etc.).
 *
 * @param addressable - Resource to fetch (url + optional byteRange)
 * @param options - Optional RequestInit-like overrides
 * @returns Promise resolving to Response
 *
 * @example
 * const response = await fetchResolvable({ url: 'https://example.com/segment.m4s' });
 * const text = await getResponseText(response);
 *
 * @example
 * // With byte range
 * const response = await fetchResolvable({
 *   url: 'https://example.com/file.mp4',
 *   byteRange: { start: 1000, end: 1999 }
 * });
 */
async function fetchResolvable(addressable, options) {
	// Seed with caller-supplied headers so the Range header merges with them.
	const headers = new Headers(options?.headers);
	if (addressable.byteRange) {
		const { start, end } = addressable.byteRange;
		headers.set("Range", `bytes=${start}-${end}`);
	}
	// Spread options BEFORE method/headers. The previous order
	// ({ method: "GET", headers, ...options }) spread options last, so a raw
	// `options.headers` value silently replaced the merged Headers object and
	// dropped the Range header for every byte-range request.
	const request = new Request(addressable.url, {
		...options,
		method: options?.method ?? "GET",
		headers
	});
	return fetch(request);
}
475
/**
 * Read the body of a Response-like object as text.
 *
 * Only requires a `text()` method, so any minimal stub works; simply
 * forwards the promise returned by `response.text()`.
 *
 * @param response - Response-like object with a text() method
 * @returns Promise resolving to the text content
 *
 * @example
 * const response = await fetchResolvable(addressable);
 * const text = await getResponseText(response);
 */
function getResponseText(response) {
	const textPromise = response.text();
	return textPromise;
}
491
+
492
+ //#endregion
493
+ //#region ../spf/dist/dev/core/reactive/combine-latest.js
494
+ /**
495
+ * Combines multiple Observable sources into a single Observable.
496
+ *
497
+ * Emits an array of latest values whenever any source emits.
498
+ * Only emits after all sources have emitted at least once.
499
+ *
500
+ * Supports selector-based subscriptions (fires only when the selected value
501
+ * changes, per the optional equalityFn) mirroring the createState API.
502
+ *
503
+ * @param sources - Array of Observable sources
504
+ * @returns Combined Observable
505
+ *
506
+ * @example
507
+ * ```ts
508
+ * const state = createState({ count: 0 });
509
+ * const events = createEventStream<Action>();
510
+ *
511
+ * combineLatest([state, events]).subscribe(([state, event]) => {
512
+ * if (event.type === 'PLAY' && state.count > 0) {
513
+ * // React to event + state condition
514
+ * }
515
+ * });
516
+ * ```
517
+ *
518
+ * @example Selector subscription
519
+ * ```ts
520
+ * combineLatest([state, owners]).subscribe(
521
+ * ([s, o]) => deriveKey(s, o),
522
+ * (key) => { ... },
523
+ * { equalityFn: keyEq }
524
+ * );
525
+ * ```
526
+ */
527
function combineLatest(sources) {
	// Subscribe to every source; invoke `emit` with a snapshot array once all
	// sources have produced at least one value, and on every emission after.
	const subscribeAll = (emit) => {
		const values = new Array(sources.length);
		const ready = new Array(sources.length).fill(false);
		let readyCount = 0;
		const teardowns = sources.map((source, index) => source.subscribe((value) => {
			values[index] = value;
			if (!ready[index]) {
				ready[index] = true;
				readyCount += 1;
			}
			if (readyCount === sources.length) emit([...values]);
		}));
		return () => {
			for (const teardown of teardowns) teardown();
		};
	};
	return { subscribe(listenerOrSelector, maybeListener, options) {
		// Plain-listener form: forward every combined emission.
		if (maybeListener === void 0) return subscribeAll(listenerOrSelector);
		// Selector form: only notify when the selected value changes.
		const selector = listenerOrSelector;
		const equals = options?.equalityFn ?? Object.is;
		let previous;
		let hasPrevious = false;
		return subscribeAll((values) => {
			const selected = selector(values);
			if (hasPrevious && equals(previous, selected)) return;
			previous = selected;
			hasPrevious = true;
			maybeListener(selected);
		});
	} };
}
561
+
562
+ //#endregion
563
+ //#region ../spf/dist/dev/core/hls/resolve-url.js
564
/**
 * Resolve a possibly-relative URL against a base URL via the native URL API.
 */
function resolveUrl(url, baseUrl) {
	const resolved = new URL(url, baseUrl);
	return resolved.href;
}
570
+
571
+ //#endregion
572
+ //#region ../spf/dist/dev/core/hls/parse-attributes.js
573
/**
 * Parse an HLS attribute list ('KEY=value,KEY2="quoted"') into a Map.
 * Quoted values may contain commas; unquoted values end at the next comma.
 */
function parseAttributeList(line) {
	const attributes = /* @__PURE__ */ new Map();
	for (const [, key, quoted, unquoted] of line.matchAll(/([A-Z0-9-]+)=(?:"([^"]*)"|([^,]*))/g)) {
		if (key) attributes.set(key, quoted ?? unquoted ?? "");
	}
	return attributes;
}
586
/**
 * Parse a RESOLUTION attribute value of the form "WIDTHxHEIGHT".
 * Returns null when the value does not match that shape.
 */
function parseResolution(value) {
	const parsed = /^(\d+)x(\d+)$/.exec(value);
	if (parsed === null) return null;
	const [, width, height] = parsed;
	return {
		width: Number.parseInt(width, 10),
		height: Number.parseInt(height, 10)
	};
}
597
/**
 * Parse a FRAME-RATE attribute into a rational frame rate.
 *
 * The common NTSC fractional rates (23.976 / 29.97 / 59.94, within a 0.01
 * tolerance) are mapped onto their exact rational values; every other
 * positive rate is rounded to the nearest whole frame rate.
 *
 * @param value - FRAME-RATE attribute string (e.g. "29.970")
 * @returns Rational frame rate object, or undefined for non-positive or
 *   unparsable values
 */
function parseFrameRate(value) {
	const fps = Number.parseFloat(value);
	if (Number.isNaN(fps) || fps <= 0) return void 0;
	if (Math.abs(fps - 23.976) < .01) return {
		frameRateNumerator: 24e3,
		frameRateDenominator: 1001
	};
	if (Math.abs(fps - 29.97) < .01) return {
		frameRateNumerator: 3e4,
		frameRateDenominator: 1001
	};
	if (Math.abs(fps - 59.94) < .01) return {
		frameRateNumerator: 6e4,
		frameRateDenominator: 1001
	};
	// Integer and other rates round to the nearest whole frame rate.
	// (The original had two identical branches here — `fps % 1 === 0` and the
	// fallthrough both returned Math.round(fps); collapsed into one.)
	return { frameRateNumerator: Math.round(fps) };
}
618
// Codec identifiers recognized when splitting a CODECS attribute
// (RFC 6381 naming). Extended beyond the original AVC/HEVC/AAC set so
// AV1/VP9 video and Dolby/Opus/FLAC audio streams are classified too.
const VIDEO_CODEC_PREFIXES = ["avc1.", "avc3.", "hvc1.", "hev1.", "av01.", "vp09."];
const AUDIO_CODEC_PREFIXES = ["mp4a.", "ac-4."];
const AUDIO_CODEC_NAMES = new Set(["ac-3", "ec-3", "opus", "flac"]);
/**
 * Parse a CODECS attribute into separate video and audio codec strings.
 *
 * @param codecs - Comma-separated RFC 6381 codec string
 *   (e.g. "avc1.4d401e,mp4a.40.2")
 * @returns Object with optional `video` and `audio` entries; unrecognized
 *   codecs are ignored, and when a kind appears more than once the last
 *   entry wins (as before)
 */
function parseCodecs(codecs) {
	const result = {};
	for (const part of codecs.split(",")) {
		const codec = part.trim();
		if (VIDEO_CODEC_PREFIXES.some((prefix) => codec.startsWith(prefix))) result.video = codec;
		else if (AUDIO_CODEC_PREFIXES.some((prefix) => codec.startsWith(prefix)) || AUDIO_CODEC_NAMES.has(codec)) result.audio = codec;
	}
	return result;
}
628
/**
 * Parse the value of an #EXTINF tag ("duration[,title]") into seconds.
 * Returns 0 when the duration part is not a number.
 */
function parseExtInfDuration(value) {
	const [durationPart = value] = value.split(",");
	const parsed = Number.parseFloat(durationPart);
	if (Number.isNaN(parsed)) return 0;
	return parsed;
}
636
/**
 * Parse a BYTERANGE value of the form "length[@offset]".
 *
 * When the offset is omitted, the range continues from `previousEnd`
 * (the byte after the previous range); without either, null is returned.
 *
 * @returns Inclusive { start, end } range, or null when unparsable
 */
function parseByteRange(value, previousEnd) {
	const match = /^(\d+)(?:@(\d+))?$/.exec(value);
	if (!match) return null;
	const length = Number.parseInt(match[1], 10);
	if (Number.isNaN(length)) return null;
	const offsetPart = match[2];
	let start;
	if (offsetPart === void 0) {
		// No explicit offset: continue from the previous range, if known.
		if (previousEnd === void 0) return null;
		start = previousEnd;
	} else {
		start = Number.parseInt(offsetPart, 10);
		if (Number.isNaN(start)) return null;
	}
	return {
		start,
		end: start + length - 1
	};
}
657
/**
 * Build a typed AttributeList accessor over a raw tag attribute string.
 */
function createAttributeList(line) {
	const attributes = parseAttributeList(line);
	return {
		/** Raw string value, or undefined when absent. */
		get(key) {
			return attributes.get(key);
		},
		/** Integer value; defaultValue when absent or unparsable. */
		getInt(key, defaultValue) {
			const raw = attributes.get(key);
			if (raw === void 0) return defaultValue;
			const parsed = Number.parseInt(raw, 10);
			return Number.isNaN(parsed) ? defaultValue : parsed;
		},
		/** Float value; defaultValue when absent or unparsable. */
		getFloat(key, defaultValue) {
			const raw = attributes.get(key);
			if (raw === void 0) return defaultValue;
			const parsed = Number.parseFloat(raw);
			return Number.isNaN(parsed) ? defaultValue : parsed;
		},
		/** HLS booleans are the literal string "YES". */
		getBool(key) {
			return attributes.get(key) === "YES";
		},
		/** Parsed WIDTHxHEIGHT resolution, or undefined. */
		getResolution(key) {
			const raw = attributes.get(key);
			if (!raw) return void 0;
			return parseResolution(raw) ?? void 0;
		},
		/** Parsed rational frame rate, or undefined. */
		getFrameRate(key) {
			const raw = attributes.get(key);
			if (!raw) return void 0;
			return parseFrameRate(raw);
		}
	};
}
693
/**
 * Match a `#TAG:` line and return its parsed attribute list.
 * Returns null when the line is not that tag.
 */
function matchTag(line, tag) {
	const prefix = `#${tag}:`;
	if (line.startsWith(prefix)) return createAttributeList(line.slice(prefix.length));
	return null;
}
702
+
703
+ //#endregion
704
+ //#region ../spf/dist/dev/core/hls/parse-media-playlist.js
705
+ /**
706
+ * Parse HLS media playlist and resolve track with segments.
707
+ *
708
+ * Takes an unresolved track (from multivariant playlist) and media playlist text,
709
+ * returns a HAM-compliant resolved track with segments.
710
+ *
711
+ * @param text - Media playlist text content
712
+ * @param unresolved - Unresolved track from parseMultivariantPlaylist
713
+ * @returns Resolved track with segments (type inferred from input)
714
+ */
715
function parseMediaPlaylist(text, unresolved) {
	const lines = text.split(/\r?\n/);
	// Segment URIs are resolved relative to the media playlist's own URL.
	const baseUrl = unresolved.url;
	const segments = [];
	let initSegmentUrl;
	let initSegmentByteRange;
	// Parser state carried between lines: #EXTINF / #EXT-X-BYTERANGE apply to
	// the next URI line that follows them.
	let currentDuration = 0;
	let currentByteRange;
	let currentTime = 0;
	let segmentIndex = 0;
	// End of the previous segment's byte range + 1; used when a subsequent
	// #EXT-X-BYTERANGE omits its offset (contiguous ranges in one file).
	let previousByteRangeEnd;
	for (const line of lines) {
		const trimmed = line.trim();
		// Skip blank lines and plain comments ("#..." that is not "#EXT...").
		if (!trimmed || trimmed.startsWith("#") && !trimmed.startsWith("#EXT")) continue;
		// Tags carrying no segment information are ignored.
		if (trimmed === "#EXTM3U" || trimmed.startsWith("#EXT-X-VERSION:") || trimmed.startsWith("#EXT-X-TARGETDURATION:") || trimmed.startsWith("#EXT-X-PLAYLIST-TYPE:") || trimmed.startsWith("#EXT-X-INDEPENDENT-SEGMENTS")) continue;
		// #EXT-X-MAP declares the initialization segment (optionally byte-ranged).
		const mapAttrs = matchTag(trimmed, "EXT-X-MAP");
		if (mapAttrs) {
			const uri = mapAttrs.get("URI");
			if (uri) {
				initSegmentUrl = resolveUrl(uri, baseUrl);
				const byteRangeStr = mapAttrs.get("BYTERANGE");
				// EXT-X-MAP byte ranges default their offset to 0.
				if (byteRangeStr) initSegmentByteRange = parseByteRange(byteRangeStr, 0) ?? void 0;
			}
			continue;
		}
		if (trimmed.startsWith("#EXTINF:")) {
			// slice(8) drops the "#EXTINF:" prefix.
			currentDuration = parseExtInfDuration(trimmed.slice(8));
			continue;
		}
		if (trimmed.startsWith("#EXT-X-BYTERANGE:")) {
			// slice(17) drops the "#EXT-X-BYTERANGE:" prefix; an omitted offset
			// continues from the previous segment's byte range.
			currentByteRange = parseByteRange(trimmed.slice(17), previousByteRangeEnd) ?? void 0;
			continue;
		}
		if (trimmed === "#EXT-X-ENDLIST") continue;
		// A non-tag line preceded by an #EXTINF is a segment URI.
		if (!trimmed.startsWith("#") && currentDuration > 0) {
			const segment = {
				id: `segment-${segmentIndex}`,
				url: resolveUrl(trimmed, baseUrl),
				duration: currentDuration,
				startTime: currentTime
			};
			if (currentByteRange) {
				segment.byteRange = currentByteRange;
				previousByteRangeEnd = currentByteRange.end + 1;
			} else previousByteRangeEnd = void 0;
			segments.push(segment);
			// Start times are the running sum of the preceding durations.
			currentTime += currentDuration;
			segmentIndex++;
			currentDuration = 0;
			currentByteRange = void 0;
		}
	}
	const totalDuration = currentTime;
	// Text tracks may legitimately have no init segment (undefined); other
	// track types fall back to an empty-URL initialization when none is given.
	const initialization = unresolved.type === "text" && !initSegmentUrl ? void 0 : initSegmentUrl ? {
		url: initSegmentUrl,
		...initSegmentByteRange ? { byteRange: initSegmentByteRange } : {}
	} : { url: "" };
	return {
		...unresolved,
		startTime: 0,
		duration: totalDuration,
		segments,
		initialization
	};
}
780
+
781
+ //#endregion
782
+ //#region ../spf/dist/dev/core/utils/generate-id.js
783
+ /**
784
+ * Generate unique ID for HAM objects.
785
+ *
786
+ * Uses timestamp + random number for sufficient uniqueness.
787
+ * IDs are strings without decimals.
788
+ *
789
+ * @returns Unique string ID in format: timestamp-random
790
+ *
791
+ * @example
792
+ * ```ts
793
+ * const id = generateId(); // "1738423156789-542891"
794
+ * ```
795
+ */
796
function generateId() {
	// Timestamp plus a random suffix in [0, 999999] — unique enough for HAM ids.
	const timestamp = Date.now();
	const random = Math.trunc(Math.random() * 1e6);
	return `${timestamp}-${random}`;
}
799
+
800
+ //#endregion
801
+ //#region ../spf/dist/dev/core/hls/parse-multivariant.js
802
+ /**
803
+ * Parse HLS multivariant playlist into a Presentation.
804
+ *
805
+ * Returns Presentation with partially resolved tracks (no segment information).
806
+ * Tracks contain metadata from multivariant playlist (bandwidth, resolution, codecs)
807
+ * but segment information is added when media playlists are fetched.
808
+ *
809
+ * @param text - Raw playlist text content
810
+ * @param unresolved - Unresolved presentation (contains URL for base URL resolution)
811
+ * @returns Presentation with partially resolved tracks (duration is undefined)
812
+ */
813
+ function parseMultivariantPlaylist(text, unresolved) {
814
+ const baseUrl = unresolved.url;
815
+ const lines = text.split(/\r?\n/);
816
+ const streams = [];
817
+ const audioRenditions = [];
818
+ const subtitleRenditions = [];
819
+ let pendingStreamInfo = null;
820
+ for (const line of lines) {
821
+ const trimmed = line.trim();
822
+ if (!trimmed || trimmed.startsWith("#") && !trimmed.startsWith("#EXT")) continue;
823
+ if (trimmed === "#EXTM3U" || trimmed.startsWith("#EXT-X-VERSION:") || trimmed.startsWith("#EXT-X-INDEPENDENT-SEGMENTS")) continue;
824
+ const mediaAttrs = matchTag(trimmed, "EXT-X-MEDIA");
825
+ if (mediaAttrs) {
826
+ const type = mediaAttrs.get("TYPE");
827
+ const groupId = mediaAttrs.get("GROUP-ID");
828
+ const name = mediaAttrs.get("NAME");
829
+ if (type === "AUDIO" && groupId && name) {
830
+ const uri = mediaAttrs.get("URI");
831
+ audioRenditions.push({
832
+ groupId,
833
+ name,
834
+ language: mediaAttrs.get("LANGUAGE"),
835
+ uri: uri ? resolveUrl(uri, baseUrl) : void 0,
836
+ default: mediaAttrs.getBool("DEFAULT"),
837
+ autoselect: mediaAttrs.getBool("AUTOSELECT")
838
+ });
839
+ }
840
+ if (type === "SUBTITLES" && groupId && name) {
841
+ const uri = mediaAttrs.get("URI");
842
+ if (uri) subtitleRenditions.push({
843
+ groupId,
844
+ name,
845
+ language: mediaAttrs.get("LANGUAGE"),
846
+ uri: resolveUrl(uri, baseUrl),
847
+ default: mediaAttrs.getBool("DEFAULT"),
848
+ autoselect: mediaAttrs.getBool("AUTOSELECT"),
849
+ forced: mediaAttrs.getBool("FORCED")
850
+ });
851
+ }
852
+ continue;
853
+ }
854
+ const streamInfAttrs = matchTag(trimmed, "EXT-X-STREAM-INF");
855
+ if (streamInfAttrs) {
856
+ pendingStreamInfo = {
857
+ bandwidth: streamInfAttrs.getInt("BANDWIDTH", 0),
858
+ resolution: streamInfAttrs.getResolution("RESOLUTION"),
859
+ codecs: streamInfAttrs.get("CODECS"),
860
+ frameRate: streamInfAttrs.getFrameRate("FRAME-RATE"),
861
+ audioGroupId: streamInfAttrs.get("AUDIO")
862
+ };
863
+ continue;
864
+ }
865
+ if (!trimmed.startsWith("#") && pendingStreamInfo) {
866
+ streams.push({
867
+ ...pendingStreamInfo,
868
+ uri: resolveUrl(trimmed, baseUrl)
869
+ });
870
+ pendingStreamInfo = null;
871
+ }
872
+ }
873
+ const videoStreams = [];
874
+ const audioOnlyStreams = [];
875
+ for (const stream of streams) {
876
+ if (!stream.codecs) {
877
+ videoStreams.push(stream);
878
+ continue;
879
+ }
880
+ const parsedCodecs = parseCodecs(stream.codecs);
881
+ if (stream.codecs.split(",").length === 1) if (parsedCodecs.audio && !parsedCodecs.video) audioOnlyStreams.push(stream);
882
+ else videoStreams.push(stream);
883
+ else videoStreams.push(stream);
884
+ }
885
+ const videoTracks = videoStreams.map((stream) => {
886
+ const codecs = stream.codecs ? parseCodecs(stream.codecs) : void 0;
887
+ const track = {
888
+ type: "video",
889
+ id: generateId(),
890
+ url: stream.uri,
891
+ bandwidth: stream.bandwidth,
892
+ mimeType: "video/mp4",
893
+ codecs: []
894
+ };
895
+ if (stream.resolution?.width !== void 0) track.width = stream.resolution.width;
896
+ if (stream.resolution?.height !== void 0) track.height = stream.resolution.height;
897
+ if (codecs?.video) track.codecs = [codecs.video];
898
+ if (stream.frameRate) track.frameRate = stream.frameRate;
899
+ if (stream.audioGroupId) track.audioGroupId = stream.audioGroupId;
900
+ return track;
901
+ });
902
+ const audioOnlyTracks = audioOnlyStreams.map((stream) => {
903
+ const codecs = stream.codecs ? parseCodecs(stream.codecs) : void 0;
904
+ return {
905
+ type: "audio",
906
+ id: generateId(),
907
+ url: stream.uri,
908
+ bandwidth: stream.bandwidth,
909
+ mimeType: "audio/mp4",
910
+ codecs: codecs?.audio ? [codecs.audio] : [],
911
+ groupId: stream.audioGroupId || "default",
912
+ name: "Default",
913
+ sampleRate: 48e3,
914
+ channels: 2
915
+ };
916
+ });
917
+ const audioTracks = [...audioRenditions.map((rendition) => {
918
+ let audioCodecs;
919
+ for (const stream of streams) if (stream.audioGroupId === rendition.groupId && stream.codecs) {
920
+ const codecs = parseCodecs(stream.codecs);
921
+ if (codecs.audio) {
922
+ audioCodecs = [codecs.audio];
923
+ break;
924
+ }
925
+ }
926
+ const track = {
927
+ type: "audio",
928
+ id: generateId(),
929
+ url: rendition.uri ?? "",
930
+ groupId: rendition.groupId,
931
+ name: rendition.name,
932
+ mimeType: "audio/mp4",
933
+ bandwidth: 0,
934
+ sampleRate: 48e3,
935
+ channels: 2,
936
+ codecs: []
937
+ };
938
+ if (rendition.language) track.language = rendition.language;
939
+ if (audioCodecs) track.codecs = audioCodecs;
940
+ if (rendition.default) track.default = rendition.default;
941
+ if (rendition.autoselect) track.autoselect = rendition.autoselect;
942
+ return track;
943
+ }), ...audioOnlyTracks];
944
+ const textTracks = subtitleRenditions.map((rendition) => {
945
+ const track = {
946
+ type: "text",
947
+ id: generateId(),
948
+ url: rendition.uri,
949
+ groupId: rendition.groupId,
950
+ label: rendition.name,
951
+ kind: "subtitles",
952
+ mimeType: "text/vtt",
953
+ bandwidth: 0
954
+ };
955
+ if (rendition.language) track.language = rendition.language;
956
+ if (rendition.default && rendition.autoselect) track.default = true;
957
+ if (rendition.autoselect) track.autoselect = rendition.autoselect;
958
+ if (rendition.forced) track.forced = rendition.forced;
959
+ return track;
960
+ });
961
+ const selectionSets = [];
962
+ if (videoTracks.length > 0) {
963
+ const videoSwitchingSet = {
964
+ id: generateId(),
965
+ type: "video",
966
+ tracks: videoTracks
967
+ };
968
+ const videoSelectionSet = {
969
+ id: generateId(),
970
+ type: "video",
971
+ switchingSets: [videoSwitchingSet]
972
+ };
973
+ selectionSets.push(videoSelectionSet);
974
+ }
975
+ if (audioTracks.length > 0) {
976
+ const audioSwitchingSet = {
977
+ id: generateId(),
978
+ type: "audio",
979
+ tracks: audioTracks
980
+ };
981
+ const audioSelectionSet = {
982
+ id: generateId(),
983
+ type: "audio",
984
+ switchingSets: [audioSwitchingSet]
985
+ };
986
+ selectionSets.push(audioSelectionSet);
987
+ }
988
+ if (textTracks.length > 0) {
989
+ const textSwitchingSet = {
990
+ id: generateId(),
991
+ type: "text",
992
+ tracks: textTracks
993
+ };
994
+ const textSelectionSet = {
995
+ id: generateId(),
996
+ type: "text",
997
+ switchingSets: [textSwitchingSet]
998
+ };
999
+ selectionSets.push(textSelectionSet);
1000
+ }
1001
+ return {
1002
+ id: generateId(),
1003
+ url: unresolved.url,
1004
+ startTime: 0,
1005
+ selectionSets
1006
+ };
1007
+ }
1008
+
1009
+ //#endregion
1010
+ //#region ../spf/dist/dev/core/abr/quality-selection.js
1011
/**
 * Default quality selection configuration.
 * Values match Shaka Player upgrade threshold (0.85 = 15% headroom).
 *
 * `safetyMargin` is the fraction of the estimated bandwidth a track may
 * consume: a track is eligible when currentBandwidth >= bandwidth / safetyMargin.
 */
const DEFAULT_QUALITY_CONFIG = { safetyMargin: .85 };
1016
/**
 * Select the best video track for the current bandwidth estimate.
 *
 * A track is eligible when `currentBandwidth >= track.bandwidth / safetyMargin`
 * (default margin 0.85 leaves 15% headroom). Among eligible tracks the one
 * with the highest bandwidth wins; bandwidth ties are broken by resolution.
 * When nothing is eligible, the lowest-bandwidth track is returned as a floor.
 *
 * @param tracks - Available video tracks (any order)
 * @param currentBandwidth - Current bandwidth estimate in bits per second
 * @param config - Optional quality selection configuration
 * @returns Selected track, or undefined when `tracks` is empty
 */
function selectQuality(tracks, currentBandwidth, config = DEFAULT_QUALITY_CONFIG) {
  if (tracks.length === 0) return;
  // Work on an ascending-bandwidth copy; never mutate the caller's array.
  const byBandwidth = [...tracks].sort((left, right) => left.bandwidth - right.bandwidth);
  let best;
  for (const candidate of byBandwidth) {
    // Skip tracks the current bandwidth cannot sustain with headroom.
    if (currentBandwidth < candidate.bandwidth / config.safetyMargin) continue;
    const improves =
      best === undefined ||
      candidate.bandwidth > best.bandwidth ||
      (candidate.bandwidth === best.bandwidth && hasHigherResolution(candidate, best));
    if (improves) best = candidate;
  }
  // Fall back to the lowest quality when no track fits the estimate.
  return best ?? byBandwidth[0];
}
1048
/**
 * Compare two tracks by total pixel count (width × height).
 * Missing dimensions count as zero.
 *
 * @param trackA - First track to compare
 * @param trackB - Second track to compare
 * @returns True when trackA covers strictly more pixels than trackB
 */
function hasHigherResolution(trackA, trackB) {
  const pixels = (track) => (track.width ?? 0) * (track.height ?? 0);
  return pixels(trackA) > pixels(trackB);
}
1059
+
1060
+ //#endregion
1061
+ //#region ../spf/dist/dev/core/buffer/back-buffer.js
1062
/**
 * Default back buffer configuration.
 *
 * `keepSegments` is how many already-played segments are retained behind the
 * playhead before older media becomes eligible for flushing.
 */
const DEFAULT_BACK_BUFFER_CONFIG = { keepSegments: 2 };
1066
/**
 * Compute the back-buffer flush point: the time up to which old segments may
 * be removed (flush range is [0, flushEnd)).
 *
 * Keeps `config.keepSegments` segments behind the playhead; everything before
 * the Nth-from-last retained segment is flushable. Returns 0 when there is
 * nothing to flush, and `currentTime` when no segments should be kept.
 *
 * @param segments - Available segments (sorted by startTime)
 * @param currentTime - Current playback position in seconds
 * @param config - Optional back buffer configuration
 * @returns Time in seconds to flush up to
 *
 * @example
 * // Segments at 0, 6, 12, 18; playing at 18s with keepSegments=2
 * // → returns 6 (flush [0, 6), keep [6, 18))
 */
function calculateBackBufferFlushPoint(segments, currentTime, config = DEFAULT_BACK_BUFFER_CONFIG) {
  if (segments.length === 0) return 0;
  const behind = segments.filter(({ startTime }) => startTime < currentTime);
  if (behind.length === 0) return 0;
  const flushCount = behind.length - config.keepSegments;
  if (flushCount <= 0) return 0;
  // keepSegments of 0 (or less) means nothing behind the playhead survives.
  if (flushCount >= behind.length) return currentTime;
  return behind[flushCount].startTime;
}
1103
+
1104
+ //#endregion
1105
+ //#region ../spf/dist/dev/dom/media/mediasource-setup.js
1106
+ /**
1107
+ * MediaSource Setup
1108
+ *
1109
+ * Utilities for creating and configuring MediaSource/ManagedMediaSource
1110
+ * for MSE (Media Source Extensions) playback.
1111
+ *
1112
+ * Global ManagedMediaSource types are defined in ./mediasource.d.ts
1113
+ */
1114
/**
 * Feature-detect the MediaSource API in the current environment.
 *
 * @returns True when a global `MediaSource` constructor exists
 */
function supportsMediaSource() {
  const available = typeof MediaSource !== "undefined";
  return available;
}
1120
/**
 * Feature-detect the ManagedMediaSource API (newer Safari API with better
 * lifecycle management).
 *
 * @returns True when a global `ManagedMediaSource` constructor exists
 */
function supportsManagedMediaSource() {
  const available = typeof ManagedMediaSource !== "undefined";
  return available;
}
1127
/**
 * Create a MediaSource or ManagedMediaSource instance.
 *
 * ManagedMediaSource is only used when explicitly requested via
 * `preferManaged` AND the platform supports it; otherwise a plain
 * MediaSource is created.
 *
 * @param options - Creation options ({ preferManaged?: boolean })
 * @returns A MediaSource or ManagedMediaSource instance
 * @throws Error when no MediaSource API is available
 *
 * @example
 * const mediaSource = createMediaSource();
 * attachMediaSource(mediaSource, document.querySelector('video'));
 */
function createMediaSource({ preferManaged = false } = {}) {
  if (preferManaged && supportsManagedMediaSource()) return new ManagedMediaSource();
  if (supportsMediaSource()) return new MediaSource();
  throw new Error("MediaSource API is not supported");
}
1145
/**
 * Attach a MediaSource to an HTMLMediaElement.
 *
 * ManagedMediaSource (Safari) is attached via `srcObject` (no object URL);
 * a regular MediaSource is attached via `createObjectURL` + `src`. The
 * returned `detach` function reverses whichever attachment was used and
 * revokes the object URL when one was created.
 *
 * @param mediaSource - The MediaSource to attach
 * @param mediaElement - The media element to attach to
 * @returns Object with `url` (empty string for the managed path) and `detach`
 *
 * @example
 * const { detach } = attachMediaSource(createMediaSource(), videoElement);
 * await waitForSourceOpen(mediaSource);
 * // ...later:
 * detach();
 */
function attachMediaSource(mediaSource, mediaElement) {
  const isManaged = supportsManagedMediaSource() && mediaSource instanceof ManagedMediaSource;
  if (isManaged) {
    // ManagedMediaSource requires remote playback to be disabled.
    mediaElement.disableRemotePlayback = true;
    mediaElement.srcObject = mediaSource;
    return {
      url: "",
      detach: () => {
        mediaElement.srcObject = null;
        mediaElement.load();
      }
    };
  }
  const objectUrl = URL.createObjectURL(mediaSource);
  mediaElement.src = objectUrl;
  return {
    url: objectUrl,
    detach: () => {
      mediaElement.removeAttribute("src");
      mediaElement.load();
      URL.revokeObjectURL(objectUrl);
    }
  };
}
1187
/**
 * Wait for a MediaSource to reach the 'open' state.
 * Resolves immediately when already open; rejects with an AbortError
 * DOMException when `signal` is (or becomes) aborted.
 *
 * @param mediaSource - The MediaSource to wait for
 * @param signal - Optional AbortSignal for cancellation
 * @returns Promise that resolves once the MediaSource is open
 *
 * @example
 * attachMediaSource(mediaSource, videoElement);
 * await waitForSourceOpen(mediaSource);
 * // MediaSource is now ready for SourceBuffer creation
 */
function waitForSourceOpen(mediaSource, signal) {
  return new Promise((resolve, reject) => {
    if (mediaSource.readyState === "open") {
      resolve();
      return;
    }
    if (signal?.aborted) {
      reject(new DOMException("Aborted", "AbortError"));
      return;
    }
    // Both listeners share one controller so that whichever fires first
    // detaches the other.
    const listenerController = new AbortController();
    const listenerOptions = { signal: listenerController.signal };
    const settle = (complete, value) => {
      listenerController.abort();
      complete(value);
    };
    mediaSource.addEventListener("sourceopen", () => settle(resolve), listenerOptions);
    signal?.addEventListener("abort", () => settle(reject, new DOMException("Aborted", "AbortError")), listenerOptions);
  });
}
1223
/**
 * Create a SourceBuffer on a MediaSource.
 *
 * @param mediaSource - The MediaSource (must be in 'open' state)
 * @param mimeCodec - MIME type with codecs (e.g., 'video/mp4; codecs="avc1.42E01E"')
 * @returns The created SourceBuffer
 * @throws Error when the MediaSource is not open or the codec is unsupported
 *
 * @example
 * await waitForSourceOpen(mediaSource);
 * const buffer = createSourceBuffer(mediaSource, 'video/mp4; codecs="avc1.42E01E"');
 */
function createSourceBuffer(mediaSource, mimeCodec) {
  const { readyState } = mediaSource;
  if (readyState !== "open") throw new Error("MediaSource is not open");
  const supported = isCodecSupported(mimeCodec);
  if (!supported) throw new Error(`Codec not supported: ${mimeCodec}`);
  return mediaSource.addSourceBuffer(mimeCodec);
}
1240
/**
 * Check whether a MIME type + codecs string is playable via MSE.
 * Returns false outright when the MediaSource API itself is unavailable.
 *
 * @param mimeCodec - MIME type with codecs string
 * @returns True when the codec is supported
 *
 * @example
 * if (isCodecSupported('video/mp4; codecs="avc1.42E01E"')) {
 *   // Create source buffer
 * }
 */
function isCodecSupported(mimeCodec) {
  return supportsMediaSource() ? MediaSource.isTypeSupported(mimeCodec) : false;
}
1255
+
1256
+ //#endregion
1257
+ //#region ../spf/dist/dev/core/events/create-event-stream.js
1258
/**
 * Minimal event stream with Observable-like shape.
 *
 * Simple Subject/Observable-like implementation for dispatching discrete
 * events. Events are dispatched synchronously to all subscribers.
 */
const EVENT_STREAM_SYMBOL = Symbol("@videojs/event-stream");
/**
 * Creates a minimal event stream for dispatching discrete events.
 *
 * Dispatch iterates over a snapshot of the subscriber set, so listeners
 * added or removed during a dispatch take effect on the NEXT dispatch.
 * Events must carry a `type` property for discriminated-union narrowing.
 *
 * @example
 * ```ts
 * type Action = { type: 'PLAY' } | { type: 'PAUSE' };
 * const events = createEventStream<Action>();
 *
 * events.subscribe((action) => {
 *   if (action.type === 'PLAY') {
 *     // Type narrowed to { type: 'PLAY' }
 *   }
 * });
 *
 * events.dispatch({ type: 'PLAY' });
 * ```
 */
function createEventStream() {
  const listeners = new Set();
  return {
    [EVENT_STREAM_SYMBOL]: true,
    dispatch(event) {
      // Snapshot first: mutations during dispatch must not affect this pass.
      for (const notify of [...listeners]) notify(event);
    },
    subscribe(listener) {
      listeners.add(listener);
      return () => listeners.delete(listener);
    }
  };
}
1301
+
1302
+ //#endregion
1303
+ //#region ../spf/dist/dev/core/features/resolve-presentation.js
1304
/**
 * Type guard to check if presentation is unresolved.
 *
 * An unresolved presentation is an addressable reference (`{ url }`) that has
 * not yet been parsed into a full presentation (which gains an `id`).
 *
 * @param presentation - Candidate value (may be undefined or null)
 * @returns True when presentation is an object with a `url` and no `id`
 */
function isUnresolved(presentation) {
  // Null-safe object check: the previous `"url" in presentation` guard only
  // excluded `undefined`, so `null` (or any primitive) threw a TypeError —
  // a type guard should classify, never throw.
  if (presentation === null || typeof presentation !== "object") return false;
  return "url" in presentation && !("id" in presentation);
}
1310
/**
 * Whether the state currently holds an unresolved presentation (URL only)
 * that is eligible for fetching and parsing.
 */
function canResolve$1(state) {
  const { presentation } = state;
  return isUnresolved(presentation);
}
1313
/**
 * Determines if resolution conditions are met based on preload policy and event.
 *
 * Resolution happens either state-driven (preload is 'auto' or 'metadata')
 * or event-driven (a play event arrives, e.g. with preload='none').
 *
 * @param state - Current presentation state
 * @param event - Current action/event
 * @returns True when resolution conditions are met
 */
function shouldResolve$1(state, event) {
  const { preload } = state;
  if (preload === "auto" || preload === "metadata") return true;
  return event.type === "play";
}
1328
/**
 * Syncs the preload attribute from the mediaElement into state.
 *
 * Subscribes to the owners container and, whenever it emits, copies
 * `mediaElement.preload` into state — but only while state has no preload
 * value yet (an explicitly configured preload is never overwritten).
 * An empty preload attribute is normalized to `undefined`.
 *
 * @param state - Immutable state container
 * @param owners - Mutable platform objects container
 * @returns Cleanup function to stop syncing
 */
function syncPreloadAttribute(state, owners) {
  const copyPreload = (owned) => {
    if (state.current.preload !== void 0) return;
    const attrValue = owned.mediaElement?.preload;
    state.patch({ preload: attrValue || void 0 });
  };
  return owners.subscribe(copyPreload);
}
1345
/**
 * Resolves unresolved presentations using reactive composition.
 *
 * Uses combineLatest to compose state + events, enabling both state-driven
 * and event-driven resolution triggers.
 *
 * Triggers resolution when:
 * - State-driven: Unresolved presentation + preload allows (auto/metadata)
 * - Event-driven: PLAY event when preload="none"
 *
 * @example
 * ```ts
 * const state = createState({ presentation: undefined, preload: 'auto' });
 * const events = createEventStream<PresentationAction>();
 *
 * const cleanup = resolvePresentation({ state, events });
 *
 * // State-driven: resolves immediately when preload allows
 * state.patch({ presentation: { url: 'http://example.com/playlist.m3u8' } });
 *
 * // Event-driven: resolves on PLAY when preload="none"
 * state.patch({ preload: 'none', presentation: { url: '...' } });
 * events.dispatch({ type: 'PLAY' });
 * ```
 */
function resolvePresentation({ state, events }) {
  // Re-entrancy guard: combineLatest may fire again while a fetch is pending.
  let resolving = false;
  // Controller for the in-flight fetch; non-null only while resolving.
  let abortController = null;
  const cleanup = combineLatest([state, events]).subscribe(async ([currentState, event]) => {
    if (!canResolve$1(currentState) || !shouldResolve$1(currentState, event) || resolving) return;
    try {
      resolving = true;
      abortController = new AbortController();
      const { presentation } = currentState;
      // Fetch the playlist text, parse it, and swap the unresolved
      // presentation for the parsed one in a single patch.
      const parsed = parseMultivariantPlaylist(await getResponseText(await fetchResolvable(presentation, { signal: abortController.signal })), presentation);
      state.patch({ presentation: parsed });
    } catch (error) {
      // Cancellation (via the returned cleanup) is expected — swallow it.
      if (error instanceof Error && error.name === "AbortError") return;
      // NOTE(review): non-abort errors are rethrown from an async subscriber,
      // which surfaces as an unhandled rejection to whoever owns the stream.
      throw error;
    } finally {
      resolving = false;
      abortController = null;
    }
  });
  // Cleanup aborts any in-flight fetch before unsubscribing.
  return () => {
    abortController?.abort();
    cleanup();
  };
}
1394
+
1395
+ //#endregion
1396
+ //#region ../spf/dist/dev/core/features/quality-switching.js
1397
/**
 * Default quality switching configuration.
 *
 * - `safetyMargin`: bandwidth headroom factor shared with selectQuality (0.85).
 * - `minUpgradeInterval`: minimum milliseconds between quality upgrades (8000)
 *   to prevent oscillation.
 * - `defaultBandwidth`: bandwidth estimate in bps (5,000,000) used before any
 *   samples exist.
 */
const DEFAULT_SWITCHING_CONFIG = {
	safetyMargin: .85,
	minUpgradeInterval: 8e3,
	defaultBandwidth: 5e6
};
1405
/**
 * Get all video tracks from a presentation's first video switching set.
 * Returns [] when the presentation is still unresolved (no selectionSets yet)
 * or when the video switching set has no tracks.
 */
function getVideoTracks(presentation) {
  const videoSet = presentation.selectionSets?.find((candidate) => candidate.type === "video");
  if (!videoSet) return [];
  return videoSet.switchingSets[0]?.tracks ?? [];
}
1412
/**
 * Quality switching orchestration (F9).
 *
 * Reacts to bandwidth estimate changes and updates `selectedVideoTrackId`
 * when a different quality is optimal:
 *
 * - **Downgrades** happen immediately to avoid buffering stalls.
 * - **Upgrades** are gated by `minUpgradeInterval` to prevent oscillation.
 * - The first switch (from any track, or no track) is always immediate.
 *
 * Smooth switching is handled downstream: when `selectedVideoTrackId` changes,
 * `resolveTrack` fetches the new playlist and `loadSegments` reloads the init
 * segment, then appends media segments from the current position in the new
 * quality. The browser's SourceBuffer replaces the overlapping buffered range.
 *
 * @param state - Immutable state container (subscribed for the lifetime of the feature)
 * @param config - Optional overrides for DEFAULT_SWITCHING_CONFIG
 * @returns Cleanup function that unsubscribes from state
 *
 * @example
 * const cleanup = switchQuality({ state });
 * // Later, when done:
 * cleanup();
 */
function switchQuality({ state }, config = {}) {
	const safetyMargin = config.safetyMargin ?? DEFAULT_SWITCHING_CONFIG.safetyMargin;
	const minUpgradeInterval = config.minUpgradeInterval ?? DEFAULT_SWITCHING_CONFIG.minUpgradeInterval;
	const defaultBandwidth = config.defaultBandwidth ?? DEFAULT_SWITCHING_CONFIG.defaultBandwidth;
	// Initialized at creation time, so the very first gated upgrade is measured
	// from feature start (isFirst bypasses the gate entirely, see below).
	let lastUpgradeTime = Date.now();
	// True until the first subscription fire that has tracks to evaluate.
	let firstMeaningfulFire = true;
	return state.subscribe((currentState) => {
		const { presentation, bandwidthState, selectedVideoTrackId, abrDisabled } = currentState;
		if (abrDisabled === true) return;
		if (!presentation || !bandwidthState) return;
		const videoTracks = getVideoTracks(presentation);
		if (videoTracks.length === 0) return;
		// Capture-then-clear: only this fire (the first with tracks) is exempt
		// from the upgrade interval gate.
		const isFirst = firstMeaningfulFire;
		firstMeaningfulFire = false;
		const optimal = selectQuality(videoTracks, getBandwidthEstimate(bandwidthState, defaultBandwidth), { safetyMargin });
		if (!optimal || optimal.id === selectedVideoTrackId) return;
		const currentTrack = videoTracks.find((t) => t.id === selectedVideoTrackId);
		// Upgrades (and switches from an unknown/absent current track) are
		// rate-limited; downgrades fall through and apply immediately.
		if (!currentTrack || optimal.bandwidth > currentTrack.bandwidth) {
			const now = Date.now();
			if (!isFirst && now - lastUpgradeTime < minUpgradeInterval) return;
			lastUpgradeTime = now;
		}
		state.patch({ selectedVideoTrackId: optimal.id });
	});
}
1457
+
1458
+ //#endregion
1459
+ //#region ../spf/dist/dev/core/utils/track-selection.js
1460
/**
 * Map track type to the selected-track-ID property key in state.
 */
const SelectedTrackIdKeyByType = {
  video: "selectedVideoTrackId",
  audio: "selectedAudioTrackId",
  text: "selectedTextTrackId"
};
/**
 * Get the selected track from state by type.
 *
 * Looks up the selected track ID for the given type and finds the matching
 * track in the first switching set of that type's selection set. Returns
 * undefined when the presentation is missing or still unresolved (no `id`).
 *
 * @example
 * const videoTrack = getSelectedTrack(state, 'video');
 * if (videoTrack && isResolvedTrack(videoTrack)) {
 *   // videoTrack is VideoTrack
 * }
 */
function getSelectedTrack(state, type) {
  const { presentation } = state;
  /** @TODO Consider moving and reusing isUnresolved(presentation) (CJP) */
  if (!presentation || !("id" in presentation)) return undefined;
  const selectedId = state[SelectedTrackIdKeyByType[type]];
  const selectionSet = presentation.selectionSets.find((set) => set.type === type);
  return selectionSet?.switchingSets[0]?.tracks.find((track) => track.id === selectedId);
}
1486
+
1487
+ //#endregion
1488
+ //#region ../spf/dist/dev/dom/features/segment-loader-actor.js
1489
/**
 * Creates a SegmentLoaderActor for one track type (video or audio).
 *
 * Receives load assignments via `send()` and owns all execution: planning,
 * removes, fetches, and appends. Coordinates with the SourceBufferActor for
 * all physical SourceBuffer operations.
 *
 * Planning (Cases 1–3) happens in `send()` on every incoming message, producing
 * an ordered LoadTask list. The runner drains that list sequentially. When a new
 * message arrives mid-run, send() replans and either continues the in-flight
 * operation (if still needed) or preempts it.
 *
 * @param sourceBufferActor - Shared SourceBufferActor reference (not owned)
 * @param fetchBytes - Tracked fetch closure (owns throughput sampling for segments).
 *   Accepts an optional `minChunkSize` in options; init segments pass `Infinity`
 *   so the entire body accumulates as one chunk before appending.
 * @returns Actor with `send(message)` and `destroy()` methods
 */
function createSegmentLoaderActor(sourceBufferActor, fetchBytes) {
  // Replacement plan produced by send() while a run is in progress; picked up
  // by the runner after the current task settles.
  let pendingTasks = null;
  // Identity of the currently executing operation (init append vs media
  // segment append); send() uses these for continue/preempt decisions.
  let inFlightInitTrackId = null;
  let inFlightSegmentId = null;
  // Controller for the current run; replaced when a new plan preempts it.
  let abortController = null;
  let running = false;
  let destroyed = false;
  // Narrow a track's segment list to those fully committed (non-partial) in
  // the SourceBufferActor's segment model.
  const getBufferedSegments = (allSegments) => {
    const bufferedIds = new Set(sourceBufferActor.snapshot.context.segments.filter((s) => !s.partial).map((s) => s.id));
    return allSegments.filter((s) => bufferedIds.has(s.id));
  };
  /**
   * Translate a load message into an ordered LoadTask list based on committed
   * actor state. In-flight awareness is handled separately in send().
   *
   * @todo Rename alongside LoadTask (e.g. planOps).
   *
   * Case 1 — Removes: forward and back buffer flush points, segment-aligned.
   * No flush on track switch: appending new content overwrites existing buffer
   * ranges, and the actor's time-aligned deduplication keeps the segment model
   * accurate as new segments arrive.
   *
   * Case 2 — Init: schedule if not yet committed for this track.
   *
   * Case 3 — Segments: all segments in the load window not yet committed.
   */
  const planTasks = (message) => {
    const { track, range } = message;
    const actorCtx = sourceBufferActor.snapshot.context;
    const bufferedSegments = getBufferedSegments(track.segments);
    const currentTime = range?.start ?? 0;
    const tasks = [];
    if (range) {
      // Case 1: flush ahead of the playhead (seek back) and behind it.
      const forwardFlushStart = calculateForwardFlushPoint(bufferedSegments, currentTime);
      if (forwardFlushStart < Infinity) tasks.push({
        type: "remove",
        start: forwardFlushStart,
        end: Infinity
      });
      const backFlushEnd = calculateBackBufferFlushPoint(bufferedSegments, currentTime);
      if (backFlushEnd > 0) tasks.push({
        type: "remove",
        start: 0,
        end: backFlushEnd
      });
    }
    // Case 2: init segment needed whenever the committed init belongs to a
    // different track (first load or track switch).
    if (actorCtx.initTrackId !== track.id) tasks.push({
      type: "append-init",
      meta: { trackId: track.id },
      url: track.initialization.url,
      ...track.initialization.byteRange !== void 0 && { byteRange: track.initialization.byteRange }
    });
    if (range) {
      // Case 3: media segments in the load window. Time-aligned comparison
      // (EPSILON) dedupes against committed segments; an already-buffered
      // segment is only re-fetched when it is partial or when this track has
      // strictly higher bandwidth (quality upgrade overwrite).
      const EPSILON = 1e-4;
      const segmentsToLoad = getSegmentsToLoad(track.segments, bufferedSegments, currentTime).filter((seg) => {
        const existing = actorCtx.segments.find((s) => Math.abs(s.startTime - seg.startTime) < EPSILON);
        if (existing?.partial) return true;
        if (!existing?.trackBandwidth || !track.bandwidth) return true;
        return track.bandwidth > existing.trackBandwidth;
      });
      for (const segment of segmentsToLoad) tasks.push({
        type: "append-segment",
        meta: {
          id: segment.id,
          startTime: segment.startTime,
          duration: segment.duration,
          trackId: track.id,
          trackBandwidth: track.bandwidth
        },
        url: segment.url,
        ...segment.byteRange !== void 0 && { byteRange: segment.byteRange }
      });
    }
    return tasks;
  };
  /**
   * Execute a single LoadTask: fetch (if needed) then forward to SourceBufferActor.
   * Sets/clears in-flight tracking around async operations so send() can make
   * accurate continue/preempt decisions at any point during execution.
   *
   * @todo Rename alongside LoadTask (e.g. executeOp).
   */
  const executeLoadTask = async (task) => {
    const signal = abortController.signal;
    try {
      if (task.type === "remove") {
        await sourceBufferActor.send(task, signal);
        return;
      }
      if (task.type === "append-init") {
        inFlightInitTrackId = task.meta.trackId;
        if (!signal.aborted) {
          const data = await fetchBytes(task, {
            signal,
            minChunkSize: Infinity
          });
          // If the preempting plan wants a DIFFERENT track's init, discard
          // this one; otherwise append the fetched init even after an abort
          // (using a fresh, non-aborted signal) so the work isn't wasted.
          const isTrackSwitch = pendingTasks?.some((t) => t.type === "append-init" && t.meta.trackId !== task.meta.trackId);
          if (!signal.aborted || !isTrackSwitch) {
            const appendSignal = signal.aborted ? new AbortController().signal : signal;
            await sourceBufferActor.send({
              type: "append-init",
              data,
              meta: task.meta
            }, appendSignal);
          }
        }
        return;
      }
      inFlightSegmentId = task.meta.id;
      if (!signal.aborted) {
        const stream = await fetchBytes(task, { signal });
        if (!signal.aborted) await sourceBufferActor.send({
          type: "append-segment",
          data: stream,
          meta: task.meta
        }, signal);
      }
    } finally {
      inFlightInitTrackId = null;
      inFlightSegmentId = null;
    }
  };
  /**
   * Drain the scheduled task list sequentially.
   * After each task completes, checks for a pending replacement plan from send().
   * If the signal was aborted and no new plan arrived, stops immediately.
   */
  const runScheduled = async (initialTasks) => {
    running = true;
    abortController = new AbortController();
    let scheduled = initialTasks;
    while (scheduled.length > 0 && !destroyed) {
      const task = scheduled[0];
      scheduled = scheduled.slice(1);
      try {
        await executeLoadTask(task);
      } catch (error) {
        // AbortError is expected preemption; anything else drops the rest of
        // the plan (a fresh plan from send() can restart loading).
        if (error instanceof Error && error.name === "AbortError") {} else {
          console.error("Unexpected error in segment loader:", error);
          scheduled = [];
        }
      }
      if (pendingTasks !== null) {
        // Adopt the replacement plan with a fresh controller.
        scheduled = pendingTasks;
        pendingTasks = null;
        abortController = new AbortController();
      } else if (abortController.signal.aborted) break;
    }
    abortController = null;
    running = false;
  };
  return {
    send(message) {
      if (destroyed) return;
      const allTasks = planTasks(message);
      if (!running) {
        if (allTasks.length === 0) return;
        // Fire-and-forget: the runner manages its own lifecycle/errors.
        runScheduled(allTasks);
        return;
      }
      // If the in-flight operation is still part of the new plan, let it
      // finish and queue only the remainder; otherwise preempt via abort and
      // replace the whole plan.
      if (inFlightSegmentId !== null && allTasks.some((t) => t.type === "append-segment" && t.meta.id === inFlightSegmentId) || inFlightInitTrackId !== null && allTasks.some((t) => t.type === "append-init" && t.meta.trackId === inFlightInitTrackId)) pendingTasks = allTasks.filter((t) => !(t.type === "append-segment" && t.meta.id === inFlightSegmentId) && !(t.type === "append-init" && t.meta.trackId === inFlightInitTrackId));
      else {
        pendingTasks = allTasks;
        abortController?.abort();
      }
    },
    destroy() {
      destroyed = true;
      abortController?.abort();
    }
  };
}
1678
+
1679
+ //#endregion
1680
+ //#region ../spf/dist/dev/dom/features/load-segments.js
1681
// Map track type to the corresponding buffer-actor key in the owners state.
const ActorKeyByType = {
	video: "videoBufferActor",
	audio: "audioBufferActor"
};
1685
/**
 * Build a fetch closure that samples throughput as chunks arrive.
 *
 * The returned function starts the request eagerly and yields body chunks
 * lazily via an async iterable. Each chunk's download time and size are fed
 * into the `throughput` state (patched and flushed synchronously) and
 * reported to the optional `onSample` callback before the chunk is yielded.
 *
 * @param throughput - Bandwidth state container (read via `.current`, updated via `.patch`/`.flush`)
 * @param onSample - Optional callback invoked with each new bandwidth sample
 */
function createTrackedFetch(throughput, onSample) {
  return async (addressable, options) => {
    const { minChunkSize, ...fetchOptions } = options ?? {};
    const response = await fetchResolvable(addressable, fetchOptions);
    const body = response.body;
    if (!body) throw new Error("Response has no body");
    const chunkerArgs = minChunkSize !== void 0 ? [{ minChunkSize }] : [];
    return {
      async *[Symbol.asyncIterator]() {
        let chunkStart = performance.now();
        for await (const chunk of new ChunkedStreamIterable(body, ...chunkerArgs)) {
          const elapsed = performance.now() - chunkStart;
          const sample = sampleBandwidth(throughput.current, elapsed, chunk.byteLength);
          throughput.patch(sample);
          throughput.flush();
          onSample?.(sample);
          yield chunk;
          // Restart the clock after yielding so consumer time isn't counted.
          chunkStart = performance.now();
        }
      }
    };
  };
}
1705
/**
 * Plain (non-sampling) fetch helper: kicks off the request immediately and
 * exposes the response body as a lazy chunked iterable. Audio tracks use this
 * path since they do not contribute bandwidth samples.
 * Pass `minChunkSize: Infinity` to collect the whole body as a single chunk
 * (same effect as arrayBuffer(), but via the shared streaming machinery).
 */
async function fetchStream(addressable, options) {
	const { minChunkSize, ...rest } = options ?? {};
	const res = await fetchResolvable(addressable, rest);
	if (!res.body) throw new Error("Response has no body");
	const iterableArgs = minChunkSize !== void 0 ? [{ minChunkSize }] : [];
	return new ChunkedStreamIterable(res.body, ...iterableArgs);
}
1717
/**
 * Project the reactive state down to the handful of inputs that segment
 * loading decisions depend on (see loadingInputsEq).
 */
function selectLoadingInputs([segmentsCanLoad, state], type) {
	const track = getSelectedTrack(state, type);
	return {
		playbackInitiated: state.playbackInitiated,
		preload: state.preload,
		currentTime: state.currentTime,
		track,
		segmentsCanLoad
	};
}
1727
+ /**
1728
+ * Equality function encoding the condition hierarchy for relevant changes.
1729
+ *
1730
+ * Pre-play (!playbackInitiated):
1731
+ * Only preload changes matter. currentTime and resolvedTrackId are ignored
1732
+ * (track changes not supported pre-play; currentTime value is used at
1733
+ * trigger time but changes don't re-trigger).
1734
+ *
1735
+ * playbackInitiated transition:
1736
+ * Always fires (handled in the subscriber; preload='auto' suppression
1737
+ * applied there since equality functions have no memory of prior values).
1738
+ *
1739
+ * Post-play (playbackInitiated):
1740
+ * resolvedTrackId changes (track switch or previously-unresolved track
1741
+ * resolving) and currentTime changes both trigger. preload is irrelevant.
1742
+ */
1743
/**
 * Start time of the segment containing `currentTime`, or undefined when the
 * time is null/undefined, before the first segment, or the track is missing.
 * The final segment acts as a catch-all for times past the end.
 */
const segmentStartFor = (currentTime, track) => {
	if (currentTime == null) return void 0;
	const containsTime = ({ startTime, duration }, index, all) => {
		if (currentTime < startTime) return false;
		return currentTime < startTime + duration || index === all.length - 1;
	};
	return track?.segments.find(containsTime)?.startTime;
};
1747
/**
 * Returns true when the inputs are equal (no meaningful change — don't fire).
 * Returns false when the inputs differ in a way that requires a new message.
 *
 * This IS the shouldLoadSegments logic, expressed as an equality function.
 */
function loadingInputsEq(prevState, curState) {
	// Loading disabled entirely → report "equal" so nothing fires.
	if (!curState.segmentsCanLoad) return true;
	if (!curState.playbackInitiated) {
		// Pre-play: only a preload change (away from 'none') can trigger.
		if (curState.preload === "none") return true;
		return curState.preload === prevState.preload;
	}
	// playbackInitiated just transitioned: fire unless we were already in
	// full-range ('auto') mode, where segment-boundary logic takes over.
	if (!prevState.playbackInitiated && curState.playbackInitiated) {
		if (prevState.preload !== "auto") return false;
	}
	// Missing or unresolved track → nothing loadable; suppress.
	if (!curState.track || !isResolvedTrack(curState.track)) return true;
	// Track switch (or a previously-unresolved track resolving) → fire.
	// (curState.track is already guaranteed resolved by the guard above, so
	// the former redundant isResolvedTrack re-check was dropped.)
	if (prevState.track?.id !== curState.track.id) return false;
	// Otherwise fire only when currentTime crosses a segment boundary.
	return segmentStartFor(prevState.currentTime, curState.track) === segmentStartFor(curState.currentTime, curState.track);
}
1766
+ /**
1767
+ * Load segments orchestration — Reactor layer.
1768
+ *
1769
+ * Sends typed load messages to a SegmentLoaderActor when relevant conditions
1770
+ * change. Uses targeted subscriptions rather than broad combineLatest so only
1771
+ * meaningful state changes trigger evaluation.
1772
+ *
1773
+ * Condition hierarchy (see SegmentLoadingKey for detail):
1774
+ *
1775
+ * !playbackInitiated
1776
+ * preload==='none' (or unset) → dormant; no trigger
1777
+ * preload==='metadata' → trigger on transition to 'metadata'
1778
+ * preload==='auto' → trigger on transition to 'auto'
1779
+ *
1780
+ * !playbackInitiated → playbackInitiated
1781
+ * preload !== 'auto' → trigger (message shape changes)
1782
+ * preload === 'auto' → suppressed (was already full-range mode;
1783
+ * let segmentStart take over post-play)
1784
+ * KNOWN LIMITATION: seek-before-play with
1785
+ * preload='auto' is not supported — if the
1786
+ * user seeks before pressing play, the
1787
+ * first re-send is delayed until the next
1788
+ * segment boundary crossing post-play.
1789
+ *
1790
+ * playbackInitiated
1791
+ * resolvedTrackId changes → trigger
1792
+ * segmentStart(currentTime) changes → trigger (segment boundary only)
1793
+ *
1794
+ * @example
1795
+ * const cleanup = loadSegments({ state, owners }, { type: 'video' });
1796
+ */
1797
function loadSegments({ state, owners }, config) {
	const { type } = config;
	// Which owner slot holds this media type's buffer actor.
	const actorKey = ActorKeyByType[type];
	// Seed the throughput estimator from any persisted bandwidth state.
	const initialBandwidth = state.current.bandwidthState;
	const throughput = createState(initialBandwidth ?? {
		fastEstimate: 0,
		fastTotalWeight: 0,
		slowEstimate: 0,
		slowTotalWeight: 0,
		bytesSampled: 0
	});
	// Video samples bandwidth per chunk; audio uses the plain streaming fetch.
	// NOTE(review): samples are written back into state only when
	// bandwidthState already existed at startup — confirm this is intended
	// rather than persisting unconditionally.
	const fetchBytes = type === "video" ? createTrackedFetch(throughput, initialBandwidth !== void 0 ? (next) => {
		state.patch({ bandwidthState: next });
		state.flush();
	} : void 0) : fetchStream;
	const segmentLoader = createState(void 0);
	// Create/destroy the segment-loader actor in lockstep with the buffer actor.
	const unsubActorLifecycle = owners.subscribe((o) => o[actorKey], (actor) => {
		if (actor) segmentLoader.patch(createSegmentLoaderActor(actor, fetchBytes));
		else if (!actor && segmentLoader.current) {
			segmentLoader.current.destroy();
			segmentLoader.patch(void 0);
		}
		return () => {
			segmentLoader.current?.destroy();
			segmentLoader.patch(void 0);
		};
	});
	// Gate: loading is possible only with a resolved track AND a live actor.
	const segmentsCanLoad = createState(false);
	const unsubscribeCanLoadSegments = combineLatest([state, segmentLoader]).subscribe(([currentState, currentSegmentLoader]) => {
		const track = getSelectedTrack(currentState, type);
		const trackResolved = !!track && isResolvedTrack(track);
		const segmentLoaderActorExists = !!currentSegmentLoader;
		segmentsCanLoad.patch(trackResolved && segmentLoaderActorExists);
	});
	// Send load messages only on meaningful input changes (loadingInputsEq).
	const unsubscribeShouldLoadSegments = combineLatest([segmentsCanLoad, state]).subscribe(([segmentsCanLoad, state]) => selectLoadingInputs([segmentsCanLoad, state], type), ({ preload, playbackInitiated, currentTime, track }) => {
		// Pre-play with preload !== 'auto': rangeless (metadata-style) load.
		if (!(preload === "auto" || !!playbackInitiated))
			/** @ts-expect-error */
			segmentLoader.current?.send({
				type: "load",
				track
			});
		// Otherwise: load the forward-buffer window starting at currentTime.
		else segmentLoader.current?.send({
			type: "load",
			track,
			range: {
				start: currentTime,
				end: currentTime + DEFAULT_FORWARD_BUFFER_CONFIG.bufferDuration
			}
		});
	}, { equalityFn: loadingInputsEq });
	// Teardown: stop reacting first, then destroy the actor via its unsub.
	return () => {
		unsubscribeCanLoadSegments();
		unsubscribeShouldLoadSegments();
		unsubActorLifecycle();
	};
}
1853
+
1854
+ //#endregion
1855
+ //#region ../spf/dist/dev/dom/text/parse-vtt-segment.js
1856
+ /**
1857
+ * Parse a VTT segment using browser's native parser.
1858
+ *
1859
+ * Creates a dummy video element with a track element to leverage
1860
+ * the browser's optimized VTT parsing. Returns parsed VTTCue objects.
1861
+ */
1862
/** Lazily-created hidden <video> that hosts <track> elements for native VTT parsing. */
let dummyVideo = null;
/**
 * Return the shared hidden video element, creating and configuring it once.
 */
function ensureDummyVideo() {
	if (dummyVideo) return dummyVideo;
	const video = document.createElement("video");
	video.muted = true;
	video.preload = "none";
	video.style.display = "none";
	video.crossOrigin = "anonymous";
	dummyVideo = video;
	return dummyVideo;
}
1873
/**
 * Parse one VTT segment via the browser's native parser by attaching a
 * temporary <track> element to the shared hidden video. Resolves with the
 * parsed cues; rejects when the track fails to load. The track element is
 * always detached and its listeners removed before settling.
 */
function parseVttSegment(url) {
	const hostVideo = ensureDummyVideo();
	const trackEl = document.createElement("track");
	trackEl.kind = "subtitles";
	trackEl.default = true;
	return new Promise((resolve, reject) => {
		const detach = () => {
			trackEl.removeEventListener("load", handleLoad);
			trackEl.removeEventListener("error", handleError);
			hostVideo.removeChild(trackEl);
		};
		const handleLoad = () => {
			const collected = [];
			const { cues } = trackEl.track;
			// TextTrackCueList is not guaranteed iterable; index manually.
			if (cues) for (let i = 0; i < cues.length; i++) {
				const parsedCue = cues[i];
				if (parsedCue) collected.push(parsedCue);
			}
			detach();
			resolve(collected);
		};
		const handleError = () => {
			detach();
			reject(new Error(`Failed to load VTT segment: ${url}`));
		};
		trackEl.addEventListener("load", handleLoad);
		trackEl.addEventListener("error", handleError);
		hostVideo.appendChild(trackEl);
		// Setting src last kicks off the load after listeners are wired.
		trackEl.src = url;
	});
}
1904
/**
 * Release the cached dummy <video> so the next parse creates a fresh host.
 * The element was never attached to the document, so dropping the reference
 * is sufficient for it to be garbage-collected.
 */
function destroyVttParser() {
	dummyVideo = null;
}
1907
+
1908
+ //#endregion
1909
+ //#region ../spf/dist/dev/dom/features/load-text-track-cues.js
1910
/**
 * True when the text track already contains a cue with identical start time,
 * end time, and text. Used to avoid re-adding cues across segment loads.
 */
function isDuplicateCue(cue, textTrack) {
	const existingCues = textTrack.cues;
	if (!existingCues) return false;
	for (let index = 0; index < existingCues.length; index++) {
		const candidate = existingCues[index];
		const sameTiming = candidate.startTime === cue.startTime && candidate.endTime === cue.endTime;
		if (sameTiming && candidate.text === cue.text) return true;
	}
	return false;
}
1919
/**
 * Fetch+parse one VTT segment and add its cues to the given text track,
 * skipping cues that are already present.
 */
const loadVttSegmentTask = async ({ segment }, context) => {
	const parsedCues = await parseVttSegment(segment.url);
	for (const parsed of parsedCues) {
		if (isDuplicateCue(parsed, context.textTrack)) continue;
		context.textTrack.addCue(parsed);
	}
};
1923
+ /**
1924
+ * Load text track cues task (composite - orchestrates VTT segment subtasks).
1925
+ */
1926
const loadTextTrackCuesTask = async ({ currentState }, context) => {
	const track = findSelectedTextTrack(currentState);
	// Nothing to do until the selected track is resolved and has segments.
	if (!track || !isResolvedTrack(track)) return;
	const { segments } = track;
	if (segments.length === 0) return;
	const trackId = track.id;
	// Ids already recorded as loaded for this track in textBufferState.
	const loadedIds = new Set((currentState.textBufferState?.[trackId]?.segments ?? []).map((s) => s.id));
	// Segments within the load window that aren't loaded yet.
	const segmentsToLoad = getSegmentsToLoad(segments, segments.filter((s) => loadedIds.has(s.id)), currentState.currentTime ?? 0).filter((s) => !loadedIds.has(s.id));
	if (segmentsToLoad.length === 0) return;
	for (const segment of segmentsToLoad) {
		if (context.signal.aborted) break;
		try {
			await loadVttSegmentTask({ segment }, { textTrack: context.textTrack });
			// Record the segment as loaded against the FRESHEST state, since
			// other subscribers may have patched textBufferState meanwhile.
			const latest = context.state.current.textBufferState ?? {};
			const trackState = latest[trackId] ?? { segments: [] };
			context.state.patch({ textBufferState: {
				...latest,
				[trackId]: { segments: [...trackState.segments, { id: segment.id }] }
			} });
		} catch (error) {
			// Abort ends the task; any other failure skips just this segment
			// so partial subtitles are still shown.
			if (error instanceof Error && error.name === "AbortError") break;
			console.error("Failed to load VTT segment:", error);
		}
	}
	// Re-add existing cues on a showing track — presumably a render-refresh
	// workaround; verify against the target browsers' addCue behavior.
	if (context.textTrack.mode === "showing" && context.textTrack.cues) Array.from(context.textTrack.cues).forEach((cue) => {
		context.textTrack.addCue(cue);
	});
	// Yield a frame so the browser can paint the newly added cues.
	await new Promise((resolve) => requestAnimationFrame(resolve));
};
1955
+ /**
1956
+ * Find the selected text track in the presentation.
1957
+ */
1958
/**
 * Look up the presentation Track metadata matching state.selectedTextTrackId
 * inside the first text switching set, or undefined when absent.
 */
function findSelectedTextTrack(state) {
	const { presentation, selectedTextTrackId } = state;
	if (!presentation || !selectedTextTrackId) return;
	const textSelectionSet = presentation.selectionSets.find(({ type }) => type === "text");
	const tracks = textSelectionSet?.switchingSets?.[0]?.tracks;
	if (!tracks) return;
	return tracks.find((candidate) => candidate.id === selectedTextTrackId);
}
1964
+ /**
1965
+ * Get the browser's TextTrack object for the selected text track.
1966
+ *
1967
+ * Retrieves the live TextTrack interface from the track element in owners,
1968
+ * which is used for adding cues, checking mode, and managing track state.
1969
+ *
1970
+ * Note: Returns the DOM TextTrack interface (HTMLTrackElement.track),
1971
+ * not the presentation Track metadata type.
1972
+ *
1973
+ * @param state - Current playback state (track selection)
1974
+ * @param owners - DOM owners containing track elements map
1975
+ * @returns DOM TextTrack interface or undefined if not found
1976
+ */
1977
/**
 * Resolve the live DOM TextTrack (HTMLTrackElement.track) for the currently
 * selected text track id, or undefined when no selection/element exists.
 */
function getSelectedTextTrackFromOwners(state, owners) {
	const { selectedTextTrackId } = state;
	if (!selectedTextTrackId || !owners.textTracks) return;
	const trackElement = owners.textTracks.get(selectedTextTrackId);
	return trackElement?.track;
}
1982
+ /**
1983
+ * Check if we can load text track cues.
1984
+ *
1985
+ * Requires:
1986
+ * - Selected text track ID exists
1987
+ * - Track elements map exists
1988
+ * - Track element exists for selected track
1989
+ */
1990
/**
 * Preconditions for cue loading: a text track is selected AND the owners map
 * holds a track element for it.
 */
function canLoadTextTrackCues(state, owners) {
	const selectedId = state.selectedTextTrackId;
	if (!selectedId) return false;
	const { textTracks } = owners;
	return !!textTracks && textTracks.has(selectedId);
}
1993
+ /**
1994
+ * Check if we should load text track cues.
1995
+ *
1996
+ * Only load if:
1997
+ * - Track is resolved (has segments)
1998
+ * - Track has at least one segment
1999
+ * - Track element exists
2000
+ */
2001
/**
 * Full gate for cue loading: selection + element (canLoadTextTrackCues),
 * a resolved track with at least one segment, and a live DOM TextTrack.
 */
function shouldLoadTextTrackCues(state, owners) {
	if (!canLoadTextTrackCues(state, owners)) return false;
	const selected = findSelectedTextTrack(state);
	if (!selected || !isResolvedTrack(selected)) return false;
	if (selected.segments.length === 0) return false;
	return !!getSelectedTextTrackFromOwners(state, owners);
}
2008
+ /**
2009
+ * Load text track cues orchestration.
2010
+ *
2011
+ * Triggers when:
2012
+ * - Text track is selected
2013
+ * - Track is resolved (has segments)
2014
+ * - Track element exists
2015
+ *
2016
+ * Fetches and parses VTT segments within the forward buffer window, then adds
2017
+ * cues to the track incrementally. Continues on segment errors to provide
2018
+ * partial subtitles.
2019
+ *
2020
+ * @example
2021
+ * const cleanup = loadTextTrackCues({ state, owners });
2022
+ */
2023
function loadTextTrackCues({ state, owners }) {
	let currentTask = null;
	let abortController = null;
	let lastTrackId;
	const cleanup = combineLatest([state, owners]).subscribe(async ([currentState, currentOwners]) => {
		// A track switch invalidates any in-flight load for the old track.
		if (currentState.selectedTextTrackId !== lastTrackId) {
			lastTrackId = currentState.selectedTextTrackId;
			abortController?.abort();
			currentTask = null;
		}
		// One load task at a time; bail while one is still running.
		if (currentTask) return;
		if (!shouldLoadTextTrackCues(currentState, currentOwners)) return;
		const textTrack = getSelectedTextTrackFromOwners(currentState, currentOwners);
		if (!textTrack) return;
		abortController = new AbortController();
		// Capture the task locally so the finally handler only clears
		// `currentTask` when it still refers to THIS task. Without the guard,
		// an aborted task settling late would null out a newer task's handle
		// and allow duplicate concurrent loads.
		const task = loadTextTrackCuesTask({ currentState }, {
			signal: abortController.signal,
			textTrack,
			state
		}).finally(() => {
			if (currentTask === task) currentTask = null;
		});
		currentTask = task;
	});
	return () => {
		// Abort any in-flight load, then tear down the subscription.
		abortController?.abort();
		cleanup();
	};
}
2051
+
2052
+ //#endregion
2053
+ //#region ../spf/dist/dev/dom/features/track-current-time.js
2054
+ /**
2055
+ * Track current playback position from the media element.
2056
+ *
2057
+ * Mirrors `mediaElement.currentTime` into reactive state on:
2058
+ * - `timeupdate` — fires during playback (~4 Hz)
2059
+ * - `seeking` — fires when a seek begins; per spec, `currentTime` is
2060
+ * already at the new position when this event dispatches, so buffer
2061
+ * management can react immediately rather than waiting for `timeupdate`,
2062
+ * which does not fire while paused.
2063
+ *
2064
+ * Also syncs immediately when a media element becomes available.
2065
+ *
2066
+ * @example
2067
+ * const cleanup = trackCurrentTime({ state, owners });
2068
+ */
2069
function trackCurrentTime({ state, owners }) {
	let trackedElement;
	let detachListeners = null;
	const unsubscribe = owners.subscribe((currentOwners) => {
		const { mediaElement } = currentOwners;
		if (mediaElement === trackedElement) return;
		// Element changed (or disappeared): detach from the previous one.
		detachListeners?.();
		detachListeners = null;
		trackedElement = mediaElement;
		if (!mediaElement) return;
		// Mirror the position immediately, then on timeupdate and seeking
		// (seeking covers position changes while paused).
		const sync = () => state.patch({ currentTime: mediaElement.currentTime });
		sync();
		const offTimeupdate = listen(mediaElement, "timeupdate", sync);
		const offSeeking = listen(mediaElement, "seeking", sync);
		detachListeners = () => {
			offTimeupdate();
			offSeeking();
		};
	});
	return () => {
		detachListeners?.();
		unsubscribe();
	};
}
2093
+
2094
+ //#endregion
2095
+ //#region ../spf/dist/dev/dom/features/track-playback-initiated.js
2096
+ /**
2097
+ * Track whether playback has been initiated by the user.
2098
+ *
2099
+ * Sets `state.playbackInitiated = true` when the media element fires a `play`
2100
+ * event (via `element.play()`, native controls, or autoplay) and simultaneously
2101
+ * dispatches `{ type: 'play' }` to the event stream so `resolvePresentation`
2102
+ * can react.
2103
+ *
2104
+ * Resets `state.playbackInitiated = false` when `presentation.url` changes,
2105
+ * so a new source with `preload="none"` won't load segments until play is
2106
+ * triggered again.
2107
+ *
2108
+ * This flag is used by `shouldLoadSegments` to allow segment loading after
2109
+ * play is initiated regardless of the initial `preload` setting — `preload`
2110
+ * is a startup hint, not a runtime gate.
2111
+ *
2112
+ * @example
2113
+ * const cleanup = trackPlaybackInitiated({ state, owners, events });
2114
+ */
2115
function trackPlaybackInitiated({ state, owners, events }) {
	let lastMediaElement;
	let removeListener = null;
	let lastPresentationUrl;
	// Reset the flag whenever the presentation URL changes, so a new source
	// with preload="none" stays dormant until play is pressed again.
	const unsubscribeState = state.subscribe((currentState) => {
		const url = currentState.presentation?.url;
		if (url !== lastPresentationUrl) {
			// Skip the very first observation (initial url), only reset on a
			// genuine change away from a previously-seen url.
			if (lastPresentationUrl !== void 0) state.patch({ playbackInitiated: false });
			lastPresentationUrl = url;
		}
	});
	// Re-bind the 'play' listener whenever the media element changes.
	const unsubscribeOwners = owners.subscribe((currentOwners) => {
		const { mediaElement } = currentOwners;
		if (mediaElement === lastMediaElement) return;
		removeListener?.();
		removeListener = null;
		lastMediaElement = mediaElement;
		if (!mediaElement) return;
		removeListener = listen(mediaElement, "play", () => {
			// Flag first, then notify the event stream so subscribers observe
			// playbackInitiated already set when the 'play' event arrives.
			state.patch({ playbackInitiated: true });
			events.dispatch({ type: "play" });
		});
	});
	return () => {
		removeListener?.();
		unsubscribeState();
		unsubscribeOwners();
	};
}
2144
+
2145
+ //#endregion
2146
+ //#region ../spf/dist/dev/dom/media/append-segment.js
2147
+ /**
2148
+ * Append media data to a SourceBuffer.
2149
+ *
2150
+ * Accepts either a full ArrayBuffer (single append) or an AsyncIterable of
2151
+ * Uint8Array chunks (one append per chunk, in order). Waits for `updateend`
2152
+ * between each call so appends are serialized correctly.
2153
+ *
2154
+ * Errors from the SourceBuffer (`error` event) or from the iterable are
2155
+ * propagated as rejections.
2156
+ */
2157
async function appendSegment(sourceBuffer, data, signal) {
	// Whole-buffer path: a single serialized append.
	if (data instanceof ArrayBuffer) await appendChunk(sourceBuffer, data);
	else try {
		for await (const chunk of data) {
			// Check between chunks so an abort stops before the next append.
			if (signal?.aborted) throw signal.reason ?? new DOMException("Aborted", "AbortError");
			await appendChunk(sourceBuffer, chunk);
		}
	} catch (e) {
		// On abort, reset the SourceBuffer's append state — but only when it
		// is not mid-update (abort() would throw otherwise). Best-effort.
		if (e instanceof DOMException && e.name === "AbortError" && !sourceBuffer.updating) try {
			sourceBuffer.abort();
		} catch {}
		throw e;
	}
}
2171
/**
 * Append a single chunk to a SourceBuffer, waiting out any in-progress
 * update first and resolving on `updateend`. Rejects on the buffer's
 * `error` event or when appendBuffer throws synchronously.
 */
async function appendChunk(sourceBuffer, data) {
	// appendBuffer throws while `updating`; wait for the active op to end.
	if (sourceBuffer.updating) await new Promise((resolve) => {
		const settle = () => {
			sourceBuffer.removeEventListener("updateend", settle);
			resolve();
		};
		sourceBuffer.addEventListener("updateend", settle);
	});
	return new Promise((resolve, reject) => {
		const detach = () => {
			sourceBuffer.removeEventListener("updateend", handleUpdateEnd);
			sourceBuffer.removeEventListener("error", handleError);
		};
		const handleUpdateEnd = () => {
			detach();
			resolve();
		};
		const handleError = (event) => {
			detach();
			reject(new Error(`SourceBuffer append error: ${event.type}`));
		};
		sourceBuffer.addEventListener("updateend", handleUpdateEnd);
		sourceBuffer.addEventListener("error", handleError);
		try {
			sourceBuffer.appendBuffer(data);
		} catch (error) {
			// Synchronous failure (e.g. quota): detach listeners and reject.
			detach();
			reject(error);
		}
	});
}
2202
+
2203
+ //#endregion
2204
+ //#region ../spf/dist/dev/dom/media/buffer-flusher.js
2205
+ /**
2206
+ * Buffer flusher helper (P12)
2207
+ *
2208
+ * Removes a time range from a SourceBuffer to manage memory.
2209
+ */
2210
+ /**
2211
+ * Remove a time range from a SourceBuffer.
2212
+ *
2213
+ * Waits for the SourceBuffer to be ready (not updating), then removes
2214
+ * the specified range. Returns a promise that resolves when removal completes.
2215
+ *
2216
+ * @param sourceBuffer - The SourceBuffer to remove data from
2217
+ * @param start - Start of the time range to remove (seconds)
2218
+ * @param end - End of the time range to remove (seconds)
2219
+ * @returns Promise that resolves when removal completes
2220
+ *
2221
+ * @example
2222
+ * await flushBuffer(videoSourceBuffer, 0, 30);
2223
+ */
2224
async function flushBuffer(sourceBuffer, start, end) {
	// remove() throws while `updating`; wait for the active op to finish.
	if (sourceBuffer.updating) await new Promise((resolve) => {
		const settle = () => {
			sourceBuffer.removeEventListener("updateend", settle);
			resolve();
		};
		sourceBuffer.addEventListener("updateend", settle);
	});
	return new Promise((resolve, reject) => {
		const detach = () => {
			sourceBuffer.removeEventListener("updateend", handleUpdateEnd);
			sourceBuffer.removeEventListener("error", handleError);
		};
		const handleUpdateEnd = () => {
			detach();
			resolve();
		};
		const handleError = (event) => {
			detach();
			reject(new Error(`SourceBuffer remove error: ${event.type}`));
		};
		sourceBuffer.addEventListener("updateend", handleUpdateEnd);
		sourceBuffer.addEventListener("error", handleError);
		try {
			sourceBuffer.remove(start, end);
		} catch (error) {
			// Synchronous failure: detach listeners and surface the error.
			detach();
			reject(error);
		}
	});
}
2255
+
2256
+ //#endregion
2257
+ //#region ../spf/dist/dev/core/features/calculate-presentation-duration.js
2258
+ /**
2259
+ * Check if we can calculate presentation duration (have required data).
2260
+ */
2261
/**
 * True when a presentation exists and at least one A/V track is selected —
 * the minimum required to derive a duration.
 */
function canCalculateDuration(state) {
	if (!state.presentation) return false;
	const hasSelection = state.selectedVideoTrackId || state.selectedAudioTrackId;
	return Boolean(hasSelection);
}
2265
+ /**
2266
+ * Check if we should calculate presentation duration (conditions met).
2267
+ */
2268
/**
 * True when duration should be derived now: prerequisites hold, the
 * presentation has no duration yet, and at least one selected A/V track
 * is resolved.
 */
function shouldCalculateDuration(state) {
	if (!canCalculateDuration(state)) return false;
	if (state.presentation.duration !== void 0) return false;
	const video = state.selectedVideoTrackId ? getSelectedTrack(state, "video") : void 0;
	if (video && isResolvedTrack(video)) return true;
	const audio = state.selectedAudioTrackId ? getSelectedTrack(state, "audio") : void 0;
	return !!(audio && isResolvedTrack(audio));
}
2276
+ /**
2277
+ * Get duration from the first resolved track (prefer video, fallback to audio).
2278
+ */
2279
/**
 * Duration of the first resolved selected track — video takes priority,
 * audio is the fallback; undefined when neither is resolved.
 */
function getDurationFromResolvedTracks(state) {
	const candidates = [["video", state.selectedVideoTrackId], ["audio", state.selectedAudioTrackId]];
	for (const [type, selectedId] of candidates) {
		if (!selectedId) continue;
		const track = getSelectedTrack(state, type);
		if (track && isResolvedTrack(track)) return track.duration;
	}
}
2285
+ /**
2286
+ * Calculate and set presentation duration from resolved tracks.
2287
+ */
2288
/**
 * Reactor: once any selected track resolves and the presentation still has
 * no duration, copy the resolved track's finite duration onto the
 * presentation (immutably). Returns the unsubscribe function.
 */
function calculatePresentationDuration({ state }) {
	return combineLatest([state]).subscribe(([currentState]) => {
		if (!shouldCalculateDuration(currentState)) return;
		const duration = getDurationFromResolvedTracks(currentState);
		const usable = duration !== void 0 && Number.isFinite(duration);
		if (!usable) return;
		state.patch({ presentation: {
			...currentState.presentation,
			duration
		} });
	});
}
2300
+
2301
+ //#endregion
2302
+ //#region ../spf/dist/dev/core/task.js
2303
+ /**
2304
+ * Generic reusable task that wraps an async run function.
2305
+ *
2306
+ * Owns its own AbortController so it can always be aborted independently.
2307
+ * Optionally composes an external AbortSignal so that a parent's cancellation
2308
+ * propagates into the task's work without requiring the caller to track the
2309
+ * task separately.
2310
+ *
2311
+ * Ordering guarantee: `value` is written before `status` transitions to `'done'`;
2312
+ * `error` is written before `status` transitions to `'error'`. Any reader
2313
+ * observing `status === 'done'` is guaranteed `value` is already present.
2314
+ */
2315
var Task = class {
	id;
	#fn;
	#ownController = new AbortController();
	#effectiveSignal;
	#status = "pending";
	#value = void 0;
	#error = void 0;
	/**
	 * @param runFn - async work receiving the composed AbortSignal
	 * @param config - optional { id | () => id, signal } — id defaults to generateId()
	 */
	constructor(runFn, config) {
		this.#fn = runFn;
		const configuredId = config?.id;
		this.id = typeof configuredId === "function" ? configuredId() : configuredId ?? generateId();
		// Compose our own controller with any external signal so either
		// source of cancellation aborts the work.
		this.#effectiveSignal = config?.signal ? AbortSignal.any([this.#ownController.signal, config.signal]) : this.#ownController.signal;
	}
	get status() {
		return this.#status;
	}
	get value() {
		return this.#value;
	}
	get error() {
		return this.#error;
	}
	/** Execute the work once; resolves with its result or rethrows its error. */
	async run() {
		this.#status = "running";
		try {
			const result = await this.#fn(this.#effectiveSignal);
			// value is written BEFORE status flips to 'done' (ordering guarantee).
			this.#value = result;
			this.#status = "done";
			return result;
		} catch (err) {
			// error is written BEFORE status flips to 'error'.
			this.#error = err;
			this.#status = "error";
			throw err;
		}
	}
	/** Cancel via the task-owned controller; external signals are unaffected. */
	abort() {
		this.#ownController.abort();
	}
};
2355
+ /**
2356
+ * Runs tasks concurrently, deduplicated by task id.
2357
+ *
2358
+ * If a task with a given id is already in flight, subsequent schedule() calls
2359
+ * for that id are silently ignored until the first completes. Tasks are stored
2360
+ * so abortAll() can cancel any in-flight work (e.g. on engine cleanup).
2361
+ */
2362
var ConcurrentRunner = class {
	#pending = /* @__PURE__ */ new Map();
	/**
	 * Start the task unless one with the same id is already in flight.
	 * Duplicate schedules are silently dropped until the first completes.
	 */
	schedule(task) {
		if (this.#pending.has(task.id)) return;
		this.#pending.set(task.id, task);
		task.run().catch((error) => {
			// Aborts are expected (cleanup/cancellation). Anything else is
			// reported via console.error — rethrowing inside .catch() would
			// only produce an unhandled promise rejection nothing can catch.
			if (!(error instanceof Error && error.name === "AbortError")) console.error("Unexpected task error:", error);
		}).finally(() => {
			this.#pending.delete(task.id);
		});
	}
	/** Abort every in-flight task (e.g. on engine cleanup). */
	abortAll() {
		for (const task of this.#pending.values()) task.abort();
		this.#pending.clear();
	}
};
2378
+ /**
2379
+ * Runs tasks one at a time in submission order.
2380
+ *
2381
+ * Each schedule() call returns a Promise that resolves or rejects with the
2382
+ * task's result when it is eventually executed. Tasks wait in queue until the
2383
+ * prior task completes.
2384
+ *
2385
+ * Serialization is achieved by chaining each task's run() onto the tail of a
2386
+ * shared promise chain — no explicit queue or drain loop needed.
2387
+ *
2388
+ * abortAll() aborts all pending (not yet started) tasks and the currently
2389
+ * in-flight task. Pending tasks still run briefly but receive an aborted
2390
+ * signal and are expected to exit early.
2391
+ */
2392
var SerialRunner = class {
	// Tail of the shared promise chain; each scheduled task runs after it.
	#chain = Promise.resolve();
	// Tasks queued but not yet started (for abortAll).
	#pending = /* @__PURE__ */ new Set();
	// The task currently executing, if any.
	#current = null;
	schedule(task) {
		const t = task;
		this.#pending.add(t);
		// Chain this task after the current tail; caller gets ITS result.
		const result = this.#chain.then(() => {
			this.#pending.delete(t);
			this.#current = t;
			return task.run();
		}).finally(() => {
			this.#current = null;
		});
		// Advance the tail, swallowing both outcomes so one failed task
		// doesn't poison the chain for subsequent tasks.
		this.#chain = result.then(() => {}, () => {});
		return result;
	}
	abortAll() {
		// Pending tasks still start briefly but see an aborted signal.
		for (const task of this.#pending) task.abort();
		this.#pending.clear();
		this.#current?.abort();
	}
	destroy() {
		this.abortAll();
	}
};
2418
+
2419
+ //#endregion
2420
+ //#region ../spf/dist/dev/core/features/resolve-track.js
2421
/**
 * True when the selected track of the configured type exists but has not
 * been resolved yet (i.e. its playlist still needs fetching).
 */
function canResolve(state, config) {
	const selected = getSelectedTrack(state, config.type);
	return !!selected && !isResolvedTrack(selected);
}
2426
+ /**
2427
+ * Determines if track resolution conditions are met.
2428
+ *
2429
+ * Currently always returns true - conditions are checked by canResolveTrack()
2430
+ * and resolving flag. Kept as placeholder for future conditional logic.
2431
+ *
2432
+ * @param state - Current track resolution state
2433
+ * @param event - Current action/event
2434
+ * @returns true (conditions checked elsewhere)
2435
+ */
2436
function shouldResolve(_state, _event) {
	// Placeholder: actual gating lives in canResolve() and the runner's
	// per-id dedup; kept for future conditional logic.
	return true;
}
2439
+ /**
2440
+ * Updates a track within a presentation (immutably).
2441
+ * Generic - works for video, audio, or text tracks.
2442
+ */
2443
/**
 * Immutably replace the track whose id matches `resolvedTrack.id` across all
 * selection/switching sets; every other track keeps its original reference.
 * Works for video, audio, or text tracks.
 */
function updateTrackInPresentation(presentation, resolvedTrack) {
	const targetId = resolvedTrack.id;
	const swapTrack = (track) => track.id === targetId ? resolvedTrack : track;
	const updateSwitchingSet = (switchingSet) => ({
		...switchingSet,
		tracks: switchingSet.tracks.map(swapTrack)
	});
	const updateSelectionSet = (selectionSet) => ({
		...selectionSet,
		switchingSets: selectionSet.switchingSets.map(updateSwitchingSet)
	});
	return {
		...presentation,
		selectionSets: presentation.selectionSets.map(updateSelectionSet)
	};
}
2456
+ /**
2457
+ * Resolves unresolved tracks using reactive composition.
2458
+ *
2459
+ * The subscribe closure is pure scheduling logic: it checks conditions and
2460
+ * creates a Task for the selected track when appropriate. The ConcurrentRunner
2461
+ * handles all concurrency concerns — deduplication, parallel execution, and
2462
+ * cleanup.
2463
+ *
2464
+ * Generic version that works for video, audio, or text tracks based on config.
2465
+ * Type parameter T is inferred from config.type (use 'as const' for inference).
2466
+ */
2467
function resolveTrack({ state, events }, config) {
	// Runner dedups by task id (= track id), so repeated emissions while a
	// fetch is in flight are ignored.
	const runner = new ConcurrentRunner();
	const cleanup = combineLatest([state, events]).subscribe(([currentState, event]) => {
		if (!canResolve(currentState, config) || !shouldResolve(currentState, event)) return;
		const track = getSelectedTrack(currentState, config.type);
		if (!track) return;
		const resolvedTrack = track;
		runner.schedule(new Task(async (signal) => {
			// Fetch + parse the media playlist for the selected track.
			const mediaTrack = parseMediaPlaylist(await getResponseText(await fetchResolvable(resolvedTrack, { signal })), resolvedTrack);
			// Read the FRESHEST presentation (it may have changed during the
			// fetch) before splicing in the resolved track.
			const latestPresentation = state.current.presentation;
			const updatedPresentation = updateTrackInPresentation(latestPresentation, mediaTrack);
			state.patch({ presentation: updatedPresentation });
		}, { id: track.id }));
	});
	return () => {
		// Cancel in-flight resolutions, then drop the subscription.
		runner.abortAll();
		cleanup();
	};
}
2486
+
2487
+ //#endregion
2488
+ //#region ../spf/dist/dev/core/features/select-tracks.js
2489
/**
 * Pick the text track to activate.
 *
 * Selection priority (when enabled):
 * 1. User preference (`preferredSubtitleLanguage`)
 * 2. Track flagged `default` (when `enableDefaultTrack` is true)
 * 3. Otherwise no auto-selection (user opt-in)
 *
 * FORCED tracks are excluded unless `includeForcedTracks` is set, per
 * Apple's HLS guidance.
 *
 * @param presentation - Presentation with text tracks
 * @param config - Selection configuration
 * @returns Track ID or undefined (no auto-selection)
 */
function pickTextTrack(presentation, config) {
  const { preferredSubtitleLanguage, enableDefaultTrack = false, includeForcedTracks } = config;
  const allTracks = presentation.selectionSets
    .find(({ type }) => type === "text")
    ?.switchingSets?.[0]?.tracks;
  if (!allTracks?.length) return undefined;
  const candidates = includeForcedTracks ? allTracks : allTracks.filter((track) => !track.forced);
  if (!candidates.length) return undefined;
  if (preferredSubtitleLanguage) {
    const byLanguage = candidates.find((track) => track.language === preferredSubtitleLanguage);
    if (byLanguage) return byLanguage.id;
  }
  if (enableDefaultTrack) {
    const byDefaultFlag = candidates.find((track) => track.default === true);
    if (byDefaultFlag) return byDefaultFlag.id;
  }
  return undefined;
}
2519
/**
 * Check if we can select a track of the given type.
 *
 * True only when a presentation exists and its first switching set of the
 * matching selection-set type has at least one track.
 *
 * Generic over track type — works for video, audio, or text.
 */
function canSelectTrack(state, config) {
  const matchingSet = state?.presentation?.selectionSets?.find((set) => set.type === config.type);
  const firstSwitchingSet = matchingSet?.switchingSets?.[0];
  return Boolean(firstSwitchingSet?.tracks.length);
}
2531
/**
 * Check if we should select a track of the given type.
 *
 * True while no track of this type has been selected yet (the selected-id
 * state key is looked up via SelectedTrackIdKeyByType).
 *
 * Generic over track type — works for video, audio, or text.
 *
 * @TODO figure out reactive model for ABR cases - right now we're only selecting
 * if we have nothing selected (CJP)
 */
function shouldSelectTrack(state, config) {
  const selectedIdKey = SelectedTrackIdKeyByType[config.type];
  return !state[selectedIdKey];
}
2545
/**
 * Select video track orchestration.
 *
 * Selects a video track when:
 * - Presentation exists with video tracks
 * - No video track is selected yet
 *
 * Currently picks the first track of the first switching set — no
 * bandwidth-based selection is applied yet (see @TODO on shouldSelectTrack).
 *
 * @example
 * const cleanup = selectVideoTrack(
 *   { state, owners, events },
 *   { initialBandwidth: 2_000_000 }
 * );
 */
function selectVideoTrack({ state }, config = { type: "video" }) {
  // Re-entrancy guard: state.patch() below re-fires this subscriber.
  let selecting = false;
  return state.subscribe(async (currentState) => {
    if (selecting) return;
    if (!canSelectTrack(currentState, config)) return;
    if (!shouldSelectTrack(currentState, config)) return;
    selecting = true;
    try {
      const firstSwitchingSet = currentState.presentation?.selectionSets
        .find(({ type }) => type === config.type)
        ?.switchingSets[0];
      const firstTrackId = firstSwitchingSet?.tracks[0]?.id;
      if (firstTrackId) {
        const selectedIdKey = SelectedTrackIdKeyByType[config.type];
        state.patch({ [selectedIdKey]: firstTrackId });
      }
    } finally {
      selecting = false;
    }
  });
}
2576
/**
 * Select audio track orchestration.
 *
 * Selects an audio track when:
 * - Presentation exists with audio tracks
 * - No audio track is selected yet
 *
 * Consistency fix: drives lookups off `config.type` and
 * `SelectedTrackIdKeyByType` (exactly like `selectVideoTrack`) instead of
 * hard-coding "audio"/"selectedAudioTrackId". Behavior is unchanged for the
 * default config — SelectedTrackIdKeyByType.audio must already map to
 * "selectedAudioTrackId" for shouldSelectTrack's guard to see the patch.
 *
 * @example
 * const cleanup = selectAudioTrack(
 *   { state, owners, events },
 *   { preferredAudioLanguage: 'en' }
 * );
 */
function selectAudioTrack({ state }, config = { type: "audio" }) {
  // Re-entrancy guard: state.patch() below re-fires this subscriber.
  let selecting = false;
  return state.subscribe(async (currentState) => {
    if (!canSelectTrack(currentState, config) || !shouldSelectTrack(currentState, config) || selecting) return;
    try {
      selecting = true;
      // NOTE(review): despite the JSDoc example, no language preference is
      // applied yet — the first track of the first switching set wins.
      const selectedTrackId = currentState.presentation?.selectionSets
        .find(({ type }) => type === config.type)
        ?.switchingSets[0]?.tracks[0]?.id;
      if (selectedTrackId) {
        const selectedTrackKey = SelectedTrackIdKeyByType[config.type];
        state.patch({ [selectedTrackKey]: selectedTrackId });
      }
    } finally {
      selecting = false;
    }
  });
}
2604
/**
 * Select text track orchestration.
 *
 * Selects a text track when:
 * - Presentation exists with text tracks
 * - No text track is selected yet
 *
 * Note: pickTextTrack only auto-selects on explicit user preference or an
 * opted-in default track; otherwise nothing is selected.
 *
 * @example
 * const cleanup = selectTextTrack({ state, owners, events }, {});
 */
function selectTextTrack({ state }, config = { type: "text" }) {
  // Re-entrancy guard: state.patch() below re-fires this subscriber.
  let selecting = false;
  return state.subscribe(async (currentState) => {
    if (selecting) return;
    if (!canSelectTrack(currentState, config)) return;
    if (!shouldSelectTrack(currentState, config)) return;
    selecting = true;
    try {
      const pickedId = pickTextTrack(currentState.presentation, config);
      if (pickedId) state.patch({ selectedTextTrackId: pickedId });
    } finally {
      selecting = false;
    }
  });
}
2629
+
2630
+ //#endregion
2631
+ //#region ../spf/dist/dev/dom/features/end-of-stream.js
2632
/**
 * Check if the last segment of a track has been appended to a SourceBuffer.
 *
 * Checks by segment ID rather than a pipeline flag, so it is robust across
 * quality switches (different tracks have different segment IDs) and
 * back-buffer flushes (flushed segment IDs are removed from the model).
 *
 * NOTE(review): an empty `segments` list is treated as "appended" (true) —
 * confirm callers never reach here with an unresolved/empty track expecting
 * false.
 */
function isLastSegmentAppended(segments, actor) {
  if (!segments.length) return true;
  const lastSeg = segments[segments.length - 1];
  if (!lastSeg) return false;
  const appended = actor?.snapshot.context.segments;
  // Only a fully-appended (non-partial) entry counts.
  return appended?.some((candidate) => candidate.id === lastSeg.id && !candidate.partial) ?? false;
}
2645
/**
 * Check if the last segment has been appended for each selected track.
 *
 * Handles video-only, audio-only, and video+audio scenarios. A selected
 * track that is not yet resolved is considered not ready.
 */
function hasLastSegmentLoaded(state, owners) {
  const checks = [
    { selectedId: state.selectedVideoTrackId, type: "video", actor: owners.videoBufferActor },
    { selectedId: state.selectedAudioTrackId, type: "audio", actor: owners.audioBufferActor },
  ];
  for (const { selectedId, type, actor } of checks) {
    if (!selectedId) continue;
    const track = getSelectedTrack(state, type);
    if (!track) continue;
    if (!isResolvedTrack(track)) return false;
    if (!isLastSegmentAppended(track.segments, actor)) return false;
  }
  return true;
}
2664
/**
 * Check if we can call endOfStream: both a MediaSource and a presentation
 * must exist.
 */
function canEndStream(state, owners) {
  return Boolean(owners.mediaSource) && Boolean(state.presentation);
}
2670
/**
 * Check if we should call endOfStream.
 *
 * A chain of guards, each returning false when a precondition is missing.
 * Order matters: canEndStream proves mediaSource/presentation exist before
 * they are dereferenced below.
 *
 * @param state - pipeline state (selected track ids, presentation)
 * @param owners - owned platform objects (mediaSource, mediaElement, buffers, actors)
 * @returns {boolean} true only when every gating condition is satisfied
 */
function shouldEndStream(state, owners) {
  if (!canEndStream(state, owners)) return false;
  const { mediaSource, mediaElement } = owners;
  // endOfStream() is only legal on an open MediaSource.
  if (mediaSource.readyState !== "open") return false;
  // Wait until the element has at least metadata before ending the stream.
  if (mediaElement && mediaElement.readyState < HTMLMediaElement.HAVE_METADATA) return false;
  const hasVideoTrack = !!state.selectedVideoTrackId;
  const hasAudioTrack = !!state.selectedAudioTrackId;
  // Every selected track must already have its SourceBuffer created.
  if (hasVideoTrack && !owners.videoBuffer) return false;
  if (hasAudioTrack && !owners.audioBuffer) return false;
  // No buffer operation may be in flight.
  if (owners.videoBufferActor?.snapshot.status === "updating") return false;
  if (owners.audioBufferActor?.snapshot.status === "updating") return false;
  // The final segment of each selected track must be fully appended.
  if (!hasLastSegmentLoaded(state, owners)) return false;
  if (mediaElement) {
    // Use whichever selected+resolved track is available as the timing
    // reference; prefer video over audio.
    const videoTrack = hasVideoTrack ? getSelectedTrack(state, "video") : void 0;
    const audioTrack = hasAudioTrack ? getSelectedTrack(state, "audio") : void 0;
    const refTrack = videoTrack && isResolvedTrack(videoTrack) ? videoTrack : audioTrack && isResolvedTrack(audioTrack) ? audioTrack : void 0;
    if (refTrack && refTrack.segments.length > 0) {
      // Defer ending until playback has reached the start of the last
      // segment — presumably to avoid ending far ahead of the playhead.
      const lastSeg = refTrack.segments[refTrack.segments.length - 1];
      if (mediaElement.currentTime < lastSeg.startTime) return false;
    }
  }
  return true;
}
2696
/**
 * Wait for all currently-updating SourceBufferActors to finish.
 *
 * Uses actor status rather than raw SourceBuffer.updating so the wait is
 * aligned with the same abstraction that owns all buffer operations.
 *
 * @returns {Promise<void>} resolves once no tracked actor is "updating"
 */
function waitForSourceBuffersReady$1(owners) {
  const updatingActors = [owners.videoBufferActor, owners.audioBufferActor]
    .filter((actor) => actor !== void 0 && actor.snapshot.status === "updating");
  if (!updatingActors.length) return Promise.resolve();
  const waits = updatingActors.map((actor) => new Promise((resolve) => {
    const unsubscribe = actor.subscribe((snapshot) => {
      if (snapshot.status === "updating") return;
      unsubscribe();
      resolve();
    });
  }));
  return Promise.all(waits).then(() => void 0);
}
2713
/**
 * Get the highest buffered end time across all active SourceBuffers.
 *
 * Used to set the final duration from actual container timestamps rather
 * than playlist metadata, which handles both shorter and longer cases.
 *
 * @returns {number} max end of the last buffered range, or 0 when empty
 */
function getMaxBufferedEnd$1(owners) {
  return [owners.videoBuffer, owners.audioBuffer].reduce((max, buf) => {
    if (!buf || buf.buffered.length === 0) return max;
    const end = buf.buffered.end(buf.buffered.length - 1);
    return end > max ? end : max;
  }, 0);
}
2726
/**
 * End of stream task (module-level, pure).
 * Sets the final duration from actual buffered end time, then calls endOfStream().
 *
 * @param {{ currentOwners }} deps - snapshot of owned platform objects
 * @param _context - unused task context placeholder
 */
const endOfStreamTask = async ({ currentOwners }, _context) => {
  const { mediaSource } = currentOwners;
  // Already ended (possibly by a concurrent evaluation) — nothing to do.
  if (mediaSource.readyState === "ended") return;
  // Setting duration / calling endOfStream is illegal while a buffer updates.
  await waitForSourceBuffersReady$1(currentOwners);
  // Re-check after the await: the MediaSource may have closed or ended.
  if (mediaSource.readyState !== "open") return;
  // Prefer the real buffered end over playlist metadata for the final duration.
  const bufferedEnd = getMaxBufferedEnd$1(currentOwners);
  if (bufferedEnd > 0) mediaSource.duration = bufferedEnd;
  mediaSource.endOfStream();
  // Yield one frame so the browser can process the ended transition before
  // the task resolves.
  await new Promise((resolve) => requestAnimationFrame(resolve));
};
2740
/**
 * Call endOfStream when the last segment has been appended.
 * This signals to the browser that the stream is complete.
 *
 * Per the MSE spec, appendBuffer() remains valid after endOfStream() —
 * seeks that require re-appending earlier segments will still work.
 * What becomes blocked is calling endOfStream() again, addSourceBuffer(),
 * and MediaSource.duration updates.
 *
 * @returns {() => void} cleanup that tears down all subscriptions
 */
function endOfStream({ state, owners }) {
  // Latched once endOfStream succeeds; re-armed if the MediaSource reopens.
  let hasEnded = false;
  let destroyed = false;
  // Unsubscribers for the per-actor subscriptions installed below.
  const activeActorUnsubs = [];
  const runEvaluate = async () => {
    if (destroyed) return;
    const currentState = state.current;
    const currentOwners = owners.current;
    if (hasEnded) {
      // Stay latched while the MediaSource is not open again (e.g. still
      // "ended"); if it reopened, allow another endOfStream cycle.
      if (currentOwners.mediaSource?.readyState !== "open") return;
      hasEnded = false;
    }
    if (!shouldEndStream(currentState, currentOwners)) return;
    // Set the latch BEFORE the await so concurrent evaluations don't race
    // into a second endOfStream() call.
    hasEnded = true;
    try {
      await endOfStreamTask({ currentOwners }, {});
    } catch (error) {
      // Best-effort: log and keep the pipeline alive rather than crashing.
      console.error("Failed to call endOfStream:", error);
    }
  };
  // Re-evaluate whenever a buffer actor emits (e.g. finishes an append).
  const cleanupOwners = owners.subscribe((currentOwners) => {
    // Owners changed: drop old actor subscriptions and re-install.
    activeActorUnsubs.forEach((u) => u());
    activeActorUnsubs.length = 0;
    for (const actor of [currentOwners.videoBufferActor, currentOwners.audioBufferActor]) {
      if (!actor) continue;
      let isFirst = true;
      activeActorUnsubs.push(actor.subscribe(() => {
        // Skip the initial replayed snapshot; only react to real changes.
        if (isFirst) {
          isFirst = false;
          return;
        }
        runEvaluate();
      }));
    }
  });
  // Also re-evaluate on any state/owners change.
  const cleanupCombineLatest = combineLatest([state, owners]).subscribe(async () => runEvaluate());
  return () => {
    destroyed = true;
    activeActorUnsubs.forEach((u) => u());
    cleanupOwners();
    cleanupCombineLatest();
  };
}
2792
+
2793
+ //#endregion
2794
+ //#region ../spf/dist/dev/dom/features/setup-mediasource.js
2795
/**
 * Check if we have the minimum requirements to create MediaSource:
 * a media element and a presentation URL.
 */
function canSetup(state, owners) {
  const hasMediaElement = !isNil(owners.mediaElement);
  const hasPresentationUrl = !isNil(state.presentation?.url);
  return hasMediaElement && hasPresentationUrl;
}
2801
/**
 * Check if we should proceed with MediaSource creation — i.e. one has not
 * been created yet. Placeholder for future conditions.
 */
function shouldSetup(_state, owners) {
  const { mediaSource } = owners;
  return isNil(mediaSource);
}
2808
/**
 * Setup MediaSource orchestration.
 *
 * Creates and attaches MediaSource when:
 * - mediaElement exists in owners
 * - presentation.url exists in state
 *
 * Updates owners.mediaSource after successful setup.
 *
 * @returns {() => void} cleanup that aborts any in-flight sourceopen wait
 */
function setupMediaSource({ state, owners }) {
  // Re-entrancy guard: owners.patch() below re-fires this subscriber.
  let settingUp = false;
  let abortController = null;
  const unsubscribe = combineLatest([state, owners]).subscribe(async ([currentState, currentOwners]) => {
    if (!canSetup(currentState, currentOwners) || !shouldSetup(currentState, currentOwners) || settingUp) return;
    try {
      settingUp = true;
      abortController = new AbortController();
      const mediaSource = createMediaSource({ preferManaged: true });
      attachMediaSource(mediaSource, currentOwners.mediaElement);
      // Must wait for 'sourceopen' before SourceBuffers can be added.
      await waitForSourceOpen(mediaSource, abortController.signal);
      owners.patch({ mediaSource });
    } catch (error) {
      // Cleanup-triggered abort is expected; swallow it silently.
      if (error instanceof DOMException && error.name === "AbortError") return;
      // NOTE(review): rethrowing inside an async subscriber surfaces as an
      // unhandled promise rejection unless the subscription awaits the
      // callback — confirm the subscribe implementation handles this.
      throw error;
    } finally {
      settingUp = false;
    }
  });
  return () => {
    abortController?.abort();
    unsubscribe();
  };
}
2841
+
2842
+ //#endregion
2843
+ //#region ../spf/dist/dev/dom/media/source-buffer-actor.js
2844
/**
 * Thrown when a message is sent to the actor in a state that does not
 * accept messages (currently: 'updating').
 */
var SourceBufferActorError = class SourceBufferActorError extends Error {
  constructor(message) {
    super(message);
    // Keep a stable name for `err.name`-based narrowing by callers.
    this.name = "SourceBufferActorError";
  }
};
2854
/**
 * Snapshot a TimeRanges object into a plain array of {start, end} pairs so
 * it can be stored in immutable actor context.
 */
function snapshotBuffered(buffered) {
  return Array.from({ length: buffered.length }, (_, i) => ({
    start: buffered.start(i),
    end: buffered.end(i)
  }));
}
2862
/**
 * Build a Task that appends an init segment and records which track the
 * buffer is initialized for.
 */
function appendInitTask(message, { signal, getCtx, sourceBuffer }) {
  const { data, meta } = message;
  return new Task(async (taskSignal) => {
    const context = getCtx();
    // Aborted before starting: return the context untouched.
    if (taskSignal.aborted) return context;
    await appendSegment(sourceBuffer, data);
    return {
      ...context,
      initTrackId: meta.trackId
    };
  }, { signal });
}
2873
/**
 * Build a Task that appends a media segment and updates the actor context's
 * segment model and buffered-range snapshot.
 *
 * Segments are deduplicated by startTime (within EPSILON) so a re-append at
 * the same position replaces the old model entry. When the payload is not a
 * plain ArrayBuffer (presumably a streaming append — TODO confirm), the
 * segment is first published as `partial: true` via onPartialContext, then
 * finalized without the flag once the append completes.
 */
function appendSegmentTask(message, { signal, getCtx, sourceBuffer, onPartialContext }) {
  return new Task(async (taskSignal) => {
    const ctx = getCtx();
    // Aborted before starting: return the context untouched.
    if (taskSignal.aborted) return ctx;
    const { meta } = message;
    // Tolerance for float startTime comparison when replacing a segment.
    const EPSILON = 1e-4;
    const filtered = ctx.segments.filter((s) => Math.abs(s.startTime - meta.startTime) >= EPSILON);
    // Non-ArrayBuffer data: expose the in-progress segment to observers
    // before the (potentially long) append, keeping the old bufferedRanges.
    if (!(message.data instanceof ArrayBuffer)) onPartialContext({
      ...ctx,
      segments: [...filtered, {
        id: meta.id,
        startTime: meta.startTime,
        duration: meta.duration,
        trackId: meta.trackId,
        ...meta.trackBandwidth !== void 0 && { trackBandwidth: meta.trackBandwidth },
        partial: true
      }],
      bufferedRanges: ctx.bufferedRanges
    });
    await appendSegment(sourceBuffer, message.data, taskSignal);
    // Final context: same segment entry without `partial`, plus a fresh
    // snapshot of the SourceBuffer's buffered ranges.
    return {
      ...ctx,
      segments: [...filtered, {
        id: meta.id,
        startTime: meta.startTime,
        duration: meta.duration,
        trackId: meta.trackId,
        ...meta.trackBandwidth !== void 0 && { trackBandwidth: meta.trackBandwidth }
      }],
      bufferedRanges: snapshotBuffered(sourceBuffer.buffered)
    };
  }, { signal });
}
2906
/**
 * Build a Task that flushes [start, end) from the SourceBuffer and drops
 * model segments whose midpoint is no longer inside any buffered range.
 */
function removeTask(message, { signal, getCtx, sourceBuffer }) {
  return new Task(async (taskSignal) => {
    const context = getCtx();
    // Aborted before starting: return the context untouched.
    if (taskSignal.aborted) return context;
    await flushBuffer(sourceBuffer, message.start, message.end);
    const bufferedRanges = snapshotBuffered(sourceBuffer.buffered);
    // Midpoint test keeps segments that merely touch a range boundary out.
    const isStillBuffered = (segment) => {
      const midpoint = segment.startTime + segment.duration / 2;
      return bufferedRanges.some(({ start, end }) => midpoint >= start && midpoint < end);
    };
    return {
      ...context,
      segments: context.segments.filter(isStillBuffered),
      bufferedRanges
    };
  }, { signal });
}
2923
// Dispatch table: actor message type -> Task factory for that operation.
const messageTaskFactories = {
  "append-init": appendInitTask,
  "append-segment": appendSegmentTask,
  remove: removeTask
};
2928
/**
 * Convert an actor message into a runnable Task via the dispatch table.
 * Throws a TypeError (undefined is not a function) on unknown message types.
 */
function messageToTask(message, options) {
  return messageTaskFactories[message.type](message, options);
}
2932
/**
 * Create an actor that serializes all operations against one SourceBuffer.
 *
 * Exposes a snapshot ({ status, context }), a subscribe hook, `send` for a
 * single message, `batch` for an ordered group of messages, and `destroy`.
 * Status is 'idle' | 'updating' | 'destroyed'; messages are rejected with
 * SourceBufferActorError unless the actor is idle.
 *
 * @param sourceBuffer - the SourceBuffer this actor owns
 * @param initialContext - optional overrides merged into the initial context
 */
function createSourceBufferActor(sourceBuffer, initialContext) {
  const state = createState({
    status: "idle",
    context: {
      segments: [],
      bufferedRanges: [],
      initTrackId: void 0,
      ...initialContext
    }
  });
  // All SourceBuffer operations run strictly one-at-a-time.
  const runner = new SerialRunner();
  // Commit a task's resulting context and return to idle — unless destroy()
  // raced in, in which case the destroyed status is preserved.
  function applyResult(newContext) {
    const status = state.current.status === "destroyed" ? "destroyed" : "idle";
    state.patch({
      status,
      context: newContext
    });
    state.flush();
  }
  // On failure: restore idle (or keep destroyed), then rethrow so the
  // caller's promise rejects with the original error.
  function handleError(e) {
    const status = state.current.status === "destroyed" ? "destroyed" : "idle";
    state.patch({ status });
    state.flush();
    throw e;
  }
  return {
    get snapshot() {
      return state.current;
    },
    subscribe(listener) {
      return state.subscribe(listener);
    },
    /** Run one message; resolves when committed, rejects if not idle. */
    send(message, signal) {
      if (state.current.status !== "idle") return Promise.reject(new SourceBufferActorError(`send() called while actor is ${state.current.status}`));
      state.patch({ status: "updating" });
      // Mid-operation context updates (e.g. partial segments) are flushed
      // immediately so observers see progress while still 'updating'.
      const onPartialContext = (ctx) => {
        state.patch({
          status: "updating",
          context: ctx
        });
        state.flush();
      };
      const task = messageToTask(message, {
        signal,
        getCtx: () => state.current.context,
        sourceBuffer,
        onPartialContext
      });
      return runner.schedule(task).then(applyResult).catch(handleError);
    },
    /**
     * Run an ordered group of messages as one 'updating' span. Intermediate
     * results are threaded through workingCtx; only the final task's result
     * is committed to state.
     */
    batch(messages, signal) {
      if (state.current.status !== "idle") return Promise.reject(new SourceBufferActorError(`batch() called while actor is ${state.current.status}`));
      if (messages.length === 0) return Promise.resolve();
      state.patch({ status: "updating" });
      let workingCtx = state.current.context;
      const onPartialContext = (ctx) => {
        state.patch({
          status: "updating",
          context: ctx
        });
        state.flush();
      };
      for (const message of messages.slice(0, -1)) {
        const task = messageToTask(message, {
          signal,
          // Lazy getCtx so each task sees the previous task's result.
          getCtx: () => workingCtx,
          sourceBuffer,
          onPartialContext
        });
        // NOTE(review): this intermediate promise has no .catch — if an
        // intermediate task rejects, it surfaces as an unhandled rejection
        // and workingCtx is simply not advanced. Confirm SerialRunner's
        // failure semantics cover this.
        runner.schedule(task).then((newCtx) => {
          workingCtx = newCtx;
        });
      }
      const lastTask = messageToTask(messages[messages.length - 1], {
        signal,
        getCtx: () => workingCtx,
        sourceBuffer,
        onPartialContext
      });
      return runner.schedule(lastTask).then(applyResult).catch(handleError);
    },
    /** Mark destroyed and tear down the runner; no further messages run. */
    destroy() {
      state.patch({ status: "destroyed" });
      state.flush();
      runner.destroy();
    }
  };
}
3020
+
3021
+ //#endregion
3022
+ //#region ../spf/dist/dev/dom/features/setup-sourcebuffer.js
3023
/**
 * Build MIME codec string from track metadata.
 *
 * @param track - Resolved track with mimeType and codecs
 * @returns MIME codec string (e.g., 'video/mp4; codecs="avc1.42E01E,mp4a.40.2"')
 *
 * @example
 * buildMimeCodec({ mimeType: 'video/mp4', codecs: ['avc1.42E01E'] })
 * // => 'video/mp4; codecs="avc1.42E01E"'
 */
function buildMimeCodec(track) {
  const codecList = track.codecs ?? [];
  return `${track.mimeType}; codecs="${codecList.join(",")}"`;
}
3037
/**
 * Setup all needed SourceBuffers as a single coordinated operation.
 *
 * Waits until ALL selected tracks (video and/or audio) are resolved with
 * codecs, then creates every SourceBuffer in one synchronous block before
 * patching owners. This guarantees that downstream consumers (e.g.
 * loadSegments) never see a partial set of SourceBuffers — preventing the
 * Firefox bug where appending to a video SourceBuffer before the audio
 * SourceBuffer exists causes mozHasAudio to be permanently false.
 *
 * Handles video-only, audio-only, and combined presentations correctly:
 * only the tracks that are actually selected are waited on and created.
 *
 * @example
 * const cleanup = setupSourceBuffers({ state, owners });
 */
function setupSourceBuffers({ state, owners }) {
  // One-shot: after a successful setup this subscriber becomes a no-op.
  let setupDone = false;
  return combineLatest([state, owners]).subscribe(async ([currentState, currentOwners]) => {
    if (setupDone) return;
    if (!currentOwners.mediaSource) return;
    const videoSelected = !!currentState.selectedVideoTrackId;
    const audioSelected = !!currentState.selectedAudioTrackId;
    if (!videoSelected && !audioSelected) return;
    const videoTrack = videoSelected ? getSelectedTrack(currentState, "video") : null;
    const audioTrack = audioSelected ? getSelectedTrack(currentState, "audio") : null;
    // Every SELECTED track must be resolved with codecs before anything is
    // created — all-or-nothing (see Firefox note above).
    if (videoSelected && (!videoTrack || !isResolvedTrack(videoTrack) || !videoTrack.codecs?.length)) return;
    if (audioSelected && (!audioTrack || !isResolvedTrack(audioTrack) || !audioTrack.codecs?.length)) return;
    // Latch before creating: the owners.patch below re-fires this subscriber.
    setupDone = true;
    const patch = {};
    if (videoSelected && videoTrack && isResolvedTrack(videoTrack)) {
      const buffer = createSourceBuffer(currentOwners.mediaSource, buildMimeCodec(videoTrack));
      patch.videoBuffer = buffer;
      patch.videoBufferActor = createSourceBufferActor(buffer);
    }
    if (audioSelected && audioTrack && isResolvedTrack(audioTrack)) {
      const buffer = createSourceBuffer(currentOwners.mediaSource, buildMimeCodec(audioTrack));
      patch.audioBuffer = buffer;
      patch.audioBufferActor = createSourceBufferActor(buffer);
    }
    // Publish all buffers/actors atomically in one patch.
    owners.patch(patch);
    // Let the browser settle the new SourceBuffers for one frame.
    await new Promise((resolve) => requestAnimationFrame(resolve));
  });
}
3081
+
3082
+ //#endregion
3083
+ //#region ../spf/dist/dev/dom/features/setup-text-tracks.js
3084
/**
 * Get all text tracks from a presentation, or an empty array when the
 * presentation has no text selection set.
 */
function getTextTracks(presentation) {
  const tracks = presentation?.selectionSets
    ?.find(({ type }) => type === "text")
    ?.switchingSets?.[0]?.tracks;
  return tracks ?? [];
}
3093
/**
 * Check if we can setup text tracks.
 *
 * Requires a mediaElement and at least one text track in the presentation.
 */
function canSetupTextTracks(state, owners) {
  if (!owners.mediaElement) return false;
  return getTextTracks(state.presentation).length > 0;
}
3103
/**
 * Check if we should setup text tracks (not already set up).
 */
function shouldSetupTextTracks(owners) {
  const alreadySetUp = Boolean(owners.textTracks);
  return !alreadySetUp;
}
3109
/**
 * Create a <track> element for a text track.
 *
 * Note: We use DOM <track> elements instead of the TextTrack JS API
 * because there's no way to remove TextTracks added via addTextTrack().
 */
function createTrackElement(track) {
  const element = document.createElement("track");
  element.id = track.id;
  element.kind = track.kind;
  element.label = track.label;
  // Optional attributes only when the track provides them.
  if (track.language) element.srclang = track.language;
  if (track.default) element.default = true;
  element.src = track.url;
  return element;
}
3125
/**
 * Setup text tracks orchestration.
 *
 * Triggers when:
 * - mediaElement exists
 * - presentation is resolved (has text tracks)
 *
 * Creates <track> elements for all text tracks and adds them as children
 * to the media element. This allows the browser's native text track rendering.
 *
 * Note: Uses DOM track elements instead of TextTrack API because tracks
 * added via addTextTrack() cannot be removed.
 *
 * @example
 * const cleanup = setupTextTracks({ state, owners });
 */
function setupTextTracks({ state, owners }) {
  // One-shot latch: track elements are only created once.
  let hasSetup = false;
  // Elements we appended, so cleanup can remove exactly what we created.
  let createdTracks = [];
  const unsubscribe = combineLatest([state, owners]).subscribe(([s, o]) => {
    if (hasSetup) return;
    if (!canSetupTextTracks(s, o) || !shouldSetupTextTracks(o)) return;
    // Latch before patching owners below (the patch re-fires this subscriber).
    hasSetup = true;
    const textTracks = getTextTracks(s.presentation);
    // Defensive: canSetupTextTracks already guarantees a non-empty list.
    if (textTracks.length === 0) return;
    const trackMap = /* @__PURE__ */ new Map();
    for (const track of textTracks) {
      const trackElement = createTrackElement(track);
      o.mediaElement.appendChild(trackElement);
      trackMap.set(track.id, trackElement);
      createdTracks.push(trackElement);
    }
    // Publish the id -> element map for downstream features (mode syncing).
    owners.patch({ textTracks: trackMap });
  });
  return () => {
    for (const trackElement of createdTracks) trackElement.remove();
    createdTracks = [];
    unsubscribe();
  };
}
3165
+
3166
+ //#endregion
3167
+ //#region ../spf/dist/dev/dom/features/sync-selected-text-track-from-dom.js
3168
+ /**
3169
+ * Sync selectedTextTrackId from DOM text track mode changes.
3170
+ *
3171
+ * Listens to the `change` event on `media.textTracks` and updates
3172
+ * `selectedTextTrackId` when external code (e.g. the captions button via
3173
+ * `toggleSubtitles()`) changes a subtitle/caption track mode to 'showing'.
3174
+ *
3175
+ * This bridges the core store's `toggleSubtitles()` with SPF's reactive text
3176
+ * track pipeline (`syncTextTrackModes`, `loadTextTrackCues`). Without this
3177
+ * bridge, direct DOM mode changes would be immediately overridden by
3178
+ * `syncTextTrackModes` on the next SPF state update.
3179
+ *
3180
+ * When a subtitle/caption track's mode is 'showing', its DOM `id` — which
3181
+ * matches the SPF track ID set by `setupTextTracks` — is written to
3182
+ * `selectedTextTrackId`. When no subtitle/caption track is 'showing',
3183
+ * `selectedTextTrackId` is cleared along with the deselected track's
3184
+ * `textBufferState` entry — setting mode to 'disabled' clears native cues from
3185
+ * the track element, so the buffer must be reset to re-fetch cues on re-enable.
3186
+ *
3187
+ * @example
3188
+ * const cleanup = syncSelectedTextTrackFromDom({ state, owners });
3189
+ */
3190
+ function syncSelectedTextTrackFromDom({ state, owners }) {
3191
+ let lastMediaElement;
3192
+ let removeListener = null;
3193
+ const unsubscribe = owners.subscribe((currentOwners) => {
3194
+ const { mediaElement } = currentOwners;
3195
+ if (mediaElement === lastMediaElement) return;
3196
+ removeListener?.();
3197
+ removeListener = null;
3198
+ lastMediaElement = mediaElement;
3199
+ if (!mediaElement) return;
3200
+ const sync = () => {
3201
+ const newId = Array.from(mediaElement.textTracks).find((t) => t.mode === "showing" && (t.kind === "subtitles" || t.kind === "captions"))?.id || void 0;
3202
+ const current = state.current;
3203
+ if (current.selectedTextTrackId === newId) return;
3204
+ if (newId) state.patch({ selectedTextTrackId: newId });
3205
+ else {
3206
+ const prevId = current.selectedTextTrackId;
3207
+ if (prevId && current.textBufferState?.[prevId]) {
3208
+ const next = { ...current.textBufferState };
3209
+ delete next[prevId];
3210
+ state.patch({
3211
+ selectedTextTrackId: void 0,
3212
+ textBufferState: next
3213
+ });
3214
+ } else state.patch({ selectedTextTrackId: void 0 });
3215
+ }
3216
+ };
3217
+ removeListener = listen(mediaElement.textTracks, "change", sync);
3218
+ });
3219
+ return () => {
3220
+ removeListener?.();
3221
+ unsubscribe();
3222
+ };
3223
+ }
3224
+
3225
+ //#endregion
3226
+ //#region ../spf/dist/dev/dom/features/sync-text-track-modes.js
3227
/**
 * Check if we can sync text track modes: the textTracks map must exist and
 * contain at least one track element.
 */
function canSyncTextTrackModes(owners) {
  const { textTracks } = owners;
  return Boolean(textTracks) && textTracks.size > 0;
}
3236
/**
 * Sync text track modes orchestration.
 *
 * Manages track element modes based on selectedTextTrackId:
 * - Selected track: mode = "showing"
 * - All other tracks (or no selection): mode = "hidden"
 *
 * Note: Uses "hidden" instead of "disabled" for non-selected tracks
 * so they remain available in the browser's track menu.
 *
 * @example
 * const cleanup = syncTextTrackModes({ state, owners });
 */
function syncTextTrackModes({ state, owners }) {
  return combineLatest([state, owners]).subscribe(([currentState, currentOwners]) => {
    if (!canSyncTextTrackModes(currentOwners)) return;
    const { selectedTextTrackId } = currentState;
    for (const [trackId, trackElement] of currentOwners.textTracks) {
      trackElement.track.mode = trackId === selectedTextTrackId ? "showing" : "hidden";
    }
  });
}
3258
+
3259
+ //#endregion
3260
+ //#region ../spf/dist/dev/dom/features/update-duration.js
3261
/**
 * Check if we can update MediaSource duration: a MediaSource exists and the
 * presentation reports a duration.
 */
function canUpdateDuration(state, owners) {
  if (!owners.mediaSource) return false;
  if (!state.presentation) return false;
  return Boolean(hasPresentationDuration(state.presentation));
}
3267
/**
 * Get the maximum buffered end time across all SourceBuffers.
 *
 * Fix: this module read `owners.videoSourceBuffer`/`audioSourceBuffer`,
 * keys nothing else in the pipeline ever sets — `setupSourceBuffers`
 * patches `videoBuffer`/`audioBuffer` (also read by `shouldEndStream` and
 * the end-of-stream helpers), so the lookup always missed and returned 0.
 * The old keys are still checked first for backward compatibility.
 *
 * @returns {number} max end of the last buffered range, or 0 when empty
 */
function getMaxBufferedEnd(owners) {
  let maxEnd = 0;
  const buffers = [
    owners.videoSourceBuffer ?? owners.videoBuffer,
    owners.audioSourceBuffer ?? owners.audioBuffer
  ].filter((buf) => buf != null);
  for (const buffer of buffers) {
    const { buffered } = buffer;
    if (buffered.length > 0) {
      const end = buffered.end(buffered.length - 1);
      if (end > maxEnd) maxEnd = end;
    }
  }
  return maxEnd;
}
3282
/**
 * Check if we should update MediaSource duration (conditions met).
 *
 * Only fires while MediaSource.duration is still NaN (i.e. never set),
 * the MediaSource is open, and the presentation duration is a positive
 * finite number.
 */
function shouldUpdateDuration(state, owners) {
  if (!canUpdateDuration(state, owners)) return false;
  if (owners.mediaSource.readyState !== "open") return false;
  const { duration } = state.presentation;
  // Number.isFinite already rejects NaN and ±Infinity.
  if (!Number.isFinite(duration) || duration <= 0) return false;
  return Number.isNaN(owners.mediaSource.duration);
}
3294
/**
 * Wait for all currently-updating SourceBuffers to finish.
 *
 * The MSE spec forbids setting MediaSource.duration while any attached
 * SourceBuffer has updating === true. This defers until all are idle.
 *
 * Fix: this module read `owners.videoSourceBuffer`/`audioSourceBuffer`,
 * keys nothing else in the pipeline ever sets — `setupSourceBuffers`
 * patches `videoBuffer`/`audioBuffer` — so the wait always resolved
 * immediately. The old keys are still checked first for compatibility.
 *
 * @returns {Promise<void>} resolves once no buffer is updating
 */
function waitForSourceBuffersReady(owners) {
  const updating = [
    owners.videoSourceBuffer ?? owners.videoBuffer,
    owners.audioSourceBuffer ?? owners.audioBuffer
  ].filter((buf) => buf?.updating === true);
  if (updating.length === 0) return Promise.resolve();
  return Promise.all(updating.map((buf) => new Promise((resolve) => buf.addEventListener("updateend", () => resolve(), { once: true })))).then(() => void 0);
}
3305
/**
 * Update MediaSource duration when presentation duration becomes available.
 *
 * Subscribes to combined state/owners updates and, once the preconditions in
 * shouldUpdateDuration hold, writes the presentation duration onto the
 * MediaSource. Returns a teardown function that unsubscribes and flags the
 * orchestration as destroyed so an in-flight async update bails out.
 */
function updateDuration({ state, owners }) {
	let destroyed = false;
	const unsubscribe = combineLatest([state, owners]).subscribe(async ([currentState, currentOwners]) => {
		if (!shouldUpdateDuration(currentState, currentOwners)) return;
		const { mediaSource } = currentOwners;
		// MSE forbids setting duration while any SourceBuffer is updating; defer until idle.
		await waitForSourceBuffersReady(currentOwners);
		// Re-check after the await: teardown may have run or the MediaSource may have closed.
		if (destroyed || mediaSource.readyState !== "open") return;
		let duration = currentState.presentation.duration;
		// Clamp up to the furthest buffered end — presumably to avoid setting a
		// duration below already-buffered media. NOTE(review): confirm intent.
		const maxBufferedEnd = getMaxBufferedEnd(currentOwners);
		if (maxBufferedEnd > duration) duration = maxBufferedEnd;
		mediaSource.duration = duration;
	});
	return () => {
		destroyed = true;
		unsubscribe();
	};
}
3325
+
3326
+ //#endregion
3327
+ //#region ../spf/dist/dev/dom/playback-engine/engine.js
3328
/**
 * Create a POC playback engine.
 *
 * Wires together all orchestrations to create a reactive playback pipeline:
 * 1. Resolve presentation (multivariant playlist)
 * 2. Select initial video and audio tracks
 * 3. Resolve selected tracks (media playlists)
 * 4. Setup MediaSource
 * 5. Setup SourceBuffers for video and audio
 *
 * Note: This is a POC - does not yet load/append segments.
 *
 * @param config - Playback engine configuration
 * @returns Playback engine instance with state, owners, and destroy function
 *
 * @example
 * const engine = createPlaybackEngine({
 *   initialBandwidth: 2_000_000,
 *   preferredAudioLanguage: 'en',
 * });
 *
 * // Initialize by patching state and owners
 * engine.owners.patch({ mediaElement: document.querySelector('video') });
 * engine.state.patch({
 *   presentation: { url: 'https://example.com/playlist.m3u8' },
 *   preload: 'auto',
 * });
 *
 * // Inspect state
 * console.log(engine.state.current);
 *
 * // Cleanup
 * engine.destroy();
 */
function createPlaybackEngine(config = {}) {
	// Reactive state store, seeded with a zeroed bandwidth-estimator record.
	const state = createState({ bandwidthState: {
		fastEstimate: 0,
		fastTotalWeight: 0,
		slowEstimate: 0,
		slowTotalWeight: 0,
		bytesSampled: 0
	} });
	// Owners hold imperative resources (media element, MediaSource, SourceBuffers, text tracks).
	const owners = createState({});
	const events = createEventStream();
	// Each orchestration subscribes to state/owners/events and returns a cleanup
	// function; all cleanups are invoked together in destroy().
	// Spread-with-&& below only includes a config key when it was explicitly set,
	// letting each orchestration apply its own default otherwise.
	const cleanups = [
		syncPreloadAttribute(state, owners),
		trackPlaybackInitiated({
			state,
			owners,
			events
		}),
		resolvePresentation({
			state,
			events
		}),
		selectVideoTrack({
			state,
			owners,
			events
		}, {
			type: "video",
			...config.initialBandwidth !== void 0 && { initialBandwidth: config.initialBandwidth }
		}),
		selectAudioTrack({
			state,
			owners,
			events
		}, {
			type: "audio",
			...config.preferredAudioLanguage !== void 0 && { preferredAudioLanguage: config.preferredAudioLanguage }
		}),
		selectTextTrack({
			state,
			owners,
			events
		}, {
			type: "text",
			...config.preferredSubtitleLanguage !== void 0 && { preferredSubtitleLanguage: config.preferredSubtitleLanguage },
			...config.includeForcedTracks !== void 0 && { includeForcedTracks: config.includeForcedTracks },
			...config.enableDefaultTrack !== void 0 && { enableDefaultTrack: config.enableDefaultTrack }
		}),
		resolveTrack({
			state,
			events
		}, { type: "video" }),
		resolveTrack({
			state,
			events
		}, { type: "audio" }),
		resolveTrack({
			state,
			events
		}, { type: "text" }),
		calculatePresentationDuration({ state }),
		setupMediaSource({
			state,
			owners
		}),
		updateDuration({
			state,
			owners
		}),
		setupSourceBuffers({
			state,
			owners
		}),
		trackCurrentTime({
			state,
			owners
		}),
		switchQuality({ state }),
		loadSegments({
			state,
			owners
		}, { type: "video" }),
		loadSegments({
			state,
			owners
		}, { type: "audio" }),
		endOfStream({
			state,
			owners
		}),
		setupTextTracks({
			state,
			owners
		}),
		syncTextTrackModes({
			state,
			owners
		}),
		syncSelectedTextTrackFromDom({
			state,
			owners
		}),
		loadTextTrackCues({
			state,
			owners
		})
	];
	// Kick the pipeline so event-driven orchestrations run their initial pass.
	events.dispatch({ type: "@@INITIALIZE@@" });
	return {
		state,
		owners,
		events,
		destroy: () => {
			cleanups.forEach((cleanup) => cleanup());
			// NOTE(review): destroyVttParser looks like a module-level singleton
			// teardown — confirm it is safe when several engines coexist.
			destroyVttParser();
		}
	};
}
3479
+
3480
+ //#endregion
3481
+ //#region ../spf/dist/dev/dom/playback-engine/adapter.js
3482
/**
 * HTMLMediaElement-compatible adapter for the SPF playback engine.
 *
 * Implements the src/play() contract per the WHATWG HTML spec so that SPF can
 * be used anywhere a media element API is expected.
 *
 * A new engine is created on every src assignment — this fully tears down all
 * state, SourceBuffers, and in-flight requests from the previous source before
 * the next one begins. The media element reference is preserved across src
 * changes and re-applied to the new engine automatically.
 *
 * @example
 * const media = new SpfMedia({ preferredAudioLanguage: 'en' });
 * media.attach(document.querySelector('video'));
 * media.src = 'https://stream.mux.com/abc123.m3u8';
 *
 * // Change source — old engine is destroyed, new one starts clean:
 * media.src = 'https://stream.mux.com/xyz456.m3u8';
 *
 * // Explicit teardown:
 * media.destroy();
 */
var SpfMedia = class {
	// Active playback engine; replaced wholesale on every src assignment.
	#engine;
	// Config captured at construction; reused for every replacement engine.
	#config;
	// Mirrors the HTMLMediaElement preload attribute ("" until explicitly set).
	#preload = "";
	/** Pending loadstart listener from a deferred play() retry, if any. */
	#loadstartListener = null;
	constructor(config = {}) {
		this.#config = config;
		this.#engine = createPlaybackEngine(config);
	}
	/** The current underlying playback engine (read-only accessor). */
	get engine() {
		return this.#engine;
	}
	/** Connect a media element; the engine reacts through its owners store. */
	attach(mediaElement) {
		this.#engine.owners.patch({ mediaElement });
	}
	/** Disconnect the media element and drop any deferred play() retry. */
	detach() {
		this.#cancelPendingPlay();
		this.#engine.owners.patch({ mediaElement: void 0 });
	}
	/** Tear down the engine and any pending play retry. */
	destroy() {
		this.#cancelPendingPlay();
		this.#engine.destroy();
	}
	get preload() {
		return this.#preload;
	}
	set preload(value) {
		this.#preload = value;
		// Only truthy values are forwarded; "" leaves engine state untouched.
		if (value) this.#engine.state.patch({ preload: value });
	}
	get src() {
		return this.#engine.state.current.presentation?.url ?? "";
	}
	set src(value) {
		// Preserve the attached element across the engine swap.
		const prevMediaElement = this.#engine.owners.current.mediaElement;
		this.#cancelPendingPlay();
		// Full teardown of the previous source before starting the next.
		this.#engine.destroy();
		this.#engine = createPlaybackEngine(this.#config);
		if (this.#preload) this.#engine.state.patch({ preload: this.#preload });
		if (prevMediaElement) this.#engine.owners.patch({ mediaElement: prevMediaElement });
		if (value) this.#engine.state.patch({ presentation: { url: value } });
	}
	/**
	 * Start playback on the attached element.
	 *
	 * On rejection, if a src is set, the call is retried once on the element's
	 * next "loadstart" event; otherwise the original error is rethrown.
	 * NOTE(review): if #cancelPendingPlay runs before "loadstart" fires, the
	 * returned retry promise never settles — verify callers tolerate this.
	 */
	play() {
		const { mediaElement } = this.#engine.owners.current;
		if (!mediaElement) return Promise.reject(/* @__PURE__ */ new Error("SpfMedia: no media element attached"));
		this.#engine.state.patch({ playbackInitiated: true });
		return mediaElement.play().catch((err) => {
			if (this.src) return new Promise((resolve, reject) => {
				const listener = () => {
					this.#loadstartListener = null;
					mediaElement.play().then(resolve, reject);
				};
				this.#loadstartListener = listener;
				mediaElement.addEventListener("loadstart", listener, { once: true });
			});
			throw err;
		});
	}
	// Remove a deferred play() retry, if one is pending.
	// NOTE(review): the listener is removed from the *current* engine's media
	// element; if the element was swapped after play() scheduled the retry, the
	// original element keeps its { once } listener — confirm this is acceptable.
	#cancelPendingPlay() {
		if (!this.#loadstartListener) return;
		const { mediaElement } = this.#engine.owners.current;
		mediaElement?.removeEventListener("loadstart", this.#loadstartListener);
		this.#loadstartListener = null;
	}
};
3570
+
3571
+ //#endregion
3572
+ //#region ../core/dist/dev/dom/media/simple-hls/index.js
3573
// Custom media element backed by the SPF engine: CustomMediaMixin supplies the
// <video>-based element shell (falling back to a plain class when no DOM is
// present, e.g. during SSR), and DelegateMixin forwards the media API to an
// SpfMedia delegate.
var SimpleHlsCustomMedia = class extends DelegateMixin(CustomMediaMixin(globalThis.HTMLElement ?? class {}, { tag: "video" }), SpfMedia) {};
3574
+
3575
+ //#endregion
3576
+ //#region src/media/simple-hls-video/index.ts
3577
var SimpleHlsVideo = class extends MediaAttachMixin(SimpleHlsCustomMedia) {
	/** Render the template without a native src attribute — src is routed to the engine instead. */
	static getTemplateHTML(attrs) {
		const { src: _src, ...withoutSrc } = attrs;
		return super.getTemplateHTML(withoutSrc);
	}
	constructor() {
		super();
		// Wire the engine to the underlying media element as soon as we exist.
		this.attach(this.target);
	}
	attributeChangedCallback(attrName, oldValue, newValue) {
		// src never reaches the base class; it is handled entirely below.
		if (attrName !== "src") super.attributeChangedCallback(attrName, oldValue, newValue);
		if (oldValue === newValue) return;
		if (attrName === "src") this.src = newValue ?? "";
		else if (attrName === "preload") this.preload = newValue ?? "";
	}
};
3592
+
3593
+ //#endregion
3594
+ //#region src/define/media/simple-hls-video.ts
3595
// Concrete registration: pin the tag name as a static own property and define
// the element globally.
var SimpleHlsVideoElement = class extends SimpleHlsVideo {
	static tagName = "simple-hls-video";
};
customElements.define(SimpleHlsVideoElement.tagName, SimpleHlsVideoElement);
3601
+
3602
+ //#endregion
3603
+ //# sourceMappingURL=simple-hls-video.dev.js.map