StreamingCommunity 2.5.2__py3-none-any.whl → 2.5.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of StreamingCommunity might be problematic. Click here for more details.

Files changed (268) hide show
  1. StreamingCommunity/Api/Player/Helper/Vixcloud/js_parser.py +143 -143
  2. StreamingCommunity/Api/Player/Helper/Vixcloud/util.py +136 -136
  3. StreamingCommunity/Api/Player/ddl.py +89 -89
  4. StreamingCommunity/Api/Player/maxstream.py +151 -151
  5. StreamingCommunity/Api/Player/supervideo.py +193 -193
  6. StreamingCommunity/Api/Player/vixcloud.py +272 -272
  7. StreamingCommunity/Api/Site/1337xx/__init__.py +51 -50
  8. StreamingCommunity/Api/Site/1337xx/costant.py +14 -14
  9. StreamingCommunity/Api/Site/1337xx/site.py +87 -89
  10. StreamingCommunity/Api/Site/1337xx/title.py +63 -64
  11. StreamingCommunity/Api/Site/altadefinizionegratis/__init__.py +74 -50
  12. StreamingCommunity/Api/Site/altadefinizionegratis/costant.py +21 -19
  13. StreamingCommunity/Api/Site/altadefinizionegratis/film.py +81 -72
  14. StreamingCommunity/Api/Site/altadefinizionegratis/site.py +116 -94
  15. StreamingCommunity/Api/Site/animeunity/__init__.py +75 -50
  16. StreamingCommunity/Api/Site/animeunity/costant.py +21 -19
  17. StreamingCommunity/Api/Site/animeunity/film_serie.py +171 -134
  18. StreamingCommunity/Api/Site/animeunity/site.py +191 -174
  19. StreamingCommunity/Api/Site/animeunity/util/ScrapeSerie.py +97 -97
  20. StreamingCommunity/Api/Site/cb01new/__init__.py +51 -51
  21. StreamingCommunity/Api/Site/cb01new/costant.py +19 -19
  22. StreamingCommunity/Api/Site/cb01new/film.py +61 -71
  23. StreamingCommunity/Api/Site/cb01new/site.py +82 -82
  24. StreamingCommunity/Api/Site/ddlstreamitaly/__init__.py +55 -55
  25. StreamingCommunity/Api/Site/ddlstreamitaly/costant.py +20 -20
  26. StreamingCommunity/Api/Site/ddlstreamitaly/series.py +149 -145
  27. StreamingCommunity/Api/Site/ddlstreamitaly/site.py +98 -98
  28. StreamingCommunity/Api/Site/ddlstreamitaly/util/ScrapeSerie.py +84 -84
  29. StreamingCommunity/Api/Site/guardaserie/__init__.py +50 -50
  30. StreamingCommunity/Api/Site/guardaserie/costant.py +19 -19
  31. StreamingCommunity/Api/Site/guardaserie/series.py +199 -198
  32. StreamingCommunity/Api/Site/guardaserie/site.py +89 -89
  33. StreamingCommunity/Api/Site/guardaserie/util/ScrapeSerie.py +110 -110
  34. StreamingCommunity/Api/Site/ilcorsaronero/__init__.py +51 -51
  35. StreamingCommunity/Api/Site/ilcorsaronero/costant.py +18 -18
  36. StreamingCommunity/Api/Site/ilcorsaronero/site.py +71 -71
  37. StreamingCommunity/Api/Site/ilcorsaronero/title.py +44 -44
  38. StreamingCommunity/Api/Site/ilcorsaronero/util/ilCorsarScraper.py +149 -149
  39. StreamingCommunity/Api/Site/mostraguarda/__init__.py +48 -48
  40. StreamingCommunity/Api/Site/mostraguarda/costant.py +18 -18
  41. StreamingCommunity/Api/Site/mostraguarda/film.py +90 -101
  42. StreamingCommunity/Api/Site/streamingcommunity/__init__.py +79 -55
  43. StreamingCommunity/Api/Site/streamingcommunity/costant.py +21 -19
  44. StreamingCommunity/Api/Site/streamingcommunity/film.py +86 -75
  45. StreamingCommunity/Api/Site/streamingcommunity/series.py +259 -207
  46. StreamingCommunity/Api/Site/streamingcommunity/site.py +156 -142
  47. StreamingCommunity/Api/Site/streamingcommunity/util/ScrapeSerie.py +124 -124
  48. StreamingCommunity/Api/Template/Class/SearchType.py +101 -101
  49. StreamingCommunity/Api/Template/Util/__init__.py +4 -4
  50. StreamingCommunity/Api/Template/Util/get_domain.py +201 -201
  51. StreamingCommunity/Api/Template/Util/manage_ep.py +178 -178
  52. StreamingCommunity/Api/Template/Util/recall_search.py +37 -37
  53. StreamingCommunity/Api/Template/__init__.py +2 -2
  54. StreamingCommunity/Api/Template/site.py +87 -87
  55. StreamingCommunity/Lib/Downloader/HLS/downloader.py +529 -1008
  56. StreamingCommunity/Lib/Downloader/HLS/proxyes.py +110 -110
  57. StreamingCommunity/Lib/Downloader/HLS/segments.py +446 -573
  58. StreamingCommunity/Lib/Downloader/MP4/downloader.py +181 -155
  59. StreamingCommunity/Lib/Downloader/TOR/downloader.py +297 -295
  60. StreamingCommunity/Lib/Downloader/__init__.py +4 -4
  61. StreamingCommunity/Lib/FFmpeg/__init__.py +4 -4
  62. StreamingCommunity/Lib/FFmpeg/capture.py +170 -170
  63. StreamingCommunity/Lib/FFmpeg/command.py +264 -296
  64. StreamingCommunity/Lib/FFmpeg/util.py +248 -248
  65. StreamingCommunity/Lib/M3U8/__init__.py +5 -5
  66. StreamingCommunity/Lib/M3U8/decryptor.py +164 -164
  67. StreamingCommunity/Lib/M3U8/estimator.py +146 -228
  68. StreamingCommunity/Lib/M3U8/parser.py +666 -666
  69. StreamingCommunity/Lib/M3U8/url_fixer.py +57 -57
  70. StreamingCommunity/Lib/TMBD/__init__.py +1 -1
  71. StreamingCommunity/Lib/TMBD/obj_tmbd.py +39 -39
  72. StreamingCommunity/Lib/TMBD/tmdb.py +345 -345
  73. StreamingCommunity/TelegramHelp/__init__.py +0 -0
  74. StreamingCommunity/TelegramHelp/request_manager.py +82 -0
  75. StreamingCommunity/TelegramHelp/session.py +56 -0
  76. StreamingCommunity/TelegramHelp/telegram_bot.py +561 -0
  77. StreamingCommunity/Upload/update.py +75 -67
  78. StreamingCommunity/Upload/version.py +5 -5
  79. StreamingCommunity/Util/_jsonConfig.py +227 -228
  80. StreamingCommunity/Util/call_stack.py +42 -42
  81. StreamingCommunity/Util/color.py +20 -20
  82. StreamingCommunity/Util/console.py +12 -12
  83. StreamingCommunity/Util/ffmpeg_installer.py +342 -370
  84. StreamingCommunity/Util/headers.py +159 -159
  85. StreamingCommunity/Util/logger.py +61 -61
  86. StreamingCommunity/Util/message.py +36 -64
  87. StreamingCommunity/Util/os.py +500 -507
  88. StreamingCommunity/Util/table.py +271 -228
  89. StreamingCommunity/run.py +352 -245
  90. {StreamingCommunity-2.5.2.dist-info → StreamingCommunity-2.5.5.dist-info}/LICENSE +674 -674
  91. {StreamingCommunity-2.5.2.dist-info → StreamingCommunity-2.5.5.dist-info}/METADATA +601 -543
  92. StreamingCommunity-2.5.5.dist-info/RECORD +96 -0
  93. {StreamingCommunity-2.5.2.dist-info → StreamingCommunity-2.5.5.dist-info}/entry_points.txt +0 -1
  94. StreamingCommunity/Api/Player/Helper/Vixcloud/__pycache__/js_parser.cpython-313.pyc +0 -0
  95. StreamingCommunity/Api/Player/Helper/Vixcloud/__pycache__/js_parser.cpython-39.pyc +0 -0
  96. StreamingCommunity/Api/Player/Helper/Vixcloud/__pycache__/util.cpython-313.pyc +0 -0
  97. StreamingCommunity/Api/Player/Helper/Vixcloud/__pycache__/util.cpython-39.pyc +0 -0
  98. StreamingCommunity/Api/Player/__pycache__/ddl.cpython-313.pyc +0 -0
  99. StreamingCommunity/Api/Player/__pycache__/ddl.cpython-39.pyc +0 -0
  100. StreamingCommunity/Api/Player/__pycache__/maxstream.cpython-313.pyc +0 -0
  101. StreamingCommunity/Api/Player/__pycache__/maxstream.cpython-39.pyc +0 -0
  102. StreamingCommunity/Api/Player/__pycache__/supervideo.cpython-313.pyc +0 -0
  103. StreamingCommunity/Api/Player/__pycache__/supervideo.cpython-39.pyc +0 -0
  104. StreamingCommunity/Api/Player/__pycache__/vixcloud.cpython-313.pyc +0 -0
  105. StreamingCommunity/Api/Player/__pycache__/vixcloud.cpython-39.pyc +0 -0
  106. StreamingCommunity/Api/Site/1337xx/__pycache__/__init__.cpython-313.pyc +0 -0
  107. StreamingCommunity/Api/Site/1337xx/__pycache__/__init__.cpython-39.pyc +0 -0
  108. StreamingCommunity/Api/Site/1337xx/__pycache__/costant.cpython-313.pyc +0 -0
  109. StreamingCommunity/Api/Site/1337xx/__pycache__/costant.cpython-39.pyc +0 -0
  110. StreamingCommunity/Api/Site/1337xx/__pycache__/site.cpython-313.pyc +0 -0
  111. StreamingCommunity/Api/Site/1337xx/__pycache__/site.cpython-39.pyc +0 -0
  112. StreamingCommunity/Api/Site/1337xx/__pycache__/title.cpython-313.pyc +0 -0
  113. StreamingCommunity/Api/Site/1337xx/__pycache__/title.cpython-39.pyc +0 -0
  114. StreamingCommunity/Api/Site/altadefinizionegratis/__pycache__/__init__.cpython-313.pyc +0 -0
  115. StreamingCommunity/Api/Site/altadefinizionegratis/__pycache__/__init__.cpython-39.pyc +0 -0
  116. StreamingCommunity/Api/Site/altadefinizionegratis/__pycache__/costant.cpython-313.pyc +0 -0
  117. StreamingCommunity/Api/Site/altadefinizionegratis/__pycache__/costant.cpython-39.pyc +0 -0
  118. StreamingCommunity/Api/Site/altadefinizionegratis/__pycache__/film.cpython-313.pyc +0 -0
  119. StreamingCommunity/Api/Site/altadefinizionegratis/__pycache__/film.cpython-39.pyc +0 -0
  120. StreamingCommunity/Api/Site/altadefinizionegratis/__pycache__/site.cpython-313.pyc +0 -0
  121. StreamingCommunity/Api/Site/altadefinizionegratis/__pycache__/site.cpython-39.pyc +0 -0
  122. StreamingCommunity/Api/Site/animeunity/__pycache__/__init__.cpython-313.pyc +0 -0
  123. StreamingCommunity/Api/Site/animeunity/__pycache__/__init__.cpython-39.pyc +0 -0
  124. StreamingCommunity/Api/Site/animeunity/__pycache__/costant.cpython-313.pyc +0 -0
  125. StreamingCommunity/Api/Site/animeunity/__pycache__/costant.cpython-39.pyc +0 -0
  126. StreamingCommunity/Api/Site/animeunity/__pycache__/film_serie.cpython-313.pyc +0 -0
  127. StreamingCommunity/Api/Site/animeunity/__pycache__/film_serie.cpython-39.pyc +0 -0
  128. StreamingCommunity/Api/Site/animeunity/__pycache__/site.cpython-313.pyc +0 -0
  129. StreamingCommunity/Api/Site/animeunity/__pycache__/site.cpython-39.pyc +0 -0
  130. StreamingCommunity/Api/Site/animeunity/util/__pycache__/ScrapeSerie.cpython-313.pyc +0 -0
  131. StreamingCommunity/Api/Site/animeunity/util/__pycache__/ScrapeSerie.cpython-39.pyc +0 -0
  132. StreamingCommunity/Api/Site/cb01new/__pycache__/__init__.cpython-313.pyc +0 -0
  133. StreamingCommunity/Api/Site/cb01new/__pycache__/__init__.cpython-39.pyc +0 -0
  134. StreamingCommunity/Api/Site/cb01new/__pycache__/costant.cpython-313.pyc +0 -0
  135. StreamingCommunity/Api/Site/cb01new/__pycache__/costant.cpython-39.pyc +0 -0
  136. StreamingCommunity/Api/Site/cb01new/__pycache__/film.cpython-313.pyc +0 -0
  137. StreamingCommunity/Api/Site/cb01new/__pycache__/film.cpython-39.pyc +0 -0
  138. StreamingCommunity/Api/Site/cb01new/__pycache__/site.cpython-313.pyc +0 -0
  139. StreamingCommunity/Api/Site/cb01new/__pycache__/site.cpython-39.pyc +0 -0
  140. StreamingCommunity/Api/Site/ddlstreamitaly/__pycache__/__init__.cpython-313.pyc +0 -0
  141. StreamingCommunity/Api/Site/ddlstreamitaly/__pycache__/__init__.cpython-39.pyc +0 -0
  142. StreamingCommunity/Api/Site/ddlstreamitaly/__pycache__/costant.cpython-313.pyc +0 -0
  143. StreamingCommunity/Api/Site/ddlstreamitaly/__pycache__/costant.cpython-39.pyc +0 -0
  144. StreamingCommunity/Api/Site/ddlstreamitaly/__pycache__/series.cpython-313.pyc +0 -0
  145. StreamingCommunity/Api/Site/ddlstreamitaly/__pycache__/series.cpython-39.pyc +0 -0
  146. StreamingCommunity/Api/Site/ddlstreamitaly/__pycache__/site.cpython-313.pyc +0 -0
  147. StreamingCommunity/Api/Site/ddlstreamitaly/__pycache__/site.cpython-39.pyc +0 -0
  148. StreamingCommunity/Api/Site/ddlstreamitaly/util/__pycache__/ScrapeSerie.cpython-313.pyc +0 -0
  149. StreamingCommunity/Api/Site/ddlstreamitaly/util/__pycache__/ScrapeSerie.cpython-39.pyc +0 -0
  150. StreamingCommunity/Api/Site/guardaserie/__pycache__/__init__.cpython-313.pyc +0 -0
  151. StreamingCommunity/Api/Site/guardaserie/__pycache__/__init__.cpython-39.pyc +0 -0
  152. StreamingCommunity/Api/Site/guardaserie/__pycache__/costant.cpython-313.pyc +0 -0
  153. StreamingCommunity/Api/Site/guardaserie/__pycache__/costant.cpython-39.pyc +0 -0
  154. StreamingCommunity/Api/Site/guardaserie/__pycache__/series.cpython-313.pyc +0 -0
  155. StreamingCommunity/Api/Site/guardaserie/__pycache__/series.cpython-39.pyc +0 -0
  156. StreamingCommunity/Api/Site/guardaserie/__pycache__/site.cpython-313.pyc +0 -0
  157. StreamingCommunity/Api/Site/guardaserie/__pycache__/site.cpython-39.pyc +0 -0
  158. StreamingCommunity/Api/Site/guardaserie/util/__pycache__/ScrapeSerie.cpython-313.pyc +0 -0
  159. StreamingCommunity/Api/Site/guardaserie/util/__pycache__/ScrapeSerie.cpython-39.pyc +0 -0
  160. StreamingCommunity/Api/Site/ilcorsaronero/__pycache__/__init__.cpython-313.pyc +0 -0
  161. StreamingCommunity/Api/Site/ilcorsaronero/__pycache__/__init__.cpython-39.pyc +0 -0
  162. StreamingCommunity/Api/Site/ilcorsaronero/__pycache__/costant.cpython-313.pyc +0 -0
  163. StreamingCommunity/Api/Site/ilcorsaronero/__pycache__/costant.cpython-39.pyc +0 -0
  164. StreamingCommunity/Api/Site/ilcorsaronero/__pycache__/site.cpython-313.pyc +0 -0
  165. StreamingCommunity/Api/Site/ilcorsaronero/__pycache__/site.cpython-39.pyc +0 -0
  166. StreamingCommunity/Api/Site/ilcorsaronero/__pycache__/title.cpython-313.pyc +0 -0
  167. StreamingCommunity/Api/Site/ilcorsaronero/__pycache__/title.cpython-39.pyc +0 -0
  168. StreamingCommunity/Api/Site/ilcorsaronero/util/__pycache__/ilCorsarScraper.cpython-313.pyc +0 -0
  169. StreamingCommunity/Api/Site/ilcorsaronero/util/__pycache__/ilCorsarScraper.cpython-39.pyc +0 -0
  170. StreamingCommunity/Api/Site/mostraguarda/__pycache__/__init__.cpython-313.pyc +0 -0
  171. StreamingCommunity/Api/Site/mostraguarda/__pycache__/__init__.cpython-39.pyc +0 -0
  172. StreamingCommunity/Api/Site/mostraguarda/__pycache__/costant.cpython-313.pyc +0 -0
  173. StreamingCommunity/Api/Site/mostraguarda/__pycache__/costant.cpython-39.pyc +0 -0
  174. StreamingCommunity/Api/Site/mostraguarda/__pycache__/film.cpython-313.pyc +0 -0
  175. StreamingCommunity/Api/Site/mostraguarda/__pycache__/film.cpython-39.pyc +0 -0
  176. StreamingCommunity/Api/Site/streamingcommunity/__pycache__/__init__.cpython-313.pyc +0 -0
  177. StreamingCommunity/Api/Site/streamingcommunity/__pycache__/__init__.cpython-39.pyc +0 -0
  178. StreamingCommunity/Api/Site/streamingcommunity/__pycache__/costant.cpython-313.pyc +0 -0
  179. StreamingCommunity/Api/Site/streamingcommunity/__pycache__/costant.cpython-39.pyc +0 -0
  180. StreamingCommunity/Api/Site/streamingcommunity/__pycache__/film.cpython-313.pyc +0 -0
  181. StreamingCommunity/Api/Site/streamingcommunity/__pycache__/film.cpython-39.pyc +0 -0
  182. StreamingCommunity/Api/Site/streamingcommunity/__pycache__/series.cpython-313.pyc +0 -0
  183. StreamingCommunity/Api/Site/streamingcommunity/__pycache__/series.cpython-39.pyc +0 -0
  184. StreamingCommunity/Api/Site/streamingcommunity/__pycache__/site.cpython-313.pyc +0 -0
  185. StreamingCommunity/Api/Site/streamingcommunity/__pycache__/site.cpython-39.pyc +0 -0
  186. StreamingCommunity/Api/Site/streamingcommunity/util/__pycache__/ScrapeSerie.cpython-313.pyc +0 -0
  187. StreamingCommunity/Api/Site/streamingcommunity/util/__pycache__/ScrapeSerie.cpython-39.pyc +0 -0
  188. StreamingCommunity/Api/Template/Class/__pycache__/SearchType.cpython-313.pyc +0 -0
  189. StreamingCommunity/Api/Template/Class/__pycache__/SearchType.cpython-39.pyc +0 -0
  190. StreamingCommunity/Api/Template/Util/__pycache__/__init__.cpython-313.pyc +0 -0
  191. StreamingCommunity/Api/Template/Util/__pycache__/__init__.cpython-39.pyc +0 -0
  192. StreamingCommunity/Api/Template/Util/__pycache__/get_domain.cpython-313.pyc +0 -0
  193. StreamingCommunity/Api/Template/Util/__pycache__/get_domain.cpython-39.pyc +0 -0
  194. StreamingCommunity/Api/Template/Util/__pycache__/manage_ep.cpython-313.pyc +0 -0
  195. StreamingCommunity/Api/Template/Util/__pycache__/manage_ep.cpython-39.pyc +0 -0
  196. StreamingCommunity/Api/Template/Util/__pycache__/recall_search.cpython-313.pyc +0 -0
  197. StreamingCommunity/Api/Template/Util/__pycache__/recall_search.cpython-39.pyc +0 -0
  198. StreamingCommunity/Api/Template/__pycache__/__init__.cpython-313.pyc +0 -0
  199. StreamingCommunity/Api/Template/__pycache__/__init__.cpython-39.pyc +0 -0
  200. StreamingCommunity/Api/Template/__pycache__/site.cpython-313.pyc +0 -0
  201. StreamingCommunity/Api/Template/__pycache__/site.cpython-39.pyc +0 -0
  202. StreamingCommunity/Lib/Downloader/HLS/__pycache__/downloader.cpython-313.pyc +0 -0
  203. StreamingCommunity/Lib/Downloader/HLS/__pycache__/downloader.cpython-39.pyc +0 -0
  204. StreamingCommunity/Lib/Downloader/HLS/__pycache__/proxyes.cpython-313.pyc +0 -0
  205. StreamingCommunity/Lib/Downloader/HLS/__pycache__/proxyes.cpython-39.pyc +0 -0
  206. StreamingCommunity/Lib/Downloader/HLS/__pycache__/segments.cpython-313.pyc +0 -0
  207. StreamingCommunity/Lib/Downloader/HLS/__pycache__/segments.cpython-39.pyc +0 -0
  208. StreamingCommunity/Lib/Downloader/MP4/__pycache__/downloader.cpython-313.pyc +0 -0
  209. StreamingCommunity/Lib/Downloader/MP4/__pycache__/downloader.cpython-39.pyc +0 -0
  210. StreamingCommunity/Lib/Downloader/TOR/__pycache__/downloader.cpython-313.pyc +0 -0
  211. StreamingCommunity/Lib/Downloader/TOR/__pycache__/downloader.cpython-39.pyc +0 -0
  212. StreamingCommunity/Lib/Downloader/__pycache__/__init__.cpython-313.pyc +0 -0
  213. StreamingCommunity/Lib/Downloader/__pycache__/__init__.cpython-39.pyc +0 -0
  214. StreamingCommunity/Lib/FFmpeg/__pycache__/__init__.cpython-313.pyc +0 -0
  215. StreamingCommunity/Lib/FFmpeg/__pycache__/__init__.cpython-39.pyc +0 -0
  216. StreamingCommunity/Lib/FFmpeg/__pycache__/capture.cpython-313.pyc +0 -0
  217. StreamingCommunity/Lib/FFmpeg/__pycache__/capture.cpython-39.pyc +0 -0
  218. StreamingCommunity/Lib/FFmpeg/__pycache__/command.cpython-313.pyc +0 -0
  219. StreamingCommunity/Lib/FFmpeg/__pycache__/command.cpython-39.pyc +0 -0
  220. StreamingCommunity/Lib/FFmpeg/__pycache__/util.cpython-313.pyc +0 -0
  221. StreamingCommunity/Lib/FFmpeg/__pycache__/util.cpython-39.pyc +0 -0
  222. StreamingCommunity/Lib/M3U8/__pycache__/__init__.cpython-313.pyc +0 -0
  223. StreamingCommunity/Lib/M3U8/__pycache__/__init__.cpython-39.pyc +0 -0
  224. StreamingCommunity/Lib/M3U8/__pycache__/decryptor.cpython-313.pyc +0 -0
  225. StreamingCommunity/Lib/M3U8/__pycache__/decryptor.cpython-39.pyc +0 -0
  226. StreamingCommunity/Lib/M3U8/__pycache__/estimator.cpython-313.pyc +0 -0
  227. StreamingCommunity/Lib/M3U8/__pycache__/estimator.cpython-39.pyc +0 -0
  228. StreamingCommunity/Lib/M3U8/__pycache__/parser.cpython-313.pyc +0 -0
  229. StreamingCommunity/Lib/M3U8/__pycache__/parser.cpython-39.pyc +0 -0
  230. StreamingCommunity/Lib/M3U8/__pycache__/url_fixer.cpython-313.pyc +0 -0
  231. StreamingCommunity/Lib/M3U8/__pycache__/url_fixer.cpython-39.pyc +0 -0
  232. StreamingCommunity/Lib/TMBD/__pycache__/__init__.cpython-313.pyc +0 -0
  233. StreamingCommunity/Lib/TMBD/__pycache__/__init__.cpython-39.pyc +0 -0
  234. StreamingCommunity/Lib/TMBD/__pycache__/obj_tmbd.cpython-313.pyc +0 -0
  235. StreamingCommunity/Lib/TMBD/__pycache__/obj_tmbd.cpython-39.pyc +0 -0
  236. StreamingCommunity/Lib/TMBD/__pycache__/tmdb.cpython-313.pyc +0 -0
  237. StreamingCommunity/Lib/TMBD/__pycache__/tmdb.cpython-39.pyc +0 -0
  238. StreamingCommunity/Upload/__pycache__/update.cpython-313.pyc +0 -0
  239. StreamingCommunity/Upload/__pycache__/update.cpython-39.pyc +0 -0
  240. StreamingCommunity/Upload/__pycache__/version.cpython-313.pyc +0 -0
  241. StreamingCommunity/Upload/__pycache__/version.cpython-39.pyc +0 -0
  242. StreamingCommunity/Util/__pycache__/_jsonConfig.cpython-313.pyc +0 -0
  243. StreamingCommunity/Util/__pycache__/_jsonConfig.cpython-39.pyc +0 -0
  244. StreamingCommunity/Util/__pycache__/call_stack.cpython-313.pyc +0 -0
  245. StreamingCommunity/Util/__pycache__/call_stack.cpython-39.pyc +0 -0
  246. StreamingCommunity/Util/__pycache__/color.cpython-313.pyc +0 -0
  247. StreamingCommunity/Util/__pycache__/color.cpython-39.pyc +0 -0
  248. StreamingCommunity/Util/__pycache__/console.cpython-313.pyc +0 -0
  249. StreamingCommunity/Util/__pycache__/console.cpython-39.pyc +0 -0
  250. StreamingCommunity/Util/__pycache__/ffmpeg_installer.cpython-313.pyc +0 -0
  251. StreamingCommunity/Util/__pycache__/ffmpeg_installer.cpython-39.pyc +0 -0
  252. StreamingCommunity/Util/__pycache__/headers.cpython-313.pyc +0 -0
  253. StreamingCommunity/Util/__pycache__/headers.cpython-39.pyc +0 -0
  254. StreamingCommunity/Util/__pycache__/logger.cpython-313.pyc +0 -0
  255. StreamingCommunity/Util/__pycache__/logger.cpython-39.pyc +0 -0
  256. StreamingCommunity/Util/__pycache__/message.cpython-313.pyc +0 -0
  257. StreamingCommunity/Util/__pycache__/message.cpython-39.pyc +0 -0
  258. StreamingCommunity/Util/__pycache__/os.cpython-313.pyc +0 -0
  259. StreamingCommunity/Util/__pycache__/os.cpython-39.pyc +0 -0
  260. StreamingCommunity/Util/__pycache__/table.cpython-313.pyc +0 -0
  261. StreamingCommunity/Util/__pycache__/table.cpython-39.pyc +0 -0
  262. StreamingCommunity/__pycache__/__init__.cpython-313.pyc +0 -0
  263. StreamingCommunity/__pycache__/__init__.cpython-39.pyc +0 -0
  264. StreamingCommunity/__pycache__/run.cpython-313.pyc +0 -0
  265. StreamingCommunity/__pycache__/run.cpython-39.pyc +0 -0
  266. StreamingCommunity-2.5.2.dist-info/RECORD +0 -264
  267. {StreamingCommunity-2.5.2.dist-info → StreamingCommunity-2.5.5.dist-info}/WHEEL +0 -0
  268. {StreamingCommunity-2.5.2.dist-info → StreamingCommunity-2.5.5.dist-info}/top_level.txt +0 -0
@@ -1,573 +1,446 @@
1
- # 18.04.24
2
-
3
- import os
4
- import sys
5
- import time
6
- import queue
7
- import signal
8
- import logging
9
- import binascii
10
- import threading
11
-
12
- from queue import PriorityQueue
13
- from urllib.parse import urljoin, urlparse
14
- from concurrent.futures import ThreadPoolExecutor, as_completed
15
-
16
-
17
- # External libraries
18
- import httpx
19
- from tqdm import tqdm
20
-
21
-
22
- # Internal utilities
23
- from StreamingCommunity.Util.console import console
24
- from StreamingCommunity.Util.headers import get_headers, random_headers
25
- from StreamingCommunity.Util.color import Colors
26
- from StreamingCommunity.Util._jsonConfig import config_manager
27
- from StreamingCommunity.Util.os import os_manager
28
- from StreamingCommunity.Util.call_stack import get_call_stack
29
-
30
-
31
- # Logic class
32
- from ...M3U8 import (
33
- M3U8_Decryption,
34
- M3U8_Ts_Estimator,
35
- M3U8_Parser,
36
- M3U8_UrlFix
37
- )
38
- from ...FFmpeg.util import print_duration_table, format_duration
39
- from .proxyes import main_test_proxy
40
-
41
- # Config
42
- TQDM_DELAY_WORKER = config_manager.get_float('M3U8_DOWNLOAD', 'tqdm_delay')
43
- TQDM_USE_LARGE_BAR = config_manager.get_int('M3U8_DOWNLOAD', 'tqdm_use_large_bar')
44
-
45
- REQUEST_MAX_RETRY = config_manager.get_int('REQUESTS', 'max_retry')
46
- REQUEST_VERIFY = False
47
-
48
- THERE_IS_PROXY_LIST = os_manager.check_file("list_proxy.txt")
49
- PROXY_START_MIN = config_manager.get_float('REQUESTS', 'proxy_start_min')
50
- PROXY_START_MAX = config_manager.get_float('REQUESTS', 'proxy_start_max')
51
-
52
- DEFAULT_VIDEO_WORKERS = config_manager.get_int('M3U8_DOWNLOAD', 'default_video_workser')
53
- DEFAULT_AUDIO_WORKERS = config_manager.get_int('M3U8_DOWNLOAD', 'default_audio_workser')
54
-
55
-
56
-
57
- # Variable
58
- max_timeout = config_manager.get_int("REQUESTS", "timeout")
59
-
60
-
61
-
62
- class M3U8_Segments:
63
- def __init__(self, url: str, tmp_folder: str, is_index_url: bool = True):
64
- """
65
- Initializes the M3U8_Segments object.
66
-
67
- Parameters:
68
- - url (str): The URL of the M3U8 playlist.
69
- - tmp_folder (str): The temporary folder to store downloaded segments.
70
- - is_index_url (bool): Flag indicating if `m3u8_index` is a URL (default True).
71
- """
72
- self.url = url
73
- self.tmp_folder = tmp_folder
74
- self.is_index_url = is_index_url
75
- self.expected_real_time = None
76
- self.max_timeout = max_timeout
77
-
78
- self.tmp_file_path = os.path.join(self.tmp_folder, "0.ts")
79
- os.makedirs(self.tmp_folder, exist_ok=True)
80
-
81
- # Util class
82
- self.decryption: M3U8_Decryption = None
83
- self.class_ts_estimator = M3U8_Ts_Estimator(0)
84
- self.class_url_fixer = M3U8_UrlFix(url)
85
-
86
- # Sync
87
- self.queue = PriorityQueue()
88
- self.stop_event = threading.Event()
89
- self.downloaded_segments = set()
90
- self.base_timeout = 1.0
91
- self.current_timeout = 5.0
92
-
93
- # Stopping
94
- self.interrupt_flag = threading.Event()
95
- self.download_interrupted = False
96
-
97
- # OTHER INFO
98
- self.info_maxRetry = 0
99
- self.info_nRetry = 0
100
- self.info_nFailed = 0
101
-
102
- def __get_key__(self, m3u8_parser: M3U8_Parser) -> bytes:
103
- """
104
- Retrieves the encryption key from the M3U8 playlist.
105
-
106
- Parameters:
107
- - m3u8_parser (M3U8_Parser): The parser object containing M3U8 playlist information.
108
-
109
- Returns:
110
- bytes: The encryption key in bytes.
111
- """
112
-
113
- # Construct the full URL of the key
114
- key_uri = urljoin(self.url, m3u8_parser.keys.get('uri'))
115
- parsed_url = urlparse(key_uri)
116
- self.key_base_url = f"{parsed_url.scheme}://{parsed_url.netloc}/"
117
- logging.info(f"Uri key: {key_uri}")
118
-
119
- # Make request to get porxy
120
- try:
121
- response = httpx.get(
122
- url=key_uri,
123
- headers={'User-Agent': get_headers()},
124
- timeout=max_timeout
125
- )
126
- response.raise_for_status()
127
-
128
- except Exception as e:
129
- raise Exception(f"Failed to fetch key from {key_uri}: {e}")
130
-
131
- # Convert the content of the response to hexadecimal and then to bytes
132
- hex_content = binascii.hexlify(response.content).decode('utf-8')
133
- byte_content = bytes.fromhex(hex_content)
134
- logging.info(f"URI: Hex content: {hex_content}, Byte content: {byte_content}")
135
-
136
- #console.print(f"[cyan]Find key: [red]{hex_content}")
137
- return byte_content
138
-
139
- def parse_data(self, m3u8_content: str) -> None:
140
- """
141
- Parses the M3U8 content to extract segment information.
142
-
143
- Parameters:
144
- - m3u8_content (str): The content of the M3U8 file.
145
- """
146
- m3u8_parser = M3U8_Parser()
147
- m3u8_parser.parse_data(uri=self.url, raw_content=m3u8_content)
148
-
149
- self.expected_real_time = m3u8_parser.get_duration(return_string=False)
150
- self.expected_real_time_s = m3u8_parser.duration
151
-
152
- # Check if there is an encryption key in the playlis
153
- if m3u8_parser.keys is not None:
154
- try:
155
-
156
- # Extract byte from the key
157
- key = self.__get_key__(m3u8_parser)
158
-
159
- except Exception as e:
160
- raise Exception(f"Failed to retrieve encryption key {e}.")
161
-
162
- iv = m3u8_parser.keys.get('iv')
163
- method = m3u8_parser.keys.get('method')
164
- logging.info(f"M3U8_Decryption - IV: {iv}, method: {method}")
165
-
166
- # Create a decryption object with the key and set the method
167
- self.decryption = M3U8_Decryption(key, iv, method)
168
-
169
- # Store the segment information parsed from the playlist
170
- self.segments = m3u8_parser.segments
171
-
172
- # Fix URL if it is incomplete (missing 'http')
173
- for i in range(len(self.segments)):
174
- segment_url = self.segments[i]
175
-
176
- if "http" not in segment_url:
177
- self.segments[i] = self.class_url_fixer.generate_full_url(segment_url)
178
- logging.info(f"Generated new URL: {self.segments[i]}, from: {segment_url}")
179
-
180
- # Update segments for estimator
181
- self.class_ts_estimator.total_segments = len(self.segments)
182
- logging.info(f"Segmnets to download: [{len(self.segments)}]")
183
-
184
- # Proxy
185
- if THERE_IS_PROXY_LIST:
186
- console.log("[red]Start validation proxy.")
187
- self.valid_proxy = main_test_proxy(self.segments[0])
188
- console.log(f"[cyan]N. Valid ip: [red]{len(self.valid_proxy)}")
189
-
190
- if len(self.valid_proxy) == 0:
191
- sys.exit(0)
192
-
193
- def get_info(self) -> None:
194
- """
195
- Makes a request to the index M3U8 file to get information about segments.
196
- """
197
- if self.is_index_url:
198
-
199
- try:
200
-
201
- # Send a GET request to retrieve the index M3U8 file
202
- response = httpx.get(
203
- self.url,
204
- headers={'User-Agent': get_headers()},
205
- timeout=max_timeout,
206
- follow_redirects=True
207
- )
208
- response.raise_for_status()
209
-
210
- # Save the M3U8 file to the temporary folder
211
- path_m3u8_file = os.path.join(self.tmp_folder, "playlist.m3u8")
212
- open(path_m3u8_file, "w+").write(response.text)
213
-
214
- # Parse the text from the M3U8 index file
215
- self.parse_data(response.text)
216
-
217
- except Exception as e:
218
- print(f"Error during M3U8 index request: {e}")
219
-
220
- else:
221
- # Parser data of content of index pass in input to class
222
- self.parse_data(self.url)
223
-
224
- def setup_interrupt_handler(self):
225
- """
226
- Set up a signal handler for graceful interruption.
227
- """
228
- def interrupt_handler(signum, frame):
229
- if not self.interrupt_flag.is_set():
230
- console.log("\n[red] Stopping download gracefully...")
231
- self.interrupt_flag.set()
232
- self.download_interrupted = True
233
- self.stop_event.set()
234
-
235
- if threading.current_thread() is threading.main_thread():
236
- signal.signal(signal.SIGINT, interrupt_handler)
237
- else:
238
- print("Signal handler must be set in the main thread")
239
-
240
- def make_requests_stream(self, ts_url: str, index: int, progress_bar: tqdm, backoff_factor: float = 1.5) -> None:
241
- """
242
- Downloads a TS segment and adds it to the segment queue with retry logic.
243
-
244
- Parameters:
245
- - ts_url (str): The URL of the TS segment.
246
- - index (int): The index of the segment.
247
- - progress_bar (tqdm): Progress counter for tracking download progress.
248
- - retries (int): The number of times to retry on failure (default is 3).
249
- - backoff_factor (float): The backoff factor for exponential backoff (default is 1.5 seconds).
250
- """
251
- for attempt in range(REQUEST_MAX_RETRY):
252
- if self.interrupt_flag.is_set():
253
- return
254
-
255
- try:
256
- start_time = time.time()
257
-
258
- # Make request to get content
259
- if THERE_IS_PROXY_LIST:
260
-
261
- # Get proxy from list
262
- proxy = self.valid_proxy[index % len(self.valid_proxy)]
263
- logging.info(f"Use proxy: {proxy}")
264
-
265
- with httpx.Client(proxies=proxy, verify=REQUEST_VERIFY) as client:
266
- if 'key_base_url' in self.__dict__:
267
- response = client.get(
268
- url=ts_url,
269
- headers=random_headers(self.key_base_url),
270
- timeout=max_timeout,
271
- follow_redirects=True
272
- )
273
-
274
- else:
275
- response = client.get(
276
- url=ts_url,
277
- headers={'User-Agent': get_headers()},
278
- timeout=max_timeout,
279
- follow_redirects=True
280
- )
281
-
282
- else:
283
- with httpx.Client(verify=REQUEST_VERIFY) as client_2:
284
- if 'key_base_url' in self.__dict__:
285
- response = client_2.get(
286
- url=ts_url,
287
- headers=random_headers(self.key_base_url),
288
- timeout=max_timeout,
289
- follow_redirects=True
290
- )
291
-
292
- else:
293
- response = client_2.get(
294
- url=ts_url,
295
- headers={'User-Agent': get_headers()},
296
- timeout=max_timeout,
297
- follow_redirects=True
298
- )
299
-
300
- # Validate response and content
301
- response.raise_for_status()
302
- segment_content = response.content
303
- content_size = len(segment_content)
304
- duration = time.time() - start_time
305
-
306
- # Decrypt if needed and verify decrypted content
307
- if self.decryption is not None:
308
- try:
309
- segment_content = self.decryption.decrypt(segment_content)
310
-
311
- except Exception as e:
312
- logging.error(f"Decryption failed for segment {index}: {str(e)}")
313
- self.interrupt_flag.set() # Interrupt the download process
314
- self.stop_event.set() # Trigger the stopping event for all threads
315
- break # Stop the current task immediately
316
-
317
- # Update progress and queue
318
- self.class_ts_estimator.update_progress_bar(content_size, duration, progress_bar)
319
-
320
- # Add the segment to the queue
321
- self.queue.put((index, segment_content))
322
-
323
- # Track successfully downloaded segments
324
- self.downloaded_segments.add(index)
325
- progress_bar.update(1)
326
-
327
- # Break out of the loop on success
328
- return
329
-
330
- except Exception as e:
331
- logging.info(f"Attempt {attempt + 1} failed for segment {index} - '{ts_url}': {e}")
332
-
333
- # Update stat variable class
334
- if attempt > self.info_maxRetry:
335
- self.info_maxRetry = ( attempt + 1 )
336
- self.info_nRetry += 1
337
-
338
- if attempt + 1 == REQUEST_MAX_RETRY:
339
- console.log(f"[red]Final retry failed for segment: {index}")
340
- self.queue.put((index, None)) # Marker for failed segment
341
- progress_bar.update(1)
342
- self.info_nFailed += 1
343
-
344
- #break
345
-
346
- sleep_time = backoff_factor * (2 ** attempt)
347
- logging.info(f"Retrying segment {index} in {sleep_time} seconds...")
348
- time.sleep(sleep_time)
349
-
350
- def write_segments_to_file(self):
351
- """
352
- Writes segments to file with additional verification.
353
- """
354
- buffer = {}
355
- expected_index = 0
356
- segments_written = set()
357
-
358
- with open(self.tmp_file_path, 'wb') as f:
359
- while not self.stop_event.is_set() or not self.queue.empty():
360
- if self.interrupt_flag.is_set():
361
- break
362
-
363
- try:
364
- index, segment_content = self.queue.get(timeout=self.current_timeout)
365
-
366
- # Successful queue retrieval: reduce timeout
367
- self.current_timeout = max(self.base_timeout, self.current_timeout / 2)
368
-
369
- # Handle failed segments
370
- if segment_content is None:
371
- if index == expected_index:
372
- expected_index += 1
373
- continue
374
-
375
- # Write segment if it's the next expected one
376
- if index == expected_index:
377
- f.write(segment_content)
378
- segments_written.add(index)
379
- f.flush()
380
- expected_index += 1
381
-
382
- # Write any buffered segments that are now in order
383
- while expected_index in buffer:
384
- next_segment = buffer.pop(expected_index)
385
-
386
- if next_segment is not None:
387
- f.write(next_segment)
388
- segments_written.add(expected_index)
389
- f.flush()
390
-
391
- expected_index += 1
392
-
393
- else:
394
- buffer[index] = segment_content
395
-
396
- except queue.Empty:
397
- self.current_timeout = min(self.max_timeout, self.current_timeout * 1.25)
398
-
399
- if self.stop_event.is_set():
400
- break
401
-
402
- except Exception as e:
403
- logging.error(f"Error writing segment {index}: {str(e)}")
404
-
405
- def download_streams(self, description: str, type: str):
406
- """
407
- Downloads all TS segments in parallel and writes them to a file.
408
-
409
- Parameters:
410
- - description: Description to insert on tqdm bar
411
- - type (str): Type of download: 'video' or 'audio'
412
- """
413
- self.setup_interrupt_handler()
414
-
415
- # Get config site from prev stack
416
- frames = get_call_stack()
417
- logging.info(f"Extract info from: {frames}")
418
- config_site = str(frames[-4]['folder_base'])
419
- logging.info(f"Use frame: {frames[-1]}")
420
-
421
- # Workers to use for downloading
422
- TQDM_MAX_WORKER = 0
423
-
424
- # Select audio workers from folder of frames stack prev call.
425
- try:
426
- VIDEO_WORKERS = int(config_manager.get_dict('SITE', config_site)['video_workers'])
427
- except:
428
- #VIDEO_WORKERS = os.cpu_count()
429
- VIDEO_WORKERS = DEFAULT_VIDEO_WORKERS
430
-
431
- try:
432
- AUDIO_WORKERS = int(config_manager.get_dict('SITE', config_site)['audio_workers'])
433
- except:
434
- #AUDIO_WORKERS = os.cpu_count()
435
- AUDIO_WORKERS = DEFAULT_AUDIO_WORKERS
436
-
437
- # Differnt workers for audio and video
438
- if "video" in str(type):
439
- TQDM_MAX_WORKER = VIDEO_WORKERS
440
-
441
- if "audio" in str(type):
442
- TQDM_MAX_WORKER = AUDIO_WORKERS
443
-
444
- #console.print(f"[cyan]Video workers[white]: [green]{VIDEO_WORKERS} [white]| [cyan]Audio workers[white]: [green]{AUDIO_WORKERS}")
445
-
446
- # Custom bar for mobile and pc
447
- if TQDM_USE_LARGE_BAR:
448
- bar_format = (
449
- f"{Colors.YELLOW}[HLS] {Colors.WHITE}({Colors.CYAN}{description}{Colors.WHITE}): "
450
- f"{Colors.RED}{{percentage:.2f}}% "
451
- f"{Colors.MAGENTA}{{bar}} "
452
- f"{Colors.WHITE}[ {Colors.YELLOW}{{n_fmt}}{Colors.WHITE} / {Colors.RED}{{total_fmt}} {Colors.WHITE}] "
453
- f"{Colors.YELLOW}{{elapsed}} {Colors.WHITE}< {Colors.CYAN}{{remaining}}{{postfix}} {Colors.WHITE}]"
454
- )
455
- else:
456
- bar_format = (
457
- f"{Colors.YELLOW}Proc{Colors.WHITE}: "
458
- f"{Colors.RED}{{percentage:.2f}}% "
459
- f"{Colors.WHITE}| "
460
- f"{Colors.CYAN}{{remaining}}{{postfix}} {Colors.WHITE}]"
461
- )
462
-
463
- # Create progress bar
464
- progress_bar = tqdm(
465
- total=len(self.segments),
466
- unit='s',
467
- ascii='░▒█',
468
- bar_format=bar_format,
469
- mininterval=0.05
470
- )
471
-
472
- try:
473
-
474
- # Start writer thread
475
- writer_thread = threading.Thread(target=self.write_segments_to_file)
476
- writer_thread.daemon = True
477
- writer_thread.start()
478
-
479
- # Configure workers and delay
480
- max_workers = len(self.valid_proxy) if THERE_IS_PROXY_LIST else TQDM_MAX_WORKER
481
- delay = max(PROXY_START_MIN, min(PROXY_START_MAX, 1 / (len(self.valid_proxy) + 1))) if THERE_IS_PROXY_LIST else TQDM_DELAY_WORKER
482
-
483
- # Download segments with completion verification
484
- with ThreadPoolExecutor(max_workers=max_workers) as executor:
485
- futures = []
486
- for index, segment_url in enumerate(self.segments):
487
- # Check for interrupt before submitting each task
488
- if self.interrupt_flag.is_set():
489
- break
490
-
491
- time.sleep(delay)
492
- futures.append(executor.submit(self.make_requests_stream, segment_url, index, progress_bar))
493
-
494
- # Wait for futures with interrupt handling
495
- for future in as_completed(futures):
496
- if self.interrupt_flag.is_set():
497
- break
498
- try:
499
- future.result()
500
- except Exception as e:
501
- logging.error(f"Error in download thread: {str(e)}")
502
-
503
- # Interrupt handling for missing segments
504
- if not self.interrupt_flag.is_set():
505
- total_segments = len(self.segments)
506
- completed_segments = len(self.downloaded_segments)
507
-
508
- if completed_segments < total_segments:
509
- missing_segments = set(range(total_segments)) - self.downloaded_segments
510
- logging.warning(f"Missing segments: {sorted(missing_segments)}")
511
-
512
- # Retry missing segments with interrupt check
513
- for index in missing_segments:
514
- if self.interrupt_flag.is_set():
515
- break
516
-
517
- try:
518
- self.make_requests_stream(self.segments[index], index, progress_bar)
519
-
520
- except Exception as e:
521
- logging.error(f"Failed to retry segment {index}: {str(e)}")
522
-
523
- except Exception as e:
524
- logging.error(f"Download failed: {str(e)}")
525
- raise
526
-
527
- finally:
528
-
529
- # Clean up resources
530
- self.stop_event.set()
531
- writer_thread.join(timeout=30)
532
- progress_bar.close()
533
-
534
- # Check if download was interrupted
535
- if self.download_interrupted:
536
- console.log("[red] Download was manually stopped.")
537
-
538
- # Clean up
539
- self.stop_event.set()
540
- writer_thread.join(timeout=30)
541
- progress_bar.close()
542
-
543
- # Final verification
544
- try:
545
- final_completion = (len(self.downloaded_segments) / total_segments) * 100
546
- if final_completion < 99.9: # Less than 99.9% complete
547
- missing = set(range(total_segments)) - self.downloaded_segments
548
- raise Exception(f"Download incomplete ({final_completion:.1f}%). Missing segments: {sorted(missing)}")
549
-
550
- except:
551
- pass
552
-
553
- # Verify output file
554
- if not os.path.exists(self.tmp_file_path):
555
- raise Exception("Output file missing")
556
-
557
- file_size = os.path.getsize(self.tmp_file_path)
558
- if file_size == 0:
559
- raise Exception("Output file is empty")
560
-
561
- # Display additional info when there is missing stream file
562
- if self.info_nFailed > 0:
563
-
564
- # Get expected time
565
- ex_hours, ex_minutes, ex_seconds = format_duration(self.expected_real_time_s)
566
- ex_formatted_duration = f"[yellow]{int(ex_hours)}[red]h [yellow]{int(ex_minutes)}[red]m [yellow]{int(ex_seconds)}[red]s"
567
- console.print(f"[cyan]Max retry per URL[white]: [green]{self.info_maxRetry}[green] [white]| [cyan]Total retry done[white]: [green]{self.info_nRetry}[green] [white]| [cyan]Missing TS: [red]{self.info_nFailed} [white]| [cyan]Duration: {print_duration_table(self.tmp_file_path, None, True)} [white]| [cyan]Expected duation: {ex_formatted_duration} \n")
568
-
569
- if self.info_nRetry >= len(self.segments) * 0.3:
570
- console.print("[yellow]⚠ Warning:[/yellow] Too many retries detected! Consider reducing the number of [cyan]workers[/cyan] in the [magenta]config.json[/magenta] file. This will impact [bold]performance[/bold]. \n")
571
-
572
- # Info to return
573
- return {'type': type, 'nFailed': self.info_nFailed}
1
+ # 18.04.24
2
+
3
+ import os
4
+ import sys
5
+ import time
6
+ import queue
7
+ import signal
8
+ import logging
9
+ import binascii
10
+ import threading
11
+ from queue import PriorityQueue
12
+ from urllib.parse import urljoin, urlparse
13
+ from concurrent.futures import ThreadPoolExecutor, as_completed
14
+ from typing import Dict
15
+
16
+
17
+ # External libraries
18
+ import httpx
19
+ from tqdm import tqdm
20
+
21
+
22
+ # Internal utilities
23
+ from StreamingCommunity.Util.color import Colors
24
+ from StreamingCommunity.Util.console import console
25
+ from StreamingCommunity.Util.headers import get_headers, random_headers
26
+ from StreamingCommunity.Util._jsonConfig import config_manager
27
+ from StreamingCommunity.Util.os import os_manager
28
+
29
+
30
+ # Logic class
31
+ from ...M3U8 import (
32
+ M3U8_Decryption,
33
+ M3U8_Ts_Estimator,
34
+ M3U8_Parser,
35
+ M3U8_UrlFix
36
+ )
37
+ from .proxyes import main_test_proxy
38
+
39
# Config
# Delay between worker submissions when no proxy list is in use
TQDM_DELAY_WORKER = config_manager.get_float('M3U8_DOWNLOAD', 'tqdm_delay')
# Compact progress bar on mobile platforms
USE_LARGE_BAR = not ("android" in sys.platform or "ios" in sys.platform)
REQUEST_MAX_RETRY = config_manager.get_int('REQUESTS', 'max_retry')
REQUEST_VERIFY = False
# A "list_proxy.txt" file in the working directory enables proxy mode
THERE_IS_PROXY_LIST = os_manager.check_file("list_proxy.txt")
PROXY_START_MIN = config_manager.get_float('REQUESTS', 'proxy_start_min')
PROXY_START_MAX = config_manager.get_float('REQUESTS', 'proxy_start_max')
# NOTE: the config keys below are misspelled ("workser") in config.json as
# well, so they must stay as-is to keep reading the existing configuration.
DEFAULT_VIDEO_WORKERS = config_manager.get_int('M3U8_DOWNLOAD', 'default_video_workser')
DEFAULT_AUDIO_WORKERS = config_manager.get_int('M3U8_DOWNLOAD', 'default_audio_workser')
# NOTE: misspelled name kept (referenced throughout this module)
MAX_TIMEOOUT = config_manager.get_int("REQUESTS", "timeout")
50
+
51
+
52
+
53
+ class M3U8_Segments:
54
+ def __init__(self, url: str, tmp_folder: str, is_index_url: bool = True):
55
+ """
56
+ Initializes the M3U8_Segments object.
57
+
58
+ Parameters:
59
+ - url (str): The URL of the M3U8 playlist.
60
+ - tmp_folder (str): The temporary folder to store downloaded segments.
61
+ - is_index_url (bool): Flag indicating if `m3u8_index` is a URL (default True).
62
+ """
63
+ self.url = url
64
+ self.tmp_folder = tmp_folder
65
+ self.is_index_url = is_index_url
66
+ self.expected_real_time = None
67
+ self.tmp_file_path = os.path.join(self.tmp_folder, "0.ts")
68
+ os.makedirs(self.tmp_folder, exist_ok=True)
69
+
70
+ # Util class
71
+ self.decryption: M3U8_Decryption = None
72
+ self.class_ts_estimator = M3U8_Ts_Estimator(0, self)
73
+ self.class_url_fixer = M3U8_UrlFix(url)
74
+
75
+ # Sync
76
+ self.queue = PriorityQueue()
77
+ self.stop_event = threading.Event()
78
+ self.downloaded_segments = set()
79
+ self.base_timeout = 0.5
80
+ self.current_timeout = 3.0
81
+
82
+ # Stopping
83
+ self.interrupt_flag = threading.Event()
84
+ self.download_interrupted = False
85
+
86
+ # OTHER INFO
87
+ self.info_maxRetry = 0
88
+ self.info_nRetry = 0
89
+ self.info_nFailed = 0
90
+
91
+ self.active_retries = 0
92
+ self.active_retries_lock = threading.Lock()
93
+
94
+ def __get_key__(self, m3u8_parser: M3U8_Parser) -> bytes:
95
+ key_uri = urljoin(self.url, m3u8_parser.keys.get('uri'))
96
+ parsed_url = urlparse(key_uri)
97
+ self.key_base_url = f"{parsed_url.scheme}://{parsed_url.netloc}/"
98
+
99
+ try:
100
+ client_params = {'headers': {'User-Agent': get_headers()}, 'timeout': MAX_TIMEOOUT}
101
+ response = httpx.get(url=key_uri, **client_params)
102
+ response.raise_for_status()
103
+
104
+ hex_content = binascii.hexlify(response.content).decode('utf-8')
105
+ return bytes.fromhex(hex_content)
106
+
107
+ except Exception as e:
108
+ raise Exception(f"Failed to fetch key: {e}")
109
+
110
+ def parse_data(self, m3u8_content: str) -> None:
111
+ m3u8_parser = M3U8_Parser()
112
+ m3u8_parser.parse_data(uri=self.url, raw_content=m3u8_content)
113
+
114
+ self.expected_real_time_s = m3u8_parser.duration
115
+
116
+ if m3u8_parser.keys:
117
+ key = self.__get_key__(m3u8_parser)
118
+ self.decryption = M3U8_Decryption(
119
+ key,
120
+ m3u8_parser.keys.get('iv'),
121
+ m3u8_parser.keys.get('method')
122
+ )
123
+
124
+ self.segments = [
125
+ self.class_url_fixer.generate_full_url(seg)
126
+ if "http" not in seg else seg
127
+ for seg in m3u8_parser.segments
128
+ ]
129
+ self.class_ts_estimator.total_segments = len(self.segments)
130
+
131
+ # Proxy
132
+ if THERE_IS_PROXY_LIST:
133
+ console.log("[red]Start validation proxy.")
134
+ self.valid_proxy = main_test_proxy(self.segments[0])
135
+ console.log(f"[cyan]N. Valid ip: [red]{len(self.valid_proxy)}")
136
+
137
+ if len(self.valid_proxy) == 0:
138
+ sys.exit(0)
139
+
140
+ def get_info(self) -> None:
141
+ if self.is_index_url:
142
+ try:
143
+ client_params = {'headers': {'User-Agent': get_headers()}, 'timeout': MAX_TIMEOOUT}
144
+ response = httpx.get(self.url, **client_params)
145
+ response.raise_for_status()
146
+
147
+ self.parse_data(response.text)
148
+ with open(os.path.join(self.tmp_folder, "playlist.m3u8"), "w") as f:
149
+ f.write(response.text)
150
+
151
+ except Exception as e:
152
+ raise RuntimeError(f"M3U8 info retrieval failed: {e}")
153
+
154
+ def setup_interrupt_handler(self):
155
+ """
156
+ Set up a signal handler for graceful interruption.
157
+ """
158
+ def interrupt_handler(signum, frame):
159
+ if not self.interrupt_flag.is_set():
160
+ console.log("\n[red] Stopping download gracefully...")
161
+ self.interrupt_flag.set()
162
+ self.download_interrupted = True
163
+ self.stop_event.set()
164
+
165
+ if threading.current_thread() is threading.main_thread():
166
+ signal.signal(signal.SIGINT, interrupt_handler)
167
+ else:
168
+ print("Signal handler must be set in the main thread")
169
+
170
+ def _get_http_client(self, index: int = None):
171
+ client_params = {
172
+ 'headers': random_headers(self.key_base_url) if hasattr(self, 'key_base_url') else {'User-Agent': get_headers()},
173
+ 'timeout': MAX_TIMEOOUT,
174
+ 'follow_redirects': True,
175
+ 'http2': False
176
+ }
177
+
178
+ if THERE_IS_PROXY_LIST and index is not None and hasattr(self, 'valid_proxy'):
179
+ client_params['proxies'] = self.valid_proxy[index % len(self.valid_proxy)]
180
+
181
+ return httpx.Client(**client_params)
182
+
183
+ def download_segment(self, ts_url: str, index: int, progress_bar: tqdm, backoff_factor: float = 1.1) -> None:
184
+ """
185
+ Downloads a TS segment and adds it to the segment queue with retry logic.
186
+
187
+ Parameters:
188
+ - ts_url (str): The URL of the TS segment.
189
+ - index (int): The index of the segment.
190
+ - progress_bar (tqdm): Progress counter for tracking download progress.
191
+ - backoff_factor (float): The backoff factor for exponential backoff (default is 1.5 seconds).
192
+ """
193
+ for attempt in range(REQUEST_MAX_RETRY):
194
+ if self.interrupt_flag.is_set():
195
+ return
196
+
197
+ try:
198
+ with self._get_http_client(index) as client:
199
+ start_time = time.time()
200
+ response = client.get(ts_url)
201
+
202
+ # Validate response and content
203
+ response.raise_for_status()
204
+ segment_content = response.content
205
+ content_size = len(segment_content)
206
+ duration = time.time() - start_time
207
+
208
+ # Decrypt if needed and verify decrypted content
209
+ if self.decryption is not None:
210
+ try:
211
+ segment_content = self.decryption.decrypt(segment_content)
212
+
213
+ except Exception as e:
214
+ logging.error(f"Decryption failed for segment {index}: {str(e)}")
215
+ self.interrupt_flag.set() # Interrupt the download process
216
+ self.stop_event.set() # Trigger the stopping event for all threads
217
+ break # Stop the current task immediately
218
+
219
+ self.class_ts_estimator.update_progress_bar(content_size, duration, progress_bar)
220
+ self.queue.put((index, segment_content))
221
+ self.downloaded_segments.add(index)
222
+ progress_bar.update(1)
223
+ return
224
+
225
+ except Exception as e:
226
+ logging.info(f"Attempt {attempt + 1} failed for segment {index} - '{ts_url}': {e}")
227
+
228
+ if attempt > self.info_maxRetry:
229
+ self.info_maxRetry = ( attempt + 1 )
230
+ self.info_nRetry += 1
231
+
232
+ if attempt + 1 == REQUEST_MAX_RETRY:
233
+ console.log(f"[red]Final retry failed for segment: {index}")
234
+ self.queue.put((index, None)) # Marker for failed segment
235
+ progress_bar.update(1)
236
+ self.info_nFailed += 1
237
+ return
238
+
239
+ with self.active_retries_lock:
240
+ self.active_retries += 1
241
+
242
+ sleep_time = backoff_factor * (2 ** attempt)
243
+ logging.info(f"Retrying segment {index} in {sleep_time} seconds...")
244
+ time.sleep(sleep_time)
245
+
246
+ with self.active_retries_lock:
247
+ self.active_retries -= 1
248
+
249
+ def write_segments_to_file(self):
250
+ """
251
+ Writes segments to file with additional verification.
252
+ """
253
+ buffer = {}
254
+ expected_index = 0
255
+
256
+ with open(self.tmp_file_path, 'wb') as f:
257
+ while not self.stop_event.is_set() or not self.queue.empty():
258
+ if self.interrupt_flag.is_set():
259
+ break
260
+
261
+ try:
262
+ index, segment_content = self.queue.get(timeout=self.current_timeout)
263
+
264
+ # Successful queue retrieval: reduce timeout
265
+ self.current_timeout = max(self.base_timeout, self.current_timeout / 2)
266
+
267
+ # Handle failed segments
268
+ if segment_content is None:
269
+ if index == expected_index:
270
+ expected_index += 1
271
+ continue
272
+
273
+ # Write segment if it's the next expected one
274
+ if index == expected_index:
275
+ f.write(segment_content)
276
+ f.flush()
277
+ expected_index += 1
278
+
279
+ # Write any buffered segments that are now in order
280
+ while expected_index in buffer:
281
+ next_segment = buffer.pop(expected_index)
282
+
283
+ if next_segment is not None:
284
+ f.write(next_segment)
285
+ f.flush()
286
+
287
+ expected_index += 1
288
+
289
+ else:
290
+ buffer[index] = segment_content
291
+
292
+ except queue.Empty:
293
+ self.current_timeout = min(MAX_TIMEOOUT, self.current_timeout * 1.1)
294
+ if self.stop_event.is_set():
295
+ break
296
+
297
+ except Exception as e:
298
+ logging.error(f"Error writing segment {index}: {str(e)}")
299
+
300
+ def download_streams(self, description: str, type: str):
301
+ """
302
+ Downloads all TS segments in parallel and writes them to a file.
303
+
304
+ Parameters:
305
+ - description: Description to insert on tqdm bar
306
+ - type (str): Type of download: 'video' or 'audio'
307
+ """
308
+ self.get_info()
309
+ self.setup_interrupt_handler()
310
+
311
+ progress_bar = tqdm(
312
+ total=len(self.segments),
313
+ unit='s',
314
+ ascii='░▒█',
315
+ bar_format=self._get_bar_format(description),
316
+ mininterval=0.05
317
+ )
318
+
319
+ try:
320
+ writer_thread = threading.Thread(target=self.write_segments_to_file)
321
+ writer_thread.daemon = True
322
+ writer_thread.start()
323
+
324
+ # Configure workers and delay
325
+ max_workers = self._get_worker_count(type)
326
+ delay = max(PROXY_START_MIN, min(PROXY_START_MAX, 1 / (len(self.valid_proxy) + 1))) if THERE_IS_PROXY_LIST else TQDM_DELAY_WORKER
327
+
328
+ # Download segments with completion verification
329
+ with ThreadPoolExecutor(max_workers=max_workers) as executor:
330
+ futures = []
331
+ for index, segment_url in enumerate(self.segments):
332
+
333
+ # Check for interrupt before submitting each task
334
+ if self.interrupt_flag.is_set():
335
+ break
336
+
337
+ time.sleep(delay)
338
+ futures.append(executor.submit(self.download_segment, segment_url, index, progress_bar))
339
+
340
+ # Wait for futures with interrupt handling
341
+ for future in as_completed(futures):
342
+ if self.interrupt_flag.is_set():
343
+ break
344
+ try:
345
+ future.result()
346
+ except Exception as e:
347
+ logging.error(f"Error in download thread: {str(e)}")
348
+
349
+ # Interrupt handling for missing segments
350
+ if not self.interrupt_flag.is_set():
351
+ total_segments = len(self.segments)
352
+ completed_segments = len(self.downloaded_segments)
353
+
354
+ if completed_segments < total_segments:
355
+ missing_segments = set(range(total_segments)) - self.downloaded_segments
356
+ logging.warning(f"Missing segments: {sorted(missing_segments)}")
357
+
358
+ # Retry missing segments with interrupt check
359
+ for index in missing_segments:
360
+ if self.interrupt_flag.is_set():
361
+ break
362
+
363
+ try:
364
+ self.download_segment(self.segments[index], index, progress_bar)
365
+
366
+ except Exception as e:
367
+ logging.error(f"Failed to retry segment {index}: {str(e)}")
368
+
369
+ finally:
370
+ self._cleanup_resources(writer_thread, progress_bar)
371
+
372
+ if not self.interrupt_flag.is_set():
373
+ self._verify_download_completion()
374
+
375
+ return self._generate_results(type)
376
+
377
+ def _get_bar_format(self, description: str) -> str:
378
+ """
379
+ Generate platform-appropriate progress bar format.
380
+ """
381
+ if not USE_LARGE_BAR:
382
+ return (
383
+ f"{Colors.YELLOW}Proc{Colors.WHITE}: "
384
+ f"{Colors.RED}{{percentage:.2f}}% "
385
+ f"{Colors.WHITE}| "
386
+ f"{Colors.CYAN}{{remaining}}{{postfix}} {Colors.WHITE}]"
387
+ )
388
+
389
+ else:
390
+ return (
391
+ f"{Colors.YELLOW}[HLS] {Colors.WHITE}({Colors.CYAN}{description}{Colors.WHITE}): "
392
+ f"{Colors.RED}{{percentage:.2f}}% "
393
+ f"{Colors.MAGENTA}{{bar}} "
394
+ f"{Colors.WHITE}[ {Colors.YELLOW}{{n_fmt}}{Colors.WHITE} / {Colors.RED}{{total_fmt}} {Colors.WHITE}] "
395
+ f"{Colors.YELLOW}{{elapsed}} {Colors.WHITE}< {Colors.CYAN}{{remaining}}{{postfix}} {Colors.WHITE}]"
396
+ )
397
+
398
+ def _get_worker_count(self, stream_type: str) -> int:
399
+ """
400
+ Calculate optimal parallel workers based on stream type and infrastructure.
401
+ """
402
+ base_workers = {
403
+ 'video': DEFAULT_VIDEO_WORKERS,
404
+ 'audio': DEFAULT_AUDIO_WORKERS
405
+ }.get(stream_type.lower(), 1)
406
+
407
+ if THERE_IS_PROXY_LIST:
408
+ return min(len(self.valid_proxy), base_workers * 2)
409
+ return base_workers
410
+
411
+ def _generate_results(self, stream_type: str) -> Dict:
412
+ """Package final download results."""
413
+ return {
414
+ 'type': stream_type,
415
+ 'nFailed': self.info_nFailed,
416
+ 'stopped': self.download_interrupted
417
+ }
418
+
419
+ def _verify_download_completion(self) -> None:
420
+ """Validate final download integrity."""
421
+ total = len(self.segments)
422
+ if len(self.downloaded_segments) / total < 0.999:
423
+ missing = sorted(set(range(total)) - self.downloaded_segments)
424
+ raise RuntimeError(f"Download incomplete ({len(self.downloaded_segments)/total:.1%}). Missing segments: {missing}")
425
+
426
+ def _cleanup_resources(self, writer_thread: threading.Thread, progress_bar: tqdm) -> None:
427
+ """Ensure resource cleanup and final reporting."""
428
+ self.stop_event.set()
429
+ writer_thread.join(timeout=30)
430
+ progress_bar.close()
431
+
432
+ if self.download_interrupted:
433
+ console.print("\n[red]Download terminated by user")
434
+
435
+ if self.info_nFailed > 0:
436
+ self._display_error_summary()
437
+
438
+ def _display_error_summary(self) -> None:
439
+ """Generate final error report."""
440
+ console.print(f"\n[cyan]Retry Summary: "
441
+ f"[white]Max retries: [green]{self.info_maxRetry} "
442
+ f"[white]Total retries: [green]{self.info_nRetry} "
443
+ f"[white]Failed segments: [red]{self.info_nFailed}")
444
+
445
+ if self.info_nRetry > len(self.segments) * 0.3:
446
+ console.print("[yellow]Warning: High retry count detected. Consider reducing worker count in config.")