StreamingCommunity 1.9.5__py3-none-any.whl → 1.9.90__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of StreamingCommunity has been flagged as potentially problematic (see the registry's advisory for details).

Files changed (93)
  1. StreamingCommunity/Api/Player/Helper/Vixcloud/js_parser.py +143 -0
  2. StreamingCommunity/Api/Player/Helper/Vixcloud/util.py +145 -0
  3. StreamingCommunity/Api/Player/ddl.py +89 -0
  4. StreamingCommunity/Api/Player/maxstream.py +151 -0
  5. StreamingCommunity/Api/Player/supervideo.py +194 -0
  6. StreamingCommunity/Api/Player/vixcloud.py +273 -0
  7. StreamingCommunity/Api/Site/1337xx/__init__.py +51 -0
  8. StreamingCommunity/Api/Site/1337xx/costant.py +15 -0
  9. StreamingCommunity/Api/Site/1337xx/site.py +86 -0
  10. StreamingCommunity/Api/Site/1337xx/title.py +66 -0
  11. StreamingCommunity/Api/Site/altadefinizione/__init__.py +51 -0
  12. StreamingCommunity/Api/Site/altadefinizione/costant.py +15 -0
  13. StreamingCommunity/Api/Site/altadefinizione/film.py +74 -0
  14. StreamingCommunity/Api/Site/altadefinizione/site.py +89 -0
  15. StreamingCommunity/Api/Site/animeunity/__init__.py +51 -0
  16. StreamingCommunity/Api/Site/animeunity/costant.py +15 -0
  17. StreamingCommunity/Api/Site/animeunity/film_serie.py +135 -0
  18. StreamingCommunity/Api/Site/animeunity/site.py +167 -0
  19. StreamingCommunity/Api/Site/animeunity/util/ScrapeSerie.py +97 -0
  20. StreamingCommunity/Api/Site/cb01new/__init__.py +52 -0
  21. StreamingCommunity/Api/Site/cb01new/costant.py +15 -0
  22. StreamingCommunity/Api/Site/cb01new/film.py +73 -0
  23. StreamingCommunity/Api/Site/cb01new/site.py +76 -0
  24. StreamingCommunity/Api/Site/ddlstreamitaly/__init__.py +58 -0
  25. StreamingCommunity/Api/Site/ddlstreamitaly/costant.py +16 -0
  26. StreamingCommunity/Api/Site/ddlstreamitaly/series.py +146 -0
  27. StreamingCommunity/Api/Site/ddlstreamitaly/site.py +95 -0
  28. StreamingCommunity/Api/Site/ddlstreamitaly/util/ScrapeSerie.py +85 -0
  29. StreamingCommunity/Api/Site/guardaserie/__init__.py +53 -0
  30. StreamingCommunity/Api/Site/guardaserie/costant.py +15 -0
  31. StreamingCommunity/Api/Site/guardaserie/series.py +199 -0
  32. StreamingCommunity/Api/Site/guardaserie/site.py +86 -0
  33. StreamingCommunity/Api/Site/guardaserie/util/ScrapeSerie.py +110 -0
  34. StreamingCommunity/Api/Site/ilcorsaronero/__init__.py +52 -0
  35. StreamingCommunity/Api/Site/ilcorsaronero/costant.py +15 -0
  36. StreamingCommunity/Api/Site/ilcorsaronero/site.py +63 -0
  37. StreamingCommunity/Api/Site/ilcorsaronero/title.py +46 -0
  38. StreamingCommunity/Api/Site/ilcorsaronero/util/ilCorsarScraper.py +141 -0
  39. StreamingCommunity/Api/Site/mostraguarda/__init__.py +49 -0
  40. StreamingCommunity/Api/Site/mostraguarda/costant.py +15 -0
  41. StreamingCommunity/Api/Site/mostraguarda/film.py +99 -0
  42. StreamingCommunity/Api/Site/streamingcommunity/__init__.py +56 -0
  43. StreamingCommunity/Api/Site/streamingcommunity/costant.py +15 -0
  44. StreamingCommunity/Api/Site/streamingcommunity/film.py +75 -0
  45. StreamingCommunity/Api/Site/streamingcommunity/series.py +206 -0
  46. StreamingCommunity/Api/Site/streamingcommunity/site.py +137 -0
  47. StreamingCommunity/Api/Site/streamingcommunity/util/ScrapeSerie.py +123 -0
  48. StreamingCommunity/Api/Template/Class/SearchType.py +101 -0
  49. StreamingCommunity/Api/Template/Util/__init__.py +5 -0
  50. StreamingCommunity/Api/Template/Util/get_domain.py +173 -0
  51. StreamingCommunity/Api/Template/Util/manage_ep.py +179 -0
  52. StreamingCommunity/Api/Template/Util/recall_search.py +37 -0
  53. StreamingCommunity/Api/Template/__init__.py +3 -0
  54. StreamingCommunity/Api/Template/site.py +87 -0
  55. StreamingCommunity/Lib/Downloader/HLS/downloader.py +946 -0
  56. StreamingCommunity/Lib/Downloader/HLS/proxyes.py +110 -0
  57. StreamingCommunity/Lib/Downloader/HLS/segments.py +561 -0
  58. StreamingCommunity/Lib/Downloader/MP4/downloader.py +155 -0
  59. StreamingCommunity/Lib/Downloader/TOR/downloader.py +296 -0
  60. StreamingCommunity/Lib/Downloader/__init__.py +5 -0
  61. StreamingCommunity/Lib/FFmpeg/__init__.py +4 -0
  62. StreamingCommunity/Lib/FFmpeg/capture.py +170 -0
  63. StreamingCommunity/Lib/FFmpeg/command.py +296 -0
  64. StreamingCommunity/Lib/FFmpeg/util.py +249 -0
  65. StreamingCommunity/Lib/M3U8/__init__.py +6 -0
  66. StreamingCommunity/Lib/M3U8/decryptor.py +164 -0
  67. StreamingCommunity/Lib/M3U8/estimator.py +176 -0
  68. StreamingCommunity/Lib/M3U8/parser.py +666 -0
  69. StreamingCommunity/Lib/M3U8/url_fixer.py +52 -0
  70. StreamingCommunity/Lib/TMBD/__init__.py +2 -0
  71. StreamingCommunity/Lib/TMBD/obj_tmbd.py +39 -0
  72. StreamingCommunity/Lib/TMBD/tmdb.py +346 -0
  73. StreamingCommunity/Upload/update.py +68 -0
  74. StreamingCommunity/Upload/version.py +5 -0
  75. StreamingCommunity/Util/_jsonConfig.py +204 -0
  76. StreamingCommunity/Util/call_stack.py +42 -0
  77. StreamingCommunity/Util/color.py +20 -0
  78. StreamingCommunity/Util/console.py +12 -0
  79. StreamingCommunity/Util/ffmpeg_installer.py +311 -0
  80. StreamingCommunity/Util/headers.py +147 -0
  81. StreamingCommunity/Util/logger.py +53 -0
  82. StreamingCommunity/Util/message.py +64 -0
  83. StreamingCommunity/Util/os.py +554 -0
  84. StreamingCommunity/Util/table.py +229 -0
  85. StreamingCommunity/__init__.py +0 -0
  86. StreamingCommunity/run.py +2 -11
  87. {StreamingCommunity-1.9.5.dist-info → StreamingCommunity-1.9.90.dist-info}/METADATA +10 -27
  88. StreamingCommunity-1.9.90.dist-info/RECORD +92 -0
  89. {StreamingCommunity-1.9.5.dist-info → StreamingCommunity-1.9.90.dist-info}/WHEEL +1 -1
  90. {StreamingCommunity-1.9.5.dist-info → StreamingCommunity-1.9.90.dist-info}/entry_points.txt +0 -1
  91. StreamingCommunity-1.9.5.dist-info/RECORD +0 -7
  92. {StreamingCommunity-1.9.5.dist-info → StreamingCommunity-1.9.90.dist-info}/LICENSE +0 -0
  93. {StreamingCommunity-1.9.5.dist-info → StreamingCommunity-1.9.90.dist-info}/top_level.txt +0 -0
StreamingCommunity/Lib/Downloader/HLS/proxyes.py (new file)
@@ -0,0 +1,110 @@
+ # 09.06.24
+
+ import os
+ import sys
+ import logging
+ from concurrent.futures import ThreadPoolExecutor
+
+
+ # External libraries
+ import httpx
+
+
+ # Internal utilities
+ from StreamingCommunity.Util._jsonConfig import config_manager
+ from StreamingCommunity.Util.headers import get_headers
+ from StreamingCommunity.Util.os import os_manager
+
+
+ class ProxyManager:
+     def __init__(self, proxy_list=None, url=None):
+         """
+         Initialize ProxyManager with a list of proxies and a test URL.
+
+         Parameters:
+             - proxy_list: List of proxy strings
+             - url: URL used to test whether each proxy works
+         """
+         self.proxy_list = proxy_list or []
+         self.verified_proxies = []
+         self.timeout = config_manager.get_float('REQUESTS', 'timeout')
+         self.url = url
+
+     def _check_proxy(self, proxy):
+         """
+         Check if a single proxy is working by making a request to the test URL.
+
+         Parameters:
+             - proxy: Proxy string to be checked
+
+         Returns:
+             - httpx-style proxy mapping if working, None otherwise
+         """
+         protocol = proxy.split(":")[0].lower()
+         protocol = f'{protocol}://'
+         proxy = {protocol: proxy, "https://": proxy}
+
+         try:
+             with httpx.Client(proxies=proxy, verify=False) as client:
+                 response = client.get(self.url, timeout=self.timeout, headers={'user-agent': get_headers()})
+
+                 if response.status_code == 200:
+                     logging.info(f"Proxy {proxy} is working.")
+                     return proxy
+
+         except Exception as e:
+             logging.error(f"Test proxy {proxy} failed: {e}")
+             return None
+
+     def verify_proxies(self):
+         """
+         Verify all proxies in the list and store the working ones.
+         """
+         logging.info("Starting proxy verification...")
+         with ThreadPoolExecutor(max_workers=os.cpu_count()) as executor:
+             self.verified_proxies = list(executor.map(self._check_proxy, self.proxy_list))
+
+         self.verified_proxies = [proxy for proxy in self.verified_proxies if proxy]
+         logging.info(f"Verification complete. {len(self.verified_proxies)} proxies are working.")
+
+     def get_verified_proxies(self):
+         """
+         Return the validated proxies, exiting if none are available.
+         """
+         if len(self.verified_proxies) > 0:
+             return self.verified_proxies
+
+         else:
+             logging.error("Cannot find a valid proxy.")
+             sys.exit(0)
+
+
+ def main_test_proxy(url_test):
+
+     path_file_proxy_list = "list_proxy.txt"
+
+     if os_manager.check_file(path_file_proxy_list):
+
+         # Read file
+         with open(path_file_proxy_list, 'r') as file:
+             ip_addresses = file.readlines()
+
+         # Format the IPs as http proxies
+         ip_addresses = [ip.strip() for ip in ip_addresses]
+         formatted_ips = [f"http://{ip}" for ip in ip_addresses]
+
+         # Use the formatted IPs as the proxy list
+         proxy_list = formatted_ips
+
+         # Verify proxies
+         manager = ProxyManager(proxy_list, url_test)
+         manager.verify_proxies()
+
+         # Rewrite the proxy file (note: this writes back the original list, not only the verified proxies)
+         with open(path_file_proxy_list, 'w') as file:
+             for ip in ip_addresses:
+                 file.write(f"{ip}\n")
+
+         # Return the verified proxies
+         return manager.get_verified_proxies()
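In short, main_test_proxy() reads list_proxy.txt, prefixes each entry with http://, verifies every proxy concurrently, and returns the httpx-style mappings that responded. A minimal sketch of driving ProxyManager directly, assuming the import path above; the proxy addresses and test URL are placeholders:

    # Hypothetical usage; the proxy addresses and test URL are placeholders.
    from StreamingCommunity.Lib.Downloader.HLS.proxyes import ProxyManager

    manager = ProxyManager(
        proxy_list=["http://127.0.0.1:8080", "http://10.0.0.2:3128"],
        url="https://example.com",
    )
    manager.verify_proxies()                  # checks each proxy in a thread pool
    working = manager.get_verified_proxies()  # list of httpx proxy mappings; exits if empty
    print(working)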
StreamingCommunity/Lib/Downloader/HLS/segments.py (new file)
@@ -0,0 +1,561 @@
+ # 18.04.24
+
+ import os
+ import sys
+ import time
+ import queue
+ import signal
+ import logging
+ import binascii
+ import threading
+
+ from queue import PriorityQueue
+ from urllib.parse import urljoin, urlparse
+ from concurrent.futures import ThreadPoolExecutor, as_completed
+
+
+ # External libraries
+ import httpx
+ from tqdm import tqdm
+
+
+ # Internal utilities
+ from StreamingCommunity.Util.console import console
+ from StreamingCommunity.Util.headers import get_headers, random_headers
+ from StreamingCommunity.Util.color import Colors
+ from StreamingCommunity.Util._jsonConfig import config_manager
+ from StreamingCommunity.Util.os import os_manager
+ from StreamingCommunity.Util.call_stack import get_call_stack
+
+
+ # Logic class
+ from ...M3U8 import (
+     M3U8_Decryption,
+     M3U8_Ts_Estimator,
+     M3U8_Parser,
+     M3U8_UrlFix
+ )
+ from ...FFmpeg.util import print_duration_table, format_duration
+ from .proxyes import main_test_proxy
+
+ # Config
+ TQDM_DELAY_WORKER = config_manager.get_float('M3U8_DOWNLOAD', 'tqdm_delay')
+ TQDM_USE_LARGE_BAR = config_manager.get_int('M3U8_DOWNLOAD', 'tqdm_use_large_bar')
+
+ REQUEST_MAX_RETRY = config_manager.get_int('REQUESTS', 'max_retry')
+ REQUEST_VERIFY = False
+
+ THERE_IS_PROXY_LIST = os_manager.check_file("list_proxy.txt")
+ PROXY_START_MIN = config_manager.get_float('REQUESTS', 'proxy_start_min')
+ PROXY_START_MAX = config_manager.get_float('REQUESTS', 'proxy_start_max')
+
+ DEFAULT_VIDEO_WORKERS = config_manager.get_int('M3U8_DOWNLOAD', 'default_video_workser')
+ DEFAULT_AUDIO_WORKERS = config_manager.get_int('M3U8_DOWNLOAD', 'default_audio_workser')
+
+
+ # Variable
+ max_timeout = config_manager.get_int("REQUESTS", "timeout")
+
+
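Every constant above is pulled from config.json through config_manager; note that default_video_workser and default_audio_workser (with the transposed letters) are the literal key names the code requests, so the config file must spell them the same way. A plausible shape for the relevant sections, with purely illustrative values rather than the package's defaults:

    {
        "REQUESTS": {
            "timeout": 15,
            "max_retry": 8,
            "proxy_start_min": 0.1,
            "proxy_start_max": 0.5
        },
        "M3U8_DOWNLOAD": {
            "tqdm_delay": 0.01,
            "tqdm_use_large_bar": 1,
            "default_video_workser": 12,
            "default_audio_workser": 12
        }
    }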
+ class M3U8_Segments:
+     def __init__(self, url: str, tmp_folder: str, is_index_url: bool = True):
+         """
+         Initializes the M3U8_Segments object.
+
+         Parameters:
+             - url (str): The URL of the M3U8 playlist, or its raw content when `is_index_url` is False.
+             - tmp_folder (str): The temporary folder to store downloaded segments.
+             - is_index_url (bool): Flag indicating if `url` is a URL (default True).
+         """
+         self.url = url
+         self.tmp_folder = tmp_folder
+         self.is_index_url = is_index_url
+         self.expected_real_time = None
+         self.max_timeout = max_timeout
+
+         self.tmp_file_path = os.path.join(self.tmp_folder, "0.ts")
+         os.makedirs(self.tmp_folder, exist_ok=True)
+
+         # Util class
+         self.decryption: M3U8_Decryption = None
+         self.class_ts_estimator = M3U8_Ts_Estimator(0)
+         self.class_url_fixer = M3U8_UrlFix(url)
+
+         # Sync
+         self.queue = PriorityQueue()
+         self.stop_event = threading.Event()
+         self.downloaded_segments = set()
+         self.base_timeout = 1.0
+         self.current_timeout = 5.0
+
+         # Stopping
+         self.interrupt_flag = threading.Event()
+         self.download_interrupted = False
+
+         # Other info
+         self.info_maxRetry = 0
+         self.info_nRetry = 0
+         self.info_nFailed = 0
+
+     def __get_key__(self, m3u8_parser: M3U8_Parser) -> bytes:
+         """
+         Retrieves the encryption key from the M3U8 playlist.
+
+         Parameters:
+             - m3u8_parser (M3U8_Parser): The parser object containing M3U8 playlist information.
+
+         Returns:
+             bytes: The encryption key in bytes.
+         """
+
+         # Construct the full URL of the key
+         key_uri = urljoin(self.url, m3u8_parser.keys.get('uri'))
+         parsed_url = urlparse(key_uri)
+         self.key_base_url = f"{parsed_url.scheme}://{parsed_url.netloc}/"
+         logging.info(f"Uri key: {key_uri}")
+
+         # Make request to fetch the key
+         try:
+             response = httpx.get(
+                 url=key_uri,
+                 headers={'User-Agent': get_headers()},
+                 timeout=max_timeout
+             )
+             response.raise_for_status()
+
+         except Exception as e:
+             raise Exception(f"Failed to fetch key from {key_uri}: {e}")
+
+         # Convert the content of the response to hexadecimal and then to bytes
+         hex_content = binascii.hexlify(response.content).decode('utf-8')
+         byte_content = bytes.fromhex(hex_content)
+
+         #console.print(f"[cyan]Find key: [red]{hex_content}")
+         return byte_content
+
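The hexlify-then-fromhex pair at the end is a byte-for-byte round trip, so byte_content always equals response.content; a quick standalone check:

    import binascii

    raw = b"\x00\x01\xfe\xff"
    hex_content = binascii.hexlify(raw).decode('utf-8')  # '0001feff'
    assert bytes.fromhex(hex_content) == raw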
+     def parse_data(self, m3u8_content: str) -> None:
+         """
+         Parses the M3U8 content to extract segment information.
+
+         Parameters:
+             - m3u8_content (str): The content of the M3U8 file.
+         """
+         m3u8_parser = M3U8_Parser()
+         m3u8_parser.parse_data(uri=self.url, raw_content=m3u8_content)
+
+         self.expected_real_time = m3u8_parser.get_duration(return_string=False)
+         self.expected_real_time_s = m3u8_parser.duration
+
+         # Check if there is an encryption key in the playlist
+         if m3u8_parser.keys is not None:
+             try:
+                 # Extract the bytes of the key
+                 key = self.__get_key__(m3u8_parser)
+
+             except Exception as e:
+                 raise Exception(f"Failed to retrieve encryption key: {e}")
+
+             iv = m3u8_parser.keys.get('iv')
+             method = m3u8_parser.keys.get('method')
+
+             # Create a decryption object with the key and set the method
+             self.decryption = M3U8_Decryption(key, iv, method)
+
+         # Store the segment information parsed from the playlist
+         self.segments = m3u8_parser.segments
+
+         # Fix URLs that are incomplete (missing 'http')
+         for i in range(len(self.segments)):
+             segment_url = self.segments[i]
+
+             if "http" not in segment_url:
+                 self.segments[i] = self.class_url_fixer.generate_full_url(segment_url)
+                 logging.info(f"Generated new URL: {self.segments[i]}, from: {segment_url}")
+
+         # Update segment count for the estimator
+         self.class_ts_estimator.total_segments = len(self.segments)
+         logging.info(f"Segments to download: [{len(self.segments)}]")
+
+         # Proxy
+         if THERE_IS_PROXY_LIST:
+             console.log("[red]Starting proxy validation.")
+             self.valid_proxy = main_test_proxy(self.segments[0])
+             console.log(f"[cyan]N. Valid ip: [red]{len(self.valid_proxy)}")
+
+             if len(self.valid_proxy) == 0:
+                 sys.exit(0)
+
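M3U8_UrlFix itself is defined in StreamingCommunity/Lib/M3U8/url_fixer.py (file 69 above) and is not shown in this diff, but given that it is constructed with the playlist URL and called on bare segment names, its effect is presumably equivalent to urljoin:

    from urllib.parse import urljoin

    playlist_url = "https://cdn.example.com/stream/playlist.m3u8"  # placeholder
    print(urljoin(playlist_url, "segment_001.ts"))
    # -> https://cdn.example.com/stream/segment_001.ts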
+     def get_info(self) -> None:
+         """
+         Makes a request to the index M3U8 file to get information about segments.
+         """
+         if self.is_index_url:
+
+             # Send a GET request to retrieve the index M3U8 file
+             response = httpx.get(
+                 self.url,
+                 headers={'User-Agent': get_headers()},
+                 timeout=max_timeout
+             )
+             response.raise_for_status()
+
+             # Save the M3U8 file to the temporary folder
+             path_m3u8_file = os.path.join(self.tmp_folder, "playlist.m3u8")
+             with open(path_m3u8_file, "w+") as f:
+                 f.write(response.text)
+
+             # Parse the text from the M3U8 index file
+             self.parse_data(response.text)
+
+         else:
+             # Parse the index content that was passed directly to the class
+             self.parse_data(self.url)
+
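The constructor therefore covers two entry points: pass a playlist URL (the default), or pass the playlist text itself with is_index_url=False. A sketch with placeholder values:

    # Assumed usage; URL, folder and playlist text are placeholders.
    seg = M3U8_Segments("https://cdn.example.com/stream/index.m3u8", "tmp_video")
    seg.get_info()  # fetches the playlist, saves it, then parses it

    raw = "#EXTM3U\n#EXTINF:4.0,\nsegment_001.ts"
    seg_raw = M3U8_Segments(raw, "tmp_video", is_index_url=False)
    seg_raw.get_info()  # parses the text directly, no request made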
+     def setup_interrupt_handler(self):
+         """
+         Set up a signal handler for graceful interruption.
+         """
+         def interrupt_handler(signum, frame):
+             if not self.interrupt_flag.is_set():
+                 console.log("\n[red] Stopping download gracefully...")
+                 self.interrupt_flag.set()
+                 self.download_interrupted = True
+                 self.stop_event.set()
+
+         if threading.current_thread() is threading.main_thread():
+             signal.signal(signal.SIGINT, interrupt_handler)
+         else:
+             print("Signal handler must be set in the main thread")
+
+     def make_requests_stream(self, ts_url: str, index: int, progress_bar: tqdm, backoff_factor: float = 1.5) -> None:
+         """
+         Downloads a TS segment and adds it to the segment queue with retry logic.
+         Retries up to REQUEST_MAX_RETRY times before marking the segment as failed.
+
+         Parameters:
+             - ts_url (str): The URL of the TS segment.
+             - index (int): The index of the segment.
+             - progress_bar (tqdm): Progress counter for tracking download progress.
+             - backoff_factor (float): The backoff factor for exponential backoff (default is 1.5 seconds).
+         """
+         for attempt in range(REQUEST_MAX_RETRY):
+             if self.interrupt_flag.is_set():
+                 return
+
+             try:
+                 start_time = time.time()
+
+                 # Make request to get content
+                 if THERE_IS_PROXY_LIST:
+
+                     # Rotate through the validated proxies by segment index
+                     proxy = self.valid_proxy[index % len(self.valid_proxy)]
+                     logging.info(f"Use proxy: {proxy}")
+
+                     with httpx.Client(proxies=proxy, verify=REQUEST_VERIFY) as client:
+                         if 'key_base_url' in self.__dict__:
+                             response = client.get(
+                                 url=ts_url,
+                                 headers=random_headers(self.key_base_url),
+                                 timeout=max_timeout,
+                                 follow_redirects=True
+                             )
+                         else:
+                             response = client.get(
+                                 url=ts_url,
+                                 headers={'User-Agent': get_headers()},
+                                 timeout=max_timeout,
+                                 follow_redirects=True
+                             )
+
+                 else:
+                     with httpx.Client(verify=REQUEST_VERIFY) as client_2:
+                         if 'key_base_url' in self.__dict__:
+                             response = client_2.get(
+                                 url=ts_url,
+                                 headers=random_headers(self.key_base_url),
+                                 timeout=max_timeout,
+                                 follow_redirects=True
+                             )
+                         else:
+                             response = client_2.get(
+                                 url=ts_url,
+                                 headers={'User-Agent': get_headers()},
+                                 timeout=max_timeout,
+                                 follow_redirects=True
+                             )
+
+                 # Validate response and content
+                 response.raise_for_status()
+                 segment_content = response.content
+                 content_size = len(segment_content)
+                 duration = time.time() - start_time
+
+                 # Decrypt if needed and verify decrypted content
+                 if self.decryption is not None:
+                     try:
+                         segment_content = self.decryption.decrypt(segment_content)
+
+                     except Exception as e:
+                         logging.error(f"Decryption failed for segment {index}: {str(e)}")
+                         raise
+
+                 # Update progress and queue
+                 self.class_ts_estimator.update_progress_bar(content_size, duration, progress_bar)
+
+                 # Add the segment to the queue
+                 self.queue.put((index, segment_content))
+
+                 # Track successfully downloaded segments
+                 self.downloaded_segments.add(index)
+                 progress_bar.update(1)
+
+                 # Break out of the loop on success
+                 return
+
+             except Exception as e:
+                 logging.info(f"Attempt {attempt + 1} failed for segment {index} - '{ts_url}': {e}")
+
+                 # Update retry statistics
+                 if attempt > self.info_maxRetry:
+                     self.info_maxRetry = (attempt + 1)
+                 self.info_nRetry += 1
+
+                 if attempt + 1 == REQUEST_MAX_RETRY:
+                     console.log(f"[red]Final retry failed for segment: {index}")
+                     self.queue.put((index, None))  # Marker for failed segment
+                     progress_bar.update(1)
+                     self.info_nFailed += 1
+
+                 sleep_time = backoff_factor * (2 ** attempt)
+                 logging.info(f"Retrying segment {index} in {sleep_time} seconds...")
+                 time.sleep(sleep_time)
+
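With the default backoff_factor of 1.5, the wait between attempts doubles each time: 1.5 s, 3 s, 6 s, 12 s, and so on.

    backoff_factor = 1.5
    waits = [backoff_factor * (2 ** attempt) for attempt in range(5)]
    print(waits)  # [1.5, 3.0, 6.0, 12.0, 24.0]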
+     def write_segments_to_file(self):
+         """
+         Writes segments to file with additional verification.
+         """
+         buffer = {}
+         expected_index = 0
+         segments_written = set()
+
+         with open(self.tmp_file_path, 'wb') as f:
+             while not self.stop_event.is_set() or not self.queue.empty():
+                 if self.interrupt_flag.is_set():
+                     break
+
+                 try:
+                     index, segment_content = self.queue.get(timeout=self.current_timeout)
+
+                     # Successful queue retrieval: reduce timeout
+                     self.current_timeout = max(self.base_timeout, self.current_timeout / 2)
+
+                     # Handle failed segments
+                     if segment_content is None:
+                         if index == expected_index:
+                             expected_index += 1
+                         continue
+
+                     # Write segment if it's the next expected one
+                     if index == expected_index:
+                         f.write(segment_content)
+                         segments_written.add(index)
+                         f.flush()
+                         expected_index += 1
+
+                         # Write any buffered segments that are now in order
+                         while expected_index in buffer:
+                             next_segment = buffer.pop(expected_index)
+
+                             if next_segment is not None:
+                                 f.write(next_segment)
+                                 segments_written.add(expected_index)
+                                 f.flush()
+
+                             expected_index += 1
+
+                     else:
+                         buffer[index] = segment_content
+
+                 except queue.Empty:
+                     self.current_timeout = min(self.max_timeout, self.current_timeout * 1.5)
+
+                     if self.stop_event.is_set():
+                         break
+
+                 except Exception as e:
+                     logging.error(f"Error writing segment {index}: {str(e)}")
+
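Because workers finish out of order, the writer holds early arrivals in buffer until the gap closes. The core of that reordering, extracted into a self-contained sketch:

    # Standalone illustration of the buffer/expected_index logic above.
    arrivals = [(2, b"C"), (0, b"A"), (3, b"D"), (1, b"B")]  # (index, content)
    buffer, expected_index, out = {}, 0, bytearray()

    for index, content in arrivals:
        if index == expected_index:
            out += content
            expected_index += 1
            while expected_index in buffer:  # flush consecutive buffered segments
                out += buffer.pop(expected_index)
                expected_index += 1
        else:
            buffer[index] = content

    assert bytes(out) == b"ABCD"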
+     def download_streams(self, description: str, type: str):
+         """
+         Downloads all TS segments in parallel and writes them to a file.
+
+         Parameters:
+             - description: Description to insert on the tqdm bar
+             - type (str): Type of download: 'video' or 'audio'
+         """
+         self.setup_interrupt_handler()
+
+         # Get site config from a previous frame of the call stack
+         frames = get_call_stack()
+         logging.info(f"Extract info from: {frames}")
+         config_site = str(frames[-4]['folder_base'])
+         logging.info(f"Use frame: {frames[-1]}")
+
+         # Workers to use for downloading
+         TQDM_MAX_WORKER = 0
+
+         # Select video workers from the site config; fall back to the default
+         try:
+             VIDEO_WORKERS = int(config_manager.get_dict('SITE', config_site)['video_workers'])
+         except:
+             VIDEO_WORKERS = DEFAULT_VIDEO_WORKERS
+
+         # Select audio workers from the site config; fall back to the default
+         try:
+             AUDIO_WORKERS = int(config_manager.get_dict('SITE', config_site)['audio_workers'])
+         except:
+             AUDIO_WORKERS = DEFAULT_AUDIO_WORKERS
+
+         # Different workers for audio and video
+         if "video" in str(type):
+             TQDM_MAX_WORKER = VIDEO_WORKERS
+
+         if "audio" in str(type):
+             TQDM_MAX_WORKER = AUDIO_WORKERS
+
+         console.print(f"[cyan]Video workers[white]: [green]{VIDEO_WORKERS} [white]| [cyan]Audio workers[white]: [green]{AUDIO_WORKERS}")
+
+         # Custom bar for mobile and pc
+         if TQDM_USE_LARGE_BAR:
+             bar_format = (
+                 f"{Colors.YELLOW}[HLS] {Colors.WHITE}({Colors.CYAN}{description}{Colors.WHITE}): "
+                 f"{Colors.RED}{{percentage:.2f}}% "
+                 f"{Colors.MAGENTA}{{bar}} "
+                 f"{Colors.WHITE}[ {Colors.YELLOW}{{n_fmt}}{Colors.WHITE} / {Colors.RED}{{total_fmt}} {Colors.WHITE}] "
+                 f"{Colors.YELLOW}{{elapsed}} {Colors.WHITE}< {Colors.CYAN}{{remaining}}{{postfix}} {Colors.WHITE}]"
+             )
+         else:
+             bar_format = (
+                 f"{Colors.YELLOW}Proc{Colors.WHITE}: "
+                 f"{Colors.RED}{{percentage:.2f}}% "
+                 f"{Colors.WHITE}| "
+                 f"{Colors.CYAN}{{remaining}}{{postfix}} {Colors.WHITE}]"
+             )
+
+         # Create progress bar
+         progress_bar = tqdm(
+             total=len(self.segments),
+             unit='s',
+             ascii='░▒█',
+             bar_format=bar_format,
+             mininterval=0.05
+         )
+
+         try:
+             # Start writer thread
+             writer_thread = threading.Thread(target=self.write_segments_to_file)
+             writer_thread.daemon = True
+             writer_thread.start()
+
+             # Configure workers and delay
+             max_workers = len(self.valid_proxy) if THERE_IS_PROXY_LIST else TQDM_MAX_WORKER
+             delay = max(PROXY_START_MIN, min(PROXY_START_MAX, 1 / (len(self.valid_proxy) + 1))) if THERE_IS_PROXY_LIST else TQDM_DELAY_WORKER
+
+             # Download segments with completion verification
+             with ThreadPoolExecutor(max_workers=max_workers) as executor:
+                 futures = []
+                 for index, segment_url in enumerate(self.segments):
+                     # Check for interrupt before submitting each task
+                     if self.interrupt_flag.is_set():
+                         break
+
+                     time.sleep(delay)
+                     futures.append(executor.submit(self.make_requests_stream, segment_url, index, progress_bar))
+
+                 # Wait for futures with interrupt handling
+                 for future in as_completed(futures):
+                     if self.interrupt_flag.is_set():
+                         break
+                     try:
+                         future.result()
+                     except Exception as e:
+                         logging.error(f"Error in download thread: {str(e)}")
+
+                 # Retry any segments that are still missing
+                 if not self.interrupt_flag.is_set():
+                     total_segments = len(self.segments)
+                     completed_segments = len(self.downloaded_segments)
+
+                     if completed_segments < total_segments:
+                         missing_segments = set(range(total_segments)) - self.downloaded_segments
+                         logging.warning(f"Missing segments: {sorted(missing_segments)}")
+
+                         # Retry missing segments with interrupt check
+                         for index in missing_segments:
+                             if self.interrupt_flag.is_set():
+                                 break
+
+                             try:
+                                 self.make_requests_stream(self.segments[index], index, progress_bar)
+
+                             except Exception as e:
+                                 logging.error(f"Failed to retry segment {index}: {str(e)}")
+
+         except Exception as e:
+             logging.error(f"Download failed: {str(e)}")
+             raise
+
+         finally:
+             # Clean up resources
+             self.stop_event.set()
+             writer_thread.join(timeout=30)
+             progress_bar.close()
+
+             # Check if download was interrupted
+             if self.download_interrupted:
+                 console.log("[red] Download was manually stopped.")
+
+         # Final verification
+         total_segments = len(self.segments)
+         final_completion = (len(self.downloaded_segments) / total_segments) * 100
+         if final_completion < 99.9:  # Less than 99.9% complete
+             missing = set(range(total_segments)) - self.downloaded_segments
+             raise Exception(f"Download incomplete ({final_completion:.1f}%). Missing segments: {sorted(missing)}")
+
+         # Verify output file
+         if not os.path.exists(self.tmp_file_path):
+             raise Exception("Output file missing")
+
+         file_size = os.path.getsize(self.tmp_file_path)
+         if file_size == 0:
+             raise Exception("Output file is empty")
+
+         # Get expected duration
+         ex_hours, ex_minutes, ex_seconds = format_duration(self.expected_real_time_s)
+         ex_formatted_duration = f"[yellow]{int(ex_hours)}[red]h [yellow]{int(ex_minutes)}[red]m [yellow]{int(ex_seconds)}[red]s"
+         console.print(f"[cyan]Max retry per URL[white]: [green]{self.info_maxRetry}[green] [white]| [cyan]Total retry done[white]: [green]{self.info_nRetry}[green] [white]| [cyan]Missing TS: [red]{self.info_nFailed} [white]| [cyan]Duration: {print_duration_table(self.tmp_file_path, None, True)} [white]| [cyan]Expected duration: {ex_formatted_duration} \n")
+
+         # Warn when retries exceed roughly 30% of the segment count
+         if self.info_nRetry >= len(self.segments) * (1 / 3.33):
+             console.print(
+                 "[yellow]⚠ Warning:[/yellow] Too many retries detected! "
+                 "Consider reducing the number of [cyan]workers[/cyan] in the [magenta]config.json[/magenta] file. "
+                 "This will impact [bold]performance[/bold]. \n"
+             )
+
+         # Info to return
+         return {'type': type, 'nFailed': self.info_nFailed}
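Read end to end, the intended call sequence is get_info() followed by download_streams(); the real call sites live in StreamingCommunity/Lib/Downloader/HLS/downloader.py (file 55 above). A hedged sketch with placeholder values, not the package's actual call site:

    # Assumed usage; the URL and folder are placeholders.
    segments = M3U8_Segments(
        url="https://cdn.example.com/stream/index.m3u8",
        tmp_folder="tmp_video",
    )
    segments.get_info()  # fetch and parse the playlist, resolve key and segment URLs
    result = segments.download_streams("Video", "video")
    print(result)        # e.g. {'type': 'video', 'nFailed': 0} on a clean run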