StreamingCommunity 2.3.0-py3-none-any.whl → 2.5.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of StreamingCommunity might be problematic.

Files changed (92)
  1. StreamingCommunity/run.py +61 -7
  2. {StreamingCommunity-2.3.0.dist-info → StreamingCommunity-2.5.0.dist-info}/METADATA +88 -18
  3. StreamingCommunity-2.5.0.dist-info/RECORD +8 -0
  4. StreamingCommunity/Api/Player/Helper/Vixcloud/js_parser.py +0 -143
  5. StreamingCommunity/Api/Player/Helper/Vixcloud/util.py +0 -136
  6. StreamingCommunity/Api/Player/ddl.py +0 -89
  7. StreamingCommunity/Api/Player/maxstream.py +0 -151
  8. StreamingCommunity/Api/Player/supervideo.py +0 -194
  9. StreamingCommunity/Api/Player/vixcloud.py +0 -273
  10. StreamingCommunity/Api/Site/1337xx/__init__.py +0 -51
  11. StreamingCommunity/Api/Site/1337xx/costant.py +0 -15
  12. StreamingCommunity/Api/Site/1337xx/site.py +0 -89
  13. StreamingCommunity/Api/Site/1337xx/title.py +0 -66
  14. StreamingCommunity/Api/Site/altadefinizione/__init__.py +0 -51
  15. StreamingCommunity/Api/Site/altadefinizione/costant.py +0 -19
  16. StreamingCommunity/Api/Site/altadefinizione/film.py +0 -74
  17. StreamingCommunity/Api/Site/altadefinizione/site.py +0 -95
  18. StreamingCommunity/Api/Site/animeunity/__init__.py +0 -51
  19. StreamingCommunity/Api/Site/animeunity/costant.py +0 -19
  20. StreamingCommunity/Api/Site/animeunity/film_serie.py +0 -135
  21. StreamingCommunity/Api/Site/animeunity/site.py +0 -175
  22. StreamingCommunity/Api/Site/animeunity/util/ScrapeSerie.py +0 -97
  23. StreamingCommunity/Api/Site/cb01new/__init__.py +0 -52
  24. StreamingCommunity/Api/Site/cb01new/costant.py +0 -19
  25. StreamingCommunity/Api/Site/cb01new/film.py +0 -73
  26. StreamingCommunity/Api/Site/cb01new/site.py +0 -83
  27. StreamingCommunity/Api/Site/ddlstreamitaly/__init__.py +0 -56
  28. StreamingCommunity/Api/Site/ddlstreamitaly/costant.py +0 -20
  29. StreamingCommunity/Api/Site/ddlstreamitaly/series.py +0 -146
  30. StreamingCommunity/Api/Site/ddlstreamitaly/site.py +0 -99
  31. StreamingCommunity/Api/Site/ddlstreamitaly/util/ScrapeSerie.py +0 -85
  32. StreamingCommunity/Api/Site/guardaserie/__init__.py +0 -51
  33. StreamingCommunity/Api/Site/guardaserie/costant.py +0 -19
  34. StreamingCommunity/Api/Site/guardaserie/series.py +0 -198
  35. StreamingCommunity/Api/Site/guardaserie/site.py +0 -90
  36. StreamingCommunity/Api/Site/guardaserie/util/ScrapeSerie.py +0 -110
  37. StreamingCommunity/Api/Site/ilcorsaronero/__init__.py +0 -52
  38. StreamingCommunity/Api/Site/ilcorsaronero/costant.py +0 -19
  39. StreamingCommunity/Api/Site/ilcorsaronero/site.py +0 -72
  40. StreamingCommunity/Api/Site/ilcorsaronero/title.py +0 -46
  41. StreamingCommunity/Api/Site/ilcorsaronero/util/ilCorsarScraper.py +0 -149
  42. StreamingCommunity/Api/Site/mostraguarda/__init__.py +0 -49
  43. StreamingCommunity/Api/Site/mostraguarda/costant.py +0 -19
  44. StreamingCommunity/Api/Site/mostraguarda/film.py +0 -101
  45. StreamingCommunity/Api/Site/streamingcommunity/__init__.py +0 -56
  46. StreamingCommunity/Api/Site/streamingcommunity/costant.py +0 -19
  47. StreamingCommunity/Api/Site/streamingcommunity/film.py +0 -75
  48. StreamingCommunity/Api/Site/streamingcommunity/series.py +0 -206
  49. StreamingCommunity/Api/Site/streamingcommunity/site.py +0 -139
  50. StreamingCommunity/Api/Site/streamingcommunity/util/ScrapeSerie.py +0 -123
  51. StreamingCommunity/Api/Template/Class/SearchType.py +0 -101
  52. StreamingCommunity/Api/Template/Util/__init__.py +0 -5
  53. StreamingCommunity/Api/Template/Util/get_domain.py +0 -137
  54. StreamingCommunity/Api/Template/Util/manage_ep.py +0 -179
  55. StreamingCommunity/Api/Template/Util/recall_search.py +0 -37
  56. StreamingCommunity/Api/Template/__init__.py +0 -3
  57. StreamingCommunity/Api/Template/site.py +0 -87
  58. StreamingCommunity/Lib/Downloader/HLS/downloader.py +0 -955
  59. StreamingCommunity/Lib/Downloader/HLS/proxyes.py +0 -110
  60. StreamingCommunity/Lib/Downloader/HLS/segments.py +0 -564
  61. StreamingCommunity/Lib/Downloader/MP4/downloader.py +0 -155
  62. StreamingCommunity/Lib/Downloader/TOR/downloader.py +0 -296
  63. StreamingCommunity/Lib/Downloader/__init__.py +0 -5
  64. StreamingCommunity/Lib/FFmpeg/__init__.py +0 -4
  65. StreamingCommunity/Lib/FFmpeg/capture.py +0 -170
  66. StreamingCommunity/Lib/FFmpeg/command.py +0 -296
  67. StreamingCommunity/Lib/FFmpeg/util.py +0 -249
  68. StreamingCommunity/Lib/M3U8/__init__.py +0 -6
  69. StreamingCommunity/Lib/M3U8/decryptor.py +0 -164
  70. StreamingCommunity/Lib/M3U8/estimator.py +0 -229
  71. StreamingCommunity/Lib/M3U8/parser.py +0 -666
  72. StreamingCommunity/Lib/M3U8/url_fixer.py +0 -52
  73. StreamingCommunity/Lib/TMBD/__init__.py +0 -2
  74. StreamingCommunity/Lib/TMBD/obj_tmbd.py +0 -39
  75. StreamingCommunity/Lib/TMBD/tmdb.py +0 -346
  76. StreamingCommunity/Upload/update.py +0 -67
  77. StreamingCommunity/Upload/version.py +0 -5
  78. StreamingCommunity/Util/_jsonConfig.py +0 -204
  79. StreamingCommunity/Util/call_stack.py +0 -42
  80. StreamingCommunity/Util/color.py +0 -20
  81. StreamingCommunity/Util/console.py +0 -12
  82. StreamingCommunity/Util/ffmpeg_installer.py +0 -351
  83. StreamingCommunity/Util/headers.py +0 -147
  84. StreamingCommunity/Util/logger.py +0 -53
  85. StreamingCommunity/Util/message.py +0 -64
  86. StreamingCommunity/Util/os.py +0 -545
  87. StreamingCommunity/Util/table.py +0 -229
  88. StreamingCommunity-2.3.0.dist-info/RECORD +0 -92
  89. {StreamingCommunity-2.3.0.dist-info → StreamingCommunity-2.5.0.dist-info}/LICENSE +0 -0
  90. {StreamingCommunity-2.3.0.dist-info → StreamingCommunity-2.5.0.dist-info}/WHEEL +0 -0
  91. {StreamingCommunity-2.3.0.dist-info → StreamingCommunity-2.5.0.dist-info}/entry_points.txt +0 -0
  92. {StreamingCommunity-2.3.0.dist-info → StreamingCommunity-2.5.0.dist-info}/top_level.txt +0 -0
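
To sanity-check the counts above, the comparison can be reproduced locally. The sketch below is not part of this diff page or of the package; it assumes both wheels have already been fetched (for example with pip download StreamingCommunity==2.3.0 --no-deps), and the file names are illustrative.

# Hypothetical local reproduction of a wheel-to-wheel diff (file names assumed).
import difflib
import zipfile

def diff_wheels(old_whl: str, new_whl: str) -> None:
    with zipfile.ZipFile(old_whl) as old, zipfile.ZipFile(new_whl) as new:
        old_names, new_names = set(old.namelist()), set(new.namelist())
        for name in sorted(old_names | new_names):
            # A file missing on one side diffs against nothing, so a deleted
            # module shows up as +0 -N, exactly like the entries listed above.
            old_lines = old.read(name).decode("utf-8", "replace").splitlines() if name in old_names else []
            new_lines = new.read(name).decode("utf-8", "replace").splitlines() if name in new_names else []
            for line in difflib.unified_diff(old_lines, new_lines, f"a/{name}", f"b/{name}", lineterm=""):
                print(line)

diff_wheels("StreamingCommunity-2.3.0-py3-none-any.whl",
            "StreamingCommunity-2.5.0-py3-none-any.whl")

The two largest removals, proxyes.py and segments.py, are shown in full below.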
--- StreamingCommunity/Lib/Downloader/HLS/proxyes.py
+++ /dev/null
@@ -1,110 +0,0 @@
- # 09.06.24
-
- import os
- import sys
- import logging
- from concurrent.futures import ThreadPoolExecutor
-
-
- # External libraries
- import httpx
-
-
- # Internal utilities
- from StreamingCommunity.Util._jsonConfig import config_manager
- from StreamingCommunity.Util.headers import get_headers
- from StreamingCommunity.Util.os import os_manager
-
-
- class ProxyManager:
-     def __init__(self, proxy_list=None, url=None):
-         """
-         Initialize ProxyManager with a list of proxies and timeout.
-
-         Parameters:
-             - proxy_list: List of proxy strings
-             - timeout: Timeout for proxy requests
-         """
-         self.proxy_list = proxy_list or []
-         self.verified_proxies = []
-         self.timeout = config_manager.get_float('REQUESTS', 'timeout')
-         self.url = url
-
-     def _check_proxy(self, proxy):
-         """
-         Check if a single proxy is working by making a request to Google.
-
-         Parameters:
-             - proxy: Proxy string to be checked
-
-         Returns:
-             - Proxy string if working, None otherwise
-         """
-         protocol = proxy.split(":")[0].lower()
-         protocol = f'{protocol}://'
-         proxy = {protocol: proxy, "https://": proxy}
-
-         try:
-             with httpx.Client(proxies=proxy, verify=False) as client:
-                 response = client.get(self.url, timeout=self.timeout, headers={'user-agent': get_headers()})
-
-                 if response.status_code == 200:
-                     logging.info(f"Proxy {proxy} is working.")
-                     return proxy
-
-         except Exception as e:
-             logging.error(f"Test proxy {proxy} failed: {e}")
-             return None
-
-     def verify_proxies(self):
-         """
-         Verify all proxies in the list and store the working ones.
-         """
-         logging.info("Starting proxy verification...")
-         with ThreadPoolExecutor(max_workers=os.cpu_count()) as executor:
-             self.verified_proxies = list(executor.map(self._check_proxy, self.proxy_list))
-
-         self.verified_proxies = [proxy for proxy in self.verified_proxies if proxy]
-         logging.info(f"Verification complete. {len(self.verified_proxies)} proxies are working.")
-
-     def get_verified_proxies(self):
-         """
-         Get validate proxies.
-         """
-
-         if len(self.verified_proxies) > 0:
-             return self.verified_proxies
-
-         else:
-             logging.error("Cant find valid proxy.")
-             sys.exit(0)
-
-
- def main_test_proxy(url_test):
-
-     path_file_proxt_list = "list_proxy.txt"
-
-     if os_manager.check_file(path_file_proxt_list):
-
-         # Read file
-         with open(path_file_proxt_list, 'r') as file:
-             ip_addresses = file.readlines()
-
-         # Formatt ip
-         ip_addresses = [ip.strip() for ip in ip_addresses]
-         formatted_ips = [f"http://{ip}" for ip in ip_addresses]
-
-         # Get list of proxy from config.json
-         proxy_list = formatted_ips
-
-         # Verify proxy
-         manager = ProxyManager(proxy_list, url_test)
-         manager.verify_proxies()
-
-         # Write valid ip in txt file
-         with open(path_file_proxt_list, 'w') as file:
-             for ip in ip_addresses:
-                 file.write(f"{ip}\n")
-
-         # Return valid proxy
-         return manager.get_verified_proxies()
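
The core pattern in the removed proxyes.py is easy to isolate: probe every proxy in parallel with a cheap GET and keep only the responders. Here is a minimal standalone sketch of that pattern, not the package's API; the proxy list and test URL are placeholders, and the proxies= argument matches the httpx versions this code targeted.

# Minimal sketch of concurrent proxy validation (placeholder proxies and URL).
from concurrent.futures import ThreadPoolExecutor

import httpx

def check_proxy(proxy: str, url: str, timeout: float = 5.0):
    """Return the proxy if a request routed through it succeeds, else None."""
    try:
        mounts = {"http://": proxy, "https://": proxy}
        with httpx.Client(proxies=mounts, verify=False, timeout=timeout) as client:
            if client.get(url).status_code == 200:
                return proxy
    except Exception:
        pass
    return None

def verify_proxies(proxies: list, url: str) -> list:
    # Probe all candidates in parallel; executor.map preserves input order,
    # and the comprehension drops the failures (None results).
    with ThreadPoolExecutor(max_workers=8) as executor:
        results = executor.map(lambda p: check_proxy(p, url), proxies)
    return [p for p in results if p]

working = verify_proxies(["http://127.0.0.1:8080"], "https://example.com")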
--- StreamingCommunity/Lib/Downloader/HLS/segments.py
+++ /dev/null
@@ -1,564 +0,0 @@
- # 18.04.24
-
- import os
- import sys
- import time
- import queue
- import signal
- import logging
- import binascii
- import threading
-
- from queue import PriorityQueue
- from urllib.parse import urljoin, urlparse
- from concurrent.futures import ThreadPoolExecutor, as_completed
-
-
- # External libraries
- import httpx
- from tqdm import tqdm
-
-
- # Internal utilities
- from StreamingCommunity.Util.console import console
- from StreamingCommunity.Util.headers import get_headers, random_headers
- from StreamingCommunity.Util.color import Colors
- from StreamingCommunity.Util._jsonConfig import config_manager
- from StreamingCommunity.Util.os import os_manager
- from StreamingCommunity.Util.call_stack import get_call_stack
-
-
- # Logic class
- from ...M3U8 import (
-     M3U8_Decryption,
-     M3U8_Ts_Estimator,
-     M3U8_Parser,
-     M3U8_UrlFix
- )
- from ...FFmpeg.util import print_duration_table, format_duration
- from .proxyes import main_test_proxy
-
- # Config
- TQDM_DELAY_WORKER = config_manager.get_float('M3U8_DOWNLOAD', 'tqdm_delay')
- TQDM_USE_LARGE_BAR = config_manager.get_int('M3U8_DOWNLOAD', 'tqdm_use_large_bar')
-
- REQUEST_MAX_RETRY = config_manager.get_int('REQUESTS', 'max_retry')
- REQUEST_VERIFY = False
-
- THERE_IS_PROXY_LIST = os_manager.check_file("list_proxy.txt")
- PROXY_START_MIN = config_manager.get_float('REQUESTS', 'proxy_start_min')
- PROXY_START_MAX = config_manager.get_float('REQUESTS', 'proxy_start_max')
-
- DEFAULT_VIDEO_WORKERS = config_manager.get_int('M3U8_DOWNLOAD', 'default_video_workser')
- DEFAULT_AUDIO_WORKERS = config_manager.get_int('M3U8_DOWNLOAD', 'default_audio_workser')
-
-
-
- # Variable
- max_timeout = config_manager.get_int("REQUESTS", "timeout")
-
-
-
- class M3U8_Segments:
-     def __init__(self, url: str, tmp_folder: str, is_index_url: bool = True):
-         """
-         Initializes the M3U8_Segments object.
-
-         Parameters:
-             - url (str): The URL of the M3U8 playlist.
-             - tmp_folder (str): The temporary folder to store downloaded segments.
-             - is_index_url (bool): Flag indicating if `m3u8_index` is a URL (default True).
-         """
-         self.url = url
-         self.tmp_folder = tmp_folder
-         self.is_index_url = is_index_url
-         self.expected_real_time = None
-         self.max_timeout = max_timeout
-
-         self.tmp_file_path = os.path.join(self.tmp_folder, "0.ts")
-         os.makedirs(self.tmp_folder, exist_ok=True)
-
-         # Util class
-         self.decryption: M3U8_Decryption = None
-         self.class_ts_estimator = M3U8_Ts_Estimator(0)
-         self.class_url_fixer = M3U8_UrlFix(url)
-
-         # Sync
-         self.queue = PriorityQueue()
-         self.stop_event = threading.Event()
-         self.downloaded_segments = set()
-         self.base_timeout = 1.0
-         self.current_timeout = 5.0
-
-         # Stopping
-         self.interrupt_flag = threading.Event()
-         self.download_interrupted = False
-
-         # OTHER INFO
-         self.info_maxRetry = 0
-         self.info_nRetry = 0
-         self.info_nFailed = 0
-
-     def __get_key__(self, m3u8_parser: M3U8_Parser) -> bytes:
-         """
-         Retrieves the encryption key from the M3U8 playlist.
-
-         Parameters:
-             - m3u8_parser (M3U8_Parser): The parser object containing M3U8 playlist information.
-
-         Returns:
-             bytes: The encryption key in bytes.
-         """
-
-         # Construct the full URL of the key
-         key_uri = urljoin(self.url, m3u8_parser.keys.get('uri'))
-         parsed_url = urlparse(key_uri)
-         self.key_base_url = f"{parsed_url.scheme}://{parsed_url.netloc}/"
-         logging.info(f"Uri key: {key_uri}")
-
-         # Make request to get porxy
-         try:
-             response = httpx.get(
-                 url=key_uri,
-                 headers={'User-Agent': get_headers()},
-                 timeout=max_timeout
-             )
-             response.raise_for_status()
-
-         except Exception as e:
-             raise Exception(f"Failed to fetch key from {key_uri}: {e}")
-
-         # Convert the content of the response to hexadecimal and then to bytes
-         hex_content = binascii.hexlify(response.content).decode('utf-8')
-         byte_content = bytes.fromhex(hex_content)
-
-         #console.print(f"[cyan]Find key: [red]{hex_content}")
-         return byte_content
-
-     def parse_data(self, m3u8_content: str) -> None:
-         """
-         Parses the M3U8 content to extract segment information.
-
-         Parameters:
-             - m3u8_content (str): The content of the M3U8 file.
-         """
-         m3u8_parser = M3U8_Parser()
-         m3u8_parser.parse_data(uri=self.url, raw_content=m3u8_content)
-
-         self.expected_real_time = m3u8_parser.get_duration(return_string=False)
-         self.expected_real_time_s = m3u8_parser.duration
-
-         # Check if there is an encryption key in the playlis
-         if m3u8_parser.keys is not None:
-             try:
-
-                 # Extract byte from the key
-                 key = self.__get_key__(m3u8_parser)
-
-             except Exception as e:
-                 raise Exception(f"Failed to retrieve encryption key {e}.")
-
-             iv = m3u8_parser.keys.get('iv')
-             method = m3u8_parser.keys.get('method')
-
-             # Create a decryption object with the key and set the method
-             self.decryption = M3U8_Decryption(key, iv, method)
-
-         # Store the segment information parsed from the playlist
-         self.segments = m3u8_parser.segments
-
-         # Fix URL if it is incomplete (missing 'http')
-         for i in range(len(self.segments)):
-             segment_url = self.segments[i]
-
-             if "http" not in segment_url:
-                 self.segments[i] = self.class_url_fixer.generate_full_url(segment_url)
-                 logging.info(f"Generated new URL: {self.segments[i]}, from: {segment_url}")
-
-         # Update segments for estimator
-         self.class_ts_estimator.total_segments = len(self.segments)
-         logging.info(f"Segmnets to download: [{len(self.segments)}]")
-
-         # Proxy
-         if THERE_IS_PROXY_LIST:
-             console.log("[red]Start validation proxy.")
-             self.valid_proxy = main_test_proxy(self.segments[0])
-             console.log(f"[cyan]N. Valid ip: [red]{len(self.valid_proxy)}")
-
-             if len(self.valid_proxy) == 0:
-                 sys.exit(0)
-
-     def get_info(self) -> None:
-         """
-         Makes a request to the index M3U8 file to get information about segments.
-         """
-         if self.is_index_url:
-
-             try:
-
-                 # Send a GET request to retrieve the index M3U8 file
-                 response = httpx.get(
-                     self.url,
-                     headers={'User-Agent': get_headers()},
-                     timeout=max_timeout,
-                     follow_redirects=True
-                 )
-                 response.raise_for_status()
-
-                 # Save the M3U8 file to the temporary folder
-                 path_m3u8_file = os.path.join(self.tmp_folder, "playlist.m3u8")
-                 open(path_m3u8_file, "w+").write(response.text)
-
-                 # Parse the text from the M3U8 index file
-                 self.parse_data(response.text)
-
-             except Exception as e:
-                 print(f"Error during M3U8 index request: {e}")
-
-         else:
-             # Parser data of content of index pass in input to class
-             self.parse_data(self.url)
-
-     def setup_interrupt_handler(self):
-         """
-         Set up a signal handler for graceful interruption.
-         """
-         def interrupt_handler(signum, frame):
-             if not self.interrupt_flag.is_set():
-                 console.log("\n[red] Stopping download gracefully...")
-                 self.interrupt_flag.set()
-                 self.download_interrupted = True
-                 self.stop_event.set()
-
-         if threading.current_thread() is threading.main_thread():
-             signal.signal(signal.SIGINT, interrupt_handler)
-         else:
-             print("Signal handler must be set in the main thread")
-
-     def make_requests_stream(self, ts_url: str, index: int, progress_bar: tqdm, backoff_factor: float = 1.5) -> None:
-         """
-         Downloads a TS segment and adds it to the segment queue with retry logic.
-
-         Parameters:
-             - ts_url (str): The URL of the TS segment.
-             - index (int): The index of the segment.
-             - progress_bar (tqdm): Progress counter for tracking download progress.
-             - retries (int): The number of times to retry on failure (default is 3).
-             - backoff_factor (float): The backoff factor for exponential backoff (default is 1.5 seconds).
-         """
-         for attempt in range(REQUEST_MAX_RETRY):
-             if self.interrupt_flag.is_set():
-                 return
-
-             try:
-                 start_time = time.time()
-
-                 # Make request to get content
-                 if THERE_IS_PROXY_LIST:
-
-                     # Get proxy from list
-                     proxy = self.valid_proxy[index % len(self.valid_proxy)]
-                     logging.info(f"Use proxy: {proxy}")
-
-                     with httpx.Client(proxies=proxy, verify=REQUEST_VERIFY) as client:
-                         if 'key_base_url' in self.__dict__:
-                             response = client.get(
-                                 url=ts_url,
-                                 headers=random_headers(self.key_base_url),
-                                 timeout=max_timeout,
-                                 follow_redirects=True
-                             )
-
-                         else:
-                             response = client.get(
-                                 url=ts_url,
-                                 headers={'User-Agent': get_headers()},
-                                 timeout=max_timeout,
-                                 follow_redirects=True
-                             )
-
-                 else:
-                     with httpx.Client(verify=REQUEST_VERIFY) as client_2:
-                         if 'key_base_url' in self.__dict__:
-                             response = client_2.get(
-                                 url=ts_url,
-                                 headers=random_headers(self.key_base_url),
-                                 timeout=max_timeout,
-                                 follow_redirects=True
-                             )
-
-                         else:
-                             response = client_2.get(
-                                 url=ts_url,
-                                 headers={'User-Agent': get_headers()},
-                                 timeout=max_timeout,
-                                 follow_redirects=True
-                             )
-
-                 # Validate response and content
-                 response.raise_for_status()
-                 segment_content = response.content
-                 content_size = len(segment_content)
-                 duration = time.time() - start_time
-
-                 # Decrypt if needed and verify decrypted content
-                 if self.decryption is not None:
-                     try:
-                         segment_content = self.decryption.decrypt(segment_content)
-
-                     except Exception as e:
-                         logging.error(f"Decryption failed for segment {index}: {str(e)}")
-                         raise
-
-                 # Update progress and queue
-                 self.class_ts_estimator.update_progress_bar(content_size, duration, progress_bar)
-
-                 # Add the segment to the queue
-                 self.queue.put((index, segment_content))
-
-                 # Track successfully downloaded segments
-                 self.downloaded_segments.add(index)
-                 progress_bar.update(1)
-
-                 # Break out of the loop on success
-                 return
-
-             except Exception as e:
-                 logging.info(f"Attempt {attempt + 1} failed for segment {index} - '{ts_url}': {e}")
-
-                 # Update stat variable class
-                 if attempt > self.info_maxRetry:
-                     self.info_maxRetry = ( attempt + 1 )
-                 self.info_nRetry += 1
-
-                 if attempt + 1 == REQUEST_MAX_RETRY:
-                     console.log(f"[red]Final retry failed for segment: {index}")
-                     self.queue.put((index, None)) # Marker for failed segment
-                     progress_bar.update(1)
-                     self.info_nFailed += 1
-
-                     #break
-
-                 sleep_time = backoff_factor * (2 ** attempt)
-                 logging.info(f"Retrying segment {index} in {sleep_time} seconds...")
-                 time.sleep(sleep_time)
-
-     def write_segments_to_file(self):
-         """
-         Writes segments to file with additional verification.
-         """
-         buffer = {}
-         expected_index = 0
-         segments_written = set()
-
-         with open(self.tmp_file_path, 'wb') as f:
-             while not self.stop_event.is_set() or not self.queue.empty():
-                 if self.interrupt_flag.is_set():
-                     break
-
-                 try:
-                     index, segment_content = self.queue.get(timeout=self.current_timeout)
-
-                     # Successful queue retrieval: reduce timeout
-                     self.current_timeout = max(self.base_timeout, self.current_timeout / 2)
-
-                     # Handle failed segments
-                     if segment_content is None:
-                         if index == expected_index:
-                             expected_index += 1
-                         continue
-
-                     # Write segment if it's the next expected one
-                     if index == expected_index:
-                         f.write(segment_content)
-                         segments_written.add(index)
-                         f.flush()
-                         expected_index += 1
-
-                         # Write any buffered segments that are now in order
-                         while expected_index in buffer:
-                             next_segment = buffer.pop(expected_index)
-
-                             if next_segment is not None:
-                                 f.write(next_segment)
-                                 segments_written.add(expected_index)
-                                 f.flush()
-
-                             expected_index += 1
-
-                     else:
-                         buffer[index] = segment_content
-
-                 except queue.Empty:
-                     self.current_timeout = min(self.max_timeout, self.current_timeout * 1.25)
-
-                     if self.stop_event.is_set():
-                         break
-
-                 except Exception as e:
-                     logging.error(f"Error writing segment {index}: {str(e)}")
-
-     def download_streams(self, description: str, type: str):
-         """
-         Downloads all TS segments in parallel and writes them to a file.
-
-         Parameters:
-             - description: Description to insert on tqdm bar
-             - type (str): Type of download: 'video' or 'audio'
-         """
-         self.setup_interrupt_handler()
-
-         # Get config site from prev stack
-         frames = get_call_stack()
-         logging.info(f"Extract info from: {frames}")
-         config_site = str(frames[-4]['folder_base'])
-         logging.info(f"Use frame: {frames[-1]}")
-
-         # Workers to use for downloading
-         TQDM_MAX_WORKER = 0
-
-         # Select audio workers from folder of frames stack prev call.
-         try:
-             VIDEO_WORKERS = int(config_manager.get_dict('SITE', config_site)['video_workers'])
-         except:
-             #VIDEO_WORKERS = os.cpu_count()
-             VIDEO_WORKERS = DEFAULT_VIDEO_WORKERS
-
-         try:
-             AUDIO_WORKERS = int(config_manager.get_dict('SITE', config_site)['audio_workers'])
-         except:
-             #AUDIO_WORKERS = os.cpu_count()
-             AUDIO_WORKERS = DEFAULT_AUDIO_WORKERS
-
-         # Differnt workers for audio and video
-         if "video" in str(type):
-             TQDM_MAX_WORKER = VIDEO_WORKERS
-
-         if "audio" in str(type):
-             TQDM_MAX_WORKER = AUDIO_WORKERS
-
-         #console.print(f"[cyan]Video workers[white]: [green]{VIDEO_WORKERS} [white]| [cyan]Audio workers[white]: [green]{AUDIO_WORKERS}")
-
-         # Custom bar for mobile and pc
-         if TQDM_USE_LARGE_BAR:
-             bar_format = (
-                 f"{Colors.YELLOW}[HLS] {Colors.WHITE}({Colors.CYAN}{description}{Colors.WHITE}): "
-                 f"{Colors.RED}{{percentage:.2f}}% "
-                 f"{Colors.MAGENTA}{{bar}} "
-                 f"{Colors.WHITE}[ {Colors.YELLOW}{{n_fmt}}{Colors.WHITE} / {Colors.RED}{{total_fmt}} {Colors.WHITE}] "
-                 f"{Colors.YELLOW}{{elapsed}} {Colors.WHITE}< {Colors.CYAN}{{remaining}}{{postfix}} {Colors.WHITE}]"
-             )
-         else:
-             bar_format = (
-                 f"{Colors.YELLOW}Proc{Colors.WHITE}: "
-                 f"{Colors.RED}{{percentage:.2f}}% "
-                 f"{Colors.WHITE}| "
-                 f"{Colors.CYAN}{{remaining}}{{postfix}} {Colors.WHITE}]"
-             )
-
-         # Create progress bar
-         progress_bar = tqdm(
-             total=len(self.segments),
-             unit='s',
-             ascii='░▒█',
-             bar_format=bar_format,
-             mininterval=0.05
-         )
-
-         try:
-
-             # Start writer thread
-             writer_thread = threading.Thread(target=self.write_segments_to_file)
-             writer_thread.daemon = True
-             writer_thread.start()
-
-             # Configure workers and delay
-             max_workers = len(self.valid_proxy) if THERE_IS_PROXY_LIST else TQDM_MAX_WORKER
-             delay = max(PROXY_START_MIN, min(PROXY_START_MAX, 1 / (len(self.valid_proxy) + 1))) if THERE_IS_PROXY_LIST else TQDM_DELAY_WORKER
-
-             # Download segments with completion verification
-             with ThreadPoolExecutor(max_workers=max_workers) as executor:
-                 futures = []
-                 for index, segment_url in enumerate(self.segments):
-                     # Check for interrupt before submitting each task
-                     if self.interrupt_flag.is_set():
-                         break
-
-                     time.sleep(delay)
-                     futures.append(executor.submit(self.make_requests_stream, segment_url, index, progress_bar))
-
-                 # Wait for futures with interrupt handling
-                 for future in as_completed(futures):
-                     if self.interrupt_flag.is_set():
-                         break
-                     try:
-                         future.result()
-                     except Exception as e:
-                         logging.error(f"Error in download thread: {str(e)}")
-
-                 # Interrupt handling for missing segments
-                 if not self.interrupt_flag.is_set():
-                     total_segments = len(self.segments)
-                     completed_segments = len(self.downloaded_segments)
-
-                     if completed_segments < total_segments:
-                         missing_segments = set(range(total_segments)) - self.downloaded_segments
-                         logging.warning(f"Missing segments: {sorted(missing_segments)}")
-
-                         # Retry missing segments with interrupt check
-                         for index in missing_segments:
-                             if self.interrupt_flag.is_set():
-                                 break
-
-                             try:
-                                 self.make_requests_stream(self.segments[index], index, progress_bar)
-
-                             except Exception as e:
-                                 logging.error(f"Failed to retry segment {index}: {str(e)}")
-
-         except Exception as e:
-             logging.error(f"Download failed: {str(e)}")
-             raise
-
-         finally:
-
-             # Clean up resources
-             self.stop_event.set()
-             writer_thread.join(timeout=30)
-             progress_bar.close()
-
-             # Check if download was interrupted
-             if self.download_interrupted:
-                 console.log("[red] Download was manually stopped.")
-
-         # Clean up
-         self.stop_event.set()
-         writer_thread.join(timeout=30)
-         progress_bar.close()
-
-         # Final verification
-         final_completion = (len(self.downloaded_segments) / total_segments) * 100
-         if final_completion < 99.9: # Less than 99.9% complete
-             missing = set(range(total_segments)) - self.downloaded_segments
-             raise Exception(f"Download incomplete ({final_completion:.1f}%). Missing segments: {sorted(missing)}")
-
-         # Verify output file
-         if not os.path.exists(self.tmp_file_path):
-             raise Exception("Output file missing")
-
-         file_size = os.path.getsize(self.tmp_file_path)
-         if file_size == 0:
-             raise Exception("Output file is empty")
-
-         # Display additional
-         if self.info_nRetry >= len(self.segments) * 0.3:
-
-             # Get expected time
-             ex_hours, ex_minutes, ex_seconds = format_duration(self.expected_real_time_s)
-             ex_formatted_duration = f"[yellow]{int(ex_hours)}[red]h [yellow]{int(ex_minutes)}[red]m [yellow]{int(ex_seconds)}[red]s"
-             console.print(f"[cyan]Max retry per URL[white]: [green]{self.info_maxRetry}[green] [white]| [cyan]Total retry done[white]: [green]{self.info_nRetry}[green] [white]| [cyan]Missing TS: [red]{self.info_nFailed} [white]| [cyan]Duration: {print_duration_table(self.tmp_file_path, None, True)} [white]| [cyan]Expected duation: {ex_formatted_duration} \n")
-
-             console.print("[yellow]⚠ Warning:[/yellow] Too many retries detected! Consider reducing the number of [cyan]workers[/cyan] in the [magenta]config.json[/magenta] file. This will impact [bold]performance[/bold]. \n")
-
-         # Info to return
-         return {'type': type, 'nFailed': self.info_nFailed}
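
Two ideas in the removed segments.py are worth pulling out. First, make_requests_stream retries with exponential backoff, sleeping backoff_factor * (2 ** attempt) seconds between attempts: 1.5 s, 3 s, 6 s, and so on with the default factor of 1.5. Second, write_segments_to_file reassembles segments that finish out of order: early arrivals are parked in a dict keyed by index, and the file advances whenever the next expected index becomes available. A condensed, self-contained sketch of that reordering logic follows (queueing, timeouts, and threading stripped out; names are illustrative).

# Illustrative reduction of the out-of-order reassembly in write_segments_to_file.
import io

def write_in_order(completed, output) -> None:
    """completed yields (index, payload-or-None) in arbitrary completion order;
    output receives the payloads strictly in index order."""
    buffer = {}          # early segments parked by index
    expected_index = 0   # next index the output is waiting for
    for index, content in completed:
        buffer[index] = content
        # Flush every segment now contiguous with what has been written.
        while expected_index in buffer:
            chunk = buffer.pop(expected_index)
            if chunk is not None:  # None marks a permanently failed segment
                output.write(chunk)
            expected_index += 1

out = io.BytesIO()
write_in_order([(1, b"B"), (0, b"A"), (2, None), (3, b"D")], out)
assert out.getvalue() == b"ABD"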